answer
stringlengths
17
10.2M
package com.extjs.selenium;

import com.sdl.selenium.web.WebLocator;
import org.apache.log4j.Logger;
import org.openqa.selenium.OutputType;
import org.openqa.selenium.TakesScreenshot;
import org.openqa.selenium.WebDriver;

import java.awt.*;
import java.awt.datatransfer.ClipboardOwner;
import java.awt.datatransfer.StringSelection;
import java.awt.datatransfer.Transferable;
import java.io.*;
import java.nio.charset.StandardCharsets;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;

/**
 * Miscellaneous helpers used by the Selenium/ExtJS test framework: waiting, clipboard,
 * screenshots, file comparison, zip extraction and selector/file-name sanitizing.
 * <p>
 * TODO move this class to new pack near WebLocator (so can be used with no extjs pack)
 */
public class Utils {
    private static final Logger logger = Logger.getLogger(Utils.class);

    /**
     * Sleeps the current thread for the given time; zero or negative values are a no-op.
     *
     * @param milliseconds time to sleep, in milliseconds
     */
    public static void sleep(long milliseconds) {
        try {
            if (milliseconds > 0) {
                Thread.sleep(milliseconds);
            }
        } catch (InterruptedException e) {
            logger.error(e);
            // restore the interrupt flag so callers can still observe the interruption
            Thread.currentThread().interrupt();
        }
    }

    /**
     * Sleeps in 1-second steps up to {@code maxSeconds} unless {@code condition} is already true.
     * <p>
     * NOTE(review): {@code condition} is evaluated once at call time, not re-checked per
     * iteration, so a false condition always sleeps the full {@code maxSeconds}.
     *
     * @param maxSeconds maximum number of seconds to wait
     * @param condition  skip waiting entirely when true
     */
    public static void conditionalSleep(int maxSeconds, boolean condition) {
        int count = 0;
        while ((count < maxSeconds) && !condition) {
            sleep(1000);
            count++;
        }
    }

    /**
     * Polls the page source until the given marker text disappears, sleeping
     * {@code milliseconds} between checks, for at most 60 iterations.
     *
     * @param marker       text that indicates the page is still busy
     * @param milliseconds sleep interval between checks
     * @param caller       name used in the timeout warning message
     * @return true if the marker disappeared, false on timeout
     */
    private static boolean waitWhilePageContains(String marker, int milliseconds, String caller) {
        int i = 0;
        do {
            sleep(milliseconds);
            i++;
            if (i == 60) {
                logger.warn(caller + ". Waited for 60x" + milliseconds + " milliseconds.");
                return false;
            }
        } while (ExtJsComponent.driver.getPageSource().contains(marker));
        return true;
    }

    /**
     * Waits for the ExtJS "Please Wait..." mask to disappear.
     * TODO find better solution
     *
     * @param milliseconds sleep interval between checks; try to make more specific method
     *                     for grids for example (with parameter loadMsg)
     * @return true if the mask disappeared within 60 checks, false on timeout
     */
    public static boolean pleaseWait(int milliseconds) {
        return waitWhilePageContains("Please Wait...", milliseconds, "pleaseWait");
    }

    /**
     * Waits for the ExtJS "Loading..." mask to disappear.
     * TODO find better solution
     * Try to make more specific method for grids for example (with parameter loadMsg)
     *
     * @param milliseconds sleep interval between checks
     * @return true if the mask disappeared within 60 checks, false on timeout
     */
    public static boolean loading(int milliseconds) {
        return waitWhilePageContains("Loading...", milliseconds, "loading");
    }

    /**
     * Removes a leading " and " from an XPath condition fragment.
     *
     * @param selector XPath condition, possibly starting with " and "
     * @return the selector without the leading conjunction
     */
    public static String fixPathSelector(String selector) {
        if (selector.startsWith(" and ")) {
            selector = selector.substring(5);
        }
        return selector;
    }

    /**
     * Quotes {@code text} for safe embedding in an XPath expression.
     * Text containing both quote kinds is assembled with XPath {@code concat()};
     * otherwise the quote character not present in the text is used.
     *
     * @param text raw text to embed in an XPath string literal
     * @return an XPath expression evaluating to {@code text}
     */
    public static String getEscapeQuotesText(String text) {
        boolean hasDoubleQuote = text.contains("\"");
        boolean hasSingeQuote = text.contains("'");
        if (hasDoubleQuote && hasSingeQuote) {
            // mixed quotes: split on '"' and stitch the pieces back with concat(..., '"', ...)
            boolean quoteIsLast = false;
            if (text.lastIndexOf("\"") == text.length() - 1) {
                quoteIsLast = true;
            }
            String[] substrings = text.split("\"");
            StringBuilder quoted = new StringBuilder("concat(");
            for (int i = 0; i < substrings.length; i++) {
                quoted.append("\"").append(substrings[i]).append("\"");
                quoted.append(((i == substrings.length - 1) ? (quoteIsLast ? ", '\"')" : ")") : ", '\"', "));
            }
            return quoted.toString();
        } else if (hasDoubleQuote || !hasSingeQuote) {
            // either contains only double quotes, or no quotes at all -> single-quote it
            return String.format("'%s'", text);
        }
        // contains only single quotes -> double-quote it
        return String.format("\"%s\"", text);
    }

    /**
     * Normalizes a CSS selector: strips any "css=" prefix and a trailing "*".
     *
     * @param selector raw selector, possibly prefixed with "css="
     * @return the cleaned selector
     */
    public static String fixCssSelector(String selector) {
        selector = selector.replaceAll("css=", "");
        if (selector.endsWith("*")) {
            selector = selector.substring(0, selector.length() - 1);
        }
        return selector;
    }

    /**
     * Places the given text on the system clipboard.
     *
     * @param text text to copy
     */
    public static void copyToClipboard(final String text) {
        final StringSelection stringSelection = new StringSelection(text);
        Toolkit.getDefaultToolkit().getSystemClipboard().setContents(stringSelection, new ClipboardOwner() {
            @Override
            public void lostOwnership(final java.awt.datatransfer.Clipboard clipboard, final Transferable contents) {
                // do nothing
            }
        });
    }

    /**
     * Replaces characters that are illegal in file names with underscores.
     *
     * @param fileName candidate file name
     * @return a name safe to use on common filesystems
     */
    public static String getValidFileName(String fileName) {
        String regex = "\\\\|:|/|\\*|\\?|\\<|\\>|\\|"; // matches special characters: ,(comma) (space)&><@?\/'"
        fileName = fileName.replaceAll(regex, "_");
        fileName = fileName.replaceAll(";", "_"); // replace semicolon only after replacing characters like &amp, &gt etc
        return fileName;
    }

    /**
     * Captures a screenshot of the current browser window and saves it under
     * {@code screensPath} with a timestamped, sanitized file name.
     *
     * @param fileName    base name for the screenshot (timestamp and ".jpg" are added)
     * @param screensPath destination directory (created if missing)
     * @return the generated file name (not the full path)
     */
    public static String getScreenShot(String fileName, String screensPath) {
        WebDriver driver = WebLocator.getDriver();
        DateFormat dfm = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss");
        fileName = (dfm.format(new Date())) + "-" + fileName + ".jpg";
        fileName = getValidFileName(fileName);
        String filePath = screensPath + fileName;
        try {
            File screensDir = new File(screensPath);
            screensDir.mkdirs();
            logger.info("Screenshot: " + filePath);
            TakesScreenshot takesScreenshot = (TakesScreenshot) driver;
            File screenShot = takesScreenshot.getScreenshotAs(OutputType.FILE);
            screenShot.setWritable(true);
            File file = new File(filePath);
            // renameTo can fail silently (e.g. across filesystems); surface that in the log
            if (!screenShot.renameTo(file)) {
                logger.warn("Could not move screenshot to: " + filePath);
            }
        } catch (Exception e) {
            logger.error("Failed to capture screenshot: ", e);
        }
        return fileName;
    }

    /**
     * Null-safe equality: true when both are the same reference (including both null)
     * or {@code a.equals(b)}.
     */
    public static boolean eq(Object a, Object b) {
        return a == b || (a != null && a.equals(b));
    }

    /** Element-wise equality of two string arrays (null-safe, delegates to Arrays.equals). */
    public static boolean eqArray(String[] a, String[] b) {
        return Arrays.equals(a, b);
    }

    /**
     * Reads a UTF-8 text file and returns its content with line terminators removed
     * (lines are concatenated without separators, matching historical behavior).
     *
     * @param pathFile path of the file to read
     * @return the concatenated file content, or "" if the file could not be read
     */
    public static String getTextFromFile(String pathFile) {
        StringBuilder content = new StringBuilder();
        try (BufferedReader br = new BufferedReader(
                new InputStreamReader(new FileInputStream(pathFile), StandardCharsets.UTF_8))) {
            String tmp;
            while ((tmp = br.readLine()) != null) {
                content.append(tmp);
            }
            logger.debug(content.length());
        } catch (Exception e) {
            logger.debug("Error: " + e.getMessage());
        }
        return content.toString();
    }

    /**
     * Copies all bytes from {@code in} to {@code out} and closes both streams,
     * even when an I/O error occurs mid-copy.
     *
     * @param in  source stream (closed by this method)
     * @param out destination stream (closed by this method)
     * @throws IOException if reading or writing fails
     */
    public static final void copyInputStream(InputStream in, OutputStream out) throws IOException {
        try (InputStream input = in; OutputStream output = out) {
            byte[] buffer = new byte[1024];
            int len;
            while ((len = input.read(buffer)) >= 0) {
                output.write(buffer, 0, len);
            }
        }
    }

    /**
     * Extracts a zip archive.
     *
     * @param zipFilePath      path of the zip file
     * @param outputFolderPath if null or empty will extract in same folder as zipFilePath
     * @return true on success, false if any I/O error occurred
     */
    public static boolean unZip(String zipFilePath, String outputFolderPath) {
        byte[] buffer = new byte[1024];
        try {
            long startMs = System.currentTimeMillis();
            // create output directory if it doesn't exist
            if (outputFolderPath == null || "".equals(outputFolderPath)) {
                // unzip in same folder as zip file
                outputFolderPath = new File(zipFilePath).getParent();
            }
            File folder = new File(outputFolderPath);
            if (!folder.exists()) {
                folder.mkdirs();
            }
            try (ZipInputStream zis = new ZipInputStream(new FileInputStream(zipFilePath))) {
                ZipEntry ze = zis.getNextEntry();
                while (ze != null) {
                    String fileName = ze.getName();
                    File newFile = new File(outputFolderPath + File.separator + fileName);
                    // guard against "zip slip": entries like ../../x must not escape the output folder
                    if (!newFile.getCanonicalPath().startsWith(folder.getCanonicalPath() + File.separator)) {
                        logger.warn("Skipping zip entry outside output folder: " + fileName);
                        ze = zis.getNextEntry();
                        continue;
                    }
                    logger.info("file unzip : " + newFile.getAbsoluteFile());
                    if (ze.isDirectory()) {
                        // directory entries have no byte content; just materialize the folder
                        newFile.mkdirs();
                    } else {
                        // create all non existing folders,
                        // else you will hit FileNotFoundException for compressed folder
                        new File(newFile.getParent()).mkdirs();
                        try (FileOutputStream fos = new FileOutputStream(newFile)) {
                            int len;
                            while ((len = zis.read(buffer)) > 0) {
                                fos.write(buffer, 0, len);
                            }
                        }
                    }
                    ze = zis.getNextEntry();
                }
            }
            logger.info("Unzip Done: " + zipFilePath);
            long endMs = System.currentTimeMillis();
            logger.debug(String.format("unzip took %s ms", endMs - startMs));
        } catch (IOException ex) {
            logger.error("Failed to unzip: " + zipFilePath, ex);
            return false;
        }
        return true;
    }

    /**
     * @deprecated use {@link #unZip(String, String)}
     * @param filePath          path of the zip file
     * @param extractedFilePath destination folder
     * @return true on success
     */
    @Deprecated
    public static boolean unZip2(String filePath, String extractedFilePath) {
        return unZip(filePath, extractedFilePath);
    }

    /**
     * Deletes a single file.
     *
     * @param filePath path of the file to delete
     * @return true if the file was deleted
     */
    public static boolean deleteFile(String filePath) {
        File file = new File(filePath);
        return (file.delete());
    }

    /**
     * Compares two text files line by line.
     *
     * @param filePath1 first file
     * @param filePath2 second file
     * @return true when both files have identical lines; false on any difference
     *         or when either file cannot be read (previously an I/O error
     *         incorrectly reported the files as equal)
     */
    public static boolean compareFiles(String filePath1, String filePath2) {
        try (BufferedReader br1 = new BufferedReader(new FileReader(filePath1));
             BufferedReader br2 = new BufferedReader(new FileReader(filePath2))) {
            String strLine1, strLine2;
            do {
                strLine1 = br1.readLine();
                strLine2 = br2.readLine();
                if (strLine1 == null && strLine2 == null) {
                    return true;
                } else if (strLine1 == null || strLine2 == null || !strLine1.equals(strLine2)) {
                    logger.debug("The files are not equal." + strLine1 + " != " + strLine2);
                    return false;
                }
            } while (true);
        } catch (IOException ex) {
            logger.debug("Exception occured" + ex);
            // could not read one of the files, so equality cannot be asserted
            return false;
        }
    }

    /**
     * Compares two files by size only (no content comparison).
     *
     * @param filePath1 first file
     * @param filePath2 second file
     * @return true if both files have the same length in bytes
     */
    public static boolean compareFileSize(String filePath1, String filePath2) {
        File file1 = new File(filePath1);
        File file2 = new File(filePath2);
        logger.info("file1.length = " + file1.length());
        logger.info("file2.length = " + file2.length());
        return file1.length() == file2.length();
    }

    /**
     * Inserts text before the extension of a file name, keeping the original extension.
     *
     * @param fileName     file name or Windows-style path
     * @param insertedText text appended before the extension
     * @return the modified file name
     */
    public static String obtainFileName(String fileName, String insertedText) {
        return obtainFileName(fileName, insertedText, null);
    }

    /**
     * Inserts text before the extension of a file name, optionally replacing the extension.
     * <p>
     * NOTE(review): splits the path on backslashes, so only Windows-style paths lose
     * their directory prefix — a "/"-separated path is kept as-is.
     *
     * @param fileName     file name or Windows-style path
     * @param insertedText text appended before the extension
     * @param extension    replacement extension, or null to keep the original one
     * @return the modified file name (without any directory prefix)
     */
    public static String obtainFileName(String fileName, String insertedText, String extension) {
        String[] pathSplit = fileName.split("\\\\");
        String[] splitted = pathSplit[pathSplit.length - 1].split("\\.");
        StringBuilder result = new StringBuilder();
        for (int i = 0; i < (splitted.length - 1); i++) {
            result.append(splitted[i]);
        }
        result.append(insertedText).append(".")
                .append(extension == null ? splitted[splitted.length - 1] : extension);
        return result.toString();
    }

    /**
     * Returns just the file-name component of a path.
     *
     * @param filePath full path
     * @return the last path segment
     */
    public static String getFileNameFromPath(String filePath) {
        File file = new File(filePath);
        return file.getName();
    }

    /** Micro-benchmark for {@link #fixPathSelector(String)}. */
    public static void main(String[] args) {
        String string = " and contains(@name, 'name')";
        long startMs = System.currentTimeMillis();
        for (int i = 0; i < 1000; i++) {
            fixPathSelector(string);
        }
        long endMs = System.currentTimeMillis();
        logger.info(String.format("fixPathSelector took %s ms", endMs - startMs));
    }
}
package com.actelion.research.share.gui.editor; import com.actelion.research.chem.*; import com.actelion.research.chem.coords.CoordinateInventor; import com.actelion.research.chem.reaction.IReactionMapper; import com.actelion.research.chem.reaction.Reaction; import com.actelion.research.chem.reaction.ReactionEncoder; import com.actelion.research.share.gui.Arrow; import com.actelion.research.share.gui.editor.chem.AbstractExtendedDepictor; import com.actelion.research.share.gui.editor.chem.IDrawingObject; import com.actelion.research.share.gui.editor.geom.GeomFactory; import com.actelion.research.share.gui.editor.listeners.IChangeListener; import com.actelion.research.share.gui.editor.listeners.IValidationListener; import java.awt.*; import java.awt.geom.Point2D; import java.awt.geom.Rectangle2D; import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; import java.util.List; public abstract class Model { public interface AtomHighlightCallback { void onHighlight(int atom, boolean selected); } public interface BondHighlightCallback { void onHighlight(int atom, boolean selected); } // protected static GeomFactory builder = GeomFactory.getGeomFactory() ; protected GeomFactory geomFactory; public static final int KEY_IS_ATOM_LABEL = 1; public static final int KEY_IS_SUBSTITUENT = 2; public static final int KEY_IS_VALID_START = 3; public static final int KEY_IS_INVALID = 4; // private StereoMolecule selectedMolecule; public static final int MODE_MULTIPLE_FRAGMENTS = 1; public static final int MODE_MARKUSH_STRUCTURE = 2; public static final int MODE_REACTION = 4; public static final int MODE_DRAWING_OBJECTS = 8; public static final int MAX_CONNATOMS = 8; public static final int MIN_BOND_LENGTH_SQUARE = 100; private static final float FRAGMENT_MAX_CLICK_DISTANCE = 24.0f; private static final float FRAGMENT_GROUPING_DISTANCE = 1.2f; // in average bond lengths private static final float FRAGMENT_CLEANUP_DISTANCE = 1.5f; // in average bond lengths 
private static final float DEFAULT_ARROW_LENGTH = 0.08f; // relative to panel width public static final int FAKE_ATOM_NO = 100; /* protected static final int UPDATE_NONE = 0; protected static final int UPDATE_REDRAW = 1; // redraw molecules and drawing objects with their current coordinates protected static final int UPDATE_CHECK_VIEW = 2; // redraw with on-the-fly coordinate transformation only if current coords do not fit within view area // (new coords are generated for one draw() only; the original coords are not changed) protected static final int UPDATE_CHECK_COORDS = 3; // redraw with in-place coordinate transformation only if current coords do not fit within view area // (the original atom and object coords are replaced by the new ones) protected static final int UPDATE_SCALE_COORDS = 4; // redraw with in-place coordinate transformation; molecules and objects are scaled to fill // the view unless the maximum average bond length reaches the optimum. protected static final int UPDATE_SCALE_COORDS_USE_FRAGMENTS = 5; // as UPDATE_SCALE_COORDS but uses fragments from mFragment rather than creating a // fresh mFragment list from mMol. Used for setting a reaction or fragment list from outside. protected static final int UPDATE_INVENT_COORDS = 6; // redraw with in-place coordinate transformation; first all molecules' coordinates // are generated from scratch, then molecules and objects are scaled to fill // the view unless the maximum average bond length reaches the optimum. 
*/ public static final int MAX_UNDO_SIZE = 5; // private List<DrawItems> _undoList = new ArrayList<DrawItems>(); private List<StereoMolecule> _undoList = new ArrayList<StereoMolecule>(); private int selectedESRType = 0; private int selectedAtom = -1; private int selectedBond = -1; private int displayMode = 0; // private int reactantIndex = 0; private int mReactantCount; private int[] mFragmentNo; private boolean mAtomColorSupported; private Point2D.Double arrowPos = new Point.Double(0,0); private List<IValidationListener> validationListeners = new ArrayList<IValidationListener>(); private List<IChangeListener> changeListeners = new ArrayList<IChangeListener>(); private boolean needslayout = false; private int mMode = 0; private Dimension displaySize = new Dimension(0, 0); private StereoMolecule mMol = new StereoMolecule(); // molecule being modified directly by the drawing editor private StringBuilder mAtomKeyStrokeBuffer = new StringBuilder(); private List<IDrawingObject> mDrawingObjectList;//, mUndoDrawingObjectList; private StereoMolecule[] mFragment; // in case of MODE_MULTIPLE_FRAGMENTS contains valid stereo fragments private IDrawingObject selectedDrawingObject; private IReactionMapper mapper; private ImageProvider imageProvider; public Model(GeomFactory factory, int mode) { this.geomFactory = factory; mDrawingObjectList = new ArrayList<IDrawingObject>(); mMode = mode; if ((mMode & (MODE_REACTION | MODE_MARKUSH_STRUCTURE)) != 0) { mMode |= (MODE_MULTIPLE_FRAGMENTS); } if ((mMode & (MODE_DRAWING_OBJECTS | MODE_REACTION)) != 0) { } if (isReactionMode()) { arrowPos = new Point2D.Double(0,0); Arrow arrow = new Arrow(factory.getDrawConfig(), 0, 0, 0, 0); mDrawingObjectList.add(arrow); } } public GeomFactory getGeomFactory() { return geomFactory; } /* public void resizeReaction(Dimension os, Dimension ns) { Reaction rxn = getReaction(false); double offsetx = ns.getWidth() - os.getWidth(); double offsety = ns.getHeight() - os.getHeight(); double scale; if 
(Math.abs(offsetx) > Math.abs(offsety)) { scale = ns.getHeight() / os.getHeight(); } else { scale = ns.getWidth() / os.getWidth(); } ChemistryHelper.transformReaction(rxn, offsetx, offsety, scale); setValue(rxn); } */ public void cleanReaction(boolean cleanAll) { Reaction reaction = getReaction(); // // System.out.print("cleanreaction %s\n",reaction.getReactants()); Dimension dim = getDisplaySize(); double w = dim.getWidth(); double h = dim.getHeight(); double width = w / 5; if (w > 0 && h > 0) { IDrawingObject arrow = getDrawingObjects().get(0);// new Arrow(mx , my, dx,20); arrow.setRect((float) (0.5f * w), (float) (0.5f * h), (float) (0.5f * .16 * w), 20); arrowPos = new Point2D.Double((0.5f * w), (0.5f * h)); mMode = MODE_MULTIPLE_FRAGMENTS; if (cleanAll) cleanupCoordinates(true, true); ChemistryHelper.scaleInto(reaction, 0, 0, dim.getWidth(), dim.getHeight(), width); setValue(reaction); } } /* public void setSelectedMolecule(StereoMolecule selectedMolecule) { if (selectedMolecule == null) { // Exception e = new Exception("Passed NULL selected Molecule"); // e.printStackTrace(); } // this.selectedMolecule = selectedMolecule; } */ // public StereoMolecule getSelectedMolecule() // //return selectedMolecule; // return mMol; public StereoMolecule getSelectedCopy(StereoMolecule sourceMol) { int atomCount = 0; for (int atom = 0; atom < sourceMol.getAllAtoms(); atom++) { if (sourceMol.isSelectedAtom(atom)) { atomCount++; } } if (atomCount == 0) { return null; } int bondCount = 0; for (int bond = 0; bond < sourceMol.getAllBonds(); bond++) { if (sourceMol.isSelectedBond(bond)) { bondCount++; } } boolean[] includeAtom = new boolean[sourceMol.getAllAtoms()]; for (int atom = 0; atom < sourceMol.getAllAtoms(); atom++) { includeAtom[atom] = sourceMol.isSelectedAtom(atom); } StereoMolecule destMol = new StereoMolecule(atomCount, bondCount); sourceMol.copyMoleculeByAtoms(destMol, includeAtom, false, null); return destMol; } public void scale(float dx, float dy) { 
mMol.scaleCoords(Math.min(dx, dy)); } public Reaction getSelectedReaction() { Reaction rxn = new Reaction(); for (int i = 0; i < mFragment.length; i++) { StereoMolecule selectedMol = getSelectedCopy(mFragment[i]); if (selectedMol != null) { if (i < mReactantCount) { rxn.addReactant(selectedMol); } else { rxn.addProduct(selectedMol); } } } return rxn; } private int findFragment(float x, float y) { int fragment = -1; double minDistance = Float.MAX_VALUE; for (int atom = 0; atom < mMol.getAllAtoms(); atom++) { double dx = x - mMol.getAtomX(atom); double dy = y - mMol.getAtomY(atom); double distance = Math.sqrt(dx * dx + dy * dy); if (distance < FRAGMENT_MAX_CLICK_DISTANCE && minDistance > distance) { minDistance = distance; fragment = mFragmentNo[atom]; } } return fragment; } public StereoMolecule[] getFragments() { return mFragment; } public void setFragments(StereoMolecule[] fragment) { mMol.deleteMolecule(); mFragment = fragment; for (int i = 0; i < fragment.length; i++) { mMol.addMolecule(mFragment[i]); } pushUndo(); mFragmentNo = new int[mMol.getAllAtoms()]; for (int atom = 0, f = 0; f < mFragment.length; f++) { for (int j = 0; j < mFragment[f].getAllAtoms(); j++) { mFragmentNo[atom++] = f; } } mMode = MODE_MULTIPLE_FRAGMENTS; notifyChange(); // fireEvent(new DrawAreaEvent(this, DrawAreaEvent.TYPE_MOLECULE_CHANGED, false)); // update(UPDATE_SCALE_COORDS_USE_FRAGMENTS); } public Reaction getReaction() { if ((mMode & MODE_REACTION) == 0) { return null; } Reaction rxn = new Reaction(); syncFragments(); for (int i = 0; i < mFragment.length; i++) { if (i < mReactantCount) { rxn.addReactant(mFragment[i]); } else { rxn.addProduct(mFragment[i]); } } // for (int i = 0; i < mFragment.length; i++) { // for (int j = 0; j < mFragment[i].getAllAtoms(); j++) { // // System.out.print("getReaction Fragment[%s] atom[%s]=%s\n",i,j,mFragment[i].getAtomicNo(j)); return rxn; } public void setReaction(Reaction rxn) { Dimension displaySize = getDisplaySize(); // // 
System.out.print("SetReaction %s\n", displaySize); mMol = new StereoMolecule(); mFragment = new StereoMolecule[rxn.getMolecules()]; mReactantCount = rxn.getReactants(); boolean isFragment = false; for (int i = 0; i < rxn.getMolecules(); i++) { isFragment |= rxn.getMolecule(i).isFragment(); StereoMolecule molecule = rxn.getMolecule(i); Rectangle2D.Double boundingRect = ChemistryHelper.getBoundingRect(molecule); // if(i < mReactantCount) { // arrowPos = new Point2D.Double(boundingRect.getX()+boundingRect.getWidth(),boundingRect.getY()+boundingRect.getHeight()/2); mFragment[i] = molecule; mMol.addMolecule(mFragment[i]); } // // System.out.print("SetReaction %s mols = %d\n", rxn.getReactants(),rxn.getMolecules()); // ChemistryHelper.scaleInto(rxn,0,0,(double)displaySize.width,(double)displaySize.height); mMol.setFragment(isFragment); mFragmentNo = new int[mMol.getAllAtoms()]; for (int atom = 0, f = 0; f < mFragment.length; f++) { for (int j = 0; j < mFragment[f].getAllAtoms(); j++) { mFragmentNo[atom++] = f; } } try { mMol.validate(); } catch (Exception e) { System.out.println("WARNING:" + e); // e.printStackTrace(); } // fireEvent(new DrawAreaEvent(this, DrawAreaEvent.TYPE_MOLECULE_CHANGED, false)); mMode = MODE_MULTIPLE_FRAGMENTS | MODE_REACTION; // for (int i = 0; i < mFragment.length; i++) { // for (int j = 0; j < mFragment[i].getAllAtoms(); j++) { // // System.out.print("setReaction Fragment[%s] atom[%s]=%s\n",i,j,mFragment[i].getAtomicNo(j)); notifyChange(); // update(UPDATE_SCALE_COORDS_USE_FRAGMENTS); } public MarkushStructure getMarkushStructure() { if ((mMode & MODE_MARKUSH_STRUCTURE) == 0) { return null; } MarkushStructure markush = new MarkushStructure(); for (int i = 0; i < mFragment.length; i++) { if (i < mReactantCount) { markush.addCore(mFragment[i]); } else { markush.addRGroup(mFragment[i]); } } return markush; } public void setMarkushStructure(MarkushStructure markush) { mMol.deleteMolecule(); mFragment = new StereoMolecule[markush.getCoreCount() + 
markush.getRGroupCount()]; mReactantCount = markush.getCoreCount(); boolean isFragment = false; for (int i = 0; i < markush.getCoreCount() + markush.getRGroupCount(); i++) { mFragment[i] = (i < markush.getCoreCount()) ? markush.getCoreStructure(i) : markush.getRGroup(i - markush.getCoreCount()); isFragment |= mFragment[i].isFragment(); mMol.addMolecule(mFragment[i]); } mMol.setFragment(isFragment); pushUndo(); mFragmentNo = new int[mMol.getAllAtoms()]; for (int atom = 0, f = 0; f < mFragment.length; f++) { for (int j = 0; j < mFragment[f].getAllAtoms(); j++) { mFragmentNo[atom++] = f; } } // fireEvent(new DrawAreaEvent(this, DrawAreaEvent.TYPE_MOLECULE_CHANGED, false)); mMode = MODE_MULTIPLE_FRAGMENTS | MODE_MARKUSH_STRUCTURE; notifyChange(); } public void setDisplayMode(int dMode) { displayMode = dMode; notifyChange(); } public int getMode() { return mMode; } public StereoMolecule getMolecule() { return mMol; } public boolean isAtomColorSupported() { return mAtomColorSupported; } public void setAtomColorSupported(boolean acs) { mAtomColorSupported = acs; } protected void cleanupCoordinates(boolean multifragment, boolean invent) { int selectedAtomCount = 0; for (int atom = 0; atom < mMol.getAllAtoms(); atom++) { if (mMol.isSelectedAtom(atom)) { selectedAtomCount++; } } boolean selectedOnly = (selectedAtomCount != 0 && selectedAtomCount != mMol.getAllAtoms()); if (!multifragment) { AbstractDepictor depictor = createDepictor(getMolecule()); cleanupMoleculeCoordinates(depictor, invent, selectedOnly); } else { AbstractExtendedDepictor depictor = createExtendedDepictor(); cleanupMultiFragmentCoordinates(depictor, selectedOnly, invent); } if (selectedOnly) mMol.removeAtomMarkers(); } private void cleanupMoleculeCoordinates(AbstractDepictor depictor, boolean invent, boolean selectedOnly) { //if (mUpdateMode == UPDATE_INVENT_COORDS) if (invent) { if (selectedOnly) { for (int atom = 0; atom < mMol.getAllAtoms(); atom++) { mMol.setAtomMarker(atom, 
!mMol.isSelectedAtom(atom)); } } new CoordinateInventor(selectedOnly ? CoordinateInventor.MODE_KEEP_MARKED_ATOM_COORDS : 0).invent(mMol); // mMol.setStereoBondsFromParity(); not needed anymore } DepictorTransformation dt = depictor.simpleValidateView(new Rectangle2D.Double(0, 0, this.getWidth(), this.getHeight()), AbstractDepictor.cModeInflateToMaxAVBL); if (dt != null) dt.applyTo(mMol); } private float getHeight() { return (float) displaySize.getHeight(); } private float getWidth() { return (float) displaySize.getWidth(); } /* private void cleanupMultiFragmentCoordinatesEx(AbstractExtendedDepictor depictor, boolean selectedOnly, boolean invent) { //if (selectedOnly && mUpdateMode == UPDATE_INVENT_COORDS) if (invent) { int[] fragmentAtom = new int[mFragment.length]; for (int atom = 0; atom < mMol.getAllAtoms(); atom++) { int fragment = mFragmentNo[atom]; mFragment[fragment].setAtomMarker(fragmentAtom[fragment], !mMol.isSelectedAtom(atom)); fragmentAtom[fragment]++; } } java.awt.geom.Rectangle2D.Double[] boundingRect = new java.awt.geom.Rectangle2D.Double[mFragment.length]; // float fragmentWidth = 0.0f; for (int fragment = 0; fragment < mFragment.length; fragment++) { //if (mUpdateMode == UPDATE_INVENT_COORDS) if (invent) { new CoordinateInventor(selectedOnly ? CoordinateInventor.MODE_KEEP_MARKED_ATOM_COORDS : 0).invent(mFragment[fragment]); mFragment[fragment].setStereoBondsFromParity(); } AbstractDepictor d = createDepictor(mFragment[fragment]);// new Depictor(mFragment[fragment]); // depictor.updateCoords(null, null, AbstractDepictor.cModeInflateToMaxAVBL); depictor.updateCoords(null, null, AbstractDepictor.cModeInflateToMaxAVBL); // boundingRect[fragment] = d.getBoundingRect(); // fragmentWidth += boundingRect[fragment].width; } double spacing = FRAGMENT_CLEANUP_DISTANCE * AbstractDepictor.cOptAvBondLen; double avbl = mMol.getAverageBondLength(); // float arrowWidth = ((mMode & MODE_REACTION) == 0) ? 
// 0f // : (mUpdateMode == UPDATE_SCALE_COORDS_USE_FRAGMENTS) ? // DEFAULT_ARROW_LENGTH * getWidth() // : ((IArrow) mDrawingObjectList.get(0)).getLength() * AbstractDepictor.cOptAvBondLen / avbl; double rawX = 0.5 * spacing; // for (int fragment = 0; fragment <= mFragment.length; fragment++) { // if ((mMode & MODE_REACTION) != 0 && fragment == mReactantCount) { // ((IArrow) mDrawingObjectList.get(0)).setCoordinates( // rawX - spacing / 2, getHeight() / 2, rawX - spacing / 2 + arrowWidth, getHeight() / 2); // rawX += arrowWidth; // } // // if (fragment == mFragment.length) { // break; // } // // float dx = rawX - boundingRect[fragment].x; // float dy = 0.5f * (getHeight() - boundingRect[fragment].height) // - boundingRect[fragment].y; // mFragment[fragment].translateCoords(dx, dy); // // rawX += spacing + boundingRect[fragment].width; // } // depictor.updateCoords(null, new java.awt.geom.Rectangle2D.Double(0, 0, getWidth(), getHeight()), // AbstractDepictor.cModeInflateToMaxAVBL); int[] fragmentAtom = new int[mFragment.length]; for (int atom = 0; atom < mMol.getAllAtoms(); atom++) { int fragment = mFragmentNo[atom]; mMol.setAtomX(atom, mFragment[fragment].getAtomX(fragmentAtom[fragment])); mMol.setAtomY(atom, mFragment[fragment].getAtomY(fragmentAtom[fragment])); fragmentAtom[fragment]++; } mMol.setStereoBondsFromParity(); } */ private int maxUpdateMode() { return AbstractDepictor.cModeInflateToMaxAVBL /*+ HiDPIHelper.scale(AbstractDepictor.cOptAvBondLen)*/; } private void cleanupMultiFragmentCoordinates(AbstractExtendedDepictor depictor, boolean selectedOnly, boolean invent) { if (selectedOnly && invent) { int[] fragmentAtom = new int[mFragment.length]; for (int atom = 0; atom < mMol.getAllAtoms(); atom++) { int fragment = mFragmentNo[atom]; mFragment[fragment].setAtomMarker(fragmentAtom[fragment], !mMol.isSelectedAtom(atom)); fragmentAtom[fragment]++; } } Rectangle2D.Double[] boundingRect = new Rectangle2D.Double[mFragment.length]; // float fragmentWidth = 0.0f; 
for (int fragment = 0; fragment < mFragment.length; fragment++) { if (invent) { new CoordinateInventor(selectedOnly ? CoordinateInventor.MODE_KEEP_MARKED_ATOM_COORDS : 0).invent(mFragment[fragment]); // mFragment[fragment].setStereoBondsFromParity(); not needed anymore } AbstractDepictor d = createDepictor(mFragment[fragment]); d.updateCoords(null, null, AbstractDepictor.cModeInflateToMaxAVBL); boundingRect[fragment] = d.getBoundingRect(); } double spacing = FRAGMENT_CLEANUP_DISTANCE * AbstractDepictor.cOptAvBondLen; double avbl = mMol.getAverageBondLength(); double arrowWidth = isReaction() ? DEFAULT_ARROW_LENGTH * getWidth() : 0; // 0f : true ? // : (mMode == UPDATE_SCALE_COORDS_USE_FRAGMENTS) ? // DEFAULT_ARROW_LENGTH * getWidth() // : mDrawingObjectList.get(0).getBoundingRect().getWidth() * AbstractDepictor.cOptAvBondLen / avbl; double rawX = 0.5 * spacing; for (int fragment = 0; fragment <= mFragment.length; fragment++) { if (isReaction() && fragment == mReactantCount) { mDrawingObjectList.get(0).setRect( (float) (rawX - spacing / 20), getHeight() / 2, (float) (/*rawX - spacing / 2 + */arrowWidth), getHeight() / 2); rawX += arrowWidth; } if (fragment == mFragment.length) { break; } double dx = rawX - boundingRect[fragment].x; double dy = 0.5 * (getHeight() - boundingRect[fragment].height) - boundingRect[fragment].y; mFragment[fragment].translateCoords(dx, dy); rawX += spacing + boundingRect[fragment].width; } depictor.updateCoords(null, new Rectangle2D.Double(0, 0, getWidth(), getHeight()), maxUpdateMode()); int[] fragmentAtom = new int[mFragment.length]; for (int atom = 0; atom < mMol.getAllAtoms(); atom++) { int fragment = mFragmentNo[atom]; mMol.setAtomX(atom, mFragment[fragment].getAtomX(fragmentAtom[fragment])); mMol.setAtomY(atom, mFragment[fragment].getAtomY(fragmentAtom[fragment])); fragmentAtom[fragment]++; } mMol.setStereoBondsFromParity(); } public void analyzeFragmentMembership() { mMol.ensureHelperArrays(Molecule.cHelperParities); int[] fragmentNo 
= new int[mMol.getAllAtoms()]; int fragments = mMol.getFragmentNumbers(fragmentNo, false, true); fragments = joinCloseFragments(fragmentNo, fragments); sortFragmentsByPosition(fragmentNo, fragments); mFragmentNo = fragmentNo; mFragment = mMol.getFragments(fragmentNo, fragments); } private void syncFragments() { mMol.ensureHelperArrays(Molecule.cHelperParities); int[] fragmentNo = new int[mMol.getAllAtoms()]; int fragments = mMol.getFragmentNumbers(fragmentNo, false, true); mFragment = mMol.getFragments(fragmentNo, fragments); fragments = joinCloseFragments(fragmentNo, fragments); sortFragmentsByPosition(fragmentNo, fragments); mFragmentNo = fragmentNo; mFragment = mMol.getFragments(fragmentNo, fragments); for (StereoMolecule m : mFragment) { m.ensureHelperArrays(Molecule.cHelperParities); } } private int joinCloseFragments(int[] fragmentNo, int fragments) { if (fragments < 2) { return fragments; } boolean[][] mergeFragments = new boolean[fragments][]; for (int i = 1; i < fragments; i++) { mergeFragments[i] = new boolean[i]; } double avbl = mMol.getAverageBondLength(); for (int atom1 = 1; atom1 < mMol.getAllAtoms(); atom1++) { for (int atom2 = 0; atom2 < atom1; atom2++) { double dx = mMol.getAtomX(atom2) - mMol.getAtomX(atom1); double dy = mMol.getAtomY(atom2) - mMol.getAtomY(atom1); double distance = Math.sqrt(dx * dx + dy * dy); if (distance < FRAGMENT_GROUPING_DISTANCE * avbl) { int fragment1 = fragmentNo[atom1]; int fragment2 = fragmentNo[atom2]; if (fragment1 != fragment2) { if (fragment1 > fragment2) { mergeFragments[fragment1][fragment2] = true; } else { mergeFragments[fragment2][fragment1] = true; } } } } } int[] newFragmentIndex = new int[fragments]; for (int fragment = 0; fragment < fragments; fragment++) { newFragmentIndex[fragment] = fragment; } int mergeCount = 0; for (int i = 1; i < fragments; i++) { for (int j = 0; j < i; j++) { if (mergeFragments[i][j]) { int index1 = newFragmentIndex[i]; int index2 = newFragmentIndex[j]; if (index1 != index2) { 
mergeCount++; int minIndex = Math.min(index1, index2); int maxIndex = Math.max(index1, index2); for (int k = 0; k < fragments; k++) { if (newFragmentIndex[k] == maxIndex) { newFragmentIndex[k] = minIndex; } else if (newFragmentIndex[k] > maxIndex) { newFragmentIndex[k] } } } } } } for (int atom = 0; atom < mMol.getAllAtoms(); atom++) { fragmentNo[atom] = newFragmentIndex[fragmentNo[atom]]; } return fragments - mergeCount; } private void sortFragmentsByPosition(int[] fragmentNo, int fragments) { int[][] fragmentDescriptor = new int[fragments][((mMode & (MODE_REACTION | MODE_MARKUSH_STRUCTURE)) != 0) ? 2 : 1]; for (int fragment = 0; fragment < fragments; fragment++) { fragmentDescriptor[fragment][0] = fragment; } Point[] fragmentCOG = calculateFragmentCenterOfGravity(fragmentNo, fragments); if (isReactionMode()) { mReactantCount = 0; // Arrow arrow = ((mMode & MODE_REACTION) != 0) ? (Arrow) mDrawingObjectList.get(0) : null; // // System.out.print("Arrow placement %s\n",arrow.getBoundingRect()); for (int fragment = 0; fragment < fragments; fragment++) { fragmentDescriptor[fragment][1] = isOnProductSide(fragmentCOG[fragment].x,fragmentCOG[fragment].y) // arrow.isOnProductSide(fragmentCOG[fragment].x,fragmentCOG[fragment].y) ? 1 : 0; if (fragmentDescriptor[fragment][1] == 0) { mReactantCount++; } } } else if ((mMode & MODE_MARKUSH_STRUCTURE) != 0) { mReactantCount = fragments; for (int atom = 0; atom < mMol.getAllAtoms(); atom++) { if (mMol.getAtomicNo(atom) == 0 && fragmentDescriptor[fragmentNo[atom]][1] == 0) { fragmentDescriptor[fragmentNo[atom]][1] = 1; mReactantCount } } } final Point[] cog = fragmentCOG; Arrays.sort(fragmentDescriptor, new Comparator<int[]>() { public int compare(int[] fragmentDescriptor1, int[] fragmentDescriptor2) { if ((mMode & (MODE_REACTION | MODE_MARKUSH_STRUCTURE)) != 0) { if (fragmentDescriptor1[1] != fragmentDescriptor2[1]) { return (fragmentDescriptor1[1] == 0) ? 
-1 : 1; } } return cog[fragmentDescriptor1[0]].x < cog[fragmentDescriptor2[0]].x ? -1 : 1; // return (cog[fragmentDescriptor1[0]].x // + cog[fragmentDescriptor1[0]].y // < cog[fragmentDescriptor2[0]].x // + cog[fragmentDescriptor2[0]].y) ? -1 : 1; } }); int[] newFragmentIndex = new int[fragments]; Point[] centerOfGravity = new Point[fragments]; for (int fragment = 0; fragment < fragments; fragment++) { int oldIndex = fragmentDescriptor[fragment][0]; newFragmentIndex[oldIndex] = fragment; centerOfGravity[fragment] = fragmentCOG[oldIndex]; } fragmentCOG = centerOfGravity; for (int atom1 = 0; atom1 < mMol.getAllAtoms(); atom1++) { fragmentNo[atom1] = newFragmentIndex[fragmentNo[atom1]]; } // // System.out.print("SortFragments Count %s\n",mReactantCount); } private boolean isReactionMode() { return (mMode & MODE_REACTION) != 0; } public boolean isOnProductSide(double x, double y) { // Arrow arrow = ((mMode & MODE_REACTION) != 0) ? (Arrow) mDrawingObjectList.get(0) : null; // if (arrow != null) { // // System.out.print("Arrow determines product side"); // return arrow.isOnProductSide((float) x, (float) y); if (isReactionMode()) { return x > arrowPos.getX(); } return x > getDisplaySize().getWidth() / 2; } private Point[] calculateFragmentCenterOfGravity(int[] fragmentNo, int fragments) { Point[] fragmentCOG = new Point[fragments]; int[] fragmentAtoms = new int[fragments]; for (int fragment = 0; fragment < fragments; fragment++) { fragmentCOG[fragment] = new Point(0, 0); } for (int atom = 0; atom < mMol.getAllAtoms(); atom++) { fragmentCOG[fragmentNo[atom]].x += mMol.getAtomX(atom); fragmentCOG[fragmentNo[atom]].y += mMol.getAtomY(atom); fragmentAtoms[fragmentNo[atom]]++; } for (int fragment = 0; fragment < fragments; fragment++) { fragmentCOG[fragment].x /= fragmentAtoms[fragment]; fragmentCOG[fragment].y /= fragmentAtoms[fragment]; } return fragmentCOG; } public void setMapper(IReactionMapper mapper) { this.mapper = mapper; } public void mapReaction(int atom, Point2D 
left, Point2D right) { StereoMolecule mol = getMolecule();// getSelectedMolecule(); if (mol != null && left != null && right != null) { int freeMapNo = getNextMapNo(); StereoMolecule source = getFragmentAt(left, false); StereoMolecule target = getFragmentAt(right, false); boolean b1 = isOnProductSide(left.getX(), left.getY()); boolean b2 = isOnProductSide(right.getX(), right.getY()); if (target != null && target != source && b1 ^ b2) { int dest = mol.findAtom((int) right.getX(), (int) right.getY()); if (dest != -1) { mol.setAtomMapNo(atom, freeMapNo, false); mol.setAtomMapNo(dest, freeMapNo, false); } if (mapper != null) tryAutoMapReaction(); } } } public int getNextMapNo() { int freeMapNo = 1; // for (StereoMolecule mol : drawElement.molecules) { StereoMolecule mol = mMol; for (int i = 0; i < mol.getAtoms(); i++) { freeMapNo = Math.max(mol.getAtomMapNo(i) + 1, freeMapNo); } } return freeMapNo; } public void popUndo() { if (_undoList.size() > 0) { setValue(_undoList.get(_undoList.size() - 1), false); _undoList.remove(_undoList.size() - 1); } } public void pushUndo() { // _undoList.add(new StereoMolecule(getMol())); _undoList.add(new StereoMolecule(mMol)); if (_undoList.size() > MAX_UNDO_SIZE) { _undoList.remove(0); } // mUndoDrawingObjectList = (mDrawingObjectList == null) ? 
// null : new ArrayList<IDrawingObject>(mDrawingObjectList); } public int getESRType() { return selectedESRType; } public void setESRType(int type) { selectedESRType = type; notifyChange(); } public void addValidationListener(IValidationListener l) { if (!validationListeners.contains(l)) { validationListeners.add(l); } } public void removeValidationListener(IValidationListener l) { if (validationListeners.contains(l)) { validationListeners.remove(l); } } public void addChangeListener(IChangeListener l) { if (!changeListeners.contains(l)) { changeListeners.add(l); } } public void removeChangeListener(IChangeListener l) { if (changeListeners.contains(l)) { changeListeners.remove(l); } } public void setDisplaySize(Dimension displaySize) { if (isReactionMode()) { if (displaySize.getWidth() != 0 && displaySize.getHeight() != 0) { if (displaySize.getWidth() != this.displaySize.getWidth() || displaySize.getHeight() != this.displaySize.getHeight()) { double dx = displaySize.getWidth()/this.displaySize.getWidth(); double dy = displaySize.getHeight()/this.displaySize.getHeight(); double scale = Math.min(dx,dy); scale(scale); } } } this.displaySize = displaySize; } private void scale(double scale) { if (!Double.isInfinite(scale)) { if (scale != 1 && scale > 0) { // // System.out.print("Scale %f\n",scale); AbstractDepictor d = createDepictor(mMol); DepictorTransformation dt = d.simpleValidateView(new Rectangle2D.Double(0, 0, this.getWidth(), this.getHeight()), AbstractDepictor.cModeInflateToMaxAVBL + (int)mMol.getAverageBondLength()); if (dt != null) dt.applyTo(mMol); } } } public Dimension getDisplaySize() { return displaySize; } public void deleteMolecule(StereoMolecule tm) { System.err.println("DeleteMolecule needs to be implemented????"); // drawElement.removeMolecule(tm); } public final void setValue(StereoMolecule value, boolean b) { needsLayout(b); mMol = value; notifyChange(); } public void setValue(Reaction rxn) { setReaction(rxn); } public void changed() { 
notifyChange(); } public void valueInvalidated() { for (IValidationListener l : validationListeners) { l.valueInvalidated(); } } private void notifyChange() { for (IChangeListener l : changeListeners) { l.onChange(); } } public StereoMolecule getMoleculeAt(java.awt.geom.Point2D pt, boolean includeBond) { StereoMolecule mol = mMol; { if (mol.findAtom(pt.getX(), pt.getY()) != -1) { return mol; } /* for (int atom = 0; atom < mol.getAllAtoms(); atom++) { java.awt.geom.Point2D ap = new Point2D.Double(mol.getAtomX(atom), mol.getAtomY(atom)); if (Math.abs(ap.distance(pt)) < 5) { // System.out.println("getMoleculeAt Atom\n"); return mol; } } */ if (includeBond) { for (int i = 0; i < mol.getAllBonds(); i++) { int source = mol.getBondAtom(0, i); int target = mol.getBondAtom(1, i); java.awt.geom.Line2D line = new java.awt.geom.Line2D.Double(mol.getAtomX(source), mol.getAtomY(source), mol.getAtomX(target), mol.getAtomY(target)); double dist = line.ptSegDist(pt.getX(), pt.getY()); if (dist < 5) { /* if (Math.abs(new Point2D.Double(mol.getAtomX(source), mol.getAtomY(source)).distance(pt)) < 5 && (Math.abs(new Point2D.Double(mol.getAtomX(target), mol.getAtomY(target)).distance(pt)) < 5)) { return mol; } */ return mol; } } } } return null; } private int getFragmentByAtom(int atom) { if (atom >= 0 && atom < getMolecule().getAllAtoms()) { int idx = mFragmentNo[atom]; if (idx >= 0 && idx < mFragmentNo.length) return idx; } return -1; } public void selectFragmentByAtom(int rootAtom) { int fragment = getFragmentByAtom(rootAtom); for (int i = 0; fragment != -1 && i < mMol.getAllAtoms(); i++) { if (mFragmentNo[i] == fragment) { mMol.setAtomSelection(i, true); } } } private boolean isPointOnAtomOrBond(StereoMolecule mol, Point2D pt, boolean includeBond) { for (int atom = 0; atom < mol.getAllAtoms(); atom++) { Point2D ap = new Point2D.Double(mol.getAtomX(atom), mol.getAtomY(atom)); if (Math.abs(ap.distance(pt)) < 5) { return true; } } if (includeBond) { for (int i = 0; i < 
mol.getAllBonds(); i++) { int source = mol.getBondAtom(0, i); int target = mol.getBondAtom(1, i); java.awt.geom.Line2D line = new java.awt.geom.Line2D.Double( mol.getAtomX(source), mol.getAtomY(source), mol.getAtomX(target), mol.getAtomY(target)); double dist = line.ptSegDist(pt.getX(), pt.getY()); if (dist < 5) { /* if (Math.abs(new Point2D.Double(mol.getAtomX(source), mol.getAtomY(source)).distance(pt)) < 5 && (Math.abs(new Point2D.Double(mol.getAtomX(target), mol.getAtomY(target)).distance(pt)) < 5)) { return true; } */ return true; } } } return false; } public StereoMolecule getFragmentAt(java.awt.geom.Point2D pt, boolean includeBond) { for (StereoMolecule mol : getFragments()) { if (isPointOnAtomOrBond(mol, pt, includeBond)) return mol; } return null; } public static int rowFromESRType(int type) { switch (type) { case Molecule.cESRTypeAbs: return 0; case Molecule.cESRTypeOr: return 1; case Molecule.cESRTypeAnd: return 2; } return 0; } public static int esrTypeFromRow(int row) { switch (row) { case 0: return Molecule.cESRTypeAbs; case 1: return Molecule.cESRTypeOr; case 2: return Molecule.cESRTypeAnd; } return Molecule.cESRTypeAbs; } public int getSelectedAtom() { return selectedAtom; } public void setSelectedAtom(int theAtom) { if (selectedAtom != theAtom) { if (atomHighlightCallback != null) { atomHighlightCallback.onHighlight(theAtom != -1 ? theAtom : selectedAtom, theAtom != -1); } } this.selectedAtom = theAtom; } public int getSelectedBond() { return selectedBond; } public void setSelectedBond(int theBond) { if (selectedBond != theBond) { if (bondHighlightCallback != null) { bondHighlightCallback.onHighlight(theBond != -1 ? 
theBond : selectedBond, theBond != -1); } } this.selectedBond = theBond; } public final void setMode(int mode) { this.mMode = mode; if ((mMode & (MODE_REACTION | MODE_MARKUSH_STRUCTURE)) != 0) { mMode |= (MODE_MULTIPLE_FRAGMENTS); } // if ((mMode & (MODE_DRAWING_OBJECTS | MODE_REACTION)) != 0) { // drawElement.drawingObjects = new ArrayList<DrawingObject>(); // drawElement.drawingObjects.add(new JFXReactionArrow()); } public List<IDrawingObject> getDrawingObjects() { return mDrawingObjectList; } public void addDrawingObject(IDrawingObject o) { if (!mDrawingObjectList.contains(o)) { pushUndo(); mDrawingObjectList.add(o); // drawElement.drawingObjects.add(o); } } public boolean isReaction() { return isReactionMode(); } public boolean isFragment() { boolean fragement = false; fragement = mMol.isFragment(); return fragement; } public void setFragment(boolean fragment) { mMol.setFragment(fragment); notifyChange(); } public void setNewMolecule() { StereoMolecule mol = new StereoMolecule(); mol.setFragment(isFragment()); setValue(mol, true); } public void needsLayout(boolean set) { needslayout = set; // if (set) // // System.out.print("NeedsLayout %s\n",set); } public boolean needsLayout() { return needslayout; } public int getDisplayMode() { return displayMode; } java.awt.geom.Point2D calculateCenter(StereoMolecule r) { float x = 0; float y = 0; int atoms = r.getAllAtoms(); for (int atom = 0; atom < atoms; atom++) { x += r.getAtomX(atom); y += r.getAtomY(atom); } return new Point2D.Double(x / atoms, y / atoms); } public String getIDCode() { if (!isReaction()) { StereoMolecule mol = getMolecule();//getSelectedMolecule(); if (mol != null && mMol.getAllAtoms() > 0) { Canonizer can = new Canonizer(mol); return (can.getIDCode() + " " + can.getEncodedCoordinates()); } } else { Reaction rxn = getReaction(); String idc = ReactionEncoder.encode(rxn, true, ReactionEncoder.INCLUDE_DEFAULT); // if (idc != null) // ReactionEncoder.decode(idc, ReactionEncoder.INCLUDE_DEFAULT, null); 
return idc; } return null; } public StringBuilder getKeyStrokeBuffer() { return mAtomKeyStrokeBuffer; } public int getAtomKeyStrokeValidity(String s) { if (Molecule.getAtomicNoFromLabel(s) != 0) return KEY_IS_ATOM_LABEL; if (NamedSubstituents.getSubstituentIDCode(s) != null) return KEY_IS_SUBSTITUENT; if (isValidAtomKeyStrokeStart(s)) return KEY_IS_VALID_START; return KEY_IS_INVALID; } /** * @param s * @return true if s is either a valid atom symbol or a valid substituent name */ private boolean isValidAtomKeyStroke(String s) { return Molecule.getAtomicNoFromLabel(s) != 0 || NamedSubstituents.getSubstituentIDCode(s) != null; } /** * @param s * @return true if adding one or more chars may still create a valid key stroke sequence */ private boolean isValidAtomKeyStrokeStart(String s) { if (s.length() < 3) for (int i = 1; i < Molecule.cAtomLabel.length; i++) { if (Molecule.cAtomLabel[i].startsWith(s)) return true; } return NamedSubstituents.isValidSubstituentNameStart(s); } public int getMarkushCount() { return 0; } public void tryAutoMapReaction() { SSSearcher sss = new MySSSearcher(); Reaction rxn = getReaction();//new Reaction(reaction); // Mark the manually mapped atoms, so we may re-assign them later for (int i = 0; i < rxn.getMolecules(); i++) { StereoMolecule mol = rxn.getMolecule(i); for (int a = 0; a < mol.getAtoms(); a++) { if (mol.getAtomMapNo(a) > 0) { mol.setAtomicNo(a, FAKE_ATOM_NO + mol.getAtomMapNo(a)); } } } rxn = mapper.mapReaction(rxn, sss); if (rxn != null) { int offset = 0; // Sync the display molecule with the reaction fragments { int[] fragmentAtom = new int[mFragment.length]; for (int atom = 0; atom < mMol.getAllAtoms(); atom++) { int fragment = mFragmentNo[atom]; if (mFragment[fragment].getAtomicNo(fragmentAtom[fragment]) > FAKE_ATOM_NO) { mMol.setAtomMapNo(atom, mFragment[fragment].getAtomicNo(fragmentAtom[fragment]) - FAKE_ATOM_NO, false); offset = Math.max(mMol.getAtomMapNo(atom), offset); } fragmentAtom[fragment]++; } } { int[] 
fragmentAtom = new int[mFragment.length]; for (int atom = 0; atom < mMol.getAllAtoms(); atom++) { int fragment = mFragmentNo[atom]; if (mFragment[fragment].getAtomMapNo(fragmentAtom[fragment]) > 0 && (mFragment[fragment].getAtomicNo(fragmentAtom[fragment]) <= FAKE_ATOM_NO)) { mMol.setAtomMapNo(atom, mFragment[fragment].getAtomMapNo(fragmentAtom[fragment]) + offset, true); } fragmentAtom[fragment]++; } } } syncFragments(); } public String getMolFile(boolean v3) { if (v3) return new MolfileV3Creator(mMol).getMolfile(); else return new MolfileCreator(mMol).getMolfile(); } public void setMolFile(String molFile) { pushUndo(); try { MolfileParser p = new MolfileParser(); StereoMolecule mol = new StereoMolecule(); p.parse(mol, molFile); setValue(mol, true); } catch (Exception e) { e.printStackTrace(); } } public String getSmiles() { return new IsomericSmilesCreator(mMol).getSmiles(); } public void setSmiles(String smiles) { try { SmilesParser p = new SmilesParser(); StereoMolecule mol = new StereoMolecule(); p.parse(mol, smiles); setValue(mol, true); } catch (Exception e) { e.printStackTrace(); } } public int getReactantCount() { // // System.out.print("getReactantCount Count %s\n",mReactantCount); return mReactantCount; } static class MySSSearcher extends SSSearcher { @Override public boolean areAtomsSimilar(int moleculeAtom, int fragmentAtom) { if (mMolecule.getAtomicNo(moleculeAtom) == mFragment.getAtomicNo(fragmentAtom)) if (mMolecule.isAromaticAtom(moleculeAtom) || mFragment.isAromaticAtom(fragmentAtom)) return true; return super.areAtomsSimilar(moleculeAtom, fragmentAtom); } @Override public boolean areBondsSimilar(int moleculeBond, int fragmentBond) { if (mMolecule.isAromaticBond(moleculeBond) || mMolecule.isDelocalizedBond(moleculeBond) || mFragment.isAromaticBond(fragmentBond) || mFragment.isDelocalizedBond(fragmentBond) ) return true; return super.areBondsSimilar(moleculeBond, fragmentBond); //return true; } } private Point2D calculateCenterOfGravity() { int 
atoms = mMol.getAllAtoms(); double sumx = 0; double sumy = 0; for (int atom = 0; atom < atoms; atom++) { sumx += mMol.getAtomX(atom); sumy += mMol.getAtomY(atom); } return atoms > 0 ? new Point2D.Double(sumx / atoms, sumy / atoms) : null; } public void flip(boolean horiz) { Point2D pt = calculateCenterOfGravity(); if (pt != null) { // center moveCoords((float) -pt.getX(), (float) -pt.getY()); if (horiz) { scaleCoords(-1, 1); } else { scaleCoords(1, -1); } moveCoords((float) pt.getX(), (float) pt.getY()); // invert stereo bonds for (int bond = 0; bond < mMol.getAllBonds(); bond++) { if (mMol.getBondType(bond) == Molecule.cBondTypeUp) mMol.setBondType(bond, Molecule.cBondTypeDown); else if (mMol.getBondType(bond) == Molecule.cBondTypeDown) mMol.setBondType(bond, Molecule.cBondTypeUp); } } } private void scaleCoords(float scalex, float scaley) { int atoms = mMol.getAllAtoms(); for (int atom = 0; atom < atoms; atom++) { mMol.setAtomX(atom, mMol.getAtomX(atom) * scalex); mMol.setAtomY(atom, mMol.getAtomY(atom) * scaley); } } private void moveCoords(float cx, float cy) { int atoms = mMol.getAllAtoms(); for (int atom = 0; atom < atoms; atom++) { mMol.setAtomX(atom, mMol.getAtomX(atom) + cx); mMol.setAtomY(atom, mMol.getAtomY(atom) + cy); } } private AtomHighlightCallback atomHighlightCallback = null; private BondHighlightCallback bondHighlightCallback = null; public void registerAtomHighlightCallback(AtomHighlightCallback cb) { atomHighlightCallback = cb; } public void registerBondHighlightCallback(BondHighlightCallback cb) { bondHighlightCallback = cb; } public void addMolecule(StereoMolecule mol,double x ,double y) { if (mol != null && mol.getAllAtoms() != 0) { if (mMol.getAllAtoms() == 0) { int avbl = 0; boolean isFragment = mMol.isFragment(); scaleIntoView(mol, avbl,0,0); mol.copyMolecule(mMol); mMol.setFragment(isFragment); notifyChange(); } else { int avbl = (int) mMol.getAverageBondLength(); scaleIntoView(mol, avbl,x,y); int originalAtoms = mMol.getAllAtoms(); 
boolean isFragment = mMol.isFragment(); mMol.addMolecule(mol); for (int atom = 0; atom < mMol.getAllAtoms(); atom++) { mMol.setAtomSelection(atom, atom >= originalAtoms); } mMol.setFragment(isFragment); notifyChange(); } } } private void scaleIntoView(StereoMolecule mol, int avbl,double cx,double cy) { AbstractDepictor d = createDepictor(mol); DepictorTransformation dt = d.simpleValidateView(new Rectangle2D.Double(0, 0, this.getWidth(), this.getHeight()), AbstractDepictor.cModeInflateToMaxAVBL + avbl); if (dt != null) { dt.move(cx,cy); // System.out.print("Transform %s %s\n",dt.getOffsetX(),cx); dt.applyTo(mol); } } public IDrawingObject getSelectedDrawingObject() { return selectedDrawingObject; } public void setSelectedDrawingObject(IDrawingObject sel) { if (selectedDrawingObject != sel) { setSelectedAtom(-1); setSelectedBond(-1); } this.selectedDrawingObject = sel; // for (IDrawingObject d : getDrawingObjects()) // d.setSelected(false); if (sel != null) this.selectedDrawingObject.setSelected(true); } /* public void cleanupCoordinates() { AbstractDepictor depictor = createDepictor(getMolecule()); DepictorTransformation dt = depictor.simpleValidateView(new Rectangle2D.Double(0, 0, this.getWidth(), this.getHeight()), AbstractDepictor.cModeInflateToMaxAVBL); if (dt != null) dt.applyTo(mMol); }*/ public void cleanMolecule(boolean invent, boolean selectedOnly) { cleanupCoordinates(false, invent); valueInvalidated(); } // public abstract void cleanMolecule(boolean selectedOnly); protected abstract AbstractExtendedDepictor createExtendedDepictor(); protected abstract AbstractDepictor createDepictor(StereoMolecule stereoMolecule); public abstract void analyzeReaction(); public abstract boolean copyMolecule(boolean selected); public abstract boolean copyReaction(boolean selected); public abstract StereoMolecule pasteMolecule(double cx,double cy); public abstract Reaction pasteReaction(double cx,double cy); public ImageProvider getImageProvider() { return imageProvider; } 
public void setImageProvider(ImageProvider p) { imageProvider = p; } /* private void cleanupMultiFragmentCoordinates(ExtendedDepictor depictor) { Rectangle2D.Double[] boundingRect = new Rectangle2D.Double[mFragment.length]; // float fragmentWidth = 0.0f; for (int fragment = 0; fragment < mFragment.length; fragment++) { Depictor d = new Depictor(mFragment[fragment]); d.updateCoords(null, null, AbstractDepictor.cModeInflateToMaxAVBL); boundingRect[fragment] = d.getBoundingRect(); // fragmentWidth += boundingRect[fragment].width; } double spacing = FRAGMENT_CLEANUP_DISTANCE * AbstractDepictor.cOptAvBondLen; double avbl = mMol.getAverageBondLength(); double arrowWidth = DEFAULT_ARROW_LENGTH * getWidth(); double rawX = 0.5 * spacing; for (int fragment = 0; fragment <= mFragment.length; fragment++) { if (isReactionMode() && fragment == mReactantCount) { mDrawingObjectList.get(0).setRect( (float)(rawX - spacing / 2), getHeight() / 2, (float)(rawX - spacing / 2 + arrowWidth), getHeight() / 2); rawX += arrowWidth; } if (fragment == mFragment.length) { break; } double dx = rawX - boundingRect[fragment].x; double dy = 0.5 * (getHeight() - boundingRect[fragment].height) - boundingRect[fragment].y; mFragment[fragment].translateCoords(dx, dy); rawX += spacing + boundingRect[fragment].width; } depictor.updateCoords(null, new Rectangle2D.Double(0, 0, getWidth(), getHeight()), maxUpdateMode()); int[] fragmentAtom = new int[mFragment.length]; for (int atom = 0; atom < mMol.getAllAtoms(); atom++) { int fragment = mFragmentNo[atom]; mMol.setAtomX(atom, mFragment[fragment].getAtomX(fragmentAtom[fragment])); mMol.setAtomY(atom, mFragment[fragment].getAtomY(fragmentAtom[fragment])); fragmentAtom[fragment]++; } // mMol.setStereoBondsFromParity(); } */ }
package org.lightmare.utils;

import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Utility class to work with {@link Collection}, {@link Map} and array
 * instances: null/emptiness checks, nested-map traversal and conversions
 * between collections, sets, lists and arrays.
 *
 * @author Levan
 * @since 0.0.81-SNAPSHOT
 */
public abstract class CollectionUtils {

    // First index of array
    public static final int FIRST_INDEX = 0;

    // Second index of array
    public static final int SECOND_INDEX = 1;

    // Index of not existing data in collection
    public static final int NOT_EXISTING_INDEX = -1;

    // Length of empty array
    public static final int EMPTY_ARRAY_LENGTH = 0;

    // Empty array of objects
    public static final Object[] EMPTY_ARRAY = {};

    /**
     * Checks if passed {@link Collection} instance is not empty. The
     * collection must not be <code>null</code> (use {@link #valid(Collection)}
     * for a null-safe check).
     *
     * @param collection
     * @return <code>boolean</code>
     */
    public static boolean notEmpty(Collection<?> collection) {
        return !collection.isEmpty();
    }

    /**
     * Checks passed {@link Collection} instance on null and on emptiness;
     * returns true if it is not null and is not empty.
     *
     * @param collection
     * @return <code>boolean</code>
     */
    public static boolean valid(Collection<?> collection) {
        return collection != null && !collection.isEmpty();
    }

    /**
     * Checks passed {@link Map} instance on null and emptiness; returns true
     * if it is not null and is not empty.
     *
     * @param map
     * @return <code>boolean</code>
     */
    public static boolean valid(Map<?, ?> map) {
        return map != null && !map.isEmpty();
    }

    /**
     * Checks if passed {@link Map} instance is null or is empty.
     *
     * @param map
     * @return <code>boolean</code>
     */
    public static boolean invalid(Map<?, ?> map) {
        return !valid(map);
    }

    /**
     * Checks if passed {@link Collection} instance is null or is empty.
     *
     * @param collection
     * @return <code>boolean</code>
     */
    public static boolean invalid(Collection<?> collection) {
        return !valid(collection);
    }

    /**
     * Checks if any of the passed {@link Collection} instances is null or
     * empty (or if no collections are passed at all).
     *
     * @param collections
     * @return <code>boolean</code>
     */
    public static boolean invalidAll(Collection<?>... collections) {
        // Delegates to validAll so each element is inspected; the previous
        // implementation (!valid(collections)) only tested the varargs array
        // itself and reported empty collections as valid.
        return !validAll(collections);
    }

    /**
     * Checks if each of passed {@link Map} instances is not null and is not
     * empty.
     *
     * @param maps
     * @return <code>boolean</code>
     */
    public static boolean validAll(Map<?, ?>... maps) {

        boolean available = ObjectUtils.notNull(maps);

        if (available) {
            Map<?, ?> map;
            // stops at the first invalid map
            for (int i = FIRST_INDEX; i < maps.length && available; i++) {
                map = maps[i];
                available = available && valid(map);
            }
        }

        return available;
    }

    /**
     * Checks if passed array of {@link Object}'s instances is not null and is
     * not empty.
     *
     * @param array
     * @return <code>boolean</code>
     */
    public static boolean valid(Object[] array) {
        return array != null && array.length > EMPTY_ARRAY_LENGTH;
    }

    /**
     * Checks if passed {@link Object} array is null or is empty.
     *
     * @param array
     * @return <code>boolean</code>
     */
    public static boolean invalid(Object[] array) {
        return !valid(array);
    }

    /**
     * Checks if each of passed {@link Collection} instances is not null and
     * is not empty.
     *
     * @param collections
     * @return <code>boolean</code>
     */
    public static boolean validAll(Collection<?>... collections) {

        boolean available = ObjectUtils.notNull(collections);

        if (available) {
            Collection<?> collection;
            // stops at the first invalid collection
            for (int i = FIRST_INDEX; i < collections.length && available; i++) {
                collection = collections[i];
                available = available && valid(collection);
            }
        }

        return available;
    }

    /**
     * Checks if each of passed arrays is not null and is not empty.
     *
     * @param arrays
     * @return <code>boolean</code>
     */
    public static boolean validAll(Object[]... arrays) {

        boolean available = ObjectUtils.notNull(arrays);

        if (available) {
            Object[] array;
            int length = arrays.length;
            // stops at the first invalid array
            for (int i = FIRST_INDEX; i < length && available; i++) {
                array = arrays[i];
                available = available && valid(array);
            }
        }

        return available;
    }

    /**
     * Gets value from passed {@link Map} as other {@link Map} instance;
     * returns <code>null</code> if the source map is invalid or the value is
     * not itself a {@link Map}.
     *
     * @param key
     * @param from
     * @return {@link Map}<K,V>
     */
    public static <K, V> Map<K, V> getAsMap(Object key, Map<?, ?> from) {

        Map<K, V> result;

        if (valid(from)) {
            Object objectValue = from.get(key);
            if (objectValue instanceof Map) {
                result = ObjectUtils.cast(objectValue);
            } else {
                result = null;
            }
        } else {
            result = null;
        }

        return result;
    }

    /**
     * Gets values from passed {@link Map} as other {@link Map} instance
     * recursively by passed keys array; traversal stops early when an
     * intermediate value is missing or is not a {@link Map}.
     *
     * @param from
     * @param keys
     * @return {@link Map}
     */
    public static Map<?, ?> getAsMap(Map<?, ?> from, Object... keys) {

        Map<?, ?> result = from;

        Object key;
        int length = keys.length;
        for (int i = FIRST_INDEX; i < length && ObjectUtils.notNull(result); i++) {
            key = keys[i];
            result = getAsMap(key, result);
        }

        return result;
    }

    /**
     * Gets value from passed nested {@link Map} structure: all keys but the
     * last one address intermediate maps, the last key addresses the value in
     * the innermost map.
     *
     * @param from
     * @param keys
     * @return <code>V</code> or <code>null</code> if any step is missing
     */
    public static <V> V getSubValue(Map<?, ?> from, Object... keys) {

        V value;

        int length = keys.length - 1;
        // all keys except the last address nested maps
        Object[] subKeys = Arrays.copyOf(keys, length);
        Object key = keys[length];
        Map<?, ?> result = getAsMap(from, subKeys);
        if (valid(result)) {
            value = ObjectUtils.cast(result.get(key));
        } else {
            value = null;
        }

        return value;
    }

    /**
     * Puts passed value to passed {@link Map} instance on passed key if such
     * key is not yet contained. (Method name keeps its historical spelling
     * for binary compatibility with existing callers.)
     *
     * @param map
     * @param key
     * @param value
     */
    public static <K, V> void putIfAbscent(Map<K, V> map, K key, V value) {

        boolean contained = map.containsKey(key);
        if (ObjectUtils.notTrue(contained)) {
            map.put(key, value);
        }
    }

    /**
     * Creates new {@link Set} from passed {@link Collection} instance;
     * returns an empty set for a null or empty source.
     *
     * @param collection
     * @return {@link Set}<code><T></code>
     */
    public static <T> Set<T> translateToSet(Collection<T> collection) {

        Set<T> set;

        if (valid(collection)) {
            set = new HashSet<T>(collection);
        } else {
            set = Collections.emptySet();
        }

        return set;
    }

    /**
     * Creates new {@link Set} from passed array instance; returns an empty
     * set for a null or empty source.
     *
     * @param array
     * @return {@link Set}<code><T></code>
     */
    public static <T> Set<T> translateToSet(T[] array) {

        List<T> collection;
        if (valid(array)) {
            collection = Arrays.asList(array);
        } else {
            collection = null;
        }

        return translateToSet(collection);
    }

    /**
     * Creates new {@link List} from passed {@link Collection} instance;
     * returns an empty list for a null or empty source.
     *
     * @param collection
     * @return {@link List}<code><T></code>
     */
    public static <T> List<T> translateToList(Collection<T> collection) {

        List<T> list;

        if (valid(collection)) {
            list = new ArrayList<T>(collection);
        } else {
            list = Collections.emptyList();
        }

        return list;
    }

    /**
     * Creates a new array of passed type and size via reflection.
     *
     * @param type
     * @param size
     * @return <code>T[]</code>
     */
    private static <T> T[] toArray(Class<T> type, int size) {

        Object arrayObject = Array.newInstance(type, size);
        T[] array = ObjectUtils.cast(arrayObject);

        return array;
    }

    /**
     * Checks if passed {@link Object} is an array (of object or primitive
     * component type).
     *
     * @param data
     * @return <code>boolean</code>
     */
    public static boolean isArray(final Object data) {

        boolean valid = (data instanceof Object[] || data instanceof boolean[]
                || data instanceof byte[] || data instanceof short[]
                || data instanceof char[] || data instanceof int[]
                || data instanceof long[] || data instanceof float[]
                || data instanceof double[]);

        return valid;
    }

    /**
     * Checks if passed {@link Object} is an {@link Object} types array.
     *
     * @param data
     * @return <code>boolean</code>
     */
    public static boolean isObjectArray(final Object data) {
        return (data instanceof Object[]);
    }

    /**
     * Checks if passed {@link Object} is a primitive types array.
     *
     * @param data
     * @return <code>boolean</code>
     */
    public static boolean isPrimitiveArray(final Object data) {

        boolean valid = (data instanceof boolean[] || data instanceof byte[]
                || data instanceof short[] || data instanceof char[]
                || data instanceof int[] || data instanceof long[]
                || data instanceof float[] || data instanceof double[]);

        return valid;
    }

    /**
     * Converts passed {@link Collection} to array of appropriated
     * {@link Class} type; returns <code>null</code> for a null source.
     *
     * @param collection
     * @param type
     * @return <code>T[]</code>
     */
    public static <T> T[] toArray(Collection<T> collection, Class<T> type) {

        T[] array;

        if (ObjectUtils.notNull(collection)) {
            array = toArray(type, collection.size());
            array = collection.toArray(array);
        } else {
            array = null;
        }

        return array;
    }

    /**
     * Creates empty array of passed type.
     *
     * @param type
     * @return <code>T[]</code>
     */
    public static <T> T[] emptyArray(Class<T> type) {
        return toArray(type, EMPTY_ARRAY_LENGTH);
    }

    /**
     * Peaks first element from list; returns <code>null</code> for a null or
     * empty list.
     *
     * @param list
     * @return T
     */
    private static <T> T getFirstFromList(List<T> list) {

        T value;

        if (valid(list)) {
            value = list.get(FIRST_INDEX);
        } else {
            value = null;
        }

        return value;
    }

    /**
     * Peaks first element from collection; returns <code>null</code> for a
     * null or empty collection.
     *
     * @param collection
     * @return T
     */
    public static <T> T getFirst(Collection<T> collection) {

        T value;

        if (valid(collection)) {
            if (collection instanceof List) {
                // lists support positional access directly
                value = getFirstFromList(((List<T>) collection));
            } else {
                Iterator<T> iterator = collection.iterator();
                value = iterator.next();
            }
        } else {
            value = null;
        }

        return value;
    }

    /**
     * Peaks first element from array; returns <code>null</code> for a null or
     * empty array.
     *
     * @param values
     * @return T
     */
    public static <T> T getFirst(T[] values) {

        T value;

        if (valid(values)) {
            value = values[FIRST_INDEX];
        } else {
            value = null;
        }

        return value;
    }
}
package nu.validator.datatype;

import java.util.HashSet;
import java.util.Set;

import org.relaxng.datatype.DatatypeException;

/**
 * Datatype for the HTML <code>sandbox</code> attribute value: an ASCII
 * case-insensitive, whitespace-separated set of unique
 * <code>allow-*</code> keywords.
 */
public final class SandboxAllowList extends AbstractDatatype {

    /**
     * The singleton instance.
     */
    public static final SandboxAllowList THE_INSTANCE = new SandboxAllowList();

    private SandboxAllowList() {
        super();
    }

    // keywords permitted for the sandbox attribute
    private static final HashSet<String> allowedKeywords = new HashSet<>();

    // when true, the allow-scripts + allow-same-origin combination is
    // reported as a warning instead of an error
    private static final boolean WARN = System.getProperty(
            "nu.validator.datatype.warn", "").equals("true");

    static {
        allowedKeywords.add("allow-forms");
        allowedKeywords.add("allow-modals");
        allowedKeywords.add("allow-orientation-lock");
        allowedKeywords.add("allow-pointer-lock");
        allowedKeywords.add("allow-popups");
        allowedKeywords.add("allow-popups-to-escape-sandbox");
        allowedKeywords.add("allow-presentation");
        allowedKeywords.add("allow-same-origin");
        allowedKeywords.add("allow-scripts");
        allowedKeywords.add("allow-top-navigation");
        allowedKeywords.add("allow-top-navigation-by-user-activation");
    }

    /**
     * Validates the literal: splits it on whitespace, lower-cases each token,
     * rejects duplicates and unknown keywords, then flags the two
     * problematic keyword combinations.
     * <p>
     * All per-call state is kept in locals; the previous implementation
     * recorded seen keywords in mutable instance fields, which is a data
     * race given that {@code THE_INSTANCE} is a shared singleton.
     *
     * @param literal the attribute value to validate
     * @throws DatatypeException on duplicate or unknown keywords, on the
     *             forbidden allow-top-navigation combination, and (as a
     *             warning) on allow-scripts + allow-same-origin
     */
    @Override
    public void checkValid(CharSequence literal) throws DatatypeException {
        Set<String> tokensSeen = new HashSet<>();
        StringBuilder builder = new StringBuilder();
        int len = literal.length();
        for (int i = 0; i < len; i++) {
            char c = literal.charAt(i);
            if (isWhitespace(c) && builder.length() > 0) {
                checkToken(builder, i, tokensSeen);
                builder.setLength(0);
            } else if (!isWhitespace(c)) {
                builder.append(toAsciiLowerCase(c));
            }
        }
        if (builder.length() > 0) {
            // trailing token with no whitespace after it
            checkToken(builder, len, tokensSeen);
        }
        if (tokensSeen.contains("allow-scripts")
                && tokensSeen.contains("allow-same-origin")) {
            throw newDatatypeException(
                    "Setting both \u201callow-scripts\u201d and"
                            + " \u201callow-same-origin\u201d is not"
                            + " recommended, because it effectively enables an"
                            + " embedded page to break out of all sandboxing.",
                    WARN);
        }
        if (tokensSeen.contains("allow-top-navigation")
                && tokensSeen.contains("allow-top-navigation-by-user-activation")) {
            throw newDatatypeException(
                    "\u201callow-top-navigation-by-user-activation\u201d and"
                            + " \u201callow-top-navigation\u201d must not both"
                            + " be specified at the same time. If both are"
                            + " present, only \u201callow-top-navigation\u201d"
                            + " will have effect.");
        }
    }

    /**
     * Validates a single accumulated token and records it in
     * {@code tokensSeen}.
     *
     * @param builder buffer holding the (already lower-cased) token
     * @param i index just past the token; error positions use {@code i - 1}
     * @param tokensSeen keywords already encountered in this literal
     * @throws DatatypeException on a duplicate or unknown keyword
     */
    private void checkToken(StringBuilder builder, int i,
            Set<String> tokensSeen) throws DatatypeException {
        String token = builder.toString();
        if (tokensSeen.contains(token)) {
            throw newDatatypeException(i - 1,
                    "Duplicate keyword \u201c" + token + "\u201d.");
        }
        tokensSeen.add(token);
        if (!allowedKeywords.contains(token)) {
            throw newDatatypeException(i - 1, "The string \u201c" + token
                    + "\u201d is not a valid keyword.");
        }
    }

    @Override
    public String getName() {
        return "sandbox allow list";
    }
}
package org.eclipse.che.workspace.infrastructure.openshift.bootstrapper;

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.inject.assistedinject.Assisted;
import java.util.List;
import javax.inject.Inject;
import javax.inject.Named;
import org.eclipse.che.api.core.model.workspace.runtime.RuntimeIdentity;
import org.eclipse.che.api.core.notification.EventService;
import org.eclipse.che.api.installer.shared.model.Installer;
import org.eclipse.che.api.workspace.server.bootstrap.AbstractBootstrapper;
import org.eclipse.che.api.workspace.server.spi.InfrastructureException;
import org.eclipse.che.workspace.infrastructure.openshift.OpenShiftMachine;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Bootstraps installers in OpenShift machine.
 *
 * @author Sergii Leshchenko
 */
public class OpenShiftBootstrapper extends AbstractBootstrapper {
  private static final Logger LOG = LoggerFactory.getLogger(OpenShiftBootstrapper.class);

  private static final Gson GSON = new GsonBuilder().disableHtmlEscaping().create();

  /** Base directory inside the machine under which the bootstrapper is placed. */
  private static final String BOOTSTRAPPER_BASE_DIR = "/tmp/";

  /** Directory holding the bootstrapper binary and its config. */
  private static final String BOOTSTRAPPER_DIR = BOOTSTRAPPER_BASE_DIR + "bootstrapper/";

  /** Name of the bootstrapper binary file. */
  private static final String BOOTSTRAPPER_FILE = "bootstrapper";

  /** Name of the JSON file describing the installers to run. */
  private static final String CONFIG_FILE = "config.json";

  private final RuntimeIdentity runtimeIdentity;
  private final List<Installer> installers;
  private final int serverCheckPeriodSeconds;
  private final int installerTimeoutSeconds;
  private final OpenShiftMachine openShiftMachine;
  private final String bootstrapperBinaryUrl;

  @Inject
  public OpenShiftBootstrapper(
      @Assisted RuntimeIdentity runtimeIdentity,
      @Assisted List<Installer> installers,
      @Assisted OpenShiftMachine openShiftMachine,
      @Named("che.websocket.endpoint") String cheWebsocketEndpoint,
      @Named("che.infra.openshift.bootstrapper.binary_url") String bootstrapperBinaryUrl,
      @Named("che.infra.openshift.bootstrapper.timeout_min") int bootstrappingTimeoutMinutes,
      @Named("che.infra.openshift.bootstrapper.installer_timeout_sec") int installerTimeoutSeconds,
      @Named("che.infra.openshift.bootstrapper.server_check_period_sec")
          int serverCheckPeriodSeconds,
      EventService eventService) {
    // the same websocket endpoint serves both installer events and output logs
    super(
        openShiftMachine.getName(),
        runtimeIdentity,
        bootstrappingTimeoutMinutes,
        cheWebsocketEndpoint,
        cheWebsocketEndpoint,
        eventService);
    this.runtimeIdentity = runtimeIdentity;
    this.installers = installers;
    this.openShiftMachine = openShiftMachine;
    this.bootstrapperBinaryUrl = bootstrapperBinaryUrl;
    this.installerTimeoutSeconds = installerTimeoutSeconds;
    this.serverCheckPeriodSeconds = serverCheckPeriodSeconds;
  }

  /**
   * Injects the bootstrapper into the machine and launches it detached, pointing it at the given
   * websocket endpoints for installer events and output logs.
   *
   * @param installerWebsocketEndpoint endpoint to push installer status events to
   * @param outputWebsocketEndpoint endpoint to push installer output logs to
   * @throws InfrastructureException when any exec in the machine fails
   */
  @Override
  protected void doBootstrapAsync(String installerWebsocketEndpoint, String outputWebsocketEndpoint)
      throws InfrastructureException {
    injectBootstrapper();

    String runtimeId =
        String.format(
            "%s:%s:%s",
            runtimeIdentity.getWorkspaceId(),
            runtimeIdentity.getEnvName(),
            runtimeIdentity.getOwner());

    StringBuilder command = new StringBuilder(BOOTSTRAPPER_DIR + BOOTSTRAPPER_FILE);
    command.append(" -machine-name ").append(openShiftMachine.getName());
    command.append(" -runtime-id ").append(runtimeId);
    command.append(" -push-endpoint ").append(installerWebsocketEndpoint);
    command.append(" -push-logs-endpoint ").append(outputWebsocketEndpoint);
    command.append(" -server-check-period ").append(Integer.toString(serverCheckPeriodSeconds));
    command.append(" -enable-auth");
    command.append(" -installer-timeout ").append(Integer.toString(installerTimeoutSeconds));
    command.append(" -file ").append(BOOTSTRAPPER_DIR + CONFIG_FILE);
    // redirects command output and makes the bootstrapping process detached,
    // to avoid the holding of the socket connection for exec watcher.
    command.append(" &>/dev/null &");

    openShiftMachine.exec("sh", "-c", command.toString());
  }

  /**
   * Downloads the bootstrapper binary into the machine, marks it executable and writes the
   * installers configuration file next to it.
   *
   * @throws InfrastructureException when any exec in the machine fails
   */
  private void injectBootstrapper() throws InfrastructureException {
    String binaryPath = BOOTSTRAPPER_DIR + BOOTSTRAPPER_FILE;

    LOG.debug("Creating folder for bootstrapper");
    openShiftMachine.exec("mkdir", "-p", BOOTSTRAPPER_DIR);

    LOG.debug("Downloading bootstrapper binary");
    openShiftMachine.exec("curl", "-o", binaryPath, bootstrapperBinaryUrl);
    openShiftMachine.exec("chmod", "+x", binaryPath);

    LOG.debug("Creating bootstrapper config file");
    // quoted 'EOF' heredoc: the JSON payload is written verbatim, no shell expansion
    openShiftMachine.exec(
        "sh",
        "-c",
        "cat > "
            + BOOTSTRAPPER_DIR
            + CONFIG_FILE
            + " << 'EOF'\n"
            + GSON.toJson(installers)
            + "\nEOF");
  }
}
// samskivert library - useful routines for java programs
// This library is free software; you can redistribute it and/or modify it
// (at your option) any later version.
// This library is distributed in the hope that it will be useful,
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// You should have received a copy of the GNU Lesser General Public
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

package com.samskivert.jdbc.depot;

import java.sql.SQLException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

import com.samskivert.jdbc.depot.Key.WhereCondition;
import com.samskivert.jdbc.depot.annotation.Computed;
import com.samskivert.jdbc.depot.clause.DeleteClause;
import com.samskivert.jdbc.depot.clause.FieldDefinition;
import com.samskivert.jdbc.depot.clause.FieldOverride;
import com.samskivert.jdbc.depot.clause.ForUpdate;
import com.samskivert.jdbc.depot.clause.FromOverride;
import com.samskivert.jdbc.depot.clause.GroupBy;
import com.samskivert.jdbc.depot.clause.InsertClause;
import com.samskivert.jdbc.depot.clause.Join;
import com.samskivert.jdbc.depot.clause.Limit;
import com.samskivert.jdbc.depot.clause.OrderBy;
import com.samskivert.jdbc.depot.clause.SelectClause;
import com.samskivert.jdbc.depot.clause.UpdateClause;
import com.samskivert.jdbc.depot.clause.Where;
import com.samskivert.jdbc.depot.expression.ColumnExp;
import com.samskivert.jdbc.depot.expression.ExpressionVisitor;
import com.samskivert.jdbc.depot.expression.FunctionExp;
import com.samskivert.jdbc.depot.expression.LiteralExp;
import com.samskivert.jdbc.depot.expression.SQLExpression;
import com.samskivert.jdbc.depot.expression.ValueExp;
import com.samskivert.jdbc.depot.operator.Conditionals.Exists;
import com.samskivert.jdbc.depot.operator.Conditionals.In;
import com.samskivert.jdbc.depot.operator.Conditionals.IsNull;
import com.samskivert.jdbc.depot.operator.Conditionals.FullTextMatch;
import com.samskivert.jdbc.depot.operator.Logic.Not;
import com.samskivert.jdbc.depot.operator.SQLOperator.BinaryOperator;
import com.samskivert.jdbc.depot.operator.SQLOperator.MultiOperator;

/**
 * Implements the base functionality of the SQL-building pass of {@link SQLBuilder}. Dialectal
 * subclasses of this should be created and returned from {@link SQLBuilder#getBuildVisitor()}.
 *
 * This class is intimately paired with {@link BindVisitor}: literal parameter values are emitted
 * here only as {@code ?} placeholders, to be bound by that visitor in the same traversal order.
 *
 * NOTE(review): instances accumulate SQL into a single internal builder, so a visitor instance is
 * single-use and not safe for concurrent use — confirm against callers.
 */
public abstract class BuildVisitor implements ExpressionVisitor
{
    /** Returns the SQL text accumulated by this visitor so far. */
    public String getQuery ()
    {
        return _builder.toString();
    }

    /** Emits an explicit FROM clause listing the overriding persistent classes, each aliased. */
    public void visit (FromOverride override)
        throws Exception
    {
        _builder.append(" from ");
        List<Class<? extends PersistentRecord>> from = override.getFromClasses();
        for (int ii = 0; ii < from.size(); ii++) {
            if (ii > 0) {
                _builder.append(", ");
            }
            appendTableName(from.get(ii));
            _builder.append(" as ");
            appendTableAbbreviation(from.get(ii));
        }
    }

    /** Emits a field definition's expression, aliased to the field name when aliasing is on. */
    public void visit (FieldDefinition definition)
        throws Exception
    {
        definition.getDefinition().accept(this);
        if (_enableAliasing) {
            _builder.append(" as ");
            appendIdentifier(definition.getField());
        }
    }

    /**
     * Emits the conjunction of primary-key equality conditions for a single-record key; null key
     * values become {@code is null}, others become {@code = ?} placeholders.
     */
    public void visit (WhereCondition<? extends PersistentRecord> whereCondition)
        throws Exception
    {
        Class<? extends PersistentRecord> pClass = whereCondition.getPersistentClass();
        String[] keyFields = Key.getKeyFields(pClass);
        List<Comparable<?>> values = whereCondition.getValues();
        for (int ii = 0; ii < keyFields.length; ii ++) {
            if (ii > 0) {
                _builder.append(" and ");
            }
            // A Key's WHERE clause must mirror what's actually retrieved for the persistent
            // object, so we turn on overrides here just as we do when expanding SELECT fields
            boolean saved = _enableOverrides;
            _enableOverrides = true;
            appendRhsColumn(pClass, keyFields[ii]);
            _enableOverrides = saved;
            _builder.append(values.get(ii) == null ? " is null " : " = ? ");
        }
    }

    /** Emits a full WHERE clause for a single-record key by delegating to its condition. */
    public void visit (Key<? extends PersistentRecord> key)
        throws Exception
    {
        _builder.append(" where ");
        key.condition.accept(this);
    }

    /**
     * Emits a WHERE clause for a multi-record key: equality conditions on the single-valued
     * fields followed by an IN (?, ?, ...) clause over the multi-valued field.
     */
    public void visit (MultiKey<? extends PersistentRecord> key)
        throws Exception
    {
        _builder.append(" where ");
        boolean first = true;
        for (Map.Entry<String, Comparable<?>> entry : key.getSingleFieldsMap().entrySet()) {
            if (first) {
                first = false;
            } else {
                _builder.append(" and ");
            }
            // A MultiKey's WHERE clause must mirror what's actually retrieved for the persistent
            // object, so we turn on overrides here just as we do when expanding SELECT fields
            boolean saved = _enableOverrides;
            _enableOverrides = true;
            appendRhsColumn(key.getPersistentClass(), entry.getKey());
            _enableOverrides = saved;
            _builder.append(entry.getValue() == null ? " is null " : " = ? ");
        }
        // only join with "and" if at least one single-field condition was emitted above
        if (!first) {
            _builder.append(" and ");
        }
        appendRhsColumn(key.getPersistentClass(), key.getMultiField());
        _builder.append(" in (");
        Comparable<?>[] values = key.getMultiValues();
        for (int ii = 0; ii < values.length; ii ++) {
            if (ii > 0) {
                _builder.append(", ");
            }
            _builder.append("?");
        }
        _builder.append(")");
    }

    /** Emits a SQL function call: name followed by its comma-separated argument expressions. */
    public void visit (FunctionExp functionExp)
        throws Exception
    {
        _builder.append(functionExp.getFunction());
        _builder.append("(");
        SQLExpression[] arguments = functionExp.getArguments();
        for (int ii = 0; ii < arguments.length; ii ++) {
            if (ii > 0) {
                _builder.append(", ");
            }
            arguments[ii].accept(this);
        }
        _builder.append(")");
    }

    /**
     * Emits an n-ary operator expression: each condition parenthesized, joined by the operator
     * token (e.g. {@code (a) and (b) and (c)}).
     */
    public void visit (MultiOperator multiOperator)
        throws Exception
    {
        SQLExpression[] conditions = multiOperator.getConditions();
        for (int ii = 0; ii < conditions.length; ii++) {
            if (ii > 0) {
                _builder.append(" ").append(multiOperator.operator()).append(" ");
            }
            _builder.append("(");
            conditions[ii].accept(this);
            _builder.append(")");
        }
    }

    /** Emits a parenthesized binary operator expression: {@code (lhs op rhs)}. */
    public void visit (BinaryOperator binaryOperator)
        throws Exception
    {
        _builder.append('(');
        binaryOperator.getLeftHandSide().accept(this);
        _builder.append(binaryOperator.operator());
        binaryOperator.getRightHandSide().accept(this);
        _builder.append(')');
    }

    /** Emits an {@code is null} test on the given column. */
    public void visit (IsNull isNull)
        throws Exception
    {
        isNull.getColumn().accept(this);
        _builder.append(" is null");
    }

    /** Emits an IN clause with one {@code ?} placeholder per value. */
    public void visit (In in)
        throws Exception
    {
        in.getColumn().accept(this);
        _builder.append(" in (");
        Comparable<?>[] values = in.getValues();
        for (int ii = 0; ii < values.length; ii ++) {
            if (ii > 0) {
                _builder.append(", ");
            }
            _builder.append("?");
        }
        _builder.append(")");
    }

    /** Full-text match syntax is database-specific; dialectal subclasses must implement it. */
    public abstract void visit (FullTextMatch match)
        throws Exception;

    /** Emits a column reference, subject to field overrides and @Computed expansion. */
    public void visit (ColumnExp columnExp)
        throws Exception
    {
        appendRhsColumn(columnExp.getPersistentClass(), columnExp.getField());
    }

    /** Emits a negated, parenthesized condition: {@code not (cond)}. */
    public void visit (Not not)
        throws Exception
    {
        _builder.append(" not (");
        not.getCondition().accept(this);
        _builder.append(")");
    }

    /** Emits a GROUP BY clause over the given expressions. */
    public void visit (GroupBy groupBy)
        throws Exception
    {
        _builder.append(" group by ");
        SQLExpression[] values = groupBy.getValues();
        for (int ii = 0; ii < values.length; ii++) {
            if (ii > 0) {
                _builder.append(", ");
            }
            values[ii].accept(this);
        }
    }

    /** Emits a FOR UPDATE row-locking suffix. */
    public void visit (ForUpdate forUpdate)
        throws Exception
    {
        _builder.append(" for update ");
    }

    /** Emits an ORDER BY clause; each expression is followed by its corresponding order. */
    public void visit (OrderBy orderBy)
        throws Exception
    {
        _builder.append(" order by ");
        SQLExpression[] values = orderBy.getValues();
        OrderBy.Order[] orders = orderBy.getOrders();
        for (int ii = 0; ii < values.length; ii++) {
            if (ii > 0) {
                _builder.append(", ");
            }
            values[ii].accept(this);
            _builder.append(" ").append(orders[ii]);
        }
    }

    /** Emits a WHERE clause by delegating to its condition expression. */
    public void visit (Where where)
        throws Exception
    {
        _builder.append(" where ");
        where.getCondition().accept(this);
    }

    /** Emits a join clause of the appropriate type, aliasing the joined table. */
    public void visit (Join join)
        throws Exception
    {
        switch (join.getType()) {
        case INNER:
            _builder.append(" inner join ");
            break;
        case LEFT_OUTER:
            _builder.append(" left outer join ");
            break;
        case RIGHT_OUTER:
            _builder.append(" right outer join ");
            break;
        }
        appendTableName(join.getJoinClass());
        _builder.append(" as ");
        appendTableAbbreviation(join.getJoinClass());
        _builder.append(" on ");
        join.getJoinCondition().accept(this);
    }

    /** Emits LIMIT/OFFSET with placeholders; the actual counts are bound later. */
    public void visit (Limit limit)
        throws Exception
    {
        _builder.append(" limit ? offset ? ");
    }

    /** Emits a literal SQL fragment verbatim. */
    public void visit (LiteralExp literalExp)
        throws Exception
    {
        _builder.append(literalExp.getText());
    }

    /** Emits a {@code ?} placeholder; the value is bound by the companion bind pass. */
    public void visit (ValueExp valueExp)
        throws Exception
    {
        _builder.append("?");
    }

    /** Emits an EXISTS test over a subquery. */
    public void visit (Exists<? extends PersistentRecord> exists)
        throws Exception
    {
        _builder.append("exists ");
        exists.getSubClause().accept(this);
    }

    /**
     * Emits a complete SELECT statement: the expanded field list, the FROM source (an explicit
     * override, the record's own table, or a @Computed record's shadowed table), then joins and
     * the optional WHERE/GROUP BY/ORDER BY/LIMIT/FOR UPDATE clauses. Nested SELECTs are
     * parenthesized; this clause's field definitions are registered in {@link #_definitions} for
     * the duration of the traversal and removed in a finally block.
     */
    public void visit (SelectClause<? extends PersistentRecord> selectClause)
        throws Exception
    {
        Class<? extends PersistentRecord> pClass = selectClause.getPersistentClass();
        boolean isInner = _innerClause;
        _innerClause = true;

        if (isInner) {
            _builder.append("(");
        }
        _builder.append("select ");

        if (_definitions.containsKey(pClass)) {
            throw new IllegalArgumentException(
                "Can not yet nest SELECTs on the same persistent record.");
        }
        Map<String, FieldDefinition> definitionMap = new HashMap<String, FieldDefinition>();
        for (FieldDefinition definition : selectClause.getFieldDefinitions()) {
            definitionMap.put(definition.getField(), definition);
        }
        _definitions.put(pClass, definitionMap);

        try {
            // iterate over the fields we're filling in and figure out whence each one comes
            boolean skip = true;
            // while expanding column names in the SELECT query, do aliasing and expansion
            _enableAliasing = _enableOverrides = true;
            for (String field : selectClause.getFields()) {
                if (!skip) {
                    _builder.append(", ");
                }
                skip = false;
                int len = _builder.length();
                appendRhsColumn(pClass, field);
                // if nothing was added, don't add a comma
                if (_builder.length() == len) {
                    skip = true;
                }
            }
            // then stop
            _enableAliasing = _enableOverrides = false;

            if (selectClause.getFromOverride() != null) {
                selectClause.getFromOverride().accept(this);
            } else {
                Computed computed = _types.getMarshaller(pClass).getComputed();
                Class<? extends PersistentRecord> tClass;
                if (computed != null && !PersistentRecord.class.equals(computed.shadowOf())) {
                    tClass = computed.shadowOf();
                } else if (_types.getTableName(pClass) != null) {
                    tClass = pClass;
                } else {
                    throw new SQLException("Query on @Computed entity with no FromOverrideClause.");
                }
                _builder.append(" from ");
                appendTableName(tClass);
                _builder.append(" as ");
                appendTableAbbreviation(tClass);
            }

            for (Join clause : selectClause.getJoinClauses()) {
                clause.accept(this);
            }
            if (selectClause.getWhereClause() != null) {
                selectClause.getWhereClause().accept(this);
            }
            if (selectClause.getGroupBy() != null) {
                selectClause.getGroupBy().accept(this);
            }
            if (selectClause.getOrderBy() != null) {
                selectClause.getOrderBy().accept(this);
            }
            if (selectClause.getLimit() != null) {
                selectClause.getLimit().accept(this);
            }
            if (selectClause.getForUpdate() != null) {
                selectClause.getForUpdate().accept(this);
            }
        } finally {
            _definitions.remove(pClass);
        }

        if (isInner) {
            _builder.append(")");
        }
    }

    /**
     * Emits an UPDATE statement: SET assignments come either from a POJO ({@code ?} placeholders)
     * or from explicit value expressions, followed by the mandatory WHERE clause.
     */
    public void visit (UpdateClause<? extends PersistentRecord> updateClause)
        throws Exception
    {
        if (updateClause.getWhereClause() == null) {
            throw new SQLException("I dare not currently perform UPDATE without a WHERE clause.");
        }
        Class<? extends PersistentRecord> pClass = updateClause.getPersistentClass();
        _innerClause = true;

        _builder.append("update ");
        appendTableName(pClass);
        _builder.append(" as ");
        appendTableAbbreviation(pClass);
        _builder.append(" set ");

        String[] fields = updateClause.getFields();
        Object pojo = updateClause.getPojo();
        SQLExpression[] values = updateClause.getValues();
        for (int ii = 0; ii < fields.length; ii ++) {
            if (ii > 0) {
                _builder.append(", ");
            }
            appendLhsColumn(pClass, fields[ii]);
            _builder.append(" = ");
            if (pojo != null) {
                _builder.append("?");
            } else {
                values[ii].accept(this);
            }
        }
        updateClause.getWhereClause().accept(this);
    }

    /** Emits a DELETE statement for the clause's record, followed by its WHERE clause. */
    public void visit (DeleteClause<? extends PersistentRecord> deleteClause)
        throws Exception
    {
        _builder.append("delete from ");
        appendTableName(deleteClause.getPersistentClass());
        _builder.append(" as ");
        appendTableAbbreviation(deleteClause.getPersistentClass());
        _builder.append(" ");
        deleteClause.getWhereClause().accept(this);
    }

    /**
     * Emits an INSERT statement over all column fields of the record; identity fields get
     * {@code DEFAULT} and the rest get {@code ?} placeholders.
     */
    public void visit (InsertClause<? extends PersistentRecord> insertClause)
        throws Exception
    {
        Class<? extends PersistentRecord> pClass = insertClause.getPersistentClass();
        DepotMarshaller<?> marsh = _types.getMarshaller(pClass);
        _innerClause = true;

        String[] fields = marsh.getColumnFieldNames();
        _builder.append("insert into ");
        appendTableName(insertClause.getPersistentClass());
        _builder.append(" (");
        for (int ii = 0; ii < fields.length; ii ++) {
            if (ii > 0) {
                _builder.append(", ");
            }
            appendLhsColumn(pClass, fields[ii]);
        }
        _builder.append(") values(");

        Set<String> idFields = insertClause.getIdentityFields();
        for (int ii = 0; ii < fields.length; ii++) {
            if (ii > 0) {
                _builder.append(", ");
            }
            if (idFields.contains(fields[ii])) {
                _builder.append("DEFAULT");
            } else {
                _builder.append("?");
            }
        }
        _builder.append(")");
    }

    /** Appends an identifier, quoted/escaped per the database dialect; subclasses implement. */
    protected abstract void appendIdentifier (String field);

    /** Appends the configured table name for the given persistent record type. */
    protected void appendTableName (Class<? extends PersistentRecord> type)
    {
        appendIdentifier(_types.getTableName(type));
    }

    /** Appends the configured table abbreviation (alias) for the given record type. */
    protected void appendTableAbbreviation (Class<? extends PersistentRecord> type)
    {
        appendIdentifier(_types.getTableAbbreviation(type));
    }

    /**
     * Constructs a name used for assignment in e.g. INSERT/UPDATE. This is the SQL
     * equivalent of an lvalue; something that can appear to the left of an equals sign.
     * We do not prepend this identifier with a table abbreviation, nor do we expand
     * field overrides, shadowOf declarations, or the like: it is just a column name.
     *
     * @throws IllegalArgumentException if the type has no registered marshaller
     */
    protected void appendLhsColumn (Class<? extends PersistentRecord> type, String field)
        throws Exception
    {
        DepotMarshaller<?> dm = _types.getMarshaller(type);
        if (dm == null) {
            throw new IllegalArgumentException(
                "Unknown field on persistent record [record=" + type + ", field=" + field + "]");
        }
        FieldMarshaller<?> fm = dm.getFieldMarshaller(field);
        appendIdentifier(fm.getColumnName());
    }

    /**
     * Appends an expression for the given field on the given persistent record; this can
     * appear in a SELECT list, in WHERE clauses, etc, etc. Resolution order: a registered
     * field override/definition (when enabled), then a field-level @Computed literal
     * definition or shadowOf redirection, and finally the plain abbreviated column.
     *
     * @throws IllegalArgumentException if the type is unknown, an override is misused, or the
     * field has no resolvable definition
     */
    protected void appendRhsColumn (Class<? extends PersistentRecord> type, String field)
        throws Exception
    {
        DepotMarshaller<?> dm = _types.getMarshaller(type);
        if (dm == null) {
            throw new IllegalArgumentException(
                "Unknown field on persistent record [record=" + type + ", field=" + field + "]");
        }
        FieldMarshaller<?> fm = dm.getFieldMarshaller(field);
        Map<String, FieldDefinition> fieldOverrides = _definitions.get(type);
        if (fieldOverrides != null) {
            // first, see if there's a field override
            FieldDefinition override = fieldOverrides.get(field);
            if (override != null) {
                boolean useOverride;
                if (override instanceof FieldOverride) {
                    // FieldOverride is only legal on concrete (non-computed) fields
                    if (fm.getComputed() != null || dm.getComputed() != null) {
                        throw new IllegalArgumentException(
                            "FieldOverride cannot be used on @Computed field: " + field);
                    }
                    useOverride = _enableOverrides;
                } else if (fm.getComputed() == null && dm.getComputed() == null) {
                    // a plain FieldDefinition is conversely only legal on computed fields
                    throw new IllegalArgumentException(
                        "FieldDefinition must not be used on concrete field: " + field);
                } else {
                    useOverride = true;
                }
                if (useOverride) {
                    // If a FieldOverride's target is in turn another FieldOverride, the second
                    // one is ignored. As an example, when creating ItemRecords from CloneRecords,
                    // we make Item.itemId = Clone.itemId. We also make Item.parentId = Item.itemId
                    // and would be dismayed to find Item.parentID = Item.itemId = Clone.itemId.
                    boolean saved = _enableOverrides;
                    _enableOverrides = false;
                    override.accept(this);
                    _enableOverrides = saved;
                    return;
                }
            }
        }

        Computed entityComputed = dm.getComputed();

        // figure out the class we're selecting from unless we're otherwise overriden:
        // for a concrete record, simply use the corresponding table; for a computed one,
        // default to the shadowed concrete record, or null if there isn't one
        Class<? extends PersistentRecord> tableClass;
        if (entityComputed == null) {
            tableClass = type;
        } else if (!PersistentRecord.class.equals(entityComputed.shadowOf())) {
            tableClass = entityComputed.shadowOf();
        } else {
            tableClass = null;
        }

        // handle the field-level @Computed annotation, if there is one
        Computed fieldComputed = fm.getComputed();
        if (fieldComputed != null) {
            // check if the computed field has a literal SQL definition
            if (fieldComputed.fieldDefinition().length() > 0) {
                _builder.append(fieldComputed.fieldDefinition());
                if (_enableAliasing) {
                    _builder.append(" as ");
                    appendIdentifier(field);
                }
                return;
            }

            // or if we can simply ignore the field
            if (!fieldComputed.required()) {
                return;
            }

            // else see if there's an overriding shadowOf definition
            if (fieldComputed.shadowOf() != null) {
                tableClass = fieldComputed.shadowOf();
            }
        }

        // if we get this far we hopefully have a table to select from
        if (tableClass != null) {
            appendTableAbbreviation(tableClass);
            _builder.append(".");
            appendIdentifier(fm.getColumnName());
            return;
        }

        // else owie
        throw new IllegalArgumentException(
            "Persistent field has no definition [class=" + type + ", field=" + field + "]");
    }

    protected BuildVisitor (DepotTypes types)
    {
        _types = types;
    }

    /** Supplies table names, abbreviations and marshallers for persistent record types. */
    protected DepotTypes _types;

    /** A StringBuilder to hold the constructed SQL. */
    protected StringBuilder _builder = new StringBuilder();

    /** A mapping of field overrides per persistent record. */
    protected Map<Class<? extends PersistentRecord>, Map<String, FieldDefinition>> _definitions =
        new HashMap<Class<? extends PersistentRecord>, Map<String,FieldDefinition>>();

    /** A flag that's set to true for inner SELECT's */
    protected boolean _innerClause = false;

    /** When true, field overrides registered in {@link #_definitions} are expanded. */
    protected boolean _enableOverrides = false;

    /** When true, expanded SELECT fields are emitted with " as field" aliases. */
    protected boolean _enableAliasing = false;
}
package com.groupbyinc.api; import com.groupbyinc.api.model.Bias; import com.groupbyinc.api.model.Biasing; import com.groupbyinc.api.model.CustomUrlParam; import com.groupbyinc.api.model.MatchStrategy; import com.groupbyinc.api.model.Navigation; import com.groupbyinc.api.model.NumericBoost; import com.groupbyinc.api.model.PartialMatchRule; import com.groupbyinc.api.model.Refinement; import com.groupbyinc.api.model.Sort; import com.groupbyinc.api.model.refinement.RefinementRange; import com.groupbyinc.api.model.refinement.RefinementValue; import com.groupbyinc.api.model.sort.FieldSort; import com.groupbyinc.api.model.sort.SortByIds; import com.groupbyinc.api.request.RefinementsRequest; import com.groupbyinc.api.request.Request; import com.groupbyinc.api.request.RestrictNavigation; import com.groupbyinc.api.request.SelectedRefinement; import com.groupbyinc.api.request.refinement.SelectedRefinementRange; import com.groupbyinc.api.request.refinement.SelectedRefinementValue; import com.groupbyinc.common.apache.commons.collections4.CollectionUtils; import com.groupbyinc.common.apache.commons.lang3.StringUtils; import com.groupbyinc.common.jackson.Mappers; import com.groupbyinc.common.jregex.Pattern; import com.groupbyinc.common.security.AesContent; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.logging.Logger; import static java.util.Arrays.asList; public class Query { private static final Pattern REFINEMENTS_SPLITTER_PATTERN = new Pattern("~((?=[\\w.]*[=:]))"); private static final String[] EMPTY_REFINEMENTS = new String[]{}; private static final Logger LOG = Logger.getLogger(Query.class.getName()); private static final String DOTS = "\\.\\."; private List<Sort> sort = new ArrayList<Sort>(); private MatchStrategy matchStrategy; private boolean wildcardSearchEnabled; private List<String> includedNavigations = new ArrayList<String>(); private 
List<String> excludedNavigations = new ArrayList<String>(); private String sessionId; private String visitorId; private String query; private AesContent securedPayload; private int skip = 0; private int pageSize = 10; private String collection; private String area; private String biasingProfile; private String matchStrategyName; private String language; private Map<String, String> queryUrlParams = new HashMap<String, String>(); private List<CustomUrlParam> customUrlParams = new ArrayList<CustomUrlParam>(); private LinkedHashMap<String, Navigation> navigations = new LinkedHashMap<String, Navigation>(); private List<String> fields = new ArrayList<String>(); private List<String> orFields = new ArrayList<String>(); private List<String> subsetIds = new ArrayList<String>(); private boolean pruneRefinements = true; private boolean returnBinary = true; private boolean disableAutocorrection = false; private RestrictNavigation restrictNavigation; private Biasing biasing = new Biasing(); private boolean bot = false; /** * <code> * Used internally by the bridge object to generate the JSON that is sent to the search service. * </code> * * @param clientKey * The client key used to authenticate this request. * * @return A JSON representation of this query object. 
*/ public String getBridgeJson(String clientKey) { return requestToJson(populateRequest(clientKey)); } private static String requestToJson(Request request) { try { return Mappers.writeValueAsString(request); } catch (IllegalArgumentException e) { return "{}"; } } private Request populateRequest(String clientKey) { Request request = new Request(); request.setSessionId(sessionId); request.setVisitorId(visitorId); request.setIncludedNavigations(includedNavigations); request.setExcludedNavigations(excludedNavigations); request.setClientKey(clientKey); request.setArea(area); request.setCollection(collection); request.setQuery(query); request.setFields(fields); request.setOrFields(orFields); request.setLanguage(language); request.setBiasingProfile(biasingProfile); request.setMatchStrategyName(matchStrategyName); request.setPageSize(pageSize); request.setSkip(skip); request.setBiasing(convertBiasing(biasing)); request.setCustomUrlParams(getCustomUrlParams()); request.setRefinements(generateSelectedRefinements(navigations)); request.setNavigations(generateNavigations(navigations)); request.setRestrictNavigation(convertRestrictNavigation()); request.setWildcardSearchEnabled(isWildcardSearchEnabled()); request.setSecuredPayload(securedPayload); if (CollectionUtils.isNotEmpty(sort)) { for (Sort s : sort) { request.setSort(convertSort(s)); } } request.setMatchStrategy(convertPartialMatchStrategy(matchStrategy)); if (!pruneRefinements) { request.setPruneRefinements(false); } if (returnBinary) { request.setReturnBinary(true); } if (disableAutocorrection) { request.setDisableAutocorrection(true); } return request; } protected static com.groupbyinc.api.request.Biasing convertBiasing(Biasing biasing) { com.groupbyinc.api.request.Biasing convertedBiasing = new com.groupbyinc.api.request.Biasing(); boolean hasData = false; if (biasing != null) { convertedBiasing.setAugmentBiases(biasing.isAugmentBiases()); if (CollectionUtils.isNotEmpty(biasing.getRestrictToIds())) { 
convertedBiasing.setRestrictToIds(new ArrayList<String>(biasing.getRestrictToIds())); hasData = true; } if (CollectionUtils.isNotEmpty(biasing.getBringToTop())) { convertedBiasing.setBringToTop(new ArrayList<String>(biasing.getBringToTop())); hasData = true; } if (CollectionUtils.isNotEmpty(biasing.getBiases())) { convertedBiasing.setBiases(new ArrayList<com.groupbyinc.api.request.Bias>(convertBiases(biasing.getBiases()))); hasData = true; } if (biasing.getInfluence() != null) { convertedBiasing.setInfluence(biasing.getInfluence()); hasData = true; } if (CollectionUtils.isNotEmpty(biasing.getNumericBoosts())) { convertedBiasing.setNumericBoosts(convertNumericBoosts(biasing.getNumericBoosts())); hasData = true; } } return hasData ? convertedBiasing : null; } /** * @return A list of custom url params */ public List<CustomUrlParam> getCustomUrlParams() { return customUrlParams; } private List<SelectedRefinement> generateSelectedRefinements(LinkedHashMap<String, Navigation> navigations) { List<SelectedRefinement> refinements = new ArrayList<SelectedRefinement>(); for (Navigation n : navigations.values()) { for (Refinement r : n.getRefinements()) { switch (r.getType()) { case Range: { RefinementRange rr = (RefinementRange) r; refinements.add(new SelectedRefinementRange().setNavigationName(n.getName()).setLow(rr.getLow()).setHigh(rr.getHigh()).setExclude(rr.getExclude())); break; } case Value: { RefinementValue rv = (RefinementValue) r; refinements.add(new SelectedRefinementValue().setNavigationName(n.getName()).setValue(rv.getValue()).setExclude(rv.getExclude())); break; } default: break; } } } return refinements; } private List<com.groupbyinc.api.request.Navigation> generateNavigations(LinkedHashMap<String, Navigation> navigations) { List<com.groupbyinc.api.request.Navigation> overrides = new ArrayList<com.groupbyinc.api.request.Navigation>(); for (Navigation n : navigations.values()) { if (CollectionUtils.isNotEmpty(n.getPinnedRefinements())) { overrides.add(new 
com.groupbyinc.api.request.Navigation().setName(n.getName()).setPinnedRefinements(n.getPinnedRefinements())); } } return overrides; } private RestrictNavigation convertRestrictNavigation() { return restrictNavigation == null ? null : new RestrictNavigation().setName(restrictNavigation.getName()).setCount(restrictNavigation.getCount()); } public boolean isWildcardSearchEnabled() { return wildcardSearchEnabled; } protected static com.groupbyinc.api.request.Sort convertSort(Sort sort) { com.groupbyinc.api.request.Sort convertedSort = null; if (sort != null) { if (sort instanceof FieldSort) { FieldSort fieldSort = (FieldSort) sort; com.groupbyinc.api.request.sort.FieldSort converted = new com.groupbyinc.api.request.sort.FieldSort().setField(fieldSort.getField()); switch (fieldSort.getOrder()) { case Ascending: converted.setOrder(com.groupbyinc.api.request.Sort.Order.Ascending); break; case Descending: converted.setOrder(com.groupbyinc.api.request.Sort.Order.Descending); break; default: break; } convertedSort = converted; } else if (sort instanceof SortByIds) { SortByIds sortByIds = (SortByIds) sort; convertedSort = new com.groupbyinc.api.request.sort.SortByIds().setIds(sortByIds.getIds()); } } return convertedSort; } protected static com.groupbyinc.api.request.MatchStrategy convertPartialMatchStrategy(MatchStrategy strategy) { com.groupbyinc.api.request.MatchStrategy convertedStrategy = null; if (strategy != null && CollectionUtils.isNotEmpty(strategy.getRules())) { convertedStrategy = new com.groupbyinc.api.request.MatchStrategy(); for (PartialMatchRule r : strategy.getRules()) { convertedStrategy.addRule(r); } } return convertedStrategy; } private static List<com.groupbyinc.api.request.Bias> convertBiases(List<Bias> biases) { List<com.groupbyinc.api.request.Bias> convertedBiases = new ArrayList<com.groupbyinc.api.request.Bias>(); for (Bias bias : biases) { convertedBiases.add(convertBias(bias)); } return convertedBiases; } private static 
com.groupbyinc.api.request.Bias convertBias(Bias bias) { return new com.groupbyinc.api.request.Bias().setName(bias.getName()).setContent(bias.getContent()).setStrength(convertStrength(bias.getStrength())); } private static List<com.groupbyinc.api.request.NumericBoost> convertNumericBoosts(List<NumericBoost> numericBoosts) { List<com.groupbyinc.api.request.NumericBoost> convertedBiases = new ArrayList<com.groupbyinc.api.request.NumericBoost>(); for (NumericBoost numericBoost : numericBoosts) { convertedBiases.add(convertNumericBoost(numericBoost)); } return convertedBiases; } private static com.groupbyinc.api.request.NumericBoost convertNumericBoost(NumericBoost numericBoost) { return new com.groupbyinc.api.request.NumericBoost().setName(numericBoost.getName()).setStrength(numericBoost.getStrength()).setInverted(numericBoost.isInverted()); } private static com.groupbyinc.api.request.Bias.Strength convertStrength(Bias.Strength strength) { com.groupbyinc.api.request.Bias.Strength convertedStrength; try { convertedStrength = com.groupbyinc.api.request.Bias.Strength.valueOf(strength.name()); } catch (IllegalArgumentException e) { LOG.warning("Could not convert bias strength: " + strength.name()); convertedStrength = com.groupbyinc.api.request.Bias.Strength.Leave_Unchanged; } return convertedStrength; } /** * <code> * Indicate if the *(star) character in the search string should be treated as a wildcard prefix search. * For example, `sta*` will match `star` and `start`. * * JSON Reference: * * { "wildcardSearchEnabled" : true } * * </code> * * @param wildcardSearchEnabled true to enable wildcard search, false otherwise. * @return the Query object itself */ public Query setWildcardSearchEnabled(boolean wildcardSearchEnabled) { this.wildcardSearchEnabled = wildcardSearchEnabled; return this; } /** * <code> * Used internally by the bridge object to generate the JSON that is sent to the search service. 
* </code>
*
* @param clientKey
*          The client key used to authenticate this request.
*
* @return A JSON representation of this query object.
*/
public String getBridgeRefinementsJson(String clientKey, String navigationName) {
    // Wraps the fully populated search request together with the navigation
    // whose refinements are being requested.
    RefinementsRequest request = new RefinementsRequest();
    request.setOriginalQuery(populateRequest(clientKey));
    request.setNavigationName(navigationName);
    return requestToJson(request);
}

/**
 * Serializes the refinements request to JSON, degrading to an empty JSON
 * object ({}) when the mapper rejects the request rather than propagating
 * the failure to the caller.
 */
private static String requestToJson(RefinementsRequest request) {
    try {
        return Mappers.writeValueAsString(request);
    } catch (IllegalArgumentException e) {
        return "{}";
    }
}

/**
 * @return The current search string.
 */
public String getQuery() {
    return query;
}

/**
 * <code>
 * Set a search string. If query is blank all records are considered. There are some limits enforced on the search string, it:
 *
 * - must not exceed 60 characters
 * - must not exceed 10 terms.
 *
 * If the limits are exceeded, the search string is truncated until all limits are satisfied. For example, the following search string
 *
 *     The quick brown fox jumps over the colorful wide bridge into the cold river.
 *
 * will get truncated to:
 *
 *     The quick brown fox jumps over the colorful wide bridge
 *
 * The terms `the`, `cold`, and `river` were truncated because the term limit was exceeded, and `into` was also removed because the
 * resulting string exceeded the character limit. Stop words are included in the string when determining if limits are exceeded. If
 * there is only one term and it exceeds the character limit, the query will fail.
 *
 * JSON Reference:
 *
 * { "query": "gloves" }
 *
 * </code>
 *
 * @param query
 *          The search string to fire against the engine.
 * @return the Query object itself
 */
public Query setQuery(String query) {
    this.query = query;
    return this;
}

/**
 * @return The data collection
 *
 * @deprecated since 2.0, use getCollection instead.
 */
public String getSubCollection() {
    return collection;
}

/**
 * @param subCollection
 *          The string representation of a collection query.
 *
 * @deprecated since 2.0, use setCollection instead.
*/
public Query setSubCollection(String subCollection) {
    // Deprecated alias: writes the same backing field as setCollection.
    collection = subCollection;
    return this;
}

/**
 * @return The data collection
 */
public String getCollection() {
    return collection;
}

/**
 * <code>
 * The collection to use. If you don't pass this parameter, `default` will be the collection used.
 * This is case sensitive and should be same as your collection name.
 *
 * JSON Reference:
 *
 * { "collection": "FAQs" }
 * </code>
 *
 * @param collection
 *          The string representation of a collection query.
 * @return the Query object itself
 */
public Query setCollection(String collection) {
    this.collection = collection;
    return this;
}

/**
 * @return The area name
 */
public String getArea() {
    return area;
}

/**
 * <code>
 * The area you wish to fire against, production, staging, etc...
 * If not specified, the `Production` area will be used (and if one doesn't exist, an error will be returned).
 *
 * JSON Reference:
 *
 * { "area": "Development" }
 *
 * </code>
 *
 * @param area
 *          The area name.
 * @return the Query object itself
 */
public Query setArea(String area) {
    this.area = area;
    return this;
}

/**
 * @return A string representation of all of the currently set refinements, as
 *         tilde-separated {@code ~name<refinement>} segments, or null when
 *         there are none.
 */
public String getRefinementString() {
    if (CollectionUtils.isNotEmpty(navigations.values())) {
        StringBuilder result = new StringBuilder();
        for (Navigation n : navigations.values()) {
            for (Refinement r : n.getRefinements()) {
                result.append("~").append(n.getName()).append(r.toTildeString());
            }
        }
        if (result.length() > 0) {
            return result.toString();
        }
    }
    return null;
}

/**
 * @return A string representation of all of the currently set custom url
 *         parameters, as tilde-separated {@code ~key=value} segments, or null
 *         when there are none.
 */
public String getCustomUrlParamsString() {
    if (CollectionUtils.isEmpty(customUrlParams)) {
        return null;
    }
    StringBuilder result = new StringBuilder();
    for (CustomUrlParam customUrlParam : customUrlParams) {
        result.append("~").append(customUrlParam.getKey()).append("=").append(customUrlParam.getValue());
    }
    return result.toString();
}

/**
 * Builds the JSON request used for refinement searches.
 *
 * @param clientKey
 *          Your client key
 *
 * @internal
 */
protected String
getBridgeJsonRefinementSearch(String clientKey) {
    // (method name continues the "protected String" on the previous source line)
    // Populates navigation include/exclude filters, auth, collection/area,
    // and applies the current query text as a refinement query.
    Request request = new Request();
    request.setIncludedNavigations(includedNavigations);
    request.setExcludedNavigations(excludedNavigations);
    request.setClientKey(clientKey);
    request.setCollection(collection);
    request.setArea(area);
    request.setRefinementQuery(query);
    request.setWildcardSearchEnabled(isWildcardSearchEnabled());
    request.setSecuredPayload(securedPayload);
    if (CollectionUtils.isNotEmpty(sort)) {
        for (Sort s : sort) {
            // NOTE(review): assumes Request.setSort accumulates criteria across
            // iterations rather than replacing them — confirm against Request.
            request.setSort(convertSort(s));
        }
    }
    request.setMatchStrategy(convertPartialMatchStrategy(matchStrategy));
    return requestToJson(request);
}

/**
 * <code>
 * A helper method to parse and set refinements.
 * If you pass in refinements of the format
 *
 * Brand=Bose~price:20..80
 *
 * The query object will correctly parse out the refinements.
 * </code>
 *
 * @param refinementString
 *          A tilde separated list of refinements
 * @return the Query object itself
 */
public Query addRefinementsByString(String refinementString) {
    if (refinementString == null) {
        return this;
    }
    String[] filterStrings = splitRefinements(refinementString);
    for (String filterString : filterStrings) {
        if (StringUtils.isBlank(filterString) || "=".equals(filterString)) {
            continue;
        }
        // ':' marks a range refinement (price:20..80); '=' a value refinement (Brand=Bose).
        int colon = filterString.indexOf(":");
        int equals = filterString.indexOf("=");
        boolean isRange = colon != -1 && equals == -1;
        String[] nameValue = filterString.split("[:=]", 2);
        Refinement refinement;
        if (isRange) {
            RefinementRange rr = new RefinementRange();
            if (nameValue[1].endsWith("..")) {
                // "low.." : open-ended upper bound
                rr.setLow(nameValue[1].split(DOTS)[0]);
                rr.setHigh("");
            } else if (nameValue[1].startsWith("..")) {
                // "..high" : open-ended lower bound
                rr.setLow("");
                rr.setHigh(nameValue[1].split(DOTS)[1]);
            } else {
                // "low..high" : both bounds present
                String[] lowHigh = nameValue[1].split(DOTS);
                rr.setLow(lowHigh[0]);
                rr.setHigh(lowHigh[1]);
            }
            refinement = rr;
        } else {
            refinement = new RefinementValue();
            ((RefinementValue) refinement).setValue(nameValue[1]);
        }
        if (StringUtils.isNotBlank(nameValue[0])) {
            addRefinement(nameValue[0], refinement);
        }
    }
    return this;
}

/**
 * Splits a raw tilde separated refinement string into individual refinement
 * tokens.
 *
 * @param refinementString the raw refinement string, may be blank
 * @return the individual refinement tokens; never null
 */
protected String[]
splitRefinements(String refinementString) {
    // (method name continues the "protected String[]" on the previous source line)
    // Blank input yields the shared empty array; otherwise tokenize with the
    // precompiled refinements splitter pattern.
    return StringUtils.isBlank(refinementString) ? EMPTY_REFINEMENTS : REFINEMENTS_SPLITTER_PATTERN.tokenizer(refinementString).split();
}

/**
 * <code>
 * Add a refinement. Please note that refinements are case-sensitive
 *
 * JSON Reference:
 *
 * Value and range refinements are both appended to an array on the refinements field.
 * Note the 'type' field, which marks the refinement as either a value or range refinement.
 *
 * { "refinements": [ {"type": "Range", "navigationName": "price", "low": "1.0", "high": "2.0"},
 *                    {"type": "Value", "navigationName": "brand", "value": "Nike" } ] }
 *
 * Refinements can be negated by setting the exclude property. An excluded refinement will return
 * results that do not match the value or fall into the range specified in the refinement.
 *
 * { "refinements": [ {"type": "Range", "navigationName": "price", "low": "1.0", "high": "2.0", "exclude": true},
 *                    {"type": "Value", "navigationName": "brand", "value": "Nike", "exclude": true } ] }
 *
 * </code>
 *
 * @param navigationName
 *          The name of the refinement
 * @param refinement
 *          The refinement to add
 */
private Query addRefinement(String navigationName, Refinement refinement) {
    Navigation navigation = navigations.get(navigationName);
    if (navigation == null) {
        // First refinement for this navigation: create it, and mark it as a
        // range navigation when the first refinement is a range.
        navigation = new Navigation().setName(navigationName);
        navigation.setRange(refinement instanceof RefinementRange);
        navigations.put(navigationName, navigation);
    }
    navigation.getRefinements().add(refinement);
    return this;
}

/**
 * <code>
 * Sets any additional parameters that can be used to trigger rules.
 * Takes a CustomUrlParam object.
 * </code>
 *
 * @param customUrlParam
 *          The parameter to add
 * @return the Query object itself
 */
public Query addCustomUrlParam(CustomUrlParam customUrlParam) {
    customUrlParams.add(customUrlParam);
    return this;
}

/**
 * <code>
 * Sets any additional parameters that can be used to trigger rules.
 * Takes a name and a value.
* * JSON Reference: * * Custom URL parameters separated by ~ in the form: * * { "customUrlParams": [ { "key": "region", "value": "east" } ] } * * </code> * * @param key * The parameter key * @param value * The parameter value */ public Query addCustomUrlParam(String key, String value) { customUrlParams.add(new CustomUrlParam().setKey(key).setValue(value)); return this; } /** * <code> * Helper method that takes a ~ separated string of additional parameters that can be * used to trigger rules. Takes ~ separated name/value list * </code> * * @param values * The list of name/values */ public Query addCustomUrlParamsByString(String values) { if (values == null) { return this; } String[] params = values.split("&"); for (String value : params) { if (StringUtils.isNotBlank(value)) { String[] keyValue = value.split("="); if (keyValue.length == 2 && StringUtils.isNotBlank(keyValue[0]) && StringUtils.isNotBlank(keyValue[1])) { customUrlParams.add(new CustomUrlParam().setKey(keyValue[0]).setValue(keyValue[1])); } } } return this; } /** * @return A list of fields that will be returned by the engine. */ public List<String> getFields() { return fields; } /** * <code> * Specify which fields should be returned on each record that comes back from the engine. You may specify more * than one field, if you specify <b>\\*</b> all fields will be returned. * If this parameter is blank, the search service will return an error. * If this parameter is omitted, the search service will return only the `title` field. * The `title` field is always returned. * You can exclude fields from being returned using `-`. Exclusion will take precedence over inclusion. * * * * JSON Reference: * * { "fields": [ "width", "brand", "height" ] } * { "fields" : [ "*", "-height", "-price" ] } * * </code> * * @param name * The case-sensitive name of the attribute to return */ public Query addFields(String... name) { return addField(fields, name); } protected Query addField(List<String> fields, String... 
name) {
    // (parameter list continues the "String..." on the previous source line)
    if (name == null) {
        return this;
    }
    Collections.addAll(fields, name);
    return this;
}

/**
 * @return A list of the fields that the search service will treat as OR-able.
 */
public List<String> getOrFields() {
    return orFields;
}

/**
 * <code>
 * Specify which fields should be queried with 'OR' instead of the default 'AND'.
 * This behavior is typically defined in command center on a per navigation basis. However,
 * you can set which fields should be treated as an OR field at the query level if desired.
 * As with normal refinement selections, once you have refined, the list of refinements for
 * that selected navigation will no longer be returned.
 *
 * JSON Reference:
 *
 * { "orFields": [ "field1", "field2" ] }
 *
 * </code>
 *
 * @param name
 *          The field that should be treated as OR by the search service before
 *          being executed.
 * @return the Query object itself
 */
public Query addOrField(String... name) {
    return addField(orFields, name);
}

/**
 * <code>
 * Add a range refinement. Takes a refinement name, a lower and upper bounds.
 * </code>
 *
 * @param navigationName
 *          The name of the refinement
 * @param low
 *          The low value
 * @param high
 *          The high value
 * @return the Query object itself
 */
public Query addRangeRefinement(String navigationName, String low, String high) {
    // Convenience overload: defaults exclude to false.
    return addRangeRefinement(navigationName, low, high, false);
}

/**
 * <code>
 * Add a range refinement. Takes a refinement name, a lower and upper bounds, and whether or not to exclude
 * this refinement.
 * </code>
 *
 * @param navigationName
 *          The name of the refinement
 * @param low
 *          The low value
 * @param high
 *          The high value
 * @param exclude
 *          True if the results should exclude this range refinement, false otherwise
 * @return the Query object itself
 */
public Query addRangeRefinement(String navigationName, String low, String high, boolean exclude) {
    return addRefinement(navigationName, new RefinementRange().setLow(low).setHigh(high).setExclude(exclude));
}

/**
 * <code>
 * Add a value refinement. Takes a refinement name and a value.
* </code>
*
* @param navigationName
*          The name of the navigation
* @param value
*          The refinement value
* @return the Query object itself
*/
public Query addValueRefinement(String navigationName, String value) {
    // Convenience overload: defaults exclude to false.
    return addValueRefinement(navigationName, value, false);
}

/**
 * <code>
 * Add a value refinement. Takes a refinement name, a value, and whether or not to exclude this refinement.
 * </code>
 *
 * @param navigationName
 *          The name of the navigation
 * @param value
 *          The refinement value
 * @param exclude
 *          True if the results should exclude this value refinement, false otherwise
 * @return the Query object itself
 */
public Query addValueRefinement(String navigationName, String value, boolean exclude) {
    return addRefinement(navigationName, new RefinementValue().setValue(value).setExclude(exclude));
}

/**
 * <code>
 * Add pinned value refinement. Takes a refinement name and a set of values.
 * </code>
 *
 * @param navigationName
 *          The name of the navigation
 * @param values
 *          The refinement values
 *
 * @return the Query object itself
 */
public Query setPinnedRefinements(String navigationName, String... values) {
    Navigation navigation = navigations.get(navigationName);
    if (navigation == null) {
        navigation = new Navigation().setName(navigationName);
        navigations.put(navigationName, navigation);
    }
    // Replaces any previously pinned refinements for this navigation.
    navigation.setPinnedRefinements(asList(values));
    return this;
}

/**
 * <code>
 *
 * By default, the engine will return up to twenty refinements for a navigation. These refinements are ordered by either count or value.
 * However, there are cases where the business may require a particular refinement to be always returned at the top of the list regardless
 * of count or value (e.g. a promoted or 'house' brand.)
 *
 * These refinements can be defined as `pinnedRefinements` within the `navigations` array, so that they are always returned at the
 * top of the list in the Search API Response. There is a limit of 20 `pinnedRefinements` per navigation.
*
* To define `pinnedRefinements`, you must always include the navigation name within the array, as shown below:
*
* <b>JSON Reference</b>:
*
* { "navigations": [ {"name": "brand", "pinnedRefinements": ["Apple", "Bose", "Sennheiser"]} ] }
*
* </code>
*
* @param navigationName
*          The name of the navigation
* @param values
*          The refinement values
* @return the Query object itself
*/
public Query setPinnedRefinements(String navigationName, List<String> values) {
    Navigation navigation = navigations.get(navigationName);
    if (navigation == null) {
        navigation = new Navigation().setName(navigationName);
        navigations.put(navigationName, navigation);
    }
    // Defensive copy so later mutation of the caller's list has no effect.
    navigation.setPinnedRefinements(new ArrayList<String>(values));
    return this;
}

/**
 * @return The number of documents to skip
 */
public int getSkip() {
    return skip;
}

/**
 * <code>
 * Tell the search service to offset by N records. For example, if N is 10, the records returned will start at 11.
 *
 * JSON Reference:
 *
 * { "skip": 400 }
 *
 * </code>
 *
 * @param skip
 *          The number of documents to skip
 * @return the Query object itself
 */
public Query setSkip(int skip) {
    this.skip = skip;
    return this;
}

/**
 * @return The current page size
 */
public int getPageSize() {
    return pageSize;
}

/**
 * <code>
 * Page size. Default is 10.
 *
 * JSON Reference:
 *
 * { "pageSize": 8 }
 *
 * </code>
 *
 * @param pageSize
 *          The number of records to return with the query.
 * @return the Query object itself
 */
public Query setPageSize(int pageSize) {
    this.pageSize = pageSize;
    return this;
}

/**
 * @return A map of the currently set refinements, keyed by navigation name.
 */
public Map<String, Navigation> getNavigations() {
    return navigations;
}

/**
 * @return Is return JSON set to true.
 */
public boolean isReturnBinary() {
    return returnBinary;
}

/**
 * <code>
 * Tells the search service to return binary data. This is enabled by default in the APIs for more efficient transport.
 * To disable this in an API, set this to `false`.
 *
 * JSON Reference:
 *
 * If passed true, informs the search service to return binary data rather than JSON.
*
* { "returnBinary": true }
*
* </code>
*
* @param returnBinary
*          Whether to tell the search service to return binary data rather than JSON.
* @return the Query object itself
*/
public Query setReturnBinary(boolean returnBinary) {
    this.returnBinary = returnBinary;
    return this;
}

/**
 * @return The current biasing profile name.
 */
public String getBiasingProfile() {
    return biasingProfile;
}

/**
 * <code>
 * Override the biasing profile used for this query - takes precedence over any
 * biasing profile set in the command center.
 *
 * JSON Reference:
 *
 * { "biasingProfile": "PopularityBias" }
 *
 * </code>
 *
 * @param biasingProfile The name of the biasing profile
 * @return the Query object itself
 */
public Query setBiasingProfile(String biasingProfile) {
    this.biasingProfile = biasingProfile;
    return this;
}

/**
 * @return The current match strategy name.
 */
public String getMatchStrategyName() {
    return matchStrategyName;
}

/**
 * <code>
 * Override the match strategy used for this query - takes precedence over any
 * match strategy set in the command center.
 *
 * JSON Reference:
 *
 * { "matchStrategyName": "RelaxedMatch" }
 *
 * </code>
 *
 * @param matchStrategyName The name of the match strategy
 * @return the Query object itself
 */
public Query setMatchStrategyName(String matchStrategyName) {
    this.matchStrategyName = matchStrategyName;
    return this;
}

/**
 * @return The current language filter on the query.
 */
public String getLanguage() {
    return language;
}

/**
 * <code>
 * Sets the language filter on the query and restricts the results to a certain language. If you do not specify a
 * language, english ("lang_en") will be considered the default. An unrecognized language will result in an error.
 *
 * Currently supported languages are:
 *
 * lang_en
 *
 * JSON Reference:
 *
 * { "language": "lang_en" }
 *
 * </code>
 *
 * @param language
 *          The value for language restrict
 * @return the Query object itself
 */
public Query setLanguage(String language) {
    this.language = language;
    return this;
}

/**
 * @return Are refinements with zero counts being removed.
*
* @internal
*/
public boolean isPruneRefinements() {
    return pruneRefinements;
}

/**
 * Specifies whether refinements with zero counts should be removed from the
 * response (see {@link Query#isPruneRefinements()}).
 *
 * @param pruneRefinements true to prune zero-count refinements
 * @return the Query object itself
 */
public Query setPruneRefinements(boolean pruneRefinements) {
    this.pruneRefinements = pruneRefinements;
    return this;
}

/**
 * @return Is the auto-correction behavior disabled
 *
 * @internal
 */
public boolean isAutocorrectionDisabled() {
    return disableAutocorrection;
}

/**
 * <code>
 * Specifies whether the auto-correction behavior should be disabled. By default, when no results are returned
 * for the given query (and there is a did-you-mean available), the first did-you-mean is automatically queried
 * instead.
 * Defaults to false
 *
 * JSON Reference:
 *
 * { "disableAutocorrection": false }
 *
 * </code>
 *
 * @param disableAutocorrection true to disable autocorrection, false otherwise
 * @return the Query object itself
 */
public Query setDisableAutocorrection(boolean disableAutocorrection) {
    this.disableAutocorrection = disableAutocorrection;
    return this;
}

/**
 * <code>
 * <b>Warning</b> This will count as two queries against your search index.
 * Typically, this feature is used when you have a large number of navigation items that will overwhelm the end
 * user. It works by using one of the existing navigation items to decide what the query is about and fires a second
 * query to restrict the navigation to the most relevant set of navigation items for this search term.
 * For example, if you pass in a search of `paper` and a restrict navigation of `category:2`
 * The bridge will find the category navigation refinements in the first query and fire a second query for the top 2
 * most populous categories. Therefore, a search for something generic like "paper" will bring back top category
 * matches like copy paper (1,030), paper pads (567). The bridge will fire off the second query with the search
 * term, plus an OR refinement with the most likely categories.
The navigation items in the first query are
* entirely replaced with the navigation items in the second query, except for the navigation that was used for the
* restriction so that users still have the ability to navigate by all category types.
*
* JSON Reference:
*
* { "restrictNavigation": { "name": "category", "count": 2 } }
*
* </code>
*
* @param restrictNavigation
*          Restriction criteria
*
* @return this query
*/
public Query setRestrictNavigation(RestrictNavigation restrictNavigation) {
    this.restrictNavigation = restrictNavigation;
    return this;
}

/**
 * <code>
 * <b>Warning</b> See {@link Query#setRestrictNavigation(RestrictNavigation)}. This is a convenience method.
 * </code>
 *
 * @param name
 *          the name of the field should be used in the navigation restriction in the second query.
 * @param count
 *          the number of fields matches
 *
 * @return this query
 */
public Query setRestrictNavigation(String name, int count) {
    this.restrictNavigation = new RestrictNavigation().setName(name).setCount(count);
    return this;
}

/**
 * @return The current list of sort parameters
 */
public List<Sort> getSort() {
    return sort;
}

/**
 * <code>
 * Specifies the sort order applied to the fields in the order specified. If no sort criteria are specified, the
 * default is to sort by relevance. There is a special sort field `_relevance`, which also specifies sorting by
 * relevance. It is possible to specify multiple sort criteria. The criteria order matters, as the records will be
 * sorted by the first criteria and then any matches will be tie-broken using the next criteria. Given an example
 * where the sort is specified as `category` then `_relevance`, results will be sorted first by `category` and
 * relevance will only affect the order between records that have the same category. Records can also be sorted by
 * a specific ID as well when you want to return items in a specific order.
If a record ID is included as a sort,
* but that record is not a part of the result set, that item will not be included (unlike push to top). There is a limit
* of 1000 id's that you can sort by. Any ID's beyond this limit will be ignored. ID sort can also be used with
* other types of sorts.
*
* Please note, sorting is based on the actual value in the record. For example, if sorting on `price`, and
* `price` is a `Range` navigation, the records will be sorted according to the actual price value in the record
* and not the bucket value.
*
* The order field can be set to either `Ascending` or `Descending`. When sorting by relevance, the order is always
* `Descending`. For any other field, the default order is `Ascending`.
*
* JSON Reference:
*
* { "sort": { "field": "price", "order": "Descending" } }
* { "sort": [{ "field": "_relevance" }, { "field": "price", "order": "Descending" }] }
* { "sort": [{ "field": "brand", "order":"Ascending" }, { "field": "_relevance" }, { "field": "price" }] }
* { "sort": [{ "type": "ByIds", "ids": ["1234"," 5678"]}] }
* { "sort": [{ "type": "ByIds", "ids": ["1234"," 5678"]}, { "field": "price", "order": "Descending" }] }
*
* </code>
*
* @param sort Any number of sort criteria.
* @return the Query object itself
*/
public Query setSort(Sort... sort) {
    // NOTE(review): despite the "set" name this appends to the existing
    // criteria via CollectionUtils.addAll rather than replacing them.
    CollectionUtils.addAll(this.sort, sort);
    return this;
}

/**
 * @return The current match strategy.
 */
public MatchStrategy getMatchStrategy() {
    return this.matchStrategy;
}

/**
 * <code>
 * A match strategy allows you to explicitly manage recall on a per query basis. There must always be one term
 * matching in a query, thus `termsGreaterThan` can only be defined from 1 upwards and `terms` can only be defined
 * from 2 upwards. It is not possible to match more terms than passed into the query. Relative `mustMatch` values
 * can be used in conjunction with `termsGreaterThan`. A `"percentage": true` flag denotes a relative `mustMatch`
 * to the portion of the terms and will always round down (i.e.
50% must match of 3 terms, means that 1 term must
* match).
*
* The following is the default match strategy:
*
* ```
* { "matchStrategy": { "rules":[{ "terms": 2, "mustMatch": 2 },
*                               { "terms": 3, "mustMatch": 2 },
*                               { "terms": 4, "mustMatch": 3 },
*                               { "terms": 5, "mustMatch": 3 },
*                               { "terms": 6, "mustMatch": 4 },
*                               { "terms": 7, "mustMatch": 4 },
*                               { "terms": 8, "mustMatch": 5 },
*                               { "termsGreaterThan": 8, "mustMatch": 60, "percentage": true }] } }
* ```
*
* An exact matching strategy would be:
*
* ```
* { "matchStrategy": { "rules": { "termsGreaterThan": 1, "mustMatch": 100, "percentage": true } } }
* ```
*
* Please note, it is highly recommended that the highest rule is defined with `termsGreaterThan`
* and a relative `mustMatch` as that guarantees that the number of matches required grows with the number of terms
* passed into the query.
*
* JSON Reference:
*
* { "matchStrategy": { "rules":[{ "terms": 2, "mustMatch": 2 },
*                               { "terms": 3, "mustMatch": 2 },
*                               { "terms": 4, "mustMatch": 3 },
*                               { "terms": 5, "mustMatch": 3 },
*                               { "terms": 6, "mustMatch": 4 },
*                               { "terms": 7, "mustMatch": 4 },
*                               { "terms": 8, "mustMatch": 5 },
*                               { "termsGreaterThan": 8, "mustMatch": 60, "percentage": true }] } }
* { "matchStrategy": { "rules": { "termsGreaterThan": 1, "mustMatch": 100, "percentage": true } } }
* { "matchStrategy": { "rules":[{ "terms": 2, "mustMatch": 1 },
*                               { "termsGreaterThan": 2, "mustMatch": 75, "percentage": true }] } }
*
* </code>
*
* @param matchStrategy A match strategy composed of partial matching rules.
* @return the Query object itself
*/
public Query setMatchStrategy(MatchStrategy matchStrategy) {
    this.matchStrategy = matchStrategy;
    return this;
}

/**
 * @return A list of navigations that will be included with the response.
 */
public List<String> getIncludeNavigations() {
    return includedNavigations;
}

/**
 * <code>
 * An array that specifies which navigations should be returned.
 *
 * If set, this overrides the navigations defined in Command Center and only returns the navigations specified.
* If this parameter is blank the Dynamic Navigations from Command Center are returned.
*
* The values here must be defined via Command Center or Bulk Upload. If a navigation is specified that has not been defined,
* it will be ignored.
*
* This means, if this parameter uses a `dummy` navigation that is not real, this will both override any Command Center definitions, and will return nothing, as the navigation does not exist.
*
* The field name supports two types of wildcard characters: '?' and '\*'.
* The '?' wildcard will match one character. For example "????_price" will match "sale_price",
* but not "sales_price". The '\*' wildcard will match any number of characters. For example, a
* name of "\*_price" will match both "sale_price and "sales_price", but not "sale_prices".
*
* JSON Reference:
*
* { "includedNavigations": [ "width", "brand", "categories.categories.value" ] }
* </code>
*
* @param navigationName
*          The case-sensitive name of the navigation to return
* @return the Query object itself
*/
public Query addIncludedNavigations(String... navigationName) {
    return addField(includedNavigations, navigationName);
}

/**
 * @return A list of navigations that will be excluded from the response.
 */
public List<String> getExcludeNavigations() {
    return excludedNavigations;
}

/**
 * <code>
 * Specify which navigations should not be returned. If set, this forces the response to
 * exclude certain navigations defined in Command Center. If this parameter is blank all
 * navigations in Command Center are returned. If a navigation name is specified that does
 * not exist, it will be ignored. If "includedNavigations" are specified, then all
 * "excludedNavigations" are ignored. Please see the documentation on "includedNavigations"
 * for details on wildcard characters in the field name.
*
* JSON Reference:
*
* { "excludedNavigations": [ "width", "brand", "categories.categories.value" ] }
*
* </code>
*
* @param navigationName
*          The case-sensitive name of the navigation to exclude
* @return the Query object itself
*/
public Query addExcludedNavigations(String... navigationName) {
    return addField(excludedNavigations, navigationName);
}

/**
 * @return The query level url-parameters.
 */
public Map<String, String> getQueryUrlParams() {
    return queryUrlParams;
}

/**
 * <code>
 *
 * Sets the query level url parameters. These will be used in the future to enable and disable
 * features, such as disabling Navigations in the response.
 *
 * </code>
 *
 * @param queryUrlParams
 *          The map of query level url parameters
 * @return the Query object itself
 */
public Query setQueryUrlParams(Map<String, String> queryUrlParams) {
    this.queryUrlParams = queryUrlParams;
    return this;
}

/**
 * <code>
 *
 * See {@link Query#setQueryUrlParams(Map)}. This is a convenience method for when you have no
 * value for the url parameter.
 *
 * </code>
 *
 * @param key
 *          The key of the url parameter
 * @return the Query object itself
 */
public Query addQueryUrlParams(String key) {
    return addQueryUrlParams(key, null);
}

/**
 * <code>
 *
 * See {@link Query#setQueryUrlParams(Map)}.
 *
 * </code>
 *
 * @param key
 *          The key of the url parameter
 * @param value
 *          The value of the url parameter
 * @return the Query object itself
 */
public Query addQueryUrlParams(String key, String value) {
    this.queryUrlParams.put(key, value);
    return this;
}

/**
 * <code>
 *
 * See {@link Query#setBiasing(Biasing)}. This is a convenience method to set which products should be
 * brought to the top of the result set.
 *
 * </code>
 *
 * @param bringToTop
 *          Up to 300 of product IDs to bring to the top of the result set.
 * @return the Query object itself
 */
public Query setBringToTop(String... bringToTop) {
    // Appends to the biasing's bring-to-top list rather than replacing it.
    CollectionUtils.addAll(this.biasing.getBringToTop(), bringToTop);
    return this;
}

/**
 * <code>
 *
 * See {@link Query#setBiasing(Biasing)}. This is a convenience method to set the list of products IDs that will be
 * used for `restrictToIds`.
*
* </code>
*
* @param restrictToIds
*          Up to 300 of product IDs that will be used to restrict the result set. You can use this to specify against which set of records any additional query or refinement actions should be taken.
* @return the Query object itself
*/
public Query setRestrictToIds(String... restrictToIds) {
    // Appends to the biasing's restrict-to-ids list rather than replacing it.
    CollectionUtils.addAll(this.biasing.getRestrictToIds(), restrictToIds);
    return this;
}

/**
 * <code>
 *
 * See {@link Query#setBiasing(Biasing)}. This is a convenience method to set the biasing augment status.
 *
 * </code>
 *
 * @param augment
 *          True to replace the biases defined in Command Center, false to augment.
 * @return the Query object itself
 */
public Query setBiasingAugment(boolean augment) {
    biasing.setAugmentBiases(augment);
    return this;
}

/**
 * <code>
 *
 * See {@link Query#setBiasing(Biasing)}. This is a convenience method to set the biasing influence.
 *
 * </code>
 *
 * @param influence
 *          The influence
 * @return the Query object itself
 */
public Query setInfluence(Float influence) {
    biasing.setInfluence(influence);
    return this;
}

/**
 * Convenience method that appends a single bias (name, content, strength) to
 * the current biasing configuration. See {@link Query#setBiasing(Biasing)}.
 *
 * @return the Query object itself
 */
public Query addBias(String name, String content, Bias.Strength strength) {
    biasing.getBiases().add(new Bias().setName(name).setContent(content).setStrength(strength));
    return this;
}

/**
 * @return The session ID
 */
public String getSessionId() {
    return sessionId;
}

/**
 * <code>
 * A unique string identifier of the session that your customer is currently in. The sessionID should be a unique value for a
 * given user that persists for them as long as that user is active on the site for that session. We define a session as the
 * time that you would consider a duration of an A/B test. In future, A/B testing tools within our solution will leverage
 * the session ID to group customers into different experiences. Ensuring that session ID is persistent throughout a measure
 * of time will help ensure that the customer experience is consistent as they shop and browse your site.
Therefore, the
* sessionID should update only if the user is inactive for some period - we recommend keeping this in alignment for what
* you consider a shopping session for your customers. For example, you can align this to the timeout of items stored in the
* shopping cart. Session ID should not change when the user logs in and can be used to track a user changing from anonymous
* to logged in. Session ID must also be consistent between the Search and Recommendations APIs to ensure correct monitoring of
* conversion metrics.
*
* |@warn
* | Sending raw session IDs is a security risk. Encrypt or hash session IDs prior to transmission.
*
* </code>
* @param sessionId
*          The session ID
* @return the Query object itself
*/
public Query setSessionId(String sessionId) {
    this.sessionId = sessionId;
    return this;
}

/**
 * @return The user ID
 */
public String getVisitorId() {
    return visitorId;
}

/**
 * <code>
 *
 * A unique string identifier of an end customer. Anonymous users (not logged in) should have a visitorID that is
 * a randomly generated v4 UUID. This visitorID should stay with the anonymous user for as long as possible or
 * until they log in. When a user logs in, their visitorID change to a known globally unique identifier for that customer.
 * Visitor ID should remain the same for a particular customer over different sessions. Also, it must be consistent
 * between the Search and Recommendations APIs to ensure correct monitoring of conversion metrics.
 *
 * |@warn
 * | Sending raw session IDs is a security risk. Encrypt or hash session IDs prior to transmission.
 *
 * </code>
 * @param visitorId
 *          The visitor ID
 * @return the Query object itself
 */
public Query setVisitorId(String visitorId) {
    this.visitorId = visitorId;
    return this;
}

/**
 * @return The biasing
 */
public Biasing getBiasing() {
    return biasing;
}

/**
 * Replaces the entire biasing configuration for this query.
 *
 * @param biasing the biasing configuration
 * @return the Query object itself
 */
public Query setBiasing(Biasing biasing) {
    this.biasing = biasing;
    return this;
}

/**
 * <code>
 * Add a secured payload to the query.
*
*
* JSON Reference:
*
* {
*   "securedPayload": {
*     "cipherText":"",
*     "initialValue":"",
*     "messageAuthenticationCode":""
*   }
* }
*
* </code>
*
* @param securedPayload
*          The secured payload received at login
*
* @internal
*/
public Query setSecuredPayload(AesContent securedPayload) {
    this.securedPayload = securedPayload;
    return this;
}

/**
 * @return true when this query has been flagged as bot traffic.
 */
public boolean isBot() {
    return bot;
}

/**
 * <code>
 * Url Parameter to indicate whether this query is bot traffic or not.
 * </code>
 *
 * @param bot
 *          True if this query is from a bot
 *
 * @internal
 */
public void setBot(boolean bot) {
    this.bot = bot;
    // Mirrors the flag into the query url parameters so it reaches the service.
    if (bot) {
        this.queryUrlParams.put("bot", "true");
    } else {
        this.queryUrlParams.remove("bot");
    }
}
}
package org.lightmare.utils;

import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;

/**
 * Utility class to work with {@link Collection} instances and arrays
 *
 * @author Levan
 *
 */
public class CollectionUtils {

    /** Index of the first element in a list or array. */
    public static final int FIRST_INDEX = 0;

    /**
     * Creates new {@link Set} from passed {@link Collection} instance.
     * Returns an immutable empty set when the collection is null or empty.
     *
     * @param collection source collection, may be null
     * @return {@link Set}<code><T></code>, never null
     */
    public static <T> Set<T> translateToSet(Collection<T> collection) {
        Set<T> set;
        if (ObjectUtils.available(collection)) {
            set = new HashSet<T>(collection);
        } else {
            set = Collections.emptySet();
        }
        return set;
    }

    /**
     * Creates new {@link Set} from passed array instance.
     * Returns an immutable empty set when the array is null or empty.
     *
     * @param array source array, may be null
     * @return {@link Set}<code><T></code>, never null
     */
    public static <T> Set<T> translateToSet(T[] array) {
        List<T> collection;
        if (ObjectUtils.available(array)) {
            collection = Arrays.asList(array);
        } else {
            // translateToSet(Collection) treats null as "empty"
            collection = null;
        }
        return translateToSet(collection);
    }

    /**
     * Creates new {@link List} from passed {@link Collection} instance.
     * Returns an immutable empty list when the collection is null or empty.
     *
     * @param collection source collection, may be null
     * @return {@link List}<code><T></code>, never null
     */
    public static <T> List<T> translateToList(Collection<T> collection) {
        List<T> list;
        if (ObjectUtils.available(collection)) {
            list = new ArrayList<T>(collection);
        } else {
            list = Collections.emptyList();
        }
        return list;
    }

    /**
     * Creates a new array of the passed component type and length.
     *
     * @param type array component type
     * @param size array length
     * @return <code>T[]</code>
     */
    private static <T> T[] toArray(Class<T> type, int size) {
        Object arrayObject = Array.newInstance(type, size);
        T[] array = ObjectUtils.cast(arrayObject);
        return array;
    }

    /**
     * Checks if passed {@link Object} is an array (object or primitive
     * component type).
     *
     * @param data object to check, may be null
     * @return <code>boolean</code> true for any non-null array
     */
    public static boolean isArray(final Object data) {
        // Class.isArray() covers every array type in a single reflective
        // check, replacing the previous nine-branch instanceof chain; the
        // explicit null guard keeps the null -> false behavior
        return data != null && data.getClass().isArray();
    }

    /**
     * Checks if passed {@link Object} is {@link Object} types array
     *
     * @param data object to check, may be null
     * @return <code>boolean</code>
     */
    public static boolean isObjectArray(final Object data) {
        boolean valid = data instanceof Object[];
        return valid;
    }

    /**
     * Checks if passed {@link Object} is primitive types array
     *
     * @param data object to check, may be null
     * @return <code>boolean</code>
     */
    public static boolean isPrimitiveArray(final Object data) {
        // an array that is not an Object[] must have a primitive component
        // type; equivalent to the previous eight-branch instanceof chain
        return isArray(data) && !(data instanceof Object[]);
    }

    /**
     * Converts passed {@link Collection} to array of appropriated {@link Class}
     * type
     *
     * @param collection source collection, may be null
     * @param type array component type
     * @return <code>T[]</code> or null when collection is null
     */
    public static <T> T[] toArray(Collection<T> collection, Class<T> type) {
        T[] array;
        if (ObjectUtils.notNull(collection)) {
            array = toArray(type, collection.size());
            array = collection.toArray(array);
        } else {
            array = null;
        }
        return array;
    }

    /**
     * Creates empty array of passed type
     *
     * @param type array component type
     * @return <code>T[]</code>
     */
    public static <T> T[] emptyArray(Class<T> type) {
        T[] empty = toArray(type, ObjectUtils.EMPTY_ARRAY_LENGTH);
        return empty;
    }

    /**
     * Peaks first element from list, or null when the list is null or empty
     *
     * @param list source list, may be null
     * @return T
     */
    private static <T> T getFirstFromList(List<T> list) {
        T value;
        if (ObjectUtils.available(list)) {
            value = list.get(FIRST_INDEX);
        } else {
            value = null;
        }
        return value;
    }

    /**
     * Peaks first element from collection, or null when the collection is
     * null or empty
     *
     * @param collection source collection, may be null
     * @return T
     */
    public static <T> T getFirst(Collection<T> collection) {
        T value;
        if (ObjectUtils.available(collection)) {
            if (collection instanceof List) {
                value = getFirstFromList(((List<T>) collection));
            } else {
                // available() guarantees non-emptiness, so next() is safe
                Iterator<T> iterator = collection.iterator();
                value = iterator.next();
            }
        } else {
            value = null;
        }
        return value;
    }

    /**
     * Peaks first element from array, or null when the array is null or empty
     *
     * @param values source array, may be null
     * @return T
     */
    public static <T> T getFirst(T[] values) {
        T value;
        if (ObjectUtils.available(values)) {
            value = values[FIRST_INDEX];
        } else {
            value = null;
        }
        return value;
    }
}
package com.ofg.infrastructure.autoconfigure; import com.ofg.infrastructure.config.EnableMicroserviceDocumentation; import org.springframework.boot.autoconfigure.AutoConfigureAfter; import org.springframework.boot.autoconfigure.condition.ConditionalOnExpression; import org.springframework.context.annotation.Configuration; /** * {@link org.springframework.boot.autoconfigure.EnableAutoConfiguration * Auto-configuration} for Swagger API Documentation. Equivalent to enabling * {@link com.ofg.infrastructure.config.EnableMicroserviceDocumentation} in your configuration. * <p/> * The configuration will not be activated if {@literal com.ofg.infra.microservice.documentation.auto=false}. * * @see com.ofg.infrastructure.config.EnableMicroserviceDocumentation */ @Configuration @ConditionalOnExpression("${com.ofg.infra.microservice.documentation.auto:true}") @AutoConfigureAfter(org.springframework.boot.autoconfigure.web.WebMvcAutoConfiguration.class) @EnableMicroserviceDocumentation public class MicroserviceDocumentationAutoConfiguration { }
package org.apache.cassandra.c3;

import org.apache.cassandra.config.DatabaseDescriptor;

import java.net.InetAddress;

/**
 * Adjusts the sending rate towards the observed receive rate using a
 * CUBIC-style control law: back off multiplicatively when sending faster than
 * the receiver absorbs, and grow along a cubic curve (clamped per step) while
 * below the receive rate.
 */
public class RateController {

    // Constants for send/receive rate tracking
    private static final long RECEIVE_RATE_INITIAL = 100;
    private static final long RATE_INTERVAL_MS = DatabaseDescriptor.getC3RateIntervalMs();
    private static final double RATE_LIMITER_MAX_TOKENS = DatabaseDescriptor.getC3RateLimiterMaxTokens();

    // Constants for cubic function
    private static final double CUBIC_BETA = 0.2;
    private static final double CUBIC_C = 0.000004;
    private static final double CUBIC_SMAX = 10;
    private static final double CUBIC_HYSTERESIS_FACTOR = 4;
    private static final double CUBIC_BETA_BY_C = CUBIC_BETA / CUBIC_C;
    private static final double CUBIC_HYSTERESIS_DURATION = RATE_INTERVAL_MS * CUBIC_HYSTERESIS_FACTOR;

    // Cubic growth variables
    private long timeOfLastRateDecrease = 0L;
    private long timeOfLastRateIncrease = 0L;

    // Highest sending rate seen just before the last back-off (CUBIC's W_max)
    private double Rmax = 0;

    private final SimpleRateLimiter sendingRateLimiter;
    private final SlottedRateTracker receiveRateTracker;

    public RateController() {
        this.sendingRateLimiter = new SimpleRateLimiter(1, RATE_INTERVAL_MS, RATE_LIMITER_MAX_TOKENS);
        this.receiveRateTracker = new SlottedRateTracker(RECEIVE_RATE_INITIAL, RATE_INTERVAL_MS);
    }

    /**
     * Re-evaluates the sending rate against the currently observed receive
     * rate. Synchronized so decrease/increase decisions are not interleaved.
     */
    public synchronized void updateCubicSendingRate() {
        final double currentReceiveRate = receiveRateTracker.getCurrentRate();
        final double currentSendingRate = sendingRateLimiter.getRate();
        final long now = System.currentTimeMillis();

        if (currentSendingRate > currentReceiveRate && (now - timeOfLastRateIncrease > CUBIC_HYSTERESIS_DURATION)) {
            // Sending faster than the receiver absorbs: remember the peak and
            // back off multiplicatively (floor of 0.1 so the rate never hits 0)
            Rmax = currentSendingRate;
            sendingRateLimiter.setRate(Math.max(currentSendingRate * CUBIC_BETA, 0.1));
            timeOfLastRateDecrease = now;
        } else if (currentSendingRate < currentReceiveRate) {
            // Below the receive rate: grow along the cubic curve anchored at
            // the time of the last decrease, limited to CUBIC_SMAX per step
            final double T = System.currentTimeMillis() - timeOfLastRateDecrease;
            timeOfLastRateIncrease = now;
            final double scalingFactor = Math.cbrt(Rmax * CUBIC_BETA_BY_C);
            final double newSendingRate = CUBIC_C * Math.pow(T - scalingFactor, 3) + Rmax;
            if (newSendingRate - currentSendingRate > CUBIC_SMAX) {
                sendingRateLimiter.setRate(currentSendingRate + CUBIC_SMAX);
            } else {
                sendingRateLimiter.setRate(newSendingRate);
            }
            assert (newSendingRate > 0);
        }
    }

    /**
     * @return 0 when a send token was acquired, otherwise the time to wait in
     *         nanoseconds until a token becomes available
     */
    public double tryAcquire() {
        return sendingRateLimiter.tryAcquire();
    }

    /** Records one received message in the receive-rate tracker. */
    public void receiveRateTrackerTick() {
        receiveRateTracker.add(1);
    }

    private class SlottedRateTracker {

        private double currentRate;
        private long interval;
        private long lastTick = 0;
        private long eventCount = 0;

        // EWMA smoothing weight applied to the most recent slot's count
        private final double ALPHA = 0.9;

        /**
         * @param initialRate Initial setting for the rate parameter
         * @param interval    Interval in milliseconds over which rate is calculated
         */
        public SlottedRateTracker(double initialRate, long interval) {
            this.currentRate = initialRate;
            this.interval = interval;
        }

        public synchronized double getCurrentRate() {
            // fold any completed slot into the rate before reporting it
            add(0);
            return this.currentRate;
        }

        public synchronized void setInterval(long interval) {
            this.interval = interval;
        }

        /**
         * Add to the rate counter. During an interval, an exponentially
         * weighted average of the rate is maintained. Once we're outside the
         * granularity of an interval, this rate is reset. In that sense, we're
         * not really tracking a moving average.
         *
         * @param requests number of events to record
         */
        public synchronized void add(long requests) {
            // slot number: wall-clock time quantized to the interval
            final long now = System.currentTimeMillis() / interval;
            if (now - lastTick < 2) {
                eventCount += requests;
                if (now > lastTick) {
                    // slot boundary crossed: fold the finished slot into the EWMA
                    currentRate = ALPHA * ((double) eventCount) + (1 - ALPHA) * currentRate;
                    lastTick = now;
                    eventCount = 0;
                }
            } else {
                // idle for two or more slots: fold what we had and reset.
                // NOTE(review): in this branch the incoming `requests` value is
                // not added to eventCount and is effectively dropped; this
                // looks like an intentional post-idle reset, but worth
                // confirming with the original authors.
                currentRate = ALPHA * ((double) eventCount) + (1 - ALPHA) * currentRate;
                lastTick = now;
                eventCount = 0;
            }
        }
    }

    private class SimpleRateLimiter {

        private long lastSent;            // System.nanoTime() of the last successful acquire
        private double tokens;            // current token-bucket fill level
        private double rate;              // tokens replenished per rateIntervalNanos
        private double rateIntervalNanos; // replenishment interval in nanoseconds
        private final double maxTokens;   // token bucket capacity

        /**
         * @param initialRate        initial token-replenishment rate
         * @param rateIntervalMillis replenishment interval in milliseconds;
         *                           converted to nanoseconds internally so it
         *                           can be compared against System.nanoTime()
         *                           deltas
         * @param maxTokens          token bucket capacity
         */
        public SimpleRateLimiter(double initialRate, double rateIntervalMillis, double maxTokens) {
            this.rate = initialRate;
            // ms -> ns. The previous field/parameter names had the units
            // reversed (a field named "InMillis" actually held nanoseconds and
            // a parameter named "InNanos" actually received milliseconds); the
            // arithmetic is unchanged.
            this.rateIntervalNanos = rateIntervalMillis * 1000000;
            this.maxTokens = maxTokens;
            this.tokens = maxTokens;
            this.lastSent = System.nanoTime();
        }

        /**
         * @return 0 when a token was taken, otherwise the wait time in
         *         nanoseconds until one token will have accrued
         */
        public synchronized double tryAcquire() {
            double currentTokens = Math.min(maxTokens,
                    tokens + (rate / rateIntervalNanos * (System.nanoTime() - lastSent)));
            if (currentTokens >= 1) {
                tokens = currentTokens - 1;
                lastSent = System.nanoTime();
                return 0;
            } else {
                return (1 - currentTokens) * rateIntervalNanos / rate; // Nanoseconds
            }
        }

        public synchronized double getRate() {
            return rate;
        }

        public synchronized void setRate(final double rate) {
            this.rate = rate;
        }
    }
}
package org.lightmare.utils.beans; import javax.ejb.Stateless; import org.lightmare.utils.ObjectUtils; import org.lightmare.utils.StringUtils; /** * Utility class for EJB beans * * @author levan * */ public class BeanUtils { private static final String REMOTE_IDENT = "Remote"; private static final String LOCAL_IDENT = "Remote"; /** * Retrieves bean name from class name * * @param name * @return String */ public static String parseName(String name) { String simpleName = name; int index = name.lastIndexOf('.'); if (index > StringUtils.NOT_EXISTING_INDEX) { index++; simpleName = name.substring(index); } return simpleName; } /** * Removes <b>Remote</b> or <b>Local</b> part from bean interface name * * @param interfaceClass * @return */ public static String nameFromInterface(Class<?> interfaceClass) { String interfaceName = interfaceClass.getSimpleName(); String beanName; int start; if (interfaceName.endsWith(REMOTE_IDENT)) { start = interfaceName.lastIndexOf(REMOTE_IDENT); beanName = interfaceName.substring(0, start); } else if (interfaceName.endsWith(LOCAL_IDENT)) { start = interfaceName.lastIndexOf(LOCAL_IDENT); beanName = interfaceName.substring(0, start); } else { beanName = interfaceName; } return beanName; } public static String beanName(Class<?> beanClass) { Stateless annotation = beanClass.getAnnotation(Stateless.class); String beanEjbName = annotation.name(); if (ObjectUtils.notAvailable(beanEjbName)) { beanEjbName = beanClass.getSimpleName(); } return beanEjbName; } }
package com.jaamsim.ui;

import java.awt.Point;
import java.awt.event.MouseEvent;
import java.util.ArrayList;

import javax.swing.JScrollPane;
import javax.swing.JTable;
import javax.swing.table.AbstractTableModel;
import javax.swing.table.TableModel;

import com.jaamsim.basicsim.Entity;
import com.jaamsim.input.OutputHandle;
import com.jaamsim.units.DimensionlessUnit;
import com.jaamsim.units.Unit;

/**
 * Frame that lists the outputs of the currently selected entity together with
 * their values at the present simulation time.
 */
public class OutputBox extends FrameBox {
	private static OutputBox myInstance;
	private Entity currentEntity;
	OutputTableModel tableModel;

	// Row data for the table: a Class entry renders as a bold section header,
	// an OutputHandle entry renders as an output row beneath it
	private final ArrayList<Object> entries = new ArrayList<>();

	public OutputBox() {
		super( "Output Viewer" );
		setDefaultCloseOperation(FrameBox.DISPOSE_ON_CLOSE);
		addWindowListener(FrameBox.getCloseListener("ShowOutputViewer"));

		tableModel = new OutputTableModel();
		OutputTable table = new OutputTable(tableModel);
		JScrollPane scrollPane = new JScrollPane(table);
		getContentPane().add( scrollPane );

		setLocation(GUIFrame.COL3_START, GUIFrame.LOWER_START);
		setSize(GUIFrame.COL3_WIDTH, GUIFrame.LOWER_HEIGHT);
	}

	/**
	 * Returns the only instance of the property box
	 */
	public synchronized static OutputBox getInstance() {
		if (myInstance == null)
			myInstance = new OutputBox();
		return myInstance;
	}

	@Override
	public void setEntity( Entity entity ) {
		currentEntity = entity;
		if (currentEntity == null) {
			setTitle("Output Viewer");
			entries.clear();
			return;
		}
		setTitle("Output Viewer - " + currentEntity.getName());

		// Build up the row list, leaving extra rows for entity names
		Class<?> currClass = null;
		entries.clear();
		ArrayList<OutputHandle> handles = OutputHandle.getOutputHandleList(currentEntity);
		for (OutputHandle h : handles) {
			Class<?> klass = h.getDeclaringClass();
			if (currClass != klass) {
				// This is the first time we've seen this class, add a place holder row
				currClass = klass;
				entries.add(klass);
			}
			entries.add(h);
		}
	}

	@Override
	public void updateValues(double simTime) {
		if (tableModel == null) return;
		tableModel.simTime = simTime;
		tableModel.fireTableDataChanged();
	}

	private synchronized static void killInstance() {
		myInstance = null;
	}

	@Override
	public void dispose() {
		killInstance();
		super.dispose();
	}

	private class OutputTable extends JTable {
		public OutputTable(TableModel model) {
			super(model);

			setDefaultRenderer(Object.class, colRenderer);
			getColumnModel().getColumn(0).setWidth(150);
			getColumnModel().getColumn(1).setWidth(100);

			this.getTableHeader().setFont(FrameBox.boldFont);
			this.getTableHeader().setReorderingAllowed(false);
		}

		@Override
		public String getToolTipText(MouseEvent event) {
			Point p = event.getPoint();
			int row = rowAtPoint(p);

			// BUGFIX: rowAtPoint returns -1 when the point is not over a row;
			// the previous check missed that case and entries.get(-1) threw
			// an IndexOutOfBoundsException
			if (currentEntity == null || row < 0 || row >= entries.size() ||
			    entries.get(row) instanceof Class) {
				return null;
			}

			OutputHandle output = (OutputHandle)entries.get(row);
			String desc = output.getDescription(); // defensive new String(...) copy was redundant
			// escape '&' first so the entities introduced below are not re-escaped
			desc = desc.replaceAll("&", "&amp;");
			desc = desc.replaceAll("<", "&lt;");
			desc = desc.replaceAll(">", "&gt;");
			desc = desc.replaceAll("\n", "<BR>");
			return GUIFrame.formatOutputToolTip(output.getName(), desc);
		}

		@Override
		public void doLayout() {
			FrameBox.fitTableToLastColumn(this);
		}
	}

	private class OutputTableModel extends AbstractTableModel {
		double simTime = 0.0d;

		@Override
		public int getColumnCount() {
			return 2;
		}

		@Override
		public String getColumnName(int column) {
			switch (column) {
			case 0: return "Output";
			case 1: return "Value";
			}
			return "Unknown";
		}

		@Override
		public int getRowCount() {
			return entries.size();
		}

		@Override
		public Object getValueAt(int row, int col) {
			Object entry = entries.get(row);
			switch (col) {
			case 0:
				// class entries are section headers, output entries are indented rows
				if (entry instanceof Class)
					return String.format("<HTML><B>%s</B></HTML>", ((Class<?>)entry).getSimpleName());
				return String.format(" %s", ((OutputHandle)entry).getName());
			case 1:
				if (entry instanceof Class)
					return "";
				try {
					OutputHandle o = (OutputHandle)entry;
					if (o.isNumericValue()) {
						double d = o.getValueAsDouble(simTime, Double.NaN);
						Class<? extends Unit> ut = o.getUnitType();
						if (ut == Unit.class || ut == DimensionlessUnit.class) {
							return String.format("%g", d);
						}
						else {
							return String.format("%g %s", d/Unit.getDisplayedUnitFactor(ut), Unit.getDisplayedUnit(ut));
						}
					}
					String s = o.getValue(simTime, o.getReturnType()).toString();
					if (o.getUnitType() == Unit.class )
						return s;
					else
						return s + " " + Unit.getSIUnit(o.getUnitType());
				}
				catch (Throwable e) {
					// deliberately best-effort: a failing output renders as blank
					return "";
				}
			default:
				assert false;
				return null;
			}
		}

		@Override
		public boolean isCellEditable(int rowIndex, int columnIndex) {
			return false;
		}
	}
}
package org.mycore.xml; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.mycore.xml.abbyy.v10.BlockType; import org.mycore.xml.abbyy.v10.*; import org.mycore.xml.alto.v2.*; import org.mycore.xml.alto.v2.Alto.Description; import org.mycore.xml.alto.v2.Alto.Layout; import org.mycore.xml.alto.v2.Alto.Layout.Page; import org.mycore.xml.alto.v2.Alto.Styles; import org.mycore.xml.alto.v2.Alto.Description.OCRProcessing; import org.mycore.xml.alto.v2.Alto.Styles.ParagraphStyle; import org.mycore.xml.alto.v2.Alto.Styles.TextStyle; import org.mycore.xml.alto.v2.TextBlockType.TextLine; import org.mycore.xml.alto.v2.TextBlockType.TextLine.SP; import javax.xml.bind.JAXBElement; import java.math.BigInteger; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.*; import java.util.Map.Entry; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; import java.util.stream.Stream; /** * Base class to convert a single abbyy xml file to an alto one. Just call {@link #convert(Document)}. * You can use {@link JAXBUtil#unmarshalAbbyyDocument(java.io.InputStream)} to load an abbyy document. * * @author Matthias Eichner */ public class AbbyyToAltoConverter { private static Logger LOGGER = LogManager.getLogger(); private String defaultFontFamily = null; private Float defaultFontSize = null; private boolean enableConfidence = true; /** * Converts the given abbyy document to an ALTO one. 
* * @param abbyyDocument the abbyy document * @return alto xml as POJO */ public Alto convert(Document abbyyDocument) { Alto alto = buildAlto(); Page page = buildAltoPage(alto); // Build Processing metadata from ABBYY producer String producer = abbyyDocument.getProducer(); alto.getDescription().getOCRProcessing().add( buildProcessing(producer, null) ); // Add Processing metadata for the conversion itself alto.getDescription().getOCRProcessing().add( buildProcessing("abbyy-to-alto", buildIsoDate(new Date())) ); abbyyDocument.getPage().stream().findFirst().ifPresent(abbyyPage -> { page.setWIDTH(abbyyPage.getWidth().intValue()); page.setHEIGHT(abbyyPage.getHeight().intValue()); }); PageSpaceType pageSpace = new PageSpaceType(); page.setPrintSpace(pageSpace); Stream<BlockType> blockStream = abbyyDocument.getPage().stream().flatMap(p -> p.getBlock().stream()); Rectangle pageRect = new Rectangle(); AtomicInteger composedBlockCount = new AtomicInteger(0); AtomicInteger paragraphCount = new AtomicInteger(0); AtomicInteger illustrationCount = new AtomicInteger(0); AtomicInteger graphicalElementCount = new AtomicInteger(0); AtomicInteger tableElementCount = new AtomicInteger(0); blockStream.forEach(abbyyBlock -> { String blockType = abbyyBlock.getBlockType(); Rectangle blockRect = new Rectangle(abbyyBlock); pageRect.maximize(blockRect); if (blockType.equals("Text")) { ComposedBlockType composedBlock = new ComposedBlockType(); abbyyBlock.getText().stream().flatMap(text -> text.getPar().stream()).forEach(abbyyParagraph -> { handleParagraph(alto, composedBlock, abbyyParagraph, paragraphCount); }); if (composedBlock.getContent().isEmpty()) { return; } composedBlock.setTYPE("text"); blockRect.applyOnBlock(composedBlock); composedBlock.setID("ComposedBlock_" + composedBlockCount.incrementAndGet()); pageSpace.getContent().add(composedBlock); } else if (blockType.equals("Picture")) { IllustrationType illustration = new IllustrationType(); blockRect.applyOnBlock(illustration); 
illustration.setID("Illustration_" + illustrationCount.incrementAndGet()); pageSpace.getContent().add(illustration); } else if (blockType.equals("Table")) { ComposedBlockType tableBlock = new ComposedBlockType(); tableBlock.setTYPE("table"); blockRect.applyOnBlock(tableBlock); abbyyBlock.getRow().stream() .flatMap(row -> row.getCell().stream()) .flatMap(cell -> cell.getText().stream()) .flatMap(text -> text.getPar().stream()) .forEach(abbyyParagraph -> { handleParagraph(alto, tableBlock, abbyyParagraph, paragraphCount); }); tableBlock.setID("Table_" + tableElementCount.incrementAndGet()); pageSpace.getContent().add(tableBlock); } else if (blockType.equals("Separator") || blockType.equals("SeparatorsBox")) { GraphicalElementType graphicalSeparator = new GraphicalElementType(); blockRect.applyOnGraphicalElement(graphicalSeparator); graphicalSeparator.setID("GraphicalElement_" + graphicalElementCount.incrementAndGet()); pageSpace.getContent().add(graphicalSeparator); } else if (blockType.equals("Barcode")) { LOGGER.warn("Unsupported block type '" + blockType + "' at " + blockRect); } else { throw new ConvertException("Invalid block type " + blockType + " at " + blockRect); } }); pageRect.applyOnPageSpace(pageSpace); optimizeFonts(pageSpace); updateParagraphStyles(alto.getStyles().getParagraphStyle()); return alto; } /** * Builds an empty {@link Alto} object with description and styles. 
* * @return alto as java object */ private Alto buildAlto() { Alto alto = new Alto(); alto.setDescription(buildDescription(alto)); alto.setStyles(new Styles()); return alto; } private Description buildDescription(Alto alto) { Description description = new Description(); description.setMeasurementUnit("pixel"); return description; } private Page buildAltoPage(Alto alto) { Layout layout = new Layout(); alto.setLayout(layout); Page page = new Page(); layout.getPage().add(page); page.setID("Page"); return page; } private OCRProcessing buildProcessing(String softwareName, String dateTime) { ProcessingStepType processingStep = new ProcessingStepType(); if (softwareName != null) { ProcessingSoftwareType processingSoftware = new ProcessingSoftwareType(); processingSoftware.setSoftwareName(softwareName); processingStep.setProcessingSoftware(processingSoftware); } if (dateTime != null) { processingStep.setProcessingDateTime(dateTime); } OCRProcessing processing = new OCRProcessing(); processing.setOcrProcessingStep(processingStep); return processing; } private String buildIsoDate(Date date) { DateFormat df = new SimpleDateFormat("yyyy-MM-dd"); TimeZone tz = TimeZone.getTimeZone("UTC"); df.setTimeZone(tz); return df.format(date); } /** * Handles the convert process of one abbyy paragraph. 
     *
     * @param alto the alto document
     * @param composedBlock the parent alto composed block
     * @param abbyyParagraph the source abbyy paragraph
     * @param paragraphCount a count of how many paragraphs are already added, used for id generation of
     *            the alto paragraph's (text block's)
     */
    private void handleParagraph(Alto alto, ComposedBlockType composedBlock, ParagraphType abbyyParagraph,
        AtomicInteger paragraphCount) {
        TextBlockType paragraphBlock = new TextBlockType();
        Rectangle textBlockRect = new Rectangle();
        if (abbyyParagraph.getLine().isEmpty()) {
            // nothing to convert
            return;
        }
        abbyyParagraph.getLine().forEach(abbyyLine -> {
            handleLine(alto, paragraphBlock, textBlockRect, abbyyLine);
        });
        if (paragraphBlock.getTextLine().isEmpty()) {
            // every line was empty or ignored
            return;
        }
        textBlockRect.applyOnBlock(paragraphBlock);
        paragraphBlock.setID("Paragraph_" + paragraphCount.incrementAndGet());
        ParagraphStyle paragraphStyle = getParagraphStyle(alto.getStyles(), abbyyParagraph);
        if (paragraphStyle != null) {
            paragraphBlock.getSTYLEREFS().add(paragraphStyle);
        }
        composedBlock.getContent().add(paragraphBlock);
    }

    /**
     * Handles the convert process of one abbyy text line.
     *
     * @param alto the alto document
     * @param textBlock the alto text block
     * @param textBlockRect a rectangle with the bounds of the alto block
     * @param abbyyLine the source abbyy text line
     */
    private void handleLine(Alto alto, TextBlockType textBlock, Rectangle textBlockRect, LineType abbyyLine) {
        Rectangle lineRect = new Rectangle(abbyyLine);
        if (lineRect.area() == 0) {
            // a zero-area line carries no renderable content
            return;
        }
        textBlockRect.maximize(lineRect);
        TextLine altoLine = new TextLine();
        lineRect.applyOnLine(altoLine);
        abbyyLine.getFormatting().forEach(abbyyFormatting -> {
            handleFormatting(alto, altoLine, abbyyFormatting);
        });
        if (ignoreAltoLine(altoLine)) {
            return;
        }
        textBlock.getTextLine().add(altoLine);
    }

    /**
     * Checks if the given alto line should be ignored and not added to its corresponding block.
     * By default this method checks if the alto line is empty or contains just white spaces.
     *
     * @param altoLine the alto line to check
     * @return true if the line should be ignored, otherwise false
     */
    protected boolean ignoreAltoLine(TextLine altoLine) {
        for (Object content : altoLine.getStringAndSP()) {
            // due to the fact that only SP and String are allowed we can just check
            // the instance type of the content and if it's a string the line shouldn't
            // be ignored
            if (content instanceof StringType) {
                return false;
            }
        }
        return true;
    }

    /**
     * Handles the convert process of one abbyy formatting.
     *
     * @param alto the alto document
     * @param altoLine the alto line where the abbyy formatting is added to
     * @param abbyyFormatting the source abbyy formatting
     */
    private void handleFormatting(Alto alto, TextLine altoLine, FormattingType abbyyFormatting) {
        // get the text style for this formatting
        TextStyle style = getTextStyle(alto.getStyles(), abbyyFormatting);
        // get the characters of the formatting
        Stream<CharParamsType> charParamsStream = abbyyFormatting.getContent().stream().filter(c -> {
            return c instanceof JAXBElement && ((JAXBElement<?>) c).getValue() instanceof CharParamsType;
        }).map(c -> (CharParamsType) ((JAXBElement<?>) c).getValue());
        // build the word list: whitespace characters become standalone "space"
        // words and terminate the word being accumulated in wordRef
        List<Word> words = new ArrayList<>();
        AtomicReference<Word> wordRef = new AtomicReference<>();
        charParamsStream.forEach(charParam -> {
            String value = charParam.getContent()
                                    .stream()
                                    .filter(c -> c instanceof String)
                                    .map(String.class::cast)
                                    .collect(Collectors.joining())
                                    .trim();
            if (value.isEmpty()) {
                Word space = new Word(true);
                space.addCharParam(charParam);
                words.add(space);
                wordRef.set(null);
                return;
            }
            if (wordRef.get() == null) {
                wordRef.set(new Word(false));
                words.add(wordRef.get());
            }
            wordRef.get().addCharParam(charParam);
        });
        // add the words to the alto line
        addToLine(altoLine, style, words);
    }

    /**
     * Adds each word of the word list to the line with the given text style.
* * @param altoLine the alto line where the words are added to * @param style the text style of the line * @param words a list of words */ private void addToLine(TextLine altoLine, TextStyle style, List<Word> words) { words.forEach(word -> { Rectangle wordRect = word.getRectangle(); if (word.isSpace()) { SP sp = new SP(); wordRect.applyOnSP(sp); altoLine.getStringAndSP().add(sp); } else { StringType string = new StringType(); string.setCONTENT(word.getValue()); if (enableConfidence) { try { string.setWC(word.getWC()); } catch (Exception exc) { LOGGER.warn("Error while getting word confidence (WC) of " + word.getValue()); string.setWC(0f); } try { string.setCC(word.getCC()); } catch (Exception exc) { LOGGER.warn("Error while getting character confidence (CC) of " + word.getValue()); } } string.getSTYLEREFS().add(style); wordRect.applyOnString(string); altoLine.getStringAndSP().add(string); } }); } /** * Gets the alto {@link TextStyle} by the abbyy formatting. * * @param styles the alto styles * @param abbyyFormatting the abbyy formatting * @return the alto text style object */ private TextStyle getTextStyle(Styles styles, FormattingType abbyyFormatting) { String fontFamily = abbyyFormatting.getFf() != null ? abbyyFormatting.getFf() : getDefaultFontFamily(); if (fontFamily == null) { throw new ConvertException("Unable to set font familiy of TextStyle cause the ff attribute of a formatting " + " element is missing. There is no default font family configured. Please use setDefaultFontFamily(string) to " + "fix this!"); } Float fontSize = abbyyFormatting.getFs() != null ? abbyyFormatting.getFs() : getDefaultFontSize(); if (fontSize == null) { throw new ConvertException("Unable to set font size of TextStyle cause the fs attribute of a formatting " + "element is missing. There is no default font size configured. 
Please use setDefaultFontSize(float) to " + "fix this!"); } List<String> fontStyles = new ArrayList<>(); if (abbyyFormatting.isBold()) { fontStyles.add("bold"); } if (abbyyFormatting.isItalic()) { fontStyles.add("italic"); } if (abbyyFormatting.isUnderline()) { fontStyles.add("underline"); } return getTextStyle(styles, fontFamily, fontSize, fontStyles); } /** * Gets the {@link TextStyle} for the given font family and size. Each alto document * have a pre defined list of fonts. If no text style is found, this method creates * an appropriate one. * * @param styles the alto styles * @param fontFamily the font family * @param fontSize the font size * @param fontStyles list of font styles e.g. bold or italics * @return the text style */ private TextStyle getTextStyle(Styles styles, String fontFamily, Float fontSize, List<String> fontStyles) { List<TextStyle> textStyles = styles.getTextStyle(); TextStyle textStyle = textStyles.stream().filter(ts -> { boolean equalFFamily = fontFamily.equals(ts.getFONTFAMILY()); boolean equalFSize = fontSize.equals(ts.getFONTSIZE()); boolean equalFStyles = (fontStyles.isEmpty() && ts.getFONTSTYLE() == null) || (fontStyles.equals(ts.getFONTSTYLE())); return equalFFamily && equalFSize && equalFStyles; }).findFirst().orElse(null); if (textStyle == null) { textStyle = new TextStyle(); textStyle.setFONTFAMILY(fontFamily); textStyle.setFONTSIZE(fontSize); if (!fontStyles.isEmpty()) { textStyle.setFONTSTYLE(fontStyles); } textStyle.setID("font" + textStyles.size()); textStyles.add(textStyle); } return textStyle; } /** * <p>Returns a new or existing paragraph style of a given abbbyy par element. 
     * If an equivalent paragraph style does not exist yet, the newly created
     * style is registered in the given styles element before being returned.</p>
     * <p>If the par element carries no relevant attributes, this method
     * returns null.</p>
     *
     * @param styles    the existing styles
     * @param paragraph the abbyy par element
     * @return a matching (possibly newly registered) alto paragraph style, or null
     */
    private ParagraphStyle getParagraphStyle(Styles styles, ParagraphType paragraph) {
        ParagraphStyle paragraphStyle = createParagraphStyle(paragraph);
        if (paragraphStyle == null) {
            return null;
        }
        List<ParagraphStyle> paragraphStyles = styles.getParagraphStyle();
        // Reuse an already registered style when every relevant attribute
        // matches; otherwise register the freshly created one.
        return paragraphStyles.stream()
            .filter(ps -> Objects.equals(ps.getALIGN(), paragraphStyle.getALIGN())
                && Objects.equals(ps.getLEFT(), paragraphStyle.getLEFT())
                && Objects.equals(ps.getRIGHT(), paragraphStyle.getRIGHT())
                && Objects.equals(ps.getLINESPACE(), paragraphStyle.getLINESPACE())
                && Objects.equals(ps.getFIRSTLINE(), paragraphStyle.getFIRSTLINE()))
            .findFirst()
            .orElseGet(() -> {
                styles.getParagraphStyle().add(paragraphStyle);
                return paragraphStyle;
            });
    }

    /**
     * Creates a new alto paragraph style based on the given attributes of the
     * abbyy par element. The ID of the ParagraphStyle is null!
     *
     * <p>Only attributes that differ from their defaults (LEFT alignment,
     * zero indents/spacing) are copied; if nothing differs, null is returned.</p>
     *
     * @param paragraph the abbyy par element
     * @return a new alto paragraph style, or null if no attribute was set
     */
    private ParagraphStyle createParagraphStyle(ParagraphType paragraph) {
        ParagraphStyle ps = new ParagraphStyle();
        ParagraphAlignment align = paragraph.getAlign();
        boolean hasAttributes = false;
        if (!align.equals(ParagraphAlignment.LEFT)) {
            String alignValue = align.name().toLowerCase();
            // ABBYY calls it "justified", ALTO calls it "Block".
            alignValue = alignValue.equals("justified") ? "block" : alignValue;
            // Capitalize first letter to match the ALTO enumeration style.
            ps.setALIGN(alignValue.substring(0, 1).toUpperCase() + alignValue.substring(1));
            hasAttributes = true;
        }
        if (!paragraph.getLeftIndent().equals(new BigInteger("0"))) {
            ps.setLEFT(paragraph.getLeftIndent().floatValue());
            hasAttributes = true;
        }
        if (!paragraph.getRightIndent().equals(new BigInteger("0"))) {
            ps.setRIGHT(paragraph.getRightIndent().floatValue());
            hasAttributes = true;
        }
        if (!paragraph.getLineSpacing().equals(new BigInteger("0"))) {
            ps.setLINESPACE(paragraph.getLineSpacing().floatValue());
            hasAttributes = true;
        }
        if (!paragraph.getStartIndent().equals(new BigInteger("0"))) {
            ps.setFIRSTLINE(paragraph.getStartIndent().floatValue());
            hasAttributes = true;
        }
        return hasAttributes ? ps : null;
    }

    /**
     * A helper class which describes a single word. A word is constructed by
     * abbyy {@link #addCharParam(CharParamsType)}. Each char param describes a
     * single character.
     */
    private static class Word {

        // True when this "word" actually represents a single space character.
        private boolean space = false;

        // The characters (abbyy charParams elements) making up this word.
        private List<CharParamsType> charParams = new ArrayList<>();

        /**
         * Creates a new word.
         *
         * @param isSpace true if this word represents a space character
         */
        public Word(boolean isSpace) {
            this.space = isSpace;
        }

        /**
         * Adds a new character to this word.
         *
         * @param charParam the new character
         */
        public void addCharParam(CharParamsType charParam) {
            this.charParams.add(charParam);
        }

        /**
         * Returns the surrounding rectangle for this word.
         *
         * @return a rectangle containing all characters of this word
         */
        public Rectangle getRectangle() {
            Rectangle rect = new Rectangle();
            // Bounding box: min left/top, max right/bottom over all characters.
            rect.left = charParams.stream()
                .map(CharParamsType::getL)
                .min(new BigIntegerComparator())
                .orElse(BigInteger.ZERO)
                .intValue();
            rect.top = charParams.stream()
                .map(CharParamsType::getT)
                .min(new BigIntegerComparator())
                .orElse(BigInteger.ZERO)
                .intValue();
            rect.right = charParams.stream()
                .map(CharParamsType::getR)
                .max(new BigIntegerComparator())
                .orElse(BigInteger.ZERO)
                .intValue();
            rect.bottom = charParams.stream()
                .map(CharParamsType::getB)
                .max(new BigIntegerComparator())
                .orElse(BigInteger.ZERO)
                .intValue();
            return rect;
        }

        /**
         * Returns the word as string.
         *
         * @return the word as string, whitespace-trimmed
         */
        public String getValue() {
            // Each charParam's mixed content may contain Strings; join them.
            return charParams.stream()
                .flatMap(cp -> cp.getContent().stream())
                .filter(c -> c instanceof String)
                .map(String.class::cast)
                .collect(Collectors.joining())
                .trim();
        }

        /**
         * Confidence level of the OCR results for this word. A float value
         * between 0 (unsure) and 1 (confident).
         *
         * <p>If every character carries a wordFromDictionary flag, the ratio of
         * dictionary hits is used; otherwise the average character confidence
         * is returned.</p>
         *
         * @return the word confidence
         */
        public Float getWC() {
            // Dictionary info is usable only if no charParam has a null flag.
            boolean dictionaryCheckAvailable = !charParams.stream()
                .map(CharParamsType::isWordFromDictionary)
                .filter(dic -> dic == null)
                .findAny()
                .isPresent();
            if (dictionaryCheckAvailable) {
                return (float) charParams.stream()
                    .map(CharParamsType::isWordFromDictionary)
                    .mapToDouble(b -> b ? 1d : 0d)
                    .average()
                    .orElse(0);
            }
            return getAverageCC();
        }

        /**
         * Confidence level of each character in that string. A list of
         * numbers, one number between 0 (sure) and 9 (unsure) for each
         * character.
         *
         * @return the character confidences as space-separated integers
         */
        public String getCC() {
            return charParams.stream().map(CharParamsType::getCharConfidence).map(BigInteger::intValue).map(cc -> {
                // ABBYY uses -1 for "no confidence available"; treat as 0.
                cc = cc == -1 ? 0 : cc;
                // Invert and rescale ABBYY's 0..100 (100 = sure) to ALTO's
                // 0..9 (0 = sure).
                return String.valueOf((9 - Math.round((cc / 100f) * 9f)));
            }).collect(Collectors.joining(" "));
        }

        /**
         * Gets the average charConfidence of the word, scaled to 0..1.
         *
         * @return the average CC
         */
        public Float getAverageCC() {
            Double wc = charParams.stream()
                .map(CharParamsType::getCharConfidence)
                .mapToInt(BigInteger::intValue)
                .average()
                .orElse(0);
            // NOTE(review): this compares the *average* to -1; individual -1
            // ("unavailable") values are not filtered before averaging — confirm
            // whether that is intended (getCC() maps them per character).
            wc = wc == -1d ? 0 : wc;
            wc = wc / 100;
            return wc.floatValue();
        }

        /**
         * If this word is actually just a space character. This is used for
         * convenience.
         *
         * @return true if this word is a space character
         */
        public boolean isSpace() {
            return space;
        }

        /** Natural-order comparator for BigInteger stream min/max operations. */
        private static class BigIntegerComparator implements Comparator<BigInteger> {
            @Override
            public int compare(BigInteger b1, BigInteger b2) {
                return b1.compareTo(b2);
            }
        }
    }

    /**
     * Simple helper class which represents a rectangle.
     */
    private static class Rectangle {

        // Pixel coordinates; -1 means "unset" (see maximize()).
        int left, right, top, bottom;

        /**
         * Creates a new rectangle where left, right, top and bottom are -1.
         */
        public Rectangle() {
            this.left = -1;
            this.right = -1;
            this.top = -1;
            this.bottom = -1;
        }

        /**
         * Creates a new rectangle based on the dimensions of the given abbyy
         * block.
         *
         * @param abbyyBlock the abbyy block
         */
        public Rectangle(BlockType abbyyBlock) {
            this.bottom = abbyyBlock.getB().intValue();
            this.top = abbyyBlock.getT().intValue();
            this.left = abbyyBlock.getL().intValue();
            this.right = abbyyBlock.getR().intValue();
        }

        /**
         * Creates a new rectangle based on the dimensions of the given abbyy
         * line.
         *
         * @param abbyyLine the abbyy line
         */
        public Rectangle(LineType abbyyLine) {
            this.bottom = abbyyLine.getB().intValue();
            this.top = abbyyLine.getT().intValue();
            this.left = abbyyLine.getL().intValue();
            this.right = abbyyLine.getR().intValue();
        }

        /**
         * Sets the height, width, hpos and vpos of the given alto block based
         * on this rectangle.
         *
         * @param altoBlock the alto block where the rectangle is applied on
         */
        public void applyOnBlock(org.mycore.xml.alto.v2.BlockType altoBlock) {
            altoBlock.setHEIGHT(this.bottom - this.top);
            altoBlock.setWIDTH(this.right - this.left);
            altoBlock.setHPOS(this.left);
            altoBlock.setVPOS(this.top);
        }

        /**
         * Sets the height, width, hpos and vpos of the given alto graphical
         * element based on this rectangle.
         *
         * @param graphicalElement the alto graphical element where the rectangle is applied on
         */
        public void applyOnGraphicalElement(GraphicalElementType graphicalElement) {
            graphicalElement.setHEIGHT(this.bottom - this.top);
            graphicalElement.setWIDTH(this.right - this.left);
            graphicalElement.setHPOS(this.left);
            graphicalElement.setVPOS(this.top);
        }

        /**
         * Sets the height, width, hpos and vpos of the given alto text line
         * based on this rectangle.
         *
         * @param altoLine the alto text line where the rectangle is applied on
         */
        public void applyOnLine(TextLine altoLine) {
            altoLine.setHEIGHT(this.bottom - this.top);
            altoLine.setWIDTH(this.right - this.left);
            altoLine.setHPOS(this.left);
            altoLine.setVPOS(this.top);
        }

        /**
         * Sets the height, width, hpos and vpos of the given alto page space
         * based on this rectangle.
         *
         * @param altoPageSpace the alto page space where the rectangle is applied on
         */
        public void applyOnPageSpace(PageSpaceType altoPageSpace) {
            altoPageSpace.setHEIGHT(this.bottom - this.top);
            altoPageSpace.setWIDTH(this.right - this.left);
            altoPageSpace.setHPOS(this.left);
            altoPageSpace.setVPOS(this.top);
        }

        /**
         * Sets the height, width, hpos and vpos of the given alto string based
         * on this rectangle. ALTO strings use float coordinates.
         *
         * @param string the alto string where the rectangle is applied on
         */
        public void applyOnString(StringType string) {
            string.setHEIGHT((float) this.bottom - this.top);
            string.setWIDTH((float) this.right - this.left);
            string.setHPOS((float) this.left);
            string.setVPOS((float) this.top);
        }

        /**
         * Sets the width, hpos and vpos of the given alto space based on this
         * rectangle.
         *
         * @param sp the alto space where the rectangle is applied on
         */
        public void applyOnSP(SP sp) {
            sp.setHPOS((float) this.left);
            sp.setVPOS((float) this.top);
            sp.setWIDTH((float) this.right - this.left);
        }

        /**
         * Tries to maximize the bounds of this rectangle. If the other
         * rectangle is greater on any side (left, top, right, bottom), this
         * rectangle uses those sides. For left and top values the lower value
         * is used. For right and bottom the higher one. A side of -1 counts as
         * unset and is always replaced.
         *
         * @param other the other rectangle
         */
        public void maximize(Rectangle other) {
            this.left = (this.left == -1 || other.left < this.left) ? other.left : this.left;
            this.top = (this.top == -1 || other.top < this.top) ? other.top : this.top;
            this.right = (this.right == -1 || other.right > this.right) ? other.right : this.right;
            this.bottom = (this.bottom == -1 || other.bottom > this.bottom) ? other.bottom : this.bottom;
        }

        /**
         * Calculates the area of this rectangle.
         *
         * @return the area
         */
        public int area() {
            return (this.right - this.left) * (this.bottom - this.top);
        }

        @Override
        public String toString() {
            return "[left: " + left + " top: " + top + " right: " + right + " bottom: " + bottom + "]";
        }
    }

    /**
     * By default every string has its own font style. But in most cases each
     * string in a line has the same font. This method collects the font of
     * each string per line and adds the most common font to the line, removing
     * the now-redundant per-string style references.
     *
     * @param pageSpace the page space to optimize
     */
    private void optimizeFonts(PageSpaceType pageSpace) {
        Stream<ComposedBlockType> composedBlockStream = pageSpace.getContent().stream()
            .filter(block -> block instanceof ComposedBlockType).map(ComposedBlockType.class::cast);
        Stream<TextBlockType> textBlockStream = composedBlockStream.flatMap(cblock -> cblock.getContent().stream())
            .filter(block -> block instanceof TextBlockType).map(TextBlockType.class::cast);
        Stream<TextLine> lineStream = textBlockStream.flatMap(textBlock -> textBlock.getTextLine().stream());
        // move font's from string to line
        lineStream.forEach(line -> {
            // create string list
            Stream<StringType> stringStream = line.getStringAndSP().stream()
                .filter(object -> object instanceof StringType).map(StringType.class::cast);
            List<StringType> stringList = stringStream.collect(Collectors.toList());
            // count how often each text style occurs within this line
            Map<TextStyle, Long> styleCountMap = stringList.stream().flatMap(string -> string.getSTYLEREFS().stream())
                .filter(style -> style instanceof TextStyle).map(TextStyle.class::cast)
                .collect(Collectors.groupingBy(textStyle -> textStyle, Collectors.counting()));
            // the style with the highest count wins (null if the line has none)
            TextStyle mostCommonTextStyle = styleCountMap.entrySet().stream()
                .sorted((o1, o2) -> o2.getValue().compareTo(o1.getValue()))
                .map(Entry::getKey)
                .findFirst()
                .orElse(null);
            line.getSTYLEREFS().add(mostCommonTextStyle);
            // drop per-string style refs that the line-level style now covers
            stringList.forEach(string -> {
                if (string.getSTYLEREFS().contains(mostCommonTextStyle)) {
                    string.setSTYLEREFS(null);
                }
            });
        });
    }

    /**
     * Add a ID attribute to each paragraph style, its required by the ALTO
     * schema. IDs are "paragraph0", "paragraph1", ...
     *
     * @param styles the paragraph styles
     */
    private void updateParagraphStyles(List<ParagraphStyle> styles) {
        for (int i = 0; i < styles.size(); i++) {
            styles.get(i).setID("paragraph" + i);
        }
    }

    /**
     * Returns the default font family or null if nothing is set explicit.
     *
     * @return the default font family
     */
    public String getDefaultFontFamily() {
        return defaultFontFamily;
    }

    /**
     * Sets the default font family for an abbyy formatting element if there is
     * no ff attribute specified.
     *
     * @param defaultFontFamily the default font family to use
     */
    public void setDefaultFontFamily(String defaultFontFamily) {
        this.defaultFontFamily = defaultFontFamily;
    }

    /**
     * Returns the default font size or null if nothing is set explicit.
     *
     * @return the default font size
     */
    public Float getDefaultFontSize() {
        return defaultFontSize;
    }

    /**
     * Sets the default font size for an abbyy formatting element if there is
     * no fs attribute specified.
     *
     * @param defaultFontSize the default font size to use
     */
    public void setDefaultFontSize(Float defaultFontSize) {
        this.defaultFontSize = defaultFontSize;
    }

    /**
     * Checks if the WC and CC attributes of the ALTO should be calculated and
     * applied. By default the confidence attributes are enabled.
     *
     * @return true if they are added to the ALTO document, false otherwise
     */
    public boolean isConfidenceEnabled() {
        return this.enableConfidence;
    }

    /**
     * If the confidence attributes WC and CC should be calculated and applied.
     *
     * @param enableConfidence true = the WC and CC attributes are added, false = they are ignored
     */
    public void setEnableConfidence(boolean enableConfidence) {
        this.enableConfidence = enableConfidence;
    }
}
package org.apache.commons.fileupload;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.io.OutputStream;

import javax.activation.DataSource;

/**
 * <p> This class represents a file that was received by Turbine using a
 * <code>multipart/form-data</code> POST request.</p>
 *
 * <p> After retrieving an instance of this class from the
 * {@link org.apache.commons.fileupload.FileUpload FileUpload}
 * (see {@link org.apache.commons.fileupload.FileUpload#parseRequest(javax.servlet.http.HttpServletRequest, String)}),
 * you may either request all contents of the file at once using {@link #get()}
 * or request an {@link java.io.InputStream InputStream} with
 * {@link #getStream()} and process the file without attempting to load it
 * into memory, which may come in handy with large files.</p>
 *
 * <p> Implements the javax.activation.DataSource interface (which allows,
 * for example, adding a FileItem as an attachment to a multipart email).</p>
 *
 * @author <a href="mailto:Rafal.Krzewski@e-point.pl">Rafal Krzewski</a>
 * @author <a href="mailto:sean@informage.net">Sean Legassick</a>
 * @author <a href="mailto:jvanzyl@apache.org">Jason van Zyl</a>
 * @version $Id: FileItem.java,v 1.2 2002/04/11 05:54:55 jmcnally Exp $
 */
public interface FileItem extends DataSource {

    /**
     * The maximal size of a request that will have its elements stored
     * in memory (in bytes).
     */
    public static final int DEFAULT_UPLOAD_SIZE_THRESHOLD = 10240;

    /**
     * Returns the original filename in the user's filesystem.
     * (Implements a DataSource method.)
     *
     * @return The original filename in the user's filesystem.
     */
    public String getName();

    /**
     * Returns the original filename in the user's filesystem.
     *
     * @return The original filename in the user's filesystem.
     */
    public String getFileName();

    /**
     * Returns the content type passed by the browser or <code>null</code>
     * if not defined. (Implements a DataSource method.)
     *
     * @return The content type passed by the browser or <code>null</code>
     *         if not defined.
     */
    public String getContentType();

    /**
     * Provides a hint whether the file contents will be read from memory.
     *
     * @return <code>true</code> if the file contents will be read
     *         from memory.
     */
    public boolean inMemory();

    /**
     * Returns the size of the file.
     *
     * @return The size of the file, in bytes.
     */
    public long getSize();

    /**
     * Returns the contents of the file as an array of bytes. If the
     * contents of the file were not yet cached in memory, they will be
     * loaded from the disk storage and cached.
     *
     * @return The contents of the file as an array of bytes.
     */
    public byte[] get();

    /**
     * Returns the contents of the file as a String, using the specified
     * encoding. This method uses {@link #get()} to retrieve the contents
     * of the file.
     *
     * @param encoding The character encoding to use.
     * @return The contents of the file.
     * @throws UnsupportedEncodingException if the requested encoding is
     *         not supported.
     */
    public String getString( String encoding )
        throws UnsupportedEncodingException;

    /**
     * Returns the contents of the file as a String, using the default
     * character encoding.
     *
     * @return The contents of the file.
     */
    public String getString();

    /**
     * Returns an {@link java.io.InputStream InputStream} that can be
     * used to retrieve the contents of the file. (Implements a DataSource
     * method.)
     *
     * @return An {@link java.io.InputStream InputStream} that can be
     *         used to retrieve the contents of the file.
     * @throws IOException if an I/O error occurs.
     */
    public InputStream getInputStream()
        throws IOException;

    /**
     * Returns an {@link java.io.InputStream InputStream} that can be
     * used to retrieve the contents of the file.
     *
     * @return An {@link java.io.InputStream InputStream} that can be
     *         used to retrieve the contents of the file.
     * @throws IOException if an I/O error occurs.
     */
    public InputStream getStream()
        throws IOException;

    /**
     * Returns the {@link java.io.File} object for the DefaultFileItem's
     * data's temporary location on the disk. Note that for
     * <code>DefaultFileItems</code> that have their data stored in memory
     * this method will return <code>null</code>. When handling large
     * files, you can use {@link java.io.File#renameTo(File)} to
     * move the file to a new location without copying the data, if the
     * source and destination locations reside within the same logical
     * volume.
     *
     * @return The data's temporary File, or <code>null</code> for
     *         in-memory items.
     */
    public File getStoreLocation();

    /**
     * A convenience method to write an uploaded file to disk. The client
     * code is not concerned with whether or not the file is stored in
     * memory, or on disk in a temporary location. They just want to write
     * the uploaded file to disk.
     *
     * @param file The full path to the location where the uploaded file
     *             should be stored.
     * @throws Exception if an error occurs while writing the file.
     */
    public void write(String file)
        throws Exception;

    /**
     * Returns the name of the form field this item belongs to.
     *
     * @return The field name.
     */
    public String getFieldName();

    /**
     * Sets the name of the form field this item belongs to.
     *
     * @param name The field name.
     */
    public void setFieldName(String name);

    /**
     * Determines whether this item is a simple form field rather than an
     * uploaded file.
     *
     * @return <code>true</code> if this item is a plain form field.
     */
    public boolean isFormField();

    /**
     * Marks this item as a simple form field or an uploaded file.
     *
     * @param state <code>true</code> if this item is a plain form field.
     */
    public void setIsFormField(boolean state);
}
package com.matt.forgehax;

import com.google.common.base.Strings;
import com.google.common.base.Throwables;
import com.matt.forgehax.log.FileManager;
import com.matt.forgehax.mods.services.MainMenuGuiService.CommandInputGui;
import com.matt.forgehax.util.command.CommandGlobal;
import com.matt.forgehax.util.mod.loader.ModManager;
import java.util.Optional;
import java.util.Scanner;
import javax.annotation.Nullable;
import net.minecraft.client.Minecraft;
import net.minecraft.client.entity.EntityPlayerSP;
import net.minecraft.client.multiplayer.PlayerControllerMP;
import net.minecraft.client.multiplayer.WorldClient;
import net.minecraft.entity.Entity;
import net.minecraft.network.NetworkManager;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.text.ITextComponent;
import net.minecraft.util.text.Style;
import net.minecraft.util.text.TextComponentString;
import net.minecraft.util.text.TextFormatting;
import net.minecraft.world.World;
import net.minecraftforge.fml.client.FMLClientHandler;
import org.apache.logging.log4j.Logger;

/**
 * Static convenience accessors for common ForgeHax/Minecraft singletons plus
 * chat/log output helpers. All state comes from the {@code Globals} constants
 * ({@code MC}, {@code LOGGER}).
 */
public class Helper implements Globals {

  /** Returns the global command registry singleton. */
  public static CommandGlobal getGlobalCommand() {
    return CommandGlobal.getInstance();
  }

  /** Returns the Minecraft client instance ({@code Globals.MC}). */
  public static Minecraft getMinecraft() {
    return MC;
  }

  /** Returns the mod manager singleton. */
  public static ModManager getModManager() {
    return ModManager.getInstance();
  }

  /** Returns the file manager singleton. */
  public static FileManager getFileManager() {
    return FileManager.getInstance();
  }

  /** Returns the shared log4j logger ({@code Globals.LOGGER}). */
  public static Logger getLog() {
    return LOGGER;
  }

  /** Returns the local client player, or null when not in a world. */
  public static EntityPlayerSP getLocalPlayer() {
    return MC.player;
  }

  /**
   * Returns the entity the local player is riding, or null if the player is
   * absent or not riding anything.
   */
  @Nullable
  public static Entity getRidingEntity() {
    if (getLocalPlayer() != null) return getLocalPlayer().getRidingEntity();
    else return null;
  }

  /** Optional wrapper around {@link #getRidingEntity()}. */
  public static Optional<Entity> getOptionalRidingEntity() {
    return Optional.ofNullable(getRidingEntity());
  }

  // Returns the riding entity if present, otherwise the local player
  @Nullable
  public static Entity getRidingOrPlayer() {
    return getRidingEntity() != null ? getRidingEntity() : getLocalPlayer();
  }

  /** Returns the current client world, or null when not in a world. */
  public static WorldClient getWorld() {
    return MC.world;
  }

  /** Returns the world the given entity lives in. */
  public static World getWorld(Entity entity) {
    return entity.getEntityWorld();
  }

  /** Returns the world the given tile entity lives in. */
  public static World getWorld(TileEntity tileEntity) {
    return tileEntity.getWorld();
  }

  /** Returns the client-to-server network manager, or null when not connected. */
  @Nullable
  public static NetworkManager getNetworkManager() {
    return FMLClientHandler.instance().getClientToServerNetworkManager();
  }

  /** Returns the client's player controller. */
  public static PlayerControllerMP getPlayerController() {
    return MC.playerController;
  }

  /**
   * Prints a message without the "[FH]" prefix. Multi-line messages are split
   * on '\n' and printed line by line, alternating between the two given styles.
   *
   * @param startWith   text prepended to every line
   * @param message     the message body (may contain newlines); empty/null is ignored
   * @param firstStyle  style for the first (and every alternate) line
   * @param secondStyle style for the second (and every alternate) line
   */
  public static void printMessageNaked(
      String startWith, String message, Style firstStyle, Style secondStyle) {
    if (!Strings.isNullOrEmpty(message)) {
      if (message.contains("\n")) {
        Scanner scanner = new Scanner(message);
        scanner.useDelimiter("\n");
        Style s1 = firstStyle;
        Style s2 = secondStyle;
        while (scanner.hasNext()) {
          printMessageNaked(startWith, scanner.next(), s1, s2);
          // alternate between colors each newline
          Style cpy = s1;
          s1 = s2;
          s2 = cpy;
        }
      } else {
        // single line: strip carriage returns and apply the first style
        TextComponentString string = new TextComponentString(startWith + message.replaceAll("\r", ""));
        string.setStyle(firstStyle);
        outputMessage(string.getFormattedText());
      }
    }
  }

  // private function that is ultimately used to output the message;
  // falls back to the main-menu command GUI when no player exists yet
  private static void outputMessage(String text) {
    if (getLocalPlayer() != null) {
      getLocalPlayer().sendMessage(new TextComponentString(text));
    } else if (MC.currentScreen instanceof CommandInputGui) {
      ((CommandInputGui) MC.currentScreen).print(text);
    }
  }

  /** Prints a message using the same style for all lines. */
  public static void printMessageNaked(String append, String message, Style style) {
    printMessageNaked(append, message, style, style);
  }

  /** Prints a message alternating white/gray line colors. */
  public static void printMessageNaked(String append, String message) {
    printMessageNaked(
        append,
        message,
        new Style().setColor(TextFormatting.WHITE),
        new Style().setColor(TextFormatting.GRAY));
  }

  /** Prints a message with no prefix and default colors. */
  public static void printMessageNaked(String message) {
    printMessageNaked("", message);
  }

  // Will append '[FH] ' in front
  public static void printMessage(String message) {
    if (!Strings.isNullOrEmpty(message)) printMessageNaked("[FH] " + message);
  }

  /** printf-style variant of {@link #printMessage(String)}. */
  public static void printMessage(String format, Object... args) {
    printMessage(String.format(format, args));
  }

  // builds a styled chat component, stripping carriage returns
  private static ITextComponent getFormattedText(
      String text, TextFormatting color, boolean bold, boolean italic) {
    return new TextComponentString(text.replaceAll("\r", ""))
        .setStyle(new Style().setColor(color).setBold(bold).setItalic(italic));
  }

  /** Prints "[ForgeHax] ..." in green (informational). */
  public static void printInform(String format, Object... args) {
    outputMessage(
        getFormattedText("[ForgeHax]", TextFormatting.GREEN, true, false)
            .appendSibling(
                getFormattedText(
                    " " + String.format(format, args).trim(), TextFormatting.GRAY, false, false))
            .getFormattedText());
  }

  /** Prints "[ForgeHax] ..." in yellow (warning). */
  public static void printWarning(String format, Object... args) {
    outputMessage(
        getFormattedText("[ForgeHax]", TextFormatting.YELLOW, true, false)
            .appendSibling(
                getFormattedText(
                    " " + String.format(format, args).trim(), TextFormatting.GRAY, false, false))
            .getFormattedText());
  }

  /** Prints "[ForgeHax] ..." in red (error). */
  public static void printError(String format, Object... args) {
    outputMessage(
        getFormattedText("[ForgeHax]", TextFormatting.RED, true, false)
            .appendSibling(
                getFormattedText(
                    " " + String.format(format, args).trim(), TextFormatting.GRAY, false, false))
            .getFormattedText());
  }

  /** Logs the full stack trace of the given throwable. */
  public static void printStackTrace(Throwable t) {
    getLog().error(Throwables.getStackTraceAsString(t));
  }

  /**
   * Logs a throwable and, recursively, its entire cause chain, then the full
   * stack trace of the outermost throwable.
   */
  public static void handleThrowable(Throwable t) {
    getLog()
        .error(
            String.format(
                "[%s] %s", t.getClass().getSimpleName(), Strings.nullToEmpty(t.getMessage())));
    if (t.getCause() != null) handleThrowable(t.getCause());
    printStackTrace(t);
  }

  /**
   * Marks all chunks within render distance of the player dirty so they are
   * re-rendered. Scheduled onto the client thread.
   */
  public static void reloadChunks() {
    // credits to 0x22
    if (getWorld() != null && getLocalPlayer() != null)
      MC.addScheduledTask(
          () -> {
            int x = (int) getLocalPlayer().posX;
            int y = (int) getLocalPlayer().posY;
            int z = (int) getLocalPlayer().posZ;
            // render distance in blocks (chunks * 16)
            int distance = MC.gameSettings.renderDistanceChunks * 16;
            MC.renderGlobal.markBlockRangeForRenderUpdate(
                x - distance, y - distance, z - distance, x + distance, y + distance, z + distance);
          });
  }

  /** Fully reloads all renderers (heavier than {@link #reloadChunks()}). */
  public static void reloadChunksHard() {
    MC.addScheduledTask(
        () -> {
          if (getWorld() != null && getLocalPlayer() != null) MC.renderGlobal.loadRenderers();
        });
  }
}
package org.oregami.entities; import java.io.Serializable; import java.util.Date; import javax.persistence.Column; import javax.persistence.GeneratedValue; import javax.persistence.Id; import javax.persistence.MappedSuperclass; import javax.persistence.Temporal; import javax.persistence.TemporalType; import javax.persistence.Transient; import javax.persistence.Version; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import org.apache.commons.lang.builder.ToStringBuilder; import org.apache.commons.lang.builder.ToStringStyle; import org.hibernate.annotations.GenericGenerator; @MappedSuperclass @JsonIgnoreProperties({"hibernateLazyInitializer", "handler"}) public abstract class BaseEntityUUID implements Serializable { private static final long serialVersionUID = 8608953068007538072L; @Id @GeneratedValue(generator="system-uuid") @GenericGenerator(name="system-uuid", strategy = "uuid") @Column(name = "id", updatable = false, nullable = false) private String id = null; @Version @Column(name = "version") private int version = 0; @Temporal(TemporalType.TIMESTAMP) @Column(name = "veraenderung_zeitpunkt") @Transient private Date lastUpdate; protected void copy(final BaseEntityUUID source) { this.id = source.id; this.version = source.version; this.lastUpdate = source.lastUpdate; } @Override public boolean equals(final Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (!(obj instanceof BaseEntityUUID)) { return false; } final BaseEntityUUID other = (BaseEntityUUID) obj; if (this.id != null && other.id != null) { if (this.getClass().equals(other.getClass()) && this.id == other.id) { return true; } } return false; } public String getId() { return this.id; } @Deprecated public void setId(final String id) { this.id = id; } public int getVersion() { return this.version; } @SuppressWarnings("unused") private void setVersion(final int version) { this.version = version; } public Date getLastUpdate() { return this.lastUpdate; } public 
void setLastUpdate(final Date lastUpdate) { this.lastUpdate = lastUpdate; } @Override public String toString() { return ToStringBuilder.reflectionToString(this, ToStringStyle.MULTI_LINE_STYLE); } }
package jagex.runescape.model.player;

import jagex.runescape.model.Entity;

/**
 * Player.java
 *
 * <p>A connected player; currently a bare skeleton that only forwards its
 * index to {@link Entity}.</p>
 *
 * @author Ryley M. Kimmel <ryley.kimmel@live.com>
 * @version 1.0
 * Aug 22, 2012
 */
public final class Player extends Entity {

    /**
     * Creates the player.
     *
     * @param index the index of this player
     */
    public Player(int index) {
        super(index);
    }

    /**
     * This method is used for processing entities every server cycle (600ms).
     * Currently a no-op placeholder.
     *
     * @throws Throwable if some error occurs
     */
    public void process() throws Throwable {
        // intentionally empty — per-cycle player logic not implemented yet
    }
}
package org.oryxeditor.server;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.xml.stream.FactoryConfigurationError;
import javax.xml.stream.StreamFilter;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import org.apache.log4j.Logger;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

/**
 * Servlet that renders the Oryx process-designer XHTML page, wiring up the
 * configured profile, stencil set and (in dev mode) the disassembled
 * JavaScript resources.
 */
public class EditorHandler extends HttpServlet {

    /**
     * A special flag to set when running in development mode to use the disassembled resources,
     * loaded from js_files.json
     */
    private static final String DEV_MODE = "designer.dev";

    private static final Logger _logger = Logger.getLogger(EditorHandler.class);

    // Web path under which the designer resources are served.
    private static final String oryx_path = "/designer/";

    // Stencil set used when the profile does not declare one.
    private static final String defaultSS="stencilsets/bpmn2.0/bpmn2.0.json";

    private static final long serialVersionUID = 1L;

    // Profile names discovered on disk; refreshed on every GET.
    private Collection<String> availableProfiles;

    /**
     * Builds and returns the designer page. In dev mode the script list comes
     * from js_files.json and profiles/default.xml; otherwise the profiles are
     * taken from the request URI (";"-separated) and their .conf files.
     */
    @Override
    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        availableProfiles=getAvailableProfileNames();
        // if(availableProfiles.size()==0)
        // defaultHandlerBehaviour();
        JSONObject conf= null;
        String sset=null;
        List<String> profiles= new ArrayList<String>();
        String dev_flag = "";
        String bootstrapLibs = "<script src=\"" + oryx_path + "oryx.js\" type=\"text/javascript\" />\n";
        String extString="";
        if (System.getProperty(DEV_MODE) != null) {
            dev_flag = "ORYX.CONFIG.DEV = true;\nvar ORYX_LOGLEVEL = 3;\n";
            profiles.add("default"); // will be ignored.
            conf = new JSONObject(); // we can do a better job at configuring the conf object later on.
            String jsFolder = this.getServletContext().getRealPath("/") + File.separator + "js" + File.separator;
            // we place ourselves in dev mode, we will load the files from the file js_files.json
            try {
                // first, read the JSON files to know what core libs to include
                JSONObject dev = readJSONObjectFromFile(jsFolder + "js_files.json");
                JSONArray array = dev.getJSONArray("files");
                StringBuilder builder = new StringBuilder();
                for (int i = 0 ; i < array.length() ; i++) {
                    String filename = String.valueOf(array.get(i));
                    builder.append("<script src=\"" + oryx_path + "js/" + filename + "\" type=\"text/javascript\" />\n");
                }
                bootstrapLibs = builder.toString();
                // then append the plugins, using their source attribute.
                InputStream fileStream = null;
                try {
                    fileStream = new FileInputStream(this.getServletContext().getRealPath("/") + File.separator + "profiles" + File.separator + "default.xml");
                    // NOTE(review): XMLInputFactory is used with default
                    // settings — external entities / DTDs are not disabled,
                    // which is an XXE risk if default.xml can be tampered
                    // with. Consider setting IS_SUPPORTING_EXTERNAL_ENTITIES
                    // and SUPPORT_DTD to false.
                    XMLInputFactory factory = XMLInputFactory.newInstance();
                    XMLStreamReader reader = factory.createXMLStreamReader(fileStream);
                    JSONArray ssextensions = new JSONArray();
                    while(reader.hasNext()) {
                        if (reader.next() == XMLStreamReader.START_ELEMENT) {
                            if ("plugin".equals(reader.getLocalName())) {
                                // plugin elements contribute extra <script> tags
                                for (int i = 0 ; i < reader.getAttributeCount() ; i++) {
                                    if ("source".equals(reader.getAttributeLocalName(i))) {
                                        builder.append("<script src=\"" + oryx_path + "js/Plugins/" + reader.getAttributeValue(i) + "\" type=\"text/javascript\" />\n");
                                    }
                                }
                            } else if ("profile".equals(reader.getLocalName())) {
                                // the profile element may pin the stencil set
                                for (int i = 0 ; i < reader.getAttributeCount() ; i++) {
                                    if ("stencilset".equals(reader.getAttributeLocalName(i))) {
                                        sset = reader.getAttributeValue(i);
                                    }
                                }
                            } else if ("stencilsetextension".equals(reader.getLocalName())) {
                                // collect stencil-set extensions as JSON objects
                                JSONObject obj = new JSONObject();
                                for (int i = 0 ; i < reader.getAttributeCount() ; i++) {
                                    obj.append(reader.getAttributeLocalName(i), reader.getAttributeValue(i));
                                }
                                ssextensions.put(obj);
                            }
                        }
                    }
                    extString = ssextensions.toString();
                } catch (XMLStreamException e) {
                    _logger.error(e.getMessage(), e);
                    throw new IllegalArgumentException("Could not read default.xml", e);
                } catch (FactoryConfigurationError e) {
                    _logger.error(e.getMessage(), e);
                } finally {
                    // close quietly; nothing useful to do on failure here
                    if (fileStream != null) { try { fileStream.close(); } catch(IOException e) {}};
                }
                bootstrapLibs = builder.toString();
            } catch (JSONException e) {
                _logger.error(e.getMessage(), e);
            } catch (IOException e) {
                _logger.error(e.getMessage(), e);
            }
        } else {
            // production mode: profiles are appended to the URI after ';'
            String[] urlSplitted=request.getRequestURI().split(";");
            if (urlSplitted.length>1){
                for(int i=1;i<urlSplitted.length;i++){
                    profiles.add(urlSplitted[i]);
                }
            }else{
                profiles.add("default");
            }
            if(!availableProfiles.containsAll(profiles)){
                //Some profiles not available
                response.sendError(HttpServletResponse.SC_NOT_FOUND, "Profile not found!");
                profiles.retainAll(availableProfiles);
                response.setStatus(HttpServletResponse.SC_NOT_FOUND);
                return;
            }
            try {
                conf = readJSONObjectFromFile(this.getServletContext().getRealPath("/profiles") + File.separator + profiles.get(0) + ".conf");
            } catch (JSONException e) {
                // NOTE(review): conf stays null here and the optString call
                // below would NPE — confirm whether a malformed .conf should
                // abort the request instead.
                _logger.error(e.getMessage(), e);
            }
            sset=conf.optString("stencilset");
            JSONArray exts = conf.optJSONArray("stencilsetextension");
            if(exts==null) exts=new JSONArray();
            extString=exts.toString();
        }
        if(sset==null || "".equals(sset)) sset=defaultSS;
        String uuid_flag = "";
        if (request.getParameter("uuid") != null) {
            // NOTE(review): the uuid parameter is embedded into the page
            // unescaped — potential XSS/script injection; confirm it is
            // validated upstream.
            uuid_flag = "ORYX.CONFIG.UUID = \"" + request.getParameter("uuid") + "\"";
        }
        // Inline bootstrap script: configures ORYX and installs a default
        // onOryxResourcesLoaded handler when the host page provides none.
        String content = "<script type='text/javascript'>"
            + " if(!ORYX) var ORYX = {};\n"
            + " if(!ORYX.CONFIG) ORYX.CONFIG = {};\n"
            + " " + dev_flag + "\n"
            + " " + uuid_flag + "\n"
            + " ORYX.CONFIG.PLUGINS_CONFIG = ORYX.CONFIG.PROFILE_PATH + '"+profiles.get(0)+".xml';\n"
            + " ORYX.CONFIG.PROFILE_CONFIG = ORYX.CONFIG.PROFILE_PATH + '"+profiles.get(0)+".conf';\n"
            + " ORYX.CONFIG.SSET='" + sset +"';"
            + " ORYX.CONFIG.SSEXTS=" + extString + ";"
            + " if ('undefined' == typeof(window.onOryxResourcesLoaded)) { "
            + " ORYX.Log.warn('No adapter to repository specified, default used. You need a function window.onOryxResourcesLoaded that obtains model-JSON from your repository');"
            + " window.onOryxResourcesLoaded = function() {"
            + " if (location.hash.slice(1).length == 0 || location.hash.slice(1).indexOf('new')!=-1){"
            + " var stencilset=ORYX.Utils.getParamFromUrl('stencilset')?ORYX.Utils.getParamFromUrl('stencilset'):'"+sset+"';"
            + " new ORYX.Editor({"
            + " id: 'oryx-canvas123',"
            + " stencilset: {"
            + " url: '"+oryx_path+"'+stencilset"
            + " }"
            + " })}"
            + " else{"
            + " ORYX.Editor.createByUrl('" + getRelativeServerPath(request) + "'+location.hash.slice(1)+'/json', {"
            + " id: 'oryx-canvas123'"
            + " });"
            + " };"
            + "}}"
            + "</script>";
        response.setContentType("application/xhtml+xml");
        response.getWriter().println(this.getOryxModel("Process Designer", content, this.getLanguageCode(request), this.getCountryCode(request), profiles, bootstrapLibs));
        response.setStatus(200);
    }

    /**
     * Reads a JSON file contents and place them into a JSONObject.
     *
     * @param path the path to the file
     * @return the JSONObject parsed from the file
     * @throws IOException   if the file cannot be read
     * @throws JSONException if the file content is not valid JSON
     */
    private JSONObject readJSONObjectFromFile(String path) throws IOException, JSONException {
        JSONObject conf = null;
        InputStream fileStream = null;
        try {
            ByteArrayOutputStream output = new ByteArrayOutputStream();
            fileStream = new FileInputStream(path);
            byte[] buffer = new byte[4096];
            int read;
            while ((read = fileStream.read(buffer)) != -1) {
                output.write(buffer, 0, read);
            }
            // NOTE(review): output.toString() uses the platform default
            // charset — confirm the .conf/.json files are encoded accordingly.
            conf = new JSONObject(output.toString());
        } finally {
            // close quietly
            if (fileStream != null) { try { fileStream.close(); } catch(IOException e) {}};
        }
        return conf;
    }

    /** Convenience overload of the full getOryxModel without head extensions. */
    protected String getOryxModel(String title, String content, String languageCode, String countryCode,
            List<String> profiles, String bootstrapLibs) {
        return getOryxModel(title, content, languageCode, countryCode, "", profiles, bootstrapLibs);
    }

    /**
     * Assembles the complete XHTML page for the designer: libraries,
     * translations, profile scripts, stylesheets, the bootstrap content and an
     * optional analytics snippet.
     *
     * @param title          page title
     * @param content        inline bootstrap markup/script for the head
     * @param languageCode   two-letter language code for translation files
     * @param countryCode    country code for region-specific translations
     * @param headExtentions extra markup appended into the head
     * @param profiles       profile names whose .js files are included
     * @param bootstrapLibs  script tags for the core libraries
     * @return the full XHTML document as a string
     */
    protected String getOryxModel(String title, String content, String languageCode, String countryCode,
            String headExtentions, List<String> profiles, String bootstrapLibs) {
        String languageFiles = "";
        // include translation files only when they exist on disk
        if (new File(this.getOryxRootDirectory() + oryx_path + "i18n/translation_"+languageCode+".js").exists()) {
            languageFiles += "<script src=\"" + oryx_path + "i18n/translation_"+languageCode+".js\" type=\"text/javascript\" />\n";
        }
        if (new File(this.getOryxRootDirectory() + oryx_path + "i18n/translation_" + languageCode+"_" + countryCode + ".js").exists()) {
            languageFiles += "<script src=\"" + oryx_path + "i18n/translation_" + languageCode+"_" + countryCode + ".js\" type=\"text/javascript\" />\n";
        }
        StringBuilder profileBuilder = new StringBuilder();
        for(String profile: profiles){
            profileBuilder.append("<script src=\"" + oryx_path +"profiles/" + profile+".js\" type=\"text/javascript\" />\n");
        }
        String profileFiles = profileBuilder.toString();
        String analytics = getServletContext().getInitParameter("ANALYTICS_SNIPPET");
        if (null == analytics) {
            analytics = "";
        }
        // NOTE(review): several string literals below are visibly truncated
        // (the DOCTYPE and xmlns URLs end mid-token) — the original source
        // appears to have been corrupted, likely by a tool stripping "//..."
        // inside literals. The tokens are preserved exactly as found; this
        // code cannot compile until the literals are reconstructed.
        return "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n" + "<!DOCTYPE html PUBLIC \"- + "<html xmlns=\"http: +
        "xmlns:b3mn=\"http://b3mn.org/2007/b3mn\"\n"
            + "xmlns:ext=\"http://b3mn.org/2007/ext\"\n"
            + "xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns + "xmlns:atom=\"http://b3mn.org/2007/atom+xhtml\">\n"
            + "<head profile=\"http://purl.org/NET/erdf/profile\">\n"
            + "<title>" + title + "</title>\n"
            + "<!-- libraries -->\n"
            + "<script src=\"" + oryx_path + "lib/prototype-1.5.1.js\" type=\"text/javascript\" />\n"
            + "<script src=\"" + oryx_path + "lib/path_parser.js\" type=\"text/javascript\" />\n"
            + "<script src=\"" + oryx_path + "lib/ext-2.0.2/adapter/ext/ext-base.js\" type=\"text/javascript\" />\n"
            + "<script src=\"" + oryx_path + "lib/ext-2.0.2/ext-all.js\" type=\"text/javascript\" />\n"
            + "<script src=\"" + oryx_path + "lib/ext-2.0.2/color-field.js\" type=\"text/javascript\" />\n"
            + "<style media=\"screen\" type=\"text/css\">\n"
            + "@import url(\"" + oryx_path + "lib/ext-2.0.2/resources/css/ext-all.css\");\n"
            // blue theme is best
            //+ "@import url(\"" + oryx_path + "lib/ext-2.0.2/resources/css/xtheme-gray.css\");\n"
            + "</style>\n"
            + "<!-- oryx editor -->\n"
            // EN_US is default an base language
            + "<!-- language files -->\n"
            + "<script src=\"" + oryx_path + "i18n/translation_en_us.js\" type=\"text/javascript\" />\n"
            + languageFiles
            // Handle different profiles
            + bootstrapLibs
            + profileFiles
            + headExtentions
            + "<link rel=\"Stylesheet\" media=\"screen\" href=\"" + oryx_path + "css/theme_norm.css\" type=\"text/css\" />\n"
            + "<!-- erdf schemas -->\n"
            + "<link rel=\"schema.dc\" href=\"http://purl.org/dc/elements/1.1/\" />\n"
            + "<link rel=\"schema.dcTerms\" href=\"http://purl.org/dc/terms/\" />\n"
            + "<link rel=\"schema.b3mn\" href=\"http://b3mn.org\" />\n"
            + "<link rel=\"schema.oryx\" href=\"http://oryx-editor.org/\" />\n"
            + "<link rel=\"schema.raziel\" href=\"http://raziel.org/\" />\n"
            + content
            + "</head>\n"
            + "<body style=\"overflow:hidden;\"><div class='processdata' style='display:none'>\n"
            + "\n"
            + "</div>\n"
            + analytics
            + "</body>\n"
            + "</html>";
    }

    /** Returns the parent directory of the servlet context's real path. */
    protected String getOryxRootDirectory() {
        String realPath = this.getServletContext().getRealPath("");
        File backendDir = new File(realPath);
        return backendDir.getParent();
    }

    /** Reads the country code from the session, or null if unset. */
    protected String getCountryCode(HttpServletRequest req) {
        return (String) req.getSession().getAttribute("countrycode");
    }

    /** Reads the language code from the session, or null if unset. */
    protected String getLanguageCode(HttpServletRequest req) {
        return (String) req.getSession().getAttribute("languagecode");
    }

    /** Base path of the model repository backend. */
    protected String getRelativeServerPath(HttpServletRequest req){
        return "/backend/poem"; //+ req.getServletPath();
    }

    /**
     * Scans the /profiles directory for *.js files and returns their base
     * names as the set of available profile names. Missing directory yields an
     * empty collection.
     */
    public Collection<String> getAvailableProfileNames() {
        Collection<String> profilNames = new ArrayList<String>();
        File handlerDir=null;
        try {
            handlerDir = new File(this.getServletContext().getRealPath("/profiles"));
        } catch (NullPointerException e) {
            return profilNames;
        }
        if(handlerDir == null || handlerDir.listFiles() == null) {
            return profilNames;
        }
        for (File source : handlerDir.listFiles()) {
            if (source.getName().endsWith(".js")) {
                profilNames.add(source.getName().substring(0, source.getName().lastIndexOf(".")));
            }
        }
        return profilNames;
    }
}
package arjdbc.jdbc; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.PrintStream; import java.io.Reader; import java.io.StringReader; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.math.BigDecimal; import java.math.BigInteger; import java.sql.Array; import java.sql.Connection; import java.sql.DatabaseMetaData; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.SQLXML; import java.sql.Statement; import java.sql.Date; import java.sql.SQLFeatureNotSupportedException; import java.sql.Savepoint; import java.sql.Time; import java.sql.Timestamp; import java.sql.Types; import java.util.ArrayList; import java.util.Calendar; import java.util.Collection; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.TimeZone; import org.joda.time.DateTime; import org.jruby.Ruby; import org.jruby.RubyArray; import org.jruby.RubyBignum; import org.jruby.RubyBoolean; import org.jruby.RubyClass; import org.jruby.RubyException; import org.jruby.RubyFixnum; import org.jruby.RubyFloat; import org.jruby.RubyHash; import org.jruby.RubyIO; import org.jruby.RubyInteger; import org.jruby.RubyModule; import org.jruby.RubyNumeric; import org.jruby.RubyObject; import org.jruby.RubyString; import org.jruby.RubySymbol; import org.jruby.RubyTime; import org.jruby.anno.JRubyMethod; import org.jruby.exceptions.RaiseException; import org.jruby.javasupport.JavaEmbedUtils; import org.jruby.javasupport.JavaUtil; import org.jruby.runtime.Arity; import org.jruby.runtime.Block; import org.jruby.runtime.ObjectAllocator; import org.jruby.runtime.ThreadContext; import org.jruby.runtime.backtrace.RubyStackTraceElement; import org.jruby.runtime.builtin.IRubyObject; import org.jruby.util.ByteList;

/**
 * Most of our ActiveRecord::ConnectionAdapters::JdbcConnection implementation.
 */
public class RubyJdbcConnection extends RubyObject {

    // JDBC table types: plain tables only vs. all "table-like" relations
    private static final String[] TABLE_TYPE = new String[] { "TABLE" };
    private static final String[] TABLE_TYPES = new String[] { "TABLE", "VIEW", "SYNONYM" };

    // factory used to open new JDBC connections (set from Ruby land)
    private JdbcConnectionFactory connectionFactory;

    protected RubyJdbcConnection(Ruby runtime, RubyClass metaClass) {
        super(runtime, metaClass);
    }

    private static final ObjectAllocator ALLOCATOR = new ObjectAllocator() {
        public IRubyObject allocate(Ruby runtime, RubyClass klass) {
            return new RubyJdbcConnection(runtime, klass);
        }
    };

    /**
     * Defines <code>ActiveRecord::ConnectionAdapters::JdbcConnection</code>
     * (with all its annotated methods) under the ConnectionAdapters module.
     */
    public static RubyClass createJdbcConnectionClass(final Ruby runtime) {
        RubyClass jdbcConnection = getConnectionAdapters(runtime).
            defineClassUnder("JdbcConnection", runtime.getObject(), ALLOCATOR);
        jdbcConnection.defineAnnotatedMethods(RubyJdbcConnection.class);
        return jdbcConnection;
    }

    public static RubyClass getJdbcConnectionClass(final Ruby runtime) {
        return getConnectionAdapters(runtime).getClass("JdbcConnection");
    }

    /**
     * @param runtime
     * @return <code>ActiveRecord::ConnectionAdapters</code>
     */
    protected static RubyModule getConnectionAdapters(final Ruby runtime) {
        return (RubyModule) runtime.getModule("ActiveRecord").getConstant("ConnectionAdapters");
    }

    /**
     * @param runtime
     * @return <code>ActiveRecord::Result</code>
     */
    static RubyClass getResult(final Ruby runtime) {
        return runtime.getModule("ActiveRecord").getClass("Result");
    }

    /**
     * @param runtime
     * @return <code>ActiveRecord::Base</code>
     */
    protected static RubyClass getBase(final Ruby runtime) {
        return runtime.getModule("ActiveRecord").getClass("Base");
    }

    /**
     * @param runtime
     * @return <code>ActiveRecord::ConnectionAdapters::IndexDefinition</code>
     */
    protected static RubyClass getIndexDefinition(final Ruby runtime) {
        return getConnectionAdapters(runtime).getClass("IndexDefinition");
    }

    /**
     * @param runtime
     * @return <code>ActiveRecord::ConnectionAdapters::ForeignKeyDefinition</code>
     * @note only since AR
4.2 */ protected static RubyClass getForeignKeyDefinition(final Ruby runtime) { return getConnectionAdapters(runtime).getClass("ForeignKeyDefinition"); } /** * @param runtime * @return <code>ActiveRecord::JDBCError</code> */ protected static RubyClass getJDBCError(final Ruby runtime) { return runtime.getModule("ActiveRecord").getClass("JDBCError"); } /** * @param runtime * @return <code>ActiveRecord::ConnectionNotEstablished</code> */ protected static RubyClass getConnectionNotEstablished(final Ruby runtime) { return runtime.getModule("ActiveRecord").getClass("ConnectionNotEstablished"); } /** * NOTE: Only available since AR-4.0 * @param runtime * @return <code>ActiveRecord::TransactionIsolationError</code> */ protected static RubyClass getTransactionIsolationError(final Ruby runtime) { return (RubyClass) runtime.getModule("ActiveRecord").getConstant("TransactionIsolationError"); } /** * @param runtime * @return <code>ActiveRecord::ConnectionAdapters::JdbcTypeConverter</code> */ private static RubyClass getJdbcTypeConverter(final Ruby runtime) { return getConnectionAdapters(runtime).getClass("JdbcTypeConverter"); } /* def transaction_isolation_levels { read_uncommitted: "READ UNCOMMITTED", read_committed: "READ COMMITTED", repeatable_read: "REPEATABLE READ", serializable: "SERIALIZABLE" } end */ public static int mapTransactionIsolationLevel(IRubyObject isolation) { if ( ! 
( isolation instanceof RubySymbol ) ) { isolation = isolation.asString().callMethod("intern"); } final Object isolationString = isolation.toString(); // RubySymbol.toString if ( isolationString == "read_uncommitted" ) return Connection.TRANSACTION_READ_UNCOMMITTED; if ( isolationString == "read_committed" ) return Connection.TRANSACTION_READ_COMMITTED; if ( isolationString == "repeatable_read" ) return Connection.TRANSACTION_REPEATABLE_READ; if ( isolationString == "serializable" ) return Connection.TRANSACTION_SERIALIZABLE; throw new IllegalArgumentException( "unexpected isolation level: " + isolation + " (" + isolationString + ")" ); } @JRubyMethod(name = "supports_transaction_isolation?", optional = 1) public IRubyObject supports_transaction_isolation_p(final ThreadContext context, final IRubyObject[] args) throws SQLException { final IRubyObject isolation = args.length > 0 ? args[0] : null; return withConnection(context, new Callable<IRubyObject>() { public IRubyObject call(final Connection connection) throws SQLException { final DatabaseMetaData metaData = connection.getMetaData(); final boolean supported; if ( isolation != null && ! isolation.isNil() ) { final int level = mapTransactionIsolationLevel(isolation); supported = metaData.supportsTransactionIsolationLevel(level); } else { final int level = metaData.getDefaultTransactionIsolation(); supported = level > Connection.TRANSACTION_NONE; } return context.getRuntime().newBoolean(supported); } }); } @JRubyMethod(name = "begin", optional = 1) // optional isolation argument for AR-4.0 public IRubyObject begin(final ThreadContext context, final IRubyObject[] args) { final IRubyObject isolation = args.length > 0 ? args[0] : null; try { // handleException == false so we can handle setTXIsolation return withConnection(context, false, new Callable<IRubyObject>() { public IRubyObject call(final Connection connection) throws SQLException { connection.setAutoCommit(false); if ( isolation != null && ! 
isolation.isNil() ) { final int level = mapTransactionIsolationLevel(isolation); try { connection.setTransactionIsolation(level); } catch (SQLException e) { RubyClass txError = getTransactionIsolationError(context.getRuntime()); if ( txError != null ) throw wrapException(context, txError, e); throw e; // let it roll - will be wrapped into a JDBCError (non 4.0) } } return context.getRuntime().getNil(); } }); } catch (SQLException e) { return handleException(context, e); } } @JRubyMethod(name = "commit") public IRubyObject commit(final ThreadContext context) { final Connection connection = getConnection(true); try { if ( ! connection.getAutoCommit() ) { try { connection.commit(); resetSavepoints(context); // if any return context.getRuntime().newBoolean(true); } finally { connection.setAutoCommit(true); } } return context.getRuntime().getNil(); } catch (SQLException e) { return handleException(context, e); } } @JRubyMethod(name = "rollback") public IRubyObject rollback(final ThreadContext context) { final Connection connection = getConnection(true); try { if ( ! connection.getAutoCommit() ) { try { connection.rollback(); resetSavepoints(context); // if any return context.getRuntime().newBoolean(true); } finally { connection.setAutoCommit(true); } } return context.getRuntime().getNil(); } catch (SQLException e) { return handleException(context, e); } } @JRubyMethod(name = "supports_savepoints?") public IRubyObject supports_savepoints_p(final ThreadContext context) throws SQLException { return withConnection(context, new Callable<IRubyObject>() { public IRubyObject call(final Connection connection) throws SQLException { final DatabaseMetaData metaData = connection.getMetaData(); return context.getRuntime().newBoolean( metaData.supportsSavepoints() ); } }); } @JRubyMethod(name = "create_savepoint", optional = 1) public IRubyObject create_savepoint(final ThreadContext context, final IRubyObject[] args) { IRubyObject name = args.length > 0 ? 
args[0] : null;
    final Connection connection = getConnection(true);
    try {
        connection.setAutoCommit(false);

        final Savepoint savepoint;
        // NOTE: this will auto-start a DB transaction even invoked outside
        // of a AR (Ruby) transaction (`transaction { ... create_savepoint }`)
        // it would be nice if AR knew about this TX although that's kind of
        // "really advanced" functionality - likely not to be implemented ...
        if ( name != null && ! name.isNil() ) {
            savepoint = connection.setSavepoint(name.toString());
        }
        else {
            // un-named savepoint: use the driver-generated id as the Ruby-side name
            savepoint = connection.setSavepoint();
            name = RubyString.newString( context.getRuntime(),
                Integer.toString( savepoint.getSavepointId() )
            );
        }
        // remember the savepoint so rollback_savepoint / release_savepoint find it
        getSavepoints(context).put(name, savepoint);

        return name;
    }
    catch (SQLException e) {
        return handleException(context, e);
    }
}

/**
 * Rolls the current transaction back to a previously created savepoint.
 * Raises ArgumentError when name is nil, RuntimeError when no such savepoint
 * was registered on this connection.
 */
@JRubyMethod(name = "rollback_savepoint", required = 1)
public IRubyObject rollback_savepoint(final ThreadContext context, final IRubyObject name) {
    if ( name == null || name.isNil() ) {
        throw context.getRuntime().newArgumentError("nil savepoint name given");
    }
    final Connection connection = getConnection(true);
    try {
        Savepoint savepoint = getSavepoints(context).get(name);
        if ( savepoint == null ) {
            throw context.getRuntime().newRuntimeError("could not rollback savepoint: '" + name + "' (not set)");
        }
        connection.rollback(savepoint);
        return context.getRuntime().getNil();
    }
    catch (SQLException e) {
        return handleException(context, e);
    }
}

/**
 * Releases (forgets) a previously created savepoint.
 * Raises ArgumentError when name is nil, RuntimeError when no such savepoint
 * was registered on this connection.
 */
@JRubyMethod(name = "release_savepoint", required = 1)
public IRubyObject release_savepoint(final ThreadContext context, final IRubyObject name) {
    if ( name == null || name.isNil() ) {
        throw context.getRuntime().newArgumentError("nil savepoint name given");
    }
    final Connection connection = getConnection(true);
    try {
        Object savepoint = getSavepoints(context).remove(name);
        if ( savepoint == null ) {
            throw context.getRuntime().newRuntimeError("could not release savepoint: '" + name + "' (not set)");
        }
        // NOTE: RubyHash.remove does not convert to Java as get does :
        if ( !
( savepoint instanceof Savepoint ) ) {
            savepoint = ((IRubyObject) savepoint).toJava(Savepoint.class);
        }
        connection.releaseSavepoint((Savepoint) savepoint);
        return context.getRuntime().getNil();
    }
    catch (SQLException e) {
        return handleException(context, e);
    }
}

// NOTE: this is internal API - not to be used by user-code !
// Lists the (Ruby) names of all savepoints currently tracked on this connection.
@JRubyMethod(name = "marked_savepoint_names")
public IRubyObject marked_savepoint_names(final ThreadContext context) {
    if ( hasInstanceVariable("@savepoints") ) {
        Map<IRubyObject, Savepoint> savepoints = getSavepoints(context);
        final RubyArray names = context.getRuntime().newArray();
        for ( Map.Entry<IRubyObject, ?> entry : savepoints.entrySet() ) {
            names.add( entry.getKey() ); // keys are RubyString instances
        }
        return names;
    }
    else {
        return context.getRuntime().newEmptyArray();
    }
}

/**
 * Lazily creates the per-connection savepoint registry (name -> Savepoint),
 * stored in the <code>@savepoints</code> instance variable.
 */
@SuppressWarnings("unchecked")
protected Map<IRubyObject, Savepoint> getSavepoints(final ThreadContext context) {
    if ( hasInstanceVariable("@savepoints") ) {
        IRubyObject savepoints = getInstanceVariable("@savepoints");
        return (Map<IRubyObject, Savepoint>) savepoints.toJava(Map.class);
    }
    else {
        // not using a RubyHash to preserve order on Ruby 1.8 as well :
        Map<IRubyObject, Savepoint> savepoints = new LinkedHashMap<IRubyObject, Savepoint>(4);
        setInstanceVariable("@savepoints", convertJavaToRuby(savepoints));
        return savepoints;
    }
}

// Drops the whole savepoint registry; returns whether there was one to drop.
protected boolean resetSavepoints(final ThreadContext context) {
    if ( hasInstanceVariable("@savepoints") ) {
        removeInstanceVariable("@savepoints");
        return true;
    }
    return false;
}

@JRubyMethod(name = "connection_factory")
public IRubyObject connection_factory(final ThreadContext context) {
    return convertJavaToRuby( getConnectionFactory() );
}

@JRubyMethod(name = "connection_factory=", required = 1)
public IRubyObject set_connection_factory(final ThreadContext context, final IRubyObject factory) {
    setConnectionFactory( (JdbcConnectionFactory) factory.toJava(JdbcConnectionFactory.class) );
    return context.getRuntime().getNil();
}

/**
 * Called during
<code>initialize</code> after the connection factory
 * has been set to check if we can connect and/or perform any initialization
 * necessary.
 * <br/>
 * NOTE: connection has not been configured at this point,
 * nor should we retry - we're creating a brand new JDBC connection
 *
 * @param context
 * @return connection
 */
@JRubyMethod(name = "init_connection")
public synchronized IRubyObject init_connection(final ThreadContext context) {
    try {
        return initConnection(context);
    }
    catch (SQLException e) {
        return handleException(context, e); // throws
    }
}

// Opens a fresh JDBC connection and hands it to the (Ruby) adapter's
// init_connection hook - when the adapter is set and responds to it.
private IRubyObject initConnection(final ThreadContext context) throws SQLException {
    final IRubyObject jdbcConnection = setConnection( newConnection() );
    final IRubyObject adapter = callMethod("adapter"); // self.adapter
    if ( ! adapter.isNil() ) {
        if ( adapter.respondsTo("init_connection") ) {
            adapter.callMethod(context, "init_connection", jdbcConnection);
        }
    }
    else {
        warn(context, "WARN: adapter not set for: " + inspect() +
            " make sure you pass it on initialize(config, adapter)");
    }
    return jdbcConnection;
}

// Returns @connection; when it is gone, re-connects lazily
// (check, lock, re-check to avoid redundant reconnects).
@JRubyMethod(name = "connection")
public IRubyObject connection(final ThreadContext context) {
    if ( getConnection(false) == null ) {
        synchronized (this) {
            if ( getConnection(false) == null ) {
                reconnect(context);
            }
        }
    }
    return getInstanceVariable("@connection");
}

// true when a connection is set and still reported valid by isConnectionValid.
@JRubyMethod(name = "active?", alias = "valid?")
public IRubyObject active_p(final ThreadContext context) {
    IRubyObject connection = getInstanceVariable("@connection");
    if ( connection != null && ! connection.isNil() ) {
        return isConnectionValid(context, getConnection(false)) ?
context.getRuntime().getTrue() : context.getRuntime().getFalse();
    }
    return context.getRuntime().getFalse();
}

// Drops the current JDBC connection (setConnection(null) closes it).
@JRubyMethod(name = "disconnect!")
public synchronized IRubyObject disconnect(final ThreadContext context) {
    // TODO: only here to try resolving multi-thread issues :
    // debug aid, enabled via the arjdbc.disconnect.debug system property -
    // prints the Ruby caller backtrace of every disconnect!
    if ( Boolean.getBoolean("arjdbc.disconnect.debug") ) {
        final List<?> backtrace = createCallerBacktrace(context);
        final Ruby runtime = context.getRuntime();
        runtime.getOut().println(this + " connection.disconnect! occured: ");
        for ( Object element : backtrace ) {
            runtime.getOut().println(element);
        }
        runtime.getOut().flush();
    }
    return setConnection(null);
}

// Opens a new JDBC connection and lets the (Ruby) adapter re-apply its
// per-connection configuration via the configure_connection hook.
@JRubyMethod(name = "reconnect!")
public synchronized IRubyObject reconnect(final ThreadContext context) {
    try {
        final Connection connection = newConnection();
        final IRubyObject result = setConnection( connection );
        final IRubyObject adapter = callMethod("adapter");
        if ( ! adapter.isNil() ) {
            if ( adapter.respondsTo("configure_connection") ) {
                adapter.callMethod(context, "configure_connection");
            }
        }
        else {
            // NOTE: we warn on init_connection - should be enough
        }
        return result;
    }
    catch (SQLException e) {
        return handleException(context, e);
    }
}

// true when a connection exists and isClosed() reports it open.
@JRubyMethod(name = { "open?" /* "conn?" */ })
public IRubyObject open_p(final ThreadContext context) {
    final Connection connection = getConnection(false);
    if ( connection == null ) return context.getRuntime().getFalse();
    try {
        // NOTE: isClosed method generally cannot be called to determine
        // whether a connection to a database is valid or invalid ...
        return context.getRuntime().newBoolean( !
connection.isClosed() );
    }
    catch (SQLException e) {
        return handleException(context, e);
    }
}

// Closes the underlying JDBC connection: returns true when it was actually
// closed, false when there was nothing (left) to close, nil on failure.
@JRubyMethod(name = "close")
public IRubyObject close(final ThreadContext context) {
    final Connection connection = getConnection(false);
    if ( connection == null ) return context.getRuntime().getFalse();
    try {
        final boolean closed = connection.isClosed();
        if ( closed ) return context.getRuntime().getFalse();
        setConnection(null); // does connection.close();
        return context.getRuntime().getTrue();
    }
    catch (Exception e) {
        debugStackTrace(context, e);
        return context.getRuntime().getNil();
    }
}

// Database name: the connection catalog, falling back to the JDBC user name.
@JRubyMethod(name = "database_name")
public IRubyObject database_name(final ThreadContext context) throws SQLException {
    final Connection connection = getConnection(true);
    String name = connection.getCatalog();

    if (name == null) {
        name = connection.getMetaData().getUserName();
        if (name == null) name = "db1"; // TODO why ?
    }

    return context.getRuntime().newString(name);
}

// Executes arbitrary SQL: mapped result rows when the statement yields a
// ResultSet, otherwise generated keys or the update count.
@JRubyMethod(name = "execute", required = 1)
public IRubyObject execute(final ThreadContext context, final IRubyObject sql) {
    return withConnection(context, new Callable<IRubyObject>() {
        public IRubyObject call(final Connection connection) throws SQLException {
            Statement statement = null;
            final String query = sql.convertToString().getUnicodeValue();
            try {
                statement = createStatement(context, connection);
                if ( doExecute(statement, query) ) {
                    return mapResults(context, connection, statement, false);
                }
                else {
                    return mapGeneratedKeysOrUpdateCount(context, connection, statement);
                }
            }
            catch (final SQLException e) {
                debugErrorSQL(context, query);
                throw e;
            }
            finally {
                close(statement);
            }
        }
    });
}

// Creates a Statement honoring the statement_escape_processing config entry.
protected Statement createStatement(final ThreadContext context, final Connection connection)
    throws SQLException {
    final Statement statement = connection.createStatement();
    IRubyObject statementEscapeProcessing = getConfigValue(context, "statement_escape_processing");
    // NOTE: disable (driver) escape processing by default, it's not really
    // needed for AR statements ... if users need it they might configure :
    if ( statementEscapeProcessing.isNil() ) {
        statement.setEscapeProcessing(false);
    }
    else {
        statement.setEscapeProcessing(statementEscapeProcessing.isTrue());
    }
    return statement;
}

/**
 * Execute a query using the given statement.
 * @param statement
 * @param query
 * @return true if the first result is a <code>ResultSet</code>;
 *         false if it is an update count or there are no results
 * @throws SQLException
 */
protected boolean doExecute(final Statement statement, final String query) throws SQLException {
    return genericExecute(statement, query);
}

/**
 * @deprecated renamed to {@link #doExecute(Statement, String)}
 */
@Deprecated
protected boolean genericExecute(final Statement statement, final String query) throws SQLException {
    return statement.execute(query); // Statement.RETURN_GENERATED_KEYS
}

// INSERT returning generated keys.
@JRubyMethod(name = "execute_insert", required = 1)
public IRubyObject execute_insert(final ThreadContext context, final IRubyObject sql)
    throws SQLException {
    final String query = sql.convertToString().getUnicodeValue();
    return executeUpdate(context, query, true);
}

// INSERT with binds: uses a prepared statement unless binds is nil.
@JRubyMethod(name = "execute_insert", required = 2)
public IRubyObject execute_insert(final ThreadContext context, final IRubyObject sql,
    final IRubyObject binds) throws SQLException {
    final String query = sql.convertToString().getUnicodeValue();
    if ( binds == null || binds.isNil() ) { // no prepared statements
        return executeUpdate(context, query, true);
    }
    else { // we allow prepared statements with empty binds parameters
        return executePreparedUpdate(context, query, (List) binds, true);
    }
}

/**
 * Executes an UPDATE (DELETE) SQL statement.
* @param context
 * @param sql
 * @return affected row count
 * @throws SQLException
 */
@JRubyMethod(name = {"execute_update", "execute_delete"}, required = 1)
public IRubyObject execute_update(final ThreadContext context, final IRubyObject sql)
    throws SQLException {
    final String query = sql.convertToString().getUnicodeValue();
    return executeUpdate(context, query, false);
}

/**
 * Executes an UPDATE (DELETE) SQL (prepared - if binds provided) statement.
 * @param context
 * @param sql
 * @return affected row count
 * @throws SQLException
 *
 * @see #execute_update(ThreadContext, IRubyObject)
 */
@JRubyMethod(name = {"execute_update", "execute_delete"}, required = 2)
public IRubyObject execute_update(final ThreadContext context, final IRubyObject sql,
    final IRubyObject binds) throws SQLException {
    final String query = sql.convertToString().getUnicodeValue();
    if ( binds == null || binds.isNil() ) { // no prepared statements
        return executeUpdate(context, query, false);
    }
    else { // we allow prepared statements with empty binds parameters
        return executePreparedUpdate(context, query, (List) binds, false);
    }
}

/**
 * @param context
 * @param query
 * @param returnGeneratedKeys
 * @return row count or generated keys
 *
 * @see #execute_insert(ThreadContext, IRubyObject)
 * @see #execute_update(ThreadContext, IRubyObject)
 */
protected IRubyObject executeUpdate(final ThreadContext context, final String query,
    final boolean returnGeneratedKeys) {
    return withConnection(context, new Callable<IRubyObject>() {
        public IRubyObject call(final Connection connection) throws SQLException {
            Statement statement = null;
            try {
                statement = createStatement(context, connection);
                if ( returnGeneratedKeys ) {
                    statement.executeUpdate(query, Statement.RETURN_GENERATED_KEYS);
                    IRubyObject keys = mapGeneratedKeys(context.getRuntime(), connection, statement);
                    // nil when the driver produced no generated keys
                    return keys == null ? context.getRuntime().getNil() : keys;
                }
                else {
                    final int rowCount = statement.executeUpdate(query);
                    return context.getRuntime().newFixnum(rowCount);
                }
            }
            catch (final SQLException e) {
                debugErrorSQL(context, query);
                throw e;
            }
            finally {
                close(statement);
            }
        }
    });
}

// Prepared-statement variant of executeUpdate (binds set before execution).
private IRubyObject executePreparedUpdate(final ThreadContext context, final String query,
    final List<?> binds, final boolean returnGeneratedKeys) {
    return withConnection(context, new Callable<IRubyObject>() {
        public IRubyObject call(final Connection connection) throws SQLException {
            PreparedStatement statement = null;
            try {
                if ( returnGeneratedKeys ) {
                    statement = connection.prepareStatement(query, Statement.RETURN_GENERATED_KEYS);
                    setStatementParameters(context, connection, statement, binds);
                    statement.executeUpdate();
                    IRubyObject keys = mapGeneratedKeys(context.getRuntime(), connection, statement);
                    return keys == null ? context.getRuntime().getNil() : keys;
                }
                else {
                    statement = connection.prepareStatement(query);
                    setStatementParameters(context, connection, statement, binds);
                    final int rowCount = statement.executeUpdate();
                    return context.getRuntime().newFixnum(rowCount);
                }
            }
            catch (final SQLException e) {
                debugErrorSQL(context, query);
                throw e;
            }
            finally {
                close(statement);
            }
        }
    });
}

/**
 * NOTE: since 1.3 this behaves like <code>execute_query</code> in AR-JDBC 1.2
 * @param context
 * @param sql
 * @param block (optional) block to yield row values
 * @return raw query result as a name => value Hash (unless block given)
 * @throws SQLException
 * @see #execute_query_raw(ThreadContext, IRubyObject[], Block)
 */
@JRubyMethod(name = "execute_query_raw", required = 1) // optional block
public IRubyObject execute_query_raw(final ThreadContext context, final IRubyObject sql,
    final Block block) throws SQLException {
    final String query = sql.convertToString().getUnicodeValue();
    return executeQueryRaw(context, query, 0, block);
}

/**
 * NOTE: since 1.3 this behaves like <code>execute_query</code> in AR-JDBC 1.2
 * @param
context
 * @param args
 * @param block (optional) block to yield row values
 * @return raw query result as a name => value Hash (unless block given)
 * @throws SQLException
 */
@JRubyMethod(name = "execute_query_raw", required = 2, optional = 1)
// @JRubyMethod(name = "execute_query_raw", required = 1, optional = 2)
public IRubyObject execute_query_raw(final ThreadContext context, final IRubyObject[] args,
    final Block block) throws SQLException {
    // args: (sql), (sql, max_rows), (sql, binds), (sql, max_rows, binds)
    final String query = args[0].convertToString().getUnicodeValue(); // sql
    IRubyObject max_rows = args.length > 1 ? args[1] : null;
    IRubyObject binds = args.length > 2 ? args[2] : null;
    final int maxRows;
    // disambiguate the overloaded 2nd argument: numeric => max_rows, else binds
    if ( max_rows == null || max_rows.isNil() ) maxRows = 0;
    else {
        if ( binds instanceof RubyNumeric ) { // (sql, max_rows)
            maxRows = RubyNumeric.fix2int(binds);
            binds = null;
        }
        else {
            if ( max_rows instanceof RubyNumeric ) {
                maxRows = RubyNumeric.fix2int(max_rows);
            }
            else {
                if ( binds == null ) binds = max_rows; // (sql, binds)
                maxRows = 0;
            }
        }
    }

    if ( binds == null || binds.isNil() ) { // no prepared statements
        return executeQueryRaw(context, query, maxRows, block);
    }
    else { // we allow prepared statements with empty binds parameters
        return executePreparedQueryRaw(context, query, (List) binds, maxRows, block);
    }
}

/**
 * @param context
 * @param query
 * @param maxRows
 * @param block
 * @return raw query result (in case no block was given)
 *
 * @see #execute_query_raw(ThreadContext, IRubyObject[], Block)
 */
protected IRubyObject executeQueryRaw(final ThreadContext context,
    final String query, final int maxRows, final Block block) {
    return doExecuteQueryRaw(context, query, maxRows, block, null); // binds == null
}

protected IRubyObject executePreparedQueryRaw(final ThreadContext context,
    final String query, final List<?> binds, final int maxRows, final Block block) {
    return doExecuteQueryRaw(context, query, maxRows, block, binds);
}

// Shared implementation for the raw query paths: plain or prepared statement,
// rows yielded to the block when given, otherwise mapped to a raw result.
private IRubyObject doExecuteQueryRaw(final ThreadContext context,
    final String query, final int maxRows, final Block block, final List<?> binds) {
    return withConnection(context, new Callable<IRubyObject>() {
        public IRubyObject call(final Connection connection) throws SQLException {
            final Ruby runtime = context.getRuntime();

            Statement statement = null; ResultSet resultSet = null;
            try {
                if ( binds == null ) { // plain statement
                    statement = createStatement(context, connection);
                    statement.setMaxRows(maxRows); // zero means there is no limit
                    resultSet = statement.executeQuery(query);
                }
                else {
                    final PreparedStatement prepStatement;
                    statement = prepStatement = connection.prepareStatement(query);
                    statement.setMaxRows(maxRows); // zero means there is no limit
                    setStatementParameters(context, connection, prepStatement, binds);
                    resultSet = prepStatement.executeQuery();
                }

                if ( block != null && block.isGiven() ) {
                    // yield(id1, name1) ... row 1 result data
                    // yield(id2, name2) ... row 2 result data
                    return yieldResultRows(context, runtime, connection, resultSet, block);
                }

                return mapToRawResult(context, runtime, connection, resultSet, false);
            }
            catch (final SQLException e) {
                debugErrorSQL(context, query);
                throw e;
            }
            finally {
                close(resultSet);
                close(statement);
            }
        }
    });
}

/**
 * Executes a query and returns the (AR) result.
 * @param context
 * @param sql
 * @return raw query result as a name => value Hash (unless block given)
 * @throws SQLException
 * @see #execute_query(ThreadContext, IRubyObject[], Block)
 */
@JRubyMethod(name = "execute_query", required = 1)
public IRubyObject execute_query(final ThreadContext context, final IRubyObject sql)
    throws SQLException {
    final String query = sql.convertToString().getUnicodeValue();
    return executeQuery(context, query, 0);
}

/**
 * Executes a query and returns the (AR) result.
* @param context
 * @param args
 * @return and <code>ActiveRecord::Result</code>
 * @throws SQLException
 *
 * @see #execute_query(ThreadContext, IRubyObject, IRubyObject, Block)
 */
@JRubyMethod(name = "execute_query", required = 2, optional = 1)
// @JRubyMethod(name = "execute_query", required = 1, optional = 2)
public IRubyObject execute_query(final ThreadContext context, final IRubyObject[] args)
    throws SQLException {
    // args: (sql), (sql, max_rows), (sql, binds), (sql, max_rows, binds)
    final String query = args[0].convertToString().getUnicodeValue(); // sql
    IRubyObject max_rows = args.length > 1 ? args[1] : null;
    IRubyObject binds = args.length > 2 ? args[2] : null;
    final int maxRows;
    // disambiguate the overloaded 2nd argument: numeric => max_rows, else binds
    if ( max_rows == null || max_rows.isNil() ) maxRows = 0;
    else {
        if ( binds instanceof RubyNumeric ) { // (sql, max_rows)
            maxRows = RubyNumeric.fix2int(binds);
            binds = null;
        }
        else {
            if ( max_rows instanceof RubyNumeric ) {
                maxRows = RubyNumeric.fix2int(max_rows);
            }
            else {
                if ( binds == null ) binds = max_rows; // (sql, binds)
                maxRows = 0;
            }
        }
    }

    if ( binds == null || binds.isNil() ) { // no prepared statements
        return executeQuery(context, query, maxRows);
    }
    else { // we allow prepared statements with empty binds parameters
        return executePreparedQuery(context, query, (List) binds, maxRows);
    }
}

/**
 * NOTE: This methods behavior changed in AR-JDBC 1.3 the old behavior is
 * achievable using {@link #executeQueryRaw(ThreadContext, String, int, Block)}.
 *
 * @param context
 * @param query
 * @param maxRows
 * @return AR (mapped) query result
 *
 * @see #execute_query(ThreadContext, IRubyObject)
 * @see #execute_query(ThreadContext, IRubyObject, IRubyObject)
 * @see #mapToResult(ThreadContext, Ruby, DatabaseMetaData, ResultSet, RubyJdbcConnection.ColumnData[])
 */
protected IRubyObject executeQuery(final ThreadContext context, final String query, final int maxRows) {
    return withConnection(context, new Callable<IRubyObject>() {
        public IRubyObject call(final Connection connection) throws SQLException {
            Statement statement = null; ResultSet resultSet = null;
            try {
                statement = createStatement(context, connection);
                statement.setMaxRows(maxRows); // zero means there is no limit
                resultSet = statement.executeQuery(query);
                return mapQueryResult(context, connection, resultSet);
            }
            catch (final SQLException e) {
                debugErrorSQL(context, query);
                throw e;
            }
            finally {
                close(resultSet);
                close(statement);
            }
        }
    });
}

// Prepared-statement variant of executeQuery (binds set before execution).
protected IRubyObject executePreparedQuery(final ThreadContext context, final String query,
    final List<?> binds, final int maxRows) {
    return withConnection(context, new Callable<IRubyObject>() {
        public IRubyObject call(final Connection connection) throws SQLException {
            PreparedStatement statement = null; ResultSet resultSet = null;
            try {
                statement = connection.prepareStatement(query);
                statement.setMaxRows(maxRows); // zero means there is no limit
                setStatementParameters(context, connection, statement, binds);
                resultSet = statement.executeQuery();
                return mapQueryResult(context, connection, resultSet);
            }
            catch (final SQLException e) {
                debugErrorSQL(context, query);
                throw e;
            }
            finally {
                close(resultSet);
                close(statement);
            }
        }
    });
}

// Converts a JDBC ResultSet into an AR result using extracted column meta-data.
private IRubyObject mapQueryResult(final ThreadContext context, final Connection connection,
    final ResultSet resultSet) throws SQLException {
    final Ruby runtime = context.getRuntime();
    final ColumnData[] columns = extractColumns(runtime, connection, resultSet, false);
    return mapToResult(context, runtime,
connection, resultSet, columns); } /** * @deprecated please do not use this method */ @Deprecated // only used by Oracle adapter - also it's really a bad idea @JRubyMethod(name = "execute_id_insert", required = 2) public IRubyObject execute_id_insert(final ThreadContext context, final IRubyObject sql, final IRubyObject id) throws SQLException { final Ruby runtime = context.getRuntime(); callMethod("warn", RubyString.newUnicodeString(runtime, "DEPRECATED: execute_id_insert(sql, id) will be removed")); return withConnection(context, new Callable<IRubyObject>() { public IRubyObject call(final Connection connection) throws SQLException { PreparedStatement statement = null; final String insertSQL = sql.convertToString().getUnicodeValue(); try { statement = connection.prepareStatement(insertSQL); statement.setLong(1, RubyNumeric.fix2long(id)); statement.executeUpdate(); } catch (final SQLException e) { debugErrorSQL(context, insertSQL); throw e; } finally { close(statement); } return id; } }); } @JRubyMethod(name = "supported_data_types") public IRubyObject supported_data_types(final ThreadContext context) throws SQLException { final Ruby runtime = context.getRuntime(); final Connection connection = getConnection(true); final ResultSet typeDesc = connection.getMetaData().getTypeInfo(); final IRubyObject types; try { types = mapToRawResult(context, runtime, connection, typeDesc, true); } finally { close(typeDesc); } return types; } @JRubyMethod(name = "primary_keys", required = 1) public IRubyObject primary_keys(ThreadContext context, IRubyObject tableName) throws SQLException { @SuppressWarnings("unchecked") List<IRubyObject> primaryKeys = (List) primaryKeys(context, tableName.toString()); return context.getRuntime().newArray(primaryKeys); } protected static final int PRIMARY_KEYS_COLUMN_NAME = 4; @Deprecated // NOTE: this should go private protected List<RubyString> primaryKeys(final ThreadContext context, final String tableName) { return withConnection(context, new 
Callable<List<RubyString>>() {
        public List<RubyString> call(final Connection connection) throws SQLException {
            // convert the name to the database's internal storage case first
            final String _tableName = caseConvertIdentifierForJdbc(connection, tableName);
            final TableName table = extractTableName(connection, null, _tableName);
            return primaryKeys(context, connection, table);
        }
    });
}

// Reads primary key column names via DatabaseMetaData#getPrimaryKeys and
// converts each to the case Rails expects.
protected List<RubyString> primaryKeys(final ThreadContext context,
    final Connection connection, final TableName table) throws SQLException {
    final DatabaseMetaData metaData = connection.getMetaData();
    ResultSet resultSet = null;
    final List<RubyString> keyNames = new ArrayList<RubyString>();
    try {
        resultSet = metaData.getPrimaryKeys(table.catalog, table.schema, table.name);
        final Ruby runtime = context.getRuntime();
        while ( resultSet.next() ) {
            String columnName = resultSet.getString(PRIMARY_KEYS_COLUMN_NAME);
            columnName = caseConvertIdentifierForRails(connection, columnName);
            keyNames.add( RubyString.newUnicodeString(runtime, columnName) );
        }
    }
    finally { close(resultSet); }
    return keyNames;
}

@Deprecated //@JRubyMethod(name = "tables")
public IRubyObject tables(ThreadContext context) {
    return tables(context, null, null, null, TABLE_TYPE);
}

@Deprecated //@JRubyMethod(name = "tables")
public IRubyObject tables(ThreadContext context, IRubyObject catalog) {
    return tables(context, toStringOrNull(catalog), null, null, TABLE_TYPE);
}

@Deprecated //@JRubyMethod(name = "tables")
public IRubyObject tables(ThreadContext context, IRubyObject catalog, IRubyObject schemaPattern) {
    return tables(context, toStringOrNull(catalog), toStringOrNull(schemaPattern), null, TABLE_TYPE);
}

@Deprecated //@JRubyMethod(name = "tables")
public IRubyObject tables(ThreadContext context, IRubyObject catalog, IRubyObject schemaPattern, IRubyObject tablePattern) {
    return tables(context, toStringOrNull(catalog), toStringOrNull(schemaPattern), toStringOrNull(tablePattern), TABLE_TYPE);
}

// Ruby API: tables([catalog, [schema_pattern, [table_pattern, [types]]]])
@JRubyMethod(name = "tables", required = 0, optional = 4)
public IRubyObject tables(final ThreadContext context, final IRubyObject[] args) {
    switch ( args.length ) {
        case 0: // ()
            return tables(context, null, null, null, TABLE_TYPE);
        case 1: // (catalog)
            return tables(context, toStringOrNull(args[0]), null, null, TABLE_TYPE);
        case 2: // (catalog, schemaPattern)
            return tables(context, toStringOrNull(args[0]), toStringOrNull(args[1]), null, TABLE_TYPE);
        case 3: // (catalog, schemaPattern, tablePattern)
            return tables(context, toStringOrNull(args[0]), toStringOrNull(args[1]), toStringOrNull(args[2]), TABLE_TYPE);
    }
    // 4 args: last argument selects the table types to match
    return tables(context, toStringOrNull(args[0]), toStringOrNull(args[1]), toStringOrNull(args[2]), getTypes(args[3]));
}

protected IRubyObject tables(final ThreadContext context,
    final String catalog, final String schemaPattern, final String tablePattern, final String[] types) {
    return withConnection(context, new Callable<IRubyObject>() {
        public IRubyObject call(final Connection connection) throws SQLException {
            return matchTables(context.getRuntime(), connection, catalog, schemaPattern, tablePattern, types, false);
        }
    });
}

protected String[] getTableTypes() {
    return TABLE_TYPES;
}

// Ruby API: table_exists?(table)
@JRubyMethod(name = "table_exists?")
public IRubyObject table_exists_p(final ThreadContext context, IRubyObject table) {
    if ( table.isNil() ) {
        throw context.getRuntime().newArgumentError("nil table name");
    }
    final String tableName = table.toString();
    return tableExists(context, null, tableName);
}

// Ruby API: table_exists?(table, schema)
@JRubyMethod(name = "table_exists?")
public IRubyObject table_exists_p(final ThreadContext context, IRubyObject table, IRubyObject schema) {
    if ( table.isNil() ) {
        throw context.getRuntime().newArgumentError("nil table name");
    }
    final String tableName = table.toString();
    final String defaultSchema = schema.isNil() ?
null : schema.toString();
    return tableExists(context, defaultSchema, tableName);
}

protected IRubyObject tableExists(final ThreadContext context,
    final String defaultSchema, final String tableName) {
    final Ruby runtime = context.getRuntime();
    return withConnection(context, new Callable<RubyBoolean>() {
        public RubyBoolean call(final Connection connection) throws SQLException {
            final TableName components = extractTableName(connection, defaultSchema, tableName);
            return runtime.newBoolean( tableExists(runtime, connection, components) );
        }
    });
}

// Ruby API: columns(table, [catalog, [default_schema]]) - column meta-data
// for an existing table; raises (SQLException) when the table does not exist.
@JRubyMethod(name = {"columns", "columns_internal"}, required = 1, optional = 2)
public IRubyObject columns_internal(final ThreadContext context, final IRubyObject[] args)
    throws SQLException {
    return withConnection(context, new Callable<RubyArray>() {
        public RubyArray call(final Connection connection) throws SQLException {
            ResultSet columns = null;
            try {
                final String tableName = args[0].toString();
                // optionals (NOTE: catalog argument was never used before 1.3.0) :
                final String catalog = args.length > 1 ? toStringOrNull(args[1]) : null;
                final String defaultSchema = args.length > 2 ? toStringOrNull(args[2]) : null;
                final TableName components;
                if ( catalog == null ) { // backwards-compatibility with < 1.3.0
                    components = extractTableName(connection, defaultSchema, tableName);
                }
                else {
                    components = extractTableName(connection, catalog, defaultSchema, tableName);
                }
                if ( ! tableExists(context.getRuntime(), connection, components) ) {
                    throw new SQLException("table: " + tableName + " does not exist");
                }
                final DatabaseMetaData metaData = connection.getMetaData();
                columns = metaData.getColumns(components.catalog, components.schema, components.name, null);
                return mapColumnsResult(context, metaData, components, columns);
            }
            finally { close(columns); }
        }
    });
}

// Ruby API: indexes(table_name, name) - schema defaults to nil
@JRubyMethod(name = "indexes")
public IRubyObject indexes(final ThreadContext context, IRubyObject tableName, IRubyObject name) {
    return indexes(context, toStringOrNull(tableName), toStringOrNull(name), null);
}

// Ruby API: indexes(table_name, name, schema_name)
@JRubyMethod(name = "indexes")
public IRubyObject indexes(final ThreadContext context, IRubyObject tableName, IRubyObject name, IRubyObject schemaName) {
    return indexes(context, toStringOrNull(tableName), toStringOrNull(name), toStringOrNull(schemaName));
}

// NOTE: metaData.getIndexInfo row mappings (1-based result-set column indexes) :
protected static final int INDEX_INFO_TABLE_NAME = 3;
protected static final int INDEX_INFO_NON_UNIQUE = 4;
protected static final int INDEX_INFO_NAME = 6;
protected static final int INDEX_INFO_COLUMN_NAME = 9;

/**
 * Default JDBC introspection for index metadata on the JdbcConnection.
 *
 * JDBC index metadata is denormalized (multiple rows may be returned for
 * one index, one row per column in the index), so a simple block-based
 * filter like that used for tables doesn't really work here. Callers
 * should filter the return from this method instead.
 */
protected IRubyObject indexes(final ThreadContext context, final String tableName, final String name, final String schemaName) {
    return withConnection(context, new Callable<IRubyObject>() {
        public IRubyObject call(final Connection connection) throws SQLException {
            final Ruby runtime = context.getRuntime();
            final RubyClass IndexDefinition = getIndexDefinition(context);
            String _tableName = caseConvertIdentifierForJdbc(connection, tableName);
            String _schemaName = caseConvertIdentifierForJdbc(connection, schemaName);
            final TableName table = extractTableName(connection, _schemaName, _tableName);
            // primary key columns are skipped below - they are not reported as indexes
            final List<RubyString> primaryKeys = primaryKeys(context, connection, table);
            ResultSet indexInfoSet = null;
            final List<IRubyObject> indexes = new ArrayList<IRubyObject>();
            try {
                final DatabaseMetaData metaData = connection.getMetaData();
                indexInfoSet = metaData.getIndexInfo(table.catalog, table.schema, table.name, false, true);
                String currentIndex = null;
                while ( indexInfoSet.next() ) {
                    String indexName = indexInfoSet.getString(INDEX_INFO_NAME);
                    if ( indexName == null ) continue;
                    indexName = caseConvertIdentifierForRails(metaData, indexName);
                    final String columnName = indexInfoSet.getString(INDEX_INFO_COLUMN_NAME);
                    final RubyString rubyColumnName = RubyString.newUnicodeString(
                        runtime, caseConvertIdentifierForRails(metaData, columnName)
                    );
                    if ( primaryKeys.contains(rubyColumnName) ) continue;
                    // We are working on a new index
                    if ( ! indexName.equals(currentIndex) ) {
                        currentIndex = indexName;
                        String indexTableName = indexInfoSet.getString(INDEX_INFO_TABLE_NAME);
                        indexTableName = caseConvertIdentifierForRails(metaData, indexTableName);
                        final boolean nonUnique = indexInfoSet.getBoolean(INDEX_INFO_NON_UNIQUE);
                        IRubyObject[] args = new IRubyObject[] {
                            RubyString.newUnicodeString(runtime, indexTableName), // table_name
                            RubyString.newUnicodeString(runtime, indexName), // index_name
                            runtime.newBoolean( ! nonUnique ), // unique
                            runtime.newArray() // [] for column names, we'll add to that in just a bit
                            // orders, (since AR 3.2) where, type, using (AR 4.0)
                        };
                        indexes.add( IndexDefinition.callMethod(context, "new", args) ); // IndexDefinition.new
                    }
                    // One or more columns can be associated with an index
                    IRubyObject lastIndexDef = indexes.isEmpty() ? null : indexes.get(indexes.size() - 1);
                    if (lastIndexDef != null) {
                        lastIndexDef.callMethod(context, "columns").callMethod(context, "<<", rubyColumnName);
                    }
                }
                return runtime.newArray(indexes);
            }
            finally { close(indexInfoSet); }
        }
    });
}

// Resolves the adapter's IndexDefinition class, falling back to the
// runtime-level default when the adapter does not declare its own constant.
protected RubyClass getIndexDefinition(final ThreadContext context) {
    final RubyClass adapterClass = getAdapter(context).getMetaClass();
    IRubyObject IDef = adapterClass.getConstantAt("IndexDefinition");
    return IDef != null ? (RubyClass) IDef : getIndexDefinition(context.runtime);
}

// Ruby API: foreign_keys(table_name)
@JRubyMethod
public IRubyObject foreign_keys(final ThreadContext context, IRubyObject table_name) {
    return foreignKeys(context, table_name.toString(), null, null);
}

// Maps DatabaseMetaData#getImportedKeys rows into ForeignKeyDefinition objects.
protected IRubyObject foreignKeys(final ThreadContext context, final String tableName, final String schemaName, final String catalog) {
    return withConnection(context, new Callable<IRubyObject>() {
        public IRubyObject call(final Connection connection) throws SQLException {
            final Ruby runtime = context.getRuntime();
            final RubyClass FKDefinition = getForeignKeyDefinition(context);
            String _tableName = caseConvertIdentifierForJdbc(connection, tableName);
            String _schemaName = caseConvertIdentifierForJdbc(connection, schemaName);
            final TableName table = extractTableName(connection, catalog, _schemaName, _tableName);
            ResultSet fkInfoSet = null;
            final List<IRubyObject> fKeys = new ArrayList<IRubyObject>(8);
            try {
                final DatabaseMetaData metaData = connection.getMetaData();
                fkInfoSet = metaData.getImportedKeys(table.catalog, table.schema, table.name);
                while ( fkInfoSet.next() ) {
                    final RubyHash options = RubyHash.newHash(runtime);
                    String fkName =
fkInfoSet.getString("FK_NAME");
                    if (fkName != null) {
                        fkName = caseConvertIdentifierForRails(metaData, fkName);
                        options.put(runtime.newSymbol("name"), fkName);
                    }
                    String columnName = fkInfoSet.getString("FKCOLUMN_NAME");
                    options.put(runtime.newSymbol("column"), caseConvertIdentifierForRails(metaData, columnName));
                    columnName = fkInfoSet.getString("PKCOLUMN_NAME");
                    options.put(runtime.newSymbol("primary_key"), caseConvertIdentifierForRails(metaData, columnName));
                    String fkTableName = fkInfoSet.getString("FKTABLE_NAME");
                    fkTableName = caseConvertIdentifierForRails(metaData, fkTableName);
                    String pkTableName = fkInfoSet.getString("PKTABLE_NAME");
                    pkTableName = caseConvertIdentifierForRails(metaData, pkTableName);
                    // DELETE_RULE / UPDATE_RULE ints are mapped to AR rule symbols (or omitted)
                    final String onDelete = extractForeignKeyRule( fkInfoSet.getInt("DELETE_RULE") );
                    if ( onDelete != null ) options.op_aset(context, runtime.newSymbol("on_delete"), runtime.newSymbol(onDelete));
                    final String onUpdate = extractForeignKeyRule( fkInfoSet.getInt("UPDATE_RULE") );
                    if ( onUpdate != null ) options.op_aset(context, runtime.newSymbol("on_update"), runtime.newSymbol(onUpdate));
                    IRubyObject[] args = new IRubyObject[] {
                        RubyString.newUnicodeString(runtime, fkTableName), // from_table
                        RubyString.newUnicodeString(runtime, pkTableName), // to_table
                        options
                    };
                    fKeys.add( FKDefinition.callMethod(context, "new", args) ); // ForeignKeyDefinition.new
                }
                return runtime.newArray(fKeys);
            }
            finally { close(fkInfoSet); }
        }
    });
}

// Maps JDBC importedKey* rule constants to AR rule names; null means "no action"
// (or an unrecognized rule) and results in the option being omitted.
protected String extractForeignKeyRule(final int rule) {
    switch (rule) {
        case DatabaseMetaData.importedKeyNoAction : return null ;
        case DatabaseMetaData.importedKeyCascade : return "cascade" ;
        case DatabaseMetaData.importedKeySetNull : return "nullify" ;
        case DatabaseMetaData.importedKeySetDefault: return "default" ;
    }
    return null;
}

// Resolves the adapter's ForeignKeyDefinition class, falling back to the
// runtime-level default when the adapter does not declare its own constant.
protected RubyClass getForeignKeyDefinition(final ThreadContext context) {
    final RubyClass adapterClass = getAdapter(context).getMetaClass();
    IRubyObject FKDef = adapterClass.getConstantAt("ForeignKeyDefinition");
    return FKDef != null ? (RubyClass) FKDef : getForeignKeyDefinition(context.runtime);
}

// Ruby API: supports_foreign_keys? - delegates to the JDBC driver's
// supportsIntegrityEnhancementFacility() flag.
@JRubyMethod(name = "supports_foreign_keys?")
public IRubyObject supports_foreign_keys_p(final ThreadContext context) throws SQLException {
    return withConnection(context, new Callable<IRubyObject>() {
        public IRubyObject call(final Connection connection) throws SQLException {
            final DatabaseMetaData metaData = connection.getMetaData();
            return context.getRuntime().newBoolean( metaData.supportsIntegrityEnhancementFacility() );
        }
    });
}

// Ruby API: supports_views? - true when the driver reports a "VIEW" table type.
@JRubyMethod(name = "supports_views?")
public IRubyObject supports_views_p(final ThreadContext context) throws SQLException {
    return withConnection(context, new Callable<IRubyObject>() {
        public IRubyObject call(final Connection connection) throws SQLException {
            final DatabaseMetaData metaData = connection.getMetaData();
            final ResultSet tableTypes = metaData.getTableTypes();
            try {
                while ( tableTypes.next() ) {
                    if ( "VIEW".equalsIgnoreCase( tableTypes.getString(1) ) ) {
                        return context.getRuntime().newBoolean( true );
                    }
                }
            }
            finally {
                close(tableTypes);
            }
            return context.getRuntime().newBoolean( false );
        }
    });
}

// NOTE: this seems to be not used ... at all ?!
/*
 * sql, values (array), types (column.type array), name = nil, pk = nil, id_value = nil, sequence_name = nil
 */
@Deprecated
@JRubyMethod(name = "insert_bind", required = 3, rest = true)
public IRubyObject insert_bind(final ThreadContext context, final IRubyObject[] args) throws SQLException {
    final Ruby runtime = context.getRuntime();
    return withConnection(context, new Callable<IRubyObject>() {
        public IRubyObject call(final Connection connection) throws SQLException {
            final String sql = args[0].convertToString().toString();
            PreparedStatement statement = null;
            try {
                // RETURN_GENERATED_KEYS so the inserted id can be mapped back
                statement = connection.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS);
                setPreparedStatementValues(context, connection, statement, args[1], args[2]);
                statement.executeUpdate();
                return mapGeneratedKeys(runtime, connection, statement);
            }
            finally { close(statement); }
        }
    });
}

// NOTE: this seems to be not used ... at all ?!
/*
 * sql, values (array), types (column.type array), name = nil
 */
@Deprecated
@JRubyMethod(name = "update_bind", required = 3, rest = true)
public IRubyObject update_bind(final ThreadContext context, final IRubyObject[] args) throws SQLException {
    final Ruby runtime = context.getRuntime();
    Arity.checkArgumentCount(runtime, args, 3, 4);
    return withConnection(context, new Callable<IRubyObject>() {
        public IRubyObject call(final Connection connection) throws SQLException {
            final String sql = args[0].convertToString().toString();
            PreparedStatement statement = null;
            try {
                statement = connection.prepareStatement(sql);
                setPreparedStatementValues(context, connection, statement, args[1], args[2]);
                statement.executeUpdate();
            }
            finally { close(statement); }
            return runtime.getNil();
        }
    });
}

// Ruby API: yields the raw JDBC connection (wrapped as a Ruby object) to the
// given block, inside the usual with-connection retry handling.
@JRubyMethod(name = "with_connection_retry_guard", frame = true)
public IRubyObject with_connection_retry_guard(final ThreadContext context, final Block block) {
    return withConnection(context, new Callable<IRubyObject>() {
        public IRubyObject call(final Connection connection) throws SQLException {
            return block.call(context, new IRubyObject[] { convertJavaToRuby(connection) });
        }
    });
}

/*
 * (binary?, column_name, table_name, id_key, id_value, value)
 */
@Deprecated
@JRubyMethod(name = "write_large_object", required = 6)
public IRubyObject write_large_object(final ThreadContext context, final IRubyObject[] args)
    throws SQLException {
    final boolean binary = args[0].isTrue();
    final String columnName = args[1].toString();
    final String tableName = args[2].toString();
    final String idKey = args[3].toString();
    final IRubyObject idVal = args[4];
    final IRubyObject lobValue = args[5];
    int count = updateLobValue(context, tableName, columnName, null, idKey, idVal, null, lobValue, binary);
    return context.getRuntime().newFixnum(count);
}

// Ruby API: update_lob_value(record, column, value) - writes a BLOB/CLOB value
// for the given AR record's column; returns the update count.
@JRubyMethod(name = "update_lob_value", required = 3)
public IRubyObject update_lob_value(final ThreadContext context,
    final IRubyObject record, final IRubyObject column, final IRubyObject value)
    throws SQLException {
    final boolean binary = // column.type == :binary
        // NOTE(review): identity (==) comparison of toString() against the
        // literal "binary" - this only works if the symbol's string is interned
        // to the same instance; confirm, otherwise this should be .equals()
        column.callMethod(context, "type").toString() == (Object) "binary";
    final IRubyObject recordClass = record.callMethod(context, "class");
    final IRubyObject adapter = recordClass.callMethod(context, "connection");
    IRubyObject columnName = column.callMethod(context, "name");
    columnName = adapter.callMethod(context, "quote_column_name", columnName);
    IRubyObject tableName = recordClass.callMethod(context, "table_name");
    tableName = adapter.callMethod(context, "quote_table_name", tableName);
    final IRubyObject idKey = recordClass.callMethod(context, "primary_key"); // callMethod(context, "quote", primaryKey);
    final IRubyObject idColumn = // record.class.columns_hash['id']
        recordClass.callMethod(context, "columns_hash").callMethod(context, "[]", idKey);
    final IRubyObject id = record.callMethod(context, "id"); // record.id
    final int count = updateLobValue(context,
        tableName.toString(), columnName.toString(), column,
        idKey.toString(), id, idColumn, value, binary
    );
    return
context.getRuntime().newFixnum(count);
}

// Performs the actual LOB UPDATE: binds the value as BLOB or CLOB (parameter 1)
// and the record id (parameter 2). Table/column/id-key names are expected to
// be already quoted by the caller - they are concatenated into the SQL here.
private int updateLobValue(final ThreadContext context,
    final String tableName, final String columnName, final IRubyObject column,
    final String idKey, final IRubyObject idValue, final IRubyObject idColumn,
    final IRubyObject value, final boolean binary) {

    final String sql = "UPDATE "+ tableName +" SET "+ columnName +" = ? WHERE "+ idKey +" = ?" ;

    return withConnection(context, new Callable<Integer>() {
        public Integer call(final Connection connection) throws SQLException {
            PreparedStatement statement = null;
            try {
                statement = connection.prepareStatement(sql);
                if ( binary ) { // blob
                    setBlobParameter(context, connection, statement, 1, value, column, Types.BLOB);
                }
                else { // clob
                    setClobParameter(context, connection, statement, 1, value, column, Types.CLOB);
                }
                setStatementParameter(context, context.getRuntime(), connection, statement, 2, idValue, idColumn);
                return statement.executeUpdate();
            }
            finally { close(statement); }
        }
    });
}

protected String caseConvertIdentifierForRails(final Connection connection, final String value)
    throws SQLException {
    if ( value == null ) return null;
    return caseConvertIdentifierForRails(connection.getMetaData(), value);
}

/**
 * Convert an identifier coming back from the database to a case which Rails is expecting.
 *
 * Assumption: Rails identifiers will be quoted for mixed or will stay mixed
 * as identifier names in Rails itself. Otherwise, they expect identifiers to
 * be lower-case. Databases which store identifiers uppercase should be made
 * lower-case.
 *
 * Assumption 2: It is always safe to convert all upper case names since it appears that
 * some adapters do not report StoresUpper/Lower/Mixed correctly (am I right postgres/mysql?).
 */
protected static String caseConvertIdentifierForRails(final DatabaseMetaData metaData, final String value)
    throws SQLException {
    if ( value == null ) return null;
    return metaData.storesUpperCaseIdentifiers() ? value.toLowerCase() : value;
}

protected String caseConvertIdentifierForJdbc(final Connection connection, final String value)
    throws SQLException {
    if ( value == null ) return null;
    return caseConvertIdentifierForJdbc(connection.getMetaData(), value);
}

/**
 * Convert an identifier destined for a method which cares about the databases internal
 * storage case. Methods like DatabaseMetaData.getPrimaryKeys() needs the table name to match
 * the internal storage name. Arbitrary queries and the like DO NOT need to do this.
 */
protected static String caseConvertIdentifierForJdbc(final DatabaseMetaData metaData, final String value)
    throws SQLException {
    if ( value == null ) return null;
    if ( metaData.storesUpperCaseIdentifiers() ) {
        return value.toUpperCase();
    }
    else if ( metaData.storesLowerCaseIdentifiers() ) {
        return value.toLowerCase();
    }
    return value;
}

// Ruby API (meta): jndi_config?(config) - true when config[:jndi] or
// config[:data_source] is set to a truthy value.
@JRubyMethod(name = "jndi_config?", meta = true)
public static IRubyObject jndi_config_p(final ThreadContext context, final IRubyObject self, final IRubyObject config) {
    // config[:jndi] || config[:data_source]
    final Ruby runtime = context.getRuntime();
    IRubyObject configValue;
    if ( config.getClass() == RubyHash.class ) { // "optimized" version
        final RubyHash configHash = ((RubyHash) config);
        configValue = configHash.fastARef(runtime.newSymbol("jndi"));
        if ( configValue == null ) {
            configValue = configHash.fastARef(runtime.newSymbol("data_source"));
        }
    }
    else {
        configValue = config.callMethod(context, "[]", runtime.newSymbol("jndi"));
        if ( configValue.isNil() ) configValue = null;
        if ( configValue == null ) {
            configValue = config.callMethod(context, "[]", runtime.newSymbol("data_source"));
        }
    }
    // NOTE(review): the identity compare (configValue == rubyFalse) assumes
    // runtime.newBoolean(false) returns the runtime's singleton false - confirm
    final IRubyObject rubyFalse = runtime.newBoolean( false );
    if ( configValue == null || configValue.isNil() || configValue == rubyFalse ) {
        return rubyFalse;
    }
    return context.getRuntime().newBoolean( true );
}

private IRubyObject getConfig(final ThreadContext context) {
    return getInstanceVariable("@config"); // this.callMethod(context, "config");
}

// Looks up a single config entry by (symbol) key, with a RubyHash fast path.
protected final IRubyObject getConfigValue(final ThreadContext context, final String key) {
    final IRubyObject config = getConfig(context);
    final RubySymbol keySym = context.runtime.newSymbol(key);
    if ( config instanceof RubyHash ) {
        return ((RubyHash) config).op_aref(context, keySym);
    }
    return config.callMethod(context, "[]", keySym);
}

/**
 * @deprecated renamed to {@link #getConfigValue(ThreadContext, String)}
 */
@Deprecated
protected IRubyObject config_value(ThreadContext context, String key) {
    return getConfigValue(context, key);
}

private static String toStringOrNull(final IRubyObject arg) {
    return arg.isNil() ? null : arg.toString();
}

protected final IRubyObject getAdapter(final ThreadContext context) {
    return callMethod(context, "adapter");
}

protected final RubyClass getJdbcColumnClass(final ThreadContext context) {
    return (RubyClass) getAdapter(context).callMethod(context, "jdbc_column_class");
}

protected JdbcConnectionFactory getConnectionFactory() throws RaiseException {
    if ( connectionFactory == null ) {
        // NOTE: only for (backwards) compatibility - no likely that anyone
        // overriden this - thus can likely be safely deleted (no needed) :
        IRubyObject connection_factory = getInstanceVariable("@connection_factory");
        if ( connection_factory == null ) {
            throw getRuntime().newRuntimeError("@connection_factory not set");
        }
        connectionFactory = (JdbcConnectionFactory) connection_factory.toJava(JdbcConnectionFactory.class);
    }
    return connectionFactory;
}

public void setConnectionFactory(JdbcConnectionFactory connectionFactory) {
    this.connectionFactory = connectionFactory;
}

protected Connection newConnection() throws RaiseException, SQLException {
    return getConnectionFactory().newConnection();
}

// Normalizes the Ruby `types` argument of tables(...) into a String[] -
// either each element of a RubyArray, or the single value's to_s.
private static String[] getTypes(final IRubyObject typeArg) {
    if ( typeArg instanceof RubyArray ) {
        IRubyObject[] rubyTypes = ((RubyArray) typeArg).toJavaArray();
        final String[] types = new String[rubyTypes.length];
        for ( int i = 0; i
< types.length; i++ ) { types[i] = rubyTypes[i].toString(); } return types; } return new String[] { typeArg.toString() }; // expect a RubyString } /** * @deprecated this method is no longer used, instead consider overriding * {@link #mapToResult(ThreadContext, Ruby, DatabaseMetaData, ResultSet, RubyJdbcConnection.ColumnData[])} */ @Deprecated protected void populateFromResultSet( final ThreadContext context, final Ruby runtime, final List<IRubyObject> results, final ResultSet resultSet, final ColumnData[] columns) throws SQLException { final ResultHandler resultHandler = ResultHandler.getInstance(runtime); while ( resultSet.next() ) { results.add( resultHandler.mapRawRow(context, runtime, columns, resultSet, this) ); } } /** * Maps a query result into a <code>ActiveRecord</code> result. * @param context * @param runtime * @param metaData * @param resultSet * @param columns * @return since 3.1 expected to return a <code>ActiveRecord::Result</code> * @throws SQLException */ protected IRubyObject mapToResult(final ThreadContext context, final Ruby runtime, final Connection connection, final ResultSet resultSet, final ColumnData[] columns) throws SQLException { final ResultHandler resultHandler = ResultHandler.getInstance(runtime); final RubyArray resultRows = runtime.newArray(); while ( resultSet.next() ) { resultRows.append( resultHandler.mapRow(context, runtime, columns, resultSet, this) ); } return resultHandler.newResult(context, runtime, columns, resultRows); } @Deprecated protected IRubyObject jdbcToRuby(final Ruby runtime, final int column, final int type, final ResultSet resultSet) throws SQLException { return jdbcToRuby(runtime.getCurrentContext(), runtime, column, type, resultSet); } protected IRubyObject jdbcToRuby( final ThreadContext context, final Ruby runtime, final int column, final int type, final ResultSet resultSet) throws SQLException { try { switch (type) { case Types.BLOB: case Types.BINARY: case Types.VARBINARY: case Types.LONGVARBINARY: return 
streamToRuby(context, runtime, resultSet, column);
        case Types.CLOB:
        case Types.NCLOB: // JDBC 4.0
            return readerToRuby(context, runtime, resultSet, column);
        case Types.LONGVARCHAR:
        case Types.LONGNVARCHAR: // JDBC 4.0
            if ( runtime.is1_9() ) {
                return readerToRuby(context, runtime, resultSet, column);
            }
            else {
                return streamToRuby(context, runtime, resultSet, column);
            }
        case Types.TINYINT:
        case Types.SMALLINT:
        case Types.INTEGER:
            return integerToRuby(context, runtime, resultSet, column);
        case Types.REAL:
        case Types.FLOAT:
        case Types.DOUBLE:
            return doubleToRuby(context, runtime, resultSet, column);
        case Types.BIGINT:
            return bigIntegerToRuby(context, runtime, resultSet, column);
        case Types.NUMERIC:
        case Types.DECIMAL:
            return decimalToRuby(context, runtime, resultSet, column);
        case Types.DATE:
            return dateToRuby(context, runtime, resultSet, column);
        case Types.TIME:
            return timeToRuby(context, runtime, resultSet, column);
        case Types.TIMESTAMP:
            return timestampToRuby(context, runtime, resultSet, column);
        case Types.BIT:
        case Types.BOOLEAN:
            return booleanToRuby(context, runtime, resultSet, column);
        case Types.SQLXML: // JDBC 4.0
            return xmlToRuby(context, runtime, resultSet, column);
        case Types.ARRAY: // we handle JDBC Array into (Ruby) []
            return arrayToRuby(context, runtime, resultSet, column);
        case Types.NULL:
            return runtime.getNil();
        // NOTE: (JDBC) exotic stuff just cause it's so easy with JRuby :)
        case Types.JAVA_OBJECT:
        case Types.OTHER:
            return objectToRuby(context, runtime, resultSet, column);
        // (default) String
        case Types.CHAR:
        case Types.VARCHAR:
        case Types.NCHAR: // JDBC 4.0
        case Types.NVARCHAR: // JDBC 4.0
        default:
            return stringToRuby(context, runtime, resultSet, column);
        }
        // NOTE: not mapped types :
        //case Types.DISTINCT:
        //case Types.STRUCT:
        //case Types.REF:
        //case Types.DATALINK:
    }
    catch (IOException e) {
        // stream/reader conversions may raise IOException - surface it as SQLException
        throw new SQLException(e.getMessage(), e);
    }
}

// SQL NULL detection pattern used throughout the *ToRuby converters below:
// getters return a zero/null default for SQL NULL, so wasNull() is checked
// only when that default was observed.
protected IRubyObject integerToRuby(final ThreadContext context,
    final Ruby runtime, final ResultSet resultSet, final int column) throws SQLException {
    final long value = resultSet.getLong(column);
    if ( value == 0 && resultSet.wasNull() ) return runtime.getNil();
    return integerToRuby(runtime, resultSet, value);
}

@Deprecated
protected IRubyObject integerToRuby(
    final Ruby runtime, final ResultSet resultSet, final long longValue)
    throws SQLException {
    if ( longValue == 0 && resultSet.wasNull() ) return runtime.getNil();
    return runtime.newFixnum(longValue);
}

protected IRubyObject doubleToRuby(final ThreadContext context,
    final Ruby runtime, final ResultSet resultSet, final int column) throws SQLException {
    final double value = resultSet.getDouble(column);
    if ( value == 0 && resultSet.wasNull() ) return runtime.getNil();
    return doubleToRuby(runtime, resultSet, value);
}

@Deprecated
protected IRubyObject doubleToRuby(
    final Ruby runtime, final ResultSet resultSet, double doubleValue)
    throws SQLException {
    if ( doubleValue == 0 && resultSet.wasNull() ) return runtime.getNil();
    return runtime.newFloat(doubleValue);
}

protected IRubyObject stringToRuby(final ThreadContext context,
    final Ruby runtime, final ResultSet resultSet, final int column) throws SQLException {
    final String value = resultSet.getString(column);
    if ( value == null && resultSet.wasNull() ) return runtime.getNil();
    return stringToRuby(runtime, resultSet, value);
}

@Deprecated
protected IRubyObject stringToRuby(
    final Ruby runtime, final ResultSet resultSet, final String string)
    throws SQLException {
    if ( string == null && resultSet.wasNull() ) return runtime.getNil();
    return RubyString.newUnicodeString(runtime, string);
}

// BIGINT values are read as strings and normalized through BigInteger to
// avoid long overflow for out-of-range values.
protected IRubyObject bigIntegerToRuby(final ThreadContext context,
    final Ruby runtime, final ResultSet resultSet, final int column) throws SQLException {
    final String value = resultSet.getString(column);
    if ( value == null && resultSet.wasNull() ) return runtime.getNil();
    return bigIntegerToRuby(runtime, resultSet, value);
}

@Deprecated
protected IRubyObject bigIntegerToRuby( final
Ruby runtime, final ResultSet resultSet, final String intValue)
    throws SQLException {
    if ( intValue == null && resultSet.wasNull() ) return runtime.getNil();
    return RubyBignum.bignorm(runtime, new BigInteger(intValue));
}

protected IRubyObject decimalToRuby(final ThreadContext context,
    final Ruby runtime, final ResultSet resultSet, final int column) throws SQLException {
    final String value = resultSet.getString(column);
    if ( value == null && resultSet.wasNull() ) return runtime.getNil();
    // NOTE: JRuby 1.6 -> 1.7 API change : moved org.jruby.RubyBigDecimal
    return runtime.getKernel().callMethod("BigDecimal", runtime.newString(value));
}

// When true, date/time/timestamp values are returned as raw strings instead
// of being type-cast through the adapter; null means "not configured".
protected static Boolean rawDateTime;
static {
    final String dateTimeRaw = System.getProperty("arjdbc.datetime.raw");
    if ( dateTimeRaw != null ) {
        rawDateTime = Boolean.parseBoolean(dateTimeRaw);
    }
    // NOTE: we do this since it will have a different value depending on
    // AR version - since 4.0 false by default otherwise will be true ...
}

// Ruby API (meta): raw_date_time? - nil when unconfigured, else the flag.
@JRubyMethod(name = "raw_date_time?", meta = true)
public static IRubyObject useRawDateTime(final ThreadContext context, final IRubyObject self) {
    if ( rawDateTime == null ) return context.getRuntime().getNil();
    return context.getRuntime().newBoolean( rawDateTime.booleanValue() );
}

// Ruby API (meta): raw_date_time= - nil clears the flag; any other
// non-boolean value is treated as true.
@JRubyMethod(name = "raw_date_time=", meta = true)
public static IRubyObject setRawDateTime(final IRubyObject self, final IRubyObject value) {
    if ( value instanceof RubyBoolean ) {
        rawDateTime = ((RubyBoolean) value).isTrue();
    }
    else {
        rawDateTime = value.isNil() ? null : Boolean.TRUE;
    }
    return value;
}

// Casts a string value through the adapter: lookup_cast_type(typeName)
// followed by type_cast_from_database(value).
private static IRubyObject typeCastFromDatabase(final ThreadContext context,
    final IRubyObject adapter, final RubySymbol typeName, final RubyString value) {
    final IRubyObject type = adapter.callMethod(context, "lookup_cast_type", typeName);
    return type.callMethod(context, "type_cast_from_database", value);
}

protected IRubyObject dateToRuby(final ThreadContext context,
    final Ruby runtime, final ResultSet resultSet, final int column)
    throws SQLException {
    final Date value = resultSet.getDate(column);
    if ( value == null ) {
        // an empty string is returned for a null-valued getter that was not SQL NULL
        if ( resultSet.wasNull() ) return runtime.getNil();
        return runtime.newString();
    }

    final RubyString strValue = RubyString.newUnicodeString(runtime, value.toString());
    if ( rawDateTime != null && rawDateTime.booleanValue() ) return strValue;

    final IRubyObject adapter = callMethod(context, "adapter"); // self.adapter
    if ( adapter.isNil() ) return strValue; // NOTE: we warn on init_connection

    if ( usesType(runtime) ) {
        return typeCastFromDatabase(context, adapter, runtime.newSymbol("date"), strValue);
    }
    return adapter.callMethod(context, "_string_to_date", strValue);
}

protected IRubyObject timeToRuby(final ThreadContext context,
    final Ruby runtime, final ResultSet resultSet, final int column)
    throws SQLException {
    final Time value = resultSet.getTime(column);
    if ( value == null ) {
        if ( resultSet.wasNull() ) return runtime.getNil();
        return runtime.newString();
    }

    final RubyString strValue = RubyString.newUnicodeString(runtime, value.toString());
    if ( rawDateTime != null && rawDateTime.booleanValue() ) return strValue;

    final IRubyObject adapter = callMethod(context, "adapter"); // self.adapter
    if ( adapter.isNil() ) return strValue; // NOTE: we warn on init_connection

    if ( usesType(runtime) ) {
        return typeCastFromDatabase(context, adapter, runtime.newSymbol("time"), strValue);
    }
    return adapter.callMethod(context, "_string_to_time", strValue);
}

protected IRubyObject timestampToRuby(final ThreadContext context, // TODO
    final Ruby runtime, final ResultSet resultSet, final int column)
    throws SQLException {
    final Timestamp value = resultSet.getTimestamp(column);
    if ( value == null ) {
        if ( resultSet.wasNull() ) return runtime.getNil();
        return runtime.newString();
    }

    final RubyString strValue = timestampToRubyString(runtime, value.toString());
    if ( rawDateTime != null && rawDateTime.booleanValue() ) return strValue;

    final IRubyObject adapter = callMethod(context, "adapter"); // self.adapter
    if ( adapter.isNil() ) return strValue; // NOTE: we warn on init_connection

    if ( usesType(runtime) ) {
        return typeCastFromDatabase(context, adapter, runtime.newSymbol("timestamp"), strValue);
    }
    return adapter.callMethod(context, "_string_to_timestamp", strValue);
}

// Strips the redundant Timestamp#toString suffixes (" 00:00:00.0" / ".0")
// before handing the string to Ruby.
protected static RubyString timestampToRubyString(final Ruby runtime, String value) {
    // Timestamp's format: yyyy-mm-dd hh:mm:ss.fffffffff
    String suffix; // assumes java.sql.Timestamp internals :
    if ( value.endsWith( suffix = " 00:00:00.0" ) ) {
        value = value.substring( 0, value.length() - suffix.length() );
    }
    else if ( value.endsWith( suffix = ".0" ) ) {
        value = value.substring( 0, value.length() - suffix.length() );
    }
    return RubyString.newUnicodeString(runtime, value);
}

@Deprecated
protected IRubyObject timestampToRuby(
    final Ruby runtime, final ResultSet resultSet, final Timestamp value)
    throws SQLException {
    if ( value == null && resultSet.wasNull() ) return runtime.getNil();
    return timestampToRubyString(runtime, value.toString());
}

// When true, boolean columns are returned as raw strings; null means
// "not configured" (see the arjdbc.boolean.raw system property).
protected static Boolean rawBoolean;
static {
    final String booleanRaw = System.getProperty("arjdbc.boolean.raw");
    if ( booleanRaw != null ) {
        rawBoolean = Boolean.parseBoolean(booleanRaw);
    }
}

// Ruby API (meta): raw_boolean? - nil when unconfigured, else the flag.
@JRubyMethod(name = "raw_boolean?", meta = true)
public static IRubyObject useRawBoolean(final ThreadContext context, final IRubyObject self) {
    if ( rawBoolean == null ) return context.getRuntime().getNil();
    return context.getRuntime().newBoolean( rawBoolean.booleanValue() );
}

@JRubyMethod(name =
"raw_boolean=", meta = true) public static IRubyObject setRawBoolean(final IRubyObject self, final IRubyObject value) { if ( value instanceof RubyBoolean ) { rawBoolean = ((RubyBoolean) value).isTrue(); } else { rawBoolean = value.isNil() ? null : Boolean.TRUE; } return value; } protected IRubyObject booleanToRuby(final ThreadContext context, final Ruby runtime, final ResultSet resultSet, final int column) throws SQLException { if ( rawBoolean != null && rawBoolean.booleanValue() ) { final String value = resultSet.getString(column); if ( resultSet.wasNull() ) return runtime.getNil(); return RubyString.newUnicodeString(runtime, value); } final boolean value = resultSet.getBoolean(column); if ( resultSet.wasNull() ) return runtime.getNil(); return booleanToRuby(runtime, resultSet, value); } @Deprecated protected IRubyObject booleanToRuby( final Ruby runtime, final ResultSet resultSet, final boolean value) throws SQLException { if ( value == false && resultSet.wasNull() ) return runtime.getNil(); return runtime.newBoolean(value); } protected static int streamBufferSize = 2048; protected IRubyObject streamToRuby(final ThreadContext context, final Ruby runtime, final ResultSet resultSet, final int column) throws SQLException, IOException { final InputStream stream = resultSet.getBinaryStream(column); try { if ( resultSet.wasNull() ) return runtime.getNil(); return streamToRuby(runtime, resultSet, stream); } finally { if ( stream != null ) stream.close(); } } @Deprecated protected IRubyObject streamToRuby( final Ruby runtime, final ResultSet resultSet, final InputStream stream) throws SQLException, IOException { if ( stream == null && resultSet.wasNull() ) return runtime.getNil(); final int bufSize = streamBufferSize; final ByteList string = new ByteList(bufSize); final byte[] buf = new byte[bufSize]; for (int len = stream.read(buf); len != -1; len = stream.read(buf)) { string.append(buf, 0, len); } return runtime.newString(string); } protected IRubyObject 
readerToRuby(final ThreadContext context,
        final Ruby runtime, final ResultSet resultSet, final int column)
        throws SQLException, IOException {
        // Reads a character-stream (CLOB-ish) column; always closes the reader.
        final Reader reader = resultSet.getCharacterStream(column);
        try {
            if ( resultSet.wasNull() ) return runtime.getNil();
            return readerToRuby(runtime, resultSet, reader);
        }
        finally { if ( reader != null ) reader.close(); }
    }

    // Kept for binary compatibility; drains the reader in streamBufferSize
    // chunks into a StringBuilder (does not close the reader — caller's duty).
    @Deprecated
    protected IRubyObject readerToRuby(
        final Ruby runtime, final ResultSet resultSet, final Reader reader)
        throws SQLException, IOException {
        if ( reader == null && resultSet.wasNull() ) return runtime.getNil();

        final int bufSize = streamBufferSize;
        final StringBuilder string = new StringBuilder(bufSize);

        final char[] buf = new char[bufSize];
        for (int len = reader.read(buf); len != -1; len = reader.read(buf)) {
            string.append(buf, 0, len);
        }

        return RubyString.newUnicodeString(runtime, string.toString());
    }

    // Fallback conversion: wraps an arbitrary JDBC object into Ruby.
    protected IRubyObject objectToRuby(final ThreadContext context,
        final Ruby runtime, final ResultSet resultSet, final int column)
        throws SQLException {
        final Object value = resultSet.getObject(column);

        if ( value == null && resultSet.wasNull() ) return runtime.getNil();

        return JavaUtil.convertJavaToRuby(runtime, value);
    }

    // Reads an ARRAY column element-by-element (recursively via jdbcToRuby)
    // into a Ruby Array; frees the JDBC Array when done.
    protected IRubyObject arrayToRuby(final ThreadContext context,
        final Ruby runtime, final ResultSet resultSet, final int column)
        throws SQLException {
        final Array value = resultSet.getArray(column);
        try {
            if ( value == null && resultSet.wasNull() ) return runtime.getNil();

            final RubyArray array = runtime.newArray();

            final ResultSet arrayResult = value.getResultSet(); // 1: index, 2: value
            final int baseType = value.getBaseType();
            while ( arrayResult.next() ) {
                array.append( jdbcToRuby(context, runtime, 2, baseType, arrayResult) );
            }
            return array;
        }
        finally { if ( value != null ) value.free(); }
    }

    // Reads a SQLXML column as a Ruby String; frees the SQLXML when done.
    protected IRubyObject xmlToRuby(final ThreadContext context,
        final Ruby runtime, final ResultSet resultSet, final int column)
        throws SQLException {
        final SQLXML xml = resultSet.getSQLXML(column);
        try {
            if ( xml == null || resultSet.wasNull() ) return runtime.getNil();
            return RubyString.newUnicodeString(runtime, xml.getString());
        }
        finally { if ( xml != null ) xml.free(); }
    }

    // Binds an AR binds collection — [ [ column1, param1 ], ... ] — onto the
    // given PreparedStatement using 1-based JDBC parameter indexes.
    protected void setStatementParameters(final ThreadContext context,
        final Connection connection, final PreparedStatement statement,
        final List<?> binds) throws SQLException {

        final Ruby runtime = context.getRuntime();

        for ( int i = 0; i < binds.size(); i++ ) {
            // [ [ column1, param1 ], [ column2, param2 ], ... ]
            Object param = binds.get(i); IRubyObject column = null;
            if ( param.getClass() == RubyArray.class ) {
                final RubyArray _param = (RubyArray) param;
                column = _param.eltInternal(0); param = _param.eltInternal(1);
            }
            else if ( param instanceof List ) {
                final List<?> _param = (List<?>) param;
                column = (IRubyObject) _param.get(0); param = _param.get(1);
            }
            else if ( param instanceof Object[] ) {
                final Object[] _param = (Object[]) param;
                column = (IRubyObject) _param[0]; param = _param[1];
            }

            setStatementParameter(context, runtime, connection, statement, i + 1, param, column);
        }
    }

    // Binds a single parameter: (on AR 4.2) first applies the column's
    // cast_type, resolves the JDBC type, then dispatches to a set*Parameter helper.
    protected void setStatementParameter(final ThreadContext context,
        final Ruby runtime, final Connection connection,
        final PreparedStatement statement, final int index,
        final Object rawValue, final IRubyObject column) throws SQLException {

        final Object value;
        if ( isAr42(column) ) {
            final IRubyObject castType = column.callMethod(context, "cast_type");
            value = castType.callMethod(context, "type_cast_for_database", (IRubyObject) rawValue);
        }
        else {
            value = rawValue;
        }

        final int type = jdbcTypeFor(context, runtime, column, value);

        switch (type) {
            case Types.TINYINT:
            case Types.SMALLINT:
            case Types.INTEGER:
                if ( value instanceof RubyBignum ) { // e.g. HSQLDB / H2 report JDBC type 4
                    setBigIntegerParameter(context, connection, statement, index, (RubyBignum) value, column, type);
                }
                else {
                    setIntegerParameter(context, connection, statement, index, value, column, type);
                }
                break;
            case Types.BIGINT:
                setBigIntegerParameter(context, connection, statement, index, value, column, type);
                break;
            case Types.REAL:
            case Types.FLOAT:
            case Types.DOUBLE:
                setDoubleParameter(context, connection, statement, index, value, column, type);
                break;
            case Types.NUMERIC:
            case Types.DECIMAL:
                setDecimalParameter(context, connection, statement, index, value, column, type);
                break;
            case Types.DATE:
                setDateParameter(context, connection, statement, index, value, column, type);
                break;
            case Types.TIME:
                setTimeParameter(context, connection, statement, index, value, column, type);
                break;
            case Types.TIMESTAMP:
                setTimestampParameter(context, connection, statement, index, value, column, type);
                break;
            case Types.BIT:
            case Types.BOOLEAN:
                setBooleanParameter(context, connection, statement, index, value, column, type);
                break;
            case Types.SQLXML:
                setXmlParameter(context, connection, statement, index, value, column, type);
                break;
            case Types.ARRAY:
                // NOTE: arrays receive rawValue — the cast_type conversion is skipped
                setArrayParameter(context, connection, statement, index, rawValue, column, type);
                break;
            case Types.JAVA_OBJECT:
            case Types.OTHER:
                setObjectParameter(context, connection, statement, index, value, column, type);
                break;
            case Types.BINARY:
            case Types.VARBINARY:
            case Types.LONGVARBINARY:
            case Types.BLOB:
                setBlobParameter(context, connection, statement, index, value, column, type);
                break;
            case Types.CLOB:
            case Types.NCLOB: // JDBC 4.0
                setClobParameter(context, connection, statement, index, value, column, type);
                break;
            case Types.CHAR:
            case Types.VARCHAR:
            case Types.NCHAR: // JDBC 4.0
            case Types.NVARCHAR: // JDBC 4.0
            default:
                setStringParameter(context, connection, statement, index, value, column, type);
        }
    }

    @Deprecated // NOTE: only used from deprecated methods
    private void setPreparedStatementValues(final ThreadContext
context,
        final Connection connection, final PreparedStatement statement,
        final IRubyObject valuesArg, final IRubyObject typesArg) throws SQLException {

        final Ruby runtime = context.getRuntime();

        final RubyArray values = (RubyArray) valuesArg;
        final RubyArray types = (RubyArray) typesArg; // column types

        for( int i = 0, j = values.getLength(); i < j; i++ ) {
            setStatementParameter(
                context, runtime, connection, statement, i + 1,
                values.eltInternal(i), types.eltInternal(i)
            );
        }
    }

    // Maps a column argument (Symbol / String — both deprecated — or an AR
    // column object responding to #type) to its type Symbol.
    private RubySymbol resolveColumnType(final ThreadContext context,
        final Ruby runtime, final IRubyObject column) {
        if ( column instanceof RubySymbol ) { // deprecated behavior
            return (RubySymbol) column;
        }
        if ( column instanceof RubyString) { // deprecated behavior
            if ( runtime.is1_9() ) {
                return ( (RubyString) column ).intern19();
            }
            else {
                return ( (RubyString) column ).intern();
            }
        }
        if ( column == null || column.isNil() ) {
            throw runtime.newArgumentError("nil column passed");
        }
        return (RubySymbol) column.callMethod(context, "type");
    }

    // AR type name -> java.sql.Types constant used for parameter binding.
    protected static final Map<String, Integer> JDBC_TYPE_FOR = new HashMap<String, Integer>(16, 1);
    static {
        JDBC_TYPE_FOR.put("string", Types.VARCHAR);
        JDBC_TYPE_FOR.put("text", Types.CLOB);
        JDBC_TYPE_FOR.put("integer", Types.INTEGER);
        JDBC_TYPE_FOR.put("float", Types.FLOAT);
        JDBC_TYPE_FOR.put("decimal", Types.DECIMAL);
        JDBC_TYPE_FOR.put("date", Types.DATE);
        JDBC_TYPE_FOR.put("time", Types.TIME);
        JDBC_TYPE_FOR.put("datetime", Types.TIMESTAMP);
        JDBC_TYPE_FOR.put("timestamp", Types.TIMESTAMP);
        JDBC_TYPE_FOR.put("binary", Types.BLOB);
        JDBC_TYPE_FOR.put("boolean", Types.BOOLEAN);
        JDBC_TYPE_FOR.put("array", Types.ARRAY);
        JDBC_TYPE_FOR.put("xml", Types.SQLXML);
    }

    // Resolves the JDBC type for a bind: prefers the column's declared type
    // (with special handling for PostgreSQL array columns), otherwise infers
    // one from the Ruby value's class; unknown names map to Types.OTHER.
    protected int jdbcTypeFor(final ThreadContext context, final Ruby runtime,
        final IRubyObject column, final Object value) throws SQLException {

        final String internedType;
        if ( column != null && ! column.isNil() ) {
            // NOTE: there's no ActiveRecord "convention" really for this ...
            // this is based on Postgre's initial support for arrays :
            // `column.type` contains the base type while there's `column.array?`
            if ( column.respondsTo("array?") && column.callMethod(context, "array?").isTrue() ) {
                internedType = "array";
            }
            else {
                final RubySymbol columnType = resolveColumnType(context, runtime, column);
                internedType = columnType.asJavaString();
            }
        }
        else {
            if ( value instanceof RubyInteger ) {
                internedType = "integer";
            }
            else if ( value instanceof RubyNumeric ) {
                internedType = "float";
            }
            else if ( value instanceof RubyTime ) {
                internedType = "timestamp";
            }
            else {
                internedType = "string";
            }
        }

        final Integer sqlType = JDBC_TYPE_FOR.get(internedType);
        if ( sqlType != null ) return sqlType.intValue();

        return Types.OTHER; // -1 as well as 0 are used in Types
    }

    // Binds an integer parameter from a raw Java value (delegates for IRubyObject).
    protected void setIntegerParameter(final ThreadContext context,
        final Connection connection, final PreparedStatement statement,
        final int index, final Object value,
        final IRubyObject column, final int type) throws SQLException {
        if ( value instanceof IRubyObject ) {
            setIntegerParameter(context, connection, statement, index, (IRubyObject) value, column, type);
        }
        else {
            if ( value == null ) statement.setNull(index, Types.INTEGER);
            else {
                statement.setLong(index, ((Number) value).longValue());
            }
        }
    }

    // Binds an integer parameter from a Ruby value (to_i fallback for non-numerics).
    protected void setIntegerParameter(final ThreadContext context,
        final Connection connection, final PreparedStatement statement,
        final int index, final IRubyObject value,
        final IRubyObject column, final int type) throws SQLException {
        if ( value.isNil() ) statement.setNull(index, Types.INTEGER);
        else {
            if ( value instanceof RubyFixnum ) {
                statement.setLong(index, ((RubyFixnum) value).getLongValue());
            }
            else if ( value instanceof RubyNumeric ) {
                // NOTE: fix2int will call value.convertToIngeter for non-numeric
                // types which won't work for Strings since it uses `to_int` ...
                statement.setInt(index, RubyNumeric.fix2int(value));
            }
            else {
                statement.setLong(index, value.convertToInteger("to_i").getLongValue());
            }
        }
    }

    // Binds a big-integer parameter from a raw Java value (delegates for IRubyObject).
    protected void setBigIntegerParameter(final ThreadContext context,
        final Connection connection, final PreparedStatement statement,
        final int index, final Object value,
        final IRubyObject column, final int type) throws SQLException {
        if ( value instanceof IRubyObject ) {
            setBigIntegerParameter(context, connection, statement, index, (IRubyObject) value, column, type);
        }
        else {
            if ( value == null ) statement.setNull(index, Types.BIGINT);
            else {
                if ( value instanceof BigDecimal ) {
                    statement.setBigDecimal(index, (BigDecimal) value);
                }
                else if ( value instanceof BigInteger ) {
                    setLongOrDecimalParameter(statement, index, (BigInteger) value);
                }
                else {
                    statement.setLong(index, ((Number) value).longValue());
                }
            }
        }
    }

    // Binds a big-integer parameter from a Ruby value.
    // NOTE(review): uses Types.INTEGER for the null case while the Object
    // overload above uses Types.BIGINT — confirm whether that is intended.
    protected void setBigIntegerParameter(final ThreadContext context,
        final Connection connection, final PreparedStatement statement,
        final int index, final IRubyObject value,
        final IRubyObject column, final int type) throws SQLException {
        if ( value.isNil() ) statement.setNull(index, Types.INTEGER);
        else {
            if ( value instanceof RubyBignum ) {
                setLongOrDecimalParameter(statement, index, ((RubyBignum) value).getValue());
            }
            else if ( value instanceof RubyInteger ) {
                statement.setLong(index, ((RubyInteger) value).getLongValue());
            }
            else {
                setLongOrDecimalParameter(statement, index, value.convertToInteger("to_i").getBigIntegerValue());
            }
        }
    }

    private static final BigInteger MAX_LONG = BigInteger.valueOf(Long.MAX_VALUE);
    private static final BigInteger MIN_LONG = BigInteger.valueOf(Long.MIN_VALUE);

    // Binds as a long when the value fits into [Long.MIN_VALUE, Long.MAX_VALUE],
    // otherwise falls back to a BigDecimal bind.
    protected static void setLongOrDecimalParameter(final PreparedStatement statement,
        final int index, final BigInteger value) throws SQLException {
        if ( value.compareTo(MAX_LONG) <= 0 // -1 intValue < MAX_VALUE
            && value.compareTo(MIN_LONG) >= 0 ) {
            statement.setLong(index, value.longValue());
        }
        else {
            statement.setBigDecimal(index, new
BigDecimal(value)); } }

    // Binds a double parameter from a raw Java value (delegates for IRubyObject).
    protected void setDoubleParameter(final ThreadContext context,
        final Connection connection, final PreparedStatement statement,
        final int index, final Object value,
        final IRubyObject column, final int type) throws SQLException {
        if ( value instanceof IRubyObject ) {
            setDoubleParameter(context, connection, statement, index, (IRubyObject) value, column, type);
        }
        else {
            if ( value == null ) statement.setNull(index, Types.DOUBLE);
            else {
                statement.setDouble(index, ((Number) value).doubleValue());
            }
        }
    }

    // Binds a double parameter from a Ruby value (to_f fallback).
    protected void setDoubleParameter(final ThreadContext context,
        final Connection connection, final PreparedStatement statement,
        final int index, final IRubyObject value,
        final IRubyObject column, final int type) throws SQLException {
        if ( value.isNil() ) statement.setNull(index, Types.DOUBLE);
        else {
            if ( value instanceof RubyNumeric ) {
                statement.setDouble(index, ((RubyNumeric) value).getDoubleValue());
            }
            else {
                statement.setDouble(index, value.convertToFloat().getDoubleValue());
            }
        }
    }

    // Binds a decimal parameter from a raw Java value (delegates for IRubyObject).
    protected void setDecimalParameter(final ThreadContext context,
        final Connection connection, final PreparedStatement statement,
        final int index, final Object value,
        final IRubyObject column, final int type) throws SQLException {
        if ( value instanceof IRubyObject ) {
            setDecimalParameter(context, connection, statement, index, (IRubyObject) value, column, type);
        }
        else {
            if ( value == null ) statement.setNull(index, Types.DECIMAL);
            else {
                if ( value instanceof BigDecimal ) {
                    statement.setBigDecimal(index, (BigDecimal) value);
                }
                else if ( value instanceof BigInteger ) {
                    setLongOrDecimalParameter(statement, index, (BigInteger) value);
                }
                else {
                    statement.setDouble(index, ((Number) value).doubleValue());
                }
            }
        }
    }

    // Binds a decimal parameter from a Ruby value; detects RubyBigDecimal by
    // class name (its package moved between JRuby 1.6 and 1.7) and unwraps it
    // via reflection in getBigDecimalValue.
    protected void setDecimalParameter(final ThreadContext context,
        final Connection connection, final PreparedStatement statement,
        final int index, final IRubyObject value,
        final IRubyObject column, final int type) throws SQLException {
        if ( value.isNil() ) statement.setNull(index, Types.DECIMAL);
        else {
            // NOTE: RubyBigDecimal moved into org.jruby.ext.bigdecimal (1.6 -> 1.7)
            if ( value.getMetaClass().getName().indexOf("BigDecimal") != -1 ) {
                statement.setBigDecimal(index, getBigDecimalValue(value));
            }
            else if ( value instanceof RubyInteger ) {
                statement.setBigDecimal(index, new BigDecimal(((RubyInteger) value).getBigIntegerValue()));
            }
            else if ( value instanceof RubyNumeric ) {
                statement.setDouble(index, ((RubyNumeric) value).getDoubleValue());
            }
            else { // e.g. `BigDecimal '42.00000000000000000001'`
                IRubyObject v = callMethod(context, "BigDecimal", value);
                statement.setBigDecimal(index, getBigDecimalValue(v));
            }
        }
    }

    // Extracts the java.math.BigDecimal from a RubyBigDecimal via reflection,
    // avoiding a compile-time dependency on its (relocated) class.
    private static BigDecimal getBigDecimalValue(final IRubyObject value) {
        try { // reflect ((RubyBigDecimal) value).getValue() :
            return (BigDecimal) value.getClass().
                getMethod("getValue", (Class<?>[]) null).
                invoke(value, (Object[]) null);
        }
        catch (NoSuchMethodException e) {
            throw new RuntimeException(e);
        }
        catch (IllegalAccessException e) {
            throw new RuntimeException(e);
        }
        catch (InvocationTargetException e) {
            // unwrap the reflective wrapper, preserving the real cause
            throw new RuntimeException(e.getCause() != null ? e.getCause() : e);
        }
    }

    // Binds a timestamp parameter from a raw Java value (delegates for IRubyObject).
    protected void setTimestampParameter(final ThreadContext context,
        final Connection connection, final PreparedStatement statement,
        final int index, final Object value,
        final IRubyObject column, final int type) throws SQLException {
        if ( value instanceof IRubyObject ) {
            setTimestampParameter(context, connection, statement, index, (IRubyObject) value, column, type);
        }
        else {
            if ( value == null ) statement.setNull(index, Types.TIMESTAMP);
            else {
                if ( value instanceof Timestamp ) {
                    statement.setTimestamp(index, (Timestamp) value);
                }
                else if ( value instanceof java.util.Date ) {
                    statement.setTimestamp(index, new Timestamp(((java.util.Date) value).getTime()));
                }
                else {
                    statement.setTimestamp(index, Timestamp.valueOf(value.toString()));
                }
            }
        }
    }

    // Binds a timestamp from a Ruby value, first normalized to the AR default
    // time-zone; RubyTime's extra micro-second precision is folded into nanos.
    protected void setTimestampParameter(final ThreadContext context,
        final Connection connection, final PreparedStatement statement,
        final int index, IRubyObject value,
        final IRubyObject column, final int type) throws SQLException {
        if ( value.isNil() ) statement.setNull(index, Types.TIMESTAMP);
        else {
            value = getTimeInDefaultTimeZone(context, value);
            if ( value instanceof RubyTime ) {
                final RubyTime timeValue = (RubyTime) value;
                final DateTime dateTime = timeValue.getDateTime();

                final Timestamp timestamp = new Timestamp( dateTime.getMillis() );
                if ( type != Types.DATE ) {
                    // 1942-11-30T01:02:03.123_456
                    // getMillis already set nanos to: 123_000_000
                    final int usec = (int) timeValue.getUSec(); // 456 on JRuby
                    if ( usec >= 0 ) {
                        timestamp.setNanos( timestamp.getNanos() + usec * 1000 );
                    }
                }
                statement.setTimestamp( index, timestamp, getTimeZoneCalendar(dateTime.getZone().getID()) );
            }
            else if ( value instanceof RubyString ) { // yyyy-[m]m-[d]d hh:mm:ss[.f...]
final Timestamp timestamp = Timestamp.valueOf( value.toString() );
                statement.setTimestamp( index, timestamp ); // assume local time-zone
            }
            else { // DateTime ( ActiveSupport::TimeWithZone.to_time )
                final RubyFloat timeValue = value.convertToFloat(); // to_f
                final Timestamp timestamp = convertToTimestamp(timeValue);

                statement.setTimestamp( index, timestamp, getTimeZoneCalendar("GMT") );
            }
        }
    }

    // Converts an epoch-seconds float (Time#to_f) into a Timestamp, recovering
    // sub-millisecond digits by parsing the float's to_s representation.
    protected static Timestamp convertToTimestamp(final RubyFloat value) {
        final Timestamp timestamp = new Timestamp(value.getLongValue() * 1000); // millis

        // for usec we shall not use: ((long) floatValue * 1000000) % 1000
        // if ( usec >= 0 ) timestamp.setNanos( timestamp.getNanos() + usec * 1000 );
        // due doubles inaccurate precision it's better to parse to_s :
        final ByteList strValue = ((RubyString) value.to_s()).getByteList();
        final int dot1 = strValue.lastIndexOf('.') + 1, dot4 = dot1 + 3;
        final int len = strValue.getRealSize() - strValue.getBegin();
        if ( dot1 > 0 && dot4 < len ) { // skip .123 but handle .1234
            final int end = Math.min( len - dot4, 3 );
            // NOTE(review): `end` is a length (<= 3), not an absolute end index;
            // this relies on ByteList.subSequence's non-standard semantics — a
            // contract-conforming CharSequence would require (dot4, dot4 + end).
            // Verify against the JRuby ByteList version in use.
            CharSequence usecSeq = strValue.subSequence(dot4, end);
            final int usec = Integer.parseInt( usecSeq.toString() );
            if ( usec < 10 ) { // 0.1234 ~> 4
                timestamp.setNanos( timestamp.getNanos() + usec * 100 );
            }
            else if ( usec < 100 ) { // 0.12345 ~> 45
                timestamp.setNanos( timestamp.getNanos() + usec * 10 );
            }
            else { // if ( usec < 1000 ) { // 0.123456 ~> 456
                timestamp.setNanos( timestamp.getNanos() + usec );
            }
        }
        return timestamp;
    }

    // Converts value to a Time in the AR default time-zone (to_time, then
    // getutc / getlocal depending on ActiveRecord::Base.default_timezone).
    protected static IRubyObject getTimeInDefaultTimeZone(final ThreadContext context, IRubyObject value) {
        if ( value.respondsTo("to_time") ) {
            value = value.callMethod(context, "to_time");
        }
        final String method = isDefaultTimeZoneUTC(context) ? "getutc" : "getlocal";
        if ( value.respondsTo(method) ) {
            value = value.callMethod(context, method);
        }
        return value;
    }

    // true when ActiveRecord::Base.default_timezone == :utc
    protected static boolean isDefaultTimeZoneUTC(final ThreadContext context) {
        final RubyClass base = getBase(context.getRuntime());
        final String tz = base.callMethod(context, "default_timezone").toString(); // :utc
        return "utc".equalsIgnoreCase(tz);
    }

    // Calendar for the given time-zone ID, used for zone-aware JDBC binds.
    private static Calendar getTimeZoneCalendar(final String ID) {
        return Calendar.getInstance( TimeZone.getTimeZone(ID) );
    }

    // Binds a time parameter from a raw Java value (delegates for IRubyObject).
    protected void setTimeParameter(final ThreadContext context,
        final Connection connection, final PreparedStatement statement,
        final int index, final Object value,
        final IRubyObject column, final int type) throws SQLException {
        if ( value instanceof IRubyObject ) {
            setTimeParameter(context, connection, statement, index, (IRubyObject) value, column, type);
        }
        else {
            if ( value == null ) statement.setNull(index, Types.TIME);
            else {
                if ( value instanceof Time ) {
                    statement.setTime(index, (Time) value);
                }
                else if ( value instanceof java.util.Date ) {
                    statement.setTime(index, new Time(((java.util.Date) value).getTime()));
                }
                else { // hh:mm:ss
                    statement.setTime(index, Time.valueOf(value.toString()));
                    // statement.setString(index, value.toString());
                }
            }
        }
    }

    // Binds a time from a Ruby value, normalized to the AR default time-zone.
    protected void setTimeParameter(final ThreadContext context,
        final Connection connection, final PreparedStatement statement,
        final int index, IRubyObject value,
        final IRubyObject column, final int type) throws SQLException {
        if ( value.isNil() ) statement.setNull(index, Types.TIME);
        else {
            value = getTimeInDefaultTimeZone(context, value);
            if ( value instanceof RubyTime ) {
                final RubyTime timeValue = (RubyTime) value;
                final DateTime dateTime = timeValue.getDateTime();

                final Time time = new Time( dateTime.getMillis() );
                statement.setTime( index, time, getTimeZoneCalendar(dateTime.getZone().getID()) );
            }
            else if ( value instanceof RubyString ) {
                final Time time = Time.valueOf( value.toString() );
                statement.setTime( index, time ); // assume local time-zone
            }
            else { // DateTime ( ActiveSupport::TimeWithZone.to_time )
                final RubyFloat timeValue = value.convertToFloat(); // to_f
                final Time time = new Time(timeValue.getLongValue() * 1000); // millis
                // java.sql.Time is expected to be only up to second precision
                statement.setTime( index, time, getTimeZoneCalendar("GMT") );
            }
        }
    }

    // Binds a date parameter from a raw Java value (delegates for IRubyObject).
    protected void setDateParameter(final ThreadContext context,
        final Connection connection, final PreparedStatement statement,
        final int index, final Object value,
        final IRubyObject column, final int type) throws SQLException {
        if ( value instanceof IRubyObject ) {
            setDateParameter(context, connection, statement, index, (IRubyObject) value, column, type);
        }
        else {
            if ( value == null ) statement.setNull(index, Types.DATE);
            else {
                if ( value instanceof Date ) {
                    statement.setDate(index, (Date) value);
                }
                else if ( value instanceof java.util.Date ) {
                    statement.setDate(index, new Date(((java.util.Date) value).getTime()));
                }
                else { // yyyy-[m]m-[d]d
                    statement.setDate(index, Date.valueOf(value.toString()));
                    // statement.setString(index, value.toString());
                }
            }
        }
    }

    // Binds a date from a Ruby value, converting via to_date when it is not
    // already a Ruby Date.
    protected void setDateParameter(final ThreadContext context,
        final Connection connection, final PreparedStatement statement,
        final int index, IRubyObject value,
        final IRubyObject column, final int type) throws SQLException {
        if ( value.isNil() ) statement.setNull(index, Types.DATE);
        else {
            //if ( value instanceof RubyString ) {
            //    final Date date = Date.valueOf( value.toString() );
            //    statement.setDate( index, date ); // assume local time-zone
            //    return;
            if ( !
"Date".equals( value.getMetaClass().getName() ) ) { if ( value.respondsTo("to_date") ) { value = value.callMethod(context, "to_date"); } } final Date date = Date.valueOf( value.asString().toString() ); // to_s statement.setDate( index, date /*, getTimeZoneCalendar("GMT") */ ); } } protected void setBooleanParameter(final ThreadContext context, final Connection connection, final PreparedStatement statement, final int index, final Object value, final IRubyObject column, final int type) throws SQLException { if ( value instanceof IRubyObject ) { setBooleanParameter(context, connection, statement, index, (IRubyObject) value, column, type); } else { if ( value == null ) statement.setNull(index, Types.BOOLEAN); else { statement.setBoolean(index, ((Boolean) value).booleanValue()); } } } protected void setBooleanParameter(final ThreadContext context, final Connection connection, final PreparedStatement statement, final int index, final IRubyObject value, final IRubyObject column, final int type) throws SQLException { if ( value.isNil() ) statement.setNull(index, Types.BOOLEAN); else { statement.setBoolean(index, value.isTrue()); } } protected void setStringParameter(final ThreadContext context, final Connection connection, final PreparedStatement statement, final int index, final Object value, final IRubyObject column, final int type) throws SQLException { if ( value instanceof IRubyObject ) { setStringParameter(context, connection, statement, index, (IRubyObject) value, column, type); } else { if ( value == null ) statement.setNull(index, Types.VARCHAR); else { statement.setString(index, value.toString()); } } } protected void setStringParameter(final ThreadContext context, final Connection connection, final PreparedStatement statement, final int index, final IRubyObject value, final IRubyObject column, final int type) throws SQLException { if ( value.isNil() ) statement.setNull(index, Types.VARCHAR); else { statement.setString(index, value.asString().toString()); } } 
protected void setArrayParameter(final ThreadContext context, final Connection connection, final PreparedStatement statement, final int index, final Object value, final IRubyObject column, final int type) throws SQLException { if ( value instanceof IRubyObject ) { setArrayParameter(context, connection, statement, index, (IRubyObject) value, column, type); } else { if ( value == null ) { statement.setNull(index, Types.ARRAY); } else { String typeName = resolveArrayBaseTypeName(context, value, column, type); Array array = connection.createArrayOf(typeName, (Object[]) value); statement.setArray(index, array); } } } protected void setArrayParameter(final ThreadContext context, final Connection connection, final PreparedStatement statement, final int index, final IRubyObject value, final IRubyObject column, final int type) throws SQLException { if ( value.isNil() ) { statement.setNull(index, Types.ARRAY); } else { String typeName = resolveArrayBaseTypeName(context, value, column, type); Array array = connection.createArrayOf(typeName, ((RubyArray) value).toArray()); statement.setArray(index, array); } } protected String resolveArrayBaseTypeName(final ThreadContext context, final Object value, final IRubyObject column, final int type) { // return column.callMethod(context, "sql_type").toString(); String sqlType = column.callMethod(context, "sql_type").toString(); final int index = sqlType.indexOf('('); // e.g. 
"character varying(255)" if ( index > 0 ) sqlType = sqlType.substring(0, index); return sqlType; } protected void setXmlParameter(final ThreadContext context, final Connection connection, final PreparedStatement statement, final int index, final Object value, final IRubyObject column, final int type) throws SQLException { if ( value instanceof IRubyObject ) { setXmlParameter(context, connection, statement, index, (IRubyObject) value, column, type); } else { if ( value == null ) statement.setNull(index, Types.SQLXML); else { SQLXML xml = connection.createSQLXML(); xml.setString(value.toString()); statement.setSQLXML(index, xml); } } } protected void setXmlParameter(final ThreadContext context, final Connection connection, final PreparedStatement statement, final int index, final IRubyObject value, final IRubyObject column, final int type) throws SQLException { if ( value.isNil() ) statement.setNull(index, Types.SQLXML); else { SQLXML xml = connection.createSQLXML(); xml.setString(value.asString().toString()); statement.setSQLXML(index, xml); } } protected void setBlobParameter(final ThreadContext context, final Connection connection, final PreparedStatement statement, final int index, final Object value, final IRubyObject column, final int type) throws SQLException { if ( value instanceof IRubyObject ) { setBlobParameter(context, connection, statement, index, (IRubyObject) value, column, type); } else { if ( value == null ) statement.setNull(index, Types.BLOB); else { //statement.setBlob(index, (InputStream) value); statement.setBinaryStream(index, (InputStream) value); } } } protected void setBlobParameter(final ThreadContext context, final Connection connection, final PreparedStatement statement, final int index, final IRubyObject value, final IRubyObject column, final int type) throws SQLException { if ( value.isNil() ) statement.setNull(index, Types.BLOB); else { if ( value instanceof RubyIO ) { // IO/File //statement.setBlob(index, ((RubyIO) 
value).getInStream()); statement.setBinaryStream(index, ((RubyIO) value).getInStream()); } else { // should be a RubyString final ByteList blob = value.asString().getByteList(); statement.setBinaryStream(index, new ByteArrayInputStream(blob.unsafeBytes(), blob.getBegin(), blob.getRealSize()), blob.getRealSize() // length ); // JDBC 4.0 : //statement.setBlob(index, // new ByteArrayInputStream(bytes.unsafeBytes(), bytes.getBegin(), bytes.getRealSize()) } } } protected void setClobParameter(final ThreadContext context, final Connection connection, final PreparedStatement statement, final int index, final Object value, final IRubyObject column, final int type) throws SQLException { if ( value instanceof IRubyObject ) { setClobParameter(context, connection, statement, index, (IRubyObject) value, column, type); } else { if ( value == null ) statement.setNull(index, Types.CLOB); else { statement.setClob(index, (Reader) value); } } } protected void setClobParameter(final ThreadContext context, final Connection connection, final PreparedStatement statement, final int index, final IRubyObject value, final IRubyObject column, final int type) throws SQLException { if ( value.isNil() ) statement.setNull(index, Types.CLOB); else { if ( value instanceof RubyIO ) { // IO/File statement.setClob(index, new InputStreamReader(((RubyIO) value).getInStream())); } else { // should be a RubyString final String clob = value.asString().decodeString(); statement.setCharacterStream(index, new StringReader(clob), clob.length()); // JDBC 4.0 : //statement.setClob(index, new StringReader(clob)); } } } protected void setObjectParameter(final ThreadContext context, final Connection connection, final PreparedStatement statement, final int index, Object value, final IRubyObject column, final int type) throws SQLException { if (value instanceof IRubyObject) { value = ((IRubyObject) value).toJava(Object.class); } if ( value == null ) statement.setNull(index, Types.JAVA_OBJECT); 
statement.setObject(index, value); }

/** @return the currently wrapped JDBC connection (may be null). */
protected final Connection getConnection() {
    return getConnection(false);
}

/**
 * @param error whether to raise {@code ConnectionNotEstablished} when no
 *        connection is set (instead of returning null)
 */
protected Connection getConnection(boolean error) {
    final Connection connection = (Connection) dataGetStruct(); // synchronized
    if ( connection == null && error ) {
        final RubyClass errorClass = getConnectionNotEstablished( getRuntime() );
        throw new RaiseException(getRuntime(), errorClass, "no connection available", false);
    }
    return connection;
}

/**
 * Replaces the wrapped JDBC connection (closing any previous one) and
 * mirrors it into the Ruby-visible {@code @connection} ivar.
 * @return the Ruby wrapper for the connection (nil when connection is null)
 */
private IRubyObject setConnection(final Connection connection) {
    close( getConnection(false) ); // close previously open connection if there is one
    final IRubyObject rubyConnectionObject =
        connection != null ? convertJavaToRuby(connection) : getRuntime().getNil();
    setInstanceVariable( "@connection", rubyConnectionObject );
    dataWrapStruct(connection);
    return rubyConnectionObject;
}

/**
 * Checks whether the given connection is still usable, either by executing
 * the configured {@code connection_alive_sql} (when it is a SELECT/CALL) or
 * by falling back to JDBC 4.0's {@code Connection#isValid(0)}.
 * Any exception during the check marks the connection as broken (false).
 */
protected boolean isConnectionValid(final ThreadContext context, final Connection connection) {
    if ( connection == null ) return false;
    final IRubyObject alive_sql = getConfigValue(context, "connection_alive_sql");
    Statement statement = null;
    try {
        RubyString aliveSQL = alive_sql.isNil() ? null : alive_sql.convertToString();
        if ( aliveSQL != null && isSelect(aliveSQL) ) {
            // expect a SELECT/CALL SQL statement
            statement = createStatement(context, connection);
            statement.execute( aliveSQL.toString() );
            return true; // connection alive
        }
        else { // alive_sql nil (or not a statement we can execute)
            return connection.isValid(0); // since JDBC 4.0
        }
    }
    catch (Exception e) {
        debugMessage(context, "connection considered broken due: " + e.toString());
        return false;
    }
    catch (AbstractMethodError e) { // non-JDBC 4.0 driver
        warn( context,
            "WARN: driver does not support checking if connection isValid()" +
            " please make sure you're using a JDBC 4.0 compilant driver or" +
            " set `connection_alive_sql: ...` in your database configuration" );
        debugStackTrace(context, e);
        throw e;
    }
    finally { close(statement); }
}

/**
 * @return whether a table matching the given (parsed) table name exists.
 */
private boolean tableExists(final Ruby runtime,
    final Connection connection, final TableName tableName) throws SQLException {
    final IRubyObject matchedTables =
        matchTables(runtime, connection, tableName.catalog, tableName.schema, tableName.name, getTableTypes(), true);
    // NOTE: allow implementers to ignore checkExistsOnly paramater - empty array means does not exists
    return matchedTables != null && ! matchedTables.isNil() &&
        ( ! (matchedTables instanceof RubyArray) || ! ((RubyArray) matchedTables).isEmpty() );
}

/**
 * Match table names for given table name (pattern).
 * @param runtime
 * @param connection
 * @param catalog
 * @param schemaPattern
 * @param tablePattern
 * @param types table types
 * @param checkExistsOnly an optimization flag (that might be ignored by sub-classes)
 * whether the result really matters if true no need to map table names and a truth-y
 * value is sufficient (except for an empty array which is considered that the table
 * did not exists).
 * @return matched (and Ruby mapped) table names
 * @see #mapTables(Ruby, DatabaseMetaData, String, String, String, ResultSet)
 * @throws SQLException
 */
protected IRubyObject matchTables(final Ruby runtime,
    final Connection connection,
    final String catalog, final String schemaPattern,
    final String tablePattern, final String[] types,
    final boolean checkExistsOnly) throws SQLException {
    // identifiers are case-converted to whatever the driver expects first
    final String _tablePattern = caseConvertIdentifierForJdbc(connection, tablePattern);
    final String _schemaPattern = caseConvertIdentifierForJdbc(connection, schemaPattern);
    final DatabaseMetaData metaData = connection.getMetaData();
    ResultSet tablesSet = null;
    try {
        tablesSet = metaData.getTables(catalog, _schemaPattern, _tablePattern, types);
        if ( checkExistsOnly ) { // only check if given table exists
            return tablesSet.next() ? runtime.getTrue() : null;
        }
        else {
            return mapTables(runtime, metaData, catalog, _schemaPattern, _tablePattern, tablesSet);
        }
    }
    finally { close(tablesSet); }
}

// NOTE java.sql.DatabaseMetaData.getTables : (column indexes of the result)
protected final static int TABLES_TABLE_CAT = 1;
protected final static int TABLES_TABLE_SCHEM = 2;
protected final static int TABLES_TABLE_NAME = 3;
protected final static int TABLES_TABLE_TYPE = 4;

/**
 * Maps a {@code DatabaseMetaData#getTables} result set into Ruby strings,
 * case-converting each name to Rails conventions.
 * @param runtime
 * @param metaData
 * @param catalog
 * @param schemaPattern
 * @param tablePattern
 * @param tablesSet
 * @return List&lt;RubyString&gt;
 * @throws SQLException
 */
// NOTE: change to accept a connection instead of meta-data
protected RubyArray mapTables(final Ruby runtime, final DatabaseMetaData metaData,
    final String catalog, final String schemaPattern, final String tablePattern,
    final ResultSet tablesSet) throws SQLException {
    final RubyArray tables = runtime.newArray();
    while ( tablesSet.next() ) {
        String name = tablesSet.getString(TABLES_TABLE_NAME);
        name = caseConvertIdentifierForRails(metaData, name);
        tables.add(RubyString.newUnicodeString(runtime, name));
    }
    return tables;
}

/**
 * NOTE: since 1.3.0 only present for binary compatibility (with extensions).
 *
 * @deprecated no longer used - replaced with
 * {@link #matchTables(Ruby, Connection, String, String, String, String[], boolean)}
 * please update your sub-class esp. if you're overriding this method !
 */
@Deprecated
protected SQLBlock tableLookupBlock(final Ruby runtime,
    final String catalog, final String schemaPattern,
    final String tablePattern, final String[] types) {
    return new SQLBlock() {
        @Override
        public IRubyObject call(final Connection connection) throws SQLException {
            return matchTables(runtime, connection, catalog, schemaPattern, tablePattern, types, false);
        }
    };
}

// NOTE java.sql.DatabaseMetaData.getColumns : (column indexes of the result)
protected static final int COLUMN_NAME = 4;
protected static final int DATA_TYPE = 5;
protected static final int TYPE_NAME = 6;
protected static final int COLUMN_SIZE = 7;
protected static final int DECIMAL_DIGITS = 9;
protected static final int COLUMN_DEF = 13;
protected static final int IS_NULLABLE = 18;

/**
 * Create a string which represents a SQL type usable by Rails from the
 * resultSet column meta-data
 * @param resultSet the column meta-data row (positioned by the caller)
 */
protected String typeFromResultSet(final ResultSet resultSet) throws SQLException {
    final int precision = intFromResultSet(resultSet, COLUMN_SIZE);
    final int scale = intFromResultSet(resultSet, DECIMAL_DIGITS);
    final String type = resultSet.getString(TYPE_NAME);
    return formatTypeWithPrecisionAndScale(type, precision, scale);
}

/** Reads an int column, returning -1 when the stored value was SQL NULL. */
protected static int intFromResultSet(
    final ResultSet resultSet, final int column) throws SQLException {
    final int precision = resultSet.getInt(column);
    return precision == 0 && resultSet.wasNull() ?
-1 : precision; }

/**
 * Formats "type(precision,scale)" - returns the bare type when precision
 * is not positive, and omits the scale when it is not positive.
 */
protected static String formatTypeWithPrecisionAndScale(
    final String type, final int precision, final int scale) {
    if ( precision <= 0 ) return type;
    final StringBuilder typeStr = new StringBuilder().append(type);
    typeStr.append('(').append(precision); // type += "(" + precision;
    if ( scale > 0 ) typeStr.append(',').append(scale); // type += "," + scale;
    return typeStr.append(')').toString(); // type += ")";
}

/** @return the COLUMN_DEF default as a Ruby string (nil when SQL NULL). */
private static IRubyObject defaultValueFromResultSet(final Ruby runtime, final ResultSet resultSet)
    throws SQLException {
    final String defaultValue = resultSet.getString(COLUMN_DEF);
    return defaultValue == null ? runtime.getNil() : RubyString.newUnicodeString(runtime, defaultValue);
}

/** @return whether ActiveRecord::Type is defined (AR 4.2). */
private static boolean usesType(final Ruby runtime) { // AR 4.2
    return runtime.getModule("ActiveRecord").getConstantAt("Type") != null;
}

/**
 * This method is considered internal and is not part of AR-JDBC's Java ext
 * API and thus might be subject to change in the future.
 * Please copy it to your own class if you rely on it to avoid issues.
 */
protected static boolean isAr42(IRubyObject column) {
    return column.respondsTo("cast_type");
}

/**
 * Maps a column meta-data result set into Ruby column objects, detecting
 * the AR 4.2 (cast_type) column API from the JDBC column class itself.
 */
protected RubyArray mapColumnsResult(final ThreadContext context,
    final DatabaseMetaData metaData, final TableName components, final ResultSet results)
    throws SQLException {
    final RubyClass Column = getJdbcColumnClass(context);
    final boolean lookupCastType = Column.isMethodBound("cast_type", false);
    // NOTE: primary/primary= methods were removed from Column in AR 4.2
    // setPrimary = ! lookupCastType by default ... it's better than checking
    // whether primary= is bound since it might be a left over in AR-JDBC ext
    return mapColumnsResult(context, metaData, components, results, Column, lookupCastType, ! lookupCastType);
}

/**
 * Builds one Ruby Column instance per meta-data row; when setPrimary is
 * true the primary-key names are fetched once and each column's @primary
 * ivar is set accordingly.
 */
protected final RubyArray mapColumnsResult(final ThreadContext context,
    final DatabaseMetaData metaData, final TableName components, final ResultSet results,
    final RubyClass Column, final boolean lookupCastType, final boolean setPrimary)
    throws SQLException {
    final Ruby runtime = context.getRuntime();
    final Collection<String> primaryKeyNames =
        setPrimary ? getPrimaryKeyNames(metaData, components) : null;
    final RubyArray columns = runtime.newArray();
    final IRubyObject config = getConfig(context);
    while ( results.next() ) {
        final String colName = results.getString(COLUMN_NAME);
        final RubyString railsColumnName = RubyString.newUnicodeString(runtime, caseConvertIdentifierForRails(metaData, colName));
        final IRubyObject defaultValue = defaultValueFromResultSet( runtime, results );
        final RubyString sqlType = RubyString.newUnicodeString( runtime, typeFromResultSet(results) );
        // IS_NULLABLE is "NO" / "YES" (or empty when unknown)
        final RubyBoolean nullable = runtime.newBoolean( ! results.getString(IS_NULLABLE).trim().equals("NO") );
        final IRubyObject[] args;
        if ( lookupCastType ) { // AR 4.2 style Column.new(..., cast_type, ...)
            final IRubyObject castType = getAdapter(context).callMethod(context, "lookup_cast_type", sqlType);
            args = new IRubyObject[] {config, railsColumnName, defaultValue, castType, sqlType, nullable};
        } else {
            args = new IRubyObject[] {config, railsColumnName, defaultValue, sqlType, nullable};
        }
        IRubyObject column = Column.callMethod(context, "new", args);
        columns.append(column);
        if ( primaryKeyNames != null ) {
            final RubyBoolean primary = runtime.newBoolean( primaryKeyNames.contains(colName) );
            column.getInstanceVariables().setInstanceVariable("@primary", primary);
        }
    }
    return columns;
}

/** @return the primary key column names for the given table. */
private static Collection<String> getPrimaryKeyNames(final DatabaseMetaData metaData,
    final TableName components) throws SQLException {
    ResultSet primaryKeys = null;
    try {
        primaryKeys = metaData.getPrimaryKeys(components.catalog, components.schema, components.name);
        final List<String> primaryKeyNames = new ArrayList<String>(4);
        while ( primaryKeys.next()
) {
            primaryKeyNames.add( primaryKeys.getString(COLUMN_NAME) );
        }
        return primaryKeyNames;
    }
    finally { close(primaryKeys); }
}

/** Maps generated keys, guessing whether a single key was returned. */
protected IRubyObject mapGeneratedKeys(
    final Ruby runtime, final Connection connection,
    final Statement statement) throws SQLException {
    return mapGeneratedKeys(runtime, connection, statement, null);
}

/**
 * Maps the statement's generated keys into Ruby values.
 * @param singleResult true forces a single key, false forces an array,
 *        null lets the result decide
 * @return nil when the driver reports no keys, null when unsupported
 */
protected IRubyObject mapGeneratedKeys(
    final Ruby runtime, final Connection connection,
    final Statement statement, final Boolean singleResult) throws SQLException {
    if ( supportsGeneratedKeys(connection) ) {
        ResultSet genKeys = null;
        try {
            genKeys = statement.getGeneratedKeys();
            // drivers might report a non-result statement without keys
            // e.g. on derby with SQL: 'SET ISOLATION = SERIALIZABLE'
            if ( genKeys == null ) return runtime.getNil();
            return doMapGeneratedKeys(runtime, genKeys, singleResult);
        }
        catch (SQLFeatureNotSupportedException e) {
            return null; // statement.getGeneratedKeys()
        }
        finally { close(genKeys); }
    }
    return null; // not supported
}

/**
 * Walks the generated-keys result set: returns a single Ruby value when
 * only one key is present (or requested), otherwise a Ruby array of keys.
 */
protected final IRubyObject doMapGeneratedKeys(final Ruby runtime,
    final ResultSet genKeys, final Boolean singleResult)
    throws SQLException {
    IRubyObject firstKey = null;
    // no generated keys - e.g. INSERT statement for a table that does
    // not have and auto-generated ID column :
    boolean next = genKeys.next() && genKeys.getMetaData().getColumnCount() > 0;
    // singleResult == null - guess if only single key returned
    if ( singleResult == null || singleResult.booleanValue() ) {
        if ( next ) {
            firstKey = mapGeneratedKey(runtime, genKeys);
            if ( singleResult != null || ! genKeys.next() ) {
                return firstKey;
            }
            next = true; // 2nd genKeys.next() returned true
        }
        else {
            /* if ( singleResult != null ) */ return runtime.getNil();
        }
    }
    final RubyArray keys = runtime.newArray();
    if ( firstKey != null ) keys.append(firstKey); // singleResult == null
    while ( next ) {
        keys.append( mapGeneratedKey(runtime, genKeys) );
        next = genKeys.next();
    }
    return keys;
}

/** Maps the current generated-key row (column 1) to a Ruby Fixnum. */
protected IRubyObject mapGeneratedKey(final Ruby runtime, final ResultSet genKeys)
    throws SQLException {
    return runtime.newFixnum( genKeys.getLong(1) );
}

/** @return the generated key(s) or, when none, the statement's update count. */
protected IRubyObject mapGeneratedKeysOrUpdateCount(final ThreadContext context,
    final Connection connection, final Statement statement) throws SQLException {
    final Ruby runtime = context.getRuntime();
    final IRubyObject key = mapGeneratedKeys(runtime, connection, statement);
    return ( key == null || key.isNil() ) ? runtime.newFixnum( statement.getUpdateCount() ) : key;
}

@Deprecated
protected IRubyObject unmarshalKeysOrUpdateCount(final ThreadContext context,
    final Connection connection, final Statement statement) throws SQLException {
    return mapGeneratedKeysOrUpdateCount(context, connection, statement);
}

// lazily cached driver capability (null until first checked)
private Boolean supportsGeneratedKeys;

/**
 * Caches {@code DatabaseMetaData#supportsGetGeneratedKeys} on first use.
 * NOTE(review): double-checked locking on a non-volatile field; worst case
 * the meta-data lookup runs more than once - confirm that is acceptable.
 */
protected boolean supportsGeneratedKeys(final Connection connection) throws SQLException {
    if (supportsGeneratedKeys == null) {
        synchronized(this) {
            if (supportsGeneratedKeys == null) {
                supportsGeneratedKeys = connection.getMetaData().supportsGetGeneratedKeys();
            }
        }
    }
    return supportsGeneratedKeys.booleanValue();
}

/**
 * @deprecated no longer used - kept for binary compatibility, this method
 * is confusing since it closes the result set it receives and thus was
 * replaced with {@link #mapGeneratedKeys(Ruby, Connection, Statement)}
 */
@Deprecated
public static IRubyObject unmarshal_id_result(
    final Ruby runtime, final ResultSet genKeys) throws SQLException {
    try {
        if (genKeys.next() && genKeys.getMetaData().getColumnCount() > 0) {
            return runtime.newFixnum( genKeys.getLong(1) );
        }
        return
runtime.getNil();
    }
    finally { close(genKeys); }
}

/**
 * Maps the statement's result set(s): a single result maps to one raw
 * result object, multiple results map to a Ruby array of them.
 */
protected IRubyObject mapResults(final ThreadContext context,
    final Connection connection, final Statement statement,
    final boolean downCase) throws SQLException {
    final Ruby runtime = context.getRuntime();
    IRubyObject result;
    ResultSet resultSet = statement.getResultSet();
    try {
        result = mapToRawResult(context, runtime, connection, resultSet, downCase);
    }
    finally { close(resultSet); }
    if ( ! statement.getMoreResults() ) return result;
    // multiple results - collect them all into an array
    final List<IRubyObject> results = new ArrayList<IRubyObject>();
    results.add(result);
    do {
        resultSet = statement.getResultSet();
        try {
            result = mapToRawResult(context, runtime, connection, resultSet, downCase);
        }
        finally { close(resultSet); }
        results.add(result);
    }
    while ( statement.getMoreResults() );
    return runtime.newArray(results);
}

/**
 * @deprecated no longer used but kept for binary compatibility
 */
@Deprecated
protected IRubyObject unmarshalResult(final ThreadContext context,
    final DatabaseMetaData metaData, final ResultSet resultSet,
    final boolean downCase) throws SQLException {
    return mapToRawResult(context, context.getRuntime(), metaData, resultSet, downCase);
}

/**
 * Converts a JDBC result set into an array (rows) of hashes (row).
 *
 * @param downCase should column names only be in lower case?
 */
@SuppressWarnings("unchecked")
private IRubyObject mapToRawResult(final ThreadContext context,
    final Ruby runtime, final Connection connection,
    final ResultSet resultSet, final boolean downCase) throws SQLException {
    final ColumnData[] columns = extractColumns(runtime, connection, resultSet, downCase);
    final RubyArray results = runtime.newArray();
    // [ { 'col1': 1, 'col2': 2 }, { 'col1': 3, 'col2': 4 } ]
    populateFromResultSet(context, runtime, (List<IRubyObject>) results, resultSet, columns);
    return results;
}

/** @deprecated meta-data based variant kept for binary compatibility. */
@Deprecated
@SuppressWarnings("unchecked")
private IRubyObject mapToRawResult(final ThreadContext context,
    final Ruby runtime, final DatabaseMetaData metaData,
    final ResultSet resultSet, final boolean downCase) throws SQLException {
    final ColumnData[] columns = extractColumns(runtime, metaData, resultSet, downCase);
    final RubyArray results = runtime.newArray();
    // [ { 'col1': 1, 'col2': 2 }, { 'col1': 3, 'col2': 4 } ]
    populateFromResultSet(context, runtime, (List<IRubyObject>) results, resultSet, columns);
    return results;
}

/**
 * Yields each result row to the given Ruby block as one argument per
 * column (column names are not down-cased here).
 */
private IRubyObject yieldResultRows(final ThreadContext context, final Ruby runtime,
    final Connection connection, final ResultSet resultSet, final Block block)
    throws SQLException {
    final ColumnData[] columns = extractColumns(runtime, connection, resultSet, false);
    final IRubyObject[] blockArgs = new IRubyObject[columns.length];
    while ( resultSet.next() ) {
        for ( int i = 0; i < columns.length; i++ ) {
            final ColumnData column = columns[i];
            blockArgs[i] = jdbcToRuby(context, runtime, column.index, column.type, resultSet);
        }
        block.call( context, blockArgs );
    }
    return runtime.getNil(); // yielded result rows
}

/**
 * Extract columns from result set.
 * @param runtime
 * @param metaData
 * @param resultSet
 * @param downCase
 * @return columns data
 * @throws SQLException
 */
protected ColumnData[] extractColumns(final Ruby runtime,
    final Connection connection, final ResultSet resultSet,
    final boolean downCase) throws SQLException {
    return setupColumns(runtime, connection, resultSet.getMetaData(), downCase);
}

/** @deprecated meta-data based variant kept for binary compatibility. */
@Deprecated
protected ColumnData[] extractColumns(final Ruby runtime,
    final DatabaseMetaData metaData, final ResultSet resultSet,
    final boolean downCase) throws SQLException {
    return setupColumns(runtime, metaData, resultSet.getMetaData(), downCase);
}

/**
 * @deprecated renamed and parameterized to {@link #withConnection(ThreadContext, SQLBlock)}
 */
@Deprecated
@SuppressWarnings("unchecked")
protected Object withConnectionAndRetry(final ThreadContext context, final SQLBlock block)
    throws RaiseException {
    return withConnection(context, block);
}

/**
 * Runs the block with the current connection, retrying per the configured
 * {@code retry_count}; any SQL failure ends up raised as a Ruby error.
 */
protected <T> T withConnection(final ThreadContext context, final Callable<T> block)
    throws RaiseException {
    try {
        return withConnection(context, true, block);
    }
    catch (final SQLException e) {
        return handleException(context, e); // should never happen
    }
}

/**
 * Retry loop: reconnects (when not inside a transaction and the connection
 * looks broken) and re-invokes the block up to retry_count times.
 * @param handleException when false the original SQL/Runtime exception is
 *        re-thrown instead of being wrapped into a Ruby error
 */
private <T> T withConnection(final ThreadContext context, final boolean handleException,
    final Callable<T> block) throws RaiseException, RuntimeException, SQLException {
    Throwable exception = null; int retry = 0; int i = 0;
    do {
        if ( retry > 0 ) reconnect(context); // we're retrying running block
        final Connection connection = getConnection(true);
        boolean autoCommit = true; // retry in-case getAutoCommit throws
        try {
            autoCommit = connection.getAutoCommit();
            return block.call(connection);
        }
        catch (final Exception e) { // SQLException or RuntimeException
            exception = e;
            if ( autoCommit ) { // do not retry if (inside) transactions
                if ( i == 0 ) { // read the configured retry count only once
                    IRubyObject retryCount = getConfigValue(context, "retry_count");
                    if ( ! retryCount.isNil() ) {
                        retry = (int) retryCount.convertToInteger().getLongValue();
                    }
                }
                if ( isConnectionValid(context, connection) ) {
                    break; // connection not broken yet failed (do not retry)
                }
                // we'll reconnect and retry calling block again
            }
            else break;
        }
    } while ( i++ < retry ); // i == 0, retry == 1 means we should retry once
    // (retry) loop ended and we did not return ... exception != null
    if ( handleException ) {
        return handleException(context, getCause(exception)); // throws
    }
    else {
        if ( exception instanceof SQLException ) {
            throw (SQLException) exception;
        }
        if ( exception instanceof RuntimeException ) {
            throw (RuntimeException) exception;
        }
        // won't happen - our try block only throws SQL or Runtime exceptions
        throw new RuntimeException(exception);
    }
}

/**
 * Unwraps causes, but stops at the first SQLException so DB-specific
 * wrapped causes are not unwrapped past it.
 */
private static Throwable getCause(Throwable exception) {
    Throwable cause = exception.getCause();
    while (cause != null && cause != exception) {
        // SQLException's cause might be DB specific (checked/unchecked) :
        if ( exception instanceof SQLException ) break;
        exception = cause;
        cause = exception.getCause();
    }
    return exception;
}

/**
 * Re-throws runtime exceptions as-is, otherwise wraps the throwable into
 * a Ruby (AR::JDBCError) exception and raises it.
 */
protected <T> T handleException(final ThreadContext context, Throwable exception)
    throws RaiseException {
    // NOTE: we shall not wrap unchecked (runtime) exceptions into AR::Error
    // if it's really a misbehavior of the driver throwing a RuntimeExcepion
    // instead of SQLException than this should be overriden for the adapter
    if ( exception instanceof RuntimeException ) {
        throw (RuntimeException) exception;
    }
    debugStackTrace(context, exception);
    throw wrapException(context, exception);
}

/**
 * @deprecated use {@link #wrapException(ThreadContext, Throwable)} instead
 * for overriding how exceptions are handled use {@link #handleException(ThreadContext, Throwable)}
 */
@Deprecated
protected RuntimeException wrap(final ThreadContext context, final Throwable exception) {
    return wrapException(context, exception);
}

protected RaiseException wrapException(final ThreadContext context, final
Throwable exception) {
    final Ruby runtime = context.getRuntime();
    if ( exception instanceof SQLException ) {
        final String message = SQLException.class == exception.getClass() ?
            exception.getMessage() : exception.toString(); // useful to easily see type on Ruby side
        final RaiseException error = wrapException(context, getJDBCError(runtime), exception, message);
        final int errorCode = ((SQLException) exception).getErrorCode();
        final RubyException self = error.getException();
        // expose vendor error code and the raw Java exception on the Ruby error
        self.getMetaClass().finvoke(context, self, "errno=", runtime.newFixnum(errorCode));
        self.getMetaClass().finvoke(context, self, "sql_exception=", JavaEmbedUtils.javaToRuby(runtime, exception));
        return error;
    }
    return wrapException(context, getJDBCError(runtime), exception);
}

/** Wraps a throwable into a Ruby error using its {@code toString} message. */
protected static RaiseException wrapException(final ThreadContext context,
    final RubyClass errorClass, final Throwable exception) {
    return wrapException(context, errorClass, exception, exception.toString());
}

/** Wraps a throwable into a Ruby error of the given class, keeping the cause. */
protected static RaiseException wrapException(final ThreadContext context,
    final RubyClass errorClass, final Throwable exception, final String message) {
    final RaiseException error = new RaiseException(context.getRuntime(), errorClass, message, true);
    error.initCause(exception);
    return error;
}

private IRubyObject convertJavaToRuby(final Object object) {
    return JavaUtil.convertJavaToRuby( getRuntime(), object );
}

/**
 * Some databases support schemas and others do not.
 * For ones which do this method should return true, aiding in decisions regarding schema vs database determination.
 */
protected boolean databaseSupportsSchemas() {
    return false;
}

// lower-case byte prefixes of statements considered "selects" :
private static final byte[] SELECT = new byte[] { 's','e','l','e','c','t' };
private static final byte[] WITH = new byte[] { 'w','i','t','h' };
private static final byte[] SHOW = new byte[] { 's','h','o','w' };
private static final byte[] CALL = new byte[]{ 'c','a','l','l' };

@JRubyMethod(name = "select?", required = 1, meta = true, frame = false)
public static IRubyObject select_p(final ThreadContext context,
    final IRubyObject self, final IRubyObject sql) {
    return context.getRuntime().newBoolean( isSelect(sql.convertToString()) );
}

/** @return whether the SQL starts with SELECT/WITH/SHOW/CALL (case-insensitive). */
private static boolean isSelect(final RubyString sql) {
    final ByteList sqlBytes = sql.getByteList();
    return startsWithIgnoreCase(sqlBytes, SELECT) ||
        startsWithIgnoreCase(sqlBytes, WITH) ||
        startsWithIgnoreCase(sqlBytes, SHOW) ||
        startsWithIgnoreCase(sqlBytes, CALL);
}

private static final byte[] INSERT = new byte[] { 'i','n','s','e','r','t' };

@JRubyMethod(name = "insert?", required = 1, meta = true, frame = false)
public static IRubyObject insert_p(final ThreadContext context,
    final IRubyObject self, final IRubyObject sql) {
    final ByteList sqlBytes = sql.convertToString().getByteList();
    return context.getRuntime().newBoolean(startsWithIgnoreCase(sqlBytes, INSERT));
}

/**
 * Case-insensitive prefix test that skips leading whitespace and one
 * opening parenthesis.
 * NOTE(review): when the string is shorter than the prefix the loop ends
 * early and this returns true - confirm callers tolerate that edge case.
 */
protected static boolean startsWithIgnoreCase(final ByteList string, final byte[] start) {
    int p = skipWhitespace(string, string.getBegin());
    final byte[] stringBytes = string.unsafeBytes();
    if ( stringBytes[p] == '(' ) p = skipWhitespace(string, p + 1);
    for ( int i = 0; i < string.getRealSize() && i < start.length; i++ ) {
        if ( Character.toLowerCase(stringBytes[p + i]) != start[i] ) return false;
    }
    return true;
}

/** @return index of the first non-whitespace byte at/after {@code from}. */
private static int skipWhitespace(final ByteList string, final int from) {
    final int end = string.getBegin() + string.getRealSize();
    final byte[] stringBytes = string.unsafeBytes();
    for ( int i = from; i < end; i++ ) {
        if ( !
Character.isWhitespace( stringBytes[i] ) ) return i;
    }
    return end;
}

/**
 * JDBC connection helper that handles mapping results to
 * <code>ActiveRecord::Result</code> (available since AR-3.1).
 *
 * @see #populateFromResultSet(ThreadContext, Ruby, List, ResultSet, RubyJdbcConnection.ColumnData[])
 * @author kares
 */
protected static class ResultHandler {

    // whether ActiveRecord::Result is available (set by the constructor)
    protected static Boolean USE_RESULT;

    // AR-3.2 : initialize(columns, rows)
    // AR-4.0 : initialize(columns, rows, column_types = {})
    protected static Boolean INIT_COLUMN_TYPES = Boolean.FALSE;

    protected static Boolean FORCE_HASH_ROWS = Boolean.FALSE;

    private static volatile ResultHandler instance;

    /** Lazily initialized singleton (double-checked on the volatile field). */
    public static ResultHandler getInstance(final Ruby runtime) {
        if ( instance == null ) {
            synchronized(ResultHandler.class) {
                if ( instance == null ) { // fine to initialize twice
                    setInstance( new ResultHandler(runtime) );
                }
            }
        }
        return instance;
    }

    protected static synchronized void setInstance(final ResultHandler instance) {
        ResultHandler.instance = instance;
    }

    protected ResultHandler(final Ruby runtime) {
        final RubyClass result = getResult(runtime);
        USE_RESULT = result != null && result != runtime.getNilClass();
    }

    /**
     * Maps one result-set row: an array of values when AR::Result is used,
     * otherwise a raw { name => value } hash.
     */
    public IRubyObject mapRow(final ThreadContext context, final Ruby runtime,
        final ColumnData[] columns, final ResultSet resultSet,
        final RubyJdbcConnection connection) throws SQLException {
        if ( USE_RESULT ) { // maps a AR::Result row
            final RubyArray row = runtime.newArray(columns.length);
            for ( int i = 0; i < columns.length; i++ ) {
                final ColumnData column = columns[i];
                row.append( connection.jdbcToRuby(context, runtime, column.index, column.type, resultSet) );
            }
            return row;
        }
        else {
            return mapRawRow(context, runtime, columns, resultSet, connection);
        }
    }

    /** Maps one row into a Ruby hash keyed by (Ruby) column name. */
    IRubyObject mapRawRow(final ThreadContext context, final Ruby runtime,
        final ColumnData[] columns, final ResultSet resultSet,
        final RubyJdbcConnection connection) throws SQLException {
        final RubyHash row = RubyHash.newHash(runtime);
        for ( int i = 0; i < columns.length; i++ ) {
            final ColumnData column = columns[i];
            row.op_aset( context, column.name,
                connection.jdbcToRuby(context, runtime, column.index, column.type, resultSet) );
        }
        return row;
    }

    /** Wraps the mapped rows into ActiveRecord::Result when available. */
    public IRubyObject newResult(final ThreadContext context, final Ruby runtime,
        final ColumnData[] columns, final IRubyObject rows) { // rows array
        if ( USE_RESULT ) { // ActiveRecord::Result.new(columns, rows)
            final RubyClass result = getResult(runtime);
            return result.callMethod( context, "new", initArgs(runtime, columns, rows), Block.NULL_BLOCK );
        }
        return rows; // contains { 'col1' => 1, ... } Hash-es
    }

    /**
     * Builds the AR::Result constructor args [columns, rows].
     * NOTE(review): both branches below are currently identical - the
     * column_types argument for the INIT_COLUMN_TYPES case was never added.
     */
    private IRubyObject[] initArgs(final Ruby runtime,
        final ColumnData[] columns, final IRubyObject rows) {
        final IRubyObject[] args;
        final RubyArray cols = RubyArray.newArray(runtime, columns.length);
        if ( INIT_COLUMN_TYPES ) { // NOTE: NOT IMPLEMENTED
            for ( int i = 0; i < columns.length; i++ ) {
                cols.append( columns[i].name );
            }
            args = new IRubyObject[] { cols, rows };
        }
        else {
            for ( int i = 0; i < columns.length; i++ ) {
                cols.append( columns[i].name );
            }
            args = new IRubyObject[] { cols, rows };
        }
        return args;
    }
}

/** Immutable holder for a parsed (catalog, schema, name) table identifier. */
protected static final class TableName {

    public final String catalog, schema, name;

    public TableName(String catalog, String schema, String table) {
        this.catalog = catalog;
        this.schema = schema;
        this.name = table;
    }

    @Override
    public String toString() {
        return getClass().getName() + "{catalog=" + catalog + ",schema=" + schema + ",name=" + name + "}";
    }
}

/**
 * Parses "name", "schema.name" or "catalog.schema.name" into a TableName,
 * case-converting the parts for the JDBC driver; explicit parts override
 * the passed-in catalog/schema defaults.
 * @throws IllegalArgumentException when the name has more than 2 dots
 */
protected TableName extractTableName(
    final Connection connection, String catalog, String schema,
    final String tableName) throws IllegalArgumentException, SQLException {
    final String[] nameParts = tableName.split("\\.");
    if ( nameParts.length > 3 ) {
        throw new IllegalArgumentException("table name: " + tableName + " should not contain more than 2 '.'");
    }
    String name = tableName;
    if ( nameParts.length == 2 ) {
        schema = nameParts[0]; name = nameParts[1];
    }
    else if ( nameParts.length == 3 ) {
        catalog = nameParts[0];
schema = nameParts[1]; name = nameParts[2]; } if ( schema != null ) { schema = caseConvertIdentifierForJdbc(connection, schema); } name = caseConvertIdentifierForJdbc(connection, name); if ( schema != null && ! databaseSupportsSchemas() ) { catalog = schema; } if ( catalog == null ) catalog = connection.getCatalog(); return new TableName(catalog, schema, name); } /** * @deprecated use {@link #extractTableName(Connection, String, String, String)} */ @Deprecated protected TableName extractTableName( final Connection connection, final String schema, final String tableName) throws IllegalArgumentException, SQLException { return extractTableName(connection, null, schema, tableName); } protected static final class ColumnData { public final RubyString name; public final int index; public final int type; public ColumnData(RubyString name, int type, int idx) { this.name = name; this.type = type; this.index = idx; } @Override public String toString() { return "'" + name + "'i" + index + "t" + type + ""; } } private ColumnData[] setupColumns( final Ruby runtime, final Connection connection, final ResultSetMetaData resultMetaData, final boolean downCase) throws SQLException { final int columnCount = resultMetaData.getColumnCount(); final ColumnData[] columns = new ColumnData[columnCount]; for ( int i = 1; i <= columnCount; i++ ) { // metadata is one-based String name = resultMetaData.getColumnLabel(i); if ( downCase ) { name = name.toLowerCase(); } else { name = caseConvertIdentifierForRails(connection, name); } final RubyString columnName = RubyString.newUnicodeString(runtime, name); final int columnType = resultMetaData.getColumnType(i); columns[i - 1] = new ColumnData(columnName, columnType, i); } return columns; } @Deprecated private ColumnData[] setupColumns( final Ruby runtime, final DatabaseMetaData metaData, final ResultSetMetaData resultMetaData, final boolean downCase) throws SQLException { final int columnCount = resultMetaData.getColumnCount(); final ColumnData[] 
columns = new ColumnData[columnCount]; for ( int i = 1; i <= columnCount; i++ ) { // metadata is one-based String name = resultMetaData.getColumnLabel(i); if ( downCase ) { name = name.toLowerCase(); } else { name = caseConvertIdentifierForRails(metaData, name); } final RubyString columnName = RubyString.newUnicodeString(runtime, name); final int columnType = resultMetaData.getColumnType(i); columns[i - 1] = new ColumnData(columnName, columnType, i); } return columns; } // JDBC API Helpers : protected static void close(final Connection connection) { if ( connection != null ) { try { connection.close(); } catch (final Exception e) { /* NOOP */ } } } public static void close(final ResultSet resultSet) { if (resultSet != null) { try { resultSet.close(); } catch (final Exception e) { /* NOOP */ } } } public static void close(final Statement statement) { if (statement != null) { try { statement.close(); } catch (final Exception e) { /* NOOP */ } } } // DEBUG-ing helpers : private static boolean debug = Boolean.getBoolean("arjdbc.debug"); public static boolean isDebug() { return debug; } public static void setDebug(boolean debug) { RubyJdbcConnection.debug = debug; } public static void debugMessage(final String msg) { debugMessage(null, msg); } public static void debugMessage(final ThreadContext context, final String msg) { if ( debug || ( context != null && context.runtime.isDebug() ) ) { final PrintStream out = context != null ? context.runtime.getOut() : System.out; out.println(msg); } } protected static void debugErrorSQL(final ThreadContext context, final String sql) { if ( debug || ( context != null && context.runtime.isDebug() ) ) { final PrintStream out = context != null ? 
context.runtime.getOut() : System.out; out.println("Error SQL: '" + sql + "'"); } } // disables full (Java) traces to be printed while DEBUG is on private static final Boolean debugStackTrace; static { String debugTrace = System.getProperty("arjdbc.debug.trace"); debugStackTrace = debugTrace == null ? null : Boolean.parseBoolean(debugTrace); } public static void debugStackTrace(final ThreadContext context, final Throwable e) { if ( debug || ( context != null && context.runtime.isDebug() ) ) { final PrintStream out = context != null ? context.runtime.getOut() : System.out; if ( debugStackTrace == null || debugStackTrace.booleanValue() ) { e.printStackTrace(out); } else { out.println(e); } } } protected void warn(final ThreadContext context, final String message) { callMethod(context, "warn", context.getRuntime().newString(message)); } private static RubyArray createCallerBacktrace(final ThreadContext context) { final Ruby runtime = context.getRuntime(); runtime.incrementCallerCount(); Method gatherCallerBacktrace; RubyStackTraceElement[] trace; try { gatherCallerBacktrace = context.getClass().getMethod("gatherCallerBacktrace"); trace = (RubyStackTraceElement[]) gatherCallerBacktrace.invoke(context); // 1.6.8 } catch (NoSuchMethodException ignore) { try { gatherCallerBacktrace = context.getClass().getMethod("gatherCallerBacktrace", Integer.TYPE); trace = (RubyStackTraceElement[]) gatherCallerBacktrace.invoke(context, 0); // 1.7.4 } catch (NoSuchMethodException e) { throw new RuntimeException(e); } catch (IllegalAccessException e) { throw new RuntimeException(e); } catch (InvocationTargetException e) { throw new RuntimeException(e.getTargetException()); } } catch (IllegalAccessException e) { throw new RuntimeException(e); } catch (InvocationTargetException e) { throw new RuntimeException(e.getTargetException()); } // RubyStackTraceElement[] trace = context.gatherCallerBacktrace(level); final RubyArray backtrace = runtime.newArray(trace.length); for (int i = 0; i < 
trace.length; i++) { RubyStackTraceElement element = trace[i]; backtrace.append( RubyString.newString(runtime, element.getFileName() + ":" + element.getLineNumber() + ":in `" + element.getMethodName() + "'" ) ); } return backtrace; } }
package com.orangeetv; import java.io.Serializable; import java.io.UnsupportedEncodingException; import javax.annotation.PostConstruct; import javax.faces.bean.ManagedBean; import javax.faces.bean.RequestScoped; import net.sourceforge.pinyin4j.*; import net.sourceforge.pinyin4j.format.*; import net.sourceforge.pinyin4j.format.exception.BadHanyuPinyinOutputFormatCombination; @ManagedBean @RequestScoped public class PinyinBean implements Serializable { private String value = ""; public String getResult() { HanyuPinyinOutputFormat format1 = new HanyuPinyinOutputFormat(); format1.setToneType(HanyuPinyinToneType.WITH_TONE_MARK); format1.setVCharType(HanyuPinyinVCharType.WITH_U_UNICODE); HanyuPinyinOutputFormat format2 = new HanyuPinyinOutputFormat(); format2.setToneType(HanyuPinyinToneType.WITHOUT_TONE); String value1 = getStringPinYin(value, format1); String value2 = getStringPinYin(value, format2); String result = "['" + value + "','" + value1 + "','" + value2 + "']"; return result; } public void setValue(String value) { try { this.value = new String(value.getBytes("iso-8859-1"),"utf-8") ; } catch (UnsupportedEncodingException e) { this.value = "encoding_error"; } } public String getValue() { return this.value; } public String getCharacterPinYin(char c, HanyuPinyinOutputFormat format) { String[] pinyin = null; try { pinyin = PinyinHelper.toHanyuPinyinStringArray(c, format); } catch (BadHanyuPinyinOutputFormatCombination e) { return null; } // ctoHanyuPinyinStringArraynull if (pinyin == null) return null; return pinyin[0]; } public String getStringPinYin(String str, HanyuPinyinOutputFormat format) { StringBuilder sb = new StringBuilder(); String tempPinyin = null; for (int i = 0; i < str.length(); ++i) { tempPinyin = getCharacterPinYin(str.charAt(i), format); if (tempPinyin == null) { // str.charAt(i) sb.append(str.charAt(i)); } else { if (i != 0) { sb.append(" "); } sb.append(tempPinyin); } } return sb.toString(); } }
package org.petschko.rpgmakermv.decrypt;

import org.json.JSONException;
import org.petschko.lib.Const;
import org.petschko.lib.File;
import org.petschko.lib.Functions;
import org.petschko.lib.exceptions.PathException;
import org.petschko.lib.gui.*;
import org.petschko.lib.gui.notification.ErrorWindow;
import org.petschko.lib.gui.notification.InfoWindow;

import javax.swing.BorderFactory;
import javax.swing.JDialog;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.ProgressMonitor;
import javax.swing.SwingWorker;
import javax.swing.UIManager;
import java.awt.BorderLayout;
import java.awt.GridLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.ArrayList;

/**
 * Main window of the RPG-Maker-MV decrypter: builds the Swing UI, wires the
 * menu actions, and drives decryption / directory clearing / project loading
 * through SwingWorker background tasks.
 */
class GUI {
	private JFrame mainWindow;
	private GUI_Menu mainMenu;
	private JPanel windowPanel = new JPanel(new BorderLayout());
	private JPanel projectFilesPanel = new JPanel();
	private JPanel fileList = new JPanel();
	private GUI_About guiAbout;
	private GUI_FileInfo fileInfo = new GUI_FileInfo();
	// set when a project is opened (GUI_OpenRPGDir) — null before that
	private RPGProject rpgProject;
	// created in GUI_OpenRPGDir.done(); used by GUI_Decryption
	private Decrypter decrypter;

	/**
	 * GUI Constructor
	 */
	GUI() {
		// Create and Setup components
		this.createMainWindow();
		this.createMainMenu();
		this.guiAbout = new GUI_About("About " + Config.programName, this.mainWindow);
		this.createWindowGUI();

		// Center Window and Display it
		this.mainWindow.setLocationRelativeTo(null);
		this.mainWindow.setVisible(true);
		this.mainWindow.pack();

		// Assign Listener
		this.assignMainMenuListener();
		this.setNewOutputDir(App.outputDir);

		// Add Update-Check
		if(Functions.strToBool(App.preferences.getConfig(Preferences.autoCheckForUpdates, "true")))
			new GUI_Update(this, true);
	}

	/**
	 * Returns the Main-Window
	 *
	 * @return - Main-Window
	 */
	JFrame getMainWindow() {
		return mainWindow;
	}

	/**
	 * Returns the Main-Menu
	 *
	 * @return - Main-Menu
	 */
	GUI_Menu getMainMenu() {
		return mainMenu;
	}

	/**
	 * Dispose the GUI
	 */
	void dispose() {
		this.guiAbout.dispose();
		this.mainWindow.dispose();
	}

	/**
	 * Creates and assign the MainFrame
	 */
	private void createMainWindow() {
		this.mainWindow = new JFrame(Config.programName + " by " + Const.creator + " " + Config.version);

		// Change close Action
		this.mainWindow.setDefaultCloseOperation(JFrame.DO_NOTHING_ON_CLOSE);
		this.mainWindow.addWindowListener(GUI_ActionListener.closeButton());
	}

	/**
	 * Creates and assign the Main-Menu
	 */
	private void createMainMenu() {
		this.mainMenu = new GUI_Menu();
		this.mainWindow.add(this.mainMenu, BorderLayout.NORTH);

		// Set Menu-Settings
		if(Functions.strToBool(App.preferences.getConfig(Preferences.ignoreFakeHeader, "true")))
			this.mainMenu.ignoreFakeHeader.setState(true);
		if(Functions.strToBool(App.preferences.getConfig(Preferences.loadInvalidRPGDirs, "false")))
			this.mainMenu.loadInvalidRPGDirs.setState(true);
		if(Functions.strToBool(App.preferences.getConfig(Preferences.clearOutputDirBeforeDecrypt, "true")))
			this.mainMenu.clearOutputDir.setState(true);

		// "overwrite" is only meaningful when the output dir is NOT cleared first
		if(! this.mainMenu.clearOutputDir.isSelected()) {
			if(Functions.strToBool(App.preferences.getConfig(Preferences.overwriteFiles, "false")))
				this.mainMenu.overwriteExistingFiles.setState(true);
		} else
			this.mainMenu.overwriteExistingFiles.setEnabled(false);

		if(Functions.strToBool(App.preferences.getConfig(Preferences.autoCheckForUpdates, "true")))
			this.mainMenu.checkForUpdates.setState(true);

		// no project loaded yet — disable project-only menu entries
		this.mainMenu.enableOnRPGProject(false);
	}

	/**
	 * Creates all Components for the Window
	 */
	private void createWindowGUI() {
		JPanel middleFileContainer = new JPanel(new GridLayout(1, 2));
		JPanel footContainer = new JPanel(new GridLayout(1, 3));
		JLabelWrap filesListText = new JLabelWrap("Please open a RPG-Maker MV Project (\"File\" -> \"Select RPG MV Project\")");
		filesListText.setColumns(20);

		/*JScrollPane scrollPane = new JScrollPane(
				this.fileList,
				ScrollPaneConstants.VERTICAL_SCROLLBAR_AS_NEEDED,
				ScrollPaneConstants.HORIZONTAL_SCROLLBAR_AS_NEEDED
		);*/

		// Design stuff
		this.projectFilesPanel.setLayout(new BorderLayout());
		this.projectFilesPanel.setBorder(BorderFactory.createTitledBorder("Project-Files"));
		this.windowPanel.setBorder(BorderFactory.createEmptyBorder(10, 10, 10, 10));

		// Assign to the main comps
		middleFileContainer.add(this.projectFilesPanel);
		middleFileContainer.add(this.fileInfo);
		this.windowPanel.add(middleFileContainer, BorderLayout.CENTER);
		this.windowPanel.add(footContainer, BorderLayout.SOUTH);
		this.projectFilesPanel.add(filesListText, BorderLayout.NORTH);
		this.mainWindow.add(this.windowPanel, BorderLayout.CENTER);
	}

	/**
	 * Assigns ActionListener to the Main-Menu parts
	 */
	private void assignMainMenuListener() {
		// -- File
		this.mainMenu.open.addActionListener(
				e -> {
					String openDir = App.preferences.getConfig(Preferences.lastRPGDir, ".");
					if(! File.existsDir(openDir))
						openDir = ".";

					UIManager.put("FileChooser.readOnly", Boolean.TRUE);
					JDirectoryChooser dirChooser = new JDirectoryChooser(openDir);
					int choose = dirChooser.showDialog(this.mainWindow, null);

					if(dirChooser.getSelectedFile() != null && choose == JDirectoryChooser.APPROVE_OPTION) {
						App.preferences.setConfig(Preferences.lastRPGDir, dirChooser.getCurrentDirectory().getPath());
						this.openRPGProject(dirChooser.getSelectedFile().getPath(), true);
					}
				}
		);
		this.mainMenu.changeOutputDir.addActionListener(
				e -> {
					// Warn the user that the selected directory will be cleared
					if(Boolean.parseBoolean(App.preferences.getConfig(Preferences.clearOutputDirBeforeDecrypt, "true")))
						new InfoWindow("You have chosen, that the selected Directory will be cleared.\nBeware that this Program clear the selected Directory (Deletes all Files within)! Don't select directories where you have important Files or Sub-Directories in!\n\n(Or turn off the clearing under Options)", "Important Info about your Files").show(this.mainWindow);

					String openDir = App.preferences.getConfig(Preferences.lastOutputParentDir, ".");
					if(! File.existsDir(openDir))
						openDir = ".";

					UIManager.put("FileChooser.readOnly", Boolean.TRUE);
					JDirectoryChooser dirChooser = new JDirectoryChooser(openDir);
					int choose = dirChooser.showDialog(this.mainWindow, null);

					if(dirChooser.getSelectedFile() != null && choose == JDirectoryChooser.APPROVE_OPTION) {
						App.preferences.setConfig(Preferences.lastOutputParentDir, dirChooser.getCurrentDirectory().getPath());
						App.preferences.setConfig(Preferences.lastOutputDir, dirChooser.getSelectedFile().getPath());
						this.setNewOutputDir(dirChooser.getSelectedFile().getPath());
					}
				}
		);
		this.mainMenu.exit.addActionListener(GUI_ActionListener.closeMenu());

		// -- Options
		this.mainMenu.ignoreFakeHeader.addActionListener(GUI_ActionListener.switchSetting(Preferences.ignoreFakeHeader));
		this.mainMenu.loadInvalidRPGDirs.addActionListener(GUI_ActionListener.switchSetting(Preferences.loadInvalidRPGDirs));
		this.mainMenu.clearOutputDir.addActionListener(GUI_ActionListener.switchSetting(Preferences.clearOutputDirBeforeDecrypt));
		// NOTE(review): this blindly toggles the enabled flag instead of
		// deriving it from the checkbox state — if the two ever get out of
		// sync the flag stays inverted; verify against createMainMenu's init.
		this.mainMenu.clearOutputDir.addActionListener(
				e -> this.mainMenu.overwriteExistingFiles.setEnabled(! this.mainMenu.overwriteExistingFiles.isEnabled())
		);
		this.mainMenu.overwriteExistingFiles.addActionListener(GUI_ActionListener.switchSetting(Preferences.overwriteFiles));
		this.mainMenu.checkForUpdates.addActionListener(GUI_ActionListener.switchSetting(Preferences.autoCheckForUpdates));

		// -- Decrypt
		// -- Tools
		// -- Info
		this.mainMenu.updateProgram.addActionListener(
				e -> new GUI_Update(this)
		);
		this.mainMenu.reportABug.addActionListener(GUI_ActionListener.openWebsite(Config.projectBugReportURL));
		this.mainMenu.about.addActionListener(
				e -> this.guiAbout.showWindow()
		);
	}

	/**
	 * Assign RPG-Project ActionListeners
	 */
	private void assignRPGActionListener() {
		// Remove all Previous ActionListeners
		Functions.buttonRemoveAllActionListeners(this.mainMenu.openRPGDirExplorer);
		Functions.buttonRemoveAllActionListeners(this.mainMenu.allFiles);
		// NOTE(review): restoreImages listeners are never removed here, so a
		// new one is added each time a project is (re)opened — confirm whether
		// that stacking is intended.

		// Add new ActionListener
		this.mainMenu.openRPGDirExplorer.addActionListener(GUI_ActionListener.openExplorer(this.rpgProject.getPath()));
		//this.mainMenu.allFiles.addActionListener(this.decrypt(this.rpgProject.getEncryptedFiles()));
		this.mainMenu.allFiles.addActionListener(new GUI_Decryption(this.rpgProject.getEncryptedFiles()));
		this.mainMenu.restoreImages.addActionListener(new GUI_Decryption(this.rpgProject.getEncryptedFiles(), true));
	}

	/**
	 * Opens the RPG-MV-Project
	 *
	 * @param currentDirectory - Current RPG-Maker Directory
	 * @param showInfoWindow - Show Info-Window if done
	 */
	private void openRPGProject(String currentDirectory, boolean showInfoWindow) {
		if(currentDirectory == null) {
			PathException pe = new PathException("currentDirectory can't be null!", (String) null);
			pe.printStackTrace();
			return;
		}

		// loading happens off the EDT in a SwingWorker
		GUI_OpenRPGDir openRPG = new GUI_OpenRPGDir(currentDirectory, showInfoWindow);
		openRPG.execute();
	}

	/**
	 * Set the new Output dir & assign new ActionListeners
	 *
	 * @param newOutputDir - New Output-Directory
	 */
	private void setNewOutputDir(String newOutputDir) {
		App.outputDir = File.ensureDSonEndOfPath(newOutputDir);

		// Remove old ActionListener
		Functions.buttonRemoveAllActionListeners(this.mainMenu.openOutputDirExplorer);
		Functions.buttonRemoveAllActionListeners(this.mainMenu.doClearOutputDir);

		// New ActionListener
		this.mainMenu.openOutputDirExplorer.addActionListener(GUI_ActionListener.openExplorer(App.outputDir));
		this.mainMenu.doClearOutputDir.addActionListener(new GUI_DirectoryClearing(App.outputDir));
	}

	/**
	 * Class GUI_Decryption
	 *
	 * Background task that decrypts (or restores) the project files while
	 * reporting progress through a ProgressMonitor; doubles as the menu
	 * ActionListener that starts itself.
	 */
	private class GUI_Decryption extends SwingWorker<Void, Void> implements ActionListener {
		private ArrayList<File> files;
		// created in actionPerformed (EDT) before execute() is called
		private ProgressMonitor progressMonitor;
		private boolean restoreImages = false;

		/**
		 * GUI_Decryption constructor
		 *
		 * @param files - Files to Decrypt
		 */
		GUI_Decryption(ArrayList<File> files) {
			this.files = files;
		}

		/**
		 * GUI_Decryption constructor
		 *
		 * @param files - Files to Decrypt
		 * @param restoreImages - Restores Images without key
		 */
		GUI_Decryption(ArrayList<File> files, boolean restoreImages) {
			this.files = files;
			this.restoreImages = restoreImages;
		}

		/**
		 * Computes a result, or throws an exception if unable to do so.
		 *
		 * Note that this method is executed only once.
		 *
		 * Note: this method is executed in a background thread.
		 *
		 * @return the computed result
		 */
		@Override
		protected Void doInBackground() {
			// Clear Output-Dir if checked
			if(Functions.strToBool(App.preferences.getConfig(Preferences.clearOutputDirBeforeDecrypt, "true"))) {
				this.progressMonitor.setNote("Clearing Output-Directory...");
				File.clearDirectory(App.outputDir);
			}

			// Setup Decrypter
			this.progressMonitor.setNote("Configuring Decrypter...");
			decrypter.setIgnoreFakeHeader(
					Functions.strToBool(App.preferences.getConfig(Preferences.ignoreFakeHeader, "true"))
			);
			decrypter.setRemain(App.preferences.getConfig(Preferences.decrypterRemain, Decrypter.defaultRemain));
			decrypter.setSignature(App.preferences.getConfig(Preferences.decrypterSignature, Decrypter.defaultSignature));
			decrypter.setVersion(App.preferences.getConfig(Preferences.decrypterVersion, Decrypter.defaultVersion));

			int headerLen = Decrypter.defaultHeaderLen;
			try {
				headerLen = Integer.parseInt(App.preferences.getConfig(Preferences.decrypterHeaderLen));
			} catch(NumberFormatException ex) {
				ErrorWindow errorWindow = new ErrorWindow(
						"Header-Length was not an Valid Number - Using Default-Length!",
						ErrorWindow.ERROR_LEVEL_WARNING,
						false
				);
				errorWindow.show(mainWindow);

				// Set default as new Len
				App.preferences.setConfig(Preferences.decrypterHeaderLen, Integer.toString(Decrypter.defaultHeaderLen));
			}
			decrypter.setHeaderLen(headerLen);

			// Check if Decrypter already has a Key
			if(decrypter.getDecryptCode() == null) {
				this.progressMonitor.setNote("Try to detect Encryption-Key...");
				try {
					decrypter.detectEncryptionKeyFromJson(rpgProject.getSystem(), rpgProject.getEncryptionKeyName());
				} catch(NullPointerException decryNullEx) {
					// File-Null-Pointer
					ErrorWindow errorWindow = new ErrorWindow(
							"Can't find Decryption-Key-File!" + Const.newLine +
									"Make sure that the File is in the RPG-Directory..." + Const.newLine +
									"Or set the Key by yourself (Decrypter -> Set Encryption-Key)",
							ErrorWindow.ERROR_LEVEL_WARNING,
							false
					);
					errorWindow.show(mainWindow);

					// Halt task
					this.cancel(true);
					return null;
				} catch(JSONException e1) {
					// JSON-NotFound
					ErrorWindow errorWindow = new ErrorWindow("Can't find Decryption-Key in File!", ErrorWindow.ERROR_LEVEL_WARNING, false);
					errorWindow.show(mainWindow);

					// Halt task
					this.cancel(true);
					return null;
				}
			}

			// Decrypt and Save Files
			int i = 0;
			for(File file : this.files) {
				// Check if cancel button was pressed
				if(this.progressMonitor.isCanceled()) {
					this.cancel(true);
					return null;
				}

				this.progressMonitor.setNote("File: " + file.getFilePath());
				try {
					System.out.println("Decrypt: " + file.getFilePath());
					decrypter.decryptFile(file, this.restoreImages);
				} catch(Exception e1) {
					e1.printStackTrace();
				} finally {
					// save even after a failed decrypt (best-effort); in
					// restore mode only images are written back
					if(! this.restoreImages || file.isImage())
						rpgProject.saveFile(file, Functions.strToBool(App.preferences.getConfig(Preferences.overwriteFiles, "false")));
				}

				// Add Progress to Progress-Monitor
				i++;
				this.progressMonitor.setProgress(i);
			}

			return null;
		}

		/**
		 * Executed on the <i>Event Dispatch Thread</i> after the {@code doInBackground}
		 * method is finished. The default
		 * implementation does nothing. Subclasses may override this method to
		 * perform completion actions on the <i>Event Dispatch Thread</i>. Note
		 * that you can query status inside the implementation of this method to
		 * determine the result of this task or whether this task has been cancelled.
		 *
		 * @see #doInBackground
		 * @see #isCancelled()
		 * @see #get
		 */
		@Override
		protected void done() {
			this.progressMonitor.close();

			// Reset Files/ActionListener
			openRPGProject(rpgProject.getPath(), false);

			if(this.isCancelled()) {
				System.out.println("Cancelled...");

				InfoWindow infoWindow = new InfoWindow("Decryption canceled!");
				infoWindow.show(mainWindow);
			} else {
				System.out.println("Done.");

				InfoWindow infoWindow;
				if(this.restoreImages)
					infoWindow = new InfoWindow("Images are restored! ^-^");
				else
					infoWindow = new InfoWindow("Decryption complete! =)");

				infoWindow.show(mainWindow);
			}
		}

		/**
		 * Invoked when an action occurs.
		 *
		 * @param e - ActionEvent
		 */
		@Override
		public void actionPerformed(ActionEvent e) {
			this.progressMonitor = new ProgressMonitor(mainWindow, this.restoreImages ? "Restoring..." : "Decrypting...", "Preparing...", 0, this.files.size());
			this.progressMonitor.setProgress(0);

			this.execute();
		}
	}

	/**
	 * Class GUI_DirectoryClearing
	 *
	 * Background task that empties the output directory while showing a
	 * modal-style "please wait" dialog; also acts as its own trigger.
	 */
	private class GUI_DirectoryClearing extends SwingWorker<Void, Void> implements ActionListener {
		private String directoryPath = null;
		// created in actionPerformed (EDT) before execute() is called
		private JDialog jDialog;

		/**
		 * GUI_DirectoryClearing constructor
		 *
		 * @param directoryPath - Path to clear
		 */
		GUI_DirectoryClearing(String directoryPath) {
			if(directoryPath == null) {
				PathException pe = new PathException("directoryPath can't be null!", (String) null);
				pe.printStackTrace();
				return;
			}

			this.directoryPath = File.ensureDSonEndOfPath(directoryPath);
		}

		/**
		 * Computes a result, or throws an exception if unable to do so.
		 *
		 * Note that this method is executed only once.
		 *
		 * Note: this method is executed in a background thread.
		 *
		 * @return the computed result
		 */
		@Override
		protected Void doInBackground() {
			if(this.directoryPath == null)
				return null;

			if(File.clearDirectory(this.directoryPath)) {
				InfoWindow infoWindow = new InfoWindow("Output-Directory cleared!");
				infoWindow.show(mainWindow);
			} else {
				ErrorWindow errorWindow = new ErrorWindow(
						"Can't clear Directory... May an other Program has still Files open in there?",
						ErrorWindow.ERROR_LEVEL_WARNING,
						false
				);
				errorWindow.show(mainWindow);
			}

			return null;
		}

		/**
		 * Executed on the <i>Event Dispatch Thread</i> after the {@code doInBackground}
		 * method is finished. The default
		 * implementation does nothing. Subclasses may override this method to
		 * perform completion actions on the <i>Event Dispatch Thread</i>. Note
		 * that you can query status inside the implementation of this method to
		 * determine the result of this task or whether this task has been cancelled.
		 *
		 * @see #doInBackground
		 * @see #isCancelled()
		 * @see #get
		 */
		@Override
		protected void done() {
			this.jDialog.dispose();

			// Reset this ActionListener
			if(directoryPath != null)
				setNewOutputDir(this.directoryPath);
		}

		/**
		 * Invoked when an action occurs.
		 *
		 * @param e - ActionEvent
		 */
		@Override
		public void actionPerformed(ActionEvent e) {
			if(this.directoryPath == null)
				return;

			this.jDialog = new JDialog();
			JLabel text = new JLabel("Please wait while clearing the Directory: " + this.directoryPath);
			text.setBorder(BorderFactory.createEmptyBorder(15, 20, 15, 20));
			this.jDialog.setTitle("Please wait...");
			this.jDialog.setDefaultCloseOperation(JDialog.DO_NOTHING_ON_CLOSE);
			this.jDialog.add(text);
			this.jDialog.pack();
			this.jDialog.setLocationRelativeTo(mainWindow);
			this.jDialog.setVisible(true);

			this.execute();
		}
	}

	/**
	 * Class GUI_OpenRPGDir
	 *
	 * Background task that loads an RPG-Maker project directory and, on
	 * success, enables the project menu entries and rewires listeners.
	 */
	private class GUI_OpenRPGDir extends SwingWorker<Void, Void> {
		private String directoryPath;
		private boolean showInfoWindow;

		/**
		 * GUI_OpenRPGDir constructor
		 *
		 * @param directoryPath - Path of the Directory
		 */
		GUI_OpenRPGDir(String directoryPath) {
			this.directoryPath = directoryPath;
			this.showInfoWindow = false;
		}

		/**
		 * GUI_OpenRPGDir constructor
		 *
		 * @param directoryPath - Path of the Directory
		 * @param showInfoWindow - Show success Window after the Action
		 */
		GUI_OpenRPGDir(String directoryPath, boolean showInfoWindow) {
			this.directoryPath = directoryPath;
			this.showInfoWindow = showInfoWindow;
		}

		/**
		 * Computes a result, or throws an exception if unable to do so.
		 *
		 * Note that this method is executed only once.
		 *
		 * Note: this method is executed in a background thread.
		 *
		 * @return the computed result
		 */
		@Override
		protected Void doInBackground() {
			try {
				rpgProject = new RPGProject(
						File.ensureDSonEndOfPath(this.directoryPath),
						! Functions.strToBool(App.preferences.getConfig(Preferences.loadInvalidRPGDirs, "false"))
				);
			} catch(PathException e) {
				ErrorWindow errorWindow = new ErrorWindow(
						e.getMessage() + Const.newLine +
								"You can turn on the Option \"Load invalid RPG-Dirs anyway\" if your Directory is a RPG-Dir but it not detect it correctly." + Const.newLine +
								"Warning: Turning on the Option may cause incorrect results.",
						ErrorWindow.ERROR_LEVEL_WARNING,
						false
				);
				errorWindow.show(mainWindow);

				this.cancel(true);
				return null;
			} catch(Exception e) {
				ErrorWindow errorWindow = new ErrorWindow(e.getMessage(), ErrorWindow.ERROR_LEVEL_ERROR, false);
				errorWindow.show(mainWindow);

				this.cancel(true);
				return null;
			}

			return null;
		}

		/**
		 * Executed on the <i>Event Dispatch Thread</i> after the {@code doInBackground}
		 * method is finished. The default
		 * implementation does nothing. Subclasses may override this method to
		 * perform completion actions on the <i>Event Dispatch Thread</i>. Note
		 * that you can query status inside the implementation of this method to
		 * determine the result of this task or whether this task has been cancelled.
		 *
		 * @see #doInBackground
		 * @see #isCancelled()
		 * @see #get
		 */
		@Override
		protected void done() {
			if(! this.isCancelled()) {
				decrypter = new Decrypter();
				rpgProject.setOutputPath(App.outputDir);
				mainMenu.enableOnRPGProject(true);
				assignRPGActionListener();

				// Refresh Project-Files
				// todo (re)load file list

				// Done
				if(this.showInfoWindow) {
					InfoWindow infoWindow = new InfoWindow("RPG-Maker Project loaded..." + Const.newLine + "Please use \"Decrypt\" -> \"All\" Files to Decrypt.");
					infoWindow.show(mainWindow);
				}
			}
		}
	}
}
package at.ac.tuwien.softwarearchitecture.swazam.peer.management;

import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Timer;
import java.util.TimerTask;
import java.util.UUID;

import javax.xml.bind.JAXBContext;

import org.apache.log4j.Level;
import org.apache.log4j.Logger;

import ac.at.tuwien.infosys.swa.audio.Fingerprint;
import at.ac.tuwien.softwarearchitecture.swazam.common.infos.ClientInfo;
import at.ac.tuwien.softwarearchitecture.swazam.common.infos.FingerprintSearchRequest;
import at.ac.tuwien.softwarearchitecture.swazam.common.infos.PeerFingerprintInformation;
import at.ac.tuwien.softwarearchitecture.swazam.common.infos.PeerInfo;
import at.ac.tuwien.softwarearchitecture.swazam.common.infos.PeerRingInformation;
import at.ac.tuwien.softwarearchitecture.swazam.peer.fingerprintExtractorAndManager.IFingerprintExtractorAndManager;
import at.ac.tuwien.softwarearchitecture.swazam.peer.matching.IMatchingManager;
import at.ac.tuwien.softwarearchitecture.swazam.peer.serverCommunication.ServerCommunicationManager;
import at.ac.tuwien.softwarearchitecture.swazam.peer.util.ConfigurationManagement;
import at.ac.tuwien.softwarearchitecture.swazam.peer.util.NetworkUtil;

/**
 * Manages this peer's membership in the peer ring: registration with the
 * central server, joining the ring, super-peer heartbeat broadcasting,
 * leader election when the super peer goes silent, and forwarding of
 * fingerprint search requests to sub-peers.
 * <p>
 * NOTE(review): this class is accessed both from Timer threads and from ad-hoc
 * worker threads; {@link #peerRing} is a plain HashMap, so cross-thread
 * visibility is best-effort — confirm whether a concurrent map is required.
 */
public class PeerManager implements IPeerManager {

    /*
     * Holds the peer ring information, i.e. the rest of the peers in the ring.
     * Replaced with List<IDs> for privacy, to avoid a super peer or other peers
     * knowing the content of all other peers.
     */
    private Map<PeerInfo, List<Fingerprint>> peerRing = new HashMap<PeerInfo, List<Fingerprint>>();

    /** Handles fingerprint match requests received from the SuperPeer. */
    private IMatchingManager matchingManager;

    /** Communication channel to the central SWAZAM server. */
    private ServerCommunicationManager serverCommunicationManager;

    /** Source of this peer's locally known fingerprints. */
    private IFingerprintExtractorAndManager fingerprintExtractorAndManager;

    /** Milliseconds of super-peer silence tolerated before a leader election is triggered. */
    private static final int MAX_IDLE_PERIOD = 20000;

    /** Timestamp of the last heartbeat received from the super peer. */
    private Date latestSuperPeerRefreshMade = new Date();

    // info used to bill a client for search/result
    private ClientInfo clientInfo;

    // information regarding the current Peer; distributed to the Server and
    // other peers and used in connecting to this Peer
    private PeerInfo peerInfo;

    /**
     * The currently scheduled super-peer liveness check, if any.
     * A java.util.TimerTask can be scheduled at most once and never after
     * cancel(), so a fresh instance is created each time via
     * {@link #checkSuperPeerAliveTask()} instead of reusing one field-held task.
     */
    private TimerTask checkAlivePeriodSuperPeer;

    /**
     * Creates a task that triggers {@link #performLeaderElection()} when the
     * super peer has not refreshed within {@link #MAX_IDLE_PERIOD}.
     */
    private TimerTask checkSuperPeerAliveTask() {
        return new TimerTask() {
            public void run() {
                Date currentDate = new Date();
                if (currentDate.getTime() - latestSuperPeerRefreshMade.getTime() > MAX_IDLE_PERIOD) {
                    performLeaderElection();
                }
            }
        };
    }

    /**
     * If the current peer is superPeer, it needs to continuously broadcast to
     * other peers its info. (Fresh task per call — see note on
     * {@link #checkAlivePeriodSuperPeer}.)
     */
    private TimerTask broadcastSuperPeerInfoHeartbeat() {
        return new TimerTask() {
            public void run() {
                broadcastSuperPeerInfoHeartBeat();
            }
        };
    }

    // used to check if super peer sent its info
    Timer checkForSuperPeerRefreshRate;

    /** Stable ID derived from this peer's ip:port. */
    private UUID peerID;

    /**
     * Loads peer and client configuration, derives the peer ID and registers
     * this peer with the central server.
     */
    public PeerManager(ServerCommunicationManager serverCommunicationManager,
            IFingerprintExtractorAndManager fingerprintExtractorAndManager) {
        super();
        this.serverCommunicationManager = serverCommunicationManager;
        peerInfo = ConfigurationManagement.loadPeerInfo();
        Logger.getLogger(PeerManager.class).log(Level.WARN, peerInfo);
        clientInfo = ConfigurationManagement.loadClientInfo();
        peerInfo.setPassword(clientInfo.getPassword());
        peerInfo.setUsername(clientInfo.getUsername());
        peerID = generatePeerID();
        this.fingerprintExtractorAndManager = fingerprintExtractorAndManager;
        registerToServer();
    }

    /** Deterministic UUID from "ip_port", so restarts keep the same identity. */
    private UUID generatePeerID() {
        return UUID.nameUUIDFromBytes((peerInfo.getIp() + "_" + peerInfo.getPort()).getBytes());
    }

    public UUID getPeerID() {
        return peerID;
    }

    public Map<PeerInfo, List<Fingerprint>> getPeerRing() {
        return peerRing;
    }

    public void setPeerRing(Map<PeerInfo, List<Fingerprint>> peerRing) {
        this.peerRing = peerRing;
    }

    public void addPeers(Map<PeerInfo, List<Fingerprint>> peerRing) {
        this.peerRing.putAll(peerRing);
    }

    public void addPeer(PeerInfo peer, List<Fingerprint> fingerprints) {
        this.peerRing.put(peer, fingerprints);
    }

    public IMatchingManager getMatchingManager() {
        return matchingManager;
    }

    public void setMatchingManager(IMatchingManager matchingManager) {
        this.matchingManager = matchingManager;
    }

    public PeerInfo getPeerInfo() {
        return peerInfo;
    }

    public void setPeerInfo(PeerInfo peerInfo) {
        this.peerInfo = peerInfo;
    }

    public void setPeerID(UUID peerID) {
        this.peerID = peerID;
    }

    public ClientInfo getClientInfo() {
        return clientInfo;
    }

    public void setClientInfo(ClientInfo clientInfo) {
        this.clientInfo = clientInfo;
    }

    public ServerCommunicationManager getServerCommunicationManager() {
        return serverCommunicationManager;
    }

    public void setServerCommunicationManager(ServerCommunicationManager serverCommunicationManager) {
        this.serverCommunicationManager = serverCommunicationManager;
    }

    public IFingerprintExtractorAndManager getFingerprintExtractorAndManager() {
        return fingerprintExtractorAndManager;
    }

    public void setFingerprintExtractorAndManager(IFingerprintExtractorAndManager fingerprintExtractorAndManager) {
        this.fingerprintExtractorAndManager = fingerprintExtractorAndManager;
    }

    /** Drains an HTTP connection stream (if any) into the log. */
    private static void logStream(InputStream stream) {
        if (stream == null) {
            return;
        }
        try {
            BufferedReader reader = new BufferedReader(new InputStreamReader(stream));
            String line;
            while ((line = reader.readLine()) != null) {
                Logger.getLogger(PeerManager.class.getName()).log(Level.ERROR, line);
            }
            reader.close();
        } catch (Exception e) {
            Logger.getLogger(PeerManager.class.getName()).log(Level.ERROR, e);
        }
    }

    /**
     * Takes the request and forwards it to the MatchingManager. It is used in
     * case a search is forwarded to this peer from another PeerManager.
     * Matching runs on a daemon thread so the caller is not blocked.
     */
    @Override
    public void searchFingerprint(final ClientInfo client, final Fingerprint fingerprint) {
        Thread matchingThread = new Thread() {
            public void run() {
                if (matchingManager != null) {
                    matchingManager.matchFile(client, fingerprint);
                }
            }
        };
        matchingThread.setDaemon(true);
        matchingThread.start();
    }

    /**
     * If this peer is the super peer, broadcasts the search request to every
     * sub-peer that claims to hold a matching fingerprint; dead peers are
     * dropped from the ring. Sub-peers ignore the call.
     */
    @Override
    public void forwardSearchRequest(ClientInfo clientInfo, Fingerprint fingerprint) {
        if (this.peerID.equals(this.peerInfo.getSuperPeerID())) {
            // Fix: iterate with an explicit Iterator so dead peers can be removed
            // in place without a ConcurrentModificationException (the original
            // called peerRing.remove() while looping over entrySet()).
            Iterator<Entry<PeerInfo, List<Fingerprint>>> entries = peerRing.entrySet().iterator();
            while (entries.hasNext()) {
                Entry<PeerInfo, List<Fingerprint>> entry = entries.next();
                // check if sub-peer contains desired fingerprint
                List<Fingerprint> peerFingerprints = entry.getValue();
                boolean hasFingeprint = false;
                for (Fingerprint f : peerFingerprints) {
                    // NOTE(review): assumes a match score of 0.0 means
                    // "identical fingerprint" — confirm against Fingerprint.match
                    if (f.match(fingerprint) == 0.0) {
                        hasFingeprint = true;
                        break;
                    }
                }
                // if sub-peer does not contain the desired fingerprint, continue
                if (!hasFingeprint) {
                    continue;
                }
                boolean isAlive = NetworkUtil.checkIfPortOpen(entry.getKey().getIp(), entry.getKey().getPort());
                if (isAlive) {
                    HttpURLConnection connection = null;
                    try {
                        URL url = new URL("http://" + entry.getKey().getIp() + ":" + entry.getKey().getPort()
                                + "/Peer/REST_API/search");
                        connection = (HttpURLConnection) url.openConnection();
                        connection.setRequestMethod("POST");
                        // Fix: a request body requires doOutput=true; without it
                        // getOutputStream() throws a ProtocolException.
                        connection.setDoOutput(true);
                        connection.setRequestProperty("Content-Type", "application/xml");
                        OutputStream os = connection.getOutputStream();
                        JAXBContext jaxbContext = JAXBContext.newInstance(FingerprintSearchRequest.class);
                        jaxbContext.createMarshaller().marshal(new FingerprintSearchRequest(clientInfo, fingerprint), os);
                        os.flush();
                        os.close();
                        logStream(connection.getErrorStream());
                        logStream(connection.getInputStream());
                    } catch (Exception e) {
                        Logger.getLogger(PeerManager.class.getName()).log(Level.ERROR, e);
                    } finally {
                        // Fix: always release the connection, also on errors
                        if (connection != null) {
                            connection.disconnect();
                        }
                    }
                } else {
                    entries.remove(); // safe removal during iteration
                }
            }
        } else {
            // just ignore request — only the super peer forwards searches
        }
    }

    /**
     * Registers with the central server and then either starts super-peer
     * heartbeating (if the server elected us) or joins the existing ring and
     * starts watching the super peer's liveness.
     */
    public void registerToServer() {
        if (serverCommunicationManager != null) {
            peerInfo = serverCommunicationManager.registerToServer(peerInfo);
            Logger.getLogger(PeerManager.class).log(Level.INFO,
                    "Retrieved peerID: " + peerInfo.getPeerID() + " and superPeerID " + peerInfo.getSuperPeerID());
        }
        // if I am first in Ring, server will return my info as SuperPeer
        // if I am superPeer, start behavior for sending heartbeat with my Info
        if (peerInfo.getSuperPeerID().equals(peerInfo.getPeerID())) {
            if (checkForSuperPeerRefreshRate != null) {
                checkForSuperPeerRefreshRate.cancel();
            }
            Timer heartbeatTimer = new Timer();
            // schedule at 1.5 seconds
            heartbeatTimer.schedule(broadcastSuperPeerInfoHeartbeat(), 0, 1500);
            Logger.getLogger(PeerManager.class).log(Level.INFO, "I am super peer and starting heartbeat");
        } else {
            // else send its fingerprints to SuperPeer by joining ring
            Logger.getLogger(PeerManager.class).log(Level.INFO, "I am joining peer network ");
            joinPeerNetwork();
            // schedule at 2 seconds interval; only check super-peer liveness
            // if I am NOT the super peer. Fix: create a fresh TimerTask — a
            // task instance cannot be (re)scheduled after use/cancel.
            checkForSuperPeerRefreshRate = new Timer();
            checkAlivePeriodSuperPeer = checkSuperPeerAliveTask();
            checkForSuperPeerRefreshRate.schedule(checkAlivePeriodSuperPeer, 0, 2000);
        }
    }

    /**
     * Sends its information (PeerInfo and Fingerprints) to the SuperPeer, and
     * receives Ring Information.
     */
    @Override
    public void joinPeerNetwork() {
        // check if SuperPeer is alive
        boolean isAlive = NetworkUtil.checkIfPortOpen(peerInfo.getSuperPeerIp(), peerInfo.getSuperPeerPort());
        if (isAlive) {
            HttpURLConnection connection = null;
            try {
                URL url = new URL("http://" + peerInfo.getSuperPeerIp() + ":" + peerInfo.getSuperPeerPort()
                        + "/Peer/REST_API/fingerprints");
                connection = (HttpURLConnection) url.openConnection();
                connection.setRequestMethod("PUT");
                connection.setRequestProperty("Content-Type", "application/xml");
                connection.setRequestProperty("Accept", "application/xml");
                connection.setDoOutput(true);
                OutputStream os = connection.getOutputStream();
                JAXBContext jaxbContext = JAXBContext.newInstance(PeerFingerprintInformation.class);
                PeerFingerprintInformation fingerprintInformation = new PeerFingerprintInformation();
                fingerprintInformation.setPeerInfo(peerInfo);
                fingerprintInformation.setFingerprints(this.fingerprintExtractorAndManager.getKnownFingerprints());
                jaxbContext.createMarshaller().marshal(fingerprintInformation, os);
                os.flush();
                os.close();
                logStream(connection.getErrorStream());
                logStream(connection.getInputStream());
            } catch (Exception e) {
                Logger.getLogger(PeerManager.class.getName()).log(Level.ERROR, e);
                e.printStackTrace();
            } finally {
                if (connection != null) {
                    connection.disconnect();
                }
            }
        } else {
            Logger.getLogger(this.getClass()).log(Level.ERROR, "SuperPeer " + peerInfo.getSuperPeerIp() + ":"
                    + peerInfo.getSuperPeerPort() + " is not responding. Unable to register");
        }
    }

    /**
     * Bully-style election: prunes dead peers from the ring, then the peer
     * with the largest ID (including this one) becomes super peer. If this
     * peer wins it notifies the server and starts heartbeating; otherwise it
     * re-joins the ring under the new super peer.
     */
    @Override
    public void performLeaderElection() {
        // refresh ring by checking who is still alive.
        // Fix: the probe threads previously removed entries from the shared
        // HashMap concurrently; now they only record dead peers and removal
        // happens on this thread after all probes have joined.
        final List<PeerInfo> deadPeers = Collections.synchronizedList(new ArrayList<PeerInfo>());
        List<Thread> ringRefreshThreads = new ArrayList<Thread>();
        for (final PeerInfo info : peerRing.keySet()) {
            Thread thread = new Thread() {
                public void run() {
                    // check if Peer is alive (if port is not closed)
                    if (!NetworkUtil.checkIfPortOpen(info.getIp(), info.getPort())) {
                        deadPeers.add(info);
                    }
                }
            };
            ringRefreshThreads.add(thread);
        }
        // start threads
        for (Thread t : ringRefreshThreads) {
            t.setDaemon(true);
            t.start();
        }
        // wait for threads to finish
        for (Thread t : ringRefreshThreads) {
            try {
                t.join();
            } catch (InterruptedException e) {
                // preserve the interrupt status for callers
                Thread.currentThread().interrupt();
                Logger.getLogger(PeerManager.class.getName()).log(Level.ERROR, e);
            }
        }
        for (PeerInfo dead : deadPeers) {
            peerRing.remove(dead);
        }

        // get peer with largest ID (this peer included)
        PeerInfo largestPeer = this.peerInfo;
        for (PeerInfo info : peerRing.keySet()) {
            if (largestPeer.getPeerID().compareTo(info.getPeerID()) < 0) {
                largestPeer = info;
            }
        }

        // updating superPeer
        peerInfo.setSuperPeerID(largestPeer.getPeerID());
        peerInfo.setSuperPeerPort(largestPeer.getPort());
        peerInfo.setSuperPeerIp(largestPeer.getIp());

        // if I am superPeer, start behavior for sending heartbeat with my Info
        if (peerInfo.getSuperPeerID().equals(peerInfo.getPeerID())) {
            if (checkAlivePeriodSuperPeer != null) {
                checkAlivePeriodSuperPeer.cancel();
            }
            Logger.getLogger(PeerManager.class).log(Level.INFO, "I am super peer ");
            serverCommunicationManager.notifyServerIAmSuperPeer(peerInfo);
            // schedule at 1.5 seconds
            Timer heartbeatTimer = new Timer();
            heartbeatTimer.schedule(broadcastSuperPeerInfoHeartbeat(), 0, 1500);
        } else {
            // else send its fingerprints to SuperPeer by joining ring
            Logger.getLogger(PeerManager.class).log(Level.INFO, "I am joining peer network ");
            joinPeerNetwork();
        }
    }

    /**
     * Used to update the fingerprint information about the other peers in the
     * ring (called when the super peer's heartbeat arrives).
     */
    @Override
    public void updateRingInformation(PeerRingInformation peerRingInformation) {
        PeerInfo superPeerInfo = peerRingInformation.getSuperPeerInfo();
        // updating superPeer
        peerInfo.setSuperPeerID(superPeerInfo.getPeerID());
        peerInfo.setSuperPeerPort(superPeerInfo.getPort());
        peerInfo.setSuperPeerIp(superPeerInfo.getIp());

        latestSuperPeerRefreshMade = new Date();

        // as this is not super peer, it does not store the fingerprints for now
        for (PeerInfo info : peerRingInformation.getPeerRing()) {
            peerRing.put(info, new ArrayList<Fingerprint>());
        }
    }

    /**
     * Broadcasts this (super) peer's ring view to every other peer in the
     * ring, one daemon thread per target so a slow peer does not stall the rest.
     */
    @Override
    public void broadcastSuperPeerInfoHeartBeat() {
        final PeerRingInformation peerRingInformation = new PeerRingInformation();
        peerRingInformation.setSuperPeerInfo(peerInfo);
        peerRingInformation.addPeerRing(peerRing.keySet());

        List<Thread> sendSuperPeerHeartbeatThreads = new ArrayList<Thread>();
        for (final PeerInfo info : peerRing.keySet()) {
            // never send the heartbeat to ourselves
            if (info.getPeerID().equals(peerInfo.getPeerID())) {
                continue;
            }
            Thread thread = new Thread() {
                public void run() {
                    // call Peer RESTful API
                    HttpURLConnection connection = null;
                    try {
                        URL url = new URL("http://" + info.getIp() + ":" + info.getPort()
                                + "/Peer/REST_API/updateRingInformation");
                        connection = (HttpURLConnection) url.openConnection();
                        connection.setDoOutput(true);
                        connection.setInstanceFollowRedirects(false);
                        connection.setRequestMethod("PUT");
                        connection.setRequestProperty("Content-Type", "application/xml");
                        // write message body
                        OutputStream os = connection.getOutputStream();
                        JAXBContext jaxbContext = JAXBContext.newInstance(PeerRingInformation.class);
                        jaxbContext.createMarshaller().marshal(peerRingInformation, os);
                        os.flush();
                        os.close();
                        logStream(connection.getErrorStream());
                        logStream(connection.getInputStream());
                    } catch (Exception e) {
                        Logger.getLogger(PeerManager.class).log(Level.ERROR, e);
                    } finally {
                        // Fix: the original never disconnected these connections
                        if (connection != null) {
                            connection.disconnect();
                        }
                    }
                }
            };
            sendSuperPeerHeartbeatThreads.add(thread);
        }
        // start threads
        for (Thread t : sendSuperPeerHeartbeatThreads) {
            t.setDaemon(true);
            t.start();
        }
    }

    /** Stores (a defensive copy of) a sub-peer's fingerprint list in the ring. */
    @Override
    public void updatePeerInformation(PeerFingerprintInformation fingerprintInformation) {
        this.peerRing.put(fingerprintInformation.getPeerInfo(),
                new ArrayList<Fingerprint>(fingerprintInformation.getFingerprints()));
    }

    @Override
    public PeerInfo getCurrentPeerInformation() {
        return peerInfo;
    }
}
package opennlp.tools.ngram;

import java.io.DataInputStream;
import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import java.util.zip.GZIPInputStream;

import opennlp.tools.util.NumberedSet;

/**
 * This class allows for the loading of n-gram dictionaries to facilitate
 * feature generation for n-gram based models.
 * @see MutableDictionary
 * @author Tom Morton
 */
public class Dictionary {

  public static String FILE_TYPE = "dict";

  /** Mapping between words and a unique integer assigned to each word. **/
  protected NumberedSet wordMap;
  /** Set which stores all n-grams. */
  protected Set gramSet;
  protected int cutoff;
  protected NGramFactory nGramFactory;

  protected Dictionary() {}

  /**
   * Constructor used to load a previously created dictionary from the
   * specified dictionary file (a GZIP-compressed stream of a type tag, the
   * word table, and the n-gram records).
   * @param dictionaryFile A file storing a dictionary.
   * @throws IOException if the file cannot be read or is truncated.
   */
  public Dictionary(String dictionaryFile) throws IOException {
    DataInputStream input = new DataInputStream(new GZIPInputStream(new FileInputStream(new File(dictionaryFile))));
    try {
      input.readUTF(); // skip the file-type tag at the head of the stream
      int numWords = input.readInt();
      System.err.println("Reading: " + numWords + " words");
      wordMap = new NumberedSet(numWords);
      for (int wi = 0; wi < numWords; wi++) {
        String word = input.readUTF();
        int index = input.readInt();
        wordMap.setIndex(word, index);
      }
      loadGrams(input);
    }
    finally {
      // fix: the stream was previously never closed (resource leak)
      input.close();
    }
    nGramFactory = new NGramFactory(wordMap);
  }

  /**
   * Reads (length, word-index...) records into {@link #gramSet} until the
   * stream ends; end-of-stream is the only termination marker in the format.
   */
  protected void loadGrams(DataInputStream input) throws IOException {
    gramSet = new HashSet();
    try {
      while (true) {
        int gramLength = input.readInt();
        int[] words = new int[gramLength];
        for (int wi = 0; wi < gramLength; wi++) {
          words[wi] = input.readInt();
        }
        gramSet.add(new NGram(words));
      }
    }
    catch (EOFException e) {
      // expected: EOF signals the end of the gram section, not an error
    }
  }

  /**
   * Returns true when the given word sequence is in this dictionary: a single
   * word is looked up in the word table, longer sequences in the n-gram set.
   */
  public boolean contains(String[] words) {
    if (words.length == 1) {
      return wordMap.contains(words[0]);
    }
    else {
      NGram ngram = nGramFactory.createNGram(words);
      // null means at least one word is unknown, so the n-gram cannot be present
      if (ngram == null) {
        return false;
      }
      else {
        return gramSet.contains(ngram);
      }
    }
  }

  /** Iterates over all entries: first the words, then the n-grams. */
  public Iterator iterator() {
    return new DictionaryIterator(this);
  }

  /** Dumps the contents of the dictionary file given as the sole argument. */
  public static void main(String[] args) throws IOException {
    if (args.length == 0) {
      System.err.println("Usage: Dictionary dictionary_file");
      System.exit(0);
    }
    Dictionary dict = new Dictionary(args[0]);
    for (Iterator di = dict.iterator(); di.hasNext();) {
      System.out.println(di.next());
    }
  }
}

/**
 * Iterator over a {@link Dictionary}: yields "word=index" strings for every
 * word, then comma-joined word-index lists for every n-gram.
 */
class DictionaryIterator implements Iterator {

  Iterator wordIterator;
  Iterator gramIterator;
  // true while still consuming the word iterator, false once on the grams
  boolean onWords;
  String[] words;
  Dictionary dict;

  public DictionaryIterator(Dictionary dict) {
    wordIterator = dict.wordMap.iterator();
    gramIterator = dict.gramSet.iterator();
    onWords = true;
    this.dict = dict;
  }

  public boolean hasNext() {
    if (onWords) {
      if (wordIterator.hasNext()) {
        return true;
      }
      else {
        // words exhausted; fall through to the gram iterator from now on
        onWords = false;
      }
    }
    return gramIterator.hasNext();
  }

  public Object next() {
    if (onWords) {
      String word = (String) wordIterator.next();
      return word + "=" + dict.wordMap.getIndex(word);
    }
    else {
      int[] gramInts = ((NGram) gramIterator.next()).getWords();
      StringBuffer sb = new StringBuffer();
      for (int gi = 0; gi < gramInts.length; gi++) {
        sb.append(gramInts[gi]).append(",");
      }
      sb.setLength(sb.length() - 1); // drop the trailing comma
      return sb.toString();
    }
  }

  public void remove() {
    throw new UnsupportedOperationException("DictionaryIterator does not allow removal");
  }
}
package org.apereo.cas.support.pac4j.web.flow;

import org.apereo.cas.CasProtocolConstants;
import org.apereo.cas.CentralAuthenticationService;
import org.apereo.cas.authentication.Authentication;
import org.apereo.cas.authentication.AuthenticationManager;
import org.apereo.cas.authentication.AuthenticationResult;
import org.apereo.cas.authentication.AuthenticationResultBuilder;
import org.apereo.cas.authentication.AuthenticationSystemSupport;
import org.apereo.cas.authentication.AuthenticationTransaction;
import org.apereo.cas.authentication.AuthenticationTransactionManager;
import org.apereo.cas.authentication.CoreAuthenticationTestUtils;
import org.apereo.cas.authentication.principal.Service;
import org.apereo.cas.services.RegisteredServiceTestUtils;
import org.apereo.cas.ticket.ExpirationPolicy;
import org.apereo.cas.ticket.TicketGrantingTicket;
import org.apereo.cas.ticket.TicketGrantingTicketImpl;
import org.junit.Test;
import org.pac4j.core.client.Clients;
import org.pac4j.core.context.WebContext;
import org.pac4j.core.exception.HttpAction;
import org.pac4j.oauth.client.FacebookClient;
import org.pac4j.oauth.client.TwitterClient;
import org.pac4j.oauth.credentials.OAuth20Credentials;
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.mock.web.MockHttpServletResponse;
import org.springframework.mock.web.MockHttpSession;
import org.springframework.web.servlet.i18n.LocaleChangeInterceptor;
import org.springframework.web.servlet.theme.ThemeChangeInterceptor;
import org.springframework.webflow.context.servlet.ServletExternalContext;
import org.springframework.webflow.core.collection.MutableAttributeMap;
import org.springframework.webflow.execution.Event;
import org.springframework.webflow.test.MockRequestContext;

import java.util.Set;

import static org.junit.Assert.*;
import static org.mockito.Mockito.*;

/**
 * This class tests the {@link DelegatedClientAuthenticationAction} class.
 *
 * @author Jerome Leleu
 * @since 3.5.2
 */
public class DelegatedClientAuthenticationActionTests {

    private static final String TGT_NAME = "ticketGrantingTicketId";
    private static final String TGT_ID = "TGT-00-xxxxxxxxxxxxxxxxxxxxxxxxxx.cas0";

    private static final String MY_KEY = "my_key";
    private static final String MY_SECRET = "my_secret";
    private static final String MY_LOGIN_URL = "http://casserver/login";
    private static final String MY_SERVICE = "http://myservice";
    private static final String MY_THEME = "my_theme";
    private static final String MY_LOCALE = "fr";
    private static final String MY_METHOD = "POST";

    /**
     * With no client-name parameter on the request, the action must return the
     * "error" event, stash theme/locale/method in the session, and publish one
     * login-page configuration per registered client into the flow scope.
     */
    @Test
    public void verifyStartAuthentication() throws Exception {
        final MockHttpServletResponse mockResponse = new MockHttpServletResponse();
        final MockHttpServletRequest mockRequest = new MockHttpServletRequest();
        mockRequest.setParameter(ThemeChangeInterceptor.DEFAULT_PARAM_NAME, MY_THEME);
        mockRequest.setParameter(LocaleChangeInterceptor.DEFAULT_PARAM_NAME, MY_LOCALE);
        mockRequest.setParameter(CasProtocolConstants.PARAMETER_METHOD, MY_METHOD);

        final MockHttpSession mockSession = new MockHttpSession();
        mockRequest.setSession(mockSession);

        final ServletExternalContext servletExternalContext = mock(ServletExternalContext.class);
        when(servletExternalContext.getNativeRequest()).thenReturn(mockRequest);
        when(servletExternalContext.getNativeResponse()).thenReturn(mockResponse);

        final MockRequestContext mockRequestContext = new MockRequestContext();
        mockRequestContext.setExternalContext(servletExternalContext);
        mockRequestContext.getFlowScope().put(CasProtocolConstants.PARAMETER_SERVICE,
                RegisteredServiceTestUtils.getService(MY_SERVICE));

        final FacebookClient facebookClient = new FacebookClient(MY_KEY, MY_SECRET);
        final TwitterClient twitterClient =
                new TwitterClient("3nJPbVTVRZWAyUgoUKQ8UA", "h6LZyZJmcW46Vu8R47MYfeXTSYGI30EqnWaSwVhFkbA");
        final Clients clients = new Clients(MY_LOGIN_URL, facebookClient, twitterClient);

        final DelegatedClientAuthenticationAction action = new DelegatedClientAuthenticationAction(clients,
                null,
                mock(CentralAuthenticationService.class),
                "theme", "locale", false);

        final Event event = action.execute(mockRequestContext);
        assertEquals("error", event.getId());
        assertEquals(MY_THEME, mockSession.getAttribute(ThemeChangeInterceptor.DEFAULT_PARAM_NAME));
        assertEquals(MY_LOCALE, mockSession.getAttribute(LocaleChangeInterceptor.DEFAULT_PARAM_NAME));
        assertEquals(MY_METHOD, mockSession.getAttribute(CasProtocolConstants.PARAMETER_METHOD));
        final MutableAttributeMap flowScope = mockRequestContext.getFlowScope();
        final Set<DelegatedClientAuthenticationAction.ProviderLoginPageConfiguration> urls =
                (Set<DelegatedClientAuthenticationAction.ProviderLoginPageConfiguration>)
                        flowScope.get(DelegatedClientAuthenticationAction.PAC4J_URLS);
        assertFalse(urls.isEmpty());
        // fix: assertEquals gives an expected-vs-actual failure message,
        // unlike the original assertTrue(urls.size() == 2)
        assertEquals(2, urls.size());
    }

    /**
     * With a client-name parameter present, the action must complete the
     * delegated authentication: create a TGT, expose it in flow and request
     * scope, and restore theme/locale/method/service as request attributes.
     */
    @Test
    public void verifyFinishAuthentication() throws Exception {
        final MockHttpServletRequest mockRequest = new MockHttpServletRequest();
        mockRequest.setParameter(Clients.DEFAULT_CLIENT_NAME_PARAMETER, "FacebookClient");

        final MockHttpSession mockSession = new MockHttpSession();
        mockSession.setAttribute(ThemeChangeInterceptor.DEFAULT_PARAM_NAME, MY_THEME);
        mockSession.setAttribute(LocaleChangeInterceptor.DEFAULT_PARAM_NAME, MY_LOCALE);
        mockSession.setAttribute(CasProtocolConstants.PARAMETER_METHOD, MY_METHOD);
        final Service service = CoreAuthenticationTestUtils.getService(MY_SERVICE);
        mockSession.setAttribute(CasProtocolConstants.PARAMETER_SERVICE, service);
        mockRequest.setSession(mockSession);

        final ServletExternalContext servletExternalContext = mock(ServletExternalContext.class);
        when(servletExternalContext.getNativeRequest()).thenReturn(mockRequest);
        when(servletExternalContext.getNativeResponse()).thenReturn(new MockHttpServletResponse());

        final MockRequestContext mockRequestContext = new MockRequestContext();
        mockRequestContext.setExternalContext(servletExternalContext);

        // stub out the OAuth round-trip so no network access is needed
        final FacebookClient facebookClient = new FacebookClient() {
            @Override
            protected OAuth20Credentials retrieveCredentials(final WebContext context) throws HttpAction {
                return new OAuth20Credentials("fakeVerifier", FacebookClient.class.getSimpleName());
            }
        };
        facebookClient.setName(FacebookClient.class.getSimpleName());
        final Clients clients = new Clients(MY_LOGIN_URL, facebookClient);

        final TicketGrantingTicket tgt =
                new TicketGrantingTicketImpl(TGT_ID, mock(Authentication.class), mock(ExpirationPolicy.class));
        final CentralAuthenticationService casImpl = mock(CentralAuthenticationService.class);
        when(casImpl.createTicketGrantingTicket(any(AuthenticationResult.class))).thenReturn(tgt);
        final AuthenticationTransactionManager transManager = mock(AuthenticationTransactionManager.class);
        final AuthenticationManager authNManager = mock(AuthenticationManager.class);
        when(authNManager.authenticate(any(AuthenticationTransaction.class)))
                .thenReturn(CoreAuthenticationTestUtils.getAuthentication());

        when(transManager.getAuthenticationManager()).thenReturn(authNManager);
        when(transManager.handle(any(AuthenticationTransaction.class),
                any(AuthenticationResultBuilder.class))).thenReturn(transManager);

        final AuthenticationSystemSupport support = mock(AuthenticationSystemSupport.class);
        when(support.getAuthenticationTransactionManager()).thenReturn(transManager);

        final DelegatedClientAuthenticationAction action = new DelegatedClientAuthenticationAction(clients,
                support,
                casImpl,
                "theme", "locale", false);

        final Event event = action.execute(mockRequestContext);
        assertEquals("success", event.getId());
        assertEquals(MY_THEME, mockRequest.getAttribute(ThemeChangeInterceptor.DEFAULT_PARAM_NAME));
        assertEquals(MY_LOCALE, mockRequest.getAttribute(LocaleChangeInterceptor.DEFAULT_PARAM_NAME));
        assertEquals(MY_METHOD, mockRequest.getAttribute(CasProtocolConstants.PARAMETER_METHOD));
        assertEquals(MY_SERVICE, mockRequest.getAttribute(CasProtocolConstants.PARAMETER_SERVICE));
        final MutableAttributeMap flowScope = mockRequestContext.getFlowScope();
        final MutableAttributeMap requestScope = mockRequestContext.getRequestScope();
        assertEquals(service, flowScope.get(CasProtocolConstants.PARAMETER_SERVICE));
        assertEquals(TGT_ID, flowScope.get(TGT_NAME));
        assertEquals(TGT_ID, requestScope.get(TGT_NAME));
    }
}
package org.spideruci.tacoco;

import static org.spideruci.tacoco.cli.AbstractCli.DB;
import static org.spideruci.tacoco.cli.AbstractCli.HELP;
import static org.spideruci.tacoco.cli.AbstractCli.HOME;
import static org.spideruci.tacoco.cli.AbstractCli.INST;
import static org.spideruci.tacoco.cli.AbstractCli.INST_ARGS;
import static org.spideruci.tacoco.cli.AbstractCli.LANUCHER_CLI;
import static org.spideruci.tacoco.cli.AbstractCli.OUTDIR;
import static org.spideruci.tacoco.cli.AbstractCli.PROJECT;
import static org.spideruci.tacoco.cli.AbstractCli.SUT;
import static org.spideruci.tacoco.cli.AbstractCli.PIT;
import static org.spideruci.tacoco.cli.LauncherCli.readArgumentValue;
import static org.spideruci.tacoco.cli.LauncherCli.readOptionalArgumentValue;

import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

import org.spideruci.tacoco.AbstractBuildProbe.Child;
import org.spideruci.tacoco.db.CreateSQLiteDB;

/**
 * Command-line launcher for Tacoco. Resolves the system under test (SUT) from
 * the CLI arguments, assembles the combined Tacoco + SUT classpath, and runs
 * the JUnit runner for the SUT and each of its child modules. Optionally dumps
 * coverage into a SQLite database (when the {@code DB} property is set) and
 * runs a PIT mutation-testing pass (when the {@code PIT} property is set).
 */
public class TacocoLauncher {

    private final String tacocoHome;
    private final String targetDir;
    /** Cached Tacoco runtime classpath; computed at most once per JVM. */
    private static String tacocoClasspath = null;

    private TacocoLauncher(String tacocoHome, String targetDir) {
        this.tacocoHome = tacocoHome;
        this.targetDir = targetDir;
    }

    /**
     * Entry point. Reads the SUT location ({@code SUT}, required), Tacoco home
     * ({@code HOME}, defaults to the working directory) and project name
     * ({@code PROJECT}, defaults to the probe's id), then launches one runner
     * per child module followed by the parent project itself.
     */
    public static void main(String[] args) throws Exception {
        if (System.getProperties().containsKey(HELP)) {
            // NOTE(review): execution continues after printHelp(); presumably
            // printHelp() exits the JVM itself — confirm in AbstractCli.
            LANUCHER_CLI.printHelp();
        }

        String targetDir = readArgumentValue(SUT);
        // BUG FIX: the original called substring(0, targetDir.length()), which is
        // a no-op, so a trailing '/' was never actually removed.
        if (targetDir.endsWith("/")) {
            targetDir = targetDir.substring(0, targetDir.length() - 1);
        }

        TacocoLauncher launcher =
                new TacocoLauncher(readOptionalArgumentValue(HOME, System.getProperty("user.dir")), targetDir);
        AbstractBuildProbe probe = AbstractBuildProbe.getInstance(launcher.targetDir);
        String name = readOptionalArgumentValue(PROJECT, probe.getId());
        launcher.setTacocoEnv();

        String parentCP = launcher.getTacocoClasspath() + ":" + probe.getClasspath();

        // Multi-module builds: run each child first, prefixing its id with the
        // parent project name so output files do not collide.
        if (probe.hasChild()) {
            for (Child child : probe.getChildren()) {
                launcher.startJUnitRunner(name + "." + child.id, child.classpath + ":" + parentCP,
                        child.targetDir, child.jvmArgs, probe);
            }
        }
        launcher.startJUnitRunner(name, parentCP, launcher.targetDir, null, probe);
    }

    /**
     * Prepares output artifacts and the JaCoCo-instrumented JVM command line for
     * one module, then optionally dumps coverage to SQLite and runs PIT.
     *
     * @param id      unique id for this run; names the .exec/.err/.log files
     * @param jvmArgs currently unused (kept for interface compatibility with callers)
     *
     * NOTE(review): the actual process launch is commented out in the original
     * source; that behavior is preserved — the command is assembled but never
     * started, so the DB dump below operates on a pre-existing .exec file.
     */
    private void startJUnitRunner(String id, String classpath, String targetDir, String[] jvmArgs,
            AbstractBuildProbe probe) {
        String outdir = readOptionalArgumentValue(OUTDIR, tacocoHome + "/tacoco_output");
        if (!new File(outdir).exists()) new File(outdir).mkdirs();

        // Delete stale artifacts from a previous execution so results are not mixed.
        File exec = new File(outdir, id + ".exec");
        File err = new File(outdir, id + ".err");
        File log = new File(outdir, id + ".log");
        if (exec.exists()) exec.delete();
        if (err.exists()) err.delete();
        if (log.exists()) log.delete();

        final String instrumenterLocation = readOptionalArgumentValue(INST,
                tacocoHome + "/lib/org.jacoco.agent-0.7.4.201502262128-runtime.jar");
        final String instrumentedArgs = readOptionalArgumentValue(INST_ARGS,
                "destfile=" + outdir + "/" + id + ".exec" + ",dumponexit=false");
        InstrumenterConfig jacocoConfig = InstrumenterConfig.get(instrumenterLocation, instrumentedArgs);

        ProcessBuilder builder = new ProcessBuilder(
                "java", "-cp", classpath,
                jacocoConfig.getMemory(),
                jacocoConfig.buildJavaagentOpt(),
                "-Dtacoco.sut=" + targetDir,
                "-Dtacoco.output=" + outdir,
                "-Dtacoco.log=off",
                "-Dtacoco.thread=" + 1,
                "org.spideruci.tacoco.JUnitRunner");
        builder.directory(new File(targetDir));
        // BUG FIX: the original set redirectOutput(INHERIT) and then immediately
        // overwrote it with redirectOutput(log); the INHERIT call was dead code
        // (ProcessBuilder keeps only the last redirect) and has been removed.
        builder.redirectError(err);
        builder.redirectOutput(log);
        /*
        final Process p;
        try {
            p = builder.start();
            Runtime.getRuntime().addShutdownHook(new Thread() {
                public void run() {
                    p.destroy();
                }
            });
            p.waitFor();
        } catch (Exception e) {
            e.printStackTrace();
        }
        */

        // Optional: convert the JaCoCo .exec file into a SQLite database.
        String dbFile = outdir + "/" + id + ".db";
        if (System.getProperties().containsKey(DB)) {
            try {
                CreateSQLiteDB.dump(dbFile, targetDir, exec.toString());
            } catch (Exception e) {
                e.printStackTrace();
            }
        }

        // Optional: PIT mutation testing pass over the same module.
        if (System.getProperties().containsKey(PIT)) {
            runPit(id, classpath, targetDir, probe, outdir);
        }
    }

    /**
     * Runs the PIT command-line mutation-coverage tool over the module,
     * writing XML reports to {@code outdir/id} and process output to
     * {@code id.pit.log} / {@code id.pit.err}.
     */
    private void runPit(String id, String classpath, String targetDir, AbstractBuildProbe probe, String outdir) {
        // Comma-separated class lists for PIT's --targetClasses/--targetTests.
        // NOTE(review): both lists end with a trailing comma, as in the original.
        StringBuilder testClasses = new StringBuilder();
        StringBuilder classes = new StringBuilder();
        for (String s : probe.getTestClasses()) {
            testClasses.append(s).append(",");
        }
        for (String s : probe.getClasses()) {
            classes.append(s).append(",");
        }

        String pitPath = this.tacocoHome + "/lib/pitest-command-line-1.1.7.jar:"
                + this.tacocoHome + "/lib/pitest-1.1.7.jar";

        File err = new File(outdir, id + ".pit.err");
        File log = new File(outdir, id + ".pit.log");
        if (err.exists()) err.delete();
        if (log.exists()) log.delete();

        ProcessBuilder pitRunner = new ProcessBuilder(
                "java", "-cp", classpath + ":" + pitPath,
                "org.pitest.mutationtest.commandline.MutationCoverageReport",
                "--reportDir=" + outdir + "/" + id,
                "--targetClasses=" + classes,
                "--targetTests=" + testClasses,
                "--sourceDirs=" + targetDir + "/src",
                "--outputFormats=XML");
        pitRunner.directory(new File(targetDir));
        pitRunner.redirectError(err);
        pitRunner.redirectOutput(log);

        final Process pit;
        try {
            pit = pitRunner.start();
            // Kill the PIT child process if this JVM is terminated early.
            Runtime.getRuntime().addShutdownHook(new Thread() {
                public void run() {
                    pit.destroy();
                }
            });
            pit.waitFor();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Computes (and caches) the Tacoco runtime classpath. When cp.txt does not
     * exist under tacocoHome it is generated by Maven's dependency:build-classpath
     * run inside tacocoHome.
     */
    private String getTacocoClasspath() throws Exception {
        if (tacocoClasspath != null) return tacocoClasspath;
        if (!new File(tacocoHome + "/cp.txt").exists()) {
            ProcessBuilder builder = new ProcessBuilder(
                    "/usr/bin/mvn", "dependency:build-classpath", "-Dmdep.outputFile=cp.txt").inheritIO();
            builder.directory(new File(tacocoHome));
            Process p = builder.start();
            p.waitFor();
        }
        // BUG FIX: the original read Paths.get("cp.txt") relative to the current
        // working directory, while the file is created (and checked for) under
        // tacocoHome — the two only coincide when the launcher runs from tacocoHome.
        tacocoClasspath = new String(Files.readAllBytes(Paths.get(tacocoHome, "cp.txt")))
                + ":" + tacocoHome + "/target/classes";
        return tacocoClasspath;
    }

    /**
     * Copies Tacoco's dependency jars (including the JaCoCo agent) from the local
     * Maven repository into tacocoHome/lib; no-op if lib already exists.
     * NOTE(review): this uses /usr/local/bin/mvn while getTacocoClasspath() uses
     * /usr/bin/mvn — at most one path is correct on a given machine; consider
     * resolving "mvn" from PATH instead.
     */
    private void setTacocoEnv() {
        if (new File(tacocoHome + "/lib").exists()) return;
        ProcessBuilder builder = new ProcessBuilder(
                "/usr/local/bin/mvn", "dependency:copy-dependencies", "-DoutputDirectory=lib").inheritIO();
        builder.directory(new File(tacocoHome));
        try {
            Process p = builder.start();
            p.waitFor();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
package org.spongepowered.mod;

import net.minecraft.launchwrapper.Launch;
import net.minecraftforge.common.ForgeVersion;
import net.minecraftforge.fml.common.Loader;
import net.minecraftforge.fml.relauncher.IFMLCallHook;
import net.minecraftforge.fml.relauncher.IFMLLoadingPlugin;
import org.spongepowered.asm.launch.MixinBootstrap;
import org.spongepowered.asm.mixin.MixinEnvironment;
import org.spongepowered.asm.mixin.MixinEnvironment.Phase;
import org.spongepowered.asm.mixin.extensibility.IEnvironmentTokenProvider;
import org.spongepowered.common.SpongeImpl;
import org.spongepowered.common.launch.SpongeLaunch;
import org.spongepowered.common.launch.transformer.SpongeSuperclassRegistry;

import java.io.File;
import java.util.Map;

/**
 * FML coremod entry point for SpongeForge on Minecraft 1.8.
 *
 * <p>All Mixin bootstrap work happens in the constructor so it runs before any
 * game classes are loaded. The registration order below is significant:
 * pre-init configs must be added before {@link SpongeLaunch#initialize()} and
 * the init/default configs after it — do not reorder casually.
 */
@IFMLLoadingPlugin.MCVersion("1.8")
public class SpongeCoremod implements IFMLLoadingPlugin {

    // Location of the coremod jar; populated by FML via injectData("coremodLocation").
    public static File modFile;

    /**
     * Exposes FORGE/FML build numbers as Mixin environment tokens so mixin
     * configurations can be applied conditionally on the runtime version.
     */
    public static final class TokenProvider implements IEnvironmentTokenProvider {

        @Override
        public int getPriority() {
            return IEnvironmentTokenProvider.DEFAULT_PRIORITY;
        }

        @Override
        public Integer getToken(String token, MixinEnvironment env) {
            if ("FORGE".equals(token)) {
                return Integer.valueOf(ForgeVersion.getBuildVersion());
            } else if ("FML".equals(token)) {
                // FML only publishes its version as a string; the build number is
                // the final dot-separated component.
                String fmlVersion = Loader.instance().getFMLVersionString();
                int build = Integer.parseInt(fmlVersion.substring(fmlVersion.lastIndexOf('.') + 1));
                return Integer.valueOf(build);
            }
            // Unknown token: return null so other providers may answer.
            return null;
        }

    }

    public SpongeCoremod() {
        // Let's get this party started
        MixinBootstrap.init();
        MixinEnvironment.setCompatibilityLevel(MixinEnvironment.CompatibilityLevel.JAVA_8);

        // Add pre-init mixins
        MixinEnvironment.getEnvironment(Phase.PREINIT)
            .addConfiguration("mixins.forge.preinit.json")
            .registerTokenProviderClass("org.spongepowered.mod.SpongeCoremod$TokenProvider");

        SpongeLaunch.initialize();
        SpongeImpl.getGlobalConfig(); // Load config

        MixinEnvironment.getEnvironment(Phase.INIT)
            .addConfiguration("mixins.forge.init.json")
            .registerTokenProviderClass("org.spongepowered.mod.SpongeCoremod$TokenProvider");

        // Add default mixins
        MixinEnvironment.getDefaultEnvironment()
            .addConfiguration("mixins.common.api.json")
            .addConfiguration("mixins.common.core.json")
            .addConfiguration("mixins.common.bungeecord.json")
            .addConfiguration("mixins.common.eulashutdown.json")
            .addConfiguration("mixins.common.timings.json")
            .addConfiguration("mixins.forge.core.json")
            .addConfiguration("mixins.forge.entityactivation.json")
            .registerTokenProviderClass("org.spongepowered.mod.SpongeCoremod$TokenProvider");

        // Classloader exclusions - TODO: revise when event pkg refactor reaches impl
        // These API types must stay on the parent classloader so both sides see
        // the same Class instances.
        Launch.classLoader.addClassLoaderExclusion("org.spongepowered.api.event.cause.CauseTracked");
        Launch.classLoader.addClassLoaderExclusion("org.spongepowered.api.event.cause.Cause");
        Launch.classLoader.addClassLoaderExclusion("org.spongepowered.api.event.cause.NamedCause");
        Launch.classLoader.addClassLoaderExclusion("org.spongepowered.api.event.Cancellable");
        Launch.classLoader.addClassLoaderExclusion("org.spongepowered.api.eventgencore.annotation.PropertySettings");

        // Transformer exclusions: these packages/classes must never be bytecode-
        // transformed (third-party libs, the transformers themselves, launch code).
        Launch.classLoader.addTransformerExclusion("ninja.leaping.configurate.");
        Launch.classLoader.addTransformerExclusion("org.apache.commons.lang3.");
        Launch.classLoader.addTransformerExclusion("org.spongepowered.mod.interfaces.IMixinEvent");
        Launch.classLoader.addTransformerExclusion("org.spongepowered.mod.asm.transformer.WorldGeneratorTransformer");
        Launch.classLoader.addTransformerExclusion("org.spongepowered.common.launch.");

        // Swap the declared superclass of AbstractAITask at load time.
        SpongeSuperclassRegistry.registerSuperclassModification("org.spongepowered.api.entity.ai.task.AbstractAITask",
                "org.spongepowered.common.entity.ai.SpongeEntityAICommonSuperclass");
    }

    @Override
    public String[] getASMTransformerClass() {
        return new String[] {
            "org.spongepowered.mod.asm.transformer.WorldGeneratorTransformer",
            "org.spongepowered.common.launch.transformer.SpongeSuperclassTransformer"
        };
    }

    @Override
    public String getModContainerClass() {
        return "org.spongepowered.mod.SpongeMod";
    }

    // No IFMLCallHook setup class is used.
    @Override
    public String getSetupClass() {
        return null;
    }

    /**
     * Called by FML with launch metadata. Registers the Mixin error handler when
     * running deobfuscated, and records the coremod jar location (falling back to
     * this class's own code source when FML supplies none, e.g. in a dev workspace).
     */
    @Override
    public void injectData(Map<String, Object> data) {
        // NOTE(review): unboxing throws NPE if "runtimeDeobfuscationEnabled" is
        // absent — presumably FML always supplies it; confirm before relying on it.
        if ((Boolean) data.get("runtimeDeobfuscationEnabled")) {
            MixinEnvironment.getDefaultEnvironment()
                .registerErrorHandlerClass("org.spongepowered.mod.mixin.handler.MixinErrorHandler");
        }
        modFile = (File) data.get("coremodLocation");
        if (modFile == null) modFile = new File(getClass().getProtectionDomain().getCodeSource().getLocation().getPath());
    }

    @Override
    public String getAccessTransformerClass() {
        return "org.spongepowered.mod.asm.transformer.SpongeAccessTransformer";
    }

}
package com.asi.ext.api.product.criteria.processor;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

import org.apache.log4j.Logger;
import org.springframework.util.StringUtils;

import com.asi.ext.api.exception.VelocityException;
import com.asi.ext.api.product.transformers.JerseyClientPost;
import com.asi.ext.api.product.transformers.ProductDataStore;
import com.asi.ext.api.response.JsonProcessor;
import com.asi.ext.api.service.model.Apparel;
import com.asi.ext.api.service.model.ShippingEstimate;
import com.asi.ext.api.service.model.Size;
import com.asi.ext.api.util.ApplicationConstants;
import com.asi.ext.api.util.CommonUtilities;
import com.asi.ext.api.util.ProductParserUtil;
import com.asi.ext.api.util.RestAPIProperties;
import com.asi.service.product.client.vo.CriteriaSetCodeValues;
import com.asi.service.product.client.vo.CriteriaSetValues;
import com.asi.service.product.client.vo.ProductCriteriaSets;
import com.asi.service.product.client.vo.ProductDetail;
import com.asi.service.product.client.vo.Value;

/**
 * Transforms a service-model {@link Size} into product criteria sets keyed by
 * ASI size-group criteria code (CAPS, DIMS, SABR, ...). Lookup responses from
 * the sizes web services are cached per-instance in the fields below.
 *
 * NOTE(review): this class also relies on members inherited from
 * SimpleCriteriaProcessor that are not visible in this chunk (at least
 * productDataStore and uniqueSetValueId) — confirm against the superclass.
 */
public class ProductSizeGroupProcessor extends SimpleCriteriaProcessor {

    private final static Logger LOGGER = Logger.getLogger(ProductSizeGroupProcessor.class.getName());

    // All criteria codes that denote a size group; used to recognize/clean up
    // size-related criteria sets.
    public final static String[] SIZE_GROUP_CRITERIACODES = { "CAPS", "DIMS", "SABR", "SAHU", "SAIT", "SANS", "SAWI",
        "SSNM", "SVWT", "SOTH" };

    // Cached raw web-service responses (fetched lazily, at most once each).
    private String sizesWSResponse = null;
    @SuppressWarnings("rawtypes")
    private HashMap sizeElementsResponse = null;
    private String sizesCriteriaWSResponse = null;
    private String sizesShippingDimsWSResponse = null;

    // Per-product context, refreshed on each getProductCriteriaSet() call.
    private String companyId = "0";
    private String configId = "0";
    private String criteriaSetId = "0";
    private String productId;

    /**
     * @param criteriaSetId id to assign to newly created criteria sets; may be
     *        replaced by an existing set's id during processing
     */
    public ProductSizeGroupProcessor(String criteriaSetId) {
        this.criteriaSetId = criteriaSetId;
    }

    /**
     * Converts {@code size} into a size-group criteria set and merges it into
     * {@code existingCriteriaSetMap} (keyed by criteria code). On any failure to
     * identify or read the size, an error is logged to the batch log for the
     * product's external id and the map is returned unchanged. When the size
     * produces no values, previously existing size-related sets are removed.
     *
     * @return the (possibly updated) criteria-set map
     */
    public Map<String, ProductCriteriaSets> getProductCriteriaSet(Size size, ProductDetail product,
        Map<String, ProductCriteriaSets> existingCriteriaSetMap, String configId) {
        this.productId = product.getID();
        this.companyId = product.getCompanyId();
        this.configId = configId;

        // Work out which single size-group criteria code this Size represents.
        String criteriaCode = findCriteriaCodeForSizeModel(size, product.getExternalProductId());
        if (CommonUtilities.isValueNull(criteriaCode)) {
            productDataStore.addErrorToBatchLogCollection(product.getExternalProductId(),
                ApplicationConstants.CONST_BATCH_ERR_INVALID_VALUE, "Unable to identify the given size details");
            return existingCriteriaSetMap;
        }
        // Flatten the Size model into the delimited string the parsers below expect.
        String sizeValues = ProductParserUtil.getSizeValuesFromSize(size, criteriaCode);
        if (CommonUtilities.isValueNull(sizeValues)) {
            productDataStore.addErrorToBatchLogCollection(product.getExternalProductId(),
                ApplicationConstants.CONST_BATCH_ERR_INVALID_VALUE, "Unable to read the given size details");
            return existingCriteriaSetMap;
        }
        ProductCriteriaSets tempCriteriaSet = getSizeCriteriaSet(sizeValues, criteriaCode,
            existingCriteriaSetMap.get(criteriaCode), product);
        if (tempCriteriaSet != null && tempCriteriaSet.getCriteriaSetValues() != null
            && !tempCriteriaSet.getCriteriaSetValues().isEmpty()) {
            // For the reference
            productDataStore.registerSizeGroupOfProduct(criteriaCode, product.getExternalProductId());
            ProductCriteriaSets exisitingCriteriaSet = existingCriteriaSetMap.get(criteriaCode);
            if (exisitingCriteriaSet == null) {
                // Size group changed: drop sets for any other size-group code first.
                existingCriteriaSetMap = removeSizeRelatedCriteriaSetFromExisting(existingCriteriaSetMap);
                exisitingCriteriaSet = compareAndUpdateSizeGroup(product, tempCriteriaSet, exisitingCriteriaSet);
            } else {
                exisitingCriteriaSet = compareAndUpdateSizeGroup(product, tempCriteriaSet, exisitingCriteriaSet);
            }
            if (exisitingCriteriaSet != null) {
                existingCriteriaSetMap.put(criteriaCode, exisitingCriteriaSet);
            }
        } else {
            // for clean up
            existingCriteriaSetMap = removeSizeRelatedCriteriaSetFromExisting(existingCriteriaSetMap);
        }
        return existingCriteriaSetMap;
    }
    /**
     * Returns true when {@code size} is null or carries none of the five
     * supported size sections (apparel, dimension, capacity, volume, other).
     */
    public boolean isSizeNull(Size size) {
        if (size == null
            || (size.getApparel() == null && size.getDimension() == null && size.getCapacity() == null
                && size.getVolume() == null && size.getOther() == null)) {
            return true;
        } else {
            return false;
        }
    }

    /**
     * Maps the populated section of {@code size} to its size-group criteria code.
     * Exactly one section is expected to be populated; when more than one is
     * found, a "Morethan one size group specified" error is logged against
     * {@code xid} and the first code found is returned.
     */
    private String findCriteriaCodeForSizeModel(Size size, String xid) {
        String criteriaCode = null;
        boolean sizeCodeFound = false;
        // Apparel: the concrete code depends on the apparel sub-type.
        if (size.getApparel() != null && size.getApparel().getType() != null && size.getApparel().getValues() != null
            && !size.getApparel().getValues().isEmpty()) {
            criteriaCode = findCriteriaCodeFromApparalType(size.getApparel());
            if (criteriaCode != null) {
                sizeCodeFound = true;
            }
        }
        if (size.getCapacity() != null && size.getCapacity().getValues() != null && !size.getCapacity().getValues().isEmpty()) {
            if (!sizeCodeFound) {
                criteriaCode = ApplicationConstants.CONST_SIZE_GROUP_CAPACITY;
                sizeCodeFound = true;
            } else {
                productDataStore.addErrorToBatchLogCollection(xid, ApplicationConstants.CONST_BATCH_ERR_INVALID_VALUE,
                    "Morethan one size group specified");
                return criteriaCode;
            }
        }
        if (size.getDimension() != null && size.getDimension().getValues() != null && !size.getDimension().getValues().isEmpty()) {
            if (!sizeCodeFound) {
                criteriaCode = ApplicationConstants.CONST_SIZE_GROUP_DIMENSION;
                sizeCodeFound = true;
            } else {
                productDataStore.addErrorToBatchLogCollection(xid, ApplicationConstants.CONST_BATCH_ERR_INVALID_VALUE,
                    "Morethan one size group specified");
                return criteriaCode;
            }
        }
        if (size.getVolume() != null && size.getVolume().getValues() != null && !size.getVolume().getValues().isEmpty()) {
            if (!sizeCodeFound) {
                criteriaCode = ApplicationConstants.CONST_SIZE_GROUP_SHIPPING_VOL_WEI;
                sizeCodeFound = true;
            } else {
                productDataStore.addErrorToBatchLogCollection(xid, ApplicationConstants.CONST_BATCH_ERR_INVALID_VALUE,
                    "Morethan one size group specified");
                return criteriaCode;
            }
        }
        if (size.getOther() != null && size.getOther().getValues() != null && !size.getOther().getValues().isEmpty()) {
            // NOTE(review): this branch does not set sizeCodeFound; harmless only
            // because it is the last section checked.
            if (!sizeCodeFound)
            {
                criteriaCode = ApplicationConstants.CONST_SIZE_OTHER_CODE;
            } else {
                productDataStore.addErrorToBatchLogCollection(xid, ApplicationConstants.CONST_BATCH_ERR_INVALID_VALUE,
                    "Morethan one size group specified");
                return criteriaCode;
            }
        }
        return criteriaCode;
    }

    /**
     * Maps an apparel sub-type string to its criteria code; null when the type
     * is not recognized.
     */
    private String findCriteriaCodeFromApparalType(Apparel apparel) {
        String criteriaCode = null;
        if (apparel.getType().equalsIgnoreCase(ApplicationConstants.CONST_STRING_APPAREL_INFANT_TODDLER)) {
            criteriaCode = ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_INF_TLDR;
        } else if (apparel.getType().equalsIgnoreCase(ApplicationConstants.CONST_STRING_APPAREL_DRESS_SHIRT_SIZES)) {
            criteriaCode = ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_DRS_SHRT_SIZE;
        } else if (apparel.getType().equalsIgnoreCase(ApplicationConstants.CONST_STRING_APPAREL_PANTS_SIZES)) {
            criteriaCode = ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_PANT_SIZE;
        } else if (apparel.getType().equalsIgnoreCase(ApplicationConstants.CONST_STRING_APPAREL_HOSIERY_UNIFORM)) {
            criteriaCode = ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_HSR_UNIFORM;
        } else if (apparel.getType().equalsIgnoreCase(ApplicationConstants.CONST_STRING_APPAREL_BRA_SIZES)) {
            criteriaCode = ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_BRA;
        } else if (apparel.getType().equalsIgnoreCase(ApplicationConstants.CONST_STRING_STANDARD_NUMBERED)) {
            criteriaCode = ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_STD_NUM;
        }
        return criteriaCode;
    }

    /**
     * Protected variant of {@link #findCriteriaCodeFromApparalType(Apparel)}.
     * NOTE(review): the body is an exact duplicate of the private method above —
     * candidate for consolidation.
     */
    protected String findSizeGroupApparalType(Apparel apparel) {
        String criteriaCode = null;
        if (apparel.getType().equalsIgnoreCase(ApplicationConstants.CONST_STRING_APPAREL_INFANT_TODDLER)) {
            criteriaCode = ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_INF_TLDR;
        } else if (apparel.getType().equalsIgnoreCase(ApplicationConstants.CONST_STRING_APPAREL_DRESS_SHIRT_SIZES)) {
            criteriaCode = ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_DRS_SHRT_SIZE;
        } else if (apparel.getType().equalsIgnoreCase(ApplicationConstants.CONST_STRING_APPAREL_PANTS_SIZES)) {
            criteriaCode = ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_PANT_SIZE;
        } else if (apparel.getType().equalsIgnoreCase(ApplicationConstants.CONST_STRING_APPAREL_HOSIERY_UNIFORM)) {
            criteriaCode = ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_HSR_UNIFORM;
        } else if (apparel.getType().equalsIgnoreCase(ApplicationConstants.CONST_STRING_APPAREL_BRA_SIZES)) {
            criteriaCode = ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_BRA;
        } else if (apparel.getType().equalsIgnoreCase(ApplicationConstants.CONST_STRING_STANDARD_NUMBERED)) {
            criteriaCode = ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_STD_NUM;
        }
        return criteriaCode;
    }

    /**
     * Builds the ProductCriteriaSets shell for the given size values. When a
     * matching set already exists, its id and flags are carried over (and this
     * processor's criteriaSetId field is updated to the existing id); otherwise
     * a fresh set with default "false" flags is created.
     */
    private ProductCriteriaSets getSizeCriteriaSet(String sizeValues, String sizeCriteriaCode,
        ProductCriteriaSets matchedCriteriaSet, ProductDetail product) {
        boolean checkForExisting = (matchedCriteriaSet != null);
        ProductCriteriaSets newCriteriaSet = new ProductCriteriaSets();
        if (!checkForExisting) {
            // Create new one
            newCriteriaSet = new ProductCriteriaSets();
            newCriteriaSet.setProductId(product.getID());
            newCriteriaSet.setConfigId(configId);
            newCriteriaSet.setCriteriaSetId(criteriaSetId);
            newCriteriaSet.setCriteriaCode(sizeCriteriaCode);
            newCriteriaSet.setCompanyId(companyId);
            newCriteriaSet.setIsBase(ApplicationConstants.CONST_STRING_FALSE_SMALL);
            newCriteriaSet.setIsRequiredForOrder(ApplicationConstants.CONST_STRING_FALSE_SMALL);
            newCriteriaSet.setIsDefaultConfiguration(ApplicationConstants.CONST_STRING_FALSE_SMALL);
        } else {
            // Reuse the existing set's identity and flags.
            this.criteriaSetId = matchedCriteriaSet.getCriteriaSetId();
            newCriteriaSet.setProductId(product.getID());
            newCriteriaSet.setConfigId(configId);
            newCriteriaSet.setCriteriaSetId(criteriaSetId);
            newCriteriaSet.setCriteriaCode(sizeCriteriaCode);
            newCriteriaSet.setCompanyId(companyId);
            newCriteriaSet.setIsBase(matchedCriteriaSet.getIsBase());
            newCriteriaSet.setIsRequiredForOrder(matchedCriteriaSet.getIsRequiredForOrder());
            newCriteriaSet.setIsDefaultConfiguration(matchedCriteriaSet.getIsDefaultConfiguration());
        }
        newCriteriaSet.setCriteriaSetValues(getCriteriaSetValues(product, sizeCriteriaCode, sizeValues));
        return newCriteriaSet;
    }

    /**
     * Dispatches the flattened size string either to the apparel/"other" parser
     * or to the measured-sizes parser, depending on the criteria code. Any
     * exception is logged and an empty list returned.
     */
    private List<CriteriaSetValues> getCriteriaSetValues(ProductDetail product, String sizeCriteriaCode, String sizeValues) {
        List<CriteriaSetValues> criteriaSetValues = new ArrayList<CriteriaSetValues>();
        // Dimensions Parsing Starts Here
        try {
            if (null != sizeCriteriaCode && !sizeCriteriaCode.trim().equals("")
                && !sizeCriteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_STRING_NULL_SMALL)) {
                LOGGER.info("SizeGroups Transformation Starts :" + sizeCriteriaCode);
                // Non-measured codes (apparel variants / other) take this branch.
                if (!sizeCriteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_DIMENSION)
                    && !sizeCriteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_CAPACITY)
                    && !sizeCriteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHIPPING_VOL_WEI)
                    && !sizeCriteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHIPPING_DIMENSION)
                    && !sizeCriteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHIPPING_WEIGHT)) {
                    // Anything that is not a known apparel code falls back to "other".
                    if (!sizeCriteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_HSR_UNIFORM)
                        && !sizeCriteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_INF_TLDR)
                        && !sizeCriteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_DRS_SHRT_SIZE)
                        && !sizeCriteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_PANT_SIZE)
                        && !sizeCriteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_BRA)
                        && !sizeCriteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_STD_NUM))
                        sizeCriteriaCode = ApplicationConstants.CONST_SIZE_OTHER_CODE;
                    // Hosiery/uniform lookup values are stored upper-case.
                    if (sizeCriteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_HSR_UNIFORM))
                        sizeValues = sizeValues.toUpperCase();
                    if (null != sizeValues && !sizeValues.trim().equals("")) {
                        criteriaSetValues = addCriteriaSetForApparals(product, sizeCriteriaCode, sizeValues);
                    }
                } else if
                (!sizeCriteriaCode.equals("")) {
                    // Measured size groups: values are "attribute:value:units" tokens,
                    // so a minimum token count is required before parsing.
                    String[] tmpSizeValuesAry = sizeValues.split(":");
                    if (sizeCriteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_DIMENSION)
                        && tmpSizeValuesAry.length > 3) {
                        criteriaSetValues = addCriteriaSetForSizes(sizeValues, product, sizeCriteriaCode);
                    } else if (tmpSizeValuesAry.length > 1) {
                        criteriaSetValues = addCriteriaSetForSizes(sizeValues, product, sizeCriteriaCode);
                    }
                }
                LOGGER.info("SizeGroups Transformation Ends");
            }
        } catch (Exception e) {
            LOGGER.error("Excepiton while processing SIZE CriteriaSetValues", e);
        }
        return criteriaSetValues;
    }

    /**
     * Parses measured size groups (dimension, capacity, volume/weight, shipping
     * dimension/weight) from a CSV of semicolon-separated
     * "attribute:value:units" elements into CriteriaSetValues, resolving
     * attribute ids and unit codes via the sizes-criteria lookup service.
     *
     * NOTE(review): attribute/sizeValue/units deliberately persist across loop
     * iterations (an element without ':' reuses the previous values) — do not
     * "fix" by re-initializing inside the loop without verifying intent.
     *
     * @throws VelocityException declared for callers; lookup errors are expected
     *         to surface through the invoked helpers
     */
    public List<CriteriaSetValues> addCriteriaSetForSizes(String productSizes, ProductDetail product, String sizeCriteriaCode)
        throws VelocityException {
        List<CriteriaSetValues> criteriaSetValuesAry = new ArrayList<CriteriaSetValues>();
        //Value[] valueAry = null;
        if (null != productSizes && !productSizes.trim().equals("")) { // If Product has any Size Groups
            // length:9:in;width:9:in;height:9:in,length:23:cm,arc:23:in
            String[] individualSizes = productSizes.split(","); // Checking CSV
            String attribute = null;
            String sizeValue = null;
            String units = null;
            // criteriaSetCodeValuesAry = new CriteriaSetCodeValues[1];
            // NOTE(review): logs the array's identity toString, not its contents.
            LOGGER.info("Individual Size (All Sizes):" + individualSizes);
            String initialUnits = "";
            String tempValueElement = null;
            for (int criteriaSetValuesCntr = 0; criteriaSetValuesCntr < individualSizes.length; criteriaSetValuesCntr++) {
                List<Value> valueAry = new ArrayList<Value>();
                // Dimensions are validated up front; invalid entries are logged and skipped.
                if (sizeCriteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_DIMENSION)
                    && !CommonUtilities.isValidDimension(individualSizes[criteriaSetValuesCntr])) {
                    productDataStore.addErrorToBatchLogCollection(product.getExternalProductId().trim(),
                        ApplicationConstants.CONST_BATCH_ERR_INVALID_VALUE, "Invalid format/value for Dimension ");
                    continue;
                } else if ((sizeCriteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHIPPING_VOL_WEI))) {
                    /*String temp = sizeCriteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_CAPACITY) ? "Capacity" : ApplicationConstants.CONST_STRING_VOLUME_WEIGHT;
                    productDataStore.addErrorToBatchLogCollection(product.getExternalProductId().trim(), ApplicationConstants.CONST_BATCH_ERR_INVALID_VALUE, "Invalid format/value for " + temp);
                    continue;*/
                }
                String[] sizeValueElements = individualSizes[criteriaSetValuesCntr].split(";");
                //valueAry = new Value[sizeValueElements.length];
                for (int valueElementsCntr = 0; valueElementsCntr < sizeValueElements.length; valueElementsCntr++) {
                    tempValueElement = sizeValueElements[valueElementsCntr];
                    // For Single Size Element(attribute:value:units) it will iterate once
                    // Shipping dimensions arrive position-encoded (L;W;H) — prepend names.
                    if (sizeCriteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHIPPING_DIMENSION)) {
                        if (valueElementsCntr == 0)
                            tempValueElement = "Length:" + tempValueElement;
                        else if (valueElementsCntr == 1)
                            tempValueElement = "Width:" + tempValueElement;
                        else if (valueElementsCntr == 2)
                            tempValueElement = "Height:" + tempValueElement;
                    }
                    if (tempValueElement.contains(":")) {
                        String[] valueElements = tempValueElement.split(":");
                        for (int sizeElemntCntr = 0; sizeElemntCntr < valueElements.length; sizeElemntCntr++) {
                            if (sizeElemntCntr == 0) {
                                attribute = valueElements[sizeElemntCntr];
                            } else if (sizeElemntCntr == 1)
                                sizeValue = valueElements[sizeElemntCntr];
                            else if (sizeElemntCntr == 2)
                                units = valueElements[sizeElemntCntr];
                        }
                    } // End of : tokens
                    if (sizeCriteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_CAPACITY))
                        initialUnits = sizeValue;
                    // Resolve attribute ids / unit codes via the lookup service; the
                    // raw response is fetched lazily and cached in the field.
                    if (sizeCriteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHIPPING_VOL_WEI)) {
                        initialUnits = sizeValue;
                        sizeValue = attribute;
                        if (null == sizesCriteriaWSResponse) {
                            sizesCriteriaWSResponse = JerseyClientPost.getLookupsResponse(RestAPIProperties
                                .get(ApplicationConstants.SIZES_CRITERIA_LOOKUP_URL));
                        }
                        sizeElementsResponse = JsonProcessor.getSizesResponse(sizesCriteriaWSResponse,
                            ApplicationConstants.CONST_STRING_VOLUME, sizeCriteriaCode);
                        attribute = JsonProcessor.getSizesElementValue("ID", sizeElementsResponse, attribute);
                        units = JsonProcessor.getSizesElementValue(ApplicationConstants.CONST_STRING_UNITS,
                            sizeElementsResponse, initialUnits.trim());
                        // Not found under Volume: retry the lookup under Weight.
                        if (units.equals("")) {
                            sizeElementsResponse = JsonProcessor.getSizesResponse(sizesCriteriaWSResponse,
                                ApplicationConstants.CONST_STRING_WEIGHT, sizeCriteriaCode);
                            attribute = JsonProcessor.getSizesElementValue("ID", sizeElementsResponse, attribute);
                            units = JsonProcessor.getSizesElementValue(ApplicationConstants.CONST_STRING_UNITS,
                                sizeElementsResponse, initialUnits.trim());
                        }
                    } else if (sizeCriteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_DIMENSION)
                        || sizeCriteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHIPPING_DIMENSION)) {
                        if (null != attribute && null != sizeValue && null != units) {
                            if (null == sizesCriteriaWSResponse) {
                                sizesCriteriaWSResponse = JerseyClientPost.getLookupsResponse(RestAPIProperties
                                    .get(ApplicationConstants.SIZES_CRITERIA_LOOKUP_URL));
                            }
                            sizeElementsResponse = JsonProcessor.getSizesResponse(sizesCriteriaWSResponse, attribute,
                                sizeCriteriaCode);
                            attribute = JsonProcessor.getSizesElementValue("ID", sizeElementsResponse, attribute);
                            // Normalize textual inch/feet units to their symbols before lookup.
                            if (units.equalsIgnoreCase(ApplicationConstants.CONST_STRING_INCH_SHORT_SMALL)
                                || units.equalsIgnoreCase(ApplicationConstants.CONST_STRING_INCH_SMALL))
                                units = "\"";
                            if (units.equalsIgnoreCase(ApplicationConstants.CONST_STRING_FEET_SHORT_SMALL)
                                || units.equalsIgnoreCase(ApplicationConstants.CONST_STRING_FEET_SMALL))
                                units = "\'";
                            units = JsonProcessor.getSizesElementValue(ApplicationConstants.CONST_STRING_UNITS,
                                sizeElementsResponse, units.trim());
                        } else {
                            continue;
                        }
                    } else if (sizeCriteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_CAPACITY)
                        || sizeCriteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHIPPING_WEIGHT)) {
                        if (null == sizesCriteriaWSResponse) {
                            sizesCriteriaWSResponse = JerseyClientPost.getLookupsResponse(RestAPIProperties
                                .get(ApplicationConstants.SIZES_CRITERIA_LOOKUP_URL));
                        }
                        if (sizeCriteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHIPPING_WEIGHT)) {
                            sizeElementsResponse = JsonProcessor
                                .getSizesResponse(sizesCriteriaWSResponse, "Unit", sizeCriteriaCode);
                        } else {
                            sizeElementsResponse = JsonProcessor.getSizesResponse(sizesCriteriaWSResponse,
                                ApplicationConstants.CONST_STRING_CAPACITY, sizeCriteriaCode);
                        }
                        if (null != sizeValue) {
                            units = JsonProcessor.getSizesElementValue("UNITS", sizeElementsResponse, sizeValue.trim());
                            sizeValue = attribute;
                            attribute = JsonProcessor.getSizesElementValue("ID", sizeElementsResponse,
                                ApplicationConstants.CONST_STRING_CAPACITY);
                        }
                    }
                    Value value = new Value();
                    value.setCriteriaAttributeId(attribute);
                    value.setUnitValue(sizeValue);
                    value.setUnitOfMeasureCode(units);
                    valueAry.add(value);
                }
                // Wrap the collected Values into one CriteriaSetValues entry with a
                // single code-value resolved from the lookup responses above.
                CriteriaSetCodeValues criteriaSetCodeValuesNew = new CriteriaSetCodeValues();
                String criteriaSetValueId = "";
                if (sizeCriteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHIPPING_DIMENSION)) {
                    if (null == sizesShippingDimsWSResponse) {
                        sizesShippingDimsWSResponse = JerseyClientPost.getLookupsResponse(RestAPIProperties
                            .get(ApplicationConstants.SIZE_GROUP_SHIPPING_DIMENSION_LOOKUP));
                    }
                    criteriaSetValueId = JsonProcessor.checkImprintArtWorkValueKeyPair(sizesShippingDimsWSResponse, "Other",
                        ApplicationConstants.CONST_SIZE_GROUP_SHIPPING_DIMENSION);
                    // criteriaSetValueId=ApplicationConstants.CONST_SIZE_GROUP_SHIPPING_DIMENSION_VAL_ID;
                } else if (sizeCriteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHIPPING_WEIGHT)) {
                    // SIZE_GROUP_SHIPPING_WGHT_LOOKUP
                    // criteriaSetValueId=ApplicationConstants.CONST_SIZE_GROUP_SHIPPING_WEIGHT_VAL_ID;
                    if (null == sizesShippingDimsWSResponse) {
                        sizesShippingDimsWSResponse = JerseyClientPost.getLookupsResponse(RestAPIProperties
                            .get(ApplicationConstants.SIZE_GROUP_SHIPPING_DIMENSION_LOOKUP));
                    }
                    criteriaSetValueId = JsonProcessor.checkImprintArtWorkValueKeyPair(sizesShippingDimsWSResponse, "Other",
                        ApplicationConstants.CONST_SIZE_GROUP_SHIPPING_WEIGHT);
                } else
                    criteriaSetValueId = JsonProcessor.getSizesElementValue("CRITERIASETID", sizeElementsResponse, initialUnits);
                criteriaSetCodeValuesNew.setSetCodeValueId(criteriaSetValueId);
                CriteriaSetValues criteriaSetValueNew = new CriteriaSetValues();
                criteriaSetValueNew.setId(ApplicationConstants.CONST_STRING_ZERO);
                criteriaSetValueNew.setCriteriaCode(sizeCriteriaCode);
                // Measured groups carry custom values; everything else is a lookup value.
                if (sizeCriteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_DIMENSION)
                    || sizeCriteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_CAPACITY)
                    || sizeCriteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHIPPING_DIMENSION)
                    || sizeCriteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHIPPING_WEIGHT))
                    criteriaSetValueNew.setValueTypeCode(ApplicationConstants.CONST_VALUE_TYPE_CODE_CUST);
                else
                    criteriaSetValueNew.setValueTypeCode(ApplicationConstants.CONST_VALUE_TYPE_CODE_LOOK);
                criteriaSetValueNew.setCriteriaValueDetail(ApplicationConstants.CONST_STRING_NONE_SMALL);
                criteriaSetCodeValuesNew.setId(ApplicationConstants.CONST_STRING_ZERO);
                // uniqueSetValueId presumably comes from SimpleCriteriaProcessor;
                // negative ids mark values that do not exist server-side yet.
                criteriaSetValueNew.setId(--uniqueSetValueId + "");
                criteriaSetCodeValuesNew.setCriteriaSetValueId(uniqueSetValueId + "");
                criteriaSetValueNew.setCriteriaSetId(criteriaSetId);
                CriteriaSetCodeValues[] criteriaSetCodeValuesAryNew = new CriteriaSetCodeValues[1];
                criteriaSetCodeValuesAryNew[0] = criteriaSetCodeValuesNew;
                // criteriaSetCodeValuesAry[0].setCriteriaSetValueId(criteriaSetValue.getId());
                criteriaSetValueNew.setCriteriaSetCodeValues(criteriaSetCodeValuesAryNew);
                criteriaSetValueNew.setIsSubset(ApplicationConstants.CONST_STRING_FALSE_SMALL);
                criteriaSetValueNew.setIsSetValueMeasurement(ApplicationConstants.CONST_STRING_FALSE_SMALL);
                criteriaSetValueNew.setValue(valueAry.toArray(new Value[0]));
                // TODO : Set ReferenceTable
                // Adding a this criteriaSet entry details to reference table, so later can be referenced easily
                productDataStore.updateCriteriaSetValueReferenceTable(product.getExternalProductId().trim(), sizeCriteriaCode,
                    processSourceCriteriaValueByCriteriaCode(individualSizes[criteriaSetValuesCntr], sizeCriteriaCode),
                    criteriaSetValueNew.getId());
                // NOTE(review): criteriaSetValueNew can never be null here; the guard is redundant.
                if (criteriaSetValueNew != null) {
                    criteriaSetValuesAry.add(criteriaSetValueNew);
                }
            }
        } // End of product sizes if condition
        return criteriaSetValuesAry;
    }

    /**
     * Builds CriteriaSetValues for apparel and "other" size groups from a CSV of
     * size labels, resolving each label against the sizes lookup service.
     * (Method continues beyond this chunk of the file.)
     *
     * @throws VelocityException declared for callers; raised by lookup helpers
     */
    public List<CriteriaSetValues> addCriteriaSetForApparals(ProductDetail product, String criteriaCode, String srcCriteria)
        throws VelocityException {
        //String initSizeGroup = criteriaCode;
        criteriaCode = criteriaCode.trim();
        srcCriteria = srcCriteria.trim();
        boolean isCustomValue = false;
        boolean isOtherSize = false;
        /*
         * if (criteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_STRING_APPAREL_INFANT_TODDLER)) {
         * criteriaCode = ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_INF_TLDR;
         * } else if (criteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_STRING_APPAREL_DRESS_SHIRT_SIZES)) {
         * criteriaCode = ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_DRS_SHRT_SIZE;
         * } else if (criteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_STRING_APPAREL_PANTS_SIZES)) {
         * criteriaCode = ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_PANT_SIZE;
         * } else if (criteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_STRING_APPAREL_HOSIERY_UNIFORM)) {
         * criteriaCode = ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_HSR_UNIFORM;
         * srcCriteria = srcCriteria.toUpperCase();
         * } else if (criteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_STRING_APPAREL_BRA_SIZES)) {
         * criteriaCode = ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_BRA;
         * } else if (criteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_STRING_STANDARD_NUMBERED)) {
         * criteriaCode = ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_STD_NUM;
         * }
         */
        List<CriteriaSetValues> criteriaSetValuesList = new ArrayList<CriteriaSetValues>();
        CriteriaSetCodeValues[] criteriaSetCodeValues = null;
        String unitValue = "";
        String actualCriteria = srcCriteria;
        isCustomValue = false;
        String[] criteriaElements = srcCriteria.split(",");
        for (String curntCriteria : criteriaElements) {
            String orignalCriteriaValue = processSourceCriteriaValueByCriteriaCode(curntCriteria, criteriaCode);
            isCustomValue = false;
            curntCriteria = curntCriteria != null ? curntCriteria.trim() : curntCriteria;
            actualCriteria = curntCriteria;
            CriteriaSetValues criteriaSetValue = new CriteriaSetValues();
            criteriaSetValue.setCriteriaSetId(criteriaSetId);
            CriteriaSetCodeValues child1Obj = new CriteriaSetCodeValues();
            criteriaSetCodeValues = new CriteriaSetCodeValues[1];
            // Hosiery/uniform lookup values are stored upper-case.
            if (criteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_HSR_UNIFORM)) {
                curntCriteria = curntCriteria.toUpperCase();
            }
            if (criteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_BRA)
                || criteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_STD_NUM)
                || criteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_OTHER_CODE)
                || criteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_DRS_SHRT_SIZE)
                || criteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_PANT_SIZE)
                || criteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_HSR_UNIFORM)
                || criteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_INF_TLDR)) {
                String tempCriteria = curntCriteria;
                if (null == sizesWSResponse) {
                    sizesWSResponse = JerseyClientPost.getLookupsResponse(RestAPIProperties
                        .get(ApplicationConstants.SIZES_LOOKUP_URL));
                }
                if (criteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_OTHER_CODE)
                    && !(curntCriteria.equalsIgnoreCase(ApplicationConstants.CONST_STRING_CUSTOM) || curntCriteria
                        .equalsIgnoreCase(ApplicationConstants.CONST_STRING_STANDARD))) {
                    curntCriteria = JsonProcessor.checkSizesKeyValuePair(sizesWSResponse,
                        ApplicationConstants.CONST_STRING_OTHER_SIZES, criteriaCode);
isCustomValue = true; } else { curntCriteria = JsonProcessor.checkSizesKeyValuePair(sizesWSResponse, curntCriteria, criteriaCode); } if (curntCriteria.equalsIgnoreCase(ApplicationConstants.CONST_STRING_NULL_SMALL)) { if (criteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_BRA) || criteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_HSR_UNIFORM) || criteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_INF_TLDR) || criteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_DRS_SHRT_SIZE) || criteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_PANT_SIZE)) { isOtherSize = true; curntCriteria = JsonProcessor.checkOtherSizesKeyValuePair(sizesWSResponse, ApplicationConstants.CONST_STRING_OTHER, criteriaCode); isCustomValue = true; } if (criteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_INF_TLDR)) { isOtherSize = true; if (tempCriteria.toLowerCase().contains("month")) { tempCriteria = tempCriteria.toLowerCase(); tempCriteria = tempCriteria.substring(0, tempCriteria.indexOf("m")); unitValue = "months"; actualCriteria = tempCriteria.trim(); } else if (tempCriteria.toLowerCase().contains("t")) { tempCriteria = tempCriteria.toLowerCase(); tempCriteria = tempCriteria.substring(0, tempCriteria.indexOf("t")); unitValue = "T"; actualCriteria = tempCriteria.trim(); } } if (criteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_STD_NUM)) { curntCriteria = JsonProcessor.checkSizesKeyValuePair(sizesWSResponse, ApplicationConstants.CONST_STRING_STANDARD_NUMBERED_OTHER, criteriaCode); isCustomValue = true; } } } else { curntCriteria = null; } if (null != curntCriteria && !curntCriteria.equalsIgnoreCase(ApplicationConstants.CONST_STRING_NULL_SMALL)) { uniqueSetValueId child1Obj.setSetCodeValueId(curntCriteria); // child1Obj.setCodeValue(criteriaCode); // "PRCL" child1Obj.setCriteriaSetValueId(uniqueSetValueId + ""); 
criteriaSetCodeValues[0] = child1Obj; child1Obj = null; criteriaSetValue.setId(uniqueSetValueId + ""); if (criteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_RUSH_TIME_CRITERIA_CODE) || criteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_PRODUCTION_TIME_CRITERIA_CODE) || criteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SHIPPING_ITEM_CRITERIA_CODE)) { Value value = new Value(); if (criteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SHIPPING_ITEM_CRITERIA_CODE)) { if (null == sizesCriteriaWSResponse) { sizesCriteriaWSResponse = JerseyClientPost.getLookupsResponse(RestAPIProperties .get(ApplicationConstants.SIZES_CRITERIA_LOOKUP_URL)); } sizeElementsResponse = JsonProcessor.getSizesResponse(sizesCriteriaWSResponse, "Unit", criteriaCode); if (unitValue.contains(":")) { String[] unitValueAry = unitValue.split(":"); String unitsCode = unitValueAry[1]; unitValue = unitValueAry[0]; String temp = unitsCode; unitsCode = JsonProcessor.getSizesElementValue(ApplicationConstants.CONST_STRING_UNITS, sizeElementsResponse, unitsCode.trim()); if (CommonUtilities.isValueNull(unitsCode)) { // Fix for unit other than in Lookup unitsCode = JsonProcessor.getSizesElementValue(ApplicationConstants.CONST_STRING_UNITS, sizeElementsResponse, ApplicationConstants.CONST_STRING_OTHER); if (criteriaSetValue.getCriteriaSetCodeValues() != null && criteriaSetValue.getCriteriaSetCodeValues().length > 0) { criteriaSetValue.getCriteriaSetCodeValues()[0].setCodeValue(temp); value.setUnitOfMeasureCode(unitsCode); } } else { value.setUnitOfMeasureCode(unitsCode); } value.setCriteriaAttributeId(JsonProcessor.getSizesElementValue("ID", sizeElementsResponse, unitValue.trim())); value.setUnitValue(unitValue); value.setUnitOfMeasureCode(unitsCode); Value[] valueAry = new Value[1]; valueAry[0] = value; criteriaSetValue.setValue(valueAry); } } } else if (isOtherSize) { if (criteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_BRA) || 
criteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_HSR_UNIFORM) || criteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_INF_TLDR)) { if (null == sizesCriteriaWSResponse) { sizesCriteriaWSResponse = JerseyClientPost.getLookupsResponse(RestAPIProperties .get(ApplicationConstants.SIZES_CRITERIA_LOOKUP_URL)); } sizeElementsResponse = JsonProcessor.getSizesResponse(sizesCriteriaWSResponse, "Unit", criteriaCode); Value valueObj = new Value(); // String[] unitValueAry=new String[1]; // String unitsCode = unitValueAry[1]; if (criteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_INF_TLDR) && !CommonUtilities.isValueNull(unitValue)) { String temp = JsonProcessor.getSizesElementValue("Units", sizeElementsResponse, unitValue.trim()); valueObj.setUnitOfMeasureCode(CommonUtilities.isValueNull(temp) ? "" : temp); } unitValue = (unitValue == null || unitValue.trim().isEmpty()) ? actualCriteria : unitValue; valueObj.setCriteriaAttributeId(JsonProcessor.getSizesElementValue("ID", sizeElementsResponse, unitValue.trim())); valueObj.setUnitValue(actualCriteria); Value[] valueAry = new Value[1]; valueAry[0] = valueObj; criteriaSetValue.setValue(valueAry); } if (criteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_DRS_SHRT_SIZE) || criteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_PANT_SIZE)) { String[] unitValueAry = null; if (criteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_DRS_SHRT_SIZE)) unitValueAry = new String[] { ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_DRS_SHRT_SIZE_NECK, ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_DRS_SHRT_SIZE_SLVS }; else unitValueAry = new String[] { ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_PANT_SIZE_WAIST, ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_PANT_SIZE_INSEAM }; Value valueObj = null; List<Value> valueAry = new ArrayList<>(); String[] untValueFnlAry = new String[unitValueAry.length]; String 
validUnit = null; if (criteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_DRS_SHRT_SIZE)) validUnit = CommonUtilities.getStringWithBrackets(actualCriteria); else { if (actualCriteria.contains("x")) validUnit = actualCriteria.substring(actualCriteria.indexOf("x") + 1, actualCriteria.length()); } if (criteriaCode.equalsIgnoreCase(ApplicationConstants.CONST_SIZE_GROUP_SHP_APR_DRS_SHRT_SIZE)) { if (!CommonUtilities.isValueNull(actualCriteria)) { String neck = ""; String sleeve = ""; int index = 0; if (actualCriteria.contains("(")) { index = actualCriteria.indexOf("("); neck = actualCriteria.substring(0, index); sleeve = actualCriteria.substring(index); sleeve = sleeve.replaceAll("\\(", "").replaceAll("\\)", ""); neck = neck.replaceAll("\\)", "").replaceAll("\\(", ""); } else { neck = actualCriteria.replaceAll("\\(", "").replaceAll("\\)", ""); sleeve = ApplicationConstants.CONST_STRING_EMPTY; } untValueFnlAry = new String[] { neck, sleeve }; } /*untValueFnlAry = new String[] { ne, ApplicationConstants.CONST_STRING_EMPTY }; if (validUnit.equalsIgnoreCase(ApplicationConstants.CONST_STRING_FALSE_SMALL)) { untValueFnlAry = new String[] { actualCriteria, ApplicationConstants.CONST_STRING_EMPTY }; } else untValueFnlAry = new String[] { actualCriteria.substring(0, actualCriteria.indexOf("(")), validUnit };*/ } else { if (actualCriteria.contains("x")) untValueFnlAry = new String[] { actualCriteria.substring(0, actualCriteria.indexOf("x")), validUnit }; else untValueFnlAry = new String[] { actualCriteria, ApplicationConstants.CONST_STRING_EMPTY }; } for (int untValCntr = 0; untValCntr < unitValueAry.length; untValCntr++) { valueObj = new Value(); if (null == sizesCriteriaWSResponse) { sizesCriteriaWSResponse = JerseyClientPost.getLookupsResponse(RestAPIProperties .get(ApplicationConstants.SIZES_CRITERIA_LOOKUP_URL)); } sizeElementsResponse = JsonProcessor.getSizesResponse(sizesCriteriaWSResponse, unitValueAry[untValCntr], criteriaCode); 
valueObj.setCriteriaAttributeId(JsonProcessor.getSizesElementValue("ID", sizeElementsResponse, unitValueAry[untValCntr])); valueObj.setUnitValue(untValueFnlAry[untValCntr].trim()); if (!valueObj.getUnitValue().equalsIgnoreCase(ApplicationConstants.CONST_STRING_EMPTY)) { valueAry.add(valueObj); } else { // Check this later //valueAry = Arrays.copyOf(valueAry, valueAry.length - 1); } } criteriaSetValue.setValue(valueAry.toArray(new Value[0])); } isOtherSize = false; } else { criteriaSetValue.setValue(actualCriteria); } if (isCustomValue) { criteriaSetValue.setValueTypeCode(ApplicationConstants.CONST_VALUE_TYPE_CODE_CUST); isCustomValue = false; } else criteriaSetValue.setValueTypeCode(ApplicationConstants.CONST_VALUE_TYPE_CODE_LOOK); criteriaSetValue.setIsSubset(ApplicationConstants.CONST_STRING_FALSE_SMALL); criteriaSetValue.setIsSetValueMeasurement(ApplicationConstants.CONST_STRING_FALSE_SMALL); criteriaSetValue.setCriteriaSetId(criteriaSetId); criteriaSetValue.setCriteriaSetId(criteriaSetId); criteriaSetValue.setCriteriaCode(criteriaCode); criteriaSetValue.setCriteriaSetCodeValues(criteriaSetCodeValues); if (criteriaSetValuesList != null) { criteriaSetValuesList.add(criteriaSetValue); // Adding a this criteriaSet entry details to reference table, so later can be referenced easily productDataStore.updateCriteriaSetValueReferenceTable(product.getExternalProductId(), criteriaCode, orignalCriteriaValue, criteriaSetValue.getId()); } criteriaSetValue = null; } } return criteriaSetValuesList; } public ProductCriteriaSets compareAndUpdateSizeGroup(ProductDetail existingProduct, ProductCriteriaSets newlyCreatedCriteriaSet, ProductCriteriaSets existingCriteriaSet) { if (newlyCreatedCriteriaSet == null) { return null; } if (existingCriteriaSet != null) { Map<String, CriteriaSetValues> existingMap = createExistingSizesCollection(existingCriteriaSet.getCriteriaSetValues(), existingCriteriaSet.getCriteriaCode()); List<CriteriaSetValues> finalValues = new 
ArrayList<CriteriaSetValues>();
        // Merge step: reuse an existing criteria-set value when its key matches a
        // newly created one; otherwise re-parent the new value onto the existing set.
        if (!existingMap.isEmpty()) {
            for (CriteriaSetValues criteriaSetValue : newlyCreatedCriteriaSet.getCriteriaSetValues()) {
                // Key format "<criteriaCode>_<value key>" — must agree with createExistingSizesCollection().
                String key = newlyCreatedCriteriaSet.getCriteriaCode() + "_" + getKeyFromValue(criteriaSetValue.getValue());
                if (existingMap.containsKey(key)) {
                    // Same value already present: keep the existing entry (preserves its server-side ids).
                    finalValues.add(existingMap.get(key));
                    /*
                     * findSizeValueDetails(criteriaSetValue.getCriteriaCode(), getSizeElementResponse(), existingMap.get(key),
                     * existingProduct.getExternalProductId());
                     */
                } else {
                    // New value: attach it to the existing criteria set before adding.
                    criteriaSetValue.setCriteriaSetId(existingCriteriaSet.getCriteriaSetId());
                    finalValues.add(criteriaSetValue);
                }
            }
            existingCriteriaSet.setCriteriaSetValues(finalValues);
            return existingCriteriaSet;
        } else {
            // TODO : Update few attributes of newly created set
            return syncProductCriteriaSet(newlyCreatedCriteriaSet);
        }
    } else if (existingProduct != null) {
        // Set productId and CompanyId and ConfigId;
        newlyCreatedCriteriaSet.setProductId(existingProduct.getID());
        newlyCreatedCriteriaSet.setCompanyId(existingProduct.getCompanyId());
        newlyCreatedCriteriaSet.setConfigId(this.configId);
        return newlyCreatedCriteriaSet;
    } else {
        // No existing product either: fall back to this instance's cached identifiers.
        newlyCreatedCriteriaSet.setProductId(this.productId);
        newlyCreatedCriteriaSet.setCompanyId(this.companyId);
        newlyCreatedCriteriaSet.setConfigId(this.configId);
        return newlyCreatedCriteriaSet;
    }
}

/**
 * Builds a lookup of existing size criteria-set values keyed by
 * "&lt;criteriaCode&gt;_&lt;value key&gt;" so newly computed values can be matched
 * against them in constant time.
 *
 * @param existingCriteriaSetValues values already present on the product; null entries are skipped
 * @param criteriaCode              criteria code used as the key prefix
 * @return map from composite key to the existing entry
 */
public Map<String, CriteriaSetValues> createExistingSizesCollection(List<CriteriaSetValues> existingCriteriaSetValues,
        String criteriaCode) {
    Map<String, CriteriaSetValues> existing = new HashMap<String, CriteriaSetValues>();
    for (CriteriaSetValues criteriaSetValue : existingCriteriaSetValues) {
        if (criteriaSetValue != null) {
            existing.put(criteriaCode + "_" + getKeyFromValue(criteriaSetValue.getValue()), criteriaSetValue);
        }
    }
    return existing;
}

/**
 * Stamps this instance's product/company/config identifiers onto the given
 * criteria set (mutates the argument).
 *
 * @param criteriaSet set to synchronize
 * @return the same instance, for chaining
 */
public ProductCriteriaSets syncProductCriteriaSet(ProductCriteriaSets criteriaSet) {
    criteriaSet.setProductId(this.productId);
    criteriaSet.setCompanyId(this.companyId);
    criteriaSet.setConfigId(this.configId);
    return
criteriaSet;
}

/**
 * Returns the first size-group criteria set found in the given collection,
 * probing each known size-group criteria code in declaration order.
 *
 * @param criteriaSets criteria sets keyed by criteria code
 * @return the first matching set, or null when no size code is present
 */
protected ProductCriteriaSets getProductCriteriaSetForSizeGroup(Map<String, ProductCriteriaSets> criteriaSets) {
    ProductCriteriaSets criteriaSet = null;
    for (String code : SIZE_GROUP_CRITERIACODES) {
        criteriaSet = criteriaSets.get(code);
        if (criteriaSet != null) {
            return criteriaSet;
        }
    }
    return criteriaSet;
}

/**
 * Removes the first size-related criteria set from the given collection.
 * Note: only the FIRST matching code is removed — the loop breaks after one hit.
 *
 * @param existingCriteriaCollection collection to prune (mutated in place)
 * @return the same collection, for chaining
 */
public Map<String, ProductCriteriaSets> removeSizeRelatedCriteriaSetFromExisting(
        Map<String, ProductCriteriaSets> existingCriteriaCollection) {
    try {
        for (String code : SIZE_GROUP_CRITERIACODES) {
            if (existingCriteriaCollection.remove(code) != null) {
                break;
            }
        }
    } catch (Exception e) {
        // Nothing to do on exception
    }
    return existingCriteriaCollection;
}

/**
 * Scans the collection for the first criteria set whose criteria code is one
 * of the known size-group codes.
 *
 * @param existingCriteriaCollection criteria sets to scan
 * @return the first size-related set, or null when none is found (or on any error)
 */
public ProductCriteriaSets getSizeRelatedCriteriaSetFromExisting(Map<String, ProductCriteriaSets> existingCriteriaCollection) {
    try {
        List<String> sizeCriteriaCodes = Arrays.asList(SIZE_GROUP_CRITERIACODES);
        for (ProductCriteriaSets criteriaSets : existingCriteriaCollection.values()) {
            if (criteriaSets != null) {
                if (sizeCriteriaCodes.contains(criteriaSets.getCriteriaCode())) {
                    return criteriaSets;
                }
            }
        }
    } catch (Exception e) {
        // Nothing to do with exception
    }
    return null;
}

/**
 * Finds the value of a given size element from sizeElementResponses
 *
 * @param elementName
 *            is the type of size value like Dimension, Weight, etc...
 * @param sizeElementsResponse
 *            contains all the size related details
 * @param attribute
 *            is the value
 * @return Criteria code of the give size value
 */
@SuppressWarnings("rawtypes")
public String getSizesElementValue(String elementName, LinkedList<LinkedHashMap> sizeElementsResponse, String attribute) {
    attribute = attribute.trim();
    elementName = elementName.trim();
    String ElementValue = "";
    try {
        // LinkedList<LinkedHashMap> sizeElementsResponse=(LinkedList<LinkedHashMap>)jsonParser.parseToList(response);
        if (null != sizeElementsResponse) {
            Iterator<LinkedHashMap> iterator = sizeElementsResponse.iterator();
            while (iterator.hasNext()) {
                Map sizeIndividualLookupMap = (LinkedHashMap) iterator.next();
                if (elementName.equalsIgnoreCase("id")) {
                    // "ID" lookup: match the lookup entry by id, answer its display name.
                    if (sizeIndividualLookupMap.get("ID").toString().equals(attribute)) {
                        ElementValue = sizeIndividualLookupMap.get("DisplayName").toString();
                        break;
                    }
                } else if (elementName.equalsIgnoreCase("units")) {
                    // "UNITS" lookup: search the nested UnitsOfMeasure list by code,
                    // answer that unit's display format.
                    @SuppressWarnings({ "unchecked" })
                    LinkedList<LinkedHashMap> unitValuesList = (LinkedList<LinkedHashMap>) sizeIndividualLookupMap
                        .get("UnitsOfMeasure");
                    Iterator<LinkedHashMap> unitValuesiterator = unitValuesList.iterator();
                    while (unitValuesiterator.hasNext()) {
                        Map codeValueGrpsMap = (LinkedHashMap) unitValuesiterator.next();
                        if (codeValueGrpsMap.get("Code").toString().equalsIgnoreCase(attribute)) {
                            ElementValue = (String) codeValueGrpsMap.get("Format");
                            break;
                        }
                    }
                }
                // Stop scanning as soon as any branch produced a result.
                if (!ElementValue.isEmpty())
                    break;
            }
        }
    } catch (Exception Ex) {
        LOGGER.error("Exception while processing Product Size Group JSON", Ex);
    }
    return ElementValue;
}

/** Shared size lookup response, cached by the product data store. */
@SuppressWarnings("rawtypes")
public LinkedList<LinkedHashMap> getSizeElementResponse() {
    return ProductDataStore.getLookupResponse();
}

@Override
protected ProductCriteriaSets getCriteriaSet(String values, ProductDetail existingProduct, ProductCriteriaSets matchedCriteriaSet,
        int currentSetValueId) {
    // TODO Auto-generated method stub
    return null;
}

@Override
public String getSetCodeValueId(String value)
{ // TODO Auto-generated method stub return ProductDataStore.getSetCodeValueIdForShippingItem(value); } @Override protected boolean isValueIsValid(String value) { // TODO Auto-generated method stub return false; } @Override protected String[] processValues(String value) { // TODO Auto-generated method stub return null; } @Override protected boolean updateCriteriaSet(String value) { // TODO Auto-generated method stub return false; } /* * public boolean registerExistingValuesForReference(ProductCriteriaSets criteriaSet, String externalProductId) { * if (criteriaSet == null) { * return false; * } * LOGGER.info("Registering existing Size values of product"); * if (criteriaSet.getCriteriaSetValues() != null && criteriaSet.getCriteriaSetValues().length > 0) { * for (CriteriaSetValues criteriaValues : criteriaSet.getCriteriaSetValues()) { * if (criteriaValues.getCriteriaSetCodeValues().length != 0) { * findSizeValueDetails(criteriaSet.getCriteriaCode(), getSizeElementResponse(), criteriaValues, externalProductId); * } * } * } * LOGGER.info("Completed existing Size values of product"); * * return false; * } */ @SuppressWarnings({ "rawtypes", "unused" }) private void findSizeValueDetails(String criteriaCode, LinkedList<LinkedHashMap> criteriaAttributes, CriteriaSetValues criteriaSetValue, String externalProductId) { // String[] stringAry=new String[2]; if (criteriaCode.equalsIgnoreCase("DIMS")) LOGGER.info("Found Size Group Dimension"); else if (criteriaCode.equalsIgnoreCase("CAPS")) LOGGER.info("Found Size Group Capacity"); else if (criteriaCode.equalsIgnoreCase("SVWT")) LOGGER.info("Found Size Group Volume/Weight"); else if (criteriaCode.equalsIgnoreCase("SABR")) LOGGER.info("Found Size Group Apparel - Bra Sizes"); else if (criteriaCode.equalsIgnoreCase("SAHU")) LOGGER.info("Found Size Group Apparel - Hosiery/Uniform Sizes"); else if (criteriaCode.equalsIgnoreCase("SAIT")) LOGGER.info("Found Size Group Apparel - Infant & Toddler"); else if 
(criteriaCode.equalsIgnoreCase("SANS")) LOGGER.info("Found Size Group Apparel - Dress Shirt Sizes"); else if (criteriaCode.equalsIgnoreCase("SAWI")) LOGGER.info("Found Size Group Apparel - Pants Sizes"); else if (criteriaCode.equalsIgnoreCase("SSNM")) LOGGER.info("Found Size Group Standard & Numbered"); else if (criteriaCode.equalsIgnoreCase("SOTH")) LOGGER.info("Found Size Group Other"); String sizeValue = "", finalSizeValue = "", delim = ""; String sizeElementValue = ""; // int noOfSizes=criteriaSetValueLst.size(); int elementsCntr = 0; String unitOfmeasureCode = ""; int sizeCntr = 0; if (criteriaSetValue.getValue() instanceof List) { ArrayList<?> valueList = (ArrayList<?>) criteriaSetValue.getValue(); Iterator<?> sizeValuesItr = valueList.iterator(); while (sizeValuesItr.hasNext()) { LinkedHashMap<?, ?> valueMap = (LinkedHashMap<?, ?>) sizeValuesItr.next(); unitOfmeasureCode = getSizesElementValue("UNITS", criteriaAttributes, valueMap.get("UnitOfMeasureCode").toString()); if (unitOfmeasureCode.equals("\"")) unitOfmeasureCode = "in"; if (unitOfmeasureCode.equals("'")) unitOfmeasureCode = "ft"; if (criteriaCode.equalsIgnoreCase("DIMS") || criteriaCode.equalsIgnoreCase("SDIM")) { if (criteriaCode.equalsIgnoreCase("DIMS")) sizeValue = getSizesElementValue("ID", criteriaAttributes, valueMap.get("CriteriaAttributeId").toString()) + ":" + valueMap.get("UnitValue") + ":" + unitOfmeasureCode; else sizeValue = valueMap.get("UnitValue") + ":" + unitOfmeasureCode; delim = "; "; } else if (criteriaCode.equalsIgnoreCase("CAPS") || criteriaCode.equalsIgnoreCase("SVWT") || criteriaCode.equalsIgnoreCase("SHWT")) { sizeValue = valueMap.get("UnitValue") + ":" + unitOfmeasureCode; delim = ": "; } else { if (criteriaCode.equalsIgnoreCase("SAWI")) { if (getSizesElementValue("ID", criteriaAttributes, valueMap.get("CriteriaAttributeId").toString()).equals( "Waist")) sizeValue = valueMap.get("UnitValue").toString(); else if (getSizesElementValue("ID", criteriaAttributes, 
valueMap.get("CriteriaAttributeId").toString()) .equals("Inseam")) sizeValue = "x" + valueMap.get("UnitValue").toString(); } else if (criteriaCode.equalsIgnoreCase("SAIT")) { if (unitOfmeasureCode.length() == 1) { sizeValue = valueMap.get("UnitValue").toString() + unitOfmeasureCode; } else { sizeValue = valueMap.get("UnitValue").toString() + " " + unitOfmeasureCode; } } } if (sizeCntr != 0) { if (criteriaCode.equalsIgnoreCase("SANS")) { sizeElementValue += "(" + sizeValue.trim() + ")"; } else { sizeElementValue += delim + sizeValue; } } else { sizeElementValue += sizeValue; } sizeCntr++; } // updateReferenceTable(existingProduct.getExternalProductId(), ApplicationConstants.CONST_ORIGIN_CRITERIA_CODE, // value, criteriaSetValue); updateReferenceTable(externalProductId, criteriaCode, sizeElementValue, criteriaSetValue); // criteriaSetParser.addReferenceSet(externalProductId,criteriaCode,criteriaSetValue.getID(),sizeElementValue); } else { sizeValue = criteriaSetValue.getBaseLookupValue(); if (null == sizeValue) { sizeValue = criteriaSetValue.getFormatValue(); } updateReferenceTable(externalProductId, criteriaCode, sizeElementValue, criteriaSetValue); // criteriaSetParser.addReferenceSet(externalProductId, criteriaSetValue.getCriteriaCode(), criteriaSetValue.getID(), // sizeValue); sizeElementValue += sizeValue; sizeCntr++; } if (elementsCntr != 0) { finalSizeValue = finalSizeValue + "," + sizeElementValue.trim(); } else { finalSizeValue = sizeElementValue.trim(); } sizeElementValue = ""; } public Map<String, ProductCriteriaSets> processShippingItem(ProductDetail product, Map<String, ProductCriteriaSets> existingCriteriaSetMap, String configId, ShippingEstimate shippingEstimate, int criteriaSetId) { this.configId = configId; this.productId = product.getID(); this.companyId = product.getCompanyId(); if (shippingEstimate == null) { existingCriteriaSetMap.remove(ApplicationConstants.CONST_SIZE_GROUP_SHIPPING_DIMENSION); 
existingCriteriaSetMap.remove(ApplicationConstants.CONST_SIZE_GROUP_SHIPPING_WEIGHT); existingCriteriaSetMap.remove(ApplicationConstants.CONST_SHIPPING_ITEM_CRITERIA_CODE); return existingCriteriaSetMap; } // Shipping Dimensions and Weight String[] dimnsAry = {}; if (shippingEstimate.getDimensions() == null) { // Remove Previous SDIM from the criteria set existingCriteriaSetMap.remove(ApplicationConstants.CONST_SIZE_GROUP_SHIPPING_DIMENSION); } else { this.criteriaSetId = String.valueOf(--criteriaSetId); String shippingDimensions = ProductParserUtil.getShippingDimension(shippingEstimate); LOGGER.info("Shipping Dimensions Transformation Starts :" + shippingDimensions); dimnsAry = shippingDimensions.split(";"); if (dimnsAry.length == 3) { ProductCriteriaSets tempCriteriaSet = getSizeCriteriaSet(shippingDimensions, ApplicationConstants.CONST_SIZE_GROUP_SHIPPING_DIMENSION, existingCriteriaSetMap.get(ApplicationConstants.CONST_SIZE_GROUP_SHIPPING_DIMENSION), product); // compare and update if (tempCriteriaSet != null && tempCriteriaSet.getCriteriaSetValues().size() > 0) { tempCriteriaSet = compareAndUpdateSizeGroup(product, tempCriteriaSet, existingCriteriaSetMap.get(ApplicationConstants.CONST_SIZE_GROUP_SHIPPING_DIMENSION)); existingCriteriaSetMap.put(ApplicationConstants.CONST_SIZE_GROUP_SHIPPING_DIMENSION, tempCriteriaSet); } else { existingCriteriaSetMap.remove(ApplicationConstants.CONST_SIZE_GROUP_SHIPPING_DIMENSION); } } else { // TODO : LOG ERROR } LOGGER.info("Shipping Dimensions Transformation Ends"); } if (shippingEstimate.getNumberOfItems() == null) { // Remove Previous SDIM from the criteria set existingCriteriaSetMap.remove(ApplicationConstants.CONST_SHIPPING_ITEM_CRITERIA_CODE); } else { this.criteriaSetId = String.valueOf(--criteriaSetId); ProductCriteriaSets tempCriteriaSet = getCriteriaSet(shippingEstimate, product, existingCriteriaSetMap.get(ApplicationConstants.CONST_SHIPPING_ITEM_CRITERIA_CODE), criteriaSetId); 
existingCriteriaSetMap.put(ApplicationConstants.CONST_SHIPPING_ITEM_CRITERIA_CODE, tempCriteriaSet); } if (shippingEstimate.getWeight() == null) { // Remove Previous SDIM from the criteria set existingCriteriaSetMap.remove(ApplicationConstants.CONST_SIZE_GROUP_SHIPPING_WEIGHT); } else { this.criteriaSetId = String.valueOf(--criteriaSetId); String shippingWeight = ProductParserUtil.getShippingWeight(shippingEstimate); ProductCriteriaSets tempCriteriaSet = getSizeCriteriaSet(shippingWeight, ApplicationConstants.CONST_SIZE_GROUP_SHIPPING_WEIGHT, existingCriteriaSetMap.get(ApplicationConstants.CONST_SIZE_GROUP_SHIPPING_WEIGHT), product); // compare and update if (tempCriteriaSet != null && tempCriteriaSet.getCriteriaSetValues().size() > 0) { tempCriteriaSet = compareAndUpdateSizeGroup(product, tempCriteriaSet, existingCriteriaSetMap.get(ApplicationConstants.CONST_SIZE_GROUP_SHIPPING_WEIGHT)); existingCriteriaSetMap.put(ApplicationConstants.CONST_SIZE_GROUP_SHIPPING_WEIGHT, tempCriteriaSet); } else { existingCriteriaSetMap.remove(ApplicationConstants.CONST_SIZE_GROUP_SHIPPING_WEIGHT); } } return existingCriteriaSetMap; } private ProductCriteriaSets getCriteriaSet(ShippingEstimate shippingEstimate, ProductDetail existingProduct, ProductCriteriaSets matchedCriteriaSet, int uniqueCriteriaSetId) { if (shippingEstimate == null || shippingEstimate.getNumberOfItems() == null) { return null; } LOGGER.info("Started Processing of Shipping Items of Shipping Estimate" + shippingEstimate.getNumberOfItems()); // String[] finalValues = processValues(values); List<CriteriaSetValues> finalCriteriaSetValues = new ArrayList<>(); boolean checkExistingElements = matchedCriteriaSet != null; HashMap<String, CriteriaSetValues> existingValueMap = new HashMap<String, CriteriaSetValues>(); if (checkExistingElements) { existingValueMap = createTableForExistingSetValue(matchedCriteriaSet.getCriteriaSetValues()); } else { matchedCriteriaSet = new ProductCriteriaSets(); // Set Basic elements 
matchedCriteriaSet.setCriteriaSetId(String.valueOf(uniqueCriteriaSetId)); matchedCriteriaSet.setProductId(existingProduct.getID()); matchedCriteriaSet.setCompanyId(existingProduct.getCompanyId()); matchedCriteriaSet.setConfigId(this.configId); matchedCriteriaSet.setCriteriaCode(ApplicationConstants.CONST_SHIPPING_ITEM_CRITERIA_CODE); matchedCriteriaSet.setIsBase(ApplicationConstants.CONST_STRING_FALSE_SMALL); matchedCriteriaSet.setIsRequiredForOrder(ApplicationConstants.CONST_STRING_FALSE_SMALL); matchedCriteriaSet.setIsDefaultConfiguration(ApplicationConstants.CONST_STRING_FALSE_SMALL); } String setCodeValueId = getSetCodeValueId(ApplicationConstants.CONST_STRING_OTHER); CriteriaSetValues criteriaSetValue = null; Value value = getValueForShippingItem(existingProduct.getExternalProductId(), shippingEstimate.getNumberOfItems()); if (value == null) { return null; } else { String key = getKeyFromValue(value); if (checkExistingElements) { criteriaSetValue = existingValueMap.get(key); } if (criteriaSetValue == null) { // If no match found in the existing list // Set basic properties for a criteriaSetValue criteriaSetValue = new CriteriaSetValues(); criteriaSetValue.setId(String.valueOf(--uniqueSetValueId)); criteriaSetValue.setCriteriaValueDetail(shippingEstimate.getNumberOfItems().getUnit()); criteriaSetValue.setCriteriaCode(ApplicationConstants.CONST_SHIPPING_ITEM_CRITERIA_CODE); criteriaSetValue.setValueTypeCode(ApplicationConstants.CONST_VALUE_TYPE_CODE_CUST); criteriaSetValue.setIsSubset(ApplicationConstants.CONST_STRING_FALSE_SMALL); criteriaSetValue.setIsSetValueMeasurement(ApplicationConstants.CONST_STRING_FALSE_SMALL); criteriaSetValue.setCriteriaSetId(matchedCriteriaSet.getCriteriaSetId()); criteriaSetValue.setCriteriaSetCodeValues(getCriteriaSetCodeValues(setCodeValueId, criteriaSetValue.getId())); criteriaSetValue.setValue(new Value[] { value }); } } if (criteriaSetValue != null) { 
criteriaSetValue.setCriteriaValueDetail(shippingEstimate.getNumberOfItems().getUnit());
    }
    // Record this criteria value in the reference table keyed by the source value,
    // so later passes can resolve it by external product id + criteria code.
    updateReferenceTable(existingProduct.getExternalProductId(), ApplicationConstants.CONST_SHIPPING_ITEM_CRITERIA_CODE,
        String.valueOf(ProductParserUtil.getShippingItem(shippingEstimate)), criteriaSetValue);
    finalCriteriaSetValues.add(criteriaSetValue);
    LOGGER.info("Completed Processing of Shipping Item of Shipping Estimate " + shippingEstimate);
    matchedCriteriaSet.setCriteriaSetValues(finalCriteriaSetValues);
    return matchedCriteriaSet;
}

/**
 * Indexes the existing criteria-set values by the key derived from their value
 * so new values can be matched against them in O(1).
 *
 * @param existingCriteriaSetValues values to index; null entries are skipped
 * @return map from value key to its entry
 */
private HashMap<String, CriteriaSetValues> createTableForExistingSetValue(List<CriteriaSetValues> existingCriteriaSetValues) {
    if (LOGGER.isTraceEnabled()) {
        LOGGER.trace("Started createTableForExistingSetValue(), " + System.currentTimeMillis());
    }
    HashMap<String, CriteriaSetValues> existing = new HashMap<String, CriteriaSetValues>(existingCriteriaSetValues.size());
    for (CriteriaSetValues criteriaSetValue : existingCriteriaSetValues) {
        if (criteriaSetValue != null) {
            existing.put(getKeyFromValue(criteriaSetValue.getValue()), criteriaSetValue);
        }
    }
    if (LOGGER.isTraceEnabled()) {
        LOGGER.trace("Completed createTableForExistingSetValue(), " + System.currentTimeMillis());
    }
    return existing;
}

/**
 * Builds the Value payload for the shipping-item criteria from the estimate's
 * number-of-items entry. The attribute id and unit-of-measure code are resolved
 * via the helper lookups; when the unit is unknown the generic "Other" unit is
 * used instead. On missing lookup data or an empty unit/value pair a batch
 * error is logged and null is returned.
 *
 * @param externalProductId product id used when reporting batch errors
 * @param shippingItemValue unit/value pair taken from the shipping estimate
 * @return populated Value, or null when it cannot be built
 */
private Value getValueForShippingItem(String externalProductId, com.asi.ext.api.service.model.Value shippingItemValue) {
    if (!CommonUtilities.isValueNull(shippingItemValue.getValue()) || !CommonUtilities.isValueNull(shippingItemValue.getUnit())) {
        String criteriaSetAttributeId = getCriteriaSetAttributeId(ApplicationConstants.CONST_SHIPPING_ITEM_CRITERIA_CODE);
        String unitOfMeasureCode = getUnitOfMeasureCode(ApplicationConstants.CONST_SHIPPING_ITEM_CRITERIA_CODE,
            shippingItemValue.getUnit());
        if (unitOfMeasureCode == null) {
            // Unknown unit: fall back to the generic "Other" unit of measure.
            unitOfMeasureCode = getUnitOfMeasureCode(ApplicationConstants.CONST_SHIPPING_ITEM_CRITERIA_CODE,
                ApplicationConstants.CONST_STRING_OTHER);
        }
        if (criteriaSetAttributeId != null && unitOfMeasureCode != null) {
            Value value = new Value();
value.setCriteriaAttributeId(criteriaSetAttributeId); value.setUnitOfMeasureCode(unitOfMeasureCode); value.setUnitValue(shippingItemValue.getValue()); return value; } else { addErrorToBatchLogCollection(externalProductId, ApplicationConstants.CONST_BATCH_ERR_GENERIC_PLHDR, "One of the required attribute not found in the Shipping Estimate lookup data "); return null; } } else { addErrorToBatchLogCollection(externalProductId, ApplicationConstants.CONST_BATCH_ERR_INVALID_VALUE, "Invalid value found for Number of Items in Shipping Estimate unit : " + shippingItemValue.getUnit() + ", value : " + shippingItemValue.getValue()); } return null; } }
package components;

import com.badlogic.ashley.core.Component;
import javafx.scene.paint.Color;
import javafx.scene.shape.Circle;

/**
 * Ashley ECS component holding the circular coverage area of a cell.
 * The circle is filled with a translucent light-sky-blue on construction.
 */
public class CellComponent implements Component {

    /** The circle describing this cell's coverage area. */
    public Circle cellCoverage;

    /**
     * Wraps the given coverage circle and paints it light sky blue (135, 206, 250)
     * at 30% opacity.
     *
     * @param cellCoverage the circle describing this cell's coverage area
     */
    public CellComponent(Circle cellCoverage) {
        this.cellCoverage = cellCoverage;
        // Fix: the previous code used new Color(135/255, 206/255, 250/255, 0.3) —
        // integer division made every channel 0, silently rendering translucent
        // black. Color.rgb takes the 0-255 int channels directly.
        this.cellCoverage.setFill(Color.rgb(135, 206, 250, 0.3));
    }
}
package org.suporma.gears; import java.util.AbstractCollection; import java.util.Deque; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.NoSuchElementException; import java.util.function.Predicate; public class HashedLinkedDeque<T> extends AbstractCollection<T> implements Deque<T> { private final Map<T, EquivalenceList> nodeMap; private Node front, back; private int size = 0; private class Node { private Node prev, next; private EquivalenceNode equivalenceNode; private T val; public Node(T val) { this.val = val; this.prev = null; this.next = null; } } private class EquivalenceNode { private final Node node; private EquivalenceNode prev, next; public EquivalenceNode(Node node) { this.node = node; this.prev = null; this.next = null; } } private class EquivalenceList { private EquivalenceNode front, back; private int size; public EquivalenceList() { front = new EquivalenceNode(null); back = new EquivalenceNode(null); front.next = back; back.prev = front; size = 0; } public void shift(Node node) { EquivalenceNode simpleNode = new EquivalenceNode(node); simpleNode.next = front.next; simpleNode.prev = front; front.next = simpleNode; simpleNode.next.prev = simpleNode; ++size; } public void push(Node node) { EquivalenceNode simpleNode = new EquivalenceNode(node); simpleNode.next = back; simpleNode.prev = back.prev; back.prev = simpleNode; simpleNode.prev.next = simpleNode; ++size; } public Node remove(Node node) { EquivalenceNode equivalenceNode = node.equivalenceNode; EquivalenceNode prev = equivalenceNode.prev; EquivalenceNode next = equivalenceNode.next; prev.next = next; next.prev = prev; --size; return equivalenceNode.node; } public int size() { return size; } public Node first() { return front.next.node; } public Node last() { return back.prev.node; } } public HashedLinkedDeque() { nodeMap = new HashMap<>(); front = new Node(null); back = new Node(null); front.next = back; back.prev = front; size = 0; } public int count(T 
val) { return nodeMap.get(val).size(); } private void removeNode(Node node) { Node prev = node.prev; Node next = node.next; prev.next = next; next.prev = prev; EquivalenceList eqList = nodeMap.get(node.val); eqList.remove(node); if (eqList.size() == 0) { nodeMap.remove(node.val); } } public boolean removeIf(Predicate<? super T> filter) { boolean elementsRemoved = false; Node node = front.next; while (node != back) { if (filter.test(node.val)) { removeNode(node); --size; elementsRemoved = true; } node = node.next; } return elementsRemoved; } public int size() { return size; } private class HashedLinkedIterator implements Iterator<T> { private Node node; private final boolean reversed; public HashedLinkedIterator() { this(false); } public HashedLinkedIterator(boolean reversed) { if (reversed) { this.node = back; } else { this.node = front; } this.reversed = reversed; } public boolean hasNext() { if (reversed) return hasPrevious(); return node.next != back; } public T next() { if (reversed) return previous(); node = node.next; return node.val; } private boolean hasPrevious() { return node.prev != front; } private T previous() { node = node.prev; return node.val; } public void remove() { removeNode(node); } } public void addFirst(T e) { Node node = new Node(e); node.prev = front; node.next = front.next; front.next = node; node.next.prev = node; EquivalenceList list = nodeMap.computeIfAbsent(e, (val) -> new EquivalenceList()); list.shift(node); } public void addLast(T e) { Node node = new Node(e); node.next = back; node.prev = back.prev; back.prev = node; node.prev.next = node; EquivalenceList list = nodeMap.computeIfAbsent(e, (val) -> new EquivalenceList()); list.push(node); } public boolean offerFirst(T e) { addFirst(e); return true; } public boolean offerLast(T e) { addLast(e); return true; } public T removeFirst() { if (size > 0) { return pollFirst(); } else { throw new NoSuchElementException(); } } public T removeLast() { if (size > 0) { return pollLast(); } else { 
throw new NoSuchElementException(); } } @Override public T pollFirst() { if (size > 0) { Node node = front.next; removeNode(node); return node.val; } else { return null; } } @Override public T pollLast() { if (size > 0) { Node node = front.next; removeNode(node); return node.val; } else { return null; } } @Override public T getFirst() { if (size > 0) { return front.next.val; } else { throw new NoSuchElementException(); } } @Override public T getLast() { if (size > 0) { return back.prev.val; } else { throw new NoSuchElementException(); } } @Override public T peekFirst() { if (size > 0) { return front.next.val; } else { return null; } } @Override public T peekLast() { if (size > 0) { return back.prev.val; } else { return null; } } @Override public boolean removeFirstOccurrence(Object o) { EquivalenceList eqList = nodeMap.get(o); if (eqList != null) { Node node = eqList.first(); removeNode(node); return true; } else { return false; } } @Override public boolean removeLastOccurrence(Object o) { EquivalenceList eqList = nodeMap.get(o); if (eqList != null) { Node node = eqList.last(); removeNode(node); return true; } else { return false; } } @Override public boolean offer(T e) { return offerLast(e); } @Override public T remove() { return removeFirst(); } @Override public T poll() { return pollFirst(); } @Override public T element() { return getFirst(); } @Override public T peek() { return peekFirst(); } @Override public void push(T e) { addFirst(e); } @Override public T pop() { return removeFirst(); } @Override public Iterator<T> iterator() { return new HashedLinkedIterator(); } @Override public Iterator<T> descendingIterator() { return new HashedLinkedIterator(true); } }
package com.sdl.dxa.modules.context.content; import com.sdl.dxa.modules.context.model.Conditions; import com.sdl.webapp.common.api.content.ConditionalEntityEvaluator; import com.sdl.webapp.common.api.contextengine.ContextClaims; import com.sdl.webapp.common.api.contextengine.ContextClaimsProvider; import com.sdl.webapp.common.api.model.EntityModel; import com.sdl.webapp.common.api.model.ViewModel; import com.sdl.webapp.common.exceptions.DxaException; import lombok.NonNull; import lombok.extern.slf4j.Slf4j; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Component; import java.util.HashSet; import java.util.Map; import java.util.Set; /** * Implementation of {@link ConditionalEntityEvaluator} that analyzes Context Expression Conditions * set as {@link ViewModel#getExtensionData()}. */ @Component @Slf4j public class ContextExpressionEntityEvaluator implements ConditionalEntityEvaluator { @Autowired private ContextClaimsProvider contextClaimsProvider; @Value("${dxa.modules.contextexpr.extension_data_map_key}") private String contextExpressionsKey = "ContextExpressions"; /** * {@inheritDoc} * <p>Determines whether a given Entity Model should be included * based on the conditions specified on the Entity Model and the context.</p> */ @Override public boolean includeEntity(@NonNull EntityModel entity) { if (entity.getExtensionData() == null || !entity.getExtensionData().containsKey(contextExpressionsKey)) { log.debug("Entity {} is included because there is no extension data with a key {}", entity, contextExpressionsKey); return true; } Conditions conditions = (Conditions) entity.getExtensionData().get(contextExpressionsKey); if (conditions == null || conditions.isEmpty()) { log.warn("Found conditions, but they are null or empty, that looks like an error!"); return true; } try { 
Map<String, Object> contextClaims = contextClaimsProvider.getContextClaims(null); boolean isExcludedNoIncludes = false, isExcludedAnyExclude = false; if (shouldBeExcluded(conditions.getIncludes(), contextClaims, Mode.INCLUDE)) { isExcludedNoIncludes = true; } if (!isExcludedNoIncludes && shouldBeExcluded(conditions.getExcludes(), contextClaims, Mode.EXCLUDE)) { isExcludedAnyExclude = true; } if (isExcludedNoIncludes || isExcludedAnyExclude) { log.debug("suppressing entity because of {} Context Expression conditions; entity {}", isExcludedNoIncludes ? "Include" : "Exclude", entity); return false; } } catch (DxaException e) { log.warn("Exception while requesting context claims, including entity", e); return true; } log.debug("All include/exclude context conditions are satisfied, including Entity"); return true; } private boolean shouldBeExcluded(@Nullable Set<String> cxs, @NonNull Map<String, Object> contextClaims, @NonNull Mode mode) { //if set is null, then we don't process, and return FALSE for "excluded" if (cxs == null || cxs.isEmpty()) { log.debug("Context expression set is empty or null, ignoring"); return false; } //ignore any unknown claims Set<String> filtered = filterCxsByClaims(cxs, contextClaims); if (filtered.isEmpty()) { log.debug("Filtered context expressions set is empty, meaning expressions are not in context claims"); //if set is empty, then we don't process, and return FALSE for "excluded" return false; } //if this is INCLUDE, then any include means FALSE for "excluded" //if this is EXCLUDE, then any exclude means TRUE for "excluded" return (mode == Mode.INCLUDE) != anyCxIsTrue(filtered, contextClaims); } @NotNull private Set<String> filterCxsByClaims(@NonNull Set<String> contextExpressions, @NonNull Map<String, Object> contextClaims) { Set<String> filtered = new HashSet<>(); for (String claimName : contextExpressions) { if (contextClaims.containsKey(claimName)) { filtered.add(claimName); } } return filtered; } private boolean anyCxIsTrue(@NonNull 
Set<String> contextExpressions, @NonNull Map<String, Object> contextClaims) { //also covers if set is empty, then we don't iterate for (String claimName : contextExpressions) { Boolean claimValue = ContextClaims.castClaim(contextClaims.get(claimName), Boolean.class); if (claimValue != null && claimValue) { return true; } } //set is empty or all conditions are not satisfied return false; } private enum Mode { /** * This is include condition. */ INCLUDE, /** * This is exclude condition. */ EXCLUDE } }
package de.teiesti.postie;

import org.pmw.tinylog.Logger;

import java.io.IOException;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketException;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

/**
 * An {@link Office} accepts network connections on a given {@link ServerSocket} and spawns {@link Postman}s to respond
 * to requests. A running {@link Office} awaits an incoming connection, then clones the configured {@link Postman},
 * hands over the connection and starts it. Afterwards it repeats this procedure.<br>
 * <br>
 * To set up an {@link Office} call {@link #spawn(Postman)} with a {@link Postman} that was configured with a {@link
 * Serializer} and all necessary {@link Recipient}s. If a {@link Socket} was bound it will be ignored. There is no
 * trouble if the {@link Postman} is running but the system may behave strangely if the {@link Postman}'s state is
 * changing. In addition, you must call {@link #bind(ServerSocket)} with a {@link ServerSocket}. To complete the
 * setup, call {@link #start()} which starts the required thread. If work is done call {@link #stop()}.
 */
public class Office {

    /** Server socket on which connections are accepted; set via {@link #bind(ServerSocket)}. */
    private ServerSocket serverSocket;

    /** Template postman that is cloned for every accepted connection; set via {@link #spawn(Postman)}. */
    private Postman blueprint;

    // Bookkeeping of postmen spawned by this office. Synchronized wrapper because the
    // acceptor thread adds while postmanHelper (on postman threads) removes concurrently.
    private Set<Postman> postmen = Collections.synchronizedSet(new HashSet<Postman>());

    // Registered on every spawned postman: drops a finished postman from the
    // bookkeeping set once it has accepted its last message.
    private final Recipient postmanHelper = new Recipient() {
        @Override
        public void accept(Object o, Postman from) {
            // intentionally empty: this recipient only tracks postman lifecycle
        }

        @Override
        public void acceptedLast(Postman from) {
            postmen.remove(from);
        }
    };

    /** Thread looping on {@link ServerSocket#accept()}; non-null only between start() and stop(). */
    private Thread acceptor;

    /**
     * Binds this office to the given server socket. Must be called before {@link #start()}.
     *
     * @param serverSocket the socket to accept connections on
     * @return this office, for chaining
     * @throws IllegalArgumentException if {@code serverSocket} is null
     */
    public final synchronized Office bind(ServerSocket serverSocket) {
        if (serverSocket == null)
            throw new IllegalArgumentException("serverSocket == null");

        this.serverSocket = serverSocket;
        return this;
    }

    /**
     * Configures the postman blueprint that is cloned for every accepted connection.
     * The helper recipient is registered on the blueprint so every clone reports back
     * when it finishes.
     *
     * @param blueprint the configured postman to clone per connection
     * @return this office, for chaining
     * @throws IllegalStateException    if this office is already running
     * @throws IllegalArgumentException if {@code blueprint} is null
     */
    public final synchronized Office spawn(Postman blueprint) {
        if (isRunning())
            throw new IllegalStateException("cannot configure a blueprint because this is running");

        if (blueprint == null)
            throw new IllegalArgumentException("blueprint == null");

        this.blueprint = blueprint.register(postmanHelper);
        return this;
    }

    /**
     * Starts the acceptor thread.
     *
     * @return this office, for chaining
     * @throws IllegalStateException if this office is already running
     */
    public final synchronized Office start() {
        if (isRunning())
            throw new IllegalStateException("cannot start because this is already running");

        // TODO check configuration, how?

        acceptor = new Acceptor();
        acceptor.start();

        return this;
    }

    /**
     * Stops this office, leaving already-spawned postmen running.
     *
     * @return this office, for chaining
     */
    public final synchronized Office stop() {
        return stop(false);
    }

    /**
     * Stops this office and optionally every postman it has spawned.
     *
     * @param stopPostmen whether spawned postmen should be stopped as well
     * @return this office, for chaining
     * @throws IllegalStateException if this office is not running
     */
    public final synchronized Office stop(boolean stopPostmen) {
        if (!isRunning())
            throw new IllegalStateException("cannot stop because this is not running");

        try {
            // Closing the server socket makes the blocked accept() throw a
            // SocketException, which the Acceptor treats as a shutdown signal.
            serverSocket.close();
            acceptor.join();
        } catch (IOException | InterruptedException e) {
            // NOTE(review): exiting the whole JVM on a close/join failure is
            // heavy-handed — confirm this is intended.
            Logger.error(e);
            System.exit(1);
        }
        acceptor = null;

        if (stopPostmen)
            synchronized (postmen) {
                for (Postman p : postmen) {
                    p.unregister(postmanHelper);
                    p.stop();
                }
                postmen.clear();
            }

        return this;
    }

    /**
     * Returns whether this {@link Office} is running.
     *
     * @return if this {@link Office} is running.
     */
    public final boolean isRunning() {
        return acceptor != null && acceptor.isAlive();
    }

    /**
     * Thread that accepts connections in a loop: each accepted socket is bound to a
     * fresh clone of the blueprint postman, which is tracked and started.
     */
    private class Acceptor extends Thread {
        @Override
        public void run() {
            Socket socket;
            Postman postman;
            while (true) {
                try {
                    socket = serverSocket.accept();
                    postman = blueprint.clone().bind(socket);
                    postmen.add(postman);
                    postman.start();
                } catch (IOException | CloneNotSupportedException e) {
                    // A SocketException means the server socket was closed by
                    // stop() — exit the accept loop cleanly.
                    if (e instanceof SocketException) break;
                    Logger.error(e);
                    System.exit(1);
                }
            }
        }
    }
}
package org.teamstbf.yats.ui; import java.time.LocalDate; import java.time.format.DateTimeFormatter; import java.util.logging.Logger; import org.teamstbf.yats.commons.core.LogsCenter; import org.teamstbf.yats.commons.events.ui.EventPanelSelectionChangedEvent; import org.teamstbf.yats.commons.util.FxViewUtil; import org.teamstbf.yats.model.Model; import org.teamstbf.yats.model.item.ReadOnlyEvent; import com.sun.javafx.scene.control.skin.DatePickerSkin; import javafx.application.Platform; import javafx.collections.FXCollections; import javafx.collections.ObservableList; import javafx.fxml.FXML; import javafx.scene.Node; import javafx.scene.control.Button; import javafx.scene.control.DatePicker; import javafx.scene.control.Label; import javafx.scene.control.ListCell; import javafx.scene.control.ListView; import javafx.scene.layout.AnchorPane; import javafx.scene.layout.BorderPane; import javafx.scene.layout.Region; //@@author A0138952W @SuppressWarnings("restriction") public class MultiViewPanel extends UiPart<Region> { protected Model model; private final Logger logger = LogsCenter.getLogger(TaskListPanel.class); private static final String FXML = "CalendarView.fxml"; private static final String FXMLPERSON = "PersonListCard.fxml"; private static ObservableList<String[]> timeData = FXCollections.observableArrayList(); private ObservableList<ReadOnlyEvent> calendarList; private final DatePickerSkin calendar; @FXML private AnchorPane calendarPanel; @FXML private BorderPane calendarRoot; @FXML private DatePicker datepicker; @FXML private ListView<ReadOnlyEvent> taskListView; @FXML private ListView<String[]> timeTasks; @FXML private Button prevDate; @FXML private Button nextDate; @FXML private Label date; private static LocalDate today; private static DateTimeFormatter formatter; private static final int TASK_DETAILS = 4; private static final int TASK_TITLE = 0; private static final int TASK_START = 1; private static final int TASK_END = 2; private static final int 
TASK_LOCATION = 3; /** * The AnchorPane where the CalendarView must be inserted * * @param placeholder */ public MultiViewPanel(AnchorPane placeholder, ObservableList<ReadOnlyEvent> observableTaskList, Model model) { super(FXML); this.model = model; datepicker = new DatePicker(today); calendar = new DatePickerSkin(datepicker); today = LocalDate.now(); formatter = DateTimeFormatter.ofPattern("d MMMM"); setConnectionsCalendarView(); setConnectionsDoneView(observableTaskList); addToPlaceholder(placeholder); } private void setConnectionsCalendarView() { Node popupContent = calendar.getPopupContent(); calendarRoot.setCenter(popupContent); updateCurrentDay(today); updateCalendarList(today); timeTasks.setItems(timeData); timeTasks.setCellFactory(listView -> new TimeSlotListViewCell()); setEventHandlerForSelectionChangeEvent(); } private void setConnectionsDoneView(ObservableList<ReadOnlyEvent> observableTaskList) { taskListView.setItems(observableTaskList); taskListView.setCellFactory(listView -> new TaskListViewCell()); setEventHandlerForSelectionChangeEvent(); } private void addToPlaceholder(AnchorPane placeHolderPane) { FxViewUtil.applyAnchorBoundaryParameters(calendarPanel, 0.0, 0.0, 0.0, 0.0); placeHolderPane.getChildren().add(calendarPanel); } private void setEventHandlerForSelectionChangeEvent() { taskListView.getSelectionModel().selectedItemProperty().addListener((observable, oldValue, newValue) -> { if (newValue != null) { logger.fine("Selection in task list panel changed to : '" + newValue + "'"); raise(new EventPanelSelectionChangedEvent(newValue)); } }); } public void scrollTo(int index) { Platform.runLater(() -> { taskListView.scrollTo(index); taskListView.getSelectionModel().clearAndSelect(index); }); } private class TimeSlotListViewCell extends ListCell<String[]> { @Override protected void updateItem(String[] taskSlot, boolean empty) { super.updateItem(taskSlot, empty); if (empty || (taskSlot == null)) { setGraphic(null); setText(null); } else { 
setGraphic(new TimeCard(taskSlot).getRoot()); } } } private class TaskListViewCell extends ListCell<ReadOnlyEvent> { @Override protected void updateItem(ReadOnlyEvent task, boolean empty) { super.updateItem(task, empty); if (empty || task == null) { setGraphic(null); setText(null); } else { if (task.getIsDone().getValue().equals("Yes")) { setGraphic(new TaskCard(task, getIndex() + 1, FXMLPERSON).getRoot()); } } } } private void updateCalendarList(LocalDate day) { String[] data = new String[TASK_DETAILS]; model.updateCalendarFilteredListToShowStartTime(day); calendarList = model.getCalendarFilteredTaskList(); if (calendarList.size() == 0) { timeData.clear(); } else { timeData.clear(); for (int i = 0; i < calendarList.size(); i++) { ReadOnlyEvent event = calendarList.get(i); data[TASK_TITLE] = event.getTitle().toString(); data[TASK_START] = event.getStartTime().toString(); data[TASK_END] = event.getEndTime().toString(); data[TASK_LOCATION] = event.getLocation().toString(); timeData.add(data); String[] data1 = timeData.get(i); System.out.println(data1[j]); } } } } public void prevDay() { MultiViewPanel.today = today.minusDays(1); datepicker.setValue(today); updateCalendarList(today); updateCurrentDay(today); } public void nextDay() { MultiViewPanel.today = today.plusDays(1); datepicker.setValue(today); updateCalendarList(today); updateCurrentDay(today); } public void resetDay() { MultiViewPanel.today = LocalDate.now(); datepicker.setValue(today); updateCalendarList(today); updateCurrentDay(today); } public void updateCurrentDay(LocalDate day) { MultiViewPanel.today = day; date.setText(today.format(formatter)); } }
package org.jdesktop.swingx;

import java.applet.Applet;
import java.awt.AlphaComposite;
import java.awt.Color;
import java.awt.Component;
import java.awt.Container;
import java.awt.Composite;
import java.awt.Dimension;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Insets;
import java.awt.Point;
import java.awt.Rectangle;
import java.awt.Window;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import javax.swing.JComponent;
import javax.swing.SwingConstants;
import javax.swing.SwingUtilities;
import javax.swing.Timer;

/**
 * Component used to display translucent user-interface content.
 * This component and all of its content will be displayed with the specified
 * &quot;alpha&quot; translucency property value. When this component is made visible,
 * its content will fade in until the alpha translucency level is reached.
 * <p>
 * If the glassbox's &quot;dismissOnClick&quot; property is <code>true</code>
 * (the default) then the glassbox will be made invisible when the user
 * clicks on it.</p>
 * <p>
 * This component is particularly useful for displaying transient messages
 * on the glasspane.</p>
 *
 * @author Amy Fowler
 * @version 1.0
 */
public class JXGlassBox extends JXPanel {
    // Total fade-in time budget and the timer tick interval, both in milliseconds.
    private static final int SHOW_DELAY = 30;
    private static final int TIMER_INCREMENT = 10;

    // Fade runs from alphaStart up to alphaEnd.
    private float alphaStart = 0.01f;
    private float alphaEnd = 0.8f;

    // Swing timer driving the fade-in; each tick raises alpha by alphaIncrement.
    private Timer animateTimer;
    // Recomputed in setAlpha(); the 0.02f default is effectively overwritten there.
    private float alphaIncrement = 0.02f;
    private boolean dismissOnClick = false;
    // Lazily created in setDismissOnClick(); reused across enable/disable cycles.
    private MouseAdapter dismissListener = null;

    /**
     * Creates a glass box that fades in to the default target alpha (0.8)
     * and is dismissed by a mouse click.
     */
    public JXGlassBox() {
        setOpaque(false);
        setAlpha(alphaStart);
        setBackground(Color.white);
        setDismissOnClick(true);

        animateTimer = new Timer(TIMER_INCREMENT, new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                setAlpha(getAlpha() + alphaIncrement);
            }
        });
    }

    /**
     * Creates a glass box with the given initial alpha.
     *
     * @param alpha the translucency value to start from
     */
    public JXGlassBox(float alpha) {
        this();
        setAlpha(alpha);
    }

    /**
     * Sets the current alpha and recomputes the per-tick fade increment so the
     * fade from alphaStart to alphaEnd completes in SHOW_DELAY/TIMER_INCREMENT
     * (= 3) timer ticks. Note the integer division: the step count is truncated.
     */
    public void setAlpha(float alpha) {
        super.setAlpha(alpha);
        this.alphaIncrement = (alphaEnd - alphaStart)/(SHOW_DELAY/TIMER_INCREMENT);
    }

    /**
     * Enables or disables click-to-dismiss. When enabled, a click removes this
     * box from its parent and revalidates/repaints the top-level ancestor.
     *
     * @param dismissOnClick true to remove this box from its parent on click
     */
    public void setDismissOnClick(boolean dismissOnClick) {
        boolean oldDismissOnClick = this.dismissOnClick;
        this.dismissOnClick = dismissOnClick;

        if (dismissOnClick && !oldDismissOnClick) {
            if (dismissListener == null) {
                dismissListener = new MouseAdapter() {
                    public void mouseClicked(MouseEvent e) {
                        JComponent glassBox = JXGlassBox.this;
                        JComponent parent = (JComponent) glassBox.getParent();
                        Container toplevel = parent.getTopLevelAncestor();
                        parent.remove(glassBox);
                        toplevel.validate();
                        toplevel.repaint();
                    }
                };
            }
            addMouseListener(dismissListener);
        } else if (!dismissOnClick && oldDismissOnClick) {
            removeMouseListener(dismissListener);
        }
    }

    /**
     * Paints normally, then drives the fade-in: the animation timer is started
     * while alpha is below the target and stopped once the target is reached.
     * Each timer tick changes alpha, which triggers a repaint, which re-enters
     * here — that loop is what animates the fade.
     */
    public void paint(Graphics g) {
        super.paint(g);
        if (!animateTimer.isRunning() && getAlpha() < alphaEnd ) {
            animateTimer.start();
        }
        if (animateTimer.isRunning() && getAlpha() >= alphaEnd) {
            animateTimer.stop();
        }
    }

    /**
     * Resets alpha to the start value before showing, so every show begins a
     * fresh fade-in.
     */
    public void setVisible(boolean visible) {
        setAlpha(alphaStart);
        super.setVisible(visible);
    }

    /** Walks up the parent chain to the enclosing Window or Applet (may be null). */
    private Container getTopLevel() {
        Container p = getParent();
        while (p != null && !(p instanceof Window || p instanceof Applet)) {
            p = p.getParent();
        }
        return p;
    }

    /**
     * Places this box on the given glass pane, positioned relative to a component.
     * NOTE(review): only SwingConstants.TOP is handled; for any other positionHint
     * the box lands at (0, 0) — confirm whether other hints were intended.
     *
     * @param glassPane    the glass pane to add this box to
     * @param component    the component the box is positioned against
     * @param componentX   x offset within the component
     * @param componentY   y offset within the component (currently unused)
     * @param positionHint SwingConstants hint; only TOP is implemented
     */
    public void showOnGlassPane(Container glassPane, Component component,
                                int componentX, int componentY, int positionHint) {
        Dimension boxPrefSize = getPreferredSize();
        Dimension glassSize = glassPane.getSize();
        Rectangle compRect = component.getBounds();
        int boxX = 0;
        int boxY = 0;
        // Clamp the box to the glass pane's size.
        int boxWidth = Math.min(boxPrefSize.width, glassSize.width);
        int boxHeight = Math.min(boxPrefSize.height, glassSize.height);

        Point compLocation = SwingUtilities.convertPoint(component.getParent(),
                compRect.x, compRect.y, glassPane);
        if (positionHint == SwingConstants.TOP) {
            // Prefer placing the box at the component's x offset; otherwise flush right.
            if (compLocation.x + componentX + boxWidth <= glassSize.width) {
                boxX = compLocation.x + componentX;
            } else {
                boxX = glassSize.width - boxWidth;
            }
            // Prefer above the component; fall back to below it, then to the top edge.
            boxY = compLocation.y - boxHeight;
            if (boxY < 0) {
                if (compLocation.y + compRect.height <= glassSize.height) {
                    boxY = compLocation.y + compRect.height;
                } else {
                    boxY = 0;
                }
            }
        }

        glassPane.setLayout(null);
        setBounds(boxX, boxY, boxWidth, boxHeight);
        glassPane.add(this);
        glassPane.setVisible(true);

        Container topLevel = getTopLevel();
        topLevel.validate();
        topLevel.repaint();
    }

    /**
     * Places this box on the given glass pane near an origin point, preferring
     * above-left of the origin and falling back to whatever fits.
     *
     * @param glassPane the glass pane to add this box to
     * @param originX   x coordinate the box is anchored near
     * @param originY   y coordinate the box is anchored near
     */
    public void showOnGlassPane(Container glassPane, int originX, int originY) {
        Dimension boxPrefSize = getPreferredSize();
        Dimension glassSize = glassPane.getSize();
        int boxX = 0;
        int boxY = 0;
        int boxWidth = 0;
        int boxHeight = 0;

        // Clamp the box to the glass pane's size.
        boxWidth = Math.min(boxPrefSize.width, glassSize.width);
        boxHeight = Math.min(boxPrefSize.height, glassSize.height);

        // Vertical: above the origin, else below it, else flush with the bottom.
        if (originY - boxHeight >= 0) {
            boxY = originY - boxHeight;
        } else if (originY + boxHeight <= glassSize.height) {
            boxY = originY;
        } else {
            boxY = glassSize.height - boxHeight;
        }
        // Horizontal: right of the origin, else left of it, else flush right.
        if (originX + boxWidth <= glassSize.width) {
            boxX = originX;
        } else if (originX >= boxWidth) {
            boxX = originX - boxWidth;
        } else {
            boxX = glassSize.width - boxWidth;
        }

        glassPane.setLayout(null);
        setBounds(boxX, boxY, boxWidth, boxHeight);
        glassPane.add(this);
        glassPane.setVisible(true);

        Container topLevel = getTopLevel();
        topLevel.validate();
        topLevel.repaint();
    }
}
package emufog.fog2;

import emufog.graph.EdgeNode;

import java.util.HashSet;
import java.util.Set;

/**
 * A starting (edge) node of the fog-placement search. Keeps track of the set of
 * fog-node candidates that can still cover this edge node.
 */
class StartingNode extends BaseNode {

    /** Fog-node candidates currently able to reach this edge node. */
    private final Set<BaseNode> reachableNodes = new HashSet<>();

    StartingNode(EdgeNode node) {
        super(node);
    }

    /** @return the number of devices attached to the underlying edge node */
    int getDeviceCount() {
        EdgeNode edgeNode = (EdgeNode) node;
        return edgeNode.getDeviceCount();
    }

    /** @return the set of candidate fog nodes able to reach this edge node */
    Set<BaseNode> getReachableNodes() {
        return reachableNodes;
    }

    /**
     * Adds a node to the list of possible nodes for this edge node.
     *
     * @param node possible fog node
     */
    void addPossibleNode(BaseNode node) {
        reachableNodes.add(node);
        modified = true;
    }

    /**
     * Removes a fog node from the list of possible nodes if it's not available any more.
     *
     * @param node fog node to remove
     */
    void removePossibleNode(BaseNode node) {
        modified = reachableNodes.remove(node);
    }

    /**
     * Notifies all possible nodes of this edge node that the node does not have
     * to be covered any more.
     */
    void notifyPossibleNodes() {
        reachableNodes.forEach(candidate -> candidate.removeStartingNode(this));
    }
}
package org.threeten.extra.scale; import java.io.Serializable; import java.util.Objects; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.time.DateTimeException; import java.time.Duration; import java.time.Instant; import java.time.format.DateTimeParseException; /** * An instantaneous point on the time-line measured in the TAI time-scale. * <p> * Most of the Time Framework for Java works on the assumption that the time-line is * simple, there are no leap-seconds and there are always 24 * 60 * 60 seconds in a day. * However, the Earth's rotation is not straightforward, and a solar day does not match * this definition. * <p> * This class is an alternative representation based on the TAI time-scale. * TAI is a single incrementing count of SI seconds. * There are no leap seconds or other discontinuities. * <p> * As a result of the simple definition, this time-scale would make an excellent timestamp. * However, there are, at the time of writing, few easy ways to obtain an accurate TAI instant, * but it is relatively easy to obtain a GPS instant. * GPS and TAI differ by the fixed amount of 19 seconds. * <p> * The duration between two points on the TAI time-scale is calculated solely using this class. * Do not use the {@code between} method on {@code Duration} as that will lose information. * Instead use {@link #durationUntil(TAIInstant)} on this class. * <p> * It is intended that most applications will use the {@code Instant} class * which uses the UTC-SLS mapping from UTC to guarantee 86400 seconds per day. * Specialist applications with access to an accurate time-source may find this class useful. * * <h4>Time-scale</h4> * <p> * The TAI time-scale is a very simple well-regarded representation of time. * The scale is defined using atomic clocks counting SI seconds. * It has proceeded in a continuous uninterrupted manner since the defined * epoch of {@code 1958-01-01T00:00:00(TAI)}. * There are no leap seconds or other discontinuities. 
* <p> * This class may be used for instants in the far past and far future. * Since some instants will be prior to 1958, it is not strictly an implementation of TAI. * Instead, it is a proleptic time-scale based on TAI and equivalent to it since 1958. * * * <h4>Implementation notes</h4> * This class is immutable and thread-safe. */ public final class TAIInstant implements Comparable<TAIInstant>, Serializable { // does not implement InstantProvider as that would enable methods like // Duration.between which gives the wrong answer due to lossy conversion /** * Constant for nanos per second. */ private static final int NANOS_PER_SECOND = 1000000000; /** * Parse regex. */ private static final Pattern PARSER = Pattern.compile("([-]?[0-9]+)\\.([0-9]{9})s[(]TAI[)]"); /** * Serialization version. */ private static final long serialVersionUID = 2133469726395847026L; /** * The number of seconds from the epoch of 1958-01-01T00:00:00(TAI). */ private final long seconds; /** * The number of nanoseconds, later along the time-line, from the seconds field. * This is always positive, and never exceeds 999,999,999. */ private final int nanos; /** * Obtains an instance of {@code TAIInstant} from the number of seconds from * the TAI epoch of 1958-01-01T00:00:00(TAI) with a nanosecond fraction of second. * <p> * This method allows an arbitrary number of nanoseconds to be passed in. * The factory will alter the values of the second and nanosecond in order * to ensure that the stored nanosecond is in the range 0 to 999,999,999. 
* For example, the following will result in the exactly the same instant: * <pre> * TAIInstant.ofSeconds(3, 1); * TAIInstant.ofSeconds(4, -999999999); * TAIInstant.ofSeconds(2, 1000000001); * </pre> * * @param taiSeconds the number of seconds from the epoch of 1958-01-01T00:00:00(TAI) * @param nanoAdjustment the nanosecond adjustment to the number of seconds, positive or negative * @return the TAI instant, not null */ public static TAIInstant ofTAISeconds(long taiSeconds, long nanoAdjustment) { long secs = Math.addExact(taiSeconds, Math.floorDiv(nanoAdjustment, NANOS_PER_SECOND)); long nos = Math.floorMod(nanoAdjustment, NANOS_PER_SECOND); return new TAIInstant(secs, Long.valueOf(nos).intValue()); } /** * Obtains an instance of {@code TAIInstant} from an {@code Instant} * using the system default leap second rules. * <p> * Converting a UTC-SLS instant to a TAI instant requires leap second rules. * This method uses the latest available system rules. * The conversion first maps from UTC-SLS to UTC, then converts to TAI. * <p> * Conversion from an {@code Instant} will not be completely accurate near * a leap second in accordance with UTC-SLS. * * @param instant the instant to convert, not null * @return the TAI instant, not null * @throws ArithmeticException if the calculation exceeds the supported range */ public static TAIInstant of(Instant instant) { return UTCInstant.of(instant).toTAIInstant(); } /** * Obtains an instance of {@code TAIInstant} from a {@code UTCInstant}. * <p> * Converting a UTC instant to a TAI instant requires leap second rules. * This method uses the rules held in within the UTC instant. * <p> * Conversion from a {@code UTCInstant} will be entirely accurate. * The resulting TAI instant will not reference the leap second rules, so * converting back to a UTC instant may result in a different UTC instant. 
* * @param instant the instant to convert, not null * @return the TAI instant, not null * @throws ArithmeticException if the calculation exceeds the supported range */ public static TAIInstant of(UTCInstant instant) { return instant.toTAIInstant(); } /** * Obtains an instance of {@code TAIInstant} from a text string. * <p> * The following format is accepted: * <p><ul> * <li>{@code {seconds}.{nanosOfSecond}s(TAI)} * </ul><p> * The accepted format is strict. * The seconds part must contain only numbers and a possible leading negative sign. * The nanoseconds part must contain exactly nine digits. * The trailing literal must be exactly specified. * This format parses the {@code toString} format. * * @param text the text to parse such as "12345.123456789s(TAI)", not null * @return the parsed instant, not null * @throws DateTimeException if the text cannot be parsed */ public static TAIInstant parse(CharSequence text) { Objects.requireNonNull(text, "text"); Matcher matcher = PARSER.matcher(text); if (matcher.matches()) { try { long seconds = Long.parseLong(matcher.group(1)); long nanos = Long.parseLong(matcher.group(2)); return TAIInstant.ofTAISeconds(seconds, nanos); } catch (NumberFormatException ex) { throw new DateTimeParseException("The text could not be parsed", text, 0, ex); } } throw new DateTimeParseException("The text could not be parsed", text, 0); } /** * Constructs an instance. * * @param taiSeconds the number of TAI seconds from the epoch * @param nanoOfSecond the nanoseconds within the second, from 0 to 999,999,999 */ private TAIInstant(long taiSeconds, int nanoOfSecond) { super(); this.seconds = taiSeconds; this.nanos = nanoOfSecond; } /** * Gets the number of seconds from the TAI epoch of 1958-01-01T00:00:00(TAI). * <p> * The TAI second count is a simple incrementing count of seconds where * second 0 is 1958-01-01T00:00:00(TAI). * The nanosecond part of the day is returned by {@code getNanosOfSecond}. 
* * @return the seconds from the epoch of 1958-01-01T00:00:00(TAI) */ public long getTAISeconds() { return seconds; } /** * Returns a copy of this {@code TAIInstant} with the number of seconds * from the TAI epoch of 1958-01-01T00:00:00(TAI). * <p> * The TAI second count is a simple incrementing count of seconds where * second 0 is 1958-01-01T00:00:00(TAI). * The nanosecond part of the day is returned by {@code getNanosOfSecond}. * <p> * This instance is immutable and unaffected by this method call. * * @param taiSeconds the number of seconds from the epoch of 1958-01-01T00:00:00(TAI) * @return a {@code TAIInstant} based on this instant with the requested second, not null */ public TAIInstant withTAISeconds(long taiSeconds) { return ofTAISeconds(taiSeconds, nanos); } /** * Gets the number of nanoseconds, later along the time-line, from the start * of the second. * <p> * The nanosecond-of-second value measures the total number of nanoseconds from * the second returned by {@code getTAISeconds}. * * @return the nanoseconds within the second, from 0 to 999,999,999 */ public int getNano() { return nanos; } public TAIInstant withNano(int nanoOfSecond) { if (nanoOfSecond < 0 || nanoOfSecond >= NANOS_PER_SECOND) { throw new IllegalArgumentException("NanoOfSecond must be from 0 to 999,999,999"); } return ofTAISeconds(seconds, nanoOfSecond); } /** * Returns a copy of this instant with the specified duration added. * <p> * The duration is added using simple addition of the seconds and nanoseconds * in the duration to the seconds and nanoseconds of this instant. * As a result, the duration is treated as being measured in TAI compatible seconds * for the purpose of this method. * <p> * This instance is immutable and unaffected by this method call. 
* * @param duration the duration to add, not null * @return a {@code TAIInstant} based on this instant with the duration added, not null * @throws ArithmeticException if the calculation exceeds the supported range */ public TAIInstant plus(Duration duration) { long secsToAdd = duration.getSeconds(); int nanosToAdd = duration.getNano(); if ((secsToAdd | nanosToAdd) == 0) { return this; } long secs = Math.addExact(seconds, secsToAdd); long nanoAdjustment = ((long) nanos) + nanosToAdd; // safe int+int return ofTAISeconds(secs, nanoAdjustment); } /** * Returns a copy of this instant with the specified duration subtracted. * <p> * The duration is subtracted using simple subtraction of the seconds and nanoseconds * in the duration from the seconds and nanoseconds of this instant. * As a result, the duration is treated as being measured in TAI compatible seconds * for the purpose of this method. * <p> * This instance is immutable and unaffected by this method call. * * @param duration the duration to subtract, not null * @return a {@code TAIInstant} based on this instant with the duration subtracted, not null * @throws ArithmeticException if the calculation exceeds the supported range */ public TAIInstant minus(Duration duration) { long secsToSubtract = duration.getSeconds(); int nanosToSubtract = duration.getNano(); if ((secsToSubtract | nanosToSubtract) == 0) { return this; } long secs = Math.subtractExact(seconds, secsToSubtract); long nanoAdjustment = ((long) nanos) - nanosToSubtract; // safe int+int return ofTAISeconds(secs, nanoAdjustment); } /** * Returns the duration between this instant and the specified instant. * <p> * This calculates the duration between this instant and another based on * the TAI time-scale. Adding the duration to this instant using {@link #plus} * will always result in an instant equal to the specified instant. 
* * @param taiInstant the instant to calculate the duration until, not null * @return the duration until the specified instant, may be negative, not null * @throws ArithmeticException if the calculation exceeds the supported range */ public Duration durationUntil(TAIInstant taiInstant) { long durSecs = Math.subtractExact(taiInstant.seconds, seconds); long durNanos = taiInstant.nanos - nanos; return Duration.ofSeconds(durSecs, durNanos); } /** * Converts this instant to a {@code UTCInstant} using the system default * leap second rules. * <p> * This method converts this instant from the TAI to the UTC time-scale using the * system default leap-second rules. This conversion does not lose information * and the UTC instant may safely be converted back to a {@code TAIInstant}. * * @return a {@code UTCInstant} representing the same instant using the system leap second rules, not null */ public UTCInstant toUTCInstant() { return UTCInstant.of(this, UTCRules.system()); } /** * Converts this instant to an {@code Instant} using the system default * leap second rules. * <p> * This method converts this instant from the TAI to the UTC-SLS time-scale using the * system default leap-second rules to convert to UTC. * This conversion will lose information around a leap second in accordance with UTC-SLS. * Converting back to a {@code TAIInstant} may result in a slightly different instant. * * @return an {@code Instant} representing the best approximation of this instant, not null */ public Instant toInstant() { return toUTCInstant().toInstant(); } /** * Compares this instant to another based on the time-line. 
* * @param otherInstant the other instant to compare to, not null * @return the comparator value, negative if less, positive if greater */ public int compareTo(TAIInstant otherInstant) { int cmp = Long.compare(seconds, otherInstant.seconds); if (cmp != 0) { return cmp; } return nanos - otherInstant.nanos; } /** * Checks if this instant is equal to the specified {@code TAIInstant}. * * @param otherInstant the other instant, null returns false * @return true if the other instant is equal to this one */ @Override public boolean equals(Object otherInstant) { if (this == otherInstant) { return true; } if (otherInstant instanceof TAIInstant) { TAIInstant other = (TAIInstant) otherInstant; return this.seconds == other.seconds && this.nanos == other.nanos; } return false; } /** * Returns a hash code for this instant. * * @return a suitable hash code */ @Override public int hashCode() { // TODO: Evaluate hash code return ((int) (seconds ^ (seconds >>> 32))) + 51 * nanos; } /** * A string representation of this instant. * <p> * The string is formatted as {@code {seconds).(nanosOfSecond}s(TAI)}. * At least one second digit will be present. * The nanoseconds will always be nine digits. * * @return a representation of this instant, not null */ @Override public String toString() { StringBuilder buf = new StringBuilder(); buf.append(seconds); int pos = buf.length(); buf.append(nanos + NANOS_PER_SECOND); buf.setCharAt(pos, '.'); buf.append("s(TAI)"); return buf.toString(); } }
package net.somethingdreadful.MAL.account;

import android.accounts.AbstractAccountAuthenticator;
import android.accounts.Account;
import android.accounts.AccountAuthenticatorResponse;
import android.accounts.AccountManager;
import android.accounts.NetworkErrorException;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.os.Bundle;
import android.os.IBinder;
import android.text.TextUtils;
import android.util.Log;

import com.crashlytics.android.Crashlytics;
import com.freshdesk.mobihelp.Mobihelp;

import net.somethingdreadful.MAL.PrefManager;
import net.somethingdreadful.MAL.Theme;
import net.somethingdreadful.MAL.database.DatabaseTest;

/**
 * Service that owns the app's single user account (MyAnimeList or AniList),
 * stored via Android's {@link AccountManager}. Also exposes the
 * {@link AbstractAccountAuthenticator} bound by the sync framework.
 * <p>
 * NOTE(review): all state is static ({@code context}, {@code account},
 * {@code accountType}); {@link #create(Context)} must run before any other
 * static call, and holding a static Context risks a leak if an Activity
 * context is ever passed in — verify callers pass the application context.
 */
public class AccountService extends Service {
    public static AccountType accountType;       // site of the cached account; set by getAccount()/addAccount()
    private static Account account;              // cached account; null until getAccount() finds one
    private static Context context;              // set once via create(); used by all static helpers
    private Authenticator mAuthenticator;        // bound authenticator instance, created in onCreate()

    /**
     * The current account data version. When the version stored in the
     * account's user data differs, {@link #onUpgrade()} runs.
     */
    private static int accountVersion = 2;

    /**
     * Initialises the static context used by every other static method.
     *
     * @param context the context to keep (should be the application context)
     */
    public static void create(Context context) {
        AccountService.context = context;
    }

    /**
     * This is used for Account upgrade purpose.
     * <p>
     * NOTE(review): the switch runs on the *new* constant {@code accountVersion}
     * (always 2 here), not the previously stored version, so only {@code case 2}
     * executes; {@code case 1} intentionally has no {@code break} so it would
     * fall through into {@code case 2}. Confirm this matches the intended
     * stepwise-upgrade design.
     */
    private static void onUpgrade() {
        Crashlytics.log(Log.INFO, "MALX", "AccountService.onUpgrade(): Upgrading to " + String.valueOf(accountVersion) + ".");
        setAccountVersion(accountVersion);
        switch (accountVersion) {
            case 1: // We support now all Anilist scores, the user needs to log out (2.1 beta 3).
                if (!accountType.equals(AccountType.MyAnimeList))
                    deleteAccount();
                // falls through
            case 2: // We added new base models to make loading easier, the user needs to log out (2.2 beta 1).
                deleteAccount();
        }
    }

    /**
     * Get the provider whose behavior is being controlled.
     * <p>
     * A purely numeric versionName (e.g. "2.2.1") is treated as a release
     * build; anything else (e.g. "2.2 beta 1") selects the beta provider.
     *
     * @return String the provider class suffix for this build flavour
     * @throws PackageManager.NameNotFoundException if our own package info cannot be read
     */
    public static String getAuth() throws PackageManager.NameNotFoundException {
        PackageInfo pInfo = context.getPackageManager().getPackageInfo(context.getPackageName(), 0);
        if (TextUtils.isDigitsOnly(pInfo.versionName.replace(".", "")))
            return ".account.Provider";
        else
            return ".beta.account.Provider";
    }

    /**
     * Get the username of an account.
     * <p>
     * Side effects: forwards the username to Mobihelp and Crashlytics.
     *
     * @return String the username, or null when no account exists
     */
    public static String getUsername() {
        if (getAccount() == null)
            return null;
        String username = getAccount().name;
        Mobihelp.setUserFullName(context, username);
        Crashlytics.setUserName(username);
        return username;
    }

    /**
     * Get the password of an account.
     *
     * @return String the password, or null when no account exists
     */
    public static String getPassword() {
        Account account = getAccount();
        if (account == null)
            return null;
        AccountManager accountManager = AccountManager.get(context);
        return accountManager.getPassword(account);
    }

    /**
     * Get an Account on the device.
     * <p>
     * Lazily resolves and caches the first account of our type, records the
     * site/version for crash reports, and triggers {@link #onUpgrade()} when
     * the stored "accountVersion" user data is missing or differs from
     * {@link #accountVersion}.
     *
     * @return Account the account, or null when none is registered
     */
    public static Account getAccount() {
        if (account == null) {
            AccountManager accountManager = AccountManager.get(context);
            Account[] myaccount = accountManager.getAccountsByType(".account.SyncAdapter.account");
            String version = String.valueOf(accountVersion);
            if (myaccount.length > 0) {
                accountType = getAccountType(accountManager.getUserData(myaccount[0], "accountType"));
                version = accountManager.getUserData(myaccount[0], "accountVersion");
                Theme.setCrashData("Site", AccountService.accountType.toString());
                Theme.setCrashData("accountVersion", version);
            }
            account = myaccount.length > 0 ? myaccount[0] : null;
            if (version == null || accountVersion != Integer.parseInt(version))
                onUpgrade();
        }
        return account;
    }

    /**
     * Checks if the active account is a MyAnimeList account.
     * <p>
     * NOTE(review): throws NullPointerException if no account exists
     * ({@code accountType} stays null) — confirm callers only use this
     * while logged in.
     */
    public static boolean isMAL() {
        getAccount();
        return accountType.equals(AccountType.MyAnimeList);
    }

    /**
     * Get the authtoken with the given string.
     *
     * @param type The authToken string
     * @return AccountType the type of account (MyAnimeList when the string is unknown)
     */
    public static AccountType getAccountType(String type) {
        if (AccountType.AniList.toString().equals(type))
            return AccountType.AniList;
        else
            return AccountType.MyAnimeList;
    }

    /**
     * Removes an account from the accountmanager and clears the cache.
     */
    public static void deleteAccount() {
        AccountManager accountManager = AccountManager.get(context);
        accountManager.removeAccount(getAccount(), null, null);
        account = null;
    }

    /**
     * Add an account in the accountmanager.
     *
     * @param username The username of the account that will be saved
     * @param password The password of the account that will be saved
     * @param accountType The site this account belongs to
     */
    public static void addAccount(String username, String password, AccountType accountType) {
        AccountManager accountManager = AccountManager.get(context);
        final Account account = new Account(username, ".account.SyncAdapter.account");
        accountManager.addAccountExplicitly(account, password, null);
        accountManager.setUserData(account, "accountType", accountType.toString());
        accountManager.setUserData(account, "accountVersion", String.valueOf(accountVersion));
        AccountService.accountType = accountType;
    }

    /**
     * Add an accesToken to the Account data.
     * <p>
     * The expiry instant is stored as epoch seconds minus a 60 second safety
     * margin. (The key "accesToken" is a persisted spelling — do not rename.)
     *
     * @param token The AccesToken which should be stored
     * @param time  The time till the token will expire (seconds)
     * @return String the token
     */
    public static String setAccesToken(String token, Long time) {
        AccountManager accountManager = AccountManager.get(context);
        accountManager.setUserData(getAccount(), "accesToken", token);
        accountManager.setUserData(getAccount(), "accesTokenTime", Long.toString((System.currentTimeMillis() / 1000) + (time - 60)));
        return token;
    }

    /**
     * Get the accesToken.
     * <p/>
     * Note: this method will return null if the accesToken is expired
     * or its expiry time cannot be read.
     *
     * @return String accesToken
     */
    public static String getAccesToken() {
        AccountManager accountManager = AccountManager.get(context);
        String token = accountManager.getUserData(getAccount(), "accesToken");
        try {
            Long expireTime = Long.parseLong(accountManager.getUserData(getAccount(), "accesTokenTime"));
            Long time = System.currentTimeMillis() / 1000;
            Long timeLeft = expireTime - time;
            Crashlytics.log(Log.INFO, "MALX", "AccountService: The accestoken will expire in " + Long.toString(timeLeft / 60) + " minutes.");
            return timeLeft >= 0 ? token : null;
        } catch (Exception e) {
            // Missing or non-numeric expiry data: treat token as unusable.
            Crashlytics.log(Log.INFO, "MALX", "AccountService: The expire time could not be received.");
            return null;
        }
    }

    /**
     * Store the account data version in the accountmanager.
     * No-op when no account is cached yet.
     *
     * @param accountVersion The new accountversion of the account that will be saved
     */
    public static void setAccountVersion(int accountVersion) {
        if (account != null) {
            AccountManager accountManager = AccountManager.get(context);
            accountManager.setUserData(account, "accountVersion", String.valueOf(accountVersion));
        }
    }

    /**
     * Set a refresh token in the accountmanager.
     *
     * @param refreshToken The refresh token of the account that will be saved
     */
    public static void setRefreshToken(String refreshToken) {
        AccountManager accountManager = AccountManager.get(context);
        accountManager.setUserData(getAccount(), "refreshToken", refreshToken);
    }

    /**
     * Get the refresh token from the accountmanager.
     */
    public static String getRefreshToken() {
        AccountManager accountManager = AccountManager.get(context);
        return accountManager.getUserData(getAccount(), "refreshToken");
    }

    /**
     * Removes the userdata: database, Mobihelp data and the account itself.
     *
     * @param prefs If true it will remove all the prefrences saved.
     */
    public static void clearData(boolean prefs) {
        DatabaseTest.deleteDatabase(context);
        if (prefs)
            PrefManager.clear();
        Mobihelp.clearUserData(context);
        AccountService.deleteAccount();
    }

    @Override
    public void onCreate() {
        mAuthenticator = new Authenticator(this);
    }

    @Override
    public IBinder onBind(Intent intent) {
        return mAuthenticator.getIBinder();
    }

    /**
     * Minimal authenticator required by the sync framework; every operation
     * except {@link #confirmCredentials} is unsupported because accounts are
     * only created programmatically via {@link AccountService#addAccount}.
     */
    public class Authenticator extends AbstractAccountAuthenticator {

        public Authenticator(Context context) {
            super(context);
        }

        @Override
        public Bundle editProperties(AccountAuthenticatorResponse accountAuthenticatorResponse, String s) {
            throw new UnsupportedOperationException();
        }

        @Override
        public Bundle addAccount(AccountAuthenticatorResponse accountAuthenticatorResponse, String s, String s2, String[] strings, Bundle bundle) throws NetworkErrorException {
            throw new UnsupportedOperationException();
        }

        @Override
        public Bundle confirmCredentials(AccountAuthenticatorResponse accountAuthenticatorResponse, Account account, Bundle bundle) throws NetworkErrorException {
            return null;
        }

        @Override
        public Bundle getAuthToken(AccountAuthenticatorResponse accountAuthenticatorResponse, Account account, String s, Bundle bundle) throws NetworkErrorException {
            throw new UnsupportedOperationException();
        }

        @Override
        public String getAuthTokenLabel(String s) {
            throw new UnsupportedOperationException();
        }

        @Override
        public Bundle updateCredentials(AccountAuthenticatorResponse accountAuthenticatorResponse, Account account, String s, Bundle bundle) throws NetworkErrorException {
            throw new UnsupportedOperationException();
        }

        @Override
        public Bundle hasFeatures(AccountAuthenticatorResponse accountAuthenticatorResponse, Account account, String[] strings) throws NetworkErrorException {
            throw new UnsupportedOperationException();
        }
    }
}
package org.jaxen.dom4j;

import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import junit.textui.TestRunner;

import java.util.Iterator;
import java.util.List;

import org.dom4j.Attribute;
import org.dom4j.Document;
import org.dom4j.Element;
import org.dom4j.Namespace;
import org.dom4j.io.SAXReader;
import org.dom4j.tree.DefaultAttribute;
import org.dom4j.tree.DefaultDocument;
import org.dom4j.tree.DefaultElement;
import org.jaxen.XPath;
import org.jaxen.saxpath.SAXPathException;
import org.jaxen.saxpath.helpers.XPathReaderFactory;

/**
 * Tests for evaluating Jaxen XPath expressions against dom4j documents.
 * <p>
 * Fix: the catch blocks previously called {@code fail(e.getMessage())},
 * which drops the exception type and may pass a null message;
 * {@code fail(e.toString())} keeps both.
 */
public class XPathTest extends TestCase {

    private static final String BASIC_XML = "xml/basic.xml";

    public static void main(String[] args) {
        TestRunner.run(suite());
    }

    public static Test suite() {
        return new TestSuite(XPathTest.class);
    }

    public XPathTest(String name) {
        super(name);
    }

    public void setUp() {
        // Clear any driver override so the default XPath reader is used.
        System.setProperty(XPathReaderFactory.DRIVER_PROPERTY, "");
    }

    /** A syntactically valid expression must construct without error. */
    public void testConstruction() {
        try {
            new Dom4jXPath("/foo/bar/baz");
        } catch (SAXPathException e) {
            fail(e.toString());
        }
    }

    /** Selecting /foo/bar/baz from basic.xml yields exactly three baz elements. */
    public void testSelection() {
        try {
            XPath xpath = new Dom4jXPath("/foo/bar/baz");
            SAXReader reader = new SAXReader();
            Document doc = reader.read(BASIC_XML);
            List results = xpath.selectNodes(doc);
            assertEquals(3, results.size());
            Iterator iter = results.iterator();
            assertEquals("baz", ((Element) iter.next()).getName());
            assertEquals("baz", ((Element) iter.next()).getName());
            assertEquals("baz", ((Element) iter.next()).getName());
            assertFalse(iter.hasNext());
        } catch (Exception e) {
            fail(e.toString());
        }
    }

    /** booleanValueOf must evaluate comparison expressions to true/false. */
    public void testAsBoolean() {
        try {
            XPath xpath = new Dom4jXPath("/root/a = 'a'");
            SAXReader reader = new SAXReader();
            Document doc = reader.read("xml/simple.xml");
            boolean answer = xpath.booleanValueOf(doc);
            assertTrue("Xpath worked: " + xpath, answer);
            xpath = new Dom4jXPath("'a' = 'b'");
            answer = xpath.booleanValueOf(doc);
            assertFalse("XPath should return false: " + xpath, answer);
        } catch (Exception e) {
            fail(e.toString());
        }
    }

    /**
     * A namespaced element with a namespaced attribute exposes three namespace
     * nodes: xml, the element's and the attribute's (regression for Jaxen #20).
     */
    public void testJaxen20AttributeNamespaceNodes() {
        try {
            Namespace ns1 = Namespace.get("p1", "www.acme1.org");
            Namespace ns2 = Namespace.get("p2", "www.acme2.org");
            Element element = new DefaultElement("test", ns1);
            Attribute attribute = new DefaultAttribute("pre:foo", "bar", ns2);
            element.add(attribute);
            Document doc = new DefaultDocument(element);
            XPath xpath = new Dom4jXPath("//namespace::node()");
            List results = xpath.selectNodes(doc);
            assertEquals(3, results.size());
        } catch (Exception e) {
            fail(e.toString());
        }
    }

    /** Namespace nodes of an ancestor are visible on nested elements too. */
    public void testNamespaceNodesAreInherited() {
        try {
            Namespace ns0 = Namespace.get("p0", "www.acme0.org");
            Namespace ns1 = Namespace.get("p1", "www.acme1.org");
            Namespace ns2 = Namespace.get("p2", "www.acme2.org");
            Element element = new DefaultElement("test", ns1);
            Attribute attribute = new DefaultAttribute("pre:foo", "bar", ns2);
            element.add(attribute);
            Element root = new DefaultElement("root", ns0);
            root.add(element);
            Document doc = new DefaultDocument(root);
            XPath xpath = new Dom4jXPath("namespace::node()");
            List results = xpath.selectNodes(doc);
            assertEquals(4, results.size());
        } catch (Exception e) {
            fail(e.toString());
        }
    }
}
package io.github.benas.jpopulator.randomizers.validation;

import org.apache.commons.math3.random.RandomDataGenerator;

import java.math.BigDecimal;
import java.math.BigInteger;

/**
 * A randomizer that generates random values less than or equal to a maximum value.
 *
 * This is used for fields annotated with {@link javax.validation.constraints.Max}.
 *
 * @author Mahmoud Ben Hassine (md.benhassine@gmail.com)
 */
public class MaxValueRandomizer {

    // Shared generator; made final so the reference cannot be reassigned.
    private static final RandomDataGenerator randomDataGenerator = new RandomDataGenerator();

    // Utility class: no instances.
    private MaxValueRandomizer() {
    }

    /**
     * Generate a random value for the given type.
     * <p>
     * The value is drawn uniformly from [TYPE_MIN, maxValue] and then cast to
     * the requested type. NOTE(review): when {@code maxValue} exceeds the
     * type's own maximum (e.g. {@code Byte} with {@code maxValue = 1000}) the
     * narrowing cast wraps — confirm callers only pass in-range maxima.
     *
     * @param type     the type for which a random value will be generated
     * @param maxValue the maximum threshold for the generated value
     * @return a random value (lower than maxValue) for the given type or null if the type is not supported
     */
    public static Object getRandomValue(final Class type, final long maxValue) {

        if (type.equals(Byte.TYPE) || type.equals(Byte.class)) {
            return (byte) randomDataGenerator.nextLong(Byte.MIN_VALUE, maxValue);
        }
        if (type.equals(Short.TYPE) || type.equals(Short.class)) {
            return (short) randomDataGenerator.nextLong(Short.MIN_VALUE, maxValue);
        }
        if (type.equals(Integer.TYPE) || type.equals(Integer.class)) {
            return (int) randomDataGenerator.nextLong(Integer.MIN_VALUE, maxValue);
        }
        if (type.equals(Long.TYPE) || type.equals(Long.class)) {
            return randomDataGenerator.nextLong(Long.MIN_VALUE, maxValue);
        }
        if (type.equals(BigInteger.class)) {
            // valueOf avoids the pointless long -> String -> BigInteger round trip.
            return BigInteger.valueOf(randomDataGenerator.nextLong(Long.MIN_VALUE, maxValue));
        }
        if (type.equals(BigDecimal.class)) {
            return BigDecimal.valueOf(randomDataGenerator.nextLong(Long.MIN_VALUE, maxValue));
        }

        // Unsupported type: the caller treats null as "no value generated".
        return null;
    }

}
package net.somethingdreadful.MAL.api.BaseModels;

import android.database.Cursor;

import net.somethingdreadful.MAL.PrefManager;
import net.somethingdreadful.MAL.account.AccountService;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;

import lombok.Getter;
import lombok.Setter;

/**
 * Base model for a user profile, shared between the MyAnimeList and AniList
 * backends. Accessors are generated by Lombok; instances are also hydrated
 * from database cursors via {@link #fromCursor(Cursor)} and
 * {@link #friendFromCursor(Cursor)}.
 */
public class Profile implements Serializable {

    /**
     * List of developers (lowercase usernames, one array per site).
     */
    static String[] developersMAL = {"ratan12", "motoko"};
    static String[] developersAL = {"ratan12", "motokoaoyama"};

    /**
     * The username of the requested profile.
     */
    @Getter @Setter private String username;

    /**
     * The profile image of the user
     */
    @Getter @Setter private String imageUrl;

    /**
     * The names of the custom anime list
     * <p/>
     * Website: AniList
     */
    @Getter @Setter private ArrayList<String> customAnime;

    /**
     * The names of the custom manga list.
     * <p/>
     * Website: AniList
     */
    @Getter @Setter private ArrayList<String> customManga;

    /**
     * The profile banner.
     * <p/>
     * Website: AniList
     */
    @Getter @Setter private String imageUrlBanner;

    /**
     * The number of notifications
     * <p/>
     * Website: AniList
     */
    @Getter @Setter private int notifications;

    /**
     * A MangaStats object containing Stats of the user
     */
    @Getter @Setter private net.somethingdreadful.MAL.api.MALModels.Profile.MangaStats mangaStats;

    /**
     * An AnimeStats object containing Stats of the user
     */
    @Getter @Setter private net.somethingdreadful.MAL.api.MALModels.Profile.AnimeStats animeStats;

    /**
     * A ProfileDetails object containing general information on the user
     * <p/>
     * Website: MyAnimeList
     */
    @Getter @Setter private net.somethingdreadful.MAL.api.MALModels.Profile.Details details;

    /**
     * The score type that the users is using for displaying info.
     * <p/>
     * Website: AniList
     * (Setter is hand-written below so the value is also persisted.)
     */
    @Getter private int scoreType;

    /**
     * Text about the users.
     * <p/>
     * Website: AniList
     */
    @Getter @Setter private String about;

    /**
     * Activity
     */
    @Getter @Setter ArrayList<History> activity;

    /** Checks whether this profile belongs to an app developer. */
    public boolean isDeveloper() {
        return isDeveloper(getUsername());
    }

    /**
     * Checks whether the given username belongs to an app developer,
     * using the developer list for the active site (MAL or AniList).
     *
     * @param username the username to check, null returns false
     */
    public static boolean isDeveloper(String username) {
        if (username == null)
            return false;
        String[] developers = AccountService.isMAL() ? developersMAL : developersAL;
        return Arrays.asList(developers).contains(username.toLowerCase(Locale.US));
    }

    /**
     * Sets the score type and persists it in the shared preferences,
     * which is why Lombok's generated setter is not used here.
     */
    public void setScoreType(int scoreType) {
        this.scoreType = scoreType;
        PrefManager.setScoreType(scoreType);
        PrefManager.commitChanges();
    }

    /**
     * Builds a minimal Profile (username, image, last-online) from a friend
     * record cursor.
     * <p>
     * NOTE(review): column positions are looked up via indexOf on the cursor's
     * column names; a missing column yields index -1 and the Cursor get call
     * would fail — assumes callers always query all expected columns.
     */
    public static Profile friendFromCursor(Cursor cursor) {
        List<String> columnNames = Arrays.asList(cursor.getColumnNames());
        Profile profile = new Profile();
        profile.setDetails(new net.somethingdreadful.MAL.api.MALModels.Profile.Details());

        profile.setUsername(cursor.getString(columnNames.indexOf("username")));
        profile.setImageUrl(cursor.getString(columnNames.indexOf("imageUrl")));
        profile.getDetails().setLastOnline(cursor.getString(columnNames.indexOf("lastOnline")));
        return profile;
    }

    /**
     * Builds a full Profile (details plus anime/manga statistics) from a
     * profile record cursor. Stat columns are prefixed "Anime"/"Manga".
     * Same missing-column caveat as {@link #friendFromCursor(Cursor)}.
     */
    public static Profile fromCursor(Cursor cursor) {
        List<String> columnNames = Arrays.asList(cursor.getColumnNames());
        Profile profile = new Profile();
        profile.setDetails(new net.somethingdreadful.MAL.api.MALModels.Profile.Details());
        profile.setAnimeStats(new net.somethingdreadful.MAL.api.MALModels.Profile.AnimeStats());
        profile.setMangaStats(new net.somethingdreadful.MAL.api.MALModels.Profile.MangaStats());

        profile.setUsername(cursor.getString(columnNames.indexOf("username")));
        profile.setImageUrl(cursor.getString(columnNames.indexOf("imageUrl")));
        profile.setImageUrlBanner(cursor.getString(columnNames.indexOf("imageUrlBanner")));
        profile.setNotifications(cursor.getInt(columnNames.indexOf("notifications")));

        profile.getDetails().setLastOnline(cursor.getString(columnNames.indexOf("lastOnline")));
        profile.getDetails().setStatus(cursor.getString(columnNames.indexOf("status")));
        profile.getDetails().setGender(cursor.getString(columnNames.indexOf("gender")));
        profile.getDetails().setBirthday(cursor.getString(columnNames.indexOf("birthday")));
        profile.getDetails().setLocation(cursor.getString(columnNames.indexOf("location")));
        profile.getDetails().setWebsite(cursor.getString(columnNames.indexOf("website")));
        profile.getDetails().setJoinDate(cursor.getString(columnNames.indexOf("joinDate")));
        profile.getDetails().setAccessRank(cursor.getString(columnNames.indexOf("accessRank")));
        profile.getDetails().setAnimeListViews(cursor.getInt(columnNames.indexOf("animeListViews")));
        profile.getDetails().setMangaListViews(cursor.getInt(columnNames.indexOf("mangaListViews")));
        profile.getDetails().setForumPosts(cursor.getInt(columnNames.indexOf("forumPosts")));
        profile.getDetails().setComments(cursor.getInt(columnNames.indexOf("comments")));

        profile.getAnimeStats().setTimeDays(cursor.getDouble(columnNames.indexOf("AnimetimeDays")));
        profile.getAnimeStats().setWatching(cursor.getInt(columnNames.indexOf("Animewatching")));
        profile.getAnimeStats().setCompleted(cursor.getInt(columnNames.indexOf("Animecompleted")));
        profile.getAnimeStats().setOnHold(cursor.getInt(columnNames.indexOf("AnimeonHold")));
        profile.getAnimeStats().setDropped(cursor.getInt(columnNames.indexOf("Animedropped")));
        profile.getAnimeStats().setPlanToWatch(cursor.getInt(columnNames.indexOf("AnimeplanToWatch")));
        profile.getAnimeStats().setTotalEntries(cursor.getInt(columnNames.indexOf("AnimetotalEntries")));

        profile.getMangaStats().setTimeDays(cursor.getDouble(columnNames.indexOf("MangatimeDays")));
        profile.getMangaStats().setReading(cursor.getInt(columnNames.indexOf("Mangareading")));
        profile.getMangaStats().setCompleted(cursor.getInt(columnNames.indexOf("Mangacompleted")));
        profile.getMangaStats().setOnHold(cursor.getInt(columnNames.indexOf("MangaonHold")));
        profile.getMangaStats().setDropped(cursor.getInt(columnNames.indexOf("Mangadropped")));
        profile.getMangaStats().setPlanToRead(cursor.getInt(columnNames.indexOf("MangaplanToRead")));
        profile.getMangaStats().setTotalEntries(cursor.getInt(columnNames.indexOf("MangatotalEntries")));
        return profile;
    }
}
package hex.gbm; import java.util.Arrays; import java.util.concurrent.atomic.*; import sun.misc.Unsafe; import water.*; import water.nbhm.UtilUnsafe; import water.util.SB; import water.util.Utils; import water.fvec.Frame; import water.fvec.Vec; /** A Histogram, computed in parallel over a Vec. <p> A {@code DSharedHistogram} bins every value added to it, and computes a the vec min & max (for use in the next split), and response mean & variance for each bin. {@code DSharedHistogram}s are initialized with a min, max and number-of- elements to be added (all of which are generally available from a Vec). Bins run from min to max in uniform sizes. If the {@code DSharedHistogram} can determine that fewer bins are needed (e.g. boolean columns run from 0 to 1, but only ever take on 2 values, so only 2 bins are needed), then fewer bins are used. <p> {@code DSharedHistogram} are shared per-node, and atomically updated. There's an {@code add} call to help cross-node reductions. The data is stored in primitive arrays, so it can be sent over the wire. The {@code AtomicXXXArray} classes are local utility classes for atomically updating primitive arrays. <p> If we are successively splitting rows (e.g. in a decision tree), then a fresh {@code DSharedHistogram} for each split will dynamically re-bin the data. Each successive split will logarithmically divide the data. At the first split, outliers will end up in their own bins - but perhaps some central bins may be very full. At the next split(s), the full bins will get split, and again until (with a log number of splits) each bin holds roughly the same amount of data. This dynamic binning resolves a lot of problems with picking the proper bin count or limits - generally a few more tree levels will equal any fancy but fixed-size binning strategy. 
  <p>
  @author Cliff Click
*/
public class DSharedHistogram extends Iced {
  public final transient String _name; // Column name (for debugging)
  public final byte  _isInt;    // 0: float col, 1: int col, 2: enum & int col
  public final char  _nbin;     // Bin count
  public final float _step;     // Linear interpolation step per bin
  public final float _min, _max; // Conservative Min/Max over whole collection
  public           long   _bins[]; // Bins, shared, atomically incremented
  private          float  _mins[], _maxs[]; // Min/Max, shared, atomically updated
  private          double _sums[], _ssqs[]; // Sums & square-sums, shared, atomically incremented

  // Builds the histogram *shape* only (bin count and interpolation step);
  // the backing arrays are allocated later by init().
  public DSharedHistogram( String name, final int nbins, byte isInt, float min, float max, long nelems ) {
    assert nelems > 0;
    assert nbins >= 1;
    assert max > min : "Caller ensures "+max+">"+min+", since if max==min== the column "+name+" is all constants";
    _isInt = isInt;
    _name = name;
    _min=min;
    _max=max;
    // See if we can show there are fewer unique elements than nbins.
    // Common for e.g. boolean columns, or near leaves.
    int xbins = nbins;
    float step;
    if( isInt>0 && max-min <= nbins ) { // Integer column with a narrow range:
      assert ((long)min)==min;         // one bin per distinct integer value
      xbins = (char)((long)max-(long)min+1L); // Shrink bins
      step = 1.0f;                     // Fixed stepsize
    } else {
      step = (max-min)/nbins;          // Step size for linear interpolation
      if( step == 0 ) { assert max==min; step = 1.0f; }
      assert step > 0;
    }
    _step = 1.0f/step;                 // Store the reciprocal: bin() multiplies instead of divides
    _nbin = (char)xbins;
    // Do not allocate the big arrays here; wait for scoreCols to pick which cols will be used.
  }

  // Interpolate d to find bin.  NaNs always land in bin 0; values below _min
  // or above the last bin are saturated to the boundary bins.
  int bin( float col_data ) {
    if( Float.isNaN(col_data) ) return 0; // Always NAs to bin 0
    assert col_data <= _max : "Coldata out of range "+col_data+" "+this;
    int idx1 = (int)((col_data-_min)*_step);
    int idx2 = Math.max(Math.min(idx1,_bins.length-1),0); // saturate at bounds
    return idx2;
  }
  // Inverse of bin(): the lower edge of bin b.
  float binAt( int b ) { return _min+b/_step; }

  public int nbins() { return _nbin; }
  public long bins(int b) { return _bins[b]; }
  public float mins(int b) { return _mins[b]; }
  public float maxs(int b) { return _maxs[b]; }
  // Per-bin response mean; empty bins report 0.
  public double mean(int b) {
    long n = _bins[b];
    return n>0 ? _sums[b]/n : 0;
  }
  // Per-bin unbiased (n-1) response variance; bins with <2 rows report 0.
  public double var (int b) {
    long n = _bins[b];
    if( n<=1 ) return 0;
    return (_ssqs[b] - _sums[b]*_sums[b]/n)/(n-1);
  }

  // Big allocation of arrays.  Min/max start at the extreme sentinels so any
  // real value immediately replaces them.
  final void init() {
    assert _bins == null;
    _bins = MemoryManager.malloc8 (_nbin);
    _mins = MemoryManager.malloc4f(_nbin);
    Arrays.fill(_mins, Float.MAX_VALUE);
    _maxs = MemoryManager.malloc4f(_nbin);
    Arrays.fill(_maxs,-Float.MAX_VALUE);
    _sums = MemoryManager.malloc8d(_nbin);
    _ssqs = MemoryManager.malloc8d(_nbin);
  }

  // Add one row to a bin found via simple linear interpolation.
  // Compute bin min/max.
  // Compute response mean & variance.
  // The AtomicXXXArray helpers are local utility classes (per the class doc)
  // that atomically update the shared primitive arrays, so incr() is safe to
  // call from many threads on one node.
  final void incr( float col_data, double y ) {
    int b = bin(col_data);          // Compute bin# via linear interpolation
    AtomicLongArray.incr(_bins,b);  // Bump count in bin
    // Track actual lower/upper bound per-bin
    AtomicFloatArray.setMin(_mins,b,col_data);
    AtomicFloatArray.setMax(_maxs,b,col_data);
    if( y != 0 ) {                  // Skip the atomic ops when the response adds nothing
      AtomicDoubleArray.add(_sums,b,y);
      AtomicDoubleArray.add(_ssqs,b,y*y);
    }
  }

  // Merge two equal histograms together.  Done in a F/J reduce, so no
  // synchronization needed.
void add( DSharedHistogram dsh ) { assert _isInt == dsh._isInt && _nbin == dsh._nbin && _step == dsh._step && _min == dsh._min && _max == dsh._max; assert (_bins == null && dsh._bins == null) || (_bins != null && dsh._bins != null); if( _bins == null ) return; Utils.add(_bins,dsh._bins); Utils.add(_sums,dsh._sums); Utils.add(_ssqs,dsh._ssqs); for( int i=0; i<_nbin; i++ ) if( dsh._mins[i] < _mins[i] ) _mins[i] = dsh._mins[i]; for( int i=0; i<_nbin; i++ ) if( dsh._maxs[i] > _maxs[i] ) _maxs[i] = dsh._maxs[i]; } public float find_min() { if( _bins == null ) return Float.NaN; int n = 0; while( n < _nbin && _bins[n]==0 ) n++; // First non-empty bin if( n == _nbin ) return Float.NaN; // All bins are empty??? return _mins[n]; // Take min from 1st non-empty bin } public float find_max() { int x = _nbin-1; // Last bin while( _bins[x]==0 ) x--; // Last non-empty bin return _maxs[x]; // Take max from last non-empty bin } // Compute a "score" for a column; lower score "wins" (is a better split). // Score is the sum of the MSEs when the data is split at a single point. // mses[1] == MSE for splitting between bins 0 and 1. // mses[n] == MSE for splitting between bins n-1 and n. public DTree.Split scoreMSE( int col ) { final int nbins = nbins(); assert nbins > 1; // Compute mean/var for cumulative bins from 0 to nbins inclusive. double sums0[] = MemoryManager.malloc8d(nbins+1); double ssqs0[] = MemoryManager.malloc8d(nbins+1); long ns0[] = MemoryManager.malloc8 (nbins+1); for( int b=1; b<=nbins; b++ ) { double m0 = sums0[b-1], m1 = _sums[b-1]; double s0 = ssqs0[b-1], s1 = _ssqs[b-1]; long k0 = ns0 [b-1], k1 = _bins[b-1]; if( k0==0 && k1==0 ) continue; sums0[b] = m0+m1; ssqs0[b] = s0+s1; ns0 [b] = k0+k1; } long tot = ns0[nbins]; // If we see zero variance, we must have a constant response in this // column. Normally this situation is cut out before we even try to split, but we might // have NA's in THIS column... 
if( ssqs0[nbins]*tot - sums0[nbins]*sums0[nbins] == 0 ) { assert isConstantResponse(); return null; } // Compute mean/var for cumulative bins from nbins to 0 inclusive. double sums1[] = MemoryManager.malloc8d(nbins+1); double ssqs1[] = MemoryManager.malloc8d(nbins+1); long ns1[] = MemoryManager.malloc8 (nbins+1); for( int b=nbins-1; b>=0; b double m0 = sums1[b+1], m1 = _sums[b]; double s0 = ssqs1[b+1], s1 = _ssqs[b]; long k0 = ns1 [b+1], k1 = _bins[b]; if( k0==0 && k1==0 ) continue; sums1[b] = m0+m1; ssqs1[b] = s0+s1; ns1 [b] = k0+k1; assert ns0[b]+ns1[b]==tot; } // Now roll the split-point across the bins. There are 2 ways to do this: // split left/right based on being less than some value, or being equal/ // not-equal to some value. Equal/not-equal makes sense for catagoricals // but both splits could work for any integral datatype. Do the less-than // splits first. int best=0; // The no-split double best_se0=Double.MAX_VALUE; // Best squared error double best_se1=Double.MAX_VALUE; // Best squared error boolean equal=false; // Ranged check for( int b=1; b<=nbins-1; b++ ) { if( _bins[b] == 0 ) continue; // Ignore empty splits // We're making an unbiased estimator, so that MSE==Var. // Then Squared Error = MSE*N = Var*N // = (ssqs/N - mean^2)*N // = ssqs - N*mean^2 // = ssqs - N*(sum/N)(sum/N) // = ssqs - sum^2/N double se0 = ssqs0[b] - sums0[b]*sums0[b]/ns0[b]; double se1 = ssqs1[b] - sums1[b]*sums1[b]/ns1[b]; if( (se0+se1 < best_se0+best_se1) || // Strictly less error? 
// Or tied MSE, then pick split towards middle bins (se0+se1 == best_se0+best_se1 && Math.abs(b -(nbins>>1)) < Math.abs(best-(nbins>>1))) ) { best_se0 = se0; best_se1 = se1; best = b; } } // If the min==max, we can also try an equality-based split if( _isInt > 0 && _step == 1.0f && // For any integral (not float) column _max-_min+1 > 2 ) { // Also need more than 2 (boolean) choices to actually try a new split pattern for( int b=1; b<=nbins-1; b++ ) { if( _bins[b] == 0 ) continue; // Ignore empty splits assert _mins[b] == _maxs[b] : "int col, step of 1.0 "+_mins[b]+".."+_maxs[b]+" "+this+" "+Arrays.toString(sums0)+":"+Arrays.toString(ns0); long N = ns0[b+0] + ns1[b+1]; double sums = sums0[b+0]+sums1[b+1]; double ssqs = ssqs0[b+0]+ssqs1[b+1]; if( N == 0 ) continue; double si = ssqs - sums * sums / N ; // Left+right, excluding 'b' double sx = _ssqs[b] - _sums[b]*_sums[b]/_bins[b]; // Just 'b' if( si+sx < best_se0+best_se1 ) { // Strictly less error? best_se0 = si; best_se1 = sx; best = b; equal = true; // Equality check } } } if( best==0 ) return null; // No place to split assert best > 0 : "Must actually pick a split "+best; long n0 = !equal ? ns0[best] : ns0[best]+ ns1[best+1]; long n1 = !equal ? ns1[best] : _bins[best] ; double p0 = !equal ? sums0[best] : sums0[best]+sums1[best+1]; double p1 = !equal ? sums1[best] : _sums[best] ; return new DTree.Split(col,best,equal,best_se0,best_se1,n0,n1,p0/n0,p1/n1); } // The initial histogram bins are setup from the Vec rollups. static public DSharedHistogram[] initialHist(Frame fr, int ncols, int nbins, DSharedHistogram hs[]) { Vec vecs[] = fr.vecs(); for( int c=0; c<ncols; c++ ) { Vec v = vecs[c]; hs[c] = (v.naCnt()==v.length() || v.min()==v.max()) ? null : new DSharedHistogram(fr._names[c],nbins,(byte)(v.isEnum() ? 
2 : (v.isInt()?1:0)),(float)v.min(),(float)v.max(),v.length()); } return hs; } // Check for a constant response variable public boolean isConstantResponse() { double m = Double.NaN; for( int b=0; b<_bins.length; b++ ) { if( _bins[b] == 0 ) continue; if( var(b) > 1e-16 ) return false; double mean = mean(b); if( mean != m ) if( Double.isNaN(m) ) m=mean; else return false; } return true; } // Pretty-print a histogram @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append(_name).append(":").append(_min).append("-").append(_max).append(" step="+(1/_step)+" nbins="+_bins.length); if( _bins != null ) { for( int b=0; b<_bins.length; b++ ) { sb.append(String.format("\ncnt=%d, min=%f, max=%f, mean/var=", _bins[b],_mins[b],_maxs[b])); sb.append(String.format("%6.2f/%6.2f,", mean(b), var(b))); } sb.append('\n'); } return sb.toString(); } public long byteSize() { long sum = 8+8; // Self header sum += 1+2; // enum; nbin sum += 4+4+4; // step,min,max sum += 8*5; // 5 internal arrays if( _bins == null ) return sum; // + 20(array header) + len<<2 (array body) sum += 24+_bins.length<<3; sum += 20+_mins.length<<2; sum += 20+_maxs.length<<2; sum += 24+_sums.length<<3; sum += 24+_ssqs.length<<3; return sum; } // Atomically-updated float array private static class AtomicFloatArray { private static final Unsafe _unsafe = UtilUnsafe.getUnsafe(); private static final int _Fbase = _unsafe.arrayBaseOffset(float[].class); private static final int _Fscale = _unsafe.arrayIndexScale(float[].class); private static long rawIndex(final float[] ary, final int idx) { assert idx >= 0 && idx < ary.length; return _Fbase + idx * _Fscale; } static void setMin( float fs[], int i, float min ) { float old = fs[i]; while( min < old && !_unsafe.compareAndSwapInt(fs,rawIndex(fs,i), Float.floatToRawIntBits(old), Float.floatToRawIntBits(min) ) ) old = fs[i]; } static void setMax( float fs[], int i, float max ) { float old = fs[i]; while( max > old && 
!_unsafe.compareAndSwapInt(fs,rawIndex(fs,i), Float.floatToRawIntBits(old), Float.floatToRawIntBits(max) ) ) old = fs[i]; } static public String toString( float fs[] ) { SB sb = new SB(); sb.p('['); for( float f : fs ) sb.p(f==Float.MAX_VALUE ? "max": (f==-Float.MAX_VALUE ? "min": Float.toString(f))).p(','); return sb.p(']').toString(); } } // Atomically-updated double array private static class AtomicDoubleArray { private static final Unsafe _unsafe = UtilUnsafe.getUnsafe(); private static final int _Dbase = _unsafe.arrayBaseOffset(double[].class); private static final int _Dscale = _unsafe.arrayIndexScale(double[].class); private static long rawIndex(final double[] ary, final int idx) { assert idx >= 0 && idx < ary.length; return _Dbase + idx * _Dscale; } static void add( double ds[], int i, double y ) { double old = ds[i]; while( !_unsafe.compareAndSwapLong(ds,rawIndex(ds,i), Double.doubleToRawLongBits(old), Double.doubleToRawLongBits(old+y) ) ) old = ds[i]; } } // Atomically-updated long array. Instead of using the similar JDK pieces, // allows the bare array to be exposed for fast readers. private static class AtomicLongArray { private static final Unsafe _unsafe = UtilUnsafe.getUnsafe(); private static final int _Lbase = _unsafe.arrayBaseOffset(long[].class); private static final int _Lscale = _unsafe.arrayIndexScale(long[].class); private static long rawIndex(final long[] ary, final int idx) { assert idx >= 0 && idx < ary.length; return _Lbase + idx * _Lscale; } static void incr( long ls[], int i ) { long old = ls[i]; while( !_unsafe.compareAndSwapLong(ls,rawIndex(ls,i), old, old+1) ) old = ls[i]; } } }
package permafrost.tundra.lang; import com.wm.data.IData; import com.wm.data.IDataCursor; import com.wm.data.IDataFactory; import com.wm.data.IDataPortable; import com.wm.data.IDataUtil; import com.wm.util.Table; import com.wm.util.coder.IDataCodable; import com.wm.util.coder.ValuesCodable; import permafrost.tundra.data.IDataHelper; import permafrost.tundra.io.InputOutputHelper; import permafrost.tundra.io.InputStreamHelper; import permafrost.tundra.io.ReaderHelper; import permafrost.tundra.math.BigDecimalHelper; import permafrost.tundra.math.BigIntegerHelper; import permafrost.tundra.math.DoubleHelper; import permafrost.tundra.math.FloatHelper; import permafrost.tundra.math.IntegerHelper; import permafrost.tundra.math.LongHelper; import permafrost.tundra.math.NumberHelper; import permafrost.tundra.time.DateTimeHelper; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.Reader; import java.io.StringWriter; import java.io.Writer; import java.math.BigDecimal; import java.math.BigInteger; import java.nio.charset.Charset; import java.text.MessageFormat; import java.util.ArrayList; import java.util.List; import java.util.Locale; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * A collection of convenience methods for working with String objects. */ public final class StringHelper { /** * Disallow instantiation of this class. */ private StringHelper() {} /** * Normalizes the given byte[] as a string. * * @param bytes A byte[] to be converted to a string. * @return A string representation of the given byte[]. */ public static String normalize(byte[] bytes) { return normalize(bytes, null); } /** * Converts the given byte[] as a string. * * @param bytes A byte[] to be converted to a string. * @param charset The character set to use. * @return A string representation of the given byte[]. 
*/ public static String normalize(byte[] bytes, Charset charset) { if (bytes == null) return null; return new String(bytes, CharsetHelper.normalize(charset)); } /** * Converts the given java.io.InputStream as a String, and closes the stream. * * @param inputStream A java.io.InputStream to be converted to a string. * @return A string representation of the given java.io.InputStream. * @throws IOException If the given encoding is unsupported, or if there is an error reading from the * java.io.InputStream. */ public static String normalize(InputStream inputStream) throws IOException { return normalize(inputStream, CharsetHelper.DEFAULT_CHARSET); } /** * Converts the given java.io.InputStream as a String, and closes the stream. * * @param inputStream A java.io.InputStream to be converted to a string. * @param charset The character set to use. * @return A string representation of the given java.io.InputStream. * @throws IOException If there is an error reading from the java.io.InputStream. */ public static String normalize(InputStream inputStream, Charset charset) throws IOException { if (inputStream == null) return null; Writer writer = new StringWriter(); InputOutputHelper.copy(new InputStreamReader(InputStreamHelper.normalize(inputStream), CharsetHelper.normalize(charset)), writer); return writer.toString(); } /** * Normalizes the given String, byte[], or java.io.InputStream object to a String. * * @param object The object to be normalized to a string. * @return A string representation of the given object. * @throws IOException If the given encoding is unsupported, or if there is an error reading from the * java.io.InputStream. */ public static String normalize(Object object) throws IOException { return normalize(object, null); } /** * Normalizes the given String, byte[], or java.io.InputStream object to a String. * * @param object The object to be normalized to a string. * @param charset The character set to use. * @return A string representation of the given object. 
* @throws IOException If there is an error reading from the java.io.InputStream. */ public static String normalize(Object object, Charset charset) throws IOException { String value = null; if (object instanceof String) { value = (String)object; } else if (object instanceof Boolean) { value = BooleanHelper.emit((Boolean)object); } else if (object instanceof Number) { value = NumberHelper.emit((Number)object); } else if (object instanceof InputStream) { value = normalize((InputStream)object, charset); } else if (object instanceof byte[]) { value = normalize((byte[])object, charset); } else if (object instanceof Reader) { value = ReaderHelper.read((Reader)object); } else if (object instanceof Class) { value = ((Class)object).getName(); } else if (object != null) { value = object.toString(); } return value; } /** * Normalizes the list of String, byte[], or InputStream to a String list. * * @param array The array of objects to be normalized. * @param charset The character set to use. * @return The resulting String list representing the given array. * @throws IOException If there is an error reading from the java.io.InputStream. */ public static String[] normalize(Object[] array, Charset charset) throws IOException { if (array == null) return null; String[] output = new String[array.length]; for (int i = 0; i < array.length; i++) { output[i] = normalize(array[i], charset); } return output; } /** * Normalizes the list of String, byte[], or InputStream to a String list. * * @param array The array of objects to be normalized. * @param charsetName The character set to use. * @return The resulting String list representing the given array. * @throws IOException If there is an error reading from the java.io.InputStream. */ public static String[] normalize(Object[] array, String charsetName) throws IOException { return normalize(array, CharsetHelper.normalize(charsetName)); } /** * Returns a substring starting at the given index for the given length. 
* * @param input The string to be sliced. * @param index The zero-based starting index of the slice. * @param length The length in characters of the slice. * @return The resulting substring. */ public static String slice(String input, int index, int length) { if (input == null || input.equals("")) return input; String output = ""; int inputLength = input.length(), endIndex = 0; // support reverse length if (length < 0) { // support reverse indexing if (index < 0) { endIndex = index + inputLength + 1; } else { if (index >= inputLength) index = inputLength - 1; endIndex = index + 1; } index = endIndex + length; } else { // support reverse indexing if (index < 0) index += inputLength; endIndex = index + length; } if (index < inputLength && endIndex > 0) { if (index < 0) index = 0; if (endIndex > inputLength) endIndex = inputLength; output = input.substring(index, endIndex); } return output; } /** * Truncates the given string to the given length. If the string length is less than or equal to the desired * length it is returned unmodified, otherwise it is truncated to the desired length. * * @param input The string to be truncated. * @param length The length to truncate the string to. * @return The truncated string. */ public static String truncate(String input, int length) { return truncate(input, length, false); } /** * Truncates the given string to the given length. If the string length is less than or equal to the desired * length it is returned unmodified, otherwise it is truncated to the desired length. * * @param input The string to be truncated. * @param length The length to truncate the string to. * @param ellipsis If true, the returned string is suffixed with an ellipsis character when truncated. * @return The truncated string. 
*/ public static String truncate(String input, int length, boolean ellipsis) { if (input == null) return null; if (input.length() > Math.abs(length)) { if (ellipsis && length != 0) { if (length > 0) { input = slice(input, 0, length - 1) + "…"; } else { input = "…" + slice(input, -1, length + 1); } } else if (length < 0){ input = slice(input, -1, length); } else { input = slice(input, 0, length); } } return input; } /** * Truncates the given strings to the given length. If a string's length is less than or equal to the desired * length it is returned unmodified, otherwise it is truncated to the desired length. * * @param input The strings to be truncated. * @param length The length to truncate the strings to. * @param ellipsis If true, the returned strings are suffixed with an ellipsis character when truncated. * @return The truncated strings. */ public static String[] truncate(String[] input, int length, boolean ellipsis) { if (input == null) return null; String output[] = new String[input.length]; for(int i = 0; i < input.length; i++) { output[i] = truncate(input[i], length, ellipsis); } return output; } /** * Truncates the given strings to the given length. If a string's length is less than or equal to the desired * length it is returned unmodified, otherwise it is truncated to the desired length. * * @param input The strings to be truncated. * @param length The length to truncate the strings to. * @param ellipsis If true, the returned strings are suffixed with an ellipsis character when truncated. * @return The truncated strings. */ public static String[][] truncate(String[][] input, int length, boolean ellipsis) { if (input == null) return null; String output[][] = new String[input.length][]; for(int i = 0; i < input.length; i++) { output[i] = truncate(input[i], length, ellipsis); } return output; } /** * Recursively truncates all strings in the given IData document to the given length. 
If a string's length is less * than or equal to the desired length it is returned unmodified, otherwise it is truncated to the desired length. * * @param input The IData document containing strings to be truncated. * @param length The length to truncate the strings to. * @param ellipsis If true, the returned strings are suffixed with an ellipsis character when truncated. * @return A new IData document containing the truncated strings. */ public static IData truncate(IData input, int length, boolean ellipsis) { if (input == null) return null; IData output = IDataFactory.create(); IDataCursor inputCursor = input.getCursor(); IDataCursor outputCursor = output.getCursor(); try { while(inputCursor.next()) { String key = inputCursor.getKey(); Object value = inputCursor.getValue(); if (value instanceof IData[] || value instanceof Table || value instanceof IDataCodable[] || value instanceof IDataPortable[] || value instanceof ValuesCodable[]) { value = truncate(IDataHelper.toIDataArray(value), length, ellipsis); } else if (value instanceof IData || value instanceof IDataCodable || value instanceof IDataPortable || value instanceof ValuesCodable) { value = truncate(IDataHelper.toIData(value), length, ellipsis); } else if (value instanceof String) { value = truncate((String)value, length, ellipsis); } else if (value instanceof String[]) { value = truncate((String[])value, length, ellipsis); } else if (value instanceof String[][]) { value = truncate((String[][])value, length, ellipsis); } outputCursor.insertAfter(key, value); } } finally { inputCursor.destroy(); outputCursor.destroy(); } return output; } /** * Recursively truncates all strings in the given IData[] document list to the given length. If a string's length * is less than or equal to the desired length it is returned unmodified, otherwise it is truncated to the desired * length. * * @param input The IData[] document list containing strings to be truncated. * @param length The length to truncate the strings to. 
* @param ellipsis If true, the returned strings are suffixed with an ellipsis character when truncated. * @return A new IData[] document list containing the truncated strings. */ public static IData[] truncate(IData[] input, int length, boolean ellipsis) { if (input == null) return null; IData[] output = new IData[input.length]; for (int i = 0; i < input.length; i++) { output[i] = truncate(input[i], length, ellipsis); } return output; } /** * Converts a null input string to an empty string, or returns the string unmodified if not null. * * @param input The string to be converted to an empty string if null. * @return If input is null then empty string, otherwise input string unmodified. */ public static String blankify(String input) { return blankify(input, true); } /** * Converts a null input string to an empty string, or returns the string unmodified if not null. * * @param input The string to be converted to an empty string if null. * @param blankify If true, nulls will be converted to empty strings, else no conversion will occur. * @return If blankify is true and input is null then empty string, otherwise input string unmodified. */ public static String blankify(String input, boolean blankify) { if (!blankify) return input; return input == null ? "" : input; } /** * Converts any null strings to empty strings, or returns the strings unmodified if not null. * * @param input The list of strings to be converted to an empty strings if null. * @return The list of strings converted to empty strings if they were null. */ public static String[] blankify(String[] input) { return blankify(input, true); } /** * Converts any null strings to empty strings, or returns the strings unmodified if not null. * * @param input The list of strings to be converted to an empty strings if null. * @param blankify If true, nulls will be converted to empty strings, else no conversion will occur. * @return The list of strings converted to empty strings if they were null. 
*/ public static String[] blankify(String input[], boolean blankify) { if (!blankify || input == null) return input; String output[] = new String[input.length]; for (int i = 0; i < input.length; i++) { output[i] = blankify(input[i], blankify); } return output; } /** * Capitalizes the first character in either the first word or all words in the given string. * * @param string The string to capitalize. * @param firstWordOnly Whether only the first word should be capitalized, or all words. * @return The capitalized string. */ public static String capitalize(String string, boolean firstWordOnly) { if (string == null) return null; char[] characters = string.toCharArray(); boolean capitalize = true; for (int i = 0; i < characters.length; i++) { char character = characters[i]; if (Character.isWhitespace(character)) { capitalize = true; } else if (capitalize) { characters[i] = Character.toTitleCase(character); capitalize = false; if (firstWordOnly) break; } } return new String(characters); } /** * Capitalizes the first character in either the first word or all words in each of the given * strings. * * @param input The strings to capitalize. * @param firstWordOnly Whether only the first word should be capitalized, or all words. * @return The capitalized strings. */ public static String[] capitalize(String[] input, boolean firstWordOnly) { if (input == null) return null; String[] output = new String[input.length]; for (int i = 0; i < input.length; i++) { output[i] = capitalize(input[i], firstWordOnly); } return output; } /** * Returns the given string as a list of characters. * * @param string The string. * @return The characters in the given string. 
*/ public static Character[] characters(String string) { if (string == null) return null; char[] chars = string.toCharArray(); Character[] characters = new Character[chars.length]; for (int i = 0; i < chars.length; i++) { characters[i] = chars[i]; } return characters; } /** * Concatenates all non-null string leaf values in the given IData document. * * @param operands An IData document containing strings to be concatenated. * @return All string leaf values in the IData document concatenated together. */ public static String concatenate(IData operands) { return concatenate(operands, null, false); } /** * Concatenates all non-null string leaf values in the given IData document, separated by the given separator * string. * * @param operands An IData document containing strings to be concatenated. * @param separator An optional separator string to be used between items of the array. * @return All string leaf values in the IData document concatenated together. */ public static String concatenate(IData operands, String separator) { return concatenate(operands, separator, false); } /** * Concatenates all string leaf values in the given IData document, separated by the given separator string. * * @param operands An IData document containing strings to be concatenated. * @param separator An optional separator string to be used between items of the array. * @param includeNulls If true, null values will be included in the output string, otherwise they are ignored. * @return All string leaf values in the IData document concatenated together. */ @SuppressWarnings("unchecked") public static String concatenate(IData operands, String separator, boolean includeNulls) { return concatenate(separator, includeNulls, IDataHelper.getLeaves(operands, String.class)); } /** * Concatenates all given non-null strings. * * @param strings A list of strings to be concatenated. * @return All given strings concatenated together. 
*/ public static String concatenate(String ...strings) { return concatenate(null, false, strings); } /** * Concatenates all given strings, separated by the given separator string. * * @param separator An optional separator string to be used between items of the array. * @param strings A list of strings to be concatenated. * @return All given strings concatenated together. */ public static String concatenate(String separator, String ...strings) { return concatenate(separator, false, strings); } /** * Concatenates all given strings, separated by the given separator string. * * @param separator An optional separator string to be used between items of the array. * @param includeNulls If true, null values will be included in the output string, otherwise they are ignored. * @param strings A list of strings to be concatenated. * @return All given strings concatenated together. */ public static String concatenate(String separator, boolean includeNulls, String ...strings) { if (strings == null || strings.length == 0) return includeNulls ? null : ""; StringBuilder builder = new StringBuilder(); boolean separatorRequired = false; for (String string : strings) { boolean includeItem = includeNulls || string != null; if (separator != null && separatorRequired && includeItem) builder.append(separator); if (includeItem) { builder.append(string); separatorRequired = true; } } return builder.toString(); } /** * Returns the given string with leading and trailing whitespace removed. * * @param string The string to be trimmed. * @return The trimmed string. */ public static String trim(String string) { String output = null; if (string != null) output = string.trim(); return output; } /** * Trims each item in the given String[] of leading and trailing whitespace. * * @param input The String[] to be trimmed. * @return A new String[] contained the trimmed versions of the items in the given input. 
*/ public static String[] trim(String[] input) { if (input == null) return null; String[] output = new String[input.length]; for (int i = 0; i < input.length; i++) { String item = input[i]; if (item != null) output[i] = input[i].trim(); } return output; } /** * Trims each item in the given String[][] of leading and trailing whitespace. * * @param input The String[][] to be trimmed. * @return A new String[][] contained the trimmed versions of the items in the given input. */ public static String[][] trim(String[][] input) { if (input == null) return null; String[][] output = new String[input.length][]; for (int i = 0; i < input.length; i++) { output[i] = trim(input[i]); } return output; } /** * Returns the length or number of characters of the string. * * @param string The string to be measured. * @return The length of the given string. */ public static int length(String string) { int length = 0; if (string != null) length = string.length(); return length; } /** * Returns all the groups captured by the given regular expression pattern in the given string. * * @param string The string to match against the regular expression. * @param pattern The regular expression pattern. * @return The capture groups from the regular expression pattern match against the string. 
*/
public static IData[] capture(String string, String pattern) {
    if (string == null || pattern == null) return null;

    List<IData> captures = new ArrayList<IData>();
    Pattern regex = Pattern.compile(pattern);
    Matcher matcher = regex.matcher(string);

    while (matcher.find()) {
        int count = matcher.groupCount();
        // group 0 is the whole match; groups 1..count are the capture groups
        List<IData> groups = new ArrayList<IData>(count);
        for (int i = 0; i <= count; i++) {
            int index = matcher.start(i);
            int length = matcher.end(i) - index;
            String content = matcher.group(i);
            boolean captured = index >= 0; // start(i) < 0 when the group did not participate in the match

            IData group = IDataFactory.create();
            IDataCursor groupCursor = group.getCursor();
            IDataUtil.put(groupCursor, "captured?", "" + captured);
            if (captured) {
                IDataUtil.put(groupCursor, "index", "" + index);
                IDataUtil.put(groupCursor, "length", "" + length);
                IDataUtil.put(groupCursor, "content", content);
            }
            groupCursor.destroy();
            groups.add(group);
        }

        IData capture = IDataFactory.create();
        IDataCursor captureCursor = capture.getCursor();
        IDataUtil.put(captureCursor, "groups", groups.toArray(new IData[groups.size()]));
        IDataUtil.put(captureCursor, "groups.length", "" + groups.size());
        captureCursor.destroy();
        captures.add(capture);
    }

    return captures.toArray(new IData[captures.size()]);
}

/**
 * Returns true if the given regular expression pattern is found anywhere in the given string.
 *
 * @param string  The string to match against the regular expression.
 * @param pattern The regular expression pattern.
 * @return True if the regular expression pattern was found anywhere in the given string.
 */
public static boolean find(String string, String pattern) {
    return find(string, pattern, false);
}

/**
 * Returns true if the given pattern is found anywhere in the given string.
 *
 * @param string  The string to match against the pattern.
 * @param pattern The literal or regular expression pattern.
 * @param literal Whether the pattern is a literal pattern or a regular expression.
 * @return True if the pattern was found anywhere in the given string.
 */
public static boolean find(String string, String pattern, boolean literal) {
    boolean found = false;
    if (string != null && pattern != null) {
        if (literal) {
            found = string.contains(pattern);
        } else {
            found = Pattern.compile(pattern).matcher(string).find();
        }
    }
    return found;
}

/**
 * Returns true if the given regular expression pattern matches the entirety of the given string.
 *
 * @param string  The string to match against the regular expression.
 * @param pattern The regular expression pattern.
 * @return True if the regular expression matches the entirety of the given string.
 */
public static boolean match(String string, String pattern) {
    return match(string, pattern, false);
}

/**
 * Returns true if the pattern matches the entirety of the given string.
 *
 * @param string  The string to match against the pattern.
 * @param pattern The literal or regular expression pattern.
 * @param literal Whether the pattern is a literal pattern or a regular expression.
 * @return True if the pattern matches the entirety of the given string.
 */
public static boolean match(String string, String pattern, boolean literal) {
    boolean match = false;
    if (string != null && pattern != null) {
        match = literal ? string.equals(pattern) : string.matches(pattern);
    }
    return match;
}

/**
 * Removes all occurrences of the given regular expression in the given string.
 *
 * @param string  The string to remove the pattern from.
 * @param pattern The regular expression pattern to be removed.
 * @return The given string with all occurrences of the given pattern removed.
 */
public static String remove(String string, String pattern) {
    return remove(string, pattern, false);
}

/**
 * Removes all occurrences of the given pattern in the given string.
 *
 * @param string  The string to remove the pattern from.
 * @param pattern The pattern to be removed.
 * @param literal Whether the pattern is a literal pattern or a regular expression.
 * @return The given string with all occurrences of the given pattern removed.
 */
public static String remove(String string, String pattern, boolean literal) {
    return replace(string, pattern, "", literal, false);
}

/**
 * Removes either the first or all occurrences of the given pattern in the given string.
 *
 * @param string    The string to remove the pattern from.
 * @param pattern   The pattern to be removed.
 * @param literal   Whether the pattern is a literal pattern or a regular expression.
 * @param firstOnly If true, only the first occurrence is removed, otherwise all occurrences are removed.
 * @return The given string with either the first or all occurrences of the given pattern removed.
 */
public static String remove(String string, String pattern, boolean literal, boolean firstOnly) {
    // FIX: use Pattern.quote to make the PATTERN literal. The previous code used
    // Matcher.quoteReplacement, which only escapes replacement-string metacharacters
    // ('\' and '$') and leaves regex metacharacters (e.g. '.', '*') active.
    if (pattern != null && literal) pattern = Pattern.quote(pattern);
    return replace(string, pattern == null ? null : Pattern.compile(pattern), "", firstOnly);
}

/**
 * Replaces all occurrences of the given regular expression in the given string with the given replacement.
 *
 * @param string      The string to be replaced.
 * @param pattern     The regular expression pattern.
 * @param replacement The replacement string.
 * @param literal     Whether the replacement string is literal and therefore requires quoting.
 * @return The replaced string.
 */
public static String replace(String string, String pattern, String replacement, boolean literal) {
    return replace(string, pattern, replacement, literal, false);
}

/**
 * Replaces either the first or all occurrences of the given regular expression in the given string with
 * the given replacement.
 *
 * @param string      The string to be replaced.
 * @param pattern     The regular expression pattern.
 * @param replacement The replacement string.
 * @param literal     Whether the replacement string is literal and therefore requires quoting.
 * @param firstOnly   If true, only the first occurrence is replaced, otherwise all occurrences are replaced.
 * @return The replaced string.
 */
public static String replace(String string, String pattern, String replacement, boolean literal, boolean firstOnly) {
    return replace(string, pattern, false, replacement, literal, firstOnly);
}

/**
 * Replaces either the first or all occurrences of the given pattern in the given string with the given
 * replacement.
 *
 * @param string             The string to be replaced.
 * @param pattern            The pattern.
 * @param literalPattern     Whether the pattern string is literal and therefore requires quoting.
 * @param replacement        The replacement string.
 * @param literalReplacement Whether the replacement string is literal and therefore requires quoting.
 * @param firstOnly          If true, only the first occurrence is replaced, otherwise all occurrences are replaced.
 * @return The replaced string.
 */
public static String replace(String string, String pattern, boolean literalPattern, String replacement, boolean literalReplacement, boolean firstOnly) {
    // FIX: Pattern.quote for literal patterns (was Matcher.quoteReplacement, which does not
    // escape regex metacharacters); Matcher.quoteReplacement remains correct for replacements.
    return replace(string,
                   pattern == null ? null : Pattern.compile(literalPattern ? Pattern.quote(pattern) : pattern),
                   replacement != null && literalReplacement ? Matcher.quoteReplacement(replacement) : replacement,
                   firstOnly);
}

/**
 * Replaces either the first or all occurrences of the given regular expression in the given string with
 * the given replacement.
 *
 * @param string      The string to be replaced.
 * @param pattern     The compiled regular expression pattern.
 * @param replacement The replacement string (already quoted if it is to be treated literally).
 * @param firstOnly   If true, only the first occurrence is replaced, otherwise all occurrences are replaced.
 * @return The replaced string.
 */
public static String replace(String string, Pattern pattern, String replacement, boolean firstOnly) {
    if (string == null || pattern == null || replacement == null) return string;

    Matcher matcher = pattern.matcher(string);
    return firstOnly ? matcher.replaceFirst(replacement) : matcher.replaceAll(replacement);
}

/**
 * Replaces either the first or all occurrences of the given regular expression in the given string array
 * elements with the given replacement.
 *
 * @param array       The string array whose elements are to be replaced.
 * @param pattern     The regular expression pattern.
 * @param replacement The replacement string.
 * @param literal     Whether the replacement string is literal and therefore requires quoting.
 * @param firstOnly   If true, only the first occurrence is replaced, otherwise all occurrences are replaced.
 * @return The string array with replaced string elements.
 */
public static String[] replace(String[] array, String pattern, String replacement, boolean literal, boolean firstOnly) {
    return replace(array, pattern, false, replacement, literal, firstOnly);
}

/**
 * Replaces either the first or all occurrences of the given pattern in the given string array elements
 * with the given replacement.
 *
 * @param array              The string array whose elements are to be replaced.
 * @param pattern            The pattern.
 * @param literalPattern     Whether the pattern string is literal and therefore requires quoting.
 * @param replacement        The replacement string.
 * @param literalReplacement Whether the replacement string is literal and therefore requires quoting.
 * @param firstOnly          If true, only the first occurrence is replaced, otherwise all occurrences are replaced.
 * @return The string array with replaced string elements.
 */
public static String[] replace(String[] array, String pattern, boolean literalPattern, String replacement, boolean literalReplacement, boolean firstOnly) {
    // FIX: Pattern.quote for literal patterns (see replace(String, ...) above).
    return replace(array,
                   pattern == null ? null : Pattern.compile(literalPattern ? Pattern.quote(pattern) : pattern),
                   replacement != null && literalReplacement ? Matcher.quoteReplacement(replacement) : replacement,
                   firstOnly);
}

/**
 * Replaces either the first or all occurrences of the given regular expression in the given string array
 * elements with the given replacement.
 *
 * @param array       The string array whose elements are to be replaced.
 * @param pattern     The compiled regular expression pattern.
 * @param replacement The replacement string.
 * @param firstOnly   If true, only the first occurrence is replaced, otherwise all occurrences are replaced.
 * @return The string array with replaced string elements.
 */
public static String[] replace(String[] array, Pattern pattern, String replacement, boolean firstOnly) {
    if (array == null || pattern == null || replacement == null) return array;

    String[] output = new String[array.length];
    for (int i = 0; i < array.length; i++) {
        output[i] = replace(array[i], pattern, replacement, firstOnly);
    }
    return output;
}

/**
 * Replaces either the first or all occurrences of the given regular expression in the given string table
 * elements with the given replacement.
 *
 * @param table       The string table whose elements are to be replaced.
 * @param pattern     The regular expression pattern.
 * @param replacement The replacement string.
 * @param literal     Whether the replacement string is literal and therefore requires quoting.
 * @param firstOnly   If true, only the first occurrence is replaced, otherwise all occurrences are replaced.
 * @return The string table with replaced string elements.
 */
public static String[][] replace(String[][] table, String pattern, String replacement, boolean literal, boolean firstOnly) {
    return replace(table, pattern, false, replacement, literal, firstOnly);
}

/**
 * Replaces either the first or all occurrences of the given pattern in the given string table elements
 * with the given replacement.
 *
 * @param table              The string table whose elements are to be replaced.
 * @param pattern            The pattern.
 * @param literalPattern     Whether the pattern string is literal and therefore requires quoting.
 * @param replacement        The replacement string.
 * @param literalReplacement Whether the replacement string is literal and therefore requires quoting.
 * @param firstOnly          If true, only the first occurrence is replaced, otherwise all occurrences are replaced.
 * @return The string table with replaced string elements.
 */
public static String[][] replace(String[][] table, String pattern, boolean literalPattern, String replacement, boolean literalReplacement, boolean firstOnly) {
    // FIX: Pattern.quote for literal patterns (see replace(String, ...) above).
    return replace(table,
                   pattern == null ? null : Pattern.compile(literalPattern ? Pattern.quote(pattern) : pattern),
                   replacement != null && literalReplacement ? Matcher.quoteReplacement(replacement) : replacement,
                   firstOnly);
}

/**
 * Replaces either the first or all occurrences of the given regular expression in the given string table
 * elements with the given replacement.
 *
 * @param table       The string table whose elements are to be replaced.
 * @param pattern     The compiled regular expression pattern.
 * @param replacement The replacement string.
 * @param firstOnly   If true, only the first occurrence is replaced, otherwise all occurrences are replaced.
 * @return The string table with replaced string elements.
 */
public static String[][] replace(String[][] table, Pattern pattern, String replacement, boolean firstOnly) {
    if (table == null || pattern == null || replacement == null) return table;

    String[][] output = new String[table.length][];
    for (int i = 0; i < table.length; i++) {
        output[i] = replace(table[i], pattern, replacement, firstOnly);
    }
    return output;
}

/**
 * Splits a string around each match of the given regular expression pattern.
 *
 * @param string  The string to be split.
 * @param pattern The regular expression pattern to split around.
 * @return The array of strings computed by splitting the given string around matches of this pattern.
 */
public static String[] split(String string, String pattern) {
    return split(string, pattern, false);
}

/**
 * Splits a string around each match of the given pattern.
 *
 * @param string  The string to be split.
 * @param pattern The literal or regular expression pattern to split around.
 * @param literal Whether the pattern is a literal pattern or a regular expression.
 * @return The array of strings computed by splitting the given string around matches of this pattern.
 */
public static String[] split(String string, String pattern, boolean literal) {
    String[] output = null;
    if (string != null && pattern != null) {
        if (literal) pattern = quote(pattern);
        output = Pattern.compile(pattern).split(string);
    } else if (string != null) {
        // no pattern: the whole string is the single element
        output = new String[] { string };
    }
    return output;
}

/**
 * Returns all the lines in the given string as an array.
 *
 * @param string The string to be split into lines.
 * @return The array of lines from the given string.
 */
public static String[] lines(String string) {
    return split(string, "\n");
}

/**
 * Trims the given string of leading and trailing whitespace, and optionally replaces runs of whitespace
 * characters with a single space character.
 *
 * @param string   The string to be squeezed.
 * @param internal Whether runs of whitespace characters should be replaced with a single space character.
 * @return The squeezed string, or null if the input was null or contained only whitespace.
 */
public static String squeeze(String string, boolean internal) {
    if (string == null) return null;
    string = string.trim();
    if (internal) string = replace(string, "\\s+", " ", true);
    return string.equals("") ? null : string;
}

/**
 * Trims the given string of leading and trailing whitespace, and replaces runs of whitespace characters
 * with a single space character.
 *
 * @param string The string to be squeezed.
 * @return The squeezed string, or null if the input was null or contained only whitespace.
 */
public static String squeeze(String string) {
    return squeeze(string, true);
}

/**
 * Returns a literal regular expression pattern for the given string.
 *
 * @param string The string to quote.
 * @return A regular expression pattern which literally matches the given string.
 */
public static String quote(String string) {
    return string == null ? null : Pattern.quote(string);
}

/**
 * Returns a regular expression pattern that matches any of the values in the given string list.
 *
 * @param array The list of strings to be matched.
 * @return A regular expression which literally matches any of the given strings, e.g. "(a|b|c)".
 */
public static String quote(String[] array) {
    if (array == null) return null;

    int last = array.length - 1;
    StringBuilder builder = new StringBuilder();
    for (int i = 0; i < array.length; i++) {
        if (i == 0) builder.append("(");
        builder.append(quote(array[i]));
        if (i < last) builder.append("|");
        if (i == last) builder.append(")");
    }
    return builder.toString();
}

/**
 * Pads a string with the given character to the given length.
 *
 * @param string    The string to pad.
 * @param length    The desired length of the string. If less than 0 the string is padded on the right,
 *                  otherwise it is padded on the left.
 * @param character The character to pad the string with.
 * @return The padded string.
 */
public static String pad(String string, int length, char character) {
    if (string == null) string = "";

    boolean left = length >= 0;
    if (length < 0) length = -length;
    if (string.length() >= length) return string;

    StringBuilder builder = new StringBuilder(length);
    if (!left) builder.append(string);
    for (int i = string.length(); i < length; i++) {
        builder.append(character);
    }
    if (left) builder.append(string);
    return builder.toString();
}

/**
 * Pads each string in the given list with the given character to the given length.
 *
 * @param input     The list of strings to be padded.
 * @param length    The desired length of the strings. If less than 0 the strings are padded on the right,
 *                  otherwise they are padded on the left.
 * @param character The character to pad the strings with.
 * @return The list of padded strings.
 */
public static String[] pad(String[] input, int length, char character) {
    if (input == null) return null;

    String[] output = new String[input.length];
    for (int i = 0; i < input.length; i++) {
        output[i] = pad(input[i], length, character);
    }
    return output;
}

/**
 * Compares two strings lexicographically.
 *
 * @param string1 The first string to compare.
 * @param string2 The second string to compare.
 * @return Less than 0 if the first string is less than the second string, 0 if equal, greater than 0 otherwise.
 */
public static int compare(String string1, String string2) {
    return compare(string1, string2, false, false);
}

/**
 * Compares two strings lexicographically.
 *
 * @param string1         The first string to compare.
 * @param string2         The second string to compare.
 * @param caseInsensitive Whether the comparison should be case insensitive.
 * @return Less than 0 if the first string is less than the second string, 0 if equal, greater than 0 otherwise.
 */
public static int compare(String string1, String string2, boolean caseInsensitive) {
    return compare(string1, string2, caseInsensitive, false);
}

/**
 * Compares two strings lexicographically; null sorts before any non-null string.
 *
 * @param string1               The first string to compare.
 * @param string2               The second string to compare.
 * @param caseInsensitive       Whether the comparison should be case insensitive.
 * @param whitespaceInsensitive Whether the comparison should be whitespace insensitive.
 * @return Less than 0 if the first string is less than the second string, 0 if equal, greater than 0 otherwise.
 */
public static int compare(String string1, String string2, boolean caseInsensitive, boolean whitespaceInsensitive) {
    if (string1 == null && string2 == null) return 0;
    if (string1 == null) return -1;
    if (string2 == null) return 1;

    if (whitespaceInsensitive) {
        string1 = string1.replaceAll("\\s", "");
        string2 = string2.replaceAll("\\s", "");
    }

    return caseInsensitive ? string1.compareToIgnoreCase(string2) : string1.compareTo(string2);
}

/**
 * Formats the given pattern using java.util.Formatter syntax with the given argument specifications,
 * resolving keyed arguments against the given scope.
 */
public static String format(Locale locale, String pattern, IData[] arguments, IData scope) {
    return format(locale, pattern, arguments, null, scope);
}

/**
 * Formats the given pattern with the given argument specifications, resolving keyed arguments against
 * the given scope, with $index/$iteration resolved from the given index.
 */
public static String format(Locale locale, String pattern, IData[] arguments, IData scope, int index) {
    // FIX: previously hard-coded 0, silently ignoring the caller's index.
    return format(locale, pattern, arguments, null, scope, index);
}

/**
 * Formats the given pattern with the given argument specifications, resolving keyed arguments against
 * the given pipeline and scope.
 */
public static String format(Locale locale, String pattern, IData[] arguments, IData pipeline, IData scope) {
    return format(locale, pattern, arguments, pipeline, scope, 0);
}

/**
 * Formats the given pattern with the given argument specifications.
 *
 * @param locale    The locale used when formatting.
 * @param pattern   The java.util.Formatter pattern to format.
 * @param arguments Argument specifications; each may contain "key", "value", "type" (string|integer|decimal|datetime),
 *                  "pattern" (datetime parse pattern) and "blankify?" entries.
 * @param pipeline  Optional pipeline used to resolve keyed arguments.
 * @param scope     The scope used to resolve keyed arguments.
 * @param index     Zero-based index substituted for the keys "$index" and "$iteration" (index + 1).
 * @return The formatted string, or null if pattern, arguments or scope is null.
 */
public static String format(Locale locale, String pattern, IData[] arguments, IData pipeline, IData scope, int index) {
    if (pattern == null || arguments == null || scope == null) return null;

    List<Object> args = new ArrayList<Object>(arguments.length);

    for (IData argument : arguments) {
        if (argument == null) continue; // null specifications contribute no argument

        IDataCursor cursor = argument.getCursor();
        String key = IDataUtil.getString(cursor, "key");
        Object value = IDataUtil.get(cursor, "value");
        String type = IDataUtil.getString(cursor, "type");
        String argPattern = IDataUtil.getString(cursor, "pattern");
        boolean blankify = BooleanHelper.parse(IDataUtil.getString(cursor, "blankify?"));
        cursor.destroy();

        // a keyed argument with no inline value is resolved from the pipeline/scope
        if (key != null && value == null) {
            value = IDataHelper.get(pipeline, scope, key);
            if (value == null) {
                if (key.equals("$index")) {
                    value = index;
                } else if (key.equals("$iteration")) {
                    value = index + 1;
                }
            }
        }

        if (value != null) {
            // normalize the resolved value to the declared type; default is string
            if (type == null || type.equalsIgnoreCase("string")) {
                value = value.toString();
            } else if (type.equalsIgnoreCase("integer")) {
                value = BigIntegerHelper.normalize(value);
            } else if (type.equalsIgnoreCase("decimal")) {
                value = BigDecimalHelper.normalize(value);
            } else if (type.equalsIgnoreCase("datetime")) {
                value = DateTimeHelper.normalize(value, argPattern);
            }
        } else if (blankify) {
            // substitute a type-appropriate blank for missing values
            if (type == null || type.equalsIgnoreCase("string")) {
                value = "";
            } else if (type.equalsIgnoreCase("integer")) {
                value = BigInteger.ZERO;
            } else if (type.equalsIgnoreCase("decimal")) {
                value = BigDecimal.ZERO;
            }
        }

        args.add(value);
    }

    return String.format(locale, pattern, args.toArray(new Object[args.size()]));
}

/**
 * Formats the given pattern once per record, joining the results with the given record separator.
 */
public static String format(Locale locale, String pattern, IData[] arguments, String recordSeparator, IData... records) {
    return format(locale, pattern, arguments, null, recordSeparator, records);
}

/**
 * Formats the given pattern once per record, resolving keyed arguments against the given pipeline and
 * each record in turn, joining the results with the given record separator.
 */
public static String format(Locale locale, String pattern, IData[] arguments, IData pipeline, String recordSeparator, IData... records) {
    if (pattern == null || arguments == null || records == null) return null;

    StringBuilder builder = new StringBuilder();
    for (int i = 0; i < records.length; i++) {
        builder.append(format(locale, pattern, arguments, pipeline, records[i], i));
        if (recordSeparator != null) builder.append(recordSeparator);
    }
    return builder.toString();
}

/**
 * Returns null if the given string only contains whitespace characters.
 *
 * @param input The string to be nullified.
 * @return Null if the given string only contains whitespace characters, otherwise the given string unmodified.
 */
public static String nullify(String input) {
    return nullify(input, true);
}

/**
 * Returns null if the given string only contains whitespace characters.
 *
 * @param input   The string to be nullified.
 * @param nullify If true, the string will be nullified.
 * @return Null if the given string only contains whitespace characters, otherwise the given string unmodified.
 */
public static String nullify(String input, boolean nullify) {
    return (nullify && (input == null || input.trim().equals(""))) ? null : input;
}

/**
 * Converts each string in the given list to null if it only contains whitespace characters.
 *
 * @param input The string list to be nullified.
 * @return The nullified list of strings.
 */
public static String[] nullify(String[] input) {
    return nullify(input, true);
}

/**
 * Converts each string in the given list to null if it only contains whitespace characters.
 *
 * @param input   The string list to be nullified.
 * @param nullify If true, the list will be nullified.
 * @return The nullified list of strings, or the input unmodified when nullify is false.
 */
public static String[] nullify(String[] input, boolean nullify) {
    if (input == null) return null;
    // FIX: previously returned null when nullify was false, discarding the input;
    // the scalar nullify(String, boolean) returns its input unmodified in that case.
    if (!nullify) return input;

    String[] output = new String[input.length];
    for (int i = 0; i < input.length; i++) {
        output[i] = nullify(input[i], nullify);
    }
    return output;
}

/**
 * Repeats the given string atom the given count times, returning the result.
 *
 * @param atom  A string to be repeated.
 * @param count The number of times to repeat the string; must be >= 0.
 * @return A new string containing the given string atom repeated the given number of times.
 * @throws IllegalArgumentException If count is negative.
 */
public static String repeat(String atom, int count) {
    if (atom == null) return null;
    if (count < 0) throw new IllegalArgumentException("count must be >= 0");

    // short-circuit when only 1 repeat is required
    if (count == 1) return atom;

    StringBuilder builder = new StringBuilder();
    for (int i = 0; i < count; i++) {
        builder.append(atom);
    }
    return builder.toString();
}

/**
 * Reverses the given string.
 *
 * @param input A string to be reversed.
 * @return The reverse of the given string.
 */
public static String reverse(String input) {
    return input == null ? null : new StringBuilder(input).reverse().toString();
}

// character substituted for characters that are not legal in a Java identifier
private static final char JAVA_IDENTIFIER_ILLEGAL_CHARACTER_REPLACEMENT = '_';

/**
 * Converts the given string to a legal Java identifier by replacing every character that is not a valid
 * Java identifier start/part character with an underscore.
 *
 * @param input The string to be converted.
 * @return A legal Java identifier derived from the given string.
 */
public static String legalize(String input) {
    if (input == null) return null;

    char[] characters = input.toCharArray();
    StringBuilder output = new StringBuilder();
    for (int i = 0; i < characters.length; i++) {
        char character = characters[i];
        if ((i == 0 && !Character.isJavaIdentifierStart(character)) ||
            (i > 0 && !Character.isJavaIdentifierPart(character))) {
            character = JAVA_IDENTIFIER_ILLEGAL_CHARACTER_REPLACEMENT;
        }
        output.append(character);
    }
    return output.toString();
}
}
package hex.singlenoderf; import dontweave.gson.JsonObject; import hex.ConfusionMatrix; import hex.FrameTask; import hex.VarImp; import hex.drf.DRF; import water.*; import water.Timer; import water.api.AUCData; import water.api.Constants; import water.api.DocGen; import water.api.ParamImportance; import water.fvec.Frame; import water.fvec.Vec; import water.util.*; import java.util.*; import static water.util.MRUtils.sampleFrameStratified; public class SpeeDRF extends Job.ValidatedJob { static final int API_WEAVER = 1; // This file has auto-gen'd doc & json fields public static DocGen.FieldDoc[] DOC_FIELDS; public static final String DOC_GET = "SpeeDRF"; @API(help = "Number of trees", filter = Default.class, json = true, lmin = 1, lmax = Integer.MAX_VALUE, importance = ParamImportance.CRITICAL) public int ntrees = 50; @API(help = "Number of features to randomly select at each split.", filter = Default.class, json = true, lmin = -1, lmax = Integer.MAX_VALUE, importance = ParamImportance.SECONDARY) public int mtries = -1; @API(help = "Max Depth", filter = Default.class, json = true, lmin = 0, lmax = Integer.MAX_VALUE, importance = ParamImportance.CRITICAL) public int max_depth = 20; @API(help = "Split Criterion Type", filter = Default.class, json=true, importance = ParamImportance.SECONDARY) public Tree.SelectStatType select_stat_type = Tree.SelectStatType.ENTROPY; // @API(help = "Use local data. 
Auto-enabled if data does not fit in a single node.") /*, filter = Default.class, json = true, importance = ParamImportance.EXPERT) */ // public boolean local_mode = false; /* Legacy parameter: */ public double[] class_weights = null; @API(help = "Sampling Strategy", filter = Default.class, json = true, importance = ParamImportance.SECONDARY) public Sampling.Strategy sampling_strategy = Sampling.Strategy.RANDOM; @API(help = "Sampling Rate at each split.", filter = Default.class, json = true, dmin = 0, dmax = 1, importance = ParamImportance.EXPERT) public double sample_rate = 0.67; // @API(help ="Score each iteration", filter = Default.class, json = true, importance = ParamImportance.SECONDARY) public boolean score_each_iteration = false; @API(help = "Create the Score POJO", filter = Default.class, json = true, importance = ParamImportance.EXPERT) public boolean score_pojo = true; /*Imbalanced Classes*/ /** * For imbalanced data, balance training data class counts via * over/under-sampling. This can result in improved predictive accuracy. */ @API(help = "Balance training data class counts via over/under-sampling (for imbalanced data)", filter = Default.class, json = true, importance = ParamImportance.EXPERT) public boolean balance_classes = false; /** * When classes are balanced, limit the resulting dataset size to the * specified multiple of the original dataset size. 
*/ @API(help = "Maximum relative size of the training data after balancing class counts (can be less than 1.0)", filter = Default.class, json = true, dmin=1e-3, importance = ParamImportance.EXPERT) public float max_after_balance_size = Float.POSITIVE_INFINITY; @API(help = "Out of bag error estimate", filter = Default.class, json = true, importance = ParamImportance.SECONDARY) public boolean oobee = true; @API(help = "Variable Importance", filter = Default.class, json = true) public boolean importance = false; public Key _modelKey = dest(); /* Advanced settings */ @API(help = "bin limit", filter = Default.class, json = true, lmin = 0, lmax = 65534, importance = ParamImportance.EXPERT) public int nbins = 1024; @API(help = "seed", filter = Default.class, json = true, importance = ParamImportance.EXPERT) public long seed = -1; @API(help = "Tree splits and extra statistics printed to stdout.", filter = Default.class, json = true, importance = ParamImportance.EXPERT) public boolean verbose = false; @API(help = "split limit", importance = ParamImportance.EXPERT) public int _exclusiveSplitLimit = 0; private static Random _seedGenerator = Utils.getDeterRNG( new Random().nextLong() );//0xd280524ad7fe0602L); private boolean regression; public DRFParams drfParams; private long use_seed; Tree.StatType stat_type; /** Return the query link to this page */ public static String link(Key k, String content) { RString rs = new RString("<a href='/2/SpeeDRF.query?source=%$key'>%content</a>"); rs.replace("key", k.toString()); rs.replace("content", content); return rs.toString(); } protected SpeeDRFModel makeModel( SpeeDRFModel model, double err, ConfusionMatrix cm, VarImp varimp, AUCData validAUC) { return new SpeeDRFModel(model, err, cm, varimp, validAUC); } @Override protected void queryArgumentValueSet(Argument arg, java.util.Properties inputArgs) { super.queryArgumentValueSet(arg, inputArgs); if (arg._name.equals("classification")) { arg._hideInQuery = true; } if 
(arg._name.equals("balance_classes")) { arg.setRefreshOnChange(); if(regression) { arg.disable("Class balancing is only for classification."); } } // Regression is selected if classification is false and vice-versa. if (arg._name.equals("classification")) { regression = !this.classification; } // Regression only accepts the MSE stat type. if (arg._name.equals("select_stat_type")) { if(regression) { arg.disable("Minimize MSE for regression."); } } // Class weights depend on the source data set an response value to be specified and are invalid for regression if (arg._name.equals("class_weights")) { if (source == null || response == null) { arg.disable("Requires source and response to be specified."); } if (regression) { arg.disable("No class weights for regression."); } } // Prevent Stratified Local when building regression tress. if (arg._name.equals("sampling_strategy")) { arg.setRefreshOnChange(); if (regression) { arg.disable("Random Sampling for regression trees."); } } // Variable Importance disabled in SpeeDRF regression currently if (arg._name.equals("importance")) { if (regression) { arg.disable("Variable Importance not supported in SpeeDRF regression."); } } // max balance size depends on balance_classes to be enabled if(classification) { if(arg._name.equals("max_after_balance_size") && !balance_classes) { arg.disable("Requires balance classes flag to be set.", inputArgs); } } } // Put here all precondition verification @Override protected void init() { super.init(); assert 0 <= ntrees && ntrees < 1000000; // Sanity check // Not enough rows to run if (source.numRows() - response.naCnt() <=0) throw new IllegalArgumentException("Dataset contains too many NAs!"); if( !classification && (!(response.isEnum() || response.isInt()))) throw new IllegalArgumentException("Classification cannot be performed on a float column!"); if(classification) { if (0.0f > sample_rate || sample_rate > 1.0f) throw new IllegalArgumentException("Sampling rate must be in [0,1] but 
found " + sample_rate); } if(regression) throw new IllegalArgumentException("SpeeDRF does not currently support regression."); } @Override protected void execImpl() { SpeeDRFModel rf_model; try { source.read_lock(self()); if (validation != null && validation != source) validation.read_lock(self()); buildForest(); if (n_folds > 0) CrossValUtils.crossValidate(this); } catch (JobCancelledException ex){ rf_model = UKV.get(dest()); state = JobState.CANCELLED; //for JSON REST response rf_model.get_params().state = state; //for parameter JSON on the HTML page Log.info("Random Forest was cancelled."); } catch(Exception ex) { ex.printStackTrace(); throw new RuntimeException(ex); } finally { source.unlock(self()); if (validation != null && validation != source) validation.unlock(self()); remove(); state = UKV.<Job>get(self()).state; // Argh, this is horrible new TAtomic<SpeeDRFModel>() { @Override public SpeeDRFModel atomic(SpeeDRFModel m) { if (m != null) m.get_params().state = state; return m; } }.invoke(dest()); emptyLTrash(); cleanup(); } } @Override protected Response redirect() { return SpeeDRFProgressPage.redirect(this, self(), dest()); } private void buildForest() { logStart(); SpeeDRFModel model = null; try { Frame train = setTrain(); Frame test = setTest(); Vec resp = regression ? null : train.lastVec().toEnum(); if (resp != null) gtrash(resp); float[] priorDist = setPriorDist(train); train = setStrat(train, test, resp); model = initModel(train, test, priorDist); model.start_training(null); model.write_lock(self()); drfParams = DRFParams.create(train.find(resp), model.N, model.max_depth, (int) train.numRows(), model.nbins, model.statType, use_seed, model.weights, mtries, model.sampling_strategy, (float) sample_rate, model.strata_samples, model.verbose ? 
100 : 1, _exclusiveSplitLimit, true, regression); DRFTask tsk = new DRFTask(self(), train, drfParams, model._key, model.src_key); tsk.validateInputData(train); tsk.invokeOnAllNodes(); Log.info("Tree building complete. Scoring..."); model = UKV.get(dest()); model.scoreAllTrees(test == null ? train : test, resp); // Launch a Variable Importance Task if (importance && !regression) { Log.info("Scoring complete. Performing Variable Importance Calculations."); model.current_status = "Performing Variable Importance Calculation."; Timer VITimer = new Timer(); model.variableImportanceCalc(train, resp); Log.info("Variable Importance on "+(train.numCols()-1)+" variables and "+ ntrees +" trees done in " + VITimer); } Log.info("Generating Tree Stats"); JsonObject trees = new JsonObject(); trees.addProperty(Constants.TREE_COUNT, model.size()); if( model.size() > 0 ) { trees.add(Constants.TREE_DEPTH, model.depth().toJson()); trees.add(Constants.TREE_LEAVES, model.leaves().toJson()); } model.generateHTMLTreeStats(new StringBuilder(), trees); model.current_status = "Model Complete"; } finally { if (model != null) { model.unlock(self()); model.stop_training(); } } } public SpeeDRFModel initModel(Frame train, Frame test, float[] priorDist) { setStatType(); setSeed(seed); if (mtries == -1) setMtry(regression, train.numCols() - 1); Key src_key = source._key; int src_ncols = source.numCols(); SpeeDRFModel model = new SpeeDRFModel(dest(), src_key, train, regression ? null : train.lastVec().domain(), this, priorDist); // Model INPUTS model.src_key = src_key.toString(); model.verbose = verbose; model.verbose_output = new String[]{""}; model.validation = test != null; model.confusion = null; model.zeed = use_seed; model.cmDomain = getCMDomain(); model.nbins = nbins; model.max_depth = max_depth; model.oobee = validation == null && oobee; model.statType = regression ? Tree.StatType.MSE : stat_type; model.testKey = validation == null ? 
null : validation._key; model.importance = importance; model.regression = regression; model.features = src_ncols; model.sampling_strategy = regression ? Sampling.Strategy.RANDOM : sampling_strategy; model.sample = (float) sample_rate; model.weights = regression ? null : class_weights; model.time = 0; model.N = ntrees; model.useNonLocal = true; if (!regression) model.setModelClassDistribution(new MRUtils.ClassDist(train.lastVec()).doAll(train.lastVec()).rel_dist()); model.resp_min = (int) train.lastVec().min(); model.mtry = mtries; int csize = H2O.CLOUD.size(); model.local_forests = new Key[csize][]; for(int i=0;i<csize;i++) model.local_forests[i] = new Key[0]; model.node_split_features = new int[csize]; model.t_keys = new Key[0]; model.dtreeKeys = new Key[ntrees][regression ? 1 : model.classes()]; model.time = 0; for( Key tkey : model.t_keys ) assert DKV.get(tkey)!=null; model.jobKey = self(); model.score_pojo = score_pojo; model.current_status = "Initializing Model"; // Model OUTPUTS model.varimp = null; model.validAUC = null; model.cms = new ConfusionMatrix[1]; model.errs = new double[]{-1.0}; return model; } private void setStatType() { if (regression) stat_type = Tree.StatType.MSE; stat_type = select_stat_type == Tree.SelectStatType.ENTROPY ? Tree.StatType.ENTROPY : Tree.StatType.GINI; } private void setSeed(long s) { if (s == -1) { seed = _seedGenerator.nextLong(); use_seed = seed; } else { _seedGenerator = Utils.getDeterRNG(s); use_seed = _seedGenerator.nextLong(); } } private void setMtry(boolean reg, int numCols) { mtries = reg ? 
(int) Math.floor((float) (numCols) / 3.0f) : (int) Math.floor(Math.sqrt(numCols)); } private Frame setTrain() { Frame train = FrameTask.DataInfo.prepareFrame(source, response, ignored_cols, !regression /*toEnum is TRUE if regression is FALSE*/, false, false); if (train.lastVec().masterVec() != null && train.lastVec() != response) gtrash(train.lastVec()); return train; } private Frame setTest() { if (validation == null) return null; Frame test = null; ArrayList<Integer> v_ignored_cols = new ArrayList<Integer>(); for (int ignored_col : ignored_cols) if (validation.find(source.names()[ignored_col]) != -1) v_ignored_cols.add(ignored_col); int[] v_ignored = new int[v_ignored_cols.size()]; for (int i = 0; i < v_ignored.length; ++i) v_ignored[i] = v_ignored_cols.get(i); if (validation != null) test = FrameTask.DataInfo.prepareFrame(validation, validation.vecs()[validation.find(source.names()[source.find(response)])], v_ignored, !regression, false, false); if (test != null && test.lastVec().masterVec() != null) gtrash(test.lastVec()); return test; } private Frame setStrat(Frame train, Frame test, Vec resp) { Frame fr = train; float[] trainSamplingFactors; if (classification && balance_classes) { assert resp != null : "Regression called and stratified sampling was invoked to balance classes!"; // Handle imbalanced classes by stratified over/under-sampling // initWorkFrame sets the modeled class distribution, and model.score() corrects the probabilities back using the distribution ratios int response_idx = fr.find(_responseName); fr.replace(response_idx, resp); trainSamplingFactors = new float[resp.domain().length]; //leave initialized to 0 -> will be filled up below Frame stratified = sampleFrameStratified(fr, resp, trainSamplingFactors, (long) (max_after_balance_size * fr.numRows()), use_seed, true, false); if (stratified != fr) { fr = stratified; gtrash(stratified); } } // Check that that test/train data are consistent, throw warning if not if(classification && validation 
!= null) { assert resp != null : "Regression called and stratified sampling was invoked to balance classes!"; Vec testresp = test.lastVec().toEnum(); gtrash(testresp); if (!isSubset(testresp.domain(), resp.domain())) { Log.warn("Test set domain: " + Arrays.toString(testresp.domain()) + " \nTrain set domain: " + Arrays.toString(resp.domain())); Log.warn("Train and Validation data have inconsistent response columns! Test data has a response not found in the Train data!"); } } return fr; } private float[] setPriorDist(Frame train) { return classification ? new MRUtils.ClassDist(train.lastVec()).doAll(train.lastVec()).rel_dist() : null; } public Frame score( Frame fr ) { return ((SpeeDRFModel)UKV.get(dest())).score(fr); } private boolean isSubset(String[] sub, String[] container) { HashSet<String> hs = new HashSet<String>(); Collections.addAll(hs, container); for (String s: sub) { if (!hs.contains(s)) return false; } return true; } public final static class DRFTask extends DRemoteTask { /** The RF Model. Contains the dataset being worked on, the classification * column, and the training columns. */ // private final SpeeDRFModel _rfmodel; private final Key _rfmodel; /** Job representing this DRF execution. */ private final Key _jobKey; /** RF parameters. */ private final DRFParams _params; private final Frame _fr; private final String _key; DRFTask(Key jobKey, Frame frameKey, DRFParams params, Key rfmodel, String src_key) { _jobKey = jobKey; _fr = frameKey; _params = params; _rfmodel = rfmodel; _key = src_key; } /**Inhale the data, build a DataAdapter and kick-off the computation. 
* */ @Override public final void lcompute() { final DataAdapter dapt = DABuilder.create(_params, _rfmodel).build(_fr, _params._useNonLocalData); if (dapt == null) { tryComplete(); return; } Data localData = Data.make(dapt); int numSplitFeatures = howManySplitFeatures(); int ntrees = howManyTrees(); int[] rowsPerChunks = howManyRPC(_fr); updateRFModel(_rfmodel, numSplitFeatures); updateRFModelStatus(_rfmodel, "Building Forest"); updateRFModelLocalForests(_rfmodel, ntrees); Log.info("Dispalying local forest stats:"); SpeeDRF.build(_jobKey, _rfmodel, _params, localData, ntrees, numSplitFeatures, rowsPerChunks); tryComplete(); } static void updateRFModel(Key modelKey, final int numSplitFeatures) { final int idx = H2O.SELF.index(); new TAtomic<SpeeDRFModel>() { @Override public SpeeDRFModel atomic(SpeeDRFModel old) { if(old == null) return null; SpeeDRFModel newModel = (SpeeDRFModel)old.clone(); newModel.node_split_features[idx] = numSplitFeatures; return newModel; } }.invoke(modelKey); } static void updateRFModelLocalForests(Key modelKey, final int num_trees) { final int selfIdx = H2O.SELF.index(); new TAtomic<SpeeDRFModel>() { @Override public SpeeDRFModel atomic(SpeeDRFModel old) { if (old == null) return null; SpeeDRFModel newModel = (SpeeDRFModel)old.clone(); newModel.local_forests[selfIdx] = new Key[num_trees]; return newModel; } }.invoke(modelKey); } static void updateRFModelStatus(Key modelKey, final String status) { new TAtomic<SpeeDRFModel>() { @Override public SpeeDRFModel atomic(SpeeDRFModel old) { if(old == null) return null; SpeeDRFModel newModel = (SpeeDRFModel)old.clone(); newModel.current_status = status; return newModel; } }.invoke(modelKey); } /** Unless otherwise specified each split looks at sqrt(#features). */ private int howManySplitFeatures() { return _params.num_split_features; } /** Figure the number of trees to make locally, so the total hits ntrees. * Divide equally amongst all the nodes that actually have data. 
First: * compute how many nodes have data. Give each Node ntrees/#nodes worth of * trees. Round down for later nodes, and round up for earlier nodes. */ private int howManyTrees() { Frame fr = _fr; final long num_chunks = fr.anyVec().nChunks(); final int num_nodes = H2O.CLOUD.size(); HashSet<H2ONode> nodes = new HashSet<H2ONode>(); for( int i=0; i<num_chunks; i++ ) { nodes.add(fr.anyVec().chunkKey(i).home_node()); if( nodes.size() == num_nodes ) // All of nodes covered? break; // That means we are done. } H2ONode[] array = nodes.toArray(new H2ONode[nodes.size()]); Arrays.sort(array); // Give each H2ONode ntrees/#nodes worth of trees. Round down for later nodes, // and round up for earlier nodes int ntrees = _params.num_trees / nodes.size(); if( Arrays.binarySearch(array, H2O.SELF) < _params.num_trees - ntrees*nodes.size() ) ++ntrees; return ntrees; } private int[] howManyRPC(Frame fr) { int[] result = new int[fr.anyVec().nChunks()]; for(int i = 0; i < result.length; ++i) { result[i] = fr.anyVec().chunkLen(i); } return result; } private void validateInputData(Frame fr) { Vec[] vecs = fr.vecs(); Vec c = vecs[vecs.length-1]; if (!_params.regression) { final int classes = c.cardinality(); if (!(2 <= classes && classes <= 254)) throw new IllegalArgumentException("Response contains " + classes + " classes, but algorithm supports only 254 levels"); } if (_params.num_split_features!=-1 && (_params.num_split_features< 1 || _params.num_split_features>vecs.length-1)) throw new IllegalArgumentException("Number of split features exceeds available data. Should be in [1,"+(vecs.length-1)+"]"); ChunkAllocInfo cai = new ChunkAllocInfo(); boolean can_load_all = canLoadAll(fr, cai); if (_params._useNonLocalData && !can_load_all) { String heap_warning = "This algorithm requires loading of all data from remote nodes." 
+ "\nThe node " + cai.node + " requires " + PrettyPrint.bytes(cai.requiredMemory) + " more memory to load all data and perform computation but there is only " + PrettyPrint.bytes(cai.availableMemory) + " of available memory." + "\n\nPlease provide more memory for JVMs \n\n-OR-\n\n Try Big Data Random Forest: "; Log.warn(heap_warning); throw new IllegalArgumentException(heap_warning + DRF.link(Key.make(_key), "Big Data Random Forest") ); } if (can_load_all) { _params._useNonLocalData = true; Log.info("Enough available free memory to compute on all data. Pulling all data locally and then launching RF."); } } private boolean canLoadAll(final Frame fr, ChunkAllocInfo cai) { int nchks = fr.anyVec().nChunks(); long localBytes = 0l; for (int i = 0; i < nchks; ++i) { Key k = fr.anyVec().chunkKey(i); if (k.home()) { localBytes += fr.anyVec().chunkForChunkIdx(i).byteSize(); } } long memForNonLocal = fr.byteSize() - localBytes; // Also must add in the RF internal data structure overhead memForNonLocal += fr.numRows() * fr.numCols(); for(int i = 0; i < H2O.CLOUD._memary.length; i++) { HeartBeat hb = H2O.CLOUD._memary[i]._heartbeat; long nodeFreeMemory = (long)(hb.get_max_mem() * 0.8); // * OVERHEAD_MAGIC; Log.debug(Log.Tag.Sys.RANDF, i + ": computed available mem: " + PrettyPrint.bytes(nodeFreeMemory)); Log.debug(Log.Tag.Sys.RANDF, i + ": remote chunks require: " + PrettyPrint.bytes(memForNonLocal)); if (nodeFreeMemory - memForNonLocal <= 0 || (nodeFreeMemory <= TWO_HUNDRED_MB && memForNonLocal >= ONE_FIFTY_MB)) { Log.info("Node free memory raw: "+nodeFreeMemory); cai.node = H2O.CLOUD._memary[i]; cai.availableMemory = nodeFreeMemory; cai.requiredMemory = memForNonLocal; return false; } } return true; } /** Helper POJO to store required chunk allocation. 
*/ private static class ChunkAllocInfo { H2ONode node; long availableMemory; long requiredMemory; } static final float OVERHEAD_MAGIC = 3/8.f; // memory overhead magic static final long TWO_HUNDRED_MB = 200 * 1024 * 1024; static final long ONE_FIFTY_MB = 150 * 1024 * 1024; @Override public void reduce(DRemoteTask drt) { } } private static final long ROOT_SEED_ADD = 0x026244fd935c5111L; private static final long TREE_SEED_INIT = 0x1321e74a0192470cL; /** Build random forest for data stored on this node. */ public static void build( final Key jobKey, final Key modelKey, final DRFParams drfParams, final Data localData, int ntrees, int numSplitFeatures, int[] rowsPerChunks) { Timer t_alltrees = new Timer(); Tree[] trees = new Tree[ntrees]; Log.info(Log.Tag.Sys.RANDF,"Building "+ntrees+" trees"); Log.info(Log.Tag.Sys.RANDF,"Number of split features: "+ numSplitFeatures); Log.info(Log.Tag.Sys.RANDF,"Starting RF computation with "+ localData.rows()+" rows "); Random rnd = Utils.getRNG(localData.seed() + ROOT_SEED_ADD); Sampling sampler = createSampler(drfParams, rowsPerChunks); byte producerId = (byte) H2O.SELF.index(); for (int i = 0; i < ntrees; ++i) { long treeSeed = rnd.nextLong() + TREE_SEED_INIT; // make sure that enough bits is initialized trees[i] = new Tree(jobKey, modelKey, localData, producerId, drfParams.max_depth, drfParams.stat_type, numSplitFeatures, treeSeed, i, drfParams._exclusiveSplitLimit, sampler, drfParams._verbose, drfParams.regression, !drfParams._useNonLocalData, ((SpeeDRFModel)UKV.get(modelKey)).score_pojo); } Log.info("Invoking the tree build tasks on all nodes."); DRemoteTask.invokeAll(trees); Log.info(Log.Tag.Sys.RANDF,"All trees ("+ntrees+") done in "+ t_alltrees); } static Sampling createSampler(final DRFParams params, int[] rowsPerChunks) { switch(params.sampling_strategy) { case RANDOM : return new Sampling.Random(params.sample, rowsPerChunks); default: assert false : "Unsupported sampling strategy"; return null; } } /** RF execution 
parameters. */ public final static class DRFParams extends Iced { /** Total number of trees */ int num_trees; /** If true, build trees in parallel (default: true) */ boolean parallel; /** Maximum depth for trees (default MaxInt) */ int max_depth; /** Split statistic */ Tree.StatType stat_type; /** Feature holding the classifier (default: #features-1) */ int classcol; /** Utilized sampling method */ Sampling.Strategy sampling_strategy; /** Proportion of observations to use for building each individual tree (default: .67)*/ float sample; /** Limit of the cardinality of a feature before we bin. */ int bin_limit; /** Weights of the different features (default: 1/features) */ double[] class_weights; /** Arity under which we may use exclusive splits */ public int _exclusiveSplitLimit; /** Output warnings and info*/ public int _verbose; /** Number of features which are tried at each split * If it is equal to -1 then it is computed as sqrt(num of usable columns) */ int num_split_features; /** Defined stratas samples for each class */ float[] strata_samples; /** Utilize not only local data but try to use data from other nodes. 
*/ boolean _useNonLocalData; /** Number of rows per chunk - used to replay sampling */ int _numrows; /** Pseudo random seed initializing RF algorithm */ long seed; /** Build regression trees if true */ boolean regression; public static DRFParams create(int col, int ntrees, int depth, int numrows, int binLimit, Tree.StatType statType, long seed, double[] classWt, int numSplitFeatures, Sampling.Strategy samplingStrategy, float sample, float[] strataSamples, int verbose, int exclusiveSplitLimit, boolean useNonLocalData, boolean regression) { DRFParams drfp = new DRFParams(); drfp.num_trees = ntrees; drfp.max_depth = depth; drfp.sample = sample; drfp.bin_limit = binLimit; drfp.stat_type = statType; drfp.seed = seed; drfp.class_weights = classWt; drfp.num_split_features = numSplitFeatures; drfp.sampling_strategy = samplingStrategy; drfp.strata_samples = strataSamples; drfp._numrows = numrows; drfp._useNonLocalData = useNonLocalData; drfp._exclusiveSplitLimit = exclusiveSplitLimit; drfp._verbose = verbose; drfp.classcol = col; drfp.regression = regression; drfp.parallel = true; return drfp; } } /** * Cross-Validate a SpeeDRF model by building new models on N train/test holdout splits * @param splits Frames containing train/test splits * @param cv_preds Array of Frames to store the predictions for each cross-validation run * @param offsets Array to store the offsets of starting row indices for each cross-validation run * @param i Which fold of cross-validation to perform */ @Override public void crossValidate(Frame[] splits, Frame[] cv_preds, long[] offsets, int i) { // Train a clone with slightly modified parameters (to account for cross-validation) final SpeeDRF cv = (SpeeDRF) this.clone(); cv.genericCrossValidation(splits, offsets, i); cv_preds[i] = ((SpeeDRFModel) UKV.get(cv.dest())).score(cv.validation); new TAtomic<SpeeDRFModel>() { @Override public SpeeDRFModel atomic(SpeeDRFModel m) { if (!keep_cross_validation_splits && /*paranoid*/ 
cv.dest().toString().contains("xval")) { m.get_params().source = null; m.get_params().validation=null; m.get_params().response=null; } return m; } }.invoke(cv.dest()); } }
package com.braintreepayments.api;

import android.content.Context;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;

import com.braintreepayments.api.exceptions.BraintreeException;
import com.braintreepayments.api.exceptions.ErrorWithResponse;
import com.braintreepayments.api.exceptions.InvalidArgumentException;
import com.braintreepayments.api.interfaces.ConfigurationListener;
import com.braintreepayments.api.interfaces.HttpResponseCallback;
import com.braintreepayments.api.interfaces.PaymentMethodNonceCallback;
import com.braintreepayments.api.internal.ClassHelper;
import com.braintreepayments.api.internal.ManifestValidator;
import com.braintreepayments.api.models.BraintreeRequestCodes;
import com.braintreepayments.api.models.CardBuilder;
import com.braintreepayments.api.models.CardNonce;
import com.braintreepayments.api.models.Configuration;
import com.braintreepayments.api.models.PaymentMethodNonce;
import com.braintreepayments.api.models.ThreeDSecureAuthenticationResponse;
import com.braintreepayments.api.models.ThreeDSecureInfo;
import com.braintreepayments.api.models.ThreeDSecureLookup;
import com.braintreepayments.api.models.ThreeDSecureRequest;
import com.cardinalcommerce.cardinalmobilesdk.Cardinal;
import com.cardinalcommerce.cardinalmobilesdk.models.response.ValidateResponse;
import com.cardinalcommerce.cardinalmobilesdk.services.CardinalInitService;
import com.cardinalcommerce.cardinalmobilesdk.services.CardinalValidateReceiver;
import com.cardinalcommerce.shared.models.enums.DirectoryServerID;
import com.cardinalcommerce.shared.models.parameters.CardinalConfigurationParameters;
import com.cardinalcommerce.shared.models.parameters.CardinalEnvironment;

import org.json.JSONException;
import org.json.JSONObject;

import java.io.Serializable;

import static androidx.appcompat.app.AppCompatActivity.RESULT_OK;
import static com.braintreepayments.api.models.BraintreeRequestCodes.THREE_D_SECURE;

/**
 * Entry point for performing 3D Secure card verification with Braintree.
 * <p>
 * Supports two flows visible in this class: a browser-switch based flow (3DS 1.x,
 * {@link #launchBrowserSwitch(BraintreeFragment, ThreeDSecureLookup)}) and a
 * Cardinal-SDK based flow (3DS 2.x, started via
 * {@link #performCardinalAuthentication(BraintreeFragment, ThreeDSecureLookup)}
 * and resumed in {@link #onActivityResult(BraintreeFragment, int, Intent)}).
 * <p>
 * NOTE(review): all state is static; {@code mDFReferenceId} is shared across
 * verifications, so concurrent verifications would share the same session id —
 * presumably only one verification runs at a time; confirm with callers.
 */
public class ThreeDSecure {

    // Consumer session id ("DF reference id") handed back by the Cardinal SDK in
    // configureCardinal(); forwarded to the server in the 3DS lookup request.
    private static String mDFReferenceId;

    /**
     * The versioned path of the 3D Secure assets to use. Hosted by Braintree.
     */
    private static final String THREE_D_SECURE_ASSETS_PATH = "/mobile/three-d-secure-redirect/0.1.5";

    /**
     * Tokenizes the given card and then performs 3DS verification on the resulting nonce.
     * Tokenization failures are reported through {@code fragment.postCallback(Exception)}.
     *
     * @deprecated Use {{@link #performVerification(BraintreeFragment, CardBuilder, ThreeDSecureRequest)}} for 3DS 2.0.
     * <p>
     * The amount can be provided via {@link ThreeDSecureRequest#amount(String)}.
     */
    @Deprecated
    public static void performVerification(final BraintreeFragment fragment, final CardBuilder cardBuilder,
            final String amount) {
        TokenizationClient.tokenize(fragment, cardBuilder, new PaymentMethodNonceCallback() {
            @Override
            public void success(PaymentMethodNonce paymentMethodNonce) {
                // Chain into the nonce-based deprecated overload.
                performVerification(fragment, paymentMethodNonce.getNonce(), amount);
            }

            @Override
            public void failure(Exception exception) {
                fragment.postCallback(exception);
            }
        });
    }

    /**
     * Performs 3DS verification for an already-tokenized payment method nonce.
     * Wraps the arguments into a {@link ThreeDSecureRequest} and delegates.
     *
     * @deprecated Use {{@link #performVerification(BraintreeFragment, ThreeDSecureRequest)}} for 3DS 2.0.
     * <p>
     * The nonce can be provided via {@link ThreeDSecureRequest#nonce(String)}.
     * <p>
     * The amount can be provided via {@link ThreeDSecureRequest#amount(String)}.
     */
    @Deprecated
    public static void performVerification(final BraintreeFragment fragment, final String nonce, final String amount) {
        ThreeDSecureRequest request = new ThreeDSecureRequest()
                .nonce(nonce)
                .amount(amount);

        performVerification(fragment, request);
    }

    /**
     * Tokenizes the given card, injects the resulting nonce into {@code request},
     * and then performs 3DS verification.
     * <p>
     * Requires {@code request.getAmount()} to be non-null; otherwise an
     * {@link InvalidArgumentException} is posted and the flow stops.
     */
    public static void performVerification(final BraintreeFragment fragment, final CardBuilder cardBuilder,
            final ThreeDSecureRequest request) {
        if (request.getAmount() == null) {
            fragment.postCallback(new InvalidArgumentException("The ThreeDSecureRequest amount cannot be null"));
            return;
        }

        TokenizationClient.tokenize(fragment, cardBuilder, new PaymentMethodNonceCallback() {
            @Override
            public void success(PaymentMethodNonce paymentMethodNonce) {
                // The request is mutated in place with the fresh nonce before verification.
                request.nonce(paymentMethodNonce.getNonce());
                performVerification(fragment, request);
            }

            @Override
            public void failure(Exception exception) {
                fragment.postCallback(exception);
            }
        });
    }

    /**
     * Core verification entry point: validates the request, checks configuration and
     * manifest prerequisites, performs the server-side 3DS "lookup" call, and then
     * routes to either the Cardinal (3DS 2.x) flow, the browser-switch (3DS 1.x)
     * flow, or — when no challenge is required — completes immediately with the
     * nonce returned by the lookup.
     * <p>
     * All outcomes (success, validation error, HTTP failure, JSON parse failure)
     * are reported asynchronously via {@code fragment.postCallback(...)}.
     */
    public static void performVerification(final BraintreeFragment fragment, final ThreeDSecureRequest request) {
        // Both nonce and amount are mandatory for the lookup call.
        if (request.getAmount() == null || request.getNonce() == null) {
            fragment.postCallback(new InvalidArgumentException("The ThreeDSecureRequest nonce and amount cannot be null"));
            return;
        }

        fragment.waitForConfiguration(new ConfigurationListener() {
            @Override
            public void onConfigurationFetched(Configuration configuration) {
                if (!configuration.isThreeDSecureEnabled()) {
                    fragment.postCallback(new BraintreeException("Three D Secure is not enabled in the control panel"));
                    return;
                }

                // The browser-switch (3DS 1.x fallback) requires the return-url scheme
                // to be registered on BraintreeBrowserSwitchActivity in the manifest.
                final boolean supportsBrowserSwitch = ManifestValidator.isUrlSchemeDeclaredInAndroidManifest(
                        fragment.getApplicationContext(), fragment.getReturnUrlScheme(),
                        BraintreeBrowserSwitchActivity.class);

                if (!supportsBrowserSwitch) {
                    fragment.sendAnalyticsEvent("three-d-secure.invalid-manifest");
                    fragment.postCallback(new BraintreeException("BraintreeBrowserSwitchActivity missing, " +
                            "incorrectly configured in AndroidManifest.xml or another app defines the same browser " +
                            "switch url as this app. See " +
                            "https://developers.braintreepayments.com/guides/client-sdk/android/v2#browser-switch " +
                            "for the correct configuration"));
                    return;
                }

                // Server-side lookup: determines whether a challenge is required and
                // which 3DS protocol version the card supports. mDFReferenceId may be
                // null if configureCardinal() has not completed yet.
                fragment.getHttpClient().post(TokenizationClient.versionedPath(
                        TokenizationClient.PAYMENT_METHOD_ENDPOINT + "/" + request.getNonce() +
                                "/three_d_secure/lookup"),
                        request.build(mDFReferenceId), new HttpResponseCallback() {
                            @Override
                            public void success(String responseBody) {
                                try {
                                    final ThreeDSecureLookup threeDSecureLookup = ThreeDSecureLookup.fromJson(responseBody);

                                    // A non-null ACS URL means the issuer demands a challenge.
                                    Boolean showChallenge = threeDSecureLookup.getAcsUrl() != null;
                                    String threeDSecureVersion = threeDSecureLookup.getThreeDSecureVersion();

                                    fragment.sendAnalyticsEvent(String.format("three-d-secure.verification-flow.challenge-presented.%b", showChallenge));
                                    // TODO: Move this analytic event. Instead, now we want to track with 3ds version path we actually take, not what is possible.
                                    fragment.sendAnalyticsEvent(String.format("three-d-secure.verification-flow.3ds-version.%s", threeDSecureVersion));

                                    if (showChallenge) {
                                        // 2.x only when both the server and the caller asked for it;
                                        // otherwise fall back to the browser-switch 1.x flow.
                                        if (threeDSecureVersion.startsWith("2.") && request.getVersionRequested() == 2) {
                                            performCardinalAuthentication(fragment, threeDSecureLookup);
                                        } else {
                                            launchBrowserSwitch(fragment, threeDSecureLookup);
                                        }
                                    } else {
                                        // Frictionless: no challenge, complete with the lookup nonce.
                                        completeVerificationFlowWithNoncePayload(fragment, threeDSecureLookup.getCardNonce());
                                    }
                                } catch (JSONException exception) {
                                    fragment.postCallback(exception);
                                }
                            }

                            @Override
                            public void failure(Exception exception) {
                                fragment.postCallback(exception);
                            }
                        });
            }
        });
    }

    /**
     * Starts the Cardinal-driven (3DS 2.x) challenge by launching
     * {@link ThreeDSecureActivity} with the lookup result; the activity reports
     * back through {@link #onActivityResult(BraintreeFragment, int, Intent)}.
     */
    protected static void performCardinalAuthentication(final BraintreeFragment fragment,
            final ThreeDSecureLookup threeDSecureLookup) {
        fragment.sendAnalyticsEvent("three-d-secure.verification-flow.started");

        Bundle extras = new Bundle();
        extras.putParcelable(ThreeDSecureActivity.EXTRA_THREE_D_SECURE_LOOKUP, threeDSecureLookup);

        Intent intent = new Intent(fragment.getApplicationContext(), ThreeDSecureActivity.class);
        intent.putExtras(extras);

        fragment.startActivityForResult(intent, BraintreeRequestCodes.THREE_D_SECURE);
    }

    /**
     * Exchanges the JWT produced by the Cardinal challenge for an upgraded payment
     * method nonce via the {@code authenticate_from_jwt} endpoint.
     * <p>
     * Outcome handling: no errors → success with the upgraded nonce; errors but
     * liability shift still possible → success with the ORIGINAL lookup nonce;
     * otherwise → error callback built from the GraphQL-style error payload.
     */
    protected static void authenticateCardinalJWT(final BraintreeFragment fragment,
            final ThreeDSecureLookup threeDSecureLookup, final String cardinalJWT) {
        final CardNonce cardNonce = threeDSecureLookup.getCardNonce();

        fragment.sendAnalyticsEvent("three-d-secure.verification-flow.upgrade-payment-method.started");

        final String nonce = cardNonce.getNonce();

        JSONObject body = new JSONObject();
        try {
            body.put("jwt", cardinalJWT);
            body.put("paymentMethodNonce", nonce);
        } catch (JSONException ignored) {
            // Cannot occur for non-null string keys/values; intentionally ignored.
        }

        fragment.getHttpClient().post(TokenizationClient.versionedPath(
                TokenizationClient.PAYMENT_METHOD_ENDPOINT + "/" + nonce +
                        "/three_d_secure/authenticate_from_jwt"),
                body.toString(), new HttpResponseCallback() {
                    @Override
                    public void success(String responseBody) {
                        ThreeDSecureAuthenticationResponse authenticationResponse = ThreeDSecureAuthenticationResponse.fromJson(responseBody);
                        ThreeDSecureInfo authenticationResponseThreeDSecureInfo = authenticationResponse.getThreeDSecureInfo();

                        if (authenticationResponse.getErrors() == null) { // TODO replace with authenticationResponse.isSuccessful()
                            // 3DS was successful
                            fragment.sendAnalyticsEvent("three-d-secure.verification-flow.upgrade-payment-method.succeeded");
                            completeVerificationFlowWithNoncePayload(fragment, authenticationResponse.getCardNonce());
                        } else if (authenticationResponseThreeDSecureInfo != null &&
                                authenticationResponseThreeDSecureInfo.isLiabilityShiftPossible()) {
                            // Authentication errored, but liability shift is still possible:
                            // complete with the original (pre-upgrade) nonce.
                            fragment.sendAnalyticsEvent("three-d-secure.verification-flow.upgrade-payment-method.liability-shift-possible");
                            completeVerificationFlowWithNoncePayload(fragment, cardNonce);
                        } else {
                            // TODO: This isn't a GraphQL request, but the response uses GraphQL style errors. How do we want to parse them?
                            fragment.sendAnalyticsEvent("three-d-secure.verification-flow.upgrade-payment-method.errored");
                            fragment.postCallback(ErrorWithResponse.fromGraphQLJson(authenticationResponse.getErrors()));
                        }
                    }

                    @Override
                    public void failure(Exception exception) {
                        fragment.sendAnalyticsEvent("three-d-secure.verification-flow.upgrade-payment-method.errored");
                        fragment.postCallback(exception);
                    }
                });
    }

    /**
     * Resumes the verification after either flow returns.
     * <p>
     * A non-null data URI means the V1 browser-switch flow (result encoded in the
     * {@code auth_response} query parameter); otherwise the V2 Cardinal flow
     * (lookup, validate response, and JWT carried as intent extras).
     * Non-OK result codes are silently ignored.
     */
    protected static void onActivityResult(BraintreeFragment fragment, int resultCode, Intent data) {
        if (resultCode != RESULT_OK) {
            return;
        }

        Uri resultUri = data.getData();
        if (resultUri != null) {
            // V1 flow
            ThreeDSecureAuthenticationResponse authenticationResponse = ThreeDSecureAuthenticationResponse
                    .fromJson(resultUri.getQueryParameter("auth_response"));
            if (authenticationResponse.isSuccess()) {
                completeVerificationFlowWithNoncePayload(fragment, authenticationResponse.getCardNonce());
            } else if (authenticationResponse.getException() != null) {
                fragment.postCallback(new BraintreeException(authenticationResponse.getException()));
            } else {
                // 422: server-side validation errors from the authentication response.
                fragment.postCallback(new ErrorWithResponse(422, authenticationResponse.getErrors()));
            }
        } else {
            // V2 flow
            ThreeDSecureLookup threeDSecureLookup = data.getParcelableExtra(ThreeDSecureActivity.EXTRA_THREE_D_SECURE_LOOKUP);
            ValidateResponse validateResponse = (ValidateResponse) data.getSerializableExtra(ThreeDSecureActivity.EXTRA_VALIDATION_RESPONSE);
            String jwt = data.getStringExtra(ThreeDSecureActivity.EXTRA_JWT);

            fragment.sendAnalyticsEvent(String.format("three-d-secure.verification-flow.cardinal-sdk.action-code.%s", validateResponse.getActionCode().name().toLowerCase()));

            switch (validateResponse.getActionCode()) {
                // FAILURE/SUCCESS/NOACTION all proceed to the JWT exchange; the server
                // decides the final outcome from the JWT.
                case FAILURE:
                case SUCCESS:
                case NOACTION:
                    authenticateCardinalJWT(fragment, threeDSecureLookup, jwt);

                    fragment.sendAnalyticsEvent("three-d-secure.verification-flow.completed");
                    break;
                case ERROR:
                    fragment.postCallback(new BraintreeException(validateResponse.errorDescription));

                    fragment.sendAnalyticsEvent("three-d-secure.verification-flow.failed");
                    break;
                case CANCEL:
                    fragment.postCancelCallback(BraintreeRequestCodes.THREE_D_SECURE);

                    fragment.sendAnalyticsEvent("three-d-secure.verification-flow.canceled");
                    break;
            }
        }
    }

    /**
     * Terminal success path: emits the liability-shift analytics events and posts
     * the final nonce back to the integrator.
     */
    private static void completeVerificationFlowWithNoncePayload(BraintreeFragment fragment, CardNonce noncePayload) {
        ThreeDSecureInfo info = noncePayload.getThreeDSecureInfo();

        fragment.sendAnalyticsEvent(String.format("three-d-secure.verification-flow.liability-shifted.%b", info.isLiabilityShifted()));
        fragment.sendAnalyticsEvent(String.format("three-d-secure.verification-flow.liability-shift-possible.%b", info.isLiabilityShiftPossible()));

        fragment.postCallback(noncePayload);
    }

    /**
     * Launches the V1 (browser-switch) challenge: builds the redirect URL against
     * the Braintree-hosted assets and hands it to the fragment's browser switch.
     */
    private static void launchBrowserSwitch(BraintreeFragment fragment, ThreeDSecureLookup threeDSecureLookup) {
        String assetsBaseUrl = fragment.getConfiguration().getAssetsUrl() + THREE_D_SECURE_ASSETS_PATH;
        // redirect.html bounces the browser back into the app via the return-url scheme.
        String returnUrl = String.format("%s/redirect.html?redirect_url=%s://x-callback-url/braintree/threedsecure?",
                assetsBaseUrl, fragment.getReturnUrlScheme());
        Uri redirectUri = Uri.parse(assetsBaseUrl + "/index.html")
                .buildUpon()
                .appendQueryParameter("AcsUrl", threeDSecureLookup.getAcsUrl())
                .appendQueryParameter("PaReq", threeDSecureLookup.getPareq())
                .appendQueryParameter("MD", threeDSecureLookup.getMd())
                .appendQueryParameter("TermUrl", threeDSecureLookup.getTermUrl())
                .appendQueryParameter("ReturnUrl", returnUrl)
                .build();

        fragment.browserSwitch(THREE_D_SECURE, redirectUri.toString());
    }

    /**
     * Initializes the Cardinal SDK (if present on the classpath) once the Braintree
     * configuration is available, and stores the resulting consumer session id in
     * {@link #mDFReferenceId} for later lookup calls.
     * <p>
     * NOTE(review): an init failure only emits an analytics event; mDFReferenceId
     * stays null and the lookup proceeds without it — confirm this is intended.
     */
    static void configureCardinal(final BraintreeFragment fragment) {
        // Cardinal is an optional dependency; skip entirely when absent.
        boolean cardinalSdkAvailable = ClassHelper.isClassAvailable(
                "com.cardinalcommerce.cardinalmobilesdk.Cardinal");
        if (cardinalSdkAvailable) {
            fragment.waitForConfiguration(new ConfigurationListener() {
                @Override
                public void onConfigurationFetched(Configuration configuration) {
                    CardinalEnvironment cardinalEnvironment = CardinalEnvironment.STAGING;
                    if ("production".equalsIgnoreCase(configuration.getEnvironment())) {
                        cardinalEnvironment = CardinalEnvironment.PRODUCTION;
                    }

                    CardinalConfigurationParameters cardinalConfigurationParameters = new CardinalConfigurationParameters();
                    cardinalConfigurationParameters.setEnvironment(cardinalEnvironment);
                    cardinalConfigurationParameters.setTimeout(8000);
                    cardinalConfigurationParameters.setEnableQuickAuth(false);
                    cardinalConfigurationParameters.setEnableDFSync(true);

                    Cardinal cardinal = Cardinal.getInstance();
                    cardinal.configure(fragment.getApplicationContext(), cardinalConfigurationParameters);

                    cardinal.init(configuration.getCardinalAuthenticationJwt(), new CardinalInitService() {
                        @Override
                        public void onSetupCompleted(String consumerSessionId) {
                            mDFReferenceId = consumerSessionId;
                            fragment.sendAnalyticsEvent("three-d-secure.cardinal-sdk.init.setup-completed");
                        }

                        @Override
                        public void onValidated(ValidateResponse validateResponse, String serverJWT) {
                            fragment.sendAnalyticsEvent("three-d-secure.cardinal-sdk.init.setup-failed");
                        }
                    });
                }
            });
        }
    }
}
package pointGroups.util;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.Properties;

import pointGroups.PointGroups;

/**
 * Static helpers for locating resources (settings, symmetries, images, the
 * polymake driver) relative to the compiled point-groups classes.
 */
public class PointGroupsUtility {
  /**
   * Get the location of the resource relative to the binaries of point groups.
   *
   * @param file resource path, relative to the class-path root
   * @return the URI of the resource
   * @throws FileNotFoundException if the resource does not exist on the class
   *           path or its URL cannot be converted to a URI
   */
  public static URI getResource(String file) throws FileNotFoundException {
    try {
      ClassLoader classLoader = PointGroups.class.getClassLoader();
      URL url = classLoader.getResource(file);
      // getResource returns null for a missing resource; the resulting NPE is
      // deliberately caught below and translated into FileNotFoundException.
      return url.toURI();
    } catch (NullPointerException | URISyntaxException e) {
      throw new FileNotFoundException("File " + file +
          " couldn't be found. Error-Message: " + e.getMessage());
    }
  }

  /**
   * Get the standard {@link Properties} of the point group project. We assume a
   * {@linkplain settings.ini} in the root directory of the compiled classes to
   * fetch from.
   *
   * @return the loaded project properties
   * @throws IOException if the settings file is missing or unreadable
   */
  public static Properties getProperties() throws IOException {
    Properties prop = new Properties();
    URI file = getResource("settings.ini");
    // try-with-resources: the stream was previously leaked (never closed).
    try (FileInputStream in = new FileInputStream(new File(file))) {
      prop.load(in);
    }
    return prop;
  }

  /**
   * Get the location of a symmetry relative to the `symmetries` folder in the
   * resources.
   *
   * @param symmetry file name of the symmetry resource
   * @return the symmetry file
   * @throws IOException if the resource cannot be found
   */
  public static File getSymmetry(String symmetry) throws IOException {
    URI file = getResource("symmetries/" + symmetry);
    return new File(file);
  }

  /**
   * Get the location of the image relative to the `images` folder in the
   * resources.
   *
   * @param image file name of the image resource
   * @return the URL of the image
   * @throws IOException if the resource cannot be found or converted to a URL
   */
  public static URL getImage(String image) throws IOException {
    URI file = getResource("images/" + image);
    try {
      return file.toURL();
    } catch (MalformedURLException e) {
      // Include the actual resource path; the previous message wrongly used
      // the exception message as the file name.
      throw new FileNotFoundException("File images/" + image +
          " couldn't be found. Error-Message: " + e.getMessage());
    }
  }

  /**
   * Get the path of the polymake driver from the resources.
   *
   * @return the pmDriver.pl script file
   * @throws IOException if the resource cannot be found
   */
  public static File getPolymakeDriverPath() throws IOException {
    URI file = getResource("perl/pmDriver.pl");
    return new File(file);
  }

  /**
   * Get the path of polymake from the `settings.ini`.
   *
   * @return the polymake executable path configured under POLYMAKEPATH
   * @throws IOException if the settings file cannot be loaded
   */
  public static File getPolymakePath() throws IOException {
    Properties prop = getProperties();
    String file = prop.getProperty("POLYMAKEPATH");
    return new File(file);
  }
}
package io.scif.io; import io.scif.common.Constants; import java.io.EOFException; import java.io.File; import java.io.IOException; import java.io.RandomAccessFile; import java.nio.BufferUnderflowException; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.nio.channels.FileChannel; /** * A wrapper for buffered NIO logic that implements the IRandomAccess interface. * * @see IRandomAccess * @see java.io.RandomAccessFile * @author Chris Allan */ public class NIOFileHandle extends AbstractNIOHandle { // -- Static fields -- /** Default NIO buffer size to facilitate buffered I/O. */ private static int defaultBufferSize = 1048576; /** * Default NIO buffer size to facilitate buffered I/O for read/write streams. */ private static int defaultRWBufferSize = 8192; // -- Fields -- /** The random access file object backing this FileHandle. */ private final RandomAccessFile raf; /** The file channel backed by the random access file. */ private final FileChannel channel; /** The absolute position within the file. */ private long position = 0; /** The absolute position of the start of the buffer. */ private long bufferStartPosition = 0; /** The buffer size. */ private final int bufferSize; /** The buffer itself. */ private ByteBuffer buffer; /** The default map mode for the file. */ private FileChannel.MapMode mapMode = FileChannel.MapMode.READ_ONLY; /** The buffer's byte ordering. */ private ByteOrder order; /** Service which provides NIO byte buffers, allocated or memory mapped. */ private final NIOService nioService; // -- Constructors -- /** * Creates a random access file stream to read from, and optionally to write * to, the file specified by the File argument. 
*/ public NIOFileHandle(final NIOService nioService, final File file, final String mode, final int bufferSize) throws IOException { this.nioService = nioService; this.bufferSize = bufferSize; validateMode(mode); if (mode.equals("rw")) { mapMode = FileChannel.MapMode.READ_WRITE; } raf = new RandomAccessFile(file, mode); channel = raf.getChannel(); buffer(position, 0); } /** * Creates a random access file stream to read from, and optionally to write * to, the file specified by the File argument. */ public NIOFileHandle(final NIOService nioService, final File file, final String mode) throws IOException { this(nioService, file, mode, mode.equals("rw") ? defaultRWBufferSize : defaultBufferSize); } /** * Creates a random access file stream to read from, and optionally to write * to, a file with the specified name. */ public NIOFileHandle(final NIOService nioService, final String name, final String mode) throws IOException { this(nioService, new File(name), mode); } // -- NIOFileHandle API methods -- /** * Set the default buffer size for read-only files. Subsequent uses of the * NIOFileHandle(String, String) and NIOFileHandle(File, String) constructors * will use this buffer size. */ public static void setDefaultBufferSize(final int size) { defaultBufferSize = size; } /** * Set the default buffer size for read/write files. Subsequent uses of the * NIOFileHandle(String, String) and NIOFileHandle(File, String) constructors * will use this buffer size. */ public static void setDefaultReadWriteBufferSize(final int size) { defaultRWBufferSize = size; } // -- FileHandle and Channel API methods -- /** Gets the random access file object backing this FileHandle. */ public RandomAccessFile getRandomAccessFile() { return raf; } /** Gets the FileChannel from this FileHandle. */ public FileChannel getFileChannel() { return channel; } /** Gets the current buffer size. 
*/ public int getBufferSize() { return bufferSize; } // -- AbstractNIOHandle API methods -- @Override public void setLength(final long length) throws IOException { raf.seek(length - 1); raf.write((byte) 0); buffer = null; } // -- IRandomAccess API methods -- @Override public void close() throws IOException { raf.close(); } @Override public long getFilePointer() { return position; } @Override public long length() throws IOException { return raf.length(); } @Override public ByteOrder getOrder() { return buffer == null ? order : buffer.order(); } @Override public void setOrder(final ByteOrder order) { this.order = order; if (buffer != null) { buffer.order(order); } } @Override public int read(final byte[] b) throws IOException { return read(ByteBuffer.wrap(b)); } @Override public int read(final byte[] b, final int off, final int len) throws IOException { return read(ByteBuffer.wrap(b), off, len); } @Override public int read(final ByteBuffer buf) throws IOException { return read(buf, 0, buf.capacity()); } @Override public int read(final ByteBuffer buf, final int off, final int len) throws IOException { buf.position(off); buf.limit(off + len); channel.position(position); final int readLength = channel.read(buf); buffer(position + readLength, 0); // Return value of NIO channel's is -1 when zero bytes are read at the // end // of the file. return readLength == -1 ? 
0 : readLength; } @Override public void seek(final long pos) throws IOException { if (mapMode == FileChannel.MapMode.READ_WRITE && pos > length()) { setLength(pos); } buffer(pos, 0); } @Override public boolean readBoolean() throws IOException { return readByte() == 1; } @Override public byte readByte() throws IOException { buffer(position, 1); position += 1; try { return buffer.get(); } catch (final BufferUnderflowException e) { final EOFException eof = new EOFException(EOF_ERROR_MSG); eof.initCause(e); throw eof; } } @Override public char readChar() throws IOException { buffer(position, 2); position += 2; try { return buffer.getChar(); } catch (final BufferUnderflowException e) { final EOFException eof = new EOFException(EOF_ERROR_MSG); eof.initCause(e); throw eof; } } @Override public double readDouble() throws IOException { buffer(position, 8); position += 8; try { return buffer.getDouble(); } catch (final BufferUnderflowException e) { final EOFException eof = new EOFException(EOF_ERROR_MSG); eof.initCause(e); throw eof; } } @Override public float readFloat() throws IOException { buffer(position, 4); position += 4; try { return buffer.getFloat(); } catch (final BufferUnderflowException e) { final EOFException eof = new EOFException(EOF_ERROR_MSG); eof.initCause(e); throw eof; } } @Override public void readFully(final byte[] b) throws IOException { read(b); } @Override public void readFully(final byte[] b, final int off, final int len) throws IOException { read(b, off, len); } @Override public int readInt() throws IOException { buffer(position, 4); position += 4; try { return buffer.getInt(); } catch (final BufferUnderflowException e) { final EOFException eof = new EOFException(EOF_ERROR_MSG); eof.initCause(e); throw eof; } } @Override public String readLine() throws IOException { raf.seek(position); final String line = raf.readLine(); buffer(raf.getFilePointer(), 0); return line; } @Override public long readLong() throws IOException { buffer(position, 8); 
position += 8; try { return buffer.getLong(); } catch (final BufferUnderflowException e) { final EOFException eof = new EOFException(EOF_ERROR_MSG); eof.initCause(e); throw eof; } } @Override public short readShort() throws IOException { buffer(position, 2); position += 2; try { return buffer.getShort(); } catch (final BufferUnderflowException e) { final EOFException eof = new EOFException(EOF_ERROR_MSG); eof.initCause(e); throw eof; } } @Override public int readUnsignedByte() throws IOException { return readByte() & 0xFF; } @Override public int readUnsignedShort() throws IOException { return readShort() & 0xFFFF; } @Override public String readUTF() throws IOException { raf.seek(position); final String utf8 = raf.readUTF(); buffer(raf.getFilePointer(), 0); return utf8; } @Override public int skipBytes(final int n) throws IOException { if (n < 1) { return 0; } final long oldPosition = position; final long newPosition = oldPosition + Math.min(n, length()); buffer(newPosition, 0); return (int) (position - oldPosition); } // -- DataOutput API methods -- @Override public void write(final byte[] b) throws IOException { write(ByteBuffer.wrap(b)); } @Override public void write(final byte[] b, final int off, final int len) throws IOException { write(ByteBuffer.wrap(b), off, len); } @Override public void write(final ByteBuffer buf) throws IOException { write(buf, 0, buf.capacity()); } @Override public void write(final ByteBuffer buf, final int off, final int len) throws IOException { writeSetup(len); buf.limit(off + len); buf.position(off); position += channel.write(buf, position); buffer = null; } @Override public void write(final int b) throws IOException { writeByte(b); } @Override public void writeBoolean(final boolean v) throws IOException { writeByte(v ? 
1 : 0); } @Override public void writeByte(final int v) throws IOException { writeSetup(1); buffer.put((byte) v); doWrite(1); } @Override public void writeBytes(final String s) throws IOException { write(s.getBytes(Constants.ENCODING)); } @Override public void writeChar(final int v) throws IOException { writeSetup(2); buffer.putChar((char) v); doWrite(2); } @Override public void writeChars(final String s) throws IOException { write(s.getBytes("UTF-16BE")); } @Override public void writeDouble(final double v) throws IOException { writeSetup(8); buffer.putDouble(v); doWrite(8); } @Override public void writeFloat(final float v) throws IOException { writeSetup(4); buffer.putFloat(v); doWrite(4); } @Override public void writeInt(final int v) throws IOException { writeSetup(4); buffer.putInt(v); doWrite(4); } @Override public void writeLong(final long v) throws IOException { writeSetup(8); buffer.putLong(v); doWrite(8); } @Override public void writeShort(final int v) throws IOException { writeSetup(2); buffer.putShort((short) v); doWrite(2); } @Override public void writeUTF(final String str) throws IOException { // NB: number of bytes written is greater than the length of the string final int strlen = str.getBytes(Constants.ENCODING).length + 2; writeSetup(strlen); raf.seek(position); raf.writeUTF(str); position += strlen; buffer = null; } /** * Aligns the NIO buffer, maps it if it is not currently and sets all relevant * positions and offsets. * * @param offset The location within the file to read from. * @param size The requested read length. * @throws IOException If there is an issue mapping, aligning or allocating * the buffer. 
*/ private void buffer(long offset, final int size) throws IOException { position = offset; final long newPosition = offset + size; if (newPosition < bufferStartPosition || newPosition > bufferStartPosition + bufferSize || buffer == null) { bufferStartPosition = offset; long currentLength = length(); if (currentLength > 0 && currentLength - 1 < bufferStartPosition) { bufferStartPosition = currentLength - 1; } long newSize = Math.min(currentLength - bufferStartPosition, bufferSize); if (newSize < size && newSize == bufferSize) newSize = size; if (newSize + bufferStartPosition > currentLength) { newSize = currentLength - bufferStartPosition; } offset = bufferStartPosition; final ByteOrder byteOrder = buffer == null ? order : getOrder(); buffer = nioService.allocate(channel, mapMode, bufferStartPosition, (int) newSize); if (byteOrder != null) setOrder(byteOrder); } buffer.position((int) (offset - bufferStartPosition)); if (buffer.position() + size > buffer.limit() && mapMode == FileChannel.MapMode.READ_WRITE) { buffer.limit(buffer.position() + size); } } private void writeSetup(final int length) throws IOException { validateLength(length); buffer(position, length); } private void doWrite(final int length) throws IOException { buffer.position(buffer.position() - length); channel.write(buffer, position); position += length; } }
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package bropals.lib.simplegame.test; import bropals.lib.simplegame.*; import bropals.lib.simplegame.io.PropertiesReader; /** * * @author Owner */ public class TesterClass { public static void main(String[] args) { // make a window GameWindow window = new GameWindow("Super cool", 500, 350); // make a GameStateRunner that is using the newly made window with // an inital GameState. GameStateRunner runner = new GameStateRunner(window, new TestState()); // begin looping the game! runner.loop(); } }
package com.highstreet.technologies.odl.app.impl;

import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.util.concurrent.CheckedFuture;

import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;

import org.opendaylight.controller.md.sal.binding.api.DataBroker;
import org.opendaylight.controller.md.sal.binding.api.DataTreeIdentifier;
import org.opendaylight.controller.md.sal.binding.api.MountPoint;
import org.opendaylight.controller.md.sal.binding.api.MountPointService;
import org.opendaylight.controller.md.sal.binding.api.ReadOnlyTransaction;
import org.opendaylight.controller.md.sal.binding.api.ReadWriteTransaction;
import org.opendaylight.controller.md.sal.common.api.data.AsyncTransaction;
import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
import org.opendaylight.controller.md.sal.common.api.data.ReadFailedException;
import org.opendaylight.controller.md.sal.common.api.data.TransactionChain;
import org.opendaylight.controller.md.sal.common.api.data.TransactionChainListener;
import org.opendaylight.controller.md.sal.common.api.data.TransactionCommitFailedException;
import org.opendaylight.controller.sal.binding.api.BindingAwareBroker;
import org.opendaylight.controller.sal.binding.api.RpcProviderRegistry;
import org.opendaylight.yang.gen.v1.uri.onf.coremodel.corefoundationmodule.superclassesandcommonpackages.rev160710.UniversalId;
import org.opendaylight.yang.gen.v1.uri.onf.coremodel.corenetworkmodule.objectclasses.rev160811.NetworkElement;
import org.opendaylight.yang.gen.v1.uri.onf.coremodel.corenetworkmodule.objectclasses.rev160811.logicalterminationpoint.LpList;
import org.opendaylight.yang.gen.v1.uri.onf.coremodel.corenetworkmodule.objectclasses.rev160811.networkelement.LtpRefList;
import org.opendaylight.yang.gen.v1.uri.onf.microwavemodel.objectclasses.airinterface.rev160901.MWAirInterfacePac;
import org.opendaylight.yang.gen.v1.uri.onf.microwavemodel.objectclasses.airinterface.rev160901.MWAirInterfacePacBuilder;
import org.opendaylight.yang.gen.v1.uri.onf.microwavemodel.objectclasses.airinterface.rev160901.MWAirInterfacePacKey;
import org.opendaylight.yang.gen.v1.uri.onf.microwavemodel.objectclasses.airinterface.rev160901.mw_airinterface_pac.AirInterfaceConfigurationBuilder;
import org.opendaylight.yang.gen.v1.urn.opendaylight.netconf.node.topology.rev150114.NetconfNode;
import org.opendaylight.yang.gen.v1.urn.opendaylight.netconf.node.topology.rev150114.NetconfNodeConnectionStatus;
import org.opendaylight.yang.gen.v1.urn.opendaylight.netconf.node.topology.rev150114.network.topology.topology.topology.types.TopologyNetconf;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.closedloopautomation.rev160919.ClosedLoopAutomationService;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.closedloopautomation.rev160919.ReadTimerOutput;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.closedloopautomation.rev160919.ReadTimerOutputBuilder;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.closedloopautomation.rev160919.SaveTimerInput;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.closedloopautomation.rev160919.SaveTimerOutput;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.closedloopautomation.rev160919.SaveTimerOutputBuilder;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.closedloopautomation.rev160919.StartOutput;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.closedloopautomation.rev160919.StartOutputBuilder;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.closedloopautomation.rev160919.Timer;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.closedloopautomation.rev160919.TimerConfig;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.closedloopautomation.rev160919.TimerConfigBuilder;
import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.NetworkTopology;
import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.TopologyId;
import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.Topology;
import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.TopologyKey;
import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.Node;
import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.NodeKey;
import org.opendaylight.yangtools.concepts.ListenerRegistration;
import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
import org.opendaylight.yangtools.yang.common.RpcResult;
import org.opendaylight.yangtools.yang.common.RpcResultBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Implementation of the closed-loop-automation RPC service. It periodically
 * (or on demand via the {@code start} RPC) walks all NETCONF-mounted devices
 * in the topology and rewrites the air-interface name of every suitable
 * device. Timer configuration is persisted in the CONFIG datastore under
 * {@code TimerConfig}.
 */
public class ClosedLoopAutomationImpl implements AutoCloseable, ClosedLoopAutomationService, TransactionChainListener {

    private static final Logger LOG = LoggerFactory.getLogger(ClosedLoopAutomationImpl.class);
    // Defaults used when the TimerConfig node is absent from the datastore.
    private static final Boolean TIMER_DEFAULT_ENABLED = Boolean.FALSE;
    private static final Timer.Option TIMER_DEFAULT_OPTION = Timer.Option._5seconds;
    // Path of the persisted timer settings in the CONFIG datastore.
    private static final InstanceIdentifier<TimerConfig> TIMER_SETTING_PATH = InstanceIdentifier.create(TimerConfig.class);
    // Path of the NETCONF topology in the OPERATIONAL datastore.
    private static final InstanceIdentifier<Topology> NETCONF_TOPO_IID = InstanceIdentifier
            .create(NetworkTopology.class)
            .child(Topology.class, new TopologyKey(new TopologyId(TopologyNetconf.QNAME.getLocalName())));
    // A device is processed only if it advertises this capability substring.
    public static final String SUITABLE_CAPABILITY = "http://netconfcentral.org/ns/yuma-proc";
    // Layer protocol name identifying microwave physical-section LPs.
    public static final String LAYER_PROTOCOL = "MWPS";

    private DataBroker dataBroker;
    private BindingAwareBroker.RpcRegistration registration;
    private MountPointService mountService;
    private ScheduledExecutorService scheduledExecutorService;
    // Handle of the currently scheduled timer job (null when no timer runs).
    private ScheduledFuture scheduledFuture;
    private ListenerRegistration dataTreeChangeHandler;

    /**
     * Here everything are initialized. Databroker, executor scheduler for timer and registration for datatree changelistener.
     * If a timer configuration is already persisted and enabled, the periodic
     * job is started immediately.
     *
     * @param providerContext source of the SAL services (DataBroker, MountPointService)
     * @param rpcProviderRegistry registry this instance registers its RPC implementation with
     */
    public ClosedLoopAutomationImpl(BindingAwareBroker.ProviderContext providerContext,
                                    final RpcProviderRegistry rpcProviderRegistry) {
        this.dataBroker = providerContext.getSALService(DataBroker.class);
        this.mountService = providerContext.getSALService(MountPointService.class);
        this.registration = rpcProviderRegistry.addRpcImplementation(ClosedLoopAutomationService.class, this);

        // registration for data tree change lister. Listener listens whether some device is connected or disconnected
        dataTreeChangeHandler = dataBroker.registerDataTreeChangeListener(new DataTreeIdentifier<Topology>(LogicalDatastoreType.OPERATIONAL, NETCONF_TOPO_IID), new DeviceConnectionStatusHandler());

        // config executor scheduler, where will be maximally one job.
        // NOTE(review): pool size 10 although the comment above says at most
        // one job is scheduled — confirm whether a single-threaded scheduler
        // was intended.
        scheduledExecutorService = Executors.newScheduledThreadPool(10);
        try {
            Timer timer = readTimer().get().getResult();
            LOG.info("Init timer. isEnabled {} and option {}",timer.isEnabled(), timer.getOption());
            if (timer.isEnabled()) {
                scheduledFuture = createNewTimerJob(timer.getOption());
            }
        } catch (Exception e) {
            LOG.error(e.getMessage(),e);
        }
    }

    /**
     * Immediately execute closed loop process
     *
     * @return RPC output with status "ok" or "failed"
     */
    @Override
    public Future<RpcResult<StartOutput>> start() {
        LOG.info("Call close loop automation");
        boolean result = processNetworkDevices();
        StartOutputBuilder startBuilder = new StartOutputBuilder();
        startBuilder.setStatus(result ? "ok" : "failed");
        return RpcResultBuilder.success(startBuilder.build()).buildFuture();
    }

    /**
     * Save new configuration of the timer to the config datastore. According this config, reschedule actually timer.
     * If it needs, run new job
     *
     * @param input new timer setting (enabled flag + interval option)
     * @return RPC output with status "ok", "failed", or a validation message
     */
    @Override
    public Future<RpcResult<SaveTimerOutput>> saveTimer(SaveTimerInput input) {
        LOG.info("Received data. Enabled {}, Option: {} ", input.isEnabled(), input.getOption());
        String message = null;
        if (input.isEnabled()==null || input.getOption() == null) {
            message = "Value of enabled or option is empty";
        } else {
            // save data to config datastore
            TimerConfigBuilder builder = new TimerConfigBuilder();
            ReadWriteTransaction transaction = dataBroker.newReadWriteTransaction();
            builder.setEnabled(input.isEnabled()).setOption(input.getOption());
            transaction.put(LogicalDatastoreType.CONFIGURATION, TIMER_SETTING_PATH,builder.build());
            try {
                transaction.submit().checkedGet();

                // remove last instance of the job
                // cancel(false): a run already in progress is allowed to finish
                if (scheduledFuture != null) {
                    scheduledFuture.cancel(false);
                }

                //if timer is enabled, run a new job
                if (input.isEnabled()) {
                    scheduledFuture = createNewTimerJob(input.getOption());
                }
                LOG.info("Schdeduler has been changed");
                message = "ok";
            } catch (TransactionCommitFailedException e) {
                LOG.error(e.getMessage(),e);
                message = "failed";
            }
        }

        // create and send message as response of this RPC
        SaveTimerOutputBuilder saveTimerOutputBuilder = new SaveTimerOutputBuilder();
        saveTimerOutputBuilder.setStatus(message);
        return RpcResultBuilder.success(saveTimerOutputBuilder.build()).buildFuture();
    }

    /**
     * Read configuration of the Timer from the config datastore
     *
     * @return RPC output with the persisted setting, or the defaults
     *         (disabled, 5 seconds) when nothing is stored yet
     */
    @Override
    public Future<RpcResult<ReadTimerOutput>> readTimer() {
        ReadOnlyTransaction transaction = dataBroker.newReadOnlyTransaction();
        CheckedFuture<Optional<TimerConfig>, ReadFailedException> timerSettingFuture = transaction.read(LogicalDatastoreType.CONFIGURATION,TIMER_SETTING_PATH);
        ReadTimerOutputBuilder readTimerOutputBuilder = new ReadTimerOutputBuilder();
        try {
            Optional<TimerConfig> opt = timerSettingFuture.checkedGet();
            TimerConfig timerSetting = opt.get();
            readTimerOutputBuilder.setEnabled(timerSetting.isEnabled());
            readTimerOutputBuilder.setOption(timerSetting.getOption());
        } catch (Exception e) {
            // if node of the config datastore is empty, we will return default timer setting
            readTimerOutputBuilder.setEnabled(TIMER_DEFAULT_ENABLED);
            readTimerOutputBuilder.setOption(TIMER_DEFAULT_OPTION);
        }
        return RpcResultBuilder.success(readTimerOutputBuilder.build()).buildFuture();
    }

    /**
     * If device is connected and has specifically capability then this device is suitable for closed loop process
     *
     * @param deviceNode topology node to inspect
     * @return true when the node is Connected and advertises SUITABLE_CAPABILITY
     */
    private boolean canProcessDevice(Node deviceNode) {
        NetconfNode nnode = deviceNode.getAugmentation(NetconfNode.class);
        if (nnode != null && nnode.getAvailableCapabilities() != null && nnode.getAvailableCapabilities().getAvailableCapability() != null) {
            boolean hasCapability = false;
            for (String capability : nnode.getAvailableCapabilities().getAvailableCapability()) {
                if (capability.contains(SUITABLE_CAPABILITY)) {
                    hasCapability = true;
                }
            }
            if (hasCapability && nnode.getConnectionStatus() == NetconfNodeConnectionStatus.ConnectionStatus.Connected) {
                return true;
            }
        }
        return false;
    }

    /**
     * Start closed loop process. Read all possible devices from topology. Read airinterface name. Modify it on another name.
     *
     * @return true when the topology walk completed, false on any error
     */
    public boolean processNetworkDevices() {
        // NOTE(review): this read-write transaction is only used for reading
        // and is never submitted or cancelled — verify whether a
        // ReadOnlyTransaction (closed afterwards) was intended.
        ReadWriteTransaction transaction = dataBroker.newReadWriteTransaction();
        CheckedFuture<Optional<Topology>, ReadFailedException> topology = transaction.read(LogicalDatastoreType.OPERATIONAL,NETCONF_TOPO_IID);
        try {
            Optional<Topology> optTopology = topology.checkedGet();
            List<Node> nodeList = optTopology.get().getNode();
            for (Node node : nodeList) { // loop all nodes from topology
                LOG.info("Node : {}", node.getKey().getNodeId());
                if (canProcessDevice(node)) { // check if we can process it
                    processNode(node.getKey());
                }
            }
        } catch (Exception e) {
            LOG.error(e.getMessage(),e);
            return false;
        }
        return true;
    }

    /**
     * Process device which has MWAirInterfacePac
     * For every MWPS universal id found on the mounted device, the air
     * interface name is replaced with a timestamped one and merged back into
     * the device's CONFIG datastore.
     *
     * @param nodeKey key of the mounted topology node to process
     */
    private void processNode(NodeKey nodeKey) {
        final Optional<MountPoint> xrNodeOptional = mountService.getMountPoint(NETCONF_TOPO_IID.child(Node.class, nodeKey)); // try to mount the device

        Preconditions.checkArgument(xrNodeOptional.isPresent(),
                "Unable to locate mountpoint: %s, not mounted yet or not configured",
                nodeKey.getNodeId().getValue());
        final MountPoint xrNode = xrNodeOptional.get();
        final DataBroker xrNodeBroker = xrNode.getService(DataBroker.class).get();

        LOG.info("We found the suitable device : {}", nodeKey);
        // retrieve list of universal IDs which need to retrieve MWAirInterfacePac
        List<UniversalId> universalIdList = retrieveUniversalId(xrNodeBroker);
        if (universalIdList != null && universalIdList.size() > 0) {
            for (UniversalId uuid : universalIdList) {
                ReadWriteTransaction airInterfaceTransaction = null;
                try {
                    // read MWAirInterfacePac
                    airInterfaceTransaction = xrNodeBroker.newReadWriteTransaction();
                    InstanceIdentifier<MWAirInterfacePac> path = InstanceIdentifier.builder(MWAirInterfacePac.class, new MWAirInterfacePacKey(uuid)).build();
                    MWAirInterfacePac airInterfacePac = readNode(airInterfaceTransaction, path);
                    if (airInterfacePac != null) {
                        SimpleDateFormat dateFormat = new SimpleDateFormat("dd-M hh:mm:ss");
                        String newAirInterfaceName = "AirInterface "+dateFormat.format(new Date());
                        LOG.info("Old AirInterfaceName: {} - New AirInterfaceName: {}",airInterfacePac.getAirInterfaceConfiguration().getAirInterfaceName(), newAirInterfaceName);

                        // modify AirInterface name.
                        MWAirInterfacePacBuilder mWAirInterfacePacBuilder = new MWAirInterfacePacBuilder(airInterfacePac);
                        AirInterfaceConfigurationBuilder configurationBuilder = new AirInterfaceConfigurationBuilder(airInterfacePac.getAirInterfaceConfiguration());
                        configurationBuilder.setAirInterfaceName(newAirInterfaceName);
                        mWAirInterfacePacBuilder.setAirInterfaceConfiguration(configurationBuilder.build());

                        // store new information to config datastore
                        airInterfaceTransaction.merge(LogicalDatastoreType.CONFIGURATION, path, mWAirInterfacePacBuilder.build());
                        airInterfaceTransaction.submit();
                    } else { // in case if there is nothing
                        airInterfaceTransaction.cancel();
                    }
                } catch (Exception e) { // in case if something strange was happened
                    // NOTE(review): the exception is swallowed without logging —
                    // consider at least LOG.warn here.
                    if (airInterfaceTransaction != null) {
                        airInterfaceTransaction.cancel();
                    }
                }
            }
        }
    }

    /**
     * Read information from mounted node. Result is MWAirInterfacePac
     *
     * @param xrNodeReadTx transaction on the mounted device's broker
     * @param path identifier of the MWAirInterfacePac to read
     * @return the pac, or null when the path is absent
     * @throws ReadFailedException if the datastore read fails
     */
    private MWAirInterfacePac readNode(ReadWriteTransaction xrNodeReadTx, InstanceIdentifier<MWAirInterfacePac> path) throws ReadFailedException {
        Optional<MWAirInterfacePac> airInterfaceOpt;
        airInterfaceOpt = xrNodeReadTx.read(LogicalDatastoreType.CONFIGURATION, path).checkedGet();
        if (airInterfaceOpt.isPresent()) {
            return airInterfaceOpt.get();
        }
        return null;
    }

    /**
     * Search UUID in has already mounted device. Loop all Logical Termination Point and then loop all Layer Protocol.
     * We search layer protocols which are MWPS
     *
     * @param xrNodeBroker data broker of the mounted device
     * @return list of MWPS layer-protocol UUIDs (possibly empty)
     */
    private List<UniversalId> retrieveUniversalId(DataBroker xrNodeBroker) {
        List<UniversalId> list = new ArrayList<>();
        ReadOnlyTransaction networkElementTransaction = null;
        try {
            // read network elements
            InstanceIdentifier<NetworkElement> path = InstanceIdentifier.create(NetworkElement.class);
            networkElementTransaction = xrNodeBroker.newReadOnlyTransaction();
            Optional<NetworkElement> networkElementOpt = networkElementTransaction.read(LogicalDatastoreType.OPERATIONAL, path).checkedGet();
            if (networkElementOpt.isPresent()) {
                NetworkElement networkElement = networkElementOpt.get();
                if (networkElement.getLtpRefList() != null) {
                    // loop Logical Termination Point
                    for (LtpRefList ltp : networkElement.getLtpRefList()) {
                        for (LpList lp : ltp.getLpList()) { // loop Layer Protocol
                            if (LAYER_PROTOCOL.equals(lp.getLayerProtocolName().getValue())) { //if it is MWPS we have one
                                LOG.info("UUID: "+lp.getKey().getUuid());
                                list.add(lp.getKey().getUuid());
                            }
                        }
                    }
                }
            }
            networkElementTransaction.close();
        } catch (Exception e) {
            // best-effort: close the transaction and return what was collected
            if (networkElementTransaction != null) {
                networkElementTransaction.close();
            }
        }
        return list;
    }

    /**
     * Create new job according the timer option
     * All jobs start with a fixed 10-second initial delay; the period maps
     * from the option value.
     *
     * @param option configured interval
     * @return handle of the scheduled job
     * @throws IllegalArgumentException for an unknown option
     */
    private ScheduledFuture createNewTimerJob(Timer.Option option) {
        switch (option) {
            case _5seconds:
                return scheduledExecutorService.scheduleAtFixedRate(new TimerJob(this),10, 5, TimeUnit.SECONDS);
            case _30seconds:
                return scheduledExecutorService.scheduleAtFixedRate(new TimerJob(this),10, 30, TimeUnit.SECONDS);
            case _1minute:
                return scheduledExecutorService.scheduleAtFixedRate(new TimerJob(this),10, 60, TimeUnit.SECONDS);
            case _2minutes:
                return scheduledExecutorService.scheduleAtFixedRate(new TimerJob(this),10, 120, TimeUnit.SECONDS);
            case _30minutes:
                return scheduledExecutorService.scheduleAtFixedRate(new TimerJob(this),10, 1800, TimeUnit.SECONDS);
            case _1hour:
                return scheduledExecutorService.scheduleAtFixedRate(new TimerJob(this),10, 3600, TimeUnit.SECONDS);
            default: {
                throw new IllegalArgumentException("Wrong option");
            }
        }
    }

    /**
     * Clean up information
     * Unregisters the RPC, cancels the timer job, shuts down the scheduler
     * and closes the data tree change listener registration.
     *
     * @throws Exception propagated from the underlying close/cancel calls
     */
    @Override
    public void close() throws Exception {
        if (this.registration != null) {
            this.registration.close();
        }
        if (scheduledFuture != null) {
            scheduledFuture.cancel(false);
        }
        if (scheduledExecutorService != null) {
            scheduledExecutorService.shutdown();
        }
        if (dataTreeChangeHandler != null) {
            dataTreeChangeHandler.close();
        }
    }

    // TransactionChainListener callbacks — intentionally no-ops.
    @Override
    public void onTransactionChainFailed(TransactionChain<?, ?> transactionChain, AsyncTransaction<?, ?> asyncTransaction, Throwable throwable) {

    }

    @Override
    public void onTransactionChainSuccessful(TransactionChain<?, ?> transactionChain) {

    }
}

/**
 * This is the timer job. Class which is based on the Runnable. The asynchronic job execute closed loop process on the devices
 */
class TimerJob implements Runnable {
    private static final Logger LOG = LoggerFactory.getLogger(TimerJob.class);
    // Back-reference to the service whose processNetworkDevices() is executed.
    private ClosedLoopAutomationImpl impl;

    public TimerJob(ClosedLoopAutomationImpl impl) {
        this.impl = impl;
    }

    @Override
    public void run() {
        LOG.info("Timer start ");
        impl.processNetworkDevices();
        LOG.info("Timer end ");
    }
}
package org.aikodi.chameleon.oo.type;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;

import org.aikodi.chameleon.core.Config;
import org.aikodi.chameleon.core.declaration.Declaration;
import org.aikodi.chameleon.core.element.Element;
import org.aikodi.chameleon.core.language.Language;
import org.aikodi.chameleon.core.lookup.DeclarationSelector;
import org.aikodi.chameleon.core.lookup.LocalLookupContext;
import org.aikodi.chameleon.core.lookup.LookupContext;
import org.aikodi.chameleon.core.lookup.LookupContextSelector;
import org.aikodi.chameleon.core.lookup.LookupException;
import org.aikodi.chameleon.core.lookup.SelectionResult;
import org.aikodi.chameleon.core.modifier.Modifier;
import org.aikodi.chameleon.core.namespace.Namespace;
import org.aikodi.chameleon.core.property.ChameleonProperty;
import org.aikodi.chameleon.core.validation.BasicProblem;
import org.aikodi.chameleon.core.validation.Valid;
import org.aikodi.chameleon.core.validation.Verification;
import org.aikodi.chameleon.exception.ChameleonProgrammerException;
import org.aikodi.chameleon.oo.language.ObjectOrientedLanguage;
import org.aikodi.chameleon.oo.member.HidesRelation;
import org.aikodi.chameleon.oo.member.Member;
import org.aikodi.chameleon.oo.member.MemberRelationSelector;
import org.aikodi.chameleon.oo.member.SimpleNameMember;
import org.aikodi.chameleon.oo.type.generics.TypeParameter;
import org.aikodi.chameleon.oo.type.inheritance.InheritanceRelation;
import org.aikodi.chameleon.util.Lists;
import org.aikodi.chameleon.util.Pair;

import be.kuleuven.cs.distrinet.rejuse.collection.CollectionOperations;
import be.kuleuven.cs.distrinet.rejuse.java.collections.TypeFilter;
import be.kuleuven.cs.distrinet.rejuse.predicate.TypePredicate;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;

/**
 * <p>A class representing types in object-oriented programs.</p>
 *
 * <p>A class contains <a href="Member.html">members</a> as its content.</p>
 *
 * <p>Several expensive results are cached on the instance (declarations, members
 * per kind, the super type set, the {@code SuperTypeJudge}); all of those caches
 * are invalidated together by {@link #flushLocalCache()}.</p>
 *
 * @author Marko van Dooren
 */
public abstract class ClassImpl extends SimpleNameMember implements Type {

  /**
   * Initialize a new class with the given name.
   */
 /*@
   @ public behavior
   @
   @ post name() == name;
   @ post parent() == null;
   @*/
  public ClassImpl(String name) {
    setName(name);
  }

  /** A class is its own declaration type. */
  @Override
  public Type declarationType() {
    return this;
  }

  /** Add every parameter in the given collection, one by one, to the block of the given kind. */
  @Override
  public <P extends Parameter> void addAllParameters(Class<P> kind, Collection<P> parameters) {
    for(P p: parameters) {
      addParameter(kind, p);
    }
  }

//  public SimpleNameSignature signature() {
//    return (SimpleNameSignature) super.signature();

  // Cached result of declarations(); guarded by the synchronized accessors below.
  private List<? extends Declaration> _declarationCache = null;

  /**
   * Return a defensive copy of the cached declarations, or null when nothing is
   * cached or declaration caching is disabled in {@link Config}.
   */
  private synchronized List<? extends Declaration> declarationCache() {
    if(_declarationCache != null && Config.cacheDeclarations()) {
      return new ArrayList<Declaration>(_declarationCache);
    } else {
      return null;
    }
  }

  @Override
  public List<? extends Declaration> locallyDeclaredDeclarations() throws LookupException {
    return directlyDeclaredMembers();
  }

  /**
   * Human-readable name used in problem messages: the fully qualified name when
   * it can be computed, the simple name as a fallback, "" as a last resort.
   */
  @Override
  public String infoName() {
    try {
      try {
        return getFullyQualifiedName();
      } catch(Exception exc) {
        return name();
      }
    } catch(NullPointerException exc) {
      return "";
    }
  }

  /**
   * Verify that this type is a subtype of the given type, reporting a
   * {@link BasicProblem} on the given cause element when it is not or when the
   * subtype check itself fails.
   */
  @Override
  public Verification verifySubtypeOf(Type otherType, String meaningThisType, String meaningOtherType, Element cause) {
    Verification result = Valid.create();
    String messageOther = meaningOtherType+" ("+otherType.infoName()+").";
    String messageThis = meaningThisType + " (" + infoName() + ")";
    try {
      boolean subtype = subTypeOf(otherType);
      if(! subtype) {
        result = result.and(new BasicProblem(cause, messageThis+" is not a subtype of " + messageOther));
      }
    } catch (Exception e) {
      result = result.and(new BasicProblem(cause, "Cannot determine if "+messageThis+" is a subtype of "+messageOther));
    }
    return result;
  }

  /** Invalidate every cache kept by this class (and the lookup strategy cache). */
  @Override
  public synchronized void flushLocalCache() {
    super.flushLocalCache();
    if(_lexicalMembersLookupStrategy != null) {
      _lexicalMembersLookupStrategy.flushCache();
    }
    _declarationCache = null;
    _membersCache = null;
    _superTypeCache = null;
    _judge = null;
    _superTypeAndSelfCache = null;
  }

  /** Store a defensive copy of the given declarations, but only when caching is enabled. */
  private synchronized void setDeclarationCache(List<? extends Declaration> cache) {
    if(Config.cacheDeclarations()) {
      _declarationCache = new ArrayList<Declaration>(cache);
    }
  }

  /** Constructor for subclasses that set the name themselves. */
  protected ClassImpl() {
  }

  /* (non-Javadoc)
   * @see chameleon.oo.type.Tajp#getFullyQualifiedName()
   */
 /*@
   @ public behavior
   @
   @ getPackage().getFullyQualifiedName().equals("") ==> \result == getName();
   @ ! getPackage().getFullyQualifiedName().equals("") ==> \result.equals(getPackage().getFullyQualifiedName() + getName());
   @*/
  @Override
  public String getFullyQualifiedName() {
    // The prefix is the FQN of the enclosing type (for nested types) or of the
    // enclosing namespace; null when neither is available.
    String prefix;
    Type nearest = nearestAncestor(Type.class);
    if(nearest != null) {
      prefix = nearest.getFullyQualifiedName();
    } else {
      Namespace namespace = namespace();
      if(namespace != null) {
        prefix = namespace.fullyQualifiedName();
      } else {
        prefix = null;
      }
    }
    return prefix == null ? null : (prefix.equals("") ? "" : prefix+".")+name();
  }

  /** Same fallback chain as {@link #infoName()}. */
  @Override
  public String toString() {
    try {
      try {
        return getFullyQualifiedName();
      } catch(Exception exc) {
        return name();
      }
    } catch(NullPointerException exc) {
      return "";
    }
  }

  /* (non-Javadoc)
   * @see chameleon.oo.type.Tajp#targetContext()
   */
  @Override
  public LocalLookupContext<?> targetContext() throws LookupException {
    return localContext();
  }

  // Lazily created target lookup context; see localContext().
  protected LocalLookupContext _target;

  /* (non-Javadoc)
   * @see chameleon.oo.type.Tajp#localStrategy()
   */
  @Override
  public LocalLookupContext localContext() throws LookupException {
    if(_target == null) {
      Language language = language();
      if(language != null) {
        _target = language.lookupFactory().createTargetLookupStrategy(this);
      } else {
        throw new LookupException("Element of type "+getClass().getName()+" is not connected to a language. Cannot retrieve target context.");
      }
    }
    return _target;
  }

  /* (non-Javadoc)
   * @see chameleon.oo.type.Tajp#lookupContext(chameleon.core.element.Element)
   */
  @Override
  public LookupContext lookupContext(Element element) throws LookupException {
    // Lookups started from one of our inheritance relations must not see our own
    // members (that would be circular); they only see the type parameters.
    if(element instanceof InheritanceRelation && hasInheritanceRelation((InheritanceRelation) element)) {
      Element parent = parent();
      if(parent != null) {
        return lexicalParametersLookupStrategy();
      } else {
        throw new LookupException("Parent of type is null when looking for the parent context of a type.");
      }
    } else {
      return lexicalMembersLookupStrategy();
      //language().lookupFactory().createLexicalContext(this,targetContext());
    }
  }

  /**
   * Check whether the given element is an inheritance relation of this type.
   * The default implementation checks whether the given element is in the
   * collection returned by inheritanceRelations().
   *
   * This method can be overridden for example to deal with generated inheritance
   * relations, which are not lexically part of the type.
   *
   * @throws LookupException
   */
  public boolean hasInheritanceRelation(InheritanceRelation relation) throws LookupException {
    return inheritanceRelations().contains(relation);
  }

  /**
   * Lexical lookup context that first searches this type's members and then
   * falls back to the type parameters. Created lazily and cached.
   */
  protected LookupContext lexicalMembersLookupStrategy() throws LookupException {
    LookupContext result = _lexicalMembersLookupStrategy;
    // Lazy initialization
    if(result == null) {
      Language language = language();
      if(language == null) {
        throw new LookupException("Parent of type "+name()+" is null.");
      }
      _lexicalMembersLookupStrategy = language.lookupFactory().createLexicalLookupStrategy(
          targetContext(), this, new LookupContextSelector(){
        @Override
        public LookupContext strategy() throws LookupException {
          return lexicalParametersLookupStrategy();
        }
      });
      _lexicalMembersLookupStrategy.enableCache();
      result = _lexicalMembersLookupStrategy;
    }
    return result;
  }

  protected LookupContext _lexicalMembersLookupStrategy;

  /** Lexical lookup context over the type parameter blocks. Created lazily and cached. */
  protected LookupContext lexicalParametersLookupStrategy() {
    LookupContext result = _lexicalParametersLookupStrategy;
    // lazy initialization
    if(result == null) {
      _lexicalParametersLookupStrategy = language().lookupFactory().createLexicalLookupStrategy(
          _localInheritanceLookupStrategy, this);
      result = _lexicalParametersLookupStrategy;
    }
    return result;
  }

  protected LookupContext _lexicalParametersLookupStrategy;

  protected LocalParameterBlockLookupStrategy _localInheritanceLookupStrategy =
      new LocalParameterBlockLookupStrategy(this);

  /**
   * Local lookup context that resolves declarations against the parameter
   * blocks of this type, stopping at the first block that yields a match.
   */
  protected class LocalParameterBlockLookupStrategy extends LocalLookupContext<Type> {
    public LocalParameterBlockLookupStrategy(Type element) {
      super(element);
    }

    @Override
    @SuppressWarnings("unchecked")
    public <D extends Declaration> List<? extends SelectionResult> declarations(DeclarationSelector<D> selector) throws LookupException {
      //        return selector.selection(parameters());
      List<SelectionResult> result = Lists.create();
      List<ParameterBlock> parameterBlocks = parameterBlocks();
      Iterator<ParameterBlock> iter = parameterBlocks.iterator();
      // If the selector found a match, we stop.
      // We must iterate in reverse.
      // NOTE(review): despite the comment above, the loop below iterates the
      // blocks in forward order — confirm which order is intended.
      while(result.isEmpty() && iter.hasNext()) {
        ParameterBlock imporT = iter.next();
        result.addAll(selector.selection(imporT.parameters()));
      }
      return result;
    }
  }

  @Override
  public <P extends Parameter> int nbTypeParameters(Class<P> kind) {
    return parameterBlock(kind).nbTypeParameters();
  }

  /** Parameters of the block of the given kind; an empty immutable list when there is no such block. */
  @Override
  public <P extends Parameter> List<P> parameters(Class<P> kind) {
    List<P> result;
    ParameterBlock<P> parameterBlock = parameterBlock(kind);
    if(parameterBlock != null) {
      result = parameterBlock.parameters();
    } else {
      result = ImmutableList.of();
    }
    return result;
  }

  /**
   * Indices start at 1.
   */
  @Override
  public <P extends Parameter> P parameter(Class<P> kind, int index) {
    return parameterBlock(kind).parameter(index);
  }

  /** A class introduces only itself as a member. */
  @Override
  public List<Member> getIntroducedMembers() {
    return ImmutableList.<Member>of(this);
  }

  /* (non-Javadoc)
   * @see chameleon.oo.type.Tajp#complete()
   */
  @Override
  public boolean complete() throws LookupException {
    List<Member> members = localMembers(Member.class);
    // Only check for actual definitions
    new TypePredicate<Declaration>(Declaration.class).filter(members);
    Iterator<Member> iter = members.iterator();
    boolean result = true;
    // NOTE(review): this returns true only when EVERY local member still must be
    // overridden, which looks inverted for a "complete" check — confirm whether
    // the conjunct should be !mustBeOverridden(member).
    while(iter.hasNext()) {
      Member member = iter.next();
      result = result && (mustBeOverridden(member));
    }
    return result;
  }

  /* (non-Javadoc)
   * @see chameleon.oo.type.Tajp#add(chameleon.oo.type.TypeElement)
   */
 /*@
   @ public behavior
   @
   @ pre element != null;
   @
   @ post directlyDeclaredElements().contains(element);
   @*/
  @Override
  public abstract void add(TypeElement element) throws ChameleonProgrammerException;

  /* (non-Javadoc)
   * @see chameleon.oo.type.Tajp#remove(chameleon.oo.type.TypeElement)
   */
 /*@
   @ public behavior
   @
   @ pre element != null;
   @
   @ post ! directlyDeclaredElements().contains(element);
   @*/
  @Override
  public abstract void remove(TypeElement element) throws ChameleonProgrammerException;

  /* (non-Javadoc)
   * @see chameleon.oo.type.Tajp#addAll(java.util.Collection)
   */
 /*@
   @ public behavior
   @
   @ pre elements != null;
   @ pre !elements.contains(null);
   @
   @ post directlyDeclaredElements().containsAll(elements);
   @*/
  @Override
  public void addAll(Collection<? extends TypeElement> elements) throws ChameleonProgrammerException {
    for(TypeElement element: elements) {
      add(element);
    }
  }

  /* (non-Javadoc)
   * @see chameleon.oo.type.Tajp#getDirectSuperTypes()
   */
  @Override
  public List<Type> getDirectSuperTypes() throws LookupException {
    List<Type> result = Lists.create();
    for(InheritanceRelation element:inheritanceRelations()) {
      Type type = element.superType();
      if (type!=null) {
        result.add(type);
      }
    }
    return result;
  }

  @Override
  public List<Type> getDirectSuperClasses() throws LookupException {
    List<Type> result = Lists.create();
    for(InheritanceRelation element:inheritanceRelations()) {
      result.add((Type)element.superElement());
    }
    return result;
  }

  /**
   * Recursively add all super types to the accumulator, skipping any type whose
   * base type is already represented (prevents infinite recursion on cycles).
   */
  @Override
  public void accumulateAllSuperTypes(Set<Type> acc) throws LookupException {
    List<Type> temp = getDirectSuperTypes();
    for(Type type:temp) {
      boolean add=true;
      for(Type acced: acc) {
        if(acced.baseType().sameAs(type.baseType())) {
          add=false;
          break;
        }
      }
      if(add) {
        acc.add(type);
        type.accumulateAllSuperTypes(acc);
      }
    }
  }

  /** All super types, computed once with double-checked locking and cached immutably. */
  @Override
  public Set<Type> getAllSuperTypes() throws LookupException {
    if(_superTypeCache == null) {
      synchronized(this) {
        if(_superTypeCache == null) {
          Set<Type> elements = new HashSet<Type>();
          accumulateAllSuperTypes(elements);
          _superTypeCache = ImmutableSet.<Type>builder().addAll(elements).build();
        }
      }
    }
    return _superTypeCache;
  }

  /**
   * This type plus all super types, cached with double-checked locking. A
   * LookupException wrapped in a ChameleonProgrammerException during
   * accumulation is unwrapped and rethrown as-is.
   */
  @Override
  public Set<Type> getSelfAndAllSuperTypesView() throws LookupException {
    try {
      if(_superTypeAndSelfCache == null) {
        synchronized(this) {
          if(_superTypeAndSelfCache == null) {
            Set<Type> elements = new HashSet<Type>();
            newAccumulateSelfAndAllSuperTypes(elements);
            _superTypeAndSelfCache = ImmutableSet.<Type>builder().addAll(elements).build();
          }
        }
      }
      return _superTypeAndSelfCache;
    } catch(ChameleonProgrammerException exc) {
      if(exc.getCause() instanceof LookupException) {
        throw (LookupException) exc.getCause();
      } else {
        throw exc;
      }
    }
  }

  protected SuperTypeJudge _judge;

  // Guards the one-time construction of _judge; see superTypeJudge().
  protected AtomicBoolean _judgeLock = new AtomicBoolean();

  /**
   * Return the (lazily built) super type judge. The first thread to win the CAS
   * builds it; any other concurrent caller spins until the winner publishes it.
   */
  public SuperTypeJudge superTypeJudge() throws LookupException {
    SuperTypeJudge result = _judge;
    if(result == null) {
      if(_judgeLock.compareAndSet(false, true)) {
        result = new SuperTypeJudge();
        accumulateSuperTypeJudge(result);
        _judge = result;
      } else {
        //spin lock
        while((result = _judge) == null) {}
      }
    }
    return result;
    //    if(_judge == null) {
    //      synchronized(this) {
    //        if(_judge == null) {
    //          _judge = new SuperTypeJudge();
    //          accumulateSuperTypeJudge(_judge);
    //          //        SuperTypeJudge faster = new SuperTypeJudge();
    //          //        accumulateSuperTypeJudge(faster);
    //          //        _judge.add(this);
    //          //        List<Type> temp = getDirectSuperTypes();
    //          //        for(Type type:temp) {
    //          //          SuperTypeJudge superJudge = type.superTypeJudge();
    //          //          _judge.merge(superJudge);
    //          //        Set<Type> fasterTypes = faster.types();
    //          //        Set<Type> judgeTypes = _judge.types();
    //          //        Set<Type> view = getSelfAndAllSuperTypesView();
    //          //        if(fasterTypes.size() != (judgeTypes.size())) {
    //          //          //_judge = null;
    //          //          System.out.println("debug");
    //    return _judge;
  }

  /** Like {@link #accumulateAllSuperTypes(Set)} but delegates recursion to the "self and super" variant. */
  @Override
  public void newAccumulateAllSuperTypes(Set<Type> acc) throws LookupException {
    List<Type> temp = getDirectSuperTypes();
    for(Type type:temp) {
      boolean add=true;
      for(Type acced: acc) {
        if(acced.baseType().sameAs(type.baseType())) {
          add=false;
          break;
        }
      }
      if(add) {
        type.newAccumulateSelfAndAllSuperTypes(acc);
      }
    }
  }

  @Override
  public void newAccumulateSelfAndAllSuperTypes(Set<Type> acc) throws LookupException {
    acc.add(this);
    newAccumulateAllSuperTypes(acc);
  }

  private Set<Type> _superTypeCache;

  private Set<Type> _superTypeAndSelfCache;

  //TODO: rename to properSubTypeOf
 /*@
   @ public behavior
   @
   @ post \result == equals(other) || subTypeOf(other);
   @*/
  @Override
  public boolean assignableTo(Type other) throws LookupException {
    return subTypeOf(other);
  }

  /* (non-Javadoc)
   * @see chameleon.oo.type.Tajp#inheritanceRelations()
   */
 /*@
   @ public behavior
   @
   @ post \result != null;
   @*/
  @Override
  public abstract List<InheritanceRelation> inheritanceRelations() throws LookupException;

  /**
   * The default behavior is to return inheritanceRelations(). If there are
   * member inheritance relations, the method must be overridden to exclude them.
   *
   * @return
   */
  @Override
  public abstract List<InheritanceRelation> nonMemberInheritanceRelations();

  @Override
  public abstract List<InheritanceRelation> explicitNonMemberInheritanceRelations();

  /** Explicit non-member inheritance relations, narrowed to the given kind. */
  @Override
  public <I extends InheritanceRelation> List<I> explicitNonMemberInheritanceRelations(Class<I> kind) {
    List result = explicitNonMemberInheritanceRelations();
    CollectionOperations.filter(result, d -> kind.isInstance(d));
    return result;
  }

  /** Non-member inheritance relations, narrowed to the given kind. */
  @Override
  public <I extends InheritanceRelation> List<I> nonMemberInheritanceRelations(Class<I> kind) {
    List result = nonMemberInheritanceRelations();
    CollectionOperations.filter(result, d -> kind.isInstance(d));
    return result;
  }

 /*@
   @ public behavior
   @
   @ pre relation != null;
   @ post inheritanceRelations().contains(relation);
   @*/
  // FIXME rename to addNonMemberInheritanceRelation.
  @Override
  public abstract void addInheritanceRelation(InheritanceRelation relation) throws ChameleonProgrammerException;

  /* (non-Javadoc)
   * @see chameleon.oo.type.Tajp#removeInheritanceRelation(chameleon.oo.type.inheritance.InheritanceRelation)
   */
 /*@
   @ public behavior
   @
   @ pre relation != null;
   @ post ! inheritanceRelations().contains(relation);
   @*/
  @Override
  public abstract void removeNonMemberInheritanceRelation(InheritanceRelation relation) throws ChameleonProgrammerException;

  @Override
  public void removeAllNonMemberInheritanceRelations() {
    for(InheritanceRelation relation: nonMemberInheritanceRelations()) {
      removeNonMemberInheritanceRelation(relation);
    }
  }

  /* (non-Javadoc)
   * @see chameleon.oo.type.Tajp#localMembers(java.lang.Class)
   */
  @Override
  public <T extends Member> List<T> localMembers(final Class<T> kind) throws LookupException {
    return (List<T>) new TypeFilter(kind).retain(localMembers());
  }

  /* (non-Javadoc)
   * @see chameleon.oo.type.Tajp#localMembers()
   */
  @Override
  public abstract List<Member> localMembers() throws LookupException;

  /** No implicit members by default; subclasses may override. */
  @Override
  public List<Member> implicitMembers() {
    return Collections.EMPTY_LIST;
  }

  /** Implicit members of the given kind, returned as a fresh mutable list. */
  @Override
  public <M extends Member> List<M> implicitMembers(Class<M> kind) {
    // implicitMembers returns an immutable list.
    List result = new ArrayList(implicitMembers());
    Iterator iter = result.iterator();
    while(iter.hasNext()) {
      Object o = iter.next();
      if(! kind.isInstance(o)) {
        iter.remove();
      }
    }
    return result;
  }

  /** Implicit members accepted by the given selector. */
  public <D extends Member> List<? extends SelectionResult> implicitMembers(DeclarationSelector<D> selector) throws LookupException {
    return selector.selection(implicitMembers());
  }

  @Override
  public <T extends Member> List<T> directlyDeclaredMembers(Class<T> kind) {
    return (List<T>) new TypeFilter(kind).retain(directlyDeclaredMembers());
  }

  /** Directly declared members of the given kind for which the given property holds. */
  @Override
  public <T extends Member> List<T> directlyDeclaredMembers(Class<T> kind, ChameleonProperty property) {
    List<T> result = directlyDeclaredMembers(kind);
    Iterator<T> iter = result.iterator();
    while(iter.hasNext()) {
      T t = iter.next();
      if(! t.isTrue(property)) {
        iter.remove();
      }
    }
    return result;
  }

  /** All members declared by the elements lexically inside this class. */
  @Override
  public List<Member> directlyDeclaredMembers() {
    List<Member> result = Lists.create();
    for(TypeElement m: directlyDeclaredElements()) {
      result.addAll(m.declaredMembers());
    }
    return result;
  }

  /**
   * Select members with the given selector: local members first, then implicit
   * members, then members inherited through the inheritance relations. A greedy
   * selector stops at the first stage that yields results.
   */
  @Override
  public <D extends Member> List<? extends SelectionResult> members(DeclarationSelector<D> selector) throws LookupException {
    // 1) perform local search
    boolean nonGreedy = ! selector.isGreedy();
    List<SelectionResult> result = (List)localMembers(selector);
    if(nonGreedy || result.isEmpty()) {
      List<SelectionResult> implicitMembers = (List)implicitMembers(selector);
      if(result == Collections.EMPTY_LIST) {
        result = implicitMembers;
      } else {
        result.addAll(implicitMembers);
      }
    }
    // 2) process inheritance relations
    //    only if the selector isn't greedy or
    //    there are not results.
    if(nonGreedy || result.isEmpty()) {
      for (InheritanceRelation rel : inheritanceRelations()) {
        result = rel.accumulateInheritedMembers(selector, result);
      }
      // We cannot take a shortcut and test for > 1 because if
      // the inheritance relation transforms the member (as is done with subobjects)
      // the transformed member may have to be removed, even if there is only 1.
      selector.filter(result);
      return result;
    } else {
      return result;
    }
  }

  @Override
  public void addAllInheritanceRelations(Collection<InheritanceRelation> relations) {
    for(InheritanceRelation rel: relations) {
      addInheritanceRelation(rel);
    }
  }

  /* (non-Javadoc)
   * @see chameleon.oo.type.Tajp#localMembers(chameleon.core.lookup.DeclarationSelector)
   */
  @Override
  @SuppressWarnings("unchecked")
  public abstract <D extends Member> List<? extends SelectionResult> localMembers(DeclarationSelector<D> selector) throws LookupException;

  @Override
  public List<Member> members() throws LookupException {
    return members(Member.class);
  }

  /**
   * All members of the given kind, including implicit and inherited ones. The
   * per-kind result is cached (when enabled in {@link Config}); the cache stores
   * a private copy and hands out fresh copies to callers.
   */
  @Override
  @SuppressWarnings("unchecked")
  public <M extends Member> List<M> members(final Class<M> kind) throws LookupException {
    // 1) All defined members of the requested kind are added.
    boolean foundInCache = false;
    List<M> result = null;
    if(_membersCache != null) {
      result = _membersCache.get(kind);
      if(result != null) {
        foundInCache = true;
        result = new ArrayList<M>(result);
      }
    }
    if(! foundInCache){
      result = localMembers(kind);
      result.addAll(implicitMembers(kind));
      // 2) Fetch all potentially inherited members from all inheritance relations
      for (InheritanceRelation rel : inheritanceRelations()) {
        result = rel.accumulateInheritedMembers(kind, result);
      }
      if(Config.cacheDeclarations()) {
        if(_membersCache == null) {
          _membersCache = new HashMap<Class,List>();
        }
        _membersCache.put(kind, new ArrayList(result));
      }
    }
    return result;
  }

  // Per-kind members cache; invalidated by flushLocalCache().
  private Map<Class,List> _membersCache;

  @Override
  public abstract List<? extends TypeElement> directlyDeclaredElements();

  /** Directly declared elements narrowed to the given kind. */
  @Override
  public <T extends TypeElement> List<T> directlyDeclaredElements(Class<T> kind) {
    List<TypeElement> tmp = (List<TypeElement>) directlyDeclaredElements();
    new TypePredicate<>(kind).filter(tmp);
    return (List<T>)tmp;
  }

  /* (non-Javadoc)
   * @see chameleon.oo.type.Tajp#declarations()
   */
  @Override
  public List<? extends Declaration> declarations() throws LookupException {
    List<? extends Declaration> result = declarationCache();
    if(result == null) {
      result = members();
      setDeclarationCache(result);
    }
    return result;
  }

  /* (non-Javadoc)
   * @see chameleon.oo.type.Tajp#declarations(chameleon.core.lookup.DeclarationSelector)
   */
  @Override
  public <D extends Declaration> List<? extends SelectionResult> declarations(DeclarationSelector<D> selector) throws LookupException {
    return members((DeclarationSelector<? extends Member>)selector);
  }

  /** Copy everything from the given type without linking clones to their origins. */
  protected void copyContents(Type from) {
    copyContents(from, false);
  }

  /**
   * Copy inheritance relations, parameter blocks, modifiers, and type elements
   * from the given type. When link is true, every clone records its origin.
   */
  protected void copyContents(Type from, boolean link) {
    copyInheritanceRelations(from, link);
    copyEverythingExceptInheritanceRelations(from, link);
  }

  protected void copyInheritanceRelations(Type from, boolean link) {
    List<InheritanceRelation> relations = from.explicitNonMemberInheritanceRelations();
    for(InheritanceRelation relation : relations) {
      InheritanceRelation clone = clone(relation);
      if(link) {
        clone.setOrigin(relation);
      }
      addInheritanceRelation(clone);
    }
  }

  protected void copyEverythingExceptInheritanceRelations(Type from, boolean link) {
    copyParameterBlocks(from, link);
    copyModifiers(from, link);
    copyTypeElements(from, link);
  }

  private void copyTypeElements(Type from, boolean link) {
    for(TypeElement el : from.directlyDeclaredElements()) {
      TypeElement clone = clone(el);
      if(link) {
        clone.setOrigin(el);
      }
      add(clone);
    }
  }

  /** Replace this type's own parameter blocks with clones of the given type's blocks. */
  protected void copyParameterBlocks(Type from, boolean link) {
    for(ParameterBlock par : parameterBlocks()) {
      removeParameterBlock(par);
    }
    for(ParameterBlock par : from.parameterBlocks()) {
      ParameterBlock clone = clone(par);
      if(link) {
        clone.setOrigin(par);
      }
      addParameterBlock(clone);
    }
  }

  protected void copyModifiers(Type from, boolean link) {
    for(Modifier mod : from.modifiers()) {
      Modifier clone = clone(mod);
      if(link) {
        clone.setOrigin(mod);
      }
      addModifier(clone);
    }
  }

  /* (non-Javadoc)
   * @see chameleon.oo.type.Tajp#alias(chameleon.core.declaration.SimpleNameSignature)
   */
  @Override
  public Type alias(String name) {
    return new TypeAlias(name,this);
  }

  /* (non-Javadoc)
   * @see chameleon.oo.type.Tajp#intersection(chameleon.oo.type.Type)
   */
  @Override
  public Type intersection(Type type) throws LookupException {
    // double dispatch: the other operand decides how to combine
    return type.intersectionDoubleDispatch(this);
  }

  @Override
  public Type intersectionDoubleDispatch(Type type) throws LookupException {
    Type result = new IntersectionType(this,type);
    result.setUniParent(parent());
    return result;
  }

  @Override
  public Type intersectionDoubleDispatch(IntersectionType type) throws LookupException {
    IntersectionType result = clone(type);
    // NOTE(review): this adds `type` to a clone of `type` itself; it looks like
    // `this` should be added instead — confirm against the union/intersection tests.
    result.addType(type);
    return result;
  }

  @Override
  public Type union(Type type) throws LookupException {
    // double dispatch: the other operand decides how to combine
    return type.unionDoubleDispatch(this);
  }

  @Override
  public Type unionDoubleDispatch(Type type) throws LookupException {
    Type result = new UnionType(this,type);
    result.setUniParent(parent());
    return result;
  }

  @Override
  public Type unionDoubleDispatch(UnionType type) throws LookupException {
    UnionType result = clone(type);
    // NOTE(review): same concern as intersectionDoubleDispatch(IntersectionType):
    // `type` is added to its own clone; should this be `this`? Confirm.
    result.addType(type);
    return result;
  }

  /* (non-Javadoc)
   * @see chameleon.oo.type.Tajp#replace(chameleon.oo.type.TypeElement, chameleon.oo.type.TypeElement)
   */
  @Override
  public abstract void replace(TypeElement oldElement, TypeElement newElement);

  /* (non-Javadoc)
   * @see chameleon.oo.type.Tajp#baseType()
   */
  @Override
  public abstract Type baseType();

  /**
   * A member must still be overridden when it is overridable, an instance
   * member, and has no definition yet.
   */
  protected boolean mustBeOverridden(Member member) {
    ObjectOrientedLanguage lang = language(ObjectOrientedLanguage.class);
    // ! CLASS ==> ! ABSTRACT
    return member.isTrue(lang.OVERRIDABLE) && member.isTrue(lang.INSTANCE) && member.isFalse(lang.DEFINED);
  }

  /* (non-Javadoc)
   * @see chameleon.oo.type.Tajp#verifySelf()
   */
  @Override
  public Verification verifySelf() {
    Verification result = Valid.create();
    ObjectOrientedLanguage lang = language(ObjectOrientedLanguage.class);
    // Only a non-abstract class must implement all inherited abstract members.
    if(! isTrue(lang.ABSTRACT)) {
      List<Member> members = null;
      try {
        members = members();
      } catch (LookupException e) {
        result = result.and(new BasicProblem(this, "Cannot compute the members of this class"));
      }
      if(members != null) {
        // Keep only the members that still must be overridden.
        Iterator<Member> iter = members.iterator();
        while(iter.hasNext()) {
          Member m = iter.next();
          if(!mustBeOverridden(m)) {
            iter.remove();
          } else {
            //DEBUG
            mustBeOverridden(m);
          }
        }
        if(! members.isEmpty()) {
          StringBuffer msg = new StringBuffer("This class must implement the following abstract members: ");
          int size = members.size();
          for(int i=0; i< size; i++) {
            try {
              msg.append(members.get(i).name());
              if(i < size -1) {
                msg.append(',');
              }
            } catch(NullPointerException exc) {
              // a member without a name is silently skipped in the message
            }
          }
          result = result.and(new BasicProblem(this, msg.toString()));
        }
      }
    }
    return result;
  }

  @Override
  public boolean lowerBoundAtLeatAsHighAs(Type other, TypeFixer trace) throws LookupException {
    return false;
  }

  /** Two types are the same when either direction of uniSameAs holds. */
  @Override
  public boolean sameAs(Type other, List<Pair<TypeParameter, TypeParameter>> trace) throws LookupException {
    //    List<Pair<TypeParameter, TypeParameter>> newTrace = new ArrayList<Pair<TypeParameter, TypeParameter>>(trace);
    return uniSameAs(other,trace) || other.uniSameAs(this,trace);
  }

  /** A concrete class is its own lower bound. */
  @Override
  public Type lowerBound() throws LookupException {
    return this;
  }

  /** A concrete class is its own upper bound. */
  @Override
  public Type upperBound() throws LookupException {
    return this;
  }

  /**
   * Members directly overridden by the selector's declaration: looked up in this
   * type itself unless this type is an ancestor of the declaration, in which
   * case the search continues through the inheritance relations.
   */
  @Override
  public <D extends Member> List<D> membersDirectlyOverriddenBy(MemberRelationSelector<D> selector) throws LookupException {
    List<D> result = Lists.create();
    if(!selector.declaration().ancestors().contains(this)) {
      result.addAll((List)members(selector));
    } else {
      for(InheritanceRelation relation:inheritanceRelations()) {
        result.addAll(relation.membersDirectlyOverriddenBy(selector));
      }
    }
    return result;
  }

  @Override
  public <D extends Member> List<D> membersDirectlyAliasedBy(MemberRelationSelector<D> selector) throws LookupException {
    List<D> result = Lists.create();
    for(InheritanceRelation relation:inheritanceRelations()) {
      result.addAll(relation.membersDirectlyAliasedBy(selector));
    }
    return result;
  }

  /** A class never aliases other members. */
  @Override
  public <D extends Member> List<D> membersDirectlyAliasing(MemberRelationSelector<D> selector) throws LookupException {
    return ImmutableList.of();
  }

  @Override
  public HidesRelation<? extends Member> hidesRelation() {
    return _hidesSelector;
  }

  // Shared, stateless hides-relation instance for all classes.
  private static HidesRelation<Type> _hidesSelector = new HidesRelation<Type>(Type.class);
}
package io.sniffy.sql; import io.sniffy.Query; import io.sniffy.Spy; import io.sniffy.Threads; import io.sniffy.WrongNumberOfQueriesError; public class SqlQueries { private SqlQueries() { } public static SqlExpectation_Count none() { return exact(0); } public static SqlExpectation_Count atMostOnce() { return between(0,1); } public static SqlExpectation_Count exact(int count) { return between(count, count); } public static SqlExpectation_Count between(int min, int max) { return new SqlExpectation_Count(min, max); } public static SqlExpectation_Min min(int min) { return new SqlExpectation_Min(min); } public static SqlExpectation_Max max(int max) { return new SqlExpectation_Max(max); } private static class SqlExpectation implements Spy.Expectation { protected final int min; protected final int max; protected final Threads threads; protected final Query type; protected SqlExpectation(int min, int max, Threads threads, Query type) { this.min = min; this.max = max; this.threads = threads; this.type = type; } @Override public <T extends Spy<T>> Spy<T> verify(Spy<T> spy) throws WrongNumberOfQueriesError { int numQueries = spy.executedStatements(threads, type); if (numQueries > max || numQueries < min) { throw new WrongNumberOfQueriesError( threads, type, min, max, numQueries, spy.getExecutedStatements(threads) ); } return spy; } } public static class SqlExpectation_Min extends SqlExpectation_Count { private SqlExpectation_Min(int min) { super(min, Integer.MAX_VALUE); } public SqlExpectation_Count max(int max) { if (max < min) throw new IllegalArgumentException("max cannot be less than min"); return new SqlExpectation_Count(min, max); } } public static class SqlExpectation_Max extends SqlExpectation_Count { private SqlExpectation_Max(int max) { super(0, max); } public SqlExpectation_Count min(int min) { if (max < min) throw new IllegalArgumentException("max cannot be less than min"); return new SqlExpectation_Count(min, max); } } public static class SqlExpectation_Count 
extends SqlExpectation { private SqlExpectation_Count(int min, int max) { super(min, max, Threads.CURRENT, Query.ANY); if (min < 0) throw new IllegalArgumentException("min cannot be negative"); if (max < min) throw new IllegalArgumentException("max cannot be less than min"); } public SqlExpectation_Count_Query type(Query query) { return new SqlExpectation_Count_Query(min, max, query); } public SqlExpectation_Count_Query select() { return type(Query.SELECT); } public SqlExpectation_Count_Query insert() { return type(Query.INSERT); } public SqlExpectation_Count_Query update() { return type(Query.UPDATE); } public SqlExpectation_Count_Query delete() { return type(Query.DELETE); } public SqlExpectation_Count_Query merge() { return type(Query.MERGE); } // TODO: change name since it clashes with otherThreads() public SqlExpectation_Count_Query other() { return type(Query.OTHER); } public SqlExpectation_Count_Threads threads(Threads threads) { return new SqlExpectation_Count_Threads(min, max, threads); } public SqlExpectation_Count_Threads currentThread() { return threads(Threads.CURRENT); } public SqlExpectation_Count_Threads otherThreads() { return threads(Threads.OTHERS); } public SqlExpectation_Count_Threads anyThreads() { return threads(Threads.ANY); } } public static class SqlExpectation_Count_Query extends SqlExpectation { private SqlExpectation_Count_Query(int min, int max, Query query) { super(min, max, Threads.CURRENT, query); } public SqlExpectation threads(Threads threads) { return new SqlExpectation(min, max, threads, type); } public SqlExpectation currentThread() { return threads(Threads.CURRENT); } public SqlExpectation otherThreads() { return threads(Threads.OTHERS); } public SqlExpectation anyThreads() { return threads(Threads.ANY); } } public static class SqlExpectation_Count_Threads extends SqlExpectation { private SqlExpectation_Count_Threads(int min, int max, Threads threads) { super(min, max, threads, Query.ANY); } public SqlExpectation type(Query 
query) { return new SqlExpectation(min, max, threads, query); } public SqlExpectation select() { return type(Query.SELECT); } public SqlExpectation insert() { return type(Query.INSERT); } public SqlExpectation update() { return type(Query.UPDATE); } public SqlExpectation delete() { return type(Query.DELETE); } public SqlExpectation merge() { return type(Query.MERGE); } public SqlExpectation other() { return type(Query.OTHER); } } }
package org.apache.fop.layout;

import org.apache.fop.datatypes.ColorType;
import org.apache.fop.datatypes.CondLength;

/**
 * Holds resolved border and padding information for the four sides
 * (TOP/RIGHT/BOTTOM/LEFT) of an area. Each side stores an already-resolved
 * length plus its "discard" conditionality flag, so callers can query a
 * side's width either including or excluding discarded values.
 * Instances are mutable; {@link #clone()} produces a fully independent copy.
 */
public class BorderAndPadding implements Cloneable {

    // Side indices used by all of the per-side arrays below.
    public static final int TOP = 0;
    public static final int RIGHT = 1;
    public static final int BOTTOM = 2;
    public static final int LEFT = 3;

    /**
     * A CondLength reduced to its resolved numeric length plus its
     * "discard" flag. Mutable: setPaddingLength/setBorderLength overwrite
     * iLength after initial resolution.
     */
    private static class ResolvedCondLength implements Cloneable {
        int iLength; // Resolved length value
        boolean bDiscard;

        ResolvedCondLength(CondLength length) {
            bDiscard = length.isDiscard();
            iLength = length.mvalue();
        }

        public Object clone() throws CloneNotSupportedException {
            // Only primitive fields, so the shallow Object.clone() copy
            // is already a complete copy.
            return super.clone();
        }
    }

    /**
     * Return a full copy of the BorderAndPadding information. This clones all
     * padding and border information.
     * @return The copy.
     */
    public Object clone() throws CloneNotSupportedException {
        BorderAndPadding bp = (BorderAndPadding) super.clone();
        // super.clone() copies only the array *references*; clone the arrays
        // themselves, then each non-null element, so the copy shares no
        // mutable state with this instance.
        bp.padding = (ResolvedCondLength[])padding.clone();
        bp.borderInfo = (BorderInfo[])borderInfo.clone();
        for (int i=0; i<padding.length; i++) {
            if (padding[i] != null) {
                bp.padding[i]=(ResolvedCondLength)padding[i].clone();
            }
            if (borderInfo[i] != null) {
                bp.borderInfo[i]=(BorderInfo)borderInfo[i].clone();
            }
        }
        return bp;
    }

    /**
     * Style, color and resolved width of a single border side.
     */
    public static class BorderInfo implements Cloneable {
        private int mStyle; // Enum for border style
        private ColorType mColor; // Border color
        private ResolvedCondLength mWidth;

        BorderInfo(int style, CondLength width, ColorType color) {
            mStyle = style;
            mWidth = new ResolvedCondLength(width);
            mColor = color;
        }

        public Object clone() throws CloneNotSupportedException {
            BorderInfo bi = (BorderInfo) super.clone();
            bi.mWidth = (ResolvedCondLength)mWidth.clone();
            // do we need to clone the Color too???
            return bi;
        }
    }

    // Per-side state, indexed by TOP/RIGHT/BOTTOM/LEFT. A null entry means
    // "no border/padding set on that side" and is reported as width 0.
    private BorderInfo[] borderInfo = new BorderInfo[4];
    private ResolvedCondLength[] padding = new ResolvedCondLength[4];

    public BorderAndPadding() {}

    /** Set border style, width and color for one side. */
    public void setBorder(int side, int style, CondLength width,
                          ColorType color) {
        borderInfo[side] = new BorderInfo(style, width, color);
    }

    /** Set the padding for one side. */
    public void setPadding(int side, CondLength width) {
        padding[side] = new ResolvedCondLength(width);
    }

    // NOTE(review): the two setters below assume the side was previously
    // initialised via setPadding/setBorder; a null entry would NPE here.
    public void setPaddingLength(int side, int iLength) {
        padding[side].iLength = iLength;
    }

    public void setBorderLength(int side, int iLength) {
        borderInfo[side].mWidth.iLength = iLength;
    }

    // Convenience per-side accessors. bDiscard=true means "treat values
    // whose conditionality is 'discard' as 0".
    public int getBorderLeftWidth(boolean bDiscard) {
        return getBorderWidth(LEFT, bDiscard);
    }

    public int getBorderRightWidth(boolean bDiscard) {
        return getBorderWidth(RIGHT, bDiscard);
    }

    public int getBorderTopWidth(boolean bDiscard) {
        return getBorderWidth(TOP, bDiscard);
    }

    public int getBorderBottomWidth(boolean bDiscard) {
        return getBorderWidth(BOTTOM, bDiscard);
    }

    public int getPaddingLeft(boolean bDiscard) {
        return getPadding(LEFT, bDiscard);
    }

    public int getPaddingRight(boolean bDiscard) {
        return getPadding(RIGHT, bDiscard);
    }

    public int getPaddingBottom(boolean bDiscard) {
        return getPadding(BOTTOM, bDiscard);
    }

    public int getPaddingTop(boolean bDiscard) {
        return getPadding(TOP, bDiscard);
    }

    /** Width of one border side; 0 if unset or discarded-and-bDiscard. */
    private int getBorderWidth(int side, boolean bDiscard) {
        if ((borderInfo[side] == null)
                || (bDiscard && borderInfo[side].mWidth.bDiscard)) {
            return 0;
        } else
            return borderInfo[side].mWidth.iLength;
    }

    /** Border color of one side, or null when no border is set. */
    public ColorType getBorderColor(int side) {
        if (borderInfo[side] != null) {
            return borderInfo[side].mColor;
        } else
            return null;
    }

    /** Border style of one side, or 0 when no border is set. */
    public int getBorderStyle(int side) {
        if (borderInfo[side] != null) {
            return borderInfo[side].mStyle;
        } else
            return 0;
    }

    /** Padding of one side; 0 if unset or discarded-and-bDiscard. */
    private int getPadding(int side, boolean bDiscard) {
        if ((padding[side] == null) || (bDiscard && padding[side].bDiscard)) {
            return 0;
        } else
            return padding[side].iLength;
    }
}
package kuleuven.group2;

import static com.google.common.base.Preconditions.checkNotNull;

import java.util.List;

import kuleuven.group2.classloader.ReloadingStoreClassLoader;
import kuleuven.group2.data.Test;
import kuleuven.group2.data.TestDatabase;
import kuleuven.group2.data.updating.MethodTestLinkUpdater;
import kuleuven.group2.data.updating.OssRewriterLoader;
import kuleuven.group2.data.updating.TestResultUpdater;
import kuleuven.group2.defer.DeferredConsumer;
import kuleuven.group2.policy.Policy;
import kuleuven.group2.testrunner.TestRunner;
import kuleuven.group2.sourcehandler.ClassSourceEventHandler;
import kuleuven.group2.sourcehandler.SourceEventHandler;
import kuleuven.group2.sourcehandler.TestSourceEventHandler;
import kuleuven.group2.store.Store;
import kuleuven.group2.store.StoreEvent;
import kuleuven.group2.store.StoreFilter;
import kuleuven.group2.store.StoreWatcher;
import kuleuven.group2.util.Consumer;

/**
 * Brings all parts of the program together to form a pipeline.
 * TODO [DOC] complete the description of the Pipeline class
 *
 * On every batch of source-store changes the pipeline: reloads compiled
 * classes, lets the source handlers recompile/update the test database,
 * sorts the known tests according to the configured {@link Policy}, and
 * runs them in that order.
 *
 * @author Group2
 * @version 19 November 2013
 */
public class Pipeline {

    // The three stores: class sources, test sources, and compiled binaries.
    protected final Store classSourceStore;
    protected final Store testSourceStore;
    protected final Store binaryStore;

    // Mutable: may be replaced at runtime via setSortPolicy().
    protected Policy sortPolicy;

    protected final TestDatabase testDatabase;
    protected final ReloadingStoreClassLoader testClassLoader;
    protected final TestRunner testRunner;
    protected final OssRewriterLoader rewriterLoader;
    protected final MethodTestLinkUpdater methodTestLinkUpdater;
    protected final TestResultUpdater testResultUpdater;

    // Watch the two source stores and feed their events to deferredTask.
    protected final StoreWatcher classSourceWatcher;
    protected final StoreWatcher testSourceWatcher;

    protected final SourceEventHandler classSourceEventHandler;
    protected final SourceEventHandler testSourceEventHandler;

    protected final PipelineTask task;
    // Wraps `task`; presumably batches individual StoreEvents into the
    // List<StoreEvent> the task consumes — TODO confirm DeferredConsumer
    // semantics.
    protected final DeferredConsumer<StoreEvent> deferredTask;

    /**
     * Wires up the whole pipeline. Note that the construction order below is
     * significant: the updaters register themselves as listeners on the
     * rewriter loader and test runner as part of this wiring.
     *
     * @param classSourceStore store with the sources under test (not null)
     * @param testSourceStore  store with the test sources (not null)
     * @param binaryStore      store receiving compiled classes (not null)
     * @param sortPolicy       initial test-ordering policy (not null)
     */
    public Pipeline(Store classSourceStore, Store testSourceStore, Store binaryStore, Policy sortPolicy) {
        this.classSourceStore = checkNotNull(classSourceStore);
        this.testSourceStore = checkNotNull(testSourceStore);
        this.binaryStore = checkNotNull(binaryStore);
        this.sortPolicy = checkNotNull(sortPolicy);
        this.testDatabase = new TestDatabase();
        // Class loader reads compiled classes from the binary store and can
        // be reloaded to pick up newly compiled versions.
        this.testClassLoader = new ReloadingStoreClassLoader(binaryStore, getClass().getClassLoader());
        this.testRunner = new TestRunner(testClassLoader);
        this.rewriterLoader = new OssRewriterLoader();
        // Keeps method<->test links up to date while tests execute.
        this.methodTestLinkUpdater = new MethodTestLinkUpdater(testDatabase, rewriterLoader);
        methodTestLinkUpdater.registerTestHolder(testRunner);
        // Records test results into the database as the runner reports them.
        this.testResultUpdater = new TestResultUpdater(testDatabase);
        testRunner.addRunListener(testResultUpdater);
        this.classSourceWatcher = new StoreWatcher(classSourceStore, StoreFilter.SOURCE);
        this.testSourceWatcher = new StoreWatcher(testSourceStore, StoreFilter.SOURCE);
        this.classSourceEventHandler = new ClassSourceEventHandler(classSourceStore, binaryStore, testDatabase,
                testClassLoader);
        this.testSourceEventHandler = new TestSourceEventHandler(testSourceStore, binaryStore, testDatabase,
                testClassLoader);
        this.task = new PipelineTask();
        this.deferredTask = new DeferredConsumer<>(task);
    }

    /** @return the current test-ordering policy. */
    public Policy getSortPolicy() {
        return sortPolicy;
    }

    /** Replace the test-ordering policy used for subsequent runs. */
    public void setSortPolicy(Policy sortPolicy) {
        this.sortPolicy = checkNotNull(sortPolicy);
    }

    /**
     * Start reacting to source changes: register the deferred task with both
     * watchers and start the stores' own listening.
     */
    public void start() {
        // Start listening
        classSourceWatcher.registerConsumer(deferredTask);
        testSourceWatcher.registerConsumer(deferredTask);
        classSourceStore.startListening();
        testSourceStore.startListening();
        // TODO Enable rewriter!
    }

    /**
     * One pipeline cycle for a batch of store events: reload classes,
     * handle class- and test-source changes (both handlers receive the full
     * event list; presumably each filters the events relevant to it), then
     * sort and run the tests.
     */
    private void run(List<StoreEvent> events) {
        reloadClasses();
        handleSourceEvents(events);
        handleTestSourceEvents(events);
        Test[] sortedTests = sortTests();
        runTests(sortedTests);
    }

    /** Pick up freshly compiled classes from the binary store. */
    private void reloadClasses() {
        testClassLoader.reload();
    }

    /** Let the class-source handler process the events; errors are reported
     *  to stderr only (deliberate best-effort, see TODO). */
    private void handleSourceEvents(List<StoreEvent> events) {
        try {
            classSourceEventHandler.handleEvents(events);
        } catch (Exception e) {
            // TODO Show in GUI?
            System.err.println(e.getMessage());
        }
    }

    /** Same as handleSourceEvents, for the test-source handler. */
    private void handleTestSourceEvents(List<StoreEvent> events) {
        try {
            testSourceEventHandler.handleEvents(events);
        } catch (Exception e) {
            // TODO Show in GUI?
            System.err.println(e.getMessage());
        }
    }

    /** Order all known tests according to the current policy. */
    private Test[] sortTests() {
        return sortPolicy.getSortedTestsAccordingToPolicy(testDatabase);
    }

    /** Run the given tests in order; failures to launch are best-effort. */
    private void runTests(Test[] tests) {
        try {
            testRunner.runTestMethods(tests);
        } catch (Exception e) {
            // TODO Show in GUI?
            e.printStackTrace();
        }
    }

    /**
     * Stop reacting to source changes (mirror image of {@link #start()}).
     */
    public void stop() {
        // Stop listening
        classSourceWatcher.unregisterConsumer(deferredTask);
        testSourceWatcher.unregisterConsumer(deferredTask);
        classSourceStore.stopListening();
        testSourceStore.stopListening();
        // TODO Disable rewriter!
        // TODO Stop current test run as well?
    }

    /**
     * Adapter that lets the deferred consumer drive a full pipeline cycle.
     */
    protected class PipelineTask implements Consumer<List<StoreEvent>> {

        @Override
        public void consume(List<StoreEvent> events) {
            Pipeline.this.run(events);
        }

    }

}
package org.biojava.bibliography;

import java.util.Hashtable;

import org.biojava.utils.candy.CandyFinder;
import org.biojava.utils.candy.CandyVocabulary;

/**
 * <p>
 * This interface defines supporting utilities for working with
 * bibliographic repositories.
 * </p>
 *
 * <p>
 * The fundamental part of this interface deals with the controlled
 * vocabularies. However, the <tt>BibRefSupport</tt> interface is here
 * just a gateway to other Java interfaces defined in a separate
 * package {@link org.biojava.utils.candy}.
 * </p>
 *
 * <p>
 * The controlled vocabularies are used in order to find names of
 * all available attributes of the given bibliographic repository, to
 * find all possible values of some attributes, and to specify
 * availability of the ordering and searching criteria. Here belong
 * methods {@link #getVocabularyFinder getVocabularyFinder}, {@link
 * #getSupportedValues getSupportedValues}, and {@link
 * #getSupportedCriteria getSupportedCriteria}.
 * </p>
 *
 * <p>
 * The other <em>raison d'etre</em> for the BibRefSupport interface is
 * to have a place where some common constants can be put in. The
 * constants specify common vocabulary names (examples are {@link
 * #RESOURCE_TYPES} or {@link #JOURNAL_TITLES}, explicitly defined
 * bibliographic resource types (for example, {@link #TYPE_BOOK} or
 * {@link #TYPE_ARTICLE}), and few other things.
 * </p>
 *
 * <p>
 * And finally, there are some methods allowing to improve an
 * efficient access to the supporting resources by calling explicitly
 * {@link #connect connect} and {@link #disconnect disconnect}.
 * </p>
 *
 * <p>
 * It was an intention to separate completely methods dealing with
 * bibliographic repositories (as defined in interface {@link
 * BibRefQuery}) and methods helping with other things (as defined
 * here). This <em>box of bricks</em> approach helps to use different
 * communication protocols for bibliographic and supporting
 * repositories. For example, the performance can be sometimes
 * improved when the client loads separately all controlled
 * vocabularies and use them locally while the access to the
 * bibliographic repository is still remote.
 * </p>
 *
 *<H3>The implementation is advised to used the following constructor</h3>
 *
 * <p>
 *<pre>
 * public NameOfAnImplementation (String[] args, Hashtable props) {...}
 *</pre>
 * where both <tt>args</tt> and <tt>props</tt> contain implementation
 * specific parameters and properties. However, some properties are
 * more probable to be used - the suggested names for them are defined
 * also in this interface (e.g. {@link #INIT_PROP_LOG}).
 * </p>
 *
 * <p>
 * The use of this constructor makes easier to load dynamically different
 * supporting implementations.
 * </p>
 *
 * @author <A HREF="mailto:senger@ebi.ac.uk">Martin Senger</A>
 * @author Matthew Pocock
 * @version $Id$
 * @since 1.3
 */
public interface BibRefSupport {

    // names for global vocabulary names

    /**
     * A vocabulary name. The vocabulary contains stringified names of
     * all citation types stored in the repository. The names of types
     * that are explicitly defined by this interface should be equal
     * to the constant strings for types (such as {@link #TYPE_BOOK}).
     */
    static final String RESOURCE_TYPES = "resource_types";

    /**
     * A vocabulary name. Some bibliographic repositories consist of
     * {@link BiblioEntryStatus#repositorySubset several
     * databases}. Their list can be provided by this vocabulary.
     */
    static final String REPOSITORY_SUBSETS = "repository_subsets";

    /**
     * A vocabulary name. The vocabulary contains available
     * {@link BiblioSubject#subjectHeadings subject headings}.
     */
    static final String SUBJECT_HEADINGS = "subject_headings";

    /**
     * A vocabulary name. The vocabulary contains available languages
     * used in {@link BibRef#language} and {@link BiblioDescription#language}.
     */
    static final String LANGUAGES = "languages";

    /**
     * A vocabulary name. The vocabulary contains journal titles as
     * used in {@link BiblioJournal#name}.
     */
    static final String JOURNAL_TITLES = "journal_titles";

    /**
     * A vocabulary name. The vocabulary contains journal
     * abbreviations as used in {@link BiblioJournal#abbreviation}.
     */
    static final String JOURNAL_ABBREV = "journal_abbreviations";

    /**
     * A vocabulary name. The vocabulary contains names of properties
     * that characterize a citation as a {@link
     * BiblioEntryStatus#properties repository/database record}.
     */
    static final String ENTRY_PROPERTIES = "entry_properties";

    // names for (some) bibliographic resource types

    /** A name of a bibliographic resource type. */
    static final String TYPE_BOOK = "Book";

    /** A name of a bibliographic resource type. */
    static final String TYPE_ARTICLE = "Article";

    /** A name of a bibliographic resource type. */
    static final String TYPE_BOOK_ARTICLE = "BookArticle";

    /** A name of a bibliographic resource type. */
    static final String TYPE_JOURNAL_ARTICLE = "JournalArticle";

    /** A name of a bibliographic resource type. */
    static final String TYPE_PATENT = "Patent";

    /** A name of a bibliographic resource type. */
    static final String TYPE_THESIS = "Thesis";

    /** A name of a bibliographic resource type. */
    static final String TYPE_PROCEEDING = "Proceeding";

    /** A name of a bibliographic resource type. */
    static final String TYPE_TECH_REPORT = "TechReport";

    /** A name of a bibliographic resource type. */
    static final String TYPE_WEB_RESOURCE = "WebResource";

    // names for (some) other corners of a bibliographic repository

    /** A name of a provider type. */
    static final String PROVIDER_PERSON = "Person";

    /** A name of a provider type. */
    static final String PROVIDER_ORGANISATION = "Organisation";

    /** A name of a provider type. */
    static final String PROVIDER_SERVICE = "Service";

    /** A name of a provider type. */
    static final String GENERIC_PROVIDER = "Provider";

    // names for (some) attribute names

    /**
     * <p>
     * A part of a vocabulary name. It is usually coupled together
     * with a bibliographic resource type to give a full vocabulary
     * name. For example: {@link BibRefSupport#TYPE_JOURNAL_ARTICLE
     * JournalArticle}/ATTR_PROPERTIES.
     * </p>
     *
     * <p>
     * The vocabulary contains property names for the given resource
     * type as defined in {@link BibRef#properties}.
     * </p>
     */
    static final String ATTR_PROPERTIES = "properties";

    /**
     * A vocabulary name, or a part of a vocabulary name.
     * The vocabulary contains all allowed keys in
     * {@link BiblioScope#properties}.
     */
    static final String ATTR_SCOPE = "scope";

    /**
     * A vocabulary name, or a part of a vocabulary name.
     * The vocabulary contains all allowed keys in
     * {@link BibRef#format}.
     */
    static final String ATTR_FORMAT = "format";

    // names characterizing attributes

    /**
     * <p>
     * A role of an attribute.
     * </p>
     *
     * <p>
     * The introspection mechanism (provided by using controlled
     * vocabularies) allows to find what attributes are available in
     * the repository. The attributes which can be used in query
     * methods should be identified by putting this constant into
     * their vocabulary entry (somewhere in the {@link
     * org.biojava.utils.candy.CandyEntry#description description} field).
     * </p>
     */
    static final String ROLE_ATTR_QUERYABLE = "queryable";

    /**
     * <p>
     * A role of an attribute.
     * </p>
     *
     * <p>
     * The introspection mechanism (provided by using controlled
     * vocabularies) allows to find what attributes are available in
     * the repository. The attributes which can be used in retrieval
     * methods should be identified by putting this constant into
     * their vocabulary entry (somewhere in the {@link
     * org.biojava.utils.candy.CandyEntry#description description} field).
     * </p>
     */
    static final String ROLE_ATTR_RETRIEVABLE = "retrievable";

    /**
     * <p>
     * A property name ("<b>log</b>").
     * </p>
     *
     * <p>
     * Used for passing an instance dealing with logging.
     * </p>
     */
    static final String INIT_PROP_LOG = "log";

    /**
     * <p>
     * A property name ("<b>bibrefsupport</b>").
     * </p>
     *
     * <p>
     * Used for passing an instance of a class implementing this
     * interface. It is recommended to pass this property, for
     * example, in the constructor of an implementation of the {@link
     * BibRefQuery} interace}.
     * </p>
     */
    static final String INIT_PROP_SUPPORT = "bibrefsupport";

    /**
     * Establishes a connection to the supporting resources (for
     * example, to a vocabulary repository). Calling this explicitly
     * allows implementations to prepare expensive resources up front.
     *
     * @throws BibRefException if the connection cannot be established
     */
    void connect()
	throws BibRefException;

    /**
     * Reports whether this support object is ready to be used
     * (presumably: successfully connected) — the exact meaning is
     * implementation-specific.
     */
    boolean isReady();

    /**
     * Releases resources acquired by {@link #connect}.
     */
    void disconnect();

    /**
     * Returns a finder giving access to the controlled vocabularies
     * available for the underlying repository.
     *
     * @throws BibRefException if the finder cannot be obtained
     */
    CandyFinder getVocabularyFinder()
	throws BibRefException;

    /**
     * Returns the controlled vocabulary listing the possible values
     * of attribute {@code attrName} for resource type
     * {@code resourceType} (see the vocabulary-name constants in this
     * interface for typical names).
     *
     * @throws BibRefException if no such vocabulary can be provided
     */
    CandyVocabulary getSupportedValues (String resourceType, String attrName)
	throws BibRefException;

    /**
     * Returns the ordering/searching criteria supported by the whole
     * repository.
     *
     * @throws BibRefException if the criteria cannot be determined
     */
    BiblioCriterion[] getSupportedCriteria()
	throws BibRefException;

    /**
     * Returns the ordering/searching criteria supported by the given
     * repository subset (see {@link #REPOSITORY_SUBSETS}).
     *
     * @throws BibRefException if the criteria cannot be determined
     */
    BiblioCriterion[] getSupportedCriteria (String repositorySubset)
	throws BibRefException;

    /**
     * Combines the given query collections into a single one
     * (presumably a set union of their citations — confirm against
     * the implementing class). {@code properties} carries
     * implementation-specific options.
     *
     * @throws BibRefException if the collections cannot be combined
     */
    BibRefQuery union (BibRefQuery[] collections, Hashtable properties)
	throws BibRefException;

}
package org.deeplearning4j.iterativereduce.runtime.io; import static org.junit.Assert.*; import org.junit.Test; import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.factory.Nd4j; public class TestSVMLightRecordFactory { private String test_svm_light_w_comment = "-1 1:0.43 3:0.12 9284:0.2 # abcdef"; private String test_svm_light_no_comment = "-1 1:0.43 3:0.12 9284:0.2"; private String test_svm_light_positive_label = "1 1:0.43 3:0.12 9284:0.2"; private String test_svm_light_no_label = "1:0.43 3:0.12 9284:0.2"; private String test_svm_light_zero_index = "1 0:0.43 6:0.12 9284:0.2"; @Test public void testSVMLightRecordFactoryParseWithComment() { int feature_size = 9285; INDArray in_vector = Nd4j.create( feature_size ); INDArray out_vector = Nd4j.create( 1 ); SVMLightRecordFactory recFactory = new SVMLightRecordFactory( feature_size ); recFactory.parseFromLine( test_svm_light_w_comment, in_vector, out_vector ); assertEquals( -1.0, out_vector.getDouble( 0 ), 0.0 ); } @Test public void testSVMLightRecordFactoryParseWithNoComment() { int feature_size = 9285; INDArray in_vector = Nd4j.create( feature_size ); INDArray out_vector = Nd4j.create( 1 ); SVMLightRecordFactory recFactory = new SVMLightRecordFactory( feature_size ); recFactory.parseFromLine( test_svm_light_no_comment, in_vector, out_vector ); assertEquals( -1.0, out_vector.getDouble( 0 ), 0.0 ); } @Test public void testSVMLightRecordFactoryParseWithPositiveLabel() { int feature_size = 9285; INDArray in_vector = Nd4j.create( feature_size ); INDArray out_vector = Nd4j.create( 1 ); SVMLightRecordFactory recFactory = new SVMLightRecordFactory( feature_size ); recFactory.parseFromLine( test_svm_light_positive_label, in_vector, out_vector ); assertEquals( 1.0, out_vector.getDouble( 0 ), 0.0 ); } @Test public void testSVMLightRecordFactoryParseNoLabelException() { int feature_size = 9285; boolean caughtParseException = false; INDArray in_vector = Nd4j.create( feature_size ); INDArray out_vector = Nd4j.create( 
1 ); SVMLightRecordFactory recFactory = new SVMLightRecordFactory( feature_size ); try { recFactory.parseFromLine( test_svm_light_no_label, in_vector, out_vector ); } catch (NumberFormatException e ) { // should catch this caughtParseException = true; } assertEquals( true, caughtParseException ); } @Test public void testSVMLightRecordFactoryParseZeroIndex() { int feature_size = 9285; INDArray in_vector = Nd4j.create( feature_size ); INDArray out_vector = Nd4j.create( 1 ); SVMLightRecordFactory recFactory = new SVMLightRecordFactory( feature_size ); try { recFactory.parseFromLine( test_svm_light_zero_index, in_vector, out_vector ); } catch (NumberFormatException e ) { } assertEquals( 0.43, in_vector.getDouble( 0 ), 0.0 ); } }
package org.ccci.gto.android.common.support.v4.fragment; import android.database.Cursor; import android.os.Build; import android.support.v4.app.ListFragment; import android.widget.CursorAdapter; import android.widget.ListAdapter; import org.ccci.gto.android.common.support.v4.util.FragmentUtils; public class AbstractListFragment extends ListFragment { protected final <T> T findView(final Class<T> clazz, final int id) { return FragmentUtils.findView(this, clazz, id); } protected final <T> T getListener(final Class<T> clazz) { return FragmentUtils.getListener(this, clazz); } protected void changeCursor(final Cursor cursor) { final Cursor old = this.swapCursor(cursor); if (old != null) { old.close(); } } protected Cursor swapCursor(final Cursor cursor) { final ListAdapter adapter = getListAdapter(); if (adapter instanceof CursorAdapter) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) { return ((CursorAdapter) adapter).swapCursor(cursor); } else { ((CursorAdapter) adapter).changeCursor(cursor); } } else if (adapter instanceof android.support.v4.widget.CursorAdapter) { return ((android.support.v4.widget.CursorAdapter) adapter).swapCursor(cursor); } return null; } }
import static java.lang.System.out;

/**
 * A tiny two-player race game on a 25-square track. Each player starts on
 * square 1; every turn a random draw either plays a normal "card" (advance
 * one square) or a "special" event. The game ends once either player moves
 * past square 25.
 *
 * NOTE(review): the original file did not compile. Fixes applied here:
 *  - {@code import static System.out;} is invalid Java; the fully qualified
 *    {@code java.lang.System.out} is required
 *  - every {@code if (p1s = n)} / {@code if (whoseTurn = 1)} used assignment
 *    instead of comparison ({@code ==}), a type error on non-boolean operands
 *  - {@code Math.random()} returns a double in [0.0, 1.0), so the test
 *    {@code s > 5 || s < 0} was never true, and the special branch computed a
 *    {@code nextSpace} it immediately discarded — positions never advanced
 *    and the loop ran forever
 *  - the 25-way if-chain for printing the board is replaced by array lookup
 */
public class Board {

    // Board rows: row k shows the marker 'O' on square k. Kept as individual
    // public fields to preserve the original external interface.
    public static String s1 = "|O| | | | | | | | | | | | | | | | | | | | | | | | ||";
    public static String s2 = "| |O| | | | | | | | | | | | | | | | | | | | | | | ||";
    public static String s3 = "| | |O| | | | | | | | | | | | | | | | | | | | | | ||";
    public static String s4 = "| | | |O| | | | | | | | | | | | | | | | | | | | | ||";
    public static String s5 = "| | | | |O| | | | | | | | | | | | | | | | | | | | ||";
    public static String s6 = "| | | | | |O| | | | | | | | | | | | | | | | | | | ||";
    public static String s7 = "| | | | | | |O| | | | | | | | | | | | | | | | | | ||";
    public static String s8 = "| | | | | | | |O| | | | | | | | | | | | | | | | | ||";
    public static String s9 = "| | | | | | | | |O| | | | | | | | | | | | | | | | ||";
    public static String s10 = "| | | | | | | | | |O| | | | | | | | | | | | | | | ||";
    public static String s11 = "| | | | | | | | | | |O| | | | | | | | | | | | | | ||";
    public static String s12 = "| | | | | | | | | | | |O| | | | | | | | | | | | | ||";
    public static String s13 = "| | | | | | | | | | | | |O| | | | | | | | | | | | ||";
    public static String s14 = "| | | | | | | | | | | | | |O| | | | | | | | | | | ||";
    public static String s15 = "| | | | | | | | | | | | | | |O| | | | | | | | | | ||";
    public static String s16 = "| | | | | | | | | | | | | | | |O| | | | | | | | | ||";
    public static String s17 = "| | | | | | | | | | | | | | | | |O| | | | | | | | ||";
    public static String s18 = "| | | | | | | | | | | | | | | | | |O| | | | | | | ||";
    public static String s19 = "| | | | | | | | | | | | | | | | | | |O| | | | | | ||";
    public static String s20 = "| | | | | | | | | | | | | | | | | | | |O| | | | | ||";
    public static String s21 = "| | | | | | | | | | | | | | | | | | | | |O| | | | ||";
    public static String s22 = "| | | | | | | | | | | | | | | | | | | | | |O| | | ||";
    public static String s23 = "| | | | | | | | | | | | | | | | | | | | | | |O| | ||";
    public static String s24 = "| | | | | | | | | | | | | | | | | | | | | | | |O| ||";
    public static String s25 = "| | | | | | | | | | | | | | | | | | | | | | | | |O||";

    // Indexed snapshot of the rows (taken at class-init time) so the game
    // loop can look up the row for a square without a 25-way if-chain.
    private static final String[] ROWS = {
        s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13,
        s14, s15, s16, s17, s18, s19, s20, s21, s22, s23, s24, s25
    };

    public static double p1s;               // player 1 position (1-based square number)
    public static double p2s;               // player 2 position
    public static int whoseTurn = 1; //1 = Player 1, 2 = Player 2

    // Flavor text for a normal turn.
    public static String[] cards = {
        "Everthing seems quiet, that's strange....",
        "Time to take a rest....",
        "Fight the odd battle.",
        "This, I daresay, is quite strange...."
    };

    // Flavor text for a special event.
    public static String[] special = {
        "Ooh, a dragon!",
        "Hide!",
        "Ooh boy.",
        "Well, this is not good."
    };

    /** Reset both players to square 1 and run the game loop. */
    public static void start() {
        p1s = 1;
        p2s = 1;
        playGame();
    }

    /**
     * Main game loop. Each iteration prints player 1's board row (the
     * original only ever displayed player 1's position — TODO confirm this
     * is intended), lets the current player take a turn, then flips the
     * turn. Exits the JVM once either player passes square 25.
     */
    public static void playGame() {
        while (p1s <= 25 && p2s <= 25) {
            out.println(ROWS[(int) p1s - 1]);
            if (whoseTurn == 1) {
                p1s = takeTurn(p1s);
                whoseTurn = 2;
            } else {
                p2s = takeTurn(p2s);
                whoseTurn = 1;
            }
        }
        System.out.println("You win!");
        System.exit(0);
    }

    /**
     * Play one turn from {@code position} and return the new position.
     * A 50/50 random draw picks either a normal card (print the card text,
     * advance one square) or a special event (print the special text and
     * advance by a random 1-6 roll). The original threshold ({@code s > 5})
     * and the special-event movement rule were broken, so both are
     * reconstructed here — TODO confirm intended odds and movement.
     */
    private static double takeTurn(double position) {
        double s = Math.random();  // uniform in [0.0, 1.0)
        if (s > 0.5) {
            for (String str : cards) {
                System.out.println(str);
            }
            return position + 1;
        }
        for (String str : special) {
            System.out.println(str);
        }
        return position + 1 + (int) (Math.random() * 6);
    }
}
package ru.ifmo.ctddev.gmwcs.graph;

import org.jgrapht.UndirectedGraph;
import org.jgrapht.graph.SimpleGraph;
import ru.ifmo.ctddev.gmwcs.LDSU;
import ru.ifmo.ctddev.gmwcs.Pair;

import java.io.*;
import java.text.ParseException;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Reads a node-weighted, edge-weighted undirected graph from two whitespace
 * separated text files (node file: "name weight" per line; edge file:
 * "from to weight" per line) and writes a solution back out, printing "n/a"
 * for units that are not part of the solution. Also parses a "synonyms"
 * file that groups equivalent nodes/edges into an {@link LDSU}.
 *
 * NOTE(review): several string literals in this copy of the file appear
 * truncated (most likely by an extraction step that stripped text after a
 * '#' or '--'); the affected spots are marked below and must be restored
 * from version control before this file will compile.
 */
public class SimpleIO implements GraphIO {
    // Input and output files for nodes and edges.
    private File nodeIn;
    private File nodeOut;
    private File edgeIn;
    private File edgeOut;
    // Nodes/edges in input order, so output files preserve that order.
    private List<String> nodeList;
    private List<Pair<String, String>> edgeList;
    // name -> Node, and from -> (to -> Edge) lookup tables.
    private Map<String, Node> nodeMap;
    private Map<String, Map<String, Edge>> edgeMap;

    public SimpleIO(File nodeIn, File nodeOut, File edgeIn, File edgeOut) {
        this.nodeIn = nodeIn;
        this.edgeOut = edgeOut;
        this.edgeIn = edgeIn;
        this.nodeOut = nodeOut;
        nodeMap = new LinkedHashMap<>();
        nodeList = new ArrayList<>();
        edgeList = new ArrayList<>();
        edgeMap = new LinkedHashMap<>();
    }

    /**
     * Parse the node and edge files into a fresh SimpleGraph.
     *
     * @throws FileNotFoundException if either input file is missing
     * @throws ParseException        on malformed lines (line number reported
     *                               via the exception's error offset)
     */
    @Override
    public UndirectedGraph<Node, Edge> read() throws FileNotFoundException, ParseException {
        try (Scanner nodes = new Scanner(new BufferedReader(new FileReader(nodeIn)));
             Scanner edges = new Scanner(new BufferedReader(new FileReader(edgeIn)))) {
            UndirectedGraph<Node, Edge> graph = new SimpleGraph<>(Edge.class);
            parseNodes(nodes, graph);
            parseEdges(edges, graph);
            return graph;
        }
    }

    /** Parse "name weight" lines, filling nodeList/nodeMap and the graph. */
    private void parseNodes(Scanner nodes, UndirectedGraph<Node, Edge> graph) throws ParseException {
        int lnum = 0;
        while (nodes.hasNextLine()) {
            lnum++;
            String line = nodes.nextLine();
            // NOTE(review): truncated literal — original was almost certainly
            // `if (line.startsWith("#")) { continue; }` (skip comment lines).
            if (line.startsWith(" continue; }
            StringTokenizer tokenizer = new StringTokenizer(line);
            if (!tokenizer.hasMoreTokens()) {
                continue;
            }
            String node = tokenizer.nextToken();
            nodeList.add(node);
            if (!tokenizer.hasMoreTokens()) {
                throw new ParseException("Expected weight of node in line", lnum);
            }
            String weightStr = tokenizer.nextToken();
            try {
                double weight = Double.parseDouble(weightStr);
                Node vertex = new Node(lnum, weight);
                if (nodeMap.containsKey(node)) {
                    throw new ParseException("Duplicate node " + node, 0);
                }
                nodeMap.put(node, vertex);
                graph.addVertex(vertex);
            } catch (NumberFormatException e) {
                throw new ParseException("Expected floating point value of node weight in line", lnum);
            }
        }
    }

    /** Parse "from to weight" lines, filling edgeList/edgeMap and the graph. */
    private void parseEdges(Scanner edges, UndirectedGraph<Node, Edge> graph) throws ParseException {
        int lnum = 0;
        while (edges.hasNextLine()) {
            lnum++;
            String line = edges.nextLine();
            // NOTE(review): truncated literal — same "#"-comment skip as in
            // parseNodes; restore from version control.
            if (line.startsWith(" continue; }
            StringTokenizer tokenizer = new StringTokenizer(line);
            if (!tokenizer.hasMoreTokens()) {
                continue;
            }
            String first = tokenizer.nextToken();
            if (!tokenizer.hasMoreTokens()) {
                throw new ParseException("Expected name of second node in edge list in line", lnum);
            }
            String second = tokenizer.nextToken();
            if (!tokenizer.hasMoreTokens()) {
                throw new ParseException("Expected weight of edge in line", lnum);
            }
            try {
                double weight = Double.parseDouble(tokenizer.nextToken());
                if (!nodeMap.containsKey(first) || !nodeMap.containsKey(second)) {
                    throw new ParseException("There's no such vertex in edge list in line", lnum);
                }
                Edge edge = new Edge(lnum, weight);
                Node from = nodeMap.get(first);
                Node to = nodeMap.get(second);
                // Reject an edge seen in either direction (undirected graph).
                if (edgeMap.get(first) != null && edgeMap.get(first).get(second) != null ||
                        edgeMap.get(second) != null && edgeMap.get(second).get(first) != null) {
                    throw new ParseException("Duplicate edge " + first + " -- " + second, 0);
                }
                graph.addEdge(from, to, edge);
                edgeList.add(new Pair<>(first, second));
                if (!edgeMap.containsKey(first)) {
                    edgeMap.put(first, new LinkedHashMap<>());
                }
                edgeMap.get(first).put(second, edge);
            } catch (NumberFormatException e) {
                throw new ParseException("Expected floating point value of edge in line", lnum);
            }
        }
    }

    /**
     * Write the solution: units present in {@code units} keep their weight
     * in the output; all others are written as "n/a". A null list is
     * treated as an empty solution.
     */
    @Override
    public void write(List<Unit> units) throws IOException {
        Set<Unit> unitSet = new LinkedHashSet<>();
        if (units == null) {
            units = new ArrayList<>();
        }
        unitSet.addAll(units);
        writeNodes(unitSet);
        writeEdges(unitSet);
    }

    /** Lookup a parsed node by its name; null if unknown. */
    @Override
    public Node getNode(String name) {
        return nodeMap.get(name);
    }

    /** Write the edge output file in original input order. */
    private void writeEdges(Set<Unit> units) throws IOException {
        try (Writer writer = new BufferedWriter(new FileWriter(edgeOut))) {
            for (Pair<String, String> p : edgeList) {
                Edge edge = edgeMap.get(p.first).get(p.second);
                writer.write(p.first + "\t" + p.second + "\t" +
                        (units.contains(edge) ? edge.getWeight() : "n/a"));
                writer.write("\n");
            }
        }
    }

    /** Write the node output file in original input order. */
    private void writeNodes(Set<Unit> units) throws IOException {
        try (Writer writer = new BufferedWriter(new FileWriter(nodeOut))) {
            for (String name : nodeList) {
                Node node = nodeMap.get(name);
                writer.write(name + "\t" +
                        (units.contains(node) ? node.getWeight() : "n/a"));
                writer.write("\n");
            }
        }
    }

    /**
     * Parse a synonyms file: every unit starts in its own group, then each
     * line of the file merges the listed edges and nodes into the group of
     * the line's first unit. Must be called after {@link #read()} so the
     * lookup maps are populated.
     */
    public LDSU<Unit> getSynonyms(File s) throws FileNotFoundException, ParseException {
        LDSU<Unit> synonyms = new LDSU<>();
        nodeMap.values().forEach(synonyms::add);
        for (Pair<String, String> p : edgeList) {
            Edge edge = edgeMap.get(p.first).get(p.second);
            synonyms.add(edge);
        }
        try (Scanner sc = new Scanner(new BufferedReader(new FileReader(s)))) {
            while (sc.hasNextLine()) {
                String line = sc.nextLine();
                List<Unit> eq = new ArrayList<>();
                // getEdges consumes edge mentions and returns the remainder,
                // which getNodes then scans for node names.
                line = getEdges(line, eq);
                getNodes(line, eq);
                if (eq.isEmpty()) {
                    continue;
                }
                Unit main = eq.get(0);
                for (int i = 1; i < eq.size(); i++) {
                    synonyms.merge(main, eq.get(i));
                }
            }
        }
        return synonyms;
    }

    /**
     * Extract edge mentions ("from -- to" pairs, presumably) from the line,
     * adding the matching Edge objects to {@code eq} and returning the line
     * with those mentions removed.
     */
    private String getEdges(String line, List<Unit> eq) throws ParseException {
        // NOTE(review): truncated literal — the regex is cut off here; it
        // presumably matched "name -- name" edge pairs (two capture groups
        // are read below). Restore from version control.
        Pattern pattern = Pattern.compile("([^\\s\\-]+)\\s*
        Matcher matcher = pattern.matcher(line);
        while (matcher.find()) {
            String from = matcher.group(1);
            String to = matcher.group(2);
            // NOTE(review): these null checks test the *matched strings*,
            // which cannot be null after a successful find(); the messages
            // suggest the intent was `getNode(from) == null` — verify.
            if (from == null) {
                throw new ParseException("No such node " + from + " but it was occurred in synonym file", 0);
            }
            if (to == null) {
                throw new ParseException("No such node " + to + " but it was occurred in synonym file", 0);
            }
            Edge edge;
            // NOTE(review): edgeMap.get(from) / edgeMap.get(to) may be null
            // for unknown endpoints, which would NPE here before reaching
            // the "No such edge" check below — verify against inputs.
            if (edgeMap.get(from).get(to) != null) {
                edge = edgeMap.get(from).get(to);
            } else {
                edge = edgeMap.get(to).get(from);
            }
            if (edge == null) {
                throw new ParseException("No such edge " + from + " -- " + to, 0);
            }
            eq.add(edge);
            line = line.replace(matcher.group(), "");
        }
        return line;
    }

    /**
     * Extract whitespace-separated node names from the (edge-stripped) line
     * and add the matching Node objects to {@code eq}.
     */
    private void getNodes(String line, List<Unit> eq) throws ParseException {
        Pattern pattern = Pattern.compile("[^\\s]+");
        Matcher matcher = pattern.matcher(line);
        while (matcher.find()) {
            String name = matcher.group();
            Node node = getNode(name);
            if (node == null) {
                throw new ParseException("No such node " + name + " but it was occured in synonym file", 0);
            }
            eq.add(node);
        }
    }
}
package org.broad.igv.session;

import org.apache.log4j.Logger;
import org.broad.igv.Globals;
import org.broad.igv.feature.Locus;
import org.broad.igv.feature.RegionOfInterest;
import org.broad.igv.feature.genome.Genome;
import org.broad.igv.feature.genome.GenomeManager;
import org.broad.igv.lists.GeneList;
import org.broad.igv.lists.GeneListManager;
import org.broad.igv.renderer.ColorScale;
import org.broad.igv.renderer.ColorScaleFactory;
import org.broad.igv.renderer.ContinuousColorScale;
import org.broad.igv.track.*;
import org.broad.igv.ui.IGV;
import org.broad.igv.ui.TrackFilter;
import org.broad.igv.ui.TrackFilterElement;
import org.broad.igv.ui.color.ColorUtilities;
import org.broad.igv.ui.panel.FrameManager;
import org.broad.igv.ui.panel.ReferenceFrame;
import org.broad.igv.ui.panel.TrackPanel;
import org.broad.igv.ui.panel.TrackPanelScrollPane;
import org.broad.igv.ui.util.MessageUtils;
import org.broad.igv.util.FileUtils;
import org.broad.igv.util.FilterElement.BooleanOperator;
import org.broad.igv.util.FilterElement.Operator;
import org.broad.igv.util.ParsingUtils;
import org.broad.igv.util.ResourceLocator;
import org.broad.igv.util.Utilities;
import org.broad.igv.util.collections.CollUtils;
import org.w3c.dom.*;

import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Unmarshaller;
import java.awt.*;
import java.io.IOException;
import java.io.InputStream;
import java.lang.ref.WeakReference;
import java.util.*;
import java.util.List;

/**
 * Class to parse an IGV session file (an XML document rooted at a "Global" or
 * "Session" element). Parsing is a recursive walk over the DOM; each element
 * type has a processXxx handler. Tracks are unmarshalled via JAXB.
 */
public class IGVSessionReader implements SessionReader {

    private static Logger log = Logger.getLogger(IGVSessionReader.class);
    private static String INPUT_FILE_KEY = "INPUT_FILE_KEY";
    // Temporary values used in processing

    //package-private for unit testing
    Collection<ResourceLocator> dataFiles;
    private Collection<ResourceLocator> missingDataFiles;

    // Maps legacy attribute names to their current equivalents (see static block below).
    // NOTE(review): raw HashMap — generic type arguments omitted in the original.
    private static Map<String, String> attributeSynonymMap = new HashMap();

    // True once a <Panel> element has been seen; controls leftover-track placement.
    private boolean panelElementPresent = false;
    // Session file format version, parsed from the root element.
    private int version;
    private IGV igv;

    // Weak reference so the reader does not outlive its use during unmarshalling.
    private static WeakReference<IGVSessionReader> currentReader;

    /**
     * Classes that have been registered for use with JAXB
     */
    private static List<Class> registeredClasses = new ArrayList<Class>();

    /**
     * Map of track id -> track. It is important to maintain the order in which tracks are added, thus
     * the use of LinkedHashMap. We add tracks here when loaded and remove them when attributes are specified.
     */
    private final Map<String, List<Track>> leftoverTrackDictionary = Collections.synchronizedMap(new LinkedHashMap());

    /**
     * Map of id -> track, for second pass through when tracks reference each other
     */
    private final Map<String, List<Track>> allTracks = Collections.synchronizedMap(new LinkedHashMap<String, List<Track>>());

    public List<Track> getTracksById(String trackId){
        return allTracks.get(trackId);
    }

    /**
     * Map of full path -> relative path.
     */
    Map<String, String> fullToRelPathMap = new HashMap<String, String>();

    private Track geneTrack = null;
    private Track seqTrack = null;
    // NOTE(review): identifier typo ("Elments") preserved — renaming would change the interface.
    private boolean hasTrackElments;

    //Temporary holder for generating tracks
    protected static AbstractTrack nextTrack;

    static {
        attributeSynonymMap.put("DATA FILE", "DATA SET");
        attributeSynonymMap.put("TRACK NAME", "NAME");
        // registerClass is defined elsewhere in this class (outside the visible chunk).
        registerClass(AbstractTrack.class);
    }

    /**
     * Session Element types — the XML element names recognized in a session file.
     */
    public static enum SessionElement {
        PANEL("Panel"),
        PANEL_LAYOUT("PanelLayout"),
        TRACK("Track"),
        COLOR_SCALE("ColorScale"),
        COLOR_SCALES("ColorScales"),
        DATA_TRACK("DataTrack"),
        DATA_TRACKS("DataTracks"),
        FEATURE_TRACKS("FeatureTracks"),
        DATA_FILE("DataFile"),
        RESOURCE("Resource"),
        RESOURCES("Resources"),
        FILES("Files"),
        FILTER_ELEMENT("FilterElement"),
        FILTER("Filter"),
        SESSION("Session"),
        GLOBAL("Global"),
        REGION("Region"),
        REGIONS("Regions"),
        DATA_RANGE("DataRange"),
        PREFERENCES("Preferences"),
        PROPERTY("Property"),
        GENE_LIST("GeneList"),
        HIDDEN_ATTRIBUTES("HiddenAttributes"),
        VISIBLE_ATTRIBUTES("VisibleAttributes"),
        ATTRIBUTE("Attribute"),
        VISIBLE_ATTRIBUTE("VisibleAttribute"),
        FRAME("Frame");

        // The literal element name as it appears in the XML.
        private String name;

        SessionElement(String name) {
            this.name = name;
        }

        public String getText() {
            return name;
        }

        @Override
        public String toString() {
            return getText();
        }

        // Null-tolerant wrapper around valueOf (still throws for unknown non-null values).
        static public SessionElement findEnum(String value) {
            if (value == null) {
                return null;
            } else {
                return SessionElement.valueOf(value);
            }
        }
    }

    /**
     * Session Attribute types — the XML attribute names recognized in a session file.
     */
    public static enum SessionAttribute {
        BOOLEAN_OPERATOR("booleanOperator"),
        COLOR("color"),
        ALT_COLOR("altColor"),
        COLOR_MODE("colorMode"),
        CHROMOSOME("chromosome"),
        END_INDEX("end"),
        EXPAND("expand"),
        SQUISH("squish"),
        DISPLAY_MODE("displayMode"),
        FILTER_MATCH("match"),
        FILTER_SHOW_ALL_TRACKS("showTracks"),
        GENOME("genome"),
        GROUP_TRACKS_BY("groupTracksBy"),
        HEIGHT("height"),
        ID("id"),
        ITEM("item"),
        LOCUS("locus"),
        NAME("name"),
        SAMPLE_ID("sampleID"),
        RESOURCE_TYPE("resourceType"),
        OPERATOR("operator"),
        RELATIVE_PATH("relativePath"),
        RENDERER("renderer"),
        SCALE("scale"),
        START_INDEX("start"),
        VALUE("value"),
        VERSION("version"),
        VISIBLE("visible"),
        WINDOW_FUNCTION("windowFunction"),
        RENDER_NAME("renderName"),
        GENOTYPE_HEIGHT("genotypeHeight"),
        VARIANT_HEIGHT("variantHeight"),
        PREVIOUS_HEIGHT("previousHeight"),
        FEATURE_WINDOW("featureVisibilityWindow"),
        DISPLAY_NAME("displayName"),
        COLOR_SCALE("colorScale"),
        HAS_GENE_TRACK("hasGeneTrack"),

        //RESOURCE ATTRIBUTES
        PATH("path"),
        LABEL("label"),
        SERVER_URL("serverURL"),
        HYPERLINK("hyperlink"),
        INFOLINK("infolink"),
        URL("url"),
        FEATURE_URL("featureURL"),
        DESCRIPTION("description"),
        TYPE("type"),
        COVERAGE("coverage"),
        TRACK_LINE("trackLine"),
        CHR("chr"),
        START("start"),
        END("end");

        //TODO Add the following into the Attributes
        /*
        ShadeBasesOption shadeBases;
        boolean shadeCenters;
        boolean flagUnmappedPairs;
        boolean showAllBases;
        int insertSizeThreshold;
        boolean colorByStrand;
        boolean colorByAmpliconStrand;
        */

        // The literal attribute name as it appears in the XML.
        private String name;

        SessionAttribute(String name) {
            this.name
= name;
        }

        public String getText() {
            return name;
        }

        @Override
        public String toString() {
            return getText();
        }
    }

    public IGVSessionReader(IGV igv) {
        this.igv = igv;
        currentReader = new WeakReference<IGVSessionReader>(this);
    }

    /**
     * Parses the session XML from the stream and populates {@code session},
     * loading resources and placing tracks as a side effect.
     *
     * @param inputStream session XML
     * @param session     session object to populate
     * @param sessionPath path of the session file, used to resolve relative resource paths
     * @throws RuntimeException if the XML cannot be parsed into a DOM document
     */
    public void loadSession(InputStream inputStream, Session session, String sessionPath) {

        log.debug("Load session");

        Document document = null;
        try {
            document = Utilities.createDOMDocumentFromXmlStream(inputStream);
        } catch (Exception e) {
            log.error("Load session error", e);
            throw new RuntimeException(e);
        }

        NodeList tracks = document.getElementsByTagName("Track");
        hasTrackElments = tracks.getLength() > 0;

        // NOTE(review): raw HashMap, passed down the recursive walk as a scratch map.
        HashMap additionalInformation = new HashMap();
        additionalInformation.put(INPUT_FILE_KEY, sessionPath);

        // The root may be either <Global> (older files) or <Session>.
        NodeList nodes = document.getElementsByTagName(SessionElement.GLOBAL.getText());
        if (nodes == null || nodes.getLength() == 0) {
            nodes = document.getElementsByTagName(SessionElement.SESSION.getText());
        }

        processRootNode(session, nodes.item(0), additionalInformation, sessionPath);

        // Place tracks that were loaded but never claimed by a <Panel>/<Track>
        // section only (no Panel or Track elements).
        addLeftoverTracks(leftoverTrackDictionary.values());

        if (igv != null) {
            if (session.getGroupTracksBy() != null && session.getGroupTracksBy().length() > 0) {
                igv.setGroupByAttribute(session.getGroupTracksBy());
            }

            if (session.isRemoveEmptyPanels()) {
                igv.getMainPanel().removeEmptyDataPanels();
            }

            igv.resetOverlayTracks();
        }
    }

    /**
     * Processes the root element: validates it, parses the format version,
     * loads/restores the genome (and its gene track), then recurses into the
     * child elements.
     */
    private void processRootNode(Session session, Node node, HashMap additionalInformation, String rootPath) {

        if ((node == null) || (session == null)) {
            MessageUtils.showMessage("Invalid session file: root node not found");
            return;
        }

        String nodeName = node.getNodeName();
        if (!(nodeName.equalsIgnoreCase(SessionElement.GLOBAL.getText()) || nodeName.equalsIgnoreCase(SessionElement.SESSION.getText()))) {
            MessageUtils.showMessage("Session files must begin with a \"Global\" or \"Session\" element. Found: " + nodeName);
            return;
        }

        process(session, node, additionalInformation, rootPath);

        Element element = (Element) node;

        String versionString = getAttribute(element, SessionAttribute.VERSION.getText());
        try {
            version = Integer.parseInt(versionString);
        } catch (NumberFormatException e) {
            // Non-fatal: version stays at its previous value (0 on first use).
            log.error("Non integer version number in session file: " + versionString);
        }

        // Load the genome, which can be an ID, or a path or URL to a .genome or indexed fasta file.
        String genomeId = getAttribute(element, SessionAttribute.GENOME.getText());
        String hasGeneTrackStr = getAttribute(element, SessionAttribute.HAS_GENE_TRACK.getText());
        boolean hasGeneTrack = true;
        if(hasGeneTrackStr != null){
            hasGeneTrack = Boolean.parseBoolean(hasGeneTrackStr);
        }
        if (genomeId != null && genomeId.length() > 0) {
            if (genomeId.equals(GenomeManager.getInstance().getGenomeId())) {
                // We don't have to reload the genome, but the gene track for the current genome should be restored.
                if(hasGeneTrack){
                    Genome genome = GenomeManager.getInstance().getCurrentGenome();
                    IGV.getInstance().setGenomeTracks(genome.getGeneTrack());
                }
            } else {
                // Selecting a genome will actually "reset" the session so we have to
                // save the path and restore it.
                String sessionPath = session.getPath();
                //Loads genome from list, or from server or cache
                igv.selectGenomeFromList(genomeId);

                // If the list lookup did not switch genomes, try treating genomeId as a file path/URL.
                if (!GenomeManager.getInstance().getGenomeId().equals(genomeId)) {
                    String genomePath = genomeId;
                    if (!ParsingUtils.pathExists(genomePath)) {
                        genomePath = FileUtils.getAbsolutePath(genomeId, session.getPath());
                    }
                    if (ParsingUtils.pathExists(genomePath)) {
                        try {
                            IGV.getInstance().loadGenome(genomePath, null, hasGeneTrack);
                        } catch (IOException e) {
                            // NOTE(review): cause `e` is dropped here in the original.
                            throw new RuntimeException("Error loading genome: " + genomeId);
                        }
                    } else {
                        MessageUtils.showMessage("Warning: Could not locate genome: " + genomeId);
                    }
                }

                session.setPath(sessionPath);
            }
        }

        if(!hasGeneTrack && igv.hasGeneTrack()){
            //Need to remove gene track if it was loaded because it's not supposed to be in the session
            igv.removeTracks(Arrays.<Track>asList(GenomeManager.getInstance().getCurrentGenome().getGeneTrack()));
            geneTrack = null;
        }else{
            //For later lookup and to prevent dual adding, we keep a reference to the gene track
            geneTrack = GenomeManager.getInstance().getCurrentGenome().getGeneTrack();
            if(geneTrack != null){
                allTracks.put(geneTrack.getId(), Arrays.asList(geneTrack));
            }
        }

        session.setLocus(getAttribute(element, SessionAttribute.LOCUS.getText()));
        session.setGroupTracksBy(getAttribute(element, SessionAttribute.GROUP_TRACKS_BY.getText()));

        String removeEmptyTracks = getAttribute(element, "removeEmptyTracks");
        if (removeEmptyTracks != null) {
            try {
                Boolean b = Boolean.parseBoolean(removeEmptyTracks);
                session.setRemoveEmptyPanels(b);
            } catch (Exception e) {
                // parseBoolean does not actually throw; kept for safety in the original.
                log.error("Error parsing removeEmptyTracks string: " + removeEmptyTracks, e);
            }
        }

        session.setVersion(version);
        NodeList elements = element.getChildNodes();
        process(session, elements, additionalInformation, rootPath);

        // ReferenceFrame.getInstance().invalidateLocationScale();
    }

    //TODO Check to make sure tracks are not being created twice
    //TODO -- DONT DO THIS FOR NEW SESSIONS

    /**
     * Places tracks that were loaded from resources but never assigned to a
     * panel (pre-version-3 files, or files with no Panel elements).
     */
    private void addLeftoverTracks(Collection<List<Track>> tmp)
{
        Map<String, TrackPanel> trackPanelCache = new HashMap();
        if (version < 3 || !panelElementPresent) {
            log.debug("Adding \"leftover\" tracks");

            //For resetting track panels later
            List<Map<TrackPanelScrollPane, Integer>> trackPanelAttrs = null;
            if(IGV.hasInstance()){
                trackPanelAttrs = IGV.getInstance().getTrackPanelAttrs();
            }

            for (List<Track> tracks : tmp) {
                for (Track track : tracks) {
                    // The gene and sequence tracks are managed separately; only
                    // resource-backed tracks get auto-placed.
                    if (track != geneTrack && track != seqTrack && track.getResourceLocator() != null) {
                        // One panel per resource path, cached to avoid repeated lookups.
                        TrackPanel panel = trackPanelCache.get(track.getResourceLocator().getPath());
                        if (panel == null) {
                            panel = IGV.getInstance().getPanelFor(track.getResourceLocator());
                            trackPanelCache.put(track.getResourceLocator().getPath(), panel);
                        }
                        panel.addTrack(track);
                    }
                }
            }

            if(IGV.hasInstance()){
                // NOTE(review): trackPanelAttrs can only be non-null here because
                // both branches are guarded by the same IGV.hasInstance() check.
                IGV.getInstance().resetPanelHeights(trackPanelAttrs.get(0), trackPanelAttrs.get(1));
            }
        }
    }

    /**
     * Process a single session element node, dispatching to the handler for
     * its element type. Unrecognized elements are silently ignored.
     *
     * @param session               session being populated
     * @param element               the DOM node to process
     * @param additionalInformation scratch map threaded through the recursive walk
     * @param rootPath              session file path, for resolving relative paths
     */
    private void process(Session session, Node element, HashMap additionalInformation, String rootPath) {

        if ((element == null) || (session == null)) {
            return;
        }

        String nodeName = element.getNodeName();

        if (nodeName.equalsIgnoreCase(SessionElement.RESOURCES.getText()) ||
                nodeName.equalsIgnoreCase(SessionElement.FILES.getText())) {
            processResources(session, (Element) element, additionalInformation, rootPath);
        } else if (nodeName.equalsIgnoreCase(SessionElement.RESOURCE.getText()) ||
                nodeName.equalsIgnoreCase(SessionElement.DATA_FILE.getText())) {
            processResource(session, (Element) element, additionalInformation, rootPath);
        } else if (nodeName.equalsIgnoreCase(SessionElement.REGIONS.getText())) {
            processRegions(session, (Element) element, additionalInformation, rootPath);
        } else if (nodeName.equalsIgnoreCase(SessionElement.REGION.getText())) {
            processRegion(session, (Element) element, additionalInformation, rootPath);
        } else if (nodeName.equalsIgnoreCase(SessionElement.GENE_LIST.getText())) {
            processGeneList(session, (Element) element, additionalInformation);
        } else if (nodeName.equalsIgnoreCase(SessionElement.FILTER.getText())) {
            processFilter(session, (Element) element, additionalInformation, rootPath);
        } else if (nodeName.equalsIgnoreCase(SessionElement.FILTER_ELEMENT.getText())) {
            processFilterElement(session, (Element) element, additionalInformation, rootPath);
        } else if (nodeName.equalsIgnoreCase(SessionElement.COLOR_SCALES.getText())) {
            // processColorScales / processColorScale / processPreferences are
            // defined elsewhere in this class (outside the visible chunk).
            processColorScales(session, (Element) element, additionalInformation, rootPath);
        } else if (nodeName.equalsIgnoreCase(SessionElement.COLOR_SCALE.getText())) {
            processColorScale(session, (Element) element, additionalInformation, rootPath);
        } else if (nodeName.equalsIgnoreCase(SessionElement.PREFERENCES.getText())) {
            processPreferences(session, (Element) element, additionalInformation);
        } else if (nodeName.equalsIgnoreCase(SessionElement.DATA_TRACKS.getText()) ||
                nodeName.equalsIgnoreCase(SessionElement.FEATURE_TRACKS.getText()) ||
                nodeName.equalsIgnoreCase(SessionElement.PANEL.getText())) {
            processPanel(session, (Element) element, additionalInformation, rootPath);
        } else if (nodeName.equalsIgnoreCase(SessionElement.PANEL_LAYOUT.getText())) {
            processPanelLayout(session, (Element) element, additionalInformation);
        } else if (nodeName.equalsIgnoreCase(SessionElement.HIDDEN_ATTRIBUTES.getText())) {
            processHiddenAttributes(session, (Element) element, additionalInformation);
        } else if (nodeName.equalsIgnoreCase(SessionElement.VISIBLE_ATTRIBUTES.getText())) {
            processVisibleAttributes(session, (Element) element, additionalInformation);
        }
    }

    /**
     * Processes a Resources/Files element: collects all child resources, warns
     * about missing files, then loads each resource (concurrently where safe)
     * and registers the resulting tracks by id.
     */
    private void processResources(Session session, Element element, HashMap additionalInformation, String rootPath) {
        dataFiles = new ArrayList();
        missingDataFiles = new ArrayList();
        NodeList elements = element.getChildNodes();
        process(session, elements, additionalInformation, rootPath);

        if (missingDataFiles.size() > 0) {
            StringBuffer message = new StringBuffer();
            message.append("<html>The following data file(s) could not be located.<ul>");
            for (ResourceLocator file : missingDataFiles) {
                if (file.isLocal()) {
                    message.append("<li>");
                    message.append(file.getPath());
                    message.append("</li>");
                } else {
                    message.append("<li>Server: ");
                    message.append(file.getServerURL());
                    message.append("  Path: ");
                    message.append(file.getPath());
                    message.append("</li>");
                }
            }
            message.append("</ul>");
            message.append("Common reasons for this include: ");
            message.append("<ul><li>The session or data files have been moved.</li> ");
            message.append("<li>The data files are located on a drive that is not currently accessible.</li></ul>");
            message.append("</html>");

            MessageUtils.showMessage(message.toString());
        }
        if (dataFiles.size() > 0) {

            final List<String> errors = new ArrayList<String>();

            // Load files concurrently  -- TODO, put a limit on # of threads?
            List<Thread> threads = new ArrayList(dataFiles.size());
            long t0 = System.currentTimeMillis();
            int i = 0;
            List<Runnable> synchronousLoads = new ArrayList<Runnable>();
            for (final ResourceLocator locator : dataFiles) {

                final String suppliedPath = locator.getPath();
                final String relPath = fullToRelPathMap.get(suppliedPath);

                Runnable runnable = new Runnable() {
                    public void run() {
                        List<Track> tracks = null;
                        try {
                            tracks = igv.load(locator);
                            for (Track track : tracks) {
                                if (track == null) {
                                    log.info("Null track for resource " + locator.getPath());
                                    continue;
                                }

                                String id = track.getId();
                                if (id == null) {
                                    log.info("Null track id for resource " + locator.getPath());
                                    continue;
                                }

                                // Session files store ids with relative paths; normalize so
                                // later <Track> elements can find the loaded track.
                                if (relPath != null) {
                                    id = id.replace(suppliedPath, relPath);
                                }

                                List<Track> trackList = leftoverTrackDictionary.get(id);
                                if (trackList == null) {
                                    trackList = new ArrayList();
                                    leftoverTrackDictionary.put(id, trackList);
                                    allTracks.put(id, trackList);
                                }
                                trackList.add(track);
                            }
                        } catch (Exception e) {
                            log.error("Error loading resource " + locator.getPath(), e);
                            // NOTE(review): "&nbs;p" looks like a typo for "&nbsp;" but it is a
                            // runtime string in the original — left byte-identical here.
                            String ms = "<b>" + locator.getPath() + "</b><br>&nbs;p&nbsp;" + e.toString() + "<br>";
                            errors.add(ms);
                        }
                    }
                };

                boolean isAlignment =
locator.getPath().endsWith(".bam") || locator.getPath().endsWith(".entries") || locator.getPath().endsWith(".sam"); // Run synchronously if in batch mode or if there are no "track" elments, or if this is an alignment file // EVERYTHING IS RUN SYNCHRONOUSLY FOR NOW UNTIL WE CAN FIGURE OUT WHAT TO DO TO PREVENT MULTIPLE // AUTHENTICATION DIALOGS if (isAlignment || Globals.isBatch() || !hasTrackElments) { synchronousLoads.add(runnable); } else { Thread t = new Thread(runnable); threads.add(t); t.start(); } i++; } // Wait for all threads to complete for (Thread t : threads) { try { t.join(); } catch (InterruptedException ignore) { } } // Now load data that must be loaded synchronously for (Runnable runnable : synchronousLoads) { runnable.run(); } long dt = System.currentTimeMillis() - t0; log.debug("Total load time = " + dt); if (errors.size() > 0) { StringBuffer buf = new StringBuffer(); buf.append("<html>Errors were encountered loading the session:<br>"); for (String msg : errors) { buf.append(msg); } MessageUtils.showMessage(buf.toString()); } } dataFiles = null; } /** * Load a single resource. 
* <p/>
     * Package private for unit testing
     *
     * @param session               session being populated
     * @param element               the Resource/DataFile element
     * @param additionalInformation scratch map threaded through the recursive walk
     */
    void processResource(Session session, Element element, HashMap additionalInformation, String rootPath) {

        String nodeName = element.getNodeName();
        boolean oldSession = nodeName.equals(SessionElement.DATA_FILE.getText());

        String label = getAttribute(element, SessionAttribute.LABEL.getText());
        String name = getAttribute(element, SessionAttribute.NAME.getText());
        String sampleId = getAttribute(element, SessionAttribute.SAMPLE_ID.getText());
        String description = getAttribute(element, SessionAttribute.DESCRIPTION.getText());
        String type = getAttribute(element, SessionAttribute.TYPE.getText());
        String coverage = getAttribute(element, SessionAttribute.COVERAGE.getText());
        String trackLine = getAttribute(element, SessionAttribute.TRACK_LINE.getText());
        String colorString = getAttribute(element, SessionAttribute.COLOR.getText());

        //String relPathValue = getAttribute(element, SessionAttribute.RELATIVE_PATH.getText());
        //boolean isRelativePath = ((relPathValue != null) && relPathValue.equalsIgnoreCase("true"));
        String serverURL = getAttribute(element, SessionAttribute.SERVER_URL.getText());

        // Older sessions used the "name" attribute for the path.
        String path = getAttribute(element, SessionAttribute.PATH.getText());

        if (oldSession && name != null) {
            path = name;
            // Strip the directory portion so "name" is just the file name.
            int idx = name.lastIndexOf("/");
            if (idx > 0 && idx + 1 < name.length()) {
                name = name.substring(idx + 1);
            }
        }

        if (rootPath == null) {
            log.error("Null root path -- this is not expected");
            MessageUtils.showMessage("Unexpected error loading session: null root path");
            return;
        }
        // Resolve the (possibly relative) resource path against the session location,
        // and remember the mapping so track ids can be normalized after loading.
        String absolutePath = FileUtils.getAbsolutePath(path, rootPath);
        fullToRelPathMap.put(absolutePath, path);
        ResourceLocator resourceLocator = new ResourceLocator(serverURL, absolutePath);

        if (coverage != null) {
            String absoluteCoveragePath = FileUtils.getAbsolutePath(coverage, rootPath);
            resourceLocator.setCoverage(absoluteCoveragePath);
        }

        String url = getAttribute(element, SessionAttribute.URL.getText());
        if (url == null) {
            url = getAttribute(element, SessionAttribute.FEATURE_URL.getText());
        }
        resourceLocator.setUrl(url);

        String infolink = getAttribute(element, SessionAttribute.HYPERLINK.getText());
        if (infolink == null) {
            infolink = getAttribute(element, SessionAttribute.INFOLINK.getText());
        }
        resourceLocator.setInfolink(infolink);


        // Label is deprecated in favor of name.
if (name != null) { resourceLocator.setName(name); } else { resourceLocator.setName(label); } resourceLocator.setSampleId(sampleId); resourceLocator.setDescription(description); // This test added to get around earlier bug in the writer if (type != null && !type.equals("local")) { resourceLocator.setType(type); } resourceLocator.setCoverage(coverage); resourceLocator.setTrackLine(trackLine); if (colorString != null) { try { Color c = ColorUtilities.stringToColor(colorString); resourceLocator.setColor(c); } catch (Exception e) { log.error("Error setting color: ", e); } } dataFiles.add(resourceLocator); NodeList elements = element.getChildNodes(); process(session, elements, additionalInformation, rootPath); } private void processRegions(Session session, Element element, HashMap additionalInformation, String rootPath) { session.clearRegionsOfInterest(); NodeList elements = element.getChildNodes(); process(session, elements, additionalInformation, rootPath); } private void processRegion(Session session, Element element, HashMap additionalInformation, String rootPath) { String chromosome = getAttribute(element, SessionAttribute.CHROMOSOME.getText()); String start = getAttribute(element, SessionAttribute.START_INDEX.getText()); String end = getAttribute(element, SessionAttribute.END_INDEX.getText()); String description = getAttribute(element, SessionAttribute.DESCRIPTION.getText()); RegionOfInterest region = new RegionOfInterest(chromosome, new Integer(start), new Integer(end), description); IGV.getInstance().addRegionOfInterest(region); NodeList elements = element.getChildNodes(); process(session, elements, additionalInformation, rootPath); } private void processHiddenAttributes(Session session, Element element, HashMap additionalInformation) { // session.clearRegionsOfInterest(); NodeList elements = element.getChildNodes(); if (elements.getLength() > 0) { Set<String> attributes = new HashSet(); for (int i = 0; i < elements.getLength(); i++) { Node childNode = 
elements.item(i);
                if (childNode.getNodeName().equals(IGVSessionReader.SessionElement.ATTRIBUTE.getText())) {
                    attributes.add(((Element) childNode).getAttribute(IGVSessionReader.SessionAttribute.NAME.getText()));
                }
            }
            session.setHiddenAttributes(attributes);
        }
    }

    /**
     * For backward compatibility — older files list the visible attributes; the
     * hidden set is derived as the complement over all known attribute names.
     *
     * @param session
     * @param element
     * @param additionalInformation
     */
    private void processVisibleAttributes(Session session, Element element, HashMap additionalInformation) {

//        session.clearRegionsOfInterest();
        NodeList elements = element.getChildNodes();
        if (elements.getLength() > 0) {
            Set<String> visibleAttributes = new HashSet();
            for (int i = 0; i < elements.getLength(); i++) {
                Node childNode = elements.item(i);
                if (childNode.getNodeName().equals(IGVSessionReader.SessionElement.VISIBLE_ATTRIBUTE.getText())) {
                    visibleAttributes.add(((Element) childNode).getAttribute(IGVSessionReader.SessionAttribute.NAME.getText()));
                }
            }

            final List<String> attributeNames = AttributeManager.getInstance().getAttributeNames();
            Set<String> hiddenAttributes = new HashSet<String>(attributeNames);
            hiddenAttributes.removeAll(visibleAttributes);
            session.setHiddenAttributes(hiddenAttributes);

        }
    }

    /**
     * Processes a GeneList element: registers the whitespace-separated gene
     * names as a named gene list and makes it current for the session.
     */
    private void processGeneList(Session session, Element element, HashMap additionalInformation) {

        String name = getAttribute(element, SessionAttribute.NAME.getText());

        String txt = element.getTextContent();
        String[] genes = txt.trim().split("\\s+");
        GeneList gl = new GeneList(name, Arrays.asList(genes));
        GeneListManager.getInstance().addGeneList(gl);
        session.setCurrentGeneList(gl);

        // Adjust frames
        processFrames(element);
    }

    /**
     * Restores the reference frames (one per Frame child element), reordering
     * the existing frames to match the session and jumping each to its saved
     * locus.
     */
    private void processFrames(Element element) {
        NodeList elements = element.getChildNodes();
        if (elements.getLength() > 0) {
            Map<String, ReferenceFrame> frames = new HashMap();
            for (ReferenceFrame f : FrameManager.getFrames()) {
                frames.put(f.getName(), f);
            }
            List<ReferenceFrame> reorderedFrames = new ArrayList();

            for (int i = 0; i < elements.getLength(); i++) {
                Node childNode = elements.item(i);
                if (childNode.getNodeName().equalsIgnoreCase(SessionElement.FRAME.getText())) {
                    String frameName = getAttribute((Element) childNode, SessionAttribute.NAME.getText());

                    ReferenceFrame f = frames.get(frameName);
                    if (f != null) {
                        reorderedFrames.add(f);
                        try {
                            String chr = getAttribute((Element) childNode, SessionAttribute.CHR.getText());
                            // Positions may be written with thousands separators; strip commas.
                            final String startString =
                                    getAttribute((Element) childNode, SessionAttribute.START.getText()).replace(",", "");
                            final String endString =
                                    getAttribute((Element) childNode, SessionAttribute.END.getText()).replace(",", "");
                            int start = ParsingUtils.parseInt(startString);
                            int end = ParsingUtils.parseInt(endString);
                            org.broad.igv.feature.Locus locus = new Locus(chr, start, end);
                            f.jumpTo(locus);
                        } catch (NumberFormatException e) {
                            // NOTE(review): printStackTrace instead of log.error in the original.
                            e.printStackTrace();  //To change body of catch statement use File | Settings | File Templates.
                        }
                    }
                }
            }

            if (reorderedFrames.size() > 0) {
                FrameManager.setFrames(reorderedFrames);
            }
        }
        IGV.getInstance().resetFrames();
    }

    /**
     * Processes a Filter element: builds the TrackFilter, recurses into the
     * FilterElement children (which add themselves via additionalInformation),
     * then applies the match-mode and show-all settings.
     */
    private void processFilter(Session session, Element element, HashMap additionalInformation, String rootPath) {

        String match = getAttribute(element, SessionAttribute.FILTER_MATCH.getText());
        String showAllTracks = getAttribute(element, SessionAttribute.FILTER_SHOW_ALL_TRACKS.getText());

        String filterName = getAttribute(element, SessionAttribute.NAME.getText());
        TrackFilter filter = new TrackFilter(filterName, null);
        // Hand the filter to child FilterElement handlers via the scratch map.
        additionalInformation.put(SessionElement.FILTER, filter);

        NodeList elements = element.getChildNodes();
        process(session, elements, additionalInformation, rootPath);

        // Save the filter
        session.setFilter(filter);

        // Set filter properties
        if ("all".equalsIgnoreCase(match)) {
            IGV.getInstance().setFilterMatchAll(true);
        } else if ("any".equalsIgnoreCase(match)) {
            IGV.getInstance().setFilterMatchAll(false);
        }

        if ("true".equalsIgnoreCase(showAllTracks)) {
            IGV.getInstance().setFilterShowAllTracks(true);
        } else {
            IGV.getInstance().setFilterShowAllTracks(false);
        }
    }

    /**
     * Processes a single FilterElement child of a Filter element, adding the
     * parsed predicate to the filter stored in additionalInformation.
     */
    private void
processFilterElement(Session session, Element element,
                                      HashMap additionalInformation, String rootPath) {

        // The enclosing Filter handler stashed the TrackFilter in the scratch map.
        TrackFilter filter = (TrackFilter) additionalInformation.get(SessionElement.FILTER);
        String item = getAttribute(element, SessionAttribute.ITEM.getText());
        String operator = getAttribute(element, SessionAttribute.OPERATOR.getText());
        String value = getAttribute(element, SessionAttribute.VALUE.getText());
        String booleanOperator = getAttribute(element, SessionAttribute.BOOLEAN_OPERATOR.getText());

        Operator opEnum = CollUtils.findValueOf(Operator.class, operator);
        // NOTE(review): NPEs if the booleanOperator attribute is absent — presumably
        // it is always written by the session writer; verify against old files.
        BooleanOperator boolEnum = BooleanOperator.valueOf(booleanOperator.toUpperCase());
        TrackFilterElement trackFilterElement = new TrackFilterElement(filter, item,
                opEnum, value, boolEnum);
        filter.add(trackFilterElement);

        NodeList elements = element.getChildNodes();
        process(session, elements, additionalInformation, rootPath);
    }

    /**
     * A counter to generate unique panel names.  Needed for backward-compatibility of old session files.
     */
    private int panelCounter = 1;

    /**
     * Processes a Panel (or DataTracks/FeatureTracks) element: processes each
     * child Track into the panel, resolves cross-track references, then adds
     * the collected tracks to the named TrackPanel.
     */
    private void processPanel(Session session, Element element, HashMap additionalInformation, String rootPath) {
        panelElementPresent = true;
        String panelName = element.getAttribute("name");
        if (panelName == null) {
            panelName = "Panel" + panelCounter++;
        }

        List<Track> panelTracks = new ArrayList();
        NodeList elements = element.getChildNodes();
        for (int i = 0; i < elements.getLength(); i++) {
            Node childNode = elements.item(i);
            if (childNode.getNodeName().equalsIgnoreCase(SessionElement.DATA_TRACK.getText()) ||  // Is this a track?
                    childNode.getNodeName().equalsIgnoreCase(SessionElement.TRACK.getText())) {

                List<Track> tracks = processTrack(session, (Element) childNode, additionalInformation, rootPath);
                if (tracks != null) {
                    panelTracks.addAll(tracks);
                }
            } else {
                process(session, childNode, additionalInformation, rootPath);
            }
        }

        //We make a second pass through, resolving references to tracks which may have been processed afterwards.
        //For instance if Track 2 referenced Track 4
        //TODO Make this less hacky
        for (Track track: panelTracks){
            if(track instanceof FeatureTrack){
                FeatureTrack featureTrack = (FeatureTrack) track;
                featureTrack.updateTrackReferences(panelTracks);
            }else if(track instanceof DataSourceTrack){
                DataSourceTrack dataTrack = (DataSourceTrack) track;
                dataTrack.updateTrackReferences(panelTracks);
            }
        }

        TrackPanel panel = IGV.getInstance().getTrackPanel(panelName);
        panel.addTracks(panelTracks);
    }

    /**
     * Processes a PanelLayout element, restoring the divider positions between
     * track panels from the comma-separated "dividerFractions" attribute.
     */
    private void processPanelLayout(Session session, Element element, HashMap additionalInformation) {

        String nodeName = element.getNodeName();
        String panelName = nodeName;

        NamedNodeMap tNodeMap = element.getAttributes();
        for (int i = 0; i < tNodeMap.getLength(); i++) {
            Node node = tNodeMap.item(i);
            String name = node.getNodeName();
            if (name.equals("dividerFractions")) {
                String value = node.getNodeValue();
                String[] tokens = value.split(",");
                double[] divs = new double[tokens.length];
                try {
                    for (int j = 0; j < tokens.length; j++) {
                        divs[j] = Double.parseDouble(tokens[j]);
                    }
                    session.setDividerFractions(divs);
                } catch (NumberFormatException e) {
                    log.error("Error parsing divider locations", e);
                }
            }
        }
    }

    /**
     * Process a track element.  This should return a single track, but could return multiple tracks since the
     * uniqueness of the track id is not enforced.
     *
     * @param session
     * @param element
     * @param additionalInformation
     * @return the tracks matching this element's id, or null if none were found
     *         and the track could not be reconstructed
     */
    private List<Track> processTrack(Session session, Element element, HashMap additionalInformation, String rootPath) {

        String id = getAttribute(element, SessionAttribute.ID.getText());

        // Get matching tracks.
        List<Track> matchedTracks = allTracks.get(id);

        if (matchedTracks == null) {
            log.info("Warning.  No tracks were found with id: " + id + " in session file");

            String className = getAttribute(element, "clazz");

            //We try anyway, some tracks can be reconstructed without a resource element
            //They must have a source, though
            try{
                if(className != null && ( className.contains("FeatureTrack") || className.contains("DataSourceTrack") )
                        && element.hasChildNodes()){
                    Class clazz = Class.forName(className);
                    Unmarshaller u = getJAXBContext().createUnmarshaller();
                    Track track = unmarshalTrackElement(u, element, null, clazz);
                    matchedTracks = new ArrayList<Track>(Arrays.asList(track));
                    allTracks.put(track.getId(), matchedTracks);
                }
            } catch (JAXBException e) {
                //pass
            } catch (ClassNotFoundException e) {
                //pass
            }

        } else {
            try {
                Unmarshaller u = getJAXBContext().createUnmarshaller();
                for (final Track track : matchedTracks) {

                    // Special case for sequence & gene tracks,  they need to be removed before being placed.
                    if (igv != null && version >= 4 && (track == geneTrack || track == seqTrack)) {
                        igv.removeTracks(Arrays.asList(track));
                    }

                    // Apply this element's saved attributes onto the already-loaded track.
                    unmarshalTrackElement(u, element, (AbstractTrack) track);
                }
            } catch (JAXBException e) {
                throw new RuntimeException(e);
            }

            // The track has been claimed by a panel; it is no longer a leftover.
            leftoverTrackDictionary.remove(id);
        }

        NodeList elements = element.getChildNodes();
        process(session, elements, additionalInformation, rootPath);

        return matchedTracks;
    }

    private static void setNextTrack(AbstractTrack track){
        nextTrack = track;
    }

    /**
     * Used for unmarshalling track; JAXB needs a static no-arg factory method
     * @return the track currently being unmarshalled
     */
    public static AbstractTrack getNextTrack(){
        return nextTrack;
    }

    /**
     * Unmarshal element into specified class
     * @param u
     * @param e
     * @param track existing track to unmarshal into; its runtime class is used
     * @return
     * @throws JAXBException
     */
    protected Track unmarshalTrackElement(Unmarshaller u, Element e, AbstractTrack track) throws JAXBException{
        return unmarshalTrackElement(u, e, track, track.getClass());
    }

    /**
     *
     * @param u
     * @param element
     * @param track The track into which to unmarshal.
Can be null if the relevant static factory method can handle * creating a new instance * @param trackClass Class of track to use for unmarshalling * @return The unmarshalled track * @throws JAXBException */ protected Track unmarshalTrackElement(Unmarshaller u, Element element, AbstractTrack track, Class trackClass) throws JAXBException{ AbstractTrack ut; synchronized (IGVSessionReader.class){ setNextTrack(track); ut = unmarshalTrack(u, element, trackClass, trackClass); } ut.restorePersistentState(element); return ut; } private void processColorScales(Session session, Element element, HashMap additionalInformation, String rootPath) { NodeList elements = element.getChildNodes(); process(session, elements, additionalInformation, rootPath); } private void processColorScale(Session session, Element element, HashMap additionalInformation, String rootPath) { String trackType = getAttribute(element, SessionAttribute.TYPE.getText()); String value = getAttribute(element, SessionAttribute.VALUE.getText()); setColorScaleSet(session, trackType, value); NodeList elements = element.getChildNodes(); process(session, elements, additionalInformation, rootPath); } private void processPreferences(Session session, Element element, HashMap additionalInformation) { NodeList elements = element.getChildNodes(); for (int i = 0; i < elements.getLength(); i++) { Node child = elements.item(i); if (child.getNodeName().equalsIgnoreCase(SessionElement.PROPERTY.getText())) { Element childNode = (Element) child; String name = getAttribute(childNode, SessionAttribute.NAME.getText()); String value = getAttribute(childNode, SessionAttribute.VALUE.getText()); session.setPreference(name, value); } } } /** * Process a list of session element nodes. 
* * @param session * @param elements */ private void process(Session session, NodeList elements, HashMap additionalInformation, String rootPath) { for (int i = 0; i < elements.getLength(); i++) { Node childNode = elements.item(i); process(session, childNode, additionalInformation, rootPath); } } public void setColorScaleSet(Session session, String type, String value) { if (type == null | value == null) { return; } TrackType trackType = CollUtils.valueOf(TrackType.class, type.toUpperCase(), TrackType.OTHER); // TODO -- refactor to remove instanceof / cast. Currently only ContinuousColorScale is handled ColorScale colorScale = ColorScaleFactory.getScaleFromString(value); if (colorScale instanceof ContinuousColorScale) { session.setColorScale(trackType, (ContinuousColorScale) colorScale); } // ColorScaleFactory.setColorScale(trackType, colorScale); } private String getAttribute(Element element, String key) { String value = element.getAttribute(key); if (value != null) { if (value.trim().equals("")) { value = null; } } return value; } private static JAXBContext jc = null; public static synchronized JAXBContext getJAXBContext() throws JAXBException { if(jc == null){ jc = JAXBContext.newInstance(registeredClasses.toArray(new Class[0]), new HashMap<String, Object>()); } return jc; } /** * Register this class with JAXB, so it can be saved and restored to a session. * The class must conform the JAXBs requirements (e.g. no-arg constructor or factory method) * @param clazz */ //@api public static synchronized void registerClass(Class clazz){ registeredClasses.add(clazz); jc = null; } /** * Unmarshal node. We first attempt to unmarshal into the specified {@code clazz} * if that fails, we try the superclass, and so on up. * * @param node * @param unmarshalClass Class to which to use for unmarshalling * @param firstClass The first class used for invocation. 
For helpful error message only * * @return */ public static AbstractTrack unmarshalTrack(Unmarshaller u, Node node, Class unmarshalClass, Class firstClass) throws JAXBException{ if(unmarshalClass == null || unmarshalClass.equals(Object.class)){ throw new JAXBException(firstClass + " and none of its superclasses are known"); } if(AbstractTrack.knownUnknownTrackClasses.contains(unmarshalClass)){ return unmarshalTrack(u, node, firstClass, unmarshalClass.getSuperclass()); } JAXBElement el; try { el = u.unmarshal(node, unmarshalClass); } catch (JAXBException e) { AbstractTrack.knownUnknownTrackClasses.add(unmarshalClass); return unmarshalTrack(u, node, firstClass, unmarshalClass.getSuperclass()); } return (AbstractTrack) el.getValue(); } /** * Uses #sessionReader to lookup matching tracks by id, or * searches allTracks if sessionReader is null * @param trackId * @param allTracks * @return */ public static Track getMatchingTrack(String trackId, List<Track> allTracks){ IGVSessionReader reader = currentReader.get(); List<Track> matchingTracks; if(reader != null){ matchingTracks = reader.getTracksById(trackId); }else{ if(allTracks == null) throw new IllegalStateException("No session reader and no tracks to search to resolve Track references"); matchingTracks = new ArrayList<Track>(); for(Track track: allTracks){ if(trackId.equals(track.getId())){ matchingTracks.add(track); break; } } } if (matchingTracks == null || matchingTracks.size() == 0) { //Either the session file is corrupted, or we just haven't loaded the relevant track yet return null; }else if (matchingTracks.size() >= 2) { log.debug("Found multiple tracks with id " + trackId + ", using the first"); } return matchingTracks.get(0); } }
package org.compevol.ssgd; import dr.evolution.coalescent.PiecewiseConstantPopulation; import dr.evomodel.coalescent.PiecewisePopulationModel; import dr.evomodel.substmodel.FrequencyModel; import dr.evomodel.substmodel.HKY; import dr.inference.model.AbstractModel; import dr.inference.model.Model; import dr.inference.model.Variable; import dr.xml.AbstractXMLObjectParser; import dr.xml.ElementRule; import dr.xml.XMLObject; import dr.xml.XMLObjectParser; import dr.xml.XMLParseException; import dr.xml.XMLSyntaxRule; /** * @author Arman Bilge */ public class HKYSkylineIntegrator extends AbstractModel implements Integrator { private final HKY hky; private final FrequencyModel frequencyModel; private final PiecewisePopulationModel populationModel; private boolean betaKnown = false; private double beta; public HKYSkylineIntegrator(final HKY hky, final PiecewisePopulationModel populationModel) { super("HKYSkylineIntegrator"); this.hky = hky; addModel(hky); frequencyModel = hky.getFrequencyModel(); this.populationModel = populationModel; addModel(populationModel); } private void calculateBeta() { final double kappa = hky.getKappa(); final double freqA = frequencyModel.getFrequency(0); final double freqC = frequencyModel.getFrequency(1); final double freqG = frequencyModel.getFrequency(2); final double freqT = frequencyModel.getFrequency(3); final double freqR = freqA + freqG; final double freqY = freqC + freqT; beta = 1.0 / (2 * (freqR * freqY + kappa * (freqA * freqG + freqC * freqT))); betaKnown = true; } @Override public double integratedProbability(final int iState, final double iTime, final int jState, final double jTime, final double mu) { if (!betaKnown) calculateBeta(); final double tau = Math.abs(iTime - jTime); final H H; if (iState % 2 == jState % 2) { // transition H = new H() { @Override public double apply(final double t, final double N) { return transitionH(t, N, iState, jState, tau, mu); } }; } else { // transversion H = new H() { @Override public double 
apply(final double t, final double N) { return transversionH(t, N, jState, tau, mu); } }; } return integrateIntervals(H, Math.max(iTime, jTime)); } private double integrateIntervals(final H H, final double start) { final PiecewiseConstantPopulation df = (PiecewiseConstantPopulation) populationModel.getDemographicFunction(); final int m = df.getNumArguments(); int k; double current; for (k = 0, current = 0; current < start; current += df.getEpochDuration(k++)); double previous = start; double g = 1.0; double integratedP = 0; for (int i = k; i < m; ++i) { final double N = df.getEpochDemographic(i - 1); integratedP += g * Math.exp(previous / N) * (H.apply(current, N) - H.apply(previous, N)); g *= Math.exp(-(current - previous) / N); previous = current; current += df.getEpochDuration(i); } final double N = df.getEpochDemographic(m - 1); integratedP -= g * Math.exp(previous / N) * H.apply(previous, N); return integratedP; } private interface H { double apply(double t, double N); } private double transitionH(final double t, final double N, final int i, final int j, final double tau, final double mu) { final int ihat = i + 2 % 4; final int pm = i == j ? 
-1 : 1; final double betamu = beta * mu; final double twobetamuN = 2 * betamu * N; final double mbetamutwotptau = -betamu * (2*t + tau); final double freqi = frequencyModel.getFrequency(i); final double freqj = frequencyModel.getFrequency(j); final double freq = freqi + frequencyModel.getFrequency(ihat); final double freqkappam1p1 = freq * (hky.getKappa() - 1) + 1; return Math.exp(-t/N) * (pm * freqi * Math.exp(mbetamutwotptau * freqkappam1p1) / (twobetamuN * freqkappam1p1 + 1) - freqj * ((1 - freq) * Math.exp(mbetamutwotptau) / (twobetamuN + 1) + freq)) / freq; } private double transversionH(final double t, final double N, final int j, final double tau, final double mu) { final double betamu = beta * mu; return frequencyModel.getFrequency(j) * Math.exp(-t/N) * (Math.exp(-betamu * (2*t + tau)) / (2 * betamu * N + 1) - 1); } @Override protected void handleModelChangedEvent(Model model, Object o, int i) { betaKnown = false; } @Override protected void handleVariableChangedEvent(Variable variable, int i, Variable.ChangeType changeType) { betaKnown = false; } @Override protected void storeState() { // Nothing to do } @Override protected void restoreState() { betaKnown = false; } @Override protected void acceptState() { // Nothing to do } public static final XMLObjectParser PARSER = new AbstractXMLObjectParser() { @Override public Object parseXMLObject(final XMLObject xo) throws XMLParseException { return new HKYSkylineIntegrator((HKY) xo.getChild(HKY.class), (PiecewisePopulationModel) xo.getChild(PiecewisePopulationModel.class)); } @Override public XMLSyntaxRule[] getSyntaxRules() { return rules; } private final XMLSyntaxRule[] rules = {new ElementRule(HKY.class), new ElementRule(PiecewisePopulationModel.class)}; @Override public String getParserDescription() { return "An integrator that supports the HKY and skyline models."; } @Override public Class getReturnType() { return HKYSkylineIntegrator.class; } @Override public String getParserName() { return 
"hkySkylineIntegrator"; } }; }
package org.bluray.net; import org.davic.net.Locator; import org.davic.net.InvalidLocatorException; import org.videolan.BDJUtil; import java.util.ArrayList; import java.util.Arrays; import java.util.InputMismatchException; import java.util.Scanner; import java.util.logging.Logger; public class BDLocator extends Locator { public BDLocator(String url) throws InvalidLocatorException { super(url); logger.info("Parsing locator " + url); if (!url.startsWith("bd: throw new InvalidLocatorException("Locator must start with bd:// (" + url + ")"); Scanner scan = new Scanner(url.substring(5)); scan.useDelimiter("[\\.:&]"); try { while (scan.hasNext()) { String name = scan.next(); if (scan.hasNext()) { if (name.equals("JAR")) { scan.useDelimiter("[:]"); String temp = scan.next(); jar = Integer.parseInt(temp.substring(0, 5)); isJarItem = true; if (temp.length() > 5) pathSegments = temp.substring(5); } else if (name.equals("SOUND")) { sound = scan.nextInt(16); isSoundItem = true; } else if (name.equals("PLAYLIST")) { playList = scan.nextInt(); isPlayItem = true; parsePlaylist(scan); } else if (name.length() == 32) disc = name; else if (name.length() <= 4) titleNum = Integer.parseInt(name, 16); else throw new InvalidLocatorException("Invalid identifier (" + url + ")"); } else { throw new InvalidLocatorException("Missing value (" + url + ")"); } } } catch (InputMismatchException ex) { throw new InvalidLocatorException("Failed to parse value (" + url + ")"); } catch (NumberFormatException ex) { throw new InvalidLocatorException("Failed to parse value (" + url + ")"); } } public BDLocator(String disc, int titleNum, int playList) throws InvalidLocatorException { super(""); this.disc = disc; this.titleNum = titleNum; this.playList = playList; } public BDLocator(String disc, int titleNum, int jar, int sound) throws InvalidLocatorException { super(""); this.disc = disc; this.titleNum = titleNum; this.jar = jar; this.sound = sound; } public BDLocator(String disc, int titleNum, int 
playList, int playItem, int mark, String[] componentTags) throws InvalidLocatorException { super(""); this.disc = disc; this.titleNum = titleNum; this.playList = playList; this.playItem = playItem; this.mark = mark; this.componentTags = componentTags.clone(); for (String comp : componentTags) { if (comp.startsWith("A1:")) primaryAudioNum = Integer.parseInt(comp.substring(3)); else if (comp.startsWith("A2:")) secondaryAudioNum = Integer.parseInt(comp.substring(3)); else if (comp.startsWith("V1:")) primaryVideoNum = Integer.parseInt(comp.substring(3)); else if (comp.startsWith("V2:")) secondaryVideoNum = Integer.parseInt(comp.substring(3)); else if (comp.startsWith("P:")) textStreamNum = Integer.parseInt(comp.substring(2)); else throw new InvalidLocatorException("Invalid component"); } } public String[] getComponentTags() { return componentTags; } public String getPathSegments() { return pathSegments; } public String getDiscId() { return disc; } public int getPrimaryAudioStreamNumber() { return primaryAudioNum; } public int getSecondaryAudioStreamNumber() { return secondaryAudioNum; } public int getPrimaryVideoStreamNumber() { return primaryVideoNum; } public int getSecondaryVideoStreamNumber() { return secondaryVideoNum; } public int getJarFileId() { return jar; } public int getMarkId() { return mark; } public int getPGTextStreamNumber() { return textStreamNum; } public int getPlayItemId() { return playItem; } public int getPlayListId() { return playList; } public int getSoundId() { return sound; } public int getTitleNumber() { return titleNum; } public boolean isJarFileItem() { return isJarItem; } public boolean isPlayListItem() { return isPlayItem; } public boolean isSoundItem() { return isSoundItem; } public String toExternalForm() { StringBuilder str = new StringBuilder(); str.append("bd: if (disc != null || disc != "") str.append(disc + "."); if (titleNum != -1) str.append(Integer.toString(titleNum, 16) + "."); if (isJarItem) { str.append("JAR:"); 
str.append(BDJUtil.makeFiveDigitStr(jar)); if (pathSegments != null) str.append(pathSegments); else if (sound != -1) { str.append(".SOUND:"); str.append(Integer.toString(sound, 16)); } } else if (isPlayItem) { str.append("PLAYLIST:"); str.append(BDJUtil.makeFiveDigitStr(playList)); if (playItem != -1) { str.append(".ITEM:"); str.append(BDJUtil.makeFiveDigitStr(playItem)); } if (mark != -1) { str.append(".MARK:"); str.append(BDJUtil.makeFiveDigitStr(mark)); } if (componentTags != null) { for (int i = 0; i < componentTags.length; i++) { if (i != 0) str.append("&"); str.append(componentTags[i]); } } } else if (isSoundItem) { str.append("SOUND:"); str.append(Integer.toString(sound, 16)); } return str.toString(); } public String toString() { return toExternalForm(); } public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; BDLocator other = (BDLocator) obj; if (!Arrays.equals(componentTags, other.componentTags)) return false; if (disc == null) { if (other.disc != null) return false; } else if (!disc.equals(other.disc)) return false; if (isJarItem != other.isJarItem) return false; if (isPlayItem != other.isPlayItem) return false; if (isSoundItem != other.isSoundItem) return false; if (jar != other.jar) return false; if (mark != other.mark) return false; if (pathSegments == null) { if (other.pathSegments != null) return false; } else if (!pathSegments.equals(other.pathSegments)) return false; if (playItem != other.playItem) return false; if (playList != other.playList) return false; if (primaryAudioNum != other.primaryAudioNum) return false; if (primaryVideoNum != other.primaryVideoNum) return false; if (secondaryAudioNum != other.secondaryAudioNum) return false; if (secondaryVideoNum != other.secondaryVideoNum) return false; if (sound != other.sound) return false; if (textStreamNum != other.textStreamNum) return false; if (titleNum != other.titleNum) return false; return true; } 
public int hashCode() { final int prime = 31; int result = 1; result = prime * result + Arrays.hashCode(componentTags); result = prime * result + ((disc == null) ? 0 : disc.hashCode()); result = prime * result + (isJarItem ? 1231 : 1237); result = prime * result + (isPlayItem ? 1231 : 1237); result = prime * result + (isSoundItem ? 1231 : 1237); result = prime * result + jar; result = prime * result + mark; result = prime * result + ((pathSegments == null) ? 0 : pathSegments.hashCode()); result = prime * result + playItem; result = prime * result + playList; result = prime * result + primaryAudioNum; result = prime * result + primaryVideoNum; result = prime * result + secondaryAudioNum; result = prime * result + secondaryVideoNum; result = prime * result + sound; result = prime * result + textStreamNum; result = prime * result + titleNum; return result; } private void parsePlaylist(Scanner scan) throws InvalidLocatorException { ArrayList<String> components = new ArrayList<String>(4); try { while (scan.hasNext()) { String name = scan.next(); if (scan.hasNext()) { if (name.equals("A1")) { String a1 = scan.next(); primaryAudioNum = Integer.parseInt(a1); components.add(name + ":" + a1); } else if (name.equals("A2")) { String a2 = scan.next(); secondaryAudioNum = Integer.parseInt(a2); components.add(name + ":" + a2); } else if (name.equals("V1")) { String v1 = scan.next(); primaryVideoNum = Integer.parseInt(v1); components.add(name + ":" + v1); } else if (name.equals("V2")) { String v2 = scan.next(); secondaryVideoNum = Integer.parseInt(v2); components.add(name + ":" + v2); } else if (name.equals("P")) { String p = scan.next(); textStreamNum = Integer.parseInt(p); components.add(name + ":" + p); } else if (name.equals("MARK")) mark = scan.nextInt(); else if (name.equals("ITEM")) playItem = scan.nextInt(); else if (name.length() == 32) disc = name; else if (name.length() <= 4) titleNum = Integer.parseInt(name, 16); else throw new InvalidLocatorException("Invalid 
identifier"); } else { throw new InvalidLocatorException("Missing value"); } } } catch (InputMismatchException ex) { throw new InvalidLocatorException("Failed to parse value"); } } public static final int NOTLOCATED = -1; protected String pathSegments = null; protected String[] componentTags = null; protected String disc = null; protected int primaryAudioNum = -1; protected int secondaryAudioNum = -1; protected int primaryVideoNum = -1; protected int secondaryVideoNum = -1; protected int textStreamNum = -1; protected int jar = -1; protected int mark = -1; protected int playItem = -1; protected int playList = -1; protected int sound = -1; protected int titleNum = -1; protected boolean isJarItem = false; protected boolean isPlayItem = false; protected boolean isSoundItem = false; private static final Logger logger = Logger.getLogger(BDLocator.class.getName()); }
package net.imglib2.util;

import java.util.ArrayList;
import java.util.HashSet;

import edu.jhu.ece.iacl.utility.ArrayUtil;
import ij.IJ;
import ij.ImagePlus;
import ij.process.ImageProcessor;
import net.imglib2.Cursor;
import net.imglib2.RandomAccess;
import net.imglib2.img.Img;
import net.imglib2.img.array.ArrayImgFactory;
import net.imglib2.img.imageplus.ImagePlusImg;
import net.imglib2.img.imageplus.ImagePlusImgFactory;
import net.imglib2.ops.operation.randomaccessibleinterval.unary.DistanceMap;
import net.imglib2.type.NativeType;
import net.imglib2.type.Type;
import net.imglib2.type.numeric.RealType;
import net.imglib2.type.numeric.integer.AbstractIntegerType;
import net.imglib2.type.numeric.real.FloatType;

/**
 * Static helpers for imglib2 {@link Img} instances: ImageJ I/O, signed distance
 * transforms, conversions to/from primitive Java arrays, synthetic test images,
 * thresholding, and simple integer-label manipulation.
 */
public class ImgUtil {

	/** Saves {@code img} to path {@code fn} via ImageJ; failures are printed, not thrown. */
	public static <T extends NativeType<T>> void write(Img<T> img, String fn) {
		try {
			ImagePlus ipdp = ImgUtil.toImagePlus( img );
			IJ.save(ipdp, fn);
		} catch(Exception e) {
			e.printStackTrace();
		}
	}

	/**
	 * Signed Euclidean distance transform of a binary mask: negative inside the
	 * mask, positive outside, shifted half a pixel toward the boundary. The
	 * result is accumulated into (and returned as) the "inside" distance image.
	 */
	public static <B extends AbstractIntegerType<B>> Img<FloatType> signedDistance(Img<B> mask){
		DistanceMap<B> dm = new DistanceMap<B>();
		ArrayImgFactory<FloatType> ffactory = new ArrayImgFactory<FloatType>();
		Img<FloatType> sdf  = ffactory.create(mask, new FloatType());
		Img<FloatType> sdfi = ffactory.create(mask, new FloatType());

		ImgUtil.printNumNonZero(mask);

		// inside region
		dm.compute(mask, sdf);

		// negate mask (non-zero where the mask was zero)
		Img<B> maskInv = ImgUtil.thresholdMap(mask, 1, false);
		ImgUtil.printNumNonZero(maskInv);

		// outside region
		dm.compute(maskInv, sdfi);

		Cursor<FloatType> cursor  = sdf.cursor();
		Cursor<FloatType> cursori = sdfi.cursor();
		while(cursor.hasNext()){
			cursor.fwd();
			cursori.fwd();

			// Distance map returns squared distance
			double inVal  = Math.sqrt(cursor.get().getRealDouble());
			double outVal = Math.sqrt(cursori.get().getRealDouble());
			if(inVal > 0){ inVal -= 0.5; }
			if(outVal > 0){ outVal -= 0.5; }

			inVal *= -1; // negate inside
			cursor.get().set( (float)(inVal + outVal) ); // add outside; overwrites sdf in place
		}
		return sdf;
	}

	/** Sets every pixel of {@code img} to {@code value}. */
	public static <T extends Type<T>> void fill(Img<T> img, T value){
		Cursor<T> cursor = img.cursor();
		while(cursor.hasNext()){
			cursor.next().set(value);
		}
	}

	/** Prints (to stdout) the number of non-zero pixels in {@code img}. */
	public static <S extends RealType<S>> void printNumNonZero(Img<S> img){
		Cursor<S> c = img.cursor();
		int num = 0;
		while(c.hasNext()){
			S val = c.next();
			if( val.getRealDouble() != 0 ){ num++; }
		}
		System.out.println(img + " nnz: " + num );
	}

	/** Copies a 3d image into a new int[x][y][z] array (values truncated to int). */
	public static <T extends RealType<T>> int[][][] toIntArray3d(Img<T> img){
		int[][][] out = new int[(int)img.dimension(0)][(int)img.dimension(1)][(int)img.dimension(2)];
		Cursor<T> cursor = img.localizingCursor();
		int[] pos = new int[3];
		while(cursor.hasNext()){
			cursor.fwd();
			cursor.localize(pos);
			out[pos[0]][pos[1]][pos[2]] = (int)(cursor.get().getRealDouble());
		}
		return out;
	}

	/** Boolean mask that is true where the image value is below 0.5 (i.e. negated). */
	public static <T extends RealType<T>> boolean[][][] toBooleanArray3dNeg(Img<T> img){
		boolean[][][] out = new boolean[(int)img.dimension(0)][(int)img.dimension(1)][(int)img.dimension(2)];
		Cursor<T> cursor = img.localizingCursor();
		int[] pos = new int[3];
		while(cursor.hasNext()){
			cursor.next();
			cursor.localize(pos);
			out[pos[0]][pos[1]][pos[2]] = (cursor.get().getRealDouble() < 0.5);
		}
		return out;
	}

	/** Copies a 3d image into a new float[x][y][z] array. */
	public static <T extends RealType<T>> float[][][] toFloatArray3d(Img<T> img){
		float[][][] out = new float[(int)img.dimension(0)][(int)img.dimension(1)][(int)img.dimension(2)];
		Cursor<T> cursor = img.localizingCursor();
		int[] pos = new int[3];
		while(cursor.hasNext()){
			cursor.next();
			cursor.localize(pos);
			out[pos[0]][pos[1]][pos[2]] = (cursor.get().getRealFloat());
		}
		return out;
	}

	/** Copies a 3d int array into {@code img} (dimensions assumed to match — TODO confirm). */
	public static <T extends RealType<T>> void copyToImg(Img<T> img, int[][][] in){
		Cursor<T> cursor = img.localizingCursor();
		int[] pos = new int[3];
		while(cursor.hasNext()){
			cursor.next();
			cursor.localize(pos);
			cursor.get().setReal( (double) in[pos[0]][pos[1]][pos[2]] );
		}
	}

	/** Copies a 3d float array into {@code img}. */
	public static <T extends RealType<T>> void copyToImg(Img<T> img, float[][][] in){
		Cursor<T> cursor = img.localizingCursor();
		int[] pos = new int[3];
		while(cursor.hasNext()){
			cursor.next();
			cursor.localize(pos);
			cursor.get().setReal( in[pos[0]][pos[1]][pos[2]]);
		}
	}

	/** Copies a 4d int array into {@code img}. */
	public static <T extends RealType<T>> void copyToImg(Img<T> img, int[][][][] in){
		Cursor<T> cursor = img.localizingCursor();
		int[] pos = new int[4];
		while(cursor.hasNext()){
			cursor.next();
			cursor.localize(pos);
			cursor.get().setReal( (double) in[pos[0]][pos[1]][pos[2]][pos[3]] );
		}
	}

	/** Copies a 4d float array into {@code img}. */
	public static <T extends RealType<T>> void copyToImg(Img<T> img, float[][][][] in){
		Cursor<T> cursor = img.localizingCursor();
		int[] pos = new int[4];
		while(cursor.hasNext()){
			cursor.next();
			cursor.localize(pos);
			cursor.get().setReal( in[pos[0]][pos[1]][pos[2]][pos[3]] );
		}
	}

	/**
	 * Synthetic test image: a sigmoidal "edge" of steepness {@code sigma} along
	 * the direction given by weight vector {@code w}, centered in the volume.
	 */
	public static <T extends NativeType<T> & RealType<T>> Img<T> createEdgeImg(int[] sz, double[] w, T t, double sigma){
		ArrayImgFactory<T> factory = new ArrayImgFactory<T>();
		Img<T> out = factory.create( sz, t);

		double[] ctr = ArrayUtil.toDouble(sz);
		ArrayUtil.divide(ctr, 2);
		System.out.println(" ctr = " + ArrayUtil.printArray(ctr));

		Cursor<T> c = out.localizingCursor();
		double[] pos = new double[3];
		while(c.hasNext()){
			T val = c.next();
			c.localize(pos);
			double[] pt  = ArrayUtil.subtract(pos, ctr);
			double[] res = ArrayUtil.multiply( w , pt);
			val.setReal( sigmoid( ArrayUtil.sum(res), sigma ) );
		}
		return out;
	}

	/** Logistic function 1 / (1 + exp(-sigma * x)). */
	public static double sigmoid(double x, double sigma) {
		return (1 / (1 + Math.exp( - sigma * x )));
	}

	/** Synthetic linear ramp: pixel value is the dot product of position and {@code w}. */
	public static <T extends NativeType<T> & RealType<T>> Img<T> createGradientImg(int[] sz, double[] w, T t){
		ArrayImgFactory<T> factory = new ArrayImgFactory<T>();
		Img<T> out = factory.create( sz, t);
		Cursor<T> c = out.localizingCursor();
		int[] pos = new int[3];
		while(c.hasNext()){
			T val = c.next();
			c.localize(pos);
			double[] res = ArrayUtil.multiply( w , ArrayUtil.toDouble(pos));
			val.setReal( ArrayUtil.sum(res) );
		}
		return out;
	}

	/** Ramp image whose value equals the x coordinate. */
	public static <T extends NativeType<T> & RealType<T>> Img<T> createGradientImgX(int width, int height, int depth, T t){
		ArrayImgFactory<T> factory = new ArrayImgFactory<T>();
		Img<T> out = factory.create(new int[]{width,height,depth}, t);
		Cursor<T> c = out.localizingCursor();
		int[] pos = new int[3];
		while(c.hasNext()){
			T val = c.next();
			c.localize(pos);
			val.setReal(pos[0]);
		}
		return out;
	}

	/** Ramp image whose value equals the y coordinate. */
	public static <T extends NativeType<T> & RealType<T>> Img<T> createGradientImgY(int width, int height, int depth, T t){
		ArrayImgFactory<T> factory = new ArrayImgFactory<T>();
		Img<T> out = factory.create(new int[]{width,height,depth}, t);
		Cursor<T> c = out.localizingCursor();
		int[] pos = new int[3];
		while(c.hasNext()){
			T val = c.next();
			c.localize(pos);
			val.setReal(pos[1]);
		}
		return out;
	}

	/** Ramp image whose value equals the z coordinate. */
	public static <T extends NativeType<T> & RealType<T>> Img<T> createGradientImgZ(int width, int height, int depth, T t){
		ArrayImgFactory<T> factory = new ArrayImgFactory<T>();
		Img<T> out = factory.create(new int[]{width,height,depth}, t);
		Cursor<T> c = out.localizingCursor();
		int[] pos = new int[3];
		while(c.hasNext()){
			T val = c.next();
			c.localize(pos);
			val.setReal(pos[2]);
		}
		return out;
	}

	/**
	 * Keeps the original value where it passes the threshold, zero elsewhere
	 * (output image is zero-initialized by the factory).
	 */
	public static < T extends RealType< T >> Img<T> threshold(Img<T> img, double thresh, boolean greaterThan){
		Img<T> out = img.factory().create(img, img.firstElement());
		RandomAccess<T> ra = out.randomAccess();
		Cursor<T> c = img.cursor();
		while(c.hasNext()){
			T t = c.next();
			ra.setPosition(c);
			if( greaterThan && t.getRealDouble() > thresh) {
				ra.get().set(t);
			} else if( !greaterThan && t.getRealDouble() < thresh ) {
				ra.get().set(t);
			}
		}
		return out;
	}

	/** Binary version of {@link #threshold}: writes one where the test passes, zero elsewhere. */
	public static < T extends RealType< T >> Img<T> thresholdMap(Img<T> img, double thresh, boolean greaterThan){
		Img<T> out = img.factory().create(img, img.firstElement());
		RandomAccess<T> ra = out.randomAccess();
		Cursor<T> c = img.cursor();
		while(c.hasNext()){
			T t = c.next();
			ra.setPosition(c);
			if( greaterThan && t.getRealDouble() > thresh) {
				ra.get().setOne();
			} else if( !greaterThan && t.getRealDouble() < thresh ) {
				ra.get().setOne();
			}
		}
		return out;
	}

	/** Deep-copies {@code img} into a new ImageJ-backed image of the same type. */
	public static < T extends NativeType< T >> ImagePlusImg<T, ?> copyToImagePlus(Img<T> img) throws Exception {
		ImagePlusImgFactory<T> factory = new ImagePlusImgFactory<T>();
		ImagePlusImg<T, ?> ipImg = factory.create(img, img.firstElement());
		System.out.println("create image plus of type: " + img.firstElement().getClass());
		System.out.println("result is of type: " + ipImg.firstElement().getClass());
		Cursor<T> c_in = img.cursor();
		RandomAccess<T> ra = ipImg.randomAccess();
		while(c_in.hasNext()){
			c_in.fwd();
			ra.setPosition(c_in);
			ra.get().set(c_in.get());
		}
		return ipImg;
	}

	/** Converts {@code img} to an ImageJ {@link ImagePlus} (via a deep copy). */
	public static < T extends NativeType< T >> ImagePlus toImagePlus(Img<T> img) throws Exception{
		return copyToImagePlus(img).getImagePlus();
	}

	/** Distinct integer values present in a label image, in first-seen order. */
	public static <L extends AbstractIntegerType<L>> ArrayList<Integer> uniqueInt( Img<L> img ){
		ArrayList<Integer> set = new ArrayList<Integer>();
		Cursor<L> cursor = img.cursor();
		while(cursor.hasNext()){
			int l = cursor.next().getInteger();
			if(!set.contains(l)){
				set.add(l);
			}
		}
		return set;
	}

	/** Distinct float pixel values of an ImageJ image. */
	public static HashSet<Float> unique( ImagePlus img ){
		HashSet<Float> set = new HashSet<Float>();
		ImageProcessor ip = img.getProcessor();
		int N = ip.getPixelCount();
		for (int i=0; i<N; i++) {
			set.add( ip.getf(i) );
		}
		return set;
	}

	/**
	 * NOTE(review): imglib2 cursors typically return the same proxy Type object
	 * from next(), so this set may end up holding one repeatedly-mutated
	 * instance rather than distinct values — verify before relying on it
	 * (uniqueInt above is the safe variant).
	 */
	public static <L extends AbstractIntegerType<L>> HashSet<L> unique2( Img<L> img ){
		HashSet<L> set = new HashSet<L>();
		Cursor<L> cursor = img.cursor();
		while(cursor.hasNext()){
			set.add(cursor.next());
		}
		return set;
	}

	/**
	 * Relabels the image in place: each pixel's label is replaced by the index
	 * of the row of {@code spec} that contains it. With {@code strict}, labels
	 * not found in spec become -1; otherwise they are left unchanged.
	 */
	public static <L extends AbstractIntegerType<L>> void combineValues( Img<L> img, int[][] spec, boolean strict){
		removeDuplicates(spec);
		Cursor<L> cursor = img.cursor();
		while(cursor.hasNext()){
			cursor.fwd();
			int currLab = cursor.get().getInteger(); // current label
			if(strict){
				cursor.get().setInteger( searchStrict( spec, currLab) );
			}else{
				cursor.get().setInteger( search( spec, currLab) );
			}
		}
	}

	/** Replaces labels in place using spec rows of the form {from, to}. */
	public static <L extends AbstractIntegerType<L>> void replaceValues( Img<L> img, int[][] spec ){
		removeDuplicates(spec);
		Cursor<L> cursor = img.cursor();
		while(cursor.hasNext()){
			cursor.fwd();
			int currLab = cursor.get().getInteger(); // current label
			cursor.get().setInteger( replace( spec, currLab) );
		}
	}

	/**
	 * returns the (first) row in array that contains val, or -1 when absent
	 * @param array
	 * @param val
	 * @return
	 */
	private static int searchStrict(int[][] array, int val){
		for(int i=0; i<array.length; i++){ for(int j=0; j<array[0].length; j++){
			if(array[i][j]==val){
				return i;
			}
		}}
		return -1;
	}

	/**
	 * returns the (first) row in array that contains val, or val itself when absent
	 * @param array
	 * @param val
	 * @return
	 */
	private static int search(int[][] array, int val){
		for(int i=0; i<array.length; i++){ for(int j=0; j<array[0].length; j++){
			if(array[i][j]==val){
				return i;
			}
		}}
		return val;
	}

	/**
	 * returns the replacement array[i][1] for the first row whose array[i][0]
	 * equals val, or val itself when no row matches
	 * @param array
	 * @param val
	 * @return
	 */
	private static int replace(int[][] array, int val){
		for(int i=0; i<array.length; i++){
			if(array[i][0]==val){
				return array[i][1];
			}
		}
		return val;
	}

	/**
	 * Overwrites every later duplicate of each value in {@code in} with
	 * Integer.MIN_VALUE (used as a "hole" sentinel by the search helpers).
	 */
	private static void removeDuplicates(int[][] in){
		int currentVal = 0;
		for(int i = 0; i < in.length;i++ ) for(int j = 0; j < in[i].length;j++){
			currentVal = in[i][j];
			if(currentVal > Integer.MIN_VALUE){
				for(int ii = i; ii < in.length;ii++ ) for(int jj = 0; jj < in[ii].length;jj++){
					if(!(i == ii && j == jj) && in[ii][jj] == in[i][j]){
						in[ii][jj] = Integer.MIN_VALUE;
					}
				}
			}
		}
	}

	// NOTE(review): despite the name, this counts values <= 0 (zero included).
	public <T extends RealType<T>> int numLessThanZero(Img<T> in) {
		int count = 0;
		Cursor<T> cursor = in.cursor();
		while (cursor.hasNext()) {
			if (cursor.next().getRealDouble() <= 0) {
				count++;
			}
		}
		return count;
	}

	// NOTE(review): same off-by-name as above — counts entries <= 0.
	public int numLessThanZero(float[][][] in) {
		int nx = in.length;
		int ny = in[0].length;
		int nz = in[0][0].length;
		int count = 0;
		for (int x = 0; x < nx; x++) for (int y = 0; y < ny; y++) for (int z = 0; z < nz; z++) {
			if (in[x][y][z] <= 0) {
				count++;
			}
		}
		return count;
	}
}
package org.dimagi.chatscreen; import javax.microedition.midlet.MIDlet; import javax.microedition.midlet.MIDletStateChangeException; import javax.microedition.lcdui.Display; import javax.microedition.lcdui.Form; import javax.microedition.lcdui.StringItem; import javax.microedition.lcdui.TextField; import org.dimagi.entity.Question; import de.esoco.ewt.UserInterfaceContext; import de.esoco.ewt.EWT; public class ChatScreenMIDlet extends MIDlet { protected void startApp() throws MIDletStateChangeException { ChatScreenForm chatScreenCanvas = new ChatScreenForm(); //myNewForm.insert(0, chatScreenCanvas); Display.getDisplay(this).setCurrent(chatScreenCanvas); UserInterfaceContext aContext = EWT.createUserInterfaceContext(Display.getDisplay(this)); } protected void pauseApp() { // TODO Auto-generated method stub } protected void destroyApp(boolean arg0) throws MIDletStateChangeException { // TODO Auto-generated method stub } }
package net.jsenko.jpct; import org.apache.maven.plugin.logging.Log; import org.eclipse.jgit.api.Git; import org.eclipse.jgit.api.errors.GitAPIException; import org.eclipse.jgit.lib.Config; import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.lib.ObjectReader; import org.eclipse.jgit.lib.Ref; import org.eclipse.jgit.revwalk.RevCommit; import org.eclipse.jgit.revwalk.RevWalk; import org.eclipse.jgit.treewalk.AbstractTreeIterator; import org.eclipse.jgit.treewalk.CanonicalTreeParser; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; import java.util.HashSet; import java.util.Set; import static org.eclipse.jgit.api.ListBranchCommand.ListMode.ALL; /** * Class for working with the project git repository. * * @author Jakub Senko */ public class GitTools { private final Log log; private Git git; /** * Tool for interacting with project's git repository * * @param gitDir .git directory * @throws IOException */ public GitTools(File gitDir, Log log) throws IOException { this.log = log; git = Git.open(gitDir); } /** * Look up the first ancestor directory that contains git repository and use it to create GitTools instance. Return null on * failure. * * @param dir directory in which to start the search * @return null when not found */ public static GitTools lookup(File dir, Log log) { do { log.debug("Searching for .git in " + dir + '.'); File gitDir = new File(dir, ".git"); if (gitDir.exists()) { log.debug("Found " + gitDir + '.'); try { return new GitTools(gitDir, log); } catch (IOException e) { return null; } } dir = dir.getParentFile(); } while (dir != null); return null; } public File getGitFolder() { return git.getRepository().getDirectory(); } /** * Parse ref string (e.g. "HEAD") and return an abstraction of sha-1 id it points to. 
*/ public Ref getRef(String ref) { try { return git.getRepository().getRef(ref); } catch (IOException e) { log.error(e.toString()); return null; } } /** * Get url of a remote repository or null on failure */ public String getRemoteUrl(String remoteName) { Config storedConfig = git.getRepository().getConfig(); return storedConfig.getString("remote", remoteName, "url"); } /** * Used for creating a patch */ private AbstractTreeIterator getTreeIterator(ObjectId objectId) throws IOException { final CanonicalTreeParser p = new CanonicalTreeParser(); final ObjectReader or = git.getRepository().newObjectReader(); p.reset(or, new RevWalk(git.getRepository()).parseTree(objectId)); return p; } public Set<Ref> getAllBranches() { try { return new HashSet<>(git.branchList().setListMode(ALL).call()); } catch (GitAPIException e) { log.debug("Error: Could not list branches.", e); return null; } } /** * Creates a patch file that represents a diff between two commits * * @param out file into which the patch data is written, if it does not exist causes error * @param from starting commit * @param to target commit (result of patching the starting commit) * @param includeStaged include changes staged for commit in the patch * @return false when an error occurs */ public boolean createPatch(File out, ObjectId from, ObjectId to, boolean includeStaged) { // TODO: remove file and use output stream as parameter try (OutputStream outputStream = new FileOutputStream(out, false)) { git.diff().setOutputStream(outputStream) .setCached(includeStaged) .setOldTree(getTreeIterator(from)) .setNewTree(getTreeIterator(to)) .call(); return true; } catch (IOException|GitAPIException e) { log.debug("Error when creating a patch from " + from + " to " + to + ", include staged = " + includeStaged + ": " + e); return false; } } /** * Given a start commit id and a list of branches, find the latest of all common ancestors between the start and the branch * tips latest = closest to commit, based on the commit 
time (assuming the value is correct, if not another of the ancestors * may be returned) * * @return null on failure */ public ObjectId findBase(ObjectId start, Set<ObjectId> others) { /* * commits in the start branch may load in multiple chunks in future because the branches should be relatively short */ Set<RevCommit> workingCommits = new HashSet<>(); Set<RevCommit> baseCandidates = new HashSet<>(); RevWalk walk = new RevWalk(git.getRepository()); try { walk.markStart(walk.parseCommit(start)); } catch (IOException e) { log.error(e.toString()); } for (RevCommit commit : walk) { workingCommits.add(commit); } walk.dispose(); log.debug(workingCommits.toString()); for (ObjectId other : others) { try { walk.markStart(walk.parseCommit(other)); } catch (IOException e) { log.debug(e.toString()); } for (RevCommit candidate : walk) { if (workingCommits.contains(candidate)) { baseCandidates.add(candidate); log.debug("Found base candidate " + candidate.getId() + " from " + other); break; } } walk.dispose(); } // we have candidates, find the latest so the path contains the least amount of data RevCommit result = null; for (RevCommit candidate : baseCandidates) { if (result == null || (candidate.getCommitTime() > result.getCommitTime())) result = candidate; } log.debug("baseCandidates = " + baseCandidates); return result; } /** * Wrapper around the other method. Takes specific parameters (from user) for convenience. * * @return null on failure */ public ObjectId findBase(String startBranchRef, String remoteName) { Ref start = getRef(startBranchRef); Set<ObjectId> branches = new HashSet<>(); for (Ref branch : getAllBranches()) { if (branch.getName().startsWith("refs/remotes/" + remoteName + "/") && !branch.isSymbolic()) branches.add(branch.getObjectId()); } if (branches.size() == 0) log.debug("Warning: No remote branches found for " + remoteName); return findBase(start.getObjectId(), branches); } }
package seedu.tasklist.ui; import java.util.logging.Logger; import javafx.application.Platform; import javafx.collections.ObservableList; import javafx.fxml.FXML; import javafx.scene.control.ListView; import javafx.scene.layout.AnchorPane; import javafx.scene.layout.Region; import seedu.tasklist.commons.core.LogsCenter; import seedu.tasklist.commons.events.ui.TaskPanelSelectionChangedEvent; import seedu.tasklist.model.task.ReadOnlyTask; public class UpcomingTaskPanel extends UiPart<Region> { private final Logger logger = LogsCenter.getLogger(UpcomingTaskPanel.class); private static final String FXML = "UpcomingTaskPanel.fxml"; @FXML private ListView<ReadOnlyTask> todayTaskListView; @FXML private ListView<ReadOnlyTask> tomorrowTaskListView; public UpcomingTaskPanel(AnchorPane upcomingTaskPlaceholder, ObservableList<ReadOnlyTask> todayTask, ObservableList<ReadOnlyTask> tomorrowTask) { super(FXML); setTodayListView(todayTask); setTomorrowListView(tomorrowTask); setEventHandlerForSelectionChangeEvent(); } private void setTodayListView(ObservableList<ReadOnlyTask> todayList) { todayTaskListView.setItems(todayList); } private void setTomorrowListView(ObservableList<ReadOnlyTask> tomorrowList) { tomorrowTaskListView.setItems(tomorrowList); } private void setEventHandlerForSelectionChangeEvent() { setEventHandlerForTodaySelectionChangeEvent(); setEventHandlerForTomorrowSelectionChangeEvent(); } private void setEventHandlerForTodaySelectionChangeEvent() { todayTaskListView.getSelectionModel().selectedItemProperty() .addListener((observable, oldValue, newValue) -> { if (newValue != null) { logger.fine("Selection in today list panel changed to : '" + newValue + "'"); raise(new TaskPanelSelectionChangedEvent(newValue)); } }); } private void setEventHandlerForTomorrowSelectionChangeEvent() { tomorrowTaskListView.getSelectionModel().selectedItemProperty() .addListener((observable, oldValue, newValue) -> { if (newValue != null) { logger.fine("Selection in tomorrow list panel 
changed to : '" + newValue + "'"); raise(new TaskPanelSelectionChangedEvent(newValue)); } }); } public void todayScrollTo(int index) { Platform.runLater(() -> { todayTaskListView.scrollTo(index); todayTaskListView.getSelectionModel().clearAndSelect(index); }); } public void tomorrowScrollTo(int index) { Platform.runLater(() -> { tomorrowTaskListView.scrollTo(index); tomorrowTaskListView.getSelectionModel().clearAndSelect(index); }); } }
package org.drpowell.grandannotator; import java.util.LinkedHashMap; import java.util.Map; import java.util.Map.Entry; public class VCFVariant implements GenomicVariant { private Map<String, Object> info; private String qual; public String format; private String [] row; private int start; // fixme should this be final? private int end; private static final Boolean INFO_FLAG_TRUE = new Boolean(true); public VCFVariant(String line) { this(line.split("\t")); } public VCFVariant(String [] row) { this.row = row; // FIXME - should defensive copy? start = Integer.parseInt(row[VCFFixedColumns.POS.ordinal()]); end = start + getRef().length() - 1; info = splitInfoField(row[VCFFixedColumns.INFO.ordinal()]); } public static Map<String, Object> splitInfoField(String info) { Map<String, Object> map = new LinkedHashMap<String, Object>(); if (".".equals(info)) { return map; } String [] entries = info.split(";"); for (String entry : entries) { String [] keyvalue = entry.split("=",2); if (map.containsKey(keyvalue[0])) { throw new RuntimeException("Unable to deal with duplicated keys in the INFO field of a VCF"); } if (keyvalue.length == 1) { map.put(keyvalue[0], INFO_FLAG_TRUE); } else { map.put(keyvalue[0], keyvalue[1]); } } return map; } public static String joinInfo(Map<String, Object> info) { if (info.size() == 0) { return "."; } StringBuilder sb = new StringBuilder(); for (Entry<String, Object> e: info.entrySet()) { if (e.getValue() == INFO_FLAG_TRUE) { sb.append(e.getKey()).append(";"); } else { sb.append(e.getKey()).append("=").append(e.getValue()).append(";"); } } return sb.substring(0, sb.length()-1); // no need for the last semicolon } public Map<String, Object> getInfo() { return info; } public Double getQual() { return Double.valueOf(qual); } private void updateInfo() { row[VCFFixedColumns.INFO.ordinal()] = joinInfo(info); } public String toString() { updateInfo(); StringBuilder sb = new StringBuilder(row[0]); for (int i = 1; i < row.length; i++) { 
sb.append("\t").append(row[i]); } return sb.toString(); } @Override public String getSequence() { return row[VCFFixedColumns.CHROM.ordinal()]; } @Override public int getStart() { return start; } @Override public int getEnd() { return end; } public String getID() { return row[VCFFixedColumns.ID.ordinal()]; } @Override public String getRef() { return row[VCFFixedColumns.REF.ordinal()]; } @Override public String getAlt() { return row[VCFFixedColumns.ALT.ordinal()]; } public String getFilter() { return row[VCFFixedColumns.FILTER.ordinal()]; } public String getFormat() { return row[VCFFixedColumns.FORMAT.ordinal()]; } public VCFVariant mergeID(String newID) { int idcol = VCFFixedColumns.ID.ordinal(); String oldID = row[idcol]; if (!".".equals(oldID)) { if (oldID.equals(newID)) { return this; } // should probably log this -- changing a previously-written rsID } row[idcol] = newID; return this; } }
package org.verdictdb.coordinator; import org.verdictdb.VerdictResultStream; import org.verdictdb.VerdictSingleResult; import java.math.BigDecimal; import java.util.ArrayList; import java.util.HashMap; import java.util.List; public class QueryResultAccuracyEstimatorFromDifference extends QueryResultAccuracyEstimator { Coordinator runningCoordinator; // the values of the result should be within [(1-valueError)*prevValue, (1+valueError)*prevValue] of the previous result. // Otherwise, it will fetch next result. Double valueError = 0.05; // the #row of the result should be within [(1-groupCountError)*prev#row, (1+groupCountError)*prev#row] of the previous result. // Otherwise, it will fetch next result. Double groupCountError = 0.05; // key is the values of non-aggregated column, value is the values of aggregated column HashMap<List<Object>, List<Object>> aggregatedMap = new HashMap<>(); QueryResultAccuracyEstimatorFromDifference(Coordinator runningCoordinator) { this.runningCoordinator = runningCoordinator; } public void setValueError(Double valueError) { this.valueError = valueError; } public void setGroupCountError(Double groupCountError) { this.groupCountError = groupCountError; } /** * fetch the answer from stream until converge * * @return the accurate answer */ @Override public boolean isLastResultAccurate() { if (!checkConverge()) { return false; } else { log.debug("Break condition has reached."); log.debug("Aborts an ExecutionContext: " + this); if (runningCoordinator != null) { Coordinator c = runningCoordinator; runningCoordinator = null; c.abort(); } return true; } } private boolean checkConverge() { HashMap<List<Object>, List<Object>> newAggregatedMap = new HashMap<>(); VerdictSingleResult currentAnswer = answers.get(answers.size() - 1); // query result without asyncAggregate if (currentAnswer.getMetaData()!=null && currentAnswer.getMetaData().isAggregate.isEmpty()) { return true; } while (currentAnswer.next()) { List<Object> aggregatedValues = new 
ArrayList<>(); List<Object> nonAggregatedValues = new ArrayList<>(); for (int i = 0; i < currentAnswer.getColumnCount(); i++) { if (currentAnswer.getMetaData().isAggregate.get(i)) { aggregatedValues.add(currentAnswer.getValue(i)); } else { nonAggregatedValues.add(currentAnswer.getValue(i)); } } newAggregatedMap.put(nonAggregatedValues, aggregatedValues); } aggregatedMap = newAggregatedMap; currentAnswer.rewind(); if (answers.size() == 1) { return false; } VerdictSingleResult previousAnswer = answers.get(answers.size() - 2); // check if #groupCountError is converged if (currentAnswer.getRowCount() < previousAnswer.getRowCount() * (1 - groupCountError) || currentAnswer.getRowCount() > previousAnswer.getRowCount() * (1 + groupCountError)) { return false; } Boolean isValueConverged = true; for (List<Object> nonAggregatedValues : newAggregatedMap.keySet()) { if (isValueConverged && aggregatedMap.containsKey(nonAggregatedValues)) { List<Object> prevAggregatedValues = aggregatedMap.get(nonAggregatedValues); List<Object> aggregatedValues = newAggregatedMap.get(nonAggregatedValues); for (Object v : aggregatedValues) { int idx = aggregatedValues.indexOf(v); double newValue, oldValue; if (v instanceof BigDecimal) { newValue = ((BigDecimal) v).doubleValue(); oldValue = ((BigDecimal) prevAggregatedValues.get(idx)).doubleValue(); } else { newValue = (double) v; oldValue = (double) prevAggregatedValues.get(idx); } if (newValue < oldValue * (1 - valueError) || newValue > oldValue * (1 + valueError)) { isValueConverged = false; break; } } } if (!isValueConverged) { break; } } return isValueConverged; } }
/* * Project : SimpleUtils * Author : bassem.zohdy * Email : bassem.zohdy@gmail.com */ package simple.utils.file; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.InputStream; import java.io.InputStreamReader; import java.io.Reader; import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Predicate; import java.util.zip.GZIPInputStream; // TODO: Auto-generated Javadoc /** * The Class FileLinesConsumer. * * @param <T> * the generic type */ public class FileLinesConsumer<T> { /** The filter. */ private final Predicate<String> filter; /** The converter. */ private final Function<String, T> converter; /** The consumer. */ private final Consumer<T> consumer; /** The buffer size. */ private int bufferSize = 1024 * 8; /** * Instantiates a new file lines consumer. * * @param filter * the filter * @param converter * the converter * @param consumer * the consumer */ private FileLinesConsumer(Predicate<String> filter, Function<String, T> converter, Consumer<T> consumer) { if (filter != null) this.filter = filter; else this.filter = (s) -> true; if (converter != null) this.converter = converter; else throw new IllegalStateException("Have to provide converter"); if (consumer != null) this.consumer = consumer; else throw new IllegalStateException("Have to provide consumer"); } /** * Of. * * @param <T> * the generic type * @param filter * the filter * @param converter * the converter * @param consumer * the consumer * @return the file lines consumer */ public static <T> FileLinesConsumer<T> of(Predicate<String> filter, Function<String, T> converter, Consumer<T> consumer) { return new FileLinesConsumer<T>(filter, converter, consumer); } /** * Load. 
* * @param file * the file */ public void load(File file) { try (InputStream inputStream = new FileInputStream(file); Reader reader = new InputStreamReader(inputStream); BufferedReader br = new BufferedReader(reader, bufferSize)) { br.lines().parallel() // parallel .filter(filter) // filter .map(converter) // converter .forEachOrdered(consumer); // consumer } catch (Exception e) { } } /** * Load gz. * * @param file * the file */ public void loadGZ(File file) { try (InputStream inputStream = new GZIPInputStream(new FileInputStream( file)); Reader reader = new InputStreamReader(inputStream); BufferedReader br = new BufferedReader(reader, bufferSize)) { br.lines().parallel() // parallel .filter(filter) // filter .map(converter) // converter .forEachOrdered(consumer); // consumer } catch (Exception e) { } } }
package org.ovirt.engine.ui.webadmin.section.main.view.tab.disk; import javax.inject.Inject; import org.ovirt.engine.core.common.businessentities.Disk; import org.ovirt.engine.ui.common.uicommon.model.DetailModelProvider; import org.ovirt.engine.ui.common.view.AbstractSubTabFormView; import org.ovirt.engine.ui.common.widget.form.FormBuilder; import org.ovirt.engine.ui.common.widget.form.FormItem; import org.ovirt.engine.ui.common.widget.form.GeneralFormPanel; import org.ovirt.engine.ui.common.widget.label.TextBoxLabel; import org.ovirt.engine.ui.uicommonweb.models.disks.DiskGeneralModel; import org.ovirt.engine.ui.uicommonweb.models.disks.DiskListModel; import org.ovirt.engine.ui.webadmin.ApplicationConstants; import org.ovirt.engine.ui.webadmin.gin.ClientGinjectorProvider; import org.ovirt.engine.ui.webadmin.section.main.presenter.tab.disk.SubTabDiskGeneralPresenter; import com.google.gwt.core.client.GWT; import com.google.gwt.editor.client.Editor; import com.google.gwt.editor.client.SimpleBeanEditorDriver; import com.google.gwt.uibinder.client.UiBinder; import com.google.gwt.uibinder.client.UiField; import com.google.gwt.user.client.ui.Widget; public class SubTabDiskGeneralView extends AbstractSubTabFormView<Disk, DiskListModel, DiskGeneralModel> implements SubTabDiskGeneralPresenter.ViewDef, Editor<DiskGeneralModel> { interface ViewUiBinder extends UiBinder<Widget, SubTabDiskGeneralView> { ViewUiBinder uiBinder = GWT.create(ViewUiBinder.class); } interface Driver extends SimpleBeanEditorDriver<DiskGeneralModel, SubTabDiskGeneralView> { Driver driver = GWT.create(Driver.class); } TextBoxLabel alias = new TextBoxLabel(); TextBoxLabel description = new TextBoxLabel(); TextBoxLabel diskId = new TextBoxLabel(); TextBoxLabel lunId = new TextBoxLabel(); TextBoxLabel quotaName = new TextBoxLabel(); @UiField(provided = true) GeneralFormPanel formPanel; FormBuilder formBuilder; private ApplicationConstants constants = 
ClientGinjectorProvider.instance().getApplicationConstants(); @Inject public SubTabDiskGeneralView(DetailModelProvider<DiskListModel, DiskGeneralModel> modelProvider) { super(modelProvider); // Init formPanel formPanel = new GeneralFormPanel(); initWidget(ViewUiBinder.uiBinder.createAndBindUi(this)); Driver.driver.initialize(this); // Build a form using the FormBuilder formBuilder = new FormBuilder(formPanel, 1, 6); formBuilder.setColumnsWidth("300px"); //$NON-NLS-1$ formBuilder.addFormItem(new FormItem(constants.aliasDisk(), alias, 0, 0)); formBuilder.addFormItem(new FormItem(constants.descriptionDisk(), description, 1, 0)); formBuilder.addFormItem(new FormItem(constants.idDisk(), diskId, 2, 0)); formBuilder.addFormItem(new FormItem(constants.lunIdSanStorage(), lunId, 3, 0) { @Override public boolean isVisible() { return getDetailModel().isLun(); } }); formBuilder.addFormItem(new FormItem(constants.quota(), quotaName, 4, 0) { @Override public boolean isVisible() { return getDetailModel().isQuotaAvailable(); } }); } @Override public void setMainTabSelectedItem(Disk selectedItem) { Driver.driver.edit(getDetailModel()); formBuilder.showForm(getDetailModel()); } }
package org.innovateuk.ifs.application.overview.controller; import org.innovateuk.ifs.application.form.ApplicationForm; import org.innovateuk.ifs.application.forms.populator.AssessorQuestionFeedbackPopulator; import org.innovateuk.ifs.application.overview.populator.ApplicationOverviewModelPopulator; import org.innovateuk.ifs.application.resource.ApplicationResource; import org.innovateuk.ifs.application.resource.ApplicationState; import org.innovateuk.ifs.application.service.ApplicationRestService; import org.innovateuk.ifs.application.service.QuestionService; import org.innovateuk.ifs.commons.security.SecuredBySpring; import org.innovateuk.ifs.competition.resource.CompetitionStatus; import org.innovateuk.ifs.filter.CookieFlashMessageFilter; import org.innovateuk.ifs.user.resource.ProcessRoleResource; import org.innovateuk.ifs.user.resource.UserResource; import org.innovateuk.ifs.user.resource.UserRoleType; import org.innovateuk.ifs.user.service.ProcessRoleService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.stereotype.Controller; import org.springframework.ui.Model; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestMapping; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import static java.lang.String.format; import static org.innovateuk.ifs.application.resource.ApplicationState.OPEN; /** * This controller will handle all requests that are related to the application overview. * Application overview is the page that contains the most basic information about the current application and * the basic information about the competition the application is related to. 
*/ @Controller @RequestMapping("/application") @SecuredBySpring(value="Controller", description = "TODO", securedType = ApplicationController.class) @PreAuthorize("hasAuthority('applicant')") public class ApplicationController { @Autowired private ApplicationOverviewModelPopulator applicationOverviewModelPopulator; @Autowired private QuestionService questionService; @Autowired private ProcessRoleService processRoleService; @Autowired private ApplicationRestService applicationRestService; @Autowired private CookieFlashMessageFilter cookieFlashMessageFilter; @Autowired private AssessorQuestionFeedbackPopulator assessorQuestionFeedbackPopulator; @GetMapping("/{applicationId}") public String applicationDetails(ApplicationForm form, Model model, @PathVariable("applicationId") long applicationId, UserResource user) { ApplicationResource application = applicationRestService.getApplicationById(applicationId) .getSuccessObjectOrThrowException(); if (application.getCompetitionStatus() != CompetitionStatus.OPEN) { return format("redirect:/application/%s/summary", application.getId()); } if (form == null) { form = new ApplicationForm(); } form.setApplication(application); changeApplicationStatusToOpen(application, user); Long userId = user.getId(); model.addAttribute("form", form); model.addAttribute("model", applicationOverviewModelPopulator.populateModel(application, userId)); return "application-details"; } private void changeApplicationStatusToOpen(ApplicationResource applicationResource, UserResource userResource) { //IFS-2456 TODO: We should reconsider this approach and the value of tracking CREATED state. 
if (ApplicationState.CREATED.equals(applicationResource.getApplicationState()) && userResource.hasRoles(UserRoleType.LEADAPPLICANT)) { applicationRestService.updateApplicationState(applicationResource.getId(), OPEN).getSuccessObjectOrThrowException(); } } @PostMapping(value = "/{applicationId}") public String applicationDetails(@PathVariable("applicationId") long applicationId, UserResource user, HttpServletRequest request) { ProcessRoleResource assignedBy = processRoleService.findProcessRole(user.getId(), applicationId); questionService.assignQuestion(applicationId, request, assignedBy); return "redirect:/application/" + applicationId; } @GetMapping(value = "/{applicationId}/question/{questionId}/feedback") public String applicationAssessorQuestionFeedback(Model model, @PathVariable("applicationId") long applicationId, @PathVariable("questionId") long questionId) { ApplicationResource applicationResource = applicationRestService.getApplicationById(applicationId) .getSuccessObjectOrThrowException(); if (!applicationResource.getCompetitionStatus().isFeedbackReleased()) { return "redirect:/application/" + applicationId + "/summary"; } model.addAttribute("model", assessorQuestionFeedbackPopulator.populate(applicationResource, questionId)); return "application-assessor-feedback"; } @GetMapping("/terms-and-conditions") public String termsAndConditions() { return "application-terms-and-conditions"; } /** * Assign a question to a user * * @param applicationId the application for which the user is assigned * @param sectionId section id for showing details * @param request request parameters * @return */ @PostMapping("/{applicationId}/section/{sectionId}") public String assignQuestion(@PathVariable("applicationId") long applicationId, @PathVariable("sectionId") long sectionId, UserResource user, HttpServletRequest request, HttpServletResponse response) { doAssignQuestion(applicationId, user, request, response); return "redirect:/application/" + applicationId + "/section/" + 
sectionId; } private void doAssignQuestion(Long applicationId, UserResource user, HttpServletRequest request, HttpServletResponse response) { ProcessRoleResource assignedBy = processRoleService.findProcessRole(user.getId(), applicationId); questionService.assignQuestion(applicationId, request, assignedBy); cookieFlashMessageFilter.setFlashMessage(response, "assignedQuestion"); } }
package techreborn.api;

import net.minecraft.inventory.CraftingInventory;
import net.minecraft.item.ItemStack;
import net.minecraft.recipe.Recipe;
import net.minecraft.util.Identifier;
import net.minecraft.world.World;

/**
 * Legacy rolling-machine recipe registry kept only for API compatibility.
 * Every operation is a stub: registrations are ignored and lookups report
 * "no match" ({@code ItemStack.EMPTY} / {@code null}).
 */
public class RollingMachineRecipe {

    /** Shared singleton instance. */
    public static final RollingMachineRecipe instance = new RollingMachineRecipe();

    /** No-op: shaped recipe registrations are silently discarded. */
    public void addShapedOreRecipe(Identifier resourceLocation, ItemStack outputItemStack, Object... objectInputs) {
    }

    /** No-op: shapeless recipe registrations are silently discarded. */
    public void addShapelessOreRecipe(Identifier resourceLocation, ItemStack outputItemStack, Object... objectInputs) {
    }

    /** Always reports no matching recipe output. */
    public ItemStack findMatchingRecipeOutput(CraftingInventory inv, World world) {
        return ItemStack.EMPTY;
    }

    /** Always reports no matching recipe. */
    public Recipe findMatchingRecipe(CraftingInventory inv, World world) {
        return null;
    }
}
package org.exist.storage; import org.apache.log4j.Logger; import org.exist.EXistException; import org.exist.backup.ConsistencyCheck; import org.exist.backup.ErrorReport; import org.exist.backup.SystemExport; import org.exist.management.AgentFactory; import org.exist.util.Configuration; import java.io.BufferedOutputStream; import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.PrintWriter; import java.io.UnsupportedEncodingException; import java.util.List; import java.util.Properties; public class ConsistencyCheckTask implements SystemTask { private static final Logger LOG = Logger.getLogger(ConsistencyCheckTask.class); private String exportDir; private boolean createBackup = false; private boolean paused = false; public void configure(Configuration config, Properties properties) throws EXistException { exportDir = properties.getProperty("output", "export"); File dir = new File(exportDir); if (!dir.isAbsolute()) dir = new File((String)config.getProperty(BrokerPool.PROPERTY_DATA_DIR), exportDir); dir.mkdirs(); exportDir = dir.getAbsolutePath(); if (LOG.isDebugEnabled()) LOG.debug("Using output directory " + exportDir); String backup = properties.getProperty("backup", "no"); createBackup = backup.equalsIgnoreCase("YES"); } public void execute(DBBroker broker) throws EXistException { if (paused) { if (LOG.isDebugEnabled()) LOG.debug("Consistency check is paused."); return; } long start = System.currentTimeMillis(); PrintWriter report = openLog(); CheckCallback cb = new CheckCallback(report); try { if (LOG.isDebugEnabled()) LOG.debug("Starting consistency check..."); boolean doBackup = createBackup; ConsistencyCheck check = new ConsistencyCheck(broker); List errors = check.checkAll(cb); if (!errors.isEmpty()) { if (LOG.isDebugEnabled()) LOG.debug("Errors found: " + errors.size()); doBackup = true; if (fatalErrorsFound(errors)) { if (LOG.isDebugEnabled()) 
LOG.debug("Fatal errors were found: pausing the consistency check task."); paused = true; } } AgentFactory.getInstance().updateErrors(broker.getBrokerPool(), errors, start); if (doBackup) { File exportFile = SystemExport.getUniqueFile("data", ".zip", exportDir); if (LOG.isDebugEnabled()) LOG.debug("Creating emergency backup to file: " + exportFile.getAbsolutePath()); SystemExport sysexport = new SystemExport(broker, null); sysexport.export(exportFile.getAbsolutePath(), errors); } } finally { report.close(); } } private boolean fatalErrorsFound(List errors) { for (int i = 0; i < errors.size(); i++) { ErrorReport error = (ErrorReport) errors.get(i); switch (error.getErrcode()) { // the following errors are considered fatal: export the db and stop the task case ErrorReport.CHILD_COLLECTION : case ErrorReport.RESOURCE_ACCESS_FAILED : return true; } } // no fatal errors return false; } private PrintWriter openLog() throws EXistException { try { File file = SystemExport.getUniqueFile("report", ".log", exportDir); OutputStream os = new BufferedOutputStream(new FileOutputStream(file)); return new PrintWriter(new OutputStreamWriter(os, "UTF-8")); } catch (UnsupportedEncodingException e) { throw new EXistException("ERROR: failed to create report file in " + exportDir, e); } catch (FileNotFoundException e) { throw new EXistException("ERROR: failed to create report file in " + exportDir, e); } } private class CheckCallback implements ConsistencyCheck.ProgressCallback, SystemExport.StatusCallback { private PrintWriter log; private boolean errorFound = false; private CheckCallback(PrintWriter log) { this.log = log; } public void startDocument(String path) { } public void startDocument(String name, int current, int count) { } public void startCollection(String path) { if (errorFound) log.write(" errorFound = false; log.write("COLLECTION: "); log.write(path); log.write('\n'); } public void error(ErrorReport error) { log.write(" log.write(error.toString()); log.write('\n'); } 
public void error(String message, Throwable exception) { log.write(" log.write("EXPORT ERROR: "); log.write(message); log.write('\n'); exception.printStackTrace(log); } } }
package org.javacc.parser; import java.io.*; import java.util.ArrayList; import java.util.Enumeration; import java.util.HashMap; import java.util.Hashtable; import java.util.Iterator; import java.util.List; import java.util.Map; import org.javacc.utils.JavaFileGenerator; import static org.javacc.parser.JavaCCGlobals.*; /** * Generate lexer. */ public class LexGen extends CodeGenerator implements JavaCCParserConstants { private static final String DUMP_STATIC_VAR_DECLARATIONS_TEMPLATE_RESOURCE_URL = "/templates/DumpStaticVarDeclarations.template"; private static final String DUMP_DEBUG_METHODS_TEMPLATE_RESOURCE_URL = "/templates/DumpDebugMethods.template"; private static final String BOILERPLATER_METHOD_RESOURCE_URL = "/templates/TokenManagerBoilerPlateMethods.template"; public static String staticString; public static String tokMgrClassName; // Hashtable of vectors static Hashtable allTpsForState = new Hashtable(); public static int lexStateIndex = 0; static int[] kinds; public static int maxOrdinal = 1; public static String lexStateSuffix; static String[] newLexState; public static int[] lexStates; public static boolean[] ignoreCase; public static Action[] actions; public static Hashtable initStates = new Hashtable(); public static int stateSetSize; public static int maxLexStates; public static String[] lexStateName; static NfaState[] singlesToSkip; public static long[] toSkip; public static long[] toSpecial; public static long[] toMore; public static long[] toToken; public static int defaultLexState; public static RegularExpression[] rexprs; public static int[] maxLongsReqd; public static int[] initMatch; public static int[] canMatchAnyChar; public static boolean hasEmptyMatch; public static boolean[] canLoop; public static boolean[] stateHasActions; public static boolean hasLoop = false; public static boolean[] canReachOnMore; public static boolean[] hasNfa; public static boolean[] mixed; public static NfaState initialState; public static int curKind; static 
boolean hasSkipActions = false; static boolean hasMoreActions = false; static boolean hasTokenActions = false; static boolean hasSpecial = false; static boolean hasSkip = false; static boolean hasMore = false; public static RegularExpression curRE; public static boolean keepLineCol; public static String errorHandlingClass; void PrintClassHead() { int i, j; List tn = new ArrayList(toolNames); tn.add(toolName); // TODO :: CBA -- Require Unification of output language specific processing into a single Enum class genCodeLine("/* " + getIdString(tn, tokMgrClassName + getFileExtension(Options.getOutputLanguage())) + " */"); int l = 0, kind; i = 1; for (;;) { if (cu_to_insertion_point_1.size() <= l) break; kind = ((Token)cu_to_insertion_point_1.get(l)).kind; if(kind == PACKAGE || kind == IMPORT) { for (; i < cu_to_insertion_point_1.size(); i++) { kind = ((Token)cu_to_insertion_point_1.get(i)).kind; if (kind == SEMICOLON || kind == ABSTRACT || kind == FINAL || kind == PUBLIC || kind == CLASS || kind == INTERFACE) { cline = ((Token)(cu_to_insertion_point_1.get(l))).beginLine; ccol = ((Token)(cu_to_insertion_point_1.get(l))).beginColumn; for (j = l; j < i; j++) { printToken((Token)(cu_to_insertion_point_1.get(j))); } if (kind == SEMICOLON) printToken((Token)(cu_to_insertion_point_1.get(j))); genCodeLine(""); break; } } l = ++i; } else break; } genCodeLine(""); genCodeLine("/** Token Manager. 
*/"); //genCodeLine("@SuppressWarnings(\"unused\")"); genAnnotation("SuppressWarnings(\"unused\")"); if(Options.getSupportClassVisibilityPublic()) { //genModifier("public "); genModifier("public "); } //genCodeLine("class " + tokMgrClassName + " implements " + //cu_name + "Constants"); //String superClass = Options.stringValue(Options.USEROPTION__TOKEN_MANAGER_SUPER_CLASS); genClassStart(null, tokMgrClassName, new String[]{}, new String[]{cu_name + "Constants"}); //genCodeLine("{"); // } if (token_mgr_decls != null && token_mgr_decls.size() > 0) { Token t = (Token)token_mgr_decls.get(0); boolean commonTokenActionSeen = false; boolean commonTokenActionNeeded = Options.getCommonTokenAction(); printTokenSetup((Token)token_mgr_decls.get(0)); ccol = 1; for (j = 0; j < token_mgr_decls.size(); j++) { t = (Token)token_mgr_decls.get(j); if (t.kind == IDENTIFIER && commonTokenActionNeeded && !commonTokenActionSeen) commonTokenActionSeen = t.image.equals("CommonTokenAction"); printToken(t); } genCodeLine(""); if (commonTokenActionNeeded && !commonTokenActionSeen) JavaCCErrors.warning("You have the COMMON_TOKEN_ACTION option set. " + "But it appears you have not defined the method :\n"+ " " + staticString + "void CommonTokenAction(Token t)\n" + "in your TOKEN_MGR_DECLS. The generated token manager will not compile."); } else if (Options.getCommonTokenAction()) { JavaCCErrors.warning("You have the COMMON_TOKEN_ACTION option set. " + "But you have not defined the method :\n"+ " " + staticString + "void CommonTokenAction(Token t)\n" + "in your TOKEN_MGR_DECLS. The generated token manager will not compile."); } genCodeLine(""); genCodeLine(" /** Debug output. */"); genCodeLine(" public " + staticString + " java.io.PrintStream debugStream = System.out;"); genCodeLine(" /** Set debug output. 
*/"); genCodeLine(" public " + staticString + " void setDebugStream(java.io.PrintStream ds) { debugStream = ds; }"); if(Options.getTokenManagerUsesParser() && !Options.getStatic()){ genCodeLine(""); genCodeLine(" /** The parser. */"); genCodeLine(" public " + cu_name + " parser = null;"); } } @SuppressWarnings("unchecked") protected void writeTemplate(String name, Object... additionalOptions) throws IOException { Map<String, Object> options = new HashMap<String, Object>(Options.getOptions()); options.put("maxOrdinal", Integer.valueOf(maxOrdinal)); options.put("maxLexStates", Integer.valueOf(maxLexStates)); options.put("hasEmptyMatch", Boolean.valueOf(hasEmptyMatch)); options.put("hasSkip", Boolean.valueOf(hasSkip)); options.put("hasMore", Boolean.valueOf(hasMore)); options.put("hasSpecial", Boolean.valueOf(hasSpecial)); options.put("hasMoreActions", Boolean.valueOf(hasMoreActions)); options.put("hasSkipActions", Boolean.valueOf(hasSkipActions)); options.put("hasTokenActions", Boolean.valueOf(hasTokenActions)); options.put("stateSetSize", stateSetSize); options.put("hasActions", hasMoreActions || hasSkipActions || hasTokenActions); options.put("tokMgrClassName", tokMgrClassName); int x = 0; for (int l : maxLongsReqd) x = Math.max(x, l); options.put("maxLongs", x); options.put("cu_name", cu_name); // options.put("", .valueOf(maxOrdinal)); for (int i = 0; i < additionalOptions.length; i++) { Object o = additionalOptions[i]; if (o instanceof Map<?,?>) { options.putAll((Map<String,Object>) o); } else { if (i == additionalOptions.length - 1) throw new IllegalArgumentException("Must supply pairs of [name value] args"); options.put((String) o, additionalOptions[i+1]); i++; } } JavaFileGenerator gen = new JavaFileGenerator(name, options); StringWriter sw = new StringWriter(); gen.generate(new PrintWriter(sw)); sw.close(); genCode(sw.toString()); } void DumpDebugMethods() throws IOException { writeTemplate(DUMP_DEBUG_METHODS_TEMPLATE_RESOURCE_URL); } static void 
BuildLexStatesTable() { Iterator it = rexprlist.iterator(); TokenProduction tp; int i; String[] tmpLexStateName = new String[lexstate_I2S.size()]; while (it.hasNext()) { tp = (TokenProduction)it.next(); List respecs = tp.respecs; List tps; for (i = 0; i < tp.lexStates.length; i++) { if ((tps = (List)allTpsForState.get(tp.lexStates[i])) == null) { tmpLexStateName[maxLexStates++] = tp.lexStates[i]; allTpsForState.put(tp.lexStates[i], tps = new ArrayList()); } tps.add(tp); } if (respecs == null || respecs.size() == 0) continue; RegularExpression re; for (i = 0; i < respecs.size(); i++) if (maxOrdinal <= (re = ((RegExprSpec)respecs.get(i)).rexp).ordinal) maxOrdinal = re.ordinal + 1; } kinds = new int[maxOrdinal]; toSkip = new long[maxOrdinal / 64 + 1]; toSpecial = new long[maxOrdinal / 64 + 1]; toMore = new long[maxOrdinal / 64 + 1]; toToken = new long[maxOrdinal / 64 + 1]; toToken[0] = 1L; actions = new Action[maxOrdinal]; actions[0] = actForEof; hasTokenActions = actForEof != null; initStates = new Hashtable(); canMatchAnyChar = new int[maxLexStates]; canLoop = new boolean[maxLexStates]; stateHasActions = new boolean[maxLexStates]; lexStateName = new String[maxLexStates]; singlesToSkip = new NfaState[maxLexStates]; System.arraycopy(tmpLexStateName, 0, lexStateName, 0, maxLexStates); for (i = 0; i < maxLexStates; i++) canMatchAnyChar[i] = -1; hasNfa = new boolean[maxLexStates]; mixed = new boolean[maxLexStates]; maxLongsReqd = new int[maxLexStates]; initMatch = new int[maxLexStates]; newLexState = new String[maxOrdinal]; newLexState[0] = nextStateForEof; hasEmptyMatch = false; lexStates = new int[maxOrdinal]; ignoreCase = new boolean[maxOrdinal]; rexprs = new RegularExpression[maxOrdinal]; RStringLiteral.allImages = new String[maxOrdinal]; canReachOnMore = new boolean[maxLexStates]; } static int GetIndex(String name) { for (int i = 0; i < lexStateName.length; i++) if (lexStateName[i] != null && lexStateName[i].equals(name)) return i; throw new Error(); // Should never 
come here } public static void AddCharToSkip(char c, int kind) { singlesToSkip[lexStateIndex].AddChar(c); singlesToSkip[lexStateIndex].kind = kind; } public void start() throws IOException { if (!Options.getBuildTokenManager() || Options.getUserTokenManager() || JavaCCErrors.get_error_count() > 0) return; keepLineCol = Options.getKeepLineColumn(); errorHandlingClass = Options.getTokenMgrErrorClass(); List choices = new ArrayList(); Enumeration e; TokenProduction tp; int i, j; staticString = (Options.getStatic() ? "static " : ""); tokMgrClassName = cu_name + "TokenManager"; PrintClassHead(); BuildLexStatesTable(); e = allTpsForState.keys(); boolean ignoring = false; while (e.hasMoreElements()) { NfaState.ReInit(); RStringLiteral.ReInit(); String key = (String)e.nextElement(); lexStateIndex = GetIndex(key); lexStateSuffix = "_" + lexStateIndex; List allTps = (List)allTpsForState.get(key); initStates.put(key, initialState = new NfaState()); ignoring = false; singlesToSkip[lexStateIndex] = new NfaState(); singlesToSkip[lexStateIndex].dummy = true; if (key.equals("DEFAULT")) defaultLexState = lexStateIndex; for (i = 0; i < allTps.size(); i++) { tp = (TokenProduction)allTps.get(i); int kind = tp.kind; boolean ignore = tp.ignoreCase; List rexps = tp.respecs; if (i == 0) ignoring = ignore; for (j = 0; j < rexps.size(); j++) { RegExprSpec respec = (RegExprSpec)rexps.get(j); curRE = respec.rexp; rexprs[curKind = curRE.ordinal] = curRE; lexStates[curRE.ordinal] = lexStateIndex; ignoreCase[curRE.ordinal] = ignore; if (curRE.private_rexp) { kinds[curRE.ordinal] = -1; continue; } if (curRE instanceof RStringLiteral && !((RStringLiteral)curRE).image.equals("")) { ((RStringLiteral)curRE).GenerateDfa(this, curRE.ordinal); if (i != 0 && !mixed[lexStateIndex] && ignoring != ignore) { mixed[lexStateIndex] = true; } } else if (curRE.CanMatchAnyChar()) { if (canMatchAnyChar[lexStateIndex] == -1 || canMatchAnyChar[lexStateIndex] > curRE.ordinal) canMatchAnyChar[lexStateIndex] = 
curRE.ordinal;
          }
          else
          {
            Nfa temp;

            if (curRE instanceof RChoice)
              choices.add(curRE);

            temp = curRE.GenerateNfa(ignore);
            temp.end.isFinal = true;
            temp.end.kind = curRE.ordinal;
            initialState.AddMove(temp.start);
          }

          if (kinds.length < curRE.ordinal)
          {
            int[] tmp = new int[curRE.ordinal + 1];

            System.arraycopy(kinds, 0, tmp, 0, kinds.length);
            kinds = tmp;
          }
          //System.out.println(" ordina : " + curRE.ordinal);

          kinds[curRE.ordinal] = kind;

          if (respec.nextState != null &&
              !respec.nextState.equals(lexStateName[lexStateIndex]))
            newLexState[curRE.ordinal] = respec.nextState;

          if (respec.act != null && respec.act.getActionTokens() != null &&
              respec.act.getActionTokens().size() > 0)
            actions[curRE.ordinal] = respec.act;

          // Record kind-specific bookkeeping (bit vectors and action flags).
          switch(kind)
          {
            case TokenProduction.SPECIAL :
              hasSkipActions |= (actions[curRE.ordinal] != null) ||
                                (newLexState[curRE.ordinal] != null);
              hasSpecial = true;
              toSpecial[curRE.ordinal / 64] |= 1L << (curRE.ordinal % 64);
              toSkip[curRE.ordinal / 64] |= 1L << (curRE.ordinal % 64);
              break;
            case TokenProduction.SKIP :
              hasSkipActions |= (actions[curRE.ordinal] != null);
              hasSkip = true;
              toSkip[curRE.ordinal / 64] |= 1L << (curRE.ordinal % 64);
              break;
            case TokenProduction.MORE :
              hasMoreActions |= (actions[curRE.ordinal] != null);
              hasMore = true;
              toMore[curRE.ordinal / 64] |= 1L << (curRE.ordinal % 64);

              if (newLexState[curRE.ordinal] != null)
                canReachOnMore[GetIndex(newLexState[curRE.ordinal])] = true;
              else
                canReachOnMore[lexStateIndex] = true;

              break;
            case TokenProduction.TOKEN :
              hasTokenActions |= (actions[curRE.ordinal] != null);
              toToken[curRE.ordinal / 64] |= 1L << (curRE.ordinal % 64);
              break;
          }
        }
      }

      // Generate a static block for initializing the nfa transitions
      NfaState.ComputeClosures();

      for (i = 0; i < initialState.epsilonMoves.size(); i++)
        ((NfaState)initialState.epsilonMoves.elementAt(i)).GenerateCode();

      if (hasNfa[lexStateIndex] = (NfaState.generatedStates != 0))
      {
        initialState.GenerateCode();
        initialState.GenerateInitMoves(this);
      }

      if (initialState.kind !=
Integer.MAX_VALUE && initialState.kind != 0) { if ((toSkip[initialState.kind / 64] & (1L << initialState.kind)) != 0L || (toSpecial[initialState.kind / 64] & (1L << initialState.kind)) != 0L) hasSkipActions = true; else if ((toMore[initialState.kind / 64] & (1L << initialState.kind)) != 0L) hasMoreActions = true; else hasTokenActions = true; if (initMatch[lexStateIndex] == 0 || initMatch[lexStateIndex] > initialState.kind) { initMatch[lexStateIndex] = initialState.kind; hasEmptyMatch = true; } } else if (initMatch[lexStateIndex] == 0) initMatch[lexStateIndex] = Integer.MAX_VALUE; RStringLiteral.FillSubString(); if (hasNfa[lexStateIndex] && !mixed[lexStateIndex]) RStringLiteral.GenerateNfaStartStates(this, initialState); RStringLiteral.DumpDfaCode(this); if (hasNfa[lexStateIndex]) NfaState.DumpMoveNfa(this); if (stateSetSize < NfaState.generatedStates) stateSetSize = NfaState.generatedStates; } for (i = 0; i < choices.size(); i++) ((RChoice)choices.get(i)).CheckUnmatchability(); NfaState.DumpStateSets(this); CheckEmptyStringMatch(); NfaState.DumpNonAsciiMoveMethods(this); RStringLiteral.DumpStrLiteralImages(this); DumpFillToken(); DumpGetNextToken(); if (Options.getDebugTokenManager()) { NfaState.DumpStatesForKind(this); DumpDebugMethods(); } if (hasLoop) { genCodeLine(staticString + "int[] jjemptyLineNo = new int[" + maxLexStates + "];"); genCodeLine(staticString + "int[] jjemptyColNo = new int[" + maxLexStates + "];"); genCodeLine(staticString + "" + Options.getBooleanType() + "[] jjbeenHere = new " + Options.getBooleanType() + "[" + maxLexStates + "];"); } if (hasSkipActions) DumpSkipActions(); if (hasMoreActions) DumpMoreActions(); if (hasTokenActions) DumpTokenActions(); NfaState.PrintBoilerPlate(this); String charStreamName; if (Options.getUserCharStream()) charStreamName = "CharStream"; else { if (Options.getJavaUnicodeEscape()) charStreamName = "JavaCharStream"; else charStreamName = "SimpleCharStream"; } writeTemplate(BOILERPLATER_METHOD_RESOURCE_URL, 
"charStreamName", charStreamName, "lexStateNameLength", lexStateName.length, "generatedStates", NfaState.generatedStates); DumpStaticVarDeclarations(charStreamName); genCodeLine( "}"); // TODO :: CBA -- Require Unification of output language specific processing into a single Enum class String fileName = Options.getOutputDirectory() + File.separator + tokMgrClassName + getFileExtension(Options.getOutputLanguage()); if (Options.getBuildParser()) { saveOutput(fileName); } } static void CheckEmptyStringMatch() { int i, j, k, len; boolean[] seen = new boolean[maxLexStates]; boolean[] done = new boolean[maxLexStates]; String cycle; String reList; Outer: for (i = 0; i < maxLexStates; i++) { if (done[i] || initMatch[i] == 0 || initMatch[i] == Integer.MAX_VALUE || canMatchAnyChar[i] != -1) continue; done[i] = true; len = 0; cycle = ""; reList = ""; for (k = 0; k < maxLexStates; k++) seen[k] = false; j = i; seen[i] = true; cycle += lexStateName[j] + " while (newLexState[initMatch[j]] != null) { cycle += newLexState[initMatch[j]]; if (seen[j = GetIndex(newLexState[initMatch[j]])]) break; cycle += " done[j] = true; seen[j] = true; if (initMatch[j] == 0 || initMatch[j] == Integer.MAX_VALUE || canMatchAnyChar[j] != -1) continue Outer; if (len != 0) reList += "; "; reList += "line " + rexprs[initMatch[j]].getLine() + ", column " + rexprs[initMatch[j]].getColumn(); len++; } if (newLexState[initMatch[j]] == null) cycle += lexStateName[lexStates[initMatch[j]]]; for (k = 0; k < maxLexStates; k++) canLoop[k] |= seen[k]; hasLoop = true; if (len == 0) JavaCCErrors.warning(rexprs[initMatch[i]], "Regular expression" + ((rexprs[initMatch[i]].label.equals("")) ? "" : (" for " + rexprs[initMatch[i]].label)) + " can be matched by the empty string (\"\") in lexical state " + lexStateName[i] + ". This can result in an endless loop of " + "empty string matches."); else { JavaCCErrors.warning(rexprs[initMatch[i]], "Regular expression" + ((rexprs[initMatch[i]].label.equals("")) ? 
"" : (" for " + rexprs[initMatch[i]].label)) + " can be matched by the empty string (\"\") in lexical state " + lexStateName[i] + ". This regular expression along with the " + "regular expressions at " + reList + " forms the cycle \n " + cycle + "\ncontaining regular expressions with empty matches." + " This can result in an endless loop of empty string matches."); } } } private void DumpStaticVarDeclarations(String charStreamName) throws IOException { int i; genCodeLine(""); genCodeLine("/** Lexer state names. */"); genCodeLine("public static final String[] lexStateNames = {"); for (i = 0; i < maxLexStates; i++) genCodeLine(" \"" + lexStateName[i] + "\","); genCodeLine("};"); if (maxLexStates > 1) { genCodeLine(""); genCodeLine("/** Lex State array. */"); genCode("public static final int[] jjnewLexState = {"); for (i = 0; i < maxOrdinal; i++) { if (i % 25 == 0) genCode("\n "); if (newLexState[i] == null) genCode("-1, "); else genCode(GetIndex(newLexState[i]) + ", "); } genCodeLine("\n};"); } if (hasSkip || hasMore || hasSpecial) { // Bit vector for TOKEN genCode("static final long[] jjtoToken = {"); for (i = 0; i < maxOrdinal / 64 + 1; i++) { if (i % 4 == 0) genCode("\n "); genCode("0x" + Long.toHexString(toToken[i]) + "L, "); } genCodeLine("\n};"); } if (hasSkip || hasSpecial) { // Bit vector for SKIP genCode("static final long[] jjtoSkip = {"); for (i = 0; i < maxOrdinal / 64 + 1; i++) { if (i % 4 == 0) genCode("\n "); genCode("0x" + Long.toHexString(toSkip[i]) + "L, "); } genCodeLine("\n};"); } if (hasSpecial) { // Bit vector for SPECIAL genCode("static final long[] jjtoSpecial = {"); for (i = 0; i < maxOrdinal / 64 + 1; i++) { if (i % 4 == 0) genCode("\n "); genCode("0x" + Long.toHexString(toSpecial[i]) + "L, "); } genCodeLine("\n};"); } if (hasMore) { // Bit vector for MORE genCode("static final long[] jjtoMore = {"); for (i = 0; i < maxOrdinal / 64 + 1; i++) { if (i % 4 == 0) genCode("\n "); genCode("0x" + Long.toHexString(toMore[i]) + "L, "); } 
genCodeLine("\n};"); } writeTemplate(DUMP_STATIC_VAR_DECLARATIONS_TEMPLATE_RESOURCE_URL, "charStreamName", charStreamName, "protected", isJavaLanguage() ? "protected" : "", "private", isJavaLanguage() ? "private" : "", "final", isJavaLanguage() ? "final" : "", "lexStateNameLength", lexStateName.length); } // Assumes l != 0L static char MaxChar(long l) { for (int i = 64; i if ((l & (1L << i)) != 0L) return (char)i; return 0xffff; } void DumpFillToken() { final double tokenVersion = JavaFiles.getVersion("Token.java"); final boolean hasBinaryNewToken = tokenVersion > 4.09; genCodeLine(staticString + "protected Token jjFillToken()"); genCodeLine("{"); genCodeLine(" final Token t;"); genCodeLine(" final String curTokenImage;"); if (keepLineCol) { genCodeLine(" final int beginLine;"); genCodeLine(" final int endLine;"); genCodeLine(" final int beginColumn;"); genCodeLine(" final int endColumn;"); } if (hasEmptyMatch) { genCodeLine(" if (jjmatchedPos < 0)"); genCodeLine(" {"); genCodeLine(" if (image == null)"); genCodeLine(" curTokenImage = \"\";"); genCodeLine(" else"); genCodeLine(" curTokenImage = image.toString();"); if (keepLineCol) { genCodeLine(" beginLine = endLine = input_stream.getEndLine();"); genCodeLine(" beginColumn = endColumn = input_stream.getEndColumn();"); } genCodeLine(" }"); genCodeLine(" else"); genCodeLine(" {"); genCodeLine(" String im = jjstrLiteralImages[jjmatchedKind];"); genCodeLine(" curTokenImage = (im == null) ? input_stream.GetImage() : im;"); if (keepLineCol) { genCodeLine(" beginLine = input_stream.getBeginLine();"); genCodeLine(" beginColumn = input_stream.getBeginColumn();"); genCodeLine(" endLine = input_stream.getEndLine();"); genCodeLine(" endColumn = input_stream.getEndColumn();"); } genCodeLine(" }"); } else { genCodeLine(" String im = jjstrLiteralImages[jjmatchedKind];"); genCodeLine(" curTokenImage = (im == null) ? 
input_stream.GetImage() : im;"); if (keepLineCol) { genCodeLine(" beginLine = input_stream.getBeginLine();"); genCodeLine(" beginColumn = input_stream.getBeginColumn();"); genCodeLine(" endLine = input_stream.getEndLine();"); genCodeLine(" endColumn = input_stream.getEndColumn();"); } } if (Options.getTokenFactory().length() > 0) { genCodeLine(" t = " + Options.getTokenFactory() + ".newToken(jjmatchedKind, curTokenImage);"); } else if (hasBinaryNewToken) { genCodeLine(" t = Token.newToken(jjmatchedKind, curTokenImage);"); } else { genCodeLine(" t = Token.newToken(jjmatchedKind);"); genCodeLine(" t.kind = jjmatchedKind;"); genCodeLine(" t.image = curTokenImage;"); } if (keepLineCol) { genCodeLine(""); genCodeLine(" t.beginLine = beginLine;"); genCodeLine(" t.endLine = endLine;"); genCodeLine(" t.beginColumn = beginColumn;"); genCodeLine(" t.endColumn = endColumn;"); } genCodeLine(""); genCodeLine(" return t;"); genCodeLine("}"); } void DumpGetNextToken() { int i; genCodeLine(""); genCodeLine(staticString + "int curLexState = " + defaultLexState + ";"); genCodeLine(staticString + "int defaultLexState = " + defaultLexState + ";"); genCodeLine(staticString + "int jjnewStateCnt;"); genCodeLine(staticString + "int jjround;"); genCodeLine(staticString + "int jjmatchedPos;"); genCodeLine(staticString + "int jjmatchedKind;"); genCodeLine(""); genCodeLine("/** Get the next Token. 
*/"); genCodeLine("public " + staticString + "Token getNextToken()" + " "); genCodeLine("{"); if (hasSpecial) { genCodeLine(" Token specialToken = null;"); } genCodeLine(" Token matchedToken;"); genCodeLine(" int curPos = 0;"); genCodeLine(""); genCodeLine(" EOFLoop :\n for (;;)"); genCodeLine(" {"); genCodeLine(" try"); genCodeLine(" {"); genCodeLine(" curChar = input_stream.BeginToken();"); genCodeLine(" }"); genCodeLine(" catch(Exception e)"); genCodeLine(" {"); if (Options.getDebugTokenManager()) genCodeLine(" debugStream.println(\"Returning the <EOF> token.\\n\");"); genCodeLine(" jjmatchedKind = 0;"); genCodeLine(" jjmatchedPos = -1;"); genCodeLine(" matchedToken = jjFillToken();"); if (hasSpecial) genCodeLine(" matchedToken.specialToken = specialToken;"); if (nextStateForEof != null || actForEof != null) genCodeLine(" TokenLexicalActions(matchedToken);"); if (Options.getCommonTokenAction()) genCodeLine(" CommonTokenAction(matchedToken);"); genCodeLine(" return matchedToken;"); genCodeLine(" }"); if (hasMoreActions || hasSkipActions || hasTokenActions) { genCodeLine(" image = jjimage;"); genCodeLine(" image.setLength(0);"); genCodeLine(" jjimageLen = 0;"); } genCodeLine(""); String prefix = ""; if (hasMore) { genCodeLine(" for (;;)"); genCodeLine(" {"); prefix = " "; } String endSwitch = ""; String caseStr = ""; // this also sets up the start state of the nfa if (maxLexStates > 1) { genCodeLine(prefix + " switch(curLexState)"); genCodeLine(prefix + " {"); endSwitch = prefix + " }"; caseStr = prefix + " case "; prefix += " "; } prefix += " "; for(i = 0; i < maxLexStates; i++) { if (maxLexStates > 1) genCodeLine(caseStr + i + ":"); if (singlesToSkip[i].HasTransitions()) { // added the backup(0) to make JIT happy genCodeLine(prefix + "try { input_stream.backup(0);"); if (singlesToSkip[i].asciiMoves[0] != 0L && singlesToSkip[i].asciiMoves[1] != 0L) { genCodeLine(prefix + " while ((curChar < 64" + " && (0x" + Long.toHexString(singlesToSkip[i].asciiMoves[0]) + "L & 
(1L << curChar)) != 0L) || \n" + prefix + " (curChar >> 6) == 1" + " && (0x" + Long.toHexString(singlesToSkip[i].asciiMoves[1]) + "L & (1L << (curChar & 077))) != 0L)"); } else if (singlesToSkip[i].asciiMoves[1] == 0L) { genCodeLine(prefix + " while (curChar <= " + (int)MaxChar(singlesToSkip[i].asciiMoves[0]) + " && (0x" + Long.toHexString(singlesToSkip[i].asciiMoves[0]) + "L & (1L << curChar)) != 0L)"); } else if (singlesToSkip[i].asciiMoves[0] == 0L) { genCodeLine(prefix + " while (curChar > 63 && curChar <= " + ((int)MaxChar(singlesToSkip[i].asciiMoves[1]) + 64) + " && (0x" + Long.toHexString(singlesToSkip[i].asciiMoves[1]) + "L & (1L << (curChar & 077))) != 0L)"); } if (Options.getDebugTokenManager()) { genCodeLine(prefix + "{"); genCodeLine(" debugStream.println(" + (maxLexStates > 1 ? "\"<\" + lexStateNames[curLexState] + \">\" + " : "") + "\"Skipping character : \" + " + errorHandlingClass+".addEscapes(String.valueOf(curChar)) + \" (\" + (int)curChar + \")\");"); } genCodeLine(prefix + " curChar = input_stream.BeginToken();"); if (Options.getDebugTokenManager()) genCodeLine(prefix + "}"); genCodeLine(prefix + "}"); genCodeLine(prefix + "catch (java.io.IOException e1) { continue EOFLoop; }"); } if (initMatch[i] != Integer.MAX_VALUE && initMatch[i] != 0) { if (Options.getDebugTokenManager()) genCodeLine(" debugStream.println(\" Matched the empty string as \" + tokenImage[" + initMatch[i] + "] + \" token.\");"); genCodeLine(prefix + "jjmatchedKind = " + initMatch[i] + ";"); genCodeLine(prefix + "jjmatchedPos = -1;"); genCodeLine(prefix + "curPos = 0;"); } else { genCodeLine(prefix + "jjmatchedKind = 0x" + Integer.toHexString(Integer.MAX_VALUE) + ";"); genCodeLine(prefix + "jjmatchedPos = 0;"); } if (Options.getDebugTokenManager()) genCodeLine(" debugStream.println(" + (maxLexStates > 1 ? 
"\"<\" + lexStateNames[curLexState] + \">\" + " : "") + "\"Current character : \" + " + errorHandlingClass+".addEscapes(String.valueOf(curChar)) + \" (\" + (int)curChar + \") " + "at line \" + input_stream.getEndLine() + \" column \" + input_stream.getEndColumn());"); if (Options.getTableDriven()) { if (maxLexStates > 1) { JavaCCErrors.semantic_error("Table driven code generation cannot (yet) be used with lexical states"); } JavaCCErrors.warning("Table driven code generation is experimental."); genCodeLine(prefix + "curPos = jjRunDfa();"); } else { genCodeLine(prefix + "curPos = jjMoveStringLiteralDfa0_" + i + "();"); } if (canMatchAnyChar[i] != -1) { if (initMatch[i] != Integer.MAX_VALUE && initMatch[i] != 0) genCodeLine(prefix + "if (jjmatchedPos < 0 || (jjmatchedPos == 0 && jjmatchedKind > " + canMatchAnyChar[i] + "))"); else genCodeLine(prefix + "if (jjmatchedPos == 0 && jjmatchedKind > " + canMatchAnyChar[i] + ")"); genCodeLine(prefix + "{"); if (Options.getDebugTokenManager()) genCodeLine(" debugStream.println(\" Current character matched as a \" + tokenImage[" + canMatchAnyChar[i] + "] + \" token.\");"); genCodeLine(prefix + " jjmatchedKind = " + canMatchAnyChar[i] + ";"); if (initMatch[i] != Integer.MAX_VALUE && initMatch[i] != 0) genCodeLine(prefix + " jjmatchedPos = 0;"); genCodeLine(prefix + "}"); } if (maxLexStates > 1) genCodeLine(prefix + "break;"); } if (maxLexStates > 1) genCodeLine(endSwitch); else if (maxLexStates == 0) genCodeLine(" jjmatchedKind = 0x" + Integer.toHexString(Integer.MAX_VALUE) + ";"); if (maxLexStates > 1) prefix = " "; else prefix = ""; if (maxLexStates > 0) { genCodeLine(prefix + " if (jjmatchedKind != 0x" + Integer.toHexString(Integer.MAX_VALUE) + ")"); genCodeLine(prefix + " {"); genCodeLine(prefix + " if (jjmatchedPos + 1 < curPos)"); if (Options.getDebugTokenManager()) { genCodeLine(prefix + " {"); genCodeLine(prefix + " debugStream.println(" + "\" Putting back \" + (curPos - jjmatchedPos - 1) + \" characters into the input 
stream.\");"); } genCodeLine(prefix + " input_stream.backup(curPos - jjmatchedPos - 1);"); if (Options.getDebugTokenManager()) genCodeLine(prefix + " }"); if (Options.getDebugTokenManager()) { if (Options.getJavaUnicodeEscape() || Options.getUserCharStream()) genCodeLine(" debugStream.println(" + "\"****** FOUND A \" + tokenImage[jjmatchedKind] + \" MATCH " + "(\" + "+errorHandlingClass+".addEscapes(new String(input_stream.GetSuffix(jjmatchedPos + 1))) + " + "\") ******\\n\");"); else genCodeLine(" debugStream.println(" + "\"****** FOUND A \" + tokenImage[jjmatchedKind] + \" MATCH " + "(\" + "+errorHandlingClass+".addEscapes(new String(input_stream.GetSuffix(jjmatchedPos + 1))) + " + "\") ******\\n\");"); } if (hasSkip || hasMore || hasSpecial) { genCodeLine(prefix + " if ((jjtoToken[jjmatchedKind >> 6] & " + "(1L << (jjmatchedKind & 077))) != 0L)"); genCodeLine(prefix + " {"); } genCodeLine(prefix + " matchedToken = jjFillToken();"); if (hasSpecial) genCodeLine(prefix + " matchedToken.specialToken = specialToken;"); if (hasTokenActions) genCodeLine(prefix + " TokenLexicalActions(matchedToken);"); if (maxLexStates > 1) { genCodeLine(" if (jjnewLexState[jjmatchedKind] != -1)"); genCodeLine(prefix + " curLexState = jjnewLexState[jjmatchedKind];"); } if (Options.getCommonTokenAction()) genCodeLine(prefix + " CommonTokenAction(matchedToken);"); genCodeLine(prefix + " return matchedToken;"); if (hasSkip || hasMore || hasSpecial) { genCodeLine(prefix + " }"); if (hasSkip || hasSpecial) { if (hasMore) { genCodeLine(prefix + " else if ((jjtoSkip[jjmatchedKind >> 6] & " + "(1L << (jjmatchedKind & 077))) != 0L)"); } else genCodeLine(prefix + " else"); genCodeLine(prefix + " {"); if (hasSpecial) { genCodeLine(prefix + " if ((jjtoSpecial[jjmatchedKind >> 6] & " + "(1L << (jjmatchedKind & 077))) != 0L)"); genCodeLine(prefix + " {"); genCodeLine(prefix + " matchedToken = jjFillToken();"); genCodeLine(prefix + " if (specialToken == null)"); genCodeLine(prefix + " specialToken = 
matchedToken;"); genCodeLine(prefix + " else"); genCodeLine(prefix + " {"); genCodeLine(prefix + " matchedToken.specialToken = specialToken;"); genCodeLine(prefix + " specialToken = (specialToken.next = matchedToken);"); genCodeLine(prefix + " }"); if (hasSkipActions) genCodeLine(prefix + " SkipLexicalActions(matchedToken);"); genCodeLine(prefix + " }"); if (hasSkipActions) { genCodeLine(prefix + " else"); genCodeLine(prefix + " SkipLexicalActions(null);"); } } else if (hasSkipActions) genCodeLine(prefix + " SkipLexicalActions(null);"); if (maxLexStates > 1) { genCodeLine(" if (jjnewLexState[jjmatchedKind] != -1)"); genCodeLine(prefix + " curLexState = jjnewLexState[jjmatchedKind];"); } genCodeLine(prefix + " continue EOFLoop;"); genCodeLine(prefix + " }"); } if (hasMore) { if (hasMoreActions) genCodeLine(prefix + " MoreLexicalActions();"); else if (hasSkipActions || hasTokenActions) genCodeLine(prefix + " jjimageLen += jjmatchedPos + 1;"); if (maxLexStates > 1) { genCodeLine(" if (jjnewLexState[jjmatchedKind] != -1)"); genCodeLine(prefix + " curLexState = jjnewLexState[jjmatchedKind];"); } genCodeLine(prefix + " curPos = 0;"); genCodeLine(prefix + " jjmatchedKind = 0x" + Integer.toHexString(Integer.MAX_VALUE) + ";"); genCodeLine(prefix + " try {"); genCodeLine(prefix + " curChar = input_stream.readChar();"); if (Options.getDebugTokenManager()) genCodeLine(" debugStream.println(" + (maxLexStates > 1 ? 
"\"<\" + lexStateNames[curLexState] + \">\" + " : "") + "\"Current character : \" + " + ""+errorHandlingClass+".addEscapes(String.valueOf(curChar)) + \" (\" + (int)curChar + \") " + "at line \" + input_stream.getEndLine() + \" column \" + input_stream.getEndColumn());"); genCodeLine(prefix + " continue;"); genCodeLine(prefix + " }"); genCodeLine(prefix + " catch (java.io.IOException e1) { }"); } } genCodeLine(prefix + " }"); genCodeLine(prefix + " int error_line = input_stream.getEndLine();"); genCodeLine(prefix + " int error_column = input_stream.getEndColumn();"); genCodeLine(prefix + " String error_after = null;"); genCodeLine(prefix + " " + Options.getBooleanType() + " EOFSeen = false;"); genCodeLine(prefix + " try { input_stream.readChar(); input_stream.backup(1); }"); genCodeLine(prefix + " catch (java.io.IOException e1) {"); genCodeLine(prefix + " EOFSeen = true;"); genCodeLine(prefix + " error_after = curPos <= 1 ? \"\" : input_stream.GetImage();"); genCodeLine(prefix + " if (curChar == '\\n' || curChar == '\\r') {"); genCodeLine(prefix + " error_line++;"); genCodeLine(prefix + " error_column = 0;"); genCodeLine(prefix + " }"); genCodeLine(prefix + " else"); genCodeLine(prefix + " error_column++;"); genCodeLine(prefix + " }"); genCodeLine(prefix + " if (!EOFSeen) {"); genCodeLine(prefix + " input_stream.backup(1);"); genCodeLine(prefix + " error_after = curPos <= 1 ? 
\"\" : input_stream.GetImage();"); genCodeLine(prefix + " }"); genCodeLine(prefix + " throw new "+errorHandlingClass+"(" + "EOFSeen, curLexState, error_line, error_column, error_after, curChar, "+errorHandlingClass+".LEXICAL_ERROR);"); } if (hasMore) genCodeLine(prefix + " }"); genCodeLine(" }"); genCodeLine("}"); genCodeLine(""); } public void DumpSkipActions() { Action act; genCodeLine(staticString + "void SkipLexicalActions(Token matchedToken)"); genCodeLine("{"); genCodeLine(" switch(jjmatchedKind)"); genCodeLine(" {"); Outer: for (int i = 0; i < maxOrdinal; i++) { if ((toSkip[i / 64] & (1L << (i % 64))) == 0L) continue; for (;;) { if (((act = (Action)actions[i]) == null || act.getActionTokens() == null || act.getActionTokens().size() == 0) && !canLoop[lexStates[i]]) continue Outer; genCodeLine(" case " + i + " :"); if (initMatch[lexStates[i]] == i && canLoop[lexStates[i]]) { genCodeLine(" if (jjmatchedPos == -1)"); genCodeLine(" {"); genCodeLine(" if (jjbeenHere[" + lexStates[i] + "] &&"); genCodeLine(" jjemptyLineNo[" + lexStates[i] + "] == input_stream.getBeginLine() &&"); genCodeLine(" jjemptyColNo[" + lexStates[i] + "] == input_stream.getBeginColumn())"); genCodeLine(" throw new "+errorHandlingClass+"(" + "(\"Error: Bailing out of infinite loop caused by repeated empty string matches " + "at line \" + input_stream.getBeginLine() + \", " + "column \" + input_stream.getBeginColumn() + \".\"), "+errorHandlingClass+".LOOP_DETECTED);"); genCodeLine(" jjemptyLineNo[" + lexStates[i] + "] = input_stream.getBeginLine();"); genCodeLine(" jjemptyColNo[" + lexStates[i] + "] = input_stream.getBeginColumn();"); genCodeLine(" jjbeenHere[" + lexStates[i] + "] = true;"); genCodeLine(" }"); } if ((act = (Action)actions[i]) == null || act.getActionTokens().size() == 0) break; genCode( " image.append"); if (RStringLiteral.allImages[i] != null) { genCodeLine("(jjstrLiteralImages[" + i + "]);"); genCodeLine(" lengthOfMatch = jjstrLiteralImages[" + i + "].length();"); } else { 
genCodeLine("(input_stream.GetSuffix(jjimageLen + (lengthOfMatch = jjmatchedPos + 1)));"); } printTokenSetup((Token)act.getActionTokens().get(0)); ccol = 1; for (int j = 0; j < act.getActionTokens().size(); j++) printToken((Token)act.getActionTokens().get(j)); genCodeLine(""); break; } genCodeLine(" break;"); } genCodeLine(" default :"); genCodeLine(" break;"); genCodeLine(" }"); genCodeLine("}"); } public void DumpMoreActions() { Action act; genCodeLine(staticString + "void MoreLexicalActions()"); genCodeLine("{"); genCodeLine(" jjimageLen += (lengthOfMatch = jjmatchedPos + 1);"); genCodeLine(" switch(jjmatchedKind)"); genCodeLine(" {"); Outer: for (int i = 0; i < maxOrdinal; i++) { if ((toMore[i / 64] & (1L << (i % 64))) == 0L) continue; for (;;) { if (((act = (Action)actions[i]) == null || act.getActionTokens() == null || act.getActionTokens().size() == 0) && !canLoop[lexStates[i]]) continue Outer; genCodeLine(" case " + i + " :"); if (initMatch[lexStates[i]] == i && canLoop[lexStates[i]]) { genCodeLine(" if (jjmatchedPos == -1)"); genCodeLine(" {"); genCodeLine(" if (jjbeenHere[" + lexStates[i] + "] &&"); genCodeLine(" jjemptyLineNo[" + lexStates[i] + "] == input_stream.getBeginLine() &&"); genCodeLine(" jjemptyColNo[" + lexStates[i] + "] == input_stream.getBeginColumn())"); genCodeLine(" throw new "+errorHandlingClass+"(" + "(\"Error: Bailing out of infinite loop caused by repeated empty string matches " + "at line \" + input_stream.getBeginLine() + \", " + "column \" + input_stream.getBeginColumn() + \".\"), "+errorHandlingClass+".LOOP_DETECTED);"); genCodeLine(" jjemptyLineNo[" + lexStates[i] + "] = input_stream.getBeginLine();"); genCodeLine(" jjemptyColNo[" + lexStates[i] + "] = input_stream.getBeginColumn();"); genCodeLine(" jjbeenHere[" + lexStates[i] + "] = true;"); genCodeLine(" }"); } if ((act = (Action)actions[i]) == null || act.getActionTokens().size() == 0) { break; } genCode( " image.append"); if (RStringLiteral.allImages[i] != null) 
genCodeLine("(jjstrLiteralImages[" + i + "]);"); else genCodeLine("(input_stream.GetSuffix(jjimageLen));"); genCodeLine(" jjimageLen = 0;"); printTokenSetup((Token)act.getActionTokens().get(0)); ccol = 1; for (int j = 0; j < act.getActionTokens().size(); j++) printToken((Token)act.getActionTokens().get(j)); genCodeLine(""); break; } genCodeLine(" break;"); } genCodeLine(" default :"); genCodeLine(" break;"); genCodeLine(" }"); genCodeLine("}"); } public void DumpTokenActions() { Action act; int i; genCodeLine(staticString + "void TokenLexicalActions(Token matchedToken)"); genCodeLine("{"); genCodeLine(" switch(jjmatchedKind)"); genCodeLine(" {"); Outer: for (i = 0; i < maxOrdinal; i++) { if ((toToken[i / 64] & (1L << (i % 64))) == 0L) continue; for (;;) { if (((act = (Action)actions[i]) == null || act.getActionTokens() == null || act.getActionTokens().size() == 0) && !canLoop[lexStates[i]]) continue Outer; genCodeLine(" case " + i + " :"); if (initMatch[lexStates[i]] == i && canLoop[lexStates[i]]) { genCodeLine(" if (jjmatchedPos == -1)"); genCodeLine(" {"); genCodeLine(" if (jjbeenHere[" + lexStates[i] + "] &&"); genCodeLine(" jjemptyLineNo[" + lexStates[i] + "] == input_stream.getBeginLine() &&"); genCodeLine(" jjemptyColNo[" + lexStates[i] + "] == input_stream.getBeginColumn())"); genCodeLine(" throw new "+errorHandlingClass+"(" + "(\"Error: Bailing out of infinite loop caused by repeated empty string matches " + "at line \" + input_stream.getBeginLine() + \", " + "column \" + input_stream.getBeginColumn() + \".\"), "+errorHandlingClass+".LOOP_DETECTED);"); genCodeLine(" jjemptyLineNo[" + lexStates[i] + "] = input_stream.getBeginLine();"); genCodeLine(" jjemptyColNo[" + lexStates[i] + "] = input_stream.getBeginColumn();"); genCodeLine(" jjbeenHere[" + lexStates[i] + "] = true;"); genCodeLine(" }"); } if ((act = (Action)actions[i]) == null || act.getActionTokens().size() == 0) break; if (i == 0) { genCodeLine(" image.setLength(0);"); // For EOF no image is there 
} else { genCode( " image.append"); if (RStringLiteral.allImages[i] != null) { genCodeLine("(jjstrLiteralImages[" + i + "]);"); genCodeLine(" lengthOfMatch = jjstrLiteralImages[" + i + "].length();"); } else { genCodeLine("(input_stream.GetSuffix(jjimageLen + (lengthOfMatch = jjmatchedPos + 1)));"); } } printTokenSetup((Token)act.getActionTokens().get(0)); ccol = 1; for (int j = 0; j < act.getActionTokens().size(); j++) printToken((Token)act.getActionTokens().get(j)); genCodeLine(""); break; } genCodeLine(" break;"); } genCodeLine(" default :"); genCodeLine(" break;"); genCodeLine(" }"); genCodeLine("}"); } public static void reInit() { actions = null; allTpsForState = new Hashtable(); canLoop = null; canMatchAnyChar = null; canReachOnMore = null; curKind = 0; curRE = null; defaultLexState = 0; errorHandlingClass = null; hasEmptyMatch = false; hasLoop = false; hasMore = false; hasMoreActions = false; hasNfa = null; hasSkip = false; hasSkipActions = false; hasSpecial = false; hasTokenActions = false; ignoreCase = null; initMatch = null; initStates = new Hashtable(); initialState = null; keepLineCol = false; kinds = null; lexStateIndex = 0; lexStateName = null; lexStateSuffix = null; lexStates = null; maxLexStates = 0; maxLongsReqd = null; maxOrdinal = 1; mixed = null; newLexState = null; rexprs = null; singlesToSkip = null; stateHasActions = null; stateSetSize = 0; staticString = null; toMore = null; toSkip = null; toSpecial = null; toToken = null; tokMgrClassName = null; } }
package org.exist.xmldb;

import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.zip.DataFormatException;
import java.util.zip.Inflater;
import org.apache.xmlrpc.XmlRpcException;
import org.exist.external.org.apache.commons.io.output.ByteArrayOutputStream;
import org.exist.security.Permission;
import org.exist.storage.serializers.EXistOutputKeys;
import org.exist.util.EXistInputSource;
import org.xml.sax.InputSource;
import org.xmldb.api.base.Collection;
import org.xmldb.api.base.ErrorCodes;
import org.xmldb.api.base.Resource;
import org.xmldb.api.base.XMLDBException;

/**
 * Common base for XMLDB resources backed by a remote eXist server accessed
 * over XML-RPC.  A resource's content may live in one of several places, and
 * they are consulted in this order throughout the class: a user-supplied
 * {@link #file}, a user-supplied {@link #inputSource}, a subclass-supplied
 * value object, or a locally cached temp file ({@link #contentFile}) that is
 * filled on demand by {@link #getRemoteContentIntoLocalFile}.
 */
public abstract class AbstractRemoteResource
  implements EXistResource, ExtendedResource, Resource
{
  // Full database path of the document this resource represents.
  protected XmldbURI path = null;
  protected String mimeType = null;
  protected RemoteCollection parent;
  // Locally set content (takes precedence over anything remote).
  protected File file = null;
  // Temp-file cache of content downloaded from the server.
  protected File contentFile = null;
  protected InputSource inputSource = null;
  protected boolean isLocal = false;
  protected long contentLen = 0L;
  protected Permission permissions = null;
  protected Date dateCreated = null;
  protected Date dateModified = null;

  /**
   * @param parent       collection owning this resource
   * @param documentName either a full multi-segment path or a bare name,
   *                     which is resolved against the parent's path
   */
  public AbstractRemoteResource(RemoteCollection parent, XmldbURI documentName)
    throws XMLDBException
  {
    this.parent = parent;
    if (documentName.numSegments() > 1) {
      this.path = documentName;
    } else {
      this.path = parent.getPathURI().append(documentName);
    }
  }

  // Deletes the cached temp file when the resource is garbage collected.
  protected void finalize()
    throws Throwable
  {
    freeLocalResources();
    super.finalize();
  }

  /**
   * Drops all local copies of the content (file reference, input source and
   * the cached temp file, which is deleted from disk).
   */
  public void freeLocalResources()
  {
    file = null;
    inputSource = null;
    if (contentFile != null) {
      contentFile.delete();
      contentFile = null;
    }
    // NOTE(review): after freeing, isLocal is set to true even though no local
    // content remains — presumably deliberate, but confirm against callers of
    // getContent(), which uses this flag to skip the byte[] conversion.
    isLocal = true;
  }

  // Serialization properties are shared with the parent collection.
  protected Properties getProperties()
  {
    return parent.properties;
  }

  /* (non-Javadoc)
   * @see org.xmldb.api.base.Resource#getContent()
   */
  public Object getContent()
    throws XMLDBException
  {
    Object res = getExtendedContent();
    // Backward compatibility: remote content handed out as File/InputSource
    // is materialized to a byte[] for old callers.
    if (isLocal)
      return res;
    if (res != null) {
      if (res instanceof File) {
        return readFile((File)res);
      } else if (res instanceof InputSource) {
        return readFile((InputSource)res);
      }
    }
    return res;
  }

  /* (non-Javadoc)
   * @see org.xmldb.api.base.Resource#getContent()
   */
  // Backward compatibility: always materializes the content as a byte[].
  protected byte[] getData()
    throws XMLDBException
  {
    Object res = getExtendedContent();
    if (res != null) {
      if (res instanceof File) {
        return readFile((File)res);
      } else if (res instanceof InputSource) {
        return readFile((InputSource)res);
      } else if (res instanceof String) {
        try {
          return ((String)res).getBytes("UTF-8");
        } catch (UnsupportedEncodingException uee) {
          throw new XMLDBException(ErrorCodes.VENDOR_ERROR, uee.getMessage(), uee);
        }
      }
    }
    // At this point res is either null or already a byte[].
    return (byte[])res;
  }

  // NOTE(review): truncates the long length to int for the legacy API;
  // use getExtendedContentLength() for files larger than 2 GiB.
  public int getContentLength()
    throws XMLDBException
  {
    return (int)contentLen;
  }

  /* (non-Javadoc)
   * @see org.exist.xmldb.EXistResource#getCreationTime()
   */
  public Date getCreationTime()
    throws XMLDBException
  {
    return dateCreated;
  }

  public long getExtendedContentLength()
    throws XMLDBException
  {
    return contentLen;
  }

  /* (non-Javadoc)
   * @see org.exist.xmldb.EXistResource#getLastModificationTime()
   */
  public Date getLastModificationTime()
    throws XMLDBException
  {
    return dateModified;
  }

  /* (non-Javadoc)
   * @see org.exist.xmldb.EXistResource#getMimeType()
   */
  public String getMimeType()
  {
    return mimeType;
  }

  /* (non-Javadoc)
   * @see org.xmldb.api.base.Resource#getParentCollection()
   */
  public Collection getParentCollection()
    throws XMLDBException
  {
    return parent;
  }

  public Permission getPermissions()
  {
    return permissions;
  }

  /**
   * Accepts File or InputSource content, replacing any previous local copy.
   *
   * @return true if the value was of a recognized type and stored
   */
  protected boolean setContentInternal(Object value)
    throws XMLDBException
  {
    freeLocalResources();
    boolean wasSet = false;
    if (value instanceof File) {
      file = (File) value;
      wasSet = true;
    } else if (value instanceof InputSource) {
      inputSource = (InputSource) value;
      wasSet = true;
    }
    return wasSet;
  }

  public void setContentLength(int len)
  {
    this.contentLen = len;
  }

  /* (non-Javadoc)
   * @see org.exist.xmldb.EXistResource#setMimeType(java.lang.String)
   */
  public void setMimeType(String mime)
  {
    this.mimeType = mime;
  }

  public void setPermissions(Permission perms)
  {
    permissions = perms;
  }

  /**
   * Streams the resource content into {@code localfile}, downloading it from
   * the server first if no local copy exists.
   */
  public void getContentIntoAFile(File localfile)
    throws XMLDBException
  {
    FileOutputStream fos = null;
    BufferedOutputStream bos = null;
    try {
      fos = new FileOutputStream(localfile);
      bos = new BufferedOutputStream(fos);
      getContentIntoAStream(bos);
    } catch (IOException ioe) {
      throw new XMLDBException(ErrorCodes.VENDOR_ERROR, ioe.getMessage(), ioe);
    } finally {
      if (bos != null) {
        try {
          bos.close();
        } catch (IOException ioe) {
          // IgnoreIT(R)
        }
      }
      if (fos != null) {
        try {
          fos.close();
        } catch (IOException ioe) {
          // IgnoreIT(R)
        }
      }
    }
  }

  /**
   * Downloads the document from the server in chunks via XML-RPC, writing it
   * both to a fresh temp file (cached in {@link #contentFile}) and, when
   * {@code os} is non-null, to the supplied stream.
   *
   * Observed chunk protocol: the first call ("retrieveFirstChunk" or
   * "getDocumentData") returns a map with "data", "offset", "handle" and an
   * optional "supports-long-offset" flag; follow-up chunks are fetched with
   * "getNextChunk"/"getNextExtendedChunk" until offset reaches 0.
   * NOTE(review): assumes the server honours this contract — confirm against
   * the corresponding RPC handler.
   *
   * @param os         optional extra sink for the downloaded bytes (may be null)
   * @param isRetrieve true to use the retrieveFirstChunk/handle/pos form
   * @param handle     server-side handle (used only when isRetrieve)
   * @param pos        position within the handle (used only when isRetrieve)
   */
  protected void getRemoteContentIntoLocalFile(OutputStream os, boolean isRetrieve, int handle, int pos)
    throws XMLDBException
  {
    FileOutputStream fos = null;
    BufferedOutputStream bos = null;
    Properties properties = getProperties();
    String command = null;
    List<Object> params = new ArrayList<Object>();
    if (isRetrieve) {
      command = "retrieveFirstChunk";
      params.add(new Integer(handle));
      params.add(new Integer(pos));
    } else {
      command = "getDocumentData";
      params.add(path.toString());
    }
    if (properties == null)
      properties = new Properties();
    params.add(properties);
    try {
      File tmpfile = File.createTempFile("eXistARR", getResourceType().equals("XMLResource") ? ".xml" : ".bin");
      tmpfile.deleteOnExit();
      fos = new FileOutputStream(tmpfile);
      bos = new BufferedOutputStream(fos);
      Map<?,?> table = (Map<?,?>) parent.getClient().execute(command, params);
      String method;
      boolean useLongOffset;
      if (table.containsKey("supports-long-offset") && (Boolean)(table.get("supports-long-offset"))) {
        useLongOffset = true;
        method = "getNextExtendedChunk";
      } else {
        useLongOffset = false;
        method = "getNextChunk";
      }
      long offset = ((Integer)table.get("offset")).intValue();
      byte[] data = (byte[])table.get("data");
      boolean isCompressed = properties.getProperty(EXistOutputKeys.COMPRESS_OUTPUT, "no").equals("yes");
      // One for the local cached file
      Inflater dec = null;
      byte[] decResult = null;
      int decLength = 0;
      if (isCompressed) {
        // Server sent deflate-compressed chunks; inflate incrementally.
        dec = new Inflater();
        decResult = new byte[65536];
        dec.setInput(data);
        do {
          decLength = dec.inflate(decResult);
          bos.write(decResult, 0, decLength);
          // And other for the stream where we want to save it!
          if (os != null)
            os.write(decResult, 0, decLength);
        } while (decLength == decResult.length || !dec.needsInput());
      } else {
        bos.write(data);
        // And other for the stream where we want to save it!
        if (os != null)
          os.write(data);
      }
      // Fetch remaining chunks until the server reports offset 0.
      while (offset > 0) {
        params.clear();
        params.add(table.get("handle"));
        // Long offsets travel as decimal strings; int offsets as Integers.
        params.add(useLongOffset ? Long.toString(offset) : new Integer((int)offset));
        table = (Map<?,?>) parent.getClient().execute(method, params);
        offset = useLongOffset ? new Long((String)table.get("offset")).longValue() : ((Integer)table.get("offset")).longValue();
        data = (byte[])table.get("data");
        // One for the local cached file
        if (isCompressed) {
          dec.setInput(data);
          do {
            decLength = dec.inflate(decResult);
            bos.write(decResult, 0, decLength);
            // And other for the stream where we want to save it!
            if (os != null)
              os.write(decResult, 0, decLength);
          } while (decLength == decResult.length || !dec.needsInput());
        } else {
          bos.write(data);
          // And other for the stream where we want to save it!
          if (os != null)
            os.write(data);
        }
      }
      if (dec != null)
        dec.end();
      isLocal = false;
      contentFile = tmpfile;
    } catch (XmlRpcException xre) {
      throw new XMLDBException(ErrorCodes.INVALID_RESOURCE, xre.getMessage(), xre);
    } catch (IOException ioe) {
      throw new XMLDBException(ErrorCodes.VENDOR_ERROR, ioe.getMessage(), ioe);
    } catch (DataFormatException dfe) {
      throw new XMLDBException(ErrorCodes.VENDOR_ERROR, dfe.getMessage(), dfe);
    } finally {
      if (bos != null) {
        try {
          bos.close();
        } catch (IOException ioe) {
          //IgnoreIT(R)
        }
      }
      if (fos != null) {
        try {
          fos.close();
        } catch (IOException ioe) {
          //IgnoreIT(R)
        }
      }
    }
  }

  /**
   * Wraps a String (UTF-8 encoded) or byte[] value in an InputStream.
   *
   * @throws XMLDBException for any other value type
   */
  protected static InputStream getAnyStream(Object obj)
    throws XMLDBException
  {
    InputStream bis = null;
    if (obj instanceof String) {
      try {
        bis = new ByteArrayInputStream(((String)obj).getBytes("UTF-8"));
      } catch (UnsupportedEncodingException uee) {
        throw new XMLDBException(ErrorCodes.VENDOR_ERROR, uee.getMessage(), uee);
      }
    } else if (obj instanceof byte[]) {
      bis = new ByteArrayInputStream((byte[])obj);
    } else {
      throw new XMLDBException(ErrorCodes.VENDOR_ERROR, "don't know how to handle value of type " + obj.getClass().getName());
    }
    return bis;
  }

  /**
   * Copies the resource content into {@code os}, preferring local sources
   * (file, input source, value object, cached temp file) and falling back to
   * a remote download that simultaneously fills the local cache.
   */
  protected void getContentIntoAStreamInternal(OutputStream os, Object obj, boolean isRetrieve, int handle, int pos)
    throws XMLDBException
  {
    if (file != null || contentFile != null || inputSource != null || obj != null) {
      FileInputStream fis = null;
      InputStream bis = null;
      try {
        // First, the local content, then the remote one!!!!
        if (file != null) {
          fis = new FileInputStream(file);
          bis = new BufferedInputStream(fis, 655360);
        } else if (inputSource != null) {
          bis = inputSource.getByteStream();
        } else if (obj != null) {
          bis = getAnyStream(obj);
        } else {
          fis = new FileInputStream(contentFile);
          bis = new BufferedInputStream(fis, 655360);
        }
        int readed;
        byte buffer[] = new byte[65536];
        while ((readed = bis.read(buffer)) != -1) {
          os.write(buffer, 0, readed);
        }
      } catch (IOException ioe) {
        throw new XMLDBException(ErrorCodes.VENDOR_ERROR, ioe.getMessage(), ioe);
      } finally {
        if (inputSource != null) {
          if (bis != null) {
            // As it comes from an input source, we cannot blindly close it,
            // but at least let's reset it! (if it is possible)
            if (bis.markSupported()) {
              try {
                bis.reset();
              } catch (IOException ioe) {
                //IgnoreIT(R)
              }
            }
          }
        } else {
          if (bis != null) {
            try {
              bis.close();
            } catch (IOException ioe) {
              //IgnoreIT(R)
            }
          }
          if (fis != null) {
            try {
              fis.close();
            } catch (IOException ioe) {
              //IgnoreIT(R)
            }
          }
        }
      }
    } else {
      // Let's fetch it, and save just in time!!!
      getRemoteContentIntoLocalFile(os, isRetrieve, handle, pos);
    }
  }

  /**
   * Returns the content in its most direct local form (value object, File or
   * InputSource), downloading to the temp-file cache only when nothing local
   * exists yet.
   */
  protected Object getExtendedContentInternal(Object obj, boolean isRetrieve, int handle, int pos)
    throws XMLDBException
  {
    if (obj != null)
      return obj;
    if (file != null)
      return file;
    if (inputSource != null)
      return inputSource;
    if (contentFile == null)
      getRemoteContentIntoLocalFile(null, isRetrieve, handle, pos);
    return contentFile;
  }

  /**
   * Returns an InputStream over the content, consulting sources in the same
   * local-first order as {@link #getContentIntoAStreamInternal}.
   */
  protected InputStream getStreamContentInternal(Object obj, boolean isRetrieve, int handle, int pos)
    throws XMLDBException
  {
    InputStream retval = null;
    if (file != null) {
      try {
        retval = new FileInputStream(file);
      } catch (FileNotFoundException fnfe) {
        throw new XMLDBException(ErrorCodes.VENDOR_ERROR, fnfe.getMessage(), fnfe);
      }
    } else if (inputSource != null) {
      retval = inputSource.getByteStream();
    } else if (obj != null) {
      retval = getAnyStream(obj);
    } else {
      // At least one value, please!!!
      if (contentFile == null)
        getRemoteContentIntoLocalFile(null, isRetrieve, handle, pos);
      try {
        retval = new FileInputStream(contentFile);
      } catch (FileNotFoundException fnfe) {
        throw new XMLDBException(ErrorCodes.VENDOR_ERROR, fnfe.getMessage(), fnfe);
      }
    }
    return retval;
  }

  /**
   * Computes the content length from whichever source is available; as a
   * last resort asks the server via "describeResource".
   *
   * @return length in bytes
   */
  protected long getStreamLengthInternal(Object obj)
    throws XMLDBException
  {
    long retval = -1;
    if (file != null) {
      retval = file.length();
    } else if (inputSource != null && inputSource instanceof EXistInputSource) {
      retval = ((EXistInputSource)inputSource).getByteStreamLength();
    } else if (obj != null) {
      if (obj instanceof String) {
        try {
          retval = ((String)obj).getBytes("UTF-8").length;
        } catch (UnsupportedEncodingException uee) {
          throw new XMLDBException(ErrorCodes.VENDOR_ERROR, uee.getMessage(), uee);
        }
      } else if (obj instanceof byte[]) {
        retval = ((byte[])obj).length;
      } else {
        throw new XMLDBException(ErrorCodes.VENDOR_ERROR, "don't know how to handle value of type " + obj.getClass().getName());
      }
    } else if (contentFile != null) {
      retval = contentFile.length();
    } else {
      Properties properties = getProperties();
      List<Object> params = new ArrayList<Object>();
      params.add(path.toString());
      params.add(properties);
      try {
        Map<?,?> table = (Map<?,?>) parent.getClient().execute("describeResource", params);
        retval = ((Integer)table.get("content-length")).intValue();
      } catch (XmlRpcException xre) {
        throw new XMLDBException(ErrorCodes.INVALID_RESOURCE, xre.getMessage(), xre);
      }
    }
    return retval;
  }

  // Reads a whole file into memory (content sizes are assumed to fit).
  private static byte[] readFile(File file)
    throws XMLDBException
  {
    String errmsg = "file " + file.getAbsolutePath();
    InputStream is = null;
    try {
      is = new FileInputStream(file);
      return readFile(is, errmsg);
    } catch (FileNotFoundException e) {
      throw new XMLDBException(ErrorCodes.VENDOR_ERROR, errmsg + " could not be found", e);
    } finally {
      if (is != null) {
        try {
          is.close();
        } catch (IOException ioe) {
          //ignore(ioe);
        }
      }
    }
  }

  // Reads an InputSource's byte stream into memory, labelling errors with
  // the source's symbolic path when available.
  private static byte[] readFile(InputSource is)
    throws XMLDBException
  {
    String retval = "<streamunknown>";
    if (is instanceof EXistInputSource) {
      retval = ((EXistInputSource)is).getSymbolicPath();
    }
    return readFile(is.getByteStream(), "input source " + retval);
  }

  // Drains a stream into a byte[].  NOTE(review): the stream is not closed
  // here — callers own it (readFile(File) closes its own stream).
  private static byte[] readFile(InputStream is, String errmsg)
    throws XMLDBException
  {
    if (errmsg == null)
      errmsg = "stream";
    try {
      ByteArrayOutputStream bos = new ByteArrayOutputStream(2048);
      byte[] temp = new byte[1024];
      int count = 0;
      while ((count = is.read(temp)) > -1) {
        bos.write(temp, 0, count);
      }
      return bos.toByteArray();
    } catch (IOException e) {
      throw new XMLDBException(ErrorCodes.VENDOR_ERROR, "IO exception while reading " + errmsg, e);
    }
  }

  protected void setDateCreated(Date dateCreated)
  {
    this.dateCreated = dateCreated;
  }

  protected void setDateModified(Date dateModified)
  {
    this.dateModified = dateModified;
  }
}
package org.ensembl.healthcheck.testcase.eg_core; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.List; import org.ensembl.healthcheck.DatabaseRegistryEntry; import org.ensembl.healthcheck.ReportManager; import org.ensembl.healthcheck.Team; import org.ensembl.healthcheck.testcase.EnsTestCase; import org.ensembl.healthcheck.testcase.eg_compara.AbstractControlledRows; import org.ensembl.healthcheck.util.SqlTemplate; import org.ensembl.healthcheck.util.SqlUncheckedException; import org.ensembl.healthcheck.util.SqlTemplate.ResultSetCallback; /** * * Checks whether all toplevel sequences in a core database are present as * dnafrag regions in the compara master database, if the genome is in * the compara master database. * * @author mnuhn * */ public class SeqRegionsConsistentWithComparaMaster extends AbstractControlledRows { final int reportMaxMissingRows = 20; protected Connection testDbConn; protected SqlTemplate sqlTemplateTestDb; protected void init(DatabaseRegistryEntry dbre) { super.init(); testDbConn = dbre.getConnection(); sqlTemplateTestDb = getSqlTemplate(testDbConn); setTeamResponsible(Team.ENSEMBL_GENOMES); } @Override protected boolean runTest(DatabaseRegistryEntry dbre) { init(dbre); List<Integer> allSpeciesIds = sqlTemplateTestDb.queryForDefaultObjectList( "select distinct species_id from meta where species_id is not null", Integer.class ); if (allSpeciesIds.size() == 0) { ReportManager.problem(this, testDbConn, "No species configured!"); } boolean allSpeciesPassed = true; for(int speciesId : allSpeciesIds) { allSpeciesPassed &= runTestForSpecies(dbre, speciesId); } return allSpeciesPassed; } protected boolean runTestForSpecies(DatabaseRegistryEntry dbre, int speciesId) { String productionName = fetchSingleMetaValueFor(sqlTemplateTestDb, speciesId, "species.production_name"); String assemblyDefault = fetchSingleMetaValueFor(sqlTemplateTestDb, speciesId, 
"assembly.default"); String genebuildStartDate = fetchSingleMetaValueFor(sqlTemplateTestDb, speciesId, "genebuild.start_date"); if (!speciesConfiguredForDnaCompara(productionName)) { getLogger().info("Skipping species " + productionName + ", because it is not linked to any method involving DNA comparisons in the compara master."); return true; } getLogger().info("Testing species " + productionName); boolean hasEntryInMasterDb = fetchHasGenomeDbId( productionName, assemblyDefault, genebuildStartDate ); if (!hasEntryInMasterDb) { ReportManager.correct(this, testDbConn, "Species " + productionName + " has no genome_db entry in the master database."); return true; } int genomeDbId = fetchGenomeDbId( productionName, assemblyDefault, genebuildStartDate ); int toplevelSeqRegionCount = fetchToplevelSeqRegionCount(); int dnaFragRowCountFor = fetchDnaFragRowCountFor(genomeDbId); boolean sequenceCountsOk = toplevelSeqRegionCount == dnaFragRowCountFor; if (sequenceCountsOk) { ReportManager.correct(this, testDbConn, "Sequence counts for this " + "species are " + toplevelSeqRegionCount + " both in " + "core and compara master database."); } else { ReportManager.problem(this, testDbConn, "Sequence counts for this " + "species are " + toplevelSeqRegionCount + " toplevel " + "sequence regions in the core database and " + dnaFragRowCountFor + " dna frags in the compara " + "master database. The counts should be equal.\n" + "This can happen, if the assembly has been changed, " + "but the assembly.default entry in the meta table has " + "not been changed. In that case the dna_frag table in " + "the compara master database is not updated " + "by the populate_mdb.pl script." 
); } boolean allToplevelSeqRegionInDnaFragTable = assertToplevelSeqRegionInDnaFragTable(genomeDbId); return sequenceCountsOk && allToplevelSeqRegionInDnaFragTable; } protected int fetchToplevelSeqRegionCount() { List<Integer> numSeqRegionsList = sqlTemplateTestDb.queryForDefaultObjectList( "select count(*) from seq_region join seq_region_attrib using (seq_region_id) join attrib_type using (attrib_type_id) where code='toplevel'", Integer.class); assertLengthIsOne(numSeqRegionsList); Integer numSeqRegions = numSeqRegionsList.get(0); return numSeqRegions; } protected class SeqRegionData { public int seq_region_id; public String seq_region_name; public int seq_region_length; public String coord_system_name; public String toString() { return "seq_region_id = " + seq_region_id + "\n" + "seq_region.name = " + seq_region_name + "\n" + "seq_region.length = " + seq_region_length + "\n" + "coord_system.name = " + coord_system_name ; } } protected boolean assertToplevelSeqRegionInDnaFragTable(final int genomeDbId) { final EnsTestCase thisTest = this; Boolean allRowsExistInDnaFragTable = sqlTemplateTestDb.execute( "select" + " seq_region.seq_region_id, " + " seq_region.name, " + " seq_region.length, " + " coord_system.name " + "from " + " seq_region join seq_region_attrib using (seq_region_id) " + " join attrib_type using (attrib_type_id) " + " join coord_system using (coord_system_id) " + "where " + "code='toplevel' ", new ResultSetCallback<Boolean>() { @Override public Boolean process(ResultSet rs) throws SQLException { SeqRegionData seqRegionData = new SeqRegionData(); int missingRows = 0; boolean allRowsExistInDnaFragTable = true; while (rs.next()) { seqRegionData.seq_region_id = rs.getInt(1); seqRegionData.seq_region_name = rs.getString(2); seqRegionData.seq_region_length = rs.getInt(3); seqRegionData.coord_system_name = rs.getString(4); int numCorrespondingRowsInDnaFragTable = fetchNumCorrespondingRowsInDnaFragTable( seqRegionData, genomeDbId ); boolean 
currentRowExistsInDnaFragTable = false; if (numCorrespondingRowsInDnaFragTable == 1) { currentRowExistsInDnaFragTable = true; } if (numCorrespondingRowsInDnaFragTable == 0) { ReportManager.problem(thisTest, testDbConn, "The following seq region is not in the dnafrag table in the master database:\n" + seqRegionData); ReportManager.problem(thisTest, testDbConn, "The seq region that comes up with this sql in the core database:\n\n" + createUsefulSqlCore(seqRegionData) + "\n\n" + "should come up with this sql:\n\n" + createUsefulSqlMaster(seqRegionData, genomeDbId) ); missingRows++; if (missingRows>=reportMaxMissingRows) { ReportManager.problem(thisTest, testDbConn, "No more rows will be reported, because the maximum of " + reportMaxMissingRows + " has been reached."); return false; } currentRowExistsInDnaFragTable = false; } if (numCorrespondingRowsInDnaFragTable > 1) { throw new RuntimeException("Unexpected value for numCorrespondingRowsInDnaFragTable:" + numCorrespondingRowsInDnaFragTable); } allRowsExistInDnaFragTable &= currentRowExistsInDnaFragTable; } return allRowsExistInDnaFragTable; } }, // No bound parameters new Object[0] ); return allRowsExistInDnaFragTable; } protected String createUsefulSqlMaster(final SeqRegionData seqRegionData, final int genomeDbId) { return "select * " + "from dnafrag " + "where genome_db_id = "+genomeDbId+" " + "and name = '" + seqRegionData.seq_region_name + "' " + "and length = " + seqRegionData.seq_region_length + " " + "and coord_system_name='" + seqRegionData.coord_system_name + "'"; } protected String createUsefulSqlCore(final SeqRegionData seqRegionData) { return "select \n" + " seq_region.seq_region_id, \n" + " seq_region.name, \n" + " seq_region.length, \n" + " coord_system.name \n" + "from \n" + " seq_region join seq_region_attrib using (seq_region_id) \n" + " join attrib_type using (attrib_type_id) \n" + " join coord_system using (coord_system_id) \n" + "where \n" + " code='toplevel' \n" + " and 
seq_region_id="+seqRegionData.seq_region_id+"; \n" ; } protected int fetchNumCorrespondingRowsInDnaFragTable( final SeqRegionData seqRegionData, final int genomeDbId ) { List<Integer> numSeqRegionsInDnaFragTableList = masterSqlTemplate.queryForDefaultObjectList( "select " + " count(*) " + "from " + " dnafrag " + "where " + " genome_db_id=?" + " and name=?" + " and length=?" + " and coord_system_name=?", Integer.class, genomeDbId, seqRegionData.seq_region_name, seqRegionData.seq_region_length, seqRegionData.coord_system_name ); assertLengthIsOne(numSeqRegionsInDnaFragTableList); return numSeqRegionsInDnaFragTableList.get(0); } protected void assertLengthIsOne(List<?> list) { if (list.size()>1) { throw new RuntimeException("Got more than one return value. Expected only one!"); } if (list.size()==0) { throw new RuntimeException("Got no return value!"); } } protected int fetchDnaFragRowCountFor(int genomeDbId) { List<Integer> metaValueList = masterSqlTemplate.queryForDefaultObjectList( "select count(*) from dnafrag where genome_db_id=" + genomeDbId, Integer.class); assertLengthIsOne(metaValueList); return metaValueList.get(0); } /** * @param productionName * @param assemblyDefault * @param genebuildStartDate * @param masterDbRe */ protected int fetchGenomeDbId( String productionName, String assemblyDefault, String genebuildStartDate) { ResultSet rs = fetchFromGenomeDbId(productionName, assemblyDefault, genebuildStartDate, "genome_db_id"); int genomeDbId; try { boolean hasResult = rs.next(); if (!hasResult) { throw new RuntimeException("Can't fetch Species " + productionName + " from genome_db table!"); } genomeDbId = rs.getInt(1); } catch (SQLException e) { throw new RuntimeException(e); } return genomeDbId; } /** * Check in master database, if there is a genome_db entry for this * species. 
*/ protected boolean fetchHasGenomeDbId( String productionName, String assemblyDefault, String genebuildStartDate) { ResultSet rs = fetchFromGenomeDbId(productionName, assemblyDefault, genebuildStartDate, "count(genome_db_id)"); int genomeDbId; try { boolean hasResult = rs.next(); if (!hasResult) { throw new RuntimeException("Can't count rows on genome_db table!"); } genomeDbId = rs.getInt(1); } catch (SQLException e) { throw new RuntimeException(e); } if (genomeDbId==1) { return true; } if (genomeDbId==0) { return false; } throw new RuntimeException("Unexpected number of matching rows for " + productionName + "in master database!"); } protected ResultSet fetchFromGenomeDbId( String productionName, String assemblyDefault, String genebuildStartDate, String column ) { String sql = "select "+column+" from genome_db where name=? and assembly=? and genebuild=?"; ResultSet rs = null; try { PreparedStatement stmt = this.masterDbConn.prepareStatement(sql); stmt.setString(1, productionName); stmt.setString(2, assemblyDefault); stmt.setString(3, genebuildStartDate); rs = stmt.executeQuery(); } catch (SQLException e) { throw new SqlUncheckedException(e.getMessage()); } return rs; } /** * @param sqlTemplateTestDb * @param metaKey * @return */ protected String fetchSingleMetaValueFor( final SqlTemplate sqlTemplateTestDb, int speciesId, String metaKey ) { String sql = "select meta_value from meta where meta.meta_key = '"+metaKey+"' and species_id="+speciesId; List<String> metaValueList = sqlTemplateTestDb.queryForDefaultObjectList( sql, String.class ); if (metaValueList.size()>1) { throw new RuntimeException("Got more than one meta_value for metaKey "+metaKey+". Expected only one!\n"+sql); } if (metaValueList.size()==0) { throw new RuntimeException("Metakey "+metaKey+" is missing in the meta table!\n"+sql); } String metaValue = metaValueList.get(0); return metaValue; } }
package br.usp.each.saeg.commons.io;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.LinkedList;
import java.util.List;

/**
 * Static helpers for listing files and copying streams. Not instantiable.
 */
public final class Files {

    private static final int BUFFER_SIZE = 4096;

    private static final int EOF = -1;

    private Files() {
        // no instances
    }

    /**
     * Recursively lists the entries under {@code dir} accepted by {@code filter}.
     * For each directory, its own matching entries are returned before the
     * matches found in its subdirectories (same order as the original).
     *
     * @param dir    root directory to scan
     * @param filter applied to every entry (files and directories alike)
     * @return matching files, possibly empty
     * @throws RuntimeException if {@code dir} is not a directory or cannot be listed
     */
    public static List<File> listRecursive(final File dir, final FilenameFilter filter) {
        if (!dir.isDirectory())
            throw new RuntimeException(dir + " is not a directory");

        // Snapshot the directory once: File.listFiles returns null if the
        // directory vanishes or an I/O error occurs, which previously caused a
        // NullPointerException; listing once also avoids a second scan per level.
        final File[] entries = dir.listFiles();
        if (entries == null)
            throw new RuntimeException(dir + " could not be listed");

        final List<File> files = new LinkedList<File>();
        for (final File f : entries) {
            // Same semantics as dir.listFiles(filter).
            if (filter.accept(dir, f.getName())) {
                files.add(f);
            }
        }
        for (final File f : entries) {
            if (f.isDirectory()) {
                files.addAll(listRecursive(f, filter));
            }
        }
        return files;
    }

    /**
     * Reads {@code input} to exhaustion and returns its contents.
     * The stream is not closed.
     *
     * @throws IOException if reading fails
     */
    public static byte[] toByteArray(final InputStream input) throws IOException {
        final ByteArrayOutputStream output = new ByteArrayOutputStream();
        copy(input, output);
        return output.toByteArray();
    }

    /**
     * Copies {@code input} to {@code output} and flushes the target.
     * Neither stream is closed.
     *
     * @throws IOException if reading or writing fails
     */
    public static void copy(final InputStream input, final OutputStream output) throws IOException {
        final byte[] buffer = new byte[BUFFER_SIZE];
        int n = input.read(buffer);
        while (EOF != n) {
            output.write(buffer, 0, n);
            n = input.read(buffer);
        }
        output.flush();
    }
}
package think.rpgitems.power.impl; import org.bukkit.*; import org.bukkit.block.Block; import org.bukkit.entity.ArmorStand; import org.bukkit.entity.Entity; import org.bukkit.entity.LivingEntity; import org.bukkit.entity.Player; import org.bukkit.event.entity.EntityDamageByEntityEvent; import org.bukkit.event.entity.EntityDamageEvent; import org.bukkit.event.entity.EntityShootBowEvent; import org.bukkit.event.player.PlayerInteractEvent; import org.bukkit.event.player.PlayerToggleSneakEvent; import org.bukkit.event.player.PlayerToggleSprintEvent; import org.bukkit.inventory.ItemStack; import org.bukkit.scheduler.BukkitRunnable; import org.bukkit.util.BoundingBox; import org.bukkit.util.RayTraceResult; import org.bukkit.util.Vector; import org.librazy.nclangchecker.LangKey; import think.rpgitems.RPGItems; import think.rpgitems.data.LightContext; import think.rpgitems.power.*; import java.util.*; import java.util.logging.Level; import java.util.stream.Collectors; import java.util.stream.Stream; import static think.rpgitems.Events.*; import static think.rpgitems.power.Utils.checkCooldown; /** * @Author ReinWD * @email ReinWDD@gmail.com * Wrote & Maintained by ReinWD * if you have any issue, please send me email or @ReinWD in issues. * Accepted language: , English. 
*/ @PowerMeta(defaultTrigger = "RIGHT_CLICK", generalInterface = PowerPlain.class) public class PowerBeam extends BasePower implements PowerPlain, PowerRightClick, PowerLeftClick, PowerSneak, PowerSneaking, PowerSprint, PowerBowShoot, PowerHitTaken, PowerHit, PowerHurt { @Property public int length = 10; @Property public Particle particle = Particle.LAVA; @Property public int amount = 200; @Property public Mode mode = Mode.BEAM; @Property public boolean pierce = true; @Property public boolean ignoreWall = true; @Property public double damage = 20; @Property public int movementTicks = 40; @Property public double offsetX = 0; @Property public double offsetY = 0; @Property public double offsetZ = 0; @Property public double spawnsPerBlock = 2; double lengthPerSpawn = 1 / spawnsPerBlock; /** * Cost of this power */ @Property public int cost = 0; /** * Cooldown time of this power */ @Property public long cooldown = 0; @Property public boolean cone = false; @Property public double coneRange = 30; @Property public boolean homing = false; @Property public double homingAngle = 1; @Property public double homingRange = 30; @Property public HomingTargetMode homingTargetMode = HomingTargetMode.ONE_TARGET; @Property public Target homingTarget = Target.MOBS; @Property public int stepsBeforeHoming = 5; @Property public int burstCount = 1; @Property public int beamAmount = 1; @Property public int burstInterval = 1; @Property public int bounce = 0; @Property public boolean hitSelfWhenBounced = false; @Property public double gravity = 0; @Property @Serializer(ExtraDataSerializer.class) @Deserializer(ExtraDataSerializer.class) public Object extraData; @Property public double speed = 0; @Property public boolean requireHurtByEntity = true; /** * Whether to suppress the hit trigger */ @Property public boolean suppressMelee = false; private Set<Material> transp = Stream.of(Material.values()) .filter(material -> material.isBlock()) .filter(material -> !material.isSolid() || 
!material.isOccluding()) .collect(Collectors.toSet()); @Override public PowerResult<Void> fire(Player player, ItemStack stack) { if (!checkCooldown(this, player, cooldown, true, true)) return PowerResult.cd(); if (!getItem().consumeDurability(stack, cost)) return PowerResult.cost(); return beam(player, stack); } @Override public @LangKey(skipCheck = true) String getName() { return "beam"; } @Override public String displayText() { return null; } @Override public PowerResult<Void> leftClick(Player player, ItemStack stack, PlayerInteractEvent event) { return fire(player, stack); } @Override public PowerResult<Void> rightClick(Player player, ItemStack stack, PlayerInteractEvent event) { return fire(player, stack); } @Override public PowerResult<Void> sneak(Player player, ItemStack stack, PlayerToggleSneakEvent event) { return fire(player, stack); } @Override public PowerResult<Void> sneaking(Player player, ItemStack stack) { return fire(player, stack); } @Override public PowerResult<Void> sprint(Player player, ItemStack stack, PlayerToggleSprintEvent event) { return fire(player, stack); } @Override public PowerResult<Float> bowShoot(Player player, ItemStack itemStack, EntityShootBowEvent e) { return fire(player, itemStack).with(e.getForce()); } @Override public PowerResult<Double> hit(Player player, ItemStack stack, LivingEntity entity, double damage, EntityDamageByEntityEvent event) { return fire(player, stack).with(event.getDamage()); } @Override public PowerResult<Double> takeHit(Player target, ItemStack stack, double damage, EntityDamageEvent event) { if (!requireHurtByEntity || event instanceof EntityDamageByEntityEvent) { return fire(target, stack).with(event.getDamage()); } return PowerResult.noop(); } private PowerResult<Void> beam(LivingEntity from, ItemStack stack) { if (burstCount > 0) { for (int i = 0; i < burstCount; i++) { new BukkitRunnable() { @Override public void run() { if (cone) { for (int j = 0; j < beamAmount; j++) { internalFireBeam(from, stack); 
} } else { internalFireBeam(from, stack); } } }.runTaskLaterAsynchronously(RPGItems.plugin, i * burstInterval); } return PowerResult.ok(); } else { return internalFireBeam(from, stack); } } final Vector crosser = new Vector(1, 1, 1); private PowerResult<Void> internalFireBeam(LivingEntity from, ItemStack stack) { lengthPerSpawn = 1 / spawnsPerBlock; Location fromLocation = from.getEyeLocation(); Vector towards = from.getEyeLocation().getDirection(); if (cone) { double phi = random.nextDouble() * 360; double theta; if (coneRange > 0) { theta = random.nextDouble() * coneRange; Vector clone = towards.clone(); Vector cross = clone.clone().add(crosser); Vector vertical = clone.getCrossProduct(cross).getCrossProduct(towards); towards.rotateAroundAxis(vertical, Math.toRadians(theta)); towards.rotateAroundAxis(clone, Math.toRadians(phi)); } } Entity target = null; if (from instanceof Player && homing) { target = getNextTarget(from.getEyeLocation().getDirection(), fromLocation, from); // Utils.getLivingEntitiesInCone(Utils.getNearestLivingEntities(this, fromLocation, ((Player) from), Math.min(1000, length), 0), fromLocation.toVector(), homingRange, from.getEyeLocation().getDirection()).stream() // .filter(livingEntity -> { // switch (homingTarget) { // case MOBS: // return !(livingEntity instanceof Player); // case PLAYERS: // return livingEntity instanceof Player && !((Player) livingEntity).getGameMode().equals(GameMode.SPECTATOR); // case ALL: // return !(livingEntity instanceof Player) || !((Player) livingEntity).getGameMode().equals(GameMode.SPECTATOR); // return true; // .findFirst().orElse(null); } switch (mode) { case BEAM: new PlainTask(from, towards, amount, length, target, bounce, stack).runTask(RPGItems.plugin); break; case PROJECTILE: new MovingTask(from, towards, amount, length, target, bounce, stack).runTask(RPGItems.plugin); break; } return PowerResult.ok(); } private Random random = new Random(); private Vector yUnit = new Vector(0, 1, 0); @Override public 
PowerResult<Void> hurt(Player target, ItemStack stack, EntityDamageEvent event) { if (!requireHurtByEntity || event instanceof EntityDamageByEntityEvent) { return fire(target, stack); } return PowerResult.noop(); } class PlainTask extends BukkitRunnable { private int bounces; private double length; private final ItemStack stack; private LivingEntity from; private Vector towards; private final int apS; private Entity target; boolean bounced = false; public PlainTask(LivingEntity from, Vector towards, int amount, double actualLength, Entity target, int bounces, ItemStack stack) { this.from = from; this.towards = towards; this.length = actualLength; this.stack = stack; this.apS = amount / ((int) Math.floor(actualLength)); this.target = target; this.bounces = bounces; } @Override public void run() { World world = from.getWorld(); towards.normalize(); Location lastLocation = from.getEyeLocation(); double lpT = length / ((double) movementTicks); double partsPerTick = lpT / lengthPerSpawn; for (int i = 0; i < movementTicks; i++) { boolean isStepHit = false; Vector step = new Vector(0, 0, 0); for (int j = 0; j < partsPerTick; j++) { boolean isHit = tryHit(from, lastLocation, stack, bounced && hitSelfWhenBounced); isStepHit = isHit || isStepHit; Block block = lastLocation.getBlock(); if (transp.contains(block.getType())) { spawnParticle(from, world, lastLocation, (int) Math.ceil(apS / partsPerTick)); } else if (!ignoreWall) { if (bounces > 0) { bounces bounced = true; makeBounce(block, towards, lastLocation.clone().subtract(step)); } else { LightContext.removeTemp(from.getUniqueId(), DAMAGE_SOURCE_ITEM); return; } } step = towards.clone().normalize().multiply(lengthPerSpawn); lastLocation.add(step); towards = addGravity(towards, partsPerTick); towards = homingCorrect(towards, lastLocation, target, i, () -> target = getNextTarget(from.getEyeLocation().getDirection(), from.getEyeLocation(), from)); } if (isStepHit && homingTargetMode.equals(HomingTargetMode.MULTI_TARGET)) { 
target = getNextTarget(from.getEyeLocation().getDirection(), from.getEyeLocation(), from); } if (isStepHit && !pierce) { LightContext.clear(); return; } } LightContext.clear(); } } Vector gravityVector = new Vector(0, -gravity / 20, 0); private Vector addGravity(Vector towards, double partsPerTick) { double gravityPerTick = (-gravity / 20d) / partsPerTick; gravityVector.setY(gravityPerTick); return towards.add(gravityVector); } private class MovingTask extends BukkitRunnable { private final LivingEntity from; private int bounces; private Vector towards; private final ItemStack stack; private final int amountPerSec; private final List<BukkitRunnable> runnables = new LinkedList<>(); private Entity target; boolean bounced = false; public MovingTask(LivingEntity from, Vector towards, int apS, double actualLength, Entity target, int bounces, ItemStack stack) { this.from = from; this.towards = towards; this.stack = stack; this.amountPerSec = apS / ((int) Math.floor(actualLength)); this.target = target; this.bounces = bounces; } @Override public void run() { World world = from.getWorld(); double lpT = ((double) length) / ((double) movementTicks); double partsPerTick = lpT / lengthPerSpawn; Location lastLocation = from.getEyeLocation(); towards.normalize(); final int[] finalI = {0}; BukkitRunnable bukkitRunnable = new BukkitRunnable() { @Override public void run() { try { boolean isStepHit = false; Vector step = new Vector(0, 0, 0); for (int k = 0; k < partsPerTick; k++) { boolean isHit = tryHit(from, lastLocation, stack, bounced && hitSelfWhenBounced); isStepHit = isHit || isStepHit; Block block = lastLocation.getBlock(); if (transp.contains(block.getType())) { spawnParticle(from, world, lastLocation, (int) (amountPerSec / spawnsPerBlock)); } else if (!ignoreWall) { if (bounces > 0) { bounces bounced = true; makeBounce(block, towards, lastLocation.clone().subtract(step)); } else { this.cancel(); return; } } step = towards.clone().normalize().multiply(lengthPerSpawn); 
lastLocation.add(step); towards = addGravity(towards, partsPerTick); towards = homingCorrect(towards, lastLocation, target, finalI[0], () -> target = getNextTarget(from.getEyeLocation().getDirection(), from.getEyeLocation(), from)); } if (isStepHit && homingTargetMode.equals(HomingTargetMode.MULTI_TARGET)) { target = getNextTarget(from.getEyeLocation().getDirection(), from.getEyeLocation(), from); } if (isStepHit && !pierce) { this.cancel(); LightContext.clear(); return; } if (finalI[0] >= movementTicks) { this.cancel(); LightContext.clear(); } finalI[0]++; } catch (Exception ex) { from.getServer().getLogger().log(Level.WARNING, "", ex); this.cancel(); LightContext.clear(); } } }; bukkitRunnable.runTaskTimer(RPGItems.plugin, 0, 1); } } private void makeBounce(Block block, Vector towards, Location lastLocation) { RayTraceResult rayTraceResult = block.rayTrace(lastLocation, towards, towards.length(), FluidCollisionMode.NEVER); if (rayTraceResult == null) { return; } else { towards.rotateAroundNonUnitAxis(rayTraceResult.getHitBlockFace().getDirection(), Math.toRadians(180)).multiply(-1); } } private Vector homingCorrect(Vector towards, Location lastLocation, Entity target, int i, Runnable runnable) { if (target == null || i < stepsBeforeHoming) { return towards; } if (target.isDead()) { runnable.run(); } Location targetLocation; if (target instanceof LivingEntity) { targetLocation = ((LivingEntity) target).getEyeLocation(); } else { targetLocation = target.getLocation(); } Vector clone = towards.clone(); Vector targetDirection = targetLocation.toVector().subtract(lastLocation.toVector()); float angle = clone.angle(targetDirection); Vector crossProduct = clone.clone().getCrossProduct(targetDirection); double actualAng = homingAngle / spawnsPerBlock; if (angle > Math.toRadians(actualAng)) { //will create a enlarging circle clone.add(clone.clone().getCrossProduct(crossProduct).normalize().multiply(-1 * Math.tan(actualAng))); // will create a exact circle. 
// clone.rotateAroundAxis(crossProduct, actualAng); } else { clone = targetDirection.normalize(); } return clone; } private LivingEntity getNextTarget(Vector towards, Location lastLocation, Entity from) { int radius = Math.min(this.length, 300); return Utils.getLivingEntitiesInCone(from.getNearbyEntities(radius, this.length, this.length).stream() .filter(entity -> entity instanceof LivingEntity && !entity.equals(from) && !entity.isDead()) .map(entity -> ((LivingEntity) entity)) .collect(Collectors.toList()) , lastLocation.toVector(), homingRange, towards).stream() .filter(livingEntity -> { if (isUtilArmorStand(livingEntity)){ return false; } switch (homingTarget) { case MOBS: return !(livingEntity instanceof Player); case PLAYERS: return livingEntity instanceof Player && !((Player) livingEntity).getGameMode().equals(GameMode.SPECTATOR); case ALL: return !(livingEntity instanceof Player) || !((Player) livingEntity).getGameMode().equals(GameMode.SPECTATOR); } return true; }) .findFirst().orElse(null); } private boolean isUtilArmorStand(LivingEntity livingEntity) { if(livingEntity instanceof ArmorStand){ ArmorStand arm = (ArmorStand) livingEntity; return arm.isMarker() && !arm.isVisible(); } return false; } private boolean spawnInWorld = false; private void spawnParticle(LivingEntity from, World world, Location lastLocation, int i) { if ((lastLocation.distance(from.getEyeLocation()) < 1)) { return; } if (spawnInWorld) { if (from instanceof Player) { ((Player) from).spawnParticle(this.particle, lastLocation, i / 2, offsetX, offsetY, offsetZ, speed, extraData); } }else { world.spawnParticle(this.particle, lastLocation, i, offsetX, offsetY, offsetZ, speed, extraData, false); } spawnInWorld = !spawnInWorld; } private boolean tryHit(LivingEntity from, Location loc, ItemStack stack, boolean canHitSelf) { double offsetLength = new Vector(offsetX, offsetY, offsetZ).length(); double length = Double.isNaN(offsetLength) ? 
0 : Math.max(offsetLength, 10); Collection<Entity> candidates = from.getWorld().getNearbyEntities(loc, length, length, length); boolean result = false; if (!pierce) { List<Entity> collect = candidates.stream() .filter(entity -> (entity instanceof LivingEntity) && (!isUtilArmorStand((LivingEntity) entity)) && (canHitSelf || !entity.equals(from)) && !entity.isDead()) .filter(entity -> canHit(loc, entity)) .limit(1) .collect(Collectors.toList()); if (!collect.isEmpty()) { Entity entity = collect.get(0); if (entity instanceof LivingEntity) { LightContext.putTemp(from.getUniqueId(), DAMAGE_SOURCE, getNamespacedKey().toString()); LightContext.putTemp(from.getUniqueId(), OVERRIDING_DAMAGE, damage); LightContext.putTemp(from.getUniqueId(), SUPPRESS_MELEE, suppressMelee); LightContext.putTemp(from.getUniqueId(), DAMAGE_SOURCE_ITEM, stack); ((LivingEntity) entity).damage(damage, from); LightContext.clear(); } return true; } } else { List<Entity> collect = candidates.stream() .filter(entity -> (entity instanceof LivingEntity) && (!isUtilArmorStand((LivingEntity) entity)) && (canHitSelf || !entity.equals(from))) .filter(entity -> canHit(loc, entity)) .collect(Collectors.toList()); LightContext.putTemp(from.getUniqueId(), DAMAGE_SOURCE, getNamespacedKey().toString()); LightContext.putTemp(from.getUniqueId(), OVERRIDING_DAMAGE, damage); LightContext.putTemp(from.getUniqueId(), SUPPRESS_MELEE, suppressMelee); LightContext.putTemp(from.getUniqueId(), DAMAGE_SOURCE_ITEM, stack); if (!collect.isEmpty()) { collect.stream() .map(entity -> ((LivingEntity) entity)) .forEach(livingEntity -> { livingEntity.damage(damage, from); }); result = true; } LightContext.clear(); } return result; } private boolean canHit(Location loc, Entity entity) { BoundingBox boundingBox = entity.getBoundingBox(); BoundingBox particleBox; double x = Math.max(offsetX, 0.1); double y = Math.max(offsetY, 0.1); double z = Math.max(offsetZ, 0.1); particleBox = BoundingBox.of(loc, x + 0.1, y + 0.1, z + 0.1); return 
boundingBox.overlaps(particleBox) || particleBox.overlaps(boundingBox); } private enum Mode { BEAM, PROJECTILE, ; } public class ExtraDataSerializer implements Getter, Setter { @Override public String get(Object object) { if (object instanceof Particle.DustOptions) { Color color = ((Particle.DustOptions) object).getColor(); return color.getRed() + "," + color.getGreen() + "," + color.getBlue() + "," + ((Particle.DustOptions) object).getSize(); } return ""; } @Override public Optional set(String value) throws IllegalArgumentException { String[] split = value.split(",", 4); int r = Integer.parseInt(split[0]); int g = Integer.parseInt(split[1]); int b = Integer.parseInt(split[2]); float size = Float.parseFloat(split[3]); return Optional.of(new Particle.DustOptions(Color.fromRGB(r, g, b), size)); } } enum Target { MOBS, PLAYERS, ALL } private enum HomingTargetMode { ONE_TARGET, MULTI_TARGET; } }
package org.jtrfp.trcl.flow; import java.awt.Color; import java.beans.PropertyChangeListener; import java.beans.PropertyChangeSupport; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import javax.swing.JOptionPane; import org.jtrfp.jtrfp.FileLoadException; import org.jtrfp.trcl.BriefingScreen; import org.jtrfp.trcl.DisplayModeHandler; import org.jtrfp.trcl.EarlyLoadingScreen; import org.jtrfp.trcl.GLFont; import org.jtrfp.trcl.HUDSystem; import org.jtrfp.trcl.LevelLoadingScreen; import org.jtrfp.trcl.SatelliteDashboard; import org.jtrfp.trcl.NAVSystem; import org.jtrfp.trcl.UpfrontDisplay; import org.jtrfp.trcl.beh.MatchDirection; import org.jtrfp.trcl.beh.MatchPosition; import org.jtrfp.trcl.core.Camera; import org.jtrfp.trcl.core.ResourceManager; import org.jtrfp.trcl.core.TR; import org.jtrfp.trcl.core.TRFutureTask; import org.jtrfp.trcl.file.NDXFile; import org.jtrfp.trcl.file.VOXFile; import org.jtrfp.trcl.file.VOXFile.MissionLevel; import org.jtrfp.trcl.file.Weapon; import org.jtrfp.trcl.obj.DebrisSystem; import org.jtrfp.trcl.obj.Explosion.ExplosionType; import org.jtrfp.trcl.obj.ExplosionSystem; import org.jtrfp.trcl.obj.Player; import org.jtrfp.trcl.obj.PowerupSystem; import org.jtrfp.trcl.obj.ProjectileFactory; import org.jtrfp.trcl.obj.SmokeSystem; import org.jtrfp.trcl.snd.SoundSystem; public class Game { //// PROPERTIES public static final String PAUSED = "paused"; public static final String CURRENT_MISSION = "currentMission"; private TR tr; private VOXFile vox; private int levelIndex = 0; private String playerName="DEBUG"; private Difficulty difficulty; private Mission currentMission; private HUDSystem hudSystem; private NAVSystem navSystem; private SatelliteDashboard satDashboard; private Player player; private GLFont greenFont,upfrontFont; private UpfrontDisplay upfrontDisplay; private EarlyLoadingScreen earlyLoadingScreen; private LevelLoadingScreen levelLoadingScreen; private BriefingScreen briefingScreen; 
private final DisplayModeHandler displayModes = new DisplayModeHandler(); public Object[] earlyLoadingMode, levelLoadingMode, briefingMode, gameplayMode, performanceReportMode; private final PropertyChangeSupport pcSupport = new PropertyChangeSupport(this); private boolean paused=false; private volatile boolean aborting=false; private TRFutureTask<Void>[] startupTask = new TRFutureTask[]{null}; private static final int UPFRONT_HEIGHT = 23; private final double FONT_SIZE=.07; private boolean inGameplay =false; public Game(TR tr, VOXFile vox) { setTr(tr); setVox(vox); if (!tr.getTrConfig()[0].isDebugMode()) setupNameWithUser(); }// end constructor private void setupNameWithUser() { setPlayerName((String) JOptionPane.showInputDialog(tr.getRootWindow(), "Callsign:", "Pilot Registration", JOptionPane.PLAIN_MESSAGE, null, null, "Councilor")); String difficulty = (String) JOptionPane.showInputDialog( tr.getRootWindow(), "Difficulty:", "Pilot Registration", JOptionPane.PLAIN_MESSAGE, null, new String[] { "Easy", "Normal", "Hard", "Furious" }, "Normal"); if (difficulty.contentEquals("Easy")) { setDifficulty(Difficulty.EASY); } if (difficulty.contentEquals("Normal")) { setDifficulty(Difficulty.NORMAL); } if (difficulty.contentEquals("Hard")) { setDifficulty(Difficulty.HARD); } if (difficulty.contentEquals("Furious")) { setDifficulty(Difficulty.FURIOUS); } }// end setupNameWithUser() public void save(File fileToSaveTo) { // TODO } /** * @return the tr */ public TR getTr() { return tr; } /** * @param tr * the tr to set */ public synchronized void setTr(TR tr) { this.tr = tr; } /** * @return the vox */ public synchronized VOXFile getVox() { return vox; } /** * @param vox * the vox to set */ public synchronized void setVox(VOXFile vox) { if(this.vox==vox) return;//No change. 
pcSupport.firePropertyChange("vox", this.vox, vox); this.vox = vox; } /** * @return the levelIndex */ public synchronized int getLevelIndex() { return levelIndex; } public boolean isInGameplay(){ return inGameplay; } private boolean setInGameplay(boolean newValue){ boolean old = inGameplay; inGameplay=newValue; pcSupport.firePropertyChange("inGameplay", old, newValue); return old; } public void setLevelIndex(int levelIndex) throws IllegalAccessException, FileNotFoundException, IOException, FileLoadException { this.levelIndex = levelIndex; if (levelIndex != -1) {// -1 means 'abort' MissionLevel lvl = vox.getLevels()[getLevelIndex()]; final String lvlFileName = lvl.getLvlFile(); setCurrentMission(new Mission(tr, this, tr.getResourceManager() .getLVL(lvlFileName), lvlFileName.substring(0, lvlFileName.lastIndexOf('.')), getLevelIndex() % 3 == 0)); }//end if(levelIndex!=-1) }// end setLevelIndex(...) /** * @return the playerName */ public synchronized String getPlayerName() { return playerName; } /** * @param playerName * the playerName to set */ public synchronized void setPlayerName(String playerName) { this.playerName = playerName; } enum Difficulty { EASY, NORMAL, HARD, FURIOUS } /** * @return the difficulty */ public synchronized Difficulty getDifficulty() { return difficulty; } /** * @param difficulty * the difficulty to set */ public synchronized void setDifficulty(Difficulty difficulty) { this.difficulty = difficulty; } public NAVSystem getNavSystem(){ return navSystem; } public synchronized void setLevel(String skipToLevel) throws IllegalAccessException, FileNotFoundException, IOException, FileLoadException { final MissionLevel[] levs = vox.getLevels(); for (int index = 0; index < levs.length; index++) { if (levs[index].getLvlFile().toUpperCase() .contentEquals(skipToLevel.toUpperCase())) setLevelIndex(index); }// end for(levs) }// end setLevel() public HUDSystem getHUDSystem(){ return hudSystem; } public synchronized void boot() throws IllegalAccessException, 
FileNotFoundException, IOException, FileLoadException { // Set up player, HUD, fonts... System.out.println("Booting..."); System.out.println("Initializing general resources..."); greenFont = new GLFont(tr.getResourceManager().getFont("OCRA.zip", "OCRA.ttf"),tr); NDXFile ndx = tr.getResourceManager().getNDXFile("STARTUP\\FONT.NDX"); upfrontFont = new GLFont(tr.getResourceManager().getFontBIN("STARTUP\\FONT.BIN", ndx), UPFRONT_HEIGHT, ndx.getWidths(), 32,tr); earlyLoadingScreen = new EarlyLoadingScreen(tr.getWorld(), tr, greenFont); earlyLoadingScreen.setStatusText("Reticulating Splines..."); earlyLoadingMode = new Object []{ earlyLoadingScreen }; displayModes.setDisplayMode(earlyLoadingMode); upfrontDisplay = new UpfrontDisplay(tr.getWorld(),tr); satDashboard = new SatelliteDashboard(tr); satDashboard.setVisible(false); tr.getWorld().add(satDashboard); hudSystem = new HUDSystem(tr.getWorld(),greenFont); hudSystem.deactivate(); navSystem = new NAVSystem(tr.getWorld(), tr); navSystem.deactivate(); // Make color zero translucent. 
final ResourceManager rm = tr.getResourceManager(); final Color[] pal = tr.getGlobalPalette(); pal[0] = new Color(0, 0, 0, 0); tr.setGlobalPalette(pal); // POWERUPS earlyLoadingScreen.setStatusText("Loading powerup assets..."); rm.setPowerupSystem(new PowerupSystem(tr)); rm.getPowerupSystem().activate(); // EXPLOSIONS earlyLoadingScreen.setStatusText("Loading explosion assets..."); rm.setExplosionFactory(new ExplosionSystem(tr)); rm.getExplosionFactory().activate(); // SMOKE earlyLoadingScreen.setStatusText("Loading smoke assets..."); rm.setSmokeSystem(new SmokeSystem(tr)); rm.getSmokeSystem().activate(); // DEBRIS earlyLoadingScreen.setStatusText("Loading debris assets..."); rm.setDebrisSystem(new DebrisSystem(tr)); rm.getDebrisSystem().activate(); // SETUP PROJECTILE FACTORIES earlyLoadingScreen.setStatusText("Setting up projectile factories..."); Weapon[] w = Weapon.values(); ProjectileFactory[] pf = new ProjectileFactory[w.length]; for (int i = 0; i < w.length; i++) { pf[i] = new ProjectileFactory(tr, w[i], ExplosionType.Blast); }// end for(weapons) rm.setProjectileFactories(pf); player = new Player(tr, tr.getResourceManager().getBINModel( "SHIP.BIN", tr.getGlobalPaletteVL(), tr.gpu.get().getGl())); final Camera camera = tr.renderer.get().getCamera(); camera.probeForBehavior(MatchPosition.class).setTarget(player); camera.probeForBehavior(MatchDirection.class).setTarget(player); tr.getWorld().add(player); System.out.println("\t...Done."); levelLoadingScreen = new LevelLoadingScreen(tr.getWorld(),tr); briefingScreen = new BriefingScreen(tr.getWorld(),tr,greenFont); earlyLoadingScreen.setStatusText("Ready."); levelLoadingMode = new Object[]{ levelLoadingScreen, upfrontDisplay }; gameplayMode = new Object[]{ navSystem, hudSystem, upfrontDisplay, rm.getDebrisSystem(), rm.getPowerupSystem(), rm.getProjectileFactories(), rm.getExplosionFactory(), rm.getSmokeSystem() }; briefingMode = new Object[]{ briefingScreen }; setLevelIndex(0); }// end boot() public synchronized 
void doGameplay() throws IllegalAccessException, FileNotFoundException, IOException, FileLoadException { setInGameplay(true); try { MissionLevel[] levels = vox.getLevels(); tr.getThreadManager().setPaused(false); while (getLevelIndex() < levels.length && getLevelIndex() != -1) { Mission.Result result = null; final Mission mission = getCurrentMission(); if (mission == null) break; while (result == null) result = getCurrentMission().go(); if (result.isAbort()) break; // Rube Goldberg style increment setLevelIndex(getLevelIndex() + 1); }// end while(getLevelIndex<length) System.out.println("Escaping game loop."); tr.getThreadManager().setPaused(true); setInGameplay(false); } catch (IllegalAccessException e) { throw e; } catch (FileNotFoundException e) { throw e; } catch (IOException e) { throw e; } catch (FileLoadException e) { throw e; } finally { tr.getThreadManager().setPaused(true); setInGameplay(false); }//end finally{} }// end beginGameplay() public void setCurrentMission(Mission mission) { pcSupport.firePropertyChange("currentMission", this.currentMission, mission); this.currentMission=mission; } public void abort(){ try{setLevelIndex(-1);} catch(Exception e){tr.showStopper(e);}//Shouldn't happen. 
abortCurrentMission(); cleanup(); tr.getGameShell().applyGFXState(); } private void cleanup() { if(hudSystem!=null) hudSystem.deactivate(); if(navSystem!=null) navSystem.deactivate(); if(upfrontDisplay!=null) upfrontDisplay.deactivate(); if(earlyLoadingScreen!=null) earlyLoadingScreen.deactivate(); if(levelLoadingScreen!=null) levelLoadingScreen.deactivate(); if(briefingScreen!=null) briefingScreen.deactivate(); if(tr.getResourceManager().getPowerupSystem()!=null) tr.getResourceManager().getPowerupSystem().deactivate(); if(tr.getResourceManager().getSmokeSystem()!=null) tr.getResourceManager().getSmokeSystem().deactivate(); if(tr.getResourceManager().getExplosionFactory()!=null) tr.getResourceManager().getExplosionFactory().deactivate(); if(player!=null) tr.getWorld().remove(player); TR.nuclearGC(); } public void abortCurrentMission(){ tr.getThreadManager().setPaused(true); synchronized(startupTask){ if(startupTask[0]!=null) startupTask[0].get();//Don't abort while setting up. }//end sync{} if(currentMission!=null) currentMission.abort(); setCurrentMission(null); }//end abortCurrentMission() public Mission getCurrentMission() { /*if(currentMission==null){ setLevelIndex(getLevelIndex()); }*/ return currentMission; }//end getCurrentMission public Player getPlayer(){ return player; } public GLFont getGreenFont() { return greenFont; } public GLFont getUpfrontFont() { return upfrontFont; } /** * @return the upfrontDisplay */ public UpfrontDisplay getUpfrontDisplay() { return upfrontDisplay; } public LevelLoadingScreen getLevelLoadingScreen() { return levelLoadingScreen; } public Game setDisplayMode(Object[] mode) { displayModes.setDisplayMode(mode); tr.getThreadManager().visibilityCalc(true); return this; } public BriefingScreen getBriefingScreen(){ return briefingScreen; } /** * @return the paused */ public boolean isPaused() { return paused; } /** * @param paused the paused to set */ public Game setPaused(boolean paused) { if(paused==this.paused) return this;//nothing 
to do. pcSupport.firePropertyChange(PAUSED, this.paused, paused); this.paused = paused; final SoundSystem ss = getTr().soundSystem.get(); ss.setPaused(paused); getTr().getThreadManager().setPaused(paused); if(paused) upfrontDisplay.submitPersistentMessage("Paused--F3 to Resume"); else upfrontDisplay.removePersistentMessage(); return this; } public Game addPropertyChangeListener(String propertyName, PropertyChangeListener l){ pcSupport.addPropertyChangeListener(propertyName, l); return this; } public Game removePropertyChangeListener(PropertyChangeListener l){ pcSupport.removePropertyChangeListener(l); return this; } /** * @return the mapDashboard */ public SatelliteDashboard getSatDashboard() { return satDashboard; } /** * @param satDashboard the mapDashboard to set */ public void setSatDashboard(SatelliteDashboard satDashboard) { this.satDashboard = satDashboard; } }// end Game
package org.opencb.opencga.storage.core.variant.annotation;

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import org.opencb.biodata.models.variant.Variant;
import org.opencb.biodata.models.variant.annotation.ConsequenceType;
import org.opencb.biodata.models.variant.annotation.VariantAnnotation;
import org.opencb.biodata.models.variation.GenomicVariant;
import org.opencb.cellbase.core.client.CellBaseClient;
import org.opencb.cellbase.core.common.core.CellbaseConfiguration;
import org.opencb.cellbase.core.lib.DBAdaptorFactory;
import org.opencb.cellbase.core.lib.api.variation.VariantAnnotationDBAdaptor;
import org.opencb.cellbase.core.lib.api.variation.VariationDBAdaptor;
import org.opencb.cellbase.lib.mongodb.db.MongoDBAdaptorFactory;
import org.opencb.datastore.core.QueryOptions;
import org.opencb.datastore.core.QueryResponse;
import org.opencb.datastore.core.QueryResult;
import org.opencb.opencga.storage.core.variant.adaptors.VariantDBAdaptor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.*;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
import java.util.concurrent.*;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;

/**
 * {@link VariantAnnotator} backed by CellBase. Annotations are fetched either
 * through the CellBase REST API ({@link CellBaseClient}) or through a direct
 * database adaptor ({@link DBAdaptorFactory}), depending on which constructor
 * was used.
 * <p>
 * {@link #createAnnotation} serializes one {@link VariantAnnotation} JSON
 * object per line into a (optionally gzipped) file; {@link #loadAnnotation}
 * reads that file back and updates the variant database with a
 * producer/consumer pipeline.
 */
public class CellBaseVariantAnnotator implements VariantAnnotator {

    /** Fallback when no batch size is supplied in the QueryOptions. */
    private static final int DEFAULT_BATCH_SIZE = 100;
    /** Fallback number of consumer (writer) threads for loadAnnotation. */
    private static final int DEFAULT_NUM_WRITERS = 6;

    private final JsonFactory factory;
    private VariantAnnotationDBAdaptor variantAnnotationDBAdaptor;
    private CellBaseClient cellBaseClient;
    private ObjectMapper jsonObjectMapper;
    private VariationDBAdaptor variationDBAdaptor;
    private DBAdaptorFactory dbAdaptorFactory;
    protected static Logger logger = LoggerFactory.getLogger(CellBaseVariantAnnotator.class);

    public CellBaseVariantAnnotator() {
        this.factory = new JsonFactory();
        this.jsonObjectMapper = new ObjectMapper(factory);
        this.dbAdaptorFactory = null;
        this.cellBaseClient = null;
    }

    /**
     * Builds an annotator that queries CellBase through a direct MongoDB
     * adaptor.
     *
     * @param cellbaseConfiguration CellBase database configuration
     * @param cellbaseSpecies       species to query (e.g. "hsapiens")
     * @param cellbaseAssembly      genome assembly to query
     */
    public CellBaseVariantAnnotator(CellbaseConfiguration cellbaseConfiguration, String cellbaseSpecies, String cellbaseAssembly) {
        this();
        // Connecting to CellBase database
        dbAdaptorFactory = new MongoDBAdaptorFactory(cellbaseConfiguration);
        variantAnnotationDBAdaptor = dbAdaptorFactory.getGenomicVariantAnnotationDBAdaptor(cellbaseSpecies, cellbaseAssembly);
        variationDBAdaptor = dbAdaptorFactory.getVariationDBAdaptor(cellbaseSpecies, cellbaseAssembly);
    }

    /**
     * Builds an annotator that queries CellBase through its REST API.
     *
     * @param cellBaseClient configured REST client, must not be null
     * @throws NullPointerException if {@code cellBaseClient} is null
     */
    public CellBaseVariantAnnotator(CellBaseClient cellBaseClient) {
        this();
        // Validate before storing so a null client is rejected up front.
        this.cellBaseClient = Objects.requireNonNull(cellBaseClient, "CellBaseClient can not be null");
    }

    /////// CREATE ANNOTATION

    /**
     * Iterates the variants in {@code variantDBAdaptor}, fetches their
     * annotations from CellBase in batches, and writes them as one JSON
     * object per line to {@code <outDir>/<fileName>.annot.json.gz}.
     *
     * @param variantDBAdaptor source of variants to annotate
     * @param outDir           output directory; {@code /tmp} when null
     * @param fileName         base name of the output file
     * @param options          may be null; honours "gzip" (default true),
     *                         BATCH_SIZE and ANNOTATE_ALL
     * @return URI of the written file, or null if the URI could not be built
     * @throws IOException           on write or CellBase failure
     * @throws IllegalStateException if no CellBase source is configured
     */
    @Override
    public URI createAnnotation(VariantDBAdaptor variantDBAdaptor, Path outDir, String fileName, QueryOptions options)
            throws IOException {
        if (cellBaseClient == null && dbAdaptorFactory == null) {
            throw new IllegalStateException("Cant createAnnotation without a CellBase source (DBAdaptorFactory or a CellBaseClient)");
        }

        Path path = Paths.get(outDir != null ? outDir.toString() : "/tmp", fileName + ".annot.json.gz");
        URI fileUri;
        try {
            fileUri = new URI("file", path.toString(), null);
        } catch (URISyntaxException e) {
            logger.error("Could not build output URI for " + path, e);
            return null;
        }

        // Guard against null options everywhere, not only in the ANNOTATE_ALL
        // check (the original dereferenced options for BATCH_SIZE and "gzip"
        // and would NPE when options == null).
        boolean gzip = options == null || options.getBoolean("gzip", true);
        int batchSize = options == null ? DEFAULT_BATCH_SIZE
                : options.getInt(VariantAnnotationManager.BATCH_SIZE, DEFAULT_BATCH_SIZE);

        /* Query options for the OpenCGA variant iterator. */
        QueryOptions iteratorQueryOptions = new QueryOptions();
        if (options != null && !options.getBoolean(VariantAnnotationManager.ANNOTATE_ALL, false)) {
            // Only fetch variants that are not annotated yet.
            iteratorQueryOptions.put("annotationExists", false);
        }

        /* Json serializer: one VariantAnnotation per line. */
        ObjectWriter writer = jsonObjectMapper.writerWithType(VariantAnnotation.class);

        OutputStream outputStream = new FileOutputStream(path.toFile());
        if (gzip) {
            outputStream = new GZIPOutputStream(outputStream);
        }
        try {
            List<GenomicVariant> genomicVariantList = new ArrayList<>(batchSize);
            Iterator<Variant> iterator = variantDBAdaptor.iterator(iteratorQueryOptions);
            while (iterator.hasNext()) {
                Variant variant = iterator.next();
                // If Variant is SV some work is needed
                if (variant.getAlternate().length() + variant.getReference().length() > Variant.SV_THRESHOLD * 2) {
                    //TODO: Manage SV variants
                    logger.info("Skip variant! {}", variant.getChromosome() + ":" + variant.getStart() + ":"
                            + abbreviate(variant.getReference()) + ":" + abbreviate(variant.getAlternate()));
                    logger.debug("Skip variant! {}", variant);
                } else {
                    genomicVariantList.add(new GenomicVariant(variant.getChromosome(), variant.getStart(),
                            variant.getReference(), variant.getAlternate()));
                }

                // Flush a full batch, or the remainder at end of iteration.
                // The empty check matters: a tail of SV-only variants would
                // otherwise trigger a CellBase call with zero variants.
                if ((genomicVariantList.size() == batchSize || !iterator.hasNext()) && !genomicVariantList.isEmpty()) {
                    List<VariantAnnotation> variantAnnotationList = cellBaseClient != null
                            ? getVariantAnnotationsREST(genomicVariantList)
                            : getVariantAnnotationsDbAdaptor(genomicVariantList);
                    for (VariantAnnotation variantAnnotation : variantAnnotationList) {
                        outputStream.write(writer.writeValueAsString(variantAnnotation).getBytes());
                        outputStream.write('\n');
                    }
                    genomicVariantList.clear();
                }
            }
        } finally {
            // Close in a finally so the stream is not leaked on failure.
            outputStream.close();
        }
        return fileUri;
    }

    /** Shortens long alleles for log messages to "ACGTACGTAC...[len]". */
    private static String abbreviate(String allele) {
        return allele.length() > 10
                ? allele.substring(0, 10) + "...[" + allele.length() + "]"
                : allele;
    }

    /////// CREATE ANNOTATION - AUX METHODS

    /**
     * Fetches full annotations for the given variants through the CellBase
     * REST API.
     *
     * @return one VariantAnnotation per input variant; empty list if the
     *         server returned null
     * @throws IOException if the response size does not match the request
     */
    private List<VariantAnnotation> getVariantAnnotationsREST(List<GenomicVariant> genomicVariantList) throws IOException {
        List<String> genomicVariantStringList = new ArrayList<>(genomicVariantList.size());
        for (GenomicVariant genomicVariant : genomicVariantList) {
            genomicVariantStringList.add(genomicVariant.toString());
        }

        QueryResponse<QueryResult<VariantAnnotation>> queryResponse = cellBaseClient.get(
                CellBaseClient.Category.genomic,
                CellBaseClient.SubCategory.variant,
                genomicVariantStringList,
                CellBaseClient.Resource.fullAnnotation,
                null);
        if (queryResponse == null) {
            logger.error("CellBase REST error. Returned null. Skipping variants. {}", cellBaseClient.getLastQuery());
            return Collections.emptyList();
        }

        Collection<QueryResult<VariantAnnotation>> response = queryResponse.getResponse();
        if (response.size() != genomicVariantList.size()) {
            throw new IOException("QueryResult size != " + genomicVariantList.size() + ". " + queryResponse);
        }
        // Iterate the collection directly instead of toArray(new QueryResult[1]),
        // which left a null slot (and caused an NPE) when the response was empty.
        List<VariantAnnotation> variantAnnotationList = new ArrayList<>(genomicVariantList.size());
        for (QueryResult<VariantAnnotation> queryResult : response) {
            variantAnnotationList.addAll(queryResult.getResult());
        }
        return variantAnnotationList;
    }

    /**
     * Fetches annotations for the given variants through the direct CellBase
     * database adaptors (consequence types + variant ids), and assembles a
     * VariantAnnotation per input variant.
     */
    private List<VariantAnnotation> getVariantAnnotationsDbAdaptor(List<GenomicVariant> genomicVariantList) throws IOException {
        org.opencb.cellbase.core.lib.dbquery.QueryOptions queryOptions = new org.opencb.cellbase.core.lib.dbquery.QueryOptions();

        List<VariantAnnotation> variantAnnotationList = new ArrayList<>(genomicVariantList.size());
        Map<String, List<ConsequenceType>> consequenceTypes = getConsequenceTypes(genomicVariantList, queryOptions);
        Map<String, String> variantIds = getVariantId(genomicVariantList, queryOptions);
        for (GenomicVariant genomicVariant : genomicVariantList) {
            VariantAnnotation variantAnnotation = new VariantAnnotation(
                    genomicVariant.getChromosome(), genomicVariant.getPosition(), genomicVariant.getPosition(),
                    genomicVariant.getReference(), genomicVariant.getAlternative());

            // Results are keyed by the variant's string form ("chr:pos:ref:alt").
            String key = genomicVariant.toString();
            variantAnnotation.setConsequenceTypes(consequenceTypes.get(key));
            variantAnnotation.setId(variantIds.get(key));

            variantAnnotationList.add(variantAnnotation);
        }
        return variantAnnotationList;
    }

    // FIXME To delete when available in cellbase
    /**
     * Maps variant key -> consequence types via the annotation DB adaptor.
     *
     * @throws IOException if a query returns something other than ConsequenceType
     */
    @SuppressWarnings({"unchecked", "rawtypes"}) // adaptor returns untyped results
    private Map<String, List<ConsequenceType>> getConsequenceTypes(List<GenomicVariant> genomicVariants,
            org.opencb.cellbase.core.lib.dbquery.QueryOptions queryOptions) throws IOException {
        Map<String, List<ConsequenceType>> map = new HashMap<>(genomicVariants.size());
        List<org.opencb.cellbase.core.lib.dbquery.QueryResult> queryResultList =
                variantAnnotationDBAdaptor.getAllConsequenceTypesByVariantList(genomicVariants, queryOptions);
        for (org.opencb.cellbase.core.lib.dbquery.QueryResult queryResult : queryResultList) {
            Object result = queryResult.getResult();
            List list = result instanceof Collection
                    ? new ArrayList((Collection) result)
                    : Collections.singletonList(result);
            if (list.isEmpty()) {
                // No consequence types for this variant; skip instead of
                // failing on list.get(0).
                continue;
            }
            if (list.get(0) instanceof ConsequenceType) {
                map.put(queryResult.getId(), list);
            } else {
                throw new IOException("queryResult result : " + queryResult + " is not a ConsequenceType");
            }
        }
        return map;
    }

    // FIXME To delete when available in cellbase
    /**
     * Maps variant key -> known variant id (e.g. rs id) via the variation
     * DB adaptor.
     */
    private Map<String, String> getVariantId(List<GenomicVariant> genomicVariant,
            org.opencb.cellbase.core.lib.dbquery.QueryOptions queryOptions) throws IOException {
        List<org.opencb.cellbase.core.lib.dbquery.QueryResult> variationQueryResultList =
                variationDBAdaptor.getIdByVariantList(genomicVariant, queryOptions);
        Map<String, String> map = new HashMap<>(genomicVariant.size());
        for (org.opencb.cellbase.core.lib.dbquery.QueryResult queryResult : variationQueryResultList) {
            map.put(queryResult.getId(), queryResult.getResult().toString());
        }
        return map;
    }

    /////// LOAD ANNOTATION

    /**
     * Reads the JSON-per-line annotation file produced by
     * {@link #createAnnotation} and updates the variant database.
     * A single producer thread parses the (gzipped) file into a bounded
     * queue; {@code numConsumers} consumer threads drain it in batches and
     * call {@link VariantDBAdaptor#updateAnnotations}. A shared sentinel
     * object ({@code lastElement}) signals end of stream to each consumer.
     *
     * @param variantDBAdaptor database to update
     * @param uri              location of the gzipped annotation file
     * @param options          may be null; honours BATCH_SIZE and NUM_WRITERS
     * @throws IOException declared for interface compatibility
     */
    @Override
    public void loadAnnotation(final VariantDBAdaptor variantDBAdaptor, final URI uri, QueryOptions options) throws IOException {
        final int batchSize = options == null ? DEFAULT_BATCH_SIZE
                : options.getInt(VariantAnnotationManager.BATCH_SIZE, DEFAULT_BATCH_SIZE);
        final int numConsumers = options == null ? DEFAULT_NUM_WRITERS
                : options.getInt(VariantAnnotationManager.NUM_WRITERS, DEFAULT_NUM_WRITERS);

        // +1 thread for the producer: with exactly numConsumers threads the
        // producer would occupy a consumer's slot and one consumer would be
        // starved until the producer finished.
        ExecutorService executor = Executors.newFixedThreadPool(numConsumers + 1);
        final BlockingQueue<VariantAnnotation> queue = new ArrayBlockingQueue<>(batchSize * numConsumers * 2);
        // End-of-stream sentinel, compared by identity in the consumers.
        final VariantAnnotation lastElement = new VariantAnnotation();

        executor.execute(new Runnable() {   // producer
            @Override
            public void run() {
                try {
                    int readsCounter = 0;
                    /* Open gzipped input stream. */
                    InputStream inputStream = new GZIPInputStream(new FileInputStream(Paths.get(uri).toFile()));
                    try {
                        /* Initialize Json parser. */
                        JsonParser parser = factory.createParser(inputStream);
                        while (parser.nextToken() != null) {
                            VariantAnnotation variantAnnotation = parser.readValueAs(VariantAnnotation.class);
                            queue.put(variantAnnotation);
                            readsCounter++;
                            if (readsCounter % 1000 == 0) {
                                logger.info("Element {}", readsCounter);
                            }
                        }
                    } finally {
                        inputStream.close();
                    }
                } catch (IOException e) {
                    logger.error("Error reading annotations from " + uri, e);
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    logger.error("Annotation producer interrupted", e);
                } finally {
                    // Always publish the sentinels — even after a failure —
                    // so consumers terminate instead of blocking forever.
                    try {
                        for (int i = 0; i < numConsumers; i++) {
                            queue.put(lastElement);
                        }
                        logger.debug("Put Last element. queue size = {}", queue.size());
                    } catch (InterruptedException e) {
                        Thread.currentThread().interrupt();
                    }
                }
            }
        });

        for (int i = 0; i < numConsumers; i++) {
            executor.execute(new Runnable() {   // consumer
                @Override
                public void run() {
                    try {
                        List<VariantAnnotation> batch = new ArrayList<>(batchSize);
                        VariantAnnotation elem = queue.take();
                        while (elem != lastElement) {
                            batch.add(elem);
                            if (batch.size() == batchSize) {
                                variantDBAdaptor.updateAnnotations(batch, new QueryOptions());
                                batch.clear();
                                logger.debug("thread updated batch");
                            }
                            elem = queue.take();
                        }
                        if (!batch.isEmpty()) { //Upload remaining elements
                            variantDBAdaptor.updateAnnotations(batch, new QueryOptions());
                        }
                        logger.debug("thread finished updating annotations");
                    } catch (InterruptedException e) {
                        Thread.currentThread().interrupt();
                        logger.error("Annotation consumer interrupted", e);
                    }
                }
            });
        }

        executor.shutdown();
        try {
            executor.awaitTermination(Integer.MAX_VALUE, TimeUnit.SECONDS);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            logger.error("annotation interrupted", e);
        }
    }
}
package think.rpgitems.power.impl;

import com.google.common.util.concurrent.AtomicDouble;
import com.udojava.evalex.Expression;
import org.bukkit.*;
import org.bukkit.block.Block;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.entity.ArmorStand;
import org.bukkit.entity.Entity;
import org.bukkit.entity.LivingEntity;
import org.bukkit.entity.Player;
import org.bukkit.event.entity.EntityDamageByEntityEvent;
import org.bukkit.event.entity.EntityDamageEvent;
import org.bukkit.event.entity.EntityShootBowEvent;
import org.bukkit.event.player.PlayerInteractEvent;
import org.bukkit.event.player.PlayerToggleSneakEvent;
import org.bukkit.event.player.PlayerToggleSprintEvent;
import org.bukkit.inventory.ItemStack;
import org.bukkit.scheduler.BukkitRunnable;
import org.bukkit.util.BoundingBox;
import org.bukkit.util.RayTraceResult;
import org.bukkit.util.Vector;
import org.librazy.nclangchecker.LangKey;
import think.rpgitems.RPGItems;
import think.rpgitems.data.LightContext;
import think.rpgitems.power.*;

import java.math.BigDecimal;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.logging.Level;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import static think.rpgitems.Events.*;
import static think.rpgitems.power.Utils.checkCooldown;

/**
 * Power that fires a particle beam which can damage, pierce, bounce and home
 * onto targets. Beam version 2.0.
 * <p>
 * Written and maintained by ReinWD (ReinWDD@gmail.com); for issues contact
 * the author by email or @ReinWD on the issue tracker. Accepted language:
 * English.
 */
@PowerMeta(defaultTrigger = "RIGHT_CLICK", generalInterface = PowerPlain.class)
public class PowerBeam extends BasePower implements PowerPlain, PowerRightClick, PowerLeftClick, PowerSneak, PowerSneaking, PowerSprint, PowerBowShoot, PowerHitTaken, PowerHit, PowerHurt {
    /** Maximum beam length in blocks. */
    @Property
    public int length = 10;

    /** Particle rendered along the beam. */
    @Property
    public Particle particle = Particle.LAVA;

    /** BEAM renders instantly; PROJECTILE travels at {@link #speed}. */
    @Property
    public Mode mode = Mode.BEAM;

    /** Number of entities the beam may pass through before stopping. */
    @Property
    public int pierce = 0;

    /** When true the beam passes through solid blocks. */
    @Property
    public boolean ignoreWall = true;

    /** Damage dealt to each entity hit. */
    @Property
    public double damage = 20;

    /** Travel speed in blocks per second (PROJECTILE mode). */
    @Property
    public int speed = 20;

    @Property
    public double offsetX = 0;

    @Property
    public double offsetY = 0;

    @Property
    public double offsetZ = 0;

    @Property
    public double particleSpeed = 0;

    /** Particle spawns per block of beam length. */
    @Property
    public double spawnsPerBlock = 2;

    /**
     * Cost of this power
     */
    @Property
    public int cost = 0;

    /**
     * Cooldown time of this power
     */
    @Property
    public long cooldown = 0;

    /** Random spread cone half-angle in degrees (0 = perfectly straight). */
    // used to judge legacy 1.0 configs ("coneRange" key)
    @Property
    public double cone = 10;

    /** Homing strength; 0 disables homing. */
    @Property
    public double homing = 0;

    /** Cone half-angle (degrees) inside which homing targets are acquired. */
    @Property
    public double homingAngle = 30;

    @Property
    public HomingMode homingMode = HomingMode.ONE_TARGET;

    @Property
    public Target homingTarget = Target.MOBS;

    /** Spawn steps to travel before homing kicks in. */
    @Property
    public int stepsBeforeHoming = 0;

    /** Number of bursts fired per trigger. */
    @Property
    public int burstCount = 1;

    /** Beams fired per burst. */
    @Property
    public int beamAmount = 1;

    /** Ticks between bursts. */
    @Property
    public int burstInterval = 10;

    /** Number of wall bounces allowed when {@link #ignoreWall} is false. */
    @Property
    public int bounce = 0;

    /** Whether a bounced beam may hit its shooter. */
    @Property
    public boolean hitSelfWhenBounced = false;

    /** Downward acceleration in blocks/s^2 applied to the beam. */
    @Property
    public double gravity = 0;

    /** Extra particle data (e.g. DustOptions for REDSTONE). */
    @Property
    @Serializer(ExtraDataSerializer.class)
    @Deserializer(ExtraDataSerializer.class)
    public Object extraData;

    /** For hurt/takeHit triggers: only fire when damaged by an entity. */
    @Property
    public boolean requireHurtByEntity = true;

    /**
     * Whether to suppress the hit trigger
     */
    @Property
    public boolean suppressMelee = false;

    /** EvalEx expression in x (progress 0..1) biasing speed; empty = constant. */
    @Property
    public String speedBias = "";

    @Property
    public BeamShape shape = BeamShape.PLAIN;

    @Property
    public String shapeParam = "{}";

    @Override
    public void init(ConfigurationSection section) {
        // Legacy (v1.0) configs are detected by their "coneRange" key and
        // migrated in place before normal initialization.
        if (section.contains("coneRange")) {
            updateFromV1(section);
        }
        super.init(section);
    }

    /**
     * Migrates a v1.0 configuration section to the v2.0 key layout. Several
     * keys deliberately swap meaning (v1 "homingAngle" becomes v2 "homing",
     * v1 "homingRange" becomes v2 "homingAngle").
     */
    private void updateFromV1(ConfigurationSection section) {
        double cone = section.getDouble("coneRange");
        int movementTicks = section.getInt("movementTicks");
        int length = section.getInt("length");
        double originSpeed = section.getDouble("speed");
        double homingAngle = section.getDouble("homingAngle");
        double homingRange = section.getDouble("homingRange");
        String homingTargetMode = section.getString("homingTargetMode");
        section.set("cone", cone);
        section.set("speed", ((double) length) / ((double) movementTicks));
        section.set("particleSpeed", originSpeed);
        section.set("homing", homingAngle);
        section.set("homingAngle", homingRange);
        section.set("homingMode", homingTargetMode);
    }

    /** Blocks the beam can pass through: non-solid or non-occluding blocks. */
    private static Set<Material> transp = Stream.of(Material.values())
            .filter(material -> material.isBlock())
            .filter(material -> !material.isSolid() || !material.isOccluding())
            .collect(Collectors.toSet());

    /** Arbitrary non-parallel helper vector used to build a rotation axis. */
    final Vector crosser = new Vector(1, 1, 1);

    private Random random = new Random();

    @Override
    public @LangKey(skipCheck = true) String getName() {
        return "beam";
    }

    @Override
    public String displayText() {
        return null;
    }

    @Override
    public PowerResult<Void> fire(Player player, ItemStack stack) {
        if (!checkCooldown(this, player, cooldown, true, true)) return PowerResult.cd();
        if (!getItem().consumeDurability(stack, cost)) return PowerResult.cost();
        return beam(player, stack);
    }

    @Override
    public PowerResult<Void> leftClick(Player player, ItemStack stack, PlayerInteractEvent event) {
        return fire(player, stack);
    }

    @Override
    public PowerResult<Void> rightClick(Player player, ItemStack stack, PlayerInteractEvent event) {
        return fire(player, stack);
    }

    @Override
    public PowerResult<Void> sneak(Player player, ItemStack stack, PlayerToggleSneakEvent event) {
        return fire(player, stack);
    }

    @Override
    public PowerResult<Void> sneaking(Player player, ItemStack stack) {
        return fire(player, stack);
    }

    @Override
    public PowerResult<Void> sprint(Player player, ItemStack stack, PlayerToggleSprintEvent event) {
        return fire(player, stack);
    }

    @Override
    public PowerResult<Float> bowShoot(Player player, ItemStack itemStack, EntityShootBowEvent e) {
        return fire(player, itemStack).with(e.getForce());
    }

    @Override
    public PowerResult<Double> hit(Player player, ItemStack stack, LivingEntity entity, double damage, EntityDamageByEntityEvent event) {
        return fire(player, stack).with(event.getDamage());
    }

    @Override
    public PowerResult<Double> takeHit(Player target, ItemStack stack, double damage, EntityDamageEvent event) {
        if (!requireHurtByEntity || event instanceof EntityDamageByEntityEvent) {
            return fire(target, stack).with(event.getDamage());
        }
        return PowerResult.noop();
    }

    /**
     * Fires {@link #burstCount} bursts of {@link #beamAmount} beams each,
     * spaced {@link #burstInterval} ticks apart.
     */
    private PowerResult<Void> beam(LivingEntity from, ItemStack stack) {
        if (burstCount > 0) {
            final int currentBurstCount = burstCount;
            final int currentBurstInterval = burstInterval;
            AtomicInteger bursted = new AtomicInteger(0);
            class FireTask extends BukkitRunnable {
                @Override
                public void run() {
                    for (int j = 0; j < beamAmount; j++) {
                        internalFireBeam(from, stack);
                    }
                    // incrementAndGet (not getAndAdd) so exactly burstCount
                    // bursts fire; the old post-increment fired one extra.
                    if (bursted.incrementAndGet() < currentBurstCount) {
                        new FireTask().runTaskLaterAsynchronously(RPGItems.plugin, currentBurstInterval);
                    }
                }
            }
            new FireTask().runTaskAsynchronously(RPGItems.plugin);
            return PowerResult.ok();
        } else {
            return internalFireBeam(from, stack);
        }
    }

    /**
     * Fires a single beam from the shooter's eye location, applying the
     * random spread {@link #cone} and, for players with homing enabled,
     * pre-acquiring the target queue.
     */
    private PowerResult<Void> internalFireBeam(LivingEntity from, ItemStack stack) {
        Location fromLocation = from.getEyeLocation();
        Vector towards = from.getEyeLocation().getDirection();

        if (cone != 0) {
            // Pick a random direction inside the spread cone: rotate by a
            // random polar angle (theta) around an axis perpendicular to the
            // aim, then by a random azimuth (phi) around the aim itself.
            double phi = random.nextDouble() * 360;
            double theta = random.nextDouble() * cone;
            Vector clone = towards.clone();
            Vector cross = clone.clone().add(crosser);
            Vector vertical = clone.getCrossProduct(cross).getCrossProduct(towards);
            towards.rotateAroundAxis(vertical, Math.toRadians(theta));
            towards.rotateAroundAxis(clone, Math.toRadians(phi));
        }

        Queue<Entity> targets = null;
        if (from instanceof Player && homing > 0) {
            targets = new LinkedList<>(getTargets(from.getEyeLocation().getDirection(), fromLocation, from));
        }

        MovingTask movingTask = new MovingTaskBuilder(this)
                .fromEntity(from)
                .towards(towards)
                .targets(targets)
                .itemStack(stack)
                .build();
        movingTask.runTask(RPGItems.plugin);
        return PowerResult.ok();
    }

    @Override
    public PowerResult<Void> hurt(Player target, ItemStack stack, EntityDamageEvent event) {
        if (!requireHurtByEntity || event instanceof EntityDamageByEntityEvent) {
            return fire(target, stack);
        }
        return PowerResult.noop();
    }

    /**
     * Snapshot of the power's settings plus the per-shot state of one beam.
     * Steps the beam forward asynchronously via {@link RecursiveTask}.
     */
    private static class MovingTask extends BukkitRunnable {
        private int length = 10;
        private Particle particle = Particle.LAVA;
        private Mode mode = Mode.BEAM;
        private int pierce = 0;
        private boolean ignoreWall = true;
        private double damage = 20;
        private int speed = 20;
        private double offsetX = 0;
        private double offsetY = 0;
        private double offsetZ = 0;
        private double particleSpeed = 0;
        private double spawnsPerBlock = 2;
        private double homing = 0;
        private HomingMode homingMode = HomingMode.ONE_TARGET;
        private int ticksBeforeHoming = 5;
        private int bounce = 0;
        private boolean hitSelfWhenBounced = false;
        private double gravity = 0;
        private boolean suppressMelee = false;
        private BeamShape shape = BeamShape.PLAIN;
        private String shapeParam = "{}";
        private Object extraData = null;
        private PowerBeam power;
        private String speedBias = "";
        private Queue<Entity> targets;          // may be null when homing is off
        private Entity fromEntity;
        private Location fromLocation;
        private Vector towards;
        double lengthPerSpawn;
        AtomicDouble lengthRemains = new AtomicDouble(0);
        AtomicDouble spawnedLength = new AtomicDouble(0);
        AtomicInteger currentStep = new AtomicInteger(0);
        // Initialized in run(): at field-init time fromLocation is still null
        // (the old `= fromLocation` initializer left this permanently null).
        Location lastLocation;
        private ItemStack itemStack;
        boolean bounced = false;
        World world;

        MovingTask() {
        }

        MovingTask(PowerBeam config) {
            this.length = config.length;
            this.particle = config.particle;
            this.mode = config.mode;
            this.pierce = config.pierce;
            this.ignoreWall = config.ignoreWall;
            this.damage = config.damage;
            this.speed = config.speed;
            this.offsetX = config.offsetX;
            this.offsetY = config.offsetY;
            this.offsetZ = config.offsetZ;
            this.spawnsPerBlock = config.spawnsPerBlock;
            this.homing = config.homing;
            this.homingMode = config.homingMode;
            this.ticksBeforeHoming = config.stepsBeforeHoming;
            this.bounce = config.bounce;
            this.hitSelfWhenBounced = config.hitSelfWhenBounced;
            this.gravity = config.gravity;
            this.particleSpeed = config.particleSpeed;
            this.suppressMelee = config.suppressMelee;
            this.shape = config.shape;
            this.shapeParam = config.shapeParam;
            this.extraData = config.extraData;
            this.speedBias = config.speedBias;
            power = config;
            lengthPerSpawn = 1 / spawnsPerBlock;
        }

        @Override
        public void run() {
            world = fromLocation.getWorld();
            if (world == null) return;
            if (Double.isInfinite(lengthPerSpawn)) {
                return;
            }
            // Work on a copy so stepping the beam never mutates the caller's
            // location; this also fixes the old null lastLocation field.
            lastLocation = fromLocation.clone();
            if (mode.equals(Mode.BEAM)) {
                // BEAM mode renders the full length in a single tick.
                this.speed = 20 * length;
            }
            new RecursiveTask().runTaskAsynchronously(RPGItems.plugin);
        }

        public void setItemStack(ItemStack stack) {
            this.itemStack = stack;
        }

        /** One tick of beam advancement; reschedules itself until done. */
        class RecursiveTask extends BukkitRunnable {
            @Override
            public void run() {
                try {
                    double lengthInThisTick = getNextLength(spawnedLength, length) + lengthRemains.get();
                    int cycle = 0;
                    while ((lengthInThisTick -= lengthPerSpawn) > 0) {
                        double lengthInThisSpawn = lengthPerSpawn;
                        if (spawnedLength.get() + lengthInThisTick > length) {
                            lengthInThisSpawn = length - spawnedLength.get() + 0.1;
                        }
                        boolean isHit = tryHit(fromEntity, lastLocation, itemStack, bounced && hitSelfWhenBounced);
                        // Wall check every third spawn to limit block lookups.
                        if (cycle++ > 2) {
                            if (!ignoreWall && !transp.contains(lastLocation.getBlock().getType())) {
                                if (bounce > 0) {
                                    bounce--;           // consume one bounce
                                    bounced = true;
                                    makeBounce(lastLocation.getBlock(), towards, lastLocation);
                                } else {
                                    return;             // hit a wall, beam ends
                                }
                            }
                            cycle = 0;
                        }
                        spawnParticle(fromEntity, world, lastLocation, 1);
                        Vector step = towards.normalize().multiply(lengthInThisSpawn);
                        lastLocation.add(step);
                        spawnedLength.addAndGet(lengthPerSpawn);
                        // Apply homing: the corrected direction must be
                        // assigned back (it was previously discarded).
                        // currentStep drives stepsBeforeHoming — TODO confirm
                        // against upstream; the old code passed
                        // ticksBeforeHoming as its own counter.
                        Entity homingTarget = targets == null ? null : targets.peek();
                        towards = homingCorrect(towards, lastLocation, lengthInThisTick, homingTarget, currentStep.incrementAndGet(), () -> {
                            if (homingMode.equals(HomingMode.ONE_TARGET)) {
                                targets.poll();         // discard dead target
                            }
                        });
                        if (isHit) {
                            if (pierce > 0) {
                                pierce--;               // consume one pierce
                                if (homingMode.equals(HomingMode.MULTI_TARGET) && targets != null) {
                                    targets.poll();
                                }
                            } else {
                                return;                 // no pierce left, beam ends
                            }
                        }
                    }
                    addGravity(towards, 1);
                    // Carry the fractional leftover into the next tick.
                    lengthRemains.set(lengthInThisTick + lengthPerSpawn);
                    if (spawnedLength.get() >= length) {
                        return;
                    }
                    new RecursiveTask().runTaskLaterAsynchronously(RPGItems.plugin, 1);
                } catch (Exception e) {
                    // Log instead of silently swallowing, then stop this beam.
                    RPGItems.plugin.getLogger().log(Level.WARNING, "Beam tick failed", e);
                    this.cancel();
                }
            }

            /**
             * Blocks to advance this tick: speed/20, optionally scaled by the
             * {@code speedBias} expression evaluated at progress x in [0,1].
             */
            private double getNextLength(AtomicDouble spawnedLength, int length) {
                if (speedBias == null || speedBias.isEmpty()) {
                    // Empty bias (the default) means constant speed; the old
                    // code fed "" to Expression and threw on every tick.
                    return speed / 20.0;
                }
                Expression eval = new Expression(speedBias).with("x", new Expression.LazyNumber() {
                    @Override
                    public BigDecimal eval() {
                        return BigDecimal.valueOf(spawnedLength.get() / ((double) length));
                    }

                    @Override
                    public String getString() {
                        return String.valueOf(spawnedLength.get() / ((double) length));
                    }
                });
                return speed * (eval.eval().doubleValue()) / 20;
            }
        }

        /** Reflects {@code towards} off the face of the block that was hit. */
        private void makeBounce(Block block, Vector towards, Location lastLocation) {
            RayTraceResult rayTraceResult = block.rayTrace(lastLocation, towards, towards.length(), FluidCollisionMode.NEVER);
            if (rayTraceResult == null || rayTraceResult.getHitBlockFace() == null) {
                // No face to reflect off (e.g. started inside the block).
                return;
            }
            towards.rotateAroundNonUnitAxis(rayTraceResult.getHitBlockFace().getDirection(), Math.toRadians(180)).multiply(-1);
        }

        /**
         * Returns the direction for the next step, rotated toward the target
         * by at most the per-step homing budget. {@code runnable} is invoked
         * when the current target is dead so the caller can advance the queue.
         */
        private Vector homingCorrect(Vector towards, Location lastLocation, double lengthInThisTick, Entity target, int i, Runnable runnable) {
            if (target == null || i < ticksBeforeHoming) {
                return towards;
            }
            if (target.isDead()) {
                runnable.run();
            }
            Location targetLocation;
            if (target instanceof LivingEntity) {
                targetLocation = ((LivingEntity) target).getEyeLocation();
            } else {
                targetLocation = target.getLocation();
            }
            Vector clone = towards.clone();
            Vector targetDirection = targetLocation.toVector().subtract(lastLocation.toVector());
            float angle = clone.angle(targetDirection);
            Vector crossProduct = clone.clone().getCrossProduct(targetDirection);
            // Max turn for this step, scaled by homing strength and step size.
            double actualAng = homing / 20 / (lengthInThisTick / lengthPerSpawn);
            if (angle > Math.toRadians(actualAng)) {
                if (this.shape.equals(BeamShape.LEGACY_HOMING)) {
                    // legacy behavior: traces an enlarging circle around the target
                    clone.add(clone.clone().getCrossProduct(crossProduct).normalize().multiply(-1 * Math.tan(actualAng)));
                } else {
                    // rotate by the budget toward the target (exact circle)
                    clone.rotateAroundAxis(crossProduct, actualAng);
                }
            } else {
                // Within the budget: snap straight onto the target.
                clone = targetDirection.normalize();
            }
            return clone;
        }

        // Alternates between player-only and world-visible spawning for
        // player shooters, halving the particles each player sees.
        private boolean spawnInWorld = false;

        private void spawnParticle(Entity from, World world, Location lastLocation, int i) {
            if (from instanceof Player) {
                Location eyeLocation = ((Player) from).getEyeLocation();
                if (lastLocation.distance(eyeLocation) < 1) {
                    // Skip particles right in the shooter's face.
                    return;
                }
                if (spawnInWorld) {
                    ((Player) from).spawnParticle(this.particle, lastLocation, i / 2, offsetX, offsetY, offsetZ, particleSpeed, extraData);
                } else {
                    world.spawnParticle(this.particle, lastLocation, i, offsetX, offsetY, offsetZ, particleSpeed, extraData, false);
                }
                spawnInWorld = !spawnInWorld;
            } else {
                world.spawnParticle(this.particle, lastLocation, i, offsetX, offsetY, offsetZ, particleSpeed, extraData, false);
            }
        }

        /**
         * Damages entities overlapping the beam at {@code loc}.
         * With pierce enabled only the first candidate is damaged; otherwise
         * every overlapping entity is. Sets LightContext so the damage event
         * carries this power's source/override/suppress-melee flags.
         *
         * @return true if at least one entity was damaged
         */
        private boolean tryHit(Entity from, Location loc, ItemStack stack, boolean canHitSelf) {
            if (from == null) return false;
            double offsetLength = new Vector(offsetX, offsetY, offsetZ).length();
            // NOTE(review): Math.max gives a search radius of at least 10
            // blocks — looks like it may have been meant as Math.min; kept
            // as-is to preserve behavior.
            double length = Double.isNaN(offsetLength) ? 0 : Math.max(offsetLength, 10);
            Collection<Entity> candidates = from.getWorld().getNearbyEntities(loc, length, length, length);
            boolean result = false;
            if (pierce > 0) {
                List<Entity> collect = candidates.stream()
                        .filter(entity -> (entity instanceof LivingEntity) && (!isUtilArmorStand((LivingEntity) entity)) && (canHitSelf || !entity.equals(from)) && !entity.isDead())
                        .filter(entity -> canHit(loc, entity))
                        .limit(1)
                        .collect(Collectors.toList());
                if (!collect.isEmpty()) {
                    Entity entity = collect.get(0);
                    if (entity instanceof LivingEntity) {
                        LightContext.putTemp(from.getUniqueId(), DAMAGE_SOURCE, power.getNamespacedKey().toString());
                        LightContext.putTemp(from.getUniqueId(), OVERRIDING_DAMAGE, damage);
                        LightContext.putTemp(from.getUniqueId(), SUPPRESS_MELEE, suppressMelee);
                        LightContext.putTemp(from.getUniqueId(), DAMAGE_SOURCE_ITEM, stack);
                        ((LivingEntity) entity).damage(damage, from);
                        LightContext.clear();
                    }
                    return true;
                }
            } else {
                List<Entity> collect = candidates.stream()
                        .filter(entity -> (entity instanceof LivingEntity) && (!isUtilArmorStand((LivingEntity) entity)) && (canHitSelf || !entity.equals(from)))
                        .filter(entity -> canHit(loc, entity))
                        .collect(Collectors.toList());
                LightContext.putTemp(from.getUniqueId(), DAMAGE_SOURCE, power.getNamespacedKey().toString());
                LightContext.putTemp(from.getUniqueId(), OVERRIDING_DAMAGE, damage);
                LightContext.putTemp(from.getUniqueId(), SUPPRESS_MELEE, suppressMelee);
                LightContext.putTemp(from.getUniqueId(), DAMAGE_SOURCE_ITEM, stack);
                if (!collect.isEmpty()) {
                    collect.stream()
                            .map(entity -> ((LivingEntity) entity))
                            .forEach(livingEntity -> livingEntity.damage(damage, from));
                    result = true;
                }
                LightContext.clear();
            }
            return result;
        }

        /** AABB overlap test between the entity and a small box around loc. */
        private boolean canHit(Location loc, Entity entity) {
            BoundingBox boundingBox = entity.getBoundingBox();
            double x = Math.max(offsetX, 0.1);
            double y = Math.max(offsetY, 0.1);
            double z = Math.max(offsetZ, 0.1);
            BoundingBox particleBox = BoundingBox.of(loc, x + 0.1, y + 0.1, z + 0.1);
            return boundingBox.overlaps(particleBox) || particleBox.overlaps(boundingBox);
        }

        // Reused scratch vector; its Y is overwritten on every call.
        Vector gravityVector = new Vector(0, -gravity / 20, 0);

        private Vector addGravity(Vector towards, double partsPerTick) {
            double gravityPerTick = (-gravity / 20d) / partsPerTick;
            gravityVector.setY(gravityPerTick);
            return towards.add(gravityVector);
        }

        public void setTarget(Queue<Entity> targets) {
            this.targets = targets;
        }

        public void setFromEntity(Entity fromEntity) {
            this.fromEntity = fromEntity;
            this.fromLocation = fromEntity.getLocation();
            this.towards = fromLocation.getDirection();
        }

        public void setFromLocation(Location from) {
            this.fromLocation = from;
        }

        public void setTowards(Vector towards) {
            this.towards = towards;
        }
    }

    /** Fluent builder around {@link MovingTask}; can be called anywhere. */
    class MovingTaskBuilder {
        MovingTask movingTask;

        public MovingTaskBuilder(PowerBeam power) {
            this.movingTask = new MovingTask(power);
        }

        public MovingTaskBuilder towards(Vector towards) {
            movingTask.setTowards(towards);
            return this;
        }

        public MovingTaskBuilder fromLocation(Location location) {
            movingTask.setFromLocation(location);
            return this;
        }

        public MovingTaskBuilder fromEntity(Entity entity) {
            movingTask.setFromEntity(entity);
            return this;
        }

        public MovingTaskBuilder targets(Queue<Entity> targets) {
            movingTask.setTarget(targets);
            return this;
        }

        public MovingTaskBuilder itemStack(ItemStack stack) {
            movingTask.setItemStack(stack);
            return this;
        }

        public MovingTask build() {
            return movingTask;
        }
    }

    /** True for invisible marker armor stands (utility entities, not targets). */
    private static boolean isUtilArmorStand(LivingEntity livingEntity) {
        if (livingEntity instanceof ArmorStand) {
            ArmorStand arm = (ArmorStand) livingEntity;
            return arm.isMarker() && !arm.isVisible();
        }
        return false;
    }

    /**
     * Acquires homing candidates: living entities inside the homing cone,
     * sorted by angle, filtered by {@link #homingTarget} and excluding
     * utility armor stands and spectators.
     */
    private List<Entity> getTargets(Vector direction, Location fromLocation, LivingEntity from) {
        int radius = Math.min(this.length, 300);
        return Utils.getLivingEntitiesInConeSorted(from.getNearbyEntities(radius, this.length * 1.5, this.length * 1.5).stream()
                        .filter(entity -> entity instanceof LivingEntity && !entity.equals(from) && !entity.isDead())
                        .map(entity -> ((LivingEntity) entity))
                        .collect(Collectors.toList())
                , fromLocation.toVector(), homingAngle, direction).stream()
                .filter(livingEntity -> {
                    if (isUtilArmorStand(livingEntity)) {
                        return false;
                    }
                    switch (homingTarget) {
                        case MOBS:
                            return !(livingEntity instanceof Player);
                        case PLAYERS:
                            return livingEntity instanceof Player && !((Player) livingEntity).getGameMode().equals(GameMode.SPECTATOR);
                        case ALL:
                            return !(livingEntity instanceof Player) || !((Player) livingEntity).getGameMode().equals(GameMode.SPECTATOR);
                    }
                    return true;
                }).collect(Collectors.toList());
    }

    private enum Mode {
        BEAM,
        PROJECTILE,
        ;
    }

    /**
     * (De)serializes {@link Particle.DustOptions} extra data as
     * "r,g,b,size"; other extra-data types serialize to "".
     */
    public class ExtraDataSerializer implements Getter, Setter {
        @Override
        public String get(Object object) {
            if (object instanceof Particle.DustOptions) {
                Color color = ((Particle.DustOptions) object).getColor();
                return color.getRed() + "," + color.getGreen() + "," + color.getBlue() + "," + ((Particle.DustOptions) object).getSize();
            }
            return "";
        }

        @Override
        public Optional set(String value) throws IllegalArgumentException {
            String[] split = value.split(",", 4);
            int r = Integer.parseInt(split[0]);
            int g = Integer.parseInt(split[1]);
            int b = Integer.parseInt(split[2]);
            float size = Float.parseFloat(split[3]);
            return Optional.of(new Particle.DustOptions(Color.fromRGB(r, g, b), size));
        }
    }

    enum Target {
        MOBS, PLAYERS, ALL
    }

    /** Beam trajectory shapes; bias hooks are placeholders (return null). */
    public enum BeamShape {
        PLAIN(PlainBias.class, Void.class),
        DNA(DnaBias.class, DnaBias.DnaParams.class),
        CIRCLE(CircleBias.class, CircleBias.CircleParams.class),
        LEGACY_HOMING(PlainBias.class, Void.class);

        private Class<? extends IBias> iBias;
        private Class<?> paramType;

        BeamShape(Class<? extends IBias> iBias, Class<?> paramType) {
            this.iBias = iBias;
            this.paramType = paramType;
        }

        public List<Vector> getBiases(Location location, Vector towards, MovingTask context, String params) {
            return null;
        }
    }

    interface IBias<T> {
        List<Vector> getBiases(Location location, Vector towards, MovingTask context, T params);
    }

    static class PlainBias implements IBias<Void> {
        @Override
        public List<Vector> getBiases(Location location, Vector towards, MovingTask context, Void params) {
            return null;
        }
    }

    static class CircleBias implements IBias<CircleBias.CircleParams> {
        private CircleParams params;

        @Override
        public List<Vector> getBiases(Location location, Vector towards, MovingTask context, CircleParams params) {
            return null;
        }

        static class CircleParams {
            public double r = 1;
            public String rFunc = "";
        }
    }

    static class DnaBias implements IBias<DnaBias.DnaParams> {
        @Override
        public List<Vector> getBiases(Location location, Vector towards, MovingTask context, DnaParams params) {
            return null;
        }

        static class DnaParams {
            double amount = 2;
            double r = 1;
            String rFunc = "";
        }
    }

    private enum HomingMode {
        ONE_TARGET, MULTI_TARGET, MOUSE_TRACK
    }
}
// Portions of this code are:
// NOTE(review): this license/attribution header appears truncated in this copy
// of the file -- the text that followed "are:" is missing. Restore from the
// upstream LOCKSS / Jetty (org.mortbay) sources.
package org.lockss.jetty;

import java.io.*;
import java.util.*;
import org.apache.commons.logging.Log;
import org.mortbay.http.*;
import org.mortbay.http.handler.*;
import org.mortbay.log.LogFactory;
import org.mortbay.util.*;
import com.sun.jimi.core.*;
import com.sun.jimi.core.raster.JimiRasterImage;
import org.lockss.app.LockssDaemon;
import org.lockss.config.CurrentConfig;
import org.lockss.plugin.CachedUrl;
import org.lockss.proxy.ProxyManager;
import org.lockss.util.*;
import org.lockss.util.StringUtil;

/** Extension of ResourceHandler that allows flexibility in finding the
 * Resource.  Mostly copied here because some things in ResourceHandler
 * aren't public or protected.  Serves static/cached content for the LOCKSS
 * daemon, including CachedUrl-backed resources ({@link CuUrlResource}),
 * with support for conditional requests, byte ranges, directory listings,
 * and LOCKSS-specific redirect and stored-header handling. */
public class LockssResourceHandler extends AbstractHttpHandler {

  // NOTE(review): the logger is keyed to ResourceHandler.class, not
  // LockssResourceHandler.class -- possibly deliberate (shared log category
  // with the class this was copied from); confirm before changing.
  private static Log log = LogFactory.getLog(ResourceHandler.class);

  private LockssDaemon theDaemon = null;
  private ProxyManager proxyMgr = null;
  // Whether to advertise and honor Range requests (see setAcceptRanges()).
  private boolean _acceptRanges=true;
  // If true, welcome files are served via redirect; if false, via forward.
  private boolean _redirectWelcomeFiles ;
  // If non-null, a request for "/" is redirected to this path.
  private String _redirectRootTo ;
  private String[] _methods=null;
  // Comma-separated form of _methods, used for the Allow header.
  private String _allowed;
  private boolean _dirAllowed=true;
  // Minimum content length for gzip substitution; -1 disables (see
  // setMinGzipLength()).
  private int _minGzipLength =-1;
  // Map used as a set for O(1) allowed-method lookup.
  private StringMap _methodMap = new StringMap();

  // Instance initializer: establish the default allowed-method set before
  // any constructor body runs.
  { setAllowedMethods(new String[] { HttpRequest.__GET, HttpRequest.__MOVE, HttpRequest.__POST, HttpRequest.__HEAD, HttpRequest.__OPTIONS, HttpRequest.__TRACE }); }

  /** Construct a ResourceHandler.
   * @param daemon the owning LockssDaemon, from which the ProxyManager is
   * obtained (used for configuration-dependent behavior below). */
  public LockssResourceHandler(LockssDaemon daemon) {
    theDaemon = daemon;
    proxyMgr = theDaemon.getProxyManager();
  }

  public synchronized void start() throws Exception {
    super.start();
  }

  public void stop() throws InterruptedException {
    super.stop();
  }

  /** Set the path that requests for "/" are redirected to (null disables). */
  public void setRedirectRootTo(String target) {
    _redirectRootTo = target;
  }

  public String[] getAllowedMethods() {
    return _methods;
  }

  /** Set the HTTP methods this handler accepts; also rebuilds the lookup
   * map and the comma-separated Allow-header string. */
  public void setAllowedMethods(String[] methods) {
    StringBuffer b = new StringBuffer();
    _methods=methods;
    _methodMap.clear();
    for (int i=0;i<methods.length;i++) {
      _methodMap.put(methods[i],methods[i]);
      if (i>0) b.append(',');
      b.append(methods[i]);
    }
    _allowed=b.toString();
  }

  public boolean isMethodAllowed(String method) {
    return _methodMap.get(method)!=null;
  }

  /** @return the comma-separated list of allowed methods, suitable for an
   * Allow header. */
  public String getAllowedString() {
    return _allowed;
  }

  public boolean isDirAllowed() {
    return _dirAllowed;
  }

  /** Enable or disable directory listings (403 is sent when disabled). */
  public void setDirAllowed(boolean dirAllowed) {
    _dirAllowed = dirAllowed;
  }

  public boolean isAcceptRanges() {
    return _acceptRanges;
  }

  /**
   * @return True if welcome files are redirected to. False if forward is
   * used.
   */
  public boolean getRedirectWelcome() {
    return _redirectWelcomeFiles;
  }

  /**
   * @param redirectWelcome True if welcome files are redirected to. False
   * if forward is used.
   */
  public void setRedirectWelcome(boolean redirectWelcome) {
    _redirectWelcomeFiles = redirectWelcome;
  }

  /** Set if the handler accepts range requests.
   * NOTE(review): this javadoc originally said "Default is false", but the
   * field initializer sets _acceptRanges=true -- confirm which is intended.
   * @param ar True if the handler should accept ranges
   */
  public void setAcceptRanges(boolean ar) {
    _acceptRanges=ar;
  }

  /** Get minimum content length for GZIP encoding.
   * @return Minimum length of content for gzip encoding or -1 if disabled.
   */
  public int getMinGzipLength() {
    return _minGzipLength;
  }

  /** Set minimum content length for GZIP encoding.
   * @param minGzipLength If set to a positive integer, then static content
   * larger than this will be served as gzip content encoded
   * if a matching resource is found ending with ".gz"
   */
  public void setMinGzipLength(int minGzipLength) {
    _minGzipLength = minGzipLength;
  }

  /** get Resource to serve.
   * Map a path to a resource. The default implementation calls
   * HttpContext.getResource but derived handlers may provide
   * their own mapping.
   * @param pathInContext The path to find a resource for.
   * @return The resource to serve.
   */
  protected Resource getResource(HttpRequest request, String pathInContext)
      throws IOException {
    return getHttpContext().getResource(pathInContext);
  }

  /** Main entry point: resolve the Resource for the request path and
   * dispatch to the per-method handler.  Releases the resource afterwards
   * unless it is a CachedResource (which is owned by the cache). */
  public void handle(String pathInContext, String pathParams, HttpRequest request, HttpResponse response)
      throws HttpException, IOException {
    Resource resource = getResource(request, pathInContext);
    if (resource==null) return;
    // Is the method allowed?
    if (!isMethodAllowed(request.getMethod())) {
      if(log.isDebugEnabled())log.debug("Method not allowed: "+request.getMethod());
      if (resource.exists()) {
        setAllowHeader(response);
        response.sendError(HttpResponse.__405_Method_Not_Allowed);
      }
      return;
    }
    // Handle the request
    try {
      if(log.isDebugEnabled())log.debug("PATH="+pathInContext+" RESOURCE="+resource);
      // check filename
      String method=request.getMethod();
      if (method.equals(HttpRequest.__GET) || method.equals(HttpRequest.__POST) || method.equals(HttpRequest.__HEAD))
        handleGet(request, response, pathInContext, pathParams, resource);
      else if (method.equals(HttpRequest.__PUT))
        handlePut(request, response, pathInContext, resource);
      else if (method.equals(HttpRequest.__DELETE))
        handleDelete(request, response, pathInContext, resource);
      else if (method.equals(HttpRequest.__OPTIONS))
        handleOptions(response, pathInContext);
      else if (method.equals(HttpRequest.__MOVE))
        handleMove(request, response, pathInContext, resource);
      else if (method.equals(HttpRequest.__TRACE))
        handleTrace(request, response);
      else {
        if(log.isDebugEnabled())log.debug("Unknown action:"+method);
        // anything else...
        try{
          if (resource.exists())
            response.sendError(HttpResponse.__501_Not_Implemented);
        } catch(Exception e) {LogSupport.ignore(log,e);}
      }
    } catch(IllegalArgumentException e) {
      LogSupport.ignore(log,e);
    } finally {
      // CachedResources are managed by the resource cache; everything else
      // must be released here.
      if (resource!=null && !(resource instanceof CachedResource))
        resource.release();
    }
  }

  /** Handle GET/POST/HEAD: directory redirects, welcome files, LOCKSS
   * redirects, conditional headers, then send the content. */
  public void handleGet(HttpRequest request, HttpResponse response, String pathInContext, String pathParams, Resource resource)
      throws IOException {
    if(log.isDebugEnabled())log.debug("Looking for "+resource);
    if (resource!=null && resource.exists()) {
      // check if directory
      if (resource.isDirectory()) {
        // A directory requested without a trailing slash is redirected to
        // the slash form so relative links resolve correctly.
        if (!pathInContext.endsWith("/") && !pathInContext.equals("/")) {
          log.debug("Redirect to directory/");
          String q=request.getQuery();
          StringBuffer buf=request.getRequestURL();
          if (q!=null&&q.length()!=0) {
            buf.append('?');
            buf.append(q);
          }
          response.setField(HttpFields.__Location, URI.addPaths(buf.toString(),"/"));
          response.setStatus(302);
          request.setHandled(true);
          return;
        }
        // Optional configured redirect of the context root.
        if (_redirectRootTo != null && pathInContext.equals("/")) {
          log.debug("Redirect root to " + _redirectRootTo);
          String q=request.getQuery();
          StringBuffer buf=request.getRequestURL();
          if (q!=null&&q.length()!=0) {
            buf.append('?');
            buf.append(q);
          }
          response.setField(HttpFields.__Location, URI.addPaths(buf.toString(), _redirectRootTo));
          response.setStatus(302);
          request.setHandled(true);
          return;
        }
        // See if index file exists
        String welcome=getHttpContext().getWelcomeFile(resource);
        if (welcome!=null) {
          // Forward to the index
          String ipath=URI.addPaths(pathInContext,welcome);
          if (_redirectWelcomeFiles) {
            // Redirect to the index
            ipath=URI.addPaths(getHttpContext().getContextPath(),ipath);
            response.setContentLength(0);
            response.sendRedirect(ipath);
          } else {
            // Internal forward: rewrite the URI path and re-dispatch.
            URI uri=request.getURI();
            uri.setPath(URI.addPaths(uri.getPath(),welcome));
            getHttpContext().handle(ipath,pathParams,request,response);
          }
          return;
        }
        // Check modified dates
        if (!passConditionalHeaders(request,response,resource)) return;
        // If we got here, no forward to index took place
        sendDirectory(request,response,resource,pathInContext.length()>1);
      }
      // LOCKSS-specific: a cached node may carry a redirect even though it
      // has content; honor it before serving the body.
      else if (handleLockssRedirect(request, response, pathInContext, pathParams, resource)) {
        return;
      }
      // check if it is a file
      else if (resource.exists()) {
        // Check modified dates
        if (!passConditionalHeaders(request,response,resource)) return;
        sendData(request,response,pathInContext,resource,true);
      }
      else
        // don't know what it is
        log.warn("Unknown file type");
    }
  }

  // CachedUrls may have content, yet specify a redirect elsewhere. The
  // redirect must be returned to the requestor.
  /** @return true if a 301 redirect was issued (request is handled),
   * false if the caller should serve the resource normally. */
  boolean handleLockssRedirect(HttpRequest request, HttpResponse response, String pathInContext, String pathParams, Resource resource) {
    if (!(resource instanceof CuUrlResource)) {
      return false;
    }
    CuUrlResource cur = (CuUrlResource)resource;
    String nodeUrl = cur.getProperty(CachedUrl.PROPERTY_NODE_URL);
    String rTo = cur.getProperty(CachedUrl.PROPERTY_REDIRECTED_TO);
    String reqUrl = request.getRequestURL().toString();
    // follow any redirect property, unless it points at current URL. (Can
    // happen on "directory" nodes, which have two names, with and without
    // slash.)
    if (rTo != null) {
      if (rTo.equals(reqUrl)) {
        return false;
      } else {
        sendLockssRedirect(request, response, pathInContext, pathParams, resource, rTo);
        return true;
      }
    } else
      // Can't count on directory node having a redirected-to property,
      // because it might have been collected with a final slash, hence no
      // redirect.  If request path doesn't end with slash but node's path
      // does, then this is a request for a node that's actually a
      // "directory", so issue the redirect.
      if (!pathInContext.endsWith("/")) {
        URI nodeUri = new URI(nodeUrl);
        if (nodeUri.getPath().endsWith("/")) {
          sendLockssRedirect(request, response, pathInContext, pathParams, resource, nodeUrl);
          return true;
        }
      }
    return false;
  }

  /** Issue a permanent (301) redirect to {@code to} and mark the request
   * handled. */
  void sendLockssRedirect(HttpRequest request, HttpResponse response, String pathInContext, String pathParams, Resource resource, String to) {
    response.setField(HttpFields.__Location, to);
    response.setStatus(HttpResponse.__301_Moved_Permanently);
    request.setHandled(true);
  }

  /* Check modification date headers. */
  /** @return true if the request should proceed to send content; false if
   * a 304/412 response was already set (request marked handled where
   * appropriate). */
  private boolean passConditionalHeaders(HttpRequest request, HttpResponse response, Resource resource)
      throws IOException {
    boolean ignoreIfModified = false;
    // If configured, ignore If-Modified-Since when the stored Content-Length
    // property disagrees with the actual resource length (content likely
    // damaged or re-collected; force a full response).
    if (CurrentConfig.getCurrentConfig().getBoolean(ProxyManager.PARAM_IGNORE_IF_MODIFIED_WHEN_CONTENT_LENGTH_WRONG, ProxyManager.DEFAULT_IGNORE_IF_MODIFIED_WHEN_CONTENT_LENGTH_WRONG) && resource instanceof CuUrlResource) {
      CuUrlResource cur = (CuUrlResource)resource;
      String clenHdr = cur.getProperty(HttpFields.__ContentLength);
      if (!StringUtil.isNullString(clenHdr)) {
        try {
          long clen = Long.parseLong(clenHdr);
          if (clen != resource.length()) {
            ignoreIfModified = true;
            log.debug("ignoring If-Modified-Since: " + cur.getURL());
          }
        } catch (NumberFormatException e) {
          log.warn("Error parsing Content-Length: " + clenHdr + " of " + cur.getURL());
        }
      }
    }
    if (!request.getMethod().equals(HttpRequest.__HEAD)) {
      // If we have meta data for the file
      // Try a direct match for most common requests. Avoids
      // parsing the date.
      ResourceCache.ResourceMetaData metaData =
        (ResourceCache.ResourceMetaData)resource.getAssociate();
      if (metaData != null && resource.lastModified() > 0) {
        String ifms=request.getField(HttpFields.__IfModifiedSince);
        if (ignoreIfModified) {
          ifms = null;
        }
        String mdlm=metaData.getLastModified();
        if (ifms != null && mdlm != null && ifms.equals(mdlm)) {
          response.setStatus(HttpResponse.__304_Not_Modified);
          request.setHandled(true);
          return false;
        }
      }
      long date=0;
      // Parse the if[un]modified dates and compare to resource.
      // Comparisons are done at one-second granularity (HTTP dates have no
      // sub-second resolution).
      if ((date=request.getDateField(HttpFields.__IfUnmodifiedSince))>0) {
        if (resource.lastModified()/1000 > date/1000) {
          response.sendError(HttpResponse.__412_Precondition_Failed);
          return false;
        }
      }
      if (!ignoreIfModified && (date=request.getDateField(HttpFields.__IfModifiedSince))>0) {
        if (resource.lastModified()/1000 <= date/1000) {
          response.setStatus(HttpResponse.__304_Not_Modified);
          request.setHandled(true);
          return false;
        }
      }
    }
    return true;
  }

  /** Handle PUT: create directories for trailing-slash paths, otherwise
   * copy the request body into the resource (201 on create, 200 on
   * overwrite, 403 on failure). */
  void handlePut(HttpRequest request, HttpResponse response, String pathInContext, Resource resource)
      throws IOException {
    if(log.isDebugEnabled())log.debug("PUT "+pathInContext+" in "+resource);
    boolean exists=resource!=null && resource.exists();
    if (exists && !passConditionalHeaders(request,response,resource))
      return;
    if (pathInContext.endsWith("/")) {
      if (!exists) {
        // NOTE(review): resource.getFile() may be null for non-file-backed
        // resources, which would NPE here -- confirm PUT is only enabled on
        // file-backed contexts.
        if (!resource.getFile().mkdirs())
          response.sendError(HttpResponse.__403_Forbidden, "Directories could not be created");
        else {
          request.setHandled(true);
          response.setStatus(HttpResponse.__201_Created);
          response.commit();
        }
      } else {
        request.setHandled(true);
        response.setStatus(HttpResponse.__200_OK);
        response.commit();
      }
    } else {
      try {
        int toRead = request.getContentLength();
        InputStream in = request.getInputStream();
        OutputStream out = resource.getOutputStream();
        // Copy exactly Content-Length bytes when known, else to EOF.
        if (toRead>=0)
          IO.copy(in,out,toRead);
        else
          IO.copy(in,out);
        out.close();
        request.setHandled(true);
        response.setStatus(exists ?HttpResponse.__200_OK :HttpResponse.__201_Created);
        response.commit();
      } catch (Exception ex) {
        log.warn(LogSupport.EXCEPTION,ex);
        response.sendError(HttpResponse.__403_Forbidden, ex.getMessage());
      }
    }
  }

  /** Handle DELETE: remove the resource (204 on success, 403 on failure or
   * security violation). */
  void handleDelete(HttpRequest request, HttpResponse response, String pathInContext, Resource resource)
      throws IOException {
    if(log.isDebugEnabled())log.debug("DELETE "+pathInContext+" from "+resource);
    if (!resource.exists() || !passConditionalHeaders(request,response,resource))
      return;
    try {
      // delete the file
      if (resource.delete())
        response.setStatus(HttpResponse.__204_No_Content);
      else
        response.sendError(HttpResponse.__403_Forbidden);
      // Send response
      request.setHandled(true);
    } catch (SecurityException sex) {
      log.warn(LogSupport.EXCEPTION,sex);
      response.sendError(HttpResponse.__403_Forbidden, sex.getMessage());
    }
  }

  /** Handle MOVE: rename the resource to the path given by the "New-uri"
   * request header, which must lie within this context (405 otherwise). */
  void handleMove(HttpRequest request, HttpResponse response, String pathInContext, Resource resource)
      throws IOException {
    if (!resource.exists() || !passConditionalHeaders(request,response,resource))
      return;
    String newPath = URI.canonicalPath(request.getField("New-uri"));
    if (newPath==null) {
      response.sendError(HttpResponse.__405_Method_Not_Allowed, "Bad new uri");
      return;
    }
    // Refuse to move outside this context.
    String contextPath = getHttpContext().getContextPath();
    if (contextPath!=null && !newPath.startsWith(contextPath)) {
      response.sendError(HttpResponse.__405_Method_Not_Allowed, "Not in context");
      return;
    }
    // Find path
    try {
      // XXX - Check this
      String newInfo=newPath;
      if (contextPath!=null)
        newInfo=newInfo.substring(contextPath.length());
      Resource newFile = getHttpContext().getBaseResource().addPath(newInfo);
      if(log.isDebugEnabled())log.debug("Moving "+resource+" to "+newFile);
      resource.renameTo(newFile);
      response.setStatus(HttpResponse.__204_No_Content);
      request.setHandled(true);
    } catch (Exception ex) {
      log.warn(LogSupport.EXCEPTION,ex);
      setAllowHeader(response);
      response.sendError(HttpResponse.__405_Method_Not_Allowed, "Error:"+ex);
      return;
    }
  }

  /** Handle OPTIONS: reply with the Allow header ("*" target gets no
   * body or headers per the original behavior). */
  void handleOptions(HttpResponse response, String pathInContext)
      throws IOException {
    if ("*".equals(pathInContext))
      return;
    setAllowHeader(response);
    response.commit();
  }

  void setAllowHeader(HttpResponse response) {
    response.setField(HttpFields.__Allow, getAllowedString());
  }

  /** Write Content-Type, Content-Length, Last-Modified, Accept-Ranges and
   * (for CuUrlResources) any stored headers onto the response.
   * @param count number of body bytes to be sent, or -1 to suppress
   * Content-Length. */
  public void writeHeaders(HttpResponse response,Resource resource, long count)
      throws IOException {
    ResourceCache.ResourceMetaData metaData =
      (ResourceCache.ResourceMetaData)resource.getAssociate();
    CuUrlResource cur = null;
    String ctype = null;
    // Prefer the content type stored with the cached URL, if any.
    if (resource instanceof CuUrlResource) {
      cur = (CuUrlResource)resource;
      ctype = cur.getProperty(CachedUrl.PROPERTY_CONTENT_TYPE);
    }
    // Optionally fall back to the mime type inferred from the path.
    // NOTE(review): metaData could be null here (no cache associate), which
    // would NPE -- confirm resources reaching this point always have one.
    if (ctype == null && CurrentConfig.getCurrentConfig()
        .getBoolean(ProxyManager.PARAM_INFER_MIME_TYPE,
                    ProxyManager.DEFAULT_INFER_MIME_TYPE)) {
      ctype = metaData.getMimeType();
      log.trace("ctype from metadata: " + ctype);
    }
    response.setContentType(ctype);
    if (count != -1) {
      String origContentLength = null;
      if (cur != null) {
        origContentLength = cur.getProperty(HttpFields.__ContentLength);
      }
      if (count==resource.length()) {
        response.setField(HttpFields.__ContentLength,metaData.getLength());
      } else {
        if (count < Integer.MAX_VALUE) {
          response.setContentLength((int)count);
        } else {
          response.setField(HttpFields.__ContentLength, Long.toString(count));
        }
      }
      // If the stored Content-Length differs from what we are sending,
      // preserve the stored value under an orig_-prefixed header.
      if (origContentLength != null &&
          !StringUtil.equalStrings(origContentLength,
                                   response.getField(HttpFields.__ContentLength))) {
        response.setField(origPrefix(HttpFields.__ContentLength), origContentLength);
      }
    }
    if (resource.lastModified() > 0) {
      response.setField(HttpFields.__LastModified, metaData.getLastModified());
    }
    // Only advertise ranges to HTTP/1.1+ clients.
    if (_acceptRanges && response.getHttpRequest().getDotVersion()>0)
      response.setField(HttpFields.__AcceptRanges,"bytes");
    if (cur != null) {
      addStoredHeaders(response, cur);
    }
  }

  // Headers that should never be copied from one connection to another.
  // Copied from ProxyHandler._DontProxyHeaders
  // NOTE(review): __Connection appears twice in this list -- harmless for a
  // membership test, but likely a copy/paste slip.
  String[] DONT_PROXY_HEADERS = {
    HttpFields.__Connection,
    HttpFields.__ProxyConnection,
    HttpFields.__Connection,
    HttpFields.__KeepAlive,
    HttpFields.__TransferEncoding,
    HttpFields.__TE,
    HttpFields.__Trailer,
    HttpFields.__ProxyAuthorization,
    HttpFields.__ProxyAuthenticate,
    HttpFields.__Upgrade,
  };

  /** Copy headers stored with the cached URL into the response, subject to
   * LOCKSS internal/audit/orig-prefix rules and the dont-proxy list. */
  private void addStoredHeaders(HttpResponse response, CuUrlResource cur) {
    Map<String,List<String>> hdrMap = cur.getPropertyMap();
    for (Map.Entry<String,List<String>> ent : hdrMap.entrySet()) {
      String key = ent.getKey();
      List<String> valLst = ent.getValue();
      // CuUrlResource always produces one-element lists; skip if malformed
      if (valLst.size() != 1) {
        log.warn("Unexpected CuUrlResource property list for " + key +
                 " (" + valLst.size() + " items): " + valLst);
        continue;
      }
      // Maintain the original value of some keys by prefixing with orig_
      if (isHeaderKey(key, CachedUrl.LOCKSS_PREFIX_ORIG_PROPERTIES)) {
        String prefKey = origPrefix(key);
        // Don't replace existing orig_ header
        if (response.getField(prefKey) != null) {
          continue;
        }
        response.setField(prefKey, valLst.get(0));
        continue;
      }
      // Skip keys that already have a value in the response
      if (response.getField(key) != null) {
        continue;
      }
      // Skip internal props and connection-oriented props
      if (isHeaderKey(key, CachedUrl.LOCKSS_INTERNAL_PROPERTIES) ||
          isHeaderKey(key, DONT_PROXY_HEADERS)) {
        continue;
      }
      // If configured to copy response props, or if this is an audit prop
      // (repair info, checksum) that we're configured to include, store it
      // in the response.
      if (isHeaderKey(key, CachedUrl.LOCKSS_AUDIT_PROPERTIES)
          ? proxyMgr.isIncludeLockssAuditProps()
          : proxyMgr.isCopyStoredResponseHeaders()) {
        response.setField(key, valLst.get(0));
      }
    }
  }

  /** Case-insensitive membership test of {@code key} in {@code keyset}. */
  boolean isHeaderKey(String key, String[] keyset) {
    for (String s : keyset) {
      if (key.equalsIgnoreCase(s)) {
        return true;
      }
    }
    return false;
  }

  private String origPrefix(String s) {
    return CuResourceHandler.ORIG_HEADER_PREFIX + s;
  }

  /** Send the resource body: full content (with optional gzip substitution
   * and GIF-to-PNG rewrite), or 206 partial / multipart responses for Range
   * requests, or 416 when no range is satisfiable.
   * @param writeHeaders if false, Range headers are ignored and the body is
   * sent without header processing. */
  public void sendData(HttpRequest request, HttpResponse response, String pathInContext, Resource resource, boolean writeHeaders)
      throws IOException {
    long resLength=resource.length();
    // see if there are any range headers (only for HTTP/1.1+ clients)
    Enumeration reqRanges =
      request.getDotVersion()>0
      ?request.getFieldValues(HttpFields.__Range)
      :null;
    if (!writeHeaders || reqRanges == null || !reqRanges.hasMoreElements()) {
      // look for a gziped content.
      Resource data=resource;
      if (_minGzipLength>0) {
        String accept=request.getField(HttpFields.__AcceptEncoding);
        if (accept!=null && resLength>_minGzipLength &&
            !pathInContext.endsWith(".gz")) {
          Resource gz = getHttpContext().getResource(pathInContext+".gz");
          if (gz.exists() && accept.indexOf("gzip")>=0) {
            if(log.isDebugEnabled())log.debug("gzip="+gz);
            response.setField(HttpFields.__ContentEncoding,"gzip");
            data=gz;
            resLength=data.length();
          }
        }
      }
      writeHeaders(response,resource,resLength);
      request.setHandled(true);
      // (sethm) Begin content rewrite hack.
      // Content Rewriting: If the PARAM_REWRITE_GIF_PNG
      // config parameter is set, and this is GIF content, use
      // JIMI to rewrite as PNG
      InputStream in = data.getInputStream();
      OutputStream out = null;
      boolean enableRewrite =
        CurrentConfig.getCurrentConfig().getBoolean(ProxyManager.PARAM_REWRITE_GIF_PNG,
                                                    ProxyManager.DEFAULT_REWRITE_GIF_PNG);
      if (!proxyMgr.isRepairRequest(request) && enableRewrite &&
          "image/gif".equals(HeaderUtil.getMimeTypeFromContentType(response.getContentType())) &&
          "from-cache".equals(response.getField("X-Lockss"))) {
        try {
          JimiRasterImage img = Jimi.getRasterImage(in, Jimi.SYNCHRONOUS);
          // Content length cannot be known before the data is
          // written. Remove the Content-Length header.
          response.removeField("Content-Length");
          response.setContentType("image/png");
          out = response.getOutputStream();
          Jimi.putImage("image/png", img, out);
          out.flush();
        } catch (JimiException ex) {
          throw new IOException(ex.getMessage());
        }
      } else {
        out = response.getOutputStream();
        IO.copy(in, out, resLength);
      }
      if (in != null) {
        in.close();
      }
      if (out != null) {
        out.close();
      }
      // End hack.
      // OutputStream out = response.getOutputStream();
      // data.writeTo(out,0,resLength);
      return;
    }
    // Parse the satisfiable ranges
    List ranges =InclusiveByteRange.satisfiableRanges(reqRanges,resLength);
    if(log.isDebugEnabled())log.debug("ranges: " + reqRanges + " == " + ranges);
    // if there are no satisfiable ranges, send 416 response
    if (ranges==null || ranges.size()==0) {
      log.debug("no satisfiable ranges");
      writeHeaders(response, resource, resLength);
      response.setStatus(HttpResponse.__416_Requested_Range_Not_Satisfiable);
      response.setReason((String)HttpResponse.__statusMsg
                         .get(TypeUtil.newInteger(HttpResponse.__416_Requested_Range_Not_Satisfiable)));
      response.setField(HttpFields.__ContentRange,
                        InclusiveByteRange.to416HeaderRangeString(resLength));
      OutputStream out = response.getOutputStream();
      resource.writeTo(out,0,resLength);
      request.setHandled(true);
      return;
    }
    // if there is only a single valid range (must be satisfiable
    // since we're here now), send that range with a 206 response
    if ( ranges.size()== 1) {
      InclusiveByteRange singleSatisfiableRange =
        (InclusiveByteRange)ranges.get(0);
      if(log.isDebugEnabled())log.debug("single satisfiable range: " + singleSatisfiableRange);
      long singleLength = singleSatisfiableRange.getSize(resLength);
      writeHeaders(response,resource,singleLength);
      response.setStatus(HttpResponse.__206_Partial_Content);
      response.setReason((String)HttpResponse.__statusMsg
                         .get(TypeUtil.newInteger(HttpResponse.__206_Partial_Content)));
      response.setField(HttpFields.__ContentRange,
                        singleSatisfiableRange.toHeaderRangeString(resLength));
      OutputStream out = response.getOutputStream();
      resource.writeTo(out, singleSatisfiableRange.getFirst(resLength), singleLength);
      request.setHandled(true);
      return;
    }
    // multiple non-overlapping valid ranges cause a multipart
    // 206 response which does not require an overall
    // content-length header
    ResourceCache.ResourceMetaData metaData =
      (ResourceCache.ResourceMetaData)resource.getAssociate();
    String encoding = metaData.getMimeType();
    MultiPartResponse multi = new MultiPartResponse(response);
    response.setStatus(HttpResponse.__206_Partial_Content);
    response.setReason((String)HttpResponse.__statusMsg
                       .get(TypeUtil.newInteger(HttpResponse.__206_Partial_Content)));
    // If the request has a "Request-Range" header then we need to
    // send an old style multipart/x-byteranges Content-Type. This
    // keeps Netscape and acrobat happy. This is what Apache does.
    String ctp;
    if (request.containsField(HttpFields.__RequestRange))
      ctp = "multipart/x-byteranges; boundary=";
    else
      ctp = "multipart/byteranges; boundary=";
    response.setContentType(ctp+multi.getBoundary());
    InputStream in=(resource instanceof CachedResource)
      ?null:resource.getInputStream();
    OutputStream out = response.getOutputStream();
    long pos=0;
    for (int i=0;i<ranges.size();i++) {
      InclusiveByteRange ibr = (InclusiveByteRange) ranges.get(i);
      String header=HttpFields.__ContentRange+": "+
        ibr.toHeaderRangeString(resLength);
      if(log.isDebugEnabled())log.debug("multi range: "+encoding+" "+header);
      multi.startPart(encoding,new String[]{header});
      long start=ibr.getFirst(resLength);
      long size=ibr.getSize(resLength);
      if (in!=null) {
        // Handle non cached resource: the stream is sequential, so rewind
        // by reopening when a range starts before the current position.
        if (start<pos) {
          in.close();
          in=resource.getInputStream();
          pos=0;
        }
        while (pos<start) {
          pos += in.skip(start-pos);
        }
        IO.copy(in,out,size);
        pos+=size;
      } else
        // Handle cached resource
        resource.writeTo(out,start,size);
    }
    if (in!=null) in.close();
    multi.close();
    request.setHandled(true);
    return;
  }

  /** Send an HTML directory listing (403 if listings are disabled), using
   * cached listing bytes for CachedResources when available. */
  void sendDirectory(HttpRequest request, HttpResponse response, Resource resource, boolean parent)
      throws IOException {
    if (!_dirAllowed) {
      response.sendError(HttpResponse.__403_Forbidden);
      return;
    }
    request.setHandled(true);
    if(log.isDebugEnabled())log.debug("sendDirectory: "+resource);
    byte[] data=null;
    if (resource instanceof CachedResource)
      data=((CachedResource)resource).getCachedData();
    if (data==null) {
      String base = URI.addPaths(request.getPath(),"/");
      String dir = resource.getListHTML(base,parent);
      if (dir==null) {
        response.sendError(HttpResponse.__403_Forbidden, "No directory");
        return;
      }
      // NOTE(review): "UTF8" is a legal JDK charset alias for UTF-8, but the
      // canonical name "UTF-8" is preferred (also in the charset= parameter
      // below, where non-Java clients may not recognize "UTF8").
      data=dir.getBytes("UTF8");
      if (resource instanceof CachedResource)
        ((CachedResource)resource).setCachedData(data);
    }
    response.setContentType("text/html; charset=UTF8");
    response.setContentLength(data.length);
    if (request.getMethod().equals(HttpRequest.__HEAD)) {
      response.commit();
      return;
    }
    response.getOutputStream().write(data,0,data.length);
    response.commit();
  }
}
package org.jtrfp.trcl.gpu;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.concurrent.Callable;
import java.util.concurrent.Future;

import org.apache.commons.math3.geometry.euclidean.threed.Vector3D;
import org.jtrfp.trcl.Controller;
import org.jtrfp.trcl.LineSegment;
import org.jtrfp.trcl.RenderMode;
import org.jtrfp.trcl.Sequencer;
import org.jtrfp.trcl.Tickable;
import org.jtrfp.trcl.TransparentTriangleList;
import org.jtrfp.trcl.Triangle;
import org.jtrfp.trcl.TriangleList;
import org.jtrfp.trcl.core.TR;
import org.jtrfp.trcl.core.TRFuture;

/**
 * Holds per-frame geometry (opaque triangles, alpha-blended triangles, line
 * segments) while a model is being assembled, then converts it into GPU-side
 * primitive lists via {@link #finalizeModel()}. After finalization the raw
 * frame lists are nulled out and only the uploaded {@link TriangleList} /
 * {@link TransparentTriangleList} remain.
 * <p>
 * NOTE(review): thread-safety is ad hoc — finalization happens on a thread
 * pool and {@code animated} is volatile, but the raw lists themselves are
 * unsynchronized; callers appear expected to finish building before
 * finalizing. Confirm against the engine's threading rules.
 */
public class Model {
    // [FRAME][LIST] — outer index is the animation frame, inner list is the geometry for that frame.
    private ArrayList<ArrayList<Triangle>> tLists = new ArrayList<ArrayList<Triangle>>();
    private ArrayList<ArrayList<Triangle>> ttLists = new ArrayList<ArrayList<Triangle>>();
    private ArrayList<ArrayList<LineSegment>> lsLists = new ArrayList<ArrayList<LineSegment>>();
    private TransparentTriangleList ttpList;   // GPU-side alpha-blended list, set by finalizeModel()
    private TriangleList tpList;               // GPU-side opaque list, set by finalizeModel()
    private int frameDelay;                    // milliseconds between animation frames
    private boolean smoothAnimation;
    public static final String UNNAMED = "[unnamed]";
    private String debugName = UNNAMED;
    private boolean animateUV = false;
    private Controller controller;
    private final TR tr;
    private long animationUpdateThresholdMillis = 0;
    private static final long ANIMATION_UPDATE_INTERVAL = 10;  // min millis between animator ticks
    private final ArrayList<Tickable> tickableAnimators = new ArrayList<Tickable>();
    private volatile boolean animated = false; // true once addFrame() has been called at least once
    private boolean modelFinalized = false;
    private TRFuture<Model> finalizedModel;
    // Keeps hard references to Textures to keep them from getting gobbled (GC'd).
    private final HashSet<TextureDescription> textures = new HashSet<TextureDescription>();

    /**
     * Creates an empty model with an initial "frame zero" in each geometry
     * list (discarded later by finalizeModel() if the model is animated).
     */
    public Model(boolean smoothAnimation, TR tr, String debugName) {
        this.tr = tr;
        this.smoothAnimation = smoothAnimation;
        this.debugName = debugName;
        // Frame zero
        tLists.add(new ArrayList<Triangle>());
        lsLists.add(new ArrayList<LineSegment>());
        ttLists.add(new ArrayList<Triangle>());
    }

    /** Blocks until finalization completes, then returns the opaque GPU list (may be null if no opaque tris). */
    public TriangleList getTriangleList() {
        // NOTE: NPEs here if finalizeModel() was never called (finalizedModel still null).
        try{finalizedModel.get();}
        catch(Exception e){throw new RuntimeException(e);}
        return tpList;
    }

    /** Blocks until finalization completes, then returns the alpha-blended GPU list (may be null). */
    public TransparentTriangleList getTransparentTriangleList() {
        try{finalizedModel.get();}
        catch(Exception e){throw new RuntimeException(e);}
        return ttpList;
    }

    public ArrayList<ArrayList<Triangle>> getRawTriangleLists() {
        return tLists;
    }

    public ArrayList<ArrayList<Triangle>> getRawTransparentTriangleLists() {
        return ttLists;
    }

    ArrayList<ArrayList<LineSegment>> getRawLineSegmentLists() {
        return lsLists;
    }

    /** Per-axis maximum vertex coordinate over both GPU lists (axes accumulate independently, floored at 0). */
    public Vector3D getMaximumVertexDims(){
        double maxX=0,maxY=0,maxZ = 0;
        final TransparentTriangleList ttList = getTransparentTriangleList();
        if(ttList != null){
            final Vector3D mV = ttList.getMaximumVertexDims();
            maxX = mV.getX();
            maxY = mV.getY();
            maxZ = mV.getZ();
        }
        final TriangleList tList = getTriangleList();
        if(tList != null){
            final Vector3D mV = tList.getMaximumVertexDims();
            maxX = Math.max(mV.getX(),maxX);
            maxY = Math.max(mV.getY(),maxY);
            maxZ = Math.max(mV.getZ(),maxZ);
        }
        return new Vector3D(maxX,maxY,maxZ);
    }//end getMaximumVertexValue()

    /**
     * Per-axis minimum vertex coordinate.
     * NOTE(review): this reads {@code getMaximumVertexDims()} from each
     * sub-list yet folds with {@code Math.min} starting from 0 — so it
     * returns min(maxDims, 0) per axis, which looks like a copy-paste of
     * getMaximumVertexDims(). Confirm whether TriangleList exposes a
     * minimum-dims accessor that should be used here instead.
     */
    public Vector3D getMinimumVertexDims(){
        double maxX=0,maxY=0,maxZ = 0;
        final TransparentTriangleList ttList = getTransparentTriangleList();
        if(ttList != null){
            final Vector3D mV = ttList.getMaximumVertexDims();
            maxX = mV.getX();
            maxY = mV.getY();
            maxZ = mV.getZ();
        }
        final TriangleList tList = getTriangleList();
        if(tList != null){
            final Vector3D mV = tList.getMaximumVertexDims();
            maxX = Math.min(mV.getX(),maxX);
            maxY = Math.min(mV.getY(),maxY);
            maxZ = Math.min(mV.getZ(),maxZ);
        }
        return new Vector3D(maxX,maxY,maxZ);
    }//end getMinimumVertexValue()

    /**
     * Sets up formal GPU primitive lists: converts the raw frame lists into
     * TriangleList/TransparentTriangleList, uploads them to the GPU on the
     * thread pool, and releases the raw lists. Idempotent via modelFinalized.
     * NOTE(review): tpFuture/ttpFuture are assigned but never awaited here —
     * presumably the upload completes asynchronously; confirm callers don't
     * rely on upload having finished when this future resolves.
     *
     * @return a future resolving to this Model once list construction is done
     */
    public TRFuture<Model> finalizeModel() {
        return finalizedModel = tr.getThreadManager().submitToThreadPool(new Callable<Model>(){
            @Override
            public Model call() throws Exception {
                Future<Void> tpFuture=null, ttpFuture=null;
                if(modelFinalized) return Model.this;
                modelFinalized = true;
                if(animated)//Discard frame zero (the placeholder created by the constructor)
                    {tLists.remove(0);ttLists.remove(0);}
                Controller c = controller;
                {//Start scope numFrames
                    final int numFrames = tLists.size();
                    if (c == null) {
                        if(frameDelay==0)frameDelay=1;  // Sequencer needs a nonzero interval
                        setController(new Sequencer(getFrameDelayInMillis(), numFrames, true));
                    }
                    Triangle[][] tris = new Triangle[numFrames][];
                    for (int i = 0; i < numFrames; i++) {
                        tris[i] = tLists.get(i).toArray(new Triangle[] {});
                        assert tris[i]!=null:"tris intolerably null";//Verify poss. race condition.
                        for(Triangle triangle:tLists.get(i))
                            textures.add(triangle.texture);  // pin textures against GC
                    }// Get all frames for each triangle
                    if (tris[0].length != 0) {
                        tpList = new TriangleList(tris, getFrameDelayInMillis(), "Model."+debugName,
                                animateUV, getController(), tr, Model.this);
                        tpFuture = tpList.uploadToGPU();
                    }// end if(length!=0)
                    else tpList = null;
                }//end scope numFrames
                {//start scope numFrames (transparent pass)
                    final int numFrames = ttLists.size();
                    Triangle[][] ttris = new Triangle[numFrames][];
                    for (int i = 0; i < numFrames; i++) {
                        ttris[i] = ttLists.get(i).toArray(new Triangle[] {});
                        for(Triangle triangle:ttLists.get(i))
                            textures.add(triangle.texture);
                    }// Get all frames for each triangle
                    if (ttris[0].length != 0) {
                        ttpList = new TransparentTriangleList(ttris, getFrameDelayInMillis(), debugName,
                                animateUV, getController(), tr, Model.this);
                        ttpFuture = ttpList.uploadToGPU();
                    }// end if(length!=0)
                    else ttpList = null;
                    // Release the raw builder-side lists; the model is now GPU-resident.
                    tLists =null; ttLists=null; lsLists=null;
                }//end scope numframes
                return Model.this;
            }});
    }// end finalizeModel()

    /**
     * Appends frame 0 of the given model as a new animation frame of this one.
     * Marks this model animated, which causes finalizeModel() to discard the
     * constructor's placeholder frame zero.
     */
    public void addFrame(Model m) {
        if(!animated)animated=true;
        // Opaque Triangles
        {
            tLists.add(m.getRawTriangleLists().get(0));
        }
        // Transparent triangles
        {
            ttLists.add(m.getRawTransparentTriangleLists().get(0));
        }
        // Line Segs
        {
            lsLists.add(m.getRawLineSegmentLists().get(0));
        }
    }// end addFrame(...)

    /**
     * @return The time between frames in milliseconds
     * @since Jan 5, 2013
     */
    public int getFrameDelayInMillis() {
        return frameDelay;
    }

    /**
     * @param frameDelayInMillis the frame delay to set; must be positive
     * @throws IllegalArgumentException if zero or negative
     */
    public void setFrameDelayInMillis(int frameDelayInMillis) {
        if(frameDelayInMillis<=0)
            throw new IllegalArgumentException("Frame interval in millis is intolerably zero or negative: "+frameDelayInMillis);
        this.frameDelay = frameDelayInMillis;
    }

    /** Routes the triangle to the transparent or opaque frame-0 list by its alpha flag. */
    public void addTriangle(Triangle triangle) {
        if (triangle.isAlphaBlended()) {
            ttLists.get(0).add(triangle);
        } else
            tLists.get(0).add(triangle);
    }

    public void addLineSegment(LineSegment seg) {
        lsLists.get(0).add(seg);
    }

    public void addTriangles(Triangle[] tris) {
        for (Triangle t : tris) {
            addTriangle(t);
        }
    }

    public void addLineSegments(LineSegment[] lss) {
        for (LineSegment ls : lss) {
            addLineSegment(ls);
        }
    }// end addLineSegments

    /**
     * @return the smoothAnimation
     */
    public boolean isSmoothAnimation() {
        return smoothAnimation;
    }

    /**
     * @param smoothAnimation the smoothAnimation to set
     */
    public void setSmoothAnimation(boolean smoothAnimation) {
        this.smoothAnimation = smoothAnimation;
    }

    /** Convenience overload: full-texture UVs (0,0)-(1,1), with alpha flag. */
    public static Model buildCube(double w, double h, double d,
            TextureDescription tunnelTexturePalette, double[] origin, boolean hasAlpha, TR tr) {
        return buildCube(w, h, d, tunnelTexturePalette, origin, 0, 0, 1, 1, hasAlpha, tr);
    }

    /** Convenience overload: full-texture UVs, opaque. */
    public static Model buildCube(double w, double h, double d,
            TextureDescription tunnelTexturePalette, double[] origin, TR tr) {
        return buildCube(w, h, d, tunnelTexturePalette, origin, 0, 0, 1, 1, tr);
    }

    /** Convenience overload: explicit UVs, opaque. */
    public static Model buildCube(double w, double h, double d,
            TextureDescription tunnelTexturePalette, double[] origin,
            double u0, double v0, double u1, double v1, TR tr) {
        return buildCube(w, h, d, tunnelTexturePalette, origin, u0, v0, u1, v1, false, tr);
    }

    /** Convenience overload: explicit UVs and alpha, normals enabled. */
    public static Model buildCube(double w, double h, double d,
            TextureDescription tunnelTexturePalette, double[] origin,
            double u0, double v0, double u1, double v1, boolean hasAlpha, TR tr) {
        return buildCube(w,h,d,tunnelTexturePalette,origin,u0,v0,u1,v1,hasAlpha,true,tr);
    }

    /**
     * Builds an axis-aligned textured cube of size w×h×d, shifted by -origin,
     * one quad (two triangles) per face.
     * NOTE(review): the Top/Bottom/Front faces all carry the debug label
     * "Model.buildCube.front" — looks like a copy-paste in the labels; they
     * are runtime strings so they are preserved here unchanged.
     */
    public static Model buildCube(double w, double h, double d,
            TextureDescription tunnelTexturePalette, double[] origin,
            double u0, double v0, double u1, double v1, boolean hasAlpha, boolean hasNorm, TR tr) {
        Model m = new Model(false, tr, "Model.buildCube");
        // Top
        m.addTriangles(Triangle.quad2Triangles(
                new double[] { 0 - origin[0], w - origin[0], w - origin[0], 0 - origin[0] },
                new double[] { 0 - origin[1], 0 - origin[1], 0 - origin[1], 0 - origin[1] },
                new double[] { 0 - origin[2], 0 - origin[2], d - origin[2], d - origin[2] },
                new double[] { u0, u1, u1, u0 },
                new double[] { v1, v1, v0, v0 },
                tunnelTexturePalette, RenderMode.STATIC, hasAlpha,
                hasNorm?Vector3D.MINUS_K:Vector3D.ZERO,"Model.buildCube.front"));
        // Bottom
        m.addTriangles(Triangle.quad2Triangles(
                new double[] { 0 - origin[0], w - origin[0], w - origin[0], 0 - origin[0] },
                new double[] { h - origin[1], h - origin[1], h - origin[1], h - origin[1] },
                new double[] { d - origin[2], d - origin[2], 0 - origin[2], 0 - origin[2] },
                new double[] { u0, u1, u1, u0 },
                new double[] { v1, v1, v0, v0 },
                tunnelTexturePalette, RenderMode.STATIC, hasAlpha,
                hasNorm?Vector3D.MINUS_K:Vector3D.ZERO,"Model.buildCube.front"));
        // Front
        m.addTriangles(Triangle.quad2Triangles(
                new double[] { 0 - origin[0],w - origin[0], w - origin[0], 0 - origin[0] },
                new double[] { h - origin[1], h - origin[1], 0 - origin[1], 0 - origin[1] },
                new double[] { 0 - origin[2], 0 - origin[2], 0 - origin[2],0 - origin[2] },
                new double[] { u0, u1, u1, u0 },
                new double[] { v1, v1, v0, v0 },
                tunnelTexturePalette, RenderMode.STATIC, hasAlpha,
                hasNorm?Vector3D.MINUS_K:Vector3D.ZERO,"Model.buildCube.front"));
        // Left
        m.addTriangles(Triangle.quad2Triangles(
                new double[] { 0 - origin[0], 0 - origin[0],0 - origin[0], 0 - origin[0]},
                new double[] { 0 - origin[1], 0 - origin[1], h - origin[1], h - origin[1] },
                new double[] { 0 - origin[2], d - origin[2], d - origin[2], 0 - origin[2] },
                new double[] { u0, u1, u1, u0 },
                new double[] { v1, v1, v0, v0 },
                tunnelTexturePalette, RenderMode.STATIC, hasAlpha,
                hasNorm?Vector3D.MINUS_I:Vector3D.ZERO,"Model.buildCube.left"));
        // Right
        m.addTriangles(Triangle.quad2Triangles(
                new double[] { w - origin[0], w - origin[0], w - origin[0], w - origin[0] },
                new double[] { h - origin[1], h - origin[1], 0 - origin[1], 0 - origin[1] },
                new double[] { 0 - origin[2], d - origin[2], d - origin[2], 0 - origin[2] },
                new double[] { u0, u1, u1, u0 },
                new double[] { v1, v1, v0, v0 },
                tunnelTexturePalette, RenderMode.STATIC, hasAlpha,
                hasNorm?Vector3D.PLUS_I:Vector3D.ZERO,"Model.buildCube.right"));
        // Back
        m.addTriangles(Triangle.quad2Triangles(
                new double[] { 0 - origin[0], w - origin[0], w - origin[0], 0 - origin[0] },
                new double[] { 0 - origin[1], 0 - origin[1], h - origin[1], h - origin[1] },
                new double[] { d - origin[2], d - origin[2], d - origin[2], d - origin[2] },
                new double[] { u0, u1, u1, u0 },
                new double[] { v0, v0, v1, v1 },
                tunnelTexturePalette, RenderMode.STATIC, hasAlpha,
                hasNorm?Vector3D.PLUS_K:Vector3D.ZERO,"Model.buildCube.back"));
        return m;
    }// end buildCube

    /**
     * @return the debugName
     */
    public String getDebugName() {
        return debugName;
    }

    /**
     * @param debugName the debugName to set
     */
    public void setDebugName(String debugName) {
        this.debugName = debugName;
    }

    /**
     * @return the animateUV
     */
    public boolean isAnimateUV() {
        return animateUV;
    }

    /**
     * @param animateUV the animateUV to set
     */
    public void setAnimateUV(boolean animateUV) {
        this.animateUV = animateUV;
    }

    /**
     * @param controller the controller to set
     */
    public void setController(Controller controller) {
        this.controller = controller;
    }

    /**
     * @return the controller
     */
    public Controller getController() {
        return controller;
    }

    /**
     * Ticks all registered animators, rate-limited to one pass per
     * ANIMATION_UPDATE_INTERVAL milliseconds (wall clock).
     */
    public void proposeAnimationUpdate() {
        long currentTimeMillis = System.currentTimeMillis();
        if (currentTimeMillis > animationUpdateThresholdMillis) {
            synchronized(tickableAnimators){
                final int size = tickableAnimators.size();
                for (int i = 0; i < size; i++){
                    final Tickable t = tickableAnimators.get(i);
                    if(t!=null)
                        tickableAnimators.get(i).tick();
                }//end for(animators)
                animationUpdateThresholdMillis = currentTimeMillis + ANIMATION_UPDATE_INTERVAL;
            }//end sync(tickableAnimators)
        }// end if(time to update)
    }// end proposeAnimationUpdate()

    public void addTickableAnimator(Tickable t) {
        tickableAnimators.add(t);
    }

    /** Largest single coordinate among the per-axis maxima. */
    public double getMaximumVertexValue() {
        final Vector3D maxDims = getMaximumVertexDims();
        double max = maxDims.getX();
        max = Math.max(max,maxDims.getY());
        max = Math.max(max,maxDims.getZ());
        return max;
    }//end getMaximumVertexValue()

    /** Smallest single coordinate among the per-axis minima (see getMinimumVertexDims note). */
    public double getMinimumVertexValue() {
        final Vector3D minDims = getMinimumVertexDims();
        double min = minDims.getX();
        min = Math.min(min,minDims.getY());
        min = Math.min(min,minDims.getZ());
        return min;
    }//end getMaximumVertexValue()

    /** Largest absolute coordinate of any vertex; useful as a bounding radius. */
    public double getMaximumVertexValueAbs(){
        return Math.max(getMaximumVertexValue(),Math.abs(getMinimumVertexValue()));
    }//end getMaximumVertexVAlueAbs()

    @Override
    public String toString(){
        if(getDebugName()==null) return super.toString();
        return "["+this.getClass().getName()+" debugName="+debugName+" hash="+hashCode()+"]";
    }
}// end Model
package co.zpdev.bots.jitters;

import co.zpdev.core.discord.exception.ExceptionHandler;
import co.zpdev.core.discord.util.JSONUtil;
import co.zpdev.core.discord.util.PostUtil;
import co.zpdev.core.discord.util.TimeUtil;
import net.dv8tion.jda.core.EmbedBuilder;
import net.dv8tion.jda.core.JDA;
import net.dv8tion.jda.core.entities.TextChannel;
import org.json.JSONException;
import org.json.JSONObject;

import java.awt.*;
import java.io.IOException;
import java.io.InputStreamReader;
import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.*;
import java.util.concurrent.TimeUnit;

/**
 * Keeps the per-show schedule data (loaded from the bundled {@code /shows.json})
 * up to date against the TVmaze API, schedules air-time announcement timers,
 * and periodically refreshes each show's Discord channel topic with a countdown.
 *
 * @author ZP4RKER
 */
class ShowUpdater {

    private JDA jda;
    /** In-memory show database keyed by show alias; entries are replaced by update(). */
    private static JSONObject shows = null;

    ShowUpdater(JDA jda) {
        this.jda = jda;
        // try-with-resources: the original leaked the reader on every construction.
        try (InputStreamReader rd = new InputStreamReader(Jitters.class.getResourceAsStream("/shows.json"))) {
            StringBuilder sb = new StringBuilder();
            int c;
            while ((c = rd.read()) != -1) sb.append((char) c);
            shows = new JSONObject(sb.toString());
        } catch (IOException | JSONException e) {
            ExceptionHandler.handleException("reading json file (shows.json)", e);
        }
    }

    /**
     * Refreshes every show once, schedules the one-shot announcement timers for
     * upcoming episodes, and starts the recurring refresh (8h) and channel-topic
     * (1min) timers.
     */
    void start() {
        for (String show : shows.keySet()) {
            update(show);
            if (!shows.getJSONObject(show).has("nextepisode")) continue;
            // Seconds until the episode airs (may be negative if already aired).
            long airTime = shows.getJSONObject(show).getJSONObject("nextepisode").getLong("airtime")
                    - Instant.now().getEpochSecond();
            long fiveMin = airTime - TimeUnit.MINUTES.toSeconds(5);
            PostUtil.push("Time till airtime", "airtime = " + airTime + ", fiveMin = " + fiveMin);
            Timer timer = new Timer();
            // FIX: Timer.schedule expects a delay in MILLISECONDS; the original
            // passed the raw second counts, firing announcements ~1000x too early.
            if (fiveMin > 0) timer.schedule(new TimerTask() {
                @Override
                public void run() {
                    announce(show, false);
                }
            }, TimeUnit.SECONDS.toMillis(fiveMin));
            if (airTime > 0) timer.schedule(new TimerTask() {
                @Override
                public void run() {
                    announce(show, true);
                }
            }, TimeUnit.SECONDS.toMillis(airTime));
        }
        Timer timer = new Timer();
        // Re-fetch every show from TVmaze every 8 hours.
        shows.keySet().forEach(s -> timer.scheduleAtFixedRate(new TimerTask() {
            @Override
            public void run() {
                update(s);
            }
        }, 0, TimeUnit.HOURS.toMillis(8)));
        // Refresh channel topics once a minute.
        timer.scheduleAtFixedRate(new TimerTask() {
            @Override
            public void run() {
                updateTopics();
            }
        }, 0, TimeUnit.MINUTES.toMillis(1));
    }

    /** Rewrites each show channel's topic with a human-readable countdown to the next episode. */
    private void updateTopics() {
        for (String show : shows.keySet()) {
            if (!shows.getJSONObject(show).has("nextepisode")) continue;
            JSONObject data = shows.getJSONObject(show).getJSONObject("nextepisode");
            String topic = "Next episode in: " + TimeUtil.toString(Instant.ofEpochSecond(data.getLong("airtime")), true);
            topic += " (" + data.getString("number") + " \"" + data.getString("name") + "\")";
            // NOTE(review): getTextChannelById may return null if the channel is gone — TODO confirm/guard.
            jda.getTextChannelById(shows.getJSONObject(show).getLong("channel")).getManager().setTopic(topic).queue();
        }
    }

    /**
     * Announces the airing of a new episode and a 5 minute reminder beforehand.
     * Message sending is currently disabled (commented out); only the debug push fires.
     *
     * @deprecated until fixed
     * @param show show to announce
     * @param now  true for the "starts now" message, false for the 5-minute warning
     */
    private void announce(String show, boolean now) {
        if (!shows.getJSONObject(show).has("nextepisode")) return;
        if (!shows.getJSONObject(show).getJSONObject("nextepisode").has("airtime")) return;
        JSONObject data = shows.getJSONObject(show);
        JSONObject nextEp = data.getJSONObject("nextepisode");
        TextChannel c = jda.getTextChannelById(data.getLong("channel"));
        EmbedBuilder embed = new EmbedBuilder()
                .setColor(Color.decode(data.getString("colour")))
                .setTitle(data.getString("name"))
                .setFooter(data.getString("name") + " - " + nextEp.getString("number"), null)
                .setImage(data.getString("image"));
        if (!now) {
            embed.setDescription("\"" + nextEp.getString("name") + "\" starts in 5 minutes");
        } else {
            embed.setDescription("\"" + nextEp.getString("name") + "\" starts now");
        }
        PostUtil.push("Tried announcing for " + show, "now = " + now + ", airtime = "
                + nextEp.getLong("airtime") + ", Instant.now() = " + Instant.now().getEpochSecond());
        //c.sendMessage(embed.build()).queue();
    }

    /** Re-fetches one show from the TVmaze API and replaces its entry in {@link #shows}. */
    private void update(String show) {
        JSONObject data = new JSONObject();
        data.put("id", shows.getJSONObject(show).getString("id"));
        data.put("channel", shows.getJSONObject(show).getLong("channel"));
        data.put("colour", shows.getJSONObject(show).getString("colour"));
        JSONObject sData = JSONUtil.fromUrl("http://api.tvmaze.com/shows/" + data.getString("id"));
        data.put("name", sData.getString("name"));
        data.put("image", sData.getJSONObject("image").getString("original"));
        data.put("summary", sData.getString("summary"));
        if (sData.getJSONObject("_links").has("nextepisode")) {
            JSONObject eData = JSONUtil.fromUrl(sData.getJSONObject("_links").getJSONObject("nextepisode").getString("href"));
            JSONObject nextEp = new JSONObject();
            nextEp.put("name", eData.getString("name"));
            nextEp.put("number", eData.getNumber("season") + "x" + eData.getNumber("number"));
            nextEp.put("airtime", getInstant(eData).getEpochSecond());
            if (!eData.isNull("summary")) nextEp.put("summary", eData.getString("summary"));
            data.put("nextepisode", nextEp);
        }
        shows.put(show, data);
    }

    /**
     * Builds the air instant from TVmaze's "airdate" (YYYY-MM-DD) and optional
     * "airtime" (HH:MM), interpreted in America/New_York.
     * FIX: the presence check now runs BEFORE getString("airtime") — the original
     * order threw JSONException whenever the key was missing.
     */
    private Instant getInstant(JSONObject eData) {
        int[] d = Arrays.stream(eData.getString("airdate").split("-")).mapToInt(Integer::parseInt).toArray();
        int[] t = !eData.has("airtime") || eData.getString("airtime").isEmpty()
                ? new int[]{0, 0}
                : Arrays.stream(eData.getString("airtime").split(":")).mapToInt(Integer::parseInt).toArray();
        return ZonedDateTime.of(LocalDateTime.of(d[0], d[1], d[2], t[0], t[1]),
                ZoneId.of("America/New_York")).toInstant();
    }

    /**
     * Legacy java.util.Date variant of {@link #getInstant} (Calendar months are 0-based,
     * hence d[1] - 1). Same presence-check fix applied.
     */
    private Date getDate(JSONObject eData) {
        int[] d = Arrays.stream(eData.getString("airdate").split("-")).mapToInt(Integer::parseInt).toArray();
        int[] t = !eData.has("airtime") || eData.getString("airtime").isEmpty()
                ? new int[]{0, 0}
                : Arrays.stream(eData.getString("airtime").split(":")).mapToInt(Integer::parseInt).toArray();
        Calendar c = Calendar.getInstance();
        c.set(d[0], d[1] - 1, d[2], t[0], t[1]);
        return c.getTime();
    }
}
package think.rpgitems.power.impl;

import org.bukkit.*;
import org.bukkit.block.Block;
import org.bukkit.entity.Entity;
import org.bukkit.entity.LivingEntity;
import org.bukkit.entity.Player;
import org.bukkit.event.entity.EntityDamageByEntityEvent;
import org.bukkit.event.entity.EntityDamageEvent;
import org.bukkit.event.entity.EntityShootBowEvent;
import org.bukkit.event.player.PlayerInteractEvent;
import org.bukkit.event.player.PlayerToggleSneakEvent;
import org.bukkit.event.player.PlayerToggleSprintEvent;
import org.bukkit.inventory.ItemStack;
import org.bukkit.scheduler.BukkitRunnable;
import org.bukkit.util.BoundingBox;
import org.bukkit.util.RayTraceResult;
import org.bukkit.util.Vector;
import org.librazy.nclangchecker.LangKey;
import think.rpgitems.RPGItems;
import think.rpgitems.data.Context;
import think.rpgitems.power.*;

import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import static think.rpgitems.Events.*;
import static think.rpgitems.power.Utils.checkCooldown;

/**
 * Beam power: fires a particle beam (instant {@code BEAM} or travelling
 * {@code PROJECTILE}), with optional cone spread, homing, bouncing off walls,
 * gravity, bursts, and piercing damage.
 * <p>
 * Author: ReinWD (ReinWDD@gmail.com).
 * Wrote &amp; maintained by ReinWD; for issues, email or @ReinWD in issues.
 * Accepted language: English.
 */
@PowerMeta(defaultTrigger = "RIGHT_CLICK")
public class PowerBeam extends BasePower implements PowerPlain, PowerRightClick, PowerLeftClick, PowerSneak, PowerSneaking, PowerSprint, PowerBowShoot, PowerHitTaken, PowerHit, PowerHurt {

    /** Beam length in blocks. */
    @Property
    public int length = 10;

    /** Particle type rendered along the beam. */
    @Property
    public Particle particle = Particle.LAVA;

    /** Total particle budget for the whole beam. */
    @Property
    public int amount = 200;

    /** BEAM = rendered in one tick; PROJECTILE = advances over movementTicks. */
    @Property
    public Mode mode = Mode.BEAM;

    /** If true the beam damages every entity on its path, not just the first. */
    @Property
    public boolean pierce = true;

    /** If true the beam passes through solid blocks. */
    @Property
    public boolean ignoreWall = true;

    @Property
    public double damage = 20;

    /** Ticks a PROJECTILE-mode beam takes to cover its full length. */
    @Property
    public int movementTicks = 40;

    @Property
    public double offsetX = 0;

    @Property
    public double offsetY = 0;

    @Property
    public double offsetZ = 0;

    /** Particle spawn density; lengthPerSpawn is derived as its reciprocal. */
    @Property
    public double spawnsPerBlock = 2;

    double lengthPerSpawn = 1 / spawnsPerBlock;

    /**
     * Cost of this power
     */
    @Property
    public int cost = 0;

    /**
     * Cooldown time of this power
     */
    @Property
    public long cooldown = 0;

    /** If true, randomize direction within coneRange degrees per shot. */
    @Property
    public boolean cone = false;

    @Property
    public double coneRange = 30;

    /** If true, the beam steers toward a nearby target each step. */
    @Property
    public boolean homing = false;

    /** Max steering per spawn step, degrees (scaled by spawnsPerBlock). */
    @Property
    public double homingAngle = 1;

    @Property
    public double homingRange = 30;

    /** Which entity class homing may lock onto. */
    @Property
    public Target homingTarget = Target.MOBS;

    /** Steps flown straight before homing correction kicks in. */
    @Property
    public int stepsBeforeHoming = 5;

    /** Number of beams per trigger (fired burstInterval ticks apart). */
    @Property
    public int burstCount = 1;

    /** Beams per burst when cone is enabled. */
    @Property
    public int beamAmount = 1;

    @Property
    public int burstInterval = 1;

    /** Number of wall bounces allowed (consumed one per bounce). */
    @Property
    public int bounce = 0;

    @Property
    public boolean hitSelfWhenBounced = false;

    /** Downward acceleration, blocks/sec^2 (applied per tick as gravity/20). */
    @Property
    public double gravity = 0;

    @Property
    @Serializer(ExtraDataSerializer.class)
    @Deserializer(ExtraDataSerializer.class)
    public Object extraData;

    @Property
    public double speed = 0;

    /**
     * Whether to suppress the hit trigger
     */
    @Property
    public boolean suppressMelee = false;

    // Blocks the beam can pass through: non-blocks excluded; non-solid or non-occluding pass.
    private Set<Material> transp = Stream.of(Material.values())
            .filter(material -> material.isBlock())
            .filter(material -> !material.isSolid() || !material.isOccluding())
            .collect(Collectors.toSet());

    /** Checks cooldown and durability cost, then fires the beam. */
    @Override
    public PowerResult<Void> fire(Player player, ItemStack stack) {
        if (!checkCooldown(this, player, cooldown, true, true)) return PowerResult.cd();
        if (!getItem().consumeDurability(stack, cost)) return PowerResult.cost();
        return beam(player);
    }

    @Override
    public @LangKey(skipCheck = true) String getName() {
        return "beam";
    }

    @Override
    public String displayText() {
        return null;
    }

    @Override
    public PowerResult<Void> leftClick(Player player, ItemStack stack, PlayerInteractEvent event) {
        return fire(player, stack);
    }

    @Override
    public PowerResult<Void> rightClick(Player player, ItemStack stack, PlayerInteractEvent event) {
        return fire(player, stack);
    }

    @Override
    public PowerResult<Void> sneak(Player player, ItemStack stack, PlayerToggleSneakEvent event) {
        return fire(player, stack);
    }

    @Override
    public PowerResult<Void> sneaking(Player player, ItemStack stack) {
        return fire(player, stack);
    }

    @Override
    public PowerResult<Void> sprint(Player player, ItemStack stack, PlayerToggleSprintEvent event) {
        return fire(player, stack);
    }

    @Override
    public PowerResult<Float> bowShoot(Player player, ItemStack itemStack, EntityShootBowEvent e) {
        return beam(player).with(e.getForce());
    }

    @Override
    public PowerResult<Double> hit(Player player, ItemStack stack, LivingEntity entity, double damage, EntityDamageByEntityEvent event) {
        return beam(player).with(event.getDamage());
    }

    @Override
    public PowerResult<Double> takeHit(Player target, ItemStack stack, double damage, EntityDamageEvent event) {
        return beam(target).with(event.getDamage());
    }

    /** Dispatches burstCount beams, burstInterval ticks apart (async scheduling). */
    private PowerResult<Void> beam(LivingEntity from) {
        if (burstCount > 0) {
            for (int i = 0; i < burstCount; i++) {
                new BukkitRunnable() {
                    @Override
                    public void run() {
                        if (cone) {
                            // Cone mode fires beamAmount beams per burst, each with its own random spread.
                            for (int j = 0; j < beamAmount; j++) {
                                fire(from);
                            }
                        } else {
                            fire(from);
                        }
                    }
                }.runTaskLaterAsynchronously(RPGItems.plugin, i * burstInterval);
            }
            return PowerResult.ok();
        } else {
            return fire(from);
        }
    }

    /** Computes the (possibly cone-randomized) direction, picks a homing target, and launches the task. */
    private PowerResult<Void> fire(LivingEntity from) {
        lengthPerSpawn = 1 / spawnsPerBlock;
        Location fromLocation = from.getEyeLocation();
        Vector towards = from.getEyeLocation().getDirection();
        if (cone) {
            // Random rotation: theta = deflection from aim, phi = roll around the aim axis.
            double phi = random.nextInt(360);
            double theta;
            if (coneRange > 0) {
                theta = random.nextInt(((int) Math.round(coneRange)));
                double shiftLen = towards.length() * Math.tan(Math.toRadians(theta));
                Vector clone = towards.clone();
                Vector shift = clone.crossProduct(yUnit).normalize().multiply(shiftLen);
                shift.rotateAroundNonUnitAxis(towards, Math.toRadians(phi));
                towards = towards.add(shift);
            }
        }
        Entity target = null;
        if (from instanceof Player && homing) {
            final Target homingTarget = this.homingTarget;
            target = Utils.getLivingEntitiesInCone(Utils.getNearestLivingEntities(this, fromLocation, ((Player) from), Math.min(1000, length), 0), fromLocation.toVector(), homingRange, from.getEyeLocation().getDirection()).stream()
                    .filter(livingEntity -> {
                        switch (homingTarget) {
                            case MOBS:
                                return !(livingEntity instanceof Player);
                            case PLAYERS:
                                return livingEntity instanceof Player;
                            case ALL:
                                break;
                        }
                        return true;
                    })
                    .findFirst().orElse(null);
        }
        switch (mode) {
            case BEAM:
                new PlainTask(from, towards, amount, length, target, bounce).runTask(RPGItems.plugin);
                break;
            case PROJECTILE:
                new MovingTask(from, towards, amount, length, target, bounce).runTask(RPGItems.plugin);
                break;
        }
        return PowerResult.ok();
    }

    private Random random = new Random();
    private Vector yUnit = new Vector(0, 1, 0);

    @Override
    public PowerResult<Void> hurt(Player target, ItemStack stack, EntityDamageEvent event) {
        return fire(target, stack);
    }

    /** Renders the whole beam within a single synchronous task (Mode.BEAM). */
    class PlainTask extends BukkitRunnable {
        private int bounces;
        private double length;
        private LivingEntity from;
        private Vector towards;
        private final int apS;    // particle amount per spawn segment
        private Entity target;
        boolean bounced = false;

        public PlainTask(LivingEntity from, Vector towards, int amount, double actualLength, Entity target, int bounces) {
            this.from = from;
            this.towards = towards;
            this.length = actualLength;
            // NOTE(review): divides by floor(actualLength) — zero-length beams would throw ArithmeticException.
            this.apS = amount / ((int) Math.floor(actualLength));
            this.target = target;
            this.bounces = bounces;
        }

        @Override
        public void run() {
            World world = from.getWorld();
            towards.normalize();
            Location lastLocation = from.getEyeLocation();
            double lpT = length / ((double) movementTicks);
            double partsPerTick = lpT / lengthPerSpawn;
            for (int i = 0; i < movementTicks; i++) {
                boolean isHit = false;
                Vector step = new Vector(0, 0, 0);
                for (int j = 0; j < partsPerTick; j++) {
                    isHit = tryHit(from, lastLocation, bounced && hitSelfWhenBounced) || isHit;
                    Block block = lastLocation.getBlock();
                    if (transp.contains(block.getType())) {
                        spawnParticle(from, world, lastLocation, (int) Math.ceil(apS / partsPerTick));
                    } else if (!ignoreWall) {
                        if (bounces > 0) {
                            // FIX: original read "bounces bounced = true;" (invalid Java);
                            // intent is to consume one bounce charge and mark the beam bounced.
                            bounces--;
                            bounced = true;
                            makeBounce(block, towards, lastLocation.clone().subtract(step));
                        } else {
                            return;   // wall hit, no bounces left: beam ends
                        }
                    }
                    step = towards.clone().normalize().multiply(lengthPerSpawn);
                    lastLocation.add(step);
                    towards = addGravity(towards, partsPerTick);
                    towards = homingCorrect(towards, lastLocation, target, i);
                }
                if (isHit) return;
            }
        }
    }

    // Reused scratch vector for per-step gravity (mutated in addGravity).
    Vector gravityVector = new Vector(0, -gravity / 20, 0);

    /** Applies one spawn-step's worth of gravity to the direction vector. */
    private Vector addGravity(Vector towards, double partsPerTick) {
        double gravityPerTick = (-gravity / 20d) / partsPerTick;
        gravityVector.setY(gravityPerTick);
        return towards.add(gravityVector);
    }

    /** Advances the beam one tick at a time via a repeating task (Mode.PROJECTILE). */
    private class MovingTask extends BukkitRunnable {
        private final LivingEntity from;
        private int bounces;
        private Vector towards;
        private final int amountPerSec;
        private final List<BukkitRunnable> runnables = new LinkedList<>();
        private Entity target;
        boolean bounced = false;

        public MovingTask(LivingEntity from, Vector towards, int apS, double actualLength, Entity target, int bounces) {
            this.from = from;
            this.towards = towards;
            this.amountPerSec = apS / ((int) Math.floor(actualLength));
            this.target = target;
            this.bounces = bounces;
        }

        @Override
        public void run() {
            World world = from.getWorld();
            double lpT = ((double) length) / ((double) movementTicks);
            double partsPerTick = lpT / lengthPerSpawn;
            Location lastLocation = from.getEyeLocation();
            towards.normalize();
            final int[] finalI = {0};  // tick counter, boxed for capture by the inner runnable
            BukkitRunnable bukkitRunnable = new BukkitRunnable() {
                @Override
                public void run() {
                    boolean isHit = false;
                    Vector step = new Vector(0, 0, 0);
                    for (int k = 0; k < partsPerTick; k++) {
                        isHit = tryHit(from, lastLocation, bounced && hitSelfWhenBounced) || isHit;
                        Block block = lastLocation.getBlock();
                        if (transp.contains(block.getType())) {
                            spawnParticle(from, world, lastLocation, (int) (amountPerSec / spawnsPerBlock));
                        } else if (!ignoreWall) {
                            if (bounces > 0) {
                                // FIX: original read "bounces bounced = true;" (invalid Java);
                                // intent is to consume one bounce charge and mark the beam bounced.
                                bounces--;
                                bounced = true;
                                makeBounce(block, towards, lastLocation.clone().subtract(step));
                            } else {
                                this.cancel();
                                return;
                            }
                        }
                        step = towards.clone().normalize().multiply(lengthPerSpawn);
                        lastLocation.add(step);
                        towards = addGravity(towards, partsPerTick);
                        towards = homingCorrect(towards, lastLocation, target, finalI[0]);
                    }
                    if (isHit) {
                        this.cancel();
                        return;
                    }
                    if (finalI[0] >= movementTicks) {
                        this.cancel();
                    }
                    finalI[0]++;
                }
            };
            bukkitRunnable.runTaskTimer(RPGItems.plugin, 0, 1);
        }
    }

    /** Reflects the direction off the face of the block the beam just entered. */
    private void makeBounce(Block block, Vector towards, Location lastLocation) {
        RayTraceResult rayTraceResult = block.rayTrace(lastLocation, towards, towards.length(), FluidCollisionMode.NEVER);
        if (rayTraceResult == null) {
            return;
        } else {
            towards.rotateAroundNonUnitAxis(rayTraceResult.getHitBlockFace().getDirection(), Math.toRadians(180)).multiply(-1);
        }
    }

    /**
     * Steers the direction toward the target by at most homingAngle/spawnsPerBlock
     * per step; snaps straight at the target once within that angle.
     */
    private Vector homingCorrect(Vector towards, Location lastLocation, Entity target, int i) {
        if (target == null || i < stepsBeforeHoming || target.isDead()) {
            return towards;
        }
        Location targetLocation;
        if (target instanceof LivingEntity) {
            targetLocation = ((LivingEntity) target).getEyeLocation();
        } else {
            targetLocation = target.getLocation();
        }
        Vector clone = towards.clone();
        Vector targetDirection = targetLocation.toVector().subtract(lastLocation.toVector());
        float angle = clone.angle(targetDirection);
        Vector crossProduct = clone.clone().getCrossProduct(targetDirection);
        double actualAng = homingAngle / spawnsPerBlock;
        if (angle > Math.toRadians(actualAng)) {
            // Rotate partially toward the target around the mutual perpendicular.
            clone.add(clone.clone().getCrossProduct(crossProduct).normalize().multiply(-1 * Math.tan(actualAng)));
        } else {
            clone = targetDirection.normalize();
        }
        return clone;
    }

    /** Spawns beam particles; skips the first block so the shooter's face isn't obscured. */
    private void spawnParticle(LivingEntity from, World world, Location lastLocation, int i) {
        if ((lastLocation.distance(from.getEyeLocation()) < 1)) {
            return;
        }
        if (from instanceof Player) {
            // Shooter gets a reduced personal copy in addition to the world broadcast.
            ((Player) from).spawnParticle(this.particle, lastLocation, i / 2, offsetX, offsetY, offsetZ, speed, extraData);
        }
        world.spawnParticle(this.particle, lastLocation, i, offsetX, offsetY, offsetZ, speed, extraData);
    }

    /**
     * Damages entities near the current beam point. Non-pierce hits the first
     * candidate and reports true (stops the beam); pierce damages all and
     * always reports false.
     */
    private boolean tryHit(LivingEntity from, Location loc, boolean canHitSelf) {
        double offsetLength = new Vector(offsetX, offsetY, offsetZ).length();
        // NOTE(review): Math.max(offsetLength, 10) imposes a 10-block minimum search
        // radius — presumably Math.min was intended; preserved pending confirmation.
        double length = Double.isNaN(offsetLength) ? 0 : Math.max(offsetLength, 10);
        Collection<Entity> candidates = from.getWorld().getNearbyEntities(loc, length, length, length);
        if (!pierce) {
            List<Entity> collect = candidates.stream()
                    .filter(entity -> (entity instanceof LivingEntity) && (canHitSelf || !entity.equals(from)))
                    .filter(entity -> canHit(loc, entity))
                    .limit(1)
                    .collect(Collectors.toList());
            if (!collect.isEmpty()) {
                Entity entity = collect.get(0);
                if (entity instanceof LivingEntity) {
                    // Context temps tell the damage listener this is beam damage.
                    Context.instance().putTemp(from.getUniqueId(), DAMAGE_SOURCE, getNamespacedKey().toString());
                    Context.instance().putTemp(from.getUniqueId(), OVERRIDING_DAMAGE, damage);
                    Context.instance().putTemp(from.getUniqueId(), SUPPRESS_MELEE, suppressMelee);
                    ((LivingEntity) entity).damage(damage, from);
                    Context.instance().putTemp(from.getUniqueId(), SUPPRESS_MELEE, null);
                    Context.instance().putTemp(from.getUniqueId(), OVERRIDING_DAMAGE, null);
                    Context.instance().putTemp(from.getUniqueId(), DAMAGE_SOURCE, null);
                }
                return true;
            }
        } else {
            List<Entity> collect = candidates.stream()
                    .filter(entity -> (entity instanceof LivingEntity) && (canHitSelf || !entity.equals(from)))
                    .filter(entity -> canHit(loc, entity))
                    .collect(Collectors.toList());
            Context.instance().putTemp(from.getUniqueId(), DAMAGE_SOURCE, getNamespacedKey().toString());
            Context.instance().putTemp(from.getUniqueId(), OVERRIDING_DAMAGE, damage);
            Context.instance().putTemp(from.getUniqueId(), SUPPRESS_MELEE, suppressMelee);
            if (!collect.isEmpty()) {
                collect.stream()
                        .map(entity -> ((LivingEntity) entity))
                        .forEach(livingEntity -> {
                            livingEntity.damage(damage, from);
                        });
            }
            Context.instance().putTemp(from.getUniqueId(), SUPPRESS_MELEE, null);
            Context.instance().putTemp(from.getUniqueId(), OVERRIDING_DAMAGE, null);
            Context.instance().putTemp(from.getUniqueId(), DAMAGE_SOURCE, null);
        }
        return false;
    }

    /** Overlap test between the entity's bounding box and a particle-sized box at loc. */
    private boolean canHit(Location loc, Entity entity) {
        BoundingBox boundingBox = entity.getBoundingBox();
        BoundingBox particleBox;
        double x = Math.max(offsetX, 0.1);
        double y = Math.max(offsetY, 0.1);
        double z = Math.max(offsetZ, 0.1);
        particleBox = BoundingBox.of(loc, x + 0.1, y + 0.1, z + 0.1);
        return boundingBox.overlaps(particleBox) || particleBox.overlaps(boundingBox);
    }

    private enum Mode {
        BEAM,
        PROJECTILE,
        ;
    }

    /** (De)serializes Particle.DustOptions extra data as "r,g,b,size". */
    public class ExtraDataSerializer implements Getter, Setter {
        @Override
        public String get(Object object) {
            if (object instanceof Particle.DustOptions) {
                Color color = ((Particle.DustOptions) object).getColor();
                return color.getRed() + "," + color.getGreen() + "," + color.getBlue() + "," + ((Particle.DustOptions) object).getSize();
            }
            return "";
        }

        @Override
        public Optional set(String value) throws IllegalArgumentException {
            String[] split = value.split(",", 4);
            int r = Integer.parseInt(split[0]);
            int g = Integer.parseInt(split[1]);
            int b = Integer.parseInt(split[2]);
            float size = Float.parseFloat(split[3]);
            return Optional.of(new Particle.DustOptions(Color.fromRGB(r, g, b), size));
        }
    }

    enum Target {
        MOBS, PLAYERS, ALL
    }
}
package org.lockss.protocol; import java.io.*; import java.net.*; import java.security.*; import javax.net.ssl.*; import java.util.*; import org.apache.commons.collections.*; import org.apache.commons.collections.bag.TreeBag; // needed to disambiguate import EDU.oswego.cs.dl.util.concurrent.*; import org.lockss.util.*; import org.lockss.util.Queue; import org.lockss.config.*; import org.lockss.daemon.*; import org.lockss.daemon.status.*; import org.lockss.app.*; import org.lockss.poller.*; /** * BlockingStreamComm implements the streaming mesaage protocol using * blocking sockets. */ public class BlockingStreamComm extends BaseLockssDaemonManager implements ConfigurableManager, LcapStreamComm, PeerMessage.Factory { static Logger log = Logger.getLogger("SComm"); public static final String SERVER_NAME = "StreamComm"; /** Use V3 over SSL **/ public static final String PARAM_USE_V3_OVER_SSL = PREFIX + "v3OverSsl"; public static final boolean DEFAULT_USE_V3_OVER_SSL = false; /** Use client authentication for SSL **/ public static final String PARAM_USE_SSL_CLIENT_AUTH = PREFIX + "sslClientAuth"; public static final boolean DEFAULT_USE_SSL_CLIENT_AUTH = true; /** Name of managed keystore to use for both my private key and peers' * public keys (see org.lockss.keyMgr.keystore.<i>id</i>.name). Set * either this, or both sslPrivateKeystoreName and * sslPublicKeystoreName. */ public static final String PARAM_SSL_KEYSTORE_NAME = PREFIX + "sslKeystoreName"; /** Name of managed keystore to use for my private key (see * org.lockss.keyMgr.keystore.<i>id</i>.name). */ public static final String PARAM_SSL_PRIVATE_KEYSTORE_NAME = PREFIX + "sslPrivateKeystoreName"; /** Name of managed keystore in which to look for peers' public keys (see * org.lockss.keyMgr.keystore.<i>id</i>.name). */ public static final String PARAM_SSL_PUBLIC_KEYSTORE_NAME = PREFIX + "sslPublicKeystoreName"; /** An SSLContext that supports this protocol will be obtained. 
Note * that this is just passed to <code>SSLContent.getInstance()</code>; * sockets obtained from resulting factory will likely support other * protocols. To ensure that other protocols are not used they should be * included in <code>org.lockss.scomm.disableSslServerProtocols</code> * and <code>org.lockss.scomm.disableSslClientProtocols</code> */ public static final String PARAM_SSL_PROTOCOL = PREFIX + "sslProtocol"; public static final String DEFAULT_SSL_PROTOCOL = "TLSv1.2"; /** SSL protocols to disable in server sockets. */ public static final String PARAM_DISABLE_SSL_SERVER_PROTOCOLS = PREFIX + "disableSslServerProtocols"; public static final List DEFAULT_DISABLE_SSL_SERVER_PROTOCOLS = ListUtil.list("SSLv3", "SSLv2Hello"); /** SSL protocols to disable in client sockets. */ public static final String PARAM_DISABLE_SSL_CLIENT_PROTOCOLS = PREFIX + "disableSslClientProtocols"; public static final List DEFAULT_DISABLE_SSL_CLIENT_PROTOCOLS = ListUtil.list("SSLv3", "SSLv2Hello"); /** If true, listen socket will be bound only to the configured local IP * address **/ public static final String PARAM_BIND_TO_LOCAL_IP_ONLY = PREFIX + "bindToLocalIpOnly"; public static final boolean DEFAULT_BIND_TO_LOCAL_IP_ONLY = false; /** If true, when the listen socket is bound to the local IP address, * outgoing connections will also be made from that address. Should * normally be true; some testing situations require special behavior. */ public static final String PARAM_SEND_FROM_BIND_ADDR = PREFIX + "sendFromBindAddr"; public static final boolean DEFAULT_SEND_FROM_BIND_ADDR = true; /** Max peer channels. Only affects outgoing messages; incoming * connections are always accepted. 
*/ public static final String PARAM_MAX_CHANNELS = PREFIX + "maxChannels"; public static final int DEFAULT_MAX_CHANNELS = 50; /** Min threads in channel thread pool */ public static final String PARAM_CHANNEL_THREAD_POOL_MIN = PREFIX + "threadPool.min"; public static final int DEFAULT_CHANNEL_THREAD_POOL_MIN = 3; /** Max threads in channel thread pool */ public static final String PARAM_CHANNEL_THREAD_POOL_MAX = PREFIX + "threadPool.max"; public static final int DEFAULT_CHANNEL_THREAD_POOL_MAX = 3 * DEFAULT_MAX_CHANNELS; /** Duration after which idle threads will be terminated.. -1 = never */ public static final String PARAM_CHANNEL_THREAD_POOL_KEEPALIVE = PREFIX + "threadPool.keepAlive"; public static final long DEFAULT_CHANNEL_THREAD_POOL_KEEPALIVE = 10 * Constants.MINUTE; /** Connect timeout */ public static final String PARAM_CONNECT_TIMEOUT = PREFIX + "timeout.connect"; public static final long DEFAULT_CONNECT_TIMEOUT = 2 * Constants.MINUTE; /** Data timeout (SO_TIMEOUT), channel is aborted if read times out. * This should be disabled (zero) because the read side of a channel may * legitimately be idle for a long time (if the channel is sending), and * interrupted reads apparently cannot reliably be resumed. If the * channel is truly idle, the send side should close it. */ public static final String PARAM_DATA_TIMEOUT = PREFIX + "timeout.data"; public static final long DEFAULT_DATA_TIMEOUT = 0; /** Data timeout (SO_TIMEOUT) during SSL negotiation (if any). Channel * isn't fully set up yet and idle timeout isn't in effect. Data timeout * ({@value PARAM_DATA_TIMEOUT} may be zero, but SSL negotiation should * always have a timeout. 
*/ public static final String PARAM_SSL_HANDSHAKE_TIMEOUT = PREFIX + "timeout.sslHandshake"; public static final long DEFAULT_SSL_HANDSHAKE_TIMEOUT = 5 * Constants.MINUTE; /** Enable SO_KEEPALIVE if true */ public static final String PARAM_SOCKET_KEEPALIVE = PREFIX + "socketKeepAlive"; public static final boolean DEFAULT_SOCKET_KEEPALIVE = true; /** Enable TCP_NODELAY if true */ public static final String PARAM_TCP_NODELAY = PREFIX + "tcpNoDelay"; public static final boolean DEFAULT_TCP_NODELAY = true; /** Time after which idle channel will be closed */ public static final String PARAM_CHANNEL_IDLE_TIME = PREFIX + "channelIdleTime"; public static final long DEFAULT_CHANNEL_IDLE_TIME = 2 * Constants.MINUTE; /** Time channel remains in DRAIN_INPUT state before closing */ public static final String PARAM_DRAIN_INPUT_TIME = PREFIX + "drainInputTime"; public static final long DEFAULT_DRAIN_INPUT_TIME = 10 * Constants.SECOND; /** Interval at which send thread checks idle timer */ public static final String PARAM_SEND_WAKEUP_TIME = PREFIX + "sendWakeupTime"; public static final long DEFAULT_SEND_WAKEUP_TIME = 1 * Constants.MINUTE; /** Interval before message expiration time at which to retry */ public static final String PARAM_RETRY_BEFORE_EXPIRATION = PREFIX + "retryBeforeExpiration"; public static final long DEFAULT_RETRY_BEFORE_EXPIRATION = 1 * Constants.MINUTE; /** Max time to wait and retry connection to unresponsive peer. May * happen sooner if queued messages will expire sooner */ public static final String PARAM_MAX_PEER_RETRY_INTERVAL = PREFIX + "maxPeerRetryInterval"; public static final long DEFAULT_MAX_PEER_RETRY_INTERVAL = 30 * Constants.MINUTE; /** Min time to wait and retry connection to unresponsive peer. 
*/ public static final String PARAM_MIN_PEER_RETRY_INTERVAL = PREFIX + "minPeerRetryInterval"; public static final long DEFAULT_MIN_PEER_RETRY_INTERVAL = 30 * Constants.SECOND; /** Min time to wait between retry attempts (channel start interval) */ public static final String PARAM_RETRY_DELAY = PREFIX + "retryDelay"; public static final long DEFAULT_RETRY_DELAY = 5 * Constants.SECOND; /** FilePeerMessage will be used for messages larger than this, else * MemoryPeerMessage */ public static final String PARAM_MIN_FILE_MESSAGE_SIZE = PREFIX + "minFileMessageSize"; public static final int DEFAULT_MIN_FILE_MESSAGE_SIZE = 1024; /** Maximum allowable received message size */ public static final String PARAM_MAX_MESSAGE_SIZE = PREFIX + "maxMessageSize"; public static final long DEFAULT_MAX_MESSAGE_SIZE = 1024 * 1024 * 1024; /** Per-peer message send rate limit. Messages queued for send in excess * of this rate will be discarded and counted */ public static final String PARAM_PEER_SEND_MESSAGE_RATE_LIMIT = PREFIX + "peerSendMessageRateLimit"; public static final String DEFAULT_PEER_SEND_MESSAGE_RATE_LIMIT = "unlimited"; /** Per-peer message receive rate limit. Messages received in excess of * this rate will be discarded and counted */ public static final String PARAM_PEER_RECEIVE_MESSAGE_RATE_LIMIT = PREFIX + "peerReceiveMessageRateLimit"; public static final String DEFAULT_PEER_RECEIVE_MESSAGE_RATE_LIMIT = "unlimited"; /** Rough transmission speed will be measured for messages at least this * large, reported at debug level */ public static final String PARAM_MIN_MEASURED_MESSAGE_SIZE = PREFIX + "minMeasuredMessageSize"; public static final long DEFAULT_MIN_MEASURED_MESSAGE_SIZE = 5 * 1024 * 1024; /** Dir for PeerMessage data storage */ public static final String PARAM_DATA_DIR = PREFIX + "messageDataDir"; /** Default is PlatformInfo.getSystemTempDir() */ public static final String DEFAULT_DATA_DIR = "Platform tmp dir"; /** Wrap Socket OutputStream in BufferedOutputStream? 
*/ public static final String PARAM_IS_BUFFERED_SEND = PREFIX + "bufferedSend"; public static final boolean DEFAULT_IS_BUFFERED_SEND = true; /** Amount of time BlockingStreamComm.stopService() should wait for * worker threads to exit. Zero disables wait. */ public static final String PARAM_WAIT_EXIT = PREFIX + "waitExit"; public static final long DEFAULT_WAIT_EXIT = 2 * Constants.SECOND; /** If true, associated channels that refuse messages will be immediately * dissociated */ public static final String PARAM_DISSOCIATE_ON_NO_SEND = PREFIX + "dissociateOnNoSend"; public static final boolean DEFAULT_DISSOCIATE_ON_NO_SEND = true; /** If true, stopChannel() will dissociate unconditionally, matching the * previous behavior. If false it will dissociate only if it changes the * state to CLOSING. */ public static final String PARAM_DISSOCIATE_ON_EVERY_STOP = PREFIX + "dissociateOnEveryStop"; public static final boolean DEFAULT_DISSOCIATE_ON_EVERY_STOP = false; /** If true, unknown peer messages opcodes cause the channel to abort */ public static final String PARAM_ABORT_ON_UNKNOWN_OP = PREFIX + "abortOnUnknownOp"; public static final boolean DEFAULT_ABORT_ON_UNKNOWN_OP = true; static final String WDOG_PARAM_SCOMM = "SComm"; static final long WDOG_DEFAULT_SCOMM = 1 * Constants.HOUR; static final String PRIORITY_PARAM_SCOMM = "SComm"; static final int PRIORITY_DEFAULT_SCOMM = -1; static final String PRIORITY_PARAM_SLISTEN = "SListen"; static final int PRIORITY_DEFAULT_SLISTEN = -1; static final String WDOG_PARAM_CHANNEL = "Channel"; static final long WDOG_DEFAULT_CHANNEL = 30 * Constants.MINUTE; static final String PRIORITY_PARAM_CHANNEL = "Channel"; static final int PRIORITY_DEFAULT_CHANNEL = -1; static final String WDOG_PARAM_RETRY = "SRetry"; static final long WDOG_DEFAULT_RETRY = 1 * Constants.HOUR; static final String PRIORITY_PARAM_RETRY = "SRetry"; static final int PRIORITY_DEFAULT_RETRY = -1; private boolean paramUseV3OverSsl = DEFAULT_USE_V3_OVER_SSL; private 
boolean paramSslClientAuth = DEFAULT_USE_SSL_CLIENT_AUTH; private String paramSslPrivateKeyStoreName; private String paramSslPublicKeyStoreName; private String paramSslProtocol = DEFAULT_SSL_PROTOCOL; private List<String> paramDisableSslServerProtocols = DEFAULT_DISABLE_SSL_SERVER_PROTOCOLS; private List<String> paramDisableSslClientProtocols = DEFAULT_DISABLE_SSL_CLIENT_PROTOCOLS; private int paramMinFileMessageSize = DEFAULT_MIN_FILE_MESSAGE_SIZE; private long paramMaxMessageSize = DEFAULT_MAX_MESSAGE_SIZE; private long paramMinMeasuredMessageSize = DEFAULT_MIN_MEASURED_MESSAGE_SIZE; private File dataDir = null; private int paramBacklog = DEFAULT_LISTEN_BACKLOG; private int paramMaxChannels = DEFAULT_MAX_CHANNELS; private int paramMinPoolSize = DEFAULT_CHANNEL_THREAD_POOL_MIN; private int paramMaxPoolSize = DEFAULT_CHANNEL_THREAD_POOL_MAX; private long paramPoolKeepaliveTime = DEFAULT_CHANNEL_THREAD_POOL_KEEPALIVE; private long paramConnectTimeout = DEFAULT_CONNECT_TIMEOUT; private long paramSoTimeout = DEFAULT_DATA_TIMEOUT; private long paramSslHandshakeTimeout = DEFAULT_SSL_HANDSHAKE_TIMEOUT; private boolean paramSoKeepAlive = DEFAULT_SOCKET_KEEPALIVE; private boolean paramIsTcpNoDelay = DEFAULT_TCP_NODELAY; private long paramSendWakeupTime = DEFAULT_SEND_WAKEUP_TIME; private long paramRetryBeforeExpiration = DEFAULT_RETRY_BEFORE_EXPIRATION; private long paramMaxPeerRetryInterval = DEFAULT_MAX_PEER_RETRY_INTERVAL; private long paramMinPeerRetryInterval = DEFAULT_MIN_PEER_RETRY_INTERVAL; private long paramRetryDelay = DEFAULT_RETRY_DELAY; protected long paramChannelIdleTime = DEFAULT_CHANNEL_IDLE_TIME; private long paramDrainInputTime = DEFAULT_DRAIN_INPUT_TIME; private boolean paramIsBufferedSend = DEFAULT_IS_BUFFERED_SEND; private long paramWaitExit = DEFAULT_WAIT_EXIT; private boolean paramAbortOnUnknownOp = DEFAULT_ABORT_ON_UNKNOWN_OP; private long lastHungCheckTime = 0; private PooledExecutor pool; protected SSLSocketFactory sslSocketFactory = null; 
protected SSLServerSocketFactory sslServerSocketFactory = null; private boolean paramDissociateOnNoSend = DEFAULT_DISSOCIATE_ON_NO_SEND; private boolean paramDissociateOnEveryStop = DEFAULT_DISSOCIATE_ON_EVERY_STOP; private boolean enabled = DEFAULT_ENABLED; private boolean running = false; private String bindAddr; private boolean sendFromBindAddr; private SocketFactory sockFact; private ServerSocket listenSock; private PeerIdentity myPeerId; private PeerAddress.Tcp myPeerAddr; private IdentityManager idMgr; protected LockssKeyStoreManager keystoreMgr; private OneShot configShot = new OneShot(); private FifoQueue rcvQueue; // PeerMessages received from channels private ReceiveThread rcvThread; private ListenThread listenThread; private RetryThread retryThread; // Synchronization lock for rcv thread, listen thread manipulations private Object threadLock = new Object(); // Map holds channels and queue associated with each Peer Map<PeerIdentity,PeerData> peers = new HashMap<PeerIdentity,PeerData>(); Comparator ROC = new RetryOrderComparator(); TreeSet<PeerData> peersToRetry = new TreeSet<PeerData>(ROC); // Record of draining channels (no longer associated with peer) so stats // can find them Set<BlockingPeerChannel> drainingChannels = new HashSet(); private Vector messageHandlers = new Vector(); // Vector is synchronized private boolean anyRateLimited; private RateLimiter.LimiterMap sendRateLimiters = new RateLimiter.LimiterMap(PARAM_PEER_SEND_MESSAGE_RATE_LIMIT, DEFAULT_PEER_SEND_MESSAGE_RATE_LIMIT); private RateLimiter.LimiterMap receiveRateLimiters = new RateLimiter.LimiterMap(PARAM_PEER_RECEIVE_MESSAGE_RATE_LIMIT, DEFAULT_PEER_RECEIVE_MESSAGE_RATE_LIMIT); int nPrimary = 0; int nSecondary = 0; int maxPrimary = 0; int maxSecondary = 0; int maxDrainingChannels = 0; Object ctrLock = new Object(); // lock for above counters // Counts number of successful messages with N retries Bag retryHist = new TreeBag(); // Counts number of discarded messages with N retries Bag 
retryErrHist = new TreeBag(); ChannelStats globalStats = new ChannelStats(); public BlockingStreamComm() { sockFact = null; } class PeerData { PeerIdentity pid; BlockingPeerChannel primary; BlockingPeerChannel secondary; Queue sendQueue = null; // Non-null only when there are queued // messages for a peer that has no active // primary channel. PeerMessage earliestMsg; // Needed until we have separate channel // for each poll long lastRetry = 0; long nextRetry = TimeBase.MAX; // Time at which we should try again to // connect and send held msgs. private int origCnt = 0; // Number of connect attempts private int failCnt = 0; // Number of connect failures private int acceptCnt = 0; // Number of incoming connections int msgsSent = 0; int sendRateLimited = 0; int rcvRateLimited = 0; int msgsRcvd = 0; int lastSendRpt = 0; PeerData(PeerIdentity pid) { this.pid = pid; } PeerIdentity getPid() { return pid; } BlockingPeerChannel getPrimaryChannel() { return primary; } BlockingPeerChannel getSecondaryChannel() { return secondary; } int getSendQueueSize() { Queue q = sendQueue; return q == null ? 
0 : q.size(); }
    long getLastRetry() { return lastRetry; }
    long getNextRetry() { return nextRetry; }

    /** Return the expiration time of the earliest held message, or
     * TimeBase.MAX if no messages are queued. */
    long getFirstExpiration() {
      if (sendQueue == null || sendQueue.isEmpty()) {
        return TimeBase.MAX;
      }
      PeerMessage msg = earliestMsg;
      if (msg == null) {
        return TimeBase.MAX;
      }
      return msg.getExpiration();
    }

    // Simple per-peer counter accessors and mutators.
    int getOrigCnt() { return origCnt; }
    int getFailCnt() { return failCnt; }
    int getAcceptCnt() { return acceptCnt; }
    int getMsgsSent() { return msgsSent; }
    int getMsgsRcvd() { return msgsRcvd; }
    void sentMsg() { msgsSent++; }
    void rcvdMsg() { msgsRcvd++; }
    int getSendRateLimited() { return sendRateLimited; }
    int getRcvRateLimited() { return rcvRateLimited; }

    /** Count a receive-rate-limited message and note that any rate limiting
     * has occurred (read by status display). */
    void rcvRateLimited() {
      rcvRateLimited++;
      anyRateLimited = true;
    }

    /** Adjust the global primary-channel count under ctrLock, tracking the
     * high-water mark.
     * @param incr true to increment, false to decrement */
    void countPrimary(boolean incr) {
      synchronized (ctrLock) {
        if (incr) {
          nPrimary++;
          if (nPrimary > maxPrimary) maxPrimary = nPrimary;
        } else {
          // Fix: restore the decrement.  The bare expression "nPrimary" here
          // did not compile and the counter was never reduced.
          nPrimary--;
        }
      }
    }

    /** Adjust the global secondary-channel count under ctrLock, tracking the
     * high-water mark.
     * @param incr true to increment, false to decrement */
    void countSecondary(boolean incr) {
      synchronized (ctrLock) {
        if (incr) {
          nSecondary++;
          if (nSecondary > maxSecondary) maxSecondary = nSecondary;
        } else {
          // Fix: restore the decrement (was a bare "nSecondary" expression).
          nSecondary--;
        }
      }
    }

    /** Associate a channel with this peer: first one becomes primary (and
     * receives any held messages), a second becomes secondary; further
     * associations are logged and ignored. */
    synchronized void associateChannel(BlockingPeerChannel chan) {
      acceptCnt++;
      if (primary == null) {
        primary = chan;
        countPrimary(true);
        handOffQueuedMsgs(primary);
        if (log.isDebug2()) log.debug2("Associated " + chan);
      } else if (primary == chan) {
        log.warning("Redundant peer-channel association (" + chan + ")");
      } else {
        if (secondary == null) {
          secondary = chan;            // normal secondary association
          countSecondary(true);
          if (log.isDebug2()) log.debug2("Associated secondary " + chan);
        } else if (secondary == chan) {
          log.debug("Redundant secondary peer-channel association(" + chan +")");
        } else {
          // maybe should replace if new working and old not.  but old will
          // eventually timeout and close anyway
          log.warning("Conflicting peer-channel association(" + chan
                      + "), was " + primary);
        }
      }
    }

    // This may be called more than once by the same channel, from its
    // multiple worker threads.  Redundant calls must be harmless.
synchronized void dissociateChannel(BlockingPeerChannel chan) { if (primary == chan) { globalStats.add(primary.getStats()); primary = null; countPrimary(false); if (log.isDebug2()) log.debug2("Removed: " + chan); } if (secondary == chan) { globalStats.add(secondary.getStats()); secondary = null; countSecondary(false); if (log.isDebug2()) log.debug2("Removed secondary: " + chan); } synchronized (drainingChannels) { if (chan.isState(BlockingPeerChannel.ChannelState.DRAIN_INPUT) && chan.getPeer() != null) { // If this channel is draining, remember it so can include in stats if (log.isDebug2()) log.debug2("Add to draining: " + chan); drainingChannels.add(chan); maxDrainingChannels = Math.max(maxDrainingChannels, drainingChannels.size()); } else { // else ensure it's gone if (drainingChannels.remove(chan)) { if (log.isDebug2()) log.debug2("Del from draining: " + chan); } } } } synchronized void send(PeerMessage msg) throws IOException { RateLimiter limiter = getSendRateLimiter(pid); if (limiter != null) { if (!limiter.isEventOk()) { sendRateLimited++; anyRateLimited = true; log.debug2("Pkt rate limited"); return; } else { limiter.event(); } } if (sendQueue != null) { // If queue exists, we're already waiting for connection retry. if (primary != null) { log.error("send: sendQueue and primary channel both exist: " + primary); } enqueueHeld(msg, false ); return; } // A closing channel might refuse the message (return false), in which // case it will have dissociated itself so try again with a new // channel. 
BlockingPeerChannel last = null; int rpt = 0; boolean retry = true; while (rpt++ <= 3) { lastSendRpt = rpt; BlockingPeerChannel chan = findOrMakeChannel(); if (chan == null) { break; } if (last == chan) throw new IllegalStateException("Got same channel as last time: " + chan); if (chan.send(msg)) { return; } if (chan.isUnusedOriginatingChannel()) { log.warning("Couldn't start channel " + chan); retry = chan.shouldRetry(); break; } last = chan; if (paramDissociateOnNoSend) { dissociateChannel(chan); } } log.error("Couldn't enqueue msg to channel after " + rpt + " tries: " + msg); if (retry && msg.isRequeueable()) { // This counts as a connect failure, as the queue was empty when we // entered failCnt++; // XXX Not counting this as a retry causes unpredictable test results enqueueHeld(msg, true); } } synchronized BlockingPeerChannel findOrMakeChannel() { if (primary != null) { return primary; } if (secondary != null) { // found secondary, no primary. promote secondary to primary primary = secondary; secondary = null; countPrimary(true); countSecondary(false); log.debug2("Promoted " + primary); handOffQueuedMsgs(primary); return primary; } // new primary channel, if we have room if (nPrimary < paramMaxChannels) { try { BlockingPeerChannel chan = getSocketFactory().newPeerChannel(BlockingStreamComm.this, pid); if (log.isDebug2()) log.debug2("Created " + chan); try { handOffQueuedMsgs(chan); lastRetry = TimeBase.nowMs(); chan.startOriginate(); origCnt++; primary = chan; countPrimary(true); return primary; } catch (IOException e) { log.warning("Can't start channel " + chan, e); return null; } } catch (IOException e) { log.warning("Can't create channel " + pid, e); return null; } } return null; } synchronized void enqueueHeld(PeerMessage msg, boolean isRetry) { if (sendQueue == null) { sendQueue = new FifoQueue(); } if (log.isDebug3()) log.debug3("enqueuing held "+ msg); BlockingPeerChannel chan = primary; if (chan != null && !chan.isState(enqueueHeldPrimaryOkStates)) { 
log.error("enqueueHeld: primary channel exists: " + primary); } sendQueue.put(msg); if (isRetry) { msg.incrRetryCount(); } long retry = calcNextRetry(msg, isRetry); if (retry < nextRetry) { synchronized (peersToRetry) { peersToRetry.remove(this); earliestMsg = msg; nextRetry = retry; if (log.isDebug3()) { log.debug3("Retry " + pid + " at " + Deadline.at(nextRetry).shortString()); } peersToRetry.add(this); if (this == peersToRetry.first()) { retryThread.recalcNext(); } } } } synchronized void handOffQueuedMsgs(BlockingPeerChannel chan) { if (sendQueue != null) { if (log.isDebug2()) { log.debug2("Handing off " + sendQueue.size() + " msgs to " + chan); } chan.enqueueMsgs(sendQueue); sendQueue = null; synchronized (peersToRetry) { peersToRetry.remove(this); } nextRetry = TimeBase.MAX; earliestMsg = null; } } /** * If channel aborts with unsent messages, queue them to try again later. */ synchronized void drainQueue(PeerData pdata, Queue queue, boolean shouldRetry) { // Don't cause trouble if shutting down (happens in unit tests). 
if (!isRunning() || queue == null || queue.isEmpty()) { return; } failCnt++; if (shouldRetry) { requeueUnsentMsgs(queue); } else { deleteMsgs(queue); } } void requeueUnsentMsgs(Queue queue) { PeerMessage msg; try { int requeued = 0; int deleted = 0; while ((msg = (PeerMessage)queue.get(Deadline.EXPIRED)) != null) { if (msg.isRequeueable() && !msg.isExpired() && msg.getRetryCount() < msg.getRetryMax()) { enqueueHeld(msg, true); requeued++; } else { countMessageErrRetries(msg); msg.delete(); deleted++; } } if (log.isDebug2()) { log.debug2("Requeued " + requeued + ", deleted " + deleted); } } catch (InterruptedException e) { // can't happen (get doesn't wait) } } void deleteMsgs(Queue queue) { PeerMessage msg; try { while ((msg = (PeerMessage)queue.get(Deadline.EXPIRED)) != null) { msg.delete(); } } catch (InterruptedException e) { // can't happen (get doesn't wait) } } // Called by retry thread when we're the first item in peersToRetry synchronized boolean retryIfNeeded() { if (!isRetryNeeded()) { synchronized (peersToRetry) { peersToRetry.remove(this); } return false; } if (primary != null) { return false; } if (TimeBase.nowMs() < getNextRetry()) { return false; } BlockingPeerChannel chan = findOrMakeChannel(); if (chan != null) { return true; } else { log.error("retry: couldn't create channel " + pid); return false; } } boolean isRetryNeeded() { if (sendQueue == null) { return false; } if (sendQueue.isEmpty()) { log.error("Empty send queue " + pid); return false; } return true; } long calcNextRetry(PeerMessage msg, boolean isRetry) { long last = msg.getLastRetry(); long target = TimeBase.MAX; if (msg.getExpiration() > 0) { target = msg.getExpiration() - paramRetryBeforeExpiration; } long intr = msg.getRetryInterval(); if (intr > 0) { target = Math.min(target, last + intr); } log.debug3("last: " + last + ", intr: " + intr + ", target: " + target + ", minPeer: " + paramMinPeerRetryInterval); long retry = Math.max(Math.min(target, lastRetry + 
paramMaxPeerRetryInterval), lastRetry + paramMinPeerRetryInterval); return retry; } synchronized void abortChannels() { if (primary != null) { primary.abortChannel(); } if (secondary != null) { secondary.abortChannel(); } } synchronized void waitChannelsDone(Deadline timeout) { if (primary != null) { primary.waitThreadsExited(timeout); } if (secondary != null) { secondary.waitThreadsExited(timeout); } } synchronized void checkHung() { if (primary != null) { primary.checkHung(); } if (secondary != null) { secondary.checkHung(); } } public String toString() { StringBuilder sb = new StringBuilder(50); sb.append("[CP: "); sb.append(pid); if (nextRetry != TimeBase.MAX) { sb.append(", "); sb.append(Deadline.restoreDeadlineAt(nextRetry).shortString()); } sb.append("]"); return sb.toString(); } } static BlockingPeerChannel.ChannelState[] enqueueHeldPrimaryOkStates = { BlockingPeerChannel.ChannelState.DISSOCIATING, BlockingPeerChannel.ChannelState.CLOSING}; /** * start the stream comm manager. */ public void startService() { super.startService(); LockssDaemon daemon = getDaemon(); idMgr = daemon.getIdentityManager(); keystoreMgr = daemon.getKeystoreManager(); resetConfig(); anyRateLimited = false; try { myPeerId = getLocalPeerIdentity(); } catch (Exception e) { log.critical("No V3 identity, not starting stream comm", e); enabled = false; return; } log.debug("Local V3 peer: " + myPeerId); PeerAddress pad = myPeerId.getPeerAddress(); if (pad instanceof PeerAddress.Tcp) { myPeerAddr = (PeerAddress.Tcp)pad; } else { log.error("Disabling stream comm; no local TCP peer address: " + pad); enabled = false; } if (enabled) { start(); StatusService statSvc = daemon.getStatusService(); statSvc.registerStatusAccessor(getStatusAccessorName("SCommChans"), new ChannelStatus()); statSvc.registerStatusAccessor(getStatusAccessorName("SCommPeers"), new PeerStatus()); } } protected String getStatusAccessorName(String base) { return base; } /** * stop the stream comm manager * @see 
org.lockss.app.LockssManager#stopService() */ public void stopService() { StatusService statSvc = getDaemon().getStatusService(); statSvc.unregisterStatusAccessor(getStatusAccessorName("SCommChans")); statSvc.unregisterStatusAccessor(getStatusAccessorName("SCommPeers")); if (isRunning()) { stop(); } super.stopService(); } /** * Set communication parameters from configuration, once only. * Some aspects of this service currently cannot be reconfigured. * @param config the Configuration */ public void setConfig(Configuration config, Configuration prevConfig, Configuration.Differences changedKeys) { // Instances of this manager are started incrementally in testing, // after the daemon is running, so isDaemonInited() won't work here if (isInited()) { // one-time only init if (configShot.once()) { configure(config, prevConfig, changedKeys); } // the following params can be changed on the fly if (changedKeys.contains(PREFIX)) { if (enabled && isRunning() && !config.getBoolean(PARAM_ENABLED, DEFAULT_ENABLED)) { stopService(); } paramMinFileMessageSize = config.getInt(PARAM_MIN_FILE_MESSAGE_SIZE, DEFAULT_MIN_FILE_MESSAGE_SIZE); paramMaxMessageSize = config.getLong(PARAM_MAX_MESSAGE_SIZE, DEFAULT_MAX_MESSAGE_SIZE); paramMinMeasuredMessageSize = config.getLong(PARAM_MIN_MEASURED_MESSAGE_SIZE, DEFAULT_MIN_MEASURED_MESSAGE_SIZE); paramIsBufferedSend = config.getBoolean(PARAM_IS_BUFFERED_SEND, DEFAULT_IS_BUFFERED_SEND); paramIsTcpNoDelay = config.getBoolean(PARAM_TCP_NODELAY, DEFAULT_TCP_NODELAY); paramWaitExit = config.getTimeInterval(PARAM_WAIT_EXIT, DEFAULT_WAIT_EXIT); String paramDataDir = config.get(PARAM_DATA_DIR, PlatformUtil.getSystemTempDir()); File dir = new File(paramDataDir); if (FileUtil.ensureDirExists(dir)) { if (!dir.equals(dataDir)) { dataDir = dir; log.debug2("Message data dir: " + dataDir); } } else { log.warning("No message data dir: " + dir); dataDir = null; } paramMaxChannels = config.getInt(PARAM_MAX_CHANNELS, DEFAULT_MAX_CHANNELS); paramConnectTimeout = 
config.getTimeInterval(PARAM_CONNECT_TIMEOUT, DEFAULT_CONNECT_TIMEOUT); paramSoTimeout = config.getTimeInterval(PARAM_DATA_TIMEOUT, DEFAULT_DATA_TIMEOUT); paramSslHandshakeTimeout = config.getTimeInterval(PARAM_SSL_HANDSHAKE_TIMEOUT, DEFAULT_SSL_HANDSHAKE_TIMEOUT); paramDisableSslServerProtocols = config.getList(PARAM_DISABLE_SSL_SERVER_PROTOCOLS, DEFAULT_DISABLE_SSL_SERVER_PROTOCOLS); paramDisableSslClientProtocols = config.getList(PARAM_DISABLE_SSL_CLIENT_PROTOCOLS, DEFAULT_DISABLE_SSL_CLIENT_PROTOCOLS); paramSoKeepAlive = config.getBoolean(PARAM_SOCKET_KEEPALIVE, DEFAULT_SOCKET_KEEPALIVE); paramSendWakeupTime = config.getTimeInterval(PARAM_SEND_WAKEUP_TIME, DEFAULT_SEND_WAKEUP_TIME); paramChannelIdleTime = config.getTimeInterval(PARAM_CHANNEL_IDLE_TIME, DEFAULT_CHANNEL_IDLE_TIME); paramDrainInputTime = config.getTimeInterval(PARAM_DRAIN_INPUT_TIME, DEFAULT_DRAIN_INPUT_TIME); paramDissociateOnNoSend = config.getBoolean(PARAM_DISSOCIATE_ON_NO_SEND, DEFAULT_DISSOCIATE_ON_NO_SEND); paramDissociateOnEveryStop = config.getBoolean(PARAM_DISSOCIATE_ON_EVERY_STOP, DEFAULT_DISSOCIATE_ON_EVERY_STOP); paramRetryBeforeExpiration = config.getTimeInterval(PARAM_RETRY_BEFORE_EXPIRATION, DEFAULT_RETRY_BEFORE_EXPIRATION); paramMaxPeerRetryInterval = config.getTimeInterval(PARAM_MAX_PEER_RETRY_INTERVAL, DEFAULT_MAX_PEER_RETRY_INTERVAL); paramMinPeerRetryInterval = config.getTimeInterval(PARAM_MIN_PEER_RETRY_INTERVAL, DEFAULT_MIN_PEER_RETRY_INTERVAL); paramRetryDelay = config.getTimeInterval(PARAM_RETRY_DELAY, DEFAULT_RETRY_DELAY); paramAbortOnUnknownOp = config.getBoolean(PARAM_ABORT_ON_UNKNOWN_OP, DEFAULT_ABORT_ON_UNKNOWN_OP); if (changedKeys.contains(PARAM_PEER_SEND_MESSAGE_RATE_LIMIT)) { sendRateLimiters.resetRateLimiters(config); } if (changedKeys.contains(PARAM_PEER_RECEIVE_MESSAGE_RATE_LIMIT)) { receiveRateLimiters.resetRateLimiters(config); } } } } /** One-time startup configuration */ private void configure(Configuration config, Configuration prevConfig, 
// NOTE(review): tail of setConfig(); the signature is truncated above this chunk.
// Visible portion: reads channel-thread-pool sizing and keepalive params, the
// optional local bind address, and the V3-over-SSL switch. When the SSL switch
// changes, sockFact is nulled so getSocketFactory() rebuilds it. Returns early
// when SSL is disabled or both SSL socket factories are already initialized.
Configuration.Differences changedKeys) { enabled = config.getBoolean(PARAM_ENABLED, DEFAULT_ENABLED); if (!enabled) { return; } paramMinPoolSize = config.getInt(PARAM_CHANNEL_THREAD_POOL_MIN, DEFAULT_CHANNEL_THREAD_POOL_MIN); paramMaxPoolSize = config.getInt(PARAM_CHANNEL_THREAD_POOL_MAX, DEFAULT_CHANNEL_THREAD_POOL_MAX); paramPoolKeepaliveTime = config.getTimeInterval(PARAM_CHANNEL_THREAD_POOL_KEEPALIVE, DEFAULT_CHANNEL_THREAD_POOL_KEEPALIVE); if (config.getBoolean(PARAM_BIND_TO_LOCAL_IP_ONLY, DEFAULT_BIND_TO_LOCAL_IP_ONLY)) { bindAddr = config.get(IdentityManager.PARAM_LOCAL_IP); } sendFromBindAddr = config.getBoolean(PARAM_SEND_FROM_BIND_ADDR, DEFAULT_SEND_FROM_BIND_ADDR); if (changedKeys.contains(PARAM_USE_V3_OVER_SSL)) { paramUseV3OverSsl = config.getBoolean(PARAM_USE_V3_OVER_SSL, DEFAULT_USE_V3_OVER_SSL); sockFact = null; // XXX shut down old listen socket, do exponential backoff // XXX on bind() to bring up new listen socket // XXX then move this to the "change on the fly" above } if (!paramUseV3OverSsl) return; log.info("Using SSL"); // We're trying to use SSL if (changedKeys.contains(PARAM_USE_SSL_CLIENT_AUTH)) { paramSslClientAuth = config.getBoolean(PARAM_USE_SSL_CLIENT_AUTH, DEFAULT_USE_SSL_CLIENT_AUTH); sockFact = null; } if (sslServerSocketFactory != null && sslSocketFactory != null) { // already initialized return; } if (changedKeys.contains(PARAM_SSL_KEYSTORE_NAME) || changedKeys.contains(PARAM_SSL_PRIVATE_KEYSTORE_NAME) || changedKeys.contains(PARAM_SSL_PUBLIC_KEYSTORE_NAME)) { String name = getOrNull(config, PARAM_SSL_KEYSTORE_NAME); String priv = getOrNull(config, PARAM_SSL_PRIVATE_KEYSTORE_NAME); String pub = getOrNull(config, PARAM_SSL_PUBLIC_KEYSTORE_NAME); if (!StringUtil.isNullString(name)) { paramSslPrivateKeyStoreName = name; paramSslPublicKeyStoreName = name; } if (priv != null) { if (name != null && !priv.equals(name)) { log.warning("Overriding " + PARAM_SSL_KEYSTORE_NAME + ": " + name + " with " + PARAM_SSL_PRIVATE_KEYSTORE_NAME + ": "
// Continues keystore-name resolution: specific private/public keystore params
// override the combined one; when both resolve to the same string the private
// name is aliased to the public one (the comment says "so can use == later").
// Then builds KeyManager/TrustManager factories from the keystore manager and
// initializes an SSLContext (seeded from RandomManager) to produce the server
// and client SSLSocketFactory instances used by SslSocketFactory below.
+ priv); } paramSslPrivateKeyStoreName = priv; } if (pub != null) { if (name != null && !pub.equals(name)) { log.warning("Overriding " + PARAM_SSL_KEYSTORE_NAME + ": " + name + " with " + PARAM_SSL_PUBLIC_KEYSTORE_NAME + ": " + pub); } paramSslPublicKeyStoreName = pub; } if (StringUtil.equalStrings(paramSslPublicKeyStoreName, paramSslPrivateKeyStoreName)) { // so can use == later paramSslPrivateKeyStoreName = paramSslPublicKeyStoreName; log.debug("Using keystore " + paramSslPrivateKeyStoreName); } else { log.debug("Using private keystore " + paramSslPrivateKeyStoreName + ", public keystore " + paramSslPublicKeyStoreName); } sockFact = null; } if (changedKeys.contains(PARAM_SSL_PROTOCOL)) { paramSslProtocol = config.get(PARAM_SSL_PROTOCOL, DEFAULT_SSL_PROTOCOL); sockFact = null; } KeyManagerFactory kmf = keystoreMgr.getKeyManagerFactory(paramSslPrivateKeyStoreName, "LCAP"); if (kmf == null) { throw new IllegalArgumentException("Keystore not found: " + paramSslPrivateKeyStoreName); } KeyManager[] kma = kmf.getKeyManagers(); TrustManagerFactory tmf = keystoreMgr.getTrustManagerFactory(paramSslPublicKeyStoreName, "LCAP"); if (tmf == null) { throw new IllegalArgumentException("Keystore not found: " + paramSslPublicKeyStoreName); } TrustManager[] tma = tmf.getTrustManagers(); // Now create an SSLContext from the KeyManager SSLContext sslContext = null; try { RandomManager rmgr = getDaemon().getRandomManager(); SecureRandom rng = rmgr.getSecureRandom(); sslContext = SSLContext.getInstance(paramSslProtocol); sslContext.init(kma, tma, rng); // Now create the SSL socket factories from the context sslServerSocketFactory = sslContext.getServerSocketFactory(); sslSocketFactory = sslContext.getSocketFactory(); log.info("SSL init successful"); } catch (NoSuchAlgorithmException ex) { log.error("Creating SSL context threw " + ex); sslContext = null; } catch (NoSuchProviderException ex) { log.error("Creating SSL context threw " + ex); sslContext = null; } catch
// Last catch of the SSL-context try block; errors are logged, not rethrown.
// Then: getOrNull() maps an empty config value to null; logKeyStore() is a
// debug3-only dump of keystore aliases/certs/keys; isTrustedNetwork() is true
// only with SSL + client auth; small accessors expose identity, receive queue,
// lazily-built SocketFactory (SSL vs. normal), and connect/SO timeouts.
(KeyManagementException ex) { log.error("Creating SSL context threw " + ex); sslContext = null; } } String getOrNull(Configuration config, String param) { String val = config.get(param); return "".equals(val) ? null : val; } // private debug output of keystore private void logKeyStore(KeyStore ks, char[] privateKeyPassWord) { log.debug3("start of key store"); try { for (Enumeration en = ks.aliases(); en.hasMoreElements(); ) { String alias = (String) en.nextElement(); log.debug3("Next alias " + alias); if (ks.isCertificateEntry(alias)) { log.debug3("About to Certificate"); java.security.cert.Certificate cert = ks.getCertificate(alias); if (cert == null) { log.debug3(alias + " null cert chain"); } else { log.debug3("Cert for " + alias + " is " + cert.toString()); } } else if (ks.isKeyEntry(alias)) { log.debug3("About to getKey"); Key privateKey = ks.getKey(alias, privateKeyPassWord); log.debug3(alias + " key " + privateKey.getAlgorithm() + "/" + privateKey.getFormat()); } else { log.debug3(alias + " neither key nor cert"); } } log.debug3("end of key store"); } catch (Exception ex) { log.error("logKeyStore() threw " + ex); } } /** Return true iff all connections are authenticated; <i>ie</i>, we only * talk to known peers */ public boolean isTrustedNetwork() { return paramUseV3OverSsl && paramSslClientAuth; } // overridable for testing protected PeerIdentity getLocalPeerIdentity() { return idMgr.getLocalPeerIdentity(Poll.V3_PROTOCOL); } PeerIdentity findPeerIdentity(String idkey) throws IdentityManager.MalformedIdentityKeyException { return idMgr.findPeerIdentity(idkey); } PeerIdentity getMyPeerId() { return myPeerId; } Queue getReceiveQueue() { return rcvQueue; } SocketFactory getSocketFactory() { if (sockFact == null) if (paramUseV3OverSsl) { sockFact = new SslSocketFactory(); } else { sockFact = new NormalSocketFactory(); } return sockFact; } long getConnectTimeout() { return paramConnectTimeout; } long getSoTimeout() { return paramSoTimeout; } long
// Simple accessors over the param* config fields (note getChannelHungTime()
// derives hung time as idle time + 1000 ms). Then channel/peer bookkeeping:
// associateChannelWithPeer/dissociateChannelFromPeer keep the peers map in
// sync with live channels (dissociate optionally drains the channel's send
// queue back to the PeerData for retry); findPeerData lazily creates a
// PeerData per peer under the peers lock and rejects a null pid loudly.
getSendWakeupTime() { return paramSendWakeupTime; } long getChannelIdleTime() { return paramChannelIdleTime; } long getDrainInputTime() { return paramDrainInputTime; } long getChannelHungTime() { return paramChannelIdleTime + 1000; } long getMaxMessageSize() { return paramMaxMessageSize; } long getMinMeasuredMessageSize() { return paramMinMeasuredMessageSize; } boolean isBufferedSend() { return paramIsBufferedSend; } boolean isTcpNoDelay() { return paramIsTcpNoDelay; } boolean getAbortOnUnknownOp() { return paramAbortOnUnknownOp; } boolean getDissociateOnEveryStop() { return paramDissociateOnEveryStop; } /** * Called by channel when it learns its peer's identity */ void associateChannelWithPeer(BlockingPeerChannel chan, PeerIdentity peer) { PeerData pdata = findPeerData(peer); pdata.associateChannel(chan); } /** * Called by channel when closing */ void dissociateChannelFromPeer(BlockingPeerChannel chan, PeerIdentity peer, Queue sendQueue) { // Do nothing if channel has no peer. E.g., incoming connection, on // which no PeerId msg received. if (peer != null) { PeerData pdata = getPeerData(peer); // No action if no PeerData if (pdata != null) { pdata.dissociateChannel(chan); if (sendQueue != null) { pdata.drainQueue(pdata, sendQueue, chan.shouldRetry()); } } } } /** * Return an existing PeerData for the peer or create a new one */ PeerData findPeerData(PeerIdentity pid) { if (pid == null) { log.error("findPeerData: null pid", new Throwable()); throw new RuntimeException("Null pid"); } synchronized (peers) { PeerData pdata = peers.get(pid); if (pdata == null) { log.debug2("new PeerData("+pid+")"); pdata = new PeerData(pid); peers.put(pid, pdata); } return pdata; } } PeerData getPeerData(PeerIdentity pid) { synchronized (peers) { return peers.get(pid); } } void rcvRateLimited(PeerIdentity pid) { PeerData pdata = getPeerData(pid); if (pdata != null) { pdata.rcvRateLimited(); } } /** Send a message to a peer.
* @param msg the message to send * @param id the identity of the peer to which to send the message * @throws IOException if message couldn't be queued */ public void sendTo(PeerMessage msg, PeerIdentity id) throws IOException { if (!isRunning()) throw new IllegalStateException("SComm not running"); if (msg == null) throw new NullPointerException("Null message"); if (id == null) throw new NullPointerException("Null peer"); if (log.isDebug3()) log.debug3("sending "+ msg +" to "+ id); sendToChannel(msg, id); } protected void sendToChannel(PeerMessage msg, PeerIdentity id) throws IOException { PeerData pdata = findPeerData(id); pdata.send(msg); } RateLimiter getSendRateLimiter(PeerIdentity id) { return sendRateLimiters.getRateLimiter(id); } RateLimiter getReceiveRateLimiter(PeerIdentity id) { return receiveRateLimiters.getRateLimiter(id); } BlockingPeerChannel findOrMakeChannel(PeerIdentity id) throws IOException { PeerData pdata = findPeerData(id); return pdata.findOrMakeChannel(); } void countMessageRetries(PeerMessage msg) { synchronized (retryHist) { retryHist.add(msg.getRetryCount()); } } void countMessageErrRetries(PeerMessage msg) { synchronized (retryErrHist) { retryErrHist.add(msg.getRetryCount()); } } void start() { pool = new PooledExecutor(paramMaxPoolSize); pool.setMinimumPoolSize(paramMinPoolSize); pool.setKeepAliveTime(paramPoolKeepaliveTime); log.debug2("Channel thread pool min, max: " + pool.getMinimumPoolSize() + ", " + pool.getMaximumPoolSize()); pool.abortWhenBlocked(); rcvQueue = new FifoQueue(); try { int port = myPeerAddr.getPort(); if (!getDaemon().getResourceManager().reserveTcpPort(port, SERVER_NAME)) { throw new IOException("TCP port " + port + " unavailable"); } if (bindAddr != null) { log.debug("Listening on port " + port + " on " + bindAddr); } else { log.debug("Listening on port " + port); } listenSock = getSocketFactory().newServerSocket(bindAddr, port, paramBacklog); } catch (IOException e) { log.critical("Can't create listen socket",
// start() above: builds the channel thread pool, reserves the TCP port via the
// resource manager, and opens the listen socket; a listen-socket failure is
// logged critical and start() returns without setting running=true. Below:
// stop() halts retry/listen/receive threads (bounded by paramWaitExit) and
// shuts the pool down; stopChannels() aborts per-peer channels and optionally
// waits for them; execute() runs work on the pool (null work is only warned).
e); return; } ensureQRunner(); ensureRetryThread(); ensureListener(); running = true; } protected boolean isRunning() { return running; } // stop all threads and channels void stop() { running = false; Deadline timeout = null; synchronized (threadLock) { if (paramWaitExit > 0) { timeout = Deadline.in(paramWaitExit); } stopThread(retryThread, timeout); retryThread = null; stopThread(listenThread, timeout); listenThread = null; stopThread(rcvThread, timeout); rcvThread = null; } log.debug2("Shutting down pool"); if (pool != null) { pool.shutdownNow(); } log.debug2("pool shut down "); } List<PeerData> getAllPeerData() { synchronized (peers) { return new ArrayList<PeerData>(peers.values()); } } // stop all channels in channel map void stopChannels(Map map, Deadline timeout) { log.debug2("Stopping channels"); List<PeerData> lst = getAllPeerData(); for (PeerData pdata : lst) { pdata.abortChannels(); } // Wait until the threads have exited before proceeding. Useful in // testing to keep debug output straight. // Any channels that had already dissociated themselves are not waited // for. It would take extra bookkeeping to handle those and they don't // seem to cause nearly as much trouble. if (timeout != null) { for (PeerData pdata : lst) { pdata.waitChannelsDone(timeout); } } } // poke channels that might have hung sender void checkHungChannels() { log.debug3("Doing hung check"); for (PeerData pdata : getAllPeerData()) { pdata.checkHung(); } } /** * Execute the runnable in a pool thread * @param run the Runnable to be run * @throws RuntimeException if no pool thread is available */ void execute(Runnable run) throws InterruptedException { if (run == null) log.warning("Executing null", new Throwable()); pool.execute(run); } /** Setup all socket options.
Should be called before any read/write * calls */ void setupOpenSocket(Socket sock) throws SocketException { if (log.isDebug3()) { log.debug3(sock + "SO_TIMEOUT: " + getSoTimeout() + ", TcpNoDelay: " + isTcpNoDelay() + ", KeepAlive: " + paramSoKeepAlive); } sock.setSoTimeout((int)getSoTimeout()); sock.setTcpNoDelay(isTcpNoDelay()); sock.setKeepAlive(paramSoKeepAlive); } // process a socket returned by accept() // overridable for testing void processIncomingConnection(Socket sock) throws IOException { if (sock.isClosed()) { // This should no longer happen throw new SocketException("processIncomingConnection got closed socket"); } // Setup socket (SO_TIMEOUT, etc.) before SSL handshake setupOpenSocket(sock); log.debug2("Accepted connection from " + new IPAddr(sock.getInetAddress())); // SSL handshake now performed by channel BlockingPeerChannel chan = getSocketFactory().newPeerChannel(this, sock); chan.startIncoming(); } private void processReceivedPacket(PeerMessage msg) { log.debug2("Received " + msg); try { runHandlers(msg); } catch (ProtocolException e) { log.warning("Cannot process incoming packet", e); } } protected void runHandler(MessageHandler handler, PeerMessage msg) { try { handler.handleMessage(msg); } catch (Exception e) { log.error("callback threw", e); } } private void runHandlers(PeerMessage msg) throws ProtocolException { try { int proto = msg.getProtocol(); MessageHandler handler; if (proto >= 0 && proto < messageHandlers.size() && (handler = (MessageHandler)messageHandlers.get(proto)) != null) { runHandler(handler, msg); } else { log.warning("Received message with unregistered protocol: " + proto); } } catch (RuntimeException e) { log.warning("Unexpected error in runHandlers", e); throw new ProtocolException(e.toString()); } } /** * Register a {@link LcapStreamComm.MessageHandler}, which will be called * whenever a message is received.
* @param protocol an int representing the protocol * @param handler MessageHandler to add */ public void registerMessageHandler(int protocol, MessageHandler handler) { synchronized (messageHandlers) { if (protocol >= messageHandlers.size()) { messageHandlers.setSize(protocol + 1); } if (messageHandlers.get(protocol) != null) { throw new RuntimeException("Protocol " + protocol + " already registered"); } messageHandlers.set(protocol, handler); } } /** * Unregister a {@link LcapStreamComm.MessageHandler}. * @param protocol an int representing the protocol */ public void unregisterMessageHandler(int protocol) { if (protocol < messageHandlers.size()) { messageHandlers.set(protocol, null); } } // PeerMessage.Factory implementation public PeerMessage newPeerMessage() { return new MemoryPeerMessage(); } public PeerMessage newPeerMessage(long estSize) { if (estSize < 0) { return newPeerMessage(); } else if (estSize > 0 && dataDir != null && estSize >= paramMinFileMessageSize) { return new FilePeerMessage(dataDir); } else { return new MemoryPeerMessage(); } } // Make it easy to compare timeout values, where 0 = infinite. long absTimeout(long timeout) { return timeout == 0 ?
// absTimeout(): treats 0 as "infinite" (Long.MAX_VALUE) so timeouts compare
// sanely. handshake(): temporarily shortens SO_TIMEOUT for the SSL handshake,
// forces it by calling getSession()/getPeerCertificates(), closes the socket
// and rethrows on SSLPeerUnverifiedException, and restores the old timeout in
// the finally block. RetryOrderComparator orders PeerData by next-retry time,
// breaking ties by identityHashCode so no two distinct entries compare equal
// (needed because peersToRetry appears to be a sorted set).
Long.MAX_VALUE : timeout; } protected void handshake(SSLSocket s) throws SSLPeerUnverifiedException { long oldTimeout = -2; try { oldTimeout = s.getSoTimeout(); if (absTimeout(paramSslHandshakeTimeout) < absTimeout(oldTimeout)) { s.setSoTimeout((int)paramSslHandshakeTimeout); } } catch (SocketException e) { log.warning("Couldn't save/set socket timeout before handshake", e); } try { SSLSession session = s.getSession(); java.security.cert.Certificate[] certs = session.getPeerCertificates(); log.debug(session.getPeerHost() + " via " + session.getProtocol() + " verified"); } catch (SSLPeerUnverifiedException ex) { log.error(s.getInetAddress() + ":" + s.getPort() + " not verified"); try { s.close(); } catch (IOException ex2) { log.error("Socket close threw " + ex2); } throw ex; } finally { if (!s.isClosed() && absTimeout(paramSslHandshakeTimeout) < absTimeout(oldTimeout)) { try { s.setSoTimeout((int)oldTimeout); } catch (SocketException e) { log.warning("Couldn't restore socket timeout after handshake", e); } } } } protected void handshakeIfClientAuth(Socket sock) throws SSLPeerUnverifiedException { if (sock instanceof SSLSocket && paramSslClientAuth) { // Ensure handshake is complete before doing anything else handshake((SSLSocket)sock); } } /** Sort retry list by time of next retry. Ensure never equal */ static class RetryOrderComparator implements Comparator { public int compare(Object o1, Object o2) { if (o1 == o2) { return 0; } PeerData pd1 = (PeerData)o1; PeerData pd2 = (PeerData)o2; long r1 = pd1.getNextRetry(); long r2 = pd2.getNextRetry(); int res = (r2 > r1 ? -1 : (r2 < r1 ?
// Tie-break above uses identityHashCode subtraction; guaranteed consistent for
// the non-negative hash codes the JVM normally returns. ensureQRunner/
// ensureListener/ensureRetryThread lazily start the three comm threads under
// threadLock and block until each reports running. CommThread is the common
// base (LockssThread subclass with a stop hook). ReceiveThread pulls queued
// PeerMessages off rcvQueue with a refreshing deadline, dispatches them via
// processReceivedPacket(), and periodically triggers the hung-channel check.
1 : (System.identityHashCode(pd1) - System.identityHashCode(pd2)))); return res; } } void ensureQRunner() { synchronized (threadLock) { if (rcvThread == null) { log.info("Starting receive thread"); rcvThread = new ReceiveThread("SCommRcv: " + myPeerId.getIdString()); rcvThread.start(); rcvThread.waitRunning(); } } } void ensureListener() { synchronized (threadLock) { if (listenThread == null) { log.info("Starting listen thread"); listenThread = new ListenThread("SCommListen: " + myPeerId.getIdString()); listenThread.start(); listenThread.waitRunning(); } } } void ensureRetryThread() { synchronized (threadLock) { if (retryThread == null) { log.info("Starting retry thread"); retryThread = new RetryThread("SCommRetry: " + myPeerId.getIdString()); retryThread.start(); retryThread.waitRunning(); } } } void stopThread(CommThread th, Deadline timeout) { if (th != null) { log.info("Stopping " + th.getName()); th.stopCommThread(); if (timeout != null) { th.waitExited(timeout); } } } abstract class CommThread extends LockssThread { abstract void stopCommThread(); CommThread(String name) { super(name); } } // Receive thread private class ReceiveThread extends CommThread { private volatile boolean goOn = true; private Deadline timeout = Deadline.in(getChannelHungTime()); ReceiveThread(String name) { super(name); } public void lockssRun() { setPriority(PRIORITY_PARAM_SCOMM, PRIORITY_DEFAULT_SCOMM); triggerWDogOnExit(true); startWDog(WDOG_PARAM_SCOMM, WDOG_DEFAULT_SCOMM); nowRunning(); while (goOn) { pokeWDog(); try { synchronized (timeout) { if (goOn) { timeout.expireIn(getChannelHungTime()); } } if (log.isDebug3()) log.debug3("rcvQueue.get(" + timeout + ")"); Object qObj = rcvQueue.get(timeout); if (qObj != null) { if (qObj instanceof PeerMessage) { if (log.isDebug3()) log.debug3("Rcvd " + qObj); processReceivedPacket((PeerMessage)qObj); } else { log.warning("Non-PeerMessage on rcv queue" + qObj); } } if (TimeBase.msSince(lastHungCheckTime) > getChannelHungTime()) {
// ReceiveThread tail: InterruptedException just re-checks the exit flag.
// disableSelectedProtocols (server/client overloads): filters the enabled SSL
// protocol lists against the configured disable sets. ListenThread accept()s
// on listenSock and hands each socket to processIncomingConnection(); its
// stopCommThread() closes the listen socket so the blocking accept unblocks.
checkHungChannels(); lastHungCheckTime = TimeBase.nowMs(); } } catch (InterruptedException e) { // just wake up and check for exit } finally { } } rcvThread = null; } void stopCommThread() { synchronized (timeout) { stopWDog(); triggerWDogOnExit(false); goOn = false; timeout.expire(); } } } private void disableSelectedProtocols(SSLServerSocket sock) { if (paramDisableSslServerProtocols == null) return; Set<String> enaprotos = new HashSet<String>(); for (String s : sock.getEnabledProtocols()) { if (paramDisableSslServerProtocols.contains(s)) { continue; } enaprotos.add(s); } sock.setEnabledProtocols(enaprotos.toArray(new String[0])); } private void disableSelectedProtocols(SSLSocket sock) { if (paramDisableSslClientProtocols == null) return; Set<String> enaprotos = new HashSet<String>(); for (String s : sock.getEnabledProtocols()) { if (paramDisableSslClientProtocols.contains(s)) { continue; } enaprotos.add(s); } sock.setEnabledProtocols(enaprotos.toArray(new String[0])); } // Listen thread private class ListenThread extends CommThread { private volatile boolean goOn = true; private ListenThread(String name) { super(name); } public void lockssRun() { setPriority(PRIORITY_PARAM_SLISTEN, PRIORITY_DEFAULT_SLISTEN); triggerWDogOnExit(true); // startWDog(WDOG_PARAM_SLISTEN, WDOG_DEFAULT_SLISTEN); nowRunning(); String sockmsg = (listenSock instanceof SSLServerSocket) ? "SSL Listener" : "Listener"; while (goOn) { // pokeWDog(); log.debug3("accept()"); try { Socket sock = listenSock.accept(); if (!goOn) { break; } processIncomingConnection(sock); } catch (SocketException e) { if (goOn) { log.warning(sockmsg, e); } } catch (Exception e) { log.warning(sockmsg, e); } } listenThread = null; } void stopCommThread() { stopWDog(); triggerWDogOnExit(false); goOn = false; IOUtil.safeClose(listenSock); this.interrupt(); } } // Outside thread so stat table can find it // Must initialize (to mutable Deadline) in case recalcNext called before // thread runs.
// RetryThread: sleeps until the earliest per-peer retry deadline (recalcNext()
// pokes it awake by expiring the shared Deadline), then retries the first
// PeerData in peersToRetry; a successful retryIfNeeded() spaces subsequent
// retries by paramRetryDelay. threadHung() only escalates to the watchdog if
// no retry is actually pending. SocketFactory (declared below) abstracts the
// socket/channel creation so tests can substitute mocks.
private volatile Deadline retryThreadNextRetry = Deadline.at(TimeBase.MAX); // Retry thread private class RetryThread extends CommThread { private volatile boolean goOn = true; private long soonest = 0; RetryThread(String name) { super(name); } public void lockssRun() { setPriority(PRIORITY_PARAM_RETRY, PRIORITY_DEFAULT_RETRY); triggerWDogOnExit(true); startWDog(WDOG_PARAM_RETRY, WDOG_DEFAULT_RETRY); nowRunning(); outer: while (goOn) { pokeWDog(); do { retryThreadNextRetry = getNextRetry(); log.debug2("nextRetry: " + retryThreadNextRetry.shortString()); try { retryThreadNextRetry.sleep(); } catch (InterruptedException e) { // just wakeup and check for work } if (!goOn) { break outer; } } while (TimeBase.nowMs() < soonest); PeerData pdata = firstPeerToRetry(); if (pdata != null && pdata.retryIfNeeded()) { soonest = TimeBase.nowMs() + paramRetryDelay; } else { soonest = 0; } } retryThread = null; } Deadline getNextRetry() { synchronized (peersToRetry) { PeerData pdata = firstPeerToRetry(); if (log.isDebug3()) log.debug3("firstPeerToRetry: " + pdata); if (pdata != null) { log.debug3("pdata.getNextRetry(): " + pdata.getNextRetry()); return Deadline.at(Math.max(soonest, pdata.getNextRetry())); } else { return Deadline.at(TimeBase.MAX); } } } PeerData firstPeerToRetry() { synchronized (peersToRetry) { if (peersToRetry.isEmpty()) { return null; } PeerData pdata = peersToRetry.first(); if (log.isDebug2()) { log.debug2("First peer to retry: " + pdata.getPid()); } return pdata; } } void recalcNext() { retryThreadNextRetry.expire(); } void stopCommThread() { synchronized (retryThreadNextRetry) { stopWDog(); triggerWDogOnExit(false); goOn = false; retryThreadNextRetry.expire(); } } @Override protected void threadHung() { Deadline next = getNextRetry(); if (next.expired()) { super.threadHung(); } else { pokeWDog(); } } } /** SocketFactory interface allows encapsulation of socket type details (normal, SSL, etc.)
and allows test code to use instrumented or mock sockets and peer channels */ interface SocketFactory { /** Return a listen socket of the appropriate type */ ServerSocket newServerSocket(String bindAddr, int port, int backlog) throws IOException; /** Return a socket of the appropriate type connected to the remote * address, with its options set */ Socket newSocket(IPAddr addr, int port) throws IOException; /** Overridable for testing */ BlockingPeerChannel newPeerChannel(BlockingStreamComm comm, Socket sock) throws IOException; /** Overridable for testing */ BlockingPeerChannel newPeerChannel(BlockingStreamComm comm, PeerIdentity peer) throws IOException; } /** Normal socket factory creates real TCP Sockets */ class NormalSocketFactory implements SocketFactory { public ServerSocket newServerSocket(String bindAddr, int port, int backlog) throws IOException { if (bindAddr != null) { return new ServerSocket(port, backlog, InetAddress.getByName(bindAddr)); } else { return new ServerSocket(port, backlog); } } public Socket newSocket(IPAddr addr, int port) throws IOException { Socket sock; if (sendFromBindAddr && bindAddr != null) { sock = new Socket(addr.getInetAddr(), port, InetAddress.getByName(bindAddr), 0); } else { sock = new Socket(addr.getInetAddr(), port); } setupOpenSocket(sock); return sock; } public BlockingPeerChannel newPeerChannel(BlockingStreamComm comm, Socket sock) throws IOException { return new BlockingPeerChannel(comm, sock); } public BlockingPeerChannel newPeerChannel(BlockingStreamComm comm, PeerIdentity peer) throws IOException { return new BlockingPeerChannel(comm, peer); } } /** SSL socket factory */ class SslSocketFactory implements SocketFactory { public ServerSocket newServerSocket(String bindAddr, int port, int backlog) throws IOException { if (sslServerSocketFactory == null) { throw new IOException("no SSL server socket factory"); } SSLServerSocket s; if (bindAddr != null) { s = (SSLServerSocket)
// SslSocketFactory body: creates SSL server/client sockets from the factories
// built in setConfig(), prunes disabled protocols, enforces client auth on the
// server side, and on the client side runs the verifying handshake() before
// returning the socket when client auth is on. logSSLSocketDetails is debug2
// output only. chanStatusColDescs (starting below) defines the "Comm
// Channels" status-table columns.
sslServerSocketFactory.createServerSocket(port, backlog, InetAddress.getByName(bindAddr)); } else { s = (SSLServerSocket) sslServerSocketFactory.createServerSocket(port, backlog); } disableSelectedProtocols(s); s.setNeedClientAuth(paramSslClientAuth); log.debug("New SSL server socket: " + port + " backlog " + backlog + " clientAuth " + paramSslClientAuth); if (log.isDebug2()) logSSLSocketDetails(s); return s; } public Socket newSocket(IPAddr addr, int port) throws IOException { if (sslSocketFactory == null) { throw new IOException("no SSL client socket factory"); } SSLSocket s; if (sendFromBindAddr && bindAddr != null) { s = (SSLSocket)sslSocketFactory.createSocket(addr.getInetAddr(), port, InetAddress.getByName(bindAddr), 0); } else { s = (SSLSocket)sslSocketFactory.createSocket(addr.getInetAddr(), port); } disableSelectedProtocols(s); log.debug2("New SSL client socket: " + port + "@" + addr.toString()); // Setup socket (SO_TIMEOUT, etc.) before SSL handshake setupOpenSocket(s); if (paramSslClientAuth) { handshake(s); } return s; } private void logSSLSocketDetails(SSLServerSocket s) { log.debug2("Supported cipher suites: " + ListUtil.fromArray(s.getSupportedCipherSuites())); log.debug2("Enabled cipher suites: " + ListUtil.fromArray(s.getEnabledCipherSuites())); log.debug2("Supported protocols: " + ListUtil.fromArray(s.getSupportedProtocols())); log.debug2("Enabled protocols: " + ListUtil.fromArray(s.getEnabledProtocols())); log.debug2("Enable session creation: " + s.getEnableSessionCreation()); } public BlockingPeerChannel newPeerChannel(BlockingStreamComm comm, Socket sock) throws IOException { return new BlockingPeerChannel(comm, sock); } public BlockingPeerChannel newPeerChannel(BlockingStreamComm comm, PeerIdentity peer) throws IOException { return new BlockingPeerChannel(comm, peer); } } private static final List chanStatusColDescs = ListUtil.list( new ColumnDescriptor("Peer", "Peer", ColumnDescriptor.TYPE_STRING), new ColumnDescriptor("State", "State",
// Remaining channel-status columns, then ChannelStatus: a StatusAccessor that
// renders per-channel rows plus summary lines (SSL mode, channel counts,
// cumulative message/byte totals folded into a ChannelStats accumulator).
ColumnDescriptor.TYPE_STRING), new ColumnDescriptor("Flags", "Flags", ColumnDescriptor.TYPE_STRING), new ColumnDescriptor("SendQ", "SendQ", ColumnDescriptor.TYPE_INT), new ColumnDescriptor("Sent", "Msgs Sent", ColumnDescriptor.TYPE_INT), new ColumnDescriptor("Rcvd", "Msgs Rcvd", ColumnDescriptor.TYPE_INT), new ColumnDescriptor("SentBytes", "Bytes Sent", ColumnDescriptor.TYPE_INT), new ColumnDescriptor("RcvdBytes", "Bytes Rcvd", ColumnDescriptor.TYPE_INT), new ColumnDescriptor("LastSend", "LastSend", ColumnDescriptor.TYPE_TIME_INTERVAL), new ColumnDescriptor("LastRcv", "LastRcv", ColumnDescriptor.TYPE_TIME_INTERVAL), new ColumnDescriptor("PrevStateChange", "Change", ColumnDescriptor.TYPE_TIME_INTERVAL), new ColumnDescriptor("PrevState", "PrevState", ColumnDescriptor.TYPE_STRING) ); private class ChannelStatus implements StatusAccessor { long start; public String getDisplayName() { return "Comm Channels"; } public boolean requiresKey() { return false; } public void populateTable(StatusTable table) { String key = table.getKey(); ChannelStats cumulative = new ChannelStats(); table.setColumnDescriptors(chanStatusColDescs); table.setRows(getRows(key, cumulative)); cumulative.add(globalStats); table.setSummaryInfo(getSummaryInfo(key, cumulative)); } private List getSummaryInfo(String key, ChannelStats stats) { List res = new ArrayList(); StringBuilder sb = new StringBuilder(); if (paramUseV3OverSsl) { sb.append(paramSslProtocol); if (paramSslClientAuth) { sb.append(", Client Auth"); } } else { sb.append("No"); } res.add(new StatusTable.SummaryInfo("SSL", ColumnDescriptor.TYPE_STRING, sb.toString())); res.add(new StatusTable.SummaryInfo("Channels", ColumnDescriptor.TYPE_STRING, nPrimary + "/" + paramMaxChannels + ", " + maxPrimary + " max")); res.add(new StatusTable.SummaryInfo("RcvChannels", ColumnDescriptor.TYPE_STRING, nSecondary + ", " + maxSecondary +" max")); res.add(new StatusTable.SummaryInfo("Draining", ColumnDescriptor.TYPE_STRING, drainingChannels.size() + ", "
// ChannelStatus continued: summary totals, then getRows() emits a row per
// primary/secondary channel of every peer plus "D"-flagged rows for draining
// channels; makeRow() starts building one channel row (peer id may be null
// for draining channels, rendered as "???").
+ maxDrainingChannels + " max")); ChannelStats.Count count = stats.getInCount(); res.add(new StatusTable.SummaryInfo("Msgs Sent", ColumnDescriptor.TYPE_INT, count.getMsgs())); res.add(new StatusTable.SummaryInfo("Bytes Sent", ColumnDescriptor.TYPE_INT, count.getBytes())); count = stats.getOutCount(); res.add(new StatusTable.SummaryInfo("Msgs Rcvd", ColumnDescriptor.TYPE_INT, count.getMsgs())); res.add(new StatusTable.SummaryInfo("Bytes Rcvd", ColumnDescriptor.TYPE_INT, count.getBytes())); return res; } private List getRows(String key, ChannelStats cumulative) { List table = new ArrayList(); for (PeerData pdata : getAllPeerData()) { BlockingPeerChannel primary = pdata.getPrimaryChannel(); if (primary != null) { table.add(makeRow(primary, "", cumulative)); } BlockingPeerChannel secondary = pdata.getSecondaryChannel(); if (secondary != null) { table.add(makeRow(secondary, "", cumulative)); } } synchronized (drainingChannels) { for (BlockingPeerChannel chan : drainingChannels) { table.add(makeRow(chan, "D", cumulative)); } } return table; } private Map makeRow(BlockingPeerChannel chan, String flags, ChannelStats cumulative) { PeerIdentity pid = chan.getPeer(); Map row = new HashMap(); // Draining channels can sometimes have null peer row.put("Peer", (pid == null) ? "???"
// makeRow() tail: note the row's "Sent" column is filled from getInCount() and
// "Rcvd" from getOutCount() — mirroring getSummaryInfo() above; flag letters
// O/C/R/W mark originate/connecter/reader/writer. lastTime() renders a
// timestamp as ms-since (null when unset). peerStatusColDescs and
// rateLimitColDescs define the "Comm Peer Data" table columns.
: pid.getIdString()); row.put("State", chan.getState()); row.put("SendQ", chan.getSendQueueSize()); ChannelStats stats = chan.getStats(); cumulative.add(stats); ChannelStats.Count count = stats.getInCount(); row.put("Sent", count.getMsgs()); row.put("SentBytes", count.getBytes()); count = stats.getOutCount(); row.put("Rcvd", count.getMsgs()); row.put("RcvdBytes", count.getBytes()); StringBuilder sb = new StringBuilder(flags); if (chan.isOriginate()) sb.append("O"); if (chan.hasConnecter()) sb.append("C"); if (chan.hasReader()) sb.append("R"); if (chan.hasWriter()) sb.append("W"); row.put("Flags", sb.toString()); row.put("LastSend", lastTime(chan.getLastSendTime())); row.put("LastRcv", lastTime(chan.getLastRcvTime())); if (chan.getPrevState() != BlockingPeerChannel.ChannelState.NONE) { row.put("PrevState", chan.getPrevState()); row.put("PrevStateChange", lastTime(chan.getLastStateChange())); } return row; } Long lastTime(long time) { if (time <= 0) return null; return TimeBase.msSince(time); } } private static final List peerStatusColDescs = ListUtil.list( new ColumnDescriptor("Peer", "Peer", ColumnDescriptor.TYPE_STRING), new ColumnDescriptor("Orig", "Orig", ColumnDescriptor.TYPE_INT), new ColumnDescriptor("Fail", "Fail", ColumnDescriptor.TYPE_INT), new ColumnDescriptor("Accept", "Accept", ColumnDescriptor.TYPE_INT), new ColumnDescriptor("Sent", "Msgs Sent", ColumnDescriptor.TYPE_INT), new ColumnDescriptor("Rcvd", "Msgs Rcvd", ColumnDescriptor.TYPE_INT), new ColumnDescriptor("Chan", "Chan", ColumnDescriptor.TYPE_STRING), new ColumnDescriptor("SendQ", "Send Q", ColumnDescriptor.TYPE_INT), new ColumnDescriptor("LastRetry", "Last Attempt", ColumnDescriptor.TYPE_DATE), new ColumnDescriptor("NextRetry", "Next Retry", ColumnDescriptor.TYPE_DATE) ); private static final List rateLimitColDescs = ListUtil.list( new ColumnDescriptor("SendLimited", "Send Discard", ColumnDescriptor.TYPE_STRING), new ColumnDescriptor("RcvLimited", "Rcv Discard", ColumnDescriptor.TYPE_STRING) );
// PeerStatus: "Comm Peer Data" StatusAccessor. Rate-limit columns are appended
// only when anyRateLimited is set; rows are sorted by Peer. makeRow() reports
// per-peer originate/fail/accept and message counters, a P/S channel-presence
// flag, and the send-queue size (the peer's own queue, else the primary
// channel's); NextRetry is omitted when it is TimeBase.MAX (i.e., none
// scheduled). histString() formats a retry-count Bag as [count,occurrences]
// pairs for the summary lines.
private static final List peerStatusSortRules = ListUtil.list(new StatusTable.SortRule("Peer", true)); private class PeerStatus implements StatusAccessor { long start; public String getDisplayName() { return "Comm Peer Data"; } public boolean requiresKey() { return false; } public void populateTable(StatusTable table) { String key = table.getKey(); if (anyRateLimited) { table.setColumnDescriptors(ListUtil.append(peerStatusColDescs, rateLimitColDescs)); } else { table.setColumnDescriptors(peerStatusColDescs); } table.setDefaultSortRules(peerStatusSortRules); table.setRows(getRows(key)); table.setSummaryInfo(getSummaryInfo(key)); } private List getRows(String key) { List table = new ArrayList(); for (PeerData pdata : getAllPeerData()) { table.add(makeRow(pdata)); } return table; } private Map makeRow(PeerData pdata) { PeerIdentity pid = pdata.getPid(); Map row = new HashMap(); row.put("Peer", (pid == null) ? "???" : pid.getIdString()); row.put("Orig", pdata.getOrigCnt()); row.put("Fail", pdata.getFailCnt()); row.put("Accept", pdata.getAcceptCnt()); row.put("Sent", pdata.getMsgsSent()); row.put("Rcvd", pdata.getMsgsRcvd()); if (anyRateLimited) { row.put("SendLimited", pdata.getSendRateLimited()); row.put("RcvLimited", pdata.getRcvRateLimited()); } StringBuilder sb = new StringBuilder(2); if (pdata.getPrimaryChannel() != null) { sb.append("P"); } if (pdata.getSecondaryChannel() != null) { sb.append("S"); } if (sb.length() != 0) { row.put("Chan", sb.toString()); } int pq = pdata.getSendQueueSize(); if (pq != 0) { row.put("SendQ", pq); } else { BlockingPeerChannel chan = pdata.getPrimaryChannel(); if (chan != null) { row.put("SendQ", chan.getSendQueueSize()); } } row.put("LastRetry", pdata.getLastRetry()); if (pdata.getNextRetry() != TimeBase.MAX) { row.put("NextRetry", pdata.getNextRetry()); } return row; } private String histString(Bag hist) { synchronized (hist) { List lst = new ArrayList(); for (Integer cnt : ((Set<Integer>)hist.uniqueSet())) { StringBuilder sb = new
// histString() tail, then getSummaryInfo(): retry histograms, the number of
// peers waiting for retry, and (when non-empty) the retry thread's next wakeup.
// The final braces close getSummaryInfo, PeerStatus, and the enclosing class.
StringBuilder(); sb.append("["); sb.append(cnt); sb.append(","); sb.append(hist.getCount(cnt)); sb.append("]"); lst.add(sb.toString()); } return StringUtil.separatedString(lst, ","); } } private List getSummaryInfo(String key) { List res = new ArrayList(); res.add(new StatusTable.SummaryInfo("Msg Ok Retries", ColumnDescriptor.TYPE_STRING, histString(retryHist))); res.add(new StatusTable.SummaryInfo("Msg Err Retries", ColumnDescriptor.TYPE_STRING, histString(retryErrHist))); res.add(new StatusTable.SummaryInfo("Waiting Retry", ColumnDescriptor.TYPE_INT, peersToRetry.size())); if (peersToRetry.size() != 0) { res.add(new StatusTable.SummaryInfo("Next Retry", ColumnDescriptor.TYPE_DATE, retryThreadNextRetry)); } return res; } } }
package org.junit.rules; import org.junit.runner.Description; import org.junit.runners.model.Statement; /** * Verifier is a base class for Rules like ErrorCollector, which can turn * otherwise passing test methods into failing tests if a verification check is * failed * * <pre> * public static class ErrorLogVerifier() { * private ErrorLog errorLog = new ErrorLog(); * * &#064;Rule * public TestRule verifier = new Verifier() { * &#064;Override public void verify() { * assertTrue(errorLog.isEmpty()); * } * } * * &#064;Test public void testThatMightWriteErrorLog() { * // ... * } * } * </pre> */ public abstract class Verifier implements TestRule { public Statement apply(final Statement base, Description description) { return new Statement() { @Override public void evaluate() throws Throwable { base.evaluate(); verify(); } }; } /** * Override this to add verification logic. Overrides should throw an * exception to indicate that verification failed. */ protected void verify() throws Throwable { } }
package com.aimmac23.hub.proxy; import java.util.Map; import java.util.logging.Logger; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.http.HttpHost; import org.apache.http.HttpResponse; import org.apache.http.HttpStatus; import org.apache.http.client.HttpClient; import org.apache.http.client.methods.HttpPost; import org.apache.http.message.BasicHttpEntityEnclosingRequest; import org.apache.http.util.EntityUtils; import org.json.JSONObject; import org.openqa.grid.common.RegistrationRequest; import org.openqa.grid.internal.Registry; import org.openqa.grid.internal.TestSession; import org.openqa.grid.selenium.proxy.DefaultRemoteProxy; import org.openqa.grid.web.servlet.handler.RequestType; import org.openqa.grid.web.servlet.handler.SeleniumBasedRequest; import org.openqa.selenium.remote.DesiredCapabilities; import org.openqa.selenium.remote.internal.HttpClientFactory; import com.aimmac23.hub.HubVideoRegistry; public class VideoProxy extends DefaultRemoteProxy { private static final Logger log = Logger.getLogger(VideoProxy.class.getName()); private String serviceUrl; boolean isCurrentlyRecording = false; private HttpClient client; private HttpHost remoteHost; public VideoProxy(RegistrationRequest request, Registry registry) { super(transformRegistration(request), registry); serviceUrl = getRemoteHost() + "/extra/VideoRecordingControlServlet"; remoteHost = new HttpHost(getRemoteHost().getHost(), getRemoteHost().getPort()); HttpClientFactory httpClientFactory = new HttpClientFactory(); client = httpClientFactory.getHttpClient(); } static RegistrationRequest transformRegistration(RegistrationRequest request) { int maxSessions = request.getConfigAsInt(RegistrationRequest.MAX_SESSION, 1); request.getConfiguration().put(RegistrationRequest.MAX_SESSION, 1); if(maxSessions != 1) { log.warning("Reducing " + RegistrationRequest.MAX_SESSION + " value to 1: Video node does not support concurrent sessions"); } 
for(DesiredCapabilities caps : request.getCapabilities()) { Object maxInstances = caps.getCapability(RegistrationRequest.MAX_INSTANCES); caps.setCapability(RegistrationRequest.MAX_INSTANCES, "1"); if(maxInstances != null && !"1".equals(maxInstances)) { log.warning("Reducing " + RegistrationRequest.MAX_INSTANCES + " for browser " + caps.getBrowserName() + " to 1: Video node does not support concurrent sessions"); } } return request; } @Override public void beforeSession(TestSession arg0) { super.beforeSession(arg0); HttpPost r = new HttpPost(serviceUrl + "?command=start"); try { HttpResponse response = client.execute(remoteHost, r); if(response.getStatusLine().getStatusCode() != HttpStatus.SC_OK) { log.warning("Could not start video reporting: " + EntityUtils.toString(response.getEntity())); return; } isCurrentlyRecording = true; log.info("Started recording for new session on node: " + getId()); } catch (Exception e) { log.warning("Could not start video reporting due to exception: " + e.getMessage()); e.printStackTrace(); return; } finally { r.releaseConnection(); } } @Override public void afterCommand(TestSession session, HttpServletRequest request, HttpServletResponse response) { super.afterCommand(session, request, response); // its a shame we have to extract this again SeleniumBasedRequest seleniumRequest = SeleniumBasedRequest.createFromRequest(request, getRegistry()); if(RequestType.STOP_SESSION.equals(seleniumRequest.getRequestType())) { if(isCurrentlyRecording) { log.info("Selenium session closed for " + session.getExternalKey() + " on node " + getId() + " - stopping recording."); stopRecording(session); } else { log.severe("Recording not started for " + session.getExternalKey() + " on node " + getId() + " and session being deleted - this could be a bug in the code."); } } } @Override public void afterSession(TestSession session) { super.afterSession(session); if(isCurrentlyRecording) { log.warning("Session session terminated ungracefully for " + 
session.getExternalKey() + " on node " + getId() + " - stopping recording"); stopRecording(session); } } void stopRecording(TestSession session) { HttpPost r = new HttpPost(serviceUrl + "?command=stop"); try { HttpResponse response = client.execute(remoteHost, r); if(response.getStatusLine().getStatusCode() != HttpStatus.SC_OK) { log.warning("Could not stop video reporting: " + EntityUtils.toString(response.getEntity())); } isCurrentlyRecording = false; // check that the session didn't explode on startup if(session.getExternalKey() != null) { JSONObject json = new JSONObject(EntityUtils.toString(response.getEntity())); String fileKey = json.getString("filekey"); HubVideoRegistry.copyVideoToHub(session, fileKey, getRemoteHost()); } } catch (Exception e) { log.warning("Could not stop video reporting due to exception: " + e.getMessage()); e.printStackTrace(); } finally { r.releaseConnection(); } } }
package ar.edu.unrc.exa.dc.dose2016.riocuartobandasderock.modelTest; import org.junit.Test; import static org.junit.Assert.*; import ar.edu.unrc.exa.dc.dose2016.riocuartobandasderock.model.Album; import ar.edu.unrc.exa.dc.dose2016.riocuartobandasderock.model.Artist; import ar.edu.unrc.exa.dc.dose2016.riocuartobandasderock.model.Band; import ar.edu.unrc.exa.dc.dose2016.riocuartobandasderock.model.Genre; import java.util.LinkedList; import java.util.UUID; public class BandTest { /** * in this test, initily we create a band, then we check * with getters all attributes * are the same are the same as the inserted */ /* @Test public void shortConstructorTest(){ String a_name = "Led Zeppeling"; String a_description = "Sex,Drugs,Rock&Roll"; String a_release = "13/09/2016"; Genre a_genere = new Genre(a_name,a_description); LinkedList<Artist> an_artist_list = new LinkedList<Artist>(); Band band = new Band(a_name,a_genere,an_artist_list,a_release); assertEquals(band.getName(),a_name); assertEquals(band.getGenere(),a_genere); assertEquals(band.getArtistList(),an_artist_list); assertEquals(band.getRelease(),a_release); }*/ /** * in this test, initily we create a band, then we check * with getters all attributes * are the same are the same as the inserted */ /* @Test public void longConstructorTest(){ String a_name = "Led Zeppeling"; String a_description = "Sex,Drugs,Rock&Roll"; String a_release = "13/09/2016"; Genre a_genere = new Genre(a_name,a_description); LinkedList<Artist> an_artist_list = new LinkedList<Artist>(); LinkedList<Album> an_album_list = new LinkedList<Album>(); Band band = new Band(a_name,a_genere,an_artist_list,a_release,an_album_list); assertEquals(band.getName(),a_name); assertEquals(band.getGenere(),a_genere); assertEquals(band.getArtistList(),an_artist_list); assertEquals(band.getRelease(),a_release); assertEquals(band.getAlbum(),an_album_list); } */ /** * in this test we set an id, and then we check * that match with the id created */ @Test public void 
setIdTest(){ String an_id = "1"; Band band = new Band(); band.setId(an_id); assertEquals(band.getId(),an_id); } /** * in this test we set a name, and then we check * that match with the name created */ @Test public void setNameTest(){ Band band = new Band(); String name = "led zeppeling"; band.setName(name); assertEquals(band.getName(),name); } /** * in this test we set a Genere , and then we check * that match with the genere created */ @Test public void setGenereTest(){ Band band = new Band(); Genre a_genere = new Genre(); band.setGenere(a_genere); assertEquals(band.getGenere(),a_genere); } }
/*
 * $Id: BlockingStreamComm.java,v 1.31 2008-01-07 13:32:32 dshr Exp $
 */

package org.lockss.protocol;

import java.io.*;
import java.net.*;
import java.security.*;
import java.security.cert.*;
import javax.net.ssl.*;
import java.util.*;
import EDU.oswego.cs.dl.util.concurrent.*;
import org.lockss.util.*;
import org.lockss.util.Queue;
import org.lockss.config.*;
import org.lockss.daemon.*;
import org.lockss.daemon.status.*;
import org.lockss.app.*;
import org.lockss.poller.*;

/**
 * BlockingStreamComm implements the streaming message protocol using
 * blocking sockets.  One channel thread pair per peer; optional SSL
 * transport.
 */
public class BlockingStreamComm
  extends BaseLockssDaemonManager
  implements ConfigurableManager, LcapStreamComm, PeerMessage.Factory {
  static Logger log = Logger.getLogger("SComm");

  public static final String SERVER_NAME = "StreamComm";

  // NOTE(review): PREFIX (used by all params below) is inherited from an
  // interface/superclass not visible in this chunk — confirm its value.

  /** Use V3 over SSL **/
  public static final String PARAM_USE_V3_OVER_SSL = PREFIX + "v3OverSsl";
  public static final boolean DEFAULT_USE_V3_OVER_SSL = false;

  /** Use client authentication for SSL **/
  public static final String PARAM_USE_SSL_CLIENT_AUTH = PREFIX + "SslClientAuth";
  public static final boolean DEFAULT_USE_SSL_CLIENT_AUTH = true;

  /** Use temporary SSL keystore **/
  public static final String PARAM_SSL_TEMP_KEYSTORE = PREFIX + "SslTempKeystore";
  public static final boolean DEFAULT_SSL_TEMP_KEYSTORE = true;

  /** File name for SSL key store **/
  public static final String PARAM_SSL_KEYSTORE = PREFIX + "SslKeyStore";
  public static final String DEFAULT_SSL_KEYSTORE = ".keystore";

  /** File name for SSL key store password **/
  public static final String PARAM_SSL_PRIVATE_KEY_PASSWORD_FILE =
    PREFIX + "SslPrivateKeyPasswordFile";
  public static final String DEFAULT_SSL_PRIVATE_KEY_PASSWORD_FILE = ".password";

  /** SSL protocol to use **/
  public static final String PARAM_SSL_PROTOCOL = PREFIX + "SslProtocol";
  public static final String DEFAULT_SSL_PROTOCOL = "TLSv1";

  /** Max peer channels.  Only affects outgoing messages; incoming
   * connections are always accepted.
   */
  public static final String PARAM_MAX_CHANNELS = PREFIX + "maxChannels";
  public static final int DEFAULT_MAX_CHANNELS = 50;

  /** Min threads in channel thread pool */
  public static final String PARAM_CHANNEL_THREAD_POOL_MIN =
    PREFIX + "threadPool.min";
  public static final int DEFAULT_CHANNEL_THREAD_POOL_MIN = 3;

  /** Max threads in channel thread pool */
  public static final String PARAM_CHANNEL_THREAD_POOL_MAX =
    PREFIX + "threadPool.max";
  public static final int DEFAULT_CHANNEL_THREAD_POOL_MAX =
    3 * DEFAULT_MAX_CHANNELS;

  /** Duration after which idle threads will be terminated.  -1 = never */
  public static final String PARAM_CHANNEL_THREAD_POOL_KEEPALIVE =
    PREFIX + "threadPool.keepAlive";
  public static final long DEFAULT_CHANNEL_THREAD_POOL_KEEPALIVE =
    10 * Constants.MINUTE;

  /** Connect timeout */
  public static final String PARAM_CONNECT_TIMEOUT = PREFIX + "timeout.connect";
  public static final long DEFAULT_CONNECT_TIMEOUT = 2 * Constants.MINUTE;

  /** Data timeout (SO_TIMEOUT), channel is aborted if read times out.
   * This should be disabled (zero) because the read side of a channel may
   * legitimately be idle for a long time (if the channel is sending), and
   * interrupted reads apparently cannot reliably be resumed.  If the
   * channel is truly idle, the send side should close it.
   */
  public static final String PARAM_DATA_TIMEOUT = PREFIX + "timeout.data";
  public static final long DEFAULT_DATA_TIMEOUT = 0;

  /** Time after which idle channel will be closed */
  public static final String PARAM_CHANNEL_IDLE_TIME = PREFIX + "channelIdleTime";
  public static final long DEFAULT_CHANNEL_IDLE_TIME = 2 * Constants.MINUTE;

  /** Time channel remains in DRAIN_INPUT state before closing */
  public static final String PARAM_DRAIN_INPUT_TIME = PREFIX + "drainInputTime";
  public static final long DEFAULT_DRAIN_INPUT_TIME = 10 * Constants.SECOND;

  /** Interval at which send thread checks idle timer */
  public static final String PARAM_SEND_WAKEUP_TIME = PREFIX + "sendWakeupTime";
  public static final long DEFAULT_SEND_WAKEUP_TIME = 1 * Constants.MINUTE;

  /** FilePeerMessage will be used for messages larger than this, else
   * MemoryPeerMessage */
  public static final String PARAM_MIN_FILE_MESSAGE_SIZE =
    PREFIX + "minFileMessageSize";
  public static final int DEFAULT_MIN_FILE_MESSAGE_SIZE = 1024;

  /** Maximum message size.  (NOTE(review): the original javadoc here was a
   * copy-paste of the min-file-message text; presumably messages larger
   * than this are rejected — confirm against getMaxMessageSize() callers.) */
  public static final String PARAM_MAX_MESSAGE_SIZE = PREFIX + "maxMessageSize";
  public static final int DEFAULT_MAX_MESSAGE_SIZE = 100 * 1024;

  /** Dir for PeerMessage data storage */
  public static final String PARAM_DATA_DIR = PREFIX + "messageDataDir";
  /** Default is PlatformInfo.getSystemTempDir() */
  public static final String DEFAULT_DATA_DIR = "Platform tmp dir";

  /** Wrap Socket OutputStream in BufferedOutputStream? */
  public static final String PARAM_IS_BUFFERED_SEND = PREFIX + "bufferedSend";
  public static final boolean DEFAULT_IS_BUFFERED_SEND = true;

  /** TCP_NODELAY */
  public static final String PARAM_TCP_NODELAY = PREFIX + "tcpNodelay";
  public static final boolean DEFAULT_TCP_NODELAY = true;

  /** Amount of time BlockingStreamComm.stopService() should wait for
   * worker threads to exit.  Zero disables wait.
   */
  public static final String PARAM_WAIT_EXIT = PREFIX + "waitExit";
  public static final long DEFAULT_WAIT_EXIT = 2 * Constants.SECOND;

  // Watchdog / thread-priority tuning keys (looked up elsewhere).
  static final String WDOG_PARAM_SCOMM = "SComm";
  static final long WDOG_DEFAULT_SCOMM = 1 * Constants.HOUR;

  static final String PRIORITY_PARAM_SCOMM = "SComm";
  static final int PRIORITY_DEFAULT_SCOMM = -1;

  static final String PRIORITY_PARAM_SLISTEN = "SListen";
  static final int PRIORITY_DEFAULT_SLISTEN = -1;

  static final String WDOG_PARAM_CHANNEL = "Channel";
  static final long WDOG_DEFAULT_CHANNEL = 30 * Constants.MINUTE;

  static final String PRIORITY_PARAM_CHANNEL = "Channel";
  static final int PRIORITY_DEFAULT_CHANNEL = -1;

  // Current values of the config params above, refreshed in setConfig().
  private boolean paramUseV3OverSsl = DEFAULT_USE_V3_OVER_SSL;
  private boolean paramSslClientAuth = DEFAULT_USE_SSL_CLIENT_AUTH;
  private boolean paramSslTempKeystore = DEFAULT_SSL_TEMP_KEYSTORE;
  private String paramSslKeyStore = DEFAULT_SSL_KEYSTORE;
  private String paramSslPrivateKeyPasswordFile =
    DEFAULT_SSL_PRIVATE_KEY_PASSWORD_FILE;
  private String paramSslProtocol = DEFAULT_SSL_PROTOCOL;
  private int paramMinFileMessageSize = DEFAULT_MIN_FILE_MESSAGE_SIZE;
  private int paramMaxMessageSize = DEFAULT_MAX_MESSAGE_SIZE;
  private File dataDir = null;
  private int paramBacklog = DEFAULT_LISTEN_BACKLOG;
  private int paramMaxChannels = DEFAULT_MAX_CHANNELS;
  private int paramMinPoolSize = DEFAULT_CHANNEL_THREAD_POOL_MIN;
  private int paramMaxPoolSize = DEFAULT_CHANNEL_THREAD_POOL_MAX;
  private long paramPoolKeepaliveTime = DEFAULT_CHANNEL_THREAD_POOL_KEEPALIVE;
  private long paramConnectTimeout = DEFAULT_CONNECT_TIMEOUT;
  private long paramSoTimeout = DEFAULT_DATA_TIMEOUT;
  private long paramSendWakeupTime = DEFAULT_SEND_WAKEUP_TIME;
  protected long paramChannelIdleTime = DEFAULT_CHANNEL_IDLE_TIME;
  private long paramDrainInputTime = DEFAULT_DRAIN_INPUT_TIME;
  private boolean paramIsBufferedSend = DEFAULT_IS_BUFFERED_SEND;
  private boolean paramIsTcpNodelay = DEFAULT_TCP_NODELAY;
  private long paramWaitExit = DEFAULT_WAIT_EXIT;
  private
 long lastHungCheckTime = 0;

  // Worker pool for channel reader/writer threads.
  private PooledExecutor pool;

  protected SSLSocketFactory sslSocketFactory = null;
  protected SSLServerSocketFactory sslServerSocketFactory = null;
  private String paramSslKeyStorePassword = null;

  private boolean enabled = DEFAULT_ENABLED;
  private boolean running = false;

  private SocketFactory sockFact;
  private ServerSocket listenSock;
  private PeerIdentity myPeerId;
  private PeerAddress.Tcp myPeerAddr;

  private IdentityManager idMgr;
  private OneShot configShot = new OneShot();

  private FifoQueue rcvQueue;     // PeerMessages received from channels
  private ReceiveThread rcvThread;
  private ListenThread listenThread;

  // Synchronization lock for rcv thread, listen thread manipulations
  private Object threadLock = new Object();

  // Maps PeerIdentity to primary PeerChannel (and used as lock for both maps)
  MaxSizeRecordingMap channels = new MaxSizeRecordingMap();
  // Maps PeerIdentity to secondary PeerChannel
  MaxSizeRecordingMap rcvChannels = new MaxSizeRecordingMap();

  // Channels being drained before close; tracked for stats only.
  Set<BlockingPeerChannel> drainingChannels = new HashSet();
  int maxDrainingChannels = 0;

  ChannelStats globalStats = new ChannelStats();

  private Vector messageHandlers = new Vector(); // Vector is synchronized

  public BlockingStreamComm() {
    sockFact = null;
  }

  /**
   * Start the stream comm manager: resolve the local V3 peer identity and,
   * if a TCP peer address is available, start threads and register the
   * channel status table.  Disables itself (enabled = false) on any
   * identity/address problem rather than throwing.
   */
  public void startService() {
    super.startService();
    LockssDaemon daemon = getDaemon();
    idMgr = daemon.getIdentityManager();
    resetConfig();
    try {
      myPeerId = getLocalPeerIdentity();
    } catch (Exception e) {
      log.critical("No V3 identity, not starting stream comm", e);
      enabled = false;
      return;
    }
    log.debug("Local V3 peer: " + myPeerId);
    try {
      PeerAddress pad = myPeerId.getPeerAddress();
      if (pad instanceof PeerAddress.Tcp) {
        myPeerAddr = (PeerAddress.Tcp)pad;
      } else {
        log.error("Disabling stream comm; no local TCP peer address: " + pad);
        enabled = false;
      }
    } catch (IdentityManager.MalformedIdentityKeyException e) {
      log.error("Disabling stream comm; local address malformed", e);
      enabled = false;
    }
    if (enabled) {
      start();
      daemon.getStatusService().
        registerStatusAccessor(getStatusAccessorName("SCommChans"),
                               new ChannelStatus());
    }
  }

  // Overridable for testing (lets tests use a distinct table name).
  protected String getStatusAccessorName(String base) {
    return base;
  }

  /**
   * stop the stream comm manager
   * @see org.lockss.app.LockssManager#stopService()
   */
  public void stopService() {
    getDaemon().getStatusService().
      unregisterStatusAccessor(getStatusAccessorName("SCommChans"));
    if (running) {
      stop();
    }
    super.stopService();
  }

  /**
   * Set communication parameters from configuration, once only.
   * This service currently cannot be reconfigured.
* @param config the Configuration */ public void setConfig(Configuration config, Configuration prevConfig, Configuration.Differences changedKeys) { if (getDaemon().isDaemonInited()) { // one-time only init if (configShot.once()) { configure(config, prevConfig, changedKeys); } // these params can be changed on the fly paramMinFileMessageSize = config.getInt(PARAM_MIN_FILE_MESSAGE_SIZE, DEFAULT_MIN_FILE_MESSAGE_SIZE); paramMaxMessageSize = config.getInt(PARAM_MAX_MESSAGE_SIZE, DEFAULT_MAX_MESSAGE_SIZE); paramIsBufferedSend = config.getBoolean(PARAM_IS_BUFFERED_SEND, DEFAULT_IS_BUFFERED_SEND); paramIsTcpNodelay = config.getBoolean(PARAM_TCP_NODELAY, DEFAULT_TCP_NODELAY); paramWaitExit = config.getTimeInterval(PARAM_WAIT_EXIT, DEFAULT_WAIT_EXIT); if (changedKeys.contains(PARAM_DATA_DIR)) { String paramDataDir = config.get(PARAM_DATA_DIR, PlatformUtil.getSystemTempDir()); File dir = new File(paramDataDir); if (dir.exists() || dir.mkdirs()) { dataDir = dir; log.debug2("Message data dir: " + dataDir); } else { log.warning("No message data dir: " + dir); dataDir = null; } } paramMaxChannels = config.getInt(PARAM_MAX_CHANNELS, DEFAULT_MAX_CHANNELS); paramConnectTimeout = config.getTimeInterval(PARAM_CONNECT_TIMEOUT, DEFAULT_CONNECT_TIMEOUT); paramSoTimeout = config.getTimeInterval(PARAM_DATA_TIMEOUT, DEFAULT_DATA_TIMEOUT); paramSendWakeupTime = config.getTimeInterval(PARAM_SEND_WAKEUP_TIME, DEFAULT_SEND_WAKEUP_TIME); paramChannelIdleTime = config.getTimeInterval(PARAM_CHANNEL_IDLE_TIME, DEFAULT_CHANNEL_IDLE_TIME); paramDrainInputTime = config.getTimeInterval(PARAM_DRAIN_INPUT_TIME, DEFAULT_DRAIN_INPUT_TIME); } } /** Internal config, so can invoke from test constructor */ void configure(Configuration config, Configuration prevConfig, Configuration.Differences changedKeys) { enabled = config.getBoolean(PARAM_ENABLED, DEFAULT_ENABLED); if (!enabled) { return; } paramMinPoolSize = config.getInt(PARAM_CHANNEL_THREAD_POOL_MIN, DEFAULT_CHANNEL_THREAD_POOL_MIN); paramMaxPoolSize = 
config.getInt(PARAM_CHANNEL_THREAD_POOL_MAX, DEFAULT_CHANNEL_THREAD_POOL_MAX); paramPoolKeepaliveTime = config.getTimeInterval(PARAM_CHANNEL_THREAD_POOL_KEEPALIVE, DEFAULT_CHANNEL_THREAD_POOL_KEEPALIVE); if (changedKeys.contains(PARAM_USE_V3_OVER_SSL)) { paramUseV3OverSsl = config.getBoolean(PARAM_USE_V3_OVER_SSL, DEFAULT_USE_V3_OVER_SSL); sockFact = null; // XXX shut down old listen socket, do exponential backoff // XXX on bind() to bring up new listen socket // XXX then move this to the "change on the fly" above } if (!paramUseV3OverSsl) return; // We're trying to use SSL if (changedKeys.contains(PARAM_USE_SSL_CLIENT_AUTH)) { paramSslClientAuth = config.getBoolean(PARAM_USE_SSL_CLIENT_AUTH, DEFAULT_USE_SSL_CLIENT_AUTH); sockFact = null; } if (changedKeys.contains(PARAM_SSL_TEMP_KEYSTORE)) { paramSslTempKeystore = config.getBoolean(PARAM_SSL_TEMP_KEYSTORE, DEFAULT_SSL_TEMP_KEYSTORE); sockFact = null; } if (sslServerSocketFactory != null && sslSocketFactory != null) { // already initialized return; } if (paramSslTempKeystore) { // We're using the temporary keystore paramSslKeyStore = System.getProperty("javax.net.ssl.keyStore", null); paramSslKeyStorePassword = System.getProperty("javax.net.ssl.keyStorePassword", null); log.debug("Using temporary keystore from " + paramSslKeyStore); // Now create the SSL socket factories from the context sslServerSocketFactory = (SSLServerSocketFactory)SSLServerSocketFactory.getDefault(); sslSocketFactory = (SSLSocketFactory)SSLSocketFactory.getDefault(); return; } // We're using the real keystore if (changedKeys.contains(PARAM_SSL_KEYSTORE)) { paramSslKeyStore = config.get(PARAM_SSL_KEYSTORE, DEFAULT_SSL_KEYSTORE); // The password for the keystore is the machine's FQDN. 
paramSslKeyStorePassword = config.get("org.lockss.platform.fqdn", ""); log.debug("Using permanent keystore from " + paramSslKeyStore); sockFact = null; } byte[] sslPrivateKeyPassword = null; if (changedKeys.contains(PARAM_SSL_PRIVATE_KEY_PASSWORD_FILE)) { paramSslPrivateKeyPasswordFile = config.get(PARAM_SSL_PRIVATE_KEY_PASSWORD_FILE, DEFAULT_SSL_PRIVATE_KEY_PASSWORD_FILE); sockFact = null; } if (changedKeys.contains(PARAM_SSL_PROTOCOL)) { paramSslProtocol = config.get(PARAM_SSL_PROTOCOL, DEFAULT_SSL_PROTOCOL); sockFact = null; } try { File keyStorePasswordFile = new File(paramSslPrivateKeyPasswordFile); if (keyStorePasswordFile.exists()) { FileInputStream fis = new FileInputStream(keyStorePasswordFile); sslPrivateKeyPassword = new byte[(int)keyStorePasswordFile.length()]; if (fis.read(sslPrivateKeyPassword) != sslPrivateKeyPassword.length) { throw new IOException("short read"); } fis.close(); FileOutputStream fos = new FileOutputStream(keyStorePasswordFile); byte[] junk = new byte[(int)keyStorePasswordFile.length()]; for (int i = 0; i < junk.length; i++) junk[i] = 0; fos.write(junk); fos.close(); keyStorePasswordFile.delete(); } else { log.debug("SSL password file " + paramSslPrivateKeyPasswordFile + " missing"); return; } } catch (IOException ex) { log.error(paramSslPrivateKeyPasswordFile + " threw " + ex); return; } // We now have a password to decrypt the private key in the // SSL keystore. Next create the keystore from the file. 
KeyStore keyStore = null; InputStream fis = null; try { keyStore = KeyStore.getInstance("JCEKS"); fis = new FileInputStream(paramSslKeyStore); keyStore.load(fis, paramSslKeyStorePassword.toCharArray()); } catch (KeyStoreException ex) { log.error("loading SSL key store threw " + ex); return; } catch (IOException ex) { log.error("loading SSL key store threw " + ex); return; } catch (NoSuchAlgorithmException ex) { log.error("loading SSL key store threw " + ex); return; } catch (CertificateException ex) { log.error("loading SSL key store threw " + ex); return; } finally { IOUtil.safeClose(fis); } { String temp = new String(sslPrivateKeyPassword); logKeyStore(keyStore, temp.toCharArray()); } // Now create a KeyManager from the keystore using the password. KeyManager[] kma = null; try { KeyManagerFactory kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm()); kmf.init(keyStore, new String(sslPrivateKeyPassword).toCharArray()); kma = kmf.getKeyManagers(); } catch (NoSuchAlgorithmException ex) { log.error("creating SSL key manager threw " + ex); return; } catch (KeyStoreException ex) { log.error("creating SSL key manager threw " + ex); return; } catch (UnrecoverableKeyException ex) { log.error("creating SSL key manager threw " + ex); return; } // Now create a TrustManager from the keystore using the password TrustManager[] tma = null; try { TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); tmf.init(keyStore); tma = tmf.getTrustManagers(); } catch (NoSuchAlgorithmException ex) { log.error("creating SSL trust manager threw " + ex); return; } catch (KeyStoreException ex) { log.error("creating SSL trust manager threw " + ex); return; } finally { // Now forget the password for (int i = 0; i < sslPrivateKeyPassword.length; i++) { sslPrivateKeyPassword[i] = 0; } sslPrivateKeyPassword = null; } // Now create an SSLContext from the KeyManager SSLContext sslContext = null; try { sslContext = 
SSLContext.getInstance(paramSslProtocol); sslContext.init(kma, tma, null); // Now create the SSL socket factories from the context sslServerSocketFactory = sslContext.getServerSocketFactory(); sslSocketFactory = sslContext.getSocketFactory(); } catch (NoSuchAlgorithmException ex) { log.error("Creating SSL context threw " + ex); sslContext = null; } catch (KeyManagementException ex) { log.error("Creating SSL context threw " + ex); sslContext = null; } } // private debug output of keystore private void logKeyStore(KeyStore ks, char[] privateKeyPassWord) { log.debug3("start of key store"); try { for (Enumeration en = ks.aliases(); en.hasMoreElements(); ) { String alias = (String) en.nextElement(); log.debug3("Next alias " + alias); if (ks.isCertificateEntry(alias)) { log.debug3("About to Certificate"); java.security.cert.Certificate cert = ks.getCertificate(alias); if (cert == null) { log.debug3(alias + " null cert chain"); } else { log.debug3("Cert for " + alias + " is " + cert.toString()); } } else if (ks.isKeyEntry(alias)) { log.debug3("About to getKey"); Key privateKey = ks.getKey(alias, privateKeyPassWord); log.debug3(alias + " key " + privateKey.getAlgorithm() + "/" + privateKey.getFormat()); } else { log.debug3(alias + " neither key nor cert"); } } log.debug3("end of key store"); } catch (Exception ex) { log.error("logKeyStore() threw " + ex); } } // overridable for testing protected PeerIdentity getLocalPeerIdentity() { return idMgr.getLocalPeerIdentity(Poll.V3_PROTOCOL); } PeerIdentity findPeerIdentity(String idkey) { return idMgr.findPeerIdentity(idkey); } PeerIdentity getMyPeerId() { return myPeerId; } Queue getReceiveQueue() { return rcvQueue; } SocketFactory getSocketFactory() { if (sockFact == null) if (paramUseV3OverSsl) { sockFact = new SslSocketFactory(); } else { sockFact = new NormalSocketFactory(); } return sockFact; } long getConnectTimeout() { return paramConnectTimeout; } long getSoTimeout() { return paramSoTimeout; } long getSendWakeupTime() { 
 return paramSendWakeupTime;
  }

  long getChannelIdleTime() {
    return paramChannelIdleTime;
  }

  long getDrainInputTime() {
    return paramDrainInputTime;
  }

  // Hung threshold is slightly beyond the idle-close time.
  long getChannelHungTime() {
    return paramChannelIdleTime + 1000;
  }

  int getMaxMessageSize() {
    return paramMaxMessageSize;
  }

  boolean isBufferedSend() {
    return paramIsBufferedSend;
  }

  boolean isTcpNodelay() {
    return paramIsTcpNodelay;
  }

  /** Called by channel when it learns its peer's identity.
   * First channel for a peer becomes primary, second becomes secondary;
   * a third concurrent channel is logged and left unassociated. */
  void associateChannelWithPeer(BlockingPeerChannel chan, PeerIdentity peer) {
    synchronized (channels) {
      BlockingPeerChannel currentChan =
        (BlockingPeerChannel)channels.get(peer);
      if (currentChan == null) {
        channels.put(peer, chan);       // normal association
        log.debug2("Associated " + chan);
      } else if (currentChan == chan) {
        log.warning("Redundant peer-channel association (" + chan + ")");
      } else {
        BlockingPeerChannel rcvChan =
          (BlockingPeerChannel)rcvChannels.get(peer);
        if (rcvChan == null) {
          rcvChannels.put(peer, chan);  // normal secondary association
          log.debug2("Associated secondary " + chan);
        } else if (rcvChan == chan) {
          log.debug2("Redundant secondary peer-channel association(" +
                     chan + ")");
        } else {
          // maybe should replace if new working and old not.  but old will
          // eventually timeout and close anyway
          log.warning("Conflicting peer-channel association(" + chan +
                      "), was " + peer);
        }
      }
    }
  }

  /** Called by channel when closing.  Removes the channel from whichever
   * map holds it, folds its stats into the global totals, and drops it
   * from the draining set. */
  void dissociateChannelFromPeer(BlockingPeerChannel chan, PeerIdentity peer) {
    synchronized (channels) {
      BlockingPeerChannel currentChan =
        (BlockingPeerChannel)channels.get(peer);
      if (currentChan == chan) {
        globalStats.add(chan.getStats());
        channels.remove(peer);
        log.debug2("Removed: " + chan);
      }
      BlockingPeerChannel rcvChan = (BlockingPeerChannel)rcvChannels.get(peer);
      if (rcvChan == chan) {
        globalStats.add(chan.getStats());
        rcvChannels.remove(peer);
        log.debug2("Removed secondary: " + chan);
      }
      drainingChannels.remove(chan);
    }
  }

  /** Called by channel when draining channel is dissociated, so it can be
   * included in stats */
  void addDrainingChannel(BlockingPeerChannel chan) {
    drainingChannels.add(chan);
    // Track high-water mark for the status display.
    if (drainingChannels.size() > maxDrainingChannels) {
      maxDrainingChannels = drainingChannels.size();
    }
  }

  /** Return an existing channel for the peer or create and start one.
   * A secondary channel with no primary is promoted to primary.
   * @throws IOException if at the channel limit or the channel can't start */
  BlockingPeerChannel findOrMakeChannel(PeerIdentity pid) throws IOException {
    synchronized (channels) {
      BlockingPeerChannel chan = (BlockingPeerChannel)channels.get(pid);
      if (chan != null) {
        return chan;
      }
      chan = (BlockingPeerChannel)rcvChannels.get(pid);
      if (chan != null) {
        // found secondary, no primary.  promote secondary to primary
        channels.put(pid, chan);
        rcvChannels.remove(pid);
        log.debug2("Promoted " + chan);
        return chan;
      }
      // new primary channel, if we have room
      if (channels.size() >= paramMaxChannels) {
        // need to maintain queue of messages waiting for active channel?
        throw new IOException("Too many open channels");
      }
      chan = getSocketFactory().newPeerChannel(this, pid);
      channels.put(pid, chan);
      log.debug2("Added " + chan);
      try {
        chan.startOriginate();
        return chan;
      } catch (IOException e) {
        // Roll back the map entry so a later send can retry cleanly.
        log.warning("Can't make channel", e);
        channels.remove(pid);
        log.debug2("Removed " + chan);
        throw e;
      }
    }
  }

  /** Send a message to a peer.
   * @param msg the message to send
   * @param id the identity of the peer to which to send the message
   * @param limiter optional rate limiter; when present and not ok, the
   * message is silently dropped
   * @throws IOException if message couldn't be queued
   */
  public void sendTo(PeerMessage msg, PeerIdentity id, RateLimiter limiter)
      throws IOException {
    if (!running) throw new IllegalStateException("SComm not running");
    if (msg == null) throw new NullPointerException("Null message");
    if (id == null) throw new NullPointerException("Null peer");
    if (log.isDebug3()) log.debug3("sending "+ msg +" to "+ id);
    if (limiter == null || limiter.isEventOk()) {
      sendToChannel(msg, id);
      if (limiter != null) limiter.event();
    } else {
      log.debug2("Pkt rate limited");
    }
  }

  // Enqueue the message on a (possibly new) channel, retrying up to 3
  // times if a closing channel refuses it.
  private void sendToChannel(PeerMessage msg, PeerIdentity id)
      throws IOException {
    // closing channel might refuse the message (return false), in which
    // case it will have removed itself so try again with a new channel
    BlockingPeerChannel last = null;
    for (int rpt = 0; rpt < 3; rpt++) {
      BlockingPeerChannel chan = findOrMakeChannel(id);
      if (last == chan)
        throw new IllegalStateException("Got same channel as last time: " +
                                        chan);
      if (chan.send(msg)) {
        return;
      }
      if (!chan.wasOpen()) {
        log.warning("Couldn't start channel");
        return;
      }
      last = chan;
    }
    log.error("Couldn't enqueue msg after 3 tries: " + msg);
  }

  // Create thread pool, receive queue and listen socket, then start the
  // receive and listen threads.
  void start() {
    pool = new PooledExecutor(paramMaxPoolSize);
    pool.setMinimumPoolSize(paramMinPoolSize);
    pool.setKeepAliveTime(paramPoolKeepaliveTime);
    log.debug2("Channel thread pool min, max: " +
               pool.getMinimumPoolSize() + ", " + pool.getMaximumPoolSize());
    pool.abortWhenBlocked();
    rcvQueue = new FifoQueue();
    try {
      int port = myPeerAddr.getPort();
      if
(!getDaemon().getResourceManager().reserveTcpPort(port, SERVER_NAME)) { throw new IOException("TCP port " + port + " unavailable"); } log.debug("Listening on port " + port); listenSock = getSocketFactory().newServerSocket(port, paramBacklog); } catch (IOException e) { log.critical("Can't create listen socket", e); return; } ensureQRunner(); ensureListener(); running = true; } void ensureQRunner() { synchronized (threadLock) { if (rcvThread == null) { log.info("Starting receive thread"); rcvThread = new ReceiveThread("SCommRcv: " + myPeerId.getIdString()); rcvThread.start(); rcvThread.waitRunning(); } } } void ensureListener() { synchronized (threadLock) { if (listenThread == null) { log.info("Starting listen thread"); listenThread = new ListenThread("SCommListen: " + myPeerId.getIdString()); listenThread.start(); listenThread.waitRunning(); } } } // stop all threads and channels void stop() { running = false; Deadline timeout = null; synchronized (threadLock) { if (paramWaitExit > 0) { timeout = Deadline.in(paramWaitExit); } ListenThread lth = listenThread; if (lth != null) { log.info("Stopping listen thread"); lth.stopListenThread(); if (timeout != null) { lth.waitExited(timeout); } listenThread = null; } ReceiveThread rth = rcvThread; if (rth != null) { log.info("Stopping receive thread"); rth.stopRcvThread(); if (timeout != null) { rth.waitExited(timeout); } rcvThread = null; } } stopChannels(channels, timeout); stopChannels(rcvChannels, timeout); log.debug2("shutting down pool"); if (pool != null) { pool.shutdownNow(); } log.debug2("pool shut down "); } // stop all channels in channel map void stopChannels(Map map, Deadline timeout) { List lst; synchronized (channels) { // make copy while map is locked lst = new ArrayList(map.values()); } for (Iterator iter = lst.iterator(); iter.hasNext(); ) { BlockingPeerChannel chan = (BlockingPeerChannel)iter.next(); chan.abortChannel(); } // Wait until the threads have exited before proceeding. 
Useful in // testing to keep debug output straight. // Any channels that had already dissociated themselves are not waited // for. It would take extra bookkeeping to handle those and they don't // seem to cause nearly as much trouble. if (timeout != null) { for (Iterator iter = lst.iterator(); iter.hasNext(); ) { BlockingPeerChannel chan = (BlockingPeerChannel)iter.next(); chan.waitThreadsExited(timeout); } } } // poke channels that might have hung sender void checkHungChannels() { log.debug3("Doing hung check"); List lst; synchronized (channels) { // make copy while map is locked lst = new ArrayList(channels.values()); } for (Iterator iter = lst.iterator(); iter.hasNext(); ) { BlockingPeerChannel chan = (BlockingPeerChannel)iter.next(); chan.checkHung(); } } /** * Execute the runnable in a pool thread * @param run the Runnable to be run * @throws RuntimeException if no pool thread is available */ void execute(Runnable run) throws InterruptedException { if (run == null) log.warning("Executing null", new Throwable()); pool.execute(run); } void XXXexecute(Runnable run) { try { if (run == null) log.warning("Executing null", new Throwable()); pool.execute(run); } catch (InterruptedException e) { // Shouldn't happen in abortWhenBlocked mode log.warning("Shouldn't happen", e); throw new RuntimeException("InterruptedException in pool.excute(): " + e.toString()); } } // process a socket returned by accept() // overridable for testing void processIncomingConnection(Socket sock) throws IOException { if (sock.isClosed()) { throw new SocketException("socket closed during handshake"); } log.debug2("Accepted connection from " + new IPAddr(sock.getInetAddress())); BlockingPeerChannel chan = getSocketFactory().newPeerChannel(this, sock); chan.startIncoming(); } private void processReceivedPacket(PeerMessage msg) { log.debug2("Received " + msg); try { runHandlers(msg); } catch (ProtocolException e) { log.warning("Cannot process incoming packet", e); } } protected void 
runHandler(MessageHandler handler, PeerMessage msg) { try { handler.handleMessage(msg); } catch (Exception e) { log.error("callback threw", e); } } private void runHandlers(PeerMessage msg) throws ProtocolException { try { int proto = msg.getProtocol(); MessageHandler handler; if (proto >= 0 && proto < messageHandlers.size() && (handler = (MessageHandler)messageHandlers.get(proto)) != null) { runHandler(handler, msg); } else { log.warning("Received message with unregistered protocol: " + proto); } } catch (RuntimeException e) { log.warning("Unexpected error in runHandlers", e); throw new ProtocolException(e.toString()); } } /** * Register a {@link LcapStreamComm.MessageHandler}, which will be called * whenever a message is received. * @param protocol an int representing the protocol * @param handler MessageHandler to add */ public void registerMessageHandler(int protocol, MessageHandler handler) { synchronized (messageHandlers) { if (protocol >= messageHandlers.size()) { messageHandlers.setSize(protocol + 1); } if (messageHandlers.get(protocol) != null) { throw new RuntimeException("Protocol " + protocol + " already registered"); } messageHandlers.set(protocol, handler); } } /** * Unregister a {@link LcapStreamComm.MessageHandler}. 
* @param protocol an int representing the protocol */ public void unregisterMessageHandler(int protocol) { if (protocol < messageHandlers.size()) { messageHandlers.set(protocol, null); } } // PeerMessage.Factory implementation public PeerMessage newPeerMessage() { return new MemoryPeerMessage(); } public PeerMessage newPeerMessage(int estSize) { if (estSize < 0) { return newPeerMessage(); } else if (estSize > 0 && dataDir != null && estSize >= paramMinFileMessageSize) { return new FilePeerMessage(dataDir); } else { return new MemoryPeerMessage(); } } protected void handShake(SSLSocket s) { SSLSession session = s.getSession(); try { java.security.cert.Certificate[] certs = session.getPeerCertificates(); log.debug(session.getPeerHost() + " via " + session.getProtocol() + " verified"); } catch (SSLPeerUnverifiedException ex) { log.error(s.getInetAddress() + " threw " + ex); try { s.close(); } catch (IOException ex2) { log.error("Socket close threw " + ex2); } } } // Receive thread private class ReceiveThread extends LockssThread { private volatile boolean goOn = true; private Deadline timeout = Deadline.in(getChannelHungTime()); ReceiveThread(String name) { super(name); } public void lockssRun() { setPriority(PRIORITY_PARAM_SCOMM, PRIORITY_DEFAULT_SCOMM); triggerWDogOnExit(true); startWDog(WDOG_PARAM_SCOMM, WDOG_DEFAULT_SCOMM); nowRunning(); while (goOn) { pokeWDog(); try { synchronized (timeout) { if (goOn) { timeout.expireIn(getChannelHungTime()); } } if (log.isDebug3()) log.debug3("rcvQueue.get(" + timeout + ")"); Object qObj = rcvQueue.get(timeout); if (qObj != null) { if (qObj instanceof PeerMessage) { if (log.isDebug3()) log.debug3("Rcvd " + qObj); processReceivedPacket((PeerMessage)qObj); } else { log.warning("Non-PeerMessage on rcv queue" + qObj); } } if (TimeBase.msSince(lastHungCheckTime) > getChannelHungTime()) { checkHungChannels(); lastHungCheckTime = TimeBase.nowMs(); } } catch (InterruptedException e) { // just wake up and check for exit } finally { } } 
rcvThread = null; } private void stopRcvThread() { synchronized (timeout) { stopWDog(); triggerWDogOnExit(false); goOn = false; timeout.expire(); } } } // Listen thread private class ListenThread extends LockssThread { private volatile boolean goOn = true; private ListenThread(String name) { super(name); } public void lockssRun() { setPriority(PRIORITY_PARAM_SLISTEN, PRIORITY_DEFAULT_SLISTEN); triggerWDogOnExit(true); // startWDog(WDOG_PARAM_SLISTEN, WDOG_DEFAULT_SLISTEN); nowRunning(); while (goOn) { // pokeWDog(); log.debug3("accept()"); try { Socket sock = listenSock.accept(); if (sock instanceof SSLSocket && paramSslClientAuth) { // Ensure handshake is complete before doing anything else handShake((SSLSocket)sock); } processIncomingConnection(sock); } catch (SocketException e) { if (goOn) { log.warning("Listener", e); } } catch (Exception e) { if (listenSock instanceof SSLServerSocket) { log.debug("SSL Listener ", e); } else { log.warning("Listener", e); } } } listenThread = null; } private void stopListenThread() { stopWDog(); triggerWDogOnExit(false); goOn = false; IOUtil.safeClose(listenSock); this.interrupt(); } } /** SocketFactory interface allows test code to use instrumented or mock sockets and peer channels */ interface SocketFactory { ServerSocket newServerSocket(int port, int backlog) throws IOException; Socket newSocket(IPAddr addr, int port) throws IOException; BlockingPeerChannel newPeerChannel(BlockingStreamComm comm, Socket sock) throws IOException; BlockingPeerChannel newPeerChannel(BlockingStreamComm comm, PeerIdentity peer) throws IOException; } /** Normal socket factory creates real TCP Sockets */ class NormalSocketFactory implements SocketFactory { public ServerSocket newServerSocket(int port, int backlog) throws IOException { return new ServerSocket(port, backlog); } public Socket newSocket(IPAddr addr, int port) throws IOException { return new Socket(addr.getInetAddr(), port); } public BlockingPeerChannel newPeerChannel(BlockingStreamComm 
comm, Socket sock) throws IOException { return new BlockingPeerChannel(comm, sock); } public BlockingPeerChannel newPeerChannel(BlockingStreamComm comm, PeerIdentity peer) throws IOException { return new BlockingPeerChannel(comm, peer); } } /** SSL socket factory */ class SslSocketFactory implements SocketFactory { public ServerSocket newServerSocket(int port, int backlog) throws IOException { if (sslServerSocketFactory == null) { throw new IOException("no SSL server socket factory"); } SSLServerSocket s = (SSLServerSocket) sslServerSocketFactory.createServerSocket(port, backlog); s.setNeedClientAuth(paramSslClientAuth); log.debug("New SSL server socket: " + port + " backlog " + backlog + " clientAuth " + paramSslClientAuth); String cs[] = s.getEnabledCipherSuites(); for (int i = 0; i < cs.length; i++) { log.debug2(cs[i] + " enabled cipher suite"); } cs = s.getSupportedCipherSuites(); for (int i = 0; i < cs.length; i++) { log.debug2(cs[i] + " supported cipher suite"); } cs = s.getEnabledProtocols(); for (int i = 0; i < cs.length; i++) { log.debug2(cs[i] + " enabled protocol"); } cs = s.getSupportedProtocols(); for (int i = 0; i < cs.length; i++) { log.debug2(cs[i] + " supported protocol"); } log.debug2("enable session creation " + s.getEnableSessionCreation()); return s; } public Socket newSocket(IPAddr addr, int port) throws IOException { if (sslSocketFactory == null) { throw new IOException("no SSL client socket factory"); } SSLSocket s = (SSLSocket) sslSocketFactory.createSocket(addr.getInetAddr(), port); log.debug2("New SSL client socket: " + port + "@" + addr.toString()); if (paramSslClientAuth) { handShake(s); } return s; } public BlockingPeerChannel newPeerChannel(BlockingStreamComm comm, Socket sock) throws IOException { return new BlockingPeerChannel(comm, sock); } public BlockingPeerChannel newPeerChannel(BlockingStreamComm comm, PeerIdentity peer) throws IOException { return new BlockingPeerChannel(comm, peer); } } private static final List 
statusColDescs = ListUtil.list( new ColumnDescriptor("Peer", "Peer", ColumnDescriptor.TYPE_STRING) ); private class Status implements StatusAccessor, StatusAccessor.DebugOnly { // port, proto, u/m, direction, compressed, pkts, bytes long start; public String getDisplayName() { return "Comm Statistics"; } public boolean requiresKey() { return false; } public void populateTable(StatusTable table) { // table.setResortable(false); // table.setDefaultSortRules(statusSortRules); String key = table.getKey(); table.setColumnDescriptors(statusColDescs); table.setRows(getRows(key)); table.setSummaryInfo(getSummaryInfo(key)); } private List getSummaryInfo(String key) { List res = new ArrayList(); res.add(new StatusTable.SummaryInfo("Max channels", ColumnDescriptor.TYPE_INT, channels.getMaxSize())); res.add(new StatusTable.SummaryInfo("Max rcvChannels", ColumnDescriptor.TYPE_INT, rcvChannels.getMaxSize())); return res; } private List getRows(String key) { List table = new ArrayList(); synchronized (channels) { for (Iterator iter = channels.entrySet().iterator(); iter.hasNext();) { Map.Entry ent = (Map.Entry)iter.next(); PeerIdentity pid = (PeerIdentity)ent.getKey(); BlockingPeerChannel chan = (BlockingPeerChannel)ent.getValue(); table.add(makeRow(pid, chan)); } } return table; } private Map makeRow(PeerIdentity pid, BlockingPeerChannel chan) { Map row = new HashMap(); row.put("Peer", pid.getIdString()); return row; } } private static final List chanStatusColDescs = ListUtil.list( new ColumnDescriptor("Peer", "Peer", ColumnDescriptor.TYPE_STRING), new ColumnDescriptor("State", "State", ColumnDescriptor.TYPE_STRING), new ColumnDescriptor("Flags", "Flags", ColumnDescriptor.TYPE_STRING), new ColumnDescriptor("SendQ", "SendQ", ColumnDescriptor.TYPE_INT), new ColumnDescriptor("Sent", "Msgs Sent", ColumnDescriptor.TYPE_INT), new ColumnDescriptor("Rcvd", "Msgs Rcvd", ColumnDescriptor.TYPE_INT), new ColumnDescriptor("SentBytes", "Bytes Sent", ColumnDescriptor.TYPE_INT), new 
ColumnDescriptor("RcvdBytes", "Bytes Rcvd", ColumnDescriptor.TYPE_INT), new ColumnDescriptor("LastSend", "LastSend", ColumnDescriptor.TYPE_STRING), new ColumnDescriptor("LastRcv", "LastRcv", ColumnDescriptor.TYPE_STRING), new ColumnDescriptor("PrevState", "PrevState", ColumnDescriptor.TYPE_STRING), new ColumnDescriptor("PrevStateChange", "Change", ColumnDescriptor.TYPE_STRING) ); private class ChannelStatus implements StatusAccessor { long start; public String getDisplayName() { return "Comm Channels"; } public boolean requiresKey() { return false; } public void populateTable(StatusTable table) { // table.setResortable(false); // table.setDefaultSortRules(statusSortRules); String key = table.getKey(); ChannelStats cumulative = new ChannelStats(); table.setColumnDescriptors(chanStatusColDescs); table.setRows(getRows(key, cumulative)); cumulative.add(globalStats); table.setSummaryInfo(getSummaryInfo(key, cumulative)); } private List getSummaryInfo(String key, ChannelStats stats) { List res = new ArrayList(); res.add(new StatusTable.SummaryInfo("Channels", ColumnDescriptor.TYPE_STRING, channels.size() + "/" + paramMaxChannels + ", " + channels.getMaxSize() + " max")); res.add(new StatusTable.SummaryInfo("RcvChannels", ColumnDescriptor.TYPE_STRING, rcvChannels.size() + ", " + rcvChannels.getMaxSize() +" max")); res.add(new StatusTable.SummaryInfo("Draining", ColumnDescriptor.TYPE_STRING, drainingChannels.size() + ", " + maxDrainingChannels + " max")); ChannelStats.Count count = stats.getInCount(); res.add(new StatusTable.SummaryInfo("Msgs Sent", ColumnDescriptor.TYPE_INT, count.getMsgs())); res.add(new StatusTable.SummaryInfo("Bytes Sent", ColumnDescriptor.TYPE_INT, count.getBytes())); count = stats.getOutCount(); res.add(new StatusTable.SummaryInfo("Msgs Rcvd", ColumnDescriptor.TYPE_INT, count.getMsgs())); res.add(new StatusTable.SummaryInfo("Bytes Rcvd", ColumnDescriptor.TYPE_INT, count.getBytes())); return res; } private List getRows(String key, ChannelStats 
cumulative) { List table = new ArrayList(); synchronized (channels) { for (Iterator iter = channels.entrySet().iterator(); iter.hasNext();) { Map.Entry ent = (Map.Entry)iter.next(); PeerIdentity pid = (PeerIdentity)ent.getKey(); BlockingPeerChannel chan = (BlockingPeerChannel)ent.getValue(); table.add(makeRow(pid, chan, "", cumulative)); } for (Iterator iter = rcvChannels.entrySet().iterator(); iter.hasNext();) { Map.Entry ent = (Map.Entry)iter.next(); PeerIdentity pid = (PeerIdentity)ent.getKey(); BlockingPeerChannel chan = (BlockingPeerChannel)ent.getValue(); table.add(makeRow(pid, chan, "2", cumulative)); } for (BlockingPeerChannel chan : drainingChannels) { table.add(makeRow(chan.getPeer(), chan, "D", cumulative)); } } return table; } private Map makeRow(PeerIdentity pid, BlockingPeerChannel chan, String flags, ChannelStats cumulative) { Map row = new HashMap(); row.put("Peer", pid.getIdString()); row.put("State", chan.getState()); row.put("SendQ", chan.getSendQueueSize()); ChannelStats stats = chan.getStats(); cumulative.add(stats); ChannelStats.Count count = stats.getInCount(); row.put("Sent", count.getMsgs()); row.put("SentBytes", count.getBytes()); count = stats.getOutCount(); row.put("Rcvd", count.getMsgs()); row.put("RcvdBytes", count.getBytes()); StringBuilder sb = new StringBuilder(flags); if (chan.isOriginate()) sb.append("O"); if (chan.hasConnecter()) sb.append("C"); if (chan.hasReader()) sb.append("R"); if (chan.hasWriter()) sb.append("W"); row.put("Flags", sb.toString()); row.put("LastSend", lastTime(chan.getLastSendTime())); row.put("LastRcv", lastTime(chan.getLastRcvTime())); if (chan.getPrevState() != BlockingPeerChannel.ChannelState.NONE) { row.put("PrevState", chan.getPrevState()); row.put("PrevStateChange", lastTime(chan.getLastStateChange())); } return row; } String lastTime(long time) { if (time <= 0) return ""; return StringUtil.timeIntervalToString(TimeBase.msSince(time)); } } }
package com.amplitude.api; import android.app.Application; import android.content.Context; import android.content.SharedPreferences; import android.database.sqlite.SQLiteDatabase; import android.location.Location; import android.os.Build; import android.util.Pair; import com.amplitude.security.MD5; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.security.MessageDigest; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.UUID; import java.util.concurrent.atomic.AtomicBoolean; import okhttp3.FormBody; import okhttp3.OkHttpClient; import okhttp3.Request; import okhttp3.Response; /** * <h1>AmplitudeClient</h1> * This is the SDK instance class that contains all of the SDK functionality.<br><br> * <b>Note:</b> call the methods on the default shared instance in the Amplitude class, * for example: {@code Amplitude.getInstance().logEvent();}<br><br> * Many of the SDK functions return the SDK instance back, allowing you to chain multiple method * calls together, for example: {@code Amplitude.getInstance().initialize(this, "APIKEY").enableForegroundTracking(getApplication())} */ public class AmplitudeClient { /** * The class identifier tag used in logging. TAG = {@code "com.amplitude.api.AmplitudeClient";} */ private static final String TAG = AmplitudeClient.class.getName(); /** * The event type for start session events. */ public static final String START_SESSION_EVENT = "session_start"; /** * The event type for end session events. */ public static final String END_SESSION_EVENT = "session_end"; /** * The pref/database key for the device ID value. */ public static final String DEVICE_ID_KEY = "device_id"; /** * The pref/database key for the user ID value. */ public static final String USER_ID_KEY = "user_id"; /** * The pref/database key for the opt out flag. 
*/ public static final String OPT_OUT_KEY = "opt_out"; /** * The pref/database key for the sequence number. */ public static final String SEQUENCE_NUMBER_KEY = "sequence_number"; /** * The pref/database key for the last event time. */ public static final String LAST_EVENT_TIME_KEY = "last_event_time"; /** * The pref/database key for the last event ID value. */ public static final String LAST_EVENT_ID_KEY = "last_event_id"; /** * The pref/database key for the last identify ID value. */ public static final String LAST_IDENTIFY_ID_KEY = "last_identify_id"; /** * The pref/database key for the previous session ID value. */ public static final String PREVIOUS_SESSION_ID_KEY = "previous_session_id"; private static final AmplitudeLog logger = AmplitudeLog.getLogger(); /** * The Android App Context. */ protected Context context; /** * The shared OkHTTPClient instance. */ protected OkHttpClient httpClient; /** * The shared Amplitude database helper instance. */ protected DatabaseHelper dbHelper; /** * The Amplitude App API key. */ protected String apiKey; /** * The name for this instance of AmplitudeClient. */ protected String instanceName; /** * The user's ID value. */ protected String userId; /** * The user's Device ID value. */ protected String deviceId; private boolean newDeviceIdPerInstall = false; private boolean useAdvertisingIdForDeviceId = false; protected boolean initialized = false; private boolean optOut = false; private boolean offline = false; TrackingOptions inputTrackingOptions = new TrackingOptions(); TrackingOptions appliedTrackingOptions = TrackingOptions.copyOf(inputTrackingOptions); JSONObject apiPropertiesTrackingOptions = appliedTrackingOptions.getApiPropertiesTrackingOptions(); private boolean coppaControlEnabled = false; /** * The device's Platform value. 
*/ protected String platform; /** * Event metadata */ long sessionId = -1; long sequenceNumber = 0; long lastEventId = -1; long lastIdentifyId = -1; long lastEventTime = -1; long previousSessionId = -1; private DeviceInfo deviceInfo; /** * The current session ID value. */ private int eventUploadThreshold = Constants.EVENT_UPLOAD_THRESHOLD; private int eventUploadMaxBatchSize = Constants.EVENT_UPLOAD_MAX_BATCH_SIZE; private int eventMaxCount = Constants.EVENT_MAX_COUNT; private long eventUploadPeriodMillis = Constants.EVENT_UPLOAD_PERIOD_MILLIS; private long minTimeBetweenSessionsMillis = Constants.MIN_TIME_BETWEEN_SESSIONS_MILLIS; private long sessionTimeoutMillis = Constants.SESSION_TIMEOUT_MILLIS; private boolean backoffUpload = false; private int backoffUploadBatchSize = eventUploadMaxBatchSize; private boolean usingForegroundTracking = false; private boolean trackingSessionEvents = false; private boolean inForeground = false; private boolean flushEventsOnClose = true; private AtomicBoolean updateScheduled = new AtomicBoolean(false); /** * Whether or not the SDK is in the process of uploading events. */ AtomicBoolean uploadingCurrently = new AtomicBoolean(false); /** * The last SDK error - used for testing. */ Throwable lastError; /** * The url for Amplitude API endpoint */ String url = Constants.EVENT_LOG_URL; /** * The Bearer Token for authentication */ String bearerToken = null; /** * The background event logging worker thread instance. */ WorkerThread logThread = new WorkerThread("logThread"); /** * The background event uploading worker thread instance. */ WorkerThread httpThread = new WorkerThread("httpThread"); /** * Instantiates a new default instance AmplitudeClient and starts worker threads. */ public AmplitudeClient() { this(null); } /** * Instantiates a new AmplitudeClient with instance name and starts worker threads. 
* @param instance */ public AmplitudeClient(String instance) { this.instanceName = Utils.normalizeInstanceName(instance); logThread.start(); httpThread.start(); } /** * Initialize the Amplitude SDK with the Android application context and your Amplitude * App API key. <b>Note:</b> initialization is required before you log events and modify * user properties. * * @param context the Android application context * @param apiKey your Amplitude App API key * @return the AmplitudeClient */ public AmplitudeClient initialize(Context context, String apiKey) { return initialize(context, apiKey, null); } /** * Initialize the Amplitude SDK with the Android application context, your Amplitude App API * key, and a user ID for the current user. <b>Note:</b> initialization is required before * you log events and modify user properties. * * @param context the Android application context * @param apiKey your Amplitude App API key * @param userId the user id to set * @return the AmplitudeClient */ public AmplitudeClient initialize(Context context, String apiKey, String userId) { return initialize(context, apiKey, userId, null, false); } /** * Initialize the Amplitude SDK with the Android application context, your Amplitude App API * key, a user ID for the current user, and a custom platform value. * <b>Note:</b> initialization is required before you log events and modify user properties. 
* * @param context the Android application context * @param apiKey your Amplitude App API key * @param userId the user id to set * @param * @return the AmplitudeClient */ public synchronized AmplitudeClient initialize(final Context context, final String apiKey, final String userId, final String platform, final boolean enableDiagnosticLogging) { if (context == null) { logger.e(TAG, "Argument context cannot be null in initialize()"); return this; } if (Utils.isEmptyString(apiKey)) { logger.e(TAG, "Argument apiKey cannot be null or blank in initialize()"); return this; } this.context = context.getApplicationContext(); this.apiKey = apiKey; this.dbHelper = DatabaseHelper.getDatabaseHelper(this.context, this.instanceName); this.platform = Utils.isEmptyString(platform) ? Constants.PLATFORM : platform; final AmplitudeClient client = this; runOnLogThread(new Runnable() { @Override public void run() { if (!initialized) { // this try block is idempotent, so it's safe to retry initialize if failed try { if (instanceName.equals(Constants.DEFAULT_INSTANCE)) { AmplitudeClient.upgradePrefs(context); AmplitudeClient.upgradeSharedPrefsToDB(context); } httpClient = new OkHttpClient(); deviceInfo = new DeviceInfo(context); deviceId = initializeDeviceId(); if (enableDiagnosticLogging) { Diagnostics.getLogger().enableLogging(httpClient, apiKey, deviceId); } deviceInfo.prefetch(); if (userId != null) { client.userId = userId; dbHelper.insertOrReplaceKeyValue(USER_ID_KEY, userId); } else { client.userId = dbHelper.getValue(USER_ID_KEY); } final Long optOutLong = dbHelper.getLongValue(OPT_OUT_KEY); optOut = optOutLong != null && optOutLong == 1; // try to restore previous session id previousSessionId = getLongvalue(PREVIOUS_SESSION_ID_KEY, -1); if (previousSessionId >= 0) { sessionId = previousSessionId; } // reload event meta data sequenceNumber = getLongvalue(SEQUENCE_NUMBER_KEY, 0); lastEventId = getLongvalue(LAST_EVENT_ID_KEY, -1); lastIdentifyId = getLongvalue(LAST_IDENTIFY_ID_KEY, 
-1); lastEventTime = getLongvalue(LAST_EVENT_TIME_KEY, -1); // install database reset listener to re-insert metadata in memory dbHelper.setDatabaseResetListener(new DatabaseResetListener() { @Override public void onDatabaseReset(SQLiteDatabase db) { dbHelper.insertOrReplaceKeyValueToTable(db, DatabaseHelper.STORE_TABLE_NAME, DEVICE_ID_KEY, client.deviceId); dbHelper.insertOrReplaceKeyValueToTable(db, DatabaseHelper.STORE_TABLE_NAME, USER_ID_KEY, client.userId); dbHelper.insertOrReplaceKeyValueToTable(db, DatabaseHelper.LONG_STORE_TABLE_NAME, OPT_OUT_KEY, client.optOut ? 1L : 0L); dbHelper.insertOrReplaceKeyValueToTable(db, DatabaseHelper.LONG_STORE_TABLE_NAME, PREVIOUS_SESSION_ID_KEY, client.sessionId); dbHelper.insertOrReplaceKeyValueToTable(db, DatabaseHelper.LONG_STORE_TABLE_NAME, LAST_EVENT_TIME_KEY, client.lastEventTime); } }); initialized = true; } catch (CursorWindowAllocationException e) { // treat as uninitialized SDK logger.e(TAG, String.format( "Failed to initialize Amplitude SDK due to: %s", e.getMessage() )); Diagnostics.getLogger().logError("Failed to initialize Amplitude SDK", e); client.apiKey = null; } } } }); return this; } public AmplitudeClient enableForegroundTracking(Application app) { if (usingForegroundTracking || !contextAndApiKeySet("enableForegroundTracking()")) { return this; } if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) { app.registerActivityLifecycleCallbacks(new AmplitudeCallbacks(this)); } return this; } public AmplitudeClient enableDiagnosticLogging() { if (!contextAndApiKeySet("enableDiagnosticLogging")) { return this; } Diagnostics.getLogger().enableLogging(httpClient, apiKey, deviceId); return this; } public AmplitudeClient disableDiagnosticLogging() { Diagnostics.getLogger().disableLogging(); return this; } public AmplitudeClient setDiagnosticEventMaxCount(int eventMaxCount) { Diagnostics.getLogger().setDiagnosticEventMaxCount(eventMaxCount); return this; } /** * Whether to set a new device ID per 
install. If true, then the SDK will always generate a new * device ID on app install (as opposed to re-using an existing value like ADID). * * @param newDeviceIdPerInstall whether to set a new device ID on app install. * @return the AmplitudeClient * @deprecated */ public AmplitudeClient enableNewDeviceIdPerInstall(boolean newDeviceIdPerInstall) { this.newDeviceIdPerInstall = newDeviceIdPerInstall; return this; } /** * Whether to use the Android advertising ID (ADID) as the user's device ID. * * @return the AmplitudeClient */ public AmplitudeClient useAdvertisingIdForDeviceId() { this.useAdvertisingIdForDeviceId = true; return this; } /** * Enable location listening in the SDK. This will add the user's current lat/lon coordinates * to every event logged. * * @return the AmplitudeClient */ public AmplitudeClient enableLocationListening() { runOnLogThread(new Runnable() { @Override public void run() { if (deviceInfo == null) { throw new IllegalStateException( "Must initialize before acting on location listening."); } deviceInfo.setLocationListening(true); } }); return this; } /** * Disable location listening in the SDK. This will stop the sending of the user's current * lat/lon coordinates. * * @return the AmplitudeClient */ public AmplitudeClient disableLocationListening() { runOnLogThread(new Runnable() { @Override public void run() { if (deviceInfo == null) { throw new IllegalStateException( "Must initialize before acting on location listening."); } deviceInfo.setLocationListening(false); } }); return this; } /** * Sets event upload threshold. The SDK will attempt to batch upload unsent events * every eventUploadPeriodMillis milliseconds, or if the unsent event count exceeds the * event upload threshold. 
* * @param eventUploadThreshold the event upload threshold * @return the AmplitudeClient */ public AmplitudeClient setEventUploadThreshold(int eventUploadThreshold) { this.eventUploadThreshold = eventUploadThreshold; return this; } /** * Sets event upload max batch size. This controls the maximum number of events sent with * each upload request. * * @param eventUploadMaxBatchSize the event upload max batch size * @return the AmplitudeClient */ public AmplitudeClient setEventUploadMaxBatchSize(int eventUploadMaxBatchSize) { this.eventUploadMaxBatchSize = eventUploadMaxBatchSize; this.backoffUploadBatchSize = eventUploadMaxBatchSize; return this; } /** * Sets event max count. This is the maximum number of unsent events to keep on the device * (for example if the device does not have internet connectivity and cannot upload events). * If the number of unsent events exceeds the max count, then the SDK begins dropping events, * starting from the earliest logged. * * @param eventMaxCount the event max count * @return the AmplitudeClient */ public AmplitudeClient setEventMaxCount(int eventMaxCount) { this.eventMaxCount = eventMaxCount; return this; } /** * Sets event upload period millis. The SDK will attempt to batch upload unsent events * every eventUploadPeriodMillis milliseconds, or if the unsent event count exceeds the * event upload threshold. * * @param eventUploadPeriodMillis the event upload period millis * @return the AmplitudeClient */ public AmplitudeClient setEventUploadPeriodMillis(int eventUploadPeriodMillis) { this.eventUploadPeriodMillis = eventUploadPeriodMillis; return this; } /** * Sets min time between sessions millis. 
* * @param minTimeBetweenSessionsMillis the min time between sessions millis * @return the min time between sessions millis */ public AmplitudeClient setMinTimeBetweenSessionsMillis(long minTimeBetweenSessionsMillis) { this.minTimeBetweenSessionsMillis = minTimeBetweenSessionsMillis; return this; } /** * Sets a custom server url for event upload. * @param serverUrl - a string url for event upload. * @return the AmplitudeClient */ public AmplitudeClient setServerUrl(String serverUrl) { if (!Utils.isEmptyString(serverUrl)) { url = serverUrl; } return this; } /** * Set Bearer Token to be included in request header. * @param token * @return the AmplitudeClient */ public AmplitudeClient setBearerToken(String token) { this.bearerToken = token; return this; } /** * Sets session timeout millis. If foreground tracking has not been enabled with * @{code enableForegroundTracking()}, then new sessions will be started after * sessionTimeoutMillis milliseconds have passed since the last event logged. * * @param sessionTimeoutMillis the session timeout millis * @return the AmplitudeClient */ public AmplitudeClient setSessionTimeoutMillis(long sessionTimeoutMillis) { this.sessionTimeoutMillis = sessionTimeoutMillis; return this; } public AmplitudeClient setTrackingOptions(TrackingOptions trackingOptions) { inputTrackingOptions = trackingOptions; appliedTrackingOptions = TrackingOptions.copyOf(inputTrackingOptions); if (coppaControlEnabled) { appliedTrackingOptions.mergeIn(TrackingOptions.forCoppaControl()); } apiPropertiesTrackingOptions = appliedTrackingOptions.getApiPropertiesTrackingOptions(); return this; } /** * Enable COPPA (Children's Online Privacy Protection Act) restrictions on ADID, city, IP address and location tracking. * This can be used by any customer that does not want to collect ADID, city, IP address and location tracking. 
 */
public AmplitudeClient enableCoppaControl() {
    coppaControlEnabled = true;
    appliedTrackingOptions.mergeIn(TrackingOptions.forCoppaControl());
    apiPropertiesTrackingOptions = appliedTrackingOptions.getApiPropertiesTrackingOptions();
    return this;
}

/**
 * Disable COPPA (Children's Online Privacy Protection Act) restrictions on ADID, city,
 * IP address and location tracking.
 *
 * @return the AmplitudeClient
 */
public AmplitudeClient disableCoppaControl() {
    coppaControlEnabled = false;
    // restore the original caller-supplied tracking options
    appliedTrackingOptions = TrackingOptions.copyOf(inputTrackingOptions);
    apiPropertiesTrackingOptions = appliedTrackingOptions.getApiPropertiesTrackingOptions();
    return this;
}

/**
 * Sets opt out. If true then the SDK does not track any events for the user.
 *
 * @param optOut whether or not to opt the user out of tracking
 * @return the AmplitudeClient
 */
public AmplitudeClient setOptOut(final boolean optOut) {
    if (!contextAndApiKeySet("setOptOut()")) {
        return this;
    }

    final AmplitudeClient client = this;
    runOnLogThread(new Runnable() {
        @Override
        public void run() {
            if (Utils.isEmptyString(apiKey)) { // in case initialization failed
                return;
            }
            client.optOut = optOut;
            // persist so the opt-out choice survives process restarts
            dbHelper.insertOrReplaceKeyLongValue(OPT_OUT_KEY, optOut ? 1L : 0L);
        }
    });
    return this;
}

/**
 * Returns whether or not the user is opted out of tracking.
 *
 * @return the optOut flag value
 */
public boolean isOptedOut() {
    return optOut;
}

/**
 * Enable/disable message logging by the SDK.
 *
 * @param enableLogging whether to enable message logging by the SDK.
 * @return the AmplitudeClient
 */
public AmplitudeClient enableLogging(boolean enableLogging) {
    logger.setEnableLogging(enableLogging);
    return this;
}

/**
 * Sets the logging level. Logging messages will only appear if they are the same severity
 * level or higher than the set log level.
 *
 * @param logLevel the log level
 * @return the AmplitudeClient
 */
public AmplitudeClient setLogLevel(int logLevel) {
    logger.setLogLevel(logLevel);
    return this;
}

/**
 * Sets offline.
 * If offline is true, then the SDK will not upload events to Amplitude servers;
 * however, it will still log events.
 *
 * @param offline whether or not the SDK should be offline
 * @return the AmplitudeClient
 */
public AmplitudeClient setOffline(boolean offline) {
    this.offline = offline;

    // Try to update to the server once offline mode is disabled.
    if (!offline) {
        uploadEvents();
    }

    return this;
}

/**
 * Enable/disable flushing of unsent events on app close (enabled by default).
 *
 * @param flushEventsOnClose whether to flush unsent events on app close
 * @return the AmplitudeClient
 */
public AmplitudeClient setFlushEventsOnClose(boolean flushEventsOnClose) {
    this.flushEventsOnClose = flushEventsOnClose;
    return this;
}

/**
 * Track session events amplitude client. If enabled then the SDK will automatically send
 * start and end session events to mark the start and end of the user's sessions.
 *
 * @param trackingSessionEvents whether to enable tracking of session events
 * @return the AmplitudeClient
 */
public AmplitudeClient trackSessionEvents(boolean trackingSessionEvents) {
    this.trackingSessionEvents = trackingSessionEvents;
    return this;
}

/**
 * Set foreground tracking to true.
 */
void useForegroundTracking() {
    usingForegroundTracking = true;
}

/**
 * Whether foreground tracking is enabled.
 *
 * @return whether foreground tracking is enabled
 */
boolean isUsingForegroundTracking() {
    return usingForegroundTracking;
}

/**
 * Whether app is in the foreground.
 *
 * @return whether app is in the foreground
 */
boolean isInForeground() {
    return inForeground;
}

/**
 * Log an event with the specified event type.
 * <b>Note:</b> this is asynchronous and happens on a background thread.
 *
 * @param eventType the event type
 */
public void logEvent(String eventType) {
    logEvent(eventType, null);
}

/**
 * Log an event with the specified event type and event properties.
 * <b>Note:</b> this is asynchronous and happens on a background thread.
 * @param eventType the event type
 * @param eventProperties the event properties
 */
public void logEvent(String eventType, JSONObject eventProperties) {
    logEvent(eventType, eventProperties, false);
}

/**
 * Log an event with the specified event type, event properties, with optional out of session
 * flag. If out of session is true, then the sessionId will be -1 for the event, indicating
 * that it is not part of the current session. Note: this might be useful when logging events
 * for notifications received.
 * <b>Note:</b> this is asynchronous and happens on a background thread.
 *
 * @param eventType the event type
 * @param eventProperties the event properties
 * @param outOfSession the out of session flag
 */
public void logEvent(String eventType, JSONObject eventProperties, boolean outOfSession) {
    logEvent(eventType, eventProperties, null, outOfSession);
}

/**
 * Log an event with the specified event type, event properties, and groups. Use this to set
 * event-level groups, meaning the group(s) set only apply for this specific event and does
 * not persist on the user.
 * <b>Note:</b> this is asynchronous and happens on a background thread.
 *
 * @param eventType the event type
 * @param eventProperties the event properties
 * @param groups the groups
 */
public void logEvent(String eventType, JSONObject eventProperties, JSONObject groups) {
    logEvent(eventType, eventProperties, groups, false);
}

/**
 * Log event with the specified event type, event properties, groups, with optional out of
 * session flag. If out of session is true, then the sessionId will be -1 for the event,
 * indicating that it is not part of the current session. Note: this might be useful when
 * logging events for notifications received.
 * <b>Note:</b> this is asynchronous and happens on a background thread.
 * @param eventType the event type
 * @param eventProperties the event properties
 * @param groups the groups
 * @param outOfSession the out of session flag
 */
public void logEvent(String eventType, JSONObject eventProperties, JSONObject groups, boolean outOfSession) {
    logEvent(eventType, eventProperties, groups, getCurrentTimeMillis(), outOfSession);
}

/**
 * Log event with the specified event type, event properties, groups, explicit timestamp,
 * and out of session flag. Validates the event type before dispatching to the log thread.
 *
 * @param eventType the event type
 * @param eventProperties the event properties
 * @param groups the groups
 * @param timestamp the timestamp in milliseconds since epoch
 * @param outOfSession the out of session flag
 */
public void logEvent(String eventType, JSONObject eventProperties, JSONObject groups,
        long timestamp, boolean outOfSession) {
    if (validateLogEvent(eventType)) {
        logEventAsync(
            eventType, eventProperties, null, null, groups, null, timestamp, outOfSession
        );
    }
}

/**
 * Log an event with the specified event type.
 * <b>Note:</b> this version is synchronous and blocks the main thread until done.
 *
 * @param eventType the event type
 */
public void logEventSync(String eventType) {
    logEventSync(eventType, null);
}

/**
 * Log an event with the specified event type and event properties.
 * <b>Note:</b> this version is synchronous and blocks the main thread until done.
 *
 * @param eventType the event type
 * @param eventProperties the event properties
 */
public void logEventSync(String eventType, JSONObject eventProperties) {
    logEventSync(eventType, eventProperties, false);
}

/**
 * Log an event with the specified event type, event properties, with optional out of session
 * flag. If out of session is true, then the sessionId will be -1 for the event, indicating
 * that it is not part of the current session. Note: this might be useful when logging events
 * for notifications received.
 * <b>Note:</b> this version is synchronous and blocks the main thread until done.
 *
 * @param eventType the event type
 * @param eventProperties the event properties
 * @param outOfSession the out of session flag
 */
public void logEventSync(String eventType, JSONObject eventProperties, boolean outOfSession) {
    logEventSync(eventType, eventProperties, null, outOfSession);
}

/**
 * Log an event with the specified event type, event properties, and groups. Use this to set
 * event-level groups, meaning the group(s) set only apply for this specific event and does
 * not persist on the user.
 * <b>Note:</b> this version is synchronous and blocks the main thread until done.
 * @param eventType the event type
 * @param eventProperties the event properties
 * @param groups the groups
 */
public void logEventSync(String eventType, JSONObject eventProperties, JSONObject groups) {
    logEventSync(eventType, eventProperties, groups, false);
}

/**
 * Log an event synchronously with event properties, groups, and out of session flag,
 * using the current time as the event timestamp.
 *
 * @param eventType the event type
 * @param eventProperties the event properties
 * @param groups the groups
 * @param outOfSession the out of session flag
 */
public void logEventSync(String eventType, JSONObject eventProperties, JSONObject groups,
        boolean outOfSession) {
    logEventSync(eventType, eventProperties, groups, getCurrentTimeMillis(), outOfSession);
}

/**
 * Log event with the specified event type, event properties, groups, timestamp, with optional
 * out of session flag. If out of session is true, then the sessionId will be -1 for the event,
 * indicating that it is not part of the current session. Note: this might be useful when
 * logging events for notifications received.
 * <b>Note:</b> this version is synchronous and blocks the main thread until done.
 *
 * @param eventType the event type
 * @param eventProperties the event properties
 * @param groups the groups
 * @param timestamp the timestamp in milliseconds since epoch
 * @param outOfSession the out of session flag
 */
public void logEventSync(String eventType, JSONObject eventProperties,
        JSONObject groups, long timestamp, boolean outOfSession) {
    if (validateLogEvent(eventType)) {
        logEvent(eventType, eventProperties, null, null, groups, null, timestamp, outOfSession);
    }
}

/**
 * Validate the event type being logged. Also verifies that the context and API key
 * have been set already with an initialize call.
 *
 * @param eventType the event type
 * @return true if the event type is valid
 */
protected boolean validateLogEvent(String eventType) {
    if (Utils.isEmptyString(eventType)) {
        logger.e(TAG, "Argument eventType cannot be null or blank in logEvent()");
        return false;
    }

    return contextAndApiKeySet("logEvent()");
}

/**
 * Log event async. Internal method to handle the asynchronous logging of events.
 * @param eventType the event type
 * @param eventProperties the event properties
 * @param apiProperties the api properties
 * @param userProperties the user properties
 * @param groups the groups
 * @param groupProperties the group properties
 * @param timestamp the timestamp
 * @param outOfSession the out of session flag
 */
protected void logEventAsync(final String eventType, JSONObject eventProperties,
        JSONObject apiProperties, JSONObject userProperties, JSONObject groups,
        JSONObject groupProperties, final long timestamp, final boolean outOfSession) {
    // Clone the incoming eventProperties object before sending over
    // to the log thread. Helps avoid ConcurrentModificationException
    // if the caller starts mutating the object they passed in.
    // Only does a shallow copy, so it's still possible, though unlikely,
    // to hit concurrent access if the caller mutates deep in the object.
    if (eventProperties != null) {
        eventProperties = Utils.cloneJSONObject(eventProperties);
    }
    if (apiProperties != null) {
        apiProperties = Utils.cloneJSONObject(apiProperties);
    }
    if (userProperties != null) {
        userProperties = Utils.cloneJSONObject(userProperties);
    }
    if (groups != null) {
        groups = Utils.cloneJSONObject(groups);
    }
    if (groupProperties != null) {
        groupProperties = Utils.cloneJSONObject(groupProperties);
    }

    // effectively-final copies so the anonymous Runnable below can capture them
    final JSONObject copyEventProperties = eventProperties;
    final JSONObject copyApiProperties = apiProperties;
    final JSONObject copyUserProperties = userProperties;
    final JSONObject copyGroups = groups;
    final JSONObject copyGroupProperties = groupProperties;
    runOnLogThread(new Runnable() {
        @Override
        public void run() {
            if (Utils.isEmptyString(apiKey)) { // in case initialization failed
                return;
            }
            logEvent(
                eventType, copyEventProperties, copyApiProperties,
                copyUserProperties, copyGroups, copyGroupProperties, timestamp, outOfSession
            );
        }
    });
}

/**
 * Log event. Internal method to handle the asynchronous logging of events on background
 * thread.
 * @param eventType the event type
 * @param eventProperties the event properties
 * @param apiProperties the api properties
 * @param userProperties the user properties
 * @param groups the groups
 * @param groupProperties the group properties
 * @param timestamp the timestamp
 * @param outOfSession the out of session flag
 * @return the event ID if succeeded, else -1.
 */
protected long logEvent(String eventType, JSONObject eventProperties, JSONObject apiProperties,
        JSONObject userProperties, JSONObject groups, JSONObject groupProperties,
        long timestamp, boolean outOfSession) {
    logger.d(TAG, "Logged event to Amplitude: " + eventType);

    if (optOut) {
        return -1;
    }

    // skip session check if logging start_session or end_session events
    boolean loggingSessionEvent = trackingSessionEvents &&
            (eventType.equals(START_SESSION_EVENT) || eventType.equals(END_SESSION_EVENT));

    if (!loggingSessionEvent && !outOfSession) {
        // default case + corner case when async logEvent between onPause and onResume
        if (!inForeground) {
            startNewSessionIfNeeded(timestamp);
        } else {
            refreshSessionTime(timestamp);
        }
    }

    long result = -1;
    JSONObject event = new JSONObject();
    try {
        event.put("event_type", replaceWithJSONNull(eventType));
        event.put("timestamp", timestamp);
        event.put("user_id", replaceWithJSONNull(userId));
        event.put("device_id", replaceWithJSONNull(deviceId));
        // out-of-session events carry a sentinel session id of -1
        event.put("session_id", outOfSession ? -1 : sessionId);
        event.put("uuid", UUID.randomUUID().toString());
        event.put("sequence_number", getNextSequenceNumber());

        // each device/user field is gated by the applied tracking options (COPPA-aware)
        if (appliedTrackingOptions.shouldTrackVersionName()) {
            event.put("version_name", replaceWithJSONNull(deviceInfo.getVersionName()));
        }
        if (appliedTrackingOptions.shouldTrackOsName()) {
            event.put("os_name", replaceWithJSONNull(deviceInfo.getOsName()));
        }
        if (appliedTrackingOptions.shouldTrackOsVersion()) {
            event.put("os_version", replaceWithJSONNull(deviceInfo.getOsVersion()));
        }
        if (appliedTrackingOptions.shouldTrackDeviceBrand()) {
            event.put("device_brand", replaceWithJSONNull(deviceInfo.getBrand()));
        }
        if (appliedTrackingOptions.shouldTrackDeviceManufacturer()) {
            event.put("device_manufacturer", replaceWithJSONNull(deviceInfo.getManufacturer()));
        }
        if (appliedTrackingOptions.shouldTrackDeviceModel()) {
            event.put("device_model", replaceWithJSONNull(deviceInfo.getModel()));
        }
        if (appliedTrackingOptions.shouldTrackCarrier()) {
            event.put("carrier", replaceWithJSONNull(deviceInfo.getCarrier()));
        }
        if (appliedTrackingOptions.shouldTrackCountry()) {
            event.put("country", replaceWithJSONNull(deviceInfo.getCountry()));
        }
        if (appliedTrackingOptions.shouldTrackLanguage()) {
            event.put("language", replaceWithJSONNull(deviceInfo.getLanguage()));
        }
        if (appliedTrackingOptions.shouldTrackPlatform()) {
            event.put("platform", platform);
        }

        JSONObject library = new JSONObject();
        library.put("name", Constants.LIBRARY);
        library.put("version", Constants.VERSION);
        event.put("library", library);

        apiProperties = (apiProperties == null) ? new JSONObject() : apiProperties;
        if (apiPropertiesTrackingOptions != null && apiPropertiesTrackingOptions.length() > 0) {
            apiProperties.put("tracking_options", apiPropertiesTrackingOptions);
        }
        if (appliedTrackingOptions.shouldTrackLatLng()) {
            Location location = deviceInfo.getMostRecentLocation();
            if (location != null) {
                JSONObject locationJSON = new JSONObject();
                locationJSON.put("lat", location.getLatitude());
                locationJSON.put("lng", location.getLongitude());
                apiProperties.put("location", locationJSON);
            }
        }
        if (appliedTrackingOptions.shouldTrackAdid() && deviceInfo.getAdvertisingId() != null) {
            apiProperties.put("androidADID", deviceInfo.getAdvertisingId());
        }
        apiProperties.put("limit_ad_tracking", deviceInfo.isLimitAdTrackingEnabled());
        apiProperties.put("gps_enabled", deviceInfo.isGooglePlayServicesEnabled());

        event.put("api_properties", apiProperties);
        // property payloads are truncated (string length / key count limits) before storage
        event.put("event_properties", (eventProperties == null) ? new JSONObject()
                : truncate(eventProperties));
        event.put("user_properties", (userProperties == null) ? new JSONObject()
                : truncate(userProperties));
        event.put("groups", (groups == null) ? new JSONObject() : truncate(groups));
        event.put("group_properties", (groupProperties == null) ? new JSONObject()
                : truncate(groupProperties));
        result = saveEvent(eventType, event);
    } catch (JSONException e) {
        logger.e(TAG, String.format(
            "JSON Serialization of event type %s failed, skipping: %s", eventType, e.toString()
        ));
        Diagnostics.getLogger().logError(
            String.format("Failed to JSON serialize event type %s", eventType), e
        );
    }

    return result;
}

/**
 * Save event long. Internal method to save an event to the database.
 * @param eventType the event type
 * @param event the event
 * @return the event ID if succeeded, else -1
 */
protected long saveEvent(String eventType, JSONObject event) {
    String eventString = event.toString();
    if (Utils.isEmptyString(eventString)) {
        logger.e(TAG, String.format(
            "Detected empty event string for event type %s, skipping", eventType
        ));
        return -1;
    }

    // identify-type events live in a separate table from regular events
    if (eventType.equals(Constants.IDENTIFY_EVENT) || eventType.equals(Constants.GROUP_IDENTIFY_EVENT)) {
        lastIdentifyId = dbHelper.addIdentify(eventString);
        setLastIdentifyId(lastIdentifyId);
    } else {
        lastEventId = dbHelper.addEvent(eventString);
        setLastEventId(lastEventId);
    }

    // when over the cap, drop the oldest events in batches (at least 1, at most the batch cap)
    int numEventsToRemove = Math.min(
        Math.max(1, eventMaxCount/10),
        Constants.EVENT_REMOVE_BATCH_SIZE
    );
    if (dbHelper.getEventCount() > eventMaxCount) {
        dbHelper.removeEvents(dbHelper.getNthEventId(numEventsToRemove));
    }
    if (dbHelper.getIdentifyCount() > eventMaxCount) {
        dbHelper.removeIdentifys(dbHelper.getNthIdentifyId(numEventsToRemove));
    }

    long totalEventCount = dbHelper.getTotalEventCount(); // counts may have changed, refetch
    // upload immediately on every threshold-multiple; otherwise schedule a deferred upload
    if ((totalEventCount % eventUploadThreshold) == 0 &&
            totalEventCount >= eventUploadThreshold) {
        updateServer();
    } else {
        updateServerLater(eventUploadPeriodMillis);
    }

    return (eventType.equals(Constants.IDENTIFY_EVENT) || eventType.equals(Constants.GROUP_IDENTIFY_EVENT))
            ? lastIdentifyId : lastEventId;
}

// fetches key from dbHelper longValueStore
// if key does not exist, return defaultValue instead
private long getLongvalue(String key, long defaultValue) {
    Long value = dbHelper.getLongValue(key);
    return value == null ? defaultValue : value;
}

/**
 * Internal method to increment and fetch the next event sequence number.
 * The new value is persisted so the sequence survives process restarts.
 *
 * @return the next sequence number
 */
long getNextSequenceNumber() {
    sequenceNumber++;
    dbHelper.insertOrReplaceKeyLongValue(SEQUENCE_NUMBER_KEY, sequenceNumber);
    return sequenceNumber;
}

/**
 * Internal method to set the last event time.
 * The value is persisted to the database so it survives process restarts.
 *
 * @param timestamp the timestamp
 */
void setLastEventTime(long timestamp) {
    lastEventTime = timestamp;
    dbHelper.insertOrReplaceKeyLongValue(LAST_EVENT_TIME_KEY, timestamp);
}

/**
 * Internal method to set the last event id. Persisted to the database.
 *
 * @param eventId the event id
 */
void setLastEventId(long eventId) {
    lastEventId = eventId;
    dbHelper.insertOrReplaceKeyLongValue(LAST_EVENT_ID_KEY, eventId);
}

/**
 * Internal method to set the last identify id. Persisted to the database.
 *
 * @param identifyId the identify id
 */
void setLastIdentifyId(long identifyId) {
    lastIdentifyId = identifyId;
    dbHelper.insertOrReplaceKeyLongValue(LAST_IDENTIFY_ID_KEY, identifyId);
}

/**
 * Gets the current session id.
 *
 * @return The current sessionId value.
 */
public long getSessionId() {
    return sessionId;
}

/**
 * Internal method to set the previous session id. Persisted to the database.
 *
 * @param timestamp the timestamp
 */
void setPreviousSessionId(long timestamp) {
    previousSessionId = timestamp;
    dbHelper.insertOrReplaceKeyLongValue(PREVIOUS_SESSION_ID_KEY, timestamp);
}

/**
 * Public method to start a new session if needed.
 * @param timestamp the timestamp
 * @return whether or not a new session was started
 */
public boolean startNewSessionIfNeeded(long timestamp) {
    if (inSession()) {
        if (isWithinMinTimeBetweenSessions(timestamp)) {
            // still inside the active session; just extend it
            refreshSessionTime(timestamp);
            return false;
        }
        startNewSession(timestamp);
        return true;
    }

    // no current session - check for previous session
    if (isWithinMinTimeBetweenSessions(timestamp)) {
        if (previousSessionId == -1) {
            startNewSession(timestamp);
            return true;
        }

        // extend previous session
        setSessionId(previousSessionId);
        refreshSessionTime(timestamp);
        return false;
    }

    startNewSession(timestamp);
    return true;
}

// Ends the current session (if session events are tracked) and begins a new one at timestamp.
private void startNewSession(long timestamp) {
    // end previous session
    if (trackingSessionEvents) {
        sendSessionEvent(END_SESSION_EVENT);
    }

    // start new session
    setSessionId(timestamp);
    refreshSessionTime(timestamp);
    if (trackingSessionEvents) {
        sendSessionEvent(START_SESSION_EVENT);
    }
}

// A session is active iff sessionId is non-negative (sessionId doubles as its start timestamp).
private boolean inSession() {
    return sessionId >= 0;
}

// With foreground tracking the min-time-between-sessions gap applies; otherwise the session timeout.
private boolean isWithinMinTimeBetweenSessions(long timestamp) {
    long sessionLimit = usingForegroundTracking ?
            minTimeBetweenSessionsMillis : sessionTimeoutMillis;
    return (timestamp - lastEventTime) < sessionLimit;
}

// Sets the current session id and persists it as the previous session id.
private void setSessionId(long timestamp) {
    sessionId = timestamp;
    setPreviousSessionId(timestamp);
}

/**
 * Internal method to refresh the current session time.
 * No-op when there is no active session.
 *
 * @param timestamp the timestamp
 */
void refreshSessionTime(long timestamp) {
    if (!inSession()) {
        return;
    }
    setLastEventTime(timestamp);
}

// Logs a start/end session marker event, stamped with the last event time.
private void sendSessionEvent(final String sessionEvent) {
    if (!contextAndApiKeySet(String.format("sendSessionEvent('%s')", sessionEvent))) {
        return;
    }

    if (!inSession()) {
        return;
    }

    JSONObject apiProperties = new JSONObject();
    try {
        apiProperties.put("special", sessionEvent);
    } catch (JSONException e) {
        Diagnostics.getLogger().logError(
            String.format("Failed to generate API Properties JSON for session event %s", sessionEvent), e
        );
        return;
    }

    logEvent(sessionEvent, null, apiProperties, null, null, null, lastEventTime, false);
}

/**
 * Internal method to handle on app exit foreground behavior.
 *
 * @param timestamp the timestamp
 */
void onExitForeground(final long timestamp) {
    runOnLogThread(new Runnable() {
        @Override
        public void run() {
            if (Utils.isEmptyString(apiKey)) {
                return;
            }
            refreshSessionTime(timestamp);
            inForeground = false;
            if (flushEventsOnClose) {
                updateServer();
            }

            // re-persist metadata into database for good measure
            dbHelper.insertOrReplaceKeyValue(DEVICE_ID_KEY, deviceId);
            dbHelper.insertOrReplaceKeyValue(USER_ID_KEY, userId);
            dbHelper.insertOrReplaceKeyLongValue(OPT_OUT_KEY, optOut ? 1L : 0L);
            dbHelper.insertOrReplaceKeyLongValue(PREVIOUS_SESSION_ID_KEY, sessionId);
            dbHelper.insertOrReplaceKeyLongValue(LAST_EVENT_TIME_KEY, lastEventTime);
        }
    });
}

/**
 * Internal method to handle on app enter foreground behavior.
 *
 * @param timestamp the timestamp
 */
void onEnterForeground(final long timestamp) {
    runOnLogThread(new Runnable() {
        @Override
        public void run() {
            if (Utils.isEmptyString(apiKey)) {
                return;
            }
            startNewSessionIfNeeded(timestamp);
            inForeground = true;
        }
    });
}

/**
 * Log a revenue amount as a revenue event with quantity 1 and no product id.
 *
 * @param amount the revenue amount in dollars
 */
public void logRevenue(double amount) {
    // Amount is in dollars
    // ex. $3.99 would be pass as logRevenue(3.99)
    logRevenue(null, 1, amount);
}

/**
 * Log a revenue event for the given product with quantity and unit price.
 *
 * @param productId the product id (may be null)
 * @param quantity the quantity purchased
 * @param price the unit price
 */
public void logRevenue(String productId, int quantity, double price) {
    logRevenue(productId, quantity, price, null, null);
}

/**
 * Log a revenue event with an optional purchase receipt and receipt signature.
 *
 * @param productId the product id (may be null)
 * @param quantity the quantity purchased
 * @param price the unit price
 * @param receipt the purchase receipt (may be null)
 * @param receiptSignature the receipt signature (may be null)
 */
public void logRevenue(String productId, int quantity, double price, String receipt,
        String receiptSignature) {
    if (!contextAndApiKeySet("logRevenue()")) {
        return;
    }

    // Log revenue in events
    JSONObject apiProperties = new JSONObject();
    try {
        apiProperties.put("special", Constants.AMP_REVENUE_EVENT);
        apiProperties.put("productId", productId);
        apiProperties.put("quantity", quantity);
        apiProperties.put("price", price);
        apiProperties.put("receipt", receipt);
        apiProperties.put("receiptSig", receiptSignature);
    } catch (JSONException e) {
        // best effort: the event is still logged even if some properties failed to serialize
        Diagnostics.getLogger().logError("Failed to generate API Properties JSON for revenue event", e);
    }

    logEventAsync(
        Constants.AMP_REVENUE_EVENT, null, apiProperties, null, null, null,
        getCurrentTimeMillis(), false
    );
}

/**
 * Log revenue v2. Create a {@link Revenue} object to hold your revenue data and properties,
 * and log it as a revenue event using this method.
 *
 * @param revenue a {@link Revenue} object
 */
public void logRevenueV2(Revenue revenue) {
    if (!contextAndApiKeySet("logRevenueV2()") || revenue == null || !revenue.isValidRevenue()) {
        return;
    }

    logEvent(Constants.AMP_REVENUE_EVENT, revenue.toJSONObject());
}

/**
 * Sets user properties. This is a convenience wrapper around the
 * {@link Identify} API to set multiple user properties with a single
 * command. <b>Note:</b> the replace parameter is deprecated and has no effect.
 *
 * @param userProperties the user properties
 * @param replace the replace - has no effect
 * @deprecated use {@link #setUserProperties(JSONObject)} instead
 */
public void setUserProperties(final JSONObject userProperties, final boolean replace) {
    setUserProperties(userProperties);
}

/**
 * Sets user properties. This is a convenience wrapper around the
 * {@link Identify} API to set multiple user properties with a single
 * command.
 *
 * @param userProperties the user properties
 */
public void setUserProperties(final JSONObject userProperties) {
    if (userProperties == null || userProperties.length() == 0 ||
            !contextAndApiKeySet("setUserProperties")) {
        return;
    }

    // sanitize and truncate properties before trying to convert to identify
    JSONObject sanitized = truncate(userProperties);
    if (sanitized.length() == 0) {
        return;
    }

    Identify identify = new Identify();
    Iterator<?> keys = sanitized.keys();
    while (keys.hasNext()) {
        String key = (String) keys.next();
        try {
            identify.setUserProperty(key, sanitized.get(key));
        } catch (JSONException e) {
            // skip the failing property but keep processing the rest
            logger.e(TAG, e.toString());
            Diagnostics.getLogger().logError(
                String.format("Failed to set user property %s", key), e
            );
        }
    }
    identify(identify);
}

/**
 * Clear user properties. This will clear all user properties at once. <b>Note: the
 * result is irreversible!</b>
 */
public void clearUserProperties() {
    Identify identify = new Identify().clearAll();
    identify(identify);
}

/**
 * Identify. Use this to send an {@link Identify} object containing
 * user property operations to Amplitude server.
 *
 * @param identify an {@link Identify} object
 */
public void identify(Identify identify) {
    identify(identify, false);
}

/**
 * Identify. Use this to send an {@link com.amplitude.api.Identify} object containing
 * user property operations to Amplitude server. If outOfSession is true, then the identify
 * event is sent with a session id of -1, and does not trigger any session-handling logic.
 *
 * @param identify an {@link Identify} object
 * @param outOfSession whether to log the identify event out of session
 */
public void identify(Identify identify, boolean outOfSession) {
    if (
        identify == null || identify.userPropertiesOperations.length() == 0 ||
        !contextAndApiKeySet("identify()")
    ) return;
    logEventAsync(
        Constants.IDENTIFY_EVENT, null, null, identify.userPropertiesOperations,
        null, null, getCurrentTimeMillis(), outOfSession
    );
}

/**
 * Sets the user's group(s).
 *
 * @param groupType the group type (ex: orgId)
 * @param groupName the group name (ex: 15)
 */
public void setGroup(String groupType, Object groupName) {
    if (!contextAndApiKeySet("setGroup()") || Utils.isEmptyString(groupType)) {
        return;
    }

    JSONObject group = null;
    try {
        group = new JSONObject().put(groupType, groupName);
    } catch (JSONException e) {
        logger.e(TAG, e.toString());
        Diagnostics.getLogger().logError(
            String.format("Failed to generate Group JSON for groupType: %s", groupType), e
        );
    }

    // note: group may still be null here; the identify is then logged without event-level groups
    Identify identify = new Identify().setUserProperty(groupType, groupName);
    logEventAsync(Constants.IDENTIFY_EVENT, null, null, identify.userPropertiesOperations,
            group, null, getCurrentTimeMillis(), false);
}

/**
 * Sets or updates properties of a group. Convenience overload that logs in-session.
 *
 * @param groupType the group type (ex: orgId)
 * @param groupName the group name (ex: 15)
 * @param groupIdentify an {@link Identify} object containing the group property operations
 */
public void groupIdentify(String groupType, Object groupName, Identify groupIdentify) {
    groupIdentify(groupType, groupName, groupIdentify, false);
}

/**
 * Sets or updates properties of a group, with optional out of session flag.
 *
 * @param groupType the group type (ex: orgId)
 * @param groupName the group name (ex: 15)
 * @param groupIdentify an {@link Identify} object containing the group property operations
 * @param outOfSession whether to log the group identify event out of session
 */
public void groupIdentify(String groupType, Object groupName, Identify groupIdentify,
        boolean outOfSession) {
    if (
        groupIdentify == null || groupIdentify.userPropertiesOperations.length() == 0 ||
        !contextAndApiKeySet("groupIdentify()") || Utils.isEmptyString(groupType)
    ) return;

    JSONObject group = null;
    try {
        group = new JSONObject().put(groupType, groupName);
    } catch (JSONException e) {
        logger.e(TAG, e.toString());
        Diagnostics.getLogger().logError(
            String.format("Failed to generate Group Identify JSON Object for groupType %s", groupType), e
        );
    }

    logEventAsync(
        Constants.GROUP_IDENTIFY_EVENT, null, null, null, group,
        groupIdentify.userPropertiesOperations, getCurrentTimeMillis(), outOfSession
    );
}

/**
 * Truncate values in a JSON object. Any string values longer than 1024 characters will be
 * truncated to 1024 characters.
 * Any dictionary with more than 1000 items will be ignored.
 *
 * @param object the object
 * @return the truncated JSON object
 */
public JSONObject truncate(JSONObject object) {
    if (object == null) {
        return new JSONObject();
    }

    if (object.length() > Constants.MAX_PROPERTY_KEYS) {
        logger.w(TAG, "Warning: too many properties (more than 1000), ignoring");
        return new JSONObject();
    }

    // mutates the object in place, recursing into nested objects and arrays
    Iterator<?> keys = object.keys();
    while (keys.hasNext()) {
        String key = (String) keys.next();
        try {
            Object value = object.get(key);
            // do not truncate revenue receipt and receipt sig fields
            if (key.equals(Constants.AMP_REVENUE_RECEIPT) ||
                    key.equals(Constants.AMP_REVENUE_RECEIPT_SIG)) {
                object.put(key, value);
            } else if (value.getClass().equals(String.class)) {
                object.put(key, truncate((String) value));
            } else if (value.getClass().equals(JSONObject.class)) {
                object.put(key, truncate((JSONObject) value));
            } else if (value.getClass().equals(JSONArray.class)) {
                object.put(key, truncate((JSONArray) value));
            }
        } catch (JSONException e) {
            logger.e(TAG, e.toString());
        }
    }

    return object;
}

/**
 * Truncate values in a JSON array. Any string values longer than 1024 characters will be
 * truncated to 1024 characters.
 *
 * @param array the array
 * @return the truncated JSON array
 * @throws JSONException the json exception
 */
public JSONArray truncate(JSONArray array) throws JSONException {
    if (array == null) {
        return new JSONArray();
    }

    for (int i = 0; i < array.length(); i++) {
        Object value = array.get(i);
        if (value.getClass().equals(String.class)) {
            array.put(i, truncate((String) value));
        } else if (value.getClass().equals(JSONObject.class)) {
            array.put(i, truncate((JSONObject) value));
        } else if (value.getClass().equals(JSONArray.class)) {
            array.put(i, truncate((JSONArray) value));
        }
    }
    return array;
}

/**
 * Truncate a string to 1024 characters.
 *
 * @param value the value; expected non-null (callers above check the type first)
 * @return the truncated string
 */
public static String truncate(String value) {
    return value.length() <= Constants.MAX_STRING_LENGTH ? value
            : value.substring(0, Constants.MAX_STRING_LENGTH);
}

/**
 * Gets the user's id. Can be null.
 *
 * @return The developer specified identifier for tracking within the analytics system.
 */
public String getUserId() {
    return userId;
}

/**
 * Sets the user id (can be null).
 *
 * @param userId the user id
 * @return the AmplitudeClient
 */
public AmplitudeClient setUserId(final String userId) {
    return setUserId(userId, false);
}

/**
 * Sets the user id (can be null).
 * If startNewSession is true, ends the session for the previous user and starts a new
 * session for the new user id.
 *
 * @param userId the user id
 * @param startNewSession whether to start a new session for the new user id
 * @return the AmplitudeClient
 */
public AmplitudeClient setUserId(final String userId, final boolean startNewSession) {
    if (!contextAndApiKeySet("setUserId()")) {
        return this;
    }

    final AmplitudeClient client = this;
    runOnLogThread(new Runnable() {
        @Override
        public void run() {
            if (Utils.isEmptyString(client.apiKey)) { // in case initialization failed
                return;
            }

            // end previous session
            if (startNewSession && trackingSessionEvents) {
                sendSessionEvent(END_SESSION_EVENT);
            }

            client.userId = userId;
            dbHelper.insertOrReplaceKeyValue(USER_ID_KEY, userId);

            // start new session
            if (startNewSession) {
                long timestamp = getCurrentTimeMillis();
                setSessionId(timestamp);
                refreshSessionTime(timestamp);
                if (trackingSessionEvents) {
                    sendSessionEvent(START_SESSION_EVENT);
                }
            }
        }
    });
    return this;
}

/**
 * Sets a custom device id.
 * <b>Note: only do this if you know what you are doing!</b>
 *
 * @param deviceId the device id
 * @return the AmplitudeClient
 */
public AmplitudeClient setDeviceId(final String deviceId) {
    Set<String> invalidDeviceIds = getInvalidDeviceIds();
    if (!contextAndApiKeySet("setDeviceId()") || Utils.isEmptyString(deviceId) ||
            invalidDeviceIds.contains(deviceId)) {
        return this;
    }

    final AmplitudeClient client = this;
    runOnLogThread(new Runnable() {
        @Override
        public void run() {
            if (Utils.isEmptyString(client.apiKey)) { // in case initialization failed
                return;
            }
            client.deviceId = deviceId;
            saveDeviceId(deviceId);
        }
    });
    return this;
}

/**
 * Regenerates a new random deviceId for current user. Note: this is not recommended unless you
 * know what you are doing. This can be used in conjunction with setUserId(null) to anonymize
 * users after they log out. With a null userId and a completely new deviceId, the current user
 * would appear as a brand new user in dashboard.
 *
 * @return the AmplitudeClient
 */
public AmplitudeClient regenerateDeviceId() {
    if (!contextAndApiKeySet("regenerateDeviceId()")) {
        return this;
    }

    final AmplitudeClient client = this;
    runOnLogThread(new Runnable() {
        @Override
        public void run() {
            if (Utils.isEmptyString(client.apiKey)) { // in case initialization failed
                return;
            }
            // "R" suffix marks the id as randomly generated rather than hardware-derived
            String randomId = DeviceInfo.generateUUID() + "R";
            setDeviceId(randomId);
        }
    });
    return this;
}

/**
 * Force SDK to upload any unsent events.
 */
public void uploadEvents() {
    if (!contextAndApiKeySet("uploadEvents()")) {
        return;
    }

    logThread.post(new Runnable() {
        @Override
        public void run() {
            if (Utils.isEmptyString(apiKey)) { // in case initialization failed
                return;
            }
            updateServer();
        }
    });
}

// Schedules one deferred upload; updateScheduled guards against stacking multiple timers.
private void updateServerLater(long delayMillis) {
    if (updateScheduled.getAndSet(true)) {
        return;
    }

    logThread.postDelayed(new Runnable() {
        @Override
        public void run() {
            updateScheduled.set(false);
            updateServer();
        }
    }, delayMillis);
}

/**
 * Internal method to upload unsent events.
*/ protected void updateServer() { updateServer(false); Diagnostics.getLogger().flushEvents(); } /** * Internal method to upload unsent events. Limit controls whether to use event upload max * batch size or backoff upload batch size. <b>Note: </b> always call this on logThread * * @param limit the limit */ protected void updateServer(boolean limit) { if (optOut || offline) { return; } // if returning out of this block, always be sure to set uploadingCurrently to false!! if (!uploadingCurrently.getAndSet(true)) { long totalEventCount = dbHelper.getTotalEventCount(); long batchSize = Math.min( limit ? backoffUploadBatchSize : eventUploadMaxBatchSize, totalEventCount ); if (batchSize <= 0) { uploadingCurrently.set(false); return; } try { List<JSONObject> events = dbHelper.getEvents(lastEventId, batchSize); List<JSONObject> identifys = dbHelper.getIdentifys(lastIdentifyId, batchSize); final Pair<Pair<Long, Long>, JSONArray> merged = mergeEventsAndIdentifys( events, identifys, batchSize); final JSONArray mergedEvents = merged.second; if (mergedEvents.length() == 0) { uploadingCurrently.set(false); return; } final long maxEventId = merged.first.first; final long maxIdentifyId = merged.first.second; final String mergedEventsString = merged.second.toString(); httpThread.post(new Runnable() { @Override public void run() { makeEventUploadPostRequest(httpClient, mergedEventsString, maxEventId, maxIdentifyId); } }); } catch (JSONException e) { uploadingCurrently.set(false); logger.e(TAG, e.toString()); Diagnostics.getLogger().logError("Failed to update server", e); // handle CursorWindowAllocationException when fetching events, defer upload } catch (CursorWindowAllocationException e) { uploadingCurrently.set(false); logger.e(TAG, String.format( "Caught Cursor window exception during event upload, deferring upload: %s", e.getMessage() )); Diagnostics.getLogger().logError("Failed to update server", e); } } } /** * Internal method to merge unsent events and identifies into a 
single array by sequence number. * * @param events the events * @param identifys the identifys * @param numEvents the num events * @return the merged array, max event id, and max identify id * @throws JSONException the json exception */ protected Pair<Pair<Long,Long>, JSONArray> mergeEventsAndIdentifys(List<JSONObject> events, List<JSONObject> identifys, long numEvents) throws JSONException { JSONArray merged = new JSONArray(); long maxEventId = -1; long maxIdentifyId = -1; while (merged.length() < numEvents) { boolean noEvents = events.isEmpty(); boolean noIdentifys = identifys.isEmpty(); // case 0: no events or identifys, nothing to grab // this case should never happen, as it means there are less identifys and events // than expected if (noEvents && noIdentifys) { logger.w(TAG, String.format( "mergeEventsAndIdentifys: number of events and identifys " + "less than expected by %d", numEvents - merged.length()) ); break; // case 1: no identifys, grab from events } else if (noIdentifys) { JSONObject event = events.remove(0); maxEventId = event.getLong("event_id"); merged.put(event); // case 2: no events, grab from identifys } else if (noEvents) { JSONObject identify = identifys.remove(0); maxIdentifyId = identify.getLong("event_id"); merged.put(identify); // case 3: need to compare sequence numbers } else { // events logged before v2.1.0 won't have a sequence number, put those first if (!events.get(0).has("sequence_number") || events.get(0).getLong("sequence_number") < identifys.get(0).getLong("sequence_number")) { JSONObject event = events.remove(0); maxEventId = event.getLong("event_id"); merged.put(event); } else { JSONObject identify = identifys.remove(0); maxIdentifyId = identify.getLong("event_id"); merged.put(identify); } } } return new Pair<Pair<Long, Long>, JSONArray>(new Pair<Long,Long>(maxEventId, maxIdentifyId), merged); } /** * Internal method to generate the event upload post request. 
* * @param client the client * @param events the events * @param maxEventId the max event id * @param maxIdentifyId the max identify id */ protected void makeEventUploadPostRequest(OkHttpClient client, String events, final long maxEventId, final long maxIdentifyId) { String apiVersionString = "" + Constants.API_VERSION; String timestampString = "" + getCurrentTimeMillis(); String checksumString = ""; try { String preimage = apiVersionString + apiKey + events + timestampString; // MessageDigest.getInstance(String) is not threadsafe on Android. // This implementation does not throw NoSuchAlgorithm exceptions. MessageDigest messageDigest = new MD5(); checksumString = bytesToHexString(messageDigest.digest(preimage.getBytes("UTF-8"))); } catch (UnsupportedEncodingException e) { // According to // this will never be thrown logger.e(TAG, e.toString()); Diagnostics.getLogger().logError("Failed to compute checksum for upload request", e); } FormBody body = new FormBody.Builder() .add("v", apiVersionString) .add("client", apiKey) .add("e", events) .add("upload_time", timestampString) .add("checksum", checksumString) .build(); Request request; try { Request.Builder builder = new Request.Builder() .url(url) .post(body); if (!Utils.isEmptyString(bearerToken)) { builder.addHeader("Authorization", "Bearer " + bearerToken); } request = builder.build(); } catch (IllegalArgumentException e) { logger.e(TAG, e.toString()); uploadingCurrently.set(false); Diagnostics.getLogger().logError("Failed to build upload request", e); return; } boolean uploadSuccess = false; try { Response response = client.newCall(request).execute(); String stringResponse = response.body().string(); if (stringResponse.equals("success")) { uploadSuccess = true; logThread.post(new Runnable() { @Override public void run() { if (maxEventId >= 0) dbHelper.removeEvents(maxEventId); if (maxIdentifyId >= 0) dbHelper.removeIdentifys(maxIdentifyId); uploadingCurrently.set(false); if (dbHelper.getTotalEventCount() > 
eventUploadThreshold) { logThread.post(new Runnable() { @Override public void run() { updateServer(backoffUpload); } }); } else { backoffUpload = false; backoffUploadBatchSize = eventUploadMaxBatchSize; } } }); } else if (stringResponse.equals("invalid_api_key")) { logger.e(TAG, "Invalid API key, make sure your API key is correct in initialize()"); } else if (stringResponse.equals("bad_checksum")) { logger.w(TAG, "Bad checksum, post request was mangled in transit, will attempt to reupload later"); } else if (stringResponse.equals("request_db_write_failed")) { logger.w(TAG, "Couldn't write to request database on server, will attempt to reupload later"); } else if (response.code() == 413) { // If blocked by one massive event, drop it if (backoffUpload && backoffUploadBatchSize == 1) { if (maxEventId >= 0) dbHelper.removeEvent(maxEventId); if (maxIdentifyId >= 0) dbHelper.removeIdentify(maxIdentifyId); // maybe we want to reset backoffUploadBatchSize after dropping massive event } // Server complained about length of request, backoff and try again backoffUpload = true; int numEvents = Math.min((int)dbHelper.getEventCount(), backoffUploadBatchSize); backoffUploadBatchSize = (int)Math.ceil(numEvents / 2.0); logger.w(TAG, "Request too large, will decrease size and attempt to reupload"); logThread.post(new Runnable() { @Override public void run() { uploadingCurrently.set(false); updateServer(true); } }); } else { logger.w(TAG, "Upload failed, " + stringResponse + ", will attempt to reupload later"); } } catch (java.net.ConnectException e) { // logger.w(TAG, // "No internet connection found, unable to upload events"); lastError = e; Diagnostics.getLogger().logError("Failed to post upload request", e); } catch (java.net.UnknownHostException e) { // logger.w(TAG, // "No internet connection found, unable to upload events"); lastError = e; Diagnostics.getLogger().logError("Failed to post upload request", e); } catch (IOException e) { logger.e(TAG, e.toString()); lastError = e; 
Diagnostics.getLogger().logError("Failed to post upload request", e); } catch (AssertionError e) { // This can be caused by a NoSuchAlgorithmException thrown by DefaultHttpClient logger.e(TAG, "Exception:", e); lastError = e; Diagnostics.getLogger().logError("Failed to post upload request", e); } catch (Exception e) { // Just log any other exception so things don't crash on upload logger.e(TAG, "Exception:", e); lastError = e; Diagnostics.getLogger().logError("Failed to post upload request", e); } if (!uploadSuccess) { uploadingCurrently.set(false); } } /** * Get the current device id. Can be null if deviceId hasn't been initialized yet. * * @return A unique identifier for tracking within the analytics system. */ public String getDeviceId() { return deviceId; } // don't need to keep this in memory, if only using it at most 1 or 2 times private Set<String> getInvalidDeviceIds() { Set<String> invalidDeviceIds = new HashSet<String>(); invalidDeviceIds.add(""); invalidDeviceIds.add("9774d56d682e549c"); invalidDeviceIds.add("unknown"); invalidDeviceIds.add("000000000000000"); // Common Serial Number invalidDeviceIds.add("Android"); invalidDeviceIds.add("DEFACE"); invalidDeviceIds.add("00000000-0000-0000-0000-000000000000"); return invalidDeviceIds; } private String initializeDeviceId() { Set<String> invalidIds = getInvalidDeviceIds(); // see if device id already stored in db String deviceId = dbHelper.getValue(DEVICE_ID_KEY); String sharedPrefDeviceId = Utils.getStringFromSharedPreferences(context, instanceName, DEVICE_ID_KEY); if (!(Utils.isEmptyString(deviceId) || invalidIds.contains(deviceId))) { // compare against device id stored in backup storage and update if necessary if (!deviceId.equals(sharedPrefDeviceId)) { saveDeviceId(deviceId); } return deviceId; } // backup #1: check if device id is stored in shared preferences if (!(Utils.isEmptyString(sharedPrefDeviceId) || invalidIds.contains(sharedPrefDeviceId))) { saveDeviceId(sharedPrefDeviceId); return 
sharedPrefDeviceId; } if (!newDeviceIdPerInstall && useAdvertisingIdForDeviceId && !deviceInfo.isLimitAdTrackingEnabled()) { // Android ID is deprecated by Google. // We are required to use Advertising ID, and respect the advertising ID preference String advertisingId = deviceInfo.getAdvertisingId(); if (!(Utils.isEmptyString(advertisingId) || invalidIds.contains(advertisingId))) { saveDeviceId(advertisingId); return advertisingId; } } // If this still fails, generate random identifier that does not persist // across installations. Append R to distinguish as randomly generated String randomId = deviceInfo.generateUUID() + "R"; saveDeviceId(randomId); return randomId; } private void saveDeviceId(String deviceId) { dbHelper.insertOrReplaceKeyValue(DEVICE_ID_KEY, deviceId); Utils.writeStringToSharedPreferences(context, instanceName, DEVICE_ID_KEY, deviceId); } protected void runOnLogThread(Runnable r) { if (Thread.currentThread() != logThread) { logThread.post(r); } else { r.run(); } } /** * Internal method to replace null event fields with JSON null object. * * @param obj the obj * @return the object */ protected Object replaceWithJSONNull(Object obj) { return obj == null ? 
JSONObject.NULL : obj; } /** * Internal method to check whether application context and api key are set * * @param methodName the parent method name to print in error message * @return whether application context and api key are set */ protected synchronized boolean contextAndApiKeySet(String methodName) { if (context == null) { logger.e(TAG, "context cannot be null, set context with initialize() before calling " + methodName); return false; } if (Utils.isEmptyString(apiKey)) { logger.e(TAG, "apiKey cannot be null or empty, set apiKey with initialize() before calling " + methodName); return false; } return true; } /** * Internal method to convert bytes to hex string * * @param bytes the bytes * @return the string */ protected String bytesToHexString(byte[] bytes) { final char[] hexArray = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f' }; char[] hexChars = new char[bytes.length * 2]; int v; for (int j = 0; j < bytes.length; j++) { v = bytes[j] & 0xFF; hexChars[j * 2] = hexArray[v >>> 4]; hexChars[j * 2 + 1] = hexArray[v & 0x0F]; } return new String(hexChars); } /** * Move all preference data from the legacy name to the new, static name if needed. * <p/> * Constants.PACKAGE_NAME used to be set using {@code Constants.class.getPackage().getName()} * Some aggressive proguard optimizations broke the reflection and caused apps * to crash on startup. * <p/> * Now that Constants.PACKAGE_NAME is changed, old data on devices needs to be * moved over to the new location so that device ids remain consistent. * <p/> * This should only happen once -- the first time a user loads the app after updating. * This logic needs to remain in place for quite a long time. It was first introduced in * April 2015 in version 1.6.0. * * @param context the context * @return the boolean */ static boolean upgradePrefs(Context context) { return upgradePrefs(context, null, null); } /** * Upgrade prefs boolean. 
* * @param context the context * @param sourcePkgName the source pkg name * @param targetPkgName the target pkg name * @return the boolean */ static boolean upgradePrefs(Context context, String sourcePkgName, String targetPkgName) { try { if (sourcePkgName == null) { // Try to load the package name using the old reflection strategy. sourcePkgName = Constants.PACKAGE_NAME; try { sourcePkgName = Constants.class.getPackage().getName(); } catch (Exception e) { } } if (targetPkgName == null) { targetPkgName = Constants.PACKAGE_NAME; } // No need to copy if the source and target are the same. if (targetPkgName.equals(sourcePkgName)) { return false; } // Copy over any preferences that may exist in a source preference store. String sourcePrefsName = sourcePkgName + "." + context.getPackageName(); SharedPreferences source = context.getSharedPreferences(sourcePrefsName, Context.MODE_PRIVATE); // Nothing left in the source store to copy if (source.getAll().size() == 0) { return false; } String prefsName = targetPkgName + "." + context.getPackageName(); SharedPreferences targetPrefs = context.getSharedPreferences(prefsName, Context.MODE_PRIVATE); SharedPreferences.Editor target = targetPrefs.edit(); // Copy over all existing data. if (source.contains(sourcePkgName + ".previousSessionId")) { target.putLong(Constants.PREFKEY_PREVIOUS_SESSION_ID, source.getLong(sourcePkgName + ".previousSessionId", -1)); } if (source.contains(sourcePkgName + ".deviceId")) { target.putString(Constants.PREFKEY_DEVICE_ID, source.getString(sourcePkgName + ".deviceId", null)); } if (source.contains(sourcePkgName + ".userId")) { target.putString(Constants.PREFKEY_USER_ID, source.getString(sourcePkgName + ".userId", null)); } if (source.contains(sourcePkgName + ".optOut")) { target.putBoolean(Constants.PREFKEY_OPT_OUT, source.getBoolean(sourcePkgName + ".optOut", false)); } // Commit the changes and clear the source store so we don't recopy. 
target.apply(); source.edit().clear().apply(); logger.i(TAG, "Upgraded shared preferences from " + sourcePrefsName + " to " + prefsName); return true; } catch (Exception e) { logger.e(TAG, "Error upgrading shared preferences", e); Diagnostics.getLogger().logError("Failed to upgrade shared prefs", e); return false; } } /** * Upgrade shared prefs to db boolean. * * @param context the context * @return the boolean */ static boolean upgradeSharedPrefsToDB(Context context) { // Move all data from sharedPrefs to sqlite key value store to support multi-process apps. // sharedPrefs is known to not be process-safe. return upgradeSharedPrefsToDB(context, null); } /** * Upgrade shared prefs to db boolean. * * @param context the context * @param sourcePkgName the source pkg name * @return the boolean */ static boolean upgradeSharedPrefsToDB(Context context, String sourcePkgName) { if (sourcePkgName == null) { sourcePkgName = Constants.PACKAGE_NAME; } // check if upgrade needed DatabaseHelper dbHelper = DatabaseHelper.getDatabaseHelper(context); String deviceId = dbHelper.getValue(DEVICE_ID_KEY); Long previousSessionId = dbHelper.getLongValue(PREVIOUS_SESSION_ID_KEY); Long lastEventTime = dbHelper.getLongValue(LAST_EVENT_TIME_KEY); if (!Utils.isEmptyString(deviceId) && previousSessionId != null && lastEventTime != null) { return true; } String prefsName = sourcePkgName + "." 
+ context.getPackageName(); SharedPreferences preferences = context.getSharedPreferences(prefsName, Context.MODE_PRIVATE); migrateStringValue( preferences, Constants.PREFKEY_DEVICE_ID, null, dbHelper, DEVICE_ID_KEY ); migrateLongValue( preferences, Constants.PREFKEY_LAST_EVENT_TIME, -1, dbHelper, LAST_EVENT_TIME_KEY ); migrateLongValue( preferences, Constants.PREFKEY_LAST_EVENT_ID, -1, dbHelper, LAST_EVENT_ID_KEY ); migrateLongValue( preferences, Constants.PREFKEY_LAST_IDENTIFY_ID, -1, dbHelper, LAST_IDENTIFY_ID_KEY ); migrateLongValue( preferences, Constants.PREFKEY_PREVIOUS_SESSION_ID, -1, dbHelper, PREVIOUS_SESSION_ID_KEY ); migrateStringValue( preferences, Constants.PREFKEY_USER_ID, null, dbHelper, USER_ID_KEY ); migrateBooleanValue( preferences, Constants.PREFKEY_OPT_OUT, false, dbHelper, OPT_OUT_KEY ); return true; } private static void migrateLongValue(SharedPreferences prefs, String prefKey, long defValue, DatabaseHelper dbHelper, String dbKey) { Long value = dbHelper.getLongValue(dbKey); if (value != null) { // If value already exists, it doesn't need to migrate. return; } long oldValue = prefs.getLong(prefKey, defValue); dbHelper.insertOrReplaceKeyLongValue(dbKey, oldValue); prefs.edit().remove(prefKey).apply(); } private static void migrateStringValue(SharedPreferences prefs, String prefKey, String defValue, DatabaseHelper dbHelper, String dbKey) { String value = dbHelper.getValue(dbKey); if (!Utils.isEmptyString(value)) { return; } String oldValue = prefs.getString(prefKey, defValue); if (!Utils.isEmptyString(oldValue)) { dbHelper.insertOrReplaceKeyValue(dbKey, oldValue); prefs.edit().remove(prefKey).apply(); } } private static void migrateBooleanValue(SharedPreferences prefs, String prefKey, boolean defValue, DatabaseHelper dbHelper, String dbKey) { Long value = dbHelper.getLongValue(dbKey); if (value != null) { return; } boolean oldValue = prefs.getBoolean(prefKey, defValue); dbHelper.insertOrReplaceKeyLongValue(dbKey, oldValue ? 
1L : 0L); prefs.edit().remove(prefKey).apply(); } /** * Internal method to fetch the current time millis. Used for testing. * * @return the current time millis */ protected long getCurrentTimeMillis() { return System.currentTimeMillis(); } }
/*
 * $Id: LocalServletManager.java,v 1.7 2005-01-04 03:03:33 tlipkis Exp $
 */

package org.lockss.servlet;

import java.io.*;
import java.net.*;
import java.util.*;
import org.lockss.app.*;
import org.lockss.config.*;
import org.lockss.daemon.*;
import org.lockss.util.*;
import org.lockss.jetty.*;
import org.mortbay.http.*;
import org.mortbay.http.handler.*;
import org.mortbay.jetty.servlet.*;

/**
 * Local UI servlet starter
 */
public class LocalServletManager extends BaseServletManager {
  public static final String SERVER_NAME = "LocalUI";

  private static Logger log = Logger.getLogger("ServletMgr");

  static final String PREFIX = Configuration.PREFIX + "admin.";

  static final String PARAM_CONTACT_ADDR = PREFIX + "contactEmail";
  static final String DEFAULT_CONTACT_ADDR = "contactnotset@notset";

  static final String PARAM_HELP_URL = PREFIX + "helpUrl";
  static final String DEFAULT_HELP_URL =
    "http://documents.lockss.org/publicdocs/release/";

  /** If set, requests for the root path ("/") are redirected to this URL. */
  public static final String PARAM_REDIRECT_ROOT = PREFIX + "redirectRoot";
  public static final String DEFAULT_REDIRECT_ROOT = null;

  private String redirectRootTo = DEFAULT_REDIRECT_ROOT;
  private LockssResourceHandler rootResourceHandler;
  private HashUserRealm realm;

  public LocalServletManager() {
    super(SERVER_NAME);
  }

  /**
   * Reacts to configuration changes: updates the root redirect target and the
   * contact/help values exposed by {@link LockssServlet}.
   *
   * @param config      the new configuration
   * @param prevConfig  the previous configuration
   * @param changedKeys the keys that changed between the two
   */
  public void setConfig(Configuration config, Configuration prevConfig,
                        Configuration.Differences changedKeys) {
    super.setConfig(config, prevConfig, changedKeys);
    if (changedKeys.contains(PARAM_REDIRECT_ROOT)) {
      redirectRootTo = config.get(PARAM_REDIRECT_ROOT, DEFAULT_REDIRECT_ROOT);
      // Handler may not exist yet; startServlets() applies the value when it's created.
      if (rootResourceHandler != null) {
        // setRedirectRootTo() normalizes null/empty itself.
        setRedirectRootTo(rootResourceHandler, redirectRootTo);
      }
    }
    if (changedKeys.contains(PREFIX)) {
      LockssServlet.setContactAddr(config.get(PARAM_CONTACT_ADDR,
                                              DEFAULT_CONTACT_ADDR));
      LockssServlet.setHelpUrl(config.get(PARAM_HELP_URL, DEFAULT_HELP_URL));
    }
  }

  /**
   * Sets (or clears) the root-redirect target on the given handler.
   * An empty or null target disables the redirect.
   *
   * @param rh    the handler to update
   * @param redTo the redirect target, may be null or empty
   */
  private void setRedirectRootTo(LockssResourceHandler rh, String redTo) {
    // Fixed: previously operated on the rootResourceHandler field, silently
    // ignoring the rh parameter; now the handler passed in is the one updated.
    rh.setRedirectRootTo(StringUtil.isNullString(redTo) ? null : redTo);
  }

  /**
   * Creates and starts the admin UI HTTP server: listener, auth realm (when
   * enabled), and all admin contexts. Failures are logged, not rethrown.
   */
  public void startServlets() {
    try {
      // Create the server
      HttpServer server = new HttpServer();

      // Create a port listener
      HttpListener listener =
        server.addListener(new org.mortbay.util.InetAddrPort(port));

      // create auth realm
      if (doAuth) {
        try {
          URL propsUrl = this.getClass().getResource(PASSWORD_PROPERTY_FILE);
          if (propsUrl != null) {
            log.debug("passwd props file: " + propsUrl);
            realm = new HashUserRealm(UI_REALM, propsUrl.toString());
          }
        } catch (IOException e) {
          log.warning("Error loading admin.props", e);
        }
        if (realm == null) {
          realm = new HashUserRealm(UI_REALM);
        }
        setConfiguredPasswords(realm);
        if (realm.isEmpty()) {
          log.warning("No users created, UI is effectively disabled.");
        }
      }

      configureAdminContexts(server);

      // Start the http server
      startServer(server, port);
    } catch (Exception e) {
      log.warning("Couldn't start servlets", e);
    }
  }

  /**
   * Sets up the /log, /info and / (admin) contexts on the server.
   *
   * @param server the server to configure
   */
  public void configureAdminContexts(HttpServer server) {
    try {
      // NOTE(review): "true ||" makes the log context unconditional; looks like a
      // deliberate override of the logdir check — confirm before removing.
      if (true || logdir != null) {
        // Create a context
        setupLogContext(server, realm, "/log/", logdir);
      }
      // info currently has same auth as /, but could be different
      setupInfoContext(server);

      setupAdminContext(server);

      // no separate image context for now.  (Use if want different
      // access control or auth from / context
      // setupImageContext(server);
    } catch (Exception e) {
      log.warning("Couldn't create admin UI contexts", e);
    }
  }

  /**
   * Builds the root ("/") admin context: auth handler, all admin servlets,
   * a resource handler rooted at the bundled htdocs, and a 404 handler.
   *
   * @param server the server to configure
   * @throws MalformedURLException if the htdocs resource URL is malformed
   */
  void setupAdminContext(HttpServer server) throws MalformedURLException {
    HttpContext context = makeContext(server, "/");

    // add handlers in the order they should be tried.

    // user authentication handler
    setContextAuthHandler(context, realm);

    // Create a servlet container
    ServletHandler handler = new ServletHandler();

    // Request dump servlet
    handler.addServlet("Dump", "/Dump", "org.mortbay.servlet.Dump");

    handler.addServlet("Home", "/Home", "org.lockss.servlet.UiHome");
    handler.addServlet("BatchAuConfig", "/BatchAuConfig",
                       "org.lockss.servlet.BatchAuConfig");
    handler.addServlet("JournalConfig", "/AuConfig",
                       "org.lockss.servlet.AuConfig");
    handler.addServlet("DaemonStatus", "/DaemonStatus",
                       "org.lockss.servlet.DaemonStatus");
    handler.addServlet("AdminIpAccess", "/AdminIpAccess",
                       "org.lockss.servlet.AdminIpAccess");
    handler.addServlet("ProxyIpAccess", "/ProxyIpAccess",
                       "org.lockss.servlet.ProxyIpAccess");
    handler.addServlet("Hash CUS", "/HashCUS", "org.lockss.servlet.HashCUS");
    handler.addServlet("Raise Alert", "/RaiseAlert",
                       "org.lockss.servlet.RaiseAlert");
    addServletIfAvailable(handler, "ThreadDump", "/ThreadDump",
                          "org.lockss.servlet.ThreadDump");
    addServletIfAvailable(handler, "Api", "/Api",
                          "org.lockss.ui.servlet.Api");
    context.addHandler(handler);

    // ResourceHandler should come after servlets
    // find the htdocs directory, set as resource base
    ClassLoader loader = Thread.currentThread().getContextClassLoader();
    URL resourceUrl = loader.getResource("org/lockss/htdocs/");
    log.debug("Resource URL: " + resourceUrl);

    context.setResourceBase(resourceUrl.toString());
    rootResourceHandler = new LockssResourceHandler(getDaemon());
    rootResourceHandler.setDirAllowed(false);
    setRedirectRootTo(rootResourceHandler, redirectRootTo);
    //       rHandler.setAcceptRanges(true);
    context.addHandler(rootResourceHandler);

    // NotFoundHandler
    context.addHandler(new NotFoundHandler());

    //       context.addHandler(new DumpHandler());
  }

  /**
   * Builds an (currently unused) /images context serving the bundled image
   * resources. Kept for the case where images need different access control.
   *
   * @param server the server to configure
   * @throws MalformedURLException if the images resource URL is malformed
   */
  void setupImageContext(HttpServer server) throws MalformedURLException {
    HttpContext context = makeContext(server, "/images");

    // add handlers in the order they should be tried.

    // ResourceHandler for /images dir
    // find the htdocs directory, set as resource base
    ClassLoader loader = Thread.currentThread().getContextClassLoader();
    URL resourceUrl = loader.getResource("org/lockss/htdocs/images/");
    log.debug("Images resource URL: " + resourceUrl);

    context.setResourceBase(resourceUrl.toString());

    LockssResourceHandler rHandler = new LockssResourceHandler(getDaemon());
    context.addHandler(rHandler);

    // NotFoundHandler
    context.addHandler(new NotFoundHandler());
  }

  /**
   * Builds the /info context with the proxy-info servlet.
   *
   * @param server the server to configure
   */
  void setupInfoContext(HttpServer server) {
    HttpContext context = makeContext(server, "/info");

    // add handlers in the order they should be tried.

    // user authentication handler
    setContextAuthHandler(context, realm);

    // Create a servlet container
    ServletHandler handler = new ServletHandler();
    handler.addServlet("ProxyInfo", "/ProxyInfo",
                       "org.lockss.servlet.ProxyConfig");
    context.addHandler(handler);

    // NotFoundHandler
    context.addHandler(new NotFoundHandler());
  }

  // common context setup
  // adds IpAccessHandler as all contexts want it
  // doesn't add AuthHandler as not all contexts want it
  HttpContext makeContext(HttpServer server, String path) {
    HttpContext context = server.getContext(path);
    context.setAttribute("LockssApp", theApp);
    // In this environment there is no point in consuming memory with
    // cached resources
    context.setMaxCachedFileSize(0);

    // IpAccessHandler is always first handler
    addAccessHandler(context);
    return context;
  }
}
package org.smerty.jham;

/**
 * Location class with methods allowing conversion to and from Maidenhead
 * locator (grid squares) based off of
 * "Conversion Between Geodetic and Grid Locator Systems" by Edmund T. Tyson,
 * N5JTY in QST January 1989, pp. 29-30, 43
 *
 * @author Paul Picazo &lt;ppicazo@gmail.com&gt;
 *
 */
public class Location {

  /**
   * Average earth radius in kilometers, IUGG definition.
   */
  private static final double AVG_EARTH_RADIUS_KM = 6371.009;

  /**
   * Average earth radius in statute miles, IUGG definition.
   */
  private static final double AVG_EARTH_RADIUS_SM = 3958.761;

  /**
   * Average earth radius in nautical miles, IUGG definition.
   */
  private static final double AVG_EARTH_RADIUS_NM = 3440.069;

  /**
   * latitude of location.
   */
  private Latitude latitude;

  /**
   * longitude of location.
   */
  private Longitude longitude;

  /**
   * No argument constructor.
   *
   */
  public Location() {
    this.latitude = new Latitude();
    this.longitude = new Longitude();
  }

  /**
   * @param latitudeIn
   *          initial latitude
   * @param longitudeIn
   *          initial longitude
   */
  public Location(final Latitude latitudeIn, final Longitude longitudeIn) {
    this.latitude = latitudeIn;
    this.longitude = longitudeIn;
  }

  /**
   * @param latitudeIn
   *          initial latitude in degrees
   * @param longitudeIn
   *          initial longitude in degrees
   */
  public Location(final double latitudeIn, final double longitudeIn) {
    this.latitude = Latitude.fromDegrees(latitudeIn);
    this.longitude = Longitude.fromDegrees(longitudeIn);
  }

  /**
   * @param maidenhead
   *          used construct location from maidenhead locator string
   */
  public Location(final String maidenhead) {
    this.latitude = extractLat(maidenhead);
    this.longitude = extractLon(maidenhead);
  }

  /**
   * Value-based equality on the latitude/longitude components.
   *
   * <p>Fixed: the previous implementation compared {@code hashCode()} values,
   * so any two unequal locations whose hashes collided compared equal,
   * violating the {@link Object#equals(Object)} contract.
   * Assumes Latitude/Longitude provide value-based equals consistent with the
   * hashCode they already expose — TODO confirm in those classes.
   *
   * @param obj the object to compare against
   * @return true if obj is a Location with equal latitude and longitude
   */
  @Override
  public boolean equals(final Object obj) {
    if (this == obj) {
      return true;
    }
    if (!(obj instanceof Location)) {
      return false;
    }
    Location other = (Location) obj;
    return this.latitude.equals(other.latitude)
        && this.longitude.equals(other.longitude);
  }

  @Override
  public int hashCode() {
    int hash = 1;
    hash = hash * 17 + this.latitude.hashCode();
    hash = hash * 31 + this.longitude.hashCode();
    return hash;
  }

  /**
   * @return maidenhead locator string
   */
  public String toMaidenhead() {
    return toMaidenhead(this.latitude.toDegrees(), this.longitude.toDegrees());
  }

  /**
   * Converts a lat/lon pair (degrees) to a 6-character Maidenhead locator,
   * e.g. {@code "FN31pr"}. Field/square characters are upper case, subsquare
   * characters lower case, per convention.
   *
   * @param latitudeIn
   *          latitude component of locator string
   * @param longitudeIn
   *          longitude component of locator string
   * @return maidenhead locator string
   */
  public static String toMaidenhead(final double latitudeIn,
      final double longitudeIn) {

    // Longitude fields are 20 degrees wide: shift to [0,360) then halve.
    double longitude = longitudeIn + 180;
    longitude /= 2;
    char lonFirst = (char) ('A' + (longitude / 10));
    char lonSecond = (char) ('0' + longitude % 10);
    char lonThird = (char) ('A' + (longitude % 1) * 24);

    // Latitude fields are 10 degrees wide: shift to [0,180).
    double latitude = latitudeIn + 90;
    char latFirst = (char) ('A' + (latitude / 10));
    char latSecond = (char) ('0' + latitude % 10);
    char latThird = (char) ('A' + (latitude % 1) * 24);

    StringBuilder sb = new StringBuilder();
    sb.append(lonFirst);
    sb.append(latFirst);
    sb.append(lonSecond);
    sb.append(latSecond);
    sb.append(("" + lonThird).toLowerCase());
    sb.append(("" + latThird).toLowerCase());

    return sb.toString();
  }

  /**
   * Extracts the latitude encoded in a Maidenhead locator; the result is
   * centered in the subsquare (hence the trailing half-step offset).
   *
   * @param maidenheadIn
   *          locator string to be converted
   * @return latitude
   */
  public static Latitude extractLat(final String maidenheadIn) {
    String maidenhead = maidenheadIn.toUpperCase();
    double latitude = -90 + 10 * (maidenhead.charAt(1) - 'A')
        + (maidenhead.charAt(3) - '0')
        + 2.5 / 60 * (maidenhead.charAt(5) - 'A')
        + 2.5 / 60 / 2;
    return Latitude.fromDegrees(latitude);
  }

  /**
   * Extracts the longitude encoded in a Maidenhead locator; the result is
   * centered in the subsquare (hence the trailing half-step offset).
   *
   * @param maidenheadIn
   *          locator string to be converted
   * @return longitude
   */
  public static Longitude extractLon(final String maidenheadIn) {
    String maidenhead = maidenheadIn.toUpperCase();
    double longitude = -180 + 20 * (maidenhead.charAt(0) - 'A')
        + 2 * (maidenhead.charAt(2) - '0')
        + 5.0 / 60 * (maidenhead.charAt(4) - 'A')
        + 5.0 / 60 / 2;
    return Longitude.fromDegrees(longitude);
  }

  /**
   * @return latitude
   */
  public Latitude getLatitude() {
    return latitude;
  }

  /**
   * @return longitude
   */
  public Longitude getLongitude() {
    return longitude;
  }

  /**
   * @param latitudeIn
   *          north/south component of location
   */
  public void setLatitude(final Latitude latitudeIn) {
    this.latitude = latitudeIn;
  }

  /**
   * @param longitudeIn
   *          east/west component of location
   */
  public void setLongitude(final Longitude longitudeIn) {
    this.longitude = longitudeIn;
  }

  /**
   * @param loc2
   *          second location
   * @return great circle distance in miles
   */
  public double getDistanceMi(final Location loc2) {
    return getDistanceMi(this, loc2);
  }

  /**
   * @param loc2
   *          second location
   * @return great circle distance in kilometers
   */
  public double getDistanceKm(final Location loc2) {
    return getDistanceKm(this, loc2);
  }

  /**
   * @param loc2
   *          second location
   * @return great circle distance in nautical miles
   */
  public double getDistanceNm(final Location loc2) {
    return getDistanceNm(this, loc2);
  }

  /**
   * @param loc1
   *          first location
   * @param loc2
   *          second location
   * @return great circle distance in miles
   */
  public static double getDistanceMi(final Location loc1, final Location loc2) {
    return getDistance(loc1, loc2, AVG_EARTH_RADIUS_SM);
  }

  /**
   * @param loc1
   *          first location
   * @param loc2
   *          second location
   * @return great circle distance in kilometers
   */
  private static double getDistanceKm(final Location loc1,
      final Location loc2) {
    return getDistance(loc1, loc2, AVG_EARTH_RADIUS_KM);
  }

  /**
   * @param loc1
   *          first location
   * @param loc2
   *          second location
   * @return great circle distance in nautical miles
   */
  private static double getDistanceNm(final Location loc1,
      final Location loc2) {
    return getDistance(loc1, loc2, AVG_EARTH_RADIUS_NM);
  }

  /**
   * Spherical-law-of-cosines great circle distance. Equal locations short-
   * circuit to 0 to avoid acos() returning NaN on rounding past 1.0.
   *
   * @param loc1
   *          first location
   * @param loc2
   *          second location
   * @param radius
   *          radius of the earth in the units desired for result
   * @return great circle distance between the two locations, result units same
   *         of the radius units
   */
  private static double getDistance(final Location loc1, final Location loc2,
      final double radius) {
    if (loc1.equals(loc2)) {
      return 0;
    }
    return Math.acos(Math.sin(loc1.getLatitude().getRadians())
        * Math.sin(loc2.getLatitude().getRadians())
        + Math.cos(loc1.getLatitude().getRadians())
        * Math.cos(loc2.getLatitude().getRadians())
        * Math.cos(loc2.getLongitude().getRadians()
            - loc1.getLongitude().getRadians()))
        * radius;
  }

  /**
   * @param loc2
   *          destination location
   * @return bearing in degrees
   */
  public double getBearing(final Location loc2) {
    return getBearing(this, loc2);
  }

  /**
   * Initial great-circle bearing from loc1 to loc2, normalized to [0, 360).
   * Returns NaN for identical locations (bearing is undefined).
   *
   * @param loc1
   *          source location
   * @param loc2
   *          destination location
   * @return bearing in degrees
   */
  public static double getBearing(final Location loc1, final Location loc2) {
    if (loc1.equals(loc2)) {
      return Double.NaN;
    }
    double dLon = loc2.getLongitude().getRadians()
        - loc1.getLongitude().getRadians();
    double y = Math.sin(dLon) * Math.cos(loc2.getLatitude().getRadians());
    double x = Math.cos(loc1.getLatitude().getRadians())
        * Math.sin(loc2.getLatitude().getRadians())
        - Math.sin(loc1.getLatitude().getRadians())
        * Math.cos(loc2.getLatitude().getRadians()) * Math.cos(dLon);
    return (Angle.radiansToDegrees(Math.atan2(y, x)) + 360) % 360;
  }

}
package com.celements.navigation; import org.xwiki.model.reference.DocumentReference; import com.xpn.xwiki.XWikiContext; public class TreeNode { private String parent; private Integer position; private String partName; private IPartNameGetStrategy partNameGetStrategy; private DocumentReference docRef; private String databaseName; @Deprecated public TreeNode(String fullName, String parent, Integer position, String databaseName) { this.databaseName = databaseName; setFullName(fullName); setParent(parent); setPosition(position); } public TreeNode(DocumentReference docRef, String parent, Integer position) { setDocumentReference(docRef); setParent(parent); setPosition(position); } /** * * @return fullName * * @deprecated since 2.14.0 use getDocumentReference instead */ @Deprecated public String getFullName() { return docRef.getLastSpaceReference().getName() + "." + docRef.getName(); } void setDocumentReference(DocumentReference docRef) { this.docRef = docRef; } @Deprecated void setFullName(String fullName) { setDocumentReference(new DocumentReference(databaseName, fullName.split("\\.")[0], fullName.split("\\.")[1])); } public String getParent() { return parent; } void setParent(String parent) { if (parent == null) { parent = ""; } this.parent = parent; } public Integer getPosition() { if (position == null) { position = 0; } return position; } void setPosition(Integer position) { this.position = position; } public String getPartName(XWikiContext context) { if (partName == null) { if (partNameGetStrategy != null) { partName = partNameGetStrategy.getPartName(getFullName(), context); } else { partName = ""; } } return partName; } public void setPartName(String partName) { if (partName == null) { partName = ""; } this.partName = partName; } public void setPartNameGetStrategy(IPartNameGetStrategy strategy) { this.partNameGetStrategy = strategy; } public DocumentReference getDocumentReference() { return docRef; } @Override public boolean equals(Object obj) { if (this == obj) { 
return true; } if ((obj == null) || (obj.getClass() != this.getClass())) { return false; } // object must be Test at this point TreeNode node = (TreeNode) obj; return docRef.equals(node.docRef) && (position == node.position); } @Override public int hashCode() { int hash = 7; hash = 31 * hash + position; hash = 31 * hash + (docRef == null ? 0 : docRef.hashCode()); return hash; } }
package yokohama.unit.ast; import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; import lombok.AllArgsConstructor; import yokohama.unit.position.ErrorMessage; import yokohama.unit.util.ClassResolver; @AllArgsConstructor public class ClassCheckVisitor extends CheckVisitorTemplate { private final ClassResolver classResolver; public List<ErrorMessage> check(Group group) { return visitGroup(group).collect(Collectors.toList()); } @Override public Stream<ErrorMessage> visitClassType(ClassType classType) { String name = classType.getName(); try { classResolver.lookup(name); } catch (ClassNotFoundException e) { return Stream.of(new ErrorMessage( "cannot resolve class: " + name, classType.getSpan())); } return Stream.empty(); } }
package org.nutz.dao.impl.sql.run;

import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Savepoint;

import javax.sql.DataSource;

import org.nutz.dao.ConnCallback;
import org.nutz.dao.DaoException;
import org.nutz.dao.DaoInterceptorChain;
import org.nutz.dao.DatabaseMeta;
import org.nutz.dao.impl.DaoRunner;
import org.nutz.dao.sql.DaoStatement;
import org.nutz.lang.Configurable;
import org.nutz.lang.util.NutMap;
import org.nutz.log.Log;
import org.nutz.log.Logs;
import org.nutz.trans.Atom;
import org.nutz.trans.Trans;
import org.nutz.trans.Transaction;

/**
 * Default {@link DaoRunner}: decides whether the statements need an (auto)
 * transaction, obtains a JDBC connection -- possibly from a read-only slave
 * data source -- and hands it to the {@link ConnCallback}.
 *
 * @author wendal
 */
public class NutDaoRunner implements DaoRunner, Configurable {

    private static final Log log = Logs.get();

    // Optional read-only data source, used for single pure-select statements.
    protected DataSource slaveDataSource;

    // Whether to set a JDBC savepoint before running inside a transaction
    // (applied for PostgreSQL only; see _runWithTransaction).
    protected boolean supportSavePoint = true;

    /**
     * Entry point. Inspects the statement chain to decide whether an automatic
     * transaction is required for the current database type, then dispatches to
     * {@link #_run(DataSource, ConnCallback)} -- either inside Trans.exec or
     * directly.
     */
    public void run(final DataSource dataSource, final ConnCallback callback) {
        if (callback instanceof DaoInterceptorChain) {
            DaoInterceptorChain chain = (DaoInterceptorChain)callback;
            DaoStatement[] sts = chain.getDaoStatements();
            boolean useTrans = false;
            // True when every statement is a select (or forced query) -- such a
            // batch usually does not need a transaction.
            boolean isAllSelect = true;
            for (DaoStatement st : sts) {
                if (!st.isSelect() && !st.isForceExecQuery()) {
                    isAllSelect = false;
                    break;
                }
            }
            switch (meta.getType()) {
            case PSQL:
                // PostgreSQL: always wrap in a transaction (original comment,
                // garbled in transit, mentioned CLOB/BLOB handling as the reason
                // -- TODO confirm).
                useTrans = true;
                break;
            case SQLITE:
                // SQLite: downgrade the isolation level to READ_UNCOMMITTED
                // unless an outer transaction already pins a stricter level.
                Transaction t = Trans.get();
                if (t == null) {
                    if (isAllSelect)
                        useTrans = false;
                    else {
                        chain.setAutoTransLevel(Connection.TRANSACTION_READ_UNCOMMITTED);
                        useTrans = true;
                    }
                }
                else if (t.getLevel() != Connection.TRANSACTION_SERIALIZABLE && t.getLevel() != Connection.TRANSACTION_READ_UNCOMMITTED) {
                    t.setLevel(Connection.TRANSACTION_READ_UNCOMMITTED);
                    useTrans = true;
                }
                break;
            default:
                // Other databases: only skip the transaction when there is no
                // ambient one AND the batch is a single statement or all selects.
                useTrans = !(Trans.isTransactionNone() && (sts.length==1 || isAllSelect));
                break;
            }
            if (useTrans && chain.getAutoTransLevel() > 0) {
                // Re-enter _run inside an automatic transaction at the requested level.
                Trans.exec(chain.getAutoTransLevel(), new Atom() {
                    public void run() {
                        _run(dataSource, callback);
                    }
                });
                return;
            }
        }
        _run(dataSource, callback);
    }

    /** Dispatches on whether an ambient transaction exists. */
    public void _run(DataSource dataSource, ConnCallback callback) {
        Transaction t = Trans.get();
        if (null != t) {
            _runWithTransaction(t, dataSource, callback);
        } else {
            _runWithoutTransaction(dataSource, callback);
        }
    }

    /**
     * Runs the callback on the transaction's connection. For PostgreSQL a
     * savepoint is taken first so a failure can be rolled back to it without
     * aborting the whole transaction. The connection is NOT closed here -- it
     * belongs to (and is closed by) the transaction.
     */
    protected void _runWithTransaction(Transaction t, DataSource dataSource, ConnCallback callback) {
        Connection conn = null;
        Savepoint sp = null;
        try {
            conn = t.getConnection(selectDataSource(t, dataSource, callback));
            if (supportSavePoint && meta != null && meta.isPostgresql()) {
                sp = conn.setSavepoint();
            }
            runCallback(conn, callback);
        }
        catch (Exception e) {
            if (sp != null && conn != null)
                try {
                    // Best effort: restore to the savepoint; a failure here is
                    // deliberately ignored so the original exception propagates.
                    conn.rollback(sp);
                }
                catch (SQLException e1) {
                }
            if (e instanceof DaoException)
                throw (DaoException)e;
            throw new DaoException(e);
        }
    }

    /**
     * Runs the callback on a fresh connection, committing when auto-commit is
     * off, rolling back on failure, and always closing the connection.
     */
    public void _runWithoutTransaction(DataSource dataSource, ConnCallback callback) {
        Connection conn = null;
        try {
            conn = selectDataSource(null, dataSource, callback).getConnection();
            runCallback(conn, callback);
            if (!conn.getAutoCommit())
                conn.commit();
        }
        catch (Exception e) {
            try {
                // conn may still be null if getConnection itself failed.
                if (conn != null)
                    conn.rollback();
            }
            catch (Exception e1) {}// rollback failure intentionally swallowed; original exception wins
            if (e instanceof DaoException)
                throw (DaoException)e;
            throw new DaoException(e);
        }
        finally {
            if (null != conn) {
                try {
                    conn.close();
                }
                catch (SQLException closeE) {
                    if (log.isWarnEnabled())
                        log.warn("Fail to close connection!", closeE);
                }
            }
        }
    }

    /** Hook point: subclasses may wrap the actual callback invocation. */
    protected void runCallback(Connection conn, ConnCallback callback) throws Exception {
        callback.invoke(conn);
    }

    // Database metadata; injected via setMeta before run() is called.
    protected DatabaseMeta meta;

    public void setMeta(DatabaseMeta meta) {
        this.meta = meta;
    }

    public void setSlaveDataSource(DataSource slaveDataSource) {
        this.slaveDataSource = slaveDataSource;
    }

    /**
     * Chooses the slave data source for single-select chains, otherwise the
     * master. For PostgreSQL the slave is considered even inside a transaction;
     * for other databases only when no transaction is active -- TODO confirm
     * that asymmetry is intended.
     */
    protected DataSource selectDataSource(Transaction t, DataSource master, ConnCallback callback) {
        if (this.slaveDataSource == null)
            return master;
        if(meta.getType() == DB.PSQL){
            if (callback instanceof DaoInterceptorChain) {
                DaoInterceptorChain chain = (DaoInterceptorChain)callback;
                DaoStatement[] sts = chain.getDaoStatements();
                if (sts.length == 1 && (sts[0].isSelect() || sts[0].isForceExecQuery())) {
                    return slaveDataSource;
                }
            }
        }else {
            if (t == null && callback instanceof DaoInterceptorChain) {
                DaoInterceptorChain chain = (DaoInterceptorChain)callback;
                DaoStatement[] sts = chain.getDaoStatements();
                if (sts.length == 1 && (sts[0].isSelect() || sts[0].isForceExecQuery())) {
                    return slaveDataSource;
                }
            }
        }
        return master;
    }

    @Override
    public void setupProperties(NutMap conf) {
        supportSavePoint = conf.getBoolean("nutz.dao.jdbc.psql.supportSavePoint", true);
    }
}
package de.marza.firstspirit.modules.logging.console; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.runners.MockitoJUnitRunner; import java.awt.Color; import javax.swing.text.Document; import javax.swing.text.JTextComponent; import javax.swing.text.SimpleAttributeSet; import javax.swing.text.StyleConstants; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @RunWith(MockitoJUnitRunner.class) public class ConsoleOutputStreamTest { public static final String EOL = System.lineSeparator(); private ConsoleOutputStream testling; private SimpleAttributeSet attributes; @Mock private MessageConsole console; @Mock private Document document; @Mock private JTextComponent textComponent; @Before public void setUp() throws Exception { testling = new ConsoleOutputStream(console, Color.BLACK, null); when(console.getDocument()).thenReturn(document); when(console.getTextComponent()).thenReturn(textComponent); attributes = new SimpleAttributeSet(); StyleConstants.setForeground(attributes, Color.BLACK); } @Test public void flushAppend() throws Exception { when(console.isAppend()).thenReturn(true); testling.write(("Test 123" + EOL).getBytes()); testling.flush(); verify(document).insertString(0, "Test 123" + EOL, attributes); } @Test public void flushInsert() throws Exception { when(console.isAppend()).thenReturn(false); testling.write("Test 123".getBytes()); testling.flush(); testling.write(EOL.getBytes()); testling.flush(); verify(document).insertString(0, "Test 123" + EOL, attributes); } }
package com.cloudera.data.hdfs.util;

import java.io.File;

import org.apache.hadoop.fs.Path;

import com.google.common.base.Preconditions;

/**
 * Static helpers for converting Hadoop {@link Path} objects to local
 * {@link File} handles.
 */
public class Paths {

  /* Disallow instantiation. */
  private Paths() {
  }

  /**
   * Converts a Hadoop path into a {@link File} pointing at the path component
   * of its URI.
   *
   * @param path the Hadoop path; must not be null
   * @return a file for the URI's path component
   */
  public static File toFile(Path path) {
    Preconditions.checkArgument(path != null, "Path can not be null");
    String localPath = path.toUri().getPath();
    return new File(localPath);
  }
}
package net.finmath.time;

import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;

import net.finmath.time.businessdaycalendar.BusinessdayCalendar;
import net.finmath.time.businessdaycalendar.BusinessdayCalendarAny;
import net.finmath.time.businessdaycalendar.BusinessdayCalendarInterface;
import net.finmath.time.businessdaycalendar.BusinessdayCalendarInterface.DateRollConvention;
import net.finmath.time.daycount.DayCountConventionInterface;
import net.finmath.time.daycount.DayCountConvention_30E_360;
import net.finmath.time.daycount.DayCountConvention_30E_360_ISDA;
import net.finmath.time.daycount.DayCountConvention_30U_360;
import net.finmath.time.daycount.DayCountConvention_ACT_360;
import net.finmath.time.daycount.DayCountConvention_ACT_365;
import net.finmath.time.daycount.DayCountConvention_ACT_ACT_ISDA;

/**
 * Generates a schedule based on some meta data (frequency, maturity, date roll convention, etc.).
 * A schedule is just a collection of {@link net.finmath.time.Period}s.
 *
 * <ul>
 * 	<li>The period length is specified via {@link net.finmath.time.ScheduleGenerator.Frequency}.
 * 	<li>The schedule generation considers short periods via the specification of {@link net.finmath.time.ScheduleGenerator.DaycountConvention}.</li>
 * 	<li>The schedule may use an externally provided business day adjustment via an object implementing {@link net.finmath.time.businessdaycalendar.BusinessdayCalendarInterface}</li>
 * 	<li>You may specify fixing and payment adjustments.
 * </ul>
 *
 * @author Christian Fries
 * @date 02.03.2014
 */
public class ScheduleGenerator {

	/**
	 * Possible frequencies supported by {@link ScheduleGenerator}.
	 *
	 * @author Christian Fries
	 */
	public enum Frequency {
		/** Daily periods. **/
		DAILY,
		/** Weekly periods. **/
		WEEKLY,
		/** One months periods. **/
		MONTHLY,
		/** Three months periods. **/
		/* NOTE(review): "QUATERLY" is a typo for QUARTERLY, but renaming would break callers. */
		QUATERLY,
		/** Six months periods. **/
		SEMIANNUAL,
		/** Twelve months periods. **/
		ANNUAL,
		/** A single period, i.e., the period is as long as from start to maturity. **/
		TENOR
	}

	/**
	 * Possible day count conventions supported by {@link DaycountConvention}.
	 *
	 * @author Christian Fries
	 */
	public enum DaycountConvention {
		/** See {@link net.finmath.time.daycount.DayCountConvention_30E_360_ISDA }. **/
		E30_360_ISDA,
		/** See {@link net.finmath.time.daycount.DayCountConvention_30E_360 }. **/
		E30_360,
		/** See {@link net.finmath.time.daycount.DayCountConvention_30U_360 }. **/
		U30_360,
		/** See {@link net.finmath.time.daycount.DayCountConvention_ACT_360 }. **/
		ACT_360,
		/** See {@link net.finmath.time.daycount.DayCountConvention_ACT_365 }. **/
		ACT_365,
		/** See {@link net.finmath.time.daycount.DayCountConvention_ACT_ACT_ISDA }. **/
		ACT_ACT_ISDA,
		/* Treated identically to ACT_ACT_ISDA by the generator. */
		ACT_ACT;

		/**
		 * Maps a market-style string (e.g. "act/360", "30E/360 ISDA") to its enum
		 * constant, case-insensitively; unknown strings fall through to valueOf
		 * on the upper-cased input.
		 *
		 * @param string the day count convention name (must not be null)
		 * @return the matching constant
		 */
		public static DaycountConvention getEnum(String string) {
			if(string == null) throw new IllegalArgumentException();
			if(string.equalsIgnoreCase("30e/360 isda"))	return E30_360_ISDA;
			if(string.equalsIgnoreCase("e30/360 isda"))	return E30_360_ISDA;
			if(string.equalsIgnoreCase("30e/360"))		return E30_360;
			if(string.equalsIgnoreCase("e30/360"))		return E30_360;
			if(string.equalsIgnoreCase("30u/360"))		return U30_360;
			if(string.equalsIgnoreCase("u30/360"))		return U30_360;
			if(string.equalsIgnoreCase("act/360"))		return ACT_360;
			if(string.equalsIgnoreCase("act/365"))		return ACT_365;
			if(string.equalsIgnoreCase("act/act isda"))	return ACT_ACT_ISDA;
			if(string.equalsIgnoreCase("act/act"))		return ACT_ACT;
			return DaycountConvention.valueOf(string.toUpperCase());
		}
	}

	/**
	 * Possible stub period conventions supported.
	 *
	 * @author Christian Fries
	 */
	public enum ShortPeriodConvention {
		/** The first period will be shorter, if a regular period does not fit. **/
		FIRST,
		/** The last period will be shorter, if a regular period does not fit. **/
		LAST
	}

	/* Utility class: not instantiable. */
	private ScheduleGenerator() {
	}

	/**
	 * Schedule generation from meta data.
	 *
	 * Generates a schedule based on some meta data.
	 * <ul>
	 * 	<li>The schedule generation considers short stub periods at beginning or at the end.</li>
	 * 	<li>Date rolling is performed using the provided businessdayCalendar.</li>
	 * </ul>
	 *
	 * The reference date is used internally to represent all dates as doubles, i.e.
	 * t = 0 corresponds to the reference date.
	 *
	 * @param referenceDate The date which is used in the schedule to internally convert dates to doubles, i.e., the date where t=0.
	 * @param startDate The start date of the first period.
	 * @param maturity The end date of the last period.
	 * @param frequency The frequency.
	 * @param daycountConvention The daycount convention.
	 * @param shortPeriodConvention If short period exists, have it first or last.
	 * @param dateRollConvention Adjustment to be applied to the all dates.
	 * @param businessdayCalendar Businessday calendar (holiday calendar) to be used for date roll adjustment.
	 * @param fixingOffsetDays Number of days to be added to period start to get the fixing date.
	 * @param paymentOffsetDays Number of days to be added to period end to get the payment date.
* @return The corresponding schedule */ public static ScheduleInterface createScheduleFromConventions( Calendar referenceDate, Calendar startDate, Calendar maturity, Frequency frequency, DaycountConvention daycountConvention, ShortPeriodConvention shortPeriodConvention, DateRollConvention dateRollConvention, BusinessdayCalendarInterface businessdayCalendar, int fixingOffsetDays, int paymentOffsetDays ) { /* * Generate periods - note: we do not use any date roll convention */ ArrayList<Period> periods = new ArrayList<Period>(); DayCountConventionInterface daycountConventionObject = null; switch (daycountConvention) { case E30_360_ISDA: daycountConventionObject = new DayCountConvention_30E_360_ISDA(); break; case E30_360: daycountConventionObject = new DayCountConvention_30E_360(); break; case U30_360: daycountConventionObject = new DayCountConvention_30U_360(); break; case ACT_360: daycountConventionObject = new DayCountConvention_ACT_360(); break; case ACT_365: daycountConventionObject = new DayCountConvention_ACT_365(); break; case ACT_ACT_ISDA: case ACT_ACT: default: daycountConventionObject = new DayCountConvention_ACT_ACT_ISDA(); break; } int periodLengthDays = 0; int periodLengthWeeks = 0; int periodLengthMonth = 0; switch(frequency) { case DAILY: periodLengthDays = 1; break; case WEEKLY: periodLengthDays = 1; break; case MONTHLY: periodLengthMonth = 1; break; case QUATERLY: periodLengthMonth = 3; break; case SEMIANNUAL: periodLengthMonth = 6; break; case ANNUAL: default: periodLengthMonth = 12; break; case TENOR: periodLengthMonth = 100000; break; } if(shortPeriodConvention == ShortPeriodConvention.LAST) { /* * Going forward on periodStartDate, starting with startDate as periodStartDate */ Calendar periodStartDateUnadjusted = (Calendar)startDate.clone(); Calendar periodEndDateUnadjusted = (Calendar)startDate.clone(); Calendar periodStartDate = businessdayCalendar.getAdjustedDate(periodStartDateUnadjusted, dateRollConvention); 
while(periodStartDateUnadjusted.before(maturity)) { // Determine period end periodEndDateUnadjusted.add(Calendar.DAY_OF_YEAR, periodLengthDays); periodEndDateUnadjusted.add(Calendar.WEEK_OF_YEAR, periodLengthWeeks); periodEndDateUnadjusted.add(Calendar.MONTH, periodLengthMonth); if(periodEndDateUnadjusted.after(maturity)) periodEndDateUnadjusted = maturity; // Adjust period Calendar periodEndDate = businessdayCalendar.getAdjustedDate(periodEndDateUnadjusted, dateRollConvention); // Map to same hour (daylight savings may result in a modified hour). roundToSame(periodEndDate, startDate, Calendar.HOUR_OF_DAY); // Skip empty periods if(periodStartDate.compareTo(periodEndDate) == 0) continue; // Adjust fixing date Calendar fixingDate = (Calendar)periodStartDate.clone(); fixingDate.add(Calendar.DAY_OF_YEAR, fixingOffsetDays); fixingDate = businessdayCalendar.getAdjustedDate(fixingDate, dateRollConvention); // Adjust payment date Calendar paymentDate = (Calendar)periodEndDate.clone(); paymentDate.add(Calendar.DAY_OF_YEAR, paymentOffsetDays); paymentDate = businessdayCalendar.getAdjustedDate(paymentDate, dateRollConvention); // Create period periods.add(new Period(fixingDate, paymentDate, periodStartDate, periodEndDate)); periodStartDate = (Calendar)periodEndDate.clone(); periodStartDateUnadjusted = (Calendar)periodEndDateUnadjusted.clone(); } } else { /* * Going backward on periodEndDate, starting with maturity as periodEndDate */ Calendar periodStartDateUnadjusted = (Calendar)maturity.clone(); Calendar periodEndDateUnadjusted = (Calendar)maturity.clone(); Calendar periodEndDate = businessdayCalendar.getAdjustedDate(periodEndDateUnadjusted, dateRollConvention); while(periodEndDateUnadjusted.after(startDate)) { // Determine period start periodStartDateUnadjusted.add(Calendar.DAY_OF_YEAR, -periodLengthDays); periodStartDateUnadjusted.add(Calendar.WEEK_OF_YEAR, -periodLengthWeeks); periodStartDateUnadjusted.add(Calendar.MONTH, -periodLengthMonth); 
if(periodStartDateUnadjusted.before(startDate)) periodStartDateUnadjusted = startDate; // Adjust period Calendar periodStartDate = businessdayCalendar.getAdjustedDate(periodStartDateUnadjusted, dateRollConvention); // Map to same hour (daylight savings may result in a modified hour). roundToSame(periodStartDate, maturity, Calendar.HOUR_OF_DAY); // Skip empty periods if(periodStartDate.compareTo(periodEndDate) == 0) continue; // Roll fixing date Calendar fixingDate = (Calendar)periodStartDate.clone(); fixingDate = businessdayCalendar.getRolledDate(fixingDate, fixingOffsetDays); // TODO: There might be an additional calendar adjustment of the fixingDate, if the index has its own businessdayCalendar. // Roll payment date Calendar paymentDate = (Calendar)periodEndDate.clone(); paymentDate = businessdayCalendar.getRolledDate(paymentDate, paymentOffsetDays); // TODO: There might be an additional calendar adjustment of the paymentDate, if the index has its own businessdayCalendar. // Create period periods.add(0, new Period(fixingDate, paymentDate, periodStartDate, periodEndDate)); periodEndDate = (Calendar)periodStartDate.clone(); periodEndDateUnadjusted = (Calendar)periodStartDateUnadjusted.clone(); } } return new Schedule(referenceDate, periods, daycountConventionObject); } /** * Schedule generation from meta data. * * Generates a schedule based on some meta data. * <ul> * <li>The schedule generation considers short stub periods at beginning or at the end.</li> * <li>Date rolling is performed using the provided businessdayCalendar.</li> * </ul> * * The reference date is used internally to represent all dates as doubles. * * @param referenceDate The date which is used in the schedule to internally convert dates to doubles, i.e., the date where t=0. * @param startDate The start date of the first period (this may/should be an unadjusted date). * @param maturityDate The end date of the last period (this may/should be an unadjusted date). * @param frequency The frequency. 
* @param daycountConvention The daycount convention.
	 * @param shortPeriodConvention If short period exists, have it first or last.
	 * @param dateRollConvention Adjustment to be applied to the all dates.
	 * @param businessdayCalendar Businessday calendar (holiday calendar) to be used for date roll adjustment.
	 * @param fixingOffsetDays Number of days to be added to period start to get the fixing date.
	 * @param paymentOffsetDays Number of days to be added to period end to get the payment date.
	 * @return The corresponding schedule
	 */
	public static ScheduleInterface createScheduleFromConventions(
			Date referenceDate,
			Date startDate,
			Date maturityDate,
			String frequency,
			String daycountConvention,
			String shortPeriodConvention,
			String dateRollConvention,
			BusinessdayCalendarInterface businessdayCalendar,
			int	fixingOffsetDays,
			int	paymentOffsetDays
			)
	{
		// Convenience overload: converts java.util.Date arguments to Calendar and the
		// String conventions to their enums, then delegates to the Calendar-based variant.
		Calendar referenceDateAsCalendar = GregorianCalendar.getInstance();
		referenceDateAsCalendar.setTime(referenceDate);

		Calendar startDateAsCalendar = GregorianCalendar.getInstance();
		startDateAsCalendar.setTime(startDate);

		Calendar maturityDateAsCalendar = GregorianCalendar.getInstance();
		maturityDateAsCalendar.setTime(maturityDate);

		return createScheduleFromConventions(
				referenceDateAsCalendar,
				startDateAsCalendar,
				maturityDateAsCalendar,
				// Market codes may contain "/", which is not legal in an enum name.
				Frequency.valueOf(frequency.replace("/", "_").toUpperCase()),
				DaycountConvention.getEnum(daycountConvention),
				ShortPeriodConvention.valueOf(shortPeriodConvention.replace("/", "_").toUpperCase()),
				DateRollConvention.getEnum(dateRollConvention),
				businessdayCalendar,
				fixingOffsetDays,
				paymentOffsetDays
				);
	}

	/**
	 * Simple schedule generation.
	 *
	 * Generates a schedule based on some meta data. The schedule generation
	 * considers short periods. Date rolling is ignored.
	 *
	 * @param referenceDate The date which is used in the schedule to internally convert dates to doubles, i.e., the date where t=0.
	 * @param tradeDate Base date for the schedule generation (used to build spot date).
	 * @param spotOffsetDays Number of business days to be added to the trade date to obtain the spot date.
	 * @param startOffset The start date as an offset from the spotDate (build from tradeDate and spotOffsetDays) entered as a code like 1D, 1W, 1M, 2M, 3M, 1Y, etc.
	 * @param maturity The end date of the last period entered as a code like 1D, 1W, 1M, 2M, 3M, 1Y, etc.
	 * @param frequency The frequency.
	 * @param daycountConvention The day count convention.
	 * @param shortPeriodConvention If short period exists, have it first or last.
	 * @param dateRollConvention Adjustment to be applied to the all dates.
	 * @param businessdayCalendar Business day calendar (holiday calendar) to be used for date roll adjustment.
	 * @param fixingOffsetDays Number of business days to be added to period start to get the fixing date.
	 * @param paymentOffsetDays Number of business days to be added to period end to get the payment date.
	 * @return The corresponding schedule
	 */
	public static ScheduleInterface createScheduleFromConventions(
			Date referenceDate,
			Date tradeDate,
			int spotOffsetDays,
			String startOffset,
			String maturity,
			String frequency,
			String daycountConvention,
			String shortPeriodConvention,
			String dateRollConvention,
			BusinessdayCalendarInterface businessdayCalendar,
			int	fixingOffsetDays,
			int	paymentOffsetDays
			)
	{
		// Build the date chain: tradeDate --(roll spotOffsetDays business days)--> spot
		// date --(startOffset code)--> start date --(maturity code)--> maturity date.
		Calendar referenceDateAsCalendar = GregorianCalendar.getInstance();
		referenceDateAsCalendar.setTime(referenceDate);

		Calendar tradeDateAsCalendar = GregorianCalendar.getInstance();
		tradeDateAsCalendar.setTime(tradeDate);

		Calendar spotDateAsCalendar		= businessdayCalendar.getRolledDate(tradeDateAsCalendar, spotOffsetDays);
		Calendar startDateAsCalendar	= BusinessdayCalendar.createDateFromDateAndOffsetCode(spotDateAsCalendar, startOffset);
		Calendar maturityAsCalendar		= BusinessdayCalendar.createDateFromDateAndOffsetCode(startDateAsCalendar, maturity);

		return createScheduleFromConventions(
				referenceDateAsCalendar,
				startDateAsCalendar,
				maturityAsCalendar,
				Frequency.valueOf(frequency.replace("/", "_").toUpperCase()),
				DaycountConvention.getEnum(daycountConvention),
				ShortPeriodConvention.valueOf(shortPeriodConvention.replace("/", "_").toUpperCase()),
				DateRollConvention.getEnum(dateRollConvention),
				businessdayCalendar,
				fixingOffsetDays,
				paymentOffsetDays
				);
	}

	/**
	 * Simple schedule generation.
	 *
	 * Generates a schedule based on some meta data. The schedule generation
	 * considers short periods. Date rolling is ignored.
	 *
	 * @param referenceDate The date which is used in the schedule to internally convert dates to doubles, i.e., the date where t=0.
	 * @param spotOffsetDays Number of business days to be added to the reference date to obtain the spot date.
	 * @param startOffset The start date as an offset from the spotDate (build from referenceDate and spotOffsetDays) entered as a code like 1D, 1W, 1M, 2M, 3M, 1Y, etc.
	 * @param maturity The end date of the first period entered as a code like 1D, 1W, 1M, 2M, 3M, 1Y, etc.
	 * @param frequency The frequency.
	 * @param daycountConvention The day count convention.
	 * @param shortPeriodConvention If short period exists, have it first or last.
	 * @param dateRollConvention Adjustment to be applied to the all dates.
	 * @param businessdayCalendar Business day calendar (holiday calendar) to be used for date roll adjustment.
	 * @param fixingOffsetDays Number of business days to be added to period start to get the fixing date.
	 * @param paymentOffsetDays Number of business days to be added to period end to get the payment date.
* @return The corresponding schedule
	 */
	public static ScheduleInterface createScheduleFromConventions(
			Date referenceDate,
			int spotOffsetDays,
			String startOffset,
			String maturity,
			String frequency,
			String daycountConvention,
			String shortPeriodConvention,
			String dateRollConvention,
			BusinessdayCalendarInterface businessdayCalendar,
			int	fixingOffsetDays,
			int	paymentOffsetDays
			)
	{
		// Delegates with tradeDate := referenceDate.
		return createScheduleFromConventions(referenceDate, referenceDate, spotOffsetDays, startOffset, maturity, frequency, daycountConvention, shortPeriodConvention, dateRollConvention, businessdayCalendar, fixingOffsetDays, paymentOffsetDays);
	}

	/**
	 * Simple schedule generation.
	 *
	 * Generates a schedule based on some meta data. The schedule generation
	 * considers short periods. Date rolling is ignored.
	 *
	 * @param referenceDate The date which is used in the schedule to internally convert dates to doubles, i.e., the date where t=0.
	 * @param startOffset The start date as an offset from the referenceDate entered as a code like 1D, 1W, 1M, 2M, 3M, 1Y, etc.
	 * @param maturity The end date of the first period entered as a code like 1D, 1W, 1M, 2M, 3M, 1Y, etc.
	 * @param frequency The frequency.
	 * @param daycountConvention The day count convention.
	 * @param shortPeriodConvention If short period exists, have it first or last.
	 * @param dateRollConvention Adjustment to be applied to the all dates.
	 * @param businessdayCalendar Business day calendar (holiday calendar) to be used for date roll adjustment.
	 * @param fixingOffsetDays Number of business days to be added to period start to get the fixing date.
	 * @param paymentOffsetDays Number of business days to be added to period end to get the payment date.
	 * @return The corresponding schedule
	 */
	public static ScheduleInterface createScheduleFromConventions(
			Date referenceDate,
			String startOffset,
			String maturity,
			String frequency,
			String daycountConvention,
			String shortPeriodConvention,
			String dateRollConvention,
			BusinessdayCalendarInterface businessdayCalendar,
			int	fixingOffsetDays,
			int	paymentOffsetDays
			)
	{
		// Offsets are interpreted relative to the reference date itself.
		Calendar referenceDateAsCalendar = GregorianCalendar.getInstance();
		referenceDateAsCalendar.setTime(referenceDate);

		Calendar startDateAsCalendar	= BusinessdayCalendar.createDateFromDateAndOffsetCode(referenceDateAsCalendar, startOffset);
		Calendar maturityAsCalendar		= BusinessdayCalendar.createDateFromDateAndOffsetCode(startDateAsCalendar, maturity);

		return createScheduleFromConventions(
				referenceDateAsCalendar,
				startDateAsCalendar,
				maturityAsCalendar,
				Frequency.valueOf(frequency.replace("/", "_").toUpperCase()),
				DaycountConvention.getEnum(daycountConvention),
				ShortPeriodConvention.valueOf(shortPeriodConvention.replace("/", "_").toUpperCase()),
				DateRollConvention.getEnum(dateRollConvention),
				businessdayCalendar,
				fixingOffsetDays,
				paymentOffsetDays
				);
	}

	/**
	 * Generates a schedule based on some meta data. The schedule generation
	 * considers short periods.
	 *
	 * @param referenceDate The date which is used in the schedule to internally convert dates to doubles, i.e., the date where t=0.
	 * @param startDate The start date of the first period.
	 * @param frequency The frequency.
	 * @param maturity The end date of the first period.
	 * @param daycountConvention The daycount convention.
	 * @param shortPeriodConvention If short period exists, have it first or last.
	 * @param dateRollConvention Adjustment to be applied to the all dates.
	 * @param businessdayCalendar Businessday calendar (holiday calendar) to be used for date roll adjustment.
	 * @param fixingOffsetDays Number of days to be added to period start to get the fixing date.
	 * @param paymentOffsetDays Number of days to be added to period end to get the payment date.
	 * @return The corresponding schedule
	 */
	public static ScheduleInterface createScheduleFromConventions(
			Date referenceDate,
			Date startDate,
			String frequency,
			double maturity,
			String daycountConvention,
			String shortPeriodConvention,
			String dateRollConvention,
			BusinessdayCalendarInterface businessdayCalendar,
			int	fixingOffsetDays,
			int	paymentOffsetDays
			)
	{
		Calendar referenceDateAsCalendar = GregorianCalendar.getInstance();
		referenceDateAsCalendar.setTime(referenceDate);

		Calendar startDateAsCalendar = GregorianCalendar.getInstance();
		startDateAsCalendar.setTime(startDate);

		// Maturity given as a 30/360-style year fraction from the start date.
		Calendar maturityAsCalendar = createDateFromDateAndOffset(startDateAsCalendar, maturity);

		return createScheduleFromConventions(
				referenceDateAsCalendar,
				startDateAsCalendar,
				maturityAsCalendar,
				// NOTE(review): unlike the overloads above, this one does not replace
				// "/" with "_" before valueOf -- confirm callers never pass "/"-style codes.
				Frequency.valueOf(frequency.toUpperCase()),
				DaycountConvention.getEnum(daycountConvention),
				ShortPeriodConvention.valueOf(shortPeriodConvention.toUpperCase()),
				DateRollConvention.getEnum(dateRollConvention),
				businessdayCalendar,
				fixingOffsetDays,
				paymentOffsetDays
				);
	}

	/**
	 * Generates a schedule based on some meta data. The schedule generation
	 * considers short periods. Date rolling is ignored.
	 *
	 * @param referenceDate The date which is used in the schedule to internally convert dates to doubles, i.e., the date where t=0.
	 * @param startDate The start date of the first period.
	 * @param frequency The frequency.
	 * @param maturity The end date of the first period.
	 * @param daycountConvention The daycount convention.
	 * @param shortPeriodConvention If short period exists, have it first or last.
* @return The corresponding schedule
	 */
	public static ScheduleInterface createScheduleFromConventions(
			Date referenceDate,
			Date startDate,
			String frequency,
			double maturity,
			String daycountConvention,
			String shortPeriodConvention
			)
	{
		// Delegates using no date roll adjustment and no fixing/payment offsets.
		return createScheduleFromConventions(
				referenceDate,
				startDate,
				frequency,
				maturity,
				daycountConvention,
				shortPeriodConvention,
				"UNADJUSTED",
				new BusinessdayCalendarAny(),
				0, 0);
	}

	/**
	 * Create a new date by "adding" a year fraction to the start date.
	 * The year fraction is interpreted in a 30/360 way. More specifically,
	 * every integer unit advances by a year, each remaining fraction of 12
	 * advances by a month and each remaining fraction of 30 advances a day.
	 *
	 * The function may be used to ease the creation of maturities in spreadsheets.
	 *
	 * @param baseDate The start date.
	 * @param offsetYearFrac The year fraction in 30/360 to be used for adding to the start date.
	 * @return A date corresponding the maturity.
	 */
	private static Calendar createDateFromDateAndOffset(Calendar baseDate, double offsetYearFrac) {
		// Years
		Calendar maturity = (Calendar)baseDate.clone();
		maturity.add(Calendar.YEAR, (int)offsetYearFrac);

		// Months
		offsetYearFrac = (offsetYearFrac - (int)offsetYearFrac) * 12;
		maturity.add(Calendar.MONTH, (int)offsetYearFrac);

		// Days (rounded, so e.g. 0.5 months of remainder lands on the nearest day)
		offsetYearFrac = (offsetYearFrac - (int)offsetYearFrac) * 30;
		maturity.add(Calendar.DAY_OF_YEAR, (int)Math.round(offsetYearFrac));

		// Adjust hour to be the same (may differ by one due to daylight savings)
		roundToSame(maturity, baseDate, Calendar.HOUR_OF_DAY);

		return maturity;
	}

	/**
	 * Shifts {@code date} by the exact difference of the given calendar field so
	 * that its field value matches {@code referenceDate}'s. Used to undo the one
	 * hour drift introduced by daylight saving transitions.
	 *
	 * BUGFIX: the previous branchy implementation contained a dead condition
	 * ("difference &lt; 0 &amp;&amp; difference &gt;= half" can never hold for a
	 * non-negative half range) and adjusted in the wrong direction for
	 * differences larger than half the field range. Subtracting the signed
	 * difference is equivalent for the +/-1 hour DST case and correct for every
	 * other difference.
	 *
	 * @param date The calendar to adjust in place.
	 * @param referenceDate The calendar whose field value should be matched.
	 * @param field The calendar field (e.g. {@link Calendar#HOUR_OF_DAY}).
	 */
	private static void roundToSame(Calendar date, Calendar referenceDate, int field) {
		int difference = date.get(field) - referenceDate.get(field);
		date.add(field, -difference);
	}
}
package agilec.ikeaswipe;

import java.io.File;

import android.os.Bundle;
import android.util.Log;
import android.view.View;

import com.metaio.sdk.ARViewActivity;
import com.metaio.sdk.MetaioDebug;
import com.metaio.sdk.jni.IGeometry;
import com.metaio.sdk.jni.IMetaioSDKCallback;
import com.metaio.tools.io.AssetsManager;

/*
  Camera environment for Metaio edge based tracking with 3D models
  @author @antonosterblad
  @linneamalcherek
*/
public class ArFindAllActivity extends ARViewActivity {
  // 3D model rendered on the tracked object.
  private IGeometry mRimModel = null;
  // Edge-visualization aid (same asset, tracked in a separate coordinate system).
  private IGeometry mVizAidModel = null;
  // Receives Metaio SDK lifecycle callbacks; created in onCreate, released in onDestroy.
  private MetaioSDKCallbackHandler mCallbackHandler;

  @Override
  public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    mCallbackHandler = new MetaioSDKCallbackHandler();
  }

  @Override
  protected void onDestroy() {
    super.onDestroy();
    // Release the native callback wrapper and drop the reference.
    mCallbackHandler.delete();
    mCallbackHandler = null;
  }

  @Override
  protected IMetaioSDKCallback getMetaioSDKCallbackHandler() {
    return mCallbackHandler;
  }

  /** Close this activity (wired to a button in the layout). */
  public void onButtonClick(View v) {
    finish();
  }

  /** Ask the SDK to restart tracking. */
  public void onResetButtonClick(View v) {
    metaioSDK.sensorCommand("reset");
  }

  @Override
  protected void loadContents() {
    // Both geometries load the same asset; they differ only in coordinate system id.
    mRimModel = loadModel("custom/stolsida.obj");
    mVizAidModel = loadModel("custom/stolsida.obj");

    if (mRimModel != null) {
      mRimModel.setCoordinateSystemID(1);
    }
    if (mVizAidModel != null) {
      mVizAidModel.setCoordinateSystemID(2);
    }

    // Tracking.xml defines how the model is tracked.
    setTrackingConfiguration("custom/rim_tracking/Tracking.xml");
  }

  final class MetaioSDKCallbackHandler extends IMetaioSDKCallback {
    @Override
    public void onSDKReady() {
      // Reveal the GUI once the SDK is ready; view changes must happen on the UI thread.
      runOnUiThread(new Runnable() {
        @Override
        public void run() {
          mGUIView.setVisibility(View.VISIBLE);
        }
      });
    }
  }

  /**
   * Load a 3D model from the application assets.
   *
   * @param path asset-relative path of the model file
   * @return the created geometry, or null if loading failed
   */
  private IGeometry loadModel(final String path) {
    IGeometry loaded = null;
    try {
      AssetsManager.extractAllAssets(this, true);
      final File modelPath = AssetsManager.getAssetPathAsFile(getApplicationContext(), path);
      loaded = metaioSDK.createGeometry(modelPath);
      MetaioDebug.log("Loaded geometry "+modelPath);
    } catch (Exception e) {
      MetaioDebug.log(Log.ERROR, "Error loading geometry: "+e.getMessage());
    }
    return loaded;
  }

  /**
   * Apply the tracking configuration that defines how the 3D model is tracked.
   *
   * @param path asset-relative path of the tracking configuration XML
   * @return true if the configuration was applied successfully
   */
  private boolean setTrackingConfiguration(final String path) {
    boolean applied = false;
    try {
      final File xmlPath = AssetsManager.getAssetPathAsFile(getApplicationContext(), path);
      applied = metaioSDK.setTrackingConfiguration(xmlPath);
      MetaioDebug.log("Loaded tracking configuration "+xmlPath);
    } catch (Exception e) {
      MetaioDebug.log(Log.ERROR, "Error loading tracking configuration: "+ path + " " +e.getMessage());
    }
    return applied;
  }

  @Override
  protected int getGUILayout() {
    return R.layout.activity_ar_view_find_all;
  }

  @Override
  protected void onGeometryTouched(IGeometry geometry) {
    // No touch interaction in this activity.
  }
}
package com.hazelcast.stabilizer.tests.queue; import com.hazelcast.core.HazelcastInstance; import com.hazelcast.core.HazelcastInstanceNotActiveException; import com.hazelcast.core.IList; import com.hazelcast.core.ILock; import com.hazelcast.core.IQueue; import com.hazelcast.core.TransactionalQueue; import com.hazelcast.spi.exception.TargetDisconnectedException; import com.hazelcast.stabilizer.tests.TestContext; import com.hazelcast.stabilizer.tests.TestRunner; import com.hazelcast.stabilizer.tests.annotations.Run; import com.hazelcast.stabilizer.tests.annotations.Setup; import com.hazelcast.stabilizer.tests.annotations.Teardown; import com.hazelcast.stabilizer.tests.annotations.Verify; import com.hazelcast.stabilizer.tests.queue.helpers.TxnCounter; import com.hazelcast.stabilizer.tests.utils.ThreadSpawner; import com.hazelcast.transaction.TransactionContext; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; /** * This stabilizer test simulates the issue #2287 */ public class TxnQueueWithLockTest { public String basename = this.getClass().getName(); public int threadCount = 5; private HazelcastInstance instance=null; private TestContext testContext = null; @Setup public void setup(TestContext testContext) throws Exception { this.testContext = testContext; this.instance = testContext.getTargetInstance(); } @Run public void run() { ThreadSpawner spawner = new ThreadSpawner(testContext.getTestId()); for (int k = 0; k < threadCount; k++) { spawner.spawn(new Worker()); } spawner.awaitCompletion(); } private class Worker implements Runnable { private TxnCounter counter = new TxnCounter(); @Override public void run() { while (!testContext.isStopped()) { try{ ILock firstLock = instance.getLock(basename +"l1"); firstLock.lock(); TransactionContext ctx = instance.newTransactionContext(); ctx.beginTransaction(); try { TransactionalQueue<Integer> queue = ctx.getQueue(basename +"q"); queue.offer(1); ILock secondLock = 
instance.getLock(basename +"l2"); secondLock.lock(); secondLock.unlock(); ctx.commitTransaction(); counter.committed++; } catch (Exception e) { ctx.rollbackTransaction(); counter.rolled++; System.out.println(basename+": ThreadLocal txn No. "+ counter.committed+1+" ThreadLocal roles ="+counter.rolled); System.out.println(basename+": "+e); } finally { firstLock.unlock(); } }catch(TargetDisconnectedException e){ System.out.println(e); }catch(HazelcastInstanceNotActiveException e){ System.out.println(e); } } IList<TxnCounter> results = instance.getList(basename +"results"); results.add(counter); } } @Verify(global = true) public void verify() { IQueue queue = instance.getQueue(basename +"q"); ILock firstLock = instance.getLock(basename +"l1"); ILock secondLock = instance.getLock(basename +"l2"); IList<TxnCounter> results = instance.getList(basename +"results"); TxnCounter total = new TxnCounter(); for(TxnCounter counter : results){ total.add(counter); } System.out.println(basename +": "+ total+" from "+results.size()); assertFalse(firstLock.isLocked()); assertFalse(secondLock.isLocked()); assertEquals(total.committed - total.rolled, queue.size()); } public static void main(String[] args) throws Throwable { TxnQueueWithLockTest test = new TxnQueueWithLockTest(); new TestRunner(test).run(); } }
package com.contentful.java.cda; import com.contentful.java.cda.interceptor.AuthorizationHeaderInterceptor; import com.contentful.java.cda.interceptor.ContentfulUserAgentHeaderInterceptor; import com.contentful.java.cda.interceptor.ContentfulUserAgentHeaderInterceptor.Section; import com.contentful.java.cda.interceptor.ContentfulUserAgentHeaderInterceptor.Section.OperatingSystem; import com.contentful.java.cda.interceptor.ContentfulUserAgentHeaderInterceptor.Section.Version; import com.contentful.java.cda.interceptor.ErrorInterceptor; import com.contentful.java.cda.interceptor.LogInterceptor; import com.contentful.java.cda.interceptor.UserAgentHeaderInterceptor; import java.util.HashMap; import java.util.Map; import java.util.Properties; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.Executor; import io.reactivex.Flowable; import io.reactivex.functions.Function; import okhttp3.Call; import okhttp3.OkHttpClient; import retrofit2.Response; import retrofit2.Retrofit; import retrofit2.adapter.rxjava2.RxJava2CallAdapterFactory; import retrofit2.converter.gson.GsonConverterFactory; import static com.contentful.java.cda.Constants.ENDPOINT_PROD; import static com.contentful.java.cda.Constants.PATH_CONTENT_TYPES; import static com.contentful.java.cda.Util.checkNotNull; import static com.contentful.java.cda.Util.getProperty; import static com.contentful.java.cda.interceptor.ContentfulUserAgentHeaderInterceptor.Section.os; import static com.contentful.java.cda.interceptor.ContentfulUserAgentHeaderInterceptor.Section.platform; import static com.contentful.java.cda.interceptor.ContentfulUserAgentHeaderInterceptor.Section.sdk; /** * Client to be used when requesting information from the Delivery API. Every client is associated * with exactly one Space, but there is no limit to the concurrent number of clients existing at * any one time. Avoid creating multiple clients for the same Space. Use {@link #builder()} * to create a new client instance. 
*/
public class CDAClient {
  // Identifier of the space this client is bound to.
  final String spaceId;
  // Access (authorization) token; may be null if a custom call factory is supplied.
  final String token;
  // Retrofit-generated service performing the actual HTTP calls.
  final CDAService service;
  // In-memory cache for the space and its content types.
  final Cache cache;
  // Executor used to deliver callback results.
  final Executor callbackExecutor;
  // True if this client talks to the Preview API instead of the Delivery API.
  final boolean preview;

  private CDAClient(Builder builder) {
    this(new Cache(), Platform.get().callbackExecutor(), createService(builder), builder);
    // NOTE(review): validation runs after createService(builder) already used the
    // builder — a missing token surfaces here, after service construction.
    validate(builder);
  }

  CDAClient(Cache cache, Executor executor, CDAService service, Builder builder) {
    this.cache = cache;
    this.callbackExecutor = executor;
    this.service = service;
    this.spaceId = builder.space;
    this.token = builder.token;
    this.preview = builder.preview;
  }

  // Fails fast on an unusable configuration: space id is mandatory; a token is
  // mandatory only when no custom call factory provides authorization itself.
  private void validate(Builder builder) {
    checkNotNull(builder.space, "Space ID must be provided.");
    if (builder.callFactory == null) {
      checkNotNull(builder.token, "A token must be provided, if no call factory is specified.");
    }
  }

  // Builds the Retrofit-backed service, defaulting the endpoint to production.
  private static CDAService createService(Builder clientBuilder) {
    String endpoint = clientBuilder.endpoint;
    if (endpoint == null) {
      endpoint = ENDPOINT_PROD;
    }

    Retrofit.Builder retrofitBuilder = new Retrofit.Builder()
        .addConverterFactory(GsonConverterFactory.create(ResourceFactory.GSON))
        .addCallAdapterFactory(RxJava2CallAdapterFactory.create())
        .callFactory(clientBuilder.createOrGetCallFactory(clientBuilder))
        .baseUrl(endpoint);

    return retrofitBuilder.build().create(CDAService.class);
  }

  /**
   * Returns a {@link FetchQuery} for a given {@code type}, which can be used to fulfill the
   * request synchronously or asynchronously when a callback is provided.
   *
   * @param type resource type.
   * @param <T> resource type.
   * @return query instance.
   */
  public <T extends CDAResource> FetchQuery<T> fetch(Class<T> type) {
    return new FetchQuery<T>(type, this);
  }

  /**
   * Returns an {@link ObserveQuery} for a given {@code type}, which can be used to return
   * an {@link Flowable} that fetches the desired resources.
   *
   * @param type resource type.
   * @param <T> resource type.
   * @return query instance.
   */
  public <T extends CDAResource> ObserveQuery<T> observe(Class<T> type) {
    return new ObserveQuery<T>(type, this);
  }

  /**
   * Populate the content type cache with _all_ available content types.
   * <p>
   * This method will run through all the content types, saving them in the process and also takes
   * care of paging.
   * <p>
   * This method is synchronous.
   *
   * @return the number of content types cached.
   */
  public int populateContentTypeCache() {
    return observeContentTypeCachePopulation().blockingFirst();
  }

  /**
   * Synchronous variant of {@link #observeContentTypeCachePopulation(int)} with a custom
   * page size.
   *
   * @param limit number of content types per page; must be in (0, 1000].
   * @return the number of content types cached.
   * @throws IllegalArgumentException if {@code limit} is out of range.
   */
  public int populateContentTypeCache(int limit) {
    // NOTE(review): "more then" should read "more than"; same message below.
    // NOTE(review): this validation is duplicated in observeContentTypeCachePopulation(int),
    // which this method delegates to.
    if (limit > 1000) {
      throw new IllegalArgumentException("Content types per page limit cannot be more then 1000.");
    }
    if (limit <= 0) {
      throw new IllegalArgumentException("Content types per page limit cannot be "
          + "less or equal to 0.");
    }
    return observeContentTypeCachePopulation(limit).blockingFirst();
  }

  /**
   * Populate the content type cache with _all_ available content types.
   * <p>
   * This method will run through all the content types, saving them in the process and also takes
   * care of paging.
   * <p>
   * This method is asynchronous and needs to be subscribed to.
   *
   * @return the flowable representing the asynchronous call.
   */
  public Flowable<Integer> observeContentTypeCachePopulation() {
    // 1000 is the maximum page size accepted by the validation below.
    return observeContentTypeCachePopulation(1000);
  }

  /**
   * Asynchronously populate the content type cache, fetching {@code limit} content types
   * per page and following pages until all are cached.
   *
   * @param limit number of content types per page; must be in (0, 1000].
   * @return flowable emitting the total number of content types cached.
   * @throws IllegalArgumentException if {@code limit} is out of range.
   */
  public Flowable<Integer> observeContentTypeCachePopulation(final int limit) {
    if (limit > 1000) {
      throw new IllegalArgumentException("Content types per page limit cannot be more then 1000.");
    }
    if (limit <= 0) {
      throw new IllegalArgumentException("Content types per page limit cannot be "
          + "less or equal to 0.");
    }
    return observe(CDAContentType.class)
        .orderBy("sys.id")
        .limit(limit)
        .all()
        .map(
            // First stage: recursively pull all remaining pages into one array.
            new Function<CDAArray, CDAArray>() {
              @Override public CDAArray apply(CDAArray array) throws Exception {
                if (array.skip() + array.limit() < array.total()) {
                  return nextPage(array);
                } else {
                  return array;
                }
              }

              // Fetches the next page and merges it into the accumulating array.
              // Maps through `this` so paging continues until the last page.
              private CDAArray nextPage(CDAArray array) {
                final CDAArray nextArray = observe(CDAContentType.class)
                    .orderBy("sys.id")
                    .limit(limit)
                    .skip(array.skip + limit)
                    .all()
                    .map(this)
                    .blockingFirst();

                array.skip = nextArray.skip;
                array.items.addAll(nextArray.items);
                array.assets.putAll(nextArray.assets);
                array.entries.putAll(nextArray.entries);

                return array;
              }
            }
        )
        .map(
            // Second stage: store every content type in the cache and emit the total.
            new Function<CDAArray, Integer>() {
              @Override public Integer apply(CDAArray array) throws Exception {
                for (CDAResource resource : array.items) {
                  if (resource instanceof CDAContentType) {
                    cache.types().put(resource.id(), (CDAContentType) resource);
                  } else {
                    throw new IllegalStateException(
                        "Requesting a list of content types should not return "
                            + "any other type.");
                  }
                }

                return array.total;
              }
            }
        );
  }

  /**
   * Returns a {@link SyncQuery} for initial synchronization via the Sync API.
   *
   * @return query instance.
   */
  public SyncQuery sync() {
    return sync(null, null);
  }

  /**
   * Returns a {@link SyncQuery} for synchronization with the provided {@code syncToken} via
   * the Sync API.
   * <p>
   * If called from a {@link #preview} client, this will always do an initial sync.
   *
   * @param syncToken sync token.
   * @return query instance.
   */
  public SyncQuery sync(String syncToken) {
    return sync(syncToken, null);
  }

  /**
   * Returns a {@link SyncQuery} for synchronization with an existing space.
   * <p>
   * If called from a {@link #preview} client, this will always do an initial sync.
   *
   * @param synchronizedSpace space to sync.
   * @return query instance.
   */
  public SyncQuery sync(SynchronizedSpace synchronizedSpace) {
    return sync(null, synchronizedSpace);
  }

  private SyncQuery sync(String syncToken, SynchronizedSpace synchronizedSpace) {
    // Preview API has no delta sync: force an initial sync by dropping both inputs.
    if (preview) {
      syncToken = null;
      synchronizedSpace = null;
    }

    SyncQuery.Builder builder = SyncQuery.builder().setClient(this);
    if (synchronizedSpace != null) {
      builder.setSpace(synchronizedSpace);
    }
    if (syncToken != null) {
      builder.setSyncToken(syncToken);
    }
    return builder.build();
  }

  /**
   * @return the space for this client (synchronously).
   */
  public CDASpace fetchSpace() {
    return observeSpace().blockingFirst();
  }

  /**
   * Asynchronously fetch the space.
   *
   * @param <C> the type of the callback to be used.
   * @param callback the value of the callback to be called back.
   * @return the space for this client (asynchronously).
   */
  @SuppressWarnings("unchecked")
  public <C extends CDACallback<CDASpace>> C fetchSpace(C callback) {
    return (C) Callbacks.subscribeAsync(observeSpace(), callback, this);
  }

  /**
   * @return an {@link Flowable} that fetches the space for this client.
   */
  public Flowable<CDASpace> observeSpace() {
    // Always bypasses the cached space (invalidate = true).
    return cacheSpace(true);
  }

  /**
   * Caching
   */
  // Fills the space and content-type caches (in that order) and emits the cache itself.
  Flowable<Cache> cacheAll(final boolean invalidate) {
    return cacheSpace(invalidate)
        .flatMap(new Function<CDASpace, Flowable<Map<String, CDAContentType>>>() {
          @Override public Flowable<Map<String, CDAContentType>> apply(CDASpace cdaSpace) {
            return cacheTypes(invalidate);
          }
        })
        .map(new Function<Map<String, CDAContentType>, Cache>() {
          @Override public Cache apply(Map<String, CDAContentType> stringCDAContentTypeMap) {
            return cache;
          }
        });
  }

  // Emits the cached space, or fetches and caches it when absent or invalidated.
  Flowable<CDASpace> cacheSpace(boolean invalidate) {
    CDASpace space = invalidate ? null : cache.space();
    if (space == null) {
      return service.space(spaceId).map(new Function<Response<CDASpace>, CDASpace>() {
        @Override public CDASpace apply(Response<CDASpace> response) {
          CDASpace space = ResourceFactory.space(response);
          cache.setSpace(space);
          return space;
        }
      });
    }
    return Flowable.just(space);
  }

  // Emits the cached content types, or fetches one page and caches it when absent
  // or invalidated. NOTE(review): unlike observeContentTypeCachePopulation, this
  // fetches a single (default-sized) page — it does not follow paging.
  Flowable<Map<String, CDAContentType>> cacheTypes(boolean invalidate) {
    Map<String, CDAContentType> types = invalidate ? null : cache.types();
    if (types == null) {
      return service.array(spaceId, PATH_CONTENT_TYPES, new HashMap<String, String>()).map(
          new Function<Response<CDAArray>, Map<String, CDAContentType>>() {
            @Override public Map<String, CDAContentType> apply(Response<CDAArray> arrayResponse) {
              CDAArray array = ResourceFactory.array(arrayResponse, CDAClient.this);
              Map<String, CDAContentType> tmp = new ConcurrentHashMap<String, CDAContentType>();
              for (CDAResource resource : array.items()) {
                tmp.put(resource.id(), (CDAContentType) resource);
              }
              cache.setTypes(tmp);
              return tmp;
            }
          });
    }
    return Flowable.just(types);
  }

  // Emits the cached content type with the given id, fetching and caching it on a miss.
  Flowable<CDAContentType> cacheTypeWithId(String id) {
    CDAContentType contentType = cache.types().get(id);
    if (contentType == null) {
      return observe(CDAContentType.class).one(id).map(new Function<CDAContentType, CDAContentType>() {
        @Override public CDAContentType apply(CDAContentType resource) {
          if (resource != null) {
            cache.types().put(resource.id(), resource);
          }
          return resource;
        }
      });
    }
    return Flowable.just(contentType);
  }

  /**
   * Clear the java internal cache.
   *
   * @return this client for chaining.
   */
  public CDAClient clearCache() {
    cache.clear();
    return this;
  }

  // Builds the legacy User-Agent header value from SDK and JVM/OS properties.
  static String createUserAgent() {
    final Properties properties = System.getProperties();
    return String.format("contentful.java/%s(%s %s) %s/%s",
        getProperty("version.name"),
        properties.getProperty("java.runtime.name"),
        properties.getProperty("java.runtime.version"),
        properties.getProperty("os.name"),
        properties.getProperty("os.version")
    );
  }

  // Builds the structured X-Contentful-User-Agent sections (sdk, platform, os,
  // plus optional app and integration sections supplied by the builder).
  static Section[] createCustomHeaderSections(Section application, Section integration) {
    final Properties properties = System.getProperties();
    final Platform platform = Platform.get();
    return new Section[]{
        sdk("contentful.java", Version.parse(getProperty("version.name"))),
        platform(
            "java",
            Version.parse(properties.getProperty("java.runtime.version"))
        ),
        os(
            OperatingSystem.parse(platform.name()),
            Version.parse(platform.version())
        ),
        application,
        integration
    };
  }

  /**
   * @return a {@link CDAClient} builder.
   */
  public static Builder builder() {
    return new Builder();
  }

  /**
   * This builder will be used to configure and then create a {@link CDAClient}.
   */
  public static class Builder {
    String space;
    String token;
    String endpoint;

    Logger logger;
    Logger.Level logLevel = Logger.Level.NONE;

    Call.Factory callFactory;
    boolean preview;
    boolean useTLS12;

    Section application;
    Section integration;

    // Instances are created through CDAClient.builder().
    private Builder() {
    }

    /**
     * Sets the space ID.
     *
     * @param space the space id to be set.
     * @return this builder for chaining.
     */
    public Builder setSpace(String space) {
      this.space = space;
      return this;
    }

    /**
     * Sets the space access token.
     *
     * @param token the access token, sometimes called authorization token.
     * @return this builder for chaining.
     */
    public Builder setToken(String token) {
      this.token = token;
      return this;
    }

    /**
     * Overrides the API endpoint (defaults to the production Delivery API).
     *
     * @param endpoint base url to use.
     * @return this builder for chaining.
     */
    public Builder setEndpoint(String endpoint) {
      this.endpoint = endpoint;
      return this;
    }

    /**
     * Sets a custom logger level.
     * <p>
     * If set to {@link Logger.Level}.NONE any custom logger will get ignored.
     *
     * @param logLevel the amount/level of logging to be used.
     * @return this builder for chaining.
     */
    public Builder setLogLevel(Logger.Level logLevel) {
      this.logLevel = logLevel;
      return this;
    }

    /**
     * Sets a custom logger.
     *
     * @param logger the logger to be set.
     * @return this builder for chaining.
     */
    public Builder setLogger(Logger logger) {
      this.logger = logger;
      return this;
    }

    /**
     * Sets the endpoint to point the Preview API.
     *
     * @return this builder for chaining.
     */
    public Builder preview() {
      preview = true;
      return this.setEndpoint(Constants.ENDPOINT_PREVIEW);
    }

    /**
     * Sets a custom HTTP call factory.
     *
     * @param callFactory the factory to be used to create a call.
     * @return this builder for chaining.
     */
    public Builder setCallFactory(Call.Factory callFactory) {
      this.callFactory = callFactory;
      return this;
    }

    // Returns the user-supplied call factory, or builds the default one.
    private Call.Factory createOrGetCallFactory(Builder clientBuilder) {
      final Call.Factory callFactory;
      if (clientBuilder.callFactory == null) {
        callFactory = defaultCallFactoryBuilder().build();
      } else {
        callFactory = clientBuilder.callFactory;
      }
      return callFactory;
    }

    // Wires the configured logger into the OkHttp builder: BASIC logs via an
    // application interceptor, FULL via a network interceptor. A non-NONE level
    // without a logger is a configuration error.
    private OkHttpClient.Builder setLogger(OkHttpClient.Builder okBuilder) {
      if (logger != null) {
        switch (logLevel) {
          case NONE:
            break;
          case BASIC:
            return okBuilder.addInterceptor(new LogInterceptor(logger));
          case FULL:
            return okBuilder.addNetworkInterceptor(new LogInterceptor(logger));
        }
      } else {
        if (logLevel != Logger.Level.NONE) {
          throw new IllegalArgumentException("Cannot log to a null logger. Please set either logLevel to None, or do set a Logger");
        }
      }
      return okBuilder;
    }

    // Installs a TLS 1.2 socket factory when requested via useTLS12().
    private OkHttpClient.Builder useTLS12IfWanted(OkHttpClient.Builder okBuilder) {
      if (useTLS12) {
        try {
          okBuilder.sslSocketFactory(new TLSSocketFactory());
        } catch (Exception e) {
          throw new IllegalArgumentException("Cannot create TLSSocketFactory for TLS 1.2", e);
        }
      }
      return okBuilder;
    }

    /**
     * Returns the default Call.Factory.Builder used throughout this SDK.
     * <p>
     * Please use this method last in the building step, since changing settings as in the
     * {@link #token} or others afterwards will not be reflected by this factory.
     * <p>
     * This might be useful if you want to augment the default client, without needing to rely on
     * replicating the current sdk behaviour.
     *
     * @return A {@link Call.Factory} used through out SDK, as if no custom call factory was used.
     */
    public OkHttpClient.Builder defaultCallFactoryBuilder() {
      final Section[] sections = createCustomHeaderSections(application, integration);

      OkHttpClient.Builder okBuilder = new OkHttpClient.Builder()
          .addInterceptor(new AuthorizationHeaderInterceptor(token))
          .addInterceptor(new UserAgentHeaderInterceptor(createUserAgent()))
          .addInterceptor(new ContentfulUserAgentHeaderInterceptor(sections))
          .addInterceptor(new ErrorInterceptor());

      setLogger(okBuilder);
      useTLS12IfWanted(okBuilder);

      return okBuilder;
    }

    /**
     * Forces TLS 1.2 for all connections made by the default call factory.
     *
     * @return this builder for chaining.
     */
    public Builder useTLS12() {
      this.useTLS12 = true;
      return this;
    }

    /**
     * Tell the client which application this is.
     * <p>
     * It might be used for internal tracking of Contentfuls tools.
     *
     * @param name the name of the app.
     * @param version the version in semver of the app.
     * @return this builder for chaining.
     */
    public Builder setApplication(String name, String version) {
      this.application = Section.app(name, Version.parse(version));
      return this;
    }

    /**
     * Set the name of the integration.
     * <p>
     * This custom user agent header will be used for libraries build on top of this library.
     *
     * @param name of the integration.
     * @param version version of the integration.
     * @return this builder for chaining.
     */
    public Builder setIntegration(String name, String version) {
      this.integration = Section.integration(name, Version.parse(version));
      return this;
    }

    /**
     * Create CDAClient, using the specified configuration options.
     *
     * @return a build CDAClient.
     */
    public CDAClient build() {
      return new CDAClient(this);
    }
  }
}
package org.usfirst.frc.team236.robot;

/**
 * The RobotMap is a mapping from the ports sensors and actuators are wired into
 * to a variable name. This provides flexibility changing wiring, makes checking
 * the wiring easier and significantly reduces the number of magic numbers
 * floating around.
 */
public class RobotMap {
	// For example to map the left and right motors, you could define the
	// following variables to use with your drivetrain subsystem.
	// public static int leftMotor = 1;
	// public static int rightMotor = 2;

	// If you are using multiple modules, make sure to define both the port
	// number and the module. For example you with a rangefinder:
	// public static int rangefinderPort = 1;
	// public static int rangefinderModule = 1;

	// Name under which the USB camera is registered.
	public static final String CAMERA_NAME = "cam0";

	// Drivetrain wiring: PWM motor outputs, DIO encoder channels, the shifter
	// solenoid, and inversion flags for motors and encoders.
	public class DriveMap {
		public static final int PWM_LEFT_FRONT = 0;
		public static final int PWM_LEFT_BACK = 1;
		public static final int PWM_RIGHT_FRONT = 2;
		public static final int PWM_RIGHT_BACK = 3;

		public static final int DIO_ENCODER_LEFT_A = 0;
		public static final int DIO_ENCODER_LEFT_B = 1;
		public static final int DIO_ENCODER_RIGHT_A = 2;
		public static final int DIO_ENCODER_RIGHT_B = 3;

		// Shifter solenoid channels.
		public static final int SOL_FORWARD = 0;
		public static final int SOL_REVERSE = 1;

		public static final boolean INV_LEFT_FRONT = false;
		public static final boolean INV_LEFT_MID = false;
		public static final boolean INV_LEFT_BACK = false;
		public static final boolean INV_RIGHT_FRONT = false;
		public static final boolean INV_RIGHT_MID = false;
		public static final boolean INV_RIGHT_BACK = false;

		public static final boolean INV_ENCODER_LEFT = false;
		public static final boolean INV_ENCODER_RIGHT = false;

		public static final double DISTANCE_PER_PULSE = 1; // TODO get distance
	}

	// Intake roller wiring.
	public class IntakeMap {
		public static final int PWM_MOTOR = 4;
		public static final boolean INV_MOTOR = false;
	}

	// Arm wiring: motor, encoder, limit switches and PID gains.
	public class ArmMap {
		public static final int PWM_MOTOR = 5;
		public static final boolean INV_MOTOR = false;

		public static final int DIO_ENCODER_A = 4;
		public static final int DIO_ENCODER_B = 5;
		public static final double DEGREES_PER_PULSE = 1; // TODO get degrees
		public static final boolean INV_ENCODER = false;

		public static final int DIO_LIMITSWITCH_TOP = 6;
		public static final int DIO_LIMITSWITCH_BOTTOM = 7;

		// Manual jog step size.
		public static final int MAN_INCREMENT = 1; // TODO test, get arm speed

		public class PID {
			// TODO tune PID
			public static final double kP = 1;
			public static final double kI = .5;
			public static final double kD = 17;
		}
	}

	// Operator interface: joystick USB ports and button assignments.
	public class ControlMap {
		// USB
		public static final int PORT_STICK_LEFT = 0;
		public static final int PORT_STICK_RIGHT = 1;
		public static final int PORT_CONTROLLER = 2;

		// Right stick
		public static final int BUTTON_EJECT = 1;
		public static final int BUTTON_SHIFT_DOWN = 2;
		public static final int BUTTON_SHIFT_UP = 3;

		// Left stick
		public static final int BUTTON_INTAKE = 1;
		public static final int BUTTON_INVERT_DRIVE = 2;
		public static final int BUTTON_COCK = 4;
		public static final int BUTTON_SHOOT = 5;

		// Controller
		public static final int BUTTON_ARM_DOWN = 2;
		public static final int BUTTON_ARM_UP = 4;
	}

	// Shooter wiring: flywheel motors and the kicker solenoid.
	public class ShooterMap {
		public static final int PWM_MOTOR_LEFT = 6;
		public static final boolean INV_MOTOR_LEFT = false;
		public static final int PWM_MOTOR_RIGHT = 7;
		public static final boolean INV_MOTOR_RIGHT = false;

		// Not used
		/*
		public static final int DIO_ENCODER_A = 6;
		public static final int DIO_ENCODER_B = 7;
		public static final double DISTANCE_PER_PULSE = 1; // TODO get distance
		public static final boolean INV_ENCODER = false;
		*/

		public static final int SOL_FORWARD = 2;
		public static final int SOL_REVERSE = 3;
	}
}