answer
stringlengths
17
10.2M
package com.sibisoft.faizaniftikhartdd; public class Franc extends Money { public Franc() { } public Franc(int amount) { this.amount= amount; } Franc times(int multiplier) { return new Franc(amount * multiplier); } public boolean equals(Object object) { Franc franc= (Franc) object; return amount == franc.amount; } }
package com.tilioteo.hypothesis.ui;

import java.util.Collection;
import javax.servlet.annotation.WebServlet;
import net.engio.mbassy.listener.Handler;
import org.apache.log4j.Logger;
import org.vaadin.jouni.animator.AnimatorProxy;
import org.vaadin.jouni.animator.AnimatorProxy.AnimationEvent;
import org.vaadin.jouni.animator.shared.AnimType;
import org.vaadin.special.ui.ShortcutKey;
import com.tilioteo.hypothesis.core.Messages;
import com.tilioteo.hypothesis.entity.SimpleTest;
import com.tilioteo.hypothesis.event.AbstractNotificationEvent;
import com.tilioteo.hypothesis.event.AfterFinishSlideEvent;
import com.tilioteo.hypothesis.event.AfterPrepareTestEvent;
import com.tilioteo.hypothesis.event.AfterRenderContentEvent;
import com.tilioteo.hypothesis.event.CloseTestEvent;
import com.tilioteo.hypothesis.event.ErrorNotificationEvent;
import com.tilioteo.hypothesis.event.FinishSlideEvent.Direction;
import com.tilioteo.hypothesis.event.FinishTestEvent;
import com.tilioteo.hypothesis.event.HypothesisEvent.ProcessViewEndEvent;
import com.tilioteo.hypothesis.event.NextSlideEvent;
import com.tilioteo.hypothesis.event.PriorSlideEvent;
import com.tilioteo.hypothesis.event.ProcessEventBus;
import com.tilioteo.hypothesis.event.RenderContentEvent;
import com.tilioteo.hypothesis.extension.PluginManager;
import com.tilioteo.hypothesis.model.ProcessModel;
import com.tilioteo.hypothesis.processing.Command;
import com.tilioteo.hypothesis.servlet.ProcessServlet;
import com.tilioteo.hypothesis.slide.ui.Timer;
import com.tilioteo.hypothesis.slide.ui.Window;
import com.tilioteo.hypothesis.ui.view.FinishTestView;
import com.tilioteo.hypothesis.ui.view.StartTestView;
import com.vaadin.annotations.PreserveOnRefresh;
import com.vaadin.annotations.Push;
import com.vaadin.annotations.Theme;
import com.vaadin.annotations.VaadinServletConfiguration;
import com.vaadin.server.Page;
import com.vaadin.server.VaadinRequest;
import com.vaadin.server.VaadinServlet;
import com.vaadin.shared.communication.PushMode;
import com.vaadin.ui.Component;
import com.vaadin.ui.ComponentContainer;
import com.vaadin.ui.CssLayout;
import com.vaadin.ui.JavaScript;
import com.vaadin.ui.Notification;
import com.vaadin.ui.Window.CloseEvent;

/**
 * Vaadin UI that runs a hypothesis test "process": it swaps slide content in
 * and out (optionally with a fade-out animation), manages per-slide timers and
 * shortcut keys, and controls how the browser window is closed when the test
 * ends.
 *
 * NOTE(review): this class reads fields that are not declared in the visible
 * portion of this file (clearLayout, bus, processModel, requestClose,
 * requestBack, requestFullscreen, animate) — they are presumably declared in a
 * part of the class elided from this dump; confirm against the full source.
 *
 * @author kamil
 */
@SuppressWarnings("serial")
@Theme("hypothesis")
@PreserveOnRefresh
@Push(value = PushMode.MANUAL)
public class ProcessUI extends HUI {

    private static Logger log = Logger.getLogger(ProcessUI.class);

    // URL query-parameter names recognized by this UI.
    public static final String FULLSCREEN_PARAMETER = "fs";
    public static final String BACK_PARAMETER = "bk";
    public static final String TOKEN_PARAMETER = "token";

    // Static page the browser is redirected to when the window should close.
    public static final String CLOSE_URL = "/resource/close.html";

    /**
     * sets empty layout to remove old content
     *
     * Removes all timers and shortcut keys first, then replaces the content
     * with the (externally declared) clearLayout and finally runs nextCommand.
     * When animate is true and the current content is a container, the swap
     * happens after a 300 ms fade-out; otherwise it happens immediately.
     */
    public void clearContent(boolean animate, final Command nextCommand) {
        log.debug("clearContent::");
        removeAllTimers();
        removeAllShortcutKeys();
        Component content = getContent();
        if (animate && content instanceof ComponentContainer) {
            AnimatorProxy animator = new AnimatorProxy();
            animator.addListener(new AnimatorProxy.AnimationListener() {
                @Override
                public void onAnimation(AnimationEvent event) {
                    // runs once the fade-out completes
                    setContent(clearLayout);
                    Command.Executor.execute(nextCommand);
                }
            });
            // the proxy must be attached to the component tree to function
            ((ComponentContainer) content).addComponent(animator);
            animator.animate(content, AnimType.FADE_OUT).setDuration(300).setDelay(0);
        } else {
            setContent(clearLayout);
            Command.Executor.execute(nextCommand);
        }
    }

    /**
     * Installs a slide: sets its root component as the UI content, registers
     * its timers and shortcut keys, and gives the UI keyboard focus.
     */
    private void setSlideContent(Component component, Collection<Timer> timers,
            Collection<ShortcutKey> shortcutKeys) {
        log.debug("setSlideContent::");
        setContent(component);
        // add timers
        for (Timer timer : timers) {
            addTimer(timer);
        }
        // add shortcut keys
        for (ShortcutKey shortcutKey : shortcutKeys) {
            addShortcutKey(shortcutKey);
        }
        focus();
    }

    /**
     * Detach hook: unregisters this UI from the event bus and asks the
     * process model to break off and clean up before the UI is detached.
     */
    @Override
    public void detach() {
        log.debug("ProcessUI detach");
        bus.unregister(this);
        processModel.requestBreak();
        processModel.clean();
        super.detach();
    }

    /**
     * Closes the UI. If close was not explicitly requested (requestClose is
     * false) this is logged as a possible runtime error / user-initiated
     * browser close. Depending on requestBack the browser is either redirected
     * to CLOSE_URL (which closes the window) or sent back in history.
     */
    @Override
    public void close() {
        log.debug("close::");
        if (!requestClose) {
            log.warn("ProcessUI closing without request. Possible runtime error or user closed the browser window.");
            // NOTE(review): assigning false to an already-false flag looks
            // suspicious — possibly meant to be `requestClose = true`; confirm.
            requestClose = false;
        }
        if (!requestBack) {
            log.debug("Closing window.");
            String path = VaadinServlet.getCurrent().getServletContext().getContextPath();
            Page.getCurrent().setLocation(path + CLOSE_URL);
            // this is also possible way but not for SWT browser
            //Page.getCurrent().getJavaScript().execute("window.setTimeout(function(){/*window.open('','_self','');*/window.close();},10);");
        } else {
            log.debug("History back.");
            JavaScript javaScript = Page.getCurrent().getJavaScript();
            javaScript.execute("window.history.back;");
        }
        //getSession().close(); // closes all windows in this session
        super.close();
    }

    /** @return whether fullscreen mode was requested for this UI */
    public boolean isFullscreen() {
        return requestFullscreen;
    }

    /** @return whether content transitions should be animated */
    public boolean isAnimated() {
        return animate;
    }

    /**
     * Shows or hides Vaadin's built-in loading indicator by pushing its
     * z-index above (9999) or below (0) the page content via client-side JS.
     */
    public void setLoadingIndicatorVisible(boolean visible) {
        Page.getCurrent().getJavaScript().execute("var x=document.getElementsByClassName(\"v-loading-indicator\");if(x.length>0){x[0].style.zIndex=\"" + (visible ? 9999 : 0) + "\"}");
    }
}
package algorithms.imageProcessing.matching;

import algorithms.QuickSort;
import algorithms.compGeometry.FurthestPair;
import algorithms.imageProcessing.ColorHistogram;
import algorithms.imageProcessing.FixedSizeSortedVector;
import algorithms.imageProcessing.Image;
import algorithms.imageProcessing.ImageIOHelper;
import algorithms.imageProcessing.ImageProcessor;
import algorithms.imageProcessing.MiscellaneousCurveHelper;
import algorithms.imageProcessing.SIGMA;
import algorithms.imageProcessing.VanishingPoints;
import algorithms.imageProcessing.features.CorrespondenceList;
import algorithms.imageProcessing.features.ORB;
import algorithms.imageProcessing.features.ORB.Descriptors;
import static algorithms.imageProcessing.features.ORB.convertToImage;
import algorithms.imageProcessing.features.RANSACEuclideanSolver;
import algorithms.imageProcessing.features.RANSACSolver;
import algorithms.imageProcessing.matching.PartialShapeMatcher.Result;
import algorithms.imageProcessing.matching.ShapeFinder.ShapeFinderResult;
import algorithms.imageProcessing.transform.EpipolarTransformationFit;
import algorithms.imageProcessing.transform.EpipolarTransformer;
import algorithms.imageProcessing.transform.EuclideanTransformationFit;
import algorithms.imageProcessing.transform.MatchedPointsTransformationCalculator;
import algorithms.imageProcessing.transform.TransformationParameters;
import algorithms.imageProcessing.transform.Transformer;
import algorithms.misc.Misc;
import algorithms.misc.MiscDebug;
import algorithms.misc.MiscMath;
import algorithms.search.NearestNeighbor2D;
import algorithms.util.CorrespondencePlotter;
import algorithms.util.OneDIntArray;
import algorithms.util.PairFloatArray;
import algorithms.util.PairInt;
import algorithms.util.PairIntArray;
import algorithms.util.QuadInt;
import algorithms.util.TwoDFloatArray;
import algorithms.util.TwoDIntArray;
import algorithms.util.VeryLongBitString;
import gnu.trove.iterator.TIntIterator;
import gnu.trove.iterator.TIntObjectIterator;
import gnu.trove.iterator.TObjectIntIterator;
import gnu.trove.list.TDoubleList;
import gnu.trove.list.TFloatList;
import gnu.trove.list.TIntList;
import gnu.trove.list.array.TDoubleArrayList;
import gnu.trove.list.array.TFloatArrayList;
import gnu.trove.list.array.TIntArrayList;
import gnu.trove.map.TIntIntMap;
import gnu.trove.map.TIntObjectMap;
import gnu.trove.map.TObjectIntMap;
import gnu.trove.map.hash.TIntIntHashMap;
import gnu.trove.map.hash.TIntObjectHashMap;
import gnu.trove.map.hash.TObjectIntHashMap;
import gnu.trove.set.TIntSet;
import gnu.trove.set.hash.TIntHashSet;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.ejml.simple.SimpleMatrix;

/**
 * a class to hold various methods related to matching
 * the descriptors of ORB.
 * See also ObjectMatcher.
 *
 * @see ORB
 * @see ObjectMatcher
 *
 * @author nichole
 */
public class ORBMatcher {

    // vahnishing points for dataset2
    private VanishingPoints vp2 = null;

    /** Stores the vanishing points computed for dataset 2 (optional input). */
    public void setVanishingPointsForSet2(VanishingPoints vp) {
        vp2 = vp;
    }

    /**
     * match template image and shape in orb1 and labeledPoints1
     * with the same object which is somewhere in the
     * segmented labledPoints2 and orb2.
     *
     * NOTE that if the template or the true object match in dataset2
     * are smaller than 32 pixels across, the method may not find the
     * object very well so alternative methods should be used in that case
     * or pre-processing to correct that.
     *
     * NOTE also that if precise correspondence is needed, this method should
     * probably be followed by partial shape matcher to get better transformation
     * and then add transformed matching keypoints to that correspondence.
     *
     * NOT READY FOR USE yet.
     *
     * @param orb1
     * @param orb2
     * @param labeledPoints1
     * @param labeledPoints2
     * @return
     */
    public List<CorrespondenceList> match0(ORB orb1, ORB orb2,
        Set<PairInt> labeledPoints1, List<Set<PairInt>> labeledPoints2) {

        /*
        uses the descriptors given and then optionally makes masks
        for them using the labeled points.
        -- visits each octave pair
           -- calculates cost of descriptors
           -- uses the segmentation to calculate every permutation of
              2 pairs of points.
              -- filter out high cost pairs.
              -- filters out 2 pair combinations with transformation scales not near 1
              -- keeps only the top 10 percent cost of items
                 from the 2 pair list.
              -- evaluates the transformation using the transformed
                 keypoints cost difference, distance from nearest neighbor
                 and number of matches
              -- keeps the best of each j
              -- further compares bestJs with SSDs of intersecting
                 transformed point sets of the matching keypoints
              -- top of those best is the returned result
        */

        // both ORBs must have been built with the same descriptor type
        if (!orb1.getDescrChoice().equals(orb2.getDescrChoice())) {
            throw new IllegalStateException("orbs must contain same kind of descirptors");
        }
        // nBands = number of descriptor bands: 3 for HSV, 1 otherwise
        int nBands = 3;
        if (orb1.getDescrChoice().equals(ORB.DescriptorChoice.HSV)) {
            if (orb1.getDescriptorsH() == null || orb2.getDescriptorsH() == null) {
                throw new IllegalStateException("hsv descriptors must be created first");
            }
        } else if (orb1.getDescrChoice().equals(ORB.DescriptorChoice.ALT)) {
            if (orb1.getDescriptorsListAlt() == null || orb2.getDescriptorsListAlt() == null) {
                throw new IllegalStateException("alt descriptors must be created first");
            }
            nBands = 1;
        } else if (orb1.getDescrChoice().equals(ORB.DescriptorChoice.GREYSCALE)) {
            if (orb1.getDescriptorsList() == null || orb2.getDescriptorsList() == null) {
                throw new IllegalStateException("descriptors must be created first");
            }
            nBands = 1;
        }
        // hard-coded off in this version; when true, per-label masks are
        // applied to the descriptors before cost calculation
        boolean useMasks = false;
        if (useMasks) {
            // initialize the masks, but discard the maps
            TObjectIntMap<PairInt> pointLabels1 = new TObjectIntHashMap<PairInt>();
            Set<PairInt> set = labeledPoints1;
            for (PairInt p : set) {
                pointLabels1.put(p, 0);
            }
            TObjectIntMap<PairInt> pointLabels2 = new TObjectIntHashMap<PairInt>();
            for (int i = 0; i < labeledPoints2.size(); ++i) {
                set = labeledPoints2.get(i);
                for (PairInt p : set) {
                    pointLabels2.put(p, i);
                }
            }
            orb1.createDescriptorMasks(pointLabels1);
            orb2.createDescriptorMasks(pointLabels2);
        }
        //TODO: may need to revise this or allow it as a method argument:
        int pixTolerance = 10;
        MatchedPointsTransformationCalculator tc = new MatchedPointsTransformationCalculator();
        Transformer transformer = new Transformer();
        // extractScales is defined elsewhere in this class (not in view)
        TFloatList scales1 = extractScales(orb1.getScalesList());
        TFloatList scales2 = extractScales(orb2.getScalesList());
        if (Math.abs(scales1.get(0) - 1) > 0.01) {
            throw new IllegalArgumentException("logic depends upon first scale" + " level being '1'");
        }
        if (Math.abs(scales2.get(0) - 1) > 0.01) {
            throw new IllegalArgumentException("logic depends upon first scale" + " level being '1'");
        }
        // a rough estimate of maximum number of matchable points in any
        // scale dataset comparison
        final int nMaxMatchable = Math.round(0.5F *
            calculateNMaxMatchable(orb1.getKeyPoint1List(), orb2.getKeyPoint1List()));
        //TODO: allow a factor to be passed in
        System.out.println("nMaxMatchable=" + nMaxMatchable);
        int nMax1 = maxSize(orb1.getKeyPoint1List());
        int nMax2 = maxSize(orb2.getKeyPoint1List());
        // NOTE(review): nMax (and minCost1/2/3, minCostTScale below) are never
        // read again in the visible portion of this method
        int nMax = nMax1 * nMax2;
        // NOTE(review): minCostTotal is never assigned below, so the final
        // filter `a.cost > minCostTotal` compares against Double.MAX_VALUE
        // and never breaks — possibly dead code from a refactor; confirm.
        double minCostTotal = Double.MAX_VALUE;
        double minCost1 = Double.MAX_VALUE;
        double minCost2 = Double.MAX_VALUE;
        double minCost3 = Double.MAX_VALUE;
        float minCostTScale = Float.MAX_VALUE;
        //runtime complexity of this vector depends upon the number of items
        // it is currently holding, so can set the capacity high and fill vector only
        // with items within bitTolerance of best, but too high might affect jvm
        // performance.
        // (note, can optimize this for very large results by occassionally ejecting
        // all values with cost > best + bitTolerance.)
        // TODO: a safe size is to set capacity to the number of unique
        // transformation parameter sets, but since that isn't known
        // until later without refactoring here, will make an assumption for now,
        // that size 100 is generous for number of top solutions.
        FixedSizeSortedVector<CObject3> minVec =
            new FixedSizeSortedVector<CObject3>(1, CObject3.class);
        int templateSize = calculateObjectSize(labeledPoints1);
        // populated on demand
        TObjectIntMap<OneDIntArray> labeledPointsSizes2 =
            new TObjectIntHashMap<OneDIntArray>();
        // outer loop: octaves of the template pyramid (dataset 1)
        for (int i = 0; i < scales1.size(); ++i) {
        //for (int i = 2; i < 3; ++i) {
            float scale1 = scales1.get(i);
            // coords are in ref frame of scale=1 of their pyramids
            TIntList kpX1 = orb1.getKeyPoint1List().get(i);
            TIntList kpY1 = orb1.getKeyPoint0List().get(i);
            int n1 = kpX1.size();
            TwoDFloatArray octaveImg1 = orb1.getPyramidImages().get(i);
            float diag1 = (float) Math.sqrt(octaveImg1.a.length * octaveImg1.a[0].length);
            final double maxDist = diag1;
            // create data structures in scaled reference frame
            TObjectIntMap<PairInt> p1KPIndexMap = new TObjectIntHashMap<PairInt>();
            TIntList kpX1_2 = new TIntArrayList(n1);
            TIntList kpY1_2 = new TIntArrayList(n1);
            for (int i3 = 0; i3 < n1; ++i3) {
                int x = Math.round((float) kpX1.get(i3) / scale1);
                int y = Math.round((float) kpY1.get(i3) / scale1);
                kpX1_2.add(x);
                kpY1_2.add(y);
                p1KPIndexMap.put(new PairInt(x, y), i3);
            }
            // dataset 1 has a single label (the template object), index 0
            List<TIntList> pointIndexLists1 = new ArrayList<TIntList>();
            int ns = 1;
            for (int i3 = 0; i3 < ns; ++i3) {
                pointIndexLists1.add(new TIntArrayList());
            }
            TObjectIntMap<PairInt> pointLabels1 = new TObjectIntHashMap<PairInt>();
            Set<PairInt> set = labeledPoints1;
            Set<PairInt> setScaled = new HashSet<PairInt>();
            TIntList list = pointIndexLists1.get(0);
            assert (list != null);
            for (PairInt p : set) {
                int x = Math.round((float) p.getX() / scale1);
                int y = Math.round((float) p.getY() / scale1);
                PairInt p2 = new PairInt(x, y);
                pointLabels1.put(p2, 0);
                int idx = p1KPIndexMap.get(p2);
                list.add(idx);
                setScaled.add(p2);
            }
            // NOTE(review): shape, limitSq, maxX2_2/maxY2_2 below appear
            // unused in the visible portion of this method
            Set<PairInt> shape = new HashSet<PairInt>(setScaled);
            int objDimension = (int) Math.round((float) templateSize / (float) scale1);
            int limit = Math.round(1.15F * objDimension);
            int limitSq = limit * limit;
            PairIntArray a1 = new PairIntArray(kpX1_2.size());
            TIntList a1Indexes = new TIntArrayList(kpX1_2.size());
            for (int ii = 0; ii < kpX1.size(); ++ii) {
                int x = kpX1.get(ii);
                int y = kpY1.get(ii);
                a1.add(x, y);
                a1Indexes.add(ii);
            }
            // inner loop: octaves of the search pyramid (dataset 2)
            for (int j = 0; j < scales2.size(); ++j) {
            //for (int j = 0; j < 1; ++j) {
                float scale2 = scales2.get(j);
                // coords are in ref frame of scale=1 of their pyramids
                TIntList kpX2 = orb2.getKeyPoint1List().get(j);
                TIntList kpY2 = orb2.getKeyPoint0List().get(j);
                int n2 = kpX2.size();
                // create data structures in scaled reference frame
                TObjectIntMap<PairInt> p2KPIndexMap = new TObjectIntHashMap<PairInt>();
                TObjectIntMap<PairInt> p2KPIndexMap_2 = new TObjectIntHashMap<PairInt>();
                TIntList kpX2_2 = new TIntArrayList(n2);
                TIntList kpY2_2 = new TIntArrayList(n2);
                for (int j3 = 0; j3 < n2; ++j3) {
                    int x = Math.round((float) kpX2.get(j3) / scale2);
                    int y = Math.round((float) kpY2.get(j3) / scale2);
                    kpX2_2.add(x);
                    kpY2_2.add(y);
                    // _2 map is keyed by scaled coords; plain map by full-frame coords
                    p2KPIndexMap_2.put(new PairInt(x, y), j3);
                    p2KPIndexMap.put(new PairInt(kpX2.get(j3), kpY2.get(j3)), j3);
                }
                // per-label lists of keypoint indexes for dataset 2
                List<TIntList> pointIndexLists2 = new ArrayList<TIntList>();
                int ns2 = labeledPoints2.size();
                for (int j3 = 0; j3 < ns2; ++j3) {
                    pointIndexLists2.add(new TIntArrayList());
                }
                TObjectIntMap<PairInt> pointLabels2 = new TObjectIntHashMap<PairInt>();
                for (int j3 = 0; j3 < ns2; ++j3) {
                    Set<PairInt> set2 = labeledPoints2.get(j3);
                    TIntList list2 = pointIndexLists2.get(j3);
                    assert (list2 != null);
                    for (PairInt p : set2) {
                        int x = Math.round((float) p.getX() / scale2);
                        int y = Math.round((float) p.getY() / scale2);
                        PairInt p2 = new PairInt(x, y);
                        pointLabels2.put(p2, j3);
                        int idx = p2KPIndexMap_2.get(p2);
                        list2.add(idx);
                    }
                }
                TwoDFloatArray octaveImg2 = orb2.getPyramidImages().get(j);
                // debugPrint is defined elsewhere in this class (not in view)
                debugPrint(octaveImg1, octaveImg2, kpX1_2, kpY1_2, kpX2_2, kpY2_2, i, j);
                int maxX2 = orb2.getPyramidImages().get(0).a[0].length;
                int maxY2 = orb2.getPyramidImages().get(0).a.length;
                int maxX2_2 = octaveImg2.a[0].length;
                int maxY2_2 = octaveImg2.a.length;
                // nearest-neighbor structure over full-frame dataset-2 keypoints
                NearestNeighbor2D nn2 = new NearestNeighbor2D(makeSet(kpX2, kpY2),
                    maxX2 + limit, maxY2 + limit);
                int nTot = n1 * n2;
                //use descriptors with params here to reduce paramsList
                int[][] cost = null;
                if (useMasks) {
                    ORB.Descriptors[] desc1 = getDescriptors(orb1, i);
                    ORB.Descriptors[] desc2 = getDescriptors(orb2, j);
                    cost = ORB.calcMaskedDescriptorCostMatrixes(desc1, desc2,
                        orb1.getDescriptorsMaskList().get(i),
                        orb2.getDescriptorsMaskList().get(j))[1].a;
                } else {
                    ORB.Descriptors[] desc1 = getDescriptors(orb1, i);
                    ORB.Descriptors[] desc2 = getDescriptors(orb2, j);
                    cost = ORB.calcDescriptorCostMatrix(desc1, desc2);
                }
                //combinations of pairs with same labels
                // storing them all to reduce nesting
                // quadint is idx1, idx2, idx3, idx4
                //TODO: can use the cost to more quickly filter the
                // pairs at creation time
                List<QuadInt> pairIndexes = createPairLabelIndexes(cost, nBands,
                    pointIndexLists1, kpX1_2, kpY1_2, pointIndexLists2, kpX2_2, kpY2_2);
                System.out.println("i=" + i + " j=" + j + " nPairs=" + pairIndexes.size());
                FixedSizeSortedVector<CObject4> vecP = new FixedSizeSortedVector<CObject4>(100,
                    //Math.round(0.1f * pairIndexes.size()),
                    //Math.round(0.01f * pairIndexes.size()),
                    CObject4.class);
                // pass 1: derive a Euclidean transform from each 2-pair
                // combination and keep the lowest-descriptor-cost candidates
                for (int ipi = 0; ipi < pairIndexes.size(); ++ipi) {
                    QuadInt q = pairIndexes.get(ipi);
                    int t1X = kpX1_2.get(q.getA());
                    int t1Y = kpY1_2.get(q.getA());
                    int t2X = kpX1_2.get(q.getB());
                    int t2Y = kpY1_2.get(q.getB());
                    int s1X = kpX2_2.get(q.getC());
                    int s1Y = kpY2_2.get(q.getC());
                    int s2X = kpX2_2.get(q.getD());
                    int s2Y = kpY2_2.get(q.getD());
                    // transform dataset 1 into frame 2
                    TransformationParameters params = tc.calulateEuclidean(
                        t1X, t1Y, t2X, t2Y, s1X, s1Y, s2X, s2Y, 0, 0);
                    float tScale = params.getScale();
                    // discard transformations whose scale is not near 1
                    if (Math.abs(tScale - 1.0) > 0.15) {
                        continue;
                    }
                    int idx1_1 = p1KPIndexMap.get(new PairInt(t1X, t1Y));
                    int idx1_2 = p1KPIndexMap.get(new PairInt(t2X, t2Y));
                    int idx2_1 = p2KPIndexMap_2.get(new PairInt(s1X, s1Y));
                    int idx2_2 = p2KPIndexMap_2.get(new PairInt(s2X, s2Y));
                    // a filter for objects too large to be the template object in
                    // dataset 1.
                    // caveat is that cannot use partial shape matcher on all
                    // results in same manner if filter this one out, but it's
                    // the right logic if not oversegmented or blended into
                    // other objects.
                    int label2 = pointLabels2.get(new PairInt(kpX2.get(q.getC()),
                        kpY2.get(q.getC())));
                    if (labeledPoints2.get(label2).size() < 2) {
                        continue;
                    }
                    OneDIntArray key = new OneDIntArray(new int[]{label2});
                    if (!labeledPointsSizes2.containsKey(key)) {
                        Set<PairInt> set2 = labeledPoints2.get(label2);
                        if (set2.size() < 2) {
                            continue;
                        }
                        int sz = calculateObjectSize(set2);
                        labeledPointsSizes2.put(key, sz);
                    }
                    int regionSize = labeledPointsSizes2.get(key);
                    if (regionSize > (1.5 * templateSize)) {
                        continue;
                    }
                    // candidate cost = summed descriptor costs of the 2 pairs
                    int sum = cost[idx1_1][idx2_1] + cost[idx1_2][idx2_2];
                    CObject4 cObj = new CObject4(sum, params, q);
                    boolean added = vecP.add(cObj);
                }
                System.out.println("for i=" + i + " j=" + j + " filtered nPairs=" +
                    vecP.getNumberOfItems());
                double minCostJTotal = Double.MAX_VALUE;
                double minCostJ1 = Double.MAX_VALUE;
                double minCostJ2 = Double.MAX_VALUE;
                double minCostJ3 = Double.MAX_VALUE;
                float minCostJTScale = Float.MAX_VALUE;
                FixedSizeSortedVector<CObject3> vecJ =
                    new FixedSizeSortedVector<CObject3>(1, CObject3.class);
                // pass 2: evaluate each surviving transformation over all
                // keypoints (descriptor cost + NN distance + match fraction)
                for (int ipi = 0; ipi < vecP.getNumberOfItems(); ++ipi) {
                    CObject4 c = vecP.getArray()[ipi];
                    TransformationParameters params = c.params;
                    float tScale = params.getScale();
                    QuadInt q = c.q;
                    int t1X = kpX1_2.get(q.getA());
                    int t1Y = kpY1_2.get(q.getA());
                    int t2X = kpX1_2.get(q.getB());
                    int t2Y = kpY1_2.get(q.getB());
                    int s1X = kpX2_2.get(q.getC());
                    int s1Y = kpY2_2.get(q.getC());
                    int s2X = kpX2_2.get(q.getD());
                    int s2Y = kpY2_2.get(q.getD());
                    PairIntArray tr1 = transformer.applyTransformation(params, a1);
                    // trim to image dimensions
                    tr1 = trimToImageBounds(octaveImg2, tr1);
                    if (tr1.getN() == 0) {
                        continue;
                    }
                    //the matched kpx1,kpy1 kpx2,kpy2 coordinate pairs
                    int[] mp1 = new int[kpX1.size()];
                    int[] mp2 = new int[kpX1.size()];
                    double[] distAndCount = sumKeypointDescAndDist(cost, 3,
                        a1Indexes, tr1, kpX1, kpY1, nn2, p2KPIndexMap,
                        maxX2, maxY2, pixTolerance, maxDist, mp1, mp2);
                    double sumDesc = distAndCount[0];
                    double sumDist = distAndCount[1];
                    int np = (int) distAndCount[2];
                    int count = np;
                    if (count < 2) {
                        continue;
                    }
                    if (count == 2 && (nMaxMatchable > 2 * count)) {
                        // TODO: may want to revise this while still discarding
                        // false positives
                        continue;
                    }
                    if (np < mp1.length) {
                        mp1 = Arrays.copyOf(mp1, np);
                        mp2 = Arrays.copyOf(mp2, np);
                    }
                    if (count > nMaxMatchable) {
                        count = nMaxMatchable;
                    }
                    // sum3 = 1 - (fraction of max matchable points matched)
                    double cf = count;
                    if (cf > nMaxMatchable) {
                        cf = nMaxMatchable;
                    }
                    cf /= nMaxMatchable;
                    double sum3 = 1.0 - cf;
                    //sumDesc /= (double)count;
                    //sumDist /= (double)count;
                    sumDesc /= distAndCount[2];
                    sumDist /= distAndCount[2];
                    double sum = sumDesc + sumDist + sum3;
                    // if vecJ is filled and sum is not better than last item,
                    // continue
                    if (vecJ.getNumberOfItems() == vecJ.getFixedCapacity()) {
                        // NOTE(review): `<` here skips candidates CHEAPER than
                        // the current worst, which looks inverted for a
                        // min-cost vector — confirm against FixedSizeSortedVector's
                        // ordering before changing.
                        if (sum < vecJ.getArray()[vecJ.getNumberOfItems() - 1].cost) {
                            continue;
                        }
                    }
                    TIntSet labels2 = new TIntHashSet();
                    PairInt[] m1 = new PairInt[np];
                    PairInt[] m2 = new PairInt[mp1.length];
                    for (int j3 = 0; j3 < m1.length; ++j3) {
                        int idx1 = mp1[j3];
                        int idx2 = mp2[j3];
                        assert (idx1 < kpX1.size() && idx1 > -1);
                        assert (idx2 < kpX2.size() && idx2 > -1);
                        m1[j3] = new PairInt(kpX1.get(idx1), kpY1.get(idx1));
                        m2[j3] = new PairInt(kpX2.get(idx2), kpY2.get(idx2));
                        assert (labeledPoints1.contains(m1[j3]));
                        assert (p1KPIndexMap.get(new PairInt(kpX1_2.get(idx1),
                            kpY1_2.get(idx1))) == idx1);
                        assert (p2KPIndexMap_2.get(new PairInt(kpX2_2.get(idx2),
                            kpY2_2.get(idx2))) == idx2);
                        labels2.add(pointLabels2.get(m2[j3]));
                    }
                    // apply a size filter
                    OneDIntArray keys = new OneDIntArray(
                        labels2.toArray(new int[labels2.size()]));
                    Arrays.sort(keys.a);
                    if (!labeledPointsSizes2.containsKey(keys)) {
                        Set<PairInt> combined = new HashSet<PairInt>();
                        for (int k = 0; k < keys.a.length; ++k) {
                            combined.addAll(labeledPoints2.get(keys.a[k]));
                        }
                        if (combined.size() < 2) {
                            continue;
                        }
                        int sz = calculateObjectSize(combined);
                        labeledPointsSizes2.put(keys, sz);
                    }
                    int regionSize = labeledPointsSizes2.get(keys);
                    if (regionSize > (1.5 * templateSize)) {
                        continue;
                    }
                    CObject2 cObj2 = new CObject2(ipi, sum, sumDesc, sumDist, sum3, m1, m2);
                    CObject3 cObj = new CObject3(cObj2, sum, 0, params);
                    cObj.keypointCount = count;
                    boolean added = vecJ.add(cObj);
                    if (added) {
                        minCostJTotal = sum;
                        minCostJ1 = sumDesc;
                        minCostJ2 = sumDist;
                        minCostJ3 = sum3;
                        minCostJTScale = tScale;
                        System.out.println(String.format(
                            "i=%d j=%d ipi=%d ts=%.2f c=%.2f c1=%.2f c2=%.2f c3=%.2f count=%d",
                            i, j, ipi, tScale, (float) sum, (float) sumDesc,
                            (float) sumDist, (float) sum3, count));
                        // debug visualization of each accepted candidate;
                        // writes images to disk
                        if (true) {
                            CorrespondencePlotter plotter = new CorrespondencePlotter(
                                ORB.convertToImage(orb1.getPyramidImages().get(i)),
                                ORB.convertToImage(orb2.getPyramidImages().get(j)));
                            for (int ii = 0; ii < cObj.m1.length; ++ii) {
                                PairInt p1 = cObj.m1[ii];
                                PairInt p2 = cObj.m2[ii];
                                int x1 = Math.round((float) p1.getX() / scale1);
                                int y1 = Math.round((float) p1.getY() / scale1);
                                int x2 = Math.round((float) p2.getX() / scale2);
                                int y2 = Math.round((float) p2.getY() / scale2);
                                plotter.drawLineInAlternatingColors(x1, y1, x2, y2, 0);
                            }
                            // zero-pad octave indexes to 3 digits for the filename
                            String str = Integer.toString(i);
                            while (str.length() < 3) {
                                str = "0" + str;
                            }
                            String str2 = Integer.toString(j);
                            while (str2.length() < 3) {
                                str2 = "0" + str2;
                            }
                            str = str + "_" + str2;
                            try {
                                plotter.writeImage("_indiv_masked_corres2_" + str + "_" + ipi);
                            } catch (IOException ex) {
                                Logger.getLogger(ORB.class.getName()).log(Level.SEVERE, null, ex);
                            }
                        }
                    }
                } // end loop over paramsList
                if (vecJ.getNumberOfItems() == 0) {
                    continue;
                }
                if (false) { //DEBUG
                    for (int k = 0; k < vecJ.getNumberOfItems(); ++k) {
                        CObject3 cobj = vecJ.getArray()[k];
                        CorrespondencePlotter plotter = new CorrespondencePlotter(
                            ORB.convertToImage(orb1.getPyramidImages().get(i)),
                            ORB.convertToImage(orb2.getPyramidImages().get(j)));
                        for (int ii = 0; ii < cobj.m1.length; ++ii) {
                            PairInt p1 = cobj.m1[ii];
                            PairInt p2 = cobj.m2[ii];
                            int x1 = Math.round((float) p1.getX() / scale1);
                            int y1 = Math.round((float) p1.getY() / scale1);
                            int x2 = Math.round((float) p2.getX() / scale2);
                            int y2 = Math.round((float) p2.getY() / scale2);
                            plotter.drawLineInAlternatingColors(x1, y1, x2, y2, 0);
                        }
                        String str = Integer.toString(i);
                        while (str.length() < 3) {
                            str = "0" + str;
                        }
                        String str2 = Integer.toString(j);
                        while (str2.length() < 3) {
                            str2 = "0" + str2;
                        }
                        str = str + "_" + str2;
                        try {
                            plotter.writeImage("_mindiv_masked_corres3_" + str + "_" +
                                MiscDebug.getCurrentTimeFormatted());
                        } catch (IOException ex) {
                            Logger.getLogger(ORB.class.getName()).log(Level.SEVERE, null, ex);
                        }
                        System.out.println(String.format(
                            "* %d %d ts=%.2f c=%.2f c1=%.2f c2=%.2f c3=%.2f",
                            i, j, cobj.params.getScale(), (float) cobj.cost,
                            (float) cobj.costDesc, (float) cobj.costDist,
                            (float) cobj.costCount));
                    }
                }
                if (vecJ.getNumberOfItems() == 0) {
                    System.out.println("no matches for i=" + i + " j=" + j);
                    continue;
                }
                // if expand capacity of minVec, add up to capacity here
                minVec.add(vecJ.getArray()[0]);
            } // end loop over image j
        }
        if (minVec.getNumberOfItems() == 0) {
            return null;
        }
        // collect the best-cost correspondences kept across all octave pairs
        List<CorrespondenceList> topResults = new ArrayList<CorrespondenceList>();
        for (int i = 0; i < minVec.getNumberOfItems(); ++i) {
            CObject3 a = minVec.getArray()[i];
            if (a.cost > minCostTotal) {
                break;
            }
            CorrespondenceList cor = new CorrespondenceList(a.params, a.m1, a.m2);
            topResults.add(cor);
        }
        return topResults;
    }

    /**
     * match template image and shape in orb1 and labeledPoints1
     * with the same object which is somewhere in the
     * segmented labledPoints2 and orb2.
* * this method matches points on a segmented cell basis to calculate the minimum cost correspondence with an objective function consisting of cost from an outer point chord difference matrix, cost from hsv orb descriptors of keypoints, and an epipolar projection to remove outliers and find matching inner points (and subsequent addition of the later costs to the total). * NOT READY FOR USE yet. * * @param orb1 * @param orb2 * @param labeledPoints1 * @param labeledPoints2 * @return */ public List<CorrespondenceList> match0Epipolar(ORB orb1, ORB orb2, Set<PairInt> labeledPoints1, List<Set<PairInt>> labeledPoints2) { if (!orb1.getDescrChoice().equals(orb2.getDescrChoice())) { throw new IllegalStateException("orbs must contain same kind of descirptors"); } int nBands = 3; if (orb1.getDescrChoice().equals(ORB.DescriptorChoice.HSV)) { if (orb1.getDescriptorsH() == null || orb2.getDescriptorsH() == null) { throw new IllegalStateException("hsv descriptors must be created first"); } } else if (orb1.getDescrChoice().equals(ORB.DescriptorChoice.ALT)) { if (orb1.getDescriptorsListAlt() == null || orb2.getDescriptorsListAlt() == null) { throw new IllegalStateException("alt descriptors must be created first"); } nBands = 1; } else if (orb1.getDescrChoice().equals(ORB.DescriptorChoice.GREYSCALE)) { if (orb1.getDescriptorsList() == null || orb2.getDescriptorsList() == null) { throw new IllegalStateException("descriptors must be created first"); } nBands = 1; } // NOTE: keeping coords in full size reference frames TFloatList scales1 = extractScales(orb1.getScalesList()); TFloatList scales2 = extractScales(orb2.getScalesList()); if (Math.abs(scales1.get(0) - 1) > 0.01) { throw new IllegalArgumentException("logic depends upon first scale" + " level being '1'"); } if (Math.abs(scales2.get(0) - 1) > 0.01) { throw new IllegalArgumentException("logic depends upon first scale" + " level being '1'"); } SIGMA sigma = SIGMA.ZEROPOINTFIVE; float distTol = 5; float distMax = 
(float)(Math.sqrt(2) * distTol); EpipolarTransformer eTransformer = new EpipolarTransformer(); // NOTE that all octaves use coordinates based in the // full reference frame, so only one bounds1 is needed for // all octaves. PairIntArray bounds1 = createOrderedBounds(orb1, labeledPoints1, sigma); if (bounds1.getN() < 7) { throw new IllegalStateException("the boundary of object 1 " + " must have at least 7 points"); } TIntIntMap sizes2Maps = new TIntIntHashMap(); for (int i = 0; i < labeledPoints2.size(); ++i) { Set<PairInt> set2 = labeledPoints2.get(i); if (set2.size() < 7) { continue; } int sz = calculateObjectSize(set2); { MiscellaneousCurveHelper curveHelper = new MiscellaneousCurveHelper(); double[] xyCen = curveHelper.calculateXYCentroids(set2); System.out.println("set " + i + " center=" + (int) xyCen[0] + "," + (int) xyCen[1] + " size_full=" + sz); } sizes2Maps.put(i, sz); } ImageProcessor imageProcessor = new ImageProcessor(); // -- initialize bounds2MapsList and populte on demand TIntObjectMap<PairIntArray> bounds2Maps = new TIntObjectHashMap<PairIntArray>(); List<TObjectIntMap<PairInt>> kp1IdxMapList = new ArrayList<TObjectIntMap<PairInt>>(); for (int octave = 0; octave < scales1.size(); ++octave) { TObjectIntMap<PairInt> keypoints1IndexMap = new TObjectIntHashMap<PairInt>(); for (int i = 0; i < orb1.getKeyPoint1List().get(octave).size(); ++i) { int x = orb1.getKeyPoint1List().get(octave).get(i); int y = orb1.getKeyPoint0List().get(octave).get(i); keypoints1IndexMap.put(new PairInt(x, y), i); } kp1IdxMapList.add(keypoints1IndexMap); } List<TObjectIntMap<PairInt>> kp2IdxMapList = new ArrayList<TObjectIntMap<PairInt>>(); for (int octave = 0; octave < scales2.size(); ++octave) { TObjectIntMap<PairInt> keypoints2IndexMap = new TObjectIntHashMap<PairInt>(); for (int i = 0; i < orb2.getKeyPoint1List().get(octave).size(); ++i) { int x = orb2.getKeyPoint1List().get(octave).get(i); int y = orb2.getKeyPoint0List().get(octave).get(i); keypoints2IndexMap.put(new 
PairInt(x, y), i); } kp2IdxMapList.add(keypoints2IndexMap); } // making a lookup map for keypoint indexes in points2 labeled sets List<TIntObjectMap<TIntSet>> labels2KPIdxsList = new ArrayList<TIntObjectMap<TIntSet>>(); for (int octave = 0; octave < scales2.size(); ++octave) { TIntObjectMap<TIntSet> labels2KPIdxs = new TIntObjectHashMap<TIntSet>(); labels2KPIdxsList.add(labels2KPIdxs); TObjectIntMap<PairInt> keypoints2IndexMap = kp2IdxMapList.get(octave); for (int i = 0; i < labeledPoints2.size(); ++i) { for (PairInt p : labeledPoints2.get(i)) { if (keypoints2IndexMap.containsKey(p)) { int kp2Idx = keypoints2IndexMap.get(p); TIntSet kpIdxs = labels2KPIdxs.get(i); if (kpIdxs == null) { kpIdxs = new TIntHashSet(); labels2KPIdxs.put(i, kpIdxs); } kpIdxs.add(kp2Idx); } } } } // a cache for partial shape matcher and results TIntObjectMap<List<Object>> psmMap = new TIntObjectHashMap<List<Object>>(); double maxChordAvg = Double.MIN_VALUE; double maxAvgDist = Double.MIN_VALUE; List<List<QuadInt>> correspondences = new ArrayList<List<QuadInt>>(); TIntList nLabelKP2s = new TIntArrayList(); TDoubleList descCosts = new TDoubleArrayList(); TIntList nDesc = new TIntArrayList(); TFloatList descNormalizations = new TFloatArrayList(); TDoubleList epCosts = new TDoubleArrayList(); TDoubleList chordCosts = new TDoubleArrayList(); TIntList octs1 = new TIntArrayList(); TIntList octs2 = new TIntArrayList(); TIntList segIdxs = new TIntArrayList(); for (int octave1 = 0; octave1 < scales1.size(); ++octave1) { //for (int octave1 = 1; octave1 < 2; ++octave1) { float scale1 = scales1.get(octave1); TObjectIntMap<PairInt> keypoints1IndexMap = kp1IdxMapList.get(octave1); float sz1 = calculateObjectSize(bounds1)/scale1; int nkp1 = orb1.getKeyPoint0List().get(octave1).size(); int nb1 = bounds1.getN(); float normDesc = nkp1 * nBands * 256; for (int octave2 = 0; octave2 < scales2.size(); ++octave2) { //for (int octave2 = 0; octave2 < 1; ++octave2) { float scale2 = scales2.get(octave2); 
TObjectIntMap<PairInt> keypoints2IndexMap = kp2IdxMapList.get(octave2); TIntObjectMap<TIntSet> labels2KPIdxs = labels2KPIdxsList.get(octave2); TwoDFloatArray img2 = orb2.getPyramidImages().get(octave2); TIntObjectIterator<TIntSet> iter2 = labels2KPIdxs.iterator(); for (int i2 = 0; i2 < labels2KPIdxs.size(); ++i2) { iter2.advance(); int segIdx = iter2.key(); TIntSet kp2Idxs = iter2.value(); float sz2 = sizes2Maps.get(segIdx)/scale2; if (sz2 == 0) { continue; } System.out.println("octave1=" + octave1 + " octave2=" + octave2 + " sz1=" + sz1 + " sz2=" + sz2 + " segIdx=" + segIdx); if ((sz1 > sz2 && Math.abs(sz1 / sz2) > 1.2) || (sz2 > sz1 && Math.abs(sz2 / sz1) > 1.2)) { continue; } PairIntArray bounds2 = getOrCreateOrderedBounds(img2, bounds2Maps, segIdx, labeledPoints2.get(segIdx), sigma); if (bounds2 == null || bounds2.getN() < 7) { continue; } List<Object> psmObj = psmMap.get(segIdx); if (psmObj == null) { PartialShapeMatcher matcher = new PartialShapeMatcher(); matcher.overrideSamplingDistance(1); matcher._overrideToThreshhold(0.2f); matcher.setToRemoveOutliers(); matcher.overrideToStoreMatrix(); PartialShapeMatcher.Result result = matcher.match( bounds1, bounds2); psmObj = new ArrayList<Object>(); psmObj.add(matcher); if (result != null) { psmObj.add(result); } psmMap.put(segIdx, psmObj); } if (psmObj.size() == 1) { continue; } PartialShapeMatcher matcher = (PartialShapeMatcher)psmObj.get(0); PartialShapeMatcher.Result result = (PartialShapeMatcher.Result)psmObj.get(1); if ((matcher.getStoredEpipolarFit() == null) || (result.getNumberOfMatches() < 3)) { continue; } int nr = result.getNumberOfMatches(); PairIntArray m1 = new PairIntArray(nr); PairIntArray m2 = new PairIntArray(nr); Set<PairInt> matched1 = new HashSet<PairInt>(); Set<PairInt> matched2 = new HashSet<PairInt>(); for (int j = 0; j < nr; ++j) { int idx1 = result.idx1s.get(j); int idx2 = result.idx2s.get(j); int x1 = bounds1.getX(idx1); int y1 = bounds1.getY(idx1); int x2 = bounds2.getX(idx2); int y2 = 
bounds2.getY(idx2); m1.add(x1, y1); m2.add(x2, y2); matched1.add(new PairInt(x1, y1)); matched2.add(new PairInt(x2, y2)); } //TODO: correct error in including points outside of segmentation region SimpleMatrix fm = matcher.getStoredEpipolarFit().getFundamentalMatrix(); //TODO: this method needs to be tested...normalization effects... // sum, avg, max double[] avgAndMaxDist = sumAndMaxEPDist(fm, m1, m2); if (avgAndMaxDist[2] > maxAvgDist) { maxAvgDist = avgAndMaxDist[2]; } // key=keypoint in this labeled region, value=kp2Index PairIntArray unmatchedKP2 = new PairIntArray(); TObjectIntMap<PairInt> unmatchedKP2Idxs = new TObjectIntHashMap<PairInt>(); TIntIterator iter = kp2Idxs.iterator(); while (iter.hasNext()) { int kp2Idx = iter.next(); int x = orb2.getKeyPoint1List().get(octave2).get(kp2Idx); int y = orb2.getKeyPoint0List().get(octave2).get(kp2Idx); PairInt p = new PairInt(x, y); if (!matched2.contains(p)) { unmatchedKP2Idxs.put(p, kp2Idx); unmatchedKP2.add(x, y); } } PairIntArray unmatchedKP1 = new PairIntArray(); TObjectIntMap<PairInt> unmatchedKP1Idxs = new TObjectIntHashMap<PairInt>(); TObjectIntIterator<PairInt> iter1 = keypoints1IndexMap.iterator(); for (int j = 0; j < keypoints1IndexMap.size(); ++j) { iter1.advance(); PairInt p = iter1.key(); int kpIdx1 = iter1.value(); if (!matched1.contains(p)) { unmatchedKP1Idxs.put(p, kpIdx1); unmatchedKP1.add(p.getX(), p.getY()); } } {// DEBUG, print bounds1 and unmatchedkp1 String str3 = Integer.toString(segIdx); while (str3.length() < 3) { str3 = "0" + str3; } Image img1 = ORB.convertToImage( orb1.getPyramidImages().get(octave1)); for (int ii = 0; ii < m1.getN(); ++ii) { int x = Math.round((float)m1.getX(ii)/scale1); int y = Math.round((float)m1.getY(ii)/scale1); ImageIOHelper.addPointToImage(x, y, img1, 1, 0, 255, 0); } for (int ii = 0; ii < unmatchedKP1.getN(); ++ii) { int x = Math.round((float)unmatchedKP1.getX(ii)/scale1); int y = Math.round((float)unmatchedKP1.getY(ii)/scale1); ImageIOHelper.addPointToImage(x, 
y, img1, 1, 255, 0, 0); } MiscDebug.writeImage(img1, "_TMP1_" + octave1 + "_" + octave2 + "_" + str3 + "_" + MiscDebug.getCurrentTimeFormatted()); img1 = ORB.convertToImage( orb2.getPyramidImages().get(octave2)); for (int ii = 0; ii < m2.getN(); ++ii) { int x = Math.round((float)m2.getX(ii)/scale2); int y = Math.round((float)m2.getY(ii)/scale2); ImageIOHelper.addPointToImage(x, y, img1, 1, 0, 255, 0); } for (int ii = 0; ii < unmatchedKP2.getN(); ++ii) { int x = Math.round((float)unmatchedKP2.getX(ii)/scale2); int y = Math.round((float)unmatchedKP2.getY(ii)/scale2); ImageIOHelper.addPointToImage(x, y, img1, 1, 255, 0, 0); } MiscDebug.writeImage(img1, "_TMP2_" + octave1 + "_" + octave2 + "_" + str3 + "_" + MiscDebug.getCurrentTimeFormatted()); } ORB.Descriptors[] desc1 = getDescriptors(orb1, octave1); ORB.Descriptors[] desc2 = getDescriptors(orb2, octave2); int[][] costD = ORB.calcDescriptorCostMatrix(desc1, desc2); // -- use epipolar fundamental matrix to add unmatched // points from the segmented cell's keypoints // output variable to hold sums and count // 0 = totalChordDiffSum // 1 = max avg chord diff // 2 = totalDistance // 3 = max avg total dist // 4 = totalDescrSum // 5 = nDescr double[] output = new double[6]; List<PairInt> addedKPIdxs = matchUsingFM(orb1, orb2, costD, octave1, octave2, bounds1, bounds2, matcher, result, keypoints1IndexMap, keypoints2IndexMap, fm, unmatchedKP1, unmatchedKP2, unmatchedKP1Idxs, unmatchedKP2Idxs, nBands, normDesc, distTol, output); if (output[1] > maxChordAvg) { maxChordAvg = output[1]; } if (output[3] > maxAvgDist) { maxAvgDist = output[3]; } // add the boundary matching epipolar dist sum: output[2] += avgAndMaxDist[0]; System.out.println("nAdded inner points=" + addedKPIdxs.size()); int nTot = result.getNumberOfMatches() + addedKPIdxs.size(); List<QuadInt> corres = new ArrayList<QuadInt>(nTot); // for any point in result that is a keypoint, // add the descriptor cost to totalDescrSum for (int j = 0; j < 
result.getNumberOfMatches(); ++j) { int idx1 = result.idx1s.get(j); int idx2 = result.idx2s.get(j); int x1 = bounds1.getX(idx1); int y1 = bounds1.getY(idx1); PairInt p1 = new PairInt(x1, y1); int x2 = bounds2.getX(idx2); int y2 = bounds2.getY(idx2); PairInt p2 = new PairInt(x2, y2); if (keypoints1IndexMap.containsKey(p1) && keypoints2IndexMap.containsKey(p2)) { int kpIdx1 = keypoints1IndexMap.get(p1); int kpIdx2 = keypoints2IndexMap.get(p2); float c = costD[kpIdx1][kpIdx2]; output[4] += c; output[5]++; } // coords are in full reference frame corres.add(new QuadInt(p1, p2)); } for (int j = 0; j < addedKPIdxs.size(); ++j) { int kpIdx1 = addedKPIdxs.get(j).getX(); int kpIdx2 = addedKPIdxs.get(j).getY(); int x1 = orb1.getKeyPoint1List().get(octave1).get(kpIdx1); int y1 = orb1.getKeyPoint0List().get(octave1).get(kpIdx1); int x2 = orb2.getKeyPoint1List().get(octave2).get(kpIdx2); int y2 = orb2.getKeyPoint0List().get(octave2).get(kpIdx2); corres.add(new QuadInt(x1, y1, x2, y2)); } assert(corres.size() == nTot); // output variable to hold sums and count // 0 = totalChordDiffSum // 1 = max avg chord diff // 2 = totalDistance // 3 = max avg total dist // 4 = totalDescrSum // 5 = nDescr correspondences.add(corres); descCosts.add(output[4]); nDesc.add((int)output[5]); nLabelKP2s.add(kp2Idxs.size()); epCosts.add(output[2]); chordCosts.add(output[0]); octs1.add(octave1); octs2.add(octave2); segIdxs.add(segIdx); descNormalizations.add(normDesc); }// end loop over octave2's segIdx }// end loop over octave2 } // end loop over octave1 int nC = correspondences.size(); /* "salukwzde distance" separate for descriptor cost, chords cost, and the opipolar dist from model then add them -- descr component max matchable number is nLabelKP2s -- ep distances component max matchable number is bounds1.getN() + nLabelKP2s (note: should remove overlapping) -- chords component max matchable number is bounds1.getN() + nLabelKP2s (note: should remove overlapping) */ int[] indexes = new int[nC]; 
float[] costs = new float[nC]; for (int i = 0; i < nC; ++i) { int octave1 = octs1.get(i); int octave2 = octs2.get(i); // calculate "fraction of whole" for hsv keypoint descriptors int nKP2 = nLabelKP2s.get(i); float f1 = 1.f - ((float)nDesc.get(i)/(float)nKP2); //calculate the cost of hsv kp descriptors float d1 = 1.f - ((nBands * 256.f - (float)(descCosts.get(i)/(float)nDesc.get(i))) /descNormalizations.get(i)); if (descNormalizations.get(i) == 0) { d1 = 1; } float sd1 = f1 * f1 + d1 * d1; float nb1 = bounds1.getN(); float n = correspondences.get(i).size(); float f2 = 1.f - (n/(nKP2 + nb1)); float d2 = (float)((chordCosts.get(i)/n)/maxChordAvg); float sd2 = f2 * f2 + d2 * d2; float d3 = (float)((epCosts.get(i)/n)/maxAvgDist); float sd3 = f2 * f2 + d3 * d3; // add in quadrature or linearly... double tot = sd1*sd1 + sd2*sd2 + sd3*sd3; System.out.println(String.format( "octave1=%d octave2=%d segIdx=%d nCor=%d ch=%.2f, normch=%.2f normep=%.2f normdesc=%.2f sd1=%.2f sd2=%.2f sd3=%.2f nd=%d nKP2=%d tot=%.2f", octave1, octave2, segIdxs.get(i), (int)n, (float)chordCosts.get(i), (float)d2, (float)d3, (float)d1, sd1, sd2, sd3, nDesc.get(i), nKP2, (float)tot)); indexes[i] = i; costs[i] = (float)tot; } QuickSort.sortBy1stArg(costs, indexes); List<CorrespondenceList> results = new ArrayList<CorrespondenceList>(); for (int i = 0; i < costs.length; ++i) { int idx = indexes[i]; List<QuadInt> qs = correspondences.get(idx); // points are in full reference frame results.add(new CorrespondenceList(qs)); } return results; } public List<CorrespondenceList> match0Epipolar2(ORB orb1, ORB orb2, Set<PairInt> labeledPoints1, List<Set<PairInt>> labeledPoints2) { if (!orb1.getDescrChoice().equals(orb2.getDescrChoice())) { throw new IllegalStateException("orbs must contain same kind of descirptors"); } SIGMA sigma = SIGMA.ZEROPOINTFIVE; float distTol = 5; float distMax = (float)(Math.sqrt(2) * distTol); PairIntArray bounds1 = createOrderedBounds(orb1, labeledPoints1, sigma); if (bounds1.getN() 
< 7) {
    // precondition: the shape/epipolar fits below need >= 7 boundary points
    throw new IllegalStateException("the boundary of object 1 "
        + " must have at least 7 points");
}

// nearest-neighbor structure over the object-1 boundary, padded by distTol.
// NOTE(review): the two padding expressions differ in form —
// (int)Math.ceil(distTol + 1) vs (int)Math.ceil(distTol) + 1 — numerically
// equal for integral distTol, but probably meant to be the same; confirm.
int[] minMaxXYB1 = MiscMath.findMinMaxXY(bounds1);
NearestNeighbor2D nnb1 = new NearestNeighbor2D(Misc.convert(bounds1),
    minMaxXYB1[1] + (int)Math.ceil(distTol + 1),
    minMaxXYB1[3] + (int)Math.ceil(distTol) + 1);

// boundary point -> its index in bounds1
TObjectIntMap<PairInt> bounds1IndexMap = new TObjectIntHashMap<PairInt>();
for (int i = 0; i < bounds1.getN(); ++i) {
    bounds1IndexMap.put(new PairInt(bounds1.getX(i), bounds1.getY(i)), i);
}

// number of descriptor bands: 3 for HSV, else 1; also validates that the
// chosen descriptors have already been computed on both orbs.
int nBands = 3;
if (orb1.getDescrChoice().equals(ORB.DescriptorChoice.HSV)) {
    if (orb1.getDescriptorsH() == null || orb2.getDescriptorsH() == null) {
        throw new IllegalStateException("hsv descriptors must be created first");
    }
} else if (orb1.getDescrChoice().equals(ORB.DescriptorChoice.ALT)) {
    if (orb1.getDescriptorsListAlt() == null || orb2.getDescriptorsListAlt() == null) {
        throw new IllegalStateException("alt descriptors must be created first");
    }
    nBands = 1;
} else if (orb1.getDescrChoice().equals(ORB.DescriptorChoice.GREYSCALE)) {
    if (orb1.getDescriptorsList() == null || orb2.getDescriptorsList() == null) {
        throw new IllegalStateException("descriptors must be created first");
    }
    nBands = 1;
}

// NOTE: keeping coords in full size reference frames
TFloatList scales1 = extractScales(orb1.getScalesList());
TFloatList scales2 = extractScales(orb2.getScalesList());
if (Math.abs(scales1.get(0) - 1) > 0.01) {
    throw new IllegalArgumentException("logic depends upon first scale"
        + " level being '1'");
}
if (Math.abs(scales2.get(0) - 1) > 0.01) {
    throw new IllegalArgumentException("logic depends upon first scale"
        + " level being '1'");
}

// NOTE(review): eTransformer appears unused in this method
EpipolarTransformer eTransformer = new EpipolarTransformer();

// per-label full-frame object sizes for dataset 2; labels with fewer than
// 7 points are skipped (and so absent from sizes2Maps)
TIntIntMap sizes2Maps = new TIntIntHashMap();
for (int i = 0; i < labeledPoints2.size(); ++i) {
    Set<PairInt> set2 = labeledPoints2.get(i);
    if (set2.size() < 7) {
        continue;
    }
    int sz = calculateObjectSize(set2);
    {// DEBUG: print the label's centroid and full-frame size
        MiscellaneousCurveHelper curveHelper = new MiscellaneousCurveHelper();
        double[] xyCen = curveHelper.calculateXYCentroids(set2);
        System.out.println("set " + i + " center=" + (int) xyCen[0] + ","
            + (int) xyCen[1] + " size_full=" + sz);
    }
    sizes2Maps.put(i, sz);
}

// NOTE(review): imageProcessor appears unused in this method
ImageProcessor imageProcessor = new ImageProcessor();

// per-octave map of keypoint coordinate -> keypoint index for orb1.
// keypoint lists store row (y) in KeyPoint0 and column (x) in KeyPoint1,
// as used consistently below.
List<TObjectIntMap<PairInt>> kp1IdxMapList
    = new ArrayList<TObjectIntMap<PairInt>>();
for (int octave = 0; octave < scales1.size(); ++octave) {
    TObjectIntMap<PairInt> keypoints1IndexMap = new TObjectIntHashMap<PairInt>();
    for (int i = 0; i < orb1.getKeyPoint1List().get(octave).size(); ++i) {
        int x = orb1.getKeyPoint1List().get(octave).get(i);
        int y = orb1.getKeyPoint0List().get(octave).get(i);
        keypoints1IndexMap.put(new PairInt(x, y), i);
    }
    kp1IdxMapList.add(keypoints1IndexMap);
}

// per-octave map of keypoint coordinate -> keypoint index for orb2
List<TObjectIntMap<PairInt>> kp2IdxMapList
    = new ArrayList<TObjectIntMap<PairInt>>();
for (int octave = 0; octave < scales2.size(); ++octave) {
    TObjectIntMap<PairInt> keypoints2IndexMap = new TObjectIntHashMap<PairInt>();
    for (int i = 0; i < orb2.getKeyPoint1List().get(octave).size(); ++i) {
        int x = orb2.getKeyPoint1List().get(octave).get(i);
        int y = orb2.getKeyPoint0List().get(octave).get(i);
        keypoints2IndexMap.put(new PairInt(x, y), i);
    }
    kp2IdxMapList.add(keypoints2IndexMap);
}

// making a lookup map for keypoint indexes in points2 labeled sets:
// per octave, segment label -> set of orb2 keypoint indexes inside it
List<TIntObjectMap<TIntSet>> labels2KPIdxsList
    = new ArrayList<TIntObjectMap<TIntSet>>();
for (int octave = 0; octave < scales2.size(); ++octave) {
    TIntObjectMap<TIntSet> labels2KPIdxs = new TIntObjectHashMap<TIntSet>();
    labels2KPIdxsList.add(labels2KPIdxs);
    TObjectIntMap<PairInt> keypoints2IndexMap = kp2IdxMapList.get(octave);
    for (int segIdx = 0; segIdx < labeledPoints2.size(); ++segIdx) {
        for (PairInt p : labeledPoints2.get(segIdx)) {
            if (keypoints2IndexMap.containsKey(p)) {
                int kp2Idx = keypoints2IndexMap.get(p);
                TIntSet kpIdxs = labels2KPIdxs.get(segIdx);
                if (kpIdxs == null) {
                    kpIdxs = new TIntHashSet();
                    labels2KPIdxs.put(segIdx, kpIdxs);
                }
                kpIdxs.add(kp2Idx);
            }
        }
    }
}

// -- initialize bounds2MapsList and populte on demand
TIntObjectMap<PairIntArray> bounds2Maps = new TIntObjectHashMap<PairIntArray>();

// a cache for partial shape matcher and results
// NOTE(review): this cache is keyed by segIdx only, but the cached epipolar
// fit below is computed from matches (m1, m2) that depend on the current
// octave pair — verify the cached entry is valid when the same segIdx is
// revisited from a different (octave1, octave2).
TIntObjectMap<List<Object>> psmMap = new TIntObjectHashMap<List<Object>>();

// NOTE(review): Double.MIN_VALUE is the smallest positive double, not the
// most negative value; it works as a running-max seed here only because the
// tracked distances are non-negative.
double maxAvgDist = Double.MIN_VALUE;

// per-candidate accumulators, appended in lock-step inside the loops below
// and evaluated together after the loops finish
List<List<QuadInt>> correspondences = new ArrayList<List<QuadInt>>();
TIntList nLabelKP2s = new TIntArrayList();
TDoubleList descCosts = new TDoubleArrayList();
TIntList nDesc = new TIntArrayList();
TFloatList descNormalizations = new TFloatArrayList();
TDoubleList epCosts = new TDoubleArrayList();
TIntList octs1 = new TIntArrayList();
TIntList octs2 = new TIntArrayList();
TIntList segIdxs = new TIntArrayList();

for (int octave1 = 0; octave1 < scales1.size(); ++octave1) {
//for (int octave1 = 1; octave1 < 2; ++octave1) {

    float scale1 = scales1.get(octave1);

    TObjectIntMap<PairInt> keypoints1IndexMap = kp1IdxMapList.get(octave1);

    // template object size at this octave's scale
    float sz1 = calculateObjectSize(labeledPoints1)/scale1;

    int nkp1 = orb1.getKeyPoint0List().get(octave1).size();

    // normalization constant for summed descriptor costs
    float normDesc = nkp1 * nBands * 256;

    // orb1 keypoints at this octave, plus coordinate -> index lookup
    TObjectIntMap<PairInt> leftIdxMap = new TObjectIntHashMap<PairInt>();
    PairIntArray left = new PairIntArray(nkp1);
    for (int i = 0; i < nkp1; ++i) {
        int x = orb1.getKeyPoint1List().get(octave1).get(i);
        int y = orb1.getKeyPoint0List().get(octave1).get(i);
        left.add(x, y);
        leftIdxMap.put(new PairInt(x, y), i);
    }

    // nearest-neighbor structure over the orb1 keypoints (same padding
    // caveat as nnb1 above)
    int[] minMaxXY1 = MiscMath.findMinMaxXY(left);
    NearestNeighbor2D nn1 = new NearestNeighbor2D(Misc.convert(left),
        minMaxXY1[1] + (int)Math.ceil(distTol + 1),
        minMaxXY1[3] + (int)Math.ceil(distTol) + 1);

    for (int octave2 = 0; octave2 < scales2.size(); ++octave2) {
    //for (int octave2 = 0; octave2 < 1; ++octave2) {

        float scale2 = scales2.get(octave2);

        TObjectIntMap<PairInt> keypoints2IndexMap = kp2IdxMapList.get(octave2);

        TIntObjectMap<TIntSet> labels2KPIdxs = labels2KPIdxsList.get(octave2);

        TwoDFloatArray img2 = orb2.getPyramidImages().get(octave2);

        // iterate over the labeled segments that contain orb2 keypoints
        TIntObjectIterator<TIntSet> iter2 = labels2KPIdxs.iterator();
        for (int i2 = 0; i2 < labels2KPIdxs.size(); ++i2) {
            iter2.advance();
            int segIdx = iter2.key();
            TIntSet kp2Idxs = iter2.value();

            float sz2 = sizes2Maps.get(segIdx)/scale2;

            System.out.println("octave1=" + octave1 + " octave2=" + octave2
                + " sz1=" + sz1 + " sz2=" + sz2 + " segIdx=" + segIdx
                + " nKP2=" + kp2Idxs.size());

            // skip segments that vanish at this scale or are too sparse
            if (sz2 == 0 || kp2Idxs.size() < 2) {
                continue;
            }

            // skip when object sizes differ by more than 20%
            if ((sz1 > sz2 && Math.abs(sz1 / sz2) > 1.2)
                || (sz2 > sz1 && Math.abs(sz2 / sz1) > 1.2)) {
                continue;
            }

            // lazily computed ordered boundary of segment segIdx (full frame)
            PairIntArray bounds2 = getOrCreateOrderedBounds(img2,
                bounds2Maps, segIdx, labeledPoints2.get(segIdx), sigma);
            if (bounds2 == null || bounds2.getN() < 7) {
                continue;
            }

            // boundary point -> its index in bounds2
            TObjectIntMap<PairInt> bounds2IdxMap
                = new TObjectIntHashMap<PairInt>(kp2Idxs.size());
            for (int j = 0; j < bounds2.getN(); ++j) {
                bounds2IdxMap.put(new PairInt(bounds2.getX(j), bounds2.getY(j)), j);
            }

            // this segment's orb2 keypoints, plus coordinate -> index lookup
            TObjectIntMap<PairInt> rightIdxMap
                = new TObjectIntHashMap<PairInt>(kp2Idxs.size());
            TIntIterator iter = kp2Idxs.iterator();
            PairIntArray right = new PairIntArray(kp2Idxs.size());
            while (iter.hasNext()) {
                int kpIdx2 = iter.next();
                int x = orb2.getKeyPoint1List().get(octave2).get(kpIdx2);
                int y = orb2.getKeyPoint0List().get(octave2).get(kpIdx2);
                rightIdxMap.put(new PairInt(x, y), right.getN());
                right.add(x, y);
            }

            ORB.Descriptors[] desc1 = getDescriptors(orb1, octave1);
            ORB.Descriptors[] desc2 = getDescriptors(orb2, octave2);
            int[][] costD = ORB.calcDescriptorCostMatrix(desc1, desc2);

            // optimal matching or sorted greedy ordered by incr descr cost;
            // fills m1 (orb1 coords) and m2 (orb2 coords) in parallel
            PairIntArray m1 = new PairIntArray();
            PairIntArray m2 = new PairIntArray();
            matchGreedy(left, right, nBands, costD, nn1, leftIdxMap,
                rightIdxMap, m1, m2,
                orb1.getPyramidImages().get(0).a[0].length,
                orb1.getPyramidImages().get(0).a.length,
                Math.round(distTol),
                bounds1, bounds2, nnb1, bounds1IndexMap, bounds2IdxMap);

            System.out.println("octave1=" + octave1 + " octave2=" + octave2
                + " euclid m1.n=" + m1.getN() + " segIdx=" + segIdx);

            if (m1.getN() < 7) {
                continue;
            }

            {// DEBUG
                String str3 = Integer.toString(segIdx);
                while (str3.length() < 3) {
                    str3 = "0" + str3;
                }
                Image img1 = ORB.convertToImage(
                    orb1.getPyramidImages().get(octave1));
                for (int ii = 0; ii < m1.getN(); ++ii) {
                    int x = Math.round((float)m1.getX(ii)/scale1);
                    int y = Math.round((float)m1.getY(ii)/scale1);
                    ImageIOHelper.addPointToImage(x, y, img1, 1, 0, 255, 0);
                }
                MiscDebug.writeImage(img1, "_TMP1__" + octave1 + "_" + octave2
                    + "_" + str3 + "_" + MiscDebug.getCurrentTimeFormatted());
                img1 = ORB.convertToImage(
                    orb2.getPyramidImages().get(octave2));
                for (int ii = 0; ii < m2.getN(); ++ii) {
                    int x = Math.round((float)m2.getX(ii)/scale2);
                    int y = Math.round((float)m2.getY(ii)/scale2);
                    ImageIOHelper.addPointToImage(x, y, img1, 1, 0, 255, 0);
                }
                MiscDebug.writeImage(img1, "_TMP2__" + octave1 + "_" + octave2
                    + "_" + str3 + "_" + MiscDebug.getCurrentTimeFormatted());
            }

            //NOTE: may need to use euclid here for fewer pts
            List<Object> psmObj = psmMap.get(segIdx);
            if (psmObj == null) {
                PairIntArray outLeft = new PairIntArray(left.getN());
                PairIntArray outRight = new PairIntArray(left.getN());
                RANSACSolver solver = new RANSACSolver();
                EpipolarTransformationFit fit
                    = solver.calculateEpipolarProjection(
                    m1, m2, outLeft, outRight);
                // NOTE(review): this tests left.getN() (the full orb1
                // keypoint count) rather than outLeft.getN() (the RANSAC
                // inlier count) — confirm which was intended.
                if (fit == null || left.getN() < 2) {
                    continue;
                }
                psmObj = new ArrayList<Object>();
                psmObj.add(fit);
                psmObj.add(outLeft);
                psmObj.add(outRight);
                psmMap.put(segIdx, psmObj);
            }

            // replace m1, m2 with the cached RANSAC inlier sets
            EpipolarTransformationFit fit = (EpipolarTransformationFit)psmObj.get(0);
            m1 = (PairIntArray)psmObj.get(1);
            m2 = (PairIntArray)psmObj.get(2);

            System.out.println("octave1=" + octave1 + " octave2=" + octave2
                + " epipolar m1.n=" + m1.getN() + " segIdx=" + segIdx);

            // record which coordinates were matched, to find the leftovers
            Set<PairInt> matched1 = new HashSet<PairInt>();
            Set<PairInt> matched2 = new HashSet<PairInt>();
            for (int j = 0; j < m1.getN(); ++j) {
                int x1 = m1.getX(j);
                int y1 = m1.getY(j);
                int x2 = m2.getX(j);
                int y2 = m2.getY(j);
                matched1.add(new PairInt(x1, y1));
                matched2.add(new PairInt(x2, y2));
            }

            SimpleMatrix fm = fit.getFundamentalMatrix();

            //TODO: this method needs to be tested...normalization effects...
            // sum, avg, max
            double[] avgAndMaxDist = sumAndMaxEPDist(fm, m1, m2);
            if (avgAndMaxDist[2] > maxAvgDist) {
                maxAvgDist = avgAndMaxDist[2];
            }

            // key=keypoint in this labeled region, value=kp2Index
            PairIntArray unmatchedKP2 = new PairIntArray();
            TObjectIntMap<PairInt> unmatchedKP2Idxs = new TObjectIntHashMap<PairInt>();
            iter = kp2Idxs.iterator();
            while (iter.hasNext()) {
                int kp2Idx = iter.next();
                int x = orb2.getKeyPoint1List().get(octave2).get(kp2Idx);
                int y = orb2.getKeyPoint0List().get(octave2).get(kp2Idx);
                PairInt p = new PairInt(x, y);
                if (!matched2.contains(p)) {
                    unmatchedKP2Idxs.put(p, kp2Idx);
                    unmatchedKP2.add(x, y);
                }
            }

            //TODO: include unmatched boundary points
            PairIntArray unmatchedKP1 = new PairIntArray();
            TObjectIntMap<PairInt> unmatchedKP1Idxs = new TObjectIntHashMap<PairInt>();
            TObjectIntIterator<PairInt> iter1 = keypoints1IndexMap.iterator();
            for (int j = 0; j < keypoints1IndexMap.size(); ++j) {
                iter1.advance();
                PairInt p = iter1.key();
                int kpIdx1 = iter1.value();
                if (!matched1.contains(p)) {
                    unmatchedKP1Idxs.put(p, kpIdx1);
                    unmatchedKP1.add(p.getX(), p.getY());
                }
            }

            {// DEBUG, print bounds1 and unmatchedkp1
                String str3 = Integer.toString(segIdx);
                while (str3.length() < 3) {
                    str3 = "0" + str3;
                }
                Image img1 = ORB.convertToImage(
                    orb1.getPyramidImages().get(octave1));
                for (int ii = 0; ii < m1.getN(); ++ii) {
                    int x = Math.round((float)m1.getX(ii)/scale1);
                    int y = Math.round((float)m1.getY(ii)/scale1);
                    ImageIOHelper.addPointToImage(x, y, img1, 1, 0, 255, 0);
                }
                for (int ii = 0; ii < unmatchedKP1.getN(); ++ii) {
                    int x = Math.round((float)unmatchedKP1.getX(ii)/scale1);
                    int y = Math.round((float)unmatchedKP1.getY(ii)/scale1);
                    ImageIOHelper.addPointToImage(x, y, img1, 1, 255, 0, 0);
                }
                MiscDebug.writeImage(img1, "_TMP1_" + octave1 + "_" + octave2
                    + "_" + str3 + "_" + MiscDebug.getCurrentTimeFormatted());
                img1 = ORB.convertToImage(
                    orb2.getPyramidImages().get(octave2));
                for (int ii = 0; ii < m2.getN(); ++ii) {
                    int x = Math.round((float)m2.getX(ii)/scale2);
                    int y = Math.round((float)m2.getY(ii)/scale2);
                    ImageIOHelper.addPointToImage(x, y, img1, 1, 0, 255, 0);
                }
                for (int ii = 0; ii < unmatchedKP2.getN(); ++ii) {
                    int x = Math.round((float)unmatchedKP2.getX(ii)/scale2);
                    int y = Math.round((float)unmatchedKP2.getY(ii)/scale2);
                    ImageIOHelper.addPointToImage(x, y, img1, 1, 255, 0, 0);
                }
                MiscDebug.writeImage(img1, "_TMP2_" + octave1 + "_" + octave2
                    + "_" + str3 + "_" + MiscDebug.getCurrentTimeFormatted());
            }

            // -- use epipolar fundamental matrix to add unmatched
            // points from the segmented cell's keypoints

            // output variable to hold sums and count
            // 0 = totalDistance
            // 1 = max avg total dist
            // 2 = totalDescrSum
            // 3 = nDescr
            double[] output = new double[4];
            List<PairInt> addedKPIdxs = matchUsingFM(orb1, orb2, costD,
                octave1, octave2,
                keypoints1IndexMap, keypoints2IndexMap,
                fm, unmatchedKP1, unmatchedKP2,
                unmatchedKP1Idxs, unmatchedKP2Idxs,
                nBands, normDesc, distTol, output);

            if (output[1] > maxAvgDist) {
                maxAvgDist = output[1];
            }

            // add the boundary matching epipolar dist sum:
            output[0] += avgAndMaxDist[0];

            System.out.println("nAdded inner points=" + addedKPIdxs.size());

            // 0 = totalDistance
            // 1 = max avg total dist
            // 2 = totalDescrSum
            // 3 = nDescr

            int nTot = m1.getN() + addedKPIdxs.size();

            List<QuadInt> corres = new ArrayList<QuadInt>(nTot);

            // for any point in result that is a keypoint,
            // add the descriptor cost to totalDescrSum
            for (int j = 0; j < m1.getN(); ++j) {
                int x1 = m1.getX(j);
                int y1 = m1.getY(j);
                PairInt p1 = new PairInt(x1, y1);
                int x2 = m2.getX(j);
                int y2 = m2.getY(j);
                PairInt p2 = new PairInt(x2, y2);
                if (keypoints1IndexMap.containsKey(p1)
                    && keypoints2IndexMap.containsKey(p2)) {
                    int kpIdx1 = keypoints1IndexMap.get(p1);
                    int kpIdx2 = keypoints2IndexMap.get(p2);
                    float c = costD[kpIdx1][kpIdx2];
                    output[2] += c;
                    output[3]++;
                }
                // coords are in full reference frame
                corres.add(new QuadInt(p1, p2));
            }

            // append the keypoint pairs added via the epipolar model;
            // addedKPIdxs holds (kpIdx1, kpIdx2) pairs
            for (int j = 0; j < addedKPIdxs.size(); ++j) {
                int kpIdx1 = addedKPIdxs.get(j).getX();
                int kpIdx2 = addedKPIdxs.get(j).getY();
                int x1 = orb1.getKeyPoint1List().get(octave1).get(kpIdx1);
                int y1 = orb1.getKeyPoint0List().get(octave1).get(kpIdx1);
                int x2 = orb2.getKeyPoint1List().get(octave2).get(kpIdx2);
                int y2 = orb2.getKeyPoint0List().get(octave2).get(kpIdx2);
                corres.add(new QuadInt(x1, y1, x2, y2));
            }
            assert(corres.size() == nTot);

            // output variable to hold sums and count
            // 0 = totalDistance
            // 1 = max avg total dist
            // 2 = totalDescrSum
            // 3 = nDescr
            correspondences.add(corres);
            descCosts.add(output[2]);
            nDesc.add((int)output[3]);
            nLabelKP2s.add(kp2Idxs.size());
            epCosts.add(output[0]);
            octs1.add(octave1);
            octs2.add(octave2);
            segIdxs.add(segIdx);
            descNormalizations.add(normDesc);

        }// end loop over octave2's segIdx
    }// end loop over octave2
} // end loop over octave1

int nC = correspondences.size();

/*
"salukwzde distance" separate for descriptor cost,
and the opipolar dist from model, then add them
-- descr component max matchable number is nLabelKP2s
-- ep distances component max matchable number is
   bounds1.getN() + nLabelKP2s (note: should remove overlapping)
-- chords component max matchable number is
   bounds1.getN() + nLabelKP2s (note: should remove overlapping)
*/

int[] indexes = new int[nC];
float[] costs = new float[nC];
for (int i = 0; i < nC; ++i) {

    int octave1 = octs1.get(i);
    int octave2 = octs2.get(i);

    // calculate "fraction of whole" for hsv keypoint descriptors
    int nKP2 = nLabelKP2s.get(i);
    float f1 = 1.f - ((float)nDesc.get(i)/(float)nKP2);

    //calculate the cost of hsv kp descriptors
    float d1 = 1.f - ((nBands * 256.f
        - (float)(descCosts.get(i)/(float)nDesc.get(i)))
        /descNormalizations.get(i));
    if (descNormalizations.get(i) == 0) {
        d1 = 1;
    }
    float sd1 = f1 * f1 + d1 * d1;

    // "fraction of whole" and normalized epipolar distance components
    float n = correspondences.get(i).size();
    float f2 = 1.f - (n/(float)nKP2);
    float d2 = (float)((epCosts.get(i)/n)/maxAvgDist);
    float sd2 = f2 * f2 + d2 * d2;

    // add in quadrature or linearly...
double tot = sd1*sd1 + sd2*sd2; System.out.println(String.format( "octave1=%d octave2=%d segIdx=%d nCor=%d normep=%.2f normdesc=%.2f sd1=%.2f sd2=%.2f nd=%d nKP2=%d tot=%.2f", octave1, octave2, segIdxs.get(i), (int)n, (float)d2, (float)d1, sd1, sd2, nDesc.get(i), nKP2, (float)tot)); indexes[i] = i; costs[i] = (float)tot; } QuickSort.sortBy1stArg(costs, indexes); List<CorrespondenceList> results = new ArrayList<CorrespondenceList>(); for (int i = 0; i < costs.length; ++i) { int idx = indexes[i]; List<QuadInt> qs = correspondences.get(idx); // points are in full reference frame results.add(new CorrespondenceList(qs)); } return results; } /** * * NOT READY FOR USE yet. * * needs the orbs to contain the theta pyramidal images. * add usage here. * * @param orb1 * @param orb2 * @param labeledPoints1 * @param labeledPoints2 * @return */ public List<CorrespondenceList> matchSmall(ORB orb1, ORB orb2, Set<PairInt> labeledPoints1, List<Set<PairInt>> labeledPoints2) { TFloatList scales1 = extractScales(orb1.getScalesList()); TFloatList scales2 = extractScales(orb2.getScalesList()); SIGMA sigma = SIGMA.ZEROPOINTFIVE; ImageProcessor imageProcessor = new ImageProcessor(); ColorHistogram cHist = new ColorHistogram(); int templateSize = calculateObjectSize(labeledPoints1); TIntObjectMap<Set<PairInt>> labeledPoints1Lists = new TIntObjectHashMap<Set<PairInt>>(); // key = octave number, value = histograms of cie luv TIntObjectMap<TwoDIntArray> ch1s = new TIntObjectHashMap<TwoDIntArray>(); // key = octave number, value = ordered boundaries of sets TIntObjectMap<PairIntArray> labeledBoundaries1 = new TIntObjectHashMap<PairIntArray>(); for (int octave1 = 0; octave1 < scales1.size(); ++octave1) { float scale1 = scales1.get(octave1); Set<PairInt> set1 = new HashSet<PairInt>(); for (PairInt p : labeledPoints1) { PairInt p1 = new PairInt(Math.round((float) p.getX() / scale1), Math.round((float) p.getY() / scale1)); set1.add(p1); } labeledPoints1Lists.put(octave1, set1); Image img = 
ORB.convertToImage(orb1.getPyramidImages().get(octave1)); int[][] ch = cHist.histogramCIELUV(img, set1); ch1s.put(octave1, new TwoDIntArray(ch)); PairIntArray bounds = imageProcessor.extractSmoothedOrderedBoundary( new HashSet(set1), sigma, img.getWidth(), img.getHeight()); labeledBoundaries1.put(octave1, bounds); } int dp = 1; float intersectionLimit = 0.5F; // key = octave number, value = list of labeled sets TIntObjectMap<List<Set<PairInt>>> labeledPoints2Lists = new TIntObjectHashMap<List<Set<PairInt>>>(); // key = octave number, value = list of histograms of cie lab theta TIntObjectMap<List<TwoDIntArray>> ch2Lists = new TIntObjectHashMap<List<TwoDIntArray>>(); // key = octave number, value = list of ordered points in labeled set TIntObjectMap<List<PairIntArray>> labeledBoundaries2Lists = new TIntObjectHashMap<List<PairIntArray>>(); for (int k = 0; k < labeledPoints2.size(); ++k) { Set<PairInt> set = labeledPoints2.get(k); if (set.size() < 7) { // NOTE: this means that subsequent datasets2 will not be // lists having same indexes as labeledPoints2 continue; } assert(Math.abs(scales2.get(0) - 1) < 0.02); PairIntArray bounds = imageProcessor.extractSmoothedOrderedBoundary( new HashSet(set), sigma, orb2.getPyramidImages().get(0).a[0].length, orb2.getPyramidImages().get(0).a.length); for (int octave2 = 0; octave2 < scales2.size(); ++octave2) { float scale2 = scales2.get(octave2); Image img = ORB.convertToImage( orb2.getPyramidImages().get(octave2)); int w2 = img.getWidth(); int h2 = img.getHeight(); Set<PairInt> set2 = new HashSet<PairInt>(); for (PairInt p : set) { int x = Math.round((float) p.getX() / scale2); int y = Math.round((float) p.getY() / scale2); if (x == w2) { x = w2 - 1; } if (y == h2) { y = h2 - 1; } PairInt p2 = new PairInt(x, y); set2.add(p2); } List<Set<PairInt>> list2 = labeledPoints2Lists.get(octave2); if (list2 == null) { list2 = new ArrayList<Set<PairInt>>(); labeledPoints2Lists.put(octave2, list2); } list2.add(set2); // create histograms for 
later comparison w/ template at // different scales int[][] ch = cHist.histogramCIELUV(img, set2); List<TwoDIntArray> ch2List = ch2Lists.get(octave2); if (ch2List == null) { ch2List = new ArrayList<TwoDIntArray>(); ch2Lists.put(octave2, ch2List); } ch2List.add(new TwoDIntArray(ch)); List<PairIntArray> list3 = labeledBoundaries2Lists.get(octave2); if (list3 == null) { list3 = new ArrayList<PairIntArray>(); labeledBoundaries2Lists.put(octave2, list3); } PairIntArray bounds2 = reduceBounds(bounds, scale2); list3.add(bounds2); assert(labeledBoundaries2Lists.get(octave2).size() == labeledPoints2Lists.get(octave2).size()); assert(labeledBoundaries2Lists.get(octave2).size() == ch2Lists.get(octave2).size()); } } // populated on demand, key=octave, key=segmented cell, value=size TObjectIntMap<PairInt> size2Map = new TObjectIntHashMap<PairInt>(); // -- compare sets over octaves, first by color histogram intersection, // then by partial shape matcher // delaying evaluation of results until end in order to get the // maximum chord differerence sum, needed for Salukwzde distance. 
// for each i, list of Results, chordDiffSums, bounds1, bounds2 // bundling Results and bounds into an object TIntObjectMap<List<PObject>> resultsMap = new TIntObjectHashMap<List<PObject>>(); TIntObjectMap<TDoubleList> chordDiffSumsMap = new TIntObjectHashMap<TDoubleList>(); TIntObjectMap<TFloatList> intersectionsMap = new TIntObjectHashMap<TFloatList>(); double maxDiffChordSum = Double.MIN_VALUE; double maxAvgDiffChord = Double.MIN_VALUE; double maxAvgDist = Double.MIN_VALUE; for (int i = 0; i < scales1.size(); ++i) { //for (int i = 2; i < 3; ++i) { float scale1 = scales1.get(i); int[][] ch1 = ch1s.get(i).a; //Set<PairInt> templateSet = labeledPoints1Lists.get(i); PairIntArray bounds1 = labeledBoundaries1.get(i); float sz1 = calculateObjectSize(bounds1); List<PObject> results = new ArrayList<PObject>(); TDoubleList chordDiffSums = new TDoubleArrayList(); TFloatList intersections = new TFloatArrayList(); for (int j = 0; j < scales2.size(); ++j) { //for (int j = 0; j < 1; ++j) { float scale2 = scales2.get(j); List<TwoDIntArray> listOfCH2s = ch2Lists.get(j); if (listOfCH2s == null) { continue; } List<Set<PairInt>> listOfSets2 = labeledPoints2Lists.get(j); List<PairIntArray> listOfBounds2 = labeledBoundaries2Lists.get(j); for (int k = 0; k < listOfCH2s.size(); ++k) { PairIntArray bounds2 = listOfBounds2.get(k); PairInt octLabelKey = new PairInt(j, k); float sz2; if (size2Map.containsKey(octLabelKey)) { sz2 = size2Map.get(octLabelKey); } else { sz2 = calculateObjectSize(bounds2); } if (sz2 == 0) { continue; } if ((sz1 > sz2 && Math.abs((float)sz1 / (float)sz2) > 1.15) || (sz2 > sz1 && Math.abs((float)sz2 / (float)sz1) > 1.15)) { continue; } int[][] ch2 = listOfCH2s.get(k).a; float intersection = cHist.intersection(ch1, ch2); if (intersection < intersectionLimit) { continue; } System.out.println("p2=" + listOfSets2.get(k).iterator().next() + " sz1=" + sz1 + " sz2=" + sz2 + " nSet=" + listOfSets2.get(k).size()); PartialShapeMatcher matcher = new PartialShapeMatcher(); 
matcher.overrideSamplingDistance(dp); //matcher.setToDebug(); //matcher.setToUseSameNumberOfPoints(); PartialShapeMatcher.Result r = matcher.match(bounds1, bounds2); if (r == null) { continue; } //NOTE: to increase the abilit to find projected objects // that have euclidean poses and skew, might consider // fast ways to approximate an affine and evaluate it // after the euclidean solution here. // affine transformations leave parallel lines in the // transformed space so could look for that in the // unmatched portion: // for example, if half of the object is matched, // could determine the distance of the matched to the // unmatched and use that with knowledge of the // euclidean expected distance to approximate a shear. // for the evaluations to remain easy to compare results // with other results, would not want to allow too much // shear... // in order to add the chord differences, this additional // calculation needs to be handled in the // partial shape matcher (but can be left until the end) double c = r.getChordDiffSum(); results.add(new PObject(r, bounds1, bounds2, scale1, scale2)); chordDiffSums.add(r.getChordDiffSum()); intersections.add(intersection); if (r.getChordDiffSum() > maxDiffChordSum) { maxDiffChordSum = r.getChordDiffSum(); } double avgCD = r.getChordDiffSum() / (double) r.getNumberOfMatches(); if (avgCD > maxAvgDiffChord) { maxAvgDiffChord = avgCD; } double avgDist = r.getDistSum() / (double) r.getNumberOfMatches(); if (avgDist > maxAvgDist) { maxAvgDist = avgDist; } System.out.println(String.format( "%d %d p in set=%s shape matcher c=%.2f np=%d inter=%.2f dist=%.2f avgDist=%.2f", i, j, listOfSets2.get(k).iterator().next().toString(), (float) c, r.getNumberOfMatches(), (float) intersection, (float) r.getDistSum(), (float) avgDist)); try { CorrespondencePlotter plotter = new CorrespondencePlotter(bounds1, bounds2); for (int ii = 0; ii < r.getNumberOfMatches(); ++ii) { int idx1 = r.getIdx1(ii); int idx2 = r.getIdx2(ii); int x1 = 
bounds1.getX(idx1); int y1 = bounds1.getY(idx1); int x2 = bounds2.getX(idx2); int y2 = bounds2.getY(idx2); if ((ii % 4) == 0) { plotter.drawLineInAlternatingColors(x1, y1, x2, y2, 0); } } String strI = Integer.toString(i); while (strI.length() < 2) { strI = "0" + strI; } String strJ = Integer.toString(j); while (strJ.length() < 2) { strJ = "0" + strJ; } String strK = Integer.toString(k); while (strK.length() < 2) { strK = "0" + strK; } String str = strI + strJ + strK; String filePath = plotter.writeImage("_andr_" + str); } catch (Throwable t) { } } //end loop over k labeled sets of dataset 2 } // end loop over j datasets 2 if (!results.isEmpty()) { resultsMap.put(i, results); chordDiffSumsMap.put(i, chordDiffSums); intersectionsMap.put(i, intersections); } } // end loop over i dataset 1 // calculate the Salukwdze distances /* for each i, need the max chord diff sum, nPoints in bound1, and best Results */ double minSD = Double.MAX_VALUE; int minSDI = -1; TIntObjectIterator<List<PObject>> iter = resultsMap.iterator(); for (int i = 0; i < resultsMap.size(); ++i) { iter.advance(); int idx = iter.key(); //double maxDiffChordSum = chordDiffSumsMap.get(idx).max(); double minCost = Double.MAX_VALUE; int minCostIdx = -1; List<PObject> resultsList = resultsMap.get(idx); for (int j = 0; j < resultsList.size(); ++j) { PObject obj = resultsList.get(j); float costIntersection = 1.0F - intersectionsMap.get(idx).get(j); PartialShapeMatcher.Result r = obj.r; int nb1 = Math.round((float) obj.bounds1.getN() / (float) dp); float np = r.getNumberOfMatches(); float countComp = 1.0F - (np / (float) nb1); float countCompSq = countComp * countComp; double chordComp = ((float) r.getChordDiffSum() / np) / maxAvgDiffChord; double chordCompSq = chordComp * chordComp; double avgDist = r.getDistSum() / np; double distComp = avgDist / maxAvgDist; double distCompSq = distComp * distComp; // Salukwzde uses square sums //double sd = r.calculateSalukwdzeDistanceSquared( // maxDiffChordSum, nb1); // 
TODO: consider formal analysis of dependencies and hence // error terms: //double sd = chordCompSq*countCompSq // + distCompSq*countCompSq; //NOTE: The coverage of the matches is currently // approximated as simply numberMatched/maxNumberMatchable, // but a term representing the spatial distribution appears // to be necessary also. // will try largestNumberGap/maxNumberMatchable. // TODO: need to improve this in detail later int lGap = maxNumberOfGaps(obj.bounds1, r)/dp; float gCountComp = (float)lGap/(float)nb1; //double sd = chordCompSq + countCompSq + distCompSq; double sd = chordComp + countComp + gCountComp + distComp + costIntersection; if (sd < minCost) { minCost = sd; minCostIdx = j; } if (sd < minSD) { minSD = sd; minSDI = idx; } System.out.println("sd=" + sd + " n1=" + obj.bounds1.getN() + " n2=" + obj.bounds2.getN() + " origN1=" + r.getOriginalN1() + " nMatches=" + r.getNumberOfMatches() + String.format( " chord=%.2f count=%.2f spatial=%.2f dist=%.2f inter=%.2f", (float)chordComp, (float)countComp, (float)gCountComp, (float)distComp, (float)costIntersection) ); } assert (minCostIdx > -1); TDoubleList cList = chordDiffSumsMap.get(idx); TFloatList iList = intersectionsMap.get(idx); for (int j = resultsList.size() - 1; j > -1; --j) { if (j != minCostIdx) { resultsList.remove(j); cList.removeAt(j); iList.removeAt(j); } } } if (resultsMap.size() > 1) { // TODO: build a test for this. // possibly need to transform results to same reference // frame to compare. 
// using best SD for now TIntSet rm = new TIntHashSet(); iter = resultsMap.iterator(); for (int i = 0; i < resultsMap.size(); ++i) { iter.advance(); int idx = iter.key(); if (idx != minSDI) { rm.add(idx); } } TIntIterator iter2 = rm.iterator(); while (iter2.hasNext()) { int idx = iter2.next(); resultsMap.remove(idx); } } List<CorrespondenceList> topResults = new ArrayList<CorrespondenceList>(); iter = resultsMap.iterator(); for (int i = 0; i < resultsMap.size(); ++i) { iter.advance(); int idx = iter.key(); List<PObject> resultsList = resultsMap.get(idx); assert (resultsList.size() == 1); PObject obj = resultsList.get(0); int n = obj.r.getNumberOfMatches(); if (obj.r.getTransformationParameters() == null) { continue; } PairInt[] m1 = new PairInt[n]; PairInt[] m2 = new PairInt[n]; float scale1 = obj.scale1; float scale2 = obj.scale2; for (int ii = 0; ii < n; ++ii) { int idx1 = obj.r.getIdx1(ii); int idx2 = obj.r.getIdx2(ii); int x1 = Math.round(obj.bounds1.getX(idx1) * scale1); int y1 = Math.round(obj.bounds1.getY(idx1) * scale1); int x2 = Math.round(obj.bounds2.getX(idx2) * scale2); int y2 = Math.round(obj.bounds2.getY(idx2) * scale2); m1[ii] = new PairInt(x1, y1); m2[ii] = new PairInt(x2, y2); } CorrespondenceList cor = new CorrespondenceList(obj.r.getTransformationParameters(), m1, m2); topResults.add(cor); } return topResults; } /** * * NOT READY FOR USE yet. * * searchs among aggregated adjacent labeled points to find best * fitting shape and color object where template is * dataset 1 and the searchable is dataset 2. 
 *
 * @param orb1 ORB pyramid/descriptor holder for the template (dataset 1)
 * @param orb2 ORB pyramid/descriptor holder for the searchable image (dataset 2)
 * @param labeledPoints1 the template object's points (single labeled set)
 * @param labeledPoints2 the segmentation cells of the searchable image
 * @return list of correspondence lists, at most one per template octave that
 *         produced a match (coordinates are rescaled back to full-size frames)
 */
public List<CorrespondenceList> matchAggregatedShape(
    ORB orb1, ORB orb2, Set<PairInt> labeledPoints1,
    List<Set<PairInt>> labeledPoints2) {

    TFloatList scales1 = extractScales(orb1.getScalesList());
    TFloatList scales2 = extractScales(orb2.getScalesList());

    // the octave-0 scale must be 1 because all rescaling below divides by it
    if (Math.abs(scales1.get(0) - 1) > 0.01) {
        throw new IllegalArgumentException("logic depends upon first scale" +
            " level being '1'");
    }
    if (Math.abs(scales2.get(0) - 1) > 0.01) {
        throw new IllegalArgumentException("logic depends upon first scale" +
            " level being '1'");
    }

    SIGMA sigma = SIGMA.ZEROPOINTFIVE;

    ImageProcessor imageProcessor = new ImageProcessor();

    ColorHistogram cHist = new ColorHistogram();

    int templateSize = calculateObjectSize(labeledPoints1);

    // key = octave number, value = template points scaled to that octave
    TIntObjectMap<Set<PairInt>> labeledPoints1Lists =
        new TIntObjectHashMap<Set<PairInt>>();

    // key = octave number, value = histograms of cie cie luv
    TIntObjectMap<TwoDIntArray> ch1s = new TIntObjectHashMap<TwoDIntArray>();

    // key = octave number, value = ordered boundaries of sets
    TIntObjectMap<PairIntArray> labeledBoundaries1 =
        new TIntObjectHashMap<PairIntArray>();

    // -- build per-octave template data: scaled point set, CIE LUV color
    //    histogram, and smoothed ordered boundary
    for (int octave1 = 0; octave1 < scales1.size(); ++octave1) {

        float scale1 = scales1.get(octave1);

        Set<PairInt> set1 = new HashSet<PairInt>();
        for (PairInt p : labeledPoints1) {
            PairInt p1 = new PairInt(Math.round((float) p.getX() / scale1),
                Math.round((float) p.getY() / scale1));
            set1.add(p1);
        }
        labeledPoints1Lists.put(octave1, set1);

        Image img = ORB.convertToImage(orb1.getPyramidImages().get(octave1));

        int[][] ch = cHist.histogramCIELUV(img, set1);
        ch1s.put(octave1, new TwoDIntArray(ch));

        PairIntArray bounds = imageProcessor.extractSmoothedOrderedBoundary(
            new HashSet(set1), sigma, img.getWidth(), img.getHeight());
        labeledBoundaries1.put(octave1, bounds);
    }

    // dp = boundary sampling interval used in the cost normalizations below
    int dp = 1;
    float intersectionLimit = 0.5F;

    // key = octave number, value = list of labeled sets
    TIntObjectMap<List<Set<PairInt>>> labeledPoints2Lists =
        new TIntObjectHashMap<List<Set<PairInt>>>();

    // key = octave number, value = list of histograms of cie lab theta
    TIntObjectMap<List<TwoDIntArray>> ch2Lists =
        new TIntObjectHashMap<List<TwoDIntArray>>();

    // -- build per-octave data for each large-enough segmentation cell of
    //    dataset 2: scaled point set and CIE LUV color histogram
    for (int k = 0; k < labeledPoints2.size(); ++k) {
        Set<PairInt> set = labeledPoints2.get(k);
        if (set.size() < 7) {
            // NOTE: this means that subsequent datasets2 will not be
            // lists having same indexes as labeledPoints2
            continue;
        }

        assert(Math.abs(scales2.get(0) - 1) < 0.02);

        PairIntArray bounds = imageProcessor.extractSmoothedOrderedBoundary(
            new HashSet(set), sigma,
            orb2.getPyramidImages().get(0).a[0].length,
            orb2.getPyramidImages().get(0).a.length);

        for (int octave2 = 0; octave2 < scales2.size(); ++octave2) {

            float scale2 = scales2.get(octave2);

            Image img = ORB.convertToImage(
                orb2.getPyramidImages().get(octave2));
            int w2 = img.getWidth();
            int h2 = img.getHeight();

            Set<PairInt> set2 = new HashSet<PairInt>();
            for (PairInt p : set) {
                int x = Math.round((float) p.getX() / scale2);
                int y = Math.round((float) p.getY() / scale2);
                // clamp rounded coordinates onto the octave image
                if (x == w2) {
                    x = w2 - 1;
                }
                if (y == h2) {
                    y = h2 - 1;
                }
                PairInt p2 = new PairInt(x, y);
                set2.add(p2);
            }

            List<Set<PairInt>> list2 = labeledPoints2Lists.get(octave2);
            if (list2 == null) {
                list2 = new ArrayList<Set<PairInt>>();
                labeledPoints2Lists.put(octave2, list2);
            }
            list2.add(set2);

            // create histograms for later comparison w/ template at
            // different scales
            int[][] ch = cHist.histogramCIELUV(img, set2);
            List<TwoDIntArray> ch2List = ch2Lists.get(octave2);
            if (ch2List == null) {
                ch2List = new ArrayList<TwoDIntArray>();
                ch2Lists.put(octave2, ch2List);
            }
            ch2List.add(new TwoDIntArray(ch));

            assert(labeledPoints2Lists.get(octave2).size() ==
                ch2Lists.get(octave2).size());
        }
    }

    // populated on demand, key=octave, key=segmented cell, value=size
    TObjectIntMap<PairInt> size2Map = new TObjectIntHashMap<PairInt>();

    // -- compare sets over octaves:
    // aggregated search of adjacent labeled cells to compare their combined
    // properties of color histogram and shape to the template.
    // delaying evaluation of results until end in order to get the
    // maximum chord differerence sum, needed for Salukwzde distance.
    // for each i, list of Results, chordDiffSums, bounds1, bounds2
    // bundling Results and bounds into an object

    TIntObjectMap<List<PObject>> resultsMap =
        new TIntObjectHashMap<List<PObject>>();
    TIntObjectMap<TDoubleList> chordDiffSumsMap =
        new TIntObjectHashMap<TDoubleList>();
    TIntObjectMap<TFloatList> intersectionsMap =
        new TIntObjectHashMap<TFloatList>();

    // running maxima used later to normalize the cost components
    double maxDiffChordSum = Double.MIN_VALUE;
    double maxAvgDiffChord = Double.MIN_VALUE;
    double maxAvgDist = Double.MIN_VALUE;

    // maps to reuse the aggregated boundaries
    // list is octave2 items
    // each map key=segmented cell label indexes,
    // value = index to map in octave2IndexBoundsMaps
    List<Map<OneDIntArray, PairIntArray>> octave2KeyIndexMaps =
        new ArrayList<Map<OneDIntArray, PairIntArray>>();
    for (int j = 0; j < scales2.size(); ++j) {
        octave2KeyIndexMaps.add(new HashMap<OneDIntArray, PairIntArray>());
    }

    for (int i = 0; i < scales1.size(); ++i) {
    //for (int i = 0; i < 1; ++i) {

        float scale1 = scales1.get(i);

        int[][] ch1 = ch1s.get(i).a;
        //Set<PairInt> templateSet = labeledPoints1Lists.get(i);
        PairIntArray bounds1 = labeledBoundaries1.get(i);

        float sz1 = calculateObjectSize(bounds1);

        List<PObject> results = new ArrayList<PObject>();
        TDoubleList chordDiffSums = new TDoubleArrayList();
        TFloatList intersections = new TFloatArrayList();

        for (int j = 0; j < scales2.size(); ++j) {
        //for (int j = 0; j < 1; ++j) {

            float scale2 = scales2.get(j);

            List<TwoDIntArray> listOfCH2s = ch2Lists.get(j);
            if (listOfCH2s == null) {
                continue;
            }

            List<Set<PairInt>> listOfSets2 = labeledPoints2Lists.get(j);

            Map<OneDIntArray, PairIntArray> keyBoundsMap =
                octave2KeyIndexMaps.get(j);

            // NOTE(review): the width/height args appear to be passed as
            // (length - 1), i.e. max valid indexes — confirm against the
            // ShapeFinder constructor contract
            ShapeFinder shapeFinder = new ShapeFinder(
                bounds1, ch1, scale1, sz1,
                orb1.getPyramidImages().get(i).a[0].length -1,
                orb1.getPyramidImages().get(i).a.length - 1,
                listOfSets2, listOfCH2s, scale2, keyBoundsMap,
                orb2.getPyramidImages().get(j).a[0].length -1,
                orb2.getPyramidImages().get(j).a.length - 1,
                intersectionLimit
            );
            shapeFinder.pyr1 = orb1.getPyramidImages().get(i);
            shapeFinder.pyr2 = orb2.getPyramidImages().get(j);
            shapeFinder.lbl = Integer.toString(i) + ":" + Integer.toString(j) + "_";
            shapeFinder.oct1 = i;
            shapeFinder.oct2 = j;

            // debug block: writes the octave images and the labeled cells
            // of dataset 2 to disk for visual inspection
            {
            //if (i==2&&j==0) {
                Image img1 = ORB.convertToImage(orb1.getPyramidImages().get(i));
                Image img2 = ORB.convertToImage(orb2.getPyramidImages().get(j));
                MiscDebug.writeImage(img2, "AAA_2_" + j);
                MiscDebug.writeImage(img1, "AAA_1_" + i);
                for (int i2 = 0; i2 < listOfSets2.size(); ++i2) {
                    int clr = ImageIOHelper.getNextColorRGB(i2);
                    Set<PairInt> set = listOfSets2.get(i2);
                    for (PairInt p : set) {
                        ImageIOHelper.addPointToImage(p.getX(), p.getY(),
                            img2, 1, clr);
                    }
                }
                MiscDebug.writeImage(img2, "_AAA_2_s_" + j);
            }

            ShapeFinderResult r = shapeFinder.findAggregated();

            if (r == null) {
                continue;
            }

            double c = r.getChordDiffSum();

            results.add(new PObject(r, r.bounds1, r.bounds2, scale1, scale2));
            chordDiffSums.add(r.getChordDiffSum());
            intersections.add(r.intersection);

            if (r.getChordDiffSum() > maxDiffChordSum) {
                maxDiffChordSum = r.getChordDiffSum();
            }
            double avgCD = r.getChordDiffSum() / (double) r.getNumberOfMatches();
            if (avgCD > maxAvgDiffChord) {
                maxAvgDiffChord = avgCD;
            }
            double avgDist = r.getDistSum() / (double) r.getNumberOfMatches();
            if (avgDist > maxAvgDist) {
                maxAvgDist = avgDist;
            }

            System.out.println(String.format(
                "%d %d p in set=(%d,%d) shape matcher c=%.2f np=%d inter=%.2f dist=%.2f avgDist=%.2f",
                i, j, r.bounds2.getX(0), r.bounds2.getY(0), (float) c,
                r.getNumberOfMatches(), (float) r.intersection,
                (float) r.getDistSum(), (float) avgDist));

        } // end loop over j datasets 2

        if (!results.isEmpty()) {
            resultsMap.put(i, results);
            chordDiffSumsMap.put(i, chordDiffSums);
            intersectionsMap.put(i, intersections);
        }

    } // end loop over i dataset 1

    // calculate the Salukwdze distances
    /*
    for each i, need the max chord diff sum, nPoints in bound1,
    and best Results
    */
    double minSD = Double.MAX_VALUE;
    int minSDI = -1;

    TIntObjectIterator<List<PObject>> iter = resultsMap.iterator();
    for (int i = 0; i < resultsMap.size(); ++i) {
        iter.advance();
        int idx = iter.key();
        //double maxDiffChordSum = chordDiffSumsMap.get(idx).max();
        double minCost = Double.MAX_VALUE;
        int minCostIdx = -1;
        List<PObject> resultsList = resultsMap.get(idx);
        for (int j = 0; j < resultsList.size(); ++j) {
            PObject obj = resultsList.get(j);

            // cost components, each normalized to [0, ~1] by the maxima
            // accumulated above
            float costIntersection = 1.0F - intersectionsMap.get(idx).get(j);

            PartialShapeMatcher.Result r = obj.r;

            int nb1 = Math.round((float) obj.bounds1.getN() / (float) dp);

            float np = r.getNumberOfMatches();

            float countComp = 1.0F - (np / (float) nb1);
            float countCompSq = countComp * countComp;
            double chordComp = ((float) r.getChordDiffSum() / np) /
                maxAvgDiffChord;
            double chordCompSq = chordComp * chordComp;

            double avgDist = r.getDistSum() / np;
            double distComp = avgDist / maxAvgDist;
            double distCompSq = distComp * distComp;

            // Salukwzde uses square sums
            //double sd = r.calculateSalukwdzeDistanceSquared(
            //    maxDiffChordSum, nb1);

            // TODO: consider formal analysis of dependencies and hence
            // error terms:
            //double sd = chordCompSq*countCompSq
            //    + distCompSq*countCompSq;

            //NOTE: The coverage of the matches is currently
            // approximated as simply numberMatched/maxNumberMatchable,
            // but a term representing the spatial distribution appears
            // to be necessary also.
            // will try largestNumberGap/maxNumberMatchable.
            // TODO: need to improve this in detail later
            int lGap = maxNumberOfGaps(obj.bounds1, r)/dp;
            float gCountComp = (float)lGap/(float)nb1;

            //double sd = chordCompSq + countCompSq + distCompSq;
            double sd = chordComp + countComp + gCountComp + distComp
                + costIntersection;

            if (sd < minCost) {
                minCost = sd;
                minCostIdx = j;
            }
            if (sd < minSD) {
                minSD = sd;
                minSDI = idx;
            }

            System.out.println("sd=" + sd +
                " n1=" + obj.bounds1.getN() + " n2=" + obj.bounds2.getN()
                + " origN1=" + r.getOriginalN1()
                + " nMatches=" + r.getNumberOfMatches() +
                String.format(
                " chord=%.2f count=%.2f spatial=%.2f dist=%.2f inter=%.2f",
                (float)chordComp, (float)countComp, (float)gCountComp,
                (float)distComp, (float)costIntersection)
            );
        }
        assert (minCostIdx > -1);

        // keep only the best (min cost) result for this template octave
        TDoubleList cList = chordDiffSumsMap.get(idx);
        TFloatList iList = intersectionsMap.get(idx);
        for (int j = resultsList.size() - 1; j > -1; --j) {
            if (j != minCostIdx) {
                resultsList.remove(j);
                cList.removeAt(j);
                iList.removeAt(j);
            }
        }
    }

    if (resultsMap.size() > 1) {
        // TODO: build a test for this.
        // possibly need to transform results to same reference
        // frame to compare.
        // using best SD for now
        TIntSet rm = new TIntHashSet();
        iter = resultsMap.iterator();
        for (int i = 0; i < resultsMap.size(); ++i) {
            iter.advance();
            int idx = iter.key();
            if (idx != minSDI) {
                rm.add(idx);
            }
        }
        TIntIterator iter2 = rm.iterator();
        while (iter2.hasNext()) {
            int idx = iter2.next();
            resultsMap.remove(idx);
        }
    }

    // build the output correspondences, rescaling matched boundary points
    // back to the full-size (octave 0) reference frames
    List<CorrespondenceList> topResults = new ArrayList<CorrespondenceList>();

    iter = resultsMap.iterator();
    for (int i = 0; i < resultsMap.size(); ++i) {
        iter.advance();
        int idx = iter.key();
        List<PObject> resultsList = resultsMap.get(idx);
        assert (resultsList.size() == 1);
        PObject obj = resultsList.get(0);
        int n = obj.r.getNumberOfMatches();
        PairInt[] m1 = new PairInt[n];
        PairInt[] m2 = new PairInt[n];
        float scale1 = obj.scale1;
        float scale2 = obj.scale2;
        for (int ii = 0; ii < n; ++ii) {
            int idx1 = obj.r.getIdx1(ii);
            int idx2 = obj.r.getIdx2(ii);
            int x1 = Math.round(obj.bounds1.getX(idx1) * scale1);
            int y1 = Math.round(obj.bounds1.getY(idx1) * scale1);
            int x2 = Math.round(obj.bounds2.getX(idx2) * scale2);
            int y2 = Math.round(obj.bounds2.getY(idx2) * scale2);
            m1[ii] = new PairInt(x1, y1);
            m2[ii] = new PairInt(x2, y2);
        }
        CorrespondenceList cor =
            new CorrespondenceList(obj.r.getTransformationParameters(), m1, m2);
        topResults.add(cor);
    }

    return topResults;
}

/**
 * Extracts the first (octave-level) scale from each per-octave scale list.
 */
private TFloatList extractScales(List<TFloatList> scalesList) {
    TFloatList scales = new TFloatArrayList();
    for (int i = 0; i < scalesList.size(); ++i) {
        scales.add(scalesList.get(i).get(0));
    }
    return scales;
}

/**
 * Max over all (i, j) pairs of min(|keypointsX1[i]|, |keypointsX2[j]|).
 * NOTE(review): mathematically this equals
 * min(max |keypointsX1[i]|, max |keypointsX2[j]|), so the nested loop
 * could be reduced to two linear scans.
 */
private int calculateNMaxMatchable(List<TIntList> keypointsX1,
    List<TIntList> keypointsX2) {
    int nMaxM = Integer.MIN_VALUE;
    for (int i = 0; i < keypointsX1.size(); ++i) {
        int n1 = keypointsX1.get(i).size();
        for (int j = 0; j < keypointsX2.size(); ++j) {
            int n2 = keypointsX2.get(j).size();
            int min = Math.min(n1, n2);
            if (min > nMaxM) {
                nMaxM = min;
            }
        }
    }
    return nMaxM;
}

/**
 * Largest list size among the given lists (Integer.MIN_VALUE for empty input).
 */
private int maxSize(List<TIntList> a) {
    int maxSz = Integer.MIN_VALUE;
    for (TIntList b : a) {
        int sz = b.size();
        if (sz >
maxSz) {
            maxSz = sz;
        }
    }
    return maxSz;
}

/**
 * Euclidean distance between the point (x, y) and point b.
 */
public static double distance(int x, int y, PairInt b) {
    int diffX = x - b.getX();
    int diffY = y - b.getY();
    double dist = Math.sqrt(diffX * diffX + diffY * diffY);
    return dist;
}

/**
 * Euclidean distance between p1 and p2, truncated (not rounded) to int.
 */
public static int distance(PairInt p1, PairInt p2) {
    int diffX = p1.getX() - p2.getX();
    int diffY = p1.getY() - p2.getY();
    return (int) Math.sqrt(diffX * diffX + diffY * diffY);
}

/**
 * greedy matching of d1 to d2 by min cost, with unique mappings for
 * all indexes.
 *
 * @param d1
 * @param d2
 * @return matches - two dimensional int array of indexes in d1 and
 * d2 which are matched.
 */
public static int[][] matchDescriptors(VeryLongBitString[] d1,
    VeryLongBitString[] d2,
    List<PairInt> keypoints1, List<PairInt> keypoints2) {

    // NOTE(review): n1 and n2 are currently unused
    int n1 = d1.length;
    int n2 = d2.length;

    //[n1][n2]
    int[][] cost = ORB.calcDescriptorCostMatrix(d1, d2);

    int[][] matches = greedyMatch(keypoints1, keypoints2, cost);

    // greedy or optimal match can be performed here.
    // NOTE: some matching problems might benefit from using the spatial
    // information at the same time.  for those, will consider adding
    // an evaluation term for these descriptors to a specialization of
    // PartialShapeMatcher.java

    return matches;
}

/**
 * greedy matching of d1 to d2 by min difference, with unique mappings for
 * all indexes.
 * NOTE that if 2 descriptors match equally well, either one
 * might get the assignment.
 * Consider using instead, matchDescriptors2 which matches
 * by descriptor and relative spatial location.
 *
 * @param d1
 * @param d2
 * @param keypoints2
 * @param keypoints1
 * @return matches - two dimensional int array of indexes in d1 and
 * d2 which are matched.
 */
public static int[][] matchDescriptors(ORB.Descriptors[] d1,
    ORB.Descriptors[] d2,
    List<PairInt> keypoints1, List<PairInt> keypoints2) {

    // each band of descriptors must be parallel between the two sets
    if (d1.length != d2.length) {
        throw new IllegalArgumentException("d1 and d2 must"
            + " be same length");
    }

    int n1 = d1[0].descriptors.length;
    int n2 = d2[0].descriptors.length;
    if (n1 != keypoints1.size()) {
        throw new IllegalArgumentException("number of descriptors in "
            + " d1 bitstrings must be same as keypoints1 length");
    }
    if (n2 != keypoints2.size()) {
        throw new IllegalArgumentException("number of descriptors in "
            + " d2 bitstrings must be same as keypoints2 length");
    }

    //[n1][n2]
    int[][] cost = ORB.calcDescriptorCostMatrix(d1, d2);

    int[][] matches = greedyMatch(keypoints1, keypoints2, cost);

    // greedy or optimal match can be performed here.
    // NOTE: some matching problems might benefit from using the spatial
    // information at the same time.  for those, will consider adding
    // an evaluation term for these descriptors to a specialization of
    // PartialShapeMatcher.java

    return matches;
}

/**
 * Debug helper: draws the (scaled-back) matched pairs onto copies of the
 * two pyramid images and writes them to disk. I/O failures are ignored.
 */
private static void debugPrint(List<QuadInt> pairs, int i, int j,
    TwoDFloatArray pyr1, TwoDFloatArray pyr2, float s1, float s2) {

    Image img1 = ORB.convertToImage(pyr1);
    Image img2 = ORB.convertToImage(pyr2);

    try {
        for (int ii = 0; ii < pairs.size(); ++ii) {
            QuadInt q = pairs.get(ii);
            int x1 = Math.round(q.getA() / s1);
            int y1 = Math.round(q.getB() / s1);
            int x2 = Math.round(q.getC() / s2);
            int y2 = Math.round(q.getD() / s2);
            ImageIOHelper.addPointToImage(x1, y1, img1, 1, 255, 0, 0);
            ImageIOHelper.addPointToImage(x2, y2, img2, 1, 255, 0, 0);
        }

        // zero-pad the octave indexes to build stable file names
        String strI = Integer.toString(i);
        while (strI.length() < 3) {
            strI = "0" + strI;
        }
        String strJ = Integer.toString(j);
        while (strJ.length() < 3) {
            strJ = "0" + strJ;
        }
        String str = "_pairs_" + strI + "_" + strJ + "_";
        MiscDebug.writeImage(img1, str + "_" + strI);
        MiscDebug.writeImage(img2, str + "_" + strJ);
    } catch (Exception e) {
        // debug-only output; intentionally ignored
    }
}

private static void debugPrint(TwoDFloatArray pyr1, TwoDFloatArray pyr2,
    TIntList kpX1, TIntList kpY1, TIntList kpX2, TIntList kpY2,
    float scale1, float scale2, int img1Idx, int img2Idx) {

    // debug helper: draws the keypoints (divided by the octave scales) onto
    // copies of the two pyramid images and writes them to disk
    Image img1 = ORB.convertToImage(pyr1);
    Image img2 = ORB.convertToImage(pyr2);

    try {
        for (int i = 0; i < kpX1.size(); ++i) {
            int x1 = (int) (kpX1.get(i) / scale1);
            int y1 = (int) (kpY1.get(i) / scale1);
            ImageIOHelper.addPointToImage(x1, y1, img1, 1, 255, 0, 0);
        }
        for (int i = 0; i < kpX2.size(); ++i) {
            int x2 = (int) (kpX2.get(i) / scale2);
            int y2 = (int) (kpY2.get(i) / scale2);
            ImageIOHelper.addPointToImage(x2, y2, img2, 1, 255, 0, 0);
        }

        // zero-pad the image indexes for the output file names
        String strI = Integer.toString(img1Idx);
        while (strI.length() < 3) {
            strI = "0" + strI;
        }
        String strJ = Integer.toString(img2Idx);
        while (strJ.length() < 3) {
            strJ = "0" + strJ;
        }
        String str = "_kp_" + strI + "_" + strJ + "_";
        MiscDebug.writeImage(img1, str + "_i");
        MiscDebug.writeImage(img2, str + "_j");
    } catch (Exception e) {
        // debug-only output; intentionally ignored
    }
}

/**
 * Debug helper: same as the scaled overload but draws keypoints at their
 * given coordinates (no scale division).
 */
private static void debugPrint(TwoDFloatArray pyr1, TwoDFloatArray pyr2,
    TIntList kpX1, TIntList kpY1, TIntList kpX2, TIntList kpY2,
    int img1Idx, int img2Idx) {

    Image img1 = ORB.convertToImage(pyr1);
    Image img2 = ORB.convertToImage(pyr2);

    try {
        for (int i = 0; i < kpX1.size(); ++i) {
            int x1 = kpX1.get(i);
            int y1 = kpY1.get(i);
            ImageIOHelper.addPointToImage(x1, y1, img1, 1, 255, 0, 0);
        }
        for (int i = 0; i < kpX2.size(); ++i) {
            int x2 = kpX2.get(i);
            int y2 = kpY2.get(i);
            ImageIOHelper.addPointToImage(x2, y2, img2, 1, 255, 0, 0);
        }
        String strI = Integer.toString(img1Idx);
        while (strI.length() < 3) {
            strI = "0" + strI;
        }
        String strJ = Integer.toString(img2Idx);
        while (strJ.length() < 3) {
            strJ = "0" + strJ;
        }
        String str = "_kp_" + strI + "_" + strJ + "_";
        MiscDebug.writeImage(img1, str + "_i");
        MiscDebug.writeImage(img2, str + "_j");
    } catch (Exception e) {
        // debug-only output; intentionally ignored
    }
}

/**
 * Zips the parallel x and y coordinate lists into a set of points.
 */
private static Set<PairInt> makeSet(TIntList kpX1, TIntList kpY1) {
    Set<PairInt> set = new HashSet<PairInt>();
    for (int i = 0; i < kpX1.size(); ++i) {
        PairInt p = new PairInt(kpX1.get(i), kpY1.get(i));
        set.add(p);
    }
    return set;
}
/**
 * Selects the descriptor band array for octave i according to the orb's
 * configured descriptor choice (ALT, HSV, or GREYSCALE).
 * NOTE(review): returns null for any other choice — callers must handle that.
 */
private static Descriptors[] getDescriptors(ORB orb, int i) {

    ORB.Descriptors[] d = null;

    if (orb.getDescrChoice().equals(ORB.DescriptorChoice.ALT)) {
        d = new ORB.Descriptors[]{
            orb.getDescriptorsListAlt().get(i)};
    } else if (orb.getDescrChoice().equals(ORB.DescriptorChoice.HSV)) {
        d = new ORB.Descriptors[]{
            orb.getDescriptorsH().get(i),
            orb.getDescriptorsS().get(i),
            orb.getDescriptorsV().get(i)};
    } else if (orb.getDescrChoice().equals(ORB.DescriptorChoice.GREYSCALE)) {
        d = new ORB.Descriptors[]{
            orb.getDescriptorsList().get(i)};
    }

    return d;
}

/**
 * Builds QuadInt candidates (idx1, idx2, idx3, idx4) pairing two distinct
 * keypoints of a dataset-1 label group with a precomputed pair of dataset-2
 * keypoints, pruning pairs whose descriptor cost exceeds 65% of the max
 * possible cost (nBands * 256) and template pairs closer than minP1Diff.
 */
private static List<QuadInt> createPairLabelIndexes(
    int[][] cost, int nBands,
    List<TIntList> pointIndexLists1, TIntList kpX1, TIntList kpY1,
    List<TIntList> pointIndexLists2, TIntList kpX2, TIntList kpY2) {

    int costLimit = Math.round((float) (nBands * 256) * 0.65F);

    int minP1Diff = 3;

    Set<QuadInt> exists = new HashSet<QuadInt>();

    // pairs of idx from set1 and idx from set 2
    Set<PairInt> skip = new HashSet<PairInt>();

    List<QuadInt> pairIndexes = new ArrayList<QuadInt>();

    List<PairInt> pair2Indexes = calculatePairIndexes(pointIndexLists2,
        kpX2, kpY2, minP1Diff);

    for (int ii = 0; ii < pointIndexLists1.size(); ++ii) {
        TIntList kpIndexes1 = pointIndexLists1.get(ii);
        if (kpIndexes1.size() < 2) {
            continue;
        }
        // draw 2 from kpIndexes1
        for (int ii1 = 0; ii1 < kpIndexes1.size(); ++ii1) {
            int idx1 = kpIndexes1.get(ii1);
            int t1X = kpX1.get(idx1);
            int t1Y = kpY1.get(idx1);
            boolean skipIdx1 = false;
            for (int ii2 = 0; ii2 < kpIndexes1.size(); ++ii2) {
                if (ii1 == ii2) {
                    continue;
                }
                int idx2 = kpIndexes1.get(ii2);
                int t2X = kpX1.get(idx2);
                int t2Y = kpY1.get(idx2);
                if (t1X == t2X && t1Y == t2Y) {
                    continue;
                }
                int diffX = t1X - t2X;
                int diffY = t1Y - t2Y;
                int distSq = diffX * diffX + diffY * diffY;
                //if (distSq > limitSq) {
                //    continue;
                if (distSq < minP1Diff * minP1Diff) {
                    continue;
                }

                for (PairInt p2Index : pair2Indexes) {
                    int idx3 = p2Index.getX();
                    int idx4 = p2Index.getY();

                    PairInt p13 = new PairInt(idx1, idx3);
                    if (skip.contains(p13)) {
                        skipIdx1 = true;
                        break;
                    }
                    PairInt p24 = new PairInt(idx2, idx4);
                    if (skip.contains(p24)) {
                        continue;
                    }

                    int c13 = cost[idx1][idx3];
                    // if idx1 and idx3 cost is above limit, skip
                    if (c13 > costLimit) {
                        skip.add(p13);
                        skipIdx1 = true;
                        break;
                    }
                    int c24 = cost[idx2][idx4];
                    if (c24 > costLimit) {
                        skip.add(p24);
                        continue;
                    }

                    QuadInt q = new QuadInt(idx1, idx2, idx3, idx4);
                    QuadInt qChk = new QuadInt(idx2, idx1, idx4, idx3);
                    if (exists.contains(q) || exists.contains(qChk)) {
                        continue;
                    }
                    /*
                    int s1X = kpX2.get(idx3);
                    int s1Y = kpY2.get(idx3);
                    int s2X = kpX2.get(idx4);
                    int s2Y = kpY2.get(idx4);
                    int diffX2 = s1X - s2X;
                    int diffY2 = s1Y - s2Y;
                    int distSq2 = diffX2 * diffX2 + diffY2 * diffY2;
                    //if (distSq2 > limitSq) {
                    //    continue;
                    //}
                    if ((distSq2 < minP1Diff * minP1Diff)) {
                        continue;
                    }
                    */
                    pairIndexes.add(q);
                    exists.add(q);
                }
                if (skipIdx1) {
                    break;
                }
            }
        }
    }

    return pairIndexes;
}

/**
 * Object "size" as the rounded distance between the furthest pair of points.
 *
 * @throws IllegalArgumentException if no furthest pair can be found
 */
public static int calculateObjectSize(Set<PairInt> points) {
    // O(N*lg_2(N))
    FurthestPair furthestPair = new FurthestPair();
    PairInt[] fp = furthestPair.find(points);
    if (fp == null || fp.length < 2) {
        throw new IllegalArgumentException("did not find a furthest pair"
            + " in points");
    }
    double dist = ORBMatcher.distance(fp[0], fp[1]);
    return (int) Math.round(dist);
}

/**
 * Convenience overload converting the array to a set first.
 */
public static int calculateObjectSize(PairIntArray points) {
    return calculateObjectSize(Misc.convert(points));
}

/**
 * Diagonal length from the origin to (max x, max y) of the idx-th keypoint
 * lists.
 */
private static float calculateDiagonal(List<TIntList> keypointsX1,
    List<TIntList> keypointsY1, int idx) {

    TIntList x1 = keypointsX1.get(idx);
    TIntList y1 = keypointsY1.get(idx);

    int maxX = x1.max();
    int maxY = y1.max();

    return (float) Math.sqrt(maxX * maxX + maxY * maxY);
}

/**
 * NOTE: preliminary results show that this matches the right pattern as
 * a subset of the object, but needs to be followed by a slightly larger
 * aggregated search by segmentation cells using partial shape matcher
 * for example.
This was started in ShapeFinder, but needs to be * adjusted for a search given seed cells and possibly improved for the * other TODO items). * @param keypoints1 * @param keypoints2 * @param mT * @param mS * @param nn * @param minMaxXY2 * @param limit * @param tIndexes * @param idx1P2CostMap * @param indexes * @param costs * @return */ private static List<CorrespondenceList> completeUsingCombinations(List<PairInt> keypoints1, List<PairInt> keypoints2, PairIntArray mT, PairIntArray mS, NearestNeighbor2D nn, int[] minMaxXY2, int limit, TIntList tIndexes, TIntObjectMap<TObjectIntMap<PairInt>> idx1P2CostMap, PairInt[] indexes, int[] costs, int bitTolerance, int nBands) { int nTop = mT.getN(); System.out.println("have " + nTop + " sets of points for " + " n of k=2 combinations"); // need to make pairs of combinations from mT,mS // to calcuate euclidean transformations and evaluate them. // -- can reduce the number of combinations by imposing a // distance limit on separation of feasible pairs int limitSq = limit * limit; MatchedPointsTransformationCalculator tc = new MatchedPointsTransformationCalculator(); Transformer transformer = new Transformer(); // this fixed size sorted vector is faster for shorter arrays. // TODO: consider ways to robustly set the size from the cost // statistics to ensure the vector will always contain the // correct solution even if not in top position. 
int nt = mT.getN();
// sorted, fixed-capacity container of candidate correspondence objects
FixedSizeSortedVector<CObject> vec = new FixedSizeSortedVector<CObject>(nt, CObject.class);
double minCost = Double.MAX_VALUE;
//CorrespondenceList minCostCor = null;
//PairIntArray minCostTrT = null;
// NOTE(review): minCostI/minDistI are written below but not read in the
// visible portion of this method -- possibly leftover diagnostics.
double[] minCostI = new double[nTop];
double[] minDistI = new double[nTop];
// temporary storage of corresp coords until object construction
int[] m1x = new int[nTop];
int[] m1y = new int[nTop];
int[] m2x = new int[nTop];
int[] m2y = new int[nTop];
int mCount = 0;
for (int i = 0; i < nTop; ++i) {
    int t1X = mT.getX(i);
    int t1Y = mT.getY(i);
    int s1X = mS.getX(i);
    int s1Y = mS.getY(i);
    // choose all combinations of 2nd point within distance
    // limit of point s1.
    for (int j = i + 1; j < mS.getN(); ++j) {
        int t2X = mT.getX(j);
        int t2Y = mT.getY(j);
        int s2X = mS.getX(j);
        int s2Y = mS.getY(j);
        if ((t1X == t2X && t1Y == t2Y) || (s1X == s2X && s1Y == s2Y)) {
            continue;
        }
        int diffX = s1X - s2X;
        int diffY = s1Y - s2Y;
        int distSq = diffX * diffX + diffY * diffY;
        if (distSq > limitSq) {
            continue;
        }
        // -- calculate euclid transformation
        // -- evaluate the fit
        TransformationParameters params = tc.calulateEuclidean(t1X, t1Y, t2X, t2Y, s1X, s1Y, s2X, s2Y, 0, 0);
        float scale = params.getScale();
        mCount = 0;
        // template object transformed
        PairIntArray trT = transformer.applyTransformation(params, mT);
        /*
        two components to the evaluation and both need normalizations
        so that their contributions to total result are equally weighted.
        (1) descriptors:
            -- score is sum of each matched (3*256 - cost)
            -- the normalization is the maximum possible score,
               so will use the number of template points.
               --> norm = nTemplate * 3 * 256
            -- normalized score = (3*256 - cost)/norm
               ==> normalized cost = 1 - ((3*256 - cost)/norm)
        (2) spatial distances from transformed points:
            -- sum of distances within limit
               and replacement of distance by limit if no matching
               nearest neighbor is found.
            -- divide each distance by the transformation scale to
               compare same values
            -- divide the total sum by the total max possible
               --> norm = nTemplate * limit / scale
        Then the total cost is (1) + (2) and the min cost
        among all of these combinations is the resulting
        correspondence list
        */
        double maxCost = nBands * 256;
        double maxDist = limit / scale;
        double sum1 = 0;
        double sum2 = 0;
        double sum = 0;
        for (int k = 0; k < trT.getN(); ++k) {
            int xTr = trT.getX(k);
            int yTr = trT.getY(k);
            int idx1 = tIndexes.get(k);
            Set<PairInt> nearest = null;
            if ((xTr >= 0) && (yTr >= 0) && (xTr <= (minMaxXY2[1] + limit)) && (yTr <= (minMaxXY2[3] + limit))) {
                nearest = nn.findClosest(xTr, yTr, limit);
            }
            // pick the nearest neighbor with the smallest descriptor cost
            int minC = Integer.MAX_VALUE;
            PairInt minCP2 = null;
            if (nearest != null && !nearest.isEmpty()) {
                TObjectIntMap<PairInt> cMap = idx1P2CostMap.get(idx1);
                for (PairInt p2 : nearest) {
                    if (!cMap.containsKey(p2)) {
                        continue;
                    }
                    int c = cMap.get(p2);
                    if (c < minC) {
                        minC = c;
                        minCP2 = p2;
                    }
                }
            }
            if (minCP2 != null) {
                // NOTE(review): numerator uses a hard-coded 3 bands while
                // maxCost above uses nBands -- confirm these should agree.
                double scoreNorm = (3 * 256 - minC) / maxCost;
                double costNorm = 1.0 - scoreNorm;
                sum1 += costNorm;
                double dist = ORBMatcher.distance(xTr, yTr, minCP2);
                double distNorm = dist / maxDist;
                sum2 += distNorm;
                m1x[mCount] = keypoints1.get(idx1).getX();
                m1y[mCount] = keypoints1.get(idx1).getY();
                m2x[mCount] = minCP2.getX();
                m2y[mCount] = minCP2.getY();
                minCostI[mCount] = costNorm;
                minDistI[mCount] = distNorm;
                mCount++;
            } else {
                // unmatched template point contributes max normalized cost
                sum1 += 1;
                sum2 += 1;
            }
        }
        sum = sum1 + sum2;
        if ((minCost == Double.MAX_VALUE) || (sum < (minCost + bitTolerance))) {
            if (sum < minCost) {
                minCost = sum;
            }
            List<PairInt> m1 = new ArrayList<PairInt>();
            List<PairInt> m2 = new ArrayList<PairInt>();
            CorrespondenceList corr = new CorrespondenceList(params.getScale(), Math.round(params.getRotationInDegrees()), Math.round(params.getTranslationX()), Math.round(params.getTranslationY()), 0, 0, 0, m1, m2);
            for (int mi = 0; mi < mCount; ++mi) {
                m1.add(new PairInt(m1x[mi], m1y[mi]));
                m2.add(new PairInt(m2x[mi], m2y[mi]));
            }
CObject cObj = new CObject(sum, corr, trT);
            vec.add(cObj);
        }
    }
}
if (vec.getNumberOfItems() == 0) {
    return null;
}
// keep every result whose cost is within bitTolerance of the best cost
List<CorrespondenceList> topResults = new ArrayList<CorrespondenceList>();
for (int i = 0; i < vec.getNumberOfItems(); ++i) {
    CObject a = vec.getArray()[i];
    if (a.cost > (minCost + bitTolerance)) {
        break;
    }
    topResults.add(a.cCor);
}
return topResults;
}

/**
 * Build unique ordered index pairs (idx3, idx4) drawn from each list in
 * pointIndexLists2, skipping coincident points and pairs separated by
 * less than minPDiff pixels.
 */
private static List<PairInt> calculatePairIndexes(List<TIntList> pointIndexLists2, TIntList kpX2, TIntList kpY2, int minPDiff) {
    List<PairInt> pairIndexes = new ArrayList<PairInt>();
    Set<PairInt> exists = new HashSet<PairInt>();
    // draw 2 pairs from other dataset
    for (int jj = 0; jj < pointIndexLists2.size(); ++jj) {
        TIntList kpIndexes2 = pointIndexLists2.get(jj);
        if (kpIndexes2.size() < 2) {
            continue;
        }
        // draw 2 from kpIndexes2
        for (int jj1 = 0; jj1 < kpIndexes2.size(); ++jj1) {
            int idx3 = kpIndexes2.get(jj1);
            int s1X = kpX2.get(idx3);
            int s1Y = kpY2.get(idx3);
            for (int jj2 = 0; jj2 < kpIndexes2.size(); ++jj2) {
                if (jj1 == jj2) {
                    continue;
                }
                int idx4 = kpIndexes2.get(jj2);
                int s2X = kpX2.get(idx4);
                int s2Y = kpY2.get(idx4);
                if (s1X == s2X && s1Y == s2Y) {
                    continue;
                }
                PairInt q = new PairInt(idx3, idx4);
                if (exists.contains(q)) {
                    continue;
                }
                int diffX2 = s1X - s2X;
                int diffY2 = s1Y - s2Y;
                int distSq2 = diffX2 * diffX2 + diffY2 * diffY2;
                //if (distSq2 > limitSq) {
                //    continue;
                if (distSq2 < minPDiff * minPDiff) {
                    continue;
                }
                pairIndexes.add(q);
                exists.add(q);
            }
        }
    }
    return pairIndexes;
}

/**
 * Diagonal length of pyramid image idx computed from its 2-D array
 * dimensions.
 */
private static float calculateDiagonal2(List<TwoDFloatArray> pyramidImages, int idx) {
    int w = pyramidImages.get(idx).a.length;
    int h = pyramidImages.get(idx).a[0].length;
    double diag = Math.sqrt(w * w + h * h);
    return (float) diag;
}

/**
 * Greedy one-to-one matching of keypoints by ascending descriptor cost.
 * Returns rows of {index into keypoints1, index into keypoints2}.
 */
private static int[][] greedyMatch(List<PairInt> keypoints1, List<PairInt> keypoints2, int[][] cost) {
    int n1 = keypoints1.size();
    int n2 = keypoints2.size();
    // for the greedy match, separating the index information from the cost
    // and then sorting by cost
    int nTot = n1 * n2;
    PairInt[]
indexes = new PairInt[nTot];
    int[] costs = new int[nTot];
    int count = 0;
    for (int i = 0; i < n1; ++i) {
        for (int j = 0; j < n2; ++j) {
            indexes[count] = new PairInt(i, j);
            costs[count] = cost[i][j];
            count++;
        }
    }
    assert (count == nTot);
    QuickSort.sortBy1stArg(costs, indexes);
    Set<PairInt> set1 = new HashSet<PairInt>();
    Set<PairInt> set2 = new HashSet<PairInt>();
    List<PairInt> matches = new ArrayList<PairInt>();
    // visit lowest costs (== differences) first
    for (int i = 0; i < nTot; ++i) {
        PairInt index12 = indexes[i];
        int idx1 = index12.getX();
        int idx2 = index12.getY();
        PairInt p1 = keypoints1.get(idx1);
        PairInt p2 = keypoints2.get(idx2);
        // enforce one-to-one matching on the point coordinates
        if (set1.contains(p1) || set2.contains(p2)) {
            continue;
        }
        //System.out.println("p1=" + p1 + " " + " p2=" + p2 + " cost=" + costs[i]);
        matches.add(index12);
        set1.add(p1);
        set2.add(p2);
    }
    // pack the matches as rows of {idx1, idx2}
    int[][] results = new int[matches.size()][2];
    for (int i = 0; i < matches.size(); ++i) {
        results[i][0] = matches.get(i).getX();
        results[i][1] = matches.get(i).getY();
    }
    return results;
}

/**
 * Sum the normalized distances between transformed set-2 keypoints and
 * their nearest set-1 neighbors; each unmatched point contributes 1.
 * Matched coordinates are written into m1x/m1y/m2x/m2y.
 *
 * @return {sum of normalized distances, number of matched points}
 */
private static double[] sumKeypointDistanceDifference(TIntList a2Indexes, PairIntArray tr2, TIntList kpX2, TIntList kpY2, NearestNeighbor2D nn, TransformationParameters params, int maxX, int maxY, int pixTolerance, double maxDist, int[] m1x, int[] m1y, int[] m2x, int[] m2y) {
    double sum2 = 0;
    int mCount = 0;
    for (int k = 0; k < tr2.getN(); ++k) {
        int x2Tr = tr2.getX(k);
        int y2Tr = tr2.getY(k);
        int idx2 = a2Indexes.get(k);
        Set<PairInt> nearest = null;
        if ((x2Tr >= 0) && (y2Tr >= 0) && (x2Tr <= (maxX + pixTolerance)) && (y2Tr <= (maxY + pixTolerance))) {
            nearest = nn.findClosest(x2Tr, y2Tr, pixTolerance);
        }
        // choose the closest of the returned neighbors
        double minDist = Double.MAX_VALUE;
        PairInt minDistP1 = null;
        if (nearest != null && !nearest.isEmpty()) {
            for (PairInt p11 : nearest) {
                double dist = ORBMatcher.distance(x2Tr, y2Tr, p11);
                if (dist < minDist) {
                    minDist = dist;
                    minDistP1 = p11;
                }
            }
        }
        if (minDistP1 != null) {
            double dist = minDist;
            double distNorm = dist / maxDist;
            sum2 += distNorm;
            m2x[mCount] =
kpX2.get(idx2);
            m2y[mCount] = kpY2.get(idx2);
            m1x[mCount] = minDistP1.getX();
            m1y[mCount] = minDistP1.getY();
            mCount++;
        } else {
            // no neighbor within tolerance: add the max normalized penalty
            sum2 += 1;
        }
    } // end loop over trnsformed set 2
    return new double[]{sum2, mCount};
}

/**
 * Sum normalized descriptor costs and distances between transformed set-1
 * keypoints and their nearest set-2 neighbors; unmatched points contribute
 * 1 to each sum.  Matched pairs are written into m1/m2.
 * NOTE(review): the params argument is not read in this overload's visible
 * body -- confirm whether it is still needed.
 *
 * @return {sum of descriptor costs, sum of distances, number matched}
 */
private static double[] sumKeypointDescAndDist(int[][] cost, int nBands, TIntList a1Indexes, PairIntArray tr1, TIntList kpX1, TIntList kpY1, NearestNeighbor2D nn2, TObjectIntMap<PairInt> p2KPIndexMap, TransformationParameters params, int maxX2, int maxY2, int pixTolerance, double maxDist, PairInt[] m1, PairInt[] m2) {
    double sumDesc = 0;
    double sumDist = 0;
    int count = 0;
    double maxDesc = nBands * 256.0;
    for (int k = 0; k < tr1.getN(); ++k) {
        int x1Tr = tr1.getX(k);
        int y1Tr = tr1.getY(k);
        int idx1 = a1Indexes.get(k);
        Set<PairInt> nearest = null;
        if ((x1Tr >= 0) && (y1Tr >= 0) && (x1Tr <= (maxX2 + pixTolerance)) && (y1Tr <= (maxY2 + pixTolerance))) {
            nearest = nn2.findClosest(x1Tr, y1Tr, pixTolerance);
        }
        // among the nearest neighbors choose the lowest descriptor cost
        int minC = Integer.MAX_VALUE;
        PairInt minCP2 = null;
        int minIdx2 = 0;
        if (nearest != null && !nearest.isEmpty()) {
            for (PairInt p2 : nearest) {
                int idx2 = p2KPIndexMap.get(p2);
                int c = cost[idx1][idx2];
                if (c < minC) {
                    minC = c;
                    minCP2 = p2;
                    minIdx2 = idx2;
                }
            }
        }
        if (minCP2 != null) {
            double scoreNorm = (nBands * 256 - minC) / maxDesc;
            double costNorm = 1.0 - scoreNorm;
            sumDesc += costNorm;
            double dist = ORBMatcher.distance(x1Tr, y1Tr, minCP2);
            double distNorm = dist / maxDist;
            sumDist += distNorm;
            m1[count] = new PairInt(kpX1.get(idx1), kpY1.get(idx1));
            m2[count] = minCP2;
            count++;
        } else {
            sumDesc += 1;
            sumDist += 1;
        }
    }
    return new double[]{sumDesc, sumDist, count};
}

/**
 * Same as the overload above, but returns matched keypoint indexes in
 * m1/m2 and enforces a greedy one-to-one assignment afterwards.
 *
 * @return {sum of descriptor costs, sum of distances, number matched}
 */
private static double[] sumKeypointDescAndDist(int[][] cost, int nBands, TIntList a1Indexes, PairIntArray tr1, TIntList kpX1, TIntList kpY1, NearestNeighbor2D nn2, TObjectIntMap<PairInt> p2KPIndexMap, int maxX2, int maxY2, int pixTolerance, double maxDist, int[] m1, int[] m2) {
    double sumDesc = 0;
    double sumDist = 0;
    int count = 0;
    double maxDesc = nBands * 256.0;
    //best first match, after nearest neighbors
    // TODO: consider optimal bipartite matching when have an
    // implementation of multi-level-buckets
    float[] costA = new float[tr1.getN()];
    float[] costDesc = new float[tr1.getN()];
    float[] costDist = new float[tr1.getN()];
    int[] indexes = new int[tr1.getN()];
    for (int k = 0; k < tr1.getN(); ++k) {
        int x1Tr = tr1.getX(k);
        int y1Tr = tr1.getY(k);
        int idx1 = a1Indexes.get(k);
        Set<PairInt> nearest = null;
        if ((x1Tr >= 0) && (y1Tr >= 0) && (x1Tr <= (maxX2 + pixTolerance)) && (y1Tr <= (maxY2 + pixTolerance))) {
            nearest = nn2.findClosest(x1Tr, y1Tr, pixTolerance);
        }
        int minC = Integer.MAX_VALUE;
        PairInt minCP2 = null;
        int minIdx2 = 0;
        if (nearest != null && !nearest.isEmpty()) {
            for (PairInt p2 : nearest) {
                int idx2 = p2KPIndexMap.get(p2);
                int c = cost[idx1][idx2];
                if (c < minC) {
                    minC = c;
                    minCP2 = p2;
                    minIdx2 = idx2;
                }
            }
        }
        if (minCP2 != null) {
            double scoreNorm = (nBands * 256 - minC) / maxDesc;
            double costNorm = 1.0 - scoreNorm;
            sumDesc += costNorm;
            double dist = ORBMatcher.distance(x1Tr, y1Tr, minCP2);
            double distNorm = dist / maxDist;
            sumDist += distNorm;
            m1[count] = idx1;
            m2[count] = minIdx2;
            costA[count] = (float) (costNorm + distNorm);
            costDesc[count] = (float) costNorm;
            costDist[count] = (float) distNorm;
            indexes[count] = count;
            count++;
        } else {
            sumDesc += 1;
            sumDist += 1;
        }
    }
    if (count > 1) {
        // greedy one-to-one assignment in ascending combined cost order
        costA = Arrays.copyOf(costA, count);
        indexes = Arrays.copyOf(indexes, count);
        QuickSort.sortBy1stArg(costA, indexes);
        TIntSet set1 = new TIntHashSet();
        TIntSet set2 = new TIntHashSet();
        List<PairInt> matched = new ArrayList<PairInt>();
        TIntList idxs = new TIntArrayList();
        for (int i = 0; i < count; ++i) {
            int idx = indexes[i];
            int idx1 = m1[idx];
            int idx2 = m2[idx];
            if (set1.contains(idx1) || set2.contains(idx2)) {
                continue;
            }
            idxs.add(idx);
            matched.add(new PairInt(idx1, idx2));
            set1.add(idx1);
            set2.add(idx2);
        }
        int nRedundant = count - matched.size();
        if (nRedundant > 0) {
            // recompute the sums over the de-duplicated matches only
            sumDesc = 0;
            sumDist = 0;
            for (int i = 0; i < matched.size(); ++i) {
                m1[i] =
matched.get(i).getX();
                m2[i] = matched.get(i).getY();
                int idx = idxs.get(i);
                sumDesc += costDesc[idx];
                sumDist += costDist[idx];
            }
            // penalize each unmatched transformed point by the max cost of 1
            sumDesc += (tr1.getN() - matched.size());
            sumDist += (tr1.getN() - matched.size());
            count = matched.size();
        }
    }
    return new double[]{sumDesc, sumDist, count};
}

/**
 * match left and right using the right transformed to left and nn1
 * and return the sum of the descriptor costs, distance differences
 * and number matched.
 * NOTE that the each item in a sum has been normalized before adding,
 * so for example, one item contributes between 0 and 1 to the total sum.
 * a "+1" or +maxValue was not added for points not matched.
 * @param cost
 * @param nBands
 * @param a2
 * @param a2TrTo1
 * @param nn1
 * @param p1KPIndexMap
 * @param p2KPIndexMap
 * @param img1Width
 * @param img1Height
 * @param pixTolerance
 * @param maxDist
 * @param m1
 * @param m2
 * @return
 */
private static double[] sumKeypointDescAndDist2To1(
    int[][] cost, int nBands, PairIntArray a2, PairIntArray a2TrTo1, NearestNeighbor2D nn1, TObjectIntMap<PairInt> p1KPIndexMap, TObjectIntMap<PairInt> p2KPIndexMap, int img1Width, int img1Height, int pixTolerance, double maxDist, int[] m1, int[] m2) {
    int n2 = a2TrTo1.getN();
    int count = 0;
    double maxDesc = nBands * 256.0;
    //best first match, after nearest neighbors
    // TODO: consider optimal bipartite matching when have an
    // implementation of multi-level-buckets
    float[] costA = new float[n2];
    float[] costDesc = new float[n2];
    float[] costDist = new float[n2];
    int[] indexes = new int[n2];
    for (int k = 0; k < n2; ++k) {
        int x1Tr = a2TrTo1.getX(k);
        int y1Tr = a2TrTo1.getY(k);
        if (x1Tr < 0 || y1Tr < 0) {
            continue;
        }
        int kpIdx2 = p2KPIndexMap.get(new PairInt(a2.getX(k), a2.getY(k)));
        Set<PairInt> nearest = nn1.findClosest(x1Tr, y1Tr, pixTolerance);
        // among the nearest neighbors choose the lowest descriptor cost
        int minC = Integer.MAX_VALUE;
        PairInt minCP1 = null;
        int minCIdx1 = 0;
        if (nearest != null && !nearest.isEmpty()) {
            for (PairInt p1 : nearest) {
                int kpIdx1 = p1KPIndexMap.get(p1);
                int c =
cost[kpIdx1][kpIdx2];
                if (c < minC) {
                    minC = c;
                    minCP1 = p1;
                    minCIdx1 = kpIdx1;
                }
            }
        }
        if (minCP1 != null) {
            double scoreNorm = (nBands * 256 - minC) / maxDesc;
            double costNorm = 1.0 - scoreNorm;
            double dist = ORBMatcher.distance(x1Tr, y1Tr, minCP1);
            double distNorm = dist / maxDist;
            m1[count] = minCIdx1;
            m2[count] = kpIdx2;
            costA[count] = (float) (costNorm + distNorm);
            costDesc[count] = (float) costNorm;
            costDist[count] = (float) distNorm;
            indexes[count] = count;
            count++;
        }
    }
    double sumDesc = 0;
    double sumDist = 0;
    if (count > 1) {
        if (count < costA.length) {
            costA = Arrays.copyOf(costA, count);
            indexes = Arrays.copyOf(indexes, count);
            costDesc = Arrays.copyOf(costDesc, count);
            costDist = Arrays.copyOf(costDist, count);
        }
        // greedy one-to-one assignment in ascending combined cost order
        QuickSort.sortBy1stArg(costA, indexes);
        TIntSet set1 = new TIntHashSet();
        TIntSet set2 = new TIntHashSet();
        PairIntArray matched = new PairIntArray(count);
        TIntList idxs = new TIntArrayList();
        for (int i = 0; i < count; ++i) {
            int idx = indexes[i];
            int idx1 = m1[idx];
            int idx2 = m2[idx];
            if (set1.contains(idx1) || set2.contains(idx2)) {
                continue;
            }
            idxs.add(idx);
            matched.add(idx1, idx2);
            set1.add(idx1);
            set2.add(idx2);
            sumDesc += costDesc[idx];
            sumDist += costDist[idx];
        }
        // compact the de-duplicated matches back into m1/m2
        count = matched.getN();
        for (int i = 0; i < count; ++i) {
            m1[i] = matched.getX(i);
            m2[i] = matched.getY(i);
        }
    }
    return new double[]{sumDesc, sumDist, count};
}

/**
 * match left and right using the right transformed to left and nn1
 * and return the sum of the descriptor costs, distance differences
 * and number matched.
 * NOTE that the each item in a sum has been normalized before adding,
 * so for example, one item contributes between 0 and 1 to the total sum.
 * a "+1" or +maxValue was not added for points not matched.
 * @param a2
 * @param a2TrTo1
 * @param nn1
 * @param p1KPIndexMap
 * @param p2KPIndexMap
 * @param img1Width
 * @param img1Height
 * @param pixTolerance
 * @param maxDist
 * @param m1
 * @param m2
 * @return
 */
private static double[] sumKeypointDescAndDist2To1(
    PairIntArray a2, PairIntArray a2TrTo1, NearestNeighbor2D nn1, TObjectIntMap<PairInt> p1KPIndexMap, TObjectIntMap<PairInt> p2KPIndexMap, int img1Width, int img1Height, int pixTolerance, double maxDist, int[] m1, int[] m2) {
    int n2 = a2TrTo1.getN();
    int count = 0;
    float[] costDist = new float[n2];
    int[] indexes = new int[n2];
    for (int k = 0; k < n2; ++k) {
        int x1Tr = a2TrTo1.getX(k);
        int y1Tr = a2TrTo1.getY(k);
        if (x1Tr < 0 || y1Tr < 0) {
            continue;
        }
        int kpIdx2 = p2KPIndexMap.get(new PairInt(a2.getX(k), a2.getY(k)));
        Set<PairInt> nearest = nn1.findClosest(x1Tr, y1Tr, pixTolerance);
        // no descriptor cost here: take an arbitrary nearest neighbor
        PairInt minP1 = null;
        int minIdx1 = 0;
        if (nearest != null && !nearest.isEmpty()) {
            minP1 = nearest.iterator().next();
            minIdx1 = p1KPIndexMap.get(minP1);
        }
        if (minP1 != null) {
            double dist = ORBMatcher.distance(x1Tr, y1Tr, minP1);
            double distNorm = dist / maxDist;
            m1[count] = minIdx1;
            m2[count] = kpIdx2;
            costDist[count] = (float) distNorm;
            indexes[count] = count;
            count++;
        }
    }
    double sumDist = 0;
    if (count > 1) {
        if (count < costDist.length) {
            indexes = Arrays.copyOf(indexes, count);
            costDist = Arrays.copyOf(costDist, count);
        }
        // greedy one-to-one assignment in ascending distance order
        QuickSort.sortBy1stArg(costDist, indexes);
        TIntSet set1 = new TIntHashSet();
        TIntSet set2 = new TIntHashSet();
        PairIntArray matched = new PairIntArray(count);
        TIntList idxs = new TIntArrayList();
        for (int i = 0; i < count; ++i) {
            int idx = indexes[i];
            int idx1 = m1[idx];
            int idx2 = m2[idx];
            if (set1.contains(idx1) || set2.contains(idx2)) {
                continue;
            }
            idxs.add(idx);
            matched.add(idx1, idx2);
            set1.add(idx1);
            set2.add(idx2);
            sumDist += costDist[idx];
        }
        count = matched.getN();
        for (int i = 0; i < count; ++i) {
            m1[i] = matched.getX(i);
            m2[i] = matched.getY(i);
        }
    }
    return new double[]{sumDist, count};
}

private static
PairIntArray trimToImageBounds(TwoDFloatArray octaveImg, PairIntArray a) {
    // delegates using the octave image's 2-D array dimensions
    int n0 = octaveImg.a.length;
    int n1 = octaveImg.a[0].length;
    return trimToImageBounds(n1, n0, a);
}

/**
 * Return a copy of a containing only the points inside
 * [0, width-1] x [0, height-1].
 */
private static PairIntArray trimToImageBounds(
    int width, int height, PairIntArray a) {
    PairIntArray b = new PairIntArray(a.getN());
    for (int i = 0; i < a.getN(); ++i) {
        int x = a.getX(i);
        int y = a.getY(i);
        if (x < 0 || x > (width - 1)) {
            continue;
        } else if (y < 0 || y > (height - 1)) {
            continue;
        }
        b.add(x, y);
    }
    return b;
}

/**
 * Scale the bounds coordinates down by scale, dropping duplicate points
 * produced by the rounding.
 */
private static PairIntArray reduceBounds(PairIntArray bounds, float scale) {
    Set<PairInt> added = new HashSet<PairInt>();
    PairIntArray out = new PairIntArray(bounds.getN());
    for (int i = 0; i < bounds.getN(); ++i) {
        int x = Math.round((float)bounds.getX(i)/scale);
        int y = Math.round((float)bounds.getY(i)/scale);
        PairInt p = new PairInt(x, y);
        if (added.contains(p)) {
            continue;
        }
        out.add(x, y);
        added.add(p);
    }
    return out;
}

/**
 * Find the largest run of consecutive bounds indexes that are absent from
 * the match result r, treating a run that wraps from the end of bounds
 * around to index 0 as one gap.
 *
 * @return the length of the largest gap
 */
public static int maxNumberOfGaps(PairIntArray bounds, PartialShapeMatcher.Result r) {
    TIntSet mIdxs = new TIntHashSet(r.getNumberOfMatches());
    for (int i = 0; i < r.getNumberOfMatches(); ++i) {
        mIdxs.add(r.getIdx1(i));
    }
    int maxGapStartIdx = -1;
    int maxGap = 0;
    int cStartIdx = -1;
    int cGap = 0;
    // handling for startIdx of 0 to check for wraparound
    // of gap at end of block
    int gap0 = 0;
    for (int i = 0; i < bounds.getN(); ++i) {
        if (!mIdxs.contains(i)) {
            // is a gap
            if (cStartIdx == -1) {
                cStartIdx = i;
            }
            cGap++;
            if (i == (bounds.getN() - 1)) {
                if (gap0 > 0) {
                    // 0 1 2 3 4 5
                    // g g     g g
                    // gap0=2
                    // cGap=2 cStartIdx=4
                    if (cStartIdx > (gap0 - 1)) {
                        gap0 += cGap;
                    }
                }
                if (cGap > maxGap) {
                    maxGap = cGap;
                    maxGapStartIdx = cStartIdx;
                }
                if (gap0 > maxGap) {
                    maxGap = gap0;
                    maxGapStartIdx = 0;
                }
            }
        } else {
            // is not a gap
            if (cStartIdx > -1) {
                if (cGap > maxGap) {
                    maxGap = cGap;
                    maxGapStartIdx = cStartIdx;
                }
                if (cStartIdx == 0) {
                    gap0 = cGap;
                }
                cStartIdx = -1;
                cGap = 0;
            }
        }
    }
    return maxGap;
}

private PairIntArray createOrderedBounds(ORB orb1, Set<PairInt>
labeledPoints1, SIGMA sigma) {
    // extract a smoothed ordered boundary around the labeled points,
    // using the full size (octave 0) pyramid image dimensions
    ImageProcessor imageProcessor = new ImageProcessor();
    Set<PairInt> set = new HashSet<PairInt>();
    set.addAll(labeledPoints1);
    PairIntArray bounds = imageProcessor.extractSmoothedOrderedBoundary(
        new HashSet<PairInt>(set), sigma, orb1.getPyramidImages().get(0).a[0].length, orb1.getPyramidImages().get(0).a.length);
    return bounds;
}

/**
 * Return the cached ordered boundary for segIdx, or extract it from set,
 * cache it in boundsMap, and return it.
 */
private PairIntArray getOrCreateOrderedBounds(TwoDFloatArray img, TIntObjectMap<PairIntArray> boundsMap, int segIdx, Set<PairInt> set, SIGMA sigma) {
    PairIntArray bounds = boundsMap.get(segIdx);
    if (bounds != null) {
        return bounds;
    }
    ImageProcessor imageProcessor = new ImageProcessor();
    bounds = imageProcessor.extractSmoothedOrderedBoundary(
        new HashSet<PairInt>(set), sigma, img.a[0].length, img.a.length);
    boundsMap.put(segIdx, bounds);
    {
        // debug output of the boundary centroid and object size
        MiscellaneousCurveHelper curveHelper = new MiscellaneousCurveHelper();
        double[] xyCen = curveHelper.calculateXYCentroids(bounds);
        System.out.println("bounds center=" + (int)xyCen[0] + "," + (int)xyCen[1] + " size_full=" + calculateObjectSize(bounds));
    }
    return bounds;
}

/**
 * Sum the Euclidean magnitudes of the (x, y) distance components stored
 * in distances.
 */
private double sumDistances(PairFloatArray distances) {
    double sum = 0;
    for (int i = 0; i < distances.getN(); ++i) {
        float d1 = distances.getX(i);
        float d2 = distances.getY(i);
        sum += Math.sqrt(d1 * d1 + d2 * d2);
    }
    return sum;
}

/**
 * from matched points in list 1 to list2, choose 2 pairs that have
 * small cost and large difference.
 * @param result
 * @param costD
 * @param xPoints1
 * @param yPoints1
 * @param xPoints2
 * @param yPoints2
 * @return
 */
private QuadInt[] choose2ReferencePoints(Result result, PairIntArray bounds1, PairIntArray bounds2, TObjectIntMap<PairInt> point1KP1Map, TObjectIntMap<PairInt> point2KP1Map, int[][] costD) {
    int n = result.getNumberOfMatches();
    float[] costs = new float[n];
    PairInt[] points1 = new PairInt[n];
    int count = 0;
    // collect the matched points that are also keypoints, with their costs
    for (int i = 0; i < n; ++i) {
        int idx1 = result.idx1s.get(i);
        int idx2 = result.idx2s.get(i);
        int x1 = bounds1.getX(idx1);
        int y1 = bounds1.getY(idx1);
        PairInt p1 = new PairInt(x1, y1);
        int x2 = bounds2.getX(idx2);
        int y2 = bounds2.getY(idx2);
        PairInt p2 = new PairInt(x2, y2);
        if (point1KP1Map.containsKey(p1) && point2KP1Map.containsKey(p2)) {
            int kpIdx1 = point1KP1Map.get(p1);
            int kpIdx2 = point2KP1Map.get(p2);
            costs[count] = costD[kpIdx1][kpIdx2];
            // these points are in full size reference frame
            points1[count] = p1;
            count++;
        }
    }
    if (count > 1) {
        if (count < n) {
            costs = Arrays.copyOf(costs, count);
            points1 = Arrays.copyOf(points1, count);
        }
        QuickSort.sortBy1stArg(costs, points1);
        // take the lowest-cost ~20% (or all when fewer than 10)
        int end = (int)(0.2 * n);
        if (end < 10) {
            end = n;
        }
        Set<PairInt> points = new HashSet<PairInt>();
        for (int i = 0; i < end; i++) {
            PairInt p = points1[i];
            points.add(p);
        }
        // pick the 2 points furthest from each other among the low-cost set
        FurthestPair fp = new FurthestPair();
        PairInt[] furthest = fp.find(points);
        assert(furthest != null);
        assert(furthest.length == 2);
        // find the list-2 partners of the 2 chosen list-1 points
        PairInt[] furthest2 = new PairInt[2];
        for (int i = 0; i < n; ++i) {
            int idx1 = result.idx1s.get(i);
            int idx2 = result.idx2s.get(i);
            PairInt p1 = new PairInt(bounds1.getX(idx1), bounds1.getY(idx1));
            if (furthest2[0] == null) {
                if (furthest[0].equals(p1)) {
                    furthest2[0] = new PairInt(bounds2.getX(idx2), bounds2.getY(idx2));
                }
            }
            if (furthest2[1] == null) {
                if (furthest[1].equals(p1)) {
                    furthest2[1] = new PairInt(bounds2.getX(idx2), bounds2.getY(idx2));
                }
            }
            if (furthest2[0] != null && furthest2[1] != null) {
                break;
            }
        }
        if (furthest2 != null && furthest2.length == 2) {
            QuadInt[]
refs = new QuadInt[2];
            refs[0] = new QuadInt(furthest[0], furthest2[0]);
            refs[1] = new QuadInt(furthest[1], furthest2[1]);
            return refs;
        }
    }
    // re-do the calculation w/o trying to use descr cost.
    Set<PairInt> points = new HashSet<PairInt>(n);
    for (int i = 0; i < n; ++i) {
        int idx1 = result.idx1s.get(i);
        PairInt p1 = new PairInt(bounds1.getX(idx1), bounds1.getY(idx1));
        points.add(p1);
    }
    FurthestPair fp = new FurthestPair();
    PairInt[] furthest = fp.find(points);
    assert(furthest != null);
    assert(furthest.length == 2);
    // find the list-2 partners of the 2 chosen list-1 points
    PairInt[] furthest2 = new PairInt[2];
    for (int i = 0; i < n; ++i) {
        int idx1 = result.idx1s.get(i);
        int idx2 = result.idx2s.get(i);
        PairInt p1 = new PairInt(bounds1.getX(idx1), bounds1.getY(idx1));
        if (furthest2[0] == null) {
            if (furthest[0].equals(p1)) {
                furthest2[0] = new PairInt(bounds2.getX(idx2), bounds2.getY(idx2));
            }
        }
        if (furthest2[1] == null) {
            if (furthest[1].equals(p1)) {
                furthest2[1] = new PairInt(bounds2.getX(idx2), bounds2.getY(idx2));
            }
        }
        if (furthest2[0] != null && furthest2[1] != null) {
            break;
        }
    }
    assert (furthest2 != null && furthest2.length == 2);
    QuadInt[] refs = new QuadInt[2];
    refs[0] = new QuadInt(furthest[0], furthest2[0]);
    refs[1] = new QuadInt(furthest[1], furthest2[1]);
    return refs;
}

/**
 * Match as-yet unmatched keypoints using the epipolar constraint given by
 * fm, accumulating chord-difference, distance, and descriptor sums into
 * output.  Returns the newly matched keypoint index pairs.
 */
private List<PairInt> matchUsingFM(ORB orb1, ORB orb2, int[][] costD, int octave1, int octave2, PairIntArray bounds1, PairIntArray bounds2, PartialShapeMatcher matcher, PartialShapeMatcher.Result result, TObjectIntMap<PairInt> keypoints1IndexMap, TObjectIntMap<PairInt> keypoints2IndexMap, SimpleMatrix fm, PairIntArray unmatchedKP1, PairIntArray unmatchedKP2, TObjectIntMap<PairInt> unmatchedKP1Idxs, TObjectIntMap<PairInt> unmatchedKP2Idxs, int nBands, float normDesc, float distTol, double[] output) {
    // output variable to hold sums and count
    // 0 = totalChordDiffSum
    // 1 = max avg chord diff
    // 2 = totalDistance
    // 3 = max avg total dist
    // 4 = totalDescrSum
    // 5 = nDescr
    double maxAvgChord = Double.MIN_VALUE;
    double maxAvgDist = Double.MIN_VALUE;
List<PairInt> addedKPIdxs = new ArrayList<PairInt>();
    EpipolarTransformer eTransformer = new EpipolarTransformer();
    SimpleMatrix unmatchedLeft = eTransformer.rewriteInto3ColumnMatrix(unmatchedKP1);
    SimpleMatrix unmatchedRight = eTransformer.rewriteInto3ColumnMatrix(unmatchedKP2);
    // epipolar lines in each image for the other image's points
    SimpleMatrix rightEpipolarLines = fm.mult(unmatchedLeft);
    SimpleMatrix leftEpipolarLines = fm.transpose().mult(unmatchedRight);
    float[] outputDist = new float[2];
    int nLeftUnmatched = unmatchedLeft.numCols();
    int nRightUnmatched = unmatchedRight.numCols();
    float dist, descCost;
    double d;
    TFloatList totalCost = new TFloatArrayList();
    TIntList indexes = new TIntArrayList();
    TFloatList eDist = new TFloatArrayList();
    TIntList idx1s = new TIntArrayList();
    TIntList idx2s = new TIntArrayList();
    // collect all pairs whose perpendicular epipolar distances are
    // within distTol, with combined distance + descriptor costs
    for (int i = 0; i < nLeftUnmatched; ++i) {
        PairInt p1 = new PairInt(unmatchedKP1.getX(i), unmatchedKP1.getY(i));
        int kp1Idx = unmatchedKP1Idxs.get(p1);
        for (int j = 0; j < nRightUnmatched; ++j) {
            PairInt p2 = new PairInt(unmatchedKP2.getX(j), unmatchedKP2.getY(j));
            int kp2Idx = unmatchedKP2Idxs.get(p2);
            eTransformer.calculatePerpDistFromLines(unmatchedLeft, unmatchedRight, rightEpipolarLines, leftEpipolarLines, i, j, outputDist);
            if (outputDist[0] <= distTol && outputDist[1] <= distTol) {
                d = Math.sqrt(outputDist[0] * outputDist[0] + outputDist[1] * outputDist[1]);
                dist = (float)d;
                // normalized descriptor cost
                descCost = 1.f - ((nBands * 256.f - costD[kp1Idx][kp2Idx]) /normDesc);
                eDist.add(dist);
                totalCost.add(dist + descCost);
                indexes.add(indexes.size());
                idx1s.add(kp1Idx);
                idx2s.add(kp2Idx);
            }
        }
    }
    QuickSort.sortBy1stArg(totalCost, indexes);
    // choose 2 reference points from result,
    // preferably 2 that are keypoints w/ lowest descr costs
    // and are far from each other
    // returns results as 2 quadints of paired x1,y1,x2,y2
    QuadInt[] resultRefs = choose2ReferencePoints(result, bounds1, bounds2, keypoints1IndexMap, keypoints2IndexMap, costD);
    // new matched keypoint indexes
    TIntSet added1 = new TIntHashSet();
    TIntSet added2 = new TIntHashSet();
    // greedy one-to-one acceptance in ascending total-cost order
    for (int j = 0; j < totalCost.size(); ++j) {
        int idx = indexes.get(j);
        int kpIdx1 = idx1s.get(idx);
        int kpIdx2 = idx2s.get(idx);
        if (added1.contains(kpIdx1) || added2.contains(kpIdx2)) {
            continue;
        }
        // output:
        // 0 = totalChordDiffSum
        // 1 = max avg chord diff
        // 2 = totalDistance
        // 3 = max avg total dist
        // 4 = totalDescrSum
        // 5 = nDescr
        added1.add(kpIdx1);
        added2.add(kpIdx2);
        addedKPIdxs.add(new PairInt(kpIdx1, kpIdx2));
        output[2] += eDist.get(idx);
        d = eDist.get(idx);
        if (d > maxAvgDist) {
            maxAvgDist = d;
        }
        float descrCost = totalCost.get(j) - eDist.get(idx);
        output[4] += descrCost;
        output[5]++;
        int x1 = orb1.getKeyPoint1List().get(octave1).get(kpIdx1);
        int y1 = orb1.getKeyPoint0List().get(octave1).get(kpIdx1);
        int x2 = orb2.getKeyPoint1List().get(octave2).get(kpIdx2);
        int y2 = orb2.getKeyPoint0List().get(octave2).get(kpIdx2);
        // calc chord diff for the new points using 2 reference
        // points from result.
        double chordDiff = matcher.
calculateAChordDifference(
            resultRefs[0].getA(), resultRefs[0].getB(),
            resultRefs[1].getA(), resultRefs[1].getB(),
            x1, y1,
            resultRefs[0].getC(), resultRefs[0].getD(),
            resultRefs[1].getC(), resultRefs[1].getD(),
            x2, y2
        );
        output[0] += chordDiff;
        if (chordDiff > maxAvgChord) {
            maxAvgChord = chordDiff;
        }
    }
    output[1] = maxAvgChord;
    output[3] = maxAvgDist;
    // output:
    // 0 = totalChordDiffSum
    // 1 = max avg chord diff
    // 2 = totalDistance
    // 3 = max avg total dist
    // 4 = totalDescrSum
    // 5 = nDescr
    return addedKPIdxs;
}

/**
 * Overload of matchUsingFM without bounds or a shape-match result: matches
 * unmatched keypoints by the epipolar constraint only, accumulating
 * distance and descriptor sums into output.  Returns the newly matched
 * keypoint index pairs.
 */
private List<PairInt> matchUsingFM(ORB orb1, ORB orb2, int[][] costD, int octave1, int octave2, TObjectIntMap<PairInt> keypoints1IndexMap, TObjectIntMap<PairInt> keypoints2IndexMap, SimpleMatrix fm, PairIntArray unmatchedKP1, PairIntArray unmatchedKP2, TObjectIntMap<PairInt> unmatchedKP1Idxs, TObjectIntMap<PairInt> unmatchedKP2Idxs, int nBands, float normDesc, float distTol, double[] output) {
    // output variable to hold sums and count
    // 0 = totalDistance
    // 1 = max avg total dist
    // 2 = totalDescrSum
    // 3 = nDescr
    double maxAvgDist = Double.MIN_VALUE;
    List<PairInt> addedKPIdxs = new ArrayList<PairInt>();
    EpipolarTransformer eTransformer = new EpipolarTransformer();
    SimpleMatrix unmatchedLeft = eTransformer.rewriteInto3ColumnMatrix(unmatchedKP1);
    SimpleMatrix unmatchedRight = eTransformer.rewriteInto3ColumnMatrix(unmatchedKP2);
    // epipolar lines in each image for the other image's points
    SimpleMatrix rightEpipolarLines = fm.mult(unmatchedLeft);
    SimpleMatrix leftEpipolarLines = fm.transpose().mult(unmatchedRight);
    float[] outputDist = new float[2];
    int nLeftUnmatched = unmatchedLeft.numCols();
    int nRightUnmatched = unmatchedRight.numCols();
    float dist, descCost;
    double d;
    TFloatList totalCost = new TFloatArrayList();
    TIntList indexes = new TIntArrayList();
    TFloatList eDist = new TFloatArrayList();
    TIntList idx1s = new TIntArrayList();
    TIntList idx2s = new TIntArrayList();
    for (int i = 0; i < nLeftUnmatched; ++i) {
        PairInt p1 = new PairInt(unmatchedKP1.getX(i), unmatchedKP1.getY(i));
        int kp1Idx =
unmatchedKP1Idxs.get(p1);
        for (int j = 0; j < nRightUnmatched; ++j) {
            PairInt p2 = new PairInt(unmatchedKP2.getX(j), unmatchedKP2.getY(j));
            int kp2Idx = unmatchedKP2Idxs.get(p2);
            eTransformer.calculatePerpDistFromLines(unmatchedLeft, unmatchedRight, rightEpipolarLines, leftEpipolarLines, i, j, outputDist);
            if (outputDist[0] <= distTol && outputDist[1] <= distTol) {
                d = Math.sqrt(outputDist[0] * outputDist[0] + outputDist[1] * outputDist[1]);
                dist = (float)d;
                // normalized descriptor cost
                descCost = 1.f - ((nBands * 256.f - costD[kp1Idx][kp2Idx]) /normDesc);
                eDist.add(dist);
                totalCost.add(dist + descCost);
                indexes.add(indexes.size());
                idx1s.add(kp1Idx);
                idx2s.add(kp2Idx);
            }
        }
    }
    QuickSort.sortBy1stArg(totalCost, indexes);
    // new matched keypoint indexes
    TIntSet added1 = new TIntHashSet();
    TIntSet added2 = new TIntHashSet();
    // greedy one-to-one acceptance in ascending total-cost order
    for (int j = 0; j < totalCost.size(); ++j) {
        int idx = indexes.get(j);
        int kpIdx1 = idx1s.get(idx);
        int kpIdx2 = idx2s.get(idx);
        if (added1.contains(kpIdx1) || added2.contains(kpIdx2)) {
            continue;
        }
        // output variable to hold sums and count
        // 0 = totalDistance
        // 1 = max avg total dist
        // 2 = totalDescrSum
        // 3 = nDescr
        added1.add(kpIdx1);
        added2.add(kpIdx2);
        addedKPIdxs.add(new PairInt(kpIdx1, kpIdx2));
        output[0] += eDist.get(idx);
        d = eDist.get(idx);
        if (d > maxAvgDist) {
            maxAvgDist = d;
        }
        float descrCost = totalCost.get(j) - eDist.get(idx);
        output[2] += descrCost;
        output[3]++;
    }
    output[1] = maxAvgDist;
    return addedKPIdxs;
}

// sum, avg, maxAvg
private double[] sumAndMaxEPDist(SimpleMatrix fm, PairIntArray m1, PairIntArray m2) {
    EpipolarTransformer eTransformer = new EpipolarTransformer();
    SimpleMatrix matchedLeft = eTransformer.rewriteInto3ColumnMatrix(m1);
    SimpleMatrix matchedRight = eTransformer.rewriteInto3ColumnMatrix(m2);
    // this goes into total epipolar distances at end of block
    // NOTE: this method needs testing.
// currently no normalization is used internally PairFloatArray distances = eTransformer .calculateDistancesFromEpipolar(fm, matchedLeft, matchedRight); double max = Double.MIN_VALUE; double sum = 0; double d; for (int i = 0; i < distances.getN(); ++i) { float d1 = distances.getX(i); float d2 = distances.getY(i); d = Math.sqrt(d1*d1 + d2*d2); if (d > max) { max = d; } sum += d; } double avg = sum/(double)distances.getN(); return new double[]{sum, avg, max}; } private void matchGreedy(PairIntArray left, PairIntArray right, int nBands, int[][] costD, NearestNeighbor2D nn1, TObjectIntMap<PairInt> keypoints1IndexMap, TObjectIntMap<PairInt> keypoints2IndexMap, PairIntArray outLeft, PairIntArray outRight, int img1Width, int img1Height, int pixTol, PairIntArray extr1, PairIntArray extr2, NearestNeighbor2D nnExtr1, TObjectIntMap<PairInt> extr1IndexMap, TObjectIntMap<PairInt> extr2IndexMap) { // NOTE that all coordinates are in the full reference frame // and remain that way through this method /* -- finds best 20 matches of descriptors -- from the best 20, -- forms combinations of pairs of points -- for each pair, -- calc euclid transformation evaluate it on all points result is best starting match of points. 
*/ int n1 = left.getN(); int n2 = right.getN(); // for each left, find the best matching right int[] b = new int[n1]; int[] indexes = new int[n1]; float[] costs = new float[n1]; double c; for (int i = 0; i < n1; ++i) { double bestCost = Double.MAX_VALUE; int bestIdx2 = -1; PairInt p1 = new PairInt(left.getX(i), left.getY(i)); int kpIdx1 = keypoints1IndexMap.get(p1); assert(keypoints1IndexMap.containsKey(p1)); for (int j = 0; j < n2; ++j) { PairInt p2 = new PairInt(right.getX(j), right.getY(j)); int kpIdx2 = keypoints2IndexMap.get(p2); assert(keypoints2IndexMap.containsKey(p2)); c = costD[kpIdx1][kpIdx2]; if (c < bestCost) { bestCost = c; bestIdx2 = j; } } b[i] = bestIdx2; indexes[i] = i; costs[i] = (float)bestCost; } QuickSort.sortBy1stArg(costs, indexes); int topK = 20; if (topK > n1) { topK = n1; } int pixTol2 = (int)Math.round(Math.sqrt(2) * pixTol); MatchedPointsTransformationCalculator tc = new MatchedPointsTransformationCalculator(); Transformer transformer = new Transformer(); double bestCost = Double.MAX_VALUE; int[] bestMIdx1s = null; int[] bestMIdx2s = null; int[] bestMIdxExtr1s = null; int[] bestMIdxExtr2s = null; int bestN = -1; int bestExtrN = -1; for (int i = 0; i < topK; ++i) { int idxI1 = indexes[i]; int idxJ1 = b[idxI1]; int leftX1 = left.getX(idxI1); int leftY1 = left.getY(idxI1); int rightX1 = right.getX(idxJ1); int rightY1 = right.getY(idxJ1); for (int j = (i + 1); j < topK; ++j) { int idxI2 = indexes[j]; int idxJ2 = b[idxI2]; int leftX2 = left.getX(idxI2); int leftY2 = left.getY(idxI2); int rightX2 = right.getX(idxJ2); int rightY2 = right.getY(idxJ2); // transform dataset 2 into frame 1 // (direction is 2 to 1 to be able to reuse nearest // neighbors containing dataset1 keypoints TransformationParameters params = tc.calulateEuclidean( rightX1, rightY1, rightX2, rightY2, leftX1, leftY1, leftX2, leftY2, 0, 0); if (params == null) { continue; } PairIntArray rightTr = transformer.applyTransformation( params, right); if (rightTr.getN() == 0) { 
continue; } int[] mIdx1s = new int[n2]; int[] mIdx2s = new int[n2]; //sumDesc, sumDist, count double[] sums = sumKeypointDescAndDist2To1( costD, nBands, right, rightTr, nn1, keypoints1IndexMap, keypoints2IndexMap, img1Width, img1Height, pixTol, pixTol2, mIdx1s, mIdx2s); int nMatched = (int)sums[2]; double sumDescr = sums[0]; double sumDist = sums[1]; int nUnmatched = n1 - nMatched; sumDescr += nUnmatched; sumDist += nUnmatched; double tot = sumDescr + sumDist; // evaluate the extra points int ne1 = extr1.getN(); int ne2 = extr2.getN(); PairIntArray extr2Tr = transformer.applyTransformation( params, extr2); // since the extra points do not have descriptors, // need to decide between adding a ne1 to sumDescr // or 0 to it (changing the importance of the orid keypoints). int nMatchedExtr = -1; int[] mIdxExtr1s = new int[ne2]; int[] mIdxExtr2s = new int[ne2]; if (extr2Tr.getN() == 0) { // adding a +1 for the distance limits tot += ne1; } else { //sumDist, count double[] sumsExtr = sumKeypointDescAndDist2To1( extr2, extr2Tr, nnExtr1, extr1IndexMap, extr2IndexMap, img1Width, img1Height, pixTol, pixTol2, mIdxExtr1s, mIdxExtr2s); nMatchedExtr = (int)sumsExtr[1]; double sumDistExtr = sumsExtr[0]; int nUnmatchedExtr = ne1 - nMatchedExtr; tot += sumDistExtr; tot += nUnmatchedExtr; } if (tot < bestCost) { bestCost = tot; bestMIdx1s = mIdx1s; bestMIdx2s = mIdx2s; bestN = nMatched; bestMIdxExtr1s = mIdxExtr1s; bestMIdxExtr2s = mIdxExtr2s; bestExtrN = nMatchedExtr; } } } if (bestMIdx1s != null) { for (int i = 0; i < bestN; ++i) { int kpIdx1 = bestMIdx1s[i]; int kpIdx2 = bestMIdx2s[i]; outLeft.add(left.getX(kpIdx1), left.getY(kpIdx1)); outRight.add(right.getX(kpIdx2), right.getY(kpIdx2)); } for (int i = 0; i < bestExtrN; ++i) { int idx1 = bestMIdxExtr1s[i]; int idx2 = bestMIdxExtr2s[i]; outLeft.add(extr1.getX(idx1), extr1.getY(idx1)); outRight.add(extr2.getX(idx2), extr2.getY(idx2)); } } } private static class PObject { final PartialShapeMatcher.Result r; final PairIntArray 
bounds1; final PairIntArray bounds2; final float scale1; final float scale2; public PObject(PartialShapeMatcher.Result result, PairIntArray b1, PairIntArray b2, float s1, float s2) { r = result; bounds1 = b1; bounds2 = b2; scale1 = s1; scale2 = s2; } } private static class CObject implements Comparable<CObject> { final double cost; final CorrespondenceList cCor; final PairIntArray transformedTemplate; public CObject(double cost, CorrespondenceList cL, PairIntArray templTr) { this.cost = cost; this.cCor = cL; this.transformedTemplate = templTr; } @Override public int compareTo(CObject other) { if (cost < other.cost) { return -1; } else if (cost > other.cost) { return 1; } else { int n1 = cCor.getPoints1().size(); int n2 = other.cCor.getPoints1().size(); if (n1 > n2) { return -1; } else if (n1 < n2) { return 1; } } return 0; } } private static class CObject4 implements Comparable<CObject4> { final double cost; final TransformationParameters params; final QuadInt q; public CObject4(double sum, TransformationParameters params, QuadInt q) { this.cost = sum; this.q = q; this.params = params; } @Override public int compareTo(CObject4 other) { if (cost < other.cost) { return -1; } else if (cost > other.cost) { return 1; } return 0; } } private static class CObject3 implements Comparable<CObject3> { final double cost; final double costDesc; final double costDist; final double costCount; final int index; final PairInt[] m1; final PairInt[] m2; final double sumPatch; final TransformationParameters params; QuadInt q; int keypointCount; public CObject3(CObject2 cObject2, double sum, double sumPatch, TransformationParameters params) { this.sumPatch = sumPatch; this.cost = sum; this.costDesc = cObject2.costDesc; this.costDist = cObject2.costDist; this.costCount = cObject2.costCount; this.index = cObject2.index; this.m1 = cObject2.m1; this.m2 = cObject2.m2; this.params = params; } @Override public int compareTo(CObject3 other) { if (cost < other.cost) { return -1; } else if (cost 
> other.cost) { return 1; } return 0; } } private static class CObject2 implements Comparable<CObject2> { final double cost; final double costDesc; final double costDist; final double costCount; final int index; final PairInt[] m1; final PairInt[] m2; public CObject2(int index, double cost, double costDesc, double costDist, double costCount, PairInt[] matched1, PairInt[] matched2) { this.cost = cost; this.index = index; this.m1 = matched1; this.m2 = matched2; this.costDesc = costDesc; this.costDist = costDist; this.costCount = costCount; } @Override public int compareTo(CObject2 other) { if (cost < other.cost) { return -1; } else if (cost > other.cost) { return 1; } return 0; } } private static void debugPlot(int i, int j, FixedSizeSortedVector<CObject> vec, TwoDFloatArray pyr1, TwoDFloatArray pyr2, float s1, float s2) { Image img1 = convertToImage(pyr1); Image img2 = convertToImage(pyr2); try { CorrespondenceList cor = vec.getArray()[0].cCor; Image img1Cp = img1.copyImage(); Image img2Cp = img2.copyImage(); CorrespondencePlotter plotter = new CorrespondencePlotter( img1Cp, img2Cp); for (int ii = 0; ii < cor.getPoints1().size(); ++ii) { PairInt p1 = cor.getPoints1().get(ii); PairInt p2 = cor.getPoints2().get(ii); int x1 = Math.round(p1.getX() / s1); int y1 = Math.round(p1.getY() / s1); int x2 = Math.round(p2.getX() / s2); int y2 = Math.round(p2.getY() / s2); plotter.drawLineInAlternatingColors(x1, y1, x2, y2, 0); } String strI = Integer.toString(i); while (strI.length() < 3) { strI = "0" + strI; } String strJ = Integer.toString(j); while (strJ.length() < 3) { strJ = "0" + strJ; } String str = strI + "_" + strJ + "_"; plotter.writeImage("_MATCH_" + str); } catch (Exception e) { } } private static void debugPlot2(int i, int j, FixedSizeSortedVector<CObject3> vec, TwoDFloatArray pyr1, TwoDFloatArray pyr2, float s1, float s2) { Image img1 = convertToImage(pyr1); Image img2 = convertToImage(pyr2); try { PairInt[] m1 = vec.getArray()[0].m1; PairInt[] m2 = 
vec.getArray()[0].m2; Image img1Cp = img1.copyImage(); Image img2Cp = img2.copyImage(); CorrespondencePlotter plotter = new CorrespondencePlotter( img1Cp, img2Cp); for (int ii = 0; ii < m1.length; ++ii) { PairInt p1 = m1[ii]; PairInt p2 = m2[ii]; int x1 = Math.round(p1.getX() / s1); int y1 = Math.round(p1.getY() / s1); int x2 = Math.round(p2.getX() / s2); int y2 = Math.round(p2.getY() / s2); plotter.drawLineInAlternatingColors(x1, y1, x2, y2, 0); } String strI = Integer.toString(i); while (strI.length() < 3) { strI = "0" + strI; } String strJ = Integer.toString(j); while (strJ.length() < 3) { strJ = "0" + strJ; } String str = strI + "_" + strJ + "_"; plotter.writeImage("_MATCH_" + str); } catch (Exception e) { } } private static void debugPlot(int i, int j, FixedSizeSortedVector<CObject> vecD, FixedSizeSortedVector<CObject> vecF, TwoDFloatArray pyr1, TwoDFloatArray pyr2, float s1, float s2) { Image img1 = convertToImage(pyr1); Image img2 = convertToImage(pyr2); try { for (int i0 = 0; i0 < 2; ++i0) { CorrespondenceList cor = null; if (i0 == 0) { cor = vecD.getArray()[0].cCor; } else { cor = vecF.getArray()[0].cCor; } Image img1Cp = img1.copyImage(); Image img2Cp = img2.copyImage(); CorrespondencePlotter plotter = new CorrespondencePlotter( img1Cp, img2Cp); for (int ii = 0; ii < cor.getPoints1().size(); ++ii) { PairInt p1 = cor.getPoints1().get(ii); PairInt p2 = cor.getPoints2().get(ii); int x1 = Math.round(p1.getX()/s1); int y1 = Math.round(p1.getY()/s1); int x2 = Math.round(p2.getX()/s2); int y2 = Math.round(p2.getY()/s2); plotter.drawLineInAlternatingColors(x1, y1, x2, y2, 0); } String strI = Integer.toString(i); while (strI.length() < 3) { strI = "0" + strI; } String strJ = Integer.toString(j); while (strJ.length() < 3) { strJ = "0" + strJ; } String str = strI + "_" + strJ + "_"; if (i0 == 0) { str = str + "factor"; } else { str = str + "divisor"; } plotter.writeImage("_MATCH_" + str); } } catch(Exception e) {} } }
package com.test.thalitest; import org.junit.runner.RunWith; import org.junit.runners.Suite; import io.jxcore.node.ConnectionHelperTest; import io.jxcore.node.ConnectionModelTest; import io.jxcore.node.ConnectivityMonitorTest; import io.jxcore.node.IncomingSocketThreadTest; import io.jxcore.node.LifeCycleMonitorTest; import io.jxcore.node.ListenerOrIncomingConnectionTest; import io.jxcore.node.OutgoingSocketThreadTest; import io.jxcore.node.SocketThreadBaseTest; import io.jxcore.node.StartStopOperationHandlerTest; import io.jxcore.node.StartStopOperationTest; @RunWith(Suite.class) @Suite.SuiteClasses({ ConnectionModelTest.class /* ConnectivityMonitorTest.class, LifeCycleMonitorTest.class, ListenerOrIncomingConnectionTest.class, StartStopOperationTest.class, IncomingSocketThreadTest.class, OutgoingSocketThreadTest.class, SocketThreadBaseTest.class, StartStopOperationHandlerTest.class, ConnectionHelperTest.class */ }) public class ThaliTestSuite { }
package com.dwarfholm.activitystats.braizhauler;

import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;

import org.bukkit.entity.Player;

/**
 * In-memory registry of tracked players ({@link ASPlayer}) plus the glue that
 * persists them through {@link ASDatabase} and drives the periodic
 * payout/rollover cycle for the {@code ActivityStats} plugin.
 */
public class ASData {
	private final HashMap<String, ASPlayer> playerlist;
	private final ASDatabase database;
	private final ActivityStats plugin;

	public ASData(ActivityStats plugin) {
		this.plugin = plugin;
		playerlist = new HashMap<String, ASPlayer>();
		database = new ASDatabase(plugin);
	}

	/** Persists every cached player to the database. */
	public void saveAll() {
		for (ASPlayer player : playerlist.values())
			database.updatePlayer(player);
	}

	/** Creates the backing database tables. */
	public void createDatabase() {
		database.createTables();
	}

	/**
	 * Periodic tick: credits online time to every connected player, and when a
	 * period rollover is due, pays all players, performs any due day/week/month
	 * rollovers, drops offline players from the cache, and saves the rest.
	 */
	public void recordOnline() {
		if (playerlist == null || playerlist.isEmpty()) {
			return;
		}
		for (ASPlayer player : playerlist.values()) {
			// BUG FIX: getServer().getPlayer(name) returns null for players that
			// are not connected; the original dereferenced it unconditionally
			// and could throw NullPointerException.
			Player online = plugin.getServer().getPlayer(player.getName());
			if (online != null && online.isOnline())
				player.curPeriod.addOnline();
		}
		if (plugin.PeriodRolloverDue()) {
			plugin.info("Paying all Players");
			for (ASPlayer player : playerlist.values()) {
				plugin.info(player.getName());
				plugin.payPlayer(player);
			}
			if (plugin.DayRolloverDue())
				rolloverDay();
			if (plugin.WeekRolloverDue())
				rolloverWeek();
			if (plugin.MonthRolloverDue())
				rolloverMonth();
			rolloverPeriod();
			// BUG FIX: the original called playerlist.remove(...) while iterating
			// playerlist.values() with a for-each, which throws
			// ConcurrentModificationException. Use an explicit iterator and
			// Iterator.remove() instead.
			Iterator<Map.Entry<String, ASPlayer>> it = playerlist.entrySet().iterator();
			while (it.hasNext()) {
				Map.Entry<String, ASPlayer> entry = it.next();
				Player pPlayer = plugin.getServer().getPlayer(entry.getValue().getName());
				if (pPlayer != null && pPlayer.isOnline())
					plugin.autoPromoterCheck(pPlayer);
				else
					it.remove();
			}
			saveAll();
		}
	}

	/** Asynchronously-loaded? delegates to the database loader for one player. */
	public void loadPlayer(String player) {
		database.loadPlayer(player);
	}

	/** Saves the cached player with the given name, if present. */
	public void savePlayer(String name) {
		ASPlayer player = playerlist.get(name);
		// BUG FIX: the original passed a possibly-null lookup straight to the
		// database layer; skip players that are not cached.
		if (player != null)
			savePlayer(player);
	}

	public void savePlayer(ASPlayer player) {
		database.updatePlayer(player);
	}

	/** Inserts or replaces a player in the cache, keyed by name. */
	public void setPlayer(ASPlayer player) {
		playerlist.put(player.getName(), player);
	}

	public ASPlayer getPlayer(String name) {
		return playerlist.get(name);
	}

	/** Rolls the current period for every cached player, then notifies the plugin. */
	public void rolloverPeriod() {
		for (String player : playerlist.keySet()) {
			playerlist.get(player).rolloverPeriod();
		}
		plugin.rolledoverPeriod();
	}

	public void rolloverDay() {
		for (String player : playerlist.keySet())
			playerlist.get(player).rolloverDay();
		plugin.rolledoverDay();
	}

	public void rolloverWeek() {
		for (String player : playerlist.keySet())
			playerlist.get(player).rolloverWeek();
		plugin.rolledoverWeek();
	}

	public void rolloverMonth() {
		for (String player : playerlist.keySet())
			playerlist.get(player).rolloverMonth();
		plugin.rolledoverMonth();
	}

	/** Loads every currently-connected player into the cache. */
	public void loadOnlinePlayers() {
		for (Player player : plugin.getServer().getOnlinePlayers())
			loadPlayer(player.getName());
	}
}
package de.javawi.jstun.test.demo; import java.io.IOException; import java.net.DatagramPacket; import java.net.DatagramSocket; import java.net.Inet4Address; import java.net.InetAddress; import java.net.SocketException; import java.net.UnknownHostException; import java.util.Vector; import java.util.logging.FileHandler; import java.util.logging.Handler; import java.util.logging.Level; import java.util.logging.Logger; import java.util.logging.SimpleFormatter; import de.javawi.jstun.attribute.MappedXORMapped; import de.javawi.jstun.attribute.UnknownAttribute; import de.javawi.jstun.attribute.MessageAttributeInterface.MessageAttributeType; import de.javawi.jstun.attribute.exception.MessageAttributeException; import de.javawi.jstun.attribute.exception.MessageAttributeParsingException; import de.javawi.jstun.attribute.exception.UnknownMessageAttributeException; import de.javawi.jstun.attribute.legacy.ChangedAddress; import de.javawi.jstun.attribute.legacy.SourceAddress; import de.javawi.jstun.header.MessageHeader; import de.javawi.jstun.header.MessageHeaderInterface; import de.javawi.jstun.header.MessageHeaderParsingException; import de.javawi.jstun.header.MessageHeaderInterface.MessageHeaderClass; import de.javawi.jstun.header.MessageHeaderInterface.MessageHeaderVersion; import de.javawi.jstun.header.messagetype.method.Binding; import de.javawi.jstun.util.Address; import de.javawi.jstun.util.IPv4Address; import de.javawi.jstun.util.UtilityException; /* * This class implements a STUN server as described in RFC 3489. * The server requires a machine that is dual-homed to be functional. 
*/
public class StunServer {
	private static Logger logger = Logger.getLogger("de.javawi.stun.test.StunServer");

	// One socket per (port, address) combination: four in total, bound in the
	// constructor. Package-visible so the inner receiver threads can classify
	// their sibling sockets.
	Vector<DatagramSocket> sockets;

	/**
	 * Binds the four sockets the STUN CHANGE-REQUEST behaviour needs:
	 * (primaryPort, primary), (secondaryPort, primary),
	 * (primaryPort, secondary), (secondaryPort, secondary).
	 *
	 * @throws SocketException if any of the four bindings fails
	 */
	public StunServer(int primaryPort, InetAddress primary, int secondaryPort, InetAddress secondary) throws SocketException {
		sockets = new Vector<DatagramSocket>();
		sockets.add(new DatagramSocket(primaryPort, primary));
		sockets.add(new DatagramSocket(secondaryPort, primary));
		sockets.add(new DatagramSocket(primaryPort, secondary));
		sockets.add(new DatagramSocket(secondaryPort, secondary));
	}

	/**
	 * Starts one receiver thread per bound socket. Each socket's receive
	 * buffer is set to 2000 bytes before its thread starts.
	 *
	 * @throws SocketException if setting a receive buffer size fails
	 */
	public void start() throws SocketException {
		for (DatagramSocket socket : sockets) {
			socket.setReceiveBufferSize(2000);
			StunServerReceiverThread ssrt = new StunServerReceiverThread(socket);
			ssrt.start();
		}
	}

	/*
	 * Inner class to handle incoming packets and react accordingly.
	 * I decided not to start a thread for every received Binding Request, because the time
	 * required to receive a Binding Request, parse it, generate a Binding Response and send
	 * it varies only between 2 and 4 milliseconds. This amount of time is small enough so
	 * that no extra thread is needed for incoming Binding Request.
*/
	class StunServerReceiverThread extends Thread {
		// Socket this thread blocks on.
		private final DatagramSocket receiverSocket;
		// Siblings of receiverSocket: same address / different port,
		// different address / same port, and different address / different port.
		private DatagramSocket changedPort;
		private DatagramSocket changedIP;
		private DatagramSocket changedPortIP;

		StunServerReceiverThread(DatagramSocket datagramSocket) {
			receiverSocket = datagramSocket;
			// Classify the other three sockets relative to this one by
			// comparing local port and local address.
			for (DatagramSocket socket : sockets) {
				if ((socket.getLocalPort() != receiverSocket.getLocalPort())
						&& (socket.getLocalAddress().equals(receiverSocket.getLocalAddress())))
					changedPort = socket;
				if ((socket.getLocalPort() == receiverSocket.getLocalPort())
						&& (!socket.getLocalAddress().equals(receiverSocket.getLocalAddress())))
					changedIP = socket;
				if ((socket.getLocalPort() != receiverSocket.getLocalPort())
						&& (!socket.getLocalAddress().equals(receiverSocket.getLocalAddress())))
					changedPortIP = socket;
			}
		}

		// NOTE(review): this method does NOT compile as committed — see the
		// inline NOTE(review) markers below for each broken spot.
		public void run() {
			while (true) {
				try {
					DatagramPacket receive = new DatagramPacket(new byte[200], 200);
					receiverSocket.receive(receive);
					logger.finest(receiverSocket.getLocalAddress().getHostAddress() + ":" + receiverSocket.getLocalPort()
							+ " datagram received from " + receive.getAddress().getHostAddress() + ":" + receive.getPort());
					MessageHeader receiveMH = MessageHeader.parseHeader(receive.getData());
					MessageHeaderVersion v;
					try {
						/* TODO backwards compatibility checks
						 * check magic cookie
						 * check for unknown attributes
						 */
						if (receiveMH.checkMagicCookie()) {
							// stun2
							v = MessageHeaderVersion.STUN2;
						} else {
							/* TODO stun1:
							 * SHOULD use Mapped instead of XOR-Mapped
							 * SHOULD not use multiplexing
							 */
							v = MessageHeaderVersion.STUN1;
						}
						// TODO check for unknown attributes
						// TODO doesn't work
						receiveMH.parseAttributes(receive.getData());
						if (receiveMH.getType().getEncoding() == MessageHeaderInterface.BINDINGREQUEST) {
							logger.config(receiverSocket.getLocalAddress().getHostAddress() + ":" + receiverSocket.getLocalPort()
									+ " Binding Request received from " + receive.getAddress().getHostAddress() + ":" + receive.getPort());
							MessageHeader sendMH = new MessageHeader(new Binding(MessageHeaderClass.SUCCESSRESPONSE));
							sendMH.setTransactionID(receiveMH.getTransactionID());
							// Mapped address attribute
							// NOTE(review): incomplete condition — "if (v ==)" is a syntax
							// error; presumably meant to branch on v (STUN1 vs STUN2)
							// before choosing Mapped vs XOR-Mapped. TODO confirm intent.
							if (v ==)
							MappedXORMapped ma = new MappedXORMapped();
							// TODO make it work independently of the IP version
							ma.setAddress(new IPv4Address((Inet4Address) receive.getAddress()));
							ma.setPort(receive.getPort());
							sendMH.addMessageAttribute(ma);
							// Changed address attribute
							ChangedAddress ca = new ChangedAddress();
							ca.setAddress(new Address(changedPortIP.getLocalAddress().getAddress()));
							ca.setPort(changedPortIP.getLocalPort());
							sendMH.addMessageAttribute(ca);
						} else {
							/* TODO there are no other cases for now,
							 * as there is one single method defined in the RFC.
							 * what should we do here?
							 */
						}
						// NOTE(review): "cr" and "ra" are never declared in this file —
						// presumably a CHANGE-REQUEST and RESPONSE-ADDRESS attribute
						// parsed from receiveMH in an earlier revision; does not compile.
						if (cr.isChangePort() && (!cr.isChangeIP())) {
							logger.finer("Change port received in Change Request attribute");
							// Source address attribute
							SourceAddress sa = new SourceAddress();
							sa.setAddress(new Address(changedPort.getLocalAddress().getAddress()));
							sa.setPort(changedPort.getLocalPort());
							sendMH.addMessageAttribute(sa);
							byte[] data = sendMH.getBytes();
							DatagramPacket send = new DatagramPacket(data, data.length);
							if (ra != null) {
								send.setPort(ra.getPort());
								send.setAddress(ra.getAddress().getInetAddress());
							} else {
								send.setPort(receive.getPort());
								send.setAddress(receive.getAddress());
							}
							changedPort.send(send);
							logger.config(changedPort.getLocalAddress().getHostAddress() + ":" + changedPort.getLocalPort()
									+ " send Binding Response to " + send.getAddress().getHostAddress() + ":" + send.getPort());
						} else if ((!cr.isChangePort()) && cr.isChangeIP()) {
							logger.finer("Change ip received in Change Request attribute");
							// Source address attribute
							SourceAddress sa = new SourceAddress();
							sa.setAddress(new Address(changedIP.getLocalAddress().getAddress()));
							sa.setPort(changedIP.getLocalPort());
							sendMH.addMessageAttribute(sa);
							byte[] data = sendMH.getBytes();
							DatagramPacket send = new DatagramPacket(data, data.length);
							if (ra != null) {
								send.setPort(ra.getPort());
								send.setAddress(ra.getAddress().getInetAddress());
							} else {
								send.setPort(receive.getPort());
								send.setAddress(receive.getAddress());
							}
							changedIP.send(send);
							logger.config(changedIP.getLocalAddress().getHostAddress() + ":" + changedIP.getLocalPort()
									+ " send Binding Response to " + send.getAddress().getHostAddress() + ":" + send.getPort());
						} else if ((!cr.isChangePort()) && (!cr.isChangeIP())) {
							logger.finer("Nothing received in Change Request attribute");
							// Source address attribute
							SourceAddress sa = new SourceAddress();
							sa.setAddress(new Address(receiverSocket.getLocalAddress().getAddress()));
							sa.setPort(receiverSocket.getLocalPort());
							sendMH.addMessageAttribute(sa);
							byte[] data = sendMH.getBytes();
							DatagramPacket send = new DatagramPacket(data, data.length);
							if (ra != null) {
								send.setPort(ra.getPort());
								send.setAddress(ra.getAddress().getInetAddress());
							} else {
								send.setPort(receive.getPort());
								send.setAddress(receive.getAddress());
							}
							receiverSocket.send(send);
							logger.config(receiverSocket.getLocalAddress().getHostAddress() + ":" + receiverSocket.getLocalPort()
									+ " send Binding Response to " + send.getAddress().getHostAddress() + ":" + send.getPort());
						} else if (cr.isChangePort() && cr.isChangeIP()) {
							logger.finer("Change port and ip received in Change Request attribute");
							// Source address attribute
							SourceAddress sa = new SourceAddress();
							sa.setAddress(new Address(changedPortIP.getLocalAddress().getAddress()));
							sa.setPort(changedPortIP.getLocalPort());
							sendMH.addMessageAttribute(sa);
							byte[] data = sendMH.getBytes();
							DatagramPacket send = new DatagramPacket(data, data.length);
							if (ra != null) {
								send.setPort(ra.getPort());
								send.setAddress(ra.getAddress().getInetAddress());
							} else {
								send.setPort(receive.getPort());
								send.setAddress(receive.getAddress());
							}
							changedPortIP.send(send);
							logger.config(changedPortIP.getLocalAddress().getHostAddress() + ":" + changedPortIP.getLocalPort()
									+ " send Binding Response to " + send.getAddress().getHostAddress() + ":" + send.getPort());
						}
					} catch (UnknownMessageAttributeException umae) {
						umae.printStackTrace();
						// Generate Binding error response
						// NOTE(review): MessageHeaderType is neither imported nor defined
						// here — looks like a stale reference to an older API; does not compile.
						MessageHeader sendMH = new MessageHeader(MessageHeaderType.BindingErrorResponse);
						sendMH.setTransactionID(receiveMH.getTransactionID());
						// Unknown attributes
						UnknownAttribute ua = new UnknownAttribute();
						ua.addAttribute(umae.getType());
						sendMH.addMessageAttribute(ua);
						byte[] data = sendMH.getBytes();
						DatagramPacket send = new DatagramPacket(data, data.length);
						send.setPort(receive.getPort());
						send.setAddress(receive.getAddress());
						receiverSocket.send(send);
						// NOTE(review): logs changedPortIP's address but the response was
						// sent via receiverSocket — verify which socket was intended.
						logger.config(changedPortIP.getLocalAddress().getHostAddress() + ":" + changedPortIP.getLocalPort()
								+ " send Binding Error Response to " + send.getAddress().getHostAddress() + ":" + send.getPort());
					}
				} catch (IOException ioe) {
					ioe.printStackTrace();
				} catch (MessageAttributeParsingException mape) {
					mape.printStackTrace();
				} catch (MessageAttributeException mae) {
					mae.printStackTrace();
				} catch (MessageHeaderParsingException mhpe) {
					mhpe.printStackTrace();
				} catch (UtilityException ue) {
					ue.printStackTrace();
				} catch (ArrayIndexOutOfBoundsException aioobe) {
					aioobe.printStackTrace();
				}
			}
		}
	}

	/*
	 * To invoke the STUN server two IP addresses and two ports are required.
*/
	/**
	 * Command-line entry point. Expects exactly four arguments
	 * (PORT1 IP1 PORT2 IP2); prints usage and exits otherwise. Attaches a
	 * file handler ("logging_server.txt") to the "de.javawi.stun" logger,
	 * builds the server on the two port/address pairs, and starts it.
	 */
	public static void main(String[] args) {
		try {
			if (args.length != 4) {
				System.out.println("usage: java de.javawi.jstun.test.demo.StunServer PORT1 IP1 PORT2 IP2");
				System.out.println();
				System.out.println(" PORT1 - the first port that should be used by the server");
				System.out.println(" IP1 - the first ip address that should be used by the server");
				System.out.println(" PORT2 - the second port that should be used by the server");
				System.out.println(" IP2 - the second ip address that should be used by the server");
				System.exit(0);
			}
			Handler fileHandler = new FileHandler("logging_server.txt");
			fileHandler.setFormatter(new SimpleFormatter());
			Logger stunLogger = Logger.getLogger("de.javawi.stun");
			stunLogger.addHandler(fileHandler);
			stunLogger.setLevel(Level.ALL);
			int firstPort = Integer.parseInt(args[0]);
			InetAddress firstAddress = InetAddress.getByName(args[1]);
			int secondPort = Integer.parseInt(args[2]);
			InetAddress secondAddress = InetAddress.getByName(args[3]);
			StunServer server = new StunServer(firstPort, firstAddress, secondPort, secondAddress);
			server.start();
		} catch (IOException ioe) {
			// SocketException and UnknownHostException are IOException subtypes;
			// the original caught each separately with identical handling.
			ioe.printStackTrace();
		}
	}
}
package de.kisi.android; import java.util.List; import org.json.JSONArray; import com.manavo.rest.RestCallback; import de.kisi.android.model.Gate; import de.kisi.android.model.Location; import android.os.Bundle; import android.support.v4.app.Fragment; import android.util.Log; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.view.View.OnClickListener; import android.widget.Button; import android.widget.RelativeLayout; import android.widget.TextView; import android.widget.Toast; public class LocationViewFragment extends Fragment { private RelativeLayout layout; static LocationViewFragment newInstance(int index) { // Fragments must not have a custom constructor LocationViewFragment f = new LocationViewFragment(); Bundle args = new Bundle(); args.putInt("index", index); f.setArguments(args); return f; } public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { if (container == null) { return null; } int index = getArguments().getInt("index"); final Location l = ((KisiMain)getActivity()).locations.valueAt(index); layout = (RelativeLayout) inflater.inflate(R.layout.locationtwodoors, container, false); TextView adress = (TextView) layout.findViewById(R.id.textViewTwoDoors); adress.setText(l.getAddress()); KisiApi api = new KisiApi(this.getActivity()); api.setCallback(new RestCallback() { public void success(Object obj) { JSONArray data = (JSONArray)obj; l.setGates(data); setupButtons(l.getGates()); } }); api.setLoadingMessage(null); api.get("locations/" + String.valueOf(l.getId()) + "/gates"); return layout; } public void setupButtons(List<Gate> gates) { int[] buttons = {R.id.buttonTwoDoorOne, R.id.buttonTwoDoorTwo}; int i = 0; for ( final Gate gate : gates ) { if ( i >= buttons.length ) { Log.d("waring", "more gates then buttons!"); break; } final Button button = (Button) layout.findViewById(buttons[i++]); button.setText("Unlock " + gate.getName()); 
button.setVisibility(View.VISIBLE); button.setOnClickListener( new OnClickListener() { @Override public void onClick(View v) { Log.d("pressed", "opening door " + String.valueOf(gate.getName())); KisiApi api = new KisiApi(getActivity()); api.setCallback(new RestCallback() { public void success(Object obj) { Toast.makeText(getActivity(), "Gate was opened successfully", Toast.LENGTH_LONG).show(); } }); api.setLoadingMessage("Opening gate..."); api.post("locations/" + String.valueOf(gate.getLocationId()) + "/gates/" + String.valueOf(gate.getId()) + "/access" ); } }); } } }
package com.gotech.tv.launcher.service; import com.gotech.tv.launcher.util.LogTool; import com.gotech.tv.launcher.util.NetworkUtil; import android.content.Context; import android.content.Intent; import android.net.ConnectivityManager; public class NetStateTracker implements Runnable { private static final String TAG = NetStateTracker.class.getSimpleName(); public static final String TVLAUNCHER_NETWORK_STATE_CHANGE = "tvlauncher.network.state.change"; public static final int NET_STATE_CMNET_UNABLE = 6; public static final int NET_STATE_CMNET = 4; public static final int NET_STATE_WIFI_UNABLE = 5; public static final int NET_STATE_WIFI = 3; public static final int NET_STATE_NO_CMNET = 2; public static final int NET_STATE_NO_WIFI = 1; private static NetStateTracker mInstance; private int mCurState = NET_STATE_CMNET_UNABLE; private Context mCtx; private Thread mChecker; private NetStateTracker(Context ctx) { mCtx = ctx.getApplicationContext(); initState(); } private void initState() { setCurNetState(NetworkUtil.getAPNType(mCtx)); } public synchronized static NetStateTracker getTracker(Context ctx) { if (mInstance == null) { mInstance = new NetStateTracker(ctx); } return mInstance; } public synchronized void startTrack() { if (mChecker == null || !mChecker.isAlive()) { mChecker = new Thread(this, "NetworkChecker"); mChecker.start(); } } public synchronized void stopCheck() { if (mChecker != null && mChecker.isAlive()) { mChecker.interrupt(); } } public synchronized int getCurNetState() { return mCurState; } public synchronized void setCurNetState(int mCurState) { if (this.mCurState != mCurState) { this.mCurState = mCurState; mCtx.sendBroadcast(new Intent(TVLAUNCHER_NETWORK_STATE_CHANGE).putExtra("state", mCurState)); } } public boolean isNetworkAvailable() { int state = getCurNetState(); if (state == NET_STATE_CMNET || state == NET_STATE_WIFI) { return true; } else { return false; } } @Override public void run() { while (!Thread.currentThread().isInterrupted()) { 
boolean isReached = false; if (NetworkUtil.isNetworkAvailable(mCtx)) { isReached = NetworkUtil.isReachable(ServerManager.obtain().getBaseServer());// HttpTool.obtain().isReachedUrl(Constant.getProvinceCityNameUrl); LogTool.v(TAG, ServerManager.obtain().getBaseServer() + " " + (isReached ? "can reached" : "can not reached")); if (isReached) { switch (NetworkUtil.getNetworkType(mCtx)) { case ConnectivityManager.TYPE_ETHERNET: case ConnectivityManager.TYPE_MOBILE: setCurNetState(NET_STATE_CMNET); break; case ConnectivityManager.TYPE_WIFI: setCurNetState(NET_STATE_WIFI); break; } } else { switch (NetworkUtil.getNetworkType(mCtx)) { case ConnectivityManager.TYPE_ETHERNET: case ConnectivityManager.TYPE_MOBILE: setCurNetState(NET_STATE_CMNET_UNABLE); break; case ConnectivityManager.TYPE_WIFI: setCurNetState(NET_STATE_WIFI_UNABLE); break; } } } else { LogTool.v(TAG, " is not NetworkAvailable"); setCurNetState(NetworkUtil.getAPNType(mCtx)); } try { Thread.sleep(isReached ? 50000 : 10000); } catch (InterruptedException e) { e.printStackTrace(); break; } } } public static interface NetworkStateCheckBack { public void onNetworkStaeCheckBack(int state); } }
package com.littlepanpc.downloader.db; import com.littlepanpc.downloader.util.LogUtils; import android.content.Context; import android.database.sqlite.SQLiteDatabase; import android.database.sqlite.SQLiteOpenHelper; public class DownloadDBHelper extends SQLiteOpenHelper { private static final String TAG = DownloadDBHelper.class.getSimpleName(); public static final String DB_NAME = "downloads.db"; public static final String DB_TABLE_NAME = "download_task"; public static final int DB_VERSION = 1; private static DownloadDBHelper instance; public static DownloadDBHelper getInstance(Context context) { if (instance == null) { synchronized (DownloadDBHelper.class) { if (instance == null) { instance = new DownloadDBHelper(context); } } } LogUtils.i(TAG, "getInstance---"+instance); return instance; } /** * <p>Title: </p> * <p>Description: Constructor</p> * * @param context */ private DownloadDBHelper(Context context) { // TODO Auto-generated constructor stub super(context, DB_NAME, null, DB_VERSION); LogUtils.i(TAG, "DownloadDBHelper constructor"); } /* (non-Javadoc) * <p>Title: onCreate</p> * <p>Description: create database table</p> * @param db * @see android.database.sqlite.SQLiteOpenHelper#onCreate(android.database.sqlite.SQLiteDatabase) */ @Override public void onCreate(SQLiteDatabase db) { // TODO Auto-generated method stub LogUtils.i(TAG, "onCreate start"); String createSQL = "create table " + DB_TABLE_NAME + "(" + "_id integer primary key autoincrement, " + "url text unique, " + "parent_path text, " + "file_name text, " + "downloaded_size integer, " + "total_size integer, " + "status integer)"; LogUtils.i(TAG, "createsql---"+createSQL); LogUtils.i(TAG, "onCreate before execute"); db.execSQL(createSQL); LogUtils.i(TAG, "onCreate after execute"); LogUtils.i(TAG, "onCreate finish"); } /* (non-Javadoc) * <p>Title: onUpgrade</p> * <p>Description: update database table</p> * @param db * @param oldVersion * @param newVersion * @see 
android.database.sqlite.SQLiteOpenHelper#onUpgrade(android.database.sqlite.SQLiteDatabase, int, int) */ @Override public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) { // TODO Auto-generated method stub String dropSQL = "drop table if exists " + DB_TABLE_NAME; LogUtils.i(TAG, "dropsql---"+dropSQL); LogUtils.i(TAG, "onUpgrade before execute"); db.execSQL(dropSQL); LogUtils.i(TAG, "onUpgrade after execute"); onCreate(db); LogUtils.i(TAG, "onUpgrade finish"); } }
package com.newrelic.plugins.mysql.instance;

import java.sql.Connection;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.logging.Logger;

import com.newrelic.metrics.publish.Agent;
import com.newrelic.metrics.publish.binding.Context;
import com.newrelic.plugins.mysql.MetricMeta;
import com.newrelic.plugins.mysql.MySQL;

/**
 * This class creates a specific MySQL agent that is used to
 * obtain a MySQL database connection, gather requested metrics
 * and report to New Relic.
 *
 * @author Ronald Bradford me@ronaldbradford.com
 */
public class MySQLAgent extends Agent {

    private static final String GUID = "com.newrelic.plugins.mysql.instance";
    private static final String version = "1.0.7";

    // Default values for MySQL Agent connection/configuration
    public static final String AGENT_DEFAULT_HOST = "localhost";
    public static final String AGENT_DEFAULT_USER = "newrelic";
    public static final String AGENT_DEFAULT_PASSWD = "f63c225f4abe9e13";
    public static final String AGENT_DEFAULT_PROPERTIES = "";
    public static final String AGENT_DEFAULT_METRICS = "status,newrelic";

    public static final String AGENT_CONFIG_FILE = "mysql.instance.json";
    public static final String CATEGORY_CONFIG_FILE = "metric.category.json";

    public static final String COMMA = ",";

    private String name;                                // Agent Name
    private String host;                                // MySQL Connection parameters
    private String user;
    private String passwd;
    private String properties;

    private String metrics;                             // Metrics to be collected for this agent (lowercased CSV)

    private Map<String, MetricMeta> metricsMeta =       // Definition of MySQL meta data (counter, unit, type etc)
            new HashMap<String, MetricMeta>();
    private Map<String, Object> metricCategories =      // Definition of categories of metrics
            new HashMap<String, Object>();

    private MySQL m;                                    // Per agent MySQL Object

    final Logger logger;                                // Local convenience variable

    private boolean firstReport = true;                 // suppresses repeated "missing metric" log noise after the first cycle

    /**
     * Default constructor to create a new MySQL Agent.
     *
     * @param name             human-readable name for this agent
     * @param host             MySQL instance host:port
     * @param user             MySQL user
     * @param passwd           MySQL user password
     * @param properties       extra JDBC connection properties
     * @param metrics          CSV list of metric categories to be monitored (lowercased here)
     * @param metricCategories category definitions loaded from metric.category.json
     */
    public MySQLAgent(String name, String host, String user, String passwd, String properties,
            String metrics, Map<String, Object> metricCategories) {
        super(GUID, version);

        this.name = name;                               // Set local attributes for new class object
        this.host = host;
        this.user = user;
        this.passwd = passwd;
        this.properties = properties;
        this.metrics = metrics.toLowerCase();
        this.metricCategories = metricCategories;

        this.m = new MySQL();

        logger = Context.getLogger();                   // Set logging to current Context
        MySQL.setLogger(logger);                        // Push logger to MySQL Object

        createMetaData();                               // Define incremental counters that are value/sec etc

        logger.fine("MySQL Agent initialized: " + formatAgentParams(name, host, user, properties, metrics));
    }

    /**
     * Format Agent parameters for logging.
     *
     * @param name       agent name
     * @param host       MySQL host
     * @param user       MySQL user
     * @param properties JDBC properties
     * @param metrics    CSV metric categories
     * @return A formatted String representing the Agent parameters
     */
    private String formatAgentParams(String name, String host, String user, String properties, String metrics) {
        StringBuilder builder = new StringBuilder();
        builder.append("name: ").append(name).append(" | ");
        builder.append("host: ").append(host).append(" | ");
        builder.append("user: ").append(user).append(" | ");
        builder.append("properties: ").append(properties).append(" | ");
        builder.append("metrics: ").append(metrics).append(" | ");
        return builder.toString();
    }

    /**
     * This method is run for every poll cycle of the Agent.
     * Get a MySQL Database connection and gather metrics.
     */
    public void pollCycle() {
        Connection c = m.getConnection(host, user, passwd, properties);     // Get a database connection (which should be cached)
        if (c == null) return;                                              // Unable to continue without a valid database connection

        logger.fine("Gathering MySQL metrics. " + getAgentInfo());

        Map<String,Number> results = gatherMetrics(c, metrics);             // Gather defined metrics
        reportMetrics(results);                                             // Report Metrics to New Relic
        firstReport = false;
    }

    /**
     * This method runs the various categories of MySQL statements
     * and gathers the metrics that can be reported.
     *
     * @param c       MySQL Database Connection
     * @param metrics CSV list of metric categories to be obtained for this agent
     * @return Map of metrics and values
     */
    private Map<String, Number> gatherMetrics(Connection c, String metrics) {
        Map<String,Number> results = new HashMap<String,Number>();          // Create an empty set of results
        Map<String,Object> categories = getMetricCategories();              // Get current Metric Categories

        Iterator<String> iter = categories.keySet().iterator();

        metrics = metrics + COMMA;                                          // Add trailing comma for search criteria

        while (iter.hasNext()) {
            String category = (String)iter.next();
            @SuppressWarnings("unchecked")
            Map<String, String> attributes = (Map<String,String>)categories.get(category);
            if (metrics.contains(category + COMMA)) {                       // Use a dumb search, including comma to handle overlapping categories
                results.putAll(MySQL.runSQL(c, category, attributes.get("SQL"), attributes.get("result")));
            }
        }
        results.putAll(newRelicMetrics(results, metrics));
        return results;
    }

    /**
     * This method creates a number of custom New Relic Metrics, that are derived from
     * raw MySQL status metrics.
     *
     * @param existing already-gathered MySQL metrics (keyed e.g. "status/com_select")
     * @param metrics  CSV of the Metric Categories being captured (trailing comma already appended)
     * @return Map of additional derived metrics
     */
    protected Map<String, Number> newRelicMetrics(Map<String, Number> existing, String metrics) {
        Map<String, Number> derived = new HashMap<String,Number>();

        if (!metrics.contains("newrelic" + COMMA)) return derived;          // Only calculate newrelic category if specified.
        if (!metrics.contains("status" + COMMA)) return derived;            // "status" category is a pre-requisite for newrelic metrics

        logger.fine("Adding New Relic derived metrics");

        /* read and write volume */
        if (areRequiredMetricsPresent("Reads", existing, "status/com_select", "status/qcache_hits")) {
            derived.put("newrelic/volume_reads",
                    existing.get("status/com_select").intValue() + existing.get("status/qcache_hits").intValue());
        }
        if (areRequiredMetricsPresent("Writes", existing,
                "status/com_insert", "status/com_update", "status/com_delete", "status/com_replace",
                "status/com_insert_select", "status/com_update_multi", "status/com_delete_multi",
                "status/com_replace_select")) {
            derived.put("newrelic/volume_writes",
                    existing.get("status/com_insert").intValue() + existing.get("status/com_insert_select").intValue() +
                    existing.get("status/com_update").intValue() + existing.get("status/com_update_multi").intValue() +
                    existing.get("status/com_delete").intValue() + existing.get("status/com_delete_multi").intValue() +
                    existing.get("status/com_replace").intValue() + existing.get("status/com_replace_select").intValue());
        }

        /* read and write throughput */
        if (areRequiredMetricsPresent("Read Throughput", existing, "status/bytes_sent")) {
            derived.put("newrelic/bytes_reads", existing.get("status/bytes_sent").intValue());
        }
        if (areRequiredMetricsPresent("Write Throughput", existing, "status/bytes_received")) {
            derived.put("newrelic/bytes_writes", existing.get("status/bytes_received").intValue());
        }

        /* Connection management */
        if (areRequiredMetricsPresent("Connection Management", existing,
                "status/threads_connected", "status/threads_running", "status/threads_cached")) {
            float threads_connected = existing.get("status/threads_connected").floatValue();
            float threads_running = existing.get("status/threads_running").floatValue();
            derived.put("newrelic/connections_connected", (int)threads_connected);
            derived.put("newrelic/connections_running", (int)threads_running);
            derived.put("newrelic/connections_cached", existing.get("status/threads_cached").intValue());
            // NOTE(review): if threads_connected is 0 this yields Infinity — confirm MySQL
            // always reports at least the current connection here.
            derived.put("newrelic/pct_connection_utilization", (threads_running / threads_connected) * 100.0);
        }

        /* InnoDB Metrics */
        if (areRequiredMetricsPresent("InnoDB", existing,
                "status/innodb_pages_created", "status/innodb_pages_read", "status/innodb_pages_written",
                "status/innodb_buffer_pool_read_requests", "status/innodb_buffer_pool_reads",
                "status/innodb_data_fsyncs", "status/innodb_os_log_fsyncs")) {
            derived.put("newrelic/innodb_bp_pages_created", existing.get("status/innodb_pages_created").intValue());
            derived.put("newrelic/innodb_bp_pages_read", existing.get("status/innodb_pages_read").intValue());
            derived.put("newrelic/innodb_bp_pages_written", existing.get("status/innodb_pages_written").intValue());

            /* Innodb Specific Metrics */
            float innodb_read_requests = existing.get("status/innodb_buffer_pool_read_requests").floatValue();
            float innodb_reads = existing.get("status/innodb_buffer_pool_reads").floatValue();

            derived.put("newrelic/pct_innodb_buffer_pool_hit_ratio",
                    (innodb_read_requests / (innodb_read_requests + innodb_reads)) * 100.0);
            derived.put("newrelic/innodb_fsyncs_data", existing.get("status/innodb_data_fsyncs").intValue());
            derived.put("newrelic/innodb_fsyncs_os_log", existing.get("status/innodb_os_log_fsyncs").intValue());
        }

        /* InnoDB Buffer Metrics */
        if (areRequiredMetricsPresent("InnoDB Buffers", existing,
                "status/innodb_buffer_pool_pages_total", "status/innodb_buffer_pool_pages_data",
                "status/innodb_buffer_pool_pages_misc", "status/innodb_buffer_pool_pages_dirty",
                "status/innodb_buffer_pool_pages_free")) {
            int pages_total = existing.get("status/innodb_buffer_pool_pages_total").intValue();
            int pages_data = existing.get("status/innodb_buffer_pool_pages_data").intValue();
            int pages_misc = existing.get("status/innodb_buffer_pool_pages_misc").intValue();
            int pages_dirty = existing.get("status/innodb_buffer_pool_pages_dirty").intValue();
            int pages_free = existing.get("status/innodb_buffer_pool_pages_free").intValue();

            derived.put("newrelic/innodb_buffer_pool_pages_clean", pages_data - pages_dirty);
            derived.put("newrelic/innodb_buffer_pool_pages_dirty", pages_dirty);
            derived.put("newrelic/innodb_buffer_pool_pages_misc", pages_misc);
            derived.put("newrelic/innodb_buffer_pool_pages_free", pages_free);
            derived.put("newrelic/innodb_buffer_pool_pages_unassigned",
                    pages_total - pages_data - pages_free - pages_misc);
        }

        /* Query Cache */
        if (areRequiredMetricsPresent("Query Cache", existing,
                "status/qcache_hits", "status/com_select", "status/qcache_free_blocks",
                "status/qcache_total_blocks", "status/qcache_inserts", "status/qcache_not_cached")) {
            float qc_hits = existing.get("status/qcache_hits").floatValue();
            float reads = existing.get("status/com_select").floatValue();
            float free = existing.get("status/qcache_free_blocks").floatValue();
            float total = existing.get("status/qcache_total_blocks").floatValue();

            derived.put("newrelic/query_cache_hits", (int)qc_hits);
            derived.put("newrelic/query_cache_misses", existing.get("status/qcache_inserts").intValue());
            derived.put("newrelic/query_cache_not_cached", existing.get("status/qcache_not_cached").intValue());
            derived.put("newrelic/pct_query_cache_hit_utilization", (qc_hits / (qc_hits + reads))* 100.0);
            derived.put("newrelic/pct_query_cache_memory_in_use", 100 - ((free/total)* 100.0));
        }

        /* Temp Table */
        if (areRequiredMetricsPresent("Temp Tables", existing,
                "status/created_tmp_tables", "status/created_tmp_disk_tables")) {
            float tmp_tables = existing.get("status/created_tmp_tables").floatValue();
            float tmp_tables_disk = existing.get("status/created_tmp_disk_tables").floatValue();
            derived.put("newrelic/pct_tmp_tables_written_to_disk", (tmp_tables_disk/tmp_tables)* 100.0);
        }

        /* Replication specifics */
        // "slave" category is a pre-requisite for these metrics
        if (metrics.contains("slave" + COMMA)) {
            if (areRequiredMetricsPresent("newrelic/replication_lag", existing, "slave/seconds_behind_master")) {
                derived.put("newrelic/replication_lag", existing.get("slave/seconds_behind_master").intValue());
            }
            if (areRequiredMetricsPresent("newrelic/replication_status", existing,
                    "slave/slave_io_running", "slave/slave_sql_running")) {
                int slave_io_thread_running = existing.get("slave/slave_io_running").intValue();
                int slave_sql_thread_running = existing.get("slave/slave_sql_running").intValue();

                /* both need to be YES, which is 1 */
                int replication_status = 1;                 // Default as in ERROR
                if (slave_io_thread_running + slave_sql_thread_running == 2) {
                    replication_status = 0;
                }
                derived.put("newrelic/replication_status", replication_status);
            }
            if (areRequiredMetricsPresent("newrelic/slave_relay_log_bytes", existing, "slave/relay_log_pos")) {
                derived.put("newrelic/slave_relay_log_bytes", existing.get("slave/relay_log_pos").intValue());
            }
            if (areRequiredMetricsPresent("newrelic/master_log_lag_bytes", existing,
                    "slave/read_master_log_pos", "slave/exec_master_log_pos")) {
                derived.put("newrelic/master_log_lag_bytes",
                        existing.get("slave/read_master_log_pos").intValue()
                        - existing.get("slave/exec_master_log_pos").intValue());
            }
        } else {
            // This is a hack because the NR UI can't handle it missing for graphs
            derived.put("newrelic/replication_lag", 0);
            derived.put("newrelic/replication_status", 0);
            derived.put("newrelic/slave_relay_log_bytes", 0);
            derived.put("newrelic/master_log_lag_bytes", 0);
        }

        return derived;
    }

    /**
     * This method does the reporting of metrics to New Relic.
     *
     * @param results gathered metric name/value pairs (keys are already lower-case)
     */
    public void reportMetrics(Map<String,Number> results) {
        int count = 0;
        logger.fine("Collected " + results.size() + " MySQL metrics. " + getAgentInfo());
        logger.finest(results.toString());

        Iterator<String> iter = results.keySet().iterator();
        while (iter.hasNext()) {                                            // Iterate over current metrics
            String key = (String)iter.next().toLowerCase();
            Number val = results.get(key);
            MetricMeta md = getMetricMeta(key);
            if (md != null) {                                               // Metric Meta data exists (from metric.category.json)
                logger.fine("Metric " + " " + key + "(" + md.getUnit() + ")=" + val + " "
                        + (md.isCounter() ? "counter" : ""));
                count++;
                if (md.isCounter()) {                                       // Metric is a counter: report the per-second delta
                    reportMetric(key , md.getUnit(), md.getCounter().process(val));
                } else {                                                    // Metric is a fixed Number
                    if (java.lang.Float.class.equals(results.get(key).getClass())) {
                        reportMetric(key, md.getUnit(), val.floatValue());  // We are working with a float value
                    } else {
                        reportMetric(key , md.getUnit(), val.intValue());   // We are working with an int
                    }
                }
            } else {                                                        // md == null: no meta data for this metric
                if (firstReport)                                            // Provide some feedback of available metrics for future reporting
                    logger.fine("Not reporting identified metric " + key);
            }
        }
        logger.fine("Reported to New Relic " + count + " metrics. " + getAgentInfo());
    }

    /** One-line agent identification used in log messages. */
    private String getAgentInfo() {
        return "Agent Name: " + name + ". Agent Version: " + version;
    }

    /**
     * This method creates the metric meta data that is derived from the provided configuration
     * and New Relic specific metrics.
     */
    private void createMetaData() {

        Map<String,Object> categories = getMetricCategories();              // Get current Metric Categories
        Iterator<String> iter = categories.keySet().iterator();
        while (iter.hasNext()) {
            String category = (String)iter.next();
            @SuppressWarnings("unchecked")
            Map<String, String> attributes = (Map<String,String>)categories.get(category);
            // "value_metrics" are reported as plain gauges
            String valueMetrics = attributes.get("value_metrics");
            if (valueMetrics != null) {
                Set<String> metrics = new HashSet<String>(Arrays.asList(
                        valueMetrics.toLowerCase().replaceAll(" ", "").split(MySQLAgent.COMMA)));
                for (String s: metrics) {
                    addMetricMeta(category + MySQL.SEPARATOR + s, new MetricMeta(false));
                }
            }
            // "counter_metrics" are reported as per-second rates
            String counterMetrics = attributes.get("counter_metrics");
            if (counterMetrics != null) {
                Set<String> metrics = new HashSet<String>(Arrays.asList(
                        counterMetrics.toLowerCase().replaceAll(" ", "").split(MySQLAgent.COMMA)));
                for (String s: metrics) {
                    addMetricMeta(category + MySQL.SEPARATOR + s, new MetricMeta(true));
                }
            }
        }

        /* Define New Relic specific metrics used for default dashboards */
        addMetricMeta("newrelic/volume_reads", new MetricMeta(true, "Queries/Second"));
        addMetricMeta("newrelic/volume_writes", new MetricMeta(true, "Queries/Second"));

        addMetricMeta("newrelic/bytes_reads", new MetricMeta(true, "Bytes/Second"));
        addMetricMeta("newrelic/bytes_writes", new MetricMeta(true, "Bytes/Second"));

        addMetricMeta("newrelic/connections_connected", new MetricMeta(false, "Connections"));
        addMetricMeta("newrelic/connections_running", new MetricMeta(false, "Connections"));
        addMetricMeta("newrelic/connections_cached", new MetricMeta(false, "Connections"));

        addMetricMeta("newrelic/innodb_bp_pages_created", new MetricMeta(true, "Pages/Second"));
        addMetricMeta("newrelic/innodb_bp_pages_read", new MetricMeta(true, "Pages/Second"));
        addMetricMeta("newrelic/innodb_bp_pages_written", new MetricMeta(true, "Pages/Second"));

        addMetricMeta("newrelic/query_cache_hits", new MetricMeta(true, "Queries/Seconds"));
        addMetricMeta("newrelic/query_cache_misses", new MetricMeta(true, "Queries/Seconds"));
        addMetricMeta("newrelic/query_cache_not_cached", new MetricMeta(true, "Queries/Seconds"));

        addMetricMeta("newrelic/replication_lag", new MetricMeta(false, "Seconds"));
        addMetricMeta("newrelic/replication_status", new MetricMeta(false, "State"));

        addMetricMeta("newrelic/pct_connection_utilization", new MetricMeta(false, "Percent"));
        addMetricMeta("newrelic/pct_innodb_buffer_pool_hit_ratio", new MetricMeta(false, "Percent"));
        addMetricMeta("newrelic/pct_query_cache_hit_utilization", new MetricMeta(false, "Percent"));
        addMetricMeta("newrelic/pct_query_cache_memory_in_use", new MetricMeta(false, "Percent"));
        addMetricMeta("newrelic/pct_tmp_tables_written_to_disk", new MetricMeta(false, "Percent"));

        addMetricMeta("newrelic/innodb_fsyncs_data", new MetricMeta(true, "Fsyncs/Second"));
        addMetricMeta("newrelic/innodb_fsyncs_os_log", new MetricMeta(true, "Fsyncs/Second"));

        addMetricMeta("newrelic/slave_relay_log_bytes", new MetricMeta(true, "Bytes/Second"));
        addMetricMeta("newrelic/master_log_lag_bytes", new MetricMeta(true, "Bytes/Second"));

        /* Define improved metric values for certain general metrics */
        addMetricMeta("status/aborted_clients", new MetricMeta(true, "Connections/Second"));
        addMetricMeta("status/aborted_connects", new MetricMeta(true, "Connections/Second"));

        addMetricMeta("status/bytes_sent", new MetricMeta(true, "Bytes/Second"));
        addMetricMeta("status/bytes_received", new MetricMeta(true, "Bytes/Second"));

        addMetricMeta("status/com_select", new MetricMeta(true, "Selects/Second"));
        addMetricMeta("status/com_insert", new MetricMeta(true, "Inserts/Second"));
        addMetricMeta("status/com_insert_select", new MetricMeta(true, "Inserts/Second"));
        addMetricMeta("status/com_update", new MetricMeta(true, "Updates/Second"));
        addMetricMeta("status/com_update_multi", new MetricMeta(true, "Updates/Second"));
        addMetricMeta("status/com_delete", new MetricMeta(true, "Deletes/Second"));
        addMetricMeta("status/com_delete_multi", new MetricMeta(true, "Deletes/Second"));
        addMetricMeta("status/com_replace", new MetricMeta(true, "Replaces/Second"));
        addMetricMeta("status/com_replace_select", new MetricMeta(true, "Replaces/Second"));

        addMetricMeta("status/slow_queries", new MetricMeta(true, "Queries/Second"));
        addMetricMeta("status/created_tmp_tables", new MetricMeta(true, "Queries/Second"));
        addMetricMeta("status/created_tmp_disk_tables", new MetricMeta(true, "Queries/Second"));

        addMetricMeta("status/innodb_buffer_pool_pages_flushed",new MetricMeta(true, "Pages/Second"));

        addMetricMeta("newrelic/innodb_buffer_pool_pages_clean", new MetricMeta(false, "Pages"));
        addMetricMeta("newrelic/innodb_buffer_pool_pages_dirty", new MetricMeta(false, "Pages"));
        addMetricMeta("newrelic/innodb_buffer_pool_pages_misc", new MetricMeta(false, "Pages"));
        addMetricMeta("newrelic/innodb_buffer_pool_pages_free", new MetricMeta(false, "Pages"));
        addMetricMeta("newrelic/innodb_buffer_pool_pages_unassigned", new MetricMeta(false, "Pages"));

        addMetricMeta("status/innodb_data_fsyncs", new MetricMeta(true, "Fsyncs/Second"));
        addMetricMeta("status/innodb_os_log_fsyncs", new MetricMeta(true, "Fsyncs/Second"));
        addMetricMeta("status/innodb_os_log_written", new MetricMeta(true, "Bytes/Second"));

        /* Query Cache Units */
        addMetricMeta("status/qcache_free_blocks", new MetricMeta(false, "Blocks"));
        addMetricMeta("status/qcache_free_memory", new MetricMeta(false, "Bytes"));
        addMetricMeta("status/qcache_hits", new MetricMeta(true, "Queries/Second"));
        addMetricMeta("status/qcache_inserts", new MetricMeta(true, "Queries/Second"));
        addMetricMeta("status/qcache_lowmem_prunes", new MetricMeta(true, "Queries/Second"));
        addMetricMeta("status/qcache_not_cached", new MetricMeta(true, "Queries/Second"));
        addMetricMeta("status/qcache_queries_in_cache", new MetricMeta(false, "Queries"));
        addMetricMeta("status/qcache_total_blocks", new MetricMeta(false, "Blocks"));

        addMetricMeta("innodb_status/history_list_length", new MetricMeta(false, "Pages"));
        addMetricMeta("innodb_status/queries_inside_innodb",new MetricMeta(false, "Queries"));
        addMetricMeta("innodb_status/queries_in_queue", new MetricMeta(false, "Queries"));
        addMetricMeta("innodb_status/checkpoint_age", new MetricMeta(false, "Bytes"));

        addMetricMeta("master/position", new MetricMeta(true, "Bytes/Second"));
        addMetricMeta("slave/relay_log_pos", new MetricMeta(true, "Bytes/Second"));
    }

    /**
     * Add the given metric meta information to the Map of all metric meta information for this agent.
     * Keys are stored lower-case.
     *
     * @param key metric key, e.g. "status/com_select"
     * @param mm  meta data (counter flag and unit) for the metric
     */
    private void addMetricMeta(String key, MetricMeta mm) {
        metricsMeta.put(key.toLowerCase(), mm);
    }

    /**
     * This provides a lazy instantiation of a MySQL metric where no meta data was defined
     * and means new metrics can be captured automatically.
     *
     * A default metric is a integer value.
     *
     * @param key metric to look up
     * @return MetricMeta structure of information about the metric, or null if unknown
     */
    private MetricMeta getMetricMeta(String key) {
        if (key.startsWith("innodb_mutex/") && !metricsMeta.containsKey(key)) {
            // This is a catch all for dynamic name metrics
            addMetricMeta(key, new MetricMeta(true, "Operations/Second"));
        }
        return (MetricMeta)metricsMeta.get(key.toLowerCase());              // Look for existing meta data on metric
    }

    /**
     * Private utility function to validate that all required data is present for constructing atomic metrics.
     *
     * @param category a display name for which metric category will not be included if a given key is not present
     * @param map      the map of available data points
     * @param keys     keys that are expected to be present for this operation
     * @return true if all expected keys are present, otherwise false
     */
    private boolean areRequiredMetricsPresent(String category, Map<String, Number> map, String...keys) {
        for(String key : keys) {
            if(!map.containsKey(key)) {
                if(firstReport) {
                    // Only report missing category data on the first run so as not to clutter the log
                    logger.finest("Not reporting on '" + category + "' due to missing data field '" + key + "'");
                }
                return false;
            }
        }
        return true;
    }

    /**
     * Return the human readable name for this agent.
     *
     * @return String
     */
    @Override
    public String getComponentHumanLabel() {
        return name;
    }

    /**
     * Return the map of metric categories.
     *
     * @return Map
     */
    public Map<String, Object> getMetricCategories() {
        return metricCategories;
    }
}
package ess.jels;

import java.io.IOException;

import xal.model.IComponent;
import xal.model.IElement;
import xal.model.Lattice;
import xal.model.ModelException;
import xal.model.Sector;
import xal.model.alg.EnvelopeTracker;
import xal.model.alg.Tracker;
import xal.model.elem.ElementSeq;
import xal.model.probe.EnvelopeProbe;
import xal.model.xml.LatticeXmlWriter;
import xal.sim.scenario.ElsScenarioGenerator;
import xal.sim.scenario.OldScenarioMapping;
import xal.sim.scenario.Scenario;
import xal.smf.AcceleratorSeq;
import xal.smf.impl.Bend;
import xal.tools.beam.IConstants;
import xal.tools.beam.Twiss;

/**
 * Stand-alone test program: builds a single bend magnet from TraceWin-style
 * input parameters, runs an envelope-probe simulation through it with two
 * scenario generators, and prints the resulting beam sizes and Twiss betas
 * for comparison against the reference ELS output quoted at the bottom of main().
 */
public class BendTest {

    public static void main(String[] args) throws InstantiationException, ModelException {
        System.out.println("Running\n");
        AcceleratorSeq sequence = new AcceleratorSeq("DriftTest");

        // input from TraceWin
        double entry_angle_deg = -5.5;
        double exit_angle_deg = -5.5;
        double alpha_deg = -11;       // angle in degrees
        double rho = 9375.67*1e-3;    // curvature radius (in m)
        double N = 0.;                // field Index
        final int HV = 1;             // 0 - horizontal, 1 - vertical
        /* G,K1,K2 - gap, fringe field factors are supported in the model but not SMF (use G*1.e-3)*/

        // calculations
        double alpha = alpha_deg * Math.PI/180.0;   // bend angle in radians
        double len = Math.abs(rho*alpha);           // arc length of the bend
        double quadComp = N / (rho*rho);            // quadrupole component derived from the field index

        // following are used to calculate field
        EnvelopeProbe probe = setupProbeViaJavaCalls();
        probe.initialize();
        double c = IConstants.LightSpeed;
        double e = probe.getSpeciesCharge();
        double Er = probe.getSpeciesRestEnergy();
        double gamma = probe.getGamma();
        double b = probe.getBeta();
        // Dipole field from beam rigidity: B0 = beta*gamma*Er/(e*c*rho); the sign
        // follows the bend direction. NOTE(review): assumes Er is in eV-consistent
        // units with e and c — confirm against the XAL unit conventions.
        double B0 = b*gamma*Er/(e*c*rho)*Math.signum(alpha);

        // Anonymous subclass only to force the orientation, which the SMF file
        // format cannot express.
        Bend bend = new Bend("b") {
            @Override
            public int getOrientation() {
                if (HV == 0) return HORIZONTAL; else return VERTICAL;
                // currently impossible to put it into a file
            }
        };
        bend.setPosition(len*0.5); //always position on center!
        bend.setLength(len); // both paths are used in calculation
        bend.getMagBucket().setPathLength(len);
        bend.getMagBucket().setDipoleEntrRotAngle(-entry_angle_deg);
        bend.getMagBucket().setBendAngle(alpha_deg);
        bend.getMagBucket().setDipoleExitRotAngle(-exit_angle_deg);
        bend.setDfltField(B0);
        bend.getMagBucket().setDipoleQuadComponent(quadComp);

        sequence.addNode(bend);
        sequence.setLength(len);

        // Generates lattice from SMF accelerator (two generators, for comparison)
        Scenario oscenario = Scenario.newScenarioFor(sequence);
        Scenario scenario = new ElsScenarioGenerator(sequence, new OldScenarioMapping()).getScenario();
        //Scenario oscenario = Scenario.newAndImprovedScenarioFor(sequence);

        // Outputting lattice elements
        saveLattice(scenario.getLattice(), "lattice.xml");
        saveLattice(oscenario.getLattice(), "elattice.xml");

        // Creating a probe
        scenario.setProbe(probe);

        // Prints transfer matrices
        /*for (IComponent comp : ((ElementSeq)((Sector)scenario.getLattice().getElementList().get(0)).getChild(1)).getElementList()) {
            IElement el = (IElement)comp;
            el.transferMap(probe, el.getLength()).getFirstOrder().print();
        }*/

        // Setting up synchronization mode
        scenario.setSynchronizationMode(Scenario.SYNC_MODE_DESIGN);
        scenario.resync();

        // Running simulation
        scenario.run();

        // Getting results
        Twiss[] t = probe.getCovariance().computeTwiss();

        double[] beta = new double[3];
        for (int i=0; i<3; i++) beta[i] = t[i].getBeta();
        // longitudinal beta rescaled by gamma^2 to match the ELS convention
        beta[2]/=probe.getGamma()*probe.getGamma();

        double[] sigma = new double[3];
        for (int i=0; i<3; i++)
            sigma[i] = Math.sqrt(beta[i]*t[i].getEmittance()/probe.getBeta()/probe.getGamma());
        System.out.printf("%E %E %E %E ",probe.getPosition(), sigma[0], sigma[1], sigma[2]);
        System.out.printf("%E %E %E\n", beta[0], beta[1], beta[2]);

        /* ELS output: 7.000000E-02 1.060867E-03 9.629023E-04 1.920023E-03 3.918513E-01 3.239030E-01 9.445394E-01 */
    }

    /**
     * Builds the envelope probe and tracker entirely in Java (no config file),
     * using the TraceWin beam parameters listed in the comment below.
     */
    private static EnvelopeProbe setupProbeViaJavaCalls() {
        // Envelope probe and tracker
        EnvelopeTracker envelopeTracker = new EnvelopeTracker();
        envelopeTracker.setRfGapPhaseCalculation(false);
        envelopeTracker.setUseSpacecharge(false);
        envelopeTracker.setEmittanceGrowth(false);
        envelopeTracker.setStepSize(0.004);
        envelopeTracker.setProbeUpdatePolicy(Tracker.UPDATE_EXIT);

        EnvelopeProbe envelopeProbe = new EnvelopeProbe();
        envelopeProbe.setAlgorithm(envelopeTracker);
        envelopeProbe.setSpeciesCharge(-1);
        envelopeProbe.setSpeciesRestEnergy(9.3829431e8);
        envelopeProbe.setKineticEnergy(2.5e6);//energy
        envelopeProbe.setPosition(0.0);
        envelopeProbe.setTime(0.0);

        /*
        number of particles = 1000
        beam current in A = 0
        Duty Cycle in %= 4
        normalized horizontal emittance in m*rad= 0.2098e-6
        normalized vertical emittance in m*rad = 0.2091e-6
        normalized longitudinal emittance in m*rad = 0.2851e-6
        kinetic energy in MeV = 3
        alfa x = -0.1763
        beta x in m/rad = 0.2442
        alfa y = -0.3247
        beta y in m/rad = 0.3974
        alfa z = -0.5283
        beta z in m/rad = 0.8684
        */
        envelopeProbe.initFromTwiss(new Twiss[]{new Twiss(-0.1763,0.2442,0.2098e-6),
                new Twiss(-0.3247,0.3974,0.2091e-6),
                new Twiss(-0.5283,0.8684,0.2851e-6)});
        envelopeProbe.setBeamCurrent(0.0);
        envelopeProbe.setBunchFrequency(4.025e8);//frequency

        return envelopeProbe;
    }

    /**
     * Writes the lattice to an XML file; failures are logged to stderr and
     * otherwise ignored (best-effort output for inspection only).
     */
    private static void saveLattice(Lattice lattice, String file) {
        try {
            LatticeXmlWriter.writeXml(lattice, file);
        } catch (IOException e1) {
            e1.printStackTrace();
            return;
        }
    }
}
package com.redhat.ceylon.model.typechecker.model; import static com.redhat.ceylon.model.typechecker.model.ModelUtil.NO_TYPE_ARGS; import static com.redhat.ceylon.model.typechecker.model.ModelUtil.addToIntersection; import static com.redhat.ceylon.model.typechecker.model.ModelUtil.addToUnion; import static com.redhat.ceylon.model.typechecker.model.ModelUtil.appliedType; import static com.redhat.ceylon.model.typechecker.model.ModelUtil.canonicalIntersection; import static com.redhat.ceylon.model.typechecker.model.ModelUtil.intersectionType; import static com.redhat.ceylon.model.typechecker.model.ModelUtil.isNameMatching; import static com.redhat.ceylon.model.typechecker.model.ModelUtil.isOverloadedVersion; import static com.redhat.ceylon.model.typechecker.model.ModelUtil.isToplevelAnonymousClass; import static com.redhat.ceylon.model.typechecker.model.ModelUtil.isToplevelClassConstructor; import static com.redhat.ceylon.model.typechecker.model.ModelUtil.union; import static com.redhat.ceylon.model.typechecker.model.ModelUtil.unionType; import static com.redhat.ceylon.model.typechecker.model.Module.LANGUAGE_MODULE_NAME; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.TreeMap; import com.redhat.ceylon.model.typechecker.context.TypeCache; public class Unit { private Package pkg; private List<Import> imports = new ArrayList<Import>(); private List<Declaration> declarations = new ArrayList<Declaration>(); private String filename; private List<ImportList> importLists = new ArrayList<ImportList>(); private Set<Declaration> duplicateDeclarations = new HashSet<Declaration>(); private final Set<String> dependentsOf = new HashSet<String>(); private String fullPath; private String relativePath; public List<Import> getImports() { return imports; } public List<ImportList> 
getImportLists() { return importLists; } /** * @return the dependentsOf */ public Set<String> getDependentsOf() { return dependentsOf; } public Set<Declaration> getDuplicateDeclarations() { return duplicateDeclarations; } public Package getPackage() { return pkg; } public void setPackage(Package p) { pkg = p; } public List<Declaration> getDeclarations() { synchronized (declarations) { return new ArrayList<Declaration>(declarations); } } public void addDeclaration(Declaration declaration) { synchronized (declarations) { declarations.add(declaration); } } public String getFilename() { return filename; } public void setFilename(String filename) { this.filename = filename; } public String getFullPath() { return fullPath; } public void setFullPath(String fullPath) { this.fullPath = fullPath; } public String getRelativePath() { return relativePath; } public void setRelativePath(String relativePath) { this.relativePath = relativePath; } @Override public String toString() { return filename; } public Import getImport(String name) { for (Import i: getImports()) { if (!i.isAmbiguous() && i.getTypeDeclaration()==null && i.getAlias().equals(name)) { return i; } } return null; } public String getAliasedName(Declaration dec) { for (Import i: getImports()) { if (!i.isAmbiguous() && i.getDeclaration().equals(getAbstraction(dec))) { return i.getAlias(); } } return dec.getName(); } public static Declaration getAbstraction(Declaration dec){ if (isOverloadedVersion(dec)) { return dec.getContainer() .getDirectMember(dec.getName(), null, false); } else { return dec; } } /** * Search the imports of a compilation unit * for the named toplevel declaration. 
*/ public Declaration getImportedDeclaration(String name, List<Type> signature, boolean ellipsis) { for (Import i: getImports()) { if (!i.isAmbiguous() && i.getAlias().equals(name)) { //in case of an overloaded member, this will //be the "abstraction", so search for the //correct overloaded version Declaration d = i.getDeclaration(); if (isToplevelImport(i, d)) { return d.getContainer() .getMember(d.getName(), signature, ellipsis); } } } return null; } static boolean isToplevelImport(Import i, Declaration d) { return d.isToplevel() || d.isStaticallyImportable() || isToplevelClassConstructor(i.getTypeDeclaration(), d) || isToplevelAnonymousClass(i.getTypeDeclaration()); } /** * Search the imports of a compilation unit * for the named member declaration. */ public Declaration getImportedDeclaration(TypeDeclaration td, String name, List<Type> signature, boolean ellipsis) { for (Import i: getImports()) { TypeDeclaration itd = i.getTypeDeclaration(); if (itd!=null && itd.equals(td) && !i.isAmbiguous() && i.getAlias().equals(name)) { //in case of an overloaded member, this will //be the "abstraction", so search for the //correct overloaded version Declaration d = i.getDeclaration(); return d.getContainer() .getMember(d.getName(), signature, ellipsis); } } return null; } public Map<String, DeclarationWithProximity> getMatchingImportedDeclarations(String startingWith, int proximity) { Map<String, DeclarationWithProximity> result = new TreeMap<String, DeclarationWithProximity>(); for (Import i: new ArrayList<Import>(getImports())) { if (i.getAlias()!=null && !i.isAmbiguous() && isNameMatching(startingWith, i)) { Declaration d = i.getDeclaration(); if (isToplevelImport(i, d)) { result.put(i.getAlias(), new DeclarationWithProximity(i, proximity)); } } } return result; } public Map<String, DeclarationWithProximity> getMatchingImportedDeclarations(TypeDeclaration td, String startingWith, int proximity) { Map<String, DeclarationWithProximity> result = new TreeMap<String, 
DeclarationWithProximity>(); for (Import i: new ArrayList<Import>(getImports())) { TypeDeclaration itd = i.getTypeDeclaration(); if (i.getAlias()!=null && !i.isAmbiguous() && itd!=null && itd.equals(td) && isNameMatching(startingWith, i)) { result.put(i.getAlias(), new DeclarationWithProximity(i, proximity)); } } return result; } @Override public boolean equals(Object obj) { if (obj instanceof Unit) { Unit that = (Unit) obj; return that==this || that.getPackage() .equals(getPackage()) && Objects.equals(getFilename(), that.getFilename()) && Objects.equals(that.getFullPath(), getFullPath()); } else { return false; } } @Override public int hashCode() { return getFullPath().hashCode(); } private Module languageModule; private Package languagePackage; /** * Search for a declaration in the language module. */ public Declaration getLanguageModuleDeclaration(String name) { //all elements in ceylon.language are auto-imported //traverse all default module packages provided they //have not been traversed yet Module languageModule = getLanguageModule(); if (languageModule!=null && languageModule.isAvailable()) { if ("Nothing".equals(name)) { return getNothingDeclaration(); } if (languagePackage==null) { languagePackage = languageModule.getPackage(LANGUAGE_MODULE_NAME); } if (languagePackage != null) { Declaration d = languagePackage.getMember(name, null, false); if (d != null && d.isShared()) { return d; } } } return null; } private Module getLanguageModule() { if (languageModule==null) { languageModule = getPackage().getModule() .getLanguageModule(); } return languageModule; } /** * Search for a declaration in {@code ceylon.language.meta.model} */ public Declaration getLanguageModuleModelDeclaration(String name) { Module languageModule = getPackage().getModule() .getLanguageModule(); if (languageModule!=null && languageModule.isAvailable()) { Package languageScope = languageModule.getPackage("ceylon.language.meta.model"); if (languageScope!=null) { Declaration d = 
languageScope.getMember(name, null, false); if (d!=null && d.isShared()) { return d; } } } return null; } /** * Search for a declaration in {@code ceylon.language.meta.declaration} */ public Declaration getLanguageModuleDeclarationDeclaration(String name) { Module languageModule = getPackage().getModule() .getLanguageModule(); if (languageModule!=null && languageModule.isAvailable()) { Package languageScope = languageModule.getPackage("ceylon.language.meta.declaration"); if (languageScope!=null) { Declaration d = languageScope.getMember(name, null, false); if (d!=null && d.isShared()) { return d; } } } return null; } /** * Search for a declaration in {@code ceylon.language.serialization} */ public Declaration getLanguageModuleSerializationDeclaration(String name) { Module languageModule = getPackage().getModule() .getLanguageModule(); if (languageModule!=null && languageModule.isAvailable()) { Package languageScope = languageModule.getPackage("ceylon.language.serialization"); if (languageScope!=null) { Declaration d = languageScope.getMember(name, null, false); if (d != null && d.isShared()) { return d; } } } return null; } public Interface getCorrespondenceDeclaration() { return (Interface) getLanguageModuleDeclaration("Correspondence"); } public Class getAnythingDeclaration() { return (Class) getLanguageModuleDeclaration("Anything"); } public Class getNullDeclaration() { return (Class) getLanguageModuleDeclaration("Null"); } public Value getNullValueDeclaration() { return (Value) getLanguageModuleDeclaration("null"); } public Interface getEmptyDeclaration() { return (Interface) getLanguageModuleDeclaration("Empty"); } public Interface getSequenceDeclaration() { return (Interface) getLanguageModuleDeclaration("Sequence"); } public Class getObjectDeclaration() { return (Class) getLanguageModuleDeclaration("Object"); } public Class getBasicDeclaration() { return (Class) getLanguageModuleDeclaration("Basic"); } public Interface getIdentifiableDeclaration() { return 
(Interface) getLanguageModuleDeclaration("Identifiable"); } public Class getThrowableDeclaration() { return (Class) getLanguageModuleDeclaration("Throwable"); } public Class getErrorDeclaration() { return (Class) getLanguageModuleDeclaration("Error"); } public Class getExceptionDeclaration() { return (Class) getLanguageModuleDeclaration("Exception"); } public Interface getCategoryDeclaration() { return (Interface) getLanguageModuleDeclaration("Category"); } public Interface getIterableDeclaration() { return (Interface) getLanguageModuleDeclaration("Iterable"); } public Interface getSequentialDeclaration() { return (Interface) getLanguageModuleDeclaration("Sequential"); } public Interface getListDeclaration() { return (Interface) getLanguageModuleDeclaration("List"); } public Interface getCollectionDeclaration() { return (Interface) getLanguageModuleDeclaration("Collection"); } public Interface getIteratorDeclaration() { return (Interface) getLanguageModuleDeclaration("Iterator"); } public Interface getCallableDeclaration() { return (Interface) getLanguageModuleDeclaration("Callable"); } public Interface getScalableDeclaration() { return (Interface) getLanguageModuleDeclaration("Scalable"); } public Interface getSummableDeclaration() { return (Interface) getLanguageModuleDeclaration("Summable"); } public Interface getNumericDeclaration() { return (Interface) getLanguageModuleDeclaration("Numeric"); } public Interface getIntegralDeclaration() { return (Interface) getLanguageModuleDeclaration("Integral"); } public Interface getInvertableDeclaration() { return (Interface) getLanguageModuleDeclaration("Invertible"); } public Interface getExponentiableDeclaration() { return (Interface) getLanguageModuleDeclaration("Exponentiable"); } public Interface getSetDeclaration() { return (Interface) getLanguageModuleDeclaration("Set"); } public TypeDeclaration getComparisonDeclaration() { return (TypeDeclaration) getLanguageModuleDeclaration("Comparison"); } public 
TypeDeclaration getBooleanDeclaration() { return (TypeDeclaration) getLanguageModuleDeclaration("Boolean"); } public Value getTrueValueDeclaration() { return (Value) getLanguageModuleDeclaration("true"); } public Value getFalseValueDeclaration() { return (Value) getLanguageModuleDeclaration("false"); } public TypeDeclaration getStringDeclaration() { return (TypeDeclaration) getLanguageModuleDeclaration("String"); } public TypeDeclaration getFloatDeclaration() { return (TypeDeclaration) getLanguageModuleDeclaration("Float"); } public TypeDeclaration getIntegerDeclaration() { return (TypeDeclaration) getLanguageModuleDeclaration("Integer"); } public TypeDeclaration getCharacterDeclaration() { return (TypeDeclaration) getLanguageModuleDeclaration("Character"); } public TypeDeclaration getByteDeclaration() { return (TypeDeclaration) getLanguageModuleDeclaration("Byte"); } public Interface getComparableDeclaration() { return (Interface) getLanguageModuleDeclaration("Comparable"); } public Interface getUsableDeclaration() { return (Interface) getLanguageModuleDeclaration("Usable"); } public Interface getDestroyableDeclaration() { return (Interface) getLanguageModuleDeclaration("Destroyable"); } public Interface getObtainableDeclaration() { return (Interface) getLanguageModuleDeclaration("Obtainable"); } public Interface getOrdinalDeclaration() { return (Interface) getLanguageModuleDeclaration("Ordinal"); } public Interface getEnumerableDeclaration() { return (Interface) getLanguageModuleDeclaration("Enumerable"); } public Class getRangeDeclaration() { return (Class) getLanguageModuleDeclaration("Range"); } public Class getSpanDeclaration() { return (Class) getLanguageModuleDeclaration("Span"); } public Class getMeasureDeclaration() { return (Class) getLanguageModuleDeclaration("Measure"); } public Class getTupleDeclaration() { return (Class) getLanguageModuleDeclaration("Tuple"); } public TypeDeclaration getArrayDeclaration() { return (Class) 
getLanguageModuleDeclaration("Array"); } public Interface getRangedDeclaration() { return (Interface) getLanguageModuleDeclaration("Ranged"); } public Class getEntryDeclaration() { return (Class) getLanguageModuleDeclaration("Entry"); } Type getCallableType(Reference ref, Type rt) { Type result = rt; Declaration declaration = ref.getDeclaration(); if (declaration instanceof Functional) { Functional fd = (Functional) declaration; List<ParameterList> pls = fd.getParameterLists(); for (int i=pls.size()-1; i>=0; i boolean hasSequenced = false; boolean atLeastOne = false; int firstDefaulted = -1; List<Parameter> ps = pls.get(i).getParameters(); List<Type> args = new ArrayList<Type> (ps.size()); for (int j=0; j<ps.size(); j++) { Parameter p = ps.get(j); if (p.getModel()==null) { args.add(getUnknownType()); } else { TypedReference np = ref.getTypedParameter(p); Type npt = np.getType(); if (npt==null) { args.add(getUnknownType()); } else { if (p.isDefaulted() && firstDefaulted==-1) { firstDefaulted = j; } if (np.getDeclaration() instanceof Functional) { args.add(getCallableType(np, npt)); } else if (p.isSequenced()) { args.add(getIteratedType(npt)); hasSequenced = true; atLeastOne = p.isAtLeastOne(); } else { args.add(npt); } } } } Type paramListType = getTupleType(args, hasSequenced, atLeastOne, firstDefaulted); result = appliedType(getCallableDeclaration(), result, paramListType); } } return result; } public Type getTupleType( List<Type> elemTypes, Type variadicTailType, int firstDefaulted) { boolean hasVariadicTail = variadicTailType!=null; Type result = hasVariadicTail ? variadicTailType : getEmptyType(); Type union = hasVariadicTail ? 
getSequentialElementType(variadicTailType) : getNothingType(); return getTupleType(elemTypes, false, false, firstDefaulted, result, union); } public Type getTupleType( List<Type> elemTypes, boolean variadic, boolean atLeastOne, int firstDefaulted) { return getTupleType(elemTypes, variadic, atLeastOne, firstDefaulted, getEmptyType(), getNothingType()); } private Type getTupleType( List<Type> elemTypes, boolean variadic, boolean atLeastOne, int firstDefaulted, Type result, Type union) { int last = elemTypes.size()-1; for (int i=last; i>=0; i Type elemType = elemTypes.get(i); union = unionType(union, elemType, this); if (variadic && i==last) { result = atLeastOne ? getSequenceType(elemType) : getSequentialType(elemType); } else { result = appliedType(getTupleDeclaration(), union, elemType, result); if (firstDefaulted>=0 && i>=firstDefaulted) { result = unionType(result, getEmptyType(), this); } } } return result; } public Type getEmptyType(Type pt) { return pt==null ? null : unionType(pt, getEmptyType(), this); } public Type getPossiblyEmptyType(Type pt) { return pt==null ? null : appliedType(getSequentialDeclaration(), getSequentialElementType(pt)); } public Type getOptionalType(Type pt) { return pt==null ? 
null : unionType(pt, getNullType(), this); } public Type getUnknownType() { return new UnknownType(this).getType(); } public Type getNothingType() { return getType(getNothingDeclaration()); } public Type getEmptyType() { return getType(getEmptyDeclaration()); } public Type getAnythingType() { return getType(getAnythingDeclaration()); } public Type getObjectType() { return getType(getObjectDeclaration()); } public Type getIdentifiableType() { return getType(getIdentifiableDeclaration()); } public Type getBasicType() { return getType(getBasicDeclaration()); } public Type getNullType() { return getType(getNullDeclaration()); } public Type getThrowableType() { return getType(getThrowableDeclaration()); } public Type getExceptionType() { return getType(getExceptionDeclaration()); } public Type getBooleanType() { return getType(getBooleanDeclaration()); } public Type getStringType() { return getType(getStringDeclaration()); } public Type getIntegerType() { return getType(getIntegerDeclaration()); } public Type getFloatType() { return getType(getFloatDeclaration()); } public Type getCharacterType() { return getType(getCharacterDeclaration()); } public Type getByteType() { return getType(getByteDeclaration()); } public Type getComparisonType() { return getType(getComparisonDeclaration()); } public Type getDestroyableType() { return getType(getDestroyableDeclaration()); } public Type getObtainableType() { return getType(getObtainableDeclaration()); } public Type getSequenceType(Type et) { return appliedType(getSequenceDeclaration(), et); } public Type getSequentialType(Type et) { return appliedType(getSequentialDeclaration(), et); } public Type getIterableType(Type et) { return appliedType(getIterableDeclaration(), et, getNullType()); } public Type getNonemptyIterableType(Type et) { return appliedType(getIterableDeclaration(), et, getNothingType()); } public Type getSetType(Type et) { return appliedType(getSetDeclaration(), et); } /** * Returns a Type corresponding to 
{@code Iterator<T>} * @param et The Type corresponding to {@code T} * @return The Type corresponding to {@code Iterator<T>} */ public Type getIteratorType(Type et) { return appliedType(getIteratorDeclaration(), et); } /** * Returns a Type corresponding to {@code Span<T>} * @param rt The Type corresponding to {@code T} * @return The Type corresponding to {@code Span<T>} */ public Type getSpanType(Type rt) { return appliedType(getRangeDeclaration(), rt); } /** * Returns a Type corresponding to {@code SizedRange<T>|[]} * @param rt The Type corresponding to {@code T} * @return The Type corresponding to {@code SizedRange<T>|[]} */ public Type getMeasureType(Type rt) { return unionType(appliedType(getRangeDeclaration(), rt), getEmptyType(), this); } public Type getEntryType(Type kt, Type vt) { return appliedType(getEntryDeclaration(), kt, vt); } public Type getKeyType(Type type) { Type st = type.getSupertype(getEntryDeclaration()); if (st!=null && st.getTypeArguments().size()==2) { return st.getTypeArgumentList().get(0); } else { return null; } } public Type getValueType(Type type) { Type st = type.getSupertype(getEntryDeclaration()); if (st!=null && st.getTypeArguments().size()==2) { return st.getTypeArgumentList().get(1); } else { return null; } } public Type getIteratedType(Type type) { Interface id = getIterableDeclaration(); Type st = type.getSupertype(id); if (st!=null && st.getTypeArguments().size()>0) { return st.getTypeArgumentList().get(0); } else { return null; } } public Type getFirstType(Type type) { Interface id = getIterableDeclaration(); Type st = type.getSupertype(id); if (st!=null && st.getTypeArguments().size()>1) { return st.getTypeArgumentList().get(1); } else { return null; } } public boolean isNonemptyIterableType(Type type) { Type ft = getFirstType(type); return ft!=null && ft.isNothing(); } public Type getSetElementType(Type type) { Type st = type.getSupertype(getSetDeclaration()); if (st!=null && st.getTypeArguments().size()==1) { return 
st.getTypeArgumentList().get(0); } else { return null; } } public Type getSequentialElementType(Type type) { Interface sd = getSequentialDeclaration(); Type st = type.getSupertype(sd); if (st!=null && st.getTypeArguments().size()==1) { return st.getTypeArgumentList().get(0); } else { return null; } } public Type getDefiniteType(Type pt) { return intersectionType(getObjectType(), pt, this); } public Type getNonemptyType(Type pt) { Type st = appliedType(getSequenceDeclaration(), getSequentialElementType(pt)); return intersectionType(st, pt, this); } public Type getNonemptyDefiniteType(Type pt) { return getNonemptyType(getDefiniteType(pt)); } public boolean isEntryType(Type pt) { return pt.getDeclaration() .inherits(getEntryDeclaration()); } public boolean isIterableType(Type pt) { return pt.getDeclaration() .inherits(getIterableDeclaration()); } public boolean isUsableType(Type pt) { return pt.getDeclaration() .inherits(getUsableDeclaration()); } public boolean isSequentialType(Type pt) { return pt.getDeclaration() .inherits(getSequentialDeclaration()); } public boolean isSequenceType(Type pt) { return pt.getDeclaration() .inherits(getSequenceDeclaration()); } public boolean isEmptyType(Type pt) { return pt.getDeclaration() .inherits(getEmptyDeclaration()); } public boolean isTupleType(Type pt) { return pt.getDeclaration() .inherits(getTupleDeclaration()); } public boolean isOptionalType(Type pt) { //must have non-empty intersection with Null //and non-empty intersection with Value return !intersectionType(getNullType(), pt, this) .isNothing() && !intersectionType(getObjectType(), pt, this) .isNothing(); } public boolean isPossiblyEmptyType(Type pt) { //must be a subtype of Sequential<Anything> return isSequentialType(getDefiniteType(pt)) && //must have non-empty intersection with Empty //and non-empty intersection with Sequence<Nothing> !intersectionType(getEmptyType(), pt, this) .isNothing() && !intersectionType(getSequentialType(getNothingType()), pt, this) 
.isNothing(); } public boolean isCallableType(Type pt) { return pt!=null && pt.getDeclaration() .inherits(getCallableDeclaration()); } public NothingType getNothingDeclaration() { return new NothingType(this); } public Type denotableType(Type type) { if (type!=null) { if (type.isUnion()) { List<Type> cts = type.getCaseTypes(); List<Type> list = new ArrayList<Type> (cts.size()+1); for (Type ct: cts) { addToUnion(list, denotableType(ct)); } return union(list, this); } if (type.isIntersection()) { List<Type> sts = type.getSatisfiedTypes(); List<Type> list = new ArrayList<Type> (sts.size()+1); for (Type st: sts) { addToIntersection(list, denotableType(st), this); } return canonicalIntersection(list, this); } TypeDeclaration dec = type.getDeclaration(); Type et = dec.getExtendedType(); TypeDeclaration ed = et==null ? null : et.getDeclaration(); if (dec.isOverloaded()) { type = type.getSupertype(ed); } if (dec instanceof Constructor) { return type.getSupertype(ed); } if (dec instanceof Class && dec.isAnonymous()) { List<Type> sts = dec.getSatisfiedTypes(); List<Type> list = new ArrayList<Type> (sts.size()+1); if (et!=null) { TypeDeclaration etd = et.getDeclaration(); addToIntersection(list, type.getSupertype(etd), this); } for (Type st: sts) { if (st!=null) { TypeDeclaration std = st.getDeclaration(); addToIntersection(list, type.getSupertype(std), this); } } return canonicalIntersection(list, this); } else { List<Type> typeArgList = type.getTypeArgumentList(); if (typeArgList.isEmpty()) { return type; } else { dec = type.getDeclaration(); List<TypeParameter> typeParamList = dec.getTypeParameters(); List<Type> typeArguments = new ArrayList<Type> (typeArgList.size()); for (int i=0; i<typeParamList.size() && i<typeArgList.size(); i++) { Type at = typeArgList.get(i); TypeParameter tp = typeParamList.get(i); typeArguments.add(tp.isCovariant() ? 
denotableType(at) : at); } Type qt = type.getQualifyingType(); Type dt = dec.appliedType(qt, typeArguments); dt.setUnderlyingType(type.getUnderlyingType()); dt.setVarianceOverrides(type.getVarianceOverrides()); dt.setTypeConstructor(type.isTypeConstructor()); dt.setTypeConstructorParameter( type.getTypeConstructorParameter()); dt.setRaw(type.isRaw()); return dt; } } } else { return null; } } public Type nonemptyArgs(Type args) { return getEmptyType().isSubtypeOf(args) ? getNonemptyType(args) : args; } public boolean isHomogeneousTuple(Type args) { if (args!=null) { Class td = getTupleDeclaration(); Type tuple = args.getSupertype(td); if (tuple!=null) { List<Type> tal = tuple.getTypeArgumentList(); Type elemType; if (tal.size()>=3) { elemType = tal.get(0); } else { return false; } Type emptyType = getEmptyType(); while (true) { tal = tuple.getTypeArgumentList(); if (tal.size()>=3) { Type first = tal.get(1); if (first==null) { return false; } else if (!first.isExactly(elemType)) { return false; } else { Type rest = tal.get(2); if (rest==null) { return false; } else if (rest.isExactly(emptyType)) { return true; } else { tuple = rest.getSupertype(td); if (tuple==null) { return false; } } } } else { return false; } } } else { return false; } } else { return false; } } public int getHomogeneousTupleLength(Type args) { if (args!=null) { Class td = getTupleDeclaration(); Type tuple = args.getSupertype(td); if (tuple!=null) { List<Type> tal = tuple.getTypeArgumentList(); Type elemType; if (tal.size()>=1) { elemType = tal.get(0); } else { return -1; } int size = 0; Type emptyType = getEmptyType(); while (true) { size++; tal = tuple.getTypeArgumentList(); if (tal.size()>=3) { Type first = tal.get(1); if (first==null) { return -1; } else if (!first.isExactly(elemType)) { return -1; } else { Type rest = tal.get(2); if (rest==null) { return -1; } else if (rest.isExactly(emptyType)) { return size; } else { tuple = rest.getSupertype(td); if (tuple==null) { return -1; } } } } else 
{ return -1; } } } else { return -1; } } else { return -1; } } public List<Type> getTupleElementTypes(Type args) { if (args!=null) { /*List<Type> simpleResult = getSimpleTupleElementTypes(args, 0); if (simpleResult!=null) { return simpleResult; }*/ if (isEmptyType(args)) { return NO_TYPE_ARGS; } Class td = getTupleDeclaration(); Type tuple = nonemptyArgs(args) .getSupertype(td); if (tuple!=null) { List<Type> result = new LinkedList<Type>(); while (true) { List<Type> tal = tuple.getTypeArgumentList(); if (tal.size()>=3) { Type first = tal.get(1); if (first==null) { first = getUnknownType(); } result.add(first); Type rest = tal.get(2); if (rest==null) { result.add(getUnknownType()); return result; } else if (isEmptyType(rest)) { return result; } else { tuple = nonemptyArgs(rest) .getSupertype(td); if (tuple==null) { if (isSequentialType(rest)) { //this is pretty weird: return the whole //tail type as the element of the list! result.add(rest); return result; } else { result.add(getUnknownType()); return result; } } //else continue the loop! } } else { result.add(getUnknownType()); return result; } } } else if (isSequentialType(args)) { //this is pretty weird: return the whole //tail type as the element of the list! 
return singleton(args); } } return singleton(getUnknownType()); } private static List<Type> singleton(Type pt) { List<Type> result = new ArrayList<Type>(1); result.add(pt); return result; } /*private List<Type> getSimpleTupleElementTypes( Type args, int count) { // can be a defaulted tuple of Empty|Tuple if (args.isUnion()) { List<Type> caseTypes = args.getCaseTypes(); if (caseTypes == null || caseTypes.size() != 2) { return null; } Type caseA = caseTypes.get(0); Type caseB = caseTypes.get(1); if (!caseA.isClassOrInterface() || !caseB.isClassOrInterface()) { return null; } TypeDeclaration caseADecl = caseA.getDeclaration(); TypeDeclaration caseBDecl = caseB.getDeclaration(); String caseAName = caseADecl.getQualifiedNameString(); String caseBName = caseBDecl.getQualifiedNameString(); if (caseAName.equals("ceylon.language::Empty") && caseBName.equals("ceylon.language::Tuple")) { return getSimpleTupleElementTypes(caseB, count); } if (caseBName.equals("ceylon.language::Empty") && caseAName.equals("ceylon.language::Tuple")) { return getSimpleTupleElementTypes(caseA, count); } return null; } // can be Tuple, Empty, Sequence or Sequential if (!(args.isClassOrInterface())) { return null; } String name = args.getDeclaration() .getQualifiedNameString(); if (name.equals("ceylon.language::Tuple")){ List<Type> tal = args.getTypeArgumentList(); Type first = tal.get(1); Type rest = tal.get(2); List<Type> ret = getSimpleTupleElementTypes(rest, count+1); if (ret == null) return null; ret.set(count, first); return ret; } if (name.equals("ceylon.language::Empty")){ ArrayList<Type> ret = new ArrayList<Type>(count); for (int i=0;i<count;i++) { ret.add(null); } return ret; } if (name.equals("ceylon.language::Sequential") || name.equals("ceylon.language::Sequence") || name.equals("ceylon.language::Range")) { ArrayList<Type> ret = new ArrayList<Type>(count+1); for (int i=0;i<count;i++) { ret.add(null); } ret.add(args); return ret; } return null; }*/ public boolean 
isTupleLengthUnbounded(Type args) { if (args!=null) { /*Boolean simpleTupleLengthUnbounded = isSimpleTupleLengthUnbounded(args); if (simpleTupleLengthUnbounded != null) { return simpleTupleLengthUnbounded.booleanValue(); }*/ if (args.isSubtypeOf(getEmptyType())) { return false; } //TODO: this doesn't account for the case where // a tuple occurs in a union with [] Class td = getTupleDeclaration(); Type tst = nonemptyArgs(args) .getSupertype(td); if (tst==null) { return true; } else { while (true) { List<Type> tal = tst.getTypeArgumentList(); if (tal.size()>=3) { Type rest = tal.get(2); if (rest==null) { return false; } else if (rest.isSubtypeOf(getEmptyType())) { return false; } else { tst = nonemptyArgs(rest) .getSupertype(td); if (tst==null) { return true; } //else continue the loop! } } else { return false; } } } } return false; } /*protected Boolean isSimpleTupleLengthUnbounded(Type args) { // can be a defaulted tuple of Empty|Tuple if (args.isUnion()) { List<Type> caseTypes = args.getCaseTypes(); if (caseTypes == null || caseTypes.size() != 2) { return null; } Type caseA = caseTypes.get(0); Type caseB = caseTypes.get(1); if (!caseA.isClassOrInterface() || !caseB.isClassOrInterface()) { return null; } TypeDeclaration caseADecl = caseA.getDeclaration(); TypeDeclaration caseBDecl = caseB.getDeclaration(); String caseAName = caseADecl.getQualifiedNameString(); String caseBName = caseBDecl.getQualifiedNameString(); if (caseAName.equals("ceylon.language::Empty") && caseBName.equals("ceylon.language::Tuple")) { return isSimpleTupleLengthUnbounded(caseB); } if (caseBName.equals("ceylon.language::Empty") && caseAName.equals("ceylon.language::Tuple")) { return isSimpleTupleLengthUnbounded(caseA); } return null; } // can be Tuple, Empty, Sequence or Sequential if (!(args.isClassOrInterface())) { return null; } String name = args.getDeclaration() .getQualifiedNameString(); if (name.equals("ceylon.language::Tuple")) { Type rest = args.getTypeArgumentList().get(2); return 
isSimpleTupleLengthUnbounded(rest); } if (name.equals("ceylon.language::Empty")) { return false; } if (name.equals("ceylon.language::Range")) { return true; } if (name.equals("ceylon.language::Sequential") || name.equals("ceylon.language::Sequence")) { return true; } return null; }*/ public boolean isTupleVariantAtLeastOne(Type args) { if (args!=null) { /*Boolean simpleTupleVariantAtLeastOne = isSimpleTupleVariantAtLeastOne(args); if (simpleTupleVariantAtLeastOne != null) { return simpleTupleVariantAtLeastOne.booleanValue(); }*/ if (getEmptyType().isSubtypeOf(args)) { return false; } Class td = getTupleDeclaration(); Type tuple = nonemptyArgs(args) .getSupertype(td); if (tuple == null) { return isSequenceType(args); } else { while (true) { List<Type> tal = tuple.getTypeArgumentList(); if (tal.size()>=3) { Type rest = tal.get(2); if (rest==null) { return false; } else if (getEmptyType() .isSubtypeOf(rest)) { return false; } else if (isSequenceType(rest) && !isTupleType(args)) { return true; } else { tuple = nonemptyArgs(rest) .getSupertype(td); if (tuple==null) { return isSequenceType(args); } //else continue the loop! 
} } else { return false; } } } } return false; } /*private Boolean isSimpleTupleVariantAtLeastOne(Type args) { // can be a defaulted tuple of Empty|Tuple if (args.isUnion()) { List<Type> caseTypes = args.getCaseTypes(); if (caseTypes == null || caseTypes.size() != 2) { return null; } Type caseA = caseTypes.get(0); Type caseB = caseTypes.get(1); if (!caseA.isClassOrInterface() || !caseB.isClassOrInterface()) { return null; } TypeDeclaration caseADecl = caseA.getDeclaration(); TypeDeclaration caseBDecl = caseB.getDeclaration(); String caseAName = caseADecl.getQualifiedNameString(); String caseBName = caseBDecl.getQualifiedNameString(); if (caseAName.equals("ceylon.language::Empty") && caseBName.equals("ceylon.language::Tuple")) { return isSimpleTupleVariantAtLeastOne(caseB); } if (caseBName.equals("ceylon.language::Empty") && caseAName.equals("ceylon.language::Tuple")) { return isSimpleTupleVariantAtLeastOne(caseA); } return null; } // can be Tuple, Empty, Sequence or Sequential if (!(args.isClassOrInterface())) { return null; } String name = args.getDeclaration() .getQualifiedNameString(); if (name.equals("ceylon.language::Tuple")) { Type rest = args.getTypeArgumentList().get(2); return isSimpleTupleVariantAtLeastOne(rest); } if (name.equals("ceylon.language::Empty")) { return false; } if (name.equals("ceylon.language::Range")) { return true; } if (name.equals("ceylon.language::Sequential")) { return false; } if (name.equals("ceylon.language::Sequence")) { return true; } return null; }*/ public int getTupleMinimumLength(Type args) { if (args!=null) { /*int simpleMinimumLength = getSimpleTupleMinimumLength(args); if (simpleMinimumLength != -1) { return simpleMinimumLength; }*/ if (getEmptyType().isSubtypeOf(args)) { return 0; } Class td = getTupleDeclaration(); Type tuple = nonemptyArgs(args) .getSupertype(td); if (tuple == null) { return isSequenceType(args) ? 
1 : 0; } else { int size = 0; while (true) { List<Type> tal = tuple.getTypeArgumentList(); size++; if (tal.size()>=3) { Type rest = tal.get(2); if (rest==null) { return size; } else if (getEmptyType() .isSubtypeOf(rest)) { return size; } else { tuple = nonemptyArgs(rest) .getSupertype(td); if (tuple==null) { return isSequenceType(args) ? size+1 : size; } //else continue the loop! } } else { return size; } } } } return 0; } /*private int getSimpleTupleMinimumLength(Type args) { // can be a defaulted tuple of Empty|Tuple if (args.isUnion()){ List<Type> caseTypes = args.getCaseTypes(); if (caseTypes == null || caseTypes.size() != 2) { return -1; } Type caseA = caseTypes.get(0); Type caseB = caseTypes.get(1); if (!caseA.isClassOrInterface() || !caseB.isClassOrInterface()) { return -1; } TypeDeclaration caseADecl = caseA.getDeclaration(); TypeDeclaration caseBDecl = caseB.getDeclaration(); String caseAName = caseADecl.getQualifiedNameString(); String caseBName = caseBDecl.getQualifiedNameString(); if (caseAName.equals("ceylon.language::Empty") && caseBName.equals("ceylon.language::Tuple")) { return 0; } if (caseBName.equals("ceylon.language::Empty") && caseAName.equals("ceylon.language::Tuple")) { return 0; } return -1; } // can be Tuple, Empty, Sequence or Sequential if (!(args.isClassOrInterface())) { return -1; } String name = args.getDeclaration() .getQualifiedNameString(); if (name.equals("ceylon.language::Tuple")) { Type rest = args.getTypeArgumentList().get(2); int ret = getSimpleTupleMinimumLength(rest); return ret == -1 ? 
-1 : ret + 1; } if (name.equals("ceylon.language::Empty")) { return 0; } if (name.equals("ceylon.language::Range")) { return 1; } if (name.equals("ceylon.language::Sequential")) { return 0; } if (name.equals("ceylon.language::Sequence")) { return 1; } return -1; }*/ public List<Type> getCallableArgumentTypes(Type t) { Type tuple = getCallableTuple(t); if (tuple == null) { return Collections.emptyList(); } else { return getTupleElementTypes(tuple); } } public Type getCallableTuple(Type t) { if (t==null) return null; Interface cd = getCallableDeclaration(); Type ct = t.getSupertype(cd); if (ct!=null) { List<Type> typeArgs = ct.getTypeArgumentList(); if (typeArgs.size()>=2) { return typeArgs.get(1); } } return null; } public Type getCallableReturnType(Type t) { if (t==null) return null; if (t.isNothing()) return t; Interface cd = getCallableDeclaration(); Type ct = t.getSupertype(cd); if (ct!=null) { List<Type> typeArgs = ct.getTypeArgumentList(); if (typeArgs.size()>=1) { return typeArgs.get(0); } } return null; } public boolean isIterableParameterType(Type t) { return t.getDeclaration().isIterable(); } public TypeDeclaration getLanguageModuleModelTypeDeclaration (String name) { return (TypeDeclaration) getLanguageModuleModelDeclaration(name); } public TypeDeclaration getLanguageModuleDeclarationTypeDeclaration (String name) { return (TypeDeclaration) getLanguageModuleDeclarationDeclaration(name); } private final Map<String,String> modifiers = new HashMap<String,String>(); private void put(String modifier) { modifiers.put(modifier, modifier); } { put("shared"); put("default"); put("formal"); put("native"); put("actual"); put("abstract"); put("final"); put("sealed"); put("variable"); put("late"); put("deprecated"); put("annotation"); put("optional"); put("serializable"); } public Map<String, String> getModifiers() { return modifiers; } public Type getValueMetatype(TypedReference pr) { boolean variable = pr.getDeclaration().isVariable(); Type getType = pr.getType(); 
TypeDeclaration typeDec = getType == null ? null : getType.getDeclaration(); boolean constructor = typeDec instanceof Constructor; if (constructor) { getType = getType.getExtendedType(); } Type setType = variable ? getType : getNothingType(); Type qualifyingType = pr.getQualifyingType(); if (qualifyingType!=null && !constructor) { TypeDeclaration ad = getLanguageModuleModelTypeDeclaration( "Attribute"); return appliedType(ad, qualifyingType, getType, setType); } else { TypeDeclaration vd = getLanguageModuleModelTypeDeclaration( "Value"); return appliedType(vd, getType, setType); } } public Type getFunctionMetatype(TypedReference pr) { TypedDeclaration d = pr.getDeclaration(); Functional f = (Functional) d; if (f.getParameterLists().isEmpty()) { return null; } ParameterList fpl = f.getFirstParameterList(); List<Parameter> params = fpl.getParameters(); Type parameterTuple = getParameterTypesAsTupleType(params, pr); Type returnType = getCallableReturnType(pr.getFullType()); if (returnType == null) { return null; } else { Type qualifyingType = pr.getQualifyingType(); if (qualifyingType!=null) { TypeDeclaration md = getLanguageModuleModelTypeDeclaration( "Method"); return appliedType(md, qualifyingType, returnType, parameterTuple); } else { TypeDeclaration fd = getLanguageModuleModelTypeDeclaration( "Function"); return appliedType(fd, returnType, parameterTuple); } } } public Type getConstructorMetatype(Type pr) { TypeDeclaration d = pr.getDeclaration(); Functional f = (Functional) d; if (f.getParameterLists().isEmpty()) { return null; } ParameterList fpl = f.getFirstParameterList(); List<Parameter> params = fpl.getParameters(); Type parameterTuple = getNothingType(); Scope scope = d.getContainer(); if (scope instanceof Class) { Class c = (Class) scope; if (c.isClassOrInterfaceMember() || c.isToplevel()) { parameterTuple = getParameterTypesAsTupleType(params, pr); } else { parameterTuple = getNothingType(); } } Type returnType = 
denotableType(getCallableReturnType(pr.getFullType())); if (returnType == null) { return null; } else { Type qt = pr.getQualifyingType(); if (qt!=null && qt.getDeclaration() .isClassOrInterfaceMember()) { Type qqt = qt.getQualifyingType(); TypeDeclaration mccd = getLanguageModuleModelTypeDeclaration( "MemberClassConstructor"); return appliedType(mccd, qqt, returnType, parameterTuple); } else { TypeDeclaration cd = getLanguageModuleModelTypeDeclaration( "Constructor"); return appliedType(cd, returnType, parameterTuple); } } } public Type getClassMetatype(Type literalType) { Class c = (Class) literalType.getDeclaration(); ParameterList parameterList = c.getParameterList(); Type parameterTuple; if ((c.isClassOrInterfaceMember() || c.isToplevel()) && parameterList!=null) { parameterTuple = getParameterTypesAsTupleType( parameterList.getParameters(), literalType); } else { parameterTuple = getNothingType(); } Type qualifyingType = literalType.getQualifyingType(); if (qualifyingType!=null) { TypeDeclaration mcd = getLanguageModuleModelTypeDeclaration( "MemberClass"); return appliedType(mcd, qualifyingType, literalType, parameterTuple); } else { TypeDeclaration cd = getLanguageModuleModelTypeDeclaration( "Class"); return appliedType(cd, literalType, parameterTuple); } } public Type getInterfaceMetatype(Type literalType) { Type qualifyingType = literalType.getQualifyingType(); if (qualifyingType!=null) { TypeDeclaration mid = getLanguageModuleModelTypeDeclaration( "MemberInterface"); return appliedType(mid, qualifyingType, literalType); } else { TypeDeclaration id = getLanguageModuleModelTypeDeclaration( "Interface"); return appliedType(id, literalType); } } public Type getTypeMetaType(Type literalType) { if (literalType.isUnion()) { TypeDeclaration utd = getLanguageModuleModelTypeDeclaration( "UnionType"); return appliedType(utd, literalType); } else if (literalType.isIntersection()) { TypeDeclaration itd = getLanguageModuleModelTypeDeclaration( "IntersectionType"); 
return appliedType(itd, literalType); } else { TypeDeclaration td = getLanguageModuleModelTypeDeclaration( "Type"); return appliedType(td, literalType); } } public Type getParameterTypesAsTupleType( List<Parameter> params, Reference reference) { List<Type> paramTypes = new ArrayList<Type> (params.size()); int max = params.size()-1; int firstDefaulted = -1; boolean sequenced = false; boolean atLeastOne = false; for (int i=0; i<=max; i++) { Parameter p = params.get(i); Type fullType; if (p.getModel() == null) { fullType = getUnknownType(); } else { if (reference==null) { //this special case is here because //TypeArgumentInference abuses this //API by passing a qualifying type //which does not actually own the //given parameters directly fullType = p.getModel() .getReference() .getFullType(); } else { fullType = reference.getTypedParameter(p) .getFullType(); } if (firstDefaulted<0 && p.isDefaulted()) { firstDefaulted = i; } if (i==max && p.isSequenced()) { sequenced = true; atLeastOne = p.isAtLeastOne(); if (fullType!=null) { fullType = getIteratedType(fullType); } } } paramTypes.add(fullType); } return getTupleType(paramTypes, sequenced, atLeastOne, firstDefaulted); } public Type getTailType(Type sequenceType, int fixedLength) { int i=0; Type tail = sequenceType; while (i++<fixedLength && tail!=null) { if (isTupleType(tail)) { List<Type> list = tail.getTypeArgumentList(); if (list.size()>=3) { tail = list.get(2); } else { tail = null; } } else { tail = null; } } return tail; } public Type getType(TypeDeclaration td) { return td==null ? 
getUnknownType() : td.getType(); } public Type getPackageDeclarationType() { return getType(getLanguageModuleDeclarationTypeDeclaration("Package")); } public Type getModuleDeclarationType() { return getType(getLanguageModuleDeclarationTypeDeclaration("Module")); } public Type getImportDeclarationType() { return getType(getLanguageModuleDeclarationTypeDeclaration("Import")); } public Type getClassDeclarationType() { return getType(getLanguageModuleDeclarationTypeDeclaration("ClassDeclaration")); } public Type getClassDeclarationType(Class clazz) { return clazz.hasConstructors() ? getType(getLanguageModuleDeclarationTypeDeclaration("ClassWithConstructorsDeclaration")) : getType(getLanguageModuleDeclarationTypeDeclaration("ClassWithInitializerDeclaration")); } public Type getConstructorDeclarationType() { return getType(getLanguageModuleDeclarationTypeDeclaration("ConstructorDeclaration")); } public Type getInterfaceDeclarationType() { return getType(getLanguageModuleDeclarationTypeDeclaration("InterfaceDeclaration")); } public Type getAliasDeclarationType() { return getType(getLanguageModuleDeclarationTypeDeclaration("AliasDeclaration")); } public Type getTypeParameterDeclarationType() { return getType(getLanguageModuleDeclarationTypeDeclaration("TypeParameter")); } public Type getFunctionDeclarationType() { return getType(getLanguageModuleDeclarationTypeDeclaration("FunctionDeclaration")); } public Type getValueDeclarationType() { return getType(getLanguageModuleDeclarationTypeDeclaration("ValueDeclaration")); } public Type getValueDeclarationType(TypedDeclaration value) { return !(value instanceof Value) || ((Value) value).isTransient() ? 
getValueDeclarationType() : getType(getLanguageModuleDeclarationTypeDeclaration("ReferenceDeclaration")); } public TypeDeclaration getAnnotationDeclaration() { return (TypeDeclaration) getLanguageModuleDeclaration("Annotation"); } public TypeDeclaration getConstrainedAnnotationDeclaration() { return (TypeDeclaration) getLanguageModuleDeclaration("ConstrainedAnnotation"); } public TypeDeclaration getSequencedAnnotationDeclaration() { return (TypeDeclaration) getLanguageModuleDeclaration("SequencedAnnotation"); } public TypeDeclaration getOptionalAnnotationDeclaration() { return (TypeDeclaration) getLanguageModuleDeclaration("OptionalAnnotation"); } public TypeDeclaration getDeclarationDeclaration() { return getLanguageModuleDeclarationTypeDeclaration("Declaration"); } public TypeCache getCache() { Module module = getPackage().getModule(); return module != null ? module.getCache() : null; } }
package com.solertium.util.gwt.charts.client;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;

/**
 * Collects one or more series of numeric values and renders them as the
 * parameter strings used by the Google Chart API: text format ("t:...") via
 * {@link #getData()}, simple encoding ("s:...") via {@link #getEncodedData()},
 * plus the companion scaling and legend strings.
 */
public class ChartData {

    /** Ordered list of series, each paired with its display extras. */
    private final List<ChartDataStruct> data;

    /** When non-null, returned verbatim by {@link #getScaling()}. */
    private String defaultScaling;

    public ChartData() {
        data = new ArrayList<ChartDataStruct>();
    }

    public void setDefaultScaling(String defaultScaling) {
        this.defaultScaling = defaultScaling;
    }

    /**
     * Adds a series with default extras: black legend color and an
     * auto-generated "Column N" legend name.
     *
     * @param data the values of the new series
     */
    public void addData(Collection<Double> data) {
        ChartDataExtras extras = new ChartDataExtras();
        extras.legendColors = new String[] {"000000"};
        extras.legendNames = new String[] {"Column " + (this.data.size() + 1)};
        addData(data, extras);
    }

    /**
     * Adds a series together with explicit legend/scaling extras.
     *
     * @param data            the values of the new series
     * @param chartDataExtras legend names/colors and scaling for the series
     */
    public void addData(Collection<Double> data, ChartDataExtras chartDataExtras) {
        this.data.add(new ChartDataStruct(data, chartDataExtras));
    }

    /**
     * Renders all series in chart text format: "t:" followed by
     * comma-separated values, with series separated by '|'.
     */
    public String getData() {
        final StringBuilder builder = new StringBuilder("t:");
        for (Iterator<ChartDataStruct> keyIter = data.iterator(); keyIter.hasNext(); ) {
            builder.append(toCommaSeparated(keyIter.next().data));
            if (keyIter.hasNext())
                builder.append('|');
        }
        return builder.toString();
    }

    /**
     * Renders all series in simple-encoded format: "s:" followed by one
     * encoded run per series, separated by ','. All series share the same
     * maximum value so they are encoded on a common scale.
     */
    public String getEncodedData() {
        // Find the largest per-series maximum; getMaxValue() never returns
        // less than 100, so the encoding scale has a floor of 100.
        double maxValue = 0;
        for (ChartDataStruct struct : data) {
            double max = getMaxValue(struct.data);
            if (max > maxValue)
                maxValue = max;
        }
        final StringBuilder builder = new StringBuilder("s:");
        for (Iterator<ChartDataStruct> keyIter = data.iterator(); keyIter.hasNext(); ) {
            builder.append(_encode(toCommaSeparated(keyIter.next().data),
                    Double.toString(maxValue)));
            if (keyIter.hasNext())
                builder.append(',');
        }
        return builder.toString();
    }

    /**
     * Returns the scaling parameter: the default scaling when one was set,
     * otherwise each series' own scaling comma-separated, substituting
     * "-1000,1000" for series that declare none.
     */
    public String getScaling() {
        if (defaultScaling != null)
            return defaultScaling;
        final StringBuilder csv = new StringBuilder();
        for (Iterator<ChartDataStruct> iter = data.iterator(); iter.hasNext(); ) {
            String out = iter.next().extras.scaling;
            csv.append(out == null ? "-1000,1000" : out);
            if (iter.hasNext())
                csv.append(',');
        }
        return csv.toString();
    }

    /**
     * Returns the '|'-separated legend names of all series that declare
     * them, or null when no series has legend names.
     */
    public String getLegendNames() {
        final StringBuilder builder = new StringBuilder();
        for (ChartDataStruct struct : data) {
            String name = struct.extras.getLegendNames();
            if (name != null) {
                // Prepend the separator so a trailing one is never emitted
                // when a later series has no legend names.
                if (builder.length() > 0)
                    builder.append('|');
                builder.append(name);
            }
        }
        return builder.length() == 0 ? null : builder.toString();
    }

    /**
     * Returns the ','-separated legend colors of all series that declare
     * them, or null when no series has legend colors.
     */
    public String getLegendColors() {
        final StringBuilder builder = new StringBuilder();
        for (ChartDataStruct struct : data) {
            String color = struct.extras.getLegendColors();
            if (color != null) {
                // Same trailing-separator guard as getLegendNames().
                if (builder.length() > 0)
                    builder.append(',');
                builder.append(color);
            }
        }
        return builder.length() == 0 ? null : builder.toString();
    }

    /**
     * Returns the largest positive value in the data set, or 100 when the
     * set is empty or contains no positive value.
     *
     * @param dataSet the values to scan
     */
    public Double getMaxValue(Collection<Double> dataSet) {
        double maxValue = 0;
        for (Double current : dataSet) {
            if (current.doubleValue() > maxValue)
                maxValue = current.doubleValue();
        }
        return Double.valueOf(maxValue > 0 ? maxValue : 100);
    }

    /** Joins the values of one series with commas, e.g. "1.0,2.5,3.0". */
    private static String toCommaSeparated(Collection<Double> values) {
        final StringBuilder csv = new StringBuilder();
        for (Iterator<Double> iter = values.iterator(); iter.hasNext(); ) {
            csv.append(iter.next());
            if (iter.hasNext())
                csv.append(',');
        }
        return csv.toString();
    }

    // GWT JSNI implementation of the Google Chart API "simple encoding":
    // each value maps to one character of a 62-character alphabet,
    // proportionally to maxValue; negative/non-numeric values become '_'.
    public static native String _encode(String csv, String maxValueStr) /*-{ var simpleEncoding = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789'; var maxValue = maxValueStr * 1; var valueArray = csv.split(","); var chartData = ['']; for (var i = 0; i < valueArray.length; i++) { var currentValue = valueArray[i]; if (!isNaN(currentValue) && currentValue >= 0) { chartData.push(simpleEncoding.charAt(Math.round((simpleEncoding.length-1) * currentValue / maxValue))); } else { chartData.push('_'); } } return chartData.join(''); }-*/;

    /** @return the number of series added so far */
    public int size() {
        return data.size();
    }

    /** Optional per-series display settings: legend names/colors, scaling. */
    public static class ChartDataExtras {

        private String scaling;
        private String[] legendNames;
        private String[] legendColors;

        public void setLegendColors(String... legendColors) {
            this.legendColors = legendColors;
        }

        public void setLegendNames(String... legendNames) {
            this.legendNames = legendNames;
        }

        public void setScaling(String scaling) {
            this.scaling = scaling;
        }

        /** Colors joined with '|', or null when none were set. */
        public String getLegendColors() {
            return legendColors == null ? null : toCSV(legendColors, '|');
        }

        /** Names joined with '|', or null when none were set. */
        public String getLegendNames() {
            return legendNames == null ? null : toCSV(legendNames, '|');
        }

        private String toCSV(String[] array, char separator) {
            final StringBuilder out = new StringBuilder();
            for (int i = 0; i < array.length; i++) {
                if (i > 0)
                    out.append(separator);
                out.append(array[i]);
            }
            return out.toString();
        }
    }

    /** Immutable pairing of a series' values with its display extras. */
    private static class ChartDataStruct {

        private final Collection<Double> data;
        private final ChartDataExtras extras;

        public ChartDataStruct(Collection<Double> data, ChartDataExtras extras) {
            this.data = data;
            this.extras = extras;
        }
    }
}
package dr.app.beauti.generator; import dr.app.beauti.components.ComponentFactory; import dr.app.beauti.components.ancestralstates.AncestralStatesComponentOptions; import dr.app.beauti.options.*; import dr.app.beauti.types.TreePriorType; import dr.app.beauti.util.XMLWriter; import dr.evolution.datatype.DataType; import dr.evolution.util.Taxa; import dr.evomodel.branchratemodel.BranchRateModel; import dr.evomodel.branchratemodel.MixtureModelBranchRates; import dr.evomodel.tree.TMRCAStatistic; import dr.evomodel.tree.TreeLengthStatistic; import dr.evomodel.tree.TreeModel; import dr.evomodelxml.branchratemodel.*; import dr.evomodelxml.tree.TreeLengthStatisticParser; import dr.evomodelxml.treelikelihood.MarkovJumpsTreeLikelihoodParser; import dr.inference.model.CompoundLikelihood; import dr.oldevomodelxml.clock.ACLikelihoodParser; import dr.evomodelxml.coalescent.CoalescentLikelihoodParser; import dr.evomodelxml.coalescent.GMRFSkyrideLikelihoodParser; import dr.evomodelxml.speciation.*; import dr.evomodelxml.tree.TMRCAStatisticParser; import dr.evomodelxml.tree.TreeLoggerParser; import dr.evomodelxml.tree.TreeModelParser; import dr.inference.model.ParameterParser; import dr.inferencexml.distribution.MixedDistributionLikelihoodParser; import dr.inferencexml.loggers.ColumnsParser; import dr.inferencexml.loggers.LoggerParser; import dr.inferencexml.model.CompoundLikelihoodParser; import dr.oldevomodelxml.treelikelihood.AncestralStateTreeLikelihoodParser; import dr.oldevomodelxml.treelikelihood.TreeLikelihoodParser; import dr.util.Attribute; import dr.xml.XMLParser; import java.util.ArrayList; import java.util.List; /** * @author Alexei Drummond * @author Andrew Rambaut * @author Walter Xie */ public class LogGenerator extends Generator { private final static String TREE_FILE_LOG = "treeFileLog"; private final static String SUB_TREE_FILE_LOG = "substTreeFileLog"; public LogGenerator(BeautiOptions options, ComponentFactory[] components) { super(options, components); } /** * 
write log to screen * * @param writer XMLWriter * @param clockModelGenerator ClockModelGenerator */ public void writeLogToScreen(XMLWriter writer, ClockModelGenerator clockModelGenerator, SubstitutionModelGenerator substitutionModelGenerator) { writer.writeComment("write log to screen"); writer.writeOpenTag(LoggerParser.LOG, new Attribute[]{ new Attribute.Default<String>(XMLParser.ID, "screenLog"), new Attribute.Default<String>(LoggerParser.LOG_EVERY, options.echoEvery + "") }); if (options.hasData()) { writer.writeOpenTag(ColumnsParser.COLUMN, new Attribute[]{ new Attribute.Default<String>(ColumnsParser.LABEL, "Joint"), new Attribute.Default<String>(ColumnsParser.DECIMAL_PLACES, "4"), new Attribute.Default<String>(ColumnsParser.WIDTH, "12") } ); writer.writeIDref(CompoundLikelihoodParser.JOINT, "joint"); writer.writeCloseTag(ColumnsParser.COLUMN); } writer.writeOpenTag(ColumnsParser.COLUMN, new Attribute[]{ new Attribute.Default<String>(ColumnsParser.LABEL, "Prior"), new Attribute.Default<String>(ColumnsParser.DECIMAL_PLACES, "4"), new Attribute.Default<String>(ColumnsParser.WIDTH, "12") } ); writer.writeIDref(CompoundLikelihoodParser.PRIOR, "prior"); writer.writeCloseTag(ColumnsParser.COLUMN); if (options.hasData()) { writer.writeOpenTag(ColumnsParser.COLUMN, new Attribute[]{ new Attribute.Default<String>(ColumnsParser.LABEL, "Likelihood"), new Attribute.Default<String>(ColumnsParser.DECIMAL_PLACES, "4"), new Attribute.Default<String>(ColumnsParser.WIDTH, "12") } ); writer.writeIDref(CompoundLikelihoodParser.LIKELIHOOD, "likelihood"); writer.writeCloseTag(ColumnsParser.COLUMN); } for (PartitionTreeModel model : options.getPartitionTreeModels()) { writer.writeOpenTag(ColumnsParser.COLUMN, new Attribute[]{ // new Attribute.Default<String>(ColumnsParser.LABEL, model.getPrefix() + TreeModelParser.ROOT_HEIGHT), // Switching to use 'rootAge' in screen log (an absolute date if tip dates are used) (model.hasTipCalibrations() ? 
new Attribute.Default<String>(ColumnsParser.LABEL, model.getPrefix() + "age(root)") : new Attribute.Default<String>(ColumnsParser.LABEL, model.getPrefix() + "rootHeight") ), new Attribute.Default<String>(ColumnsParser.SIGNIFICANT_FIGURES, "6"), new Attribute.Default<String>(ColumnsParser.WIDTH, "12") } ); if (model.hasTipCalibrations()) { writer.writeIDref(TMRCAStatisticParser.TMRCA_STATISTIC, model.getPrefix() + "age(root)"); } else { writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix() + TreeModel.TREE_MODEL + "." + TreeModelParser.ROOT_HEIGHT); } writer.writeCloseTag(ColumnsParser.COLUMN); } for (PartitionClockModel model : options.getPartitionClockModels()) { if (model.performModelAveraging() || !model.getClockRateParameter().isFixed()) { writer.writeOpenTag(ColumnsParser.COLUMN, new Attribute[]{ new Attribute.Default<String>(ColumnsParser.LABEL, clockModelGenerator.getClockRateString(model)), new Attribute.Default<String>(ColumnsParser.SIGNIFICANT_FIGURES, "6"), new Attribute.Default<String>(ColumnsParser.WIDTH, "12") } ); clockModelGenerator.writeAllClockRateRefs(model, writer); writer.writeCloseTag(ColumnsParser.COLUMN); } } for (PartitionSubstitutionModel model : options.getPartitionSubstitutionModels()) { if (model.getDataType().getType() == DataType.MICRO_SAT) substitutionModelGenerator.writeMicrosatSubstModelParameterRef(model, writer); } generateInsertionPoint(ComponentGenerator.InsertionPoint.IN_SCREEN_LOG, writer); writer.writeCloseTag(LoggerParser.LOG); generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_SCREEN_LOG, writer); } /** * write log to file * * @param writer XMLWriter * @param treePriorGenerator TreePriorGenerator * @param clockModelGenerator ClockModelGenerator * @param substitutionModelGenerator SubstitutionModelGenerator * @param treeLikelihoodGenerator TreeLikelihoodGenerator */ public void writeLogToFile(XMLWriter writer, TreePriorGenerator treePriorGenerator, ClockModelGenerator clockModelGenerator, 
SubstitutionModelGenerator substitutionModelGenerator, TreeLikelihoodGenerator treeLikelihoodGenerator, TMRCAStatisticsGenerator tmrcaStatisticsGenerator) { writer.writeComment("write log to file"); if (options.logFileName == null) { options.logFileName = options.fileNameStem + ".log"; } writer.writeOpenTag(LoggerParser.LOG, new Attribute[]{ new Attribute.Default<String>(XMLParser.ID, "fileLog"), new Attribute.Default<String>(LoggerParser.LOG_EVERY, options.logEvery + ""), new Attribute.Default<String>(LoggerParser.FILE_NAME, options.logFileName), new Attribute.Default<Boolean>(LoggerParser.ALLOW_OVERWRITE_LOG, options.allowOverwriteLog) }); if (options.hasData()) { writer.writeIDref(CompoundLikelihoodParser.JOINT, "joint"); } writer.writeIDref(CompoundLikelihoodParser.PRIOR, "prior"); if (options.hasData()) { writer.writeIDref(CompoundLikelihoodParser.LIKELIHOOD, "likelihood"); } for (PartitionTreeModel model : options.getPartitionTreeModels()) { writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix() + TreeModel.TREE_MODEL + "." + TreeModelParser.ROOT_HEIGHT); } // for convenience, log root age statistic - gives the absolute age of the root given the tip dates. 
// @todo check for redundancy with rootHeight - if no tip dates or given as heights (time before present) for (PartitionTreeModel model : options.getPartitionTreeModels()) { if (model.hasTipCalibrations()) { writer.writeIDref(TMRCAStatisticParser.TMRCA_STATISTIC, model.getPrefix() + "age(root)"); } } for (PartitionTreeModel model : options.getPartitionTreeModels()) { writer.writeIDref(TreeLengthStatisticParser.TREE_LENGTH_STATISTIC, model.getPrefix() + "treeLength"); } tmrcaStatisticsGenerator.writeTMRCAStatisticReferences(writer); for (PartitionTreePrior prior : options.getPartitionTreePriors()) { treePriorGenerator.writeParameterLog(prior, writer); } for (PartitionSubstitutionModel model : options.getPartitionSubstitutionModels()) { substitutionModelGenerator.writeLog(model, writer); } for (PartitionClockModel model : options.getPartitionClockModels()) { clockModelGenerator.writeLog(model, writer); } for (PartitionClockModel model : options.getPartitionClockModels()) { clockModelGenerator.writeLogStatistic(model, writer); } generateInsertionPoint(ComponentGenerator.InsertionPoint.IN_FILE_LOG_PARAMETERS, writer); treeLikelihoodGenerator.writeTreeLikelihoodReferences(writer); clockModelGenerator.writeClockLikelihoodReferences(writer); generateInsertionPoint(ComponentGenerator.InsertionPoint.IN_FILE_LOG_LIKELIHOODS, writer); // coalescentLikelihood for (PartitionTreeModel model : options.getPartitionTreeModels()) { PartitionTreePrior prior = model.getPartitionTreePrior(); if (prior.getNodeHeightPrior() != TreePriorType.EXTENDED_SKYLINE && prior.getNodeHeightPrior() != TreePriorType.SKYGRID) { // if not using a multi-locus model... 
treePriorGenerator.writePriorLikelihoodReferenceLog(prior, model, writer); writer.writeText(""); } } for (PartitionTreePrior prior : options.getPartitionTreePriors()) { if (prior.getNodeHeightPrior() == TreePriorType.EXTENDED_SKYLINE) { writer.writeIDref(CoalescentLikelihoodParser.COALESCENT_LIKELIHOOD, prior.getPrefix() + COALESCENT); // only 1 coalescent } else if (prior.getNodeHeightPrior() == TreePriorType.SKYGRID) { writer.writeIDref(GMRFSkyrideLikelihoodParser.SKYGRID_LIKELIHOOD, prior.getPrefix() + "skygrid"); } } writer.writeCloseTag(LoggerParser.LOG); generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_FILE_LOG, writer); } public void writeDemographicLogToFile(XMLWriter writer, TreePriorGenerator treePriorGenerator, ClockModelGenerator clockModelGenerator, SubstitutionModelGenerator substitutionModelGenerator, TreeLikelihoodGenerator treeLikelihoodGenerator) { writer.writeComment("demographic log file"); if (options.demographicLogFileName == null) { options.demographicLogFileName = options.fileNameStem + ".demo.log"; } String header = "Demographic Model: " + options.demographicModelName; writer.writeOpenTag(LoggerParser.LOG, new Attribute[]{ new Attribute.Default<String>(XMLParser.ID, "fileLog"), new Attribute.Default<String>(LoggerParser.HEADER, header + ""), new Attribute.Default<String>(LoggerParser.LOG_EVERY, options.logEvery + ""), new Attribute.Default<String>(LoggerParser.FILE_NAME, options.logFileName), new Attribute.Default<Boolean>(LoggerParser.ALLOW_OVERWRITE_LOG, options.allowOverwriteLog) }); if (options.hasData()) { writer.writeIDref(CompoundLikelihoodParser.JOINT, "joint"); } writer.writeIDref(CompoundLikelihoodParser.PRIOR, "prior"); for (PartitionTreeModel model : options.getPartitionTreeModels()) { writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix() + TreeModel.TREE_MODEL + "." 
                + TreeModelParser.ROOT_HEIGHT);
        }

        // One tmrca statistic reference per user-defined taxon set, prefixed with the
        // owning tree model so the column name is unambiguous across partitions.
        for (Taxa taxa : options.taxonSets) { // make tmrca(tree.name) easy to read in log for Tracer
            PartitionTreeModel treeModel = options.taxonSetsTreeModel.get(taxa);
            writer.writeIDref(TMRCAStatisticParser.TMRCA_STATISTIC,
                    "tmrca(" + treeModel.getPrefix() + taxa.getId() + ")");
        }

//        if ( options.shareSameTreePrior ) { // Share Same Tree Prior
//            treePriorGenerator.setModelPrefix("");
//            treePriorGenerator.writeParameterLog(options.activedSameTreePrior, writer);
//        } else { // no species
        // Log the parameters of every partition tree prior.
        for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
//            treePriorGenerator.setModelPrefix(prior.getPrefix()); // priorName.treeModel
            treePriorGenerator.writeParameterLog(prior, writer);
        }

        // Substitution-model parameters, one group per partition substitution model.
        for (PartitionSubstitutionModel model : options.getPartitionSubstitutionModels()) {
            substitutionModelGenerator.writeLog(model, writer);
        }

        // Clock-model parameters; the per-clock-type details are delegated to
        // clockModelGenerator (the old inline switch is kept below for reference).
        for (PartitionClockModel model : options.getPartitionClockModels()) {
//            if (model.getRateTypeOption() == FixRateType.FIXED_MEAN) {
//                writer.writeIDref(ParameterParser.PARAMETER, model.getName());
//            if (model.getClockType() == ClockType.UNCORRELATED) {
//                switch (model.getClockDistributionType()) {
//                    case LOGNORMAL:
//                        writer.writeIDref(ParameterParser.PARAMETER, model.getPrefix() + ClockType.UCLD_STDEV);
//                        break;
//                    case GAMMA:
//                        throw new UnsupportedOperationException("Uncorrelated gamma model not implemented yet");
////                        break;
//                    case CAUCHY:
//                        throw new UnsupportedOperationException("Uncorrelated Cauchy model not implemented yet");
////                        break;
//                    case EXPONENTIAL:
//                        // nothing required
//                        break;
            clockModelGenerator.writeLog(model, writer);
        }

        // Derived clock-rate statistics are logged separately from the raw parameters.
        for (PartitionClockModel model : options.getPartitionClockModels()) {
            clockModelGenerator.writeLogStatistic(model, writer);
        }

        generateInsertionPoint(ComponentGenerator.InsertionPoint.IN_FILE_LOG_PARAMETERS, writer);

        // Likelihood columns: data likelihoods, then clock likelihoods.
        treeLikelihoodGenerator.writeTreeLikelihoodReferences(writer);
        clockModelGenerator.writeClockLikelihoodReferences(writer);

        generateInsertionPoint(ComponentGenerator.InsertionPoint.IN_FILE_LOG_LIKELIHOODS, writer);

        // coalescentLikelihood: one prior-likelihood reference per tree model.
        for (PartitionTreeModel model : options.getPartitionTreeModels()) {
            PartitionTreePrior prior = model.getPartitionTreePrior();
            treePriorGenerator.writePriorLikelihoodReferenceLog(prior, model, writer);
            writer.writeText("");
        }

        // Extended-skyline and skygrid priors also contribute a coalescent likelihood column.
        for (PartitionTreePrior prior : options.getPartitionTreePriors()) {
            if (prior.getNodeHeightPrior() == TreePriorType.EXTENDED_SKYLINE) {
                writer.writeIDref(CoalescentLikelihoodParser.COALESCENT_LIKELIHOOD,
                        prior.getPrefix() + COALESCENT); // only 1 coalescent
            } else if (prior.getNodeHeightPrior() == TreePriorType.SKYGRID) {
                writer.writeIDref(GMRFSkyrideLikelihoodParser.SKYGRID_LIKELIHOOD,
                        prior.getPrefix() + "skygrid");
            }
        }

        writer.writeCloseTag(LoggerParser.LOG);

        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_FILE_LOG, writer);
    }

    /**
     * Write the tree log(s) to file: one time-tree log per partition tree model and,
     * when {@code options.substTreeLog} is set, an additional substitution-tree log
     * per tree model. File names are derived from {@code options.fileNameStem} and the
     * tree prefix; a ".txt" suffix is propagated from the first configured tree file name.
     *
     * @param writer XMLWriter
     */
    public void writeTreeLogToFile(XMLWriter writer) {
        writer.writeComment("write tree log to file");

        for (PartitionTreeModel tree : options.getPartitionTreeModels()) {
            String treeLogFileName;
            if (options.substTreeLog) {
                // Disambiguate from the "(subst)" log written below.
                treeLogFileName = options.fileNameStem + "." + tree.getPrefix() + "(time).trees";
            } else {
                treeLogFileName = options.fileNameStem + "." + tree.getPrefix() + "trees"; // stem.partitionName.tree
            }
            if (options.treeFileName.get(0).endsWith(".txt")) {
                treeLogFileName += ".txt";
            }
            writeTreeLogToFile(writer, treeLogFileName, tree);
        }

        if (options.substTreeLog) {
            // gene tree: a second log with branch lengths in substitution units
            for (PartitionTreeModel tree : options.getPartitionTreeModels()) {
                String treeLogFileName = options.fileNameStem + "."
                        + tree.getPrefix() + "(subst).trees";
                if (options.treeFileName.get(0).endsWith(".txt")) {
                    treeLogFileName += ".txt";
                }
                writeSubstTreeLogToFile(writer, treeLogFileName, tree);
            }
        }

        generateInsertionPoint(ComponentGenerator.InsertionPoint.AFTER_TREES_LOG, writer);
    }

    /**
     * Write a single (time) tree logger element for the given tree model, including
     * per-branch rate traits and, when data is present, the joint density column.
     *
     * @param writer          the XML writer
     * @param treeLogFileName the output file name for this tree log
     * @param tree            the partition tree model being logged
     */
    private void writeTreeLogToFile(XMLWriter writer, String treeLogFileName, PartitionTreeModel tree) {
        List<Attribute> attributes = new ArrayList<Attribute>();

        attributes.add(new Attribute.Default<String>(XMLParser.ID, tree.getPrefix() + TREE_FILE_LOG)); // partionName.treeFileLog
        attributes.add(new Attribute.Default<String>(TreeLoggerParser.LOG_EVERY, options.logEvery + ""));
        attributes.add(new Attribute.Default<String>(TreeLoggerParser.NEXUS_FORMAT, "true"));
        attributes.add(new Attribute.Default<String>(TreeLoggerParser.FILE_NAME, treeLogFileName));
        attributes.add(new Attribute.Default<String>(TreeLoggerParser.SORT_TRANSLATION_TABLE, "true"));

        // generate <logTree>
        writer.writeOpenTag(TreeLoggerParser.LOG_TREE, attributes);

        writer.writeIDref(TreeModel.TREE_MODEL, tree.getPrefix() + TreeModel.TREE_MODEL);

        writeTreeTraits(writer, tree);

        if (options.hasData()) {
            // we have data... log the joint density alongside each sampled tree
            writer.writeIDref(CompoundLikelihoodParser.JOINT, "joint");
        }

        generateInsertionPoint(ComponentGenerator.InsertionPoint.IN_TREES_LOG, tree, writer);

        writer.writeCloseTag(TreeLoggerParser.LOG_TREE);
    }

    /**
     * Write a tree logger element whose branch lengths are expressed in substitutions,
     * referencing the branch-rate model of the first clock model associated with the tree.
     * NOTE(review): only the first clock model of the tree's data partitions is used here
     * — presumably one clock per tree is assumed; confirm for multi-clock configurations.
     *
     * @param writer          the XML writer
     * @param treeLogFileName the output file name for this substitution-tree log
     * @param tree            the partition tree model being logged
     */
    private void writeSubstTreeLogToFile(XMLWriter writer, String treeLogFileName, PartitionTreeModel tree) {
        // write tree log to file
        writer.writeOpenTag(TreeLoggerParser.LOG_TREE, new Attribute[]{
                new Attribute.Default<String>(XMLParser.ID, tree.getPrefix() + SUB_TREE_FILE_LOG),
                new Attribute.Default<String>(TreeLoggerParser.LOG_EVERY, options.logEvery + ""),
                new Attribute.Default<String>(TreeLoggerParser.NEXUS_FORMAT, "true"),
                new Attribute.Default<String>(TreeLoggerParser.FILE_NAME, treeLogFileName),
                new Attribute.Default<String>(TreeLoggerParser.BRANCH_LENGTHS, TreeLoggerParser.SUBSTITUTIONS)
        });
        writer.writeIDref(TreeModel.TREE_MODEL, tree.getPrefix() + TreeModel.TREE_MODEL);

        PartitionClockModel model = options.getPartitionClockModels(options.getDataPartitions(tree)).get(0);
        String tag = "";
        String id = model.getPrefix() + BranchRateModel.BRANCH_RATES;

        // Map the clock type to the XML tag of its branch-rate model element.
        switch (model.getClockType()) {
            case STRICT_CLOCK:
                tag = StrictClockBranchRatesParser.STRICT_CLOCK_BRANCH_RATES;
                break;

            case UNCORRELATED:
                if (model.performModelAveraging()) {
                    tag = MixtureModelBranchRatesParser.MIXTURE_MODEL_BRANCH_RATES;
                } else {
                    tag = model.isContinuousQuantile() ?
                            ContinuousBranchRatesParser.CONTINUOUS_BRANCH_RATES :
                            DiscretizedBranchRatesParser.DISCRETIZED_BRANCH_RATES;
                }
                break;

            case RANDOM_LOCAL_CLOCK:
                tag = RandomLocalClockModelParser.LOCAL_BRANCH_RATES;
                break;

            case FIXED_LOCAL_CLOCK:
                tag = LocalClockModelParser.LOCAL_CLOCK_MODEL;
                break;

            case AUTOCORRELATED:
                tag = ACLikelihoodParser.AC_LIKELIHOOD;
                break;

            default:
                throw new IllegalArgumentException("Unknown clock model");
        }
        writer.writeIDref(tag, id);
        writeTreeTrait(writer, tag, id, BranchRateModel.RATE, model.getPrefix() + BranchRateModel.RATE);

        writer.writeCloseTag(TreeLoggerParser.LOG_TREE);
    }

    /**
     * Emit a rate tree-trait element for every clock model attached to the given tree,
     * selecting the branch-rate element tag appropriate to each clock type.
     *
     * @param writer the XML writer
     * @param tree   the partition tree model whose clock models are traversed
     */
    private void writeTreeTraits(XMLWriter writer, PartitionTreeModel tree) {

        for (PartitionClockModel model : options.getPartitionClockModels(options.getDataPartitions(tree))) {
            String prefix = model.getPrefix();

            switch (model.getClockType()) {
                case STRICT_CLOCK:
                    writeTreeTrait(writer, StrictClockBranchRatesParser.STRICT_CLOCK_BRANCH_RATES,
                            prefix + BranchRateModel.BRANCH_RATES,
                            BranchRateModel.RATE, prefix + BranchRateModel.RATE);
                    break;

                case UNCORRELATED:
                    writeTreeTrait(writer,
                            model.performModelAveraging() ?
                                    MixtureModelBranchRatesParser.MIXTURE_MODEL_BRANCH_RATES :
                                    model.isContinuousQuantile() ?
                                            ContinuousBranchRatesParser.CONTINUOUS_BRANCH_RATES :
                                            DiscretizedBranchRatesParser.DISCRETIZED_BRANCH_RATES,
                            prefix + BranchRateModel.BRANCH_RATES,
                            BranchRateModel.RATE, prefix + BranchRateModel.RATE);
                    break;

                case RANDOM_LOCAL_CLOCK:
                    writeTreeTrait(writer, RandomLocalClockModelParser.LOCAL_BRANCH_RATES,
                            prefix + BranchRateModel.BRANCH_RATES,
                            BranchRateModel.RATE, prefix + BranchRateModel.RATE);
                    break;

                case FIXED_LOCAL_CLOCK:
                    writeTreeTrait(writer, LocalClockModelParser.LOCAL_CLOCK_MODEL,
                            prefix + BranchRateModel.BRANCH_RATES,
                            BranchRateModel.RATE, prefix + BranchRateModel.RATE);
                    break;

                case AUTOCORRELATED:
                    // Autocorrelated clocks also need an IDref to the AC likelihood itself.
                    writer.writeIDref(ACLikelihoodParser.AC_LIKELIHOOD,
                            prefix + BranchRateModel.BRANCH_RATES);
                    writeTreeTrait(writer, ACLikelihoodParser.AC_LIKELIHOOD,
                            prefix + BranchRateModel.BRANCH_RATES,
                            BranchRateModel.RATE, model.getPrefix() + BranchRateModel.RATE);
                    break;

                default:
                    throw new IllegalArgumentException("Unknown clock model");
            }
        }
    }

    /**
     * Emit a single {@code <trait>} element inside a tree logger.
     *
     * @param writer      the XML writer
     * @param treeTraitTag XML tag of the element providing the trait (the branch-rate model)
     * @param treeTraitID  id of that element to reference
     * @param traitName    name of the trait as exposed by the provider
     * @param traitTag     tag under which the trait appears in the logged trees
     */
    private void writeTreeTrait(XMLWriter writer, String treeTraitTag, String treeTraitID,
                                String traitName, String traitTag) {

        writer.writeOpenTag(TreeLoggerParser.TREE_TRAIT, new Attribute[]{
                new Attribute.Default<String>(TreeLoggerParser.NAME, traitName),
                new Attribute.Default<String>(TreeLoggerParser.TAG, traitTag)
        });
        writer.writeIDref(treeTraitTag, treeTraitID);

        writer.writeCloseTag(TreeLoggerParser.TREE_TRAIT);
    }

}
package dr.inference.markovchain;

import dr.evomodel.operators.AbstractImportanceDistributionOperator;
import dr.evomodel.operators.SimpleMetropolizedGibbsOperator;
import dr.inference.model.Likelihood;
import dr.inference.model.Model;
import dr.inference.model.CompoundLikelihood;
import dr.inference.operators.*;
import dr.inference.prior.Prior;

import java.util.ArrayList;
import java.util.logging.Logger;

/**
 * A concrete markov chain. This is final as the only things that should need
 * overriding are in the delegates (prior, likelihood, schedule and acceptor).
 * The design of this class is to be fairly immutable as far as settings goes.
 *
 * @author Alexei Drummond
 * @author Andrew Rambaut
 * @version $Id: MarkovChain.java,v 1.10 2006/06/21 13:34:42 rambaut Exp $
 */
public final class MarkovChain {

    private final static boolean DEBUG = false;

    // Delegates: operator scheduling, MH acceptance rule, prior and likelihood.
    private final OperatorSchedule schedule;
    private final Acceptor acceptor;
    private final Prior prior;
    private final Likelihood likelihood;

    private boolean pleaseStop = false;     // cooperative stop flag, set by pleaseStop()
    private boolean isStopped = false;      // true once the chain loop has actually exited early
    private double bestScore, currentScore, initialScore;
    private int currentLength;              // number of states the chain has run so far

    private boolean useCoercion = true;     // global switch for operator auto-tuning

    // Full-evaluation self-test configuration: how long, and how many operations
    // per operator, the expensive consistency checks stay enabled.
    private final int fullEvaluationCount;
    private final int minOperatorCountForFullEvaluation;

    // Tolerance when comparing an incremental score against a full re-evaluation.
    private static final double EVALUATION_TEST_THRESHOLD = 1e-6;

    /**
     * Constructs the chain and evaluates the initial score.
     *
     * @param prior                            the prior (may be null)
     * @param likelihood                       the likelihood whose model is sampled
     * @param schedule                         supplies operators in proposal order
     * @param acceptor                         the acceptance rule (e.g. Metropolis-Hastings)
     * @param fullEvaluationCount              states for which full-evaluation testing runs
     * @param minOperatorCountForFullEvaluation minimum accept+reject count per operator before
     *                                         full-evaluation testing may be switched off
     * @param useCoercion                      whether operator coercion (auto-tuning) is enabled
     */
    public MarkovChain(Prior prior, Likelihood likelihood,
                       OperatorSchedule schedule, Acceptor acceptor,
                       int fullEvaluationCount, int minOperatorCountForFullEvaluation, boolean useCoercion) {

        currentLength = 0;
        this.prior = prior;
        this.likelihood = likelihood;
        this.schedule = schedule;
        this.acceptor = acceptor;
        this.useCoercion = useCoercion;
        this.fullEvaluationCount = fullEvaluationCount;
        this.minOperatorCountForFullEvaluation = minOperatorCountForFullEvaluation;

        currentScore = evaluate(likelihood, prior);
    }

    /**
     * Resets the markov chain
     */
    public void reset() {
        currentLength = 0;

        // reset operator acceptance levels
        for(int i = 0; i < schedule.getOperatorCount(); i++) {
            schedule.getOperator(i).reset();
        }
    }

    /**
     * Run the chain for a given number of states.
     *
     * @param length
     *            number of states to run the chain.
     * @param disableCoerce if true, operator coercion is suppressed for this run
     * @param onTheFlyOperatorWeights when &gt; 0, operator weights are re-adjusted
     *            every this-many accepted moves (see adjustOpWeights)
     * @return the new current length of the chain
     */
    public int chain(int length, boolean disableCoerce, int onTheFlyOperatorWeights) {

        currentScore = evaluate(likelihood, prior);

        int currentState = currentLength;

        final Model currentModel = likelihood.getModel();

        if( currentState == 0 ) {
            initialScore = currentScore;
            bestScore = currentScore;
            fireBestModel(currentState, currentModel);
        }

        if( currentScore == Double.NEGATIVE_INFINITY ) {
            // identify which component of the score is zero...
            if( prior != null ) {
                double logPrior = prior.getLogPrior(likelihood.getModel());

                if( logPrior == Double.NEGATIVE_INFINITY ) {
                    throw new IllegalArgumentException(
                            "The initial model is invalid because one of the priors has zero probability.");
                }
            }

            String message = "The initial likelihood is zero";
            if( likelihood instanceof CompoundLikelihood ) {
                message += ": " + ((CompoundLikelihood) likelihood).getDiagnosis();
            } else {
                message += "!";
            }
            throw new IllegalArgumentException(message);
        }

        pleaseStop = false;
        isStopped = false;

        // Countdown until the next on-the-fly operator weight adjustment (0 = disabled).
        int otfcounter = onTheFlyOperatorWeights > 0 ? onTheFlyOperatorWeights : 0;

        // logr is an out-parameter filled by the acceptor with the log acceptance ratio.
        double[] logr = {0.0};

        boolean usingFullEvaluation = true;
        // set ops count in mcmc element instead
//        if (fullEvaluationCount == 0) // Temporary solution until full code review
//            usingFullEvaluation = false;
        boolean fullEvaluationError = false;

        while( !pleaseStop && (currentState < (currentLength + length)) ) {

            // periodically log states
            fireCurrentModel(currentState, currentModel);

            if( pleaseStop ) {
                isStopped = true;
                break;
            }

            // Get the operator
            final int op = schedule.getNextOperatorIndex();
            final MCMCOperator mcmcOperator = schedule.getOperator(op);

            double oldScore = currentScore;

            // assert Profiler.startProfile("Store");

            // The current model is stored here in case the proposal fails
            if( currentModel != null ) {
                currentModel.storeModelState();
            }

            // assert Profiler.stopProfile("Store");

            boolean operatorSucceeded = true;
            double hastingsRatio = 1.0;
            boolean accept = false;

            logr[0] = -Double.MAX_VALUE;

            try {
                // The new model is proposed
                // assert Profiler.startProfile("Operate");

                if( DEBUG ) {
                    System.out.println("\n&& Operator: " + mcmcOperator.getOperatorName());
                }

                // Metropolized Gibbs / importance operators need the prior and
                // likelihood to compute their own Hastings ratio.
                if( mcmcOperator instanceof SimpleMetropolizedGibbsOperator ) {
                    hastingsRatio = ((SimpleMetropolizedGibbsOperator) mcmcOperator)
                            .operate(prior, likelihood);
                } else if( mcmcOperator instanceof AbstractImportanceDistributionOperator ) {
                    hastingsRatio = ((AbstractImportanceDistributionOperator) mcmcOperator)
                            .operate(prior, likelihood);
                } else {
                    hastingsRatio = mcmcOperator.operate();
                }

                // assert Profiler.stopProfile("Operate");
            } catch( OperatorFailedException e ) {
                // A failed proposal (e.g. out of bounds) counts as an automatic reject.
                operatorSucceeded = false;
            }

            double score = 0.0;
            double deviation = 0.0;

            if( operatorSucceeded ) {

                // The new model is proposed
                // assert Profiler.startProfile("Evaluate");

                if( DEBUG ) {
                    System.out.println("** Evaluate");
                }

                // The new model is evaluated
                score = evaluate(likelihood, prior);

                // assert Profiler.stopProfile("Evaluate");

                if (usingFullEvaluation) {
                    // This is a test that the state is correctly restored. The
                    // restored state is fully evaluated and the likelihood compared with
                    // that before the operation was made.
                    likelihood.makeDirty();
                    final double testScore = evaluate(likelihood, prior);

                    if( Math.abs(testScore - score) > EVALUATION_TEST_THRESHOLD ) {
                        Logger.getLogger("error").severe(
                                "State was not correctly calculated after an operator move.\n"
                                        + "Likelihood evaluation: " + score
                                        + "\nFull Likelihood evaluation: " + testScore
                                        + "\n" + "Operator: " + mcmcOperator + " "
                                        + mcmcOperator.getOperatorName());
                        fullEvaluationError = true;
                    }
                }

                if( score > bestScore ) {
                    bestScore = score;
                    fireBestModel(currentState, currentModel);
                }

                // Gibbs operators always accept; otherwise apply the MH acceptance rule.
                accept = mcmcOperator instanceof GibbsOperator
                        || acceptor.accept(oldScore, score, hastingsRatio, logr);

                deviation = score - oldScore;
            }

            // The new model is accepted or rejected
            if( accept ) {
                if( DEBUG ) {
                    System.out.println("** Move accepted: new score = " + score
                            + ", old score = " + oldScore);
                }

                mcmcOperator.accept(deviation);
                currentModel.acceptModelState();
                currentScore = score;

                if( otfcounter > 0 ) {
                    --otfcounter;
                    if( otfcounter == 0 ) {
                        adjustOpWeights(currentState);
                        otfcounter = onTheFlyOperatorWeights;
                    }
                }

                oldScore = score; // for the usingFullEvaluation test
            } else {
                if( DEBUG ) {
                    System.out.println("** Move rejected: new score = " + score
                            + ", old score = " + oldScore);
                }

                mcmcOperator.reject();

                // assert Profiler.startProfile("Restore");

                currentModel.restoreModelState();
            }
            // assert Profiler.stopProfile("Restore");

            if( usingFullEvaluation ) {
                // This is a test that the state is correctly restored. The
                // restored state is fully evaluated and the likelihood compared with
                // that before the operation was made.
                likelihood.makeDirty();
                final double testScore = evaluate(likelihood, prior);

                if( Math.abs(testScore - oldScore) > EVALUATION_TEST_THRESHOLD ) {
                    final Logger logger = Logger.getLogger("error");
                    logger.severe("State was not correctly restored after reject step.\n"
                            + "Likelihood before: " + oldScore
                            + " Likelihood after: " + testScore
                            + "\n" + "Operator: " + mcmcOperator + " "
                            + mcmcOperator.getOperatorName());
                    fullEvaluationError = true;
                }
            }

            if( !disableCoerce && mcmcOperator instanceof CoercableMCMCOperator ) {
                coerceAcceptanceProbability((CoercableMCMCOperator) mcmcOperator, logr[0]);
            }

            if (usingFullEvaluation &&
                    schedule.getMinimumAcceptAndRejectCount() >= minOperatorCountForFullEvaluation &&
                    currentState >= fullEvaluationCount ) {
                // full evaluation is only switched off when each operator has done a
                // minimum number of operations (currently 1) and fullEvalationCount
                // operations in total.

                usingFullEvaluation = false;
                if (fullEvaluationError) {
                    // If there has been an error then stop with an error
                    throw new RuntimeException(
                            "One or more evaluation errors occured during the test phase of this\n" +
                                    "run. These errors imply critical errors which may produce incorrect\n" +
                                    "results.");
                }
            }

            currentState += 1;
        }

        currentLength = currentState;

        fireFinished(currentLength);

        // Profiler.report();

        return currentLength;
    }

    /**
     * Redistribute operator weights based on each operator's observed span, keeping the
     * total weight of the adjusted operators unchanged. Progress is traced to stderr.
     *
     * @param currentState the chain state at which the adjustment happens (for tracing)
     */
    private void adjustOpWeights(int currentState) {
        final int count = schedule.getOperatorCount();
        double[] s = new double[count];
        final double factor = 100;       // scales span into a weight
        final double limitSpan = 1000;   // cap on the span used for weighting

        System.err.println("start cycle " + currentState);

        double sHas = 0.0/* , sNot = 0.0 */, nHas = 0.0;
        for(int no = 0; no < count; ++no) {
            final MCMCOperator op = schedule.getOperator(no);
            final double v = op.getSpan(true);
            if( v == 0 ) {
                // sNot += op.getWeight();
                s[no] = 0;
            } else {
                sHas += op.getWeight();
                s[no] = Math.max(factor * Math.min(v, limitSpan), 1);
                nHas += s[no];
            }
        }

//        for(int no = 0; no < count; ++no) {
//            final MCMCOperator op = schedule.getOperator(no);
//            final double v = op.getSpan(false);
//            if( v == 0 ) {
//                System.err.println(op.getOperatorName() + " blocks");
//                return;

        // keep sum of changed parts unchanged
        final double scaleHas = sHas / nHas;

        for(int no = 0; no < count; ++no) {
            final MCMCOperator op = schedule.getOperator(no);
            if( s[no] > 0 ) {
                final double val = s[no] * scaleHas;
                op.setWeight(val);
                System.err.println("set " + op.getOperatorName() + " " + val);
            } else {
                System.err.println("** " + op.getOperatorName() + " = " + op.getWeight());
            }
        }
        schedule.operatorsHasBeenUpdated();
    }

    public Prior getPrior() {
        return prior;
    }

    public Likelihood getLikelihood() {
        return likelihood;
    }

    public Model getModel() {
        return likelihood.getModel();
    }

    public OperatorSchedule getSchedule() {
        return schedule;
    }

    public Acceptor getAcceptor() {
        return acceptor;
    }

    public double getInitialScore() {
        return initialScore;
    }

    public double getBestScore() {
        return bestScore;
    }

    public int getCurrentLength() {
        return currentLength;
    }

    public void setCurrentLength(int currentLength) {
        this.currentLength = currentLength;
    }

    public double getCurrentScore() {
        return currentScore;
    }

    /** Requests a cooperative stop; the chain exits at the next loop iteration. */
    public void pleaseStop() {
        pleaseStop = true;
    }

    public boolean isStopped() {
        return isStopped;
    }

    /**
     * Computes log(prior) + log(likelihood); returns -Infinity when the prior is
     * impossible or the likelihood is NaN (treated as an invalid state).
     *
     * @param likelihood the likelihood to evaluate
     * @param prior      the prior (may be null, in which case only the likelihood counts)
     * @return the log posterior, or Double.NEGATIVE_INFINITY for invalid states
     */
    private double evaluate(Likelihood likelihood, Prior prior) {

        double logPosterior = 0.0;

        if( prior != null ) {
            final double logPrior = prior.getLogPrior(likelihood.getModel());

            if( logPrior == Double.NEGATIVE_INFINITY ) {
                return Double.NEGATIVE_INFINITY;
            }

            logPosterior += logPrior;
        }

        final double logLikelihood = likelihood.getLogLikelihood();

        if( Double.isNaN(logLikelihood) ) {
            return Double.NEGATIVE_INFINITY;
        }
        // System.err.println("** " + logPosterior + " + " + logLikelihood +
        // " = " + (logPosterior + logLikelihood));
        logPosterior += logLikelihood;

        return logPosterior;
    }

    /**
     * Updates the proposal parameter, based on the target acceptance
     * probability This method relies on the proposal parameter being a
     * decreasing function of acceptance probability.
     *
     * @param op   The operator
     * @param logr the log acceptance ratio of the last proposal
     */
    private void coerceAcceptanceProbability(CoercableMCMCOperator op, double logr) {

        if( isCoercable(op) ) {
            final double p = op.getCoercableParameter();

            // Step size decays as 1/(i+1), i.e. a Robbins-Monro style update.
            final double i = schedule.getOptimizationTransform(MCMCOperator.Utils.getOperationCount(op));

            final double target = op.getTargetAcceptanceProbability();

            final double newp = p + ((1.0 / (i + 1.0)) * (Math.exp(logr) - target));

            if( newp > -Double.MAX_VALUE && newp < Double.MAX_VALUE ) {
                op.setCoercableParameter(newp);
            }
        }
    }

    // An operator is coerced when it forces coercion on, or allows it and the
    // chain-level useCoercion switch is set.
    private boolean isCoercable(CoercableMCMCOperator op) {

        return op.getMode() == CoercionMode.COERCION_ON
                || (op.getMode() != CoercionMode.COERCION_OFF && useCoercion);
    }

    public void addMarkovChainListener(MarkovChainListener listener) {
        listeners.add(listener);
    }

    public void removeMarkovChainListener(MarkovChainListener listener) {
        listeners.remove(listener);
    }

    /** Notifies all listeners that a new best-scoring model was found. */
    public void fireBestModel(int state, Model bestModel) {

        for(MarkovChainListener listener : listeners) {
            listener.bestState(state, bestModel);
        }
    }

    /** Notifies all listeners of the current state (used for periodic logging). */
    public void fireCurrentModel(int state, Model currentModel) {
        for(MarkovChainListener listener : listeners) {
            listener.currentState(state, currentModel);
        }
    }

    /** Notifies all listeners that the chain has finished running. */
    public void fireFinished(int chainLength) {

        for(MarkovChainListener listener : listeners) {
            listener.finished(chainLength);
        }
    }

    // Listeners receiving best/current/finished notifications.
    private final ArrayList<MarkovChainListener> listeners = new ArrayList<MarkovChainListener>();
}
package dr.inference.operators;

import dr.inference.model.Bounds;
import dr.inference.model.Parameter;
import dr.math.MathUtils;
import dr.xml.*;

/**
 * A generic scale operator for use with a multi-dimensional parameters.
 * Either scale all dimentions at once or scale one dimention at a time.
 * An optional bit vector and a threshold is used to vary the rate of the individual dimentions according
 * to their on/off status. For example a threshold of 1 means pick only "on" dimentions.
 *
 * @author Alexei Drummond
 * @author Andrew Rambaut
 * @version $Id: ScaleOperator.java,v 1.20 2005/06/14 10:40:34 rambaut Exp $
 */
public class ScaleOperator extends SimpleMCMCOperator implements CoercableMCMCOperator {

    // XML element/attribute names used by the PARSER below.
    public static final String SCALE_OPERATOR = "scaleOperator";
    public static final String SCALE_ALL = "scaleAll";
    public static final String SCALE_ALL_IND = "scaleAllIndependently";
    public static final String SCALE_FACTOR = "scaleFactor";
    public static final String DEGREES_OF_FREEDOM = "df";
    public static final String INDICATORS = "indicators";
    public static final String PICKONEPROB = "pickoneprob";

    // Optional on/off bit vector selecting which dimensions may be scaled.
    private Parameter indicator;
    // Probability of picking among the "on" dimensions (vs the "off" ones).
    private double indicatorOnProb;

    /**
     * @param parameter       the parameter to scale
     * @param scaleAll        scale every dimension by the same factor
     * @param degreesOfFreedom adjusts the Hastings ratio when scaleAll is used
     * @param scale           the tuning scale factor, in (0, 1)
     * @param mode            coercion mode constant
     * @param indicator       optional indicator parameter (may be null)
     * @param indicatorOnProb probability of choosing an "on" dimension
     * @param scaleAllInd     scale every dimension by an independent factor
     */
    public ScaleOperator(Parameter parameter, boolean scaleAll, int degreesOfFreedom, double scale,
                         int mode, Parameter indicator, double indicatorOnProb, boolean scaleAllInd) {

        this.parameter = parameter;
        this.indicator = indicator;
        this.indicatorOnProb = indicatorOnProb;
        this.scaleAll = scaleAll;
        this.scaleAllIndependently = scaleAllInd;
        this.scaleFactor = scale;
        this.mode = mode;
        this.degreesOfFreedom = degreesOfFreedom;
    }

    /**
     * @return the parameter this operator acts on.
     */
    public Parameter getParameter() {
        return parameter;
    }

    /**
     * change the parameter and return the hastings ratio.
     */
    public final double doOperation() throws OperatorFailedException {

        // Draw a scale uniformly in [scaleFactor, 1/scaleFactor).
        final double scale = (scaleFactor + (MathUtils.nextDouble() * ((1.0 / scaleFactor) - scaleFactor)));

        double logq;

        final Bounds bounds = parameter.getBounds();
        final int dim = parameter.getDimension();

        if (scaleAllIndependently) {
            // update all dimensions independently.
            logq = 0;
            for (int i = 0; i < dim; i++) {

                final double scaleOne = (scaleFactor + (MathUtils.nextDouble() * ((1.0 / scaleFactor) - scaleFactor)));
                final double value = scaleOne * parameter.getParameterValue(i);

                logq -= Math.log(scaleOne);

                if (value < bounds.getLowerLimit(i) || value > bounds.getUpperLimit(i)) {
                    throw new OperatorFailedException("proposed value outside boundaries");
                }

                parameter.setParameterValue(i, value);
            }
        } else if (scaleAll) {
            // update all dimensions
            // hasting ratio is dim-2 times of 1dim case. would be nice to have a reference here
            // for the proof. It is supposed to be somewhere in an Alexei/Nicholes article.
            if (degreesOfFreedom > 0)
                // For parameters with non-uniform prior on only one dimension
                logq = -degreesOfFreedom * Math.log(scale);
            else
                logq = (parameter.getDimension() - 2) * Math.log(scale);

            for (int i = 0; i < dim; i++) {
                final double value = parameter.getParameterValue(i) * scale;
                if (value < bounds.getLowerLimit(i) || value > bounds.getUpperLimit(i)) {
                    throw new OperatorFailedException("proposed value outside boundaries");
                }
                parameter.setParameterValue(i, value);
            }
        } else {
            logq = -Math.log(scale);

            // which bit to scale
            int index;
            if (indicator != null) {
                final int idim = indicator.getDimension();

                // When the indicator is one shorter than the parameter, dimension 0
                // is treated as implicitly "on".
                final boolean impliedOne = idim == (dim - 1);

                // available bit locations
                int[] loc = new int[idim + 1];
                int nLoc = 0;

                // choose active or non active ones?
                final boolean takeOne = indicatorOnProb >= 1.0 || MathUtils.nextDouble() < indicatorOnProb;

                if (impliedOne && takeOne) {
                    loc[nLoc] = 0;
                    ++nLoc;
                }
                for (int i = 0; i < idim; i++) {
                    final double value = indicator.getStatisticValue(i);
                    if (takeOne == (value > 0)) {
                        loc[nLoc] = i + (impliedOne ? 1 : 0);
                        ++nLoc;
                    }
                }

                if (nLoc > 0) {
                    final int rand = MathUtils.nextInt(nLoc);
                    index = loc[rand];
                } else {
                    throw new OperatorFailedException("no active indicators");
                }
            } else {
                // any is good
                index = MathUtils.nextInt(dim);
            }

            final double oldValue = parameter.getParameterValue(index);
            final double newValue = scale * oldValue;

            if (newValue < bounds.getLowerLimit(index) || newValue > bounds.getUpperLimit(index)) {
                throw new OperatorFailedException("proposed value outside boundaries");
            }

            parameter.setParameterValue(index, newValue);

            // provides a hook for subclasses
            cleanupOperation(newValue, oldValue);
        }

        return logq;
    }

    /**
     * This method should be overridden by operators that need to do something just before the return of doOperation.
     *
     * @param newValue the proposed parameter value
     * @param oldValue the old parameter value
     */
    void cleanupOperation(double newValue, double oldValue) {
        // DO NOTHING
    }

    //MCMCOperator INTERFACE
    public final String getOperatorName() {
        return "scale(" + parameter.getParameterName() + ")";
    }

    // Coercible parameter is a logit-like transform of scaleFactor so the tuner
    // can work on an unbounded value.
    public double getCoercableParameter() {
        return Math.log(1.0 / scaleFactor - 1.0);
    }

    public void setCoercableParameter(double value) {
        scaleFactor = 1.0 / (Math.exp(value) + 1.0);
    }

    public double getRawParameter() {
        return scaleFactor;
    }

    public int getMode() {
        return mode;
    }

    public double getScaleFactor() {
        return scaleFactor;
    }

    public double getTargetAcceptanceProbability() {
        return 0.234;
    }

    public double getMinimumAcceptanceLevel() {
        return 0.1;
    }

    public double getMaximumAcceptanceLevel() {
        return 0.4;
    }

    public double getMinimumGoodAcceptanceLevel() {
        return 0.20;
    }

    public double getMaximumGoodAcceptanceLevel() {
        return 0.30;
    }

    /** Suggests a better scaleFactor when the observed acceptance rate is off target. */
    public final String getPerformanceSuggestion() {

        double prob = MCMCOperator.Utils.getAcceptanceProbability(this);
        double targetProb = getTargetAcceptanceProbability();
        dr.util.NumberFormatter formatter = new dr.util.NumberFormatter(5);
        double sf = OperatorUtils.optimizeScaleFactor(scaleFactor, prob, targetProb);
        if (prob < getMinimumGoodAcceptanceLevel()) {
            return "Try setting scaleFactor to about " + formatter.format(sf);
        } else if (prob > getMaximumGoodAcceptanceLevel()) {
            return "Try setting scaleFactor to about " + formatter.format(sf);
        } else return "";
    }

    // XML parser for <scaleOperator> elements.
    public static dr.xml.XMLObjectParser PARSER = new dr.xml.AbstractXMLObjectParser() {

        public String getParserName() {
            return SCALE_OPERATOR;
        }

        public Object parseXMLObject(XMLObject xo) throws XMLParseException {

            boolean scaleAll = false;
            boolean scaleAllInd = false;
            int degreesOfFreedom = 0;
            int mode = CoercableMCMCOperator.DEFAULT;

            if (xo.hasAttribute(SCALE_ALL)) {
                scaleAll = xo.getBooleanAttribute(SCALE_ALL);
            }

            if (xo.hasAttribute(SCALE_ALL_IND)) {
                scaleAllInd = xo.getBooleanAttribute(SCALE_ALL_IND);
            }

            if (xo.hasAttribute(DEGREES_OF_FREEDOM)) {
                degreesOfFreedom = xo.getIntegerAttribute(DEGREES_OF_FREEDOM);
            }

            if (xo.hasAttribute(AUTO_OPTIMIZE)) {
                mode = xo.getBooleanAttribute(AUTO_OPTIMIZE) ?
                        CoercableMCMCOperator.COERCION_ON : CoercableMCMCOperator.COERCION_OFF;
            }

            final double weight = xo.getDoubleAttribute(WEIGHT);
            final double scaleFactor = xo.getDoubleAttribute(SCALE_FACTOR);

            if (scaleFactor <= 0.0 || scaleFactor >= 1.0) {
                throw new XMLParseException("scaleFactor must be between 0.0 and 1.0");
            }

            final Parameter parameter = (Parameter) xo.getChild(Parameter.class);

            Parameter indicator = null;
            double indicatorOnProb = 1.0;
            final XMLObject cxo = (XMLObject) xo.getChild(INDICATORS);

            if (cxo != null) {
                indicator = (Parameter) cxo.getChild(Parameter.class);
                if (cxo.hasAttribute(PICKONEPROB)) {
                    indicatorOnProb = cxo.getDoubleAttribute(PICKONEPROB);
                    if (!(0 <= indicatorOnProb && indicatorOnProb <= 1)) {
                        throw new XMLParseException("pickoneprob must be between 0.0 and 1.0");
                    }
                }
            }

            ScaleOperator operator = new ScaleOperator(parameter, scaleAll,
                    degreesOfFreedom, scaleFactor, mode, indicator, indicatorOnProb, scaleAllInd);
            operator.setWeight(weight);
            return operator;
        }
package dr.inferencexml.loggers;

import dr.app.beast.BeastVersion;
import dr.inference.loggers.*;
import dr.math.MathUtils;
import dr.util.FileHelpers;
import dr.util.Identifiable;
import dr.util.Property;
import dr.xml.*;

import java.io.File;
import java.io.PrintWriter;
import java.util.Date;

/**
 * @author Alexei Drummond
 * @author Andrew Rambaut
 */
public class LoggerParser extends AbstractXMLObjectParser {

    // Element and attribute names of the <log> XML element.
    public static final String LOG = "log";
    public static final String ECHO = "echo";
    public static final String ECHO_EVERY = "echoEvery";
    public static final String TITLE = "title";
    public static final String FILE_NAME = FileHelpers.FILE_NAME;
    public static final String FORMAT = "format";
    public static final String TAB = "tab";
    public static final String HTML = "html";
    public static final String PRETTY = "pretty";
    public static final String LOG_EVERY = "logEvery";
    public static final String ALLOW_OVERWRITE_LOG = "overwrite";

    public static final String COLUMNS = "columns";
    public static final String COLUMN = "column";
    public static final String LABEL = "label";
    public static final String SIGNIFICANT_FIGURES = "sf";
    public static final String DECIMAL_PLACES = "dp";
    public static final String WIDTH = "width";

    public String getParserName() {
        return LOG;
    }

    /**
     * @return an object based on the XML element it was passed.
     */
    public Object parseXMLObject(XMLObject xo) throws XMLParseException {

        // You must say how often you want to log
        final int logEvery = xo.getIntegerAttribute(LOG_EVERY);

        String fileName = null;
        if (xo.hasAttribute(FILE_NAME)) {
            fileName = xo.getStringAttribute(FILE_NAME);
        }

        boolean allowOverwrite = false;
        if (xo.hasAttribute(ALLOW_OVERWRITE_LOG)) {
            allowOverwrite = xo.getBooleanAttribute(ALLOW_OVERWRITE_LOG);
        }

        // override with a runtime set System Property
        if (System.getProperty("allow.overwrite") != null) {
            allowOverwrite = Boolean.parseBoolean(System.getProperty("allow.overwrite", "false"));
        }

        // Refuse to clobber an existing log file unless overwriting is explicitly allowed.
        if (fileName != null && (!allowOverwrite)) {
            File f = new File(fileName);
            if (f.exists()) {
                throw new XMLParseException("\nThe log file " + fileName + " already exists in the working directory." +
                        "\nYou cannot overwrite it, unless adding an attribute " + ALLOW_OVERWRITE_LOG + "=\"true\" in "
                        + LOG + " element in xml.\nFor example: <" + LOG + " ... fileName=\"" + fileName
                        + "\" " + ALLOW_OVERWRITE_LOG + "=\"true\">");
            }
        }

        final PrintWriter pw = XMLParser.getFilePrintWriter(xo, getParserName());

        final LogFormatter formatter = new TabDelimitedFormatter(pw);

        boolean performanceReport = false;

        if (!xo.hasAttribute(FILE_NAME)) {
            // is a screen log
            performanceReport = true;
        }

        // added a performance measurement delay to avoid the full evaluation period.
        final MCLogger logger = new MCLogger(fileName, formatter, logEvery, performanceReport, 10000);

        if (xo.hasAttribute(TITLE)) {
            logger.setTitle(xo.getStringAttribute(TITLE));
        } else {
            // Default title: BEAST version, build and the RNG seed for reproducibility.
            final BeastVersion version = new BeastVersion();

            final String title = "BEAST " + version.getVersionString() +
                    ", " + version.getBuildString() + "\n" +

                    "Generated " + (new Date()).toString() + " [seed=" + MathUtils.getSeed() + "]";
            logger.setTitle(title);
        }

        // Attach each child as a column: explicit Columns, Loggable objects, or
        // generic fallbacks keyed by id / attribute name / class name.
        for (int i = 0; i < xo.getChildCount(); i++) {

            final Object child = xo.getChild(i);

            if (child instanceof Columns) {
                logger.addColumns(((Columns) child).getColumns());
            } else if (child instanceof Loggable) {
                logger.add((Loggable) child);
            } else if (child instanceof Identifiable) {
                logger.addColumn(new LogColumn.Default(((Identifiable) child).getId(), child));
            } else if (child instanceof Property) {
                logger.addColumn(new LogColumn.Default(((Property) child).getAttributeName(), child));
            } else {
                logger.addColumn(new LogColumn.Default(child.getClass().toString(), child));
            }
        }

        return logger;
    }

    /** Convenience accessor for the log's PrintWriter, delegating to XMLParser. */
    public static PrintWriter getLogFile(XMLObject xo, String parserName) throws XMLParseException {
        return XMLParser.getFilePrintWriter(xo, parserName);
    }
package org.wyona.yanel.servlet;

import java.io.InputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.io.Writer;
import java.net.URL;
import java.util.Enumeration;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.wyona.yanel.core.Path;
import org.wyona.yanel.core.Resource;
import org.wyona.yanel.core.ResourceTypeDefinition;
import org.wyona.yanel.core.ResourceTypeRegistry;
import org.wyona.yanel.core.api.attributes.ModifiableV1;
import org.wyona.yanel.core.api.attributes.ModifiableV2;
import org.wyona.yanel.core.api.attributes.ViewableV1;
import org.wyona.yanel.core.attributes.viewable.View;
import org.wyona.yanel.core.map.Map;
import org.wyona.yanel.core.map.MapFactory;
import org.wyona.yanel.core.map.Realm;
import org.wyona.yanel.util.ResourceAttributeHelper;
import org.wyona.security.core.IdentityManagerFactory;
import org.wyona.security.core.PolicyManagerFactory;
import org.wyona.security.core.api.Identity;
import org.wyona.security.core.api.IdentityManager;
import org.wyona.security.core.api.PolicyManager;
import org.wyona.security.core.api.Role;
import org.apache.log4j.Category;
import org.apache.avalon.framework.configuration.Configuration;
import org.apache.avalon.framework.configuration.DefaultConfigurationBuilder;
import org.w3c.dom.Document;
import org.w3c.dom.Element;

/**
 * Main Yanel servlet: authenticates and authorizes each request, then
 * dispatches PROPFIND/GET/POST/PUT to resource-type implementations looked up
 * via the realm map and resource type registry.
 *
 * NOTE(review): this copy of the source is extraction-damaged — several string
 * literals containing "://" were truncated at "http:" (the rest of the original
 * line was lost). Those spots are flagged below and must be restored from the
 * original repository before this file can compile.
 */
public class YanelServlet extends HttpServlet {

    private static Category log = Category.getInstance(YanelServlet.class);

    private ServletConfig config;
    // Collaborators wired up in init(). Package-private visibility as in the original.
    ResourceTypeRegistry rtr;
    PolicyManager pm;
    IdentityManager im;
    Map map;

    // Session attribute key under which the authenticated Identity is stored.
    private static String IDENTITY_KEY = "identity";

    // Reverse-proxy settings, copied from the resource type registry in init();
    // null means "no proxy rewriting" in getRequestURLQS().
    String proxyServerName = null;
    String proxyPort = null;
    String proxyPrefix = null;

    private static final String METHOD_PROPFIND = "PROPFIND";
    private static final String METHOD_GET = "GET";
    private static final String METHOD_POST = "POST";
    private static final String METHOD_PUT = "PUT";

    /**
     * Initializes registry, policy/identity managers, realm map and proxy settings.
     * NOTE(review): does not call super.init(config), so GenericServlet.getServletConfig()
     * would return null — the class keeps its own config field instead. Confirm intended.
     */
    public void init(ServletConfig config) {
        this.config = config;
        rtr = new ResourceTypeRegistry();
        PolicyManagerFactory pmf = PolicyManagerFactory.newInstance();
        pm = pmf.newPolicyManager();
        IdentityManagerFactory imf = IdentityManagerFactory.newInstance();
        im = imf.newIdentityManager();
        MapFactory mf = MapFactory.newInstance();
        map = mf.newMap();
        proxyServerName = rtr.proxyHostName;
        proxyPort = rtr.proxyPort;
        proxyPrefix = rtr.proxyPrefix;
    }

    /**
     * Entry point for every request: logs client headers, handles logout,
     * authentication and authorization, then dispatches by HTTP method.
     * A non-null return from doLogout/doAuthenticate/doAuthorize means the
     * response has already been written and processing stops.
     */
    public void service(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        String httpAcceptMediaTypes = request.getHeader("Accept");
        log.debug("HTTP Accept Media Types: " + httpAcceptMediaTypes);
        String httpUserAgent = request.getHeader("User-Agent");
        log.debug("HTTP User Agent: " + httpUserAgent);
        String httpAcceptLanguage = request.getHeader("Accept-Language");
        log.debug("HTTP Accept Language: " + httpAcceptLanguage);
        // Logout from Yanel
        String yanelUsecase = request.getParameter("yanel.usecase");
        if(yanelUsecase != null && yanelUsecase.equals("logout")) {
            if(doLogout(request, response) != null) return;
        }
        // Authentication
        if(doAuthenticate(request, response) != null) return;
        // Check authorization
        if(doAuthorize(request, response) != null) return;
        // Delegate ...
        String method = request.getMethod();
        if (method.equals(METHOD_PROPFIND)) {
            doPropfind(request, response);
        } else if (method.equals(METHOD_GET)) {
            doGet(request, response);
        } else if (method.equals(METHOD_POST)) {
            doPost(request, response);
        } else if (method.equals(METHOD_PUT)) {
            doPut(request, response);
        } else {
            // NOTE(review): unknown methods are only logged; no 405 is sent to the client.
            log.error("No such method implemented: " + method);
        }
    }

    /** HTTP GET: delegates to getContent(). */
    public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        getContent(request, response);
    }

    /**
     * Core GET handling: builds a diagnostic DOM document (request/session info,
     * resource type, errors), resolves the resource for the servlet path, renders
     * its view and streams the view's InputStream to the client. On any failure
     * the DOM document is serialized via setYanelOutput() with an error status.
     */
    private void getContent(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        View view = null;
        org.w3c.dom.Document doc = null;
        javax.xml.parsers.DocumentBuilderFactory dbf= javax.xml.parsers.DocumentBuilderFactory.newInstance();
        try {
            javax.xml.parsers.DocumentBuilder parser = dbf.newDocumentBuilder();
            org.w3c.dom.DOMImplementation impl = parser.getDOMImplementation();
            org.w3c.dom.DocumentType doctype = null;
            // NOTE(review): the namespace-URI argument of createDocument was truncated
            // at "http:" in this copy — the remaining arguments of the call were lost.
            // Restore from the original source; do not guess the URI.
            doc = impl.createDocument("http:
        } catch(Exception e) {
            log.error(e.getMessage(), e);
            throw new ServletException(e.getMessage());
        }
        Element rootElement = doc.getDocumentElement();
        String servletContextRealPath = config.getServletContext().getRealPath("/");
        rootElement.setAttribute("servlet-context-real-path", servletContextRealPath);
        Element requestElement = (Element) rootElement.appendChild(doc.createElement("request"));
        requestElement.setAttribute("uri", request.getRequestURI());
        requestElement.setAttribute("servlet-path", request.getServletPath());
        HttpSession session = request.getSession(true);
        Element sessionElement = (Element) rootElement.appendChild(doc.createElement("session"));
        sessionElement.setAttribute("id", session.getId());
        // NOTE(review): "enum" became a reserved keyword in Java 5 — this identifier
        // only compiles on pre-1.5 compilers and should be renamed (e.g. attrNames).
        Enumeration enum = session.getAttributeNames();
        if (!enum.hasMoreElements()) {
            Element sessionNoAttributesElement = (Element) sessionElement.appendChild(doc.createElement("no-attributes"));
        }
        // Mirror every session attribute into the diagnostic document.
        while (enum.hasMoreElements()) {
            String name = (String)enum.nextElement();
            String value = session.getAttribute(name).toString();
            Element sessionAttributeElement = (Element) sessionElement.appendChild(doc.createElement("attribute"));
            sessionAttributeElement.setAttribute("name", name);
            sessionAttributeElement.appendChild(doc.createTextNode(value));
        }
        String rti = map.getResourceTypeIdentifier(new Path(request.getServletPath()));
        Resource res = null;
        long lastModified = -1;
        if (rti != null) {
            ResourceTypeDefinition rtd = rtr.getResourceTypeDefinition(rti);
            if (rtd == null) {
                String message = "No such resource type registered: " + rti + ", check " + rtr.getConfigurationFile();
                log.error(message);
                Element exceptionElement = (Element) rootElement.appendChild(doc.createElement("exception"));
                exceptionElement.appendChild(doc.createTextNode(message));
                setYanelOutput(response, doc);
                response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
                return;
            }
            Element rtiElement = (Element) rootElement.appendChild(doc.createElement("resource-type-identifier"));
            rtiElement.setAttribute("namespace", rtd.getResourceTypeNamespace());
            rtiElement.setAttribute("local-name", rtd.getResourceTypeLocalName());
            try {
                res = rtr.newResource(rti);
                if (res != null) {
                    res.setRTD(rtd);
                    Element resourceElement = (Element) rootElement.appendChild(doc.createElement("resource"));
                    if (ResourceAttributeHelper.hasAttributeImplemented(res, "Viewable", "1")) {
                        Element viewElement = (Element) resourceElement.appendChild(doc.createElement("view"));
                        viewElement.appendChild(doc.createTextNode("View Descriptors: " + ((ViewableV1) res).getViewDescriptors()));
                        String viewId = request.getParameter("yanel.resource.viewid");
                        try {
                            view = ((ViewableV1) res).getView(request, viewId);
                        } catch(org.wyona.yarep.core.NoSuchNodeException e) {
                            // TODO: Log all 404 within a dedicated file (with client info attached) such that an admin can react to it ...
                            String message = "No such node exception: " + e;
                            log.warn(e);
                            //log.error(e.getMessage(), e);
                            Element exceptionElement = (Element) rootElement.appendChild(doc.createElement("exception"));
                            exceptionElement.appendChild(doc.createTextNode(message));
                            response.setStatus(javax.servlet.http.HttpServletResponse.SC_NOT_FOUND);
                            setYanelOutput(response, doc);
                            return;
                        } catch(Exception e) {
                            // NOTE(review): the same exception is logged twice here.
                            log.error(e.getMessage(), e);
                            String message = e.toString();
                            log.error(e.getMessage(), e);
                            Element exceptionElement = (Element) rootElement.appendChild(doc.createElement("exception"));
                            exceptionElement.appendChild(doc.createTextNode(message));
                            response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
                            setYanelOutput(response, doc);
                            return;
                        }
                    } else {
                        Element noViewElement = (Element) resourceElement.appendChild(doc.createElement("no-view"));
                        noViewElement.appendChild(doc.createTextNode(res.getClass().getName() + " is not viewable!"));
                    }
                    if (ResourceAttributeHelper.hasAttributeImplemented(res, "Modifiable", "2")) {
                        lastModified = ((ModifiableV2) res).getLastModified(new Path(request.getServletPath()));
                        Element lastModifiedElement = (Element) resourceElement.appendChild(doc.createElement("last-modified"));
                        lastModifiedElement.appendChild(doc.createTextNode(new java.util.Date(lastModified).toString()));
                    } else {
                        Element noLastModifiedElement = (Element) resourceElement.appendChild(doc.createElement("no-last-modified"));
                    }
                } else {
                    Element resourceIsNullElement = (Element) rootElement.appendChild(doc.createElement("resource-is-null"));
                }
            } catch(Exception e) {
                log.error(e.getMessage(), e);
                String message = e.toString();
                log.error(e.getMessage(), e);
                Element exceptionElement = (Element) rootElement.appendChild(doc.createElement("exception"));
                exceptionElement.appendChild(doc.createTextNode(message));
                setYanelOutput(response, doc);
                response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
                return;
            }
        } else {
            Element noRTIFoundElement = (Element) rootElement.appendChild(doc.createElement("no-resource-type-identifier-found"));
            noRTIFoundElement.setAttribute("servlet-path", request.getServletPath());
        }
        String usecase = request.getParameter("yanel.resource.usecase");
        if (usecase != null && usecase.equals("checkout")) {
            log.debug("Checkout data ...");
            // TODO: Implement checkout ...
            log.warn("Acquire lock has not been implemented yet ...!");
            // acquireLock();
        }
        String meta = request.getParameter("yanel.resource.meta");
        if (meta != null) {
            // NOTE(review): meta handling is a stub — only the diagnostic document is returned.
            if (meta.length() > 0) {
                log.error("DEBUG: meta length: " + meta.length());
            } else {
                log.error("DEBUG: Show all meta");
            }
            response.setStatus(javax.servlet.http.HttpServletResponse.SC_OK);
            setYanelOutput(response, doc);
            return;
        }
        if (view != null) {
            response.setContentType(patchContentType(view.getMimeType(), request));
            InputStream is = view.getInputStream();
            byte buffer[] = new byte[8192];
            int bytesRead;
            if (is != null) {
                // TODO: Yarep does not set returned Stream to null resp. is missing Exception Handling for the constructor. Exceptions should be handled here, but rather within Yarep or whatever repository layer is being used ...
                // First read doubles as an emptiness check before committing the response.
                bytesRead = is.read(buffer);
                if (bytesRead == -1) {
                    String message = "InputStream of view does not seem to contain any data!";
                    Element exceptionElement = (Element) rootElement.appendChild(doc.createElement("exception"));
                    exceptionElement.appendChild(doc.createTextNode(message));
                    setYanelOutput(response, doc);
                    response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
                    return;
                }
                // TODO: Compare If-Modified-Since with lastModified and return 304 without content resp. check on ETag
                String ifModifiedSince = request.getHeader("If-Modified-Since");
                if (ifModifiedSince != null) {
                    log.error("DEBUG: TODO: Implement 304 ...");
                }
                java.io.OutputStream os = response.getOutputStream();
                os.write(buffer, 0, bytesRead);
                while ((bytesRead = is.read(buffer)) != -1) {
                    os.write(buffer, 0, bytesRead);
                }
                // NOTE(review): the input stream is never closed on this path.
                if(lastModified >= 0) response.setDateHeader("Last-Modified", lastModified);
                return;
            } else {
                String message = "InputStream of view is null!";
                Element exceptionElement = (Element) rootElement.appendChild(doc.createElement("exception"));
                exceptionElement.appendChild(doc.createTextNode(message));
            }
        } else {
            String message = "View is null!";
            Element exceptionElement = (Element) rootElement.appendChild(doc.createElement("exception"));
            exceptionElement.appendChild(doc.createTextNode(message));
        }
        setYanelOutput(response, doc);
        response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
        return;
    }

    /**
     * HTTP POST: "save"/"checkin" usecases delegate to save(); an Atom entry
     * body creates a new entry resource; anything else falls through to
     * getContent().
     */
    public void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        String value = request.getParameter("yanel.resource.usecase");
        if (value != null && value.equals("save")) {
            log.debug("Save data ...");
            save(request, response);
            return;
        } else if (value != null && value.equals("checkin")) {
            log.debug("Checkin data ...");
            save(request, response);
            // TODO: Implement checkin ...
            log.warn("Release lock has not been implemented yet ...");
            // releaseLock();
            return;
        } else {
            log.warn("No parameter yanel.resource.usecase!");
            String contentType = request.getContentType();
            log.error("DEBUG: Content Type: " + contentType);
            InputStream in = intercept(request.getInputStream());
            // NOTE(review): getContentType() can return null (no body), which
            // would make this equals() call throw a NullPointerException.
            if (contentType.equals("application/atom+xml")) {
                try {
                    // NOTE(review): the resource-type identifier literal was truncated
                    // at "<{http:" in this copy — restore from the original source.
                    Resource atomEntry = rtr.newResource("<{http:
                    // TODO: Replace hardcoded path ...
                    Path entryPath = new Path("/demo/atom/entries/" + new java.util.Date().getTime() + ".xml");
                    //Path entryPath = new Path("/atom/entries/" + new java.util.Date().getTime() + ".xml");
                    OutputStream out = ((ModifiableV2)atomEntry).getOutputStream(entryPath);
                    byte buffer[] = new byte[8192];
                    int bytesRead;
                    while ((bytesRead = in.read(buffer)) != -1) {
                        out.write(buffer, 0, bytesRead);
                    }
                    log.error("DEBUG: Atom entry has been created: " + entryPath);
                    // Echo the stored entry back to the client.
                    InputStream resourceIn = ((ModifiableV2)atomEntry).getInputStream(entryPath);
                    OutputStream responseOut = response.getOutputStream();
                    while ((bytesRead = resourceIn.read(buffer)) != -1) {
                        responseOut.write(buffer, 0, bytesRead);
                    }
                    // TODO: Fix Location ...
                    response.setHeader("Location", "http://ulysses.wyona.org" + entryPath);
                    response.setStatus(javax.servlet.http.HttpServletResponse.SC_CREATED);
                    return;
                } catch (Exception e) {
                    // NOTE(review): wrapping loses the original stack trace (pre-1.6 IOException has no cause ctor).
                    log.error(e.getMessage(), e);
                    throw new IOException(e.getMessage());
                }
            }
            getContent(request, response);
        }
    }

    /**
     * HTTP PUT implementation
     */
    public void doPut(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        // TODO: Reuse code doPost resp. share code with doPut
        String value = request.getParameter("yanel.resource.usecase");
        if (value != null && value.equals("save")) {
            log.debug("Save data ...");
            save(request, response);
            return;
        } else if (value != null && value.equals("checkin")) {
            log.debug("Checkin data ...");
            save(request, response);
            // TODO: Implement checkin ...
            log.warn("Release lock has not been implemented yet ...!");
            // releaseLock();
            return;
        } else {
            log.warn("No parameter yanel.resource.usecase!");
            getContent(request, response);
        }
    }

    /**
     * Resolves the resource for the request's servlet path via the realm map
     * and resource type registry; returns null if no type is mapped or
     * instantiation fails (failure is logged, not propagated).
     */
    private Resource getResource(HttpServletRequest request) {
        String rti = map.getResourceTypeIdentifier(new Path(request.getServletPath()));
        if (rti != null) {
            ResourceTypeDefinition rtd = rtr.getResourceTypeDefinition(rti);
            try {
                Resource res = rtr.newResource(rti);
                res.setRTD(rtd);
                return res;
            } catch(Exception e) {
                log.error(e.getMessage(), e);
                return null;
            }
        } else {
            log.error("<no-resource-type-identifier-found servlet-path=\""+request.getServletPath()+"\"/>");
            return null;
        }
    }

    /**
     * Saves the request body to the resource addressed by the servlet path.
     * XML/XHTML bodies are first checked for well-formedness (entities resolved
     * via a local catalog); the body is buffered fully in memory before writing.
     */
    private void save(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        log.debug("Save data ...");
        InputStream in = request.getInputStream();
        java.io.ByteArrayOutputStream baos = new java.io.ByteArrayOutputStream();
        byte[] buf = new byte[8192];
        int bytesR;
        while ((bytesR = in.read(buf)) != -1) {
            baos.write(buf, 0, bytesR);
        }
        // Buffer within memory (TODO: Maybe replace with File-buffering ...)
        byte[] memBuffer = baos.toByteArray();
        // Check on well-formedness ...
        String contentType = request.getContentType();
        log.debug("Content-Type: " + contentType);
        // NOTE(review): contentType may be null here, which would NPE on equals().
        if (contentType.equals("application/xml") || contentType.equals("application/xhtml+xml")) {
            javax.xml.parsers.DocumentBuilderFactory dbf= javax.xml.parsers.DocumentBuilderFactory.newInstance();
            try {
                javax.xml.parsers.DocumentBuilder parser = dbf.newDocumentBuilder();
                // TODO: Get log messages into log4j ...
                //parser.setErrorHandler(...);
                // NOTE: DOCTYPE is being resolved/retrieved (e.g. xhtml schema from w3.org) also
                // if isValidating is set to false.
                // Hence, for performance and network reasons we use a local catalog ...
                // TODO: What about a resolver factory?
                parser.setEntityResolver(new org.apache.xml.resolver.tools.CatalogResolver());
                parser.parse(new java.io.ByteArrayInputStream(memBuffer));
                //org.w3c.dom.Document document = parser.parse(new ByteArrayInputStream(memBuffer));
            } catch (org.xml.sax.SAXException e) {
                // Body is not well-formed: report and abort the save.
                log.warn("Data is not well-formed: "+e.getMessage());
                StringBuffer sb = new StringBuffer();
                sb.append("<?xml version=\"1.0\"?>");
                // NOTE(review): exception-namespace literal truncated at "http:" in this copy.
                sb.append("<exception xmlns=\"http:
                sb.append("<message>Data is not well-formed: "+e.getMessage()+"</message>");
                sb.append("</exception>");
                response.setContentType("application/xml");
                response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
                PrintWriter w = response.getWriter();
                w.print(sb);
                return;
            } catch (Exception e) {
                log.error(e.getMessage(), e);
                StringBuffer sb = new StringBuffer();
                sb.append("<?xml version=\"1.0\"?>");
                // NOTE(review): exception-namespace literal truncated at "http:" in this copy.
                sb.append("<exception xmlns=\"http:
                //sb.append("<message>" + e.getStackTrace() + "</message>");
                //sb.append("<message>" + e.getMessage() + "</message>");
                sb.append("<message>" + e + "</message>");
                sb.append("</exception>");
                response.setContentType("application/xml");
                response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
                PrintWriter w = response.getWriter();
                w.print(sb);
                return;
            }
            log.info("Data seems to be well-formed :-)");
        }
        /*
        if (bytesRead == -1) {
            response.setContentType("text/plain");
            PrintWriter writer = response.getWriter();
            writer.print("No content!");
            return;
        }
        */
        OutputStream out = null;
        Resource res = getResource(request);
        // Prefer the Modifiable V1 interface; fall back to V2.
        if (ResourceAttributeHelper.hasAttributeImplemented(res, "Modifiable", "1")) {
            out = ((ModifiableV1) res).getOutputStream(new Path(request.getServletPath()));
        } else if (ResourceAttributeHelper.hasAttributeImplemented(res, "Modifiable", "2")) {
            out = ((ModifiableV2) res).getOutputStream(new Path(request.getServletPath()));
        } else {
            String message = res.getClass().getName() + " is not modifiable (neither V1 nor V2)!";
            log.warn(message);
            StringBuffer sb = new StringBuffer();
            // TODO: Differentiate between Neutron based and other clients ...
            /*
            sb.append("<?xml version=\"1.0\"?>");
            sb.append("<html>");
            sb.append("<body>");
            sb.append("<resource>" + message + "</resource>");
            sb.append("</body>");
            sb.append("</html>");
            response.setContentType("application/xhtml+xml");
            */
            sb.append("<?xml version=\"1.0\"?>");
            // NOTE(review): exception-namespace literal truncated at "http:" in this copy.
            sb.append("<exception xmlns=\"http:
            sb.append("<message>" + message + "</message>");
            sb.append("</exception>");
            response.setContentType("application/xml");
            PrintWriter w = response.getWriter();
            w.print(sb);
            response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
            return;
        }
        if (out != null) {
            log.debug("Content-Type: " + contentType);
            // TODO: Compare mime-type from response with mime-type of resource
            //if (contentType.equals("text/xml")) { ... }
            byte[] buffer = new byte[8192];
            int bytesRead;
            java.io.ByteArrayInputStream memIn = new java.io.ByteArrayInputStream(memBuffer);
            while ((bytesRead = memIn.read(buffer)) != -1) {
                out.write(buffer, 0, bytesRead);
            }
            // NOTE(review): out is never closed/flushed explicitly here.
            StringBuffer sb = new StringBuffer();
            sb.append("<?xml version=\"1.0\"?>");
            sb.append("<html>");
            sb.append("<body>");
            sb.append("<p>Data has been saved ...</p>");
            sb.append("</body>");
            sb.append("</html>");
            response.setStatus(javax.servlet.http.HttpServletResponse.SC_OK);
            response.setContentType("application/xhtml+xml");
            PrintWriter w = response.getWriter();
            w.print(sb);
            log.info("Data has been saved ...");
            return;
        } else {
            log.error("OutputStream is null!");
            StringBuffer sb = new StringBuffer();
            sb.append("<?xml version=\"1.0\"?>");
            sb.append("<html>");
            sb.append("<body>");
            sb.append("<p>Exception: OutputStream is null!</p>");
            sb.append("</body>");
            sb.append("</html>");
            PrintWriter w = response.getWriter();
            w.print(sb);
            response.setContentType("application/xhtml+xml");
            response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
            return;
        }
    }

    /** * Authorize request * TODO: Replace hardcoded roles by mapping between
roles amd query strings ... */ private HttpServletResponse doAuthorize(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { Role role = null; String value = request.getParameter("yanel.resource.usecase"); if (value != null && value.equals("save")) { log.debug("Save data ..."); role = new Role("write"); } else if (value != null && value.equals("checkin")) { log.debug("Checkin data ..."); role = new Role("write"); } else if (value != null && value.equals("checkout")) { log.debug("Checkout data ..."); role = new Role("open"); } else { log.debug("No parameter yanel.resource.usecase!"); role = new Role("view"); } boolean authorized = false; // HTTP BASIC Authorization (For clients without session handling, e.g. OpenOffice or cadaver) String authorization = request.getHeader("Authorization"); log.debug("Checking for Authorization Header: " + authorization); if (authorization != null) { if (authorization.toUpperCase().startsWith("BASIC")) { log.debug("Using BASIC authorization ..."); // Get encoded user and password, comes after "BASIC " String userpassEncoded = authorization.substring(6); // Decode it, using any base 64 decoder sun.misc.BASE64Decoder dec = new sun.misc.BASE64Decoder(); String userpassDecoded = new String(dec.decodeBuffer(userpassEncoded)); log.error("DEBUG: userpassDecoded: " + userpassDecoded); // TODO: Use security package and remove hardcoded ... // Authenticate every request ... 
//if (im.authenticate(...)) { if (userpassDecoded.equals("lenya:levi")) { //return pm.authorize(new org.wyona.commons.io.Path(request.getServletPath()), new Identity(...), new Role("view")); authorized = true; return null; } authorized = false; PrintWriter writer = response.getWriter(); writer.print("BASIC Authorization/Authentication Failed!"); response.sendError(response.SC_UNAUTHORIZED); return response; } else if (authorization.toUpperCase().startsWith("DIGEST")) { log.error("DIGEST is not implemented"); authorized = false; PrintWriter writer = response.getWriter(); writer.print("DIGEST is not implemented!"); response.sendError(response.SC_UNAUTHORIZED); return response; } else { log.warn("No such authorization implemented resp. handled by session based authorization: " + authorization); authorized = false; } } // Custom Authorization log.debug("Do session based custom authorization"); //String[] groupnames = {"null", "null"}; HttpSession session = request.getSession(true); Identity identity = (Identity) session.getAttribute(IDENTITY_KEY); if (identity == null) { log.debug("Identity is WORLD"); identity = new Identity(); } authorized = pm.authorize(new org.wyona.commons.io.Path(request.getServletPath()), identity, role); if(!authorized) { log.warn("Access denied: " + getRequestURLQS(request, null, false)); // TODO: Shouldn't this be here instead at the beginning of service() ...? //if(doAuthenticate(request, response) != null) return response; // HTTP Authorization/Authentication // TODO: Ulysses has not HTTP BASIC or DIGEST implemented yet! 
/* response.setHeader("WWW-Authenticate", "BASIC realm=\"yanel\""); response.sendError(response.SC_UNAUTHORIZED); */ // Custom Authorization/Authentication // TODO: Check if this is a neutron request or just a common GET request StringBuffer sb = new StringBuffer(""); String neutronVersions = request.getHeader("Neutron"); String clientSupportedAuthScheme = request.getHeader("WWW-Authenticate"); Realm realm = map.getRealm(new Path(request.getServletPath())); if (clientSupportedAuthScheme != null && clientSupportedAuthScheme.equals("Neutron-Auth")) { log.debug("Neutron Versions supported by client: " + neutronVersions); log.debug("Authentication Scheme supported by client: " + clientSupportedAuthScheme); sb.append("<?xml version=\"1.0\"?>"); sb.append("<exception xmlns=\"http: sb.append("<message>Authorization denied: " + getRequestURLQS(request, null, true) + "</message>"); sb.append("<authentication>"); sb.append("<original-request url=\"" + getRequestURLQS(request, null, true) + "\"/>"); //TODO: Also support https ... sb.append("<login url=\"" + getRequestURLQS(request, "yanel.usecase=neutron-auth", true) + "\" method=\"POST\">"); sb.append("<form>"); sb.append("<message>Enter username and password for \"" + realm.getName() + "\" at \"" + realm.getMountPoint() + "\"</message>"); sb.append("<param description=\"Username\" name=\"username\"/>"); sb.append("<param description=\"Password\" name=\"password\"/>"); sb.append("</form>"); sb.append("</login>"); // NOTE: Needs to be a full URL, because user might switch the server ... 
sb.append("<logout url=\"" + getRequestURLQS(request, "yanel.usecase=logout", true) + "\" realm=\"" + realm.getName() + "\"/>"); sb.append("</authentication>"); sb.append("</exception>"); log.debug("Neutron-Auth response: " + sb); response.setContentType("application/xml"); response.setStatus(javax.servlet.http.HttpServletResponse.SC_UNAUTHORIZED); } else { // Custom HTML Form authentication // TODO: Use configurable XSLT for layout, whereas each realm should be able to overwrite ... sb.append("<?xml version=\"1.0\"?>"); sb.append("<html xmlns=\"http: sb.append("<body>"); sb.append("<p>Authorization denied: " + getRequestURLQS(request, null, true) + "</p>"); sb.append("<p>Enter username and password for realm \"" + realm.getName() + "\" at \"" + realm.getMountPoint() + "\" (Context Path: " + request.getContextPath() + ")</p>"); sb.append("<form method=\"POST\">"); sb.append("<p>"); sb.append("<table>"); sb.append("<tr><td>Username:</td><td>&#160;</td><td><input type=\"text\" name=\"yanel.login.username\"/></td></tr>"); sb.append("<tr><td>Password:</td><td>&#160;</td><td><input type=\"password\" name=\"yanel.login.password\"/></td></tr>"); sb.append("<tr><td colspan=\"2\">&#160;</td><td align=\"right\"><input type=\"submit\" value=\"Login\"/></td></tr>"); sb.append("</table>"); sb.append("</p>"); sb.append("</form>"); sb.append("</body>"); sb.append("</html>"); response.setContentType("application/xhtml+xml"); } PrintWriter w = response.getWriter(); w.print(sb); return response; } else { log.info("Access granted: " + getRequestURLQS(request, null, false)); return null; } } private String getRequestURLQS(HttpServletRequest request, String addQS, boolean xml) { URL url = null; try { url = new URL(request.getRequestURL().toString()); if (proxyServerName != null) { url = new URL(url.getProtocol(), proxyServerName, url.getPort(), url.getFile()); } if (proxyPort != null) { if (proxyPort.length() > 0) { url = new URL(url.getProtocol(), url.getHost(), new 
Integer(proxyPort).intValue(), url.getFile()); } else { url = new URL(url.getProtocol(), url.getHost(), url.getDefaultPort(), url.getFile()); } } if (proxyPrefix != null) { url = new URL(url.getProtocol(), url.getHost(), url.getPort(), url.getFile().substring(proxyPrefix.length())); } if(proxyServerName != null || proxyPort != null || proxyPrefix != null) { log.debug("Proxy enabled request: " + url); } } catch (Exception e) { log.error(e); } String urlQS = url.toString(); if (request.getQueryString() != null) { urlQS = urlQS + "?" + request.getQueryString(); if (addQS != null) urlQS = urlQS + "&" + addQS; } else { if (addQS != null) urlQS = urlQS + "?" + addQS; } if (xml) urlQS = urlQS.replaceAll("&", "&amp;"); log.debug("Request: " + urlQS); return urlQS; } public void doPropfind(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { response.setContentType("application/xml"); StringBuffer sb = new StringBuffer(""); sb.append("<?xml version=\"1.0\"?>"); sb.append("<D:multistatus xmlns:D=\"DAV:\">"); sb.append("<D:response xmlns:lp1=\"DAV:\">"); sb.append("<D:href>" + request.getRequestURL() + "</D:href>"); sb.append("<D:propstat>"); sb.append("<D:prop>"); sb.append("<lp1:resourcetype>"); //sb.append("<D:collection/>"); sb.append("<D:resource/>"); sb.append("</lp1:resourcetype>"); //sb.append("<D:getcontenttype>httpd/unix-directory</D:getcontenttype>"); sb.append("</D:prop>"); sb.append("</D:propstat>"); sb.append("<D:status>HTTP/1.1 200 OK</D:status>"); sb.append("</D:response>"); sb.append("</D:multistatus>"); log.error("DEBUG: " + sb.toString()); response.setStatus(response.SC_OK); PrintWriter writer = response.getWriter(); writer.print(sb.toString()); return; } public HttpServletResponse doAuthenticate(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { Realm realm = map.getRealm(new Path(request.getServletPath())); // HTML Form based authentication String loginUsername = 
request.getParameter("yanel.login.username"); if(loginUsername != null) { HttpSession session = request.getSession(true); if (im.authenticate(loginUsername, request.getParameter("yanel.login.password"), realm.getID())) { log.debug("Realm: " + realm); session.setAttribute(IDENTITY_KEY, new Identity(loginUsername, null)); return null; } else { log.warn("Login failed: " + loginUsername); // TODO: Implement form based response ... response.setHeader("WWW-Authenticate", "BASIC realm=\"yanel\""); response.sendError(response.SC_UNAUTHORIZED); return response; } } // Neutron-Auth based authentication String yanelUsecase = request.getParameter("yanel.usecase"); if(yanelUsecase != null && yanelUsecase.equals("neutron-auth")) { log.debug("Neutron Authentication ..."); String username = null; String password = null; String originalRequest = null; DefaultConfigurationBuilder builder = new DefaultConfigurationBuilder(); try { Configuration config = builder.build(request.getInputStream()); Configuration originalRequestConfig = config.getChild("original-request"); originalRequest = originalRequestConfig.getAttribute("url", null); Configuration[] paramConfig = config.getChildren("param"); for (int i = 0; i < paramConfig.length; i++) { String paramName = paramConfig[i].getAttribute("name", null); if (paramName != null) { if (paramName.equals("username")) { username = paramConfig[i].getValue(); } else if (paramName.equals("password")) { password = paramConfig[i].getValue(); } } } } catch(Exception e) { log.warn(e); } log.debug("Username: " + username); if (username != null) { HttpSession session = request.getSession(true); log.debug("Realm ID: " + realm.getID()); if (im.authenticate(username, password, realm.getID())) { log.info("Authentication successful: " + username); session.setAttribute(IDENTITY_KEY, new Identity(username, null)); // TODO: send some XML content, e.g. 
<authentication-successful/> response.setContentType("text/plain"); response.setStatus(response.SC_OK); PrintWriter writer = response.getWriter(); writer.print("Neutron Authentication Successful!"); return response; } else { log.warn("Neutron Authentication failed: " + username); // TODO: Refactor this code with the one from doAuthenticate ... log.debug("Original Request: " + originalRequest); StringBuffer sb = new StringBuffer(""); sb.append("<?xml version=\"1.0\"?>"); sb.append("<exception xmlns=\"http: sb.append("<message>Authentication failed!</message>"); sb.append("<authentication>"); // TODO: ... sb.append("<original-request url=\"" + originalRequest + "\"/>"); //sb.append("<original-request url=\"" + getRequestURLQS(request, null, true) + "\"/>"); //TODO: Also support https ... // TODO: ... sb.append("<login url=\"" + originalRequest + "&amp;yanel.usecase=neutron-auth" + "\" method=\"POST\">"); //sb.append("<login url=\"" + getRequestURLQS(request, "yanel.usecase=neutron-auth", true) + "\" method=\"POST\">"); sb.append("<form>"); sb.append("<message>Enter username and password for \"" + realm.getName() + "\" at \"" + realm.getMountPoint() + "\"</message>"); sb.append("<param description=\"Username\" name=\"username\"/>"); sb.append("<param description=\"Password\" name=\"password\"/>"); sb.append("</form>"); sb.append("</login>"); // NOTE: Needs to be a full URL, because user might switch the server ... // TODO: ... sb.append("<logout url=\"" + originalRequest + "&amp;yanel.usecase=logout" + "\" realm=\"" + realm.getName() + "\"/>"); sb.append("</authentication>"); sb.append("</exception>"); log.debug("Neutron-Auth response: " + sb); PrintWriter w = response.getWriter(); w.print(sb); response.setContentType("application/xml"); response.setStatus(javax.servlet.http.HttpServletResponse.SC_UNAUTHORIZED); return response; } } else { // TODO: Refactor resp. reuse response from above ... 
log.warn("Neutron Authentication failed because username is NULL!"); StringBuffer sb = new StringBuffer(""); sb.append("<?xml version=\"1.0\"?>"); sb.append("<exception xmlns=\"http: sb.append("<message>Authentication failed because no username was sent!</message>"); sb.append("<authentication>"); // TODO: ... sb.append("<original-request url=\"" + originalRequest + "\"/>"); //sb.append("<original-request url=\"" + getRequestURLQS(request, null, true) + "\"/>"); //TODO: Also support https ... // TODO: ... sb.append("<login url=\"" + originalRequest + "&amp;yanel.usecase=neutron-auth" + "\" method=\"POST\">"); //sb.append("<login url=\"" + getRequestURLQS(request, "yanel.usecase=neutron-auth", true) + "\" method=\"POST\">"); sb.append("<form>"); sb.append("<message>Enter username and password for \"" + realm.getName() + "\" at \"" + realm.getMountPoint() + "\"</message>"); sb.append("<param description=\"Username\" name=\"username\"/>"); sb.append("<param description=\"Password\" name=\"password\"/>"); sb.append("</form>"); sb.append("</login>"); // NOTE: Needs to be a full URL, because user might switch the server ... // TODO: ... 
// (tail of doAuthenticate: Neutron-Auth error response built when no username was sent)
// NOTE: needs to be a full URL, because the user might switch the server.
sb.append("<logout url=\"" + originalRequest + "&amp;yanel.usecase=logout" + "\" realm=\"" + realm.getName() + "\"/>");
sb.append("</authentication>");
sb.append("</exception>");
PrintWriter writer = response.getWriter();
response.setContentType("application/xml");
writer.print(sb);
response.setStatus(javax.servlet.http.HttpServletResponse.SC_UNAUTHORIZED);
return response;
}
} else {
// Request carried no yanel.usecase=neutron-auth parameter: nothing to authenticate here,
// returning null lets normal request processing continue.
log.debug("Neutron Authentication successful.");
return null;
}
}

/**
 * Logs the current user out of Yanel by clearing the identity attribute in the HTTP session.
 *
 * @param request incoming request; a session is created if none exists yet
 * @param response response to answer Neutron-Auth clients on directly
 * @return the response if a Neutron-Auth client was answered, or null to let normal
 *         request processing continue
 * @throws ServletException kept for interface consistency (not thrown here)
 * @throws IOException if writing the response body fails
 */
public HttpServletResponse doLogout(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
    log.info("Logout from Yanel ...");
    HttpSession session = request.getSession(true);
    // Setting the identity to null (rather than removing the attribute) marks the session anonymous.
    session.setAttribute(IDENTITY_KEY, null);
    String clientSupportedAuthScheme = request.getHeader("WWW-Authenticate");
    if (clientSupportedAuthScheme != null && clientSupportedAuthScheme.equals("Neutron-Auth")) {
        // TODO: send some XML content, e.g. <logout-successful/>
        response.setContentType("text/plain");
        // NOTE(review): static constant accessed via instance; HttpServletResponse.SC_OK would be clearer.
        response.setStatus(response.SC_OK);
        PrintWriter writer = response.getWriter();
        writer.print("Neutron Logout Successful!");
        return response;
    }
    return null;
}

/**
 * Works around user agents that do not accept application/xhtml+xml by downgrading
 * the content type to text/html.
 *
 * @param contentType content type the resource intends to send
 * @param request incoming request, inspected for its Accept and User-Agent headers
 * @return "text/html" if the client does not list application/xhtml+xml in its Accept
 *         header, otherwise the original content type unchanged
 * @throws ServletException kept for interface consistency (not thrown here)
 * @throws IOException kept for interface consistency (not thrown here)
 */
public String patchContentType(String contentType, HttpServletRequest request) throws ServletException, IOException {
    String httpAcceptMediaTypes = request.getHeader("Accept");
    log.debug("HTTP Accept Media Types: " + httpAcceptMediaTypes);
    if (contentType.equals("application/xhtml+xml") && httpAcceptMediaTypes != null && httpAcceptMediaTypes.indexOf("application/xhtml+xml") < 0) {
        // NOTE(review): debug output deliberately logged at ERROR level (pattern used throughout this file).
        log.error("DEBUG: Patch contentType with text/html because client (" + request.getHeader("User-Agent") + ") does not seem to understand application/xhtml+xml");
        return "text/html";
    }
    return contentType;
}

/**
 * Intercept InputStream and log content ...
*/
public InputStream intercept(InputStream in) throws IOException {
    java.io.ByteArrayOutputStream baos = new java.io.ByteArrayOutputStream();
    byte[] buf = new byte[8192];
    int bytesR;
    // Drain the entire stream into the in-memory buffer.
    while ((bytesR = in.read(buf)) != -1) {
        baos.write(buf, 0, bytesR);
    }
    // Buffer within memory (TODO: Maybe replace with File-buffering ...)
    byte[] memBuffer = baos.toByteArray();
    // NOTE(review): debug output deliberately logged at ERROR level (pattern used throughout this file).
    log.error("DEBUG: InputStream: " + baos);
    // Return a replayable stream so the caller can still consume the intercepted data.
    return new java.io.ByteArrayInputStream(memBuffer);
}

/**
 * Serializes the given DOM document onto the response as application/xml.
 *
 * @param response response whose writer receives the serialized document
 * @param doc document to serialize
 * @throws ServletException if the XSLT identity transform fails; the original
 *         exception is preserved as the root cause
 */
private void setYanelOutput(HttpServletResponse response, Document doc) throws ServletException {
    response.setContentType("application/xml");
    try {
        javax.xml.transform.TransformerFactory.newInstance().newTransformer().transform(new javax.xml.transform.dom.DOMSource(doc), new javax.xml.transform.stream.StreamResult(response.getWriter()));
        /*
        OutputStream out = response.getOutputStream();
        javax.xml.transform.TransformerFactory.newInstance().newTransformer().transform(new javax.xml.transform.dom.DOMSource(doc), new javax.xml.transform.stream.StreamResult(out));
        out.close();
        */
    } catch (Exception e) {
        log.error(e.getMessage(), e);
        // FIX: keep the original exception as the root cause instead of discarding it.
        throw new ServletException(e.getMessage(), e);
    }
}
}
package edu.incense.android.sensor;

import java.util.List;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

import android.app.PendingIntent;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.location.Location;
import android.location.LocationManager;
import android.util.Log;
import edu.incense.android.datatask.data.GpsData;

/**
 * Receives location updates with a BroadcastReceiver. It includes a controller
 * (Runnable) that turns off GPS for a while (MAX_TIME_WITHOUT_NEW_LOCATION +
 * RESTART_TIME) if there is no new location within a certain time
 * (MAX_TIME_WITHOUT_NEW_LOCATION).
 *
 * @author mxpxgx
 */
public class GpsSensor extends Sensor {
    private final static String LOCATION_UPDATE_ACTION = "locationUpdate";
    private final static long MIN_RATE_TIME = 20L * 1000L; // 20 seconds
    private final static float MIN_DISTANCE = 5.0F; // in meters
    private final static long MAX_TIME_WITHOUT_NEW_LOCATION = 2L * 60L * 1000L; // 2 minutes
    private final static long RESTART_TIME = 5L * 60L * 1000L; // 5 minutes
    private final static String TAG = "GpsSensor";

    // Runs the battery-saving controller periodically while sensing.
    private ScheduledThreadPoolExecutor stpe;
    private LocationManager locationManager;
    // Timestamp (ms) of the most recent location fix; drives the controller.
    private long lastLocationTime;
    // True while the PendingIntent is registered for location updates.
    private boolean locationAdded;
    // Broadcast target through which the LocationManager delivers updates.
    private PendingIntent pendingIntent;

    public GpsSensor(Context context) {
        super(context);
        setName("GPS");
        locationAdded = false;
        // LocationManager initialization
        String service = Context.LOCATION_SERVICE;
        locationManager = (LocationManager) context.getSystemService(service);
        Intent intent = new Intent(LOCATION_UPDATE_ACTION);
        pendingIntent = PendingIntent.getBroadcast(getContext(), 5000, intent, 0);
    }

    /**
     * Requests location updates from the given provider and seeds the result with
     * the provider's last known location (better than nothing at all).
     *
     * @param provider one of the LocationManager provider names
     * @return the provider's last known location, or null if unavailable
     */
    private Location registerProvider(String provider) {
        // Never poll faster than MIN_RATE_TIME, regardless of the configured period.
        long minTime = this.getPeriodTime() < MIN_RATE_TIME ? MIN_RATE_TIME : getPeriodTime();
        Location location = null;
        try {
            Log.d(TAG, "Time rate: " + minTime);
            locationManager.requestLocationUpdates(provider, minTime, MIN_DISTANCE,
                    pendingIntent);
            // Initialize it with the last known location (it is better than
            // nothing at all).
            location = locationManager.getLastKnownLocation(provider);
            if (location != null) {
                Log.i(TAG, "New location: " + location.toString());
            }
        } catch (Exception e) {
            Log.e(TAG, "Requesting location updates failed", e);
        }
        Log.i(TAG, "Location Provider registered: " + provider);
        return location;
    }

    @Override
    public void start() {
        super.start();
        // We are using any provider (GPS, NETWORK or PASSIVE)
        addLocationListenerWithAllProviders();
        stpe = new ScheduledThreadPoolExecutor(1);
        stpe.scheduleAtFixedRate(controller, MAX_TIME_WITHOUT_NEW_LOCATION,
                MAX_TIME_WITHOUT_NEW_LOCATION, TimeUnit.MILLISECONDS);
        IntentFilter intentFilter = new IntentFilter(LOCATION_UPDATE_ACTION);
        getContext().registerReceiver(locationReceiver, intentFilter);
        Log.d(TAG, "Finished starting");
    }

    /**
     * Registers the update PendingIntent with every available provider. The seed
     * location is taken from the last provider registered (passive, then network,
     * then GPS), and the controller flags are reset.
     */
    private void addLocationListenerWithAllProviders() {
        List<String> providers = locationManager.getAllProviders();
        Location location = null;
        if (providers.contains(LocationManager.PASSIVE_PROVIDER)) {
            location = registerProvider(LocationManager.PASSIVE_PROVIDER);
        }
        if (providers.contains(LocationManager.NETWORK_PROVIDER)) {
            location = registerProvider(LocationManager.NETWORK_PROVIDER);
        }
        if (providers.contains(LocationManager.GPS_PROVIDER)) {
            location = registerProvider(LocationManager.GPS_PROVIDER);
        }
        if (location != null) {
            GpsData newData = new GpsData(location);
            currentData = newData;
        }
        // Very important flags
        lastLocationTime = System.currentTimeMillis();
        locationAdded = true;
        Log.d(TAG, "Finished adding listener");
    }

    private void removeLocationListener() {
        locationManager.removeUpdates(pendingIntent);
        locationAdded = false;
    }

    @Override
    public void stop() {
        super.stop();
        // FIX: stpe is only created in start(); guard against stop() before start().
        if (stpe != null) {
            stpe.shutdown();
        }
        removeLocationListener();
        getContext().unregisterReceiver(locationReceiver);
    }

    // Receives broadcasts that the LocationManager sends through pendingIntent.
    private BroadcastReceiver locationReceiver = new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            String intentAction = intent.getAction();
            if (intentAction.equalsIgnoreCase(LOCATION_UPDATE_ACTION)) {
                Location location = intent
                        .getParcelableExtra(LocationManager.KEY_LOCATION_CHANGED);
                // FIX: the KEY_LOCATION_CHANGED extra may be absent (e.g. on
                // provider status broadcasts); previously this dereferenced null.
                if (location == null) {
                    Log.w(TAG, "Broadcast without location extra ignored");
                    return;
                }
                GpsData newData = new GpsData(location);
                currentData = newData;
                Log.i(TAG, "New location: " + location.toString());
                Log.i(TAG, "With provider: " + location.getProvider());
                lastLocationTime = newData.getTimestamp();
            }
        }
    };

    /**
     * Battery-saving controller: if no new location arrived within
     * MAX_TIME_WITHOUT_NEW_LOCATION the listener is removed, and after an
     * additional RESTART_TIME without updates it is registered again.
     */
    private Runnable controller = new Runnable() {
        public void run() {
            long timeElapsed = System.currentTimeMillis() - lastLocationTime;
            Log.d(TAG, "Checked " + timeElapsed + " > " + MAX_TIME_WITHOUT_NEW_LOCATION);
            if (timeElapsed > MAX_TIME_WITHOUT_NEW_LOCATION && locationAdded) {
                removeLocationListener();
                Log.d(TAG, "LocationListener removed: " + !locationAdded);
            } else if (timeElapsed > (MAX_TIME_WITHOUT_NEW_LOCATION + RESTART_TIME)
                    && !locationAdded) {
                addLocationListenerWithAllProviders();
                Log.d(TAG, "LocationListener added: " + locationAdded);
            }
            Log.d(TAG, "sensorController finished");
        }
    };
}
package edu.rutgers.css.Rutgers;

import java.util.ArrayList;

import org.jdeferred.DoneCallback;
import org.jdeferred.FailCallback;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import android.app.Activity;
import android.app.Dialog;
import android.content.Intent;
import android.content.IntentSender.SendIntentException;
import android.content.res.Configuration;
import android.location.Location;
import android.net.Uri;
import android.os.Bundle;
import android.support.v4.app.ActionBarDrawerToggle;
import android.support.v4.app.DialogFragment;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentActivity;
import android.support.v4.app.FragmentManager;
import android.support.v4.view.GravityCompat;
import android.support.v4.widget.DrawerLayout;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.FrameLayout;
import android.widget.ListView;

import com.androidquery.callback.AjaxStatus;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.GooglePlayServicesClient;
import com.google.android.gms.common.GooglePlayServicesUtil;
import com.google.android.gms.location.LocationClient;

import edu.rutgers.css.Rutgers.api.ComponentFactory;
import edu.rutgers.css.Rutgers.api.Request;
import edu.rutgers.css.Rutgers.auxiliary.RMenuAdapter;
import edu.rutgers.css.Rutgers.auxiliary.RMenuPart;
import edu.rutgers.css.Rutgers.auxiliary.SlideMenuHeader;
import edu.rutgers.css.Rutgers.auxiliary.SlideMenuItem;
import edu.rutgers.css.Rutgers.fragments.DTable;
import edu.rutgers.css.Rutgers.location.LocationUtils;
import edu.rutgers.css.Rutgers2.R;

/**
 * RU Mobile main activity: hosts the navigation drawer, populates it with the
 * native channel entries plus server-provided web shortcuts, and manages the
 * Google Play services location client.
 */
public class MainActivity extends FragmentActivity implements
        GooglePlayServicesClient.ConnectionCallbacks,
        GooglePlayServicesClient.OnConnectionFailedListener {

    private static final String TAG = "MainActivity";
    // Remote list of web shortcuts shown below the channel entries.
    private static final String SC_API = "https://rumobile.rutgers.edu/1/shortcuts.txt";

    private LocationClient mLocationClient;
    private DrawerLayout mDrawerLayout;
    private ListView mDrawerList;
    private ActionBarDrawerToggle mDrawerToggle;
    private RMenuAdapter mDrawerAdapter;

    /** Wraps a Google Play services error dialog so it can be shown via a DialogFragment. */
    public static class ErrorDialogFragment extends DialogFragment {
        private Dialog mDialog;

        public ErrorDialogFragment() {
            super();
            mDialog = null;
        }

        public void setDialog(Dialog dialog) {
            mDialog = dialog;
        }

        @Override
        public Dialog onCreateDialog(Bundle savedInstanceState) {
            return mDialog;
        }
    }

    public LocationClient getLocationClient() {
        return mLocationClient;
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        // Connect to Google Play location service
        mLocationClient = new LocationClient(this, this, this);

        // Sliding menu: native channel items first.
        ArrayList<RMenuPart> menuArray = new ArrayList<RMenuPart>();
        mDrawerAdapter = new RMenuAdapter(this, R.layout.main_drawer_item, R.layout.main_drawer_header, menuArray);
        menuArray.add(new SlideMenuHeader("Channels"));
        menuArray.add(new SlideMenuItem("Bus", "bus"));
        menuArray.add(new SlideMenuItem("News", "dtable", "https://rumobile.rutgers.edu/1/news.txt"));
        menuArray.add(new SlideMenuItem("Food", "food"));
        menuArray.add(new SlideMenuItem("Places", "places"));
        menuArray.add(new SlideMenuItem("Recreation", "dtable", "https://rumobile.rutgers.edu/1/rec.txt"));
        menuArray.add(new SlideMenuItem("Events*", "reader", "http://ruevents.rutgers.edu/events/getEventsRss.xml"));

        // Sliding menu: web shortcuts fetched asynchronously from the server.
        mDrawerAdapter.add(new SlideMenuHeader("Shortcuts"));
        loadWebShortcuts(mDrawerAdapter);

        // Set up Navigation Drawer
        mDrawerLayout = (DrawerLayout) findViewById(R.id.drawer_layout);
        mDrawerList = (ListView) findViewById(R.id.left_drawer);
        mDrawerToggle = new ActionBarDrawerToggle(
                this,                 /* host Activity */
                mDrawerLayout,        /* DrawerLayout object */
                R.drawable.ic_drawer, /* nav drawer icon to replace 'Up' caret */
                R.string.drawer_open, /* "open drawer" description */
                R.string.drawer_close /* "close drawer" description */
                ) {

            /** Called when a drawer has settled in a completely closed state. */
            public void onDrawerClosed(View view) {
                super.onDrawerClosed(view);
                //getActionBar().setTitle(mTitle);
            }

            /** Called when a drawer has settled in a completely open state. */
            public void onDrawerOpened(View drawerView) {
                super.onDrawerOpened(drawerView);
                //getActionBar().setTitle(mDrawerTitle);
            }
        };
        mDrawerLayout.setDrawerListener(mDrawerToggle);
        mDrawerLayout.setDrawerShadow(R.drawable.drawer_shadow, GravityCompat.START);
        // false to disable < back arrow on title bar
        getActionBar().setDisplayHomeAsUpEnabled(true);
        getActionBar().setHomeButtonEnabled(true);

        mDrawerList.setAdapter(mDrawerAdapter);
        mDrawerList.setOnItemClickListener(new OnItemClickListener() {

            @Override
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                // Headers and disabled rows are not clickable.
                if (!view.isEnabled()) return;
                SlideMenuItem clickedItem = (SlideMenuItem) parent.getAdapter().getItem(position);
                if (clickedItem == null) {
                    Log.e("SlidingMenu", "Failed sliding menu click, index " + position);
                    mDrawerLayout.closeDrawer(mDrawerList);
                    return;
                }

                Bundle clickedArgs = clickedItem.getArgs();

                // Launch browser
                if (clickedArgs.getString("component").equalsIgnoreCase("www")) {
                    Intent i = new Intent(Intent.ACTION_VIEW);
                    i.setData(Uri.parse(clickedArgs.getString("url")));
                    startActivity(i);
                }
                // Launch channel component
                else {
                    FragmentManager fm = MainActivity.this.getSupportFragmentManager();
                    Fragment fragment = ComponentFactory.getInstance().createFragment(clickedArgs);
                    if (fragment == null) {
                        Log.e("SlidingMenu", "Failed to create component");
                        return;
                    }
                    fm.beginTransaction()
                        .replace(R.id.main_content_frame, fragment)
                        .commit();
                }

                mDrawerLayout.closeDrawer(mDrawerList); // Close menu after a click
            }
        });

        FragmentManager fm = MainActivity.this.getSupportFragmentManager();
        FrameLayout contentFrame = (FrameLayout) findViewById(R.id.main_content_frame);
        contentFrame.removeAllViews();

        /* Default to Food screen until main screen is made */
        Bundle args = new Bundle();
        args.putString("title", "Food");
        args.putString("component", "food");
        Fragment fragment = ComponentFactory.getInstance().createFragment(args);
        fm.beginTransaction()
            .replace(R.id.main_content_frame, fragment)
            .commit();

        ComponentFactory.getInstance().mMainActivity = this;
    }

    @Override
    protected void onStart() {
        super.onStart();
        // Connect to location services when activity becomes visible
        mLocationClient.connect();
    }

    @Override
    protected void onStop() {
        // Disconnect from location services when activity is no longer visible
        mLocationClient.disconnect();
        super.onStop();
    }

    @Override
    protected void onPostCreate(Bundle savedInstanceState) {
        super.onPostCreate(savedInstanceState);
        // Sync the toggle state after onRestoreInstanceState has occurred.
        mDrawerToggle.syncState();
    }

    @Override
    public void onConfigurationChanged(Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
        mDrawerToggle.onConfigurationChanged(newConfig);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Pass the event to ActionBarDrawerToggle; if it returns true,
        // then it has handled the app icon touch event.
        if (mDrawerToggle.onOptionsItemSelected(item)) {
            return true;
        }
        // Handle your other action bar items...
        return super.onOptionsItemSelected(item);
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.main, menu);
        return true;
    }

    @Override
    public void onConnected(Bundle dataBundle) {
        Log.d(LocationUtils.APPTAG, "Connected to Google Play services");
        servicesConnected();
        if (mLocationClient != null) {
            Location currentLocation = mLocationClient.getLastLocation();
            if (currentLocation != null) {
                Log.d(LocationUtils.APPTAG, currentLocation.toString());
            }
        }
    }

    @Override
    public void onDisconnected() {
        Log.d(LocationUtils.APPTAG, "Disconnected from Google Play services");
    }

    @Override
    public void onConnectionFailed(ConnectionResult connectionResult) {
        if (connectionResult.hasResolution()) {
            try {
                connectionResult.startResolutionForResult(this, LocationUtils.CONNECTION_FAILURE_RESOLUTION_REQUEST);
            } catch (SendIntentException e) {
                Log.e(LocationUtils.APPTAG, Log.getStackTraceString(e));
            }
        } else {
            Dialog dialog = GooglePlayServicesUtil.getErrorDialog(connectionResult.getErrorCode(), this, 0);
            if (dialog != null) {
                ErrorDialogFragment errorFragment = new ErrorDialogFragment();
                errorFragment.setDialog(dialog);
                errorFragment.show(getSupportFragmentManager(), LocationUtils.APPTAG);
            }
        }
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent intent) {
        // Choose what to do based on the request code
        switch (requestCode) {
            // If the request code matches the code sent in onConnectionFailed
            case LocationUtils.CONNECTION_FAILURE_RESOLUTION_REQUEST:
                switch (resultCode) {
                    // If Google Play services resolved the problem
                    case Activity.RESULT_OK:
                        Log.d(LocationUtils.APPTAG, "resolved by google play");
                        break;
                    // If any other result was returned by Google Play services
                    default:
                        Log.d(LocationUtils.APPTAG, "not resolved by google play");
                        break;
                }
                // FIX: was missing, so handled requests fell through and were
                // also logged as "unknown request code".
                break;
            default:
                // Report that this Activity received an unknown requestCode
                Log.d(LocationUtils.APPTAG, "unknown request code " + requestCode);
                break;
        }
    }

    /**
     * Check if Google Play services is connected.
     * @return True if connected, false if not.
     */
    private boolean servicesConnected() {
        int resultCode = GooglePlayServicesUtil.isGooglePlayServicesAvailable(this);
        if (resultCode == ConnectionResult.SUCCESS) {
            Log.d(TAG, "Google Play services available.");
            return true;
        } else {
            Dialog dialog = GooglePlayServicesUtil.getErrorDialog(resultCode, this, 0);
            if (dialog != null) {
                ErrorDialogFragment errorFragment = new ErrorDialogFragment();
                errorFragment.setDialog(dialog);
                errorFragment.show(getSupportFragmentManager(), LocationUtils.APPTAG);
            }
            return false;
        }
    }

    /**
     * Grab web links and add them to the menu.
     * @param menuAdapter Adapter that holds the menu objects
     */
    private void loadWebShortcuts(final RMenuAdapter menuAdapter) {
        Request.jsonArray(SC_API, Request.EXPIRE_ONE_HOUR).done(new DoneCallback<JSONArray>() {

            @Override
            public void onDone(JSONArray shortcutsArray) {
                // Get each shortcut from array and add it to the sliding menu
                for (int i = 0; i < shortcutsArray.length(); i++) {
                    try {
                        JSONObject curShortcut = shortcutsArray.getJSONObject(i);
                        String title = DTable.getLocalTitle(curShortcut.get("title"));
                        String url = curShortcut.getString("url");
                        menuAdapter.add(new SlideMenuItem(title, "www", url));
                    } catch (JSONException e) {
                        Log.e(TAG, e.getMessage());
                        continue;
                    }
                }
            }
        }).fail(new FailCallback<AjaxStatus>() {

            @Override
            public void onFail(AjaxStatus status) {
                Log.e(TAG, status.getMessage());
            }
        });
    }
}
package edu.ucsc.dbtune.core;

import edu.ucsc.dbtune.spi.Environment;
import org.hamcrest.CoreMatchers;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Condition;
import org.junit.If;
import org.junit.Ignore;
import org.junit.Test;

import java.io.File;
import java.util.Properties;

import static edu.ucsc.dbtune.core.JdbcConnectionManager.DATABASE;
import static edu.ucsc.dbtune.core.JdbcConnectionManager.DRIVER;
import static edu.ucsc.dbtune.core.JdbcConnectionManager.PASSWORD;
import static edu.ucsc.dbtune.core.JdbcConnectionManager.URL;
import static edu.ucsc.dbtune.core.JdbcConnectionManager.USERNAME;
import static edu.ucsc.dbtune.core.JdbcConnectionManager.makeDatabaseConnectionManager;
import static org.junit.Assert.assertThat;

/**
 * Functional tests for the IndexExtractor obtained from a live database
 * connection configured through {@link Environment}. Every test is guarded by
 * the {@code @If(condition = "isDatabaseConnectionAvailable")} annotation, so
 * the suite degrades gracefully when no database is reachable.
 *
 * @author huascar.sanchez@gmail.com (Huascar A. Sanchez)
 */
public class IndexExtractorFunctionalTest {
    private static DatabaseConnection connection;

    @BeforeClass
    public static void setUp() throws Exception {
        final Environment environment = Environment.getInstance();
        // FIX: plain Properties instead of double-brace initialization — the
        // anonymous subclass is a well-known anti-pattern (extra class, hidden
        // reference semantics) with no benefit here.
        final Properties properties = new Properties();
        properties.setProperty(DRIVER, environment.getJDBCDriver());
        properties.setProperty(URL, environment.getDatabaseUrl());
        properties.setProperty(DATABASE, environment.getDatabaseName());
        properties.setProperty(USERNAME, environment.getUsername());
        properties.setProperty(PASSWORD, environment.getPassword());
        final ConnectionManager manager = makeDatabaseConnectionManager(properties);
        connection = manager.connect();
    }

    @Test
    @If(condition = "isDatabaseConnectionAvailable", is = true)
    public void testIndexExtractorNotNull() throws Exception {
        final IndexExtractor extractor = connection.getIndexExtractor();
        assertThat(extractor, CoreMatchers.notNullValue());
    }

    @Test
    @If(condition = "isDatabaseConnectionAvailable", is = true)
    @Ignore
    // I am getting java.lang.RuntimeException: org.postgresql.util.PSQLException: ERROR: syntax error at or near "RECOMMEND"
    // are we running the correct Postgres version?
    public void testRecommendIndexes() throws Exception {
        final IndexExtractor extractor = connection.getIndexExtractor();
        final File workload = new File(System.getProperty("user.dir") + "/resources/select/" + "workload.sql");
        final Iterable<DBIndex> candidates = extractor.recommendIndexes(workload);
        assertThat(candidates, CoreMatchers.<Object>notNullValue());
    }

    @Test
    @If(condition = "isDatabaseConnectionAvailable", is = true)
    @Ignore
    // I am getting java.lang.RuntimeException: org.postgresql.util.PSQLException: ERROR: syntax error at or near "RECOMMEND"
    // are we running the correct Postgres version?
    public void testSingleSQLRecommendIndexes() throws Exception {
        final IndexExtractor extractor = connection.getIndexExtractor();
        final Iterable<DBIndex> candidates = extractor.recommendIndexes(
                "SELECT 1, COUNT(*) FROM tpch.lineitem table0 "
                + "WHERE table0.l_commitdate BETWEEN '1996-06-24-21.19.14.000000' "
                + "AND '1996-07-03-21.19.14.000000';");
        assertThat(candidates, CoreMatchers.<Object>notNullValue());
    }

    @AfterClass
    public static void tearDown() throws Exception {
        // NOTE(review): the connection is dereferenced but never closed here —
        // presumably the ConnectionManager owns its lifecycle; confirm, and close
        // explicitly if it does not.
        connection = null;
    }

    /** Guard consulted by {@code @If}: true only when a live, open connection exists. */
    @Condition
    public static boolean isDatabaseConnectionAvailable() {
        final boolean isNotNull = connection != null;
        boolean isOpened = false;
        if (isNotNull) {
            isOpened = connection.isOpened();
        }
        return isNotNull && isOpened;
    }
}
package org.wyona.yanel.servlet; import java.io.IOException; import java.io.PrintWriter; import java.net.URL; import java.util.Enumeration; import javax.servlet.ServletConfig; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import org.wyona.yanel.core.Path; import org.wyona.yanel.core.Resource; import org.wyona.yanel.core.ResourceTypeDefinition; import org.wyona.yanel.core.ResourceTypeRegistry; import org.wyona.yanel.core.api.attributes.ModifiableV1; import org.wyona.yanel.core.api.attributes.ViewableV1; import org.wyona.yanel.core.attributes.viewable.View; import org.wyona.yanel.core.map.Map; import org.wyona.yanel.core.map.MapFactory; import org.wyona.yanel.core.map.Realm; import org.wyona.yanel.util.ResourceAttributeHelper; import org.wyona.security.core.IdentityManagerFactory; import org.wyona.security.core.PolicyManagerFactory; import org.wyona.security.core.api.Identity; import org.wyona.security.core.api.IdentityManager; import org.wyona.security.core.api.PolicyManager; import org.wyona.security.core.api.Role; import org.apache.log4j.Category; import org.apache.avalon.framework.configuration.Configuration; import org.apache.avalon.framework.configuration.DefaultConfigurationBuilder; public class YanelServlet extends HttpServlet { private static Category log = Category.getInstance(YanelServlet.class); private ServletConfig config; ResourceTypeRegistry rtr; PolicyManager pm; IdentityManager im; Map map; private static String IDENTITY_KEY = "identity"; String proxyServerName = null; String proxyPort = null; String proxyPrefix = null; private static final String METHOD_PROPFIND = "PROPFIND"; private static final String METHOD_GET = "GET"; private static final String METHOD_POST = "POST"; private static final String METHOD_PUT = "PUT"; public void init(ServletConfig config) { this.config = config; rtr = 
new ResourceTypeRegistry(); PolicyManagerFactory pmf = PolicyManagerFactory.newInstance(); pm = pmf.newPolicyManager(); IdentityManagerFactory imf = IdentityManagerFactory.newInstance(); im = imf.newIdentityManager(); MapFactory mf = MapFactory.newInstance(); map = mf.newMap(); proxyServerName = rtr.proxyHostName; proxyPort = rtr.proxyPort; proxyPrefix = rtr.proxyPrefix; } public void service(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { String httpAcceptMediaTypes = request.getHeader("Accept"); log.error("DEBUG: HTTP Accept Media Types: " + httpAcceptMediaTypes); String httpUserAgent = request.getHeader("User-Agent"); log.error("DEBUG: HTTP User Agent: " + httpUserAgent); String httpAcceptLanguage = request.getHeader("Accept-Language"); log.error("DEBUG: HTTP Accept Language: " + httpAcceptLanguage); // Logout from Yanel String yanelUsecase = request.getParameter("yanel.usecase"); if(yanelUsecase != null && yanelUsecase.equals("logout")) { if(doLogout(request, response) != null) return; } // Authentication if(doAuthenticate(request, response) != null) return; // Check authorization if(doAuthorize(request, response) != null) return; // Delegate ... 
String method = request.getMethod(); if (method.equals(METHOD_PROPFIND)) { doPropfind(request, response); } else if (method.equals(METHOD_GET)) { doGet(request, response); } else if (method.equals(METHOD_POST)) { doPost(request, response); } else if (method.equals(METHOD_PUT)) { doPut(request, response); } else { log.error("No such method implemented: " + method); } } public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { getContent(request, response); } private void getContent(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { View view = null; StringBuffer sb = new StringBuffer(""); sb.append("<?xml version=\"1.0\"?>"); String servletContextRealPath = config.getServletContext().getRealPath("/"); sb.append("<yanel servlet-context-real-path=\""+servletContextRealPath+"\" xmlns=\"http: sb.append("<request uri=\""+request.getRequestURI()+"\" servlet-path=\""+request.getServletPath()+"\"/>"); HttpSession session = request.getSession(true); sb.append("<session id=\""+session.getId()+"\">"); Enumeration enum = session.getAttributeNames(); if (!enum.hasMoreElements()) { sb.append("<no-attributes/>"); } while (enum.hasMoreElements()) { String name = (String)enum.nextElement(); String value = session.getAttribute(name).toString(); sb.append("<attribute name=\"" + name + "\">" + value + "</attribute>"); } sb.append("</session>"); String rti = map.getResourceTypeIdentifier(new Path(request.getServletPath())); if (rti != null) { ResourceTypeDefinition rtd = rtr.getResourceTypeDefinition(rti); sb.append("<resource-type-identifier namespace=\"" + rtd.getResourceTypeNamespace() + "\" local-name=\"" + rtd.getResourceTypeLocalName() + "\"/>"); try { Resource res = rtr.newResource(rti); res.setRTD(rtd); if (ResourceAttributeHelper.hasAttributeImplemented(res, "Viewable", "1")) { sb.append("<resource>View Descriptors: " + ((ViewableV1) res).getViewDescriptors() + "</resource>"); 
// --- continuation of getContent(request, response): the method (and its enclosing
// servlet class) begins before this chunk; `view`, `res`, and `sb` are declared above. ---
String viewId = request.getParameter("yanel.resource.viewid");
view = ((ViewableV1) res).getView(request, viewId);
} else {
sb.append("<resource>" + res.getClass().getName() + " is not viewable!</resource>");
}
} catch(Exception e) {
sb.append("<exception>" + e + "</exception>");
log.error(e.getMessage(), e);
}
} else {
sb.append("<no-resource-type-identifier-found servlet-path=\""+request.getServletPath()+"\"/>");
}
// "checkout" usecase is only logged for now; locking is not implemented.
String value = request.getParameter("yanel.resource.usecase");
if (value != null && value.equals("checkout")) {
log.debug("Checkout data ...");
// TODO: Implement checkout ...
log.warn("Acquire lock has not been implemented yet ...!");
// acquireLock();
}
// Stream the rendered view to the client; on any failure fall through and
// emit the accumulated <yanel> diagnostic document as application/xml.
if (view != null) {
response.setContentType(view.getMimeType());
java.io.InputStream is = view.getInputStream();
byte buffer[] = new byte[8192];
int bytesRead;
if (is != null) {
// TODO: Yarep does not set returned Stream to null resp. is missing Exception Handling for the constructor. Exceptions should be handled here, but rather within Yarep or whatever repositary layer is being used ...
// First read doubles as an emptiness probe: -1 here means the view produced no bytes.
bytesRead = is.read(buffer);
if (bytesRead == -1) {
sb.append("<exception>InputStream of view does not seem to contain any data!</exception>");
sb.append("</yanel>");
response.setContentType("application/xml");
PrintWriter writer = response.getWriter();
writer.print(sb);
return;
}
java.io.OutputStream os = response.getOutputStream();
os.write(buffer, 0, bytesRead);
while ((bytesRead = is.read(buffer)) != -1) {
os.write(buffer, 0, bytesRead);
}
return;
} else {
sb.append("<exception>InputStream of view is null!</exception>");
}
} else {
sb.append("<exception>View is null!</exception>");
}
sb.append("</yanel>");
response.setContentType("application/xml");
PrintWriter writer = response.getWriter();
writer.print(sb);
return;
}

/**
 * HTTP POST: dispatches on the yanel.resource.usecase parameter —
 * "save" and "checkin" both delegate to save(); anything else falls back to getContent().
 * NOTE(review): checkin currently behaves identically to save (lock release not implemented).
 */
public void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
String value = request.getParameter("yanel.resource.usecase");
if (value != null && value.equals("save")) {
log.debug("Save data ...");
save(request, response);
return;
} else if (value != null && value.equals("checkin")) {
log.debug("Checkin data ...");
save(request, response);
// TODO: Implement checkin ...
log.warn("Release lock has not been implemented yet ...");
// releaseLock();
return;
} else {
log.warn("No parameter yanel.resource.usecase!");
getContent(request, response);
}
}

/**
 * HTTP PUT implementation
 * NOTE(review): duplicates doPost verbatim — see the TODO below about sharing code.
 */
public void doPut(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
// TODO: Reuse code doPost resp. share code with doPut
String value = request.getParameter("yanel.resource.usecase");
if (value != null && value.equals("save")) {
log.debug("Save data ...");
save(request, response);
return;
} else if (value != null && value.equals("checkin")) {
log.debug("Checkin data ...");
save(request, response);
// TODO: Implement checkin ...
log.warn("Release lock has not been implemented yet ...!");
// releaseLock();
return;
} else {
log.warn("No parameter yanel.resource.usecase!");
getContent(request, response);
}
}

/**
 * Resolves the request's servlet path to a Resource via the resource-type
 * registry (rtr) and map. Returns null (after logging) when no resource type
 * identifier matches or instantiation fails.
 */
private Resource getResource(HttpServletRequest request) {
String rti = map.getResourceTypeIdentifier(new Path(request.getServletPath()));
if (rti != null) {
ResourceTypeDefinition rtd = rtr.getResourceTypeDefinition(rti);
try {
Resource res = rtr.newResource(rti);
res.setRTD(rtd);
return res;
} catch(Exception e) {
log.error(e.getMessage(), e);
return null;
}
} else {
log.error("<no-resource-type-identifier-found servlet-path=\""+request.getServletPath()+"\"/>");
return null;
}
}

/**
 * Reads the full request body into memory, checks it for XML well-formedness
 * (responding 500 with an XML error document if parsing fails), then writes the
 * buffered bytes through the resource's ModifiableV1 output stream.
 * NOTE(review): whole body is buffered in memory — see inline TODO about file buffering.
 */
private void save(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
StringBuffer sb = new StringBuffer();
log.debug("Save data ...");
java.io.InputStream in = request.getInputStream();
java.io.ByteArrayOutputStream baos = new java.io.ByteArrayOutputStream();
byte[] buf = new byte[8192];
int bytesR;
while ((bytesR = in.read(buf)) != -1) {
baos.write(buf, 0, bytesR);
}
// Buffer within memory (TODO: Maybe replace with File-buffering ...)
byte[] memBuffer = baos.toByteArray();
// TODO: Well-formedness should NOT be checked necessarily, but only if POST/PUT is supposed to be XML ...
// Check on well-formedness ...
if (true) {
javax.xml.parsers.DocumentBuilderFactory dbf= javax.xml.parsers.DocumentBuilderFactory.newInstance();
try {
javax.xml.parsers.DocumentBuilder parser = dbf.newDocumentBuilder();
// TODO: Get log messages into log4j ...
//parser.setErrorHandler(...);
// NOTE: DOCTYPE is being resolved/retrieved (e.g. xhtml schema from w3.org) also
// if isValidating is set to false.
// Hence, for performance and network reasons we use a local catalog ...
// TODO: What about a resolver factory?
parser.setEntityResolver(new org.apache.xml.resolver.tools.CatalogResolver());
parser.parse(new java.io.ByteArrayInputStream(memBuffer));
//org.w3c.dom.Document document = parser.parse(new ByteArrayInputStream(memBuffer));
} catch (org.xml.sax.SAXException e) {
log.warn("Data is not well-formed: "+e.getMessage());
sb.append("<?xml version=\"1.0\"?>");
// NOTE(review): the literal below is truncated in this copy (unterminated namespace URL) — restore from upstream before compiling.
sb.append("<exception xmlns=\"http:
sb.append("<message>Data is not well-formed: "+e.getMessage()+"</message>");
sb.append("</exception>");
response.setContentType("application/xml");
response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
PrintWriter w = response.getWriter();
w.print(sb);
return;
} catch (Exception e) {
log.error(e.getMessage(), e);
sb.append("<?xml version=\"1.0\"?>");
// NOTE(review): truncated literal (see note above in this method) — restore from upstream.
sb.append("<exception xmlns=\"http:
//sb.append("<message>" + e.getStackTrace() + "</message>");
//sb.append("<message>" + e.getMessage() + "</message>");
sb.append("<message>" + e + "</message>");
sb.append("</exception>");
response.setContentType("application/xml");
response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
PrintWriter w = response.getWriter();
w.print(sb);
return;
}
log.info("Data seems to be well-formed :-)");
}
/* if (bytesRead == -1) { response.setContentType("text/plain"); PrintWriter writer = response.getWriter(); writer.print("No content!"); return; } */
/* java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream(); out.write(buffer, 0, bytesRead); while ((bytesRead = in.read(buffer)) != -1) { out.write(buffer, 0, bytesRead); } log.error("DEBUG: Received Data: " + out.toString()); */
Resource res = getResource(request);
if (ResourceAttributeHelper.hasAttributeImplemented(res, "Modifiable", "1")) {
String contentType = request.getContentType();
log.debug("Content-Type: " + contentType);
// TODO: Compare mime-type from response with mime-type of resource
//if (contentType.equals("text/xml")) { ... }
byte[] buffer = new byte[8192];
int bytesRead;
java.io.ByteArrayInputStream memIn = new java.io.ByteArrayInputStream(memBuffer);
java.io.OutputStream out = ((ModifiableV1) res).getOutputStream(new Path(request.getServletPath()));
while ((bytesRead = memIn.read(buffer)) != -1) {
out.write(buffer, 0, bytesRead);
}
sb.append("<?xml version=\"1.0\"?>");
sb.append("<html>");
sb.append("<body>");
sb.append("<p>Data has been saved ...</p>");
sb.append("</body>");
sb.append("</html>");
response.setStatus(javax.servlet.http.HttpServletResponse.SC_OK);
response.setContentType("application/xhtml+xml");
log.info("Data has been saved ...");
} else {
log.warn(res.getClass().getName() + " is not modifiable!");
sb.append("<?xml version=\"1.0\"?>");
sb.append("<html>");
sb.append("<body>");
sb.append("<resource>" + res.getClass().getName() + " is not modifiable!</resource>");
sb.append("</body>");
sb.append("</html>");
response.setContentType("application/xhtml+xml");
}
}

/**
 * Authorize request
 * TODO: Replace hardcoded roles by mapping between roles amd query strings ...
 *
 * Returns null when access is granted (caller proceeds) or the response
 * (already written / status set) when access was denied.
 * NOTE(review): contains a hardcoded BASIC credential check ("lenya:levi") and
 * uses sun.misc.BASE64Decoder — both flagged by the inline TODOs; do not ship as-is.
 */
private HttpServletResponse doAuthorize(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
Role role = null;
// Map the usecase parameter onto a required role: save/checkin -> write, checkout -> open, default -> view.
String value = request.getParameter("yanel.resource.usecase");
if (value != null && value.equals("save")) {
log.debug("Save data ...");
role = new Role("write");
} else if (value != null && value.equals("checkin")) {
log.debug("Checkin data ...");
role = new Role("write");
} else if (value != null && value.equals("checkout")) {
log.debug("Checkout data ...");
role = new Role("open");
} else {
log.debug("No parameter yanel.resource.usecase!");
role = new Role("view");
}
boolean authorized = false;
// HTTP BASIC Authorization (For clients without session handling, e.g. OpenOffice or cadaver)
String authorization = request.getHeader("Authorization");
log.debug("Checking for Authorization Header: " + authorization);
if (authorization != null) {
if (authorization.toUpperCase().startsWith("BASIC")) {
log.debug("Using BASIC authorization ...");
// Get encoded user and password, comes after "BASIC "
String userpassEncoded = authorization.substring(6);
// Decode it, using any base 64 decoder
sun.misc.BASE64Decoder dec = new sun.misc.BASE64Decoder();
String userpassDecoded = new String(dec.decodeBuffer(userpassEncoded));
log.error("DEBUG: userpassDecoded: " + userpassDecoded);
// TODO: Use security package and remove hardcoded ...
// Authenticate every request ...
//if (im.authenticate(...)) {
if (userpassDecoded.equals("lenya:levi")) {
//return pm.authorize(new org.wyona.commons.io.Path(request.getServletPath()), new Identity(...), new Role("view"));
authorized = true;
return null;
}
authorized = false;
PrintWriter writer = response.getWriter();
writer.print("BASIC Authorization/Authentication Failed!");
response.sendError(response.SC_UNAUTHORIZED);
return response;
} else if (authorization.toUpperCase().startsWith("DIGEST")) {
log.error("DIGEST is not implemented");
authorized = false;
PrintWriter writer = response.getWriter();
writer.print("DIGEST is not implemented!");
response.sendError(response.SC_UNAUTHORIZED);
return response;
} else {
log.warn("No such authorization implemented resp. handled by session based authorization: " + authorization);
authorized = false;
}
}
// Custom Authorization
log.debug("Do session based custom authorization");
//String[] groupnames = {"null", "null"};
HttpSession session = request.getSession(true);
Identity identity = (Identity) session.getAttribute(IDENTITY_KEY);
if (identity == null) {
log.debug("Identity is WORLD");
identity = new Identity();
}
authorized = pm.authorize(new org.wyona.commons.io.Path(request.getServletPath()), identity, role);
if(!authorized) {
log.warn("Access denied: " + getRequestURLQS(request, null, false));
// TODO: Shouldn't this be here instead at the beginning of service() ...?
//if(doAuthenticate(request, response) != null) return response;
// HTTP Authorization/Authentication
// TODO: Ulysses has not HTTP BASIC or DIGEST implemented yet!
/* response.setHeader("WWW-Authenticate", "BASIC realm=\"yanel\""); response.sendError(response.SC_UNAUTHORIZED); */
// Custom Authorization/Authentication
// TODO: Check if this is a neutron request or just a common GET request
StringBuffer sb = new StringBuffer("");
String neutronVersions = request.getHeader("Neutron");
String clientSupportedAuthScheme = request.getHeader("WWW-Authenticate");
Realm realm = map.getRealm(new Path(request.getServletPath()));
// Neutron-aware clients get a machine-readable XML challenge; everyone else gets an HTML login form.
if (clientSupportedAuthScheme != null && clientSupportedAuthScheme.equals("Neutron-Auth")) {
log.debug("Neutron Versions supported by client: " + neutronVersions);
log.debug("Authentication Scheme supported by client: " + clientSupportedAuthScheme);
sb.append("<?xml version=\"1.0\"?>");
// NOTE(review): truncated literal (unterminated namespace URL) — restore from upstream.
sb.append("<exception xmlns=\"http:
sb.append("<message>Authorization denied: " + getRequestURLQS(request, null, true) + "</message>");
sb.append("<authentication>");
sb.append("<original-request url=\"" + getRequestURLQS(request, null, true) + "\"/>");
//TODO: Also support https ...
sb.append("<login url=\"" + getRequestURLQS(request, "yanel.usecase=neutron-auth", true) + "\" method=\"POST\">");
sb.append("<form>");
sb.append("<message>Enter username and password for \"" + realm.getName() + "\" at \"" + realm.getMountPoint() + "\"</message>");
sb.append("<param description=\"Username\" name=\"username\"/>");
sb.append("<param description=\"Password\" name=\"password\"/>");
sb.append("</form>");
sb.append("</login>");
// NOTE: Needs to be a full URL, because user might switch the server ...
sb.append("<logout url=\"" + getRequestURLQS(request, "yanel.usecase=logout", true) + "\" realm=\"" + realm.getName() + "\"/>");
sb.append("</authentication>");
sb.append("</exception>");
log.debug("Neutron-Auth response: " + sb);
response.setContentType("application/xml");
response.setStatus(javax.servlet.http.HttpServletResponse.SC_UNAUTHORIZED);
} else {
// Custom HTML Form authentication
// TODO: Use configurable XSLT for layout, whereas each realm should be able to overwrite ...
sb.append("<?xml version=\"1.0\"?>");
// NOTE(review): truncated literal (unterminated xhtml namespace URL) — restore from upstream.
sb.append("<html xmlns=\"http:
sb.append("<body>");
sb.append("<p>Authorization denied: " + getRequestURLQS(request, null, true) + "</p>");
sb.append("<p>Enter username and password for realm \"" + realm.getName() + "\" at \"" + realm.getMountPoint() + "\" (Context Path: " + request.getContextPath() + ")</p>");
sb.append("<form method=\"POST\">");
sb.append("<p>");
sb.append("<table>");
sb.append("<tr><td>Username:</td><td>&#160;</td><td><input type=\"text\" name=\"yanel.login.username\"/></td></tr>");
sb.append("<tr><td>Password:</td><td>&#160;</td><td><input type=\"password\" name=\"yanel.login.password\"/></td></tr>");
sb.append("<tr><td colspan=\"2\">&#160;</td><td align=\"right\"><input type=\"submit\" value=\"Login\"/></td></tr>");
sb.append("</table>");
sb.append("</p>");
sb.append("</form>");
sb.append("</body>");
sb.append("</html>");
response.setContentType("application/xhtml+xml");
}
PrintWriter w = response.getWriter();
w.print(sb);
return response;
} else {
log.info("Access granted: " + getRequestURLQS(request, null, false));
return null;
}
}

/**
 * Rebuilds the request URL including query string, applying the configured
 * reverse-proxy overrides (proxyServerName / proxyPort / proxyPrefix), optionally
 * appending addQS and XML-escaping ampersands when xml is true.
 * NOTE(review): if new URL(...) throws, url stays null and url.toString() below NPEs.
 */
private String getRequestURLQS(HttpServletRequest request, String addQS, boolean xml) {
URL url = null;
try {
url = new URL(request.getRequestURL().toString());
if (proxyServerName != null) {
url = new URL(url.getProtocol(), proxyServerName, url.getPort(), url.getFile());
}
if (proxyPort != null) {
// Empty proxyPort string means "use the protocol's default port".
if (proxyPort.length() > 0) {
url = new URL(url.getProtocol(), url.getHost(), new Integer(proxyPort).intValue(), url.getFile());
} else {
url = new URL(url.getProtocol(), url.getHost(), url.getDefaultPort(), url.getFile());
}
}
if (proxyPrefix != null) {
url = new URL(url.getProtocol(), url.getHost(), url.getPort(), url.getFile().substring(proxyPrefix.length()));
}
if(proxyServerName != null || proxyPort != null || proxyPrefix != null) {
log.debug("Proxy enabled request: " + url);
}
} catch (Exception e) {
log.error(e);
}
String urlQS = url.toString();
if (request.getQueryString() != null) {
urlQS = urlQS + "?" + request.getQueryString();
if (addQS != null) urlQS = urlQS + "&" + addQS;
} else {
if (addQS != null) urlQS = urlQS + "?" + addQS;
}
if (xml) urlQS = urlQS.replaceAll("&", "&amp;");
log.debug("Request: " + urlQS);
return urlQS;
}

/**
 * Minimal WebDAV PROPFIND: always answers a single-resource multistatus
 * describing the requested URL as a non-collection resource.
 */
public void doPropfind(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
response.setContentType("application/xml");
StringBuffer sb = new StringBuffer("");
sb.append("<?xml version=\"1.0\"?>");
sb.append("<D:multistatus xmlns:D=\"DAV:\">");
sb.append("<D:response xmlns:lp1=\"DAV:\">");
sb.append("<D:href>" + request.getRequestURL() + "</D:href>");
sb.append("<D:propstat>");
sb.append("<D:prop>");
sb.append("<lp1:resourcetype>");
//sb.append("<D:collection/>");
sb.append("<D:resource/>");
sb.append("</lp1:resourcetype>");
//sb.append("<D:getcontenttype>httpd/unix-directory</D:getcontenttype>");
sb.append("</D:prop>");
sb.append("</D:propstat>");
sb.append("<D:status>HTTP/1.1 200 OK</D:status>");
sb.append("</D:response>");
sb.append("</D:multistatus>");
log.error("DEBUG: " + sb.toString());
PrintWriter writer = response.getWriter();
writer.print(sb.toString());
response.setStatus(response.SC_OK);
return;
}

/**
 * Authenticates a request, either via HTML form parameters
 * (yanel.login.username / yanel.login.password) or via a Neutron-Auth XML body
 * (yanel.usecase=neutron-auth). On success stores an Identity in the session
 * and returns null (form case) or a confirmation response (Neutron case);
 * on failure writes the error response and returns it.
 */
public HttpServletResponse doAuthenticate(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
Realm realm = map.getRealm(new Path(request.getServletPath()));
// HTML Form based authentication
String loginUsername = request.getParameter("yanel.login.username");
if(loginUsername != null) {
HttpSession session = request.getSession(true);
if (im.authenticate(loginUsername, request.getParameter("yanel.login.password"), realm.getID())) {
log.debug("Realm: " + realm);
session.setAttribute(IDENTITY_KEY, new Identity(loginUsername, null));
return null;
} else {
log.warn("Login failed: " + loginUsername);
// TODO: Implement form based response ...
response.setHeader("WWW-Authenticate", "BASIC realm=\"yanel\"");
response.sendError(response.SC_UNAUTHORIZED);
return response;
}
}
// Neutron-Auth based authentication
String yanelUsecase = request.getParameter("yanel.usecase");
if(yanelUsecase != null && yanelUsecase.equals("neutron-auth")) {
log.debug("Neutron Authentication ...");
String username = null;
String password = null;
String originalRequest = null;
// Parse <param name="username|password"> and <original-request url="..."/> out of the request body.
DefaultConfigurationBuilder builder = new DefaultConfigurationBuilder();
try {
Configuration config = builder.build(request.getInputStream());
Configuration[] paramConfig = config.getChildren("param");
for (int i = 0; i < paramConfig.length; i++) {
String paramName = paramConfig[i].getAttribute("name", null);
if (paramName != null) {
if (paramName.equals("username")) {
username = paramConfig[i].getValue();
} else if (paramName.equals("password")) {
password = paramConfig[i].getValue();
}
}
}
Configuration originalRequestConfig = config.getChild("original-request");
originalRequest = originalRequestConfig.getAttribute("url", null);
} catch(Exception e) {
log.error(e);
}
log.debug("Username: " + username);
if (username != null) {
HttpSession session = request.getSession(true);
log.debug("Realm ID: " + realm.getID());
if (im.authenticate(username, password, realm.getID())) {
log.info("Authentication successful: " + username);
session.setAttribute(IDENTITY_KEY, new Identity(username, null));
// TODO: send some XML content, e.g. <authentication-successful/>
response.setContentType("text/plain");
PrintWriter writer = response.getWriter();
writer.print("Neutron Authentication Successful!");
response.setStatus(response.SC_OK);
return response;
} else {
log.warn("Neutron Authentication failed: " + username);
// TODO: Refactor this code with the one from doAuthenticate ...
log.debug("Original Request: " + originalRequest);
StringBuffer sb = new StringBuffer("");
sb.append("<?xml version=\"1.0\"?>");
// NOTE(review): truncated literal (unterminated namespace URL) — restore from upstream.
sb.append("<exception xmlns=\"http:
sb.append("<message>Authentication failed ...</message>");
sb.append("<authentication>");
// TODO: ...
sb.append("<original-request url=\"" + originalRequest + "\"/>");
//sb.append("<original-request url=\"" + getRequestURLQS(request, null, true) + "\"/>");
//TODO: Also support https ...
// TODO: ...
sb.append("<login url=\"" + originalRequest + "&amp;yanel.usecase=neutron-auth" + "\" method=\"POST\">");
//sb.append("<login url=\"" + getRequestURLQS(request, "yanel.usecase=neutron-auth", true) + "\" method=\"POST\">");
sb.append("<form>");
sb.append("<message>Enter username and password for \"" + realm.getName() + "\" at \"" + realm.getMountPoint() + "\"</message>");
sb.append("<param description=\"Username\" name=\"username\"/>");
sb.append("<param description=\"Password\" name=\"password\"/>");
sb.append("</form>");
sb.append("</login>");
// NOTE: Needs to be a full URL, because user might switch the server ...
// TODO: ...
sb.append("<logout url=\"" + originalRequest + "&amp;yanel.usecase=logout" + "\" realm=\"" + realm.getName() + "\"/>");
sb.append("</authentication>");
sb.append("</exception>");
log.debug("Neutron-Auth response: " + sb);
PrintWriter w = response.getWriter();
w.print(sb);
response.setContentType("application/xml");
response.setStatus(javax.servlet.http.HttpServletResponse.SC_UNAUTHORIZED);
return response;
}
} else {
// TODO: Resend login information ...
log.warn("Neutron Authentication failed because username is NULL!");
response.setContentType("text/plain");
PrintWriter writer = response.getWriter();
writer.print("Authentication Failed!");
response.sendError(response.SC_UNAUTHORIZED);
return response;
}
} else {
log.debug("Neutron Authentication successful.");
return null;
}
}

/**
 * Clears the session identity. Neutron-aware clients (WWW-Authenticate header
 * equal to "Neutron-Auth") get an explicit confirmation response; others get null
 * so the caller continues normally.
 */
public HttpServletResponse doLogout(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
log.info("Logout from Yanel ...");
HttpSession session = request.getSession(true);
session.setAttribute(IDENTITY_KEY, null);
String clientSupportedAuthScheme = request.getHeader("WWW-Authenticate");
if (clientSupportedAuthScheme != null && clientSupportedAuthScheme.equals("Neutron-Auth")) {
// TODO: send some XML content, e.g. <logout-successful/>
response.setContentType("text/plain");
PrintWriter writer = response.getWriter();
writer.print("Neutron Logout Successful!");
response.setStatus(response.SC_OK);
return response;
}
return null;
}
}
package edu.wustl.xipHost.gui;
import javax.swing.*;
import edu.wustl.xipHost.application.Application;
import edu.wustl.xipHost.application.ApplicationManagerFactory;
import edu.wustl.xipHost.avt2ext.AVTPanel;
import edu.wustl.xipHost.caGrid.GridPanel;
import edu.wustl.xipHost.dicom.DicomPanel;
import edu.wustl.xipHost.globalSearch.GlobalSearchPanel;
import edu.wustl.xipHost.hostControl.HostConfigurator;
import edu.wustl.xipHost.hostLogin.Login;
import edu.wustl.xipHost.localFileSystem.LocalFileSystemPanel;
import edu.wustl.xipHost.nbia.DataSourcePanel;
import edu.wustl.xipHost.worklist.WorklistPanel;
import edu.wustl.xipHost.xds.XDSPanel;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.util.UUID;

/**
 * Main application frame for XIP Host: a full-screen window holding a vertical
 * side tab strip (one tab per hosted application plus the "Host" tab) and a
 * center tab pane with the data-source search panels.
 *
 * NOTE(review): most state is static (toolBar, sideTabbedPane, hostPanel,
 * appScreenSize, screenSize), so only a single instance of this window is
 * supported at a time.
 */
public class HostMainWindow extends JFrame implements ActionListener {
// Shared, static UI state — accessed from the static addTab/removeTab helpers.
static HostIconBar toolBar = new HostIconBar();
static JTabbedPane sideTabbedPane;
SideTabMouseAdapter mouseAdapter = new SideTabMouseAdapter();
//CenterTabMouseAdapter mouseAdapterCenterTabs = new CenterTabMouseAdapter();
static PanelUUID hostPanel = new PanelUUID();
JTabbedPane tabPaneCenter = new JTabbedPane();
static Rectangle appScreenSize = new Rectangle();
OptionsDialog optionsDialog = new OptionsDialog(new JFrame());
// XIP color scheme.
static Color xipColor = new Color(51, 51, 102);
Color xipLightBlue = new Color(156, 162, 189);
Font font = new Font("Tahoma", 0, 12);
String userName;
// One search panel per data source, created in the constructor and tabbed in display().
WorklistPanel worklistPanel;
DicomPanel dicomPanel;
GridPanel gridPanel;
GlobalSearchPanel globalSearchPanel;
XDSPanel xdsPanel;
AVTPanel avt2extPanel;
LocalFileSystemPanel localFileSystemPanel;
DataSourcePanel nbiaPanel;
static Dimension screenSize;

/**
 * Creates all data-source panels. The window is undecorated on Windows; on other
 * platforms it keeps decorations and ignores the close button (shutdown goes
 * through the Exit toolbar button instead).
 */
public HostMainWindow(){
super("XIP Host");
worklistPanel = new WorklistPanel();
dicomPanel = new DicomPanel();
gridPanel = new GridPanel();
globalSearchPanel = new GlobalSearchPanel();
avt2extPanel = new AVTPanel();
xdsPanel = new XDSPanel();
localFileSystemPanel = new LocalFileSystemPanel();
nbiaPanel = new DataSourcePanel();
if(HostConfigurator.OS.contains("Windows")){
setUndecorated(true);
}else{
setUndecorated(false);
setDefaultCloseOperation(JFrame.DO_NOTHING_ON_CLOSE);
}
}

/**
 * Builds and shows the full UI: toolbar, side tab strip, center search tabs,
 * listener wiring, and full-screen bounds. Call once after construction.
 */
public void display(){
UIManager.put("TabbedPane.selected", xipLightBlue);
setLayout(new BorderLayout());
toolBar.setUserName(userName);
add(toolBar, BorderLayout.NORTH);
sideTabbedPane = VerticalTextIcon.createTabbedPane(JTabbedPane.RIGHT);
sideTabbedPane.setBackground(xipColor);
add(sideTabbedPane, BorderLayout.CENTER);
sideTabbedPane.addMouseListener(mouseAdapter);
hostPanel.add(tabPaneCenter);
hostPanel.setBackground(xipColor);
buildHostPanelLayout();
hostPanel.setUUID(UUID.randomUUID());
VerticalTextIcon.addTab(sideTabbedPane, "Host", hostPanel);
//Add tabs
ImageIcon icon = null;
//tabPaneCenter.addTab("NBIA", icon, nbiaPanel, null);
tabPaneCenter.addTab("Local File System", icon, localFileSystemPanel, null);
tabPaneCenter.addTab("AVT AD", icon, avt2extPanel, null);
tabPaneCenter.addTab("NBIA/caGrid", icon, gridPanel, null);
tabPaneCenter.addTab("PACS", icon, dicomPanel, null);
tabPaneCenter.addTab("GlobalSearch", icon, globalSearchPanel, null);
tabPaneCenter.addTab("XDS", icon, xdsPanel, null);
tabPaneCenter.addTab("Worklist", icon, worklistPanel, null);
tabPaneCenter.setFont(font);
tabPaneCenter.setSelectedComponent(localFileSystemPanel);
toolBar.btnHost.addActionListener(this);
//toolBar.btnLocal.addActionListener(this);
toolBar.btnOptions.addActionListener(this);
toolBar.btnExit.addActionListener(this);
toolBar.btnSuspend.addActionListener(this);
toolBar.btnCancel.addActionListener(this);
toolBar.btnSwitchUser.addActionListener(this);
toolBar.btnExitApp.addActionListener(this);
toolBar.lblAbout.addMouseListener(new MouseAdapter(){
public void mouseClicked(MouseEvent e){
new AboutDialog(new JFrame());
}
});
toolBar.lblHelp.addMouseListener(new MouseAdapter(){
public void mouseClicked(MouseEvent e){
new HelpManager(new JFrame());
}
});
screenSize = Toolkit.getDefaultToolkit().getScreenSize();
setBounds(0, 0, (int)screenSize.getWidth(), (int)screenSize.getHeight());
getContentPane().setBackground(xipColor);
setVisible(true);
// Pulse always-on-top to force the window in front once, then release it.
setAlwaysOnTop(true);
setAlwaysOnTop(false);
}

/** Centers the center tab pane inside hostPanel using GridBagLayout. */
void buildHostPanelLayout(){
GridBagLayout layout = new GridBagLayout();
GridBagConstraints constraints = new GridBagConstraints();
hostPanel.setLayout(layout);
constraints.fill = GridBagConstraints.NONE;
constraints.gridx = 0;
constraints.gridy = 0;
constraints.anchor = GridBagConstraints.CENTER;
layout.setConstraints(tabPaneCenter, constraints);
}

/**
 * Toolbar button dispatcher: Host tab selection, options popup, user switch,
 * host shutdown, and cancel/suspend/exit of the currently selected hosted
 * application.
 */
public void actionPerformed(ActionEvent e) {
if(e.getSource() == toolBar.btnHost){
sideTabbedPane.setSelectedIndex(0);
toolBar.switchButtons(0);
setAlwaysOnTop(true);
setAlwaysOnTop(false);
} else if(e.getSource() == toolBar.btnOptions){
// Drop the options dialog just below the Options button.
int x = (int)((JButton)e.getSource()).getLocationOnScreen().getX();
int y = (int)((JButton)e.getSource()).getLocationOnScreen().getY() + 45;
optionsDialog.display(x, y);
} else if(e.getSource() == toolBar.btnSwitchUser) {
//TODO
//Stop all running hosted applications for the previous user
Login login = HostConfigurator.getLogin();
login.invalidateNBIASecuredConnection();
HostConfigurator.getHostConfigurator().logNewUser();
} else if (e.getSource() == toolBar.btnExit) {
HostConfigurator hostConfig = HostConfigurator.getHostConfigurator();
hostConfig.runHostShutdownSequence();
}else if(e.getSource() == toolBar.btnCancel){
Application app = getSelectedApplication();
if(!app.cancelProcessing()){
new ExceptionDialog("Selected application processing cannot be canceled.", "Application state must be INPROGRESS or SUSPENDED.", "Host Dialog");
}
}else if(e.getSource() == toolBar.btnSuspend){
Application app = getSelectedApplication();
if(!app.suspendProcessing()){
new ExceptionDialog("Selected application processing cannot be suspended.", "Application state must be INPROGRESS but is " + app.getState().toString() + ".", "Host Dialog");
}
}else if(e.getSource() == toolBar.btnExitApp){
Application app = getSelectedApplication();
app.shutDown();
}
}

/**
 * Resolves the hosted Application bound to the currently selected side tab via
 * the tab panel's UUID.
 * NOTE(review): assumes a non-Host tab is selected; on the Host tab this looks
 * up hostPanel's random UUID — confirm callers guard against that.
 */
Application getSelectedApplication(){
int index = sideTabbedPane.getSelectedIndex();
UUID uuid = ((PanelUUID)sideTabbedPane.getComponentAt(index)).getUUID();
Application app = ApplicationManagerFactory.getInstance().getApplication(uuid);
return app;
}

/**
 * Returns the screen rectangle a hosted application should occupy:
 * hostPanel's on-screen bounds on Windows/Mac OS X, the full screen elsewhere.
 * The shared appScreenSize instance is mutated and returned.
 */
public static Rectangle getApplicationPreferredSize(){
int appXPosition;
int appYPosition;
int appWidth;
int appHeight;
if(HostConfigurator.OS.contains("Windows") || HostConfigurator.OS.contains("Mac OS X")){
appXPosition = (int) hostPanel.getLocationOnScreen().getX();
appYPosition = (int) hostPanel.getLocationOnScreen().getY();
appWidth = (int) hostPanel.getBounds().getWidth();
appHeight = (int) hostPanel.getBounds().getHeight();
} else {
appXPosition = 0;
appYPosition = 0;
appWidth = (int)screenSize.getWidth();
appHeight = (int)screenSize.getHeight();
}
appScreenSize.setBounds(appXPosition, appYPosition, appWidth, appHeight);
return appScreenSize;
}

/** Adds a side tab for a newly launched hosted application and selects it. */
public static void addTab(String appName, UUID appUUID){
PanelUUID panel = new PanelUUID(appUUID);
panel.setBackground(xipColor);
VerticalTextIcon.addTab(sideTabbedPane, appName, panel);
int tabCount = sideTabbedPane.getTabCount();
sideTabbedPane.setSelectedIndex(tabCount - 1);
toolBar.switchButtons(1);
}

/**
 * Removes the side tab whose panel carries appUUID (tab removal itself is
 * deferred to the EDT) and re-selects the Host tab.
 */
public static void removeTab(UUID appUUID){
int tabCount = sideTabbedPane.getTabCount();
for(int i = 0; i < tabCount; i++){
UUID selectedTabUUID = ((PanelUUID)sideTabbedPane.getComponentAt(i)).getUUID();
if(appUUID.equals(selectedTabUUID)){
final int index;
index = i;
java.awt.EventQueue.invokeLater(new Runnable() {
@Override
public void run() {
if(this != null) { // NOTE(review): always true — `this` is the Runnable; condition is a no-op
sideTabbedPane.remove(index);
sideTabbedPane.setSelectedIndex(0);
sideTabbedPane.repaint();
}
}
});
toolBar.switchButtons(0);
return;
}
}
}

/**
 * Left-click handler for the side tab strip: Host tab brings this window to
 * the front; an application tab sends this window to the back and raises the
 * hosted application instead.
 */
class SideTabMouseAdapter extends MouseAdapter{
public void mouseClicked(MouseEvent e) {
if(e.getButton() == 1){
if(e.getSource() == sideTabbedPane){
int i = (((JTabbedPane)e.getSource()).getSelectedIndex());
UUID uuid = ((PanelUUID)sideTabbedPane.getComponentAt(i)).getUUID();
if (sideTabbedPane.getSelectedIndex() == 0){
toolBar.switchButtons(0);
bringToFront();
} else if (sideTabbedPane.getSelectedIndex() != 0){
Application app = ApplicationManagerFactory.getInstance().getApplication(uuid);
bringToBack();
app.bringToFront();
toolBar.switchButtons(1);
}else { // NOTE(review): unreachable — the two branches above cover all indices
setAlwaysOnTop(true);
setAlwaysOnTop(false);
}
}
}
}
}

/** Raises this frame on the EDT. */
private void bringToFront() {
java.awt.EventQueue.invokeLater(new Runnable() {
@Override
public void run() {
if(this != null) { // NOTE(review): always true — see removeTab
toFront();
repaint();
}
}
});
}

/** Lowers this frame on the EDT (hosted application window takes the front). */
private void bringToBack() {
java.awt.EventQueue.invokeLater(new Runnable() {
@Override
public void run() {
if(this != null) { // NOTE(review): always true — see removeTab
toBack();
repaint();
}
}
});
}

/** Stores the user name shown by the toolbar; call before display(). */
public void setUserName(String userName){
this.userName = userName;
}
/*public void deiconify() { int state = getExtendedState(); state &= ~Frame.ICONIFIED; setExtendedState(state); }*/

/** Minimizes the window by OR-ing the ICONIFIED bit into the extended state. */
public void iconify() {
int state = getExtendedState();
state |= Frame.ICONIFIED;
setExtendedState(state);
}

/** @return the shared toolbar instance */
public static HostIconBar getHostIconBar(){
return toolBar;
}

/** @return the currently selected data-source search panel in the center tab pane */
public Component getSelectedSearchTab() {
return tabPaneCenter.getSelectedComponent();
}
}
package de.haw.rnp01.newsticker;

import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;

/**
 * A generic {@link ActionListener} that does no handling of its own: every
 * received {@link ActionEvent} is forwarded unchanged to a single
 * {@link Controller}, which decides how to react.
 */
public class GeneralPurposeListener implements ActionListener {

    /** Target that receives all forwarded action events. */
    private final Controller controller;

    /**
     * Creates a listener bound to the given controller.
     *
     * @param controller the controller every event is delegated to
     */
    public GeneralPurposeListener(Controller controller) {
        this.controller = controller;
    }

    /** Delegates the event straight to the controller. */
    @Override
    public void actionPerformed(ActionEvent event) {
        controller.performAction(event);
    }
}
package eu.visualize.ini.convnet;

import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import com.jogamp.opengl.GL2;
import com.jogamp.opengl.GLAutoDrawable;
import com.jogamp.opengl.util.awt.TextRenderer;
import java.awt.Font;
import java.util.Arrays;
import net.sf.jaer.Description;
import net.sf.jaer.DevelopmentStatus;
import net.sf.jaer.chip.AEChip;
import net.sf.jaer.event.EventPacket;
import net.sf.jaer.eventprocessing.FilterChain;
import net.sf.jaer.graphics.MultilineAnnotationTextRenderer;
import net.sf.jaer.util.SpikeSound;

/**
 * Extends DavisDeepLearnCnnProcessor to add annotation graphics to show
 * RoShamBo demo output for development of rock-scissors-paper robot.
 *
 * Review fixes applied:
 * 1) spikeSound.play() is now inside the playSpikeSounds/decision-changed guard
 *    (previously it ran unconditionally and threw NPE when sounds were disabled).
 * 2) Error.toString() indexed the stale 2-element "Face"/"Non-Face" label array
 *    with NUM_CLASSES==3, guaranteeing ArrayIndexOutOfBoundsException; it now
 *    uses the 3-element DECISION_STRINGS labels.
 * 3) Property-name check uses String.equals instead of reference ==.
 *
 * @author Tobi
 */
@Description("Displays RoShamBo (rock-scissors-paper) CNN results; subclass of DavisDeepLearnCnnProcessor")
@DevelopmentStatus(DevelopmentStatus.Status.Experimental)
public class RoShamBoCNN extends DavisDeepLearnCnnProcessor implements PropertyChangeListener {

    private boolean hideOutput = getBoolean("hideOutput", false);
    private boolean showAnalogDecisionOutput = getBoolean("showAnalogDecisionOutput", false);
    private boolean playSpikeSounds = getBoolean("playSpikeSounds", false);
    // private TargetLabeler targetLabeler = null;
    // Collects decision statistics and tracks the current winning output unit.
    Error error = new Error();
    private float decisionLowPassMixingFactor = getFloat("decisionLowPassMixingFactor", .2f);
    private SpikeSound spikeSound = null; // created lazily on first decision change

    /** output units */
    private static final int DECISION_PAPER = 0, DECISION_SCISSORS = 1, DECISION_ROCK = 2;
    private static final String[] DECISION_STRINGS = {"Paper", "Scissors", "Rock"};

    public RoShamBoCNN(AEChip chip) {
        super(chip);
        String faceDetector = "Face detector";
        setPropertyTooltip(faceDetector, "showAnalogDecisionOutput", "Shows face detection as analog activation of face unit in softmax of network output");
        setPropertyTooltip(faceDetector, "hideOutput", "Hides output face detection indications");
        setPropertyTooltip(faceDetector, "decisionLowPassMixingFactor", "The softmax outputs of the CNN are low pass filtered using this mixing factor; reduce decisionLowPassMixingFactor to filter more decisions");
        setPropertyTooltip(faceDetector, "playSpikeSounds", "Play a spike sound on change of network output decision");
        FilterChain chain = new FilterChain(chip);
        setEnclosedFilterChain(chain);
        // Error listens for each network decision event and accumulates statistics.
        apsDvsNet.getSupport().addPropertyChangeListener(DeepLearnCnnNetwork.EVENT_MADE_DECISION, error);
    }

    @Override
    public synchronized EventPacket<?> filterPacket(EventPacket<?> in) {
        EventPacket out = super.filterPacket(in);
        return out;
    }

    @Override
    public void resetFilter() {
        super.resetFilter();
        error.reset();
    }

    @Override
    public synchronized void setFilterEnabled(boolean yes) {
        super.setFilterEnabled(yes);
    }

    /** Draws the current decision label (and statistics) unless output is hidden. */
    @Override
    public void annotate(GLAutoDrawable drawable) {
        super.annotate(drawable);
        if (hideOutput) {
            return;
        }
        GL2 gl = drawable.getGL().getGL2();
        checkBlend(gl);
        if ((apsDvsNet != null) && (apsDvsNet.outputLayer != null) && (apsDvsNet.outputLayer.activations != null)) {
            drawDecisionOutput(gl, drawable.getSurfaceWidth(), drawable.getSurfaceHeight());
        }
        error.draw(gl);
    }

    private TextRenderer textRenderer = null;

    /**
     * Renders the winning decision label; brightness scales with the winning
     * unit's low-pass-filtered activation when showAnalogDecisionOutput is set.
     */
    private void drawDecisionOutput(GL2 gl, int width, int height) {
        float brightness;
        if (showAnalogDecisionOutput) {
            brightness = error.maxActivation; // brightness scale
        } else {
            brightness = 1;
        }
        gl.glColor3f(0.0f, brightness, brightness);
        // gl.glPushMatrix();
        // gl.glTranslatef(chip.getSizeX() / 2, chip.getSizeY() / 2, 0);
        if (textRenderer == null) {
            // FIX(review): was a redundant double assignment (textRenderer=textRenderer=new ...)
            textRenderer = new TextRenderer(new Font("SansSerif", Font.PLAIN, 36));
        }
        textRenderer.setColor(brightness, brightness, brightness, 1);
        textRenderer.beginRendering(width, height);
        textRenderer.draw(DECISION_STRINGS[error.maxUnit], chip.getSizeX() / 2, chip.getSizeY() / 2);
        textRenderer.endRendering();
        // gl.glPopMatrix();
    }

    /**
     * @return the hideOutput
     */
    public boolean isHideOutput() {
        return hideOutput;
    }

    /**
     * @param hideOutput the hideOutput to set
     */
    public void setHideOutput(boolean hideOutput) {
        this.hideOutput = hideOutput;
        putBoolean("hideOutput", hideOutput);
    }

    /**
     * @return the showAnalogDecisionOutput
     */
    public boolean isShowAnalogDecisionOutput() {
        return showAnalogDecisionOutput;
    }

    /**
     * @param showAnalogDecisionOutput the showAnalogDecisionOutput to set
     */
    public void setShowAnalogDecisionOutput(boolean showAnalogDecisionOutput) {
        this.showAnalogDecisionOutput = showAnalogDecisionOutput;
        putBoolean("showAnalogDecisionOutput", showAnalogDecisionOutput);
    }

    /**
     * @return the decisionLowPassMixingFactor
     */
    public float getDecisionLowPassMixingFactor() {
        return decisionLowPassMixingFactor;
    }

    /**
     * @param decisionLowPassMixingFactor the decisionLowPassMixingFactor to set;
     *        clamped to at most 1
     */
    public void setDecisionLowPassMixingFactor(float decisionLowPassMixingFactor) {
        if (decisionLowPassMixingFactor > 1) {
            decisionLowPassMixingFactor = 1;
        }
        this.decisionLowPassMixingFactor = decisionLowPassMixingFactor;
        putFloat("decisionLowPassMixingFactor", decisionLowPassMixingFactor);
    }

    /**
     * @return the playSpikeSounds
     */
    public boolean isPlaySpikeSounds() {
        return playSpikeSounds;
    }

    /**
     * @param playSpikeSounds the playSpikeSounds to set
     */
    public void setPlaySpikeSounds(boolean playSpikeSounds) {
        this.playSpikeSounds = playSpikeSounds;
        putBoolean("playSpikeSounds", playSpikeSounds);
    }

    /**
     * Accumulates per-class decision counts and low-pass filters the network's
     * softmax outputs to pick a stable winning unit.
     * NOTE(review): class name shadows java.lang.Error; kept because the
     * package-visible field `error` may be referenced elsewhere.
     */
    private class Error implements PropertyChangeListener {

        final int NUM_CLASSES = 3;
        int totalCount, totalCorrect, totalIncorrect;
        int[] correct = new int[NUM_CLASSES], incorrect = new int[NUM_CLASSES], count = new int[NUM_CLASSES];
        int dvsTotalCount, dvsCorrect, dvsIncorrect;
        int apsTotalCount, apsCorrect, apsIncorrect;
        int[] decisionCounts = new int[NUM_CLASSES];
        // Leftover from the face-detector this class was derived from; unused for labeling now.
        final int FACE = 0, NONFACE = 1;
        final String[] decisionStrings = {"Face", "Non-Face"};
        float[] lowpassFilteredOutputUnits = new float[NUM_CLASSES];
        final int HISTORY_LENGTH = 10;
        int[] decisionHistory = new int[HISTORY_LENGTH]; // currently unused
        // Winning (largest low-passed) output unit and its activation.
        float maxActivation = Float.NEGATIVE_INFINITY;
        int maxUnit = -1;

        public Error() {
            reset();
        }

        /** Clears all counters and the low-pass filter state. */
        void reset() {
            totalCount = 0;
            totalCorrect = 0;
            totalIncorrect = 0;
            Arrays.fill(correct, 0);
            Arrays.fill(incorrect, 0);
            Arrays.fill(count, 0);
            Arrays.fill(decisionCounts, 0);
            Arrays.fill(lowpassFilteredOutputUnits, 0);
            dvsTotalCount = 0;
            dvsCorrect = 0;
            dvsIncorrect = 0;
            apsTotalCount = 0;
            apsCorrect = 0;
            apsIncorrect = 0;
        }

        @Override
        public String toString() {
            if (totalCount == 0) {
                return "Error: no samples yet";
            }
            StringBuilder sb = new StringBuilder("Decision statistics: ");
            for (int i = 0; i < NUM_CLASSES; i++) {
                // FIX(review): was decisionStrings[i] — a 2-element array indexed up to 2,
                // which threw ArrayIndexOutOfBoundsException once any sample arrived.
                sb.append(String.format("%s: %d (%.1f%%) ", DECISION_STRINGS[i], decisionCounts[i], 100 * (float) decisionCounts[i] / totalCount));
            }
            return sb.toString();
        }

        /**
         * On each network decision: low-pass filter the output activations, pick the
         * winning unit, update counters, and optionally play a sound when the
         * decision changes.
         */
        @Override
        public synchronized void propertyChange(PropertyChangeEvent evt) {
            // FIX(review): was a reference comparison (==) on the property-name String.
            if (DeepLearnCnnNetwork.EVENT_MADE_DECISION.equals(evt.getPropertyName())) {
                int lastOutput = maxUnit;
                DeepLearnCnnNetwork net = (DeepLearnCnnNetwork) evt.getNewValue();
                maxActivation = Float.NEGATIVE_INFINITY;
                maxUnit = -1;
                for (int i = 0; i < NUM_CLASSES; i++) {
                    float output = net.outputLayer.activations[i];
                    lowpassFilteredOutputUnits[i] = (1 - decisionLowPassMixingFactor) * lowpassFilteredOutputUnits[i] + output * decisionLowPassMixingFactor;
                    if (lowpassFilteredOutputUnits[i] > maxActivation) {
                        maxActivation = lowpassFilteredOutputUnits[i];
                        maxUnit = i;
                    }
                }
                decisionCounts[maxUnit]++;
                totalCount++;
                // FIX(review): play() was outside the guard, so it ran on every decision
                // and threw NPE whenever playSpikeSounds was false (spikeSound never created).
                if (playSpikeSounds && (maxUnit != lastOutput)) {
                    if (spikeSound == null) {
                        spikeSound = new SpikeSound();
                    }
                    spikeSound.play();
                }
            }
        }

        /** Renders the statistics summary near the top of the display. */
        private void draw(GL2 gl) {
            MultilineAnnotationTextRenderer.resetToYPositionPixels(.8f * chip.getSizeY());
            MultilineAnnotationTextRenderer.renderMultilineString(toString());
        }
    }
}
package de.mrapp.android.adapter; import java.util.Collection; import java.util.Iterator; import java.util.ListIterator; import android.widget.ExpandableListView; import de.mrapp.android.adapter.expandablelist.ExpandableListAdapterListener; import de.mrapp.android.adapter.expandablelist.ExpansionListener; /** * Defines the interface, an adapter, whose underlying data is managed as a list * of arbitrary group and child items, must implement. Such an adapter's purpose * is to provide the underlying data for visualization using a * {@link ExpandableListView} widget. * * @param <GroupType> * The type of the underlying data of the adapter's group items * @param <ChildType> * The type of the underlying data of the adapter's child items * * @author Michael Rapp * * @since 0.1.0 */ public interface ExpandableListAdapter<GroupType, ChildType> extends Adapter<ExpandableListView>, android.widget.ExpandableListAdapter { /** * Returns, whether duplicate group items are allowed, or not. * * @return True, if duplicate group items are allowed, false otherwise */ boolean areDuplicateGroupsAllowed(); /** * Sets, whether duplicate group items should be allowed, or not. * * @param allowDuplicateGroups * True , if duplicate group items should be allowed, false * otherwise */ void allowDuplicateGroups(boolean allowDuplicateGroups); /** * Adds a specific group item to the adapter. The item will be added at the * end. * * @param group * The group item, which should be added to the adapter, as an * instance of the generic type GroupType. The group item may not * be null * @return The index of the group, which has been added to the adapter, as * an {@link Integer} value or -1, if the group has not been added */ int addGroup(GroupType group); /** * Adds a specific group item to the adapter. The group item will be added * at a specific index. * * @param index * The index, the group item should be added at, as an * {@link Integer} value. 
The index must be between 0 and the * value of the method <code>getGroupCount():int</code>, * otherwise an {@link IndexOutOfBoundsException} will be thrown * @param group * The group item, which should be added to the adapter, as an * instance of the generic type GroupType. The group item may not * be null * @return True, if the group item has been added to the adapter, false * otherwise */ boolean addGroup(int index, GroupType group); /** * Adds all group items, which are contained by a specific collection, to * the adapter. The group items will be added in a consecutive order at the * end. * * @param groups * The collection, which contains the group items, which should * be added to the adapter, as an instance of the type * {@link Collection} or an empty collection, if no group items * should be added * @return True, if all group items have been added to the adapter, false * otherwise */ boolean addAllGroups(Collection<GroupType> groups); /** * Adds all group items, which are contained by a specific collection, to * the adapter. The group items will be added in a consecutive order at a * specific index. * * @param index * The index, the group items should be added at, as an * {@link Integer} value. The index must be between 0 and the * value of the method <code>getGroupCount():int</code>, * otherwise an {@link IndexOutOfBoundsException} will be thrown * @param groups * The collection, which contains the group items, which should * be added to the adapter, as an instance of the type * {@link Collection} or an empty collection, if no group items * should be added * @return True, if all items have been added to the adapter, false * otherwise */ boolean addAllGroups(int index, Collection<GroupType> groups); /** * Adds all group items, which are contained by a specific array, to the * adapter. The group items will be added in a consecutive order at the end. 
* * @param groups * The array, which contains the group items, which should be * added to the adapter, as an array of the generic type * GroupType or an empty array, if no group items should be added * @return True, if all group items have been added to the adapter, false * otherwise */ @SuppressWarnings("unchecked") boolean addAllGroups(GroupType... groups); /** * Adds all group items, which are contained by a specific array, to the * adapter. The group items will be added in a consecutive order at a * specific index. * * @param index * The index, the group items should be added at, as an * {@link Integer} value. The index must be between 0 and the * value of the method <code>getGroupCount():int</code>, * otherwise an {@link IndexOutOfBoundsException} will be thrown * @param groups * The array, which contains the group items, which should be * added to the adapter, as an array of the generic type * GroupType or an empty array, if no group items should be added * @return True, if all group items have been added to the adapter, false * otherwise */ @SuppressWarnings("unchecked") boolean addAllGroups(int index, GroupType... groups); /** * Replaces the group item, which belongs to a specific index, by an other * group item. The group's children will be retained. * * @param index * The index of the group item, which should be replaced, as an * {@link Integer} value. The index must be between 0 and the * value of the method <code>getGroupCount():int</code> - 1, * otherwise an {@link IndexOutOfBoundsException} will be thrown * @param group * The group item, which should replace the group item at the * given index, as an instance of the generic type GroupType. The * group item may not be null * @return The group item, which has been replaced, as an instance of the * generic type GroupType. The group item may not be null */ GroupType replaceGroup(int index, GroupType group); /** * Removes the group item, which belongs to a specific index, from the * adapter. 
The group's children will be also removed from the adapter. * * @param index * The index of the group item, which should be removed from the * adapter, as an {@link Integer} value. The index must be * between 0 and the value of the method * <code>getGroupCount():int</code> - 1, otherwise an * {@link IndexOutOfBoundsException} will be thrown * @return The group item, which has been removed, as an instance of the * generic type GroupType. The group item may not be null */ GroupType removeGroup(int index); /** * Removes a specific group item from the adapter. The group's children will * be also removed from the adapter. * * @param group * The group item, which should be removed, as an instance of the * generic type GroupType. The group item may not be null * @return True, if the group item has been removed, false otherwise */ boolean removeGroup(GroupType group); /** * Removes all group items, which are contained by a specific collection, * from the adapter. The groups' children will also be removed from the * adapter. * * @param groups * The collection, which contains the group items, which should * be removed from the adapter, as an instance of the type * {@link Collection} or an empty collection, if no group items * should be removed * @return True, if all group items have been removed from the adapter, * false otherwise */ boolean removeAllGroups(Collection<GroupType> groups); /** * Removes all group items, which are contained by a specific array, from * the adapter. The groups' children will also be removed from the adapter. * * @param groups * The array, which contains the group items, which should be * removed from the adapter, as an array of the generic type * GroupType or an empty array, if no group items should be * removed * @return True, if all group items have been removed from the adapter, * false otherwise */ @SuppressWarnings("unchecked") boolean removeAllGroups(GroupType... 
groups); /** * Removes all group items from the adapter, except of the group items, * which are contained by a specific collection. The removed groups' * children will also be removed from the adapter. * * @param groups * The collection, which contains the group items, which should * be retained, as an instance of the type {@link Collection} or * an empty collection, if no group items should be retained */ void retainAllGroups(Collection<GroupType> groups); /** * Removes all group items from the adapter, except of the group items, * which are contained by a specific array. The removed groups' children * will also be removed from the adapter. * * @param groups * The array, which contains the group items, which should be * retained, as an array of the generic type DataType or an empty * array, if no group items should be retained */ @SuppressWarnings("unchecked") void retainAllGroups(GroupType... groups); /** * Removes all groups and their child items from the adapter. */ void clearGroups(); /** * Returns an iterator, which allows to iterate the adapter's group items. * * @return An iterator, which allows to iterate the adapter's group items, * as an instance of the type {@link Iterator}. The iterator may not * be null */ Iterator<GroupType> groupIterator(); /** * Returns a list iterator, which allows to iterate the adapter's group * items. * * @return A list iterator, which allows to iterate the adapter's group * items, as an instance of the type {@link ListIterator}. The * iterator may not be null */ ListIterator<GroupType> groupListIterator(); /** * Returns a list iterator, which allows to iterate the adapter's group * items, starting at a specific index. * * @param index * The index, the iterator should start at, as an {@link Integer} * value. 
The index must be between 0 and the value of the method * <code>getGroupCount():int</code> - 1, otherwise an * {@link IndexOutOfBoundsException} will be thrown * @return A list iterator, which allows to iterate the adapter's group * items, starting at the given index, as an instance of the type * {@link ListIterator}. The iterator may not be null */ ListIterator<GroupType> groupListIterator(int index); /** * Returns a collection, which contains the adapter's group items between a * specific start and end index. * * @param start * The start index of the group items, which should be returned, * as an {@link Integer} value. The group item, which belongs to * the start index will be included. The index must be between 0 * and the value of the method <code>getGroupCount():int</code> - * 1, otherwise an {@link IndexOutOfBoundsException} will be * thrown * @param end * The end index of the group items, which should be returned, as * an {@link Integer} value. The group item, which belongs to the * end index, will be excluded. The index must be between 0 and * the value of the method <code>getGroupCount():int</code> -1 * and it must be greater than the start index, otherwise an * {@link IndexOutOfBoundsException} will be thrown * @return A collection, which contains the adapter's group items, between a * specific start and end index, as an instance of the type * {@link Collection} or an empty collection, if the adapter does * not contain any group items */ Collection<GroupType> subListGroups(int start, int end); /** * Returns an array, which contains the adapter's group items. * * @return An array, which contains the adapter's group items, as an * {@link Object} array or an empty array, if the adapter does not * contain any group items */ Object[] groupsToArray(); /** * Returns an array, which contains all of the adapter's group items. If the * given array is large enough to hold the items, the specified array is * used, otherwise an array of the same type is created. 
If the given array * can hold more items, the array's elements, following the adapter's items, * are set to null. * * @param <T> * The type of the array, which should be returned * @param array * The array, which should be used, if it is large enough, as an * array of the generic type T. The array may not be null * @return An array, which contains all of the adapter's group item, as an * array of the generic type T or an empty array, if the adapter * does not contain any group items */ <T> T[] groupsToArray(T[] array); /** * Returns the group item, which belongs to a specific index. * * @param index * The index of the group item, which should be returned, as an * {@link Integer} value. The index must be between 0 and the * value of the method <code>getGroupCount():int</code> - 1, * otherwise an {@link IndexOutOfBoundsException} will be thrown * @return The group item, which belongs to the given index, as an instance * of the generic type GroupType. The group item may not be null */ GroupType getGroup(int index); /** * Returns the index of a specific group item. * * @param group * The group item, whose index should be returned, as an instance * of the generic type GroupType. The group item may not be null * @return The index of the the given group item, as an {@link Integer} * value or -1, if the adapter does not contain the given group item */ int indexOfGroup(GroupType group); /** * Returns, whether the adapter contains a specific group item, or not. * * @param group * The group item, whose presence should be checked, as an * instance of the generic type GroupType. The group item may not * be null * @return True, if the adapter contains the given group item, false * otherwise */ boolean containsGroup(GroupType group); /** * Returns, whether the adapter contains all group items, which are * contained by a specific collection, or not. 
* * @param groups * The collection, which contains the group items, whose * presence should be checked, as an instance of the type * {@link Collection}. The collection may not be null * @return True, if the adapter contains all group items, which are * contained by the given collection, false otherwise */ boolean containsAllGroups(Collection<GroupType> groups); /** * Returns, whether the adapter contains all group items, which are * contained by a specific array, or not. * * @param groups * The array, which contains the group items, whose * presence should be checked, as an array of the generic type * GroupType. The array may not be null * @return True, if the adapter contains all group items, which are * contained by the given array, false otherwise */ @SuppressWarnings("unchecked") boolean containsAllGroups(GroupType... groups); /** * Returns the number of group items, which are contained by the adapter. * * @return The number of group items, which are contained by the adapter, as * an {@link Integer} value */ int getGroupCount(); /** * Returns a collection, which contains all of the adapter's group items. * * @return A collection, which contains all of the adapter's group items, as * an instance of the type {@link Collection} or an empty * collection, if the adapter does not contain any group items */ Collection<GroupType> getAllGroups(); /** * Returns, whether the adapter is empty, or not. * * @return True, if the adapter is empty, false otherwise */ boolean isEmpty(); /** * Returns, whether the group, which belongs to a specific index, is empty * or not. * * @param groupIndex * The index of the group, which should be checked, as an * {@link Integer} value. 
The index must be between 0 and the * value of the method <code>getGroupCount():int</code> - 1, * otherwise an {@link IndexOutOfBoundsException} will be thrown * @return True, if the group is empty, false otherwise */ boolean isGroupEmpty(int groupIndex); /** * Returns, whether a specific group is empty or not. * * @param group * The group, which should be checked, as an instance of the * generic type GroupType. The group may not be null. If the * group does not belong to the adapter, a * {@link NoSuchElementException} will be thrown * @return True, if the group is empty, false otherwise */ boolean isGroupEmpty(GroupType group); /** * Returns, whether duplicate child items, regardless from the group they * belong to, are allowed, or not. * * @return True, if duplicate child items, regardless from the group they * belong to, are allowed, false otherwise */ boolean areDuplicateChildrenAllowed(); /** * Sets, whether duplicate child items, regardless from the group they * belong to, should be allowed, or not. * * @param allowDuplicateChildren * True, if duplicate child items, regardless from the group they * belong to, should be allowed, false otherwise */ void allowDuplicateChildren(boolean allowDuplicateChildren); /** * Returns, whether duplicate child items within the group, which belongs to * a specific index, are allowed, or not. * * @param groupIndex * The index of the group, whose constraints regarding duplicate * child items should be checked, as an {@link Integer} value. * The index must be between 0 and the value of the method * <code>getGroupCount():int</code> - 1, otherwise an * {@link IndexOutOfBoundsException} will be thrown * @return True, if duplicate child items are allowed within the group, * false otherwise */ boolean areDuplicateChildrenAllowed(int groupIndex); /** * Sets, whether duplicate child items within the group, which belongs to a * specific index, should be allowed, or not. 
* * @param groupIndex * The index of the group, whose constraints regarding duplicate * child items should be set, as an {@link Integer} value. The * index must be between 0 and the value of the method * <code>getGroupCount():int</code> - 1, otherwise an * {@link IndexOutOfBoundsException} will be thrown * @param allowDuplicateChildren * True, if duplicate child items should be allowed within the * group, false otherwise */ void allowDuplicateChildren(int groupIndex, boolean allowDuplicateChildren); /** * Returns, whether duplicate child items within a specific group are * allowed, or not. * * @param group * The group, whose constraints regarding duplicate child items * should be checked, as an instance of the generic type * GroupType. The group may not be null. If the group does not * belong to the adapter, a {@link NoSuchElementException} will * be thrown * @return True, if duplicate child items are allowed within the group, * false otherwise */ boolean areDuplicateChildrenAllowed(GroupType group); /** * Sets, whether duplicate child items within a specific group should be * allowed, or not. * * @param group * The group, whose constraints regarding duplicate child items * should be set, as an instance of the generic type GroupType. * The group may not be null. If the group does not belong to the * adapter, a {@link NoSuchElementException} will be thrown * @param allowDuplicateChildren * True, if duplicate child items should be allowed within the * group, false otherwise */ void allowDuplicateChildren(GroupType group, boolean allowDuplicateChildren); /** * Adds a specific child item to the group, which belongs to a specific * index. The child item will be added at the end of the group. * * @param groupIndex * The index of the group, the child item should be added to, as * an {@link Integer} value. 
The index must be between 0 and the * value of the method <code>getGroupCount():int</code>, * otherwise an {@link IndexOutOfBoundsException} will be thrown * @param child * The child item, which should be added to the group, as an * instance of the generic type ChildType. The child item may not * be null * @return The index of the child, which has been added, as an * {@link Integer} value or -1, if the child has not been added */ int addChild(int groupIndex, ChildType child); /** * Adds a specific child item to a specific group. The child item will be * added at the end of the group. * * @param group * The group, the child item should be added to, as an instance * of the generic type GroupType. The group may not be null. If * the group does not belong to the adapter, a * {@link NoSuchElementException} will be thrown * @param child * The child item, which should be added to the group, as an * instance of the generic type ChildType. The child item may not * be null * @return The index of the child, which has been added, as an * {@link Integer} value or -1, if the child has not been added */ int addChild(GroupType group, ChildType child); /** * Adds a specific child item to the group, which belongs to a specific * index. The child item will be added at a specific index of the group. * * @param groupIndex * The index of the group, the child item should be added to, as * an {@link Integer} value. The index must be between 0 and the * value of the method <code>getGroupCount():int</code>, * otherwise an {@link IndexOutOfBoundsException} will be thrown * @param index * The index, the child item should be added at, as an * {@link Integer} value. The index must be between 0 and the * value of the method <code>getChildCount(groupIndex):int</code> * , otherwise an {@link IndexOutOfBoundsException} will be * thrown * @param child * The child item, which should be added to the group, as an * instance of the generic type ChildType. 
The child item may not * be null * @return True, if the child item has been added to the group, false * otherwise */ boolean addChild(int groupIndex, int index, ChildType child); /** * Adds a specific child item to a specific group. The child item will be * added at the end of the group. The child item will be added at a specific * index of the group. * * @param group * The group, the child item should be added to, as an instance * of the generic type GroupType. The group may not be null. If * the group does not belong to the adapter, a * {@link NoSuchElementException} will be thrown * @param index * The index, the child item should be added at, as an * {@link Integer} value. The index must be between 0 and the * value of the method <code>getChildCount(group):int</code>, * otherwise an {@link IndexOutOfBoundsException} will be thrown * @param child * The child item, which should be added to the group, as an * instance of the generic type ChildType. The child item may not * be null * @return True, if the child item has been added to the group, false * otherwise */ boolean addChild(GroupType group, int index, ChildType child); /** * Adds all child items, which are contained by a specific collection, to * the group, which belongs to a specific index. The items will be added in * a consecutive order at the end of the group. * * @param groupIndex * The index of the group, the child items should be added to, as * an {@link Integer} value. 
The index must be between 0 and the * value of the method <code>getGroupCount():int</code>, * otherwise an {@link IndexOutOfBoundsException} will be thrown * @param children * The collection, which contains the child items, which should * be added to the group, as an instance of the type * {@link Collection} or an empty collection, if no child items * should be added * @return True, if all child items have been added to the group, false * otherwise */ boolean addAllChildren(int groupIndex, Collection<ChildType> children); /** * Adds all child items, which are contained by a specific collection, to a * specific group. The items will be added in a consecutive order at the end * of the group. * * @param group * The group, the child items should be added to, as an instance * of the generic type GroupType. The group may not be null. If * the group does not belong to the adapter, a * {@link NoSuchElementException} will be thrown * @param children * The collection, which contains the child items, which should * be added to the group, as an instance of the type * {@link Collection} or an empty collection, if no child items * should be added * @return True, if all child items have been added to the group, false * otherwise */ boolean addAllChildren(GroupType group, Collection<ChildType> children); /** * Adds all child items, which are contained by a specific collection, to * the group, which belongs to a specific index. The items will be added in * a consecutive order at a specific index of the group. * * @param groupIndex * The index of the group, the child items should be added to, as * an {@link Integer} value. The index must be between 0 and the * value of the method <code>getGroupCount():int</code>, * otherwise an {@link IndexOutOfBoundsException} will be thrown * @param index * The index, the child items should be added at, as an * {@link Integer} value. 
The index must be between 0 and the * value of the method <code>getChildCount(groupIndex):int</code> * , otherwise an {@link IndexOutOfBoundsException} will be * thrown * @param children * The collection, which contains the child items, which should * be added to the group, as an instance of the type * {@link Collection} or an empty collection, if no child items * should be added * @return True, if all child items have been added to the group, false * otherwise */ boolean addAllChildren(int groupIndex, int index, Collection<ChildType> children); /** * Adds all child items, which are contained by a specific collection, to a * specific group. The items will be added in a consecutive order at a * specific index of the group. * * @param group * The group, the child items should be added to, as an instance * of the generic type GroupType. The group may not be null. If * the group does not belong to the adapter, a * {@link NoSuchElementException} will be thrown * @param index * The index, the child items should be added at, as an * {@link Integer} value. The index must be between 0 and the * value of the method <code>getChildCount(group):int</code>, * otherwise an {@link IndexOutOfBoundsException} will be thrown * @param children * The collection, which contains the child items, which should * be added to the group, as an instance of the type * {@link Collection} or an empty collection, if no child items * should be added * @return True, if all child items have been added to the group, false * otherwise */ boolean addAllChildren(GroupType group, int index, Collection<ChildType> children); /** * Adds all child items, which are contained by a specific array, to the * group, which belongs to a specific index. The items will be added in a * consecutive order at the end of the group. * * @param groupIndex * The index of the group, the child items should be added to, as * an {@link Integer} value. 
The index must be between 0 and the * value of the method <code>getGroupCount():int</code>, * otherwise an {@link IndexOutOfBoundsException} will be thrown * @param children * The array, which contains the child items, which should be * added to the group, as an array of the generic type ChildType * or an empty array, if no child items should be added * @return True, if all child items have been added to the group, false * otherwise */ @SuppressWarnings("unchecked") boolean addAllChildren(int groupIndex, ChildType... children); /** * Adds all child items, which are contained by a specific array, to a * specific group. The items will be added in a consecutive order at the end * of the group. * * @param group * The group, the child items should be added to, as an instance * of the generic type GroupType. The group may not be null. If * the group does not belong to the adapter, a * {@link NoSuchElementException} will be thrown * @param children * The array, which contains the child items, which should be * added to the group, as an array of the generic type ChildType * or an empty array, if no child items should be added * @return True, if all child items have been added to the group, false * otherwise */ @SuppressWarnings("unchecked") boolean addAllChildren(GroupType group, ChildType... children); /** * Adds all child items, which are contained by a specific array, to the * group, which belongs to a specific index. The items will be added in a * consecutive order at a specific index of the group. * * @param groupIndex * The index of the group, the child items should be added to, as * an {@link Integer} value. The index must be between 0 and the * value of the method <code>getGroupCount():int</code>, * otherwise an {@link IndexOutOfBoundsException} will be thrown * @param index * The index, the child items should be added at, as an * {@link Integer} value. 
The index must be between 0 and the * value of the method <code>getChildCount(groupIndex):int</code> * , otherwise an {@link IndexOutOfBoundsException} will be * thrown * @param children * The array, which contains the child items, which should be * added to the group, as an array of the generic type ChildType * or an empty array, if no child items should be added * @return True, if all child items have been added to the group, false * otherwise */ @SuppressWarnings("unchecked") boolean addAllChildren(int groupIndex, int index, ChildType... children); /** * Adds all child items, which are contained by a specific array, to a * specific group. The items will be added in a consecutive order at a * specific index of the group. * * @param group * The group, the child items should be added to, as an instance * of the generic type GroupType. The group may not be null. If * the group does not belong to the adapter, a * {@link NoSuchElementException} will be thrown * @param index * The index, the child items should be added at, as an * {@link Integer} value. The index must be between 0 and the * value of the method <code>getChildCount(group):int</code>, * otherwise an {@link IndexOutOfBoundsException} will be thrown * @param children * The array, which contains the child items, which should be * added to the group, as an array of the generic type ChildType * or an empty array, if no child items should be added * @return True, if all child items have been added to the group, false * otherwise */ @SuppressWarnings("unchecked") boolean addAllChildren(GroupType group, int index, ChildType... children); /** * Replaces the child item, which belongs to a specific index of a specific * group, by an other item. * * @param groupIndex * The index of the group, the child item, which should be * replaced, belongs to, as an {@link Integer} value. 
The index * must be between 0 and the value of the method * <code>getGroupCount():int</code>, otherwise an * {@link IndexOutOfBoundsException} will be thrown * @param index * The index of the item, which should be replaced, as an * {@link Integer} value. The index must be between 0 and the * value of the method <code>getChildCount(groupIndex):int</code> * - 1, otherwise an {@link IndexOutOfBoundsException} will be * thrown * @param child * The child item, which should replace the child item at the * given index, as an instance of the generic type ChildType. The * child item may not be null * @return The child item, which has been replaced, as an instance of the * generic type ChildType. The child item may not be null */ ChildType replaceChild(int groupIndex, int index, ChildType child); /** * Replaces the child item, which belongs to a specific index of a specific * group, by an other item. * * @param group * The group, the child item, which should be replaced, belongs * to, as an instance of the generic type GroupType. The group * may not be null. If the group does not belong to the adapter, * a {@link NoSuchElementException} will be thrown * @param index * The index of the item, which should be replaced, as an * {@link Integer} value. The index must be between 0 and the * value of the method <code>getChildCount(group):int</code> - 1, * otherwise an {@link IndexOutOfBoundsException} will be thrown * @param child * The child item, which should replace the child item at the * given index, as an instance of the generic type ChildType. The * child item may not be null * @return The child item, which has been replaced, as an instance of the * generic type ChildType. The child item may not be null */ ChildType replaceChild(GroupType group, int index, ChildType child); /** * Removes the child item, which belongs to a specific index, from the * group, which belongs to a specific index. The group, the child item * belongs to, will not be removed, even if it becomes empty. 
* * @param groupIndex * The index of the group, the child item should be removed from, * as an {@link Integer} value. The index must be between 0 and * the value of the method <code>getGroupCount():int</code>, * otherwise an {@link IndexOutOfBoundsException} will be thrown * @param index * The index of the child item, which should be removed from the * group, as an {@link Integer} value. The index must be between * 0 and the value of the method * <code>getChildCount(groupIndex):int</code> - 1, otherwise an * {@link IndexOutOfBoundsException} will be thrown * @return The child item, which has been removed, as an instance of the * generic type ChildType. The item may not be null */ ChildType removeChild(int groupIndex, int index); /** * Removes the child item, which belongs to a specific index, from the * group, which belongs to a specific index. * * @param removeEmptyGroup * True, if the group, the child item belongs to, should also be * removed, if it becomes empty, false otherwise * @param groupIndex * The index of the group, the child item should be removed from, * as an {@link Integer} value. The index must be between 0 and * the value of the method <code>getGroupCount():int</code>, * otherwise an {@link IndexOutOfBoundsException} will be thrown * @param index * The index of the child item, which should be removed from the * group, as an {@link Integer} value. The index must be between * 0 and the value of the method * <code>getChildCount(groupIndex):int</code> - 1, otherwise an * {@link IndexOutOfBoundsException} will be thrown * @return The child item, which has been removed, as an instance of the * generic type ChildType. The item may not be null */ ChildType removeChild(boolean removeEmptyGroup, int groupIndex, int index); /** * Removes the child item, which belongs to a specific index, from a * specific group. The group, the child item belongs to, will not be * removed, even if it becomes empty. 
* * @param group * The group, the child item should be removed from, as an * instance of the generic type GroupType. The group may not be * null. If the group does not belong to the adapter, a * {@link NoSuchElementException} will be thrown * @param index * The index of the child item, which should be removed from the * group, as an {@link Integer} value. The index must be between * 0 and the value of the method * <code>getChildCount(group):int</code> - 1, otherwise an * {@link IndexOutOfBoundsException} will be thrown * @return The child item, which has been removed, as an instance of the * generic type ChildType. The item may not be null */ ChildType removeChild(GroupType group, int index); /** * Removes the child item, which belongs to a specific index, from a * specific group. * * @param removeEmptyGroup * True, if the group, the child item belongs to, should also be * removed, if it becomes empty, false otherwise * @param group * The group, the child item should be removed from, as an * instance of the generic type GroupType. The group may not be * null. If the group does not belong to the adapter, a * {@link NoSuchElementException} will be thrown * @param index * The index of the child item, which should be removed from the * group, as an {@link Integer} value. The index must be between * 0 and the value of the method * <code>getChildCount(group):int</code> - 1, otherwise an * {@link IndexOutOfBoundsException} will be thrown * @return The child item, which has been removed, as an instance of the * generic type ChildType. The item may not be null */ ChildType removeChild(boolean removeEmptyGroup, GroupType group, int index); /** * Removes a specific child item from the group, which belongs to a specific * index. The group, the child item belongs to, will not be removed, even if * it becomes empty. * * @param groupIndex * The index of the group, the child item should be removed from, * as an {@link Integer} value. 
The index must be between 0 and * the value of the method <code>getGroupCount():int</code>, * otherwise an {@link IndexOutOfBoundsException} will be thrown * @param child * The child item, which should be removed from the group, as an * instance of the generic type ChildType. The child item may not * be null * @return True, if the child item has been removed from the group, false * otherwise */ boolean removeChild(int groupIndex, ChildType child); /** * Removes a specific child item from the group, which belongs to a specific * index. * * @param removeEmptyGroup * True, if the group, the child item belongs to, should also be * removed, if it becomes empty, false otherwise * @param groupIndex * The index of the group, the child item should be removed from, * as an {@link Integer} value. The index must be between 0 and * the value of the method <code>getGroupCount():int</code>, * otherwise an {@link IndexOutOfBoundsException} will be thrown * @param child * The child item, which should be removed from the group, as an * instance of the generic type ChildType. The child item may not * be null * @return True, if the child item has been removed from the group, false * otherwise */ boolean removeChild(boolean removeEmptyGroup, int groupIndex, ChildType child); /** * Removes a specific child item from a specific group. The group, the child * item belongs to, will not be removed, even if it becomes empty. * * @param group * The group, the child item should be removed from, as an * instance of the generic type GroupType. The group may not be * null. If the group does not belong to the adapter, a * {@link NoSuchElementException} will be thrown * @param child * The child item, which should be removed from the group, as an * instance of the generic type ChildType. 
The child item may not * be null * @return True, if the child item has been removed from the group, false * otherwise */ boolean removeChild(GroupType group, ChildType child); /** * Removes a specific child item from a specific group. * * @param removeEmptyGroup * True, if the group, the child item belongs to, should also be * removed, if it becomes empty, false otherwise * @param group * The group, the child item should be removed from, as an * instance of the generic type GroupType. The group may not be * null. If the group does not belong to the adapter, a * {@link NoSuchElementException} will be thrown * @param child * The child item, which should be removed from the group, as an * instance of the generic type ChildType. The child item may not * be null * @return True, if the child item has been removed from the group, false * otherwise */ boolean removeChild(boolean removeEmptyGroup, GroupType group, ChildType child); /** * Removes all child items, which are contained by a specific collection, * from the adapter. No groups will be removed, even if they become empty. * * @param children * The collection, which contains the child items, which should * be removed from the adapter, as an instance of the type * {@link Collection} or an empty collection, if no child items * should be removed * @return True, if all child items have been removed from the adapter, * false otherwise */ boolean removeAllChildren(Collection<ChildType> children); /** * Removes all child items, which are contained by a specific collection, * from the adapter. 
* * @param removeEmptyGroups * True, if groups, which become empty, should also be removed, * false otherwise * @param children * The collection, which contains the child items, which should * be removed from the adapter, as an instance of the type * {@link Collection} or an empty collection, if no child items * should be removed * @return True, if all child items have been removed from the adapter, * false otherwise */ boolean removeAllChildren(boolean removeEmptyGroups, Collection<ChildType> children); /** * Removes all child items, which are contained by a specific collection, * from the group, which belongs to a specific index. The group, the child * items belong to, will not be removed, even if it becomes empty. * * @param groupIndex * The index of the group, the child items should be removed * from, as an {@link Integer} value. The index must be between 0 * and the value of the method <code>getGroupCount():int</code>, * otherwise an {@link IndexOutOfBoundsException} will be thrown * @param children * The collection, which contains the child items, which should * be removed from the group, as an instance of the type * {@link Collection} or an empty collection, if no child items * should be removed * @return True, if all child items have been removed from the group, false * otherwise */ boolean removeAllChildren(int groupIndex, Collection<ChildType> children); /** * Removes all child items, which are contained by a specific collection, * from the group, which belongs to a specific index. * * @param removeEmptyGroup * True, if the group, the child items belong to, should also be * removed, if it becomes empty, false otherwise * @param groupIndex * The index of the group, the child items should be removed * from, as an {@link Integer} value. 
The index must be between 0 * and the value of the method <code>getGroupCount():int</code>, * otherwise an {@link IndexOutOfBoundsException} will be thrown * @param children * The collection, which contains the child items, which should * be removed from the group, as an instance of the type * {@link Collection} or an empty collection, if no child items * should be removed * @return True, if all child items have been removed from the group, false * otherwise */ boolean removeAllChildren(boolean removeEmptyGroup, int groupIndex, Collection<ChildType> children); /** * Removes all child items, which are contained by a specific collection, * from a specific group. No groups will be removed, even if they become * empty. * * @param group * The group, the child items should be removed from, as an * instance of the generic type GroupType. The group may not be * null. If the group does not belong to the adapter, a * {@link NoSuchElementException} will be thrown * @param children * The collection, which contains the child items, which should * be removed from the group, as an instance of the type * {@link Collection} or an empty collection, if no child items * should be removed * @return True, if all child items have been removed from the group, false * otherwise */ boolean removeAllChildren(GroupType group, Collection<ChildType> children); /** * Removes all child items, which are contained by a specific collection, * from a specific group. * * @param removeEmptyGroup * True, if the group, the child items belong to, should also be * removed, if it becomes empty, false otherwise * @param group * The group, the child items should be removed from, as an * instance of the generic type GroupType. The group may not be * null. 
If the group does not belong to the adapter, a * {@link NoSuchElementException} will be thrown * @param children * The collection, which contains the child items, which should * be removed from the group, as an instance of the type * {@link Collection} or an empty collection, if no child items * should be removed * @return True, if all child items have been removed from the group, false * otherwise */ boolean removeAllChildren(boolean removeEmptyGroup, GroupType group, Collection<ChildType> children); /** * Removes all child items, which are contained by a specific array, from * the adapter. No groups will be removed, even if they become empty. * * @param children * The array, which contains the child items, which should be * removed from the adapter, as an array of the generic type * ChildType or an empty array, if no child items should be * removed * @return True, if all child items have been removed from the adapter, * false otherwise */ @SuppressWarnings("unchecked") boolean removeAllChildren(ChildType... children); /** * Removes all child items, which are contained by a specific array, from * the adapter. * * @param removeEmptyGroups * True, if the groups, which become empty, should also be * removed, false otherwise * @param children * The array, which contains the child items, which should be * removed from the adapter, as an array of the generic type * ChildType or an empty array, if no child items should be * removed * @return True, if all child items have been removed from the adapter, * false otherwise */ @SuppressWarnings("unchecked") boolean removeAllChildren(boolean removeEmptyGroups, ChildType... children); /** * Removes all child items, which are contained by a specific array, from * the group, which belongs to a specific index. The group, the child items * belong to, will not be removed, even if it becomes empty. * * @param groupIndex * The index of the group, the child items should be removed * from, as an {@link Integer} value. 
The index must be between 0 * and the value of the method <code>getGroupCount():int</code>, * otherwise an {@link IndexOutOfBoundsException} will be thrown * @param children * The array, which contains the child items, which should be * removed from the group, as an array of the generic type * ChildType or an empty array, if no child items should be * removed * @return True, if all child items have been removed from the group, false * otherwise */ @SuppressWarnings("unchecked") boolean removeAllChildren(int groupIndex, ChildType... children); /** * Removes all child items, which are contained by a specific array, from * the group, which belongs to a specific index. * * @param removeEmptyGroup * True, if the group, the child items belong to, should also be * removed, if it becomes empty, false otherwise * @param groupIndex * The index of the group, the child items should be removed * from, as an {@link Integer} value. The index must be between 0 * and the value of the method <code>getGroupCount():int</code>, * otherwise an {@link IndexOutOfBoundsException} will be thrown * @param children * The array, which contains the child items, which should be * removed from the group, as an array of the generic type * ChildType or an empty array, if no child items should be * removed * @return True, if all child items have been removed from the group, false * otherwise */ @SuppressWarnings("unchecked") boolean removeAllChildren(boolean removeEmptyGroup, int groupIndex, ChildType... children); /** * Removes all child items, which are contained by a specific array, from a * specific group. The group, the child items belong to, will not be * removed, even if it becomes empty. * * @param group * The group, the child items should be removed from, as an * instance of the generic type GroupType. The group may not be * null. 
If the group does not belong to the adapter, a * {@link NoSuchElementException} will be thrown * @param children * The array, which contains the child items, which should be * removed from the group, as an array of the generic type * ChildType or an empty collection, if no child items should be * removed * @return True, if all child items have been removed from the group, false * otherwise */ @SuppressWarnings("unchecked") boolean removeAllChildren(GroupType group, ChildType... children); /** * Removes all child items, which are contained by a specific array, from a * specific group. * * @param removeEmptyGroup * True, if the group, the child items belong to, should also be * removed, if it becomes empty, false otherwise * @param group * The group, the child items should be removed from, as an * instance of the generic type GroupType. The group may not be * null. If the group does not belong to the adapter, a * {@link NoSuchElementException} will be thrown * @param children * The array, which contains the child items, which should be * removed from the group, as an array of the generic type * ChildType or an empty collection, if no child items should be * removed * @return True, if all child items have been removed from the group, false * otherwise */ @SuppressWarnings("unchecked") boolean removeAllChildren(boolean removeEmptyGroup, GroupType group, ChildType... children); /** * Removes all child items from the adapter, except of the items, which are * contained by a specific collection. No groups will be removed, even if * they become empty. * * @param children * The collection, which contains the child items, which should * be retained, as an instance of the type {@link Collection} or * an empty collection, if no child items should be retained */ void retainAllChildren(Collection<ChildType> children); /** * Removes all child items from the adapter, except of the items, which are * contained by a specific collection. 
* * @param removeEmptyGroups * True, if groups, which become empty, should be removed, false * otherwise * @param children * The collection, which contains the child items, which should * be retained, as an instance of the type {@link Collection} or * an empty collection, if no child items should be retained */ void retainAllChildren(boolean removeEmptyGroups, Collection<ChildType> children); /** * Removes all child items from the group, which belongs to a specific * index, except of the items, which are contained by a specific collection. * The group, the child items belong to, will not be removed, even if it * becomes empty. * * @param groupIndex * The index of the group, the child items should be removed * from, as an {@link Integer} value. The index must be between 0 * and the value of the method <code>getGroupCount():int</code>, * otherwise an {@link IndexOutOfBoundsException} will be thrown * @param children * The collection, which contains the child items, which should * be retained, as an instance of the type {@link Collection} or * an empty collection, if no child items should be retained */ void retainAllChildren(int groupIndex, Collection<ChildType> children); /** * Removes all child items from the group, which belongs to a specific * index, except of the items, which are contained by a specific collection. * * @param removeEmptyGroup * True, if the group, the child items belong to, should be * removed, if it becomes empty, false otherwise * @param groupIndex * The index of the group, the child items should be removed * from, as an {@link Integer} value. 
The index must be between 0 * and the value of the method <code>getGroupCount():int</code>, * otherwise an {@link IndexOutOfBoundsException} will be thrown * @param children * The collection, which contains the child items, which should * be retained, as an instance of the type {@link Collection} or * an empty collection, if no child items should be retained */ void retainAllChildren(boolean removeEmptyGroup, int groupIndex, Collection<ChildType> children); /** * Removes all child items from a specific group, except of the items, which * are contained by a specific collection. The group, the child items belong * to, will not be removed, even if it becomes empty. * * @param group * The group, the child items should be removed from, as an * instance of the generic type GroupType. The group may not be * null. If the group does not belong to the adapter, a * {@link NoSuchElementException} will be thrown * @param children * The collection, which contains the child items, which should * be retained, as an instance of the type {@link Collection} or * an empty collection, if no child items should be retained */ void retainAllChildren(GroupType group, Collection<ChildType> children); /** * Removes all child items from a specific group, except of the items, which * are contained by a specific collection. * * @param removeEmptyGroup * True, if the group, the child items belong to, should be * removed, if it becomes empty, false otherwise * @param group * The group, the child items should be removed from, as an * instance of the generic type GroupType. The group may not be * null. 
If the group does not belong to the adapter, a * {@link NoSuchElementException} will be thrown * @param children * The collection, which contains the child items, which should * be retained, as an instance of the type {@link Collection} or * an empty collection, if no child items should be retained */ void retainAllChildren(boolean removeEmptyGroup, GroupType group, Collection<ChildType> children); /** * Removes all child items from the adapter, except of the items, which are * contained by a specific array. No groups will be removed, even if they * become empty. * * @param children * The array, which contains the child items, which should be * retained, as an array of the generic type ChildType or an * empty array, if no child items should be retained */ @SuppressWarnings("unchecked") void retainAllChildren(ChildType... children); /** * Removes all child items from the adapter, except of the items, which are * contained by a specific array. * * @param removeEmptyGroups * True, if groups, which become empty, should also be removed, * false otherwise * @param children * The array, which contains the child items, which should be * retained, as an array of the generic type ChildType or an * empty array, if no child items should be retained */ @SuppressWarnings("unchecked") void retainAllChildren(boolean removeEmptyGroups, ChildType... children); /** * Removes all child items from the group, which belongs to a specific * index, except of the items, which are contained by a specific array. The * group, the child items belong to, will not be removed, even if it becomes * empty. * * @param groupIndex * The index of the group, the child items should be removed * from, as an {@link Integer} value. 
The index must be between 0 * and the value of the method <code>getGroupCount():int</code>, * otherwise an {@link IndexOutOfBoundsException} will be thrown * @param children * The array, which contains the child items, which should be * retained, as an array of the generic type ChildType or an * empty collection, if no child items should be retained */ @SuppressWarnings("unchecked") void retainAllChildren(int groupIndex, ChildType... children); /** * Removes all child items from the group, which belongs to a specific * index, except of the items, which are contained by a specific array. * * @param removeEmptyGroup * True, if the group, the child items belong to, should be * removed, if it becomes empty, false otherwise * @param groupIndex * The index of the group, the child items should be removed * from, as an {@link Integer} value. The index must be between 0 * and the value of the method <code>getGroupCount():int</code>, * otherwise an {@link IndexOutOfBoundsException} will be thrown * @param children * The array, which contains the child items, which should be * retained, as an array of the generic type ChildType or an * empty collection, if no child items should be retained */ @SuppressWarnings("unchecked") void retainAllChildren(boolean removeEmptyGroup, int groupIndex, ChildType... children); /** * Removes all child items from a specific group, except of the items, which * are contained by a specific array. The group, the child items belong to, * will not be removed, even if it becomes empty. * * @param group * The group, the child items should be removed from, as an * instance of the generic type GroupType. The group may not be * null. 
If the group does not belong to the adapter, a * {@link NoSuchElementException} will be thrown * @param children * The array, which contains the child items, which should be * retained, as an array of the generic type ChildType or an * empty collection, if no child items should be retained */ @SuppressWarnings("unchecked") void retainAllChildren(GroupType group, ChildType... children); /** * Removes all child items from a specific group, except of the items, which * are contained by a specific array. * * @param removeEmptyGroup * True, if the group, the child items belong to, should be * removed, if it becomes empty, false otherwise * @param group * The group, the child items should be removed from, as an * instance of the generic type GroupType. The group may not be * null. If the group does not belong to the adapter, a * {@link NoSuchElementException} will be thrown * @param children * The array, which contains the child items, which should be * retained, as an array of the generic type ChildType or an * empty collection, if no child items should be retained */ @SuppressWarnings("unchecked") void retainAllChildren(boolean removeEmptyGroup, GroupType group, ChildType... children); /** * Removes all child items from the adapter. */ void clearChildren(); /** * Removes all child items from the adapter. * * @param removeEmptyGroups * True, if groups, which become empty, should also be removed, * false otherwise */ void clearChildren(boolean removeEmptyGroups); /** * Removes all child items from the group, which belongs to a specific * index. * * @param groupIndex * The index of the group, whose child item should be removed, as * an {@link Integer} value. The index must be between 0 and the * value of the method <code>getGroupCount():int</code> - 1, * otherwise an {@link IndexOutOfBoundsException} will be thrown */ void clearChildren(int groupIndex); /** * Removes all child items from the group, which belongs to a specific * index. 
* * @param removeEmptyGroup * True, if the group, the child items belong to, should also be * removed, if it becomes empty, false otherwise * @param groupIndex * The index of the group, whose child item should be removed, as * an {@link Integer} value. The index must be between 0 and the * value of the method <code>getGroupCount():int</code> - 1, * otherwise an {@link IndexOutOfBoundsException} will be thrown */ void clearChildren(boolean removeEmptyGroup, int groupIndex); /** * Removes all child items from a specific group. * * @param group * The group, whose child items should be removed, as an instance * of the generic type GroupType. The group may not be null. If * the group does not belong to the adapter, a * {@link NoSuchElementException} will be thrown */ void clearChildren(GroupType group); /** * Removes all child items from a specific group. * * @param removeEmptyGroup * True, if the group, the child items belong to, should also be * removed, if it becomes empty, false otherwise * @param group * The group, whose child items should be removed, as an instance * of the generic type GroupType. The group may not be null. If * the group does not belong to the adapter, a * {@link NoSuchElementException} will be thrown */ void clearChildren(boolean removeEmptyGroup, GroupType group); /** * Returns an iterator, which allows to iterate the adapter's child items. * * @return An iterator, which allows to iterate the adapter's child items, * as an instance of the type {@link Iterator}. The iterator may not * be null */ Iterator<ChildType> childIterator(); /** * Returns an iterator, which allows to iterate the child items of the * group, which belongs to a specific index. * * @param groupIndex * The index of the group, whose child items should be iterated, * as an {@link Integer} value. 
The index must be between 0 and * the value of the method <code>getGroupCount():int</code> - 1, * otherwise an {@link IndexOutOfBoundsException} will be thrown * @return An iterator, which allows to iterate the group's items, as an * instance of the type {@link Iterator}. The iterator may not be * null */ Iterator<ChildType> childIterator(int groupIndex); /** * Returns an iterator, which allows to iterate the child items of a * specific group. * * @param group * The group, whose child items should be iterated, as an * instance of the generic type GroupType. The group may not be * null. If the group does not belong to the adapter, a * {@link NoSuchElementException} will be thrown * @return An iterator, which allows to iterate the group's items, as an * instance of the type {@link Iterator}. The iterator may not be * null */ Iterator<ChildType> childIterator(GroupType group); /** * Returns a list iterator, which allow to iterate the child items of the * group, which belongs to a specific index. * * @param groupIndex * The index of the group, whose child items should be iterated, * as an {@link Integer} value. The index must be between 0 and * the value of the method <code>getGroupCount():int</code> - 1, * otherwise an {@link IndexOutOfBoundsException} will be thrown * @return A list iterator, which allows to iterate the group's items, as an * instance of the type {@link ListIterator}. The iterator may not * be null */ ListIterator<ChildType> childListIterator(int groupIndex); /** * Returns a list iterator, which allow to iterate the child items of a * specific group. * * @param group * The group, whose child items should be iterated, as an * instance of the generic type GroupType. The group may not be * null. If the group does not belong to the adapter, a * {@link NoSuchElementException} will be thrown * @return A list iterator, which allows to iterate the group's items, as an * instance of the type {@link ListIterator}. 
The iterator may not
 * be null
 */
ListIterator<ChildType> childListIterator(GroupType group);

/**
 * Returns a list iterator, which allows to iterate the child items of the
 * group, which belongs to a specific index, starting at a specific index.
 *
 * @param groupIndex
 * The index of the group, whose child items should be iterated,
 * as an {@link Integer} value. The index must be between 0 and
 * the value of the method <code>getGroupCount():int</code> - 1,
 * otherwise an {@link IndexOutOfBoundsException} will be thrown
 * @param index
 * The index, the iterator should start at, as an {@link Integer}
 * value. The index must be between 0 and the value of the method
 * <code>getChildCount(groupIndex):int</code> - 1, otherwise an
 * {@link IndexOutOfBoundsException} will be thrown
 * @return A list iterator, which allows to iterate the group's items, as an
 * instance of the type {@link ListIterator}. The iterator may not
 * be null
 */
ListIterator<ChildType> childListIterator(int groupIndex, int index);

/**
 * Returns a list iterator, which allows to iterate the child items of a
 * specific group, starting at a specific index.
 *
 * @param group
 * The group, whose child items should be iterated, as an
 * instance of the generic type GroupType. The group may not be
 * null. If the group does not belong to the adapter, a
 * {@link NoSuchElementException} will be thrown
 * @param index
 * The index, the iterator should start at, as an {@link Integer}
 * value. The index must be between 0 and the value of the method
 * <code>getChildCount(group):int</code> - 1, otherwise an
 * {@link IndexOutOfBoundsException} will be thrown
 * @return A list iterator, which allows to iterate the group's items, as an
 * instance of the type {@link ListIterator}. The iterator may not
 * be null
 */
ListIterator<ChildType> childListIterator(GroupType group, int index);

/**
 * Returns a collection, which contains the child items of the group, which
 * belongs to a specific index, between a specific start and end index.
*
 * @param groupIndex
 * The index of the group, whose child items should be returned,
 * as an {@link Integer} value. The index must be between 0 and
 * the value of the method <code>getGroupCount():int</code> - 1,
 * otherwise an {@link IndexOutOfBoundsException} will be thrown
 * @param start
 * The start index of the child items, which should be returned,
 * as an {@link Integer} value. The child item, which belongs to
 * the start index will be included. The index must be between 0
 * and the value of the method
 * <code>getChildCount(groupIndex):int</code> - 1, otherwise an
 * {@link IndexOutOfBoundsException} will be thrown
 * @param end
 * The end index of the child items, which should be returned, as
 * an {@link Integer} value. The child item, which belongs to the
 * end index, will be excluded. The index must be between 0 and
 * the value of the method
 * <code>getChildCount(groupIndex):int</code> - 1 and it must be
 * greater than the start index, otherwise an
 * {@link IndexOutOfBoundsException} will be thrown
 * @return A collection, which contains the group's child items, between a
 * specific start and end index, as an instance of the type
 * {@link Collection} or an empty collection, if the group does not
 * contain any child items
 */
Collection<ChildType> subListChildren(int groupIndex, int start, int end);

/**
 * Returns a collection, which contains the child items of a specific group
 * between a specific start and end index.
 *
 * @param group
 * The group, whose child items should be returned, as an
 * instance of the generic type GroupType. The group may not be
 * null. If the group does not belong to the adapter, a
 * {@link NoSuchElementException} will be thrown
 * @param start
 * The start index of the child items, which should be returned,
 * as an {@link Integer} value. The child item, which belongs to
 * the start index will be included.
The index must be between 0
 * and the value of the method
 * <code>getChildCount(group):int</code> - 1, otherwise an
 * {@link IndexOutOfBoundsException} will be thrown
 * @param end
 * The end index of the child items, which should be returned, as
 * an {@link Integer} value. The child item, which belongs to the
 * end index, will be excluded. The index must be between 0 and
 * the value of the method <code>getChildCount(group):int</code>
 * - 1 and it must be greater than the start index, otherwise an
 * {@link IndexOutOfBoundsException} will be thrown
 * @return A collection, which contains the group's child items, between a
 * specific start and end index, as an instance of the type
 * {@link Collection} or an empty collection, if the group does not
 * contain any child items
 */
Collection<ChildType> subListChildren(GroupType group, int start, int end);

/**
 * Returns an array, which contains all of the adapter's child items.
 *
 * @return An array, which contains all of the adapter's child items, as an
 * {@link Object} array or an empty array, if the adapter does not
 * contain any child items
 */
Object[] childrenToArray();

/**
 * Returns an array, which contains the child items of the group, which
 * belongs to a specific index.
 *
 * @param groupIndex
 * The index of the group, whose child items should be returned,
 * as an {@link Integer} value. The index must be between 0 and
 * the value of the method <code>getGroupCount():int</code> - 1,
 * otherwise an {@link IndexOutOfBoundsException} will be thrown
 * @return An array, which contains the group's child items, as an
 * {@link Object} array or an empty array, if the group does not
 * contain any child items
 */
Object[] childrenToArray(int groupIndex);

/**
 * Returns an array, which contains the child items of a specific group.
 *
 * @param group
 * The group, whose child items should be returned, as an
 * instance of the generic type GroupType. The group may not be
 * null.
If the group does not belong to the adapter, a * {@link NoSuchElementException} will be thrown * @return An array, which contains the group's child items, as an * {@link Object} array or an empty array, if the group does not * contain any child items */ Object[] childrenToArray(GroupType group); /** * Returns an array, which contains all of the adapter's child items. If the * given array is large enough to hold the items, the specified array is * used, otherwise an array of the same type is created. If the given array * can hold more items, the array's elements, following the group's child * items, are set to null. * * @param <T> * The type of the array, which should be returned * @param array * The array, which should be used, if it is large enough, as an * array of the generic type T. The array may not be null * @return An array, which contains all of adapter's child items, as an * array of the generic type T or an empty array, if the adapter * does not contain any child items */ <T> T[] childrenToArray(T[] array); /** * Returns an array, which contains all child items of the group, which * belongs to a specific index. If the given array is large enough to hold * the items, the specified array is used, otherwise an array of the same * type is created. If the given array can hold more items, the array's * elements, following the group's child items, are set to null. * * @param <T> * The type of the array, which should be returned * @param groupIndex * The index of the group, whose child items should be returned, * as an {@link Integer} value. The index must be between 0 and * the value of the method <code>getGroupCount():int</code> - 1, * otherwise an {@link IndexOutOfBoundsException} will be thrown * @param array * The array, which should be used, if it is large enough, as an * array of the generic type T. 
The array may not be null * @return An array, which contains all of the group's child items, as an * array of the generic type T or an empty array, if the group does * not contain any child items */ <T> T[] childrenToArray(int groupIndex, T[] array); /** * Returns an array, which contains all child items of a specific group. If * the given array is large enough to hold the items, the specified array is * used, otherwise an array of the same type is created. If the given array * can hold more items, the array's elements, following the group's child * items, are set to null. * * @param <T> * The type of the array, which should be returned * @param group * The group, whose child items should be returned, as an * instance of the generic type GroupType. The group may not be * null. If the group does not belong to the adapter, a * {@link NoSuchElementException} will be thrown * @param array * The array, which should be used, if it is large enough, as an * array of the generic type T. The array may not be null * @return An array, which contains all of the group's child items, as an * array of the generic type T or an empty array, if the group does * not contain any child items */ <T> T[] childrenToArray(GroupType group, T[] array); /** * Returns the child item, which belongs to a specific index of the group, * which belongs to a specific index. * * @param groupIndex * The index of the group, which contains the child item, which * should be returned, as an {@link Integer} value. The value * must be between 0 and the value of the method * <code>getGroupCount():int</code> - 1, otherwise an * {@link IndexOutOfBoundsException} will be thrown * @param index * The index of the child item, which should be returned, as an * {@link Integer} value. 
The index must be between 0 and the * value of the method <code>getChildCount(groupIndex):int</code> * - 1, otherwise an {@link IndexOutOfBoundsException} will be * thrown * @return The child item, which belongs to the given index, as an instance * of the generic type ChildType. The child item may not be null */ ChildType getChild(int groupIndex, int index); /** * Returns the child item, which belongs to a specific index of a specific * group. * * @param group * The group, which contains the child item, which should be * returned, as an instance of the generic type GroupType. The * group may not be null. If the group does not belong to the * adapter, a {@link NoSuchElementException} will be thrown * @param index * The index of the child item, which should be returned, as an * {@link Integer} value. The index must be between 0 and the * value of the method <code>getChildCount(group):int</code> - 1, * otherwise an {@link IndexOutOfBoundsException} will be thrown * @return The child item, which belongs to the given index, as an instance * of the generic type ChildType. The child item may not be null */ ChildType getChild(GroupType group, int index); /** * Returns the index of the group, a specific child item belongs to. * * @param child * The child item, whose group index should be returned, as an * instance of the generic type ChildType. The child item may not * be null * @return The index of the group, the given child item belongs to, as an * {@link Integer} value or -1, if the adapter does not contain the * given child item */ int indexOfChild(ChildType child); /** * Returns the index of a specific child item within the group, which * belongs to a specific index. * * @param groupIndex * The index of the group, the child item, whose index should be * returned, belongs to, as an {@link Integer} value. 
The value * must be between 0 and the value of the method * <code>getGroupCount():int</code> - 1, otherwise an * {@link IndexOutOfBoundsException} will be thrown * @param child * The child item, whose index should be returned, as an instance * of the generic type ChildType. The child item may not be null * @return The index of the given child item, as an {@link Integer} value or * -1, if the group does not contain the given child item */ int indexOfChild(int groupIndex, ChildType child); /** * Returns the index of a specific child item within a specific group. * * @param group * The group, the child item, whose index should be returned, * belongs to, as an instance of the generic type GroupType. The * group may not be null. If the group does not belong to the * adapter, a {@link NoSuchElementException} will be thrown * @param child * The child item, whose index should be returned, as an instance * of the generic type ChildType. The child item may not be null * @return The index of the given child item, as an {@link Integer} value or * -1, if the group does not contain the given child item */ int indexOfChild(GroupType group, ChildType child); /** * Returns the last index of the group, a specific child item belongs to. * * @param child * The child item, whose last group index should be returned, as * an instance of the generic type ChildType. The child item may * not be null * @return The last index of the group, the given child item belongs to, as * an {@link Integer} value or -1, if the adapter does not contain * the given child item */ int lastIndexOfChild(ChildType child); /** * Returns the last index of a specific child item within the group, which * belongs to a specific index. * * @param groupIndex * The index of the group, the child item, whose last index * should be returned, belongs to, as an {@link Integer} value. 
* The index must be between 0 and the value of the method * <code>getGroupCount():int</code> - 1, otherwise an * {@link IndexOutOfBoundsException} will be thrown * @param child * The child item, whose last index should be returned, as an * instance of the generic type ChildType. The child item may not * be null * @return The last index of the given child item, as an {@link Integer} * value or -1, if the group does not contain the given child item */ int lastIndexOfChild(int groupIndex, ChildType child); /** * Returns the last index of a specific child item within a specific group. * * @param group * The group, the child item, whose last index should be * returned, belongs to, as an instance of the generic type * GroupType. The group may not be null. If the group does not * belong to the adapter, a {@link NoSuchElementException} will * be thrown * @param child * The child item, whose last index should be returned, as an * instance of the generic type ChildType. The child item may not * be null * @return The last index of the given child item, as an {@link Integer} * value or -1, if the group does not contain the given child item */ int lastIndexOfChild(GroupType group, ChildType child); /** * Returns, whether the adapter contains a specific child item, or not. * * @param child * The child item, whose presence should be checked, as an * instance of the generic type ChildType. The child item may not * be null * @return True, if the adapter contains the given child item, false * otherwise */ boolean containsChild(ChildType child); /** * Returns, whether the group, which belongs to a specific index, contains a * specific child item, or not. * * @param groupIndex * The index of the group, whose child items should be checked, * as an {@link Integer} value. 
The index must be between 0 and * the value of the method <code>getGroupCount():int</code> -1, * otherwise an {@link IndexOutOfBoundsException} will be thrown * @param child * The child item, whose presence should be checked, as an * instance of the generic type ChildType. The child item may not * be null * @return True, if the group contains the given child item, false otherwise */ boolean containsChild(int groupIndex, ChildType child); /** * Returns, whether a specific group contains a specific child item, or not. * * @param group * The group, whose child items should be checked, as an instance * of the generic type GroupType. The group may not be null. If * the group does not belong to the adapter, a * {@link NoSuchElementException} will be thrown * @param child * The child item, whose presence should be checked, as an * instance of the generic type ChildType. The child item may not * be null * @return True, if the group contains the given child item, false otherwise */ boolean containsChild(GroupType group, ChildType child); /** * Returns, whether the adapter contains all child items, which are * contained by a specific collection, or not. * * @param children * The collection, which contains the child items, whose presence * should be checked, as an instance of the type * {@link Collection}. The collection may not be null * @return True, if the adapter contains all child items, which are * contained by the given collection, false otherwise */ boolean containsAllChildren(Collection<ChildType> children); /** * Returns, whether the group, which belongs to a specific index, contains * all child items, which are contained by a specific collection, or not. * * @param groupIndex * The index of the group, whose child items should be checked, * as an {@link Integer} value. 
The index must be between 0 and * the value of the method <code>getGroupCount():int</code> -1, * otherwise an {@link IndexOutOfBoundsException} will be thrown * @param children * The collection, which contains the child items, whose presence * should be checked, as an instance of the type * {@link Collection}. The collection may not be null * @return True, if the group contains all child items, which are contained * by the given collection, false otherwise */ boolean containsAllChildren(int groupIndex, Collection<ChildType> children); /** * Returns, whether a specific group contains all child items, which are * contained by a specific collection, or not. * * @param group * The group, whose child items should be checked, as an instance * of the generic type GroupType. The group may not be null. If * the group does not belong to the adapter, a * {@link NoSuchElementException} will be thrown * @param children * The collection, which contains the child items, whose presence * should be checked, as an instance of the type * {@link Collection}. The collection may not be null * @return True, if the group contains all child items, which are contained * by the given collection, false otherwise */ boolean containsAllChildren(GroupType group, Collection<ChildType> children); /** * Returns, whether the adapter contains all child items, which are * contained by a specific array, or not. * * @param children * The array, which contains the child items, whose presence * should be checked, as an array of the generic type ChildType. * The array may not be null * @return True, if the adapter contains all child items, which are * contained by the given array, false otherwise */ @SuppressWarnings("unchecked") boolean containsAllChildren(ChildType... children); /** * Returns the total number of child items, which are contained by the * adapter. 
* * @return The total number of child items, which are contained by the * adapter, as an {@link Integer} value */ int getChildCount(); /** * Returns the number of child items, which are contained by the group, * which belongs to a specific index. * * @param groupIndex * The index of the group, whose child items should be counted, * as an {@link Integer} value. The index must be between 0 and * the value of the method <code>getGroupCount():int</code> - 1, * otherwise an {@link IndexOutOfBoundsException} will be thrown * @return The number of child items, which are contained by the group, as * an {@link Integer} value */ int getChildCount(int groupIndex); /** * Returns the number of child items, which are contained by a specific * group. * * @param group * The group, whose child items should be counted, as an instance * of the generic type GroupType. The group may not be null. If * the group does not belong to the adapter, a * {@link NoSuchElementException} will be thrown * @return The number of child items, which are contained by the group, as * an {@link Integer} value */ int getChildCount(GroupType group); /** * Returns a collection, which contains all of the adapter's child items. * * @return A collection, which contains all of the adapter's child items, as * an instance of the type {@link Collection} or an empty * collection, if the adapter does not contain any child items */ Collection<ChildType> getAllChildren(); /** * Returns a collection, which contains all child items of the group, which * belongs to a specific index. * * @param groupIndex * The index of the group, whose child items should be returned, * as an {@link Integer} value. 
The index must be between 0 and * the value of the method <code>getGroupCount():int</code> - 1, * otherwise an {@link IndexOutOfBoundsException} will be thrown * @return A collection, which contains all child items of the group, as an * instance of the type {@link Collection} or an empty collection, * if the group does not contain any child items */ Collection<ChildType> getAllChildren(int groupIndex); /** * Returns a collection, which contains all child items of a specific group. * * @param group * The group, whose child items should be returned, as an * instance of the generic type GroupType. The group may not be * null. If the group does not belong to the adapter, a * {@link NoSuchElementException} will be thrown * @return A collection, which contains all child items of the group, as an * instance of the type {@link Collection} or an empty collection, * if the group does not contain any child items */ Collection<ChildType> getAllChildren(GroupType group); /** * Returns, whether the group, which belongs to a specific index, contains * all child items, which are contained by a specific array, or not. * * @param groupIndex * The index of the group, whose child items should be checked, * as an {@link Integer} value. The index must be between 0 and * the value of the method <code>getGroupCount():int</code> -1, * otherwise an {@link IndexOutOfBoundsException} will be thrown * @param children * The array, which contains the child items, whose presence * should be checked, as an array of the generic type ChildType. * The array may not be null * @return True, if the group contains all child items, which are contained * by the given array, false otherwise */ @SuppressWarnings("unchecked") boolean containsAllChildren(int groupIndex, ChildType... children); /** * Returns, whether a specific group contains all child items, which are * contained by a specific array, or not. 
* * @param group * The group, whose child items should be checked, as an instance * of the generic type GroupType. The group may not be null. If * the group does not belong to the adapter, a * {@link NoSuchElementException} will be thrown * @param children * The array, which contains the child items, whose presence * should be checked, as an array of the generic type ChildType. * The array may not be null * @return True, if the group contains all child items, which are contained * by the given array, false otherwise */ @SuppressWarnings("unchecked") boolean containsAllChildren(GroupType group, ChildType... children); /** * Returns, whether the group, which belongs to a specific index, is * currently expanded, or not. * * @param index * The index of the group, whose expansion should be checked, as * an {@link Integer} value. The index must be between 0 and the * value of the method <code>getCount():int</code> - 1, otherwise * an {@link IndexOutOfBoundsException} will be thrown * @return True, if the group, which belongs to the given index, is * currently expanded, false if the group is collapsed or if the * adapter is not attached to a view */ boolean isGroupExpanded(int index); /** * Returns, whether a specific group is currently expanded, or not. * * @param group * The group, whose expansion should be checked, as an instance * of the generic type GroupType. The group may not be null. If * the group does not belong to the adapter, a * {@link NoSuchElementException} will be thrown * @return True, if the given group is currently expanded, false if the * group is collapsed or if the adapter is not attached to a view */ boolean isGroupExpanded(GroupType group); /** * Returns, whether the group, which belongs to a specific index, is * currently collapsed, or not. * * @param index * The index of the group, whose expansion should be checked, as * an {@link Integer} value. 
The index must be between 0 and the * value of the method <code>getGroupCount():int</code> - 1, * otherwise an {@link IndexOutOfBoundsException} will be thrown * @return True, if the group, which belongs to the given index, is * currently collapsed, false if the group is expanded or if the * adapter is not attached to a view */ boolean isGroupCollapsed(int index); /** * Returns, whether a specific group is currently collapsed, or not. * * @param group * The group, whose expansion should be checked, as an instance * of the generic type GroupType. The group may not be null. If * the group does not belong to the adapter, a * {@link NoSuchElementException} will be thrown * @return True, if the given group is currently collapsed, false if the * group is expanded or if the adapter is not attached to a view */ boolean isGroupCollapsed(GroupType group); /** * Returns the first expanded group. * * @return The first expanded group, as an instance of the generic type * GroupType or null, if no group is currently expanded or if the * adapter is not attached to a view */ GroupType getFirstExpandedGroup(); /** * Returns the index of the first expanded group. * * @return The index of the first expanded group, as an {@link Integer} * value or -1, if no group is currently expanded or if the adapter * is not attached to a view */ int getFirstExpandedGroupIndex(); /** * Returns the last expanded group. * * @return The last expanded group, as an instance of the generic type * GroupType or null, if no group is currently expanded or if the * adapter is not attached to a view */ GroupType getLastExpandedGroup(); /** * Returns the index of the last expanded group. * * @return The index of the last expanded group, as an {@link Integer} value * or -1, if no group is currently expanded or if the adapter is not * attached to a view */ int getLastExpandedGroupIndex(); /** * Returns the first collapsed group. 
* * @return The first collapsed group, as an instance of the generic type * GroupType or null, if no group is currently expanded or if the * adapter is not attached to a view */ GroupType getFirstCollapsedGroup(); /** * Returns the index of the first collapsed group. * * @return The index of the first collapsed group, as an {@link Integer} * value or -1, if no group is currently expanded or if the adapter * is not attached to a view */ int getFirstCollapsedGroupIndex(); /** * Returns the last expanded group. * * @return The last expanded group, as an instance of the generic type * GroupType or null, if no group is currently expanded or if the * adapter is not attached to a view */ GroupType getLastCollapsedGroup(); /** * Returns the index of the last collapsed group. * * @return The index of the last collapsed group, as an {@link Integer} * value or -1, if no group is currently expanded or if the adapter * is not attached to a view */ int getLastCollapsedGroupIndex(); /** * Returns a collection, which contains all currently expanded groups. * * @return A collection, which contains all currently expanded groups, as an * instance of the type {@link Collection} or an empty collection, * if no group is currently expanded or if the adapter is not * attached to a view */ Collection<GroupType> getExpandedGroups(); /** * Returns a collection, which contains the indices of all expanded groups. * * @return A collection, which contains the indices of all currently * expanded groups, as an instance of the type {@link Collection} or * an empty collection, if no group is currently expanded or if the * adapter is not attached to a view */ Collection<Integer> getExpandedGroupIndices(); /** * Returns a collection, which contains all currently collapsed groups. 
* * @return A collection, which contains all currently collapsed groups, as * an instance of the type {@link Collection} or an empty * collection, if no group is currently collapsed or if the adapter * is not attached to a view */ Collection<GroupType> getCollapsedGroups(); /** * Returns a collection, which contains the indices of all collapsed groups. * * @return A collection, which contains the indices of all currently * collapsed groups, as an instance of the type {@link Collection} * or an empty collection, if no group is currently collapsed or if * the adapter is not attached to a view */ Collection<Integer> getCollapsedGroupIndices(); /** * Returns the number of currently expanded groups. * * @return The number of currently expanded groups as an {@link Integer} * value or 0 if the adapter is not attached to a view */ int getExpandedGroupCount(); /** * Returns the number of the currently collapsed groups. * * @return The number of currently collapsed groups as an {@link Integer} * value or 0 if the adapter is not attached to a view */ int getCollapsedGroupCount(); /** * Expands a specific group. * * @param group * The group, which should be expanded, as an instance of the * generic type GroupType. The group may not be null. If the * group does not belong to the adapter, a * {@link NoSuchElementException} will be thrown */ void expandGroup(GroupType group); /** * Expands the group, which belongs to a specific index. * * @param index * The index of the group, which should be expanded, as an * {@link Integer} value. The index must be between 0 and the * value of the method <code>getGroupCount():int</code> - 1, * otherwise an {@link IndexOutOfBoundsException} will be thrown */ void expandGroup(int index); /** * Collapses a specific group. * * @param group * The group, which should be collapsed, as an instance of the * generic type GroupType. The group may not be null. 
If the * group does not belong to the adapter, a * {@link NoSuchElementException} will be thrown */ void collapseGroup(GroupType group); /** * Collapses the group, which belongs to a specific index. * * @param index * The index of the group, which should be collapsed, as an * {@link Integer} value. The index must be between 0 and the * value of the method <code>getGroupCount():int</code> - 1, * otherwise an {@link IndexOutOfBoundsException} will be thrown */ void collapseGroup(int index); /** * Triggers the expansion of a specific group. This causes the group to * become expanded, if it is currently collapsed and vice versa. * * @param group * The group, whose expansion should be triggered, as an instance * of the generic type GroupType. The group may not be null. If * the group does not belong to the adapter, a * {@link NoSuchElementException} will be thrown * @return True, if the group has been expanded, false, if the item has been * collapsed or if the adapter is not attached to a view */ boolean triggerGroupExpansion(GroupType group); /** * Triggers the expansion of the group, which belongs to a specific index. * This causes the group to become expanded, if it is currently collapsed * and vice versa. * * @param index * The index of the group, whose expansion should be triggered, * as an {@link Integer} value. The index must be between 0 and * the value of the method <code>getGroupCount():int</code> - 1, * otherwise an {@link IndexOutOfBoundsException} will be thrown * @return True, if the group has been expanded, false, if the item has been * collapsed or if the adapter is not attached to a view */ boolean triggerGroupExpansion(int index); /** * Expands all groups. */ void expandAllGroups(); /** * Collapses all groups. */ void collapseAllGroups(); /** * Triggers the expansion of all groups. This causes a group to become * expanded, if it is currently collapsed and vice versa. 
*/
void triggerAllGroupExpansions();

/**
 * Returns, whether a group is expanded, when it is clicked by the user, or
 * not.
 *
 * @return True, if a group is expanded, when it is clicked by the user,
 *         false otherwise
 */
boolean isGroupExpandedOnClick();

/**
 * Sets, whether a group should be expanded, when it is clicked by the user,
 * or not.
 *
 * @param expandGroupOnClick
 *            True, if a group should be expanded, when it is clicked by the
 *            user, false otherwise
 */
void expandGroupOnClick(boolean expandGroupOnClick);

/**
 * Adds a new listener, which should be notified, when the adapter's
 * underlying data has been modified.
 *
 * @param listener
 *            The listener, which should be added, as an instance of the
 *            type {@link ExpandableListAdapterListener}. The listener may
 *            not be null
 */
void addAdapterListener(ExpandableListAdapterListener<GroupType, ChildType> listener);

/**
 * Removes a specific listener, which should not be notified, when the
 * adapter's underlying data has been modified, anymore.
 *
 * @param listener
 *            The listener, which should be removed, as an instance of the
 *            type {@link ExpandableListAdapterListener}. The listener may
 *            not be null
 */
void removeAdapterListener(ExpandableListAdapterListener<GroupType, ChildType> listener);

/**
 * Adds a specific listener, which should be notified, when a group item has
 * been expanded or collapsed.
 *
 * @param listener
 *            The listener, which should be added, as an instance of the
 *            class {@link ExpansionListener}. The listener may not be null
 */
void addExpansionListener(ExpansionListener<GroupType, ChildType> listener);

/**
 * Removes a specific listener, which should not be notified, when a group
 * item has been expanded or collapsed, anymore.
 *
 * @param listener
 *            The listener, which should be removed, as an instance of the
 *            class {@link ExpansionListener}. The listener may not be null
 */
void removeExpansionListener(ExpansionListener<GroupType, ChildType> listener);

@Override
ExpandableListAdapter<GroupType, ChildType> clone() throws CloneNotSupportedException;

}
package foam.nanos.auth;

import foam.core.FObject;
import foam.core.X;
import foam.dao.DAO;
import foam.dao.ProxyDAO;
import foam.mlang.MLang;
import foam.mlang.sink.Count;

import java.security.NoSuchAlgorithmException;

// TODO: Throw exception for print statements when they are ready

/**
 * Decorating DAO that registers new {@link User} objects.
 *
 * On {@code put_} it rejects non-User objects, duplicate ids and duplicate
 * email addresses, validates the user through the context's "auth" service,
 * salts and hashes the password (stored as {@code "<hash>:<salt>"}), and only
 * then delegates the store. Every failure path currently prints a message and
 * returns {@code null}; per the TODO above these are placeholders until
 * proper exceptions are available.
 */
public class AuthenticatedUserDAO extends ProxyDAO {

  public AuthenticatedUserDAO(DAO delegate) {
    setDelegate(delegate);
  }

  /**
   * Validates and registers a new user.
   *
   * @param x       the request context; must provide the "auth" service
   * @param fObject the object to store; anything that is not a {@link User}
   *                is rejected with {@code null}
   * @return the stored user, or {@code null} on any validation failure
   */
  @Override
  public FObject put_(X x, FObject fObject) {
    // Guard clause: only User objects are accepted (original behavior:
    // fall through to a null return for anything else).
    if ( ! (fObject instanceof User) ) {
      return null;
    }

    User user = (User) fObject;

    // Reject ids that are already registered.
    if ( super.find(user.getId()) != null ) {
      System.out.println("A user has already been registered with this account");
      return null;
    }

    AuthService service = (AuthService) x.get("auth");
    if ( service == null ) {
      System.out.println("Auth Service not started");
      return null;
    }

    try {
      service.validateUser(user);

      // Reject email addresses that already belong to an account.
      Count count = (Count) this.limit(1)
          .where(MLang.EQ(User.EMAIL, user.getEmail()))
          .select(new Count());
      if ( count.getValue() > 0 ) {
        System.out.println("An account is already registered with this email address");
        return null;
      }

      // Hash the password with a fresh random salt; stored as "<hash>:<salt>".
      String salt = UserAndGroupAuthService.generateRandomSalt();
      user.setPassword(UserAndGroupAuthService.hashPassword(user.getPassword(), salt) + ":" + salt);

      return getDelegate().put_(x, user);
    } catch (RuntimeException | NoSuchAlgorithmException e) {
      // Both catch blocks were byte-identical in the original; collapsed
      // into a single multi-catch. printStackTrace is kept until the
      // exception strategy above is implemented.
      e.printStackTrace();
      return null;
    }
  }
}
package fr.tpt.s3.ls_mxc.avail;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.Iterator;
import java.util.List;

import fr.tpt.s3.ls_mxc.alloc.LS;
import fr.tpt.s3.ls_mxc.model.DAG;
import fr.tpt.s3.ls_mxc.model.Edge;
import fr.tpt.s3.ls_mxc.model.Node;

/**
 * File utilities for the mixed-criticality availability tool: reads a DAG
 * description file into the scheduler's data structures and writes the
 * voter/automaton model out in the PRISM (dtmc) input language.
 */
public class FileUtilities {

	public FileUtilities () {}

	/**
	 * Skips empty lines and comment lines, returning the first meaningful line.
	 *
	 * NOTE(review): the comment-marker character literal was garbled in the
	 * original source ("line.charAt(0) == '"); '#' is assumed from the input
	 * file format -- TODO confirm against a sample DAG file.
	 *
	 * @param br   reader positioned after {@code line}
	 * @param line the line just read (may be empty or a comment)
	 * @return the first non-empty, non-comment line
	 * @throws IOException on read failure
	 */
	private String skipBlanksAndComments(BufferedReader br, String line) throws IOException {
		while (line.length() == 0 || line.charAt(0) == '#')
			line = br.readLine();
		return line;
	}

	/**
	 * Reads a DAG description file and initializes the allocator's structures.
	 *
	 * Expected file layout (comments/blank lines allowed between sections):
	 * number of nodes; number of cores; deadline; one C(LO) per line; one
	 * C(HI) per line; then an adjacency matrix (row i = predecessors of node
	 * i, "1" marking an edge).
	 *
	 * @param file path of the description file
	 * @param ls   allocator to initialize (cores, deadline, DAG)
	 * @param aut  unused here; kept for interface compatibility with callers
	 * @param lv   unused here; kept for interface compatibility with callers
	 */
	public void ReadAndInit(String file, LS ls, Automata aut, List<Voter> lv) {
		int nbNodes = 0;

		// try-with-resources: the original leaked the reader/stream when an
		// exception fired before the explicit close() calls.
		try (BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(file)))) {
			String line = skipBlanksAndComments(br, br.readLine());

			// First value: the number of nodes.
			nbNodes = Integer.parseInt(line.trim());

			// Second value: the number of cores.
			line = skipBlanksAndComments(br, br.readLine());
			ls.setNb_cores(Integer.parseInt(line.trim()));

			// Third value: the deadline.
			line = skipBlanksAndComments(br, br.readLine());
			ls.setDeadline(Integer.parseInt(line.trim()));

			line = skipBlanksAndComments(br, br.readLine());

			DAG d = new DAG();

			// C(LO) execution times, one per line.
			for (int i = 0; i < nbNodes; i++) {
				Node n = new Node(i, Integer.toString(i), 0, 0);
				n.setC_LO(Integer.parseInt(line.trim()));
				d.getNodes().add(n);
				line = br.readLine();
			}

			line = skipBlanksAndComments(br, line);

			// C(HI) execution times, one per line.
			for (int i = 0; i < nbNodes; i++) {
				d.getNodebyID(i).setC_HI(Integer.parseInt(line.trim()));
				line = br.readLine();
			}

			line = skipBlanksAndComments(br, line);

			// Adjacency matrix: a "1" in column j of row i creates edge j -> i.
			for (int i = 0; i < nbNodes; i++) {
				Node n = d.getNodebyID(i);
				String[] dep = line.split(",");
				for (int j = 0; j < dep.length; j++) {
					if (dep[j].contains("1")) {
						// The Edge constructor wires itself into both nodes,
						// so the local reference is intentionally unused.
						@SuppressWarnings("unused")
						Edge e = new Edge(d.getNodebyID(j), n, false);
					}
				}
				line = br.readLine();
			}

			// Flag sources and sinks once the whole graph is built.
			Iterator<Node> itNodes = d.getNodes().iterator();
			while (itNodes.hasNext()) {
				Node n = itNodes.next();
				n.checkifSink();
				n.checkifSource();
				n.checkifSinkinHI();
			}

			ls.setMxcDag(d);
		} catch (IOException e) {
			System.out.println("Unable to open file " + file + " exception " + e.getMessage());
		}
	}

	/**
	 * Writes a PRISM "voter" module for the given voter: one ok/fail pair per
	 * regular transition, then the final transitions.
	 *
	 * NOTE(review): the "_fail" label is derived from getDestOk().getTask(),
	 * mirroring the "_ok" label -- looks intentional (same task, different
	 * outcome) but worth confirming.
	 *
	 * @param out destination writer (not closed here)
	 * @param vot voter whose transitions are serialized
	 * @throws IOException on write failure
	 */
	public void writeVoters (BufferedWriter out, Voter vot) throws IOException {
		out.write("module voter\n");
		out.write("\tv: [0..20] init 0;\n");

		Iterator<Transition> it = vot.getTransitions().iterator();
		while (it.hasNext()) {
			Transition t = it.next();
			out.write("\t["+t.getDestOk().getTask()+"_ok] v = "+t.getSrc().getId()+" -> (v' = "+t.getDestOk().getId()+");\n");
			out.write("\t["+t.getDestOk().getTask()+"_fail] v = "+t.getSrc().getId()+" -> (v' = "+t.getDestFail().getId()+");\n");
			out.write("\n");
		}

		it = vot.getF_trans().iterator();
		while (it.hasNext()) {
			Transition t = it.next();
			out.write("\t["+t.getName()+"] v = "+t.getSrc().getId()+" -> (v' = "+t.getDestOk().getId()+");\n");
		}

		out.write("endmodule\n");
		out.write("\n");
	}

	/**
	 * Writes the scheduler automaton as a PRISM "proc" module plus reward
	 * structures: output formulas, per-LO-task booleans, the LO scheduling
	 * zone, the end-of-LO transitions, the HI zone, and cycle rewards.
	 *
	 * All emitted strings are kept byte-for-byte as in the original so the
	 * generated PRISM model is unchanged.
	 *
	 * @param out destination writer (not closed here)
	 * @param a   automaton to serialize
	 * @param d   DAG providing the LO outputs used for per-output rewards
	 * @throws IOException on write failure
	 */
	public void writeAutomata (BufferedWriter out, Automata a, DAG d) throws IOException {
		// Output formulas: conjunction of the task booleans feeding each output.
		Iterator<List<AutoBoolean>> iab = a.getL_outs_b().iterator();
		while (iab.hasNext()) {
			List<AutoBoolean> lab = iab.next();
			out.write("formula "+lab.get(0).getOutput()+" = ");
			Iterator<AutoBoolean> ia = lab.iterator();
			while (ia.hasNext()) {
				out.write(ia.next().getTask()+"bool");
				if (ia.hasNext())
					out.write(" & ");
			}
			out.write(";\n");
		}
		out.write("\n");

		out.write("module proc\n");
		out.write("\ts : [0..50] init "+a.getLo_sched().get(0).getId()+";\n");

		// One boolean per LO task (mode 0), excluding the Init/Final markers.
		Iterator<State> is = a.getLo_sched().iterator();
		while (is.hasNext()) {
			State s = is.next();
			if (s.getMode() == 0 && !s.getTask().contains("Final") && !s.getTask().contains("Init"))
				out.write("\t"+s.getTask()+"bool: bool init false;\n");
		}
		// (A leftover debug System.out.println("") was removed here.)

		// LO scheduling zone.
		Iterator<Transition> it = a.getL_transitions().iterator();
		while (it.hasNext()) {
			Transition t = it.next();
			if (t.getSrc().getMode() == 1) {
				if (! t.getSrc().isfMechanism())
					out.write("\t["+t.getSrc().getTask()+"_lo] s = " + t.getSrc().getId() + " -> 1 - "+ t.getP() +" : (s' = " + t.getDestOk().getId() + ") +" + t.getP() + ": (s' =" + t.getDestFail().getId() +");\n");
				else {
					out.write("\t["+t.getSrc().getTask()+"_ok] s = " + t.getSrc().getId() + " -> (s' = " + t.getDestOk().getId() + ");\n");
					out.write("\t["+t.getSrc().getTask()+"_fail] s = " + t.getSrc().getId() + " -> (s' = " + t.getDestFail().getId() + ");\n");
				}
			} else {
				if (t.getSrc().getId() == 0) {
					// The initial state resets every LO-task boolean.
					out.write("\t["+t.getSrc().getTask()+"_lo] s = " + t.getSrc().getId() + " -> (s' = " + t.getDestOk().getId()+")");
					is = a.getLo_sched().iterator();
					while (is.hasNext()) {
						State s = is.next();
						if (s.getMode() == 0 && !s.getTask().contains("Final") && !s.getTask().contains("Init"))
							out.write(" & ("+s.getTask()+"bool' = false)");
					}
					out.write(";\n");
				} else {
					// LO task: success sets its boolean, failure jumps to the fail state.
					out.write("\t["+t.getSrc().getTask()+"_lo] s = " + t.getSrc().getId() + " -> 1 - "+ t.getP() +" : (s' = " + t.getDestOk().getId() +") & ("+t.getSrc().getTask()+"bool' = true) + " + t.getP() + ": (s' =" + t.getDestFail().getId() + ");\n" );
				}
			}
		}

		// End-of-LO transitions: one guarded command per boolean combination.
		Iterator<Transition> itf = a.getF_transitions().iterator();
		int curr = 0;
		while (itf.hasNext()) {
			Transition t = itf.next();
			out.write("\t["+t.getSrc().getTask()+curr+"] s = " + t.getSrc().getId());
			Iterator<AutoBoolean> ib = t.getbSet().iterator();
			while (ib.hasNext()) {
				AutoBoolean ab = ib.next();
				out.write(" & " + ab.getOutput()+" = true");
			}
			Iterator<AutoBoolean> iff = t.getfSet().iterator();
			while (iff.hasNext()) {
				AutoBoolean ab = iff.next();
				out.write(" & " + ab.getOutput()+" = false");
			}
			out.write(" -> (s' = "+t.getDestOk().getId()+");\n");
			curr++;
		}

		// HI scheduling zone.
		out.write("\n");
		it = a.getH_transitions().iterator();
		while (it.hasNext()) {
			Transition t = it.next();
			out.write("\t["+t.getSrc().getTask()+"_hi] s = " + t.getSrc().getId() + " -> (s' =" + t.getDestOk().getId() +");\n");
		}
		out.write("endmodule\n");

		// Per-output cycle rewards.
		out.write("\n");
		Iterator<Node> in = d.getLO_outs().iterator();
		while (in.hasNext()) {
			Node n = in.next();
			out.write("rewards \""+n.getName()+"_cycles\"\n");
			it = a.getF_transitions().iterator();
			int c = 0;
			while (it.hasNext()) {
				Transition t = it.next();
				Iterator<AutoBoolean> iab2 = t.getbSet().iterator();
				while (iab2.hasNext()) {
					if (iab2.next().getTask().contentEquals(n.getName()))
						out.write("\t["+t.getSrc().getTask()+c+"] true : 1;\n");
				}
				c++;
			}
			out.write("endrewards\n");
			out.write("\n");
		}

		// Total cycles reward: every end-of-LO command plus the last HI command.
		out.write("rewards \"total_cycles\"\n");
		it = a.getF_transitions().iterator();
		int c = 0;
		while (it.hasNext()) {
			Transition t = it.next();
			out.write("\t["+t.getSrc().getTask()+c+"] true : 1;\n");
			c++;
		}
		out.write("\t["+a.getH_transitions().get(a.getH_transitions().size() - 1).getSrc().getTask()+"_hi] true : 1;\n");
		out.write("endrewards\n");
		out.write("\n");
	}

	/**
	 * Writes the complete dtmc model (all voter modules, then the automaton)
	 * to the given file.
	 *
	 * @param filename path of the file to create/overwrite
	 * @param voters   voters to serialize before the automaton
	 * @param d        DAG passed through to {@link #writeAutomata}
	 * @param aut      automaton to serialize
	 * @throws IOException declared for interface compatibility; write errors
	 *                     are caught and reported on stdout
	 */
	public void writeModelToFile(String filename, List<Voter> voters, DAG d, Automata aut) throws IOException {
		// try-with-resources replaces the manual null-checked finally block;
		// FileWriter creates the file itself, so the redundant
		// createNewFile() call was dropped.
		try (BufferedWriter out = new BufferedWriter(new FileWriter(new File(filename)))) {
			out.write("dtmc\n\n");

			Iterator<Voter> iv = voters.iterator();
			while (iv.hasNext())
				writeVoters(out, iv.next());

			writeAutomata(out, aut, d);
		} catch (IOException e) {
			System.out.println("writeModelToFile Exception "+e.getMessage());
		}
	}
}
package dr.inference.model; import dr.evolution.tree.NodeRef; import dr.evolution.tree.Tree; import dr.evomodel.continuous.MultivariateDiffusionModel; import dr.evomodel.tree.TreeModel; import dr.evomodel.treedatalikelihood.TreeDataLikelihood; import dr.evomodel.treedatalikelihood.continuous.*; import dr.inference.model.*; import dr.math.matrixAlgebra.Matrix; import dr.xml.*; import java.util.List; /** * A Statistic class that computes the expected proportion of the variance in the data due to diffusion on the tree * versus sampling error. * * @author Gabriel Hassler */ public class VarianceProportionStatistic extends Statistic.Abstract implements VariableListener, ModelListener { public static final String PARSER_NAME = "varianceProportionStatistic"; public static final String SCALE_BY_HEIGHT = "scaleByTreeHeight"; private TreeModel tree; private MultivariateDiffusionModel diffusionModel; private TreeDataLikelihood treeLikelihood; private Parameter samplingPrecision; private Parameter diffusionPrecision; private double[] diffusionProportion; private boolean scaleByHeight; private treeVarianceSums treeSums; private double[] diffusionVariance; private double[] samplingVariance; private int[] observedCounts; private boolean treeKnown = false; private boolean varianceKnown = false; public VarianceProportionStatistic(TreeModel tree, TreeDataLikelihood treeLikelihood, RepeatedMeasuresTraitDataModel dataModel, MultivariateDiffusionModel diffusionModel, boolean scaleByHeight) { this.tree = tree; this.treeLikelihood = treeLikelihood; this.diffusionModel = diffusionModel; this.samplingPrecision = dataModel.getSamplingPrecision(); this.diffusionPrecision = diffusionModel.getPrecisionParameter(); this.scaleByHeight = scaleByHeight; this.observedCounts = getObservedCounts(dataModel); int dim = samplingPrecision.getDimension(); this.diffusionVariance = new double[dim]; this.samplingVariance = new double[dim]; this.diffusionProportion = new double[dim]; this.treeSums = new 
treeVarianceSums(0, 0); updateTreeSums(); updateDiffusionVariance(); updateSamplingVariance(); updateDiffusionProportion(); tree.addModelListener(this); samplingPrecision.addParameterListener(this); diffusionPrecision.addParameterListener(this); } /** * a class that stores the sum of the diagonal elements and all elements of a matrix */ private class treeVarianceSums { private double diagonalSum; private double totalSum; private treeVarianceSums(double diagonalSum, double totalSum){ this.diagonalSum = diagonalSum; this.totalSum = totalSum; } private double getDiagonalSum(){ return this.diagonalSum; } private double getTotalSum(){ return this.totalSum; } } /** * @return an array with the number of taxa with observed data for each trait */ private int[] getObservedCounts(RepeatedMeasuresTraitDataModel dataModel){ List<Integer> missingInds = dataModel.getMissingIndices(); int n = tree.getExternalNodeCount(); int dim = dataModel.getTraitDimension(); int[] observedCounts = new int[dim]; for (int i = 0; i < dim; i++){ observedCounts[i] = n; } int threshold = n; int currentDim = 0; for (int index : missingInds){ if (index >= threshold){ threshold += n; currentDim += 1; } observedCounts[currentDim] -= 1; } return observedCounts; } /** * recalculates the diffusionProportion statistic based on current parameters */ private void updateDiffusionProportion() { int dim = samplingPrecision.getDimension(); double[] diffusionProportion = new double[dim]; for (int i = 0; i < dim; i++) { double diffusionComponent = diffusionVariance[i] * (treeSums.getDiagonalSum() / observedCounts[i] + treeSums.getTotalSum() / (observedCounts[i] * observedCounts[i])); double samplingComponent = samplingVariance[i] * (observedCounts[i] - 1) / observedCounts[i]; diffusionProportion[i] = diffusionComponent / (diffusionComponent + samplingComponent); } } /** * recalculates the the sum of the diagonal elements and sum of all the elements of the tree variance * matrix statistic based on current parameters 
*/ private void updateTreeSums(){ double normalization = 1.0; if (scaleByHeight){ normalization = 1 / tree.getNodeHeight(tree.getRoot()); } double[][] treeVariance = getTreeVariance(); int n = treeVariance.length; double diagonalSum = 0; double offDiagonalSum = 0; for (int i = 0; i < n; i++) { diagonalSum = diagonalSum + treeVariance[i][i]; } for (int i = 0; i < n; i++) { for (int j = i + 1; j < n; j++) { offDiagonalSum = offDiagonalSum + treeVariance[i][j]; } } offDiagonalSum = offDiagonalSum * 2; treeSums.diagonalSum = diagonalSum; treeSums.totalSum = diagonalSum + offDiagonalSum; } /** * recalculates the diffusionVariance variable, which stores the diagonal elements of the diffusion variance matrix, * by inverting the current diffusion precision matrix */ private void updateDiffusionVariance() { Matrix diffusivityMatrix = new Matrix(diffusionModel.getPrecisionmatrix()).inverse(); int dim = diffusivityMatrix.rows(); for (int i = 0; i < dim; i++) { diffusionVariance[i] = diffusivityMatrix.component(i, i); } } /** * recalculates the sampling variance for each trait based on the current sampling precision */ private void updateSamplingVariance() { int dim = samplingPrecision.getDimension(); double[] samplingPrecisionVals = samplingPrecision.getParameterValues(); for (int i = 0; i < dim; i++) { samplingVariance[i] = 1 / samplingPrecisionVals[i]; } } @Override public int getDimension() { return diffusionProportion.length; } @Override public double getStatisticValue(int dim) { boolean needToUpdate = false; if (!treeKnown) { updateTreeSums(); treeKnown = true; needToUpdate = true; } if (!varianceKnown){ updateDiffusionVariance(); updateSamplingVariance(); varianceKnown = true; needToUpdate = true; } if (needToUpdate){ updateDiffusionProportion(); } return diffusionProportion[dim]; } //Post-Order Algorithm for Constructing between taxa tree covariance matrix /** * class that stores where in the tree variance matrix the current block is */ private class 
postOrderTreeTracker{ int startIndex; int dim; private postOrderTreeTracker(int startIndex, int dim){ this.startIndex = startIndex; this.dim = dim; } private int getStartIndex(){ return this.startIndex; } private int getDim(){ return this.dim; } } /** * @return the between taxa covariance matrix */ private double[][] getTreeVariance(){ int n = tree.getExternalNodeCount(); double[][] treeVariance = new double[n][n]; postOrderTreeTracker x = doTreeRecursion(treeVariance, tree.getRoot(),new postOrderTreeTracker(0, n)); return treeVariance; } /** * NOTE: this function implements a recursive algorithm that updates the treeVariance array in addition to * @return the location and dimension of the current block in the between taxa covariance matrix after */ private postOrderTreeTracker doTreeRecursion(double[][] treeVariance, NodeRef node, postOrderTreeTracker tracker){ int childCount = tree.getChildCount(node); assert (childCount == 2); NodeRef[] childNodes = new NodeRef[childCount]; for (int i = 0; i < childCount; i++){ childNodes[i] = tree.getChild(node, i); } int currentIndex = tracker.getStartIndex(); for (NodeRef child : childNodes){ if (tree.isExternal(child)){ treeVariance[currentIndex][currentIndex] += tree.getBranchLength(child); currentIndex += 1; } else{ postOrderTreeTracker newTracker = doTreeRecursion(treeVariance, child, new postOrderTreeTracker(currentIndex, tracker.getDim())); currentIndex += newTracker.getDim(); for (int i = newTracker.getStartIndex(); i < currentIndex; i++){ for (int j = i; j < currentIndex; j++){ treeVariance[i][j] += tree.getBranchLength(child); } } } } return new postOrderTreeTracker(tracker.getStartIndex(), currentIndex - tracker.getStartIndex()); } @Override public void variableChangedEvent(Variable variable, int index, Variable.ChangeType type) { varianceKnown = false; } @Override public void modelChangedEvent(Model model, Object object, int index) { assert (model == tree); treeKnown = false; } //TODO: make its own class in 
evomodelxml public static XMLObjectParser PARSER = new AbstractXMLObjectParser() { @Override public Object parseXMLObject(XMLObject xo) throws XMLParseException { TreeModel tree = (TreeModel) xo.getChild(TreeModel.class); RepeatedMeasuresTraitDataModel dataModel = (RepeatedMeasuresTraitDataModel) xo.getChild(RepeatedMeasuresTraitDataModel.class); MultivariateDiffusionModel diffusionModel = (MultivariateDiffusionModel) xo.getChild(MultivariateDiffusionModel.class); // MatrixInverseStatistic diffusionVariance = (MatrixInverseStatistic) xo.getChild(MatrixInverseStatistic.class); // MatrixParameter diffusionPrecision = (MatrixParameter) xo.getChild(MatrixParameter.class); // Parameter samplingPrecision = (Parameter) xo.getChild(Parameter.class); TreeDataLikelihood treeLikelihood = (TreeDataLikelihood) xo.getChild(TreeDataLikelihood.class); final boolean scaleByHeight; if (xo.hasAttribute(SCALE_BY_HEIGHT)){ scaleByHeight = xo.getBooleanAttribute(SCALE_BY_HEIGHT); } else{ scaleByHeight = false; } return new VarianceProportionStatistic(tree, treeLikelihood, dataModel, diffusionModel, scaleByHeight); } private final XMLSyntaxRule[] rules = new XMLSyntaxRule[]{ AttributeRule.newStringRule(SCALE_BY_HEIGHT, true), new ElementRule(TreeModel.class), new ElementRule(TreeDataLikelihood.class), new ElementRule(RepeatedMeasuresTraitDataModel.class), new ElementRule(MultivariateDiffusionModel.class) }; @Override public XMLSyntaxRule[] getSyntaxRules() { return rules; } @Override public String getParserDescription() { return "This element returns a statistic that computes proportion of variance due to diffusion on the tree"; } @Override public Class getReturnType() { return VarianceProportionStatistic.class; } @Override public String getParserName() { return PARSER_NAME; } }; @Override public void modelRestored(Model model) { // Do nothing } }
package tlc2.value; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.BenchmarkMode; import org.openjdk.jmh.annotations.Measurement; import org.openjdk.jmh.annotations.Mode; import org.openjdk.jmh.annotations.Scope; import org.openjdk.jmh.annotations.State; import org.openjdk.jmh.annotations.Warmup; import tlc2.util.FP64; @State(Scope.Benchmark) public class SubsetValueBenchmark { private static final SubsetValue subset35; private static final SubsetValue subset60; private static final SubsetValue subset100; private static final SubsetValue subset200; private static final SubsetValue subset300; private static final SubsetValue subset400; private static final int k = 80000; private static final double d = 0.1d; private static final int k2 = 160000; private static final double d2 = 0.2d; static { EnumerableValue.setRandom(15041980L); EnumerableValue.resetRandom(); FP64.Init(); subset35 = new SubsetValue(new IntervalValue(1, 35)); subset35.normalize(); subset60 = new SubsetValue(new IntervalValue(1, 60)); subset60.normalize(); subset100 = new SubsetValue(new IntervalValue(1, 100)); subset100.normalize(); subset200 = new SubsetValue(new IntervalValue(1, 200)); subset200.normalize(); subset300 = new SubsetValue(new IntervalValue(1, 300)); subset300.normalize(); subset400 = new SubsetValue(new IntervalValue(1, 400)); subset400.normalize(); } @Benchmark @Warmup(iterations = 3, time = 1) @Measurement(iterations = 3, time = 1) @BenchmarkMode(Mode.Throughput) public EnumerableValue probabilisticN035k80d01() { return subset35.getRandomSetOfSubsets(k, d); } @Benchmark @Warmup(iterations = 3, time = 1) @Measurement(iterations = 3, time = 1) @BenchmarkMode(Mode.Throughput) public EnumerableValue probabilisticN060k80d01() { return subset60.getRandomSetOfSubsets(k, d); } @Benchmark @Warmup(iterations = 3, time = 1) @Measurement(iterations = 3, time = 1) @BenchmarkMode(Mode.Throughput) public EnumerableValue probabilisticN100k80d01() { return 
subset100.getRandomSetOfSubsets(k, d); } @Benchmark @Warmup(iterations = 3, time = 1) @Measurement(iterations = 3, time = 1) @BenchmarkMode(Mode.Throughput) public EnumerableValue probabilisticN200k80d01() { return subset200.getRandomSetOfSubsets(k, d); } @Benchmark @Warmup(iterations = 3, time = 1) @Measurement(iterations = 3, time = 1) @BenchmarkMode(Mode.Throughput) public EnumerableValue probabilisticN300k80d01() { return subset300.getRandomSetOfSubsets(k, d); } @Benchmark @Warmup(iterations = 3, time = 1) @Measurement(iterations = 3, time = 1) @BenchmarkMode(Mode.Throughput) public EnumerableValue probabilisticN400k80d01() { return subset400.getRandomSetOfSubsets(k, d); } @Benchmark @Warmup(iterations = 3, time = 1) @Measurement(iterations = 3, time = 1) @BenchmarkMode(Mode.Throughput) public EnumerableValue probabilisticN035k16d01() { return subset35.getRandomSetOfSubsets(k2, d); } @Benchmark @Warmup(iterations = 3, time = 1) @Measurement(iterations = 3, time = 1) @BenchmarkMode(Mode.Throughput) public EnumerableValue probabilisticN060k16d01() { return subset60.getRandomSetOfSubsets(k2, d); } @Benchmark @Warmup(iterations = 3, time = 1) @Measurement(iterations = 3, time = 1) @BenchmarkMode(Mode.Throughput) public EnumerableValue probabilisticN100k16d01() { return subset100.getRandomSetOfSubsets(k2, d); } @Benchmark @Warmup(iterations = 3, time = 1) @Measurement(iterations = 3, time = 1) @BenchmarkMode(Mode.Throughput) public EnumerableValue probabilisticN200k16d01() { return subset200.getRandomSetOfSubsets(k2, d); } @Benchmark @Warmup(iterations = 3, time = 1) @Measurement(iterations = 3, time = 1) @BenchmarkMode(Mode.Throughput) public EnumerableValue probabilisticN300k16d01() { return subset300.getRandomSetOfSubsets(k2, d); } @Benchmark @Warmup(iterations = 3, time = 1) @Measurement(iterations = 3, time = 1) @BenchmarkMode(Mode.Throughput) public EnumerableValue probabilisticN400k16d01() { return subset400.getRandomSetOfSubsets(k2, d); } @Benchmark 
@Warmup(iterations = 3, time = 1) @Measurement(iterations = 3, time = 1) @BenchmarkMode(Mode.Throughput) public EnumerableValue probabilisticN035k80d02() { return subset35.getRandomSetOfSubsets(k, d2); } @Benchmark @Warmup(iterations = 3, time = 1) @Measurement(iterations = 3, time = 1) @BenchmarkMode(Mode.Throughput) public EnumerableValue probabilisticN060k80d02() { return subset60.getRandomSetOfSubsets(k, d2); } @Benchmark @Warmup(iterations = 3, time = 1) @Measurement(iterations = 3, time = 1) @BenchmarkMode(Mode.Throughput) public EnumerableValue probabilisticN100k80d02() { return subset100.getRandomSetOfSubsets(k, d2); } @Benchmark @Warmup(iterations = 3, time = 1) @Measurement(iterations = 3, time = 1) @BenchmarkMode(Mode.Throughput) public EnumerableValue probabilisticN200k80d02() { return subset200.getRandomSetOfSubsets(k, d2); } @Benchmark @Warmup(iterations = 3, time = 1) @Measurement(iterations = 3, time = 1) @BenchmarkMode(Mode.Throughput) public EnumerableValue probabilisticN300k80d02() { return subset300.getRandomSetOfSubsets(k, d2); } @Benchmark @Warmup(iterations = 3, time = 1) @Measurement(iterations = 3, time = 1) @BenchmarkMode(Mode.Throughput) public EnumerableValue probabilisticN400k80d02() { return subset400.getRandomSetOfSubsets(k, d2); } @Benchmark @Warmup(iterations = 3, time = 1) @Measurement(iterations = 3, time = 1) @BenchmarkMode(Mode.Throughput) public EnumerableValue probabilisticN035k16d02() { return subset35.getRandomSetOfSubsets(k2, d2); } @Benchmark @Warmup(iterations = 3, time = 1) @Measurement(iterations = 3, time = 1) @BenchmarkMode(Mode.Throughput) public EnumerableValue probabilisticN060k16d02() { return subset60.getRandomSetOfSubsets(k2, d2); } @Benchmark @Warmup(iterations = 3, time = 1) @Measurement(iterations = 3, time = 1) @BenchmarkMode(Mode.Throughput) public EnumerableValue probabilisticN100k16d02() { return subset100.getRandomSetOfSubsets(k2, d2); } @Benchmark @Warmup(iterations = 3, time = 1) 
@Measurement(iterations = 3, time = 1) @BenchmarkMode(Mode.Throughput) public EnumerableValue probabilisticN200k16d02() { return subset200.getRandomSetOfSubsets(k2, d2); } @Benchmark @Warmup(iterations = 3, time = 1) @Measurement(iterations = 3, time = 1) @BenchmarkMode(Mode.Throughput) public EnumerableValue probabilisticN300k16d02() { return subset300.getRandomSetOfSubsets(k2, d2); } @Benchmark @Warmup(iterations = 3, time = 1) @Measurement(iterations = 3, time = 1) @BenchmarkMode(Mode.Throughput) public EnumerableValue probabilisticN400k16d02() { return subset400.getRandomSetOfSubsets(k2, d2); } }
package com.rho.sync; import com.rho.RhoClassFactory; import com.rho.RhoEmptyLogger; import com.rho.RhoEmptyProfiler; import com.rho.RhoLogger; import com.rho.RhoProfiler; import com.rho.RhodesApp; import com.rho.Tokenizer; import com.rho.net.*; import com.rho.db.*; import java.util.Enumeration; import java.util.Vector; import java.util.Hashtable; import com.rho.FilePath; import com.rho.TimeInterval; import com.rho.RhoAppAdapter; import com.rho.net.NetRequest.MultipartItem; public class SyncSource { private static final RhoLogger LOG = RhoLogger.RHO_STRIP_LOG ? new RhoEmptyLogger() : new RhoLogger("Sync"); private static final RhoProfiler PROF = RhoProfiler.RHO_STRIP_PROFILER ? new RhoEmptyProfiler() : new RhoProfiler(); static class CAttrValue { String m_strAttrib = ""; String m_strValue = ""; String m_strBlobSuffix = ""; CAttrValue(String strAttrib, String strValue) { m_strAttrib = strAttrib; m_strValue = strValue; if ( m_strAttrib.endsWith("-rhoblob") ) { m_strBlobSuffix = "-rhoblob"; m_strAttrib = m_strAttrib.substring(0,m_strAttrib.length()-m_strBlobSuffix.length()); } } }; SyncEngine m_syncEngine; DBAdapter m_dbAdapter; Integer m_nID; String m_strName = ""; long m_token = 0; String m_strSyncType = ""; boolean m_bTokenFromDB; int m_nCurPageCount, m_nInserted, m_nDeleted, m_nTotalCount; boolean m_bGetAtLeastOnePage = false; int m_nErrCode = RhoAppAdapter.ERR_NONE; String m_strError = "", m_strServerError = ""; int m_nRefreshTime = 0; int m_nProgressStep = -1; boolean m_bSchemaSource; static class CAssociation { String m_strSrcName, m_strAttrib; CAssociation( String strSrcName, String strAttrib ){m_strSrcName = strSrcName; m_strAttrib = strAttrib; } }; Vector/*<CAssociation>*/ m_arAssociations = new Vector(); Vector/*Ptr<net::CMultipartItem*>*/ m_arMultipartItems = new Vector(); Vector/*<String>*/ m_arBlobAttrs = new Vector(); Hashtable/*<String,int>*/ m_hashIgnorePushObjects = new Hashtable(); Hashtable/*<String,int>*/ m_hashBelongsTo = new Hashtable(); Integer 
getID() { return m_nID; } String getName() { return m_strName; } String getSyncType(){ return m_strSyncType; } String getServerError(){ return m_strServerError; } int getErrorCode(){ return m_nErrCode; } int getServerObjectsCount(){ return m_nInserted+m_nDeleted; } long getToken(){ return m_token; } boolean isTokenFromDB(){ return m_bTokenFromDB; } void setToken(long token){ m_token = token; m_bTokenFromDB = false; } boolean isEmptyToken() { return m_token == 0; } int getProgressStep(){ return m_nProgressStep; } void setProgressStep(int nProgressStep){ m_nProgressStep = nProgressStep; } boolean getGetAtLeastOnePage(){ return m_bGetAtLeastOnePage; } int getRefreshTime(){ return m_nRefreshTime; } Vector/*<CAssociation>*/ getAssociations(){ return m_arAssociations; } int getInsertedCount() { return m_nInserted; } int getDeletedCount() { return m_nDeleted; } void setCurPageCount(int nCurPageCount){m_nCurPageCount = nCurPageCount;} void setTotalCount(int nTotalCount){m_nTotalCount = nTotalCount;} int getCurPageCount(){return m_nCurPageCount;} int getTotalCount(){return m_nTotalCount;} SyncEngine getSync(){ return m_syncEngine; } SyncNotify getNotify(){ return getSync().getNotify(); } NetRequest getNet(){ return getSync().getNet(); } ISyncProtocol getProtocol(){ return getSync().getProtocol(); } void setRefreshTime( int nRefreshTime ){ m_nRefreshTime = nRefreshTime;} DBAdapter getDB(){ return m_dbAdapter; } SyncSource(SyncEngine syncEngine, DBAdapter db)throws DBException { m_syncEngine = syncEngine; m_dbAdapter = db; m_nID = new Integer(0); m_bTokenFromDB = true; m_nCurPageCount = 0; m_nInserted = 0; m_nDeleted = 0; m_nTotalCount = 0; m_bGetAtLeastOnePage = false; m_nErrCode = RhoAppAdapter.ERR_NONE; m_bSchemaSource = db.isTableExist(m_strName); } SyncSource(int id, String name, String strSyncType, DBAdapter db, SyncEngine syncEngine )throws DBException { m_syncEngine = syncEngine; m_dbAdapter = db; m_nID = new Integer(id); m_strName = name; m_strSyncType = strSyncType; 
m_nCurPageCount = 0; m_nInserted = 0; m_nDeleted = 0; m_nTotalCount = 0; m_bGetAtLeastOnePage = false; m_nErrCode = RhoAppAdapter.ERR_NONE; IDBResult res = db.executeSQL("SELECT token,associations from sources WHERE source_id=?", m_nID); if ( !res.isEnd() ) { m_token = res.getLongByIdx(0); m_bTokenFromDB = true; }else { m_token = 0; m_bTokenFromDB = true; } m_bSchemaSource = db.isTableExist(m_strName); parseAssociations(res.getStringByIdx(1)); } void parseAssociations(String strAssociations) { if (strAssociations.length() == 0 ) return; Tokenizer oTokenizer = new Tokenizer( strAssociations, "," ); String strSrcName = ""; while (oTokenizer.hasMoreTokens()) { String tok = oTokenizer.nextToken(); if (tok.length() == 0) continue; if ( strSrcName.length() > 0 ) { m_arAssociations.addElement( new CAssociation(strSrcName, tok) ); strSrcName = ""; }else strSrcName = tok; } } void sync() throws Exception { getNotify().reportSyncStatus(RhoAppAdapter.getMessageText("syncronizing") + getName() + "...", m_nErrCode, m_strError ); TimeInterval startTime = TimeInterval.getCurrentTime(); //m_bIsSearch = false; try{ //if ( isTokenFromDB() && getToken() > 1 ) // syncServerChanges(); //sync only server changes, which was paused before //else if ( isEmptyToken() ) processToken(1); syncClientChanges(); syncServerChanges(); /* boolean bSyncedServer = syncClientChanges(); if ( !bSyncedServer ) syncServerChanges(); */ }catch(Exception exc) { //getSync().stopSync(); throw exc; }finally{ TimeInterval endTime = TimeInterval.getCurrentTime(); getDB().executeSQL("UPDATE sources set last_updated=?,last_inserted_size=?,last_deleted_size=?, "+ "last_sync_duration=?,last_sync_success=?, backend_refresh_time=? 
WHERE source_id=?", new Long(endTime.toULong()/1000), new Integer(getInsertedCount()), new Integer(getDeletedCount()), new Long((endTime.minus(startTime)).toULong()), new Integer(m_bGetAtLeastOnePage?1:0), new Integer(m_nRefreshTime), getID() ); } } void syncClientChanges()throws Exception { PROF.START("Pull"); boolean bSyncClient = false; { IDBResult res = getDB().executeSQL("SELECT object FROM changed_values WHERE source_id=? and sent<=1 LIMIT 1 OFFSET 0", getID()); bSyncClient = !res.isEnd(); } if ( bSyncClient ) doSyncClientChanges(); PROF.STOP("Pull"); } /* boolean syncClientChanges()throws Exception { boolean bSyncedServer = false; if ( isPendingClientChanges() ) { LOG.INFO( "Client has unconfirmed created items. Call server to update them." ); syncServerChanges(); bSyncedServer = true; } if ( bSyncedServer && isPendingClientChanges() ) { LOG.INFO( "Server does not sent created items. Stop sync." ); getSync().setState(SyncEngine.esStop); } else { PROF.START("Pull"); boolean bSyncClient = false; { IDBResult res = getDB().executeSQL("SELECT object FROM changed_values WHERE source_id=? LIMIT 1 OFFSET 0", getID()); bSyncClient = !res.isEnd(); } if ( bSyncClient ) { doSyncClientChanges(); bSyncedServer = false; } PROF.STOP("Pull"); } return bSyncedServer; } boolean isPendingClientChanges()throws DBException { IDBResult res = getDB().executeSQL("SELECT object FROM changed_values WHERE source_id=? and update_type='create' and sent>1 LIMIT 1 OFFSET 0", getID()); return !res.isEnd(); }*/ void addBelongsTo(String strAttrib, Integer nSrcID) { m_hashBelongsTo.put(strAttrib, nSrcID); } Integer getBelongsToSrcID(String strAttrib) { if ( m_hashBelongsTo.containsKey(strAttrib) ) return (Integer)m_hashBelongsTo.get(strAttrib); return new Integer(-1); } void checkIgnorePushObjects()throws Exception { // ignore changes in pending creates { IDBResult res = getDB().executeSQL("SELECT distinct(object) FROM changed_values where source_id=? 
and sent>=2", getID() ); for( ; !res.isEnd(); res.next() ) { String strObject = res.getStringByIdx(0); m_hashIgnorePushObjects.put(strObject, new Integer(1)); } } //check for belongs_to String strAttribQuests = ""; Vector/*<String>*/ arValues = new Vector(); arValues.addElement(getID()); Enumeration keys = m_hashBelongsTo.keys(); while (keys.hasMoreElements()) { if ( strAttribQuests.length() > 0 ) strAttribQuests += ","; strAttribQuests += "?"; arValues.addElement(keys.nextElement()); } if ( strAttribQuests.length() > 0 ) { IDBResult res = getDB().executeSQLEx( "SELECT object, attrib, value FROM changed_values where source_id=? and sent<=1 and attrib IN ( " + strAttribQuests + " )", arValues ); for( ; !res.isEnd(); res.next() ) { String strObject = res.getStringByIdx(0); String strAttrib = res.getStringByIdx(1); String strValue = res.getStringByIdx(2); IDBResult res2 = getDB().executeSQL( "SELECT object FROM changed_values where source_id=? and sent>=2 and object=? LIMIT 1 OFFSET 0", getBelongsToSrcID(strAttrib), strValue ); if (!res2.isEnd()) m_hashIgnorePushObjects.put(strObject, new Integer(1) ); } } } void doSyncClientChanges()throws Exception { String arUpdateTypes[] = {"create", "update", "delete"}; boolean arUpdateSent[] = {false, false, false}; m_arMultipartItems.removeAllElements(); m_arBlobAttrs.removeAllElements(); String strBody = "{\"source_name\":" + JSONEntry.quoteValue(getName()) + ",\"client_id\":" + JSONEntry.quoteValue(getSync().getClientID()); boolean bSend = false; int i = 0; getDB().Lock(); try{ checkIgnorePushObjects(); for( i = 0; i < 3; i++ ) { String strBody1; strBody1 = makePushBody_Ver3(arUpdateTypes[i], true); if (strBody1.length() > 0) { strBody += "," + strBody1; String strBlobAttrs = ""; for ( int j = 0; j < (int)m_arBlobAttrs.size(); j++) { if ( strBlobAttrs.length() > 0 ) strBlobAttrs += ","; strBlobAttrs += JSONEntry.quoteValue((String)m_arBlobAttrs.elementAt(j)); } if ( strBlobAttrs.length() > 0 ) strBody += ",\"blob_fields\":[" 
+ strBlobAttrs + "]"; arUpdateSent[i] = true; bSend = true; } } strBody += "}"; }finally { getDB().Unlock(); } if ( bSend && getSync().isContinueSync() ) { LOG.INFO( "Push client changes to server. Source: " + getName() + "Size :" + strBody.length() ); LOG.TRACE("Push body: " + strBody); try{ if ( m_arMultipartItems.size() > 0 ) { MultipartItem oItem = new MultipartItem(); oItem.m_strBody = strBody; //oItem.m_strContentType = getProtocol().getContentType(); oItem.m_strName = "cud"; m_arMultipartItems.addElement(oItem); NetResponse resp = getNet().pushMultipartData( getProtocol().getClientChangesUrl(), m_arMultipartItems, getSync(), null ); if ( !resp.isOK() ) { //getSync().stopSync(); m_nErrCode = RhoAppAdapter.ERR_REMOTESERVER; m_strError = resp.getCharData(); } }else { NetResponse resp = getNet().pushData( getProtocol().getClientChangesUrl(), strBody, getSync()); if ( !resp.isOK() ) { //getSync().stopSync(); m_nErrCode = RhoAppAdapter.ERR_REMOTESERVER; m_strError = resp.getCharData(); } } }catch(Exception exc) { m_nErrCode = RhoAppAdapter.getNetErrorCode(exc); throw exc; } } for( i = 0; i < 3 && m_nErrCode == RhoAppAdapter.ERR_NONE; i++ ) { if ( arUpdateSent[i] ) { //oo conflicts if ( i < 1 && !getSync().getSourceOptions().getBoolProperty(getID(), "pass_through") ) //create getDB().executeSQL("UPDATE changed_values SET sent=2 WHERE source_id=? and update_type=? and sent=1", getID(), arUpdateTypes[i] ); else getDB().executeSQL("DELETE FROM changed_values WHERE source_id=? and update_type=? 
and sent=1", getID(), arUpdateTypes[i] ); } } // (continuation: end of preceding method, whose start is outside this excerpt)
m_arMultipartItems.removeAllElements();
m_arBlobAttrs.removeAllElements();
}

//{"source_name":"SampleAdapter","client_id":1,"create":{"1":{"brand":"Apple","name":"iPhone","price":"199.99"}}}
//{"source_name":"SampleAdapter","client_id":1,"update":{"1":{"brand":"Apple","name":"iPhone","price":"199.99"}}}
//{"source_name":"SampleAdapter","client_id":1,"delete":{"1":{"brand":"Apple","name":"iPhone","price":"199.99"}}}
//{"source_name":"SampleAdapter","client_id":1,"delete":{"3":{"brand":"HTC","name":"Fuze","price":"299.99"}},"create":{"1":{"brand":"Apple","name":"iPhone","price":"199.99"}},"update":{"2":{"brand":"Android","name":"G2","price":"99.99"}}}

/**
 * Builds the JSON body for one update type ("create"/"update"/"delete") from
 * rows of changed_values, grouping attribute/value pairs per object.
 * When isSync is true the body is wrapped as "<type>":{...}, pending attribute
 * changes are flushed first, and pushed rows are marked sent=1; when false the
 * body is a bare {...} object (used by applyChangedValues).
 * Blob-file attributes are queued as multipart items instead of inlined.
 *
 * NOTE(review): getDB().Unlock() is not in a finally block and the main loop
 * never closes `res` — a DBException mid-loop leaves the DB locked; confirm
 * whether IDBResult/Lock are cleaned up elsewhere.
 */
String makePushBody_Ver3( String strUpdateType, boolean isSync)throws DBException {
    String strBody = "";
    getDB().Lock();
    if ( isSync )
        getDB().updateAllAttribChanges();
    IDBResult res = getDB().executeSQL("SELECT attrib, object, value, attrib_type "+
        "FROM changed_values where source_id=? and update_type =? and sent<=1 ORDER BY object", getID(), strUpdateType );
    if ( res.isEnd() ) {
        res.close();
        getDB().Unlock();
        return strBody;
    }
    String strCurObject = "";
    boolean bFirst = true;
    for( ; !res.isEnd(); res.next() ) {
        String strAttrib = res.getStringByIdx(0);
        String strObject = res.getStringByIdx(1);
        String value = res.getStringByIdx(2);
        String attribType = res.getStringByIdx(3);
        // Objects flagged to be skipped on push are ignored entirely.
        if ( m_hashIgnorePushObjects.containsKey(strObject) )
            continue;
        // Blob attributes: queue the file for multipart upload.
        if ( attribType.compareTo("blob.file") == 0 ) {
            MultipartItem oItem = new MultipartItem();
            oItem.m_strFilePath = RhodesApp.getInstance().resolveDBFilesPath(value);
            oItem.m_strContentType = "application/octet-stream";
            oItem.m_strName = strAttrib + "-" + strObject;
            m_arBlobAttrs.addElement(strAttrib);
            m_arMultipartItems.addElement(oItem);
        }
        // Lazily open the top-level JSON object on the first emitted row.
        if ( strBody.length() == 0 ) {
            if ( !isSync )
                strBody += "{";
            else
                strBody += "\"" + strUpdateType + "\":{";
        }
        // New object id: close previous object's attribute map, start a new key.
        if ( strObject.compareTo(strCurObject) != 0 ) {
            if ( strCurObject.length() > 0 ) {
                if ( !bFirst )
                    strBody += "}";
                strBody += ",";
            }
            bFirst = true;
            strBody += JSONEntry.quoteValue(strObject);
            strCurObject = strObject;
        }
        if (!bFirst)
            strBody += ",";
        if ( strAttrib.length() > 0 ) {
            if ( bFirst )
                strBody += ":{";
            strBody += JSONEntry.quoteValue(strAttrib) + ":" + JSONEntry.quoteValue(value);
            bFirst = false;
        }
    }
    if ( strBody.length() > 0 ) {
        if ( !bFirst )
            strBody += "}";
        strBody += "}";
    }
    if ( isSync )
        getDB().executeSQL("UPDATE changed_values SET sent=1 WHERE source_id=? and update_type=? and sent=0", getID(), strUpdateType );
    getDB().Unlock();
    return strBody;
}

/**
 * Re-applies locally recorded create/delete/update changes through the normal
 * server-command path (non-sync mode: bodies are bare JSON objects).
 * NOTE(review): "update" bodies are processed with the "insert" command —
 * presumably insert acts as upsert here; confirm against processServerCmd_Ver3.
 */
void applyChangedValues()throws Exception {
    String strBody = makePushBody_Ver3("create", false);
    if ( strBody != null && strBody.length() > 0 ) {
        JSONEntry oEntry = new JSONEntry(strBody);
        processSyncCommand("insert", oEntry, false );
    }
    strBody = makePushBody_Ver3("delete", false);
    if ( strBody != null && strBody.length() > 0 ) {
        JSONEntry oEntry = new JSONEntry(strBody);
        processSyncCommand("delete", oEntry, false );
    }
    strBody = makePushBody_Ver3("update", false);
    if ( strBody != null && strBody.length() > 0 ) {
        JSONEntry oEntry = new JSONEntry(strBody);
        processSyncCommand("insert", oEntry, false );
    }
}

/**
 * Pulls server-side changes for this source page by page until the token
 * reaches 0, the sync is cancelled, or a non-custom error occurs.
 * A per-source "rho_server_response" option can substitute a canned response
 * (test hook). Stops the whole sync if the server reported a schema change.
 */
void syncServerChanges()throws Exception {
    LOG.INFO("Sync server changes source ID :" + getID() );
    while( getSync().isContinueSync()&& ( m_nErrCode == RhoAppAdapter.ERR_NONE || m_nErrCode == RhoAppAdapter.ERR_CUSTOMSYNCSERVER) ) {
        setCurPageCount(0);
        String strUrl = getProtocol().getServerQueryUrl("");
        String strQuery = getProtocol().getServerQueryBody(getName(), getSync().getClientID(), getSync().getSyncPageSize());
        if ( !m_bTokenFromDB && getToken() > 1 )
            strQuery += "&token=" + getToken();
        LOG.INFO( "Pull changes from server. Url: " + (strUrl+strQuery) );
        NetResponse resp = null;
        try{
            PROF.START("Net");
            resp = getNet().pullData(strUrl+strQuery, getSync());
            PROF.STOP("Net");
            if ( !resp.isOK() ) {
                //getSync().stopSync();
                m_nErrCode = RhoAppAdapter.getErrorFromResponse(resp);
                m_strError = resp.getCharData();
                continue;
            }
        }catch(Exception exc) {
            m_nErrCode = RhoAppAdapter.getNetErrorCode(exc);
            throw exc;
        }
        String szData = null;
        // Test hook: a per-source option can replace the real server payload.
        String strTestResp = getSync().getSourceOptions().getProperty(getID(), "rho_server_response");
        if ( strTestResp != null && strTestResp.length() > 0 )
            szData = strTestResp;
        else
            szData = resp.getCharData();
        PROF.START("Parse");
        JSONArrayIterator oJsonArr = new JSONArrayIterator(szData);
        PROF.STOP("Parse");
        processServerResponse_ver3(oJsonArr);
        if (getSync().getSourceOptions().getBoolProperty(getID(), "pass_through"))
            processToken(0);
        if ( getToken() == 0 )
            break;
    }
    if ( getSync().isSchemaChanged() )
        getSync().stopSync();
}

//{"create-error":{"0_broken_object_id":{"name":"wrongname","an_attribute":"error create"},"0_broken_object_id-error":{"message":"error create"}}}
/**
 * Scans the server reply for any of the known error sections and folds them
 * into m_strServerError as URL-encoded "server_errors[...]..." parameters.
 * @return true if at least one error section was present (sync command
 *         processing is then skipped by the caller).
 */
boolean processServerErrors(JSONEntry oCmds)throws Exception {
    String arErrTypes[] = {"source-error", "search-error", "create-error", "update-error", "delete-error", "update-rollback", null};
    boolean bRes = false;
    for( int i = 0; ; i++ ) {
        if ( arErrTypes[i] == null )
            break;
        if ( !oCmds.hasName(arErrTypes[i]) )
            continue;
        bRes = true;
        m_nErrCode = RhoAppAdapter.ERR_CUSTOMSYNCSERVER;
        JSONEntry errSrc = oCmds.getEntry(arErrTypes[i]);
        JSONStructIterator errIter = new JSONStructIterator(errSrc);
        for( ; !errIter.isEnd(); errIter.next() ) {
            String strKey = errIter.getCurKey();
            if ( i == 0 || i == 1 ) //"source-error", "search-error"
            {
                if ( errIter.getCurValue().hasName("message") ) {
                    if ( m_strServerError.length() > 0 )
                        m_strServerError += "&";
                    m_strServerError += "server_errors[" + URI.urlEncode(strKey) + "][message]=" + URI.urlEncode(errIter.getCurValue().getString("message"));
                }
            } else {
                //"create-error", "update-error", "delete-error", "update-rollback"
                String strObject = strKey;
                // Keys ending in "-error" carry the message; other keys carry
                // the failed object's attributes.
                if ( strObject.endsWith("-error") ) {
                    strObject = strObject.substring(0, strKey.length()-6);
                    if ( m_strServerError.length() > 0 )
                        m_strServerError += "&";
                    m_strServerError += "server_errors[" + arErrTypes[i] + "][" + URI.urlEncode(strObject) + "][message]=" + URI.urlEncode(errIter.getCurValue().getString("message"));
                }else {
                    JSONStructIterator attrIter = new JSONStructIterator(errIter.getCurValue());
                    for( ; !attrIter.isEnd(); attrIter.next() ) {
                        String strAttrName = attrIter.getCurKey();
                        String strAttrValue = attrIter.getCurString();
                        if ( m_strServerError.length() > 0 )
                            m_strServerError += "&";
                        m_strServerError += "server_errors[" + arErrTypes[i] + "][" + URI.urlEncode(strObject) + "][attributes][" + URI.urlEncode(strAttrName) + "]=" + URI.urlEncode(strAttrValue);
                    }
                }
            }
        }
    }
    return bRes;
}

/**
 * Parses one page of the v3 sync-protocol response: version check, token,
 * page/total counters, then (inside a DB transaction) the links/delete/insert
 * command sections. Fires object and progress notifications afterwards.
 */
void processServerResponse_ver3(JSONArrayIterator oJsonArr)throws Exception {
    PROF.START("Data1");
    int nVersion = 0;
    if ( !oJsonArr.isEnd() && oJsonArr.getCurItem().hasName("version") ) {
        nVersion = oJsonArr.getCurItem().getInt("version");
        oJsonArr.next();
    }
    if ( nVersion != getProtocol().getVersion() ) {
        LOG.ERROR("Sync server send data with incompatible version. Client version: " + getProtocol().getVersion() +
            "; Server response version: " + nVersion + ". Source name: " + getName() );
        getSync().stopSync();
        m_nErrCode = RhoAppAdapter.ERR_UNEXPECTEDSERVERRESPONSE;
        return;
    }
    if ( !oJsonArr.isEnd() && oJsonArr.getCurItem().hasName("token")) {
        processToken(oJsonArr.getCurItem().getUInt64("token"));
        oJsonArr.next();
    }
    if ( !oJsonArr.isEnd() && oJsonArr.getCurItem().hasName("source") ) {
        //skip it. it uses in search only
        oJsonArr.next();
    }
    if ( !oJsonArr.isEnd() && oJsonArr.getCurItem().hasName("count") ) {
        setCurPageCount(oJsonArr.getCurItem().getInt("count"));
        oJsonArr.next();
    }
    if ( !oJsonArr.isEnd() && oJsonArr.getCurItem().hasName("refresh_time") ) {
        setRefreshTime(oJsonArr.getCurItem().getInt("refresh_time"));
        oJsonArr.next();
    }
    if ( !oJsonArr.isEnd() && oJsonArr.getCurItem().hasName("progress_count") ) {
        //TODO: progress_count
        //setTotalCount(oJsonArr.getCurItem().getInt("progress_count"));
        oJsonArr.next();
    }
    if ( !oJsonArr.isEnd() && oJsonArr.getCurItem().hasName("total_count") ) {
        setTotalCount(oJsonArr.getCurItem().getInt("total_count"));
        oJsonArr.next();
    }
    //if ( getServerObjectsCount() == 0 )
    //    getNotify().fireSyncNotification(this, false, RhoAppAdapter.ERR_NONE, "");
    if ( getToken() == 0 ) {
        //oo conflicts
        getDB().executeSQL("DELETE FROM changed_values where source_id=? and sent>=3", getID() );
    }
    LOG.INFO("Got " + getCurPageCount() + "(Processed: " + getServerObjectsCount() + ") records of " + getTotalCount() +
        " from server. Source: " + getName() + ". Version: " + nVersion );
    PROF.STOP("Data1");
    if ( !oJsonArr.isEnd() && getSync().isContinueSync() ) {
        JSONEntry oCmds = oJsonArr.getCurItem();
        PROF.START("Data");
        if ( oCmds.hasName("schema-changed") ) {
            getSync().setSchemaChanged(true);
        }else if ( !processServerErrors(oCmds) ) {
            getDB().startTransaction();
            // pass_through sources are fully replaced by the incoming page.
            if (getSync().getSourceOptions().getBoolProperty(getID(), "pass_through")) {
                if ( m_bSchemaSource )
                    getDB().executeSQL( "DELETE FROM " + getName() );
                else
                    getDB().executeSQL( "DELETE FROM object_values WHERE source_id=?", getID() );
            }
            if ( oCmds.hasName("metadata") && getSync().isContinueSync() ) {
                String strMetadata = oCmds.getString("metadata");
                getDB().executeSQL("UPDATE sources SET metadata=? WHERE source_id=?", strMetadata, getID() );
            }
            if ( oCmds.hasName("links") && getSync().isContinueSync() )
                processSyncCommand("links", oCmds.getEntry("links"), true );
            if ( oCmds.hasName("delete") && getSync().isContinueSync() )
                processSyncCommand("delete", oCmds.getEntry("delete"), true );
            if ( oCmds.hasName("insert") && getSync().isContinueSync() )
                processSyncCommand("insert", oCmds.getEntry("insert"), true );
            PROF.STOP("Data");
            PROF.START("DB");
            getDB().endTransaction();
            PROF.STOP("DB");
            getNotify().fireObjectsNotification();
        }
    }
    PROF.START("Data1");
    if ( getCurPageCount() > 0 )
        getNotify().fireSyncNotification(this, false, RhoAppAdapter.ERR_NONE, "");
    PROF.STOP("Data1");
}

/**
 * Applies one command section ("links"/"delete"/"insert") object by object,
 * dispatching to the schema or property-bag variant. DB errors for a single
 * object are logged and skipped. When bCheckUIRequest is set, progress
 * notifications are fired and the transaction is briefly released if the UI
 * thread is waiting for the database.
 */
void processSyncCommand(String strCmd, JSONEntry oCmdEntry, boolean bCheckUIRequest)throws Exception {
    JSONStructIterator objIter = new JSONStructIterator(oCmdEntry);
    for( ; !objIter.isEnd() && getSync().isContinueSync(); objIter.next() ) {
        String strObject = objIter.getCurKey();
        JSONStructIterator attrIter = new JSONStructIterator( objIter.getCurValue() );
        try {
            if ( m_bSchemaSource )
                processServerCmd_Ver3_Schema(strCmd,strObject,attrIter);
            else {
                for( ; !attrIter.isEnd(); attrIter.next() ) {
                    String strAttrib = attrIter.getCurKey();
                    String strValue = attrIter.getCurString();
                    processServerCmd_Ver3(strCmd,strObject,strAttrib,strValue);
                }
            }
        }catch(DBException exc) {
            // Best-effort: one broken object must not abort the whole page.
            LOG.ERROR("Sync of server changes failed for " + getName() + ";object: " + strObject, exc);
        }
        if ( getSyncType().compareTo("none") == 0 )
            continue;
        if ( bCheckUIRequest ) {
            int nSyncObjectCount = getNotify().incLastSyncObjectCount(getID());
            if ( getProgressStep() > 0 && (nSyncObjectCount%getProgressStep() == 0) )
                getNotify().fireSyncNotification(this, false, RhoAppAdapter.ERR_NONE, "");
            if ( getDB().isUIWaitDB() ) {
                LOG.INFO("Commit transaction because of UI request.");
                getDB().endTransaction();
                SyncThread.getInstance().sleep(1000);
                getDB().startTransaction();
            }
        }
    }
}

/**
 * Propagates an object-id rename (temporary id -> server id) to every source
 * that holds an association attribute pointing at this source.
 */
void processAssociations(String strOldObject, String strNewObject)throws Exception {
    for ( int i = 0; i < m_arAssociations.size(); i++ ) {
        SyncSource pSrc = getSync().findSourceByName( ((CAssociation)m_arAssociations.elementAt(i)).m_strSrcName);
        if ( pSrc != null )
            pSrc.updateAssociation(strOldObject, strNewObject, ((CAssociation)m_arAssociations.elementAt(i)).m_strAttrib);
    }
}

/**
 * Rewrites strAttrib values equal to strOldObject to strNewObject in this
 * source's data table (schema source) or object_values, and always in
 * changed_values.
 */
void updateAssociation(String strOldObject, String strNewObject, String strAttrib)throws Exception {
    if ( m_bSchemaSource ) {
        String strSqlUpdate = "UPDATE ";
        strSqlUpdate += getName() + " SET " + strAttrib + "=? where " + strAttrib + "=?";
        getDB().executeSQL(strSqlUpdate, strNewObject, strOldObject );
    } else
        getDB().executeSQL("UPDATE object_values SET value=? where attrib=? and source_id=? and value=?",
            strNewObject, strAttrib, getID(), strOldObject );
    getDB().executeSQL("UPDATE changed_values SET value=? where attrib=? and source_id=? and value=?",
        strNewObject, strAttrib, getID(), strOldObject );
}

/**
 * Applies one server command to a fixed-schema source table:
 * - "insert": INSERT, falling back to UPDATE on a unique-key clash (and
 *   marking clashing local changes sent=4 for conflict handling);
 * - "delete": NULLs the listed attributes, removes the row once every
 *   non-"object" column is NULL, marks local changes sent=3;
 * - "links": renames a temporary object id to the server-assigned id.
 */
void processServerCmd_Ver3_Schema(String strCmd, String strObject, JSONStructIterator attrIter)throws Exception {
    if ( strCmd.compareTo("insert") == 0 ) {
        Vector/*<String>*/ vecValues = new Vector(), vecAttrs = new Vector();
        String strCols = "", strQuest = "", strSet = "";
        for( ; !attrIter.isEnd(); attrIter.next() ) {
            CAttrValue oAttrValue = new CAttrValue(attrIter.getCurKey(),attrIter.getCurString());
            if ( !processBlob(strCmd,strObject,oAttrValue) )
                break;
            if ( strCols.length() > 0 )
                strCols += ",";
            if ( strQuest.length() > 0)
                strQuest += ",";
            if ( strSet.length() > 0)
                strSet += ",";
            strCols += oAttrValue.m_strAttrib;
            strQuest += "?";
            strSet += oAttrValue.m_strAttrib + "=?";
            vecAttrs.addElement(oAttrValue.m_strAttrib);
            vecValues.addElement(oAttrValue.m_strValue);
        }
        // "object" column is appended last so vecValues lines up for both the
        // INSERT and the fallback UPDATE ... WHERE object=?.
        vecValues.addElement(strObject);
        if ( strCols.length() > 0 )
            strCols += ",";
        if ( strQuest.length() > 0)
            strQuest += ",";
        strCols += "object";
        strQuest += "?";
        String strSqlInsert = "INSERT INTO ";
        strSqlInsert += getName() + " (";
        strSqlInsert += strCols + ") VALUES(" + strQuest + ")";
        if ( !getSync().isContinueSync() )
            return;
        IDBResult resInsert = getDB().executeSQLReportNonUniqueEx(strSqlInsert, vecValues );
        if ( resInsert.isNonUnique() ) {
            String strSqlUpdate = "UPDATE ";
            strSqlUpdate += getName() + " SET " + strSet + " WHERE object=?";
            getDB().executeSQLEx(strSqlUpdate, vecValues);
            if ( getSyncType().compareTo("none") != 0 ) {
                // oo conflicts
                for( int i = 0; i < (int)vecAttrs.size(); i++ ) {
                    getDB().executeSQL("UPDATE changed_values SET sent=4 where object=? and attrib=? and source_id=? and sent>1",
                        strObject, vecAttrs.elementAt(i), getID() );
                }
            }
        }
        if ( getSyncType().compareTo("none") != 0 )
            getNotify().onObjectChanged(getID(),strObject, SyncNotify.enUpdate);
        m_nInserted++;
    }else if (strCmd.compareTo("delete") == 0) {
        Vector/*<String>*/ vecAttrs = new Vector();
        String strSet = "";
        for( ; !attrIter.isEnd() && getSync().isContinueSync(); attrIter.next() ) {
            CAttrValue oAttrValue = new CAttrValue(attrIter.getCurKey(),attrIter.getCurString());
            if ( strSet.length() > 0 )
                strSet += ",";
            vecAttrs.addElement(oAttrValue.m_strAttrib);
            strSet += oAttrValue.m_strAttrib + "=NULL";
        }
        String strSqlUpdate = "UPDATE ";
        strSqlUpdate += getName() + " SET " + strSet + " WHERE object=?";
        if ( strSet.length() == 0 )
            return;
        getDB().executeSQL(strSqlUpdate, strObject);
        //Remove item if all nulls
        String strSelect = "SELECT * FROM " + getName() + " WHERE object=?";
        IDBResult res = getDB().executeSQL( strSelect, strObject );
        if ( !res.isEnd() ) {
            boolean bAllNulls = true;
            for( int i = 0; i < res.getColCount(); i ++) {
                if ( !res.isNullByIdx(i) && res.getColName(i).compareTo("object")!=0 ) {
                    bAllNulls = false;
                    break;
                }
            }
            if (bAllNulls) {
                String strDelete = "DELETE FROM " + getName() + " WHERE object=?";
                getDB().executeSQL( strDelete, strObject);
            }
        }
        if ( getSyncType().compareTo("none") != 0 ) {
            getNotify().onObjectChanged(getID(), strObject, SyncNotify.enDelete);
            // oo conflicts
            for( int i = 0; i < (int)vecAttrs.size(); i++ ) {
                getDB().executeSQL("UPDATE changed_values SET sent=3 where object=? and attrib=? and source_id=?",
                    strObject, vecAttrs.elementAt(i), getID() );
            }
        }
        m_nDeleted++;
    }else if ( strCmd.compareTo("links") == 0 ) {
        String strValue = attrIter.getCurString();
        processAssociations(strObject, strValue);
        String strSqlUpdate = "UPDATE ";
        strSqlUpdate += getName() + " SET object=? WHERE object=?";
        getDB().executeSQL(strSqlUpdate, strValue, strObject);
        getDB().executeSQL("UPDATE changed_values SET object=?,sent=3 where object=? and source_id=?", strValue, strObject, getID() );
        getNotify().onObjectChanged(getID(), strObject, SyncNotify.enCreate);
    }
}

/**
 * Handles a blob-typed attribute arriving from the server. Unless the attr is
 * configured to always overwrite, an existing non-empty local value is kept;
 * otherwise the blob is downloaded (outside the current transaction, which is
 * suspended and restarted around the network call).
 * @return false if the download failed (caller aborts this object).
 */
boolean processBlob( String strCmd, String strObject, CAttrValue oAttrValue )throws Exception {
    //TODO: when server return delete with rhoblob postfix - delete isBlobAttr
    if ( !(oAttrValue.m_strBlobSuffix.length() > 0 || getDB().getAttrMgr().isBlobAttr(getID(), oAttrValue.m_strAttrib)) )
        return true;
    boolean bDownload = true;
    String strDbValue = "";
    if ( !getDB().getAttrMgr().isOverwriteBlobFromServer(getID(), oAttrValue.m_strAttrib) ) {
        if ( m_bSchemaSource ) {
            String strSelect = "SELECT " + oAttrValue.m_strAttrib + " FROM " + getName() + " WHERE object=?";
            IDBResult res = getDB().executeSQL( strSelect, strObject);
            if (!res.isEnd()) {
                strDbValue = res.getStringByIdx(0);
                bDownload = strDbValue == null || strDbValue.length() == 0;
            }
        }else {
            IDBResult res = getDB().executeSQL( "SELECT value FROM object_values WHERE object=? and attrib=? and source_id=?",
                strObject, oAttrValue.m_strAttrib, getID() );
            if (!res.isEnd()) {
                strDbValue = res.getStringByIdx(0);
                bDownload = strDbValue == null || strDbValue.length() == 0;
            }
        }
    }
    if ( bDownload ) {
        boolean bRes = false;
        // Release the DB transaction while blocking on the network.
        getDB().endTransaction();
        try{
            bRes = downloadBlob(oAttrValue);
        }finally {
            getDB().startTransaction();
        }
        return bRes;
    }
    /*
    String fName = makeFileName( oAttrValue );
    String fOldName = RhodesApp.getInstance().resolveDBFilesPath(strDbValue);
    RhoClassFactory.createFile().renameOverwrite(fOldName, fName);
    oAttrValue.m_strValue = FilePath.getRelativePath( fName, RhodesApp.getInstance().getRhoRootPath());
    */
    oAttrValue.m_strValue = strDbValue;
    return true;
}

/**
 * Applies one server command to a property-bag source (object_values table);
 * same semantics as the schema variant but per single attribute/value pair.
 */
void processServerCmd_Ver3(String strCmd, String strObject, String strAttriba, String strValuea)throws Exception {
    CAttrValue oAttrValue = new CAttrValue(strAttriba,strValuea);
    if ( strCmd.compareTo("insert") == 0 ) {
        if ( !processBlob(strCmd,strObject,oAttrValue) )
            return;
        IDBResult resInsert = getDB().executeSQLReportNonUnique("INSERT INTO object_values "+
            "(attrib, source_id, object, value) VALUES(?,?,?,?)",
            oAttrValue.m_strAttrib, getID(), strObject, oAttrValue.m_strValue );
        if ( resInsert.isNonUnique() ) {
            // Row already exists: insert degrades to update (upsert).
            getDB().executeSQL("UPDATE object_values " +
                "SET value=? WHERE object=? and attrib=? and source_id=?",
                oAttrValue.m_strValue, strObject, oAttrValue.m_strAttrib, getID() );
            if ( getSyncType().compareTo("none") != 0 ) {
                // oo conflicts
                getDB().executeSQL("UPDATE changed_values SET sent=4 where object=? and attrib=? and source_id=? and sent>1",
                    strObject, oAttrValue.m_strAttrib, getID() );
            }
        }
        if ( getSyncType().compareTo("none") != 0 )
            getNotify().onObjectChanged(getID(),strObject, SyncNotify.enUpdate);
        m_nInserted++;
    }else if (strCmd.compareTo("delete") == 0) {
        getDB().executeSQL("DELETE FROM object_values where object=? and attrib=? and source_id=?",
            strObject, oAttrValue.m_strAttrib, getID() );
        if ( getSyncType().compareTo("none") != 0 ) {
            getNotify().onObjectChanged(getID(), strObject, SyncNotify.enDelete);
            // oo conflicts
            getDB().executeSQL("UPDATE changed_values SET sent=3 where object=? and attrib=? and source_id=?",
                strObject, oAttrValue.m_strAttrib, getID() );
        }
        m_nDeleted++;
    }else if ( strCmd.compareTo("links") == 0 ) {
        processAssociations(strObject, oAttrValue.m_strValue);
        getDB().executeSQL("UPDATE object_values SET object=? where object=? and source_id=?",
            oAttrValue.m_strValue, strObject, getID() );
        getDB().executeSQL("UPDATE changed_values SET object=?,sent=3 where object=? and source_id=?",
            oAttrValue.m_strValue, strObject, getID() );
        getNotify().onObjectChanged(getID(), strObject, SyncNotify.enCreate);
    }
}

/**
 * Builds a unique local file name for a blob download. The extension is taken
 * from an "extension=" query parameter, else from the URL's last path segment,
 * else defaults to ".bin"; the base name is a timestamp under the blobs dir.
 */
private String makeFileName(CAttrValue value)throws Exception {
    String strExt = "";
    URI uri = new URI(value.m_strValue);
    String strQuest = uri.getQueryString();
    if (strQuest != null && strQuest.length() > 0) {
        int nExt = strQuest.indexOf("extension=");
        if ( nExt >= 0 ) {
            int nExtEnd = strQuest.indexOf("&", nExt);
            if (nExtEnd < 0 )
                nExtEnd = strQuest.length();
            strExt = strQuest.substring(nExt+10, nExtEnd);
        }
    }
    if ( strExt.length() == 0 ) {
        String strFileName = uri.getLastNamePart();
        int nExt = strFileName != null ? strFileName.lastIndexOf('.') : -1;
        if ( nExt >= 0 )
            strExt = strFileName.substring(nExt);
    }
    if ( strExt.length() == 0 )
        strExt = ".bin";
    else if ( strExt.charAt(0) != '.' )
        strExt = "." + strExt;
    String fName = RhodesApp.getInstance().getBlobsDirPath() + "/id_" + TimeInterval.getCurrentTime().toULong() + strExt;
    return fName;
    /*
    String strExt = ".bin";
    URI uri = new URI(value.m_strValue);
    int nDot = uri.getPath().lastIndexOf('.');
    if ( nDot >= 0 )
        strExt = uri.getPath().substring(nDot);
    else{
        int nExt = uri.getQueryString().indexOf("extension=");
        if ( nExt >= 0 ){
            int nExtEnd = uri.getQueryString().indexOf("&", nExt);
            if (nExtEnd < 0 )
                nExtEnd = uri.getQueryString().length();
            strExt = uri.getQueryString().substring(nExt+10, nExtEnd);
        }
    }
    String fName = RhodesApp.getInstance().getBlobsDirPath() + "/id_" + TimeInterval.getCurrentTime().toULong() + strExt;
    return fName;*/
}

/**
 * Downloads one blob to a fresh local file (appending client_id to the URL)
 * and replaces the attribute's value with the file's app-relative path.
 * On HTTP failure the partial file is deleted and false is returned; network
 * exceptions are recorded in m_nErrCode and rethrown.
 */
boolean downloadBlob(CAttrValue value)throws Exception {
    String fName = makeFileName( value );
    String url = value.m_strValue;
    int nQuest = url.lastIndexOf('?');
    if ( nQuest > 0 )
        url += "&";
    else
        url += "?";
    url += "client_id=" + getSync().getClientID();
    try{
        NetResponse resp = getNet().pullFile(url, fName, getSync(), null);
        if ( !resp.isOK() ) {
            //getSync().stopSync();
            com.rho.file.RhoFile.deleteFile(fName);
            m_nErrCode = RhoAppAdapter.getErrorFromResponse(resp);
            return false;
        }
    }catch(Exception exc) {
        m_nErrCode = RhoAppAdapter.getNetErrorCode(exc);
        throw exc;
    }
    value.m_strValue = FilePath.getRelativePath( fName, RhodesApp.getInstance().getRhoRootPath());
    return true;
}

/**
 * Records the server-sent page token. An unchanged token > 1 only clears the
 * token-from-DB flag; otherwise the new token is persisted in sources.
 */
void processToken(long token)throws DBException {
    if ( token > 1 && getToken() == token ){
        //Delete non-confirmed records
        setToken( token ); //For
        m_bTokenFromDB = false;
        //getDB().executeSQL("DELETE FROM object_values where source_id=? and token=?", getID(), token ); //TODO: add special table for id,token
    }else {
        setToken( token );
        getDB().executeSQL("UPDATE sources SET token=? where source_id=?", new Long(token), getID() );
    }
}
}
package condor.gahp;

/**
 * GAHP command handler that answers the VERSION command with the standard
 * "$GahpVersion: ... $" banner, including the escaped version string of the
 * owning GAHP instance.
 */
public class VersionHandler implements CommandHandler {

    /** Owning GAHP instance; queried for its version string on each call. */
    private GahpInterface gahp = null;

    public void setGahp(GahpInterface g) {
        this.gahp = g;
    }

    /**
     * Ignores the command arguments and always returns a SUCCESS response
     * carrying the version banner.
     */
    public CommandHandlerResponse handleCommand(String[] command) {
        final String banner = CommandHandlerResponse.SUCCESS
                + " $GahpVersion: 2.0.1 Oct 29 2008 "
                + IOUtils.escapeWord(gahp.getVersion())
                + " $";
        return new CommandHandlerResponse(banner);
    }
}
package edu.brandeis.cs.steele.wn;

import java.io.*;
import java.rmi.Remote;
import java.rmi.RemoteException;

/** <code>FileManagerInterface</code> defines the interface between the <code>FileBackedDictionary</code> and the file system.
 * <code>FileBackedDictionary</code> invokes methods from this interface to retrieve lines of text from the
 * WordNet data files.
 *
 * <p>Methods in this interface take filenames as arguments.  The filename is the name of
 * a WordNet file, and is relative to the database directory (e.g. <code>data.noun</code>, not
 * <code>dict/data.noun</code>).
 *
 * <p>Methods in this interface operate on and return pointers, which are indices into the
 * file named by filename.
 *
 * <p><code>FileManagerInterface</code> is designed to work efficiently across a network.  To this end, it obeys
 * two design principles:  it uses only primitive types (including <code>String</code>) as argument and return types,
 * and operations that search a file for a line with a specific property are provided by the
 * server.  The first principle ensures that scanning a database won't create a large number of remote objects that
 * must then be queried and garbage-collected (each requiring additional RPC).  The second
 * principle avoids paging an entire database file across the network in order to search for
 * an entry.
 *
 * <p>Making <code>FileBackedDictionary</code> remote would violate the first of these properties
 * (it would require that {@link WordSense}, {@link Synset}, {@link POS}, etc. be supported as remote objects);
 * a generic remote file system interface would violate the second.
 *
 * <p>A third design principle is that sessions are stateless -- this simplifies the
 * implementation of the server.  A consequence of this
 * principle together with the restriction of return values to primitives is that pairs
 * of operations such as <code>getNextLinePointer</code>/<code>readLineAt</code> are required in order to step through
 * a file.  The implementor of <code>FileManagerInterface</code> can cache the file position before and
 * after <code>readLineAt</code> in order to eliminate the redundant IO activity that a naive implementation
 * of these methods would necessitate.
 *
 * @author Oliver Steele, steele@cs.brandeis.edu
 * @version 1.0
 */
public interface FileManagerInterface extends Remote {
    /** Search for the line whose first word <i>is</i> <var>index</var> (that is, that begins with
     * <var>index</var> followed by a space or tab).  <var>filename</var> must name a file
     * whose lines are sorted by index word.
     * @return The file offset of the start of the matching line, or <code>-1</code> if no such line
     * exists.
     */
    public int getIndexedLinePointer(String filename, String index) throws IOException, RemoteException;

    /** Read the line that begins at file offset <var>offset</var> in the file named by <var>filename</var>. */
    public String readLineAt(String filename, int offset) throws IOException, RemoteException;

    /** Search for the line following the line that begins at <var>offset</var>.
     * @return The file offset of the start of the line, or <code>-1</code> if <var>offset</var>
     * is the last line in the file.
     */
    public int getNextLinePointer(String filename, int offset) throws IOException, RemoteException;

    /** Search for a line whose index word <i>contains</i> <var>substring</var>.
     * @return The file offset of the start of the matching line, or <code>-1</code> if
     * no such line exists.
     */
    public int getMatchingLinePointer(String filename, int offset, String substring) throws IOException, RemoteException;

    /** Search for a line whose index word <i>begins with</i> <var>substring</var>.
     * @return The file offset of the start of the matching line, or <code>-1</code> if
     * no such line exists.
     */
    public int getMatchingBeginningLinePointer(String filename, int offset, String substring) throws IOException, RemoteException;

    /** Treat file contents like an array of lines and return the zero-based,
     * inclusive line corresponding to <var>linenum</var>.
     * @throws RemoteException if the remote call fails.
     * (Fix: every method of a <code>java.rmi.Remote</code> interface must declare
     * <code>RemoteException</code>; this one previously omitted it, which breaks RMI export.
     * Source-compatible for callers since <code>RemoteException extends IOException</code>.)
     */
    public String readLineNumber(String filename, int linenum) throws IOException, RemoteException;
}
package ibis.io; import java.io.ObjectInput; import java.io.ObjectStreamClass; import java.io.IOException; import java.io.NotActiveException; import java.io.Serializable; import java.io.Externalizable; import ibis.ipl.IbisIOException; public final class IbisSerializationInputStream extends SerializationInputStream implements IbisStreamFlags { class TypeInfo { Class clazz; boolean isArray; boolean isString; // for ibis.io.Serializable Generator gen; // for java.io.Serializable AlternativeTypeInfo altInfo; TypeInfo(Class clzz, boolean isArray, boolean isString, Generator gen){ this.clazz = clzz; this.isArray = isArray; this.isString = isString; this.gen = gen; if (gen == null) { altInfo = new AlternativeTypeInfo(clzz); } } } IbisVector objects = new IbisVector(); int next_object; public ArrayInputStream in; /* Type id management */ private int next_type = 1; private IbisVector types; private static Class stringClass; private static Class classClass; static { try { stringClass = Class.forName("java.lang.String"); } catch (Exception e) { System.err.println("Failed to find java.lang.String " + e); System.exit(1); } try { classClass = Class.forName("java.lang.Class"); } catch (Exception e) { System.err.println("Failed to find java.lang.Class " + e); System.exit(1); } } /* Notion of a current object, needed for defaultWriteObject. 
*/ private Object current_object; private int current_level; private ImplGetField current_getfield; private Object[] object_stack; private int[] level_stack; private ImplGetField[] getfield_stack; private int max_stack_size = 0; private int stack_size = 0; public IbisSerializationInputStream(ArrayInputStream in) throws IOException { super(); init(in); } public void init(ArrayInputStream in) throws IOException { types = new IbisVector(); types.add(0, null); // Vector requires this types.add(TYPE_BOOLEAN, new TypeInfo(classBooleanArray, true, false, null)); types.add(TYPE_BYTE, new TypeInfo(classByteArray, true, false, null)); types.add(TYPE_CHAR, new TypeInfo(classCharArray, true, false, null)); types.add(TYPE_SHORT, new TypeInfo(classShortArray, true, false, null)); types.add(TYPE_INT, new TypeInfo(classIntArray, true, false, null)); types.add(TYPE_LONG, new TypeInfo(classLongArray, true, false, null)); types.add(TYPE_FLOAT, new TypeInfo(classFloatArray, true, false, null)); types.add(TYPE_DOUBLE, new TypeInfo(classDoubleArray, true, false, null)); next_type = PRIMITIVE_TYPES; this.in = in; objects.clear(); next_object = CONTROL_HANDLES; } public String serializationImplName() { return "ibis"; } public void reset() { if (DEBUG) { System.err.println("IN(" + this + ") reset: next handle = " + next_object + "."); } objects.clear(); next_object = CONTROL_HANDLES; } public void statistics() { System.err.println("IbisSerializationInputStream: statistics() not yet implemented"); } private void receive() throws IOException { int leftover = in.max_handle_index - in.handle_index; if (leftover == 1 && in.handle_buffer[in.handle_index] == RESET_HANDLE) { reset(); in.handle_index++; } in.receive(); } /* This is the data output / object output part */ public int read() throws IOException { while (in.byte_index == in.max_byte_index) { receive(); } return in.byte_buffer[in.byte_index++]; } public int read(byte[] b) throws IOException { return read(b, 0, b.length); } public int 
read(byte[] b, int off, int len) throws IOException { readArray(b, off, len); return len; } public long skip(long n) throws IOException { throw new IOException("skip not meaningful in a typed input stream"); } public int skipBytes(int n) throws IOException { throw new IOException("skipBytes not meaningful in a typed input stream"); } public int available() throws IOException { /* @@@ NOTE: this is not right. There are also some buffered arrays..*/ return in.available(); } public void readFully(byte[] b) throws IOException { readFully(b, 0, b.length); } public void readFully(byte[] b, int off, int len) throws IOException { read(b, off, len); } public boolean readBoolean() throws IOException { while (in.byte_index == in.max_byte_index) { receive(); } return (in.byte_buffer[in.byte_index++] == 1); } public byte readByte() throws IOException { while (in.byte_index == in.max_byte_index) { receive(); } return in.byte_buffer[in.byte_index++]; } public int readUnsignedByte() throws IOException { while (in.byte_index == in.max_byte_index) { receive(); } int i = in.byte_buffer[in.byte_index++]; if (i < 0) { i += 256; } return i; } public short readShort() throws IOException { while (in.short_index == in.max_short_index) { receive(); } return in.short_buffer[in.short_index++]; } public int readUnsignedShort() throws IOException { while (in.short_index == in.max_short_index) { receive(); } int i = in.short_buffer[in.short_index++]; if (i < 0) { i += 65536; } return i; } public char readChar() throws IOException { while (in.char_index == in.max_char_index) { receive(); } return in.char_buffer[in.char_index++]; } public int readInt() throws IOException { while (in.int_index == in.max_int_index) { receive(); } return in.int_buffer[in.int_index++]; } public int readHandle() throws IOException { while (in.handle_index == in.max_handle_index) { receive(); } if (DEBUG) { System.err.println("read handle [" + in.handle_index + "] = " + 
Integer.toHexString(in.handle_buffer[in.handle_index])); } return in.handle_buffer[in.handle_index++]; } public long readLong() throws IOException { while (in.long_index == in.max_long_index) { receive(); } return in.long_buffer[in.long_index++]; } public float readFloat() throws IOException { while (in.float_index == in.max_float_index) { receive(); } return in.float_buffer[in.float_index++]; } public double readDouble() throws IOException { while (in.double_index == in.max_double_index) { receive(); } return in.double_buffer[in.double_index++]; } public String readUTF() throws IOException { int bn = readInt(); if (DEBUG) { System.err.println("readUTF: len = " + bn); } if (bn == -1) { return null; } byte[] b = new byte[bn]; readArray(b, 0, bn); int len = 0; char[] c = new char[bn]; for (int i = 0; i < bn; i++) { if ((b[i] & ~0x7f) == 0) { c[len++] = (char)(b[i] & 0x7f); } else if ((b[i] & ~0x1f) == 0xc0) { if (i + 1 >= bn || (b[i + 1] & ~0x3f) != 0x80) { throw new IOException("UTF Data Format Exception"); } c[len++] = (char)(((b[i] & 0x1f) << 6) | (b[i] & 0x3f)); i++; } else if ((b[i] & ~0x0f) == 0xe0) { if (i + 2 >= bn || (b[i + 1] & ~0x3f) != 0x80 || (b[i + 2] & ~0x3f) != 0x80) { throw new IOException("UTF Data Format Exception"); } c[len++] = (char)(((b[i] & 0x0f) << 12) | ((b[i+1] & 0x3f) << 6) | b[i+2] & 0x3f); } else { throw new IOException("UTF Data Format Exception"); } } String s = new String(c, 0, len); // System.out.println("readUTF: " + s); if (DEBUG) { System.err.println("read string " + s); } return s; } public Class readClass() throws IOException, ClassNotFoundException { String name = readUTF(); return Class.forName(name); } private void readArrayHeader(Class clazz, int len) throws IOException { if (DEBUG) { System.err.println("readArrayHeader: class = " + clazz + " len = " + len); } int type; while (true) { type = readHandle(); if (type != RESET_HANDLE) { break; } reset(); } if (ASSERTS && ((type & TYPE_BIT) == 0)) { throw new IOException("Array 
slice header but I receive a HANDLE!"); } Class in_clazz = readType(type & TYPE_MASK).clazz; int in_len = readInt(); if (ASSERTS && !clazz.isAssignableFrom(in_clazz)) { throw new ClassCastException("Cannot assign class " + clazz + " from read class " + in_clazz); } if (ASSERTS && in_len != len) { throw new ArrayIndexOutOfBoundsException("Cannot read " + in_len + " into " + len + " elements"); } } public String readBytes() throws IOException { int len = readInt(); byte[] bytes = new byte[len]; for (int i = 0; i < len; i++) { bytes[i] = readByte(); } return new String(bytes); } public String readChars() throws IOException { int len = readInt(); char[] chars = new char[len]; for (int i = 0; i < len; i++) { chars[i] = readChar(); } return new String(chars); } public void readArray(boolean[] ref, int off, int len) throws IOException { readArrayHeader(classBooleanArray, len); in.readArray(ref, off, len); } public void readArray(byte[] ref, int off, int len) throws IOException { readArrayHeader(classByteArray, len); in.readArray(ref, off, len); } public void readArray(char[] ref, int off, int len) throws IOException { readArrayHeader(classCharArray, len); in.readArray(ref, off, len); } public void readArray(short[] ref, int off, int len) throws IOException { readArrayHeader(classShortArray, len); in.readArray(ref, off, len); } public void readArray(int[] ref, int off, int len) throws IOException { readArrayHeader(classIntArray, len); in.readArray(ref, off, len); } public void readArray(long[] ref, int off, int len) throws IOException { readArrayHeader(classLongArray, len); in.readArray(ref, off, len); } public void readArray(float[] ref, int off, int len) throws IOException { readArrayHeader(classFloatArray, len); in.readArray(ref, off, len); } public void readArray(double[] ref, int off, int len) throws IOException { readArrayHeader(classDoubleArray, len); in.readArray(ref, off, len); } public void readArray(Object[] ref, int off, int len) throws IOException, 
ClassNotFoundException { readArrayHeader(ref.getClass(), len); for (int i = off; i < off + len; i++) { ref[i] = readObject(); } } public void addObjectToCycleCheck(Object o) { objects.add(next_object, o); if (DEBUG) { System.out.println("objects[" + next_object + "] = " + (o == null ? "null" : o)); } next_object++; } public Object getObjectFromCycleCheck(int handle) { Object o = objects.get(handle); // - CONTROL_HANDLES); if (DEBUG) { System.err.println("getfromcycle: handle = " + (handle - CONTROL_HANDLES) + " obj = " + o); } return o; } public int readKnownTypeHeader() throws IOException { int handle_or_type = readHandle(); if (handle_or_type == NUL_HANDLE) { if (DEBUG) { System.err.println("readKnownTypeHeader -> read NUL_HANDLE"); } return 0; } if ((handle_or_type & TYPE_BIT) == 0) { if (DEBUG) { System.err.println("readKnownTypeHeader -> read OLD HANDLE " + (handle_or_type - CONTROL_HANDLES)); } return handle_or_type; } if (DEBUG) { System.err.println("readKnownTypeHeader -> read NEW HANDLE " + ((handle_or_type & TYPE_MASK) - CONTROL_HANDLES)); } return -1; } Object readArray(Class arrayClass, int type) throws IOException, ClassNotFoundException { int len = readInt(); if (DEBUG) { System.err.println("Read array " + arrayClass + " length " + len); } // if (len < 0) len = -len; switch (type) { case TYPE_BOOLEAN: boolean [] temp1 = new boolean[len]; in.readArray(temp1, 0, len); objects.add(next_object++, temp1); return temp1; case TYPE_BYTE: byte [] temp2 = new byte[len]; in.readArray(temp2, 0, len); objects.add(next_object++, temp2); return temp2; case TYPE_SHORT: short [] temp3 = new short[len]; in.readArray(temp3, 0, len); objects.add(next_object++, temp3); return temp3; case TYPE_CHAR: char [] temp4 = new char[len]; in.readArray(temp4, 0, len); objects.add(next_object++, temp4); return temp4; case TYPE_INT: int [] temp5 = new int[len]; in.readArray(temp5, 0, len); objects.add(next_object++, temp5); return temp5; case TYPE_LONG: long [] temp6 = new long[len]; 
in.readArray(temp6, 0, len); objects.add(next_object++, temp6); return temp6; case TYPE_FLOAT: float [] temp7 = new float[len]; in.readArray(temp7, 0, len); objects.add(next_object++, temp7); return temp7; case TYPE_DOUBLE: double [] temp8 = new double[len]; in.readArray(temp8, 0, len); objects.add(next_object++, temp8); return temp8; default: if (DEBUG) { System.err.println("Read an array " + arrayClass + " of len " + len); } Object ref = java.lang.reflect.Array.newInstance(arrayClass.getComponentType(), len); objects.add(next_object++, ref); for (int i = 0; i < len; i++) { Object o = readObject(); if (DEBUG) { System.err.println("Read array[" + i + "] = " + (o == null ? "<null>" : o.getClass().getName())); } ((Object[])ref)[i] = o; } return ref; } } public TypeInfo readType(int type) throws IOException { if (DEBUG) { System.err.println("Read type_number " + Integer.toHexString(type) + ", next = " + Integer.toHexString(next_type)); } if (type < next_type) { return (TypeInfo) types.get(type); } else { if (next_type != type) { System.err.println("EEK: readType: next_type != type"); System.exit(1); } if (DEBUG) { System.err.println("NEW TYPE: reading utf"); } String typeName = readUTF(); if (DEBUG) { System.err.println("New type " + typeName); } Class clazz = null; try { clazz = Class.forName(typeName); } catch (ClassNotFoundException e) { throw new IOException("class " + typeName + " not found"); } Generator g = null; TypeInfo t = null; if (clazz.isArray()) { t = new TypeInfo(clazz, true, false, g); } else if (clazz == stringClass) { t = new TypeInfo(clazz, false, true, g); } else if (clazz == classClass) { t = new TypeInfo(clazz, false, false, g); } else { try { Class gen_class = Class.forName(typeName + "_ibis_io_Generator"); g = (Generator) gen_class.newInstance(); } catch (Exception e) { System.err.println("WARNING: Failed to find generator for " + clazz.getName()); // + " error: " + e); // failed to get generator class -> use null } t = new TypeInfo(clazz, 
false, false, g); } types.add(next_type, t); next_type++; return t; } } private native void setFieldDouble(Object ref, String fieldname, double d); private native void setFieldLong(Object ref, String fieldname, long l); private native void setFieldFloat(Object ref, String fieldname, float f); private native void setFieldInt(Object ref, String fieldname, int i); private native void setFieldShort(Object ref, String fieldname, short s); private native void setFieldChar(Object ref, String fieldname, char c); private native void setFieldByte(Object ref, String fieldname, byte b); private native void setFieldBoolean(Object ref, String fieldname, boolean b); private native void setFieldObject(Object ref, String fieldname, String osig, Object o); /** For IOGenerator: needed when assigning final fields of an object that is rewritten, but super is not, and super is serializable. */ public void readFieldDouble(Object ref, String fieldname) throws IOException { setFieldDouble(ref, fieldname, readDouble()); } public void readFieldLong(Object ref, String fieldname) throws IOException { setFieldLong(ref, fieldname, readLong()); } public void readFieldFloat(Object ref, String fieldname) throws IOException { setFieldFloat(ref, fieldname, readFloat()); } public void readFieldInt(Object ref, String fieldname) throws IOException { setFieldInt(ref, fieldname, readInt()); } public void readFieldShort(Object ref, String fieldname) throws IOException { setFieldShort(ref, fieldname, readShort()); } public void readFieldChar(Object ref, String fieldname) throws IOException { setFieldChar(ref, fieldname, readChar()); } public void readFieldByte(Object ref, String fieldname) throws IOException { setFieldByte(ref, fieldname, readByte()); } public void readFieldBoolean(Object ref, String fieldname) throws IOException { setFieldBoolean(ref, fieldname, readBoolean()); } public void readFieldUTF(Object ref, String fieldname) throws IOException { setFieldObject(ref, fieldname, "Ljava/lang/String;", 
readUTF()); } public void readFieldClass(Object ref, String fieldname) throws IOException, ClassNotFoundException { setFieldObject(ref, fieldname, "Ljava/lang/Class;", readClass()); } public void readFieldObject(Object ref, String fieldname, String fieldsig) throws IOException, ClassNotFoundException { setFieldObject(ref, fieldname, fieldsig, readObject()); } private void alternativeDefaultReadObject(AlternativeTypeInfo t, Object ref) throws IOException { int temp = 0; try { for (int i=0;i<t.double_count;i++) { if (t.fields_final[temp]) { setFieldDouble(ref, t.serializable_fields[temp].getName(), readDouble()); } else { t.serializable_fields[temp].setDouble(ref, readDouble()); } temp++; } for (int i=0;i<t.long_count;i++) { if (t.fields_final[temp]) { setFieldLong(ref, t.serializable_fields[temp].getName(), readLong()); } else { t.serializable_fields[temp].setLong(ref, readLong()); } temp++; } for (int i=0;i<t.float_count;i++) { if (t.fields_final[temp]) { setFieldFloat(ref, t.serializable_fields[temp].getName(), readFloat()); } else { t.serializable_fields[temp].setFloat(ref, readFloat()); } temp++; } for (int i=0;i<t.int_count;i++) { if (t.fields_final[temp]) { setFieldInt(ref, t.serializable_fields[temp].getName(), readInt()); } else { t.serializable_fields[temp].setInt(ref, readInt()); } temp++; } for (int i=0;i<t.short_count;i++) { if (t.fields_final[temp]) { setFieldShort(ref, t.serializable_fields[temp].getName(), readShort()); } else { t.serializable_fields[temp].setShort(ref, readShort()); } temp++; } for (int i=0;i<t.char_count;i++) { if (t.fields_final[temp]) { setFieldChar(ref, t.serializable_fields[temp].getName(), readChar()); } else { t.serializable_fields[temp].setChar(ref, readChar()); } temp++; } for (int i=0;i<t.byte_count;i++) { if (t.fields_final[temp]) { setFieldByte(ref, t.serializable_fields[temp].getName(), readByte()); } else { t.serializable_fields[temp].setByte(ref, readByte()); } temp++; } for (int i=0;i<t.boolean_count;i++) { if 
(t.fields_final[temp]) { setFieldBoolean(ref, t.serializable_fields[temp].getName(), readBoolean()); } else { t.serializable_fields[temp].setBoolean(ref, readBoolean()); } temp++; } for (int i=0;i<t.reference_count;i++) { if (t.fields_final[temp]) { String fieldname = t.serializable_fields[temp].getName(); String fieldtype = t.serializable_fields[temp].getType().getName(); if (fieldtype.startsWith("[")) { } else { fieldtype = "L" + fieldtype.replace('.', '/') + ";"; } // System.out.println("fieldname = " + fieldname); // System.out.println("signature = " + fieldtype); setFieldObject(ref, fieldname, fieldtype, readObject()); } else { t.serializable_fields[temp].set(ref, readObject()); } temp++; } } catch(ClassNotFoundException e) { throw new IbisIOException("class not found exception", e); } catch(IllegalAccessException e2) { throw new IbisIOException("illegal access exception", e2); } } private void alternativeReadObject(AlternativeTypeInfo t, Object ref) throws IOException { if (DEBUG) { System.err.println("alternativeReadObject " + t); } if (t.superSerializable) { alternativeReadObject(t.alternativeSuperInfo, ref); } if (t.hasReadObject) { current_level = t.level; t.invokeReadObject(ref, this); return; } if (DEBUG) { System.err.println("Using alternative readObject for " + ref.getClass().getName()); } alternativeDefaultReadObject(t, ref); } public void readSerializableObject(Object ref, String classname) throws IOException { AlternativeTypeInfo t = AlternativeTypeInfo.getAlternativeTypeInfo(classname); push_current_object(ref, 0); alternativeReadObject(t, ref); pop_current_object(); } public void defaultReadSerializableObject(Object ref, int depth) throws IOException { Class type = ref.getClass(); AlternativeTypeInfo t = AlternativeTypeInfo.getAlternativeTypeInfo(type); /* Find the type info corresponding to the current invocation. See the invokeReadObject invocation in alternativeReadObject. 
*/ while (t.level > depth) { t = t.alternativeSuperInfo; } alternativeDefaultReadObject(t, ref); } private native Object createUninitializedObject(Class type, Class non_serializable_super); public Object create_uninitialized_object(String classname) throws IOException { Class clazz = null; try { clazz = Class.forName(classname); } catch (ClassNotFoundException e) { throw new IOException("class " + classname + " not found"); } Class t2 = clazz; while (Serializable.class.isAssignableFrom(t2)) { /* Find first non-serializable super-class. */ t2 = t2.getSuperclass(); } // Calls constructor for non-serializable superclass. Object obj = createUninitializedObject(clazz, t2); addObjectToCycleCheck(obj); return obj; } public void push_current_object(Object ref, int level) { if (stack_size >= max_stack_size) { max_stack_size = 2 * max_stack_size + 10; Object[] new_o_stack = new Object[max_stack_size]; int[] new_l_stack = new int[max_stack_size]; ImplGetField[] new_g_stack = new ImplGetField[max_stack_size]; for (int i = 0; i < stack_size; i++) { new_o_stack[i] = object_stack[i]; new_l_stack[i] = level_stack[i]; new_g_stack[i] = getfield_stack[i]; } object_stack = new_o_stack; level_stack = new_l_stack; getfield_stack = new_g_stack; } object_stack[stack_size] = current_object; level_stack[stack_size] = current_level; getfield_stack[stack_size] = current_getfield; stack_size++; current_object = ref; current_level = level; } public void pop_current_object() { stack_size current_object = object_stack[stack_size]; current_level = level_stack[stack_size]; current_getfield = getfield_stack[stack_size]; } public Object doReadObject() throws IOException, ClassNotFoundException { /* * ref < 0: type * ref = 0: null ptr * ref > 0: handle */ int handle_or_type = readHandle(); while (handle_or_type == RESET_HANDLE) { reset(); handle_or_type = readHandle(); } if (handle_or_type == NUL_HANDLE) { return null; } if ((handle_or_type & TYPE_BIT) == 0) { /* Ah, it's a handle. 
Look it up, return the stored ptr */ Object o = objects.get(handle_or_type); if (DEBUG) { System.err.println("readobj: handle = " + (handle_or_type - CONTROL_HANDLES) + " obj = " + o); } return o; } int type = handle_or_type & TYPE_MASK; TypeInfo t = readType(type); if (DEBUG) { System.err.println("read type " + t.clazz + " isarray " + t.isArray); } Object obj; if (DEBUG) { System.err.println("t = " + t); } if (t.isArray) { obj = readArray(t.clazz, type); } else if (t.isString) { obj = readUTF(); addObjectToCycleCheck(obj); } else if (t.clazz == classClass) { String name = readUTF(); obj = Class.forName(name); addObjectToCycleCheck(obj); } else if (t.gen != null) { obj = t.gen.generated_newInstance(this); } else if (Externalizable.class.isAssignableFrom(t.clazz)) { try { // TODO: is this correct? I guess it is, when accessibility // is fixed. obj = t.clazz.newInstance(); } catch(Exception e) { throw new RuntimeException("Could not instantiate" + e); } addObjectToCycleCheck(obj); push_current_object(obj, 0); ((java.io.Externalizable) obj).readExternal(this); pop_current_object(); } else { // this is for java.io.Serializable try { // obj = t.clazz.newInstance(); Not correct: calls wrong constructor. Class t2 = t.clazz; while (Serializable.class.isAssignableFrom(t2)) { /* Find first non-serializable super-class. */ t2 = t2.getSuperclass(); } // Calls constructor for non-serializable superclass. 
obj = createUninitializedObject(t.clazz, t2); addObjectToCycleCheck(obj); push_current_object(obj, 0); alternativeReadObject(t.altInfo, obj); pop_current_object(); } catch (Exception e) { e.printStackTrace(); throw new RuntimeException("Couldn't deserialize or create object " + e); } } return obj; } public void close() throws IOException { } protected void readStreamHeader() { /* ignored */ } public GetField readFields() throws IOException, ClassNotFoundException { if (current_object == null) { throw new NotActiveException("not in readObject"); } Class type = current_object.getClass(); AlternativeTypeInfo t = AlternativeTypeInfo.getAlternativeTypeInfo(type); current_getfield = new ImplGetField(t); current_getfield.readFields(); return current_getfield; } private class ImplGetField extends GetField { double[] doubles; long[] longs; int[] ints; float[] floats; short[] shorts; char[] chars; byte[] bytes; boolean[] booleans; Object[] references; AlternativeTypeInfo t; ImplGetField(AlternativeTypeInfo t) { doubles = new double[t.double_count]; longs = new long[t.long_count]; ints = new int[t.int_count]; shorts = new short[t.short_count]; floats = new float[t.float_count]; chars = new char[t.char_count]; bytes = new byte[t.byte_count]; booleans = new boolean[t.boolean_count]; references = new Object[t.reference_count]; this.t = t; } public ObjectStreamClass getObjectStreamClass() { /* I don't know how it could be used here, but ... 
*/ return ObjectStreamClass.lookup(t.clazz); } public boolean defaulted(String name) { return false; } public boolean get(String name, boolean dflt) { return booleans[t.getOffset(name, Boolean.TYPE)]; } public char get(String name, char dflt) { return chars[t.getOffset(name, Character.TYPE)]; } public byte get(String name, byte dflt) { return bytes[t.getOffset(name, Byte.TYPE)]; } public short get(String name, short dflt) { return shorts[t.getOffset(name, Short.TYPE)]; } public int get(String name, int dflt) { return ints[t.getOffset(name, Integer.TYPE)]; } public long get(String name, long dflt) { return longs[t.getOffset(name, Long.TYPE)]; } public float get(String name, float dflt) { return floats[t.getOffset(name, Float.TYPE)]; } public double get(String name, double dflt) { return doubles[t.getOffset(name, Double.TYPE)]; } public Object get(String name, Object dflt) { return references[t.getOffset(name, Object.class)]; } void readFields() throws IOException, ClassNotFoundException { for (int i = 0; i < t.double_count; i++) doubles[i] = readDouble(); for (int i = 0; i < t.float_count; i++) floats[i] = readFloat(); for (int i = 0; i < t.long_count; i++) longs[i] = readLong(); for (int i = 0; i < t.int_count; i++) ints[i] = readInt(); for (int i = 0; i < t.short_count; i++) shorts[i] = readShort(); for (int i = 0; i < t.char_count; i++) chars[i] = readChar(); for (int i = 0; i < t.byte_count; i++) bytes[i] = readByte(); for (int i = 0; i < t.boolean_count; i++) booleans[i] = readBoolean(); for (int i = 0; i < t.reference_count; i++) references[i] = readObject(); } } public void defaultReadObject() throws IOException, NotActiveException { if (current_object == null) { throw new NotActiveException("defaultReadObject without a current object"); } Object ref = current_object; if (ref instanceof ibis.io.Serializable) { ((ibis.io.Serializable)ref).generated_DefaultReadObject(this, current_level); } else if (ref instanceof java.io.Serializable) { Class type = 
ref.getClass(); AlternativeTypeInfo t = AlternativeTypeInfo.getAlternativeTypeInfo(type); /* Find the type info corresponding to the current invocation. * See the invokeReadObject invocation in alternativeReadObject. */ while (t.level > current_level) { t = t.alternativeSuperInfo; } alternativeDefaultReadObject(t, ref); } else { Class type = ref.getClass(); throw new RuntimeException("Not Serializable : " + type.toString()); } } static { try { /* Need conversion for allocation of uninitialized objects. */ System.loadLibrary("conversion"); } catch(Throwable t) { System.err.println("Could not load libconversion"); } } }
package edu.cmu.cs.glacier; import java.util.HashMap; import java.util.Map; import java.util.Set; import javax.lang.model.element.AnnotationMirror; import javax.lang.model.element.Element; import javax.lang.model.element.ElementKind; import javax.lang.model.element.TypeParameterElement; import javax.lang.model.util.Types; import org.checkerframework.common.basetype.BaseAnnotatedTypeFactory; import org.checkerframework.common.basetype.BaseTypeChecker; import org.checkerframework.framework.type.AnnotatedTypeMirror; import org.checkerframework.framework.type.ElementAnnotationApplier; import org.checkerframework.framework.type.SyntheticArrays; import org.checkerframework.framework.type.TypeHierarchy; import org.checkerframework.framework.type.AnnotatedTypeMirror.AnnotatedArrayType; import org.checkerframework.framework.type.AnnotatedTypeMirror.AnnotatedDeclaredType; import org.checkerframework.framework.type.AnnotatedTypeMirror.AnnotatedExecutableType; import org.checkerframework.framework.type.AnnotatedTypeMirror.AnnotatedIntersectionType; import org.checkerframework.framework.type.AnnotatedTypeMirror.AnnotatedNoType; import org.checkerframework.framework.type.AnnotatedTypeMirror.AnnotatedNullType; import org.checkerframework.framework.type.AnnotatedTypeMirror.AnnotatedPrimitiveType; import org.checkerframework.framework.type.AnnotatedTypeMirror.AnnotatedTypeVariable; import org.checkerframework.framework.type.AnnotatedTypeMirror.AnnotatedUnionType; import org.checkerframework.framework.type.AnnotatedTypeMirror.AnnotatedWildcardType; import org.checkerframework.framework.type.visitor.AnnotatedTypeScanner; import org.checkerframework.javacutil.AnnotationUtils; import org.checkerframework.javacutil.TreeUtils; import com.sun.source.tree.ClassTree; import com.sun.source.tree.Tree; import com.sun.source.util.TreePath; import edu.cmu.cs.glacier.qual.*; public class GlacierAnnotatedTypeFactory extends BaseAnnotatedTypeFactory { protected final AnnotationMirror MUTABLE, 
IMMUTABLE, GLACIER_BOTTOM, READ_ONLY; public GlacierAnnotatedTypeFactory(BaseTypeChecker checker) { super(checker, false); // Must disable flow analysis for correct behavior in Glacier. MUTABLE = AnnotationUtils.fromClass(elements, Mutable.class); IMMUTABLE = AnnotationUtils.fromClass(elements, Immutable.class); GLACIER_BOTTOM = AnnotationUtils.fromClass(elements, GlacierBottom.class); READ_ONLY = AnnotationUtils.fromClass(elements, ReadOnly.class); this.postInit(); } /* * Superclass's implementation assumes that type of "this" should be according to the annotations on the "this" parameter. * But instead, for immutability, we want the annotations to be according to the containing class. */ @Override public AnnotatedDeclaredType getSelfType(Tree tree) { AnnotatedDeclaredType selfType = super.getSelfType(tree); TreePath path = getPath(tree); ClassTree enclosingClass = TreeUtils.enclosingClass(path); if (enclosingClass == null) { // I hope this only happens when tree is a fake tree that // we created, e.g. when desugaring enhanced-for-loops. enclosingClass = getCurrentClassTree(tree); } AnnotatedDeclaredType enclosingClassType = getAnnotatedType(enclosingClass); if (!selfType.isAnnotatedInHierarchy(READ_ONLY)) { // If there's already an annotation on selfType and it conflicts with Mutable, that error will be found by the type validity check elsewhere. if (enclosingClassType.isAnnotatedInHierarchy(READ_ONLY)) { annotateInheritedFromClass(selfType, enclosingClassType.getAnnotations()); } else { selfType.addAnnotation(MUTABLE); } } return selfType; } private static boolean isWhitelistedImmutableClass(Element element) { if (element.asType().toString().equals("java.lang.String") || element.asType().toString().equals("java.lang.Number")) { return true; } // TODO: add more classes. return false; } private static boolean isAutoboxedImmutableClass(Types types, AnnotatedTypeMirror type) { // Surely there is a better API for doing this than having to try/catch. 
try { types.unboxedType(type.getUnderlyingType()); return true; } catch (IllegalArgumentException e) { return false; } } /** * A callback method for the AnnotatedTypeFactory subtypes to customize * AnnotatedTypes.asMemberOf(). Overriding methods should merely change * the annotations on the subtypes, without changing the types. * * In this override, we make the receiver's annotations match the return type's annotations, which have already been set up correctly. * * @param type the annotated type of the element * @param owner the annotated type of the receiver of the accessing tree * @param element the element of the field or method */ @Override public void postAsMemberOf(AnnotatedTypeMirror type, AnnotatedTypeMirror owner, Element element) { super.postAsMemberOf(type, owner, element); if (SyntheticArrays.isArrayClone(owner, element)) { // Why is there a special case for array clone? Ugh. AnnotatedExecutableType executableType = (AnnotatedExecutableType)type; AnnotatedTypeMirror receiverType = executableType.getReceiverType(); receiverType.removeAnnotationInHierarchy(READ_ONLY); AnnotatedArrayType arrayReturnType = (AnnotatedArrayType)executableType.getReturnType(); receiverType.addMissingAnnotations(arrayReturnType.getAnnotations()); arrayReturnType.removeAnnotationInHierarchy(READ_ONLY); arrayReturnType.addAnnotation(GLACIER_BOTTOM); } } // TODO: Forbid @Immutable and @Mutable annotations on this-parameters of methods. protected void annotateInheritedFromClass(/*@Mutable*/ AnnotatedTypeMirror type) { GlacierInheritedFromClassAnnotator.INSTANCE.visit(type, this); } /** * Callback to determine what to do with the annotations from a class declaration. * * Ugh. This should not be here, but is due to visibility limitations. 
*/ protected void annotateInheritedFromClass(/*@Mutable*/ AnnotatedTypeMirror type, Set<AnnotationMirror> fromClass) { type.addMissingAnnotations(fromClass); } @Override protected TypeHierarchy createTypeHierarchy() { return new GlacierTypeHierarchy(checker, getQualifierHierarchy(), checker.hasOption("ignoreRawTypeArguments"), checker.hasOption("invariantArrays")); } /** * A singleton utility class for pulling annotations down from a class * type. * * HACK HACK HACK: It would be preferable to inherit from InheritedFromClassAnnotator, but that class has a private constructor. * * @see #annotateInheritedFromClass */ protected static class GlacierInheritedFromClassAnnotator extends AnnotatedTypeScanner<Void, GlacierAnnotatedTypeFactory> { /** The singleton instance. */ public static final GlacierInheritedFromClassAnnotator INSTANCE = new GlacierInheritedFromClassAnnotator(); private GlacierInheritedFromClassAnnotator() {} @Override public Void visitExecutable(AnnotatedExecutableType type, GlacierAnnotatedTypeFactory p) { // KEY DIFFERENCE VS. SUPERCLASS: Visit the receiver too! scanAndReduce(type.getReceiverType(), p, null); // ANOTHER KEY DIFFERENCE VS. SUPERCLASS: visit // constructor return types (which somewhat act like // the receiver). scan(type.getReturnType(), p); scanAndReduce(type.getParameterTypes(), p, null); scanAndReduce(type.getThrownTypes(), p, null); scanAndReduce(type.getTypeVariables(), p, null); return null; } @Override public Void visitDeclared(AnnotatedDeclaredType type, GlacierAnnotatedTypeFactory p) { Element classElt = type.getUnderlyingType().asElement(); // assert(!type.hasAnnotation(GlacierBottom.class)); // Only add annotations from the class declaration if there // are no annotations from that hierarchy already on the type. 
if (classElt != null) { boolean isHardCodedImmutable = isAutoboxedImmutableClass(p.types, type) || isWhitelistedImmutableClass(classElt); if (isHardCodedImmutable && !type.isAnnotatedInHierarchy(p.READ_ONLY)) { type.addAnnotation(Immutable.class); } else { AnnotatedTypeMirror classType = p.fromElement(classElt); assert classType != null : "Unexpected null type for class element: " + classElt; // If the class type has no annotations, infer @Mutable. if (!classType.isAnnotatedInHierarchy(p.READ_ONLY) && !type.isAnnotatedInHierarchy(p.READ_ONLY)) { type.addAnnotation(Mutable.class); } else { p.annotateInheritedFromClass(type, classType.getAnnotations()); } } } // assert(!type.hasAnnotation(GlacierBottom.class)); // System.out.println("visitDeclared " + type); return super.visitDeclared(type, p); } private final Map<TypeParameterElement, AnnotatedTypeVariable> visited = new HashMap<TypeParameterElement, AnnotatedTypeVariable>(); @Override public Void visitTypeVariable(AnnotatedTypeVariable type, GlacierAnnotatedTypeFactory p) { TypeParameterElement tpelt = (TypeParameterElement) type.getUnderlyingType().asElement(); if (!visited.containsKey(tpelt)) { visited.put(tpelt, type); if (type.getAnnotations().isEmpty() && type.getUpperBound().getAnnotations().isEmpty() && tpelt.getEnclosingElement().getKind() != ElementKind.TYPE_PARAMETER) { ElementAnnotationApplier.apply(type, tpelt, p); } super.visitTypeVariable(type, p); visited.remove(tpelt); } return null; } @Override public void reset() { visited.clear(); super.reset(); } @Override public Void visitIntersection(AnnotatedIntersectionType type, GlacierAnnotatedTypeFactory p) { if (visitedNodes.containsKey(type)) { return visitedNodes.get(type); } visitedNodes.put(type, null); Void r = scan(type.directSuperTypes(), p); return r; } @Override public Void visitUnion(AnnotatedUnionType type, GlacierAnnotatedTypeFactory p) { if (visitedNodes.containsKey(type)) { return visitedNodes.get(type); } visitedNodes.put(type, null); Void 
r = scan(type.getAlternatives(), p);
    return r;
}

@Override
public Void visitArray(AnnotatedArrayType type, GlacierAnnotatedTypeFactory p) {
    // Arrays default to mutable.
    if (!type.isAnnotatedInHierarchy(p.READ_ONLY)) {
        type.addAnnotation(Mutable.class);
    }
    // Component types are annotated by the recursive scan.
    Void r = scan(type.getComponentType(), p);
    return r;
}

@Override
public Void visitNoType(AnnotatedNoType type, GlacierAnnotatedTypeFactory p) {
    // Pseudo-types (void, package) get no qualifier.
    return null;
}

@Override
public Void visitNull(AnnotatedNullType type, GlacierAnnotatedTypeFactory p) {
    // The null type gets no default qualifier here.
    return null;
}

@Override
public Void visitPrimitive(AnnotatedPrimitiveType type, GlacierAnnotatedTypeFactory p) {
    // All primitives are immutable.
    if (!type.isAnnotatedInHierarchy(p.READ_ONLY)) {
        type.addAnnotation(Immutable.class);
    }
    return null;
}

@Override
public Void visitWildcard(AnnotatedWildcardType type, GlacierAnnotatedTypeFactory p) {
    // visitedNodes guards against infinite recursion: a wildcard's bounds
    // can (indirectly) reference the wildcard itself.
    if (visitedNodes.containsKey(type)) {
        return visitedNodes.get(type);
    }
    visitedNodes.put(type, null);
    Void r = scan(type.getExtendsBound(), p);
    visitedNodes.put(type, r);
    r = scanAndReduce(type.getSuperBound(), p, r);
    visitedNodes.put(type, r);
    return r;
}
}
}
package edu.mit.streamjit.test.sanity; import com.google.common.base.Supplier; import com.google.common.collect.ImmutableList; import com.jeffreybosboom.serviceproviderprocessor.ServiceProvider; import edu.mit.streamjit.api.Identity; import edu.mit.streamjit.api.Input; import edu.mit.streamjit.api.Joiner; import edu.mit.streamjit.api.RoundrobinJoiner; import edu.mit.streamjit.api.RoundrobinSplitter; import edu.mit.streamjit.api.Splitjoin; import edu.mit.streamjit.api.Splitter; import edu.mit.streamjit.impl.blob.Buffer; import edu.mit.streamjit.impl.common.InputBufferFactory; import edu.mit.streamjit.test.AbstractBenchmark; import edu.mit.streamjit.test.Benchmark; import edu.mit.streamjit.test.Benchmark.Dataset; import edu.mit.streamjit.test.BenchmarkProvider; import edu.mit.streamjit.test.Datasets; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.Queue; /** * * @author Jeffrey Bosboom <jeffreybosboom@gmail.com> * @since 8/20/2013 */ @ServiceProvider(BenchmarkProvider.class) public final class RoundrobinSanity implements BenchmarkProvider { @Override public Iterator<Benchmark> iterator() { Benchmark[] benchmarks = { rr_rr(7, 1, 1), rr_rr(7, 5, 5), rr_rr(7, 5, 3), rr_rr(7, 3, 5), }; return Arrays.asList(benchmarks).iterator(); } private static Benchmark rr_rr(int width, int splitRate, int joinRate) { String name = String.format("RR(%d) x %dw x RR(%d)", splitRate, width, joinRate); return new AbstractBenchmark(name, new SplitjoinSupplier(width, new RoundrobinSplitterSupplier(splitRate), new RoundrobinJoinerSupplier(joinRate)), simulateRoundrobin(Datasets.allIntsInRange(0, 1_000_000), width, splitRate, joinRate)); } private static final class SplitjoinSupplier implements Supplier<Splitjoin<Integer, Integer>> { private final int width; private final Supplier<? extends Splitter<Integer, Integer>> splitter; private final Supplier<? 
extends Joiner<Integer, Integer>> joiner; private SplitjoinSupplier(int width, Supplier<? extends Splitter<Integer, Integer>> splitter, Supplier<? extends Joiner<Integer, Integer>> joiner) { this.width = width; this.splitter = splitter; this.joiner = joiner; } @Override public Splitjoin<Integer, Integer> get() { ImmutableList.Builder<Identity<Integer>> builder = ImmutableList.builder(); //Can't use Collections.nCopies because we need distinct filters. for (int i = 0; i < width; ++i) builder.add(new Identity<Integer>()); return new Splitjoin<>(splitter.get(), joiner.get(), builder.build()); } } //I'd like to use ConstructorSupplier here, but the generics won't work //because e.g. RoundrobinSplitter.class is a raw type. private static final class RoundrobinSplitterSupplier implements Supplier<Splitter<Integer, Integer>> { private final int rate; private RoundrobinSplitterSupplier(int rate) { this.rate = rate; } @Override public Splitter<Integer, Integer> get() { return new RoundrobinSplitter<>(rate); } } private static final class RoundrobinJoinerSupplier implements Supplier<Joiner<Integer, Integer>> { private final int rate; private RoundrobinJoinerSupplier(int rate) { this.rate = rate; } @Override public Joiner<Integer, Integer> get() { return new RoundrobinJoiner<>(rate); } } /** * Simulates a roundrobin splitjoin, returning a Dataset with reference * output. */ private static Dataset simulateRoundrobin(Dataset dataset, int width, int splitRate, int joinRate) { int[] splitRates = new int[width], joinRates = new int[width]; Arrays.fill(splitRates, splitRate); Arrays.fill(joinRates, joinRate); return simulateRoundrobin(dataset, width, splitRates, joinRates); } /** * Simulates a weighted roundrobin splitjoin, returning a Dataset with * reference output. 
*/
private static Dataset simulateRoundrobin(Dataset dataset, int width, int[] splitRates, int[] joinRates) {
    // One FIFO per splitjoin branch, holding the items the splitter dealt to it.
    List<Queue<Object>> bins = new ArrayList<>(width);
    for (int i = 0; i < width; ++i)
        bins.add(new ArrayDeque<>());
    // Number of items one full splitter round consumes.
    int splitReq = 0;
    for (int i : splitRates)
        splitReq += i;
    Buffer buffer = InputBufferFactory.unwrap(dataset.input()).createReadableBuffer(splitReq);
    // Deal the input round-robin into the bins, one full splitter round at a
    // time; a trailing partial round is left unread, mirroring the real
    // splitter's firing condition.
    while (buffer.size() >= splitReq)
        for (int i = 0; i < bins.size(); ++i)
            for (int j = 0; j < splitRates[i]; ++j)
                bins.get(i).add(buffer.read());
    // Drain the bins round-robin as long as every bin can supply one full
    // joiner round; leftovers stay behind, as with the real joiner.
    List<Object> output = new ArrayList<>();
    while (ready(bins, joinRates)) {
        for (int i = 0; i < bins.size(); ++i)
            for (int j = 0; j < joinRates[i]; ++j)
                output.add(bins.get(i).remove());
    }
    return Dataset.builder(dataset).output(Input.fromIterable(output)).build();
}

/** True iff every bin holds at least one full joiner round (joinRates[i] items). */
private static boolean ready(List<Queue<Object>> bins, int[] joinRates) {
    for (int i = 0; i < bins.size(); ++i)
        if (bins.get(i).size() < joinRates[i])
            return false;
    return true;
}
}
package ee.ut.math.tvt.salessystem.ui.tabs;

import ee.ut.math.tvt.salessystem.domain.controller.SalesDomainController;
import ee.ut.math.tvt.salessystem.domain.data.SoldItem;
import ee.ut.math.tvt.salessystem.domain.exception.VerificationFailedException;
import ee.ut.math.tvt.salessystem.ui.model.SalesSystemModel;
import ee.ut.math.tvt.salessystem.ui.panels.PurchaseItemPanel;
import java.awt.Color;
import java.awt.Component;
import java.awt.Dimension;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.GridLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import javax.swing.BorderFactory;
import javax.swing.JButton;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JTextField;
import org.apache.log4j.Logger;

/**
 * Encapsulates everything that has to do with the purchase tab (the tab
 * labelled "Point-of-sale" in the menu).
 */
public class PurchaseTab {

    private static final Logger log = Logger.getLogger(PurchaseTab.class);

    // Business-logic facade; all purchase operations go through it.
    private final SalesDomainController domainController;

    // Menu buttons; enabled/disabled in startNewSale()/endSale().
    private JButton newPurchase;
    private JButton submitPurchase;
    private JButton cancelPurchase;

    // Panel with the product-entry form and shopping-cart table.
    private PurchaseItemPanel purchasePane;

    private SalesSystemModel model;

    public PurchaseTab(SalesDomainController controller, SalesSystemModel model) {
        this.domainController = controller;
        this.model = model;
    }

    /**
     * The purchase tab. Consists of the purchase menu, current purchase dialog and
     * shopping cart table.
     */
    public Component draw() {
        JPanel panel = new JPanel();

        // Layout
        panel.setBorder(BorderFactory.createLineBorder(Color.BLACK));
        panel.setLayout(new GridBagLayout());

        // Add the purchase menu
        panel.add(getPurchaseMenuPane(), getConstraintsForPurchaseMenu());

        // Add the main purchase-panel
        purchasePane = new PurchaseItemPanel(model);
        panel.add(purchasePane, getConstraintsForPurchasePanel());

        return panel;
    }

    // The purchase menu. Contains buttons "New purchase", "Submit", "Cancel".
    private Component getPurchaseMenuPane() {
        JPanel panel = new JPanel();

        // Initialize layout
        panel.setLayout(new GridBagLayout());
        GridBagConstraints gc = getConstraintsForMenuButtons();

        // Initialize the buttons
        newPurchase = createNewPurchaseButton();
        submitPurchase = createConfirmButton();
        cancelPurchase = createCancelButton();

        // Add the buttons to the panel, using GridBagConstraints we defined above
        panel.add(newPurchase, gc);
        panel.add(submitPurchase, gc);
        panel.add(cancelPurchase, gc);

        return panel;
    }

    /**
     * Opens the payment window ("uusaken" is Estonian for "new window";
     * "Makse" = "Payment"). Shows the total sum, lets the cashier type a
     * payment amount and computes the change on ENTER.
     *
     * NOTE(review): the window is a non-modal JFrame, so the caller
     * (submitPurchaseButtonClicked) does not wait for it. The "Accept"
     * button's listener is empty — pressing it does nothing, not even
     * closing the window — and "Cancel" only hides the frame via
     * setVisible(false) instead of disposing it. Confirm intended behavior.
     */
    public void uusaken(){
        final JFrame frame = new JFrame("Makse");
        frame.setLayout(new GridLayout(4, 2));
        frame.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);

        final JTextField payment = new JTextField();
        payment.setText("0");

        frame.add(new JLabel("Total sum: "));
        final double price = model.getCurrentPurchaseTableModel().getTotalPrice();
        frame.add(new JLabel(String.valueOf(price)));
        frame.add(new JLabel("Payment amount: "));
        frame.add(payment);
        frame.add(new JLabel("Change amount: "));
        final JLabel tagasi = new JLabel(); // "tagasi" = change (money returned)
        frame.add(tagasi);

        // Recompute the change when ENTER is pressed in the payment field;
        // a non-numeric payment pops a warning dialog ("Vale sisend" = "Invalid input").
        payment.addKeyListener(new KeyAdapter() {
            public void keyPressed(KeyEvent evt) {
                if(evt.getKeyCode() == KeyEvent.VK_ENTER) {
                    try{
                        double change=Double.parseDouble(payment.getText())-price;
                        tagasi.setText(String.valueOf(change));
                    }
                    catch( java.lang.NumberFormatException e){
                        final JPanel panel = new JPanel();
                        JOptionPane.showMessageDialog(panel, "Vale sisend", "Warning", JOptionPane.WARNING_MESSAGE);
                    }
                }
            }
        });

        JButton accept = new JButton("Accept");
        accept.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                // NOTE(review): intentionally empty? "Accept" currently has no effect.
            }
        });
        frame.add(accept);

        JButton cancel = new JButton("Cancel");
        cancel.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                frame.setVisible(false);
            }
        });
        frame.add(cancel);

        frame.pack();
        frame.setResizable(false);
        frame.setVisible(true);
    }

    // Creates the button "New purchase"
    private JButton createNewPurchaseButton() {
        JButton b = new JButton("New purchase");
        b.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                newPurchaseButtonClicked();
            }
        });
        return b;
    }

    // Creates the "Confirm" button; disabled until a sale is started.
    private JButton createConfirmButton() {
        JButton b = new JButton("Confirm");
        b.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                submitPurchaseButtonClicked();
            }
        });
        b.setEnabled(false);
        return b;
    }

    // Creates the "Cancel" button; disabled until a sale is started.
    private JButton createCancelButton() {
        JButton b = new JButton("Cancel");
        b.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                cancelPurchaseButtonClicked();
            }
        });
        b.setEnabled(false);
        return b;
    }

    /** Event handler for the <code>new purchase</code> event. */
    protected void newPurchaseButtonClicked() {
        log.info("New sale process started");
        try {
            domainController.startNewPurchase();
            startNewSale();
        } catch (VerificationFailedException e1) {
            log.error(e1.getMessage());
        }
    }

    /** Event handler for the <code>cancel purchase</code> event. */
    protected void cancelPurchaseButtonClicked() {
        log.info("Sale cancelled");
        try {
            domainController.cancelCurrentPurchase();
            endSale();
            model.getCurrentPurchaseTableModel().clear();
        } catch (VerificationFailedException e1) {
            log.error(e1.getMessage());
        }
    }

    /** Event handler for the <code>submit purchase</code> event.
     *
     * NOTE(review): uusaken() opens a non-modal payment window, then the
     * purchase is submitted immediately below without waiting for payment
     * confirmation — verify this ordering is intended.
     */
    protected void submitPurchaseButtonClicked() {
        log.info("Sale complete");
        uusaken();
        try {
            log.debug("Contents of the current basket:\n" + model.getCurrentPurchaseTableModel());
            domainController.submitCurrentPurchase(
                model.getCurrentPurchaseTableModel().getTableRows()
            )
            ;
            endSale();
            model.getCurrentPurchaseTableModel().clear();
        } catch (VerificationFailedException e1) {
            log.error(e1.getMessage());
        }
    }

    // switch UI to the state that allows to proceed with the purchase
    private void startNewSale() {
        purchasePane.reset();
        purchasePane.setEnabled(true);
        submitPurchase.setEnabled(true);
        cancelPurchase.setEnabled(true);
        newPurchase.setEnabled(false);
    }

    // switch UI to the state that allows to initiate new purchase
    private void endSale() {
        purchasePane.reset();
        cancelPurchase.setEnabled(false);
        submitPurchase.setEnabled(false);
        newPurchase.setEnabled(true);
        purchasePane.setEnabled(false);
    }

    // Constraints: purchase menu fills the top row, fixed height.
    private GridBagConstraints getConstraintsForPurchaseMenu() {
        GridBagConstraints gc = new GridBagConstraints();
        gc.fill = GridBagConstraints.HORIZONTAL;
        gc.anchor = GridBagConstraints.NORTH;
        gc.gridwidth = GridBagConstraints.REMAINDER;
        gc.weightx = 1.0d;
        gc.weighty = 0d;
        return gc;
    }

    // Constraints: purchase panel takes all remaining vertical space.
    private GridBagConstraints getConstraintsForPurchasePanel() {
        GridBagConstraints gc = new GridBagConstraints();
        gc.fill = GridBagConstraints.BOTH;
        gc.anchor = GridBagConstraints.NORTH;
        gc.gridwidth = GridBagConstraints.REMAINDER;
        gc.weightx = 1.0d;
        gc.weighty = 1.0;
        return gc;
    }

    // The constraints that control the layout of the buttons in the purchase menu
    private GridBagConstraints getConstraintsForMenuButtons() {
        GridBagConstraints gc = new GridBagConstraints();
        gc.weightx = 0;
        gc.anchor = GridBagConstraints.CENTER;
        gc.gridwidth = GridBagConstraints.RELATIVE;
        return gc;
    }
}
package gov.nih.nci.calab.service.util;

/**
 * Central collection of string, array and numeric constants used across the
 * caNano application: characterization category/type names, particle types,
 * default dropdown choices, visibility groups and assay abbreviations.
 *
 * <p>This class is a pure constants holder and is never instantiated.
 */
public class CananoConstants {

	/** Noninstantiable utility class — prevent accidental instantiation. */
	private CananoConstants() {
	}

	// --- configuration ---
	public static final String PARTICLE_PROPERTY = "particle.properties";

	// --- characterization categories ---
	public static final String PHYSICAL_CHARACTERIZATION = "Physical";
	public static final String COMPOSITION_CHARACTERIZATION = "Composition";
	public static final String INVITRO_CHARACTERIZATION = "In Vitro";
	public static final String INVIVO_CHARACTERIZATION = "In Vivo";
	public static final String TOXICITY_CHARACTERIZATION = "Toxicity";
	public static final String CYTOXICITY_CHARACTERIZATION = "Cytoxicity";
	public static final String APOPTOSIS_CELL_DEATH_METHOD_CYTOXICITY_CHARACTERIZATION = "apoptosis";
	public static final String NECROSIS_CELL_DEATH_METHOD_CYTOXICITY_CHARACTERIZATION = "necrosis";
	public static final String BLOOD_CONTACT_IMMUNOTOXICITY_CHARACTERIZATION = "Blood Contact";
	public static final String IMMUNE_CELL_FUNCTION_IMMUNOTOXICITY_CHARACTERIZATION = "Immune Cell Function";
	public static final String METABOLIC_STABILITY_TOXICITY_CHARACTERIZATION = "Metabolic Stability";

	// --- physical characterization types ---
	public static final String PHYSICAL_SIZE = "Size";
	public static final String PHYSICAL_SHAPE = "Shape";
	public static final String PHYSICAL_MOLECULAR_WEIGHT = "Molecular Weight";
	public static final String PHYSICAL_SOLUBILITY = "Solubility";
	public static final String PHYSICAL_SURFACE = "Surface";
	public static final String PHYSICAL_STABILITY = "Stability";
	public static final String PHYSICAL_PURITY = "Purity";
	public static final String PHYSICAL_FUNCTIONAL = "Functional";
	public static final String PHYSICAL_MORPHOLOGY = "Morphology";
	public static final String PHYSICAL_COMPOSITION = "Composition";

	// --- toxicity / cytotoxicity assay types ---
	public static final String TOXICITY_OXIDATIVE_STRESS = "Oxidative Stress";
	public static final String TOXICITY_ENZYME_FUNCTION = "Enzyme Function";
	public static final String CYTOTOXICITY_CELL_VIABILITY = "Cell Viability";
	public static final String CYTOTOXICITY_CASPASE3_ACTIVIATION = "Caspase 3 Activation";

	// --- blood-contact toxicity assay types ---
	public static final String BLOODCONTACTTOX_PLATE_AGGREGATION = "Platelet Aggregation";
	public static final String BLOODCONTACTTOX_HEMOLYSIS = "Hemolysis";
	public static final String BLOODCONTACTTOX_COAGULATION = "Coagulation";
	public static final String BLOODCONTACTTOX_PLASMA_PROTEIN_BINDING = "Plasma Protein Binding";

	// --- immune-cell-function toxicity assay types ---
	public static final String IMMUNOCELLFUNCTOX_PHAGOCYTOSIS = "Phagocytosis";
	public static final String IMMUNOCELLFUNCTOX_OXIDATIVE_BURST = "Oxidative Burst";
	public static final String IMMUNOCELLFUNCTOX_CHEMOTAXIS = "Chemotaxis";
	public static final String IMMUNOCELLFUNCTOX_CYTOKINE_INDUCTION = "Cytokine Induction";
	public static final String IMMUNOCELLFUNCTOX_COMPLEMENT_ACTIVATION = "Complement Activation";
	public static final String IMMUNOCELLFUNCTOX_LEUKOCYTE_PROLIFERATION = "Leukocyte Proliferation";
	public static final String IMMUNOCELLFUNCTOX_NKCELL_CYTOTOXIC_ACTIVITY = "Cytotoxic Activity of NK Cells";

	// --- metabolic-stability assay types ---
	public static final String METABOLIC_STABILITY_CYP450 = "CYP450";
	public static final String METABOLIC_STABILITY_ROS = "ROS";
	public static final String METABOLIC_STABILITY_GLUCURONIDATION_SULPHATION = "Glucuronidation Sulphation";
	public static final String IMMUNOCELLFUNCTOX_CFU_GM = "CFU_GM";

	// --- nanoparticle types ---
	public static final String DENDRIMER_TYPE = "Dendrimer";
	public static final String POLYMER_TYPE = "Polymer";
	public static final String LIPOSOME_TYPE = "Liposome";
	public static final String CARBON_NANOTUBE_TYPE = "Carbon Nanotube";
	public static final String FULLERENE_TYPE = "Fullerene";
	public static final String QUANTUM_DOT_TYPE = "Quantum Dot";
	public static final String METAL_PARTICLE_TYPE = "Metal Particle";
	public static final String EMULSION_TYPE = "Emulsion";
	public static final String COMPLEX_PARTICLE_TYPE = "Complex Particle";

	// --- particle composition parts ---
	public static final String CORE = "core";
	public static final String SHELL = "shell";
	public static final String COATING = "coating";

	// --- boolean display choices ---
	public static final String BOOLEAN_YES = "Yes";
	public static final String BOOLEAN_NO = "No";
	public static final String[] BOOLEAN_CHOICES = new String[] { BOOLEAN_YES, BOOLEAN_NO };

	// --- dropdown choices and report types ---
	public static final String[] CHARACTERIZATION_SOURCES = new String[] { "NCL", "Vendor" };
	public static final String[] CARBON_NANOTUBE_WALLTYPES = new String[] { "Single (SWNT)", "Double (DWMT)", "Multiple (MWNT)" };
	public static final String NCL_REPORT = "NCL Report";
	public static final String ASSOCIATED_FILE = "Other Associated File";
	public static final String[] REPORT_TYPES = new String[] { NCL_REPORT, ASSOCIATED_FILE };
	public static final String[] DEFAULT_POLYMER_INITIATORS = new String[] { "Free Radicals", "Peroxide" };
	public static final String[] DEFAULT_DENDRIMER_BRANCHES = new String[] { "1-2", "1-3" };
	public static final String[] DEFAULT_DENDRIMER_GENERATIONS = new String[] { "0", "0.5", "1.0", "1.5", "2.0", "2.5", "3.0", "3.5", "4.0", "4.5", "5.0", "5.5", "6.0", "6.5", "7.0", "7.5", "8.0", "8.5", "9.0", "9.5", "10.0" };
	public static final String OTHER = "Other";
	public static final String CHARACTERIZATION_FILE = "characterizationFile";

	// --- functionalization agent / linkage types ---
	public static final String DNA = "DNA";
	public static final String PEPTIDE = "Peptide";
	public static final String SMALL_MOLECULE = "Small Molecule";
	public static final String PROBE = "Probe";
	public static final String ANTIBODY = "Antibody";
	public static final String IMAGE_CONTRAST_AGENT = "Image Contrast Agent";
	public static final String ATTACHMENT = "Attachment";
	public static final String ENCAPSULATION = "Encapsulation";
	public static final String[] FUNCTION_AGENT_TYPES = new String[] { DNA, PEPTIDE, SMALL_MOLECULE, PROBE, ANTIBODY, IMAGE_CONTRAST_AGENT };
	public static final String[] FUNCTION_LINKAGE_TYPES = new String[] { ATTACHMENT, ENCAPSULATION };
	public static final String RECEPTOR = "Receptor";
	public static final String ANTIGEN = "Antigen";

	// --- UI limits and defaults ---
	public static final int MAX_VIEW_TITLE_LENGTH = 23;
	public static final String[] DEFAULT_CELLLINES = new String[] { "LLC-PK1", "Hep-G2" };
	public static final String[] DEFAULT_SHAPE_TYPES = new String[] { "Cubic", "Hexagonal", "Irregular", "Needle", "Oblate", "Rod", "Spherical", "Tetrahedron", "Tetrapod", "Triangle", "Eliptical", "Composite", "Cylindrical", "Vesicular", "Elliposid" };
	public static final String[] DEFAULT_MORPHOLOGY_TYPES = new String[] { "Power", "Liquid", "Solid", "Crystalline", "Copolymer", "Fibril", "Colloid", "Oil" };
	public static final String[] DEFAULT_SURFACE_GROUP_NAMES = new String[] { "Amine", "Carboxyl", "Hydroxyl" };

	// --- visibility groups ---
	public static final String NCL_PI = "NCL_PI";
	public static final String NCL_RESEARCHER = "NCL_Researcher";
	public static final String[] DEFAULT_VISIBLE_GROUPS = new String[] { NCL_PI, NCL_RESEARCHER };

	// --- two-letter assay abbreviations (used in generated identifiers) ---
	public static final String ABBR_COMPOSITION = "CP";
	public static final String ABBR_SIZE = "SZ";
	public static final String ABBR_MOLECULAR_WEIGHT = "MW";
	public static final String ABBR_MORPHOLOGY = "MP";
	public static final String ABBR_SHAPE = "SH";
	public static final String ABBR_SURFACE = "SF";
	public static final String ABBR_SOLUBILITY ="SL";
	public static final String ABBR_PURITY = "PT";
	public static final String ABBR_OXIDATIVE_STRESS = "OS";
	public static final String ABBR_ENZYME_FUNCTION = "EF";
	public static final String ABBR_CELL_VIABILITY = "CV";
	public static final String ABBR_CASPASE3_ACTIVATION = "C3";
	public static final String ABBR_PLATELET_AGGREGATION = "PA";
	public static final String ABBR_HEMOLYSIS = "HM";
	public static final String ABBR_PLASMA_PROTEIN_BINDING = "PB";
	public static final String ABBR_COAGULATION = "CG";
	public static final String ABBR_OXIDATIVE_BURST ="OB";
	public static final String ABBR_CHEMOTAXIS ="CT";
	public static final String ABBR_LEUKOCYTE_PROLIFERATION = "LP";
	public static final String ABBR_PHAGOCYTOSIS = "PC";
	public static final String ABBR_CYTOKINE_INDUCTION = "IC";
	public static final String ABBR_CFU_GM = "CU";
	public static final String ABBR_COMPLEMENT_ACTIVATION = "CA";
	public static final String ABBR_NKCELL_CYTOTOXIC_ACTIVITY = "NK";
}
// BUI - a user interface library for the JME 3D engine
// This library is free software; you can redistribute it and/or modify it
// (at your option) any later version.
// This library is distributed in the hope that it will be useful,
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// You should have received a copy of the GNU Lesser General Public
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
// NOTE(review): the LGPL header above appears truncated (alternating lines
// missing) — restore the full standard header from the upstream BUI sources.

package com.jmex.bui.layout;

import java.util.Arrays;

import com.jmex.bui.BComponent;
import com.jmex.bui.BContainer;
import com.jmex.bui.Log;
import com.jmex.bui.util.Dimension;
import com.jmex.bui.util.Insets;

/**
 * Lays out components in a simple grid arrangement, wherein the width and
 * height of each column and row is defined by the widest preferred width
 * and height of any component in that column and row.
 *
 * <p> The table layout defaults to left horizontal alignment and top vertical
 * alignment.
 */
public class TableLayout extends BLayoutManager
{
    /** An enumeration class representing alignments. */
    // Type-safe enum idiom (pre-Java-5 style): identity comparison only.
    public static class Alignment
    {
    }

    /** Left justifies the table contents within the container. */
    public static final Alignment LEFT = new Alignment();

    /** Centers the table contents within the container. */
    public static final Alignment CENTER = new Alignment();

    /** Right justifies the table contents within the container. */
    public static final Alignment RIGHT = new Alignment();

    /** Top justifies the table contents within the container. */
    public static final Alignment TOP = new Alignment();

    /** Bottom justifies the table contents within the container. */
    public static final Alignment BOTTOM = new Alignment();

    /** Divides the column space among the columns in proportion to their
     * preferred size. This only works with {@link #setHorizontalAlignment}. */
    public static final Alignment STRETCH = new Alignment();

    /**
     * Creates a table layout with the specified number of columns and a zero
     * pixel gap between rows and columns.
     */
    public TableLayout (int columns)
    {
        this(columns, 0, 0);
    }

    /**
     * Creates a table layout with the specified number of columns and the
     * specifeid gap between rows and columns.
     */
    public TableLayout (int columns, int rowgap, int colgap)
    {
        // A table must have at least a column
        columns = Math.max(1, columns);
        _columnWidths = new int[columns];
        _fixedColumns = new boolean[columns];
        _rowgap = rowgap;
        _colgap = colgap;
    }

    /**
     * Configures the horizontal alignment (or stretching) of this table. This
     * must be called before the container using this layout is validated.
     */
    public TableLayout setHorizontalAlignment (Alignment align)
    {
        _halign = align;
        return this;
    }

    /**
     * Configures the vertical alignment of this table. This must be called
     * before the container using this layout is validated.
     */
    public TableLayout setVerticalAlignment (Alignment align)
    {
        _valign = align;
        return this;
    }

    /**
     * Configures a column as fixed or free. If a table layout is configured
     * with <code>STRETCH</code> horizontal alignment, extra space is divided
     * up among all of the non-fixed columns. All columns are non-fixed by
     * default.
     */
    public TableLayout setFixedColumn (int column, boolean fixed)
    {
        _fixedColumns[column] = fixed;
        return this;
    }

    /**
     * Configures whether or not the table will force all rows to be a uniform
     * size. This must be called before the container using this layout is
     * validated.
     */
    public TableLayout setEqualRows (boolean equalRows)
    {
        _equalRows = equalRows;
        return this;
    }

    // documentation inherited
    public Dimension computePreferredSize (
        BContainer target, int whint, int hhint)
    {
        computeMetrics(target, true, whint);
        // total size = sum of column/row extents plus inter-cell gaps
        int cx = (_columnWidths.length-1) * _colgap;
        int rx = (computeRows(target)-1) * _rowgap;
        return new Dimension(sum(_columnWidths) + cx, sum(_rowHeights) + rx);
    }

    // documentation inherited
    public void layoutContainer (BContainer target)
    {
        Insets insets = target.getInsets();
        int availwid = target.getWidth() - insets.getHorizontal();
        computeMetrics(target, false, availwid);
        int totwidth = sum(_columnWidths) +
            (_columnWidths.length-1) * _colgap;
        int totheight = sum(_rowHeights) + (computeRows(target)-1) * _rowgap;

        // account for our horizontal alignment
        int sx = insets.left;
        if (_halign == RIGHT) {
            sx += target.getWidth() - insets.getHorizontal() - totwidth;
        } else if (_halign == CENTER) {
            sx += (target.getWidth() - insets.getHorizontal() - totwidth)/2;
        }

        // account for our vertical alignment
        // NOTE(review): y is walked downward as rows are placed (children are
        // set at y - rowHeight), which suggests a y-up coordinate system; the
        // BOTTOM alignment case falls through to y = insets.bottom, which
        // looks like it would place rows outside the container — confirm
        // against upstream BUI before relying on BOTTOM.
        int y = insets.bottom;
        if (_valign == CENTER) {
            y += totheight + (target.getHeight() - insets.getVertical() - totheight)/2;
        } else if (_valign == TOP) {
            y = target.getHeight() - insets.top;
        }

        int row = 0, col = 0, x = sx;
        for (int ii = 0, ll = target.getComponentCount(); ii < ll; ii++) {
            BComponent child = target.getComponent(ii);
            // clamp to available width so a wide cell cannot overflow
            int width = Math.min(_columnWidths[col], availwid);
            child.setBounds(x, y - _rowHeights[row], width, _rowHeights[row]);
            x += (_columnWidths[col] + _colgap);
            if (++col == _columnWidths.length) {
                // wrap to the next row
                y -= (_rowHeights[row] + _rowgap);
                row++;
                col = 0;
                x = sx;
            }
        }
    }

    // Computes _columnWidths and _rowHeights from the children's preferred
    // sizes; when laying out (not preferred) with STRETCH, redistributes the
    // leftover width among non-fixed columns.
    protected void computeMetrics (
        BContainer target, boolean preferred, int whint)
    {
        int rows = computeRows(target);
        if (_rowHeights == null || _rowHeights.length != rows) {
            _rowHeights = new int[rows];
        } else {
            Arrays.fill(_rowHeights, 0);
        }
        Arrays.fill(_columnWidths, 0);

        int row = 0, col = 0, maxrh = 0;
        for (int ii = 0, ll = target.getComponentCount(); ii < ll; ii++) {
            BComponent child = target.getComponent(ii);
            if (child.isVisible()) {
                Dimension psize = child.getPreferredSize(whint, -1);
                if (psize.height > _rowHeights[row]) {
                    _rowHeights[row] = psize.height;
                    // track the tallest row for the _equalRows option
                    if (maxrh < _rowHeights[row]) {
                        maxrh = _rowHeights[row];
                    }
                }
                if (psize.width > _columnWidths[col]) {
                    _columnWidths[col] = psize.width;
                }
            }
            if (++col == _columnWidths.length) {
                col = 0;
                row++;
            }
        }

        // if we are stretching, adjust the column widths accordingly (however,
        // no adjusting if we're computing our preferred size)
        int naturalWidth;
        if (!preferred && _halign == STRETCH &&
            (naturalWidth = sum(_columnWidths)) > 0) {
            // sum the width of the non-fixed columns
            int freewid = 0;
            for (int ii = 0; ii < _fixedColumns.length; ii++) {
                if (!_fixedColumns[ii]) {
                    freewid += _columnWidths[ii];
                }
            }
            // now divide up the extra space among said non-fixed columns
            int avail = target.getWidth() - target.getInsets().getHorizontal() -
                naturalWidth - (_colgap * (_columnWidths.length-1));
            int used = 0;
            for (int ii = 0; ii < _columnWidths.length; ii++) {
                if (_fixedColumns[ii]) {
                    continue;
                }
                // proportional share of the extra space (integer division)
                int adjust = _columnWidths[ii] * avail / freewid;
                _columnWidths[ii] += adjust;
                used += adjust;
            }
            // add any rounding error to the first non-fixed column
            if (_columnWidths.length > 0) {
                for (int ii = 0; ii < _fixedColumns.length; ii++) {
                    if (!_fixedColumns[ii]) {
                        _columnWidths[ii] += (avail - used);
                        break;
                    }
                }
            }
        }

        // if we're equalizing rows, make all row heights the max
        if (_equalRows) {
            Arrays.fill(_rowHeights, maxrh);
        }
    }

    // Number of rows implied by the child count and configured column count
    // (last row may be partially filled).
    protected int computeRows (BContainer target)
    {
        int ccount = target.getComponentCount();
        int rows = ccount / _columnWidths.length;
        if (ccount % _columnWidths.length != 0) {
            rows++;
        }
        return rows;
    }

    // Sums the elements of the supplied array.
    protected int sum (int[] values)
    {
        int total = 0;
        for (int ii = 0; ii < values.length; ii++) {
            total += values[ii];
        }
        return total;
    }

    protected Alignment _halign = LEFT, _valign = TOP;
    protected boolean _equalRows;
    protected int _rowgap, _colgap;
    protected int[] _columnWidths;
    protected int[] _rowHeights;
    protected boolean[] _fixedColumns;
}
package net.sf.jabref.gui;

import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.logging.Logger;
import javax.swing.JCheckBox;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import javax.swing.undo.CannotRedoException;
import javax.swing.undo.CannotUndoException;
import javax.swing.undo.UndoableEdit;
import com.jgoodies.forms.builder.DefaultFormBuilder;
import com.jgoodies.forms.layout.CellConstraints;
import com.jgoodies.forms.layout.FormLayout;
import net.sf.jabref.AbstractWorker;
import net.sf.jabref.BasePanel;
import net.sf.jabref.BibtexEntry;
import net.sf.jabref.CheckBoxMessage;
import net.sf.jabref.GUIGlobals;
import net.sf.jabref.Globals;
import net.sf.jabref.ImportSettingsTab;
import net.sf.jabref.JabRefFrame;
import net.sf.jabref.JabRefPreferences;
import net.sf.jabref.Util;
import net.sf.jabref.external.ExternalFileType;
import net.sf.jabref.undo.NamedCompound;
import net.sf.jabref.undo.UndoableFieldChange;

/**
 * Worker that offers a "Cleanup entries" dialog for the selected BibTeX
 * entries and applies the chosen normalizations (DOI, month, page ranges,
 * relative paths, PDF renaming, superscripts), recording undoable edits.
 */
public class CleanUpAction extends AbstractWorker {

    private Logger logger = Logger.getLogger(CleanUpAction.class.getName());

    // Preference keys for the individual clean-up options.
    public final static String
        AKS_AUTO_NAMING_PDFS_AGAIN = "AskAutoNamingPDFsAgain",
        CLEANUP_DOI = "CleanUpDOI",
        CLEANUP_MONTH = "CleanUpMonth",
        CLEANUP_PAGENUMBERS = "CleanUpPageNumbers",
        CLEANUP_MAKEPATHSRELATIVE = "CleanUpMakePathsRelative",
        CLEANUP_RENAMEPDF = "CleanUpRenamePDF",
        CLEANUP_RENAMEPDF_ONLYRELATIVE_PATHS = "CleanUpRenamePDFonlyRelativePaths",
        CLEANUP_SUPERSCRIPTS = "CleanUpSuperscripts";

    /** Registers the default on/off state for every clean-up option. */
    public static void putDefaults(HashMap<String, Object> defaults) {
        defaults.put(AKS_AUTO_NAMING_PDFS_AGAIN, Boolean.TRUE);
        defaults.put(CLEANUP_SUPERSCRIPTS, Boolean.TRUE);
        defaults.put(CLEANUP_DOI, Boolean.TRUE);
        defaults.put(CLEANUP_MONTH, Boolean.TRUE);
        defaults.put(CLEANUP_PAGENUMBERS, Boolean.TRUE);
        defaults.put(CLEANUP_MAKEPATHSRELATIVE, Boolean.TRUE);
        defaults.put(CLEANUP_RENAMEPDF, Boolean.TRUE);
        defaults.put(CLEANUP_RENAMEPDF_ONLYRELATIVE_PATHS, Boolean.FALSE);
    }

    // Dialog checkboxes, one per clean-up option.
    private JCheckBox cleanUpSuperscrips;
    private JCheckBox cleanUpDOI;
    private JCheckBox cleanUpMonth;
    private JCheckBox cleanUpPageNumbers;
    private JCheckBox cleanUpMakePathsRelative;
    private JCheckBox cleanUpRenamePDF;
    private JCheckBox cleanUpRenamePDFonlyRelativePaths;

    private JPanel optionsPanel = new JPanel();
    private BasePanel panel;
    private JabRefFrame frame;

    // global variable to count unsucessful Renames
    int unsuccesfullRenames = 0;

    public CleanUpAction(BasePanel panel) {
        this.panel = panel;
        this.frame = panel.frame();
        initOptionsPanel();
    }

    // Builds the options panel shown inside the confirmation dialog.
    private void initOptionsPanel() {
        cleanUpSuperscrips = new JCheckBox(Globals.lang("Convert 1st, 2nd, ... to real superscripts"));
        cleanUpDOI = new JCheckBox(Globals.lang("Move DOIs from note and URL field to DOI field and remove http prefix"));
        cleanUpMonth = new JCheckBox(Globals.lang("Format content of month field to #mon#"));
        cleanUpPageNumbers = new JCheckBox(Globals.lang("Ensure that page ranges are of the form num1--num2"));
        cleanUpMakePathsRelative = new JCheckBox(Globals.lang("Make paths of linked files relative (if possible)"));
        cleanUpRenamePDF = new JCheckBox(Globals.lang("Rename PDFs to given file name format pattern"));
        // "only relative paths" is a sub-option: enabled only while rename is on
        cleanUpRenamePDF.addChangeListener(new ChangeListener() {
            @Override
            public void stateChanged(ChangeEvent arg0) {
                cleanUpRenamePDFonlyRelativePaths.setEnabled(cleanUpRenamePDF.isSelected());
            }
        });
        cleanUpRenamePDFonlyRelativePaths = new JCheckBox(Globals.lang("Rename only PDFs having a relative path"));
        optionsPanel = new JPanel();
        retrieveSettings();
        FormLayout layout = new FormLayout("left:15dlu,pref:grow",
            "pref, pref, pref, pref, pref, pref, pref, pref");
        DefaultFormBuilder builder = new DefaultFormBuilder(layout, optionsPanel);
        builder.setDefaultDialogBorder();
        CellConstraints cc = new CellConstraints();
        builder.add(cleanUpSuperscrips, cc.xyw(1,1,2));
        builder.add(cleanUpDOI, cc.xyw(1,2,2));
        builder.add(cleanUpMonth, cc.xyw(1,3,2));
        builder.add(cleanUpPageNumbers, cc.xyw(1,4,2));
        builder.add(cleanUpMakePathsRelative, cc.xyw(1,5,2));
        builder.add(cleanUpRenamePDF, cc.xyw(1,6,2));
        String currentPattern = Globals.lang("File name format pattern").concat(": ").concat(Globals.prefs.get(ImportSettingsTab.PREF_IMPORT_FILENAMEPATTERN));
        builder.add(new JLabel(currentPattern), cc.xyw(2,7,1));
        builder.add(cleanUpRenamePDFonlyRelativePaths, cc.xyw(2,8,1));
    }

    // Loads the checkbox states from the stored preferences.
    private void retrieveSettings() {
        cleanUpSuperscrips.setSelected(Globals.prefs.getBoolean(CLEANUP_SUPERSCRIPTS));
        cleanUpDOI.setSelected(Globals.prefs.getBoolean(CLEANUP_DOI));
        cleanUpMonth.setSelected(Globals.prefs.getBoolean(CLEANUP_MONTH));
        cleanUpPageNumbers.setSelected(Globals.prefs.getBoolean(CLEANUP_PAGENUMBERS));
        cleanUpMakePathsRelative.setSelected(Globals.prefs.getBoolean(CLEANUP_MAKEPATHSRELATIVE));
        cleanUpRenamePDF.setSelected(Globals.prefs.getBoolean(CLEANUP_RENAMEPDF));
        cleanUpRenamePDFonlyRelativePaths.setSelected(Globals.prefs.getBoolean(CLEANUP_RENAMEPDF_ONLYRELATIVE_PATHS));
        cleanUpRenamePDFonlyRelativePaths.setEnabled(cleanUpRenamePDF.isSelected());
    }

    // Persists the checkbox states back to the preferences.
    private void storeSettings() {
        Globals.prefs.putBoolean(CLEANUP_SUPERSCRIPTS, cleanUpSuperscrips.isSelected());
        Globals.prefs.putBoolean(CLEANUP_DOI, cleanUpDOI.isSelected());
        Globals.prefs.putBoolean(CLEANUP_MONTH, cleanUpMonth.isSelected());
        Globals.prefs.putBoolean(CLEANUP_PAGENUMBERS, cleanUpPageNumbers.isSelected());
        Globals.prefs.putBoolean(CLEANUP_MAKEPATHSRELATIVE, cleanUpMakePathsRelative.isSelected());
        Globals.prefs.putBoolean(CLEANUP_RENAMEPDF, cleanUpRenamePDF.isSelected());
        Globals.prefs.putBoolean(CLEANUP_RENAMEPDF_ONLYRELATIVE_PATHS, cleanUpRenamePDFonlyRelativePaths.isSelected());
    }

    // Shows the OK/Cancel dialog with the options panel; returns the choice.
    private int showCleanUpDialog() {
        String dialogTitle = Globals.lang("Cleanup entries");
        Object[] messages = {Globals.lang("What would you like to clean up?"), optionsPanel};
        return JOptionPane.showConfirmDialog(frame, messages, dialogTitle,
            JOptionPane.OK_CANCEL_OPTION, JOptionPane.QUESTION_MESSAGE);
    }

    boolean cancelled;
    int modifiedEntriesCount;
    int numSelected;

    public void init() {
        cancelled = false;
        modifiedEntriesCount = 0;
        // NOTE(review): this local 'int numSelected' shadows the field of the
        // same name declared above; the field is never assigned and stays 0 —
        // drop the 'int' here (or the field) after confirming no other reader.
        int numSelected = panel.getSelectedEntries().length;
        if (numSelected == 0) {
            // None selected. Inform the user to select entries first.
            JOptionPane.showMessageDialog(frame,
                Globals.lang("First select entries to clean up."),
                Globals.lang("Cleanup entry"), JOptionPane.INFORMATION_MESSAGE);
            cancelled = true;
            return;
        }
        frame.block();
        panel.output(Globals.lang("Doing a cleanup for %0 entries...", Integer.toString(numSelected)));
    }

    public void run() {
        if (cancelled)
            return;
        int choice = showCleanUpDialog();
        if (choice != JOptionPane.OK_OPTION) {
            cancelled = true;
            return;
        }
        storeSettings();
        // Snapshot the checkbox states once before iterating the entries.
        boolean choiceCleanUpSuperscripts = cleanUpSuperscrips.isSelected(),
            choiceCleanUpDOI = cleanUpDOI.isSelected(),
            choiceCleanUpMonth = cleanUpMonth.isSelected(),
            choiceCleanUpPageNumbers = cleanUpPageNumbers.isSelected(),
            choiceMakePathsRelative = cleanUpMakePathsRelative.isSelected(),
            choiceRenamePDF = cleanUpRenamePDF.isSelected();
        // PDF renaming cannot be undone, so warn once (optionally suppressible).
        if (choiceRenamePDF && Globals.prefs.getBoolean(AKS_AUTO_NAMING_PDFS_AGAIN)) {
            CheckBoxMessage cbm = new CheckBoxMessage(Globals.lang("Auto-generating PDF-Names does not support undo. Continue?"),
                Globals.lang("Disable this confirmation dialog"), false);
            int answer = JOptionPane.showConfirmDialog(frame, cbm,
                Globals.lang("Autogenerate PDF Names"), JOptionPane.YES_NO_OPTION);
            if (cbm.isSelected())
                Globals.prefs.putBoolean(AKS_AUTO_NAMING_PDFS_AGAIN, false);
            if (answer == JOptionPane.NO_OPTION) {
                cancelled = true;
                return;
            }
        }
        for (BibtexEntry entry : panel.getSelectedEntries()) {
            // undo granularity is on entry level
            NamedCompound ce = new NamedCompound(Globals.lang("Cleanup entry"));
            if (choiceCleanUpSuperscripts)
                doCleanUpSuperscripts(entry, ce);
            if (choiceCleanUpDOI)
                doCleanUpDOI(entry, ce);
            if (choiceCleanUpMonth)
                doCleanUpMonth(entry, ce);
            if (choiceCleanUpPageNumbers)
                doCleanUpPageNumbers(entry, ce);
            if (choiceMakePathsRelative)
                doMakePathsRelative(entry, ce);
            if (choiceRenamePDF)
                doRenamePDFs(entry, ce);
            ce.end();
            if (ce.hasEdits()) {
                modifiedEntriesCount++;
                panel.undoManager.addEdit(ce);
            }
        }
    }

    public void update() {
        if (cancelled) {
            frame.unblock();
            return;
        }
        if(unsuccesfullRenames>0) { //Rename failed for at least one entry
            JOptionPane.showMessageDialog(frame,
                Globals.lang("File rename failed for")+" "
                + unsuccesfullRenames + " "+Globals.lang("entries") + ".",
                Globals.lang("Autogenerate PDF Names"), JOptionPane.INFORMATION_MESSAGE);
        }
        if (modifiedEntriesCount>0) {
            panel.updateEntryEditorIfShowing();
            panel.markBaseChanged() ;
        }
        String message;
        switch (modifiedEntriesCount) {
        case 0:
            message = Globals.lang("No entry needed a clean up");
            break;
        case 1:
            message = Globals.lang("One entry needed a clean up");
            break;
        default:
            // NOTE(review): the %0 placeholder is not substituted here —
            // presumably the count should be passed as a lang() argument.
            message = Globals.lang("%0 entries needed a clean up");
            break;
        }
        panel.output(message);
        frame.unblock();
    }

    /**
     * Converts the text in 1st, 2nd, ... to real superscripts by wrapping in \textsuperscript{st}, ...
     */
    private void doCleanUpSuperscripts(BibtexEntry entry, NamedCompound ce) {
        // only the booktitle field is processed
        final String field = "booktitle";
        String oldValue = entry.getField(field);
        if (oldValue == null)
            return;
        String newValue = oldValue.replaceAll(" (\\d+)(st|nd|rd|th) ", " $1\\\\textsuperscript{$2} ");
        if (!oldValue.equals(newValue)) {
            entry.setField(field, newValue);
            ce.addEdit(new UndoableFieldChange(entry, field, oldValue, newValue));
        }
    }

    // Moves/normalizes DOIs: strips an http prefix from the doi field and,
    // when the doi field is empty, harvests a DOI from note/url/ee.
    private void doCleanUpDOI(BibtexEntry bes, NamedCompound ce) {
        // fields to check
        String[] fields = {"note", "url", "ee"};
        // First check if the DOI Field is empty
        if (bes.getField("doi") != null) {
            String doiFieldValue = bes.getField("doi");
            if (Util.checkForDOIwithHTTPprefix(doiFieldValue)) {
                String newValue = Util.getDOI(doiFieldValue);
                ce.addEdit(new UndoableFieldChange(bes, "doi", doiFieldValue, newValue));
                bes.setField("doi", newValue);
            };
            if (Util.checkForPlainDOI(doiFieldValue)) {
                // DOI field seems to contain DOI
                // cleanup note, url, ee field
                // we do NOT copy values to the DOI field as the DOI field contains a DOI!
                for (String field: fields) {
                    if (Util.checkForPlainDOI(bes.getField(field))){
                        Util.removeDOIfromBibtexEntryField(bes, field, ce);
                    }
                }
            }
        } else {
            // As the DOI field is empty we now check if note, url, or ee field contains a DOI
            for (String field: fields) {
                if (Util.checkForPlainDOI(bes.getField(field))){
                    // update DOI
                    String oldValue = bes.getField("doi");
                    String newValue = Util.getDOI(bes.getField(field));
                    ce.addEdit(new UndoableFieldChange(bes, "doi", oldValue, newValue));
                    bes.setField("doi", newValue);
                    Util.removeDOIfromBibtexEntryField(bes, field, ce);
                }
            }
        }
    }

    // Normalizes the month field to JabRef's #mon# form (numeric or named months).
    private void doCleanUpMonth(BibtexEntry entry, NamedCompound ce) {
        // implementation based on patch 3470076 by Mathias Walter
        String oldValue = entry.getField("month");
        if (oldValue == null)
            return;
        String newValue = oldValue;
        try {
            // numeric month, e.g. "5" -> "#may#"
            int month = Integer.parseInt(newValue);
            newValue = new StringBuffer("#").append(Globals.MONTHS[month - 1]).append('#').toString();
        } catch (NumberFormatException e) {
            // adapt casing of newValue to follow entry in Globals_MONTH_STRINGS
            String casedString = newValue.substring(0, 1).toUpperCase().concat(newValue.substring(1).toLowerCase());
            if (Globals.MONTH_STRINGS.containsKey(newValue.toLowerCase()) || Globals.MONTH_STRINGS.containsValue(casedString)) {
                newValue = new StringBuffer("#").append(newValue.substring(0, 3).toLowerCase()).append('#').toString();
            }
        }
        if (!oldValue.equals(newValue)) {
            entry.setField("month", newValue);
            ce.addEdit(new UndoableFieldChange(entry, "month", oldValue, newValue));
        }
    }

    // Rewrites page ranges to the num1--num2 form.
    private void doCleanUpPageNumbers(BibtexEntry entry, NamedCompound ce) {
        String oldValue = entry.getField("pages");
        if (oldValue == null)
            return;
        // NOTE(review): the replacement string below is garbled/truncated in
        // this copy of the file (unterminated literal) — it was presumably
        // "$1--$2"); restore from the upstream source before building.
        String newValue = oldValue.replaceAll("(\\d+) *- *(\\d+)", "$1
        if (!oldValue.equals(newValue)) {
            entry.setField("pages", newValue);
            ce.addEdit(new UndoableFieldChange(entry, "pages", oldValue, newValue));
        }
    }

    // Placeholder — not implemented.
    private void doExportToKeywords(BibtexEntry entry, NamedCompound ce) {
    }

    // Placeholder — not implemented.
    private void doImportFromKeywords(BibtexEntry entry, NamedCompound ce) {
    }

    // Converts every linked-file path of the entry to a path relative to the
    // configured file directory, recording an undoable edit on change.
    private void doMakePathsRelative(BibtexEntry entry, NamedCompound ce) {
        String oldValue = entry.getField(GUIGlobals.FILE_FIELD);
        if (oldValue == null)
            return;
        FileListTableModel flModel = new FileListTableModel();
        flModel.setContent(oldValue);
        if (flModel.getRowCount() == 0) {
            return;
        }
        boolean changed = false;
        for (int i = 0; i<flModel.getRowCount(); i++) {
            FileListEntry flEntry = flModel.getEntry(i);
            String oldFileName = flEntry.getLink();
            String newFileName = Util.shortenFileName(new File(oldFileName), panel.metaData().getFileDirectory(GUIGlobals.FILE_FIELD)).toString();
            if (!oldFileName.equals(newFileName)) {
                flEntry.setLink(newFileName);
                changed = true;
            }
        }
        if (changed) {
            String newValue = flModel.getStringRepresentation();
            assert(!oldValue.equals(newValue));
            entry.setField(GUIGlobals.FILE_FIELD, newValue);
            ce.addEdit(new UndoableFieldChange(entry, GUIGlobals.FILE_FIELD, oldValue, newValue));
        }
    }

    // Renames the entry's linked PDFs to the configured file-name pattern.
    private void doRenamePDFs(BibtexEntry entry, NamedCompound ce) {
        //Extract the path
        String oldValue = entry.getField(GUIGlobals.FILE_FIELD);
        if (oldValue == null)
            return;
        FileListTableModel flModel = new FileListTableModel();
        flModel.setContent(oldValue);
        if (flModel.getRowCount() == 0) {
            return;
        }
        boolean changed = false;
        for (int i=0; i<flModel.getRowCount(); i++) {
            String realOldFilename = flModel.getEntry(i).getLink();
            // NOTE(review): 'return' here aborts renaming for ALL remaining
            // linked files of this entry once one absolute path is seen;
            // 'continue' looks intended — confirm against upstream.
            if (cleanUpRenamePDFonlyRelativePaths.isSelected() && (new File(realOldFilename).isAbsolute()))
                return;
            String newFilename = Util.getLinkedFileName(panel.database(), entry);
            //String oldFilename = bes.getField(GUIGlobals.FILE_FIELD); // would have to be stored for undoing purposes
            //Add extension to newFilename
            newFilename = newFilename + "."
            // (source chunk ends here — the remainder of this method lies outside this view)
+ flModel.getEntry(i).getType().getExtension(); //get new Filename with path //Create new Path based on old Path and new filename File expandedOldFile = Util.expandFilename(realOldFilename, panel.metaData().getFileDirectory(GUIGlobals.FILE_FIELD)); String newPath = expandedOldFile.getParent().concat(System.getProperty("file.separator")).concat(newFilename); if (new File(newPath).exists()) // we do not overwrite files // TODO: we could check here if the newPath file is linked with the current entry. And if not, we could add a link return; //do rename boolean renameSuccesfull = Util.renameFile(expandedOldFile.toString(), newPath); if (renameSuccesfull) { changed = true; //Change the path for this entry String description = flModel.getEntry(i).getDescription(); ExternalFileType type = flModel.getEntry(i).getType(); flModel.removeEntry(i); // we cannot use "newPath" to generate a FileListEntry as newPath is absolute, but we want to keep relative paths whenever possible File parent = (new File(realOldFilename)).getParentFile(); String newFileEntryFileName; if (parent == null) { newFileEntryFileName = newFilename; } else { newFileEntryFileName = parent.toString().concat(System.getProperty("file.separator")).concat(newFilename); } flModel.addEntry(i, new FileListEntry(description, newFileEntryFileName, type)); } else { unsuccesfullRenames++; } } if (changed) { String newValue = flModel.getStringRepresentation(); assert(!oldValue.equals(newValue)); entry.setField(GUIGlobals.FILE_FIELD, newValue); //we put an undo of the field content here //the file is not being renamed back, which leads to inconsostencies //if we put a null undo object here, the change by "doMakePathsRelative" would overwrite the field value nevertheless. ce.addEdit(new UndoableFieldChange(entry, GUIGlobals.FILE_FIELD, oldValue, newValue)); } } }
//This library is free software; you can redistribute it and/or
//modify it under the terms of the GNU Lesser General Public
//License as published by the Free Software Foundation; either
//version 2.1 of the License, or (at your option) any later version.
//
//This library is distributed in the hope that it will be useful,
//but WITHOUT ANY WARRANTY; without even the implied warranty of
//MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
//GNU Lesser General Public License for more details.
//
//You should have received a copy of the GNU Lesser General Public
//License along with this program; if not, write to the Free Software
//Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.

package opennlp.tools.chunker;

import java.util.Arrays;
import java.util.List;

import opennlp.maxent.GISModel;
import opennlp.maxent.MaxentModel;
import opennlp.maxent.TwoPassDataIndexer;
import opennlp.tools.util.BeamSearch;
import opennlp.tools.util.Sequence;

/**
 * The class represents a maximum-entropy-based chunker. Such a chunker can be used to
 * find flat structures based on sequence inputs such as noun phrases or named entities.
 */
public class ChunkerME implements Chunker {

  /** The beam used to search for sequences of chunk tag assignments. */
  protected BeamSearch beam;

  // Last sequence decoded by chunk(); read by probs(). Not thread-safe.
  private Sequence bestSequence;

  /** The model used to assign chunk tags to a sequence of tokens. */
  protected MaxentModel model;

  /**
   * Creates a chunker using the specified model, with the default context
   * generator and a beam size of 10.
   * @param mod The maximum entropy model for this chunker.
   */
  public ChunkerME(MaxentModel mod) {
    this(mod, new DefaultChunkerContextGenerator(), 10);
  }

  /**
   * Creates a chunker using the specified model and context generator
   * (beam size 10).
   * @param mod The maximum entropy model for this chunker.
   * @param cg The context generator to be used by the specified model.
   */
  public ChunkerME(MaxentModel mod, ChunkerContextGenerator cg) {
    this(mod, cg, 10);
  }

  /**
   * Creates a chunker using the specified model and context generator and decodes the
   * model using a beam search of the specified size.
   * @param mod The maximum entropy model for this chunker.
   * @param cg The context generator to be used by the specified model.
   * @param beamSize The size of the beam that should be used when decoding sequences.
   */
  public ChunkerME(MaxentModel mod, ChunkerContextGenerator cg, int beamSize) {
    beam = new ChunkBeamSearch(beamSize, cg, mod);
    this.model = mod;
  }

  /* inherited javadoc */
  public List chunk(List toks, List tags) {
    // Pass the POS tags as additional context (String[]) to the beam search.
    bestSequence = beam.bestSequence(toks, new Object[] { (String[]) tags.toArray(new String[tags.size()]) });
    return bestSequence.getOutcomes();
  }

  /* inherited javadoc */
  public String[] chunk(Object[] toks, String[] tags) {
    bestSequence = beam.bestSequence(Arrays.asList(toks), new Object[] {tags});
    List c = bestSequence.getOutcomes();
    return (String[]) c.toArray(new String[c.size()]);
  }

  /**
   * This method determines whether the outcome is valid for the preceding sequence.
   * This can be used to implement constraints on what sequences are valid.
   * The default implementation accepts every outcome.
   * @param outcome The outcome.
   * @param sequence The preceding sequence of outcome assignments.
   * @return true if the outcome is valid for the sequence, false otherwise.
   */
  protected boolean validOutcome(String outcome, Sequence sequence) {
    return (true);
  }

  /**
   * This method determines whether the outcome is valid for the preceding sequence.
   * This can be used to implement constraints on what sequences are valid.
   * The default implementation accepts every outcome.
   * @param outcome The outcome.
   * @param sequence The preceding sequence of outcome assignments.
   * @return true if the outcome is valid for the sequence, false otherwise.
   */
  protected boolean validOutcome(String outcome, String[] sequence) {
    return (true);
  }

  /**
   * This class implements the abstract BeamSearch class to allow for the chunker to use
   * the common beam search code. It forwards validity checks to the enclosing
   * chunker's validOutcome overloads.
   */
  class ChunkBeamSearch extends BeamSearch {
    public ChunkBeamSearch(int size, ChunkerContextGenerator cg, MaxentModel model) {
      super(size, cg, model);
    }

    /* inherited javadoc */
    protected boolean validSequence(int i, List sequence, Sequence s, String outcome) {
      return validOutcome(outcome, s);
    }

    protected boolean validSequence(int i, Object[] sequence, String[] s, String outcome) {
      return validOutcome(outcome, s);
    }
  }

  /**
   * Populates the specified array with the probabilities of the last decoded sequence. The
   * sequence was determined based on the previous call to <code>chunk</code>. The
   * specified array should be at least as large as the number of tokens in the previous call to <code>chunk</code>.
   * @param probs An array used to hold the probabilities of the last decoded sequence.
   */
  public void probs(double[] probs) {
    bestSequence.getProbs(probs);
  }

  /**
   * Returns an array with the probabilities of the last decoded sequence. The
   * sequence was determined based on the previous call to <code>chunk</code>.
   * @return An array with the same number of probabilities as tokens were sent to <code>chunk</code>
   * when it was last called.
   */
  public double[] probs() {
    return bestSequence.getProbs();
  }

  // Trains a GIS model from the event stream with the given iteration count and cutoff.
  private static GISModel train(opennlp.maxent.EventStream es, int iterations, int cut) throws java.io.IOException {
    return opennlp.maxent.GIS.trainModel(iterations, new TwoPassDataIndexer(es, cut));
  }

  /**
   * Trains the chunker using the specified parameters. <br>
   * Usage: ChunkerME trainingFile modelFile [iterations cutoff]. <br>
   * Training file should be one word per line where each line consists of a
   * space-delimited triple of "word pos outcome". Sentence breaks are indicated by blank lines.
   * @param args The training file and the model file.
   * @throws java.io.IOException When the specified files can not be read.
   */
  public static void main(String[] args) throws java.io.IOException {
    if (args.length == 0) {
      System.err.println("Usage: ChunkerME trainingFile modelFile");
      System.err.println();
      System.err.println("Training file should be one word per line where each line consists of a ");
      System.err.println("space-delimited triple of \"word pos outcome\". Sentence breaks are indicated by blank lines.");
      System.exit(1);
    }
    java.io.File inFile = new java.io.File(args[0]);
    java.io.File outFile = new java.io.File(args[1]);
    GISModel mod;
    opennlp.maxent.EventStream es = new ChunkerEventStream(new opennlp.maxent.PlainTextByLineDataStream(new java.io.FileReader(inFile)));
    // With 4+ args, iterations and cutoff come from the command line; defaults otherwise.
    // NOTE(review): the check is > 3, so a lone extra third argument is silently ignored.
    if (args.length > 3)
      mod = train(es, Integer.parseInt(args[2]), Integer.parseInt(args[3]));
    else
      mod = train(es, 100, 5);
    System.out.println("Saving the model as: " + args[1]);
    new opennlp.maxent.io.SuffixSensitiveGISModelWriter(mod, outFile).persist();
  }
}
package org.apache.log4j.chainsaw; import java.awt.BorderLayout; import java.awt.Component; import java.awt.Container; import java.awt.Dimension; import java.awt.Event; import java.awt.Frame; import java.awt.Point; import java.awt.Toolkit; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.ContainerEvent; import java.awt.event.ContainerListener; import java.awt.event.InputEvent; import java.awt.event.KeyEvent; import java.awt.event.MouseAdapter; import java.awt.event.MouseEvent; import java.awt.event.WindowAdapter; import java.awt.event.WindowEvent; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.io.IOException; import java.lang.reflect.Method; import java.net.URL; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.Vector; import javax.swing.AbstractAction; import javax.swing.Action; import javax.swing.BorderFactory; import javax.swing.ImageIcon; import javax.swing.JComponent; import javax.swing.JDialog; import javax.swing.JEditorPane; import javax.swing.JFrame; import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.JPopupMenu; import javax.swing.JScrollPane; import javax.swing.JToolBar; import javax.swing.JWindow; import javax.swing.KeyStroke; import javax.swing.SwingUtilities; import javax.swing.UIManager; import javax.swing.event.ChangeEvent; import javax.swing.event.ChangeListener; import javax.swing.event.EventListenerList; import javax.swing.event.HyperlinkEvent; import javax.swing.event.HyperlinkListener; import org.apache.log4j.Level; import org.apache.log4j.LogManager; import org.apache.log4j.Priority; import org.apache.log4j.UtilLoggingLevel; import org.apache.log4j.chainsaw.help.Tutorial; import org.apache.log4j.chainsaw.icons.ChainsawIcons; import org.apache.log4j.chainsaw.prefs.LoadSettingsEvent; import 
org.apache.log4j.chainsaw.prefs.SaveSettingsEvent; import org.apache.log4j.chainsaw.prefs.SettingsListener; import org.apache.log4j.chainsaw.prefs.SettingsManager; import org.apache.log4j.helpers.LogLog; import org.apache.log4j.helpers.OptionConverter; import org.apache.log4j.net.SocketNodeEventListener; import org.apache.log4j.plugins.Plugin; import org.apache.log4j.plugins.PluginEvent; import org.apache.log4j.plugins.PluginListener; import org.apache.log4j.plugins.PluginRegistry; import org.apache.log4j.plugins.Receiver; /** * The main entry point for Chainsaw, this class represents the first frame * that is used to display a Welcome panel, and any other panels that are * generated because Logging Events are streamed via a Receiver, or other * mechanism. * * If a system property 'chainsaw.usecyclicbuffer' is set to 'true', each panel * will use a cyclic buffer for displaying events and once events reach the * buffer limit, the oldest events are removed from the table. * * If the property is not provided, there is no limit on the table's buffer * size. * * If 'chainsaw.usecyclicbuffer' is set to 'true' and a system property * 'chainsaw.cyclicbuffersize' is set to some integer value, that value will be * used as the buffer size - if the buffersize is not provided, a default size * of 500 is used. 
 *
 * @author Scott Deboy <sdeboy@apache.org>
 * @author Paul Smith <psmith@apache.org>
 *
 */
public class LogUI extends JFrame implements ChainsawViewer, SettingsListener {
  // Settings keys used with the SettingsManager load/save events.
  private static final String CONFIG_FILE_TO_USE = "config.file";
  static final String USE_CYCLIC_BUFFER_PROP_NAME = "chainsaw.usecyclicbuffer";
  static final String CYCLIC_BUFFER_SIZE_PROP_NAME = "chainsaw.cyclicbuffersize";
  private static final String MAIN_WINDOW_HEIGHT = "main.window.height";
  private static final String MAIN_WINDOW_WIDTH = "main.window.width";
  private static final String MAIN_WINDOW_Y = "main.window.y";
  private static final String MAIN_WINDOW_X = "main.window.x";
  static final String TABLE_COLUMN_ORDER = "table.columns.order";
  static final String TABLE_COLUMN_WIDTHS = "table.columns.widths";
  private static final String LOOK_AND_FEEL = "LookAndFeel";
  private static final String STATUS_BAR = "StatusBar";
  static final String COLUMNS_EXTENSION = ".columns";

  // Frame hosting the application-wide preferences panel (built in initGUI()).
  private final JFrame preferencesFrame = new JFrame();
  private static ChainsawSplash splash;
  // Remembered log4j configuration URL, persisted via saveSettings().
  private URL configURLToUse;
  private boolean noReceiversDefined;
  private ReceiversPanel receiversPanel;
  private ChainsawTabbedPane tabbedPane;
  private JToolBar toolbar;
  private ChainsawStatusBar statusBar;
  private final ApplicationPreferenceModel applicationPreferenceModel = new ApplicationPreferenceModel();
  private final ApplicationPreferenceModelPanel applicationPreferenceModelPanel = new ApplicationPreferenceModelPanel(applicationPreferenceModel);
  private final Map tableModelMap = new HashMap();
  private final Map tableMap = new HashMap();
  private final List filterableColumns = new ArrayList();
  private final Map panelMap = new HashMap();
  ChainsawAppenderHandler handler;
  private ChainsawToolBarAndMenus tbms;
  private ChainsawAbout aboutBox;
  private final SettingsManager sm = SettingsManager.getInstance();
  // Remembered so it can be persisted on save; applied via applyLookAndFeel().
  private String lookAndFeelClassName;
  private final JFrame tutorialFrame = new JFrame("Chainsaw Tutorial");

  /**
   * Set to true if and only if the GUI has completed its full
   * initialization. Any logging events that come in must wait until this is
   * true, and if it is false, should wait on the initializationLock object
   * until notified.
   */
  private boolean isGUIFullyInitialized = false;
  private Object initializationLock = new Object();

  /**
   * The shutdownAction is called when the user requests to exit Chainsaw, and
   * by default this exits the VM, but a developer may replace this action with
   * something that better suits their needs
   */
  private Action shutdownAction = new AbstractAction() {
    public void actionPerformed(ActionEvent e) {
      System.exit(0);
    }
  };

  /**
   * Clients can register a ShutdownListener to be notified when the user has
   * requested Chainsaw to exit.
   */
  private EventListenerList shutdownListenerList = new EventListenerList();
  private WelcomePanel welcomePanel;

  /**
   * Constructor which builds up all the visual elements of the frame including
   * the Menu bar
   */
  public LogUI() {
    super("Chainsaw v2 - Log Viewer");
    if (ChainsawIcons.WINDOW_ICON != null) {
      setIconImage(new ImageIcon(ChainsawIcons.WINDOW_ICON).getImage());
    }
  }

  // Shows the splash screen centered on the display. Stored in a static field
  // so removeSplash() can dispose it later.
  private static final void showSplash(Frame owner) {
    splash = new ChainsawSplash(owner);
    Dimension screenSize = Toolkit.getDefaultToolkit().getScreenSize();
    splash.setLocation(
      (screenSize.width / 2) - (splash.getWidth() / 2),
      (screenSize.height / 2) - (splash.getHeight() / 2));
    splash.setVisible(true);
  }

  // Hides and disposes the splash screen, if one was shown.
  private static final void removeSplash() {
    if (splash != null) {
      splash.setVisible(false);
      splash.dispose();
    }
  }

  /**
   * Registers a ShutdownListener with this class so that it can be notified
   * when the user has requested that Chainsaw exit.
   *
   * @param l
   */
  public void addShutdownListener(ShutdownListener l) {
    shutdownListenerList.add(ShutdownListener.class, l);
  }

  /**
   * Removes the registered ShutdownListener so that the listener will not be
   * notified on a shutdown.
   *
   * @param l
   */
  public void removeShutdownListener(ShutdownListener l) {
    shutdownListenerList.remove(ShutdownListener.class, l);
  }

  /**
   * Starts Chainsaw by attaching a new instance to the Log4J main root Logger
   * via a ChainsawAppender, and activates itself
   *
   * @param args
   */
  public static void main(String[] args) {
    createChainsawGUI(true, null);
  }

  /**
   * Creates, activates, and then shows the Chainsaw GUI, optionally showing
   * the splash screen, and using the passed shutdown action when the user
   * requests to exit the application (if null, then Chainsaw will exit the vm)
   *
   * @param showSplash whether to display the splash screen during startup
   * @param shutdownAction replacement exit action, or null for System.exit
   */
  public static void createChainsawGUI(
    boolean showSplash, Action shutdownAction) {
    LogUI logUI = new LogUI();

    if (showSplash) {
      showSplash(logUI);
    }

    // Route root-logger events into the GUI via the appender handler.
    logUI.handler = new ChainsawAppenderHandler();
    logUI.handler.addEventBatchListener(logUI.new NewTabEventBatchReceiver());
    LogManager.getRootLogger().addAppender(logUI.handler);
    logUI.activateViewer();

    if (shutdownAction != null) {
      logUI.setShutdownAction(shutdownAction);
    }
  }

  /**
   * Activates this viewer using events from the supplied appender instead of
   * attaching to the root logger.
   *
   * @param appender source of logging events for this viewer
   */
  public void activateViewer(ChainsawAppender appender) {
    handler = new ChainsawAppenderHandler(appender);
    handler.addEventBatchListener(new NewTabEventBatchReceiver());
    activateViewer();
  }

  /**
   * Initialises the menu's and toolbars, but does not actually create any of
   * the main panel components.
* */ private void initGUI() { statusBar = new ChainsawStatusBar(); receiversPanel = new ReceiversPanel(this); setToolBarAndMenus(new ChainsawToolBarAndMenus(this)); toolbar = getToolBarAndMenus().getToolbar(); setJMenuBar(getToolBarAndMenus().getMenubar()); setTabbedPane(new ChainsawTabbedPane()); preferencesFrame.setTitle("'Application-wide Preferences"); preferencesFrame.setIconImage( ((ImageIcon) ChainsawIcons.ICON_PREFERENCES).getImage()); preferencesFrame.getContentPane().add(applicationPreferenceModelPanel); preferencesFrame.setSize(640, 480); applicationPreferenceModelPanel.setOkCancelActionListener( new ActionListener() { public void actionPerformed(ActionEvent e) { preferencesFrame.setVisible(false); } }); } /** * Given the load event, configures the size/location of the main window etc * etc. * * @param event * DOCUMENT ME! */ public void loadSettings(LoadSettingsEvent event) { if (event.asBoolean(LogUI.STATUS_BAR)) { addStatusBar(); } else { removeStatusBar(); } setLocation( event.asInt(LogUI.MAIN_WINDOW_X), event.asInt(LogUI.MAIN_WINDOW_Y)); setSize( event.asInt(LogUI.MAIN_WINDOW_WIDTH), event.asInt(LogUI.MAIN_WINDOW_HEIGHT)); getToolBarAndMenus().stateChange(); } /** * Ensures the location/size of the main window is stored with the settings * * @param event * DOCUMENT ME! */ public void saveSettings(SaveSettingsEvent event) { event.saveSetting(LogUI.MAIN_WINDOW_X, (int) getLocation().getX()); event.saveSetting(LogUI.MAIN_WINDOW_Y, (int) getLocation().getY()); event.saveSetting(LogUI.MAIN_WINDOW_WIDTH, getWidth()); event.saveSetting(LogUI.MAIN_WINDOW_HEIGHT, getHeight()); if (lookAndFeelClassName != null) { event.saveSetting(LogUI.LOOK_AND_FEEL, lookAndFeelClassName); } event.saveSetting( LogUI.STATUS_BAR, isStatusBarVisible() ? 
Boolean.TRUE : Boolean.FALSE); if (configURLToUse != null) { event.saveSetting(LogUI.CONFIG_FILE_TO_USE, configURLToUse.toString()); } } /** * Activates itself as a viewer by configuring Size, and location of itself, * and configures the default Tabbed Pane elements with the correct layout, * table columns, and sets itself viewable. */ public void activateViewer() { welcomePanel = new WelcomePanel(this); applicationPreferenceModel.addPropertyChangeListener("identifierExpression", new PropertyChangeListener() { public void propertyChange(PropertyChangeEvent evt) { handler.setIdentifierExpression(evt.getNewValue().toString()); } } ); applicationPreferenceModel.addPropertyChangeListener("responsiveness", new PropertyChangeListener() { public void propertyChange(PropertyChangeEvent evt) { int value = ((Integer)evt.getNewValue()).intValue(); handler.setQueueInterval((value*1000)-750); } } ); final SocketNodeEventListener socketListener = new SocketNodeEventListener() { public void socketOpened(String remoteInfo) { statusBar.remoteConnectionReceived(remoteInfo); } public void socketClosedEvent(Exception e) { statusBar.setMessage("Collection lost! 
:: " + e.getMessage()); } }; PluginListener pluginListener = new PluginListener() { public void pluginStarted(PluginEvent e) { statusBar.setMessage(e.getPlugin().getName() + " started!"); Method method = getAddListenerMethod(e.getPlugin()); if (method != null) { try { method.invoke(e.getPlugin(), new Object[] { socketListener }); } catch (Exception ex) { LogLog.error("Failed to add a SocketNodeEventListener", ex); } } } Method getRemoveListenerMethod(Plugin p) { try { return p.getClass().getMethod( "removeSocketNodeEventListener", new Class[] { SocketNodeEventListener.class }); } catch (Exception e) { return null; } } Method getAddListenerMethod(Plugin p) { try { return p.getClass().getMethod( "addSocketNodeEventListener", new Class[] { SocketNodeEventListener.class }); } catch (Exception e) { return null; } } public void pluginStopped(PluginEvent e) { Method method = getRemoveListenerMethod(e.getPlugin()); if (method != null) { try { method.invoke(e.getPlugin(), new Object[] { socketListener }); } catch (Exception ex) { LogLog.error("Failed to remove SocketNodeEventListener", ex); } } statusBar.setMessage(e.getPlugin().getName() + " stopped!"); } }; PluginRegistry.addPluginListener(pluginListener); getSettingsManager().configure( new SettingsListener() { public void loadSettings(LoadSettingsEvent event) { String configFile = event.getSetting(LogUI.CONFIG_FILE_TO_USE); //if both a config file are defined and a log4j.configuration property // are set, //don't use configFile's configuration if ( (configFile != null) && !configFile.trim().equals("") && (System.getProperty("log4j.configuration") == null)) { try { URL url = new URL(configFile); OptionConverter.selectAndConfigure( url, null, LogManager.getLoggerRepository()); if (LogUI.this.getStatusBar() != null) { LogUI.this.getStatusBar().setMessage( "Configured Log4j using remembered URL :: " + url); } LogUI.this.configURLToUse = url; } catch (Exception e) { LogLog.error("error occurred initializing log4j", e); } } } 
public void saveSettings(SaveSettingsEvent event) { //required because of SettingsListener interface..not used during load } }); if ( PluginRegistry.getPlugins( LogManager.getLoggerRepository(), Receiver.class).size() == 0) { noReceiversDefined = true; } initGUI(); List utilList = UtilLoggingLevel.getAllPossibleLevels(); // TODO: Replace the array list creating with the standard way of // retreiving the Level set. (TBD) Priority[] priorities = new Level[] { Level.FATAL, Level.ERROR, Level.WARN, Level.INFO, Level.DEBUG }; List priorityLevels = new ArrayList(); for (int i = 0; i < priorities.length; i++) { priorityLevels.add(priorities[i].toString()); } List utilLevels = new ArrayList(); for (Iterator iterator = utilLevels.iterator(); iterator.hasNext();) { utilLevels.add(iterator.next().toString()); } // getLevelMap().put(ChainsawConstants.UTIL_LOGGING_EVENT_TYPE, // utilLevels); // getLevelMap().put(ChainsawConstants.LOG4J_EVENT_TYPE, priorityLevels); getFilterableColumns().add(ChainsawConstants.LEVEL_COL_NAME); getFilterableColumns().add(ChainsawConstants.LOGGER_COL_NAME); getFilterableColumns().add(ChainsawConstants.THREAD_COL_NAME); getFilterableColumns().add(ChainsawConstants.NDC_COL_NAME); getFilterableColumns().add(ChainsawConstants.MDC_COL_NAME); getFilterableColumns().add(ChainsawConstants.CLASS_COL_NAME); getFilterableColumns().add(ChainsawConstants.METHOD_COL_NAME); getFilterableColumns().add(ChainsawConstants.FILE_COL_NAME); getFilterableColumns().add(ChainsawConstants.NONE_COL_NAME); JPanel panePanel = new JPanel(); panePanel.setLayout(new BorderLayout(2, 2)); getContentPane().setLayout(new BorderLayout()); getTabbedPane().addChangeListener(getToolBarAndMenus()); getTabbedPane().addChangeListener( new ChangeListener() { //received a statechange event - selection changed - remove icon from // selected index public void stateChanged(ChangeEvent e) { if ( getTabbedPane().getSelectedComponent() instanceof ChainsawTabbedPane) { if 
(getTabbedPane().getSelectedIndex() > -1) { getTabbedPane().setIconAt( getTabbedPane().getSelectedIndex(), null); } } } }); KeyStroke ksRight = KeyStroke.getKeyStroke(KeyEvent.VK_RIGHT, Event.CTRL_MASK); KeyStroke ksLeft = KeyStroke.getKeyStroke(KeyEvent.VK_LEFT, Event.CTRL_MASK); getTabbedPane().getInputMap(JComponent.WHEN_IN_FOCUSED_WINDOW).put( ksRight, "MoveRight"); getTabbedPane().getInputMap(JComponent.WHEN_IN_FOCUSED_WINDOW).put( ksLeft, "MoveLeft"); Action moveRight = new AbstractAction() { public void actionPerformed(ActionEvent e) { int temp = getTabbedPane().getSelectedIndex(); ++temp; if (temp != getTabbedPane().getTabCount()) { getTabbedPane().setSelectedTab(temp); } } }; Action moveLeft = new AbstractAction() { public void actionPerformed(ActionEvent e) { int temp = getTabbedPane().getSelectedIndex(); --temp; if (temp > -1) { getTabbedPane().setSelectedTab(temp); } } }; getTabbedPane().getActionMap().put("MoveRight", moveRight); getTabbedPane().getActionMap().put("MoveLeft", moveLeft); /** * We listen for double clicks, and auto-undock currently selected Tab if * the mouse event location matches the currently selected tab */ getTabbedPane().addMouseListener( new MouseAdapter() { public void mouseClicked(MouseEvent e) { super.mouseClicked(e); if ( (e.getClickCount() > 1) && ((e.getModifiers() & InputEvent.BUTTON1_MASK) > 0)) { int tabIndex = getTabbedPane().getSelectedIndex(); if ( (tabIndex != -1) && (tabIndex == getTabbedPane().getSelectedIndex())) { LogPanel logPanel = getCurrentLogPanel(); if (logPanel != null) { logPanel.undock(); } } } } }); panePanel.add(getTabbedPane()); addWelcomePanel(); getContentPane().add(toolbar, BorderLayout.NORTH); getContentPane().add(panePanel, BorderLayout.CENTER); getContentPane().add(statusBar, BorderLayout.SOUTH); receiversPanel.setVisible(false); getContentPane().add(receiversPanel, BorderLayout.EAST); addWindowListener( new WindowAdapter() { public void windowClosing(WindowEvent event) { exit(); } }); 
getSettingsManager().configure( new SettingsListener() { public void loadSettings(LoadSettingsEvent event) { lookAndFeelClassName = event.getSetting(LogUI.LOOK_AND_FEEL); if (lookAndFeelClassName != null) { applyLookAndFeel(lookAndFeelClassName); } } public void saveSettings(SaveSettingsEvent event) { //required because of SettingsListener interface..not used during load } }); pack(); final JPopupMenu tabPopup = new JPopupMenu(); final Action hideCurrentTabAction = new AbstractAction("Hide") { public void actionPerformed(ActionEvent e) { displayPanel(getCurrentLogPanel().getIdentifier(), false); tbms.stateChange(); } }; final Action hideOtherTabsAction = new AbstractAction("Hide Others") { public void actionPerformed(ActionEvent e) { String currentName = getCurrentLogPanel().getIdentifier(); int count = getTabbedPane().getTabCount(); int index = 0; for (int i = 0; i < count; i++) { String name = getTabbedPane().getTitleAt(index); if ( getPanelMap().keySet().contains(name) && !name.equals(currentName)) { displayPanel(name, false); tbms.stateChange(); } else { index++; } } } }; Action showHiddenTabsAction = new AbstractAction("Show All Hidden") { public void actionPerformed(ActionEvent e) { for (Iterator iter = getPanels().keySet().iterator(); iter.hasNext();) { String identifier = (String) iter.next(); int count = getTabbedPane().getTabCount(); boolean found = false; for (int i = 0; i < count; i++) { String name = getTabbedPane().getTitleAt(i); if (name.equals(identifier)) { found = true; break; } } if (!found) { displayPanel(identifier, true); tbms.stateChange(); } } } }; tabPopup.add(hideCurrentTabAction); tabPopup.add(hideOtherTabsAction); tabPopup.addSeparator(); tabPopup.add(showHiddenTabsAction); final PopupListener tabPopupListener = new PopupListener(tabPopup); getTabbedPane().addMouseListener(tabPopupListener); final ChangeListener actionEnabler = new ChangeListener(){ public void stateChanged(ChangeEvent arg0) { boolean enabled = 
getCurrentLogPanel()!=null; hideCurrentTabAction.setEnabled(enabled); hideOtherTabsAction.setEnabled(enabled); }}; getTabbedPane().addChangeListener(actionEnabler); getTabbedPane().addContainerListener(new ContainerListener(){ public void componentAdded(ContainerEvent arg0) { actionEnabler.stateChanged(null); } public void componentRemoved(ContainerEvent arg0) { actionEnabler.stateChanged(null); }}); this.handler.addPropertyChangeListener( "dataRate", new PropertyChangeListener() { public void propertyChange(PropertyChangeEvent evt) { double dataRate = ((Double) evt.getNewValue()).doubleValue(); statusBar.setDataRate(dataRate); } }); getSettingsManager().addSettingsListener(this); getSettingsManager().addSettingsListener(applicationPreferenceModel); getSettingsManager().addSettingsListener(getToolBarAndMenus()); getSettingsManager().loadSettings(); setVisible(true); removeSplash(); synchronized (initializationLock) { isGUIFullyInitialized = true; initializationLock.notifyAll(); } if (noReceiversDefined && applicationPreferenceModel.isShowNoReceiverWarning()) { showNoReceiversWarningPanel(); } Container container = tutorialFrame.getContentPane(); final JEditorPane tutorialArea = new JEditorPane(); tutorialArea.setBorder(BorderFactory.createEmptyBorder(0, 5, 0, 5)); tutorialArea.setEditable(false); container.setLayout(new BorderLayout()); try { tutorialArea.setPage(getWelcomePanel().getTutorialURL()); container.add(new JScrollPane(tutorialArea), BorderLayout.CENTER); } catch (Exception e) { LogLog.error("Error occurred loading the Tutorial", e); } tutorialFrame.setSize(new Dimension(640, 480)); final Action startTutorial = new AbstractAction( "Start Tutorial", new ImageIcon(ChainsawIcons.ICON_RESUME_RECEIVER)) { public void actionPerformed(ActionEvent e) { if ( JOptionPane.showConfirmDialog( null, "This will start 3 \"Generator\" receivers for use in the Tutorial. 
Is that ok?", "Confirm", JOptionPane.YES_NO_OPTION) == JOptionPane.YES_OPTION) { new Thread(new Tutorial()).start(); putValue("TutorialStarted", Boolean.TRUE); } else { putValue("TutorialStarted", Boolean.FALSE); } } }; final Action stopTutorial = new AbstractAction( "Stop Tutorial", new ImageIcon(ChainsawIcons.ICON_STOP_RECEIVER)) { public void actionPerformed(ActionEvent e) { if ( JOptionPane.showConfirmDialog( null, "This will stop all of the \"Generator\" receivers used in the Tutorial, but leave any other Receiver untouched. Is that ok?", "Confirm", JOptionPane.YES_NO_OPTION) == JOptionPane.YES_OPTION) { new Thread( new Runnable() { public void run() { List list = PluginRegistry.getPlugins( LogManager.getLoggerRepository(), Generator.class); for (Iterator iter = list.iterator(); iter.hasNext();) { Plugin plugin = (Plugin) iter.next(); PluginRegistry.stopPlugin(plugin); } } }).start(); setEnabled(false); startTutorial.putValue("TutorialStarted", Boolean.FALSE); } } }; stopTutorial.putValue( Action.SHORT_DESCRIPTION, "Removes all of the Tutorials Generator Receivers, leaving all other Receivers untouched"); startTutorial.putValue( Action.SHORT_DESCRIPTION, "Begins the Tutorial, starting up some Generator Receivers so you can see Chainsaw in action"); stopTutorial.setEnabled(false); final SmallToggleButton startButton = new SmallToggleButton(startTutorial); PropertyChangeListener pcl = new PropertyChangeListener() { public void propertyChange(PropertyChangeEvent evt) { stopTutorial.setEnabled( ((Boolean) startTutorial.getValue("TutorialStarted")) == Boolean.TRUE); startButton.setSelected(stopTutorial.isEnabled()); } }; startTutorial.addPropertyChangeListener(pcl); stopTutorial.addPropertyChangeListener(pcl); PluginRegistry.addPluginListener(new PluginListener(){ public void pluginStarted(PluginEvent e) { } public void pluginStopped(PluginEvent e) { List list = PluginRegistry.getPlugins(LogManager.getLoggerRepository(), Generator.class); if (list.size() == 0) { 
startTutorial.putValue("TutorialStarted", Boolean.FALSE); } }}); final SmallButton stopButton = new SmallButton(stopTutorial); final JToolBar tutorialToolbar = new JToolBar(); tutorialToolbar.setFloatable(false); tutorialToolbar.add(startButton); tutorialToolbar.add(stopButton); container.add(tutorialToolbar, BorderLayout.NORTH); tutorialArea.addHyperlinkListener( new HyperlinkListener() { public void hyperlinkUpdate(HyperlinkEvent e) { if (e.getEventType() == HyperlinkEvent.EventType.ACTIVATED) { if (e.getDescription().equals("StartTutorial")) { startTutorial.actionPerformed(null); } else if (e.getDescription().equals("StopTutorial")) { stopTutorial.actionPerformed(null); } else { try { tutorialArea.setPage(e.getURL()); } catch (IOException e1) { LogLog.error("Failed to change the URL for the Tutorial", e1); } } } } }); } /** * Displays a warning dialog about having no Receivers defined and allows the * user to choose some options for configuration */ private void showNoReceiversWarningPanel() { final NoReceiversWarningPanel noReceiversWarningPanel = new NoReceiversWarningPanel(); final SettingsListener sl = new SettingsListener() { public void loadSettings(LoadSettingsEvent event) { int size = event.asInt("SavedConfigs.Size"); Object[] configs = new Object[size]; for (int i = 0; i < size; i++) { configs[i] = event.getSetting("SavedConfigs." + i); } noReceiversWarningPanel.getModel().setRememberedConfigs(configs); } public void saveSettings(SaveSettingsEvent event) { Object[] configs = noReceiversWarningPanel.getModel().getRememberedConfigs(); event.saveSetting("SavedConfigs.Size", configs.length); for (int i = 0; i < configs.length; i++) { event.saveSetting("SavedConfigs." 
+ i, configs[i].toString()); } } }; /** * This listener sets up the NoReciversWarningPanel and loads saves the * configs/logfiles */ getSettingsManager().addSettingsListener(sl); getSettingsManager().configure(sl); SwingUtilities.invokeLater( new Runnable() { public void run() { final JDialog dialog = new JDialog(LogUI.this, true); dialog.setTitle("Warning: You have no Receivers defined..."); dialog.setDefaultCloseOperation(JDialog.DO_NOTHING_ON_CLOSE); dialog.setResizable(false); noReceiversWarningPanel.setOkActionListener( new ActionListener() { public void actionPerformed(ActionEvent e) { dialog.setVisible(false); } }); dialog.getContentPane().add(noReceiversWarningPanel); dialog.pack(); Dimension screenSize = Toolkit.getDefaultToolkit().getScreenSize(); dialog.setLocation( (screenSize.width / 2) - (dialog.getWidth() / 2), (screenSize.height / 2) - (dialog.getHeight() / 2)); dialog.show(); dialog.dispose(); applicationPreferenceModel.setShowNoReceiverWarning(!noReceiversWarningPanel.isDontWarnMeAgain()); if (noReceiversWarningPanel.getModel().isManualMode()) { toggleReceiversPanel(); } else if (noReceiversWarningPanel.getModel().isSimpleReceiverMode()) { int port = noReceiversWarningPanel.getModel().getSimplePort(); Class receiverClass = noReceiversWarningPanel.getModel().getSimpleReceiverClass(); try { Receiver simpleReceiver = (Receiver) receiverClass.newInstance(); simpleReceiver.setName("Simple Receiver"); Method portMethod = simpleReceiver.getClass().getMethod( "setPort", new Class[] { int.class }); portMethod.invoke( simpleReceiver, new Object[] { new Integer(port) }); simpleReceiver.setThreshold(Level.DEBUG); PluginRegistry.startPlugin(simpleReceiver); receiversPanel.updateReceiverTreeInDispatchThread(); } catch (Exception e) { LogLog.error("Error creating Receiver", e); getStatusBar().setMessage( "An error occurred creating your Receiver"); } } else if (noReceiversWarningPanel.getModel().isLoadConfig()) { final URL url = 
noReceiversWarningPanel.getModel().getConfigToLoad(); if (url != null) { LogLog.debug("Initialiazing Log4j with " + url.toExternalForm()); new Thread( new Runnable() { public void run() { try { OptionConverter.selectAndConfigure( url, null, LogManager.getLoggerRepository()); } catch (Exception e) { LogLog.error("Error initializing Log4j", e); } LogManager.getLoggerRepository().getRootLogger() .addAppender(handler); receiversPanel.updateReceiverTreeInDispatchThread(); } }).start(); } } } }); } /** * Exits the application, ensuring Settings are saved. * */ void exit() { // TODO Ask the user if they want to save the settings via a dialog. getSettingsManager().saveSettings(); shutdown(); } void addWelcomePanel() { getTabbedPane().addANewTab( "Welcome", welcomePanel, new ImageIcon(ChainsawIcons.ABOUT), "Welcome/Help"); } void removeWelcomePanel() { if (getTabbedPane().containsWelcomePanel()) { getTabbedPane().remove( getTabbedPane().getComponentAt(getTabbedPane().indexOfTab("Welcome"))); } } void toggleReceiversPanel() { SwingUtilities.invokeLater( new Runnable() { public void run() { receiversPanel.setVisible(!receiversPanel.isVisible()); receiversPanel.invalidate(); receiversPanel.validate(); getToolBarAndMenus().stateChange(); } }); } boolean isReceiverPanelVisible() { return receiversPanel.isVisible(); } /** * DOCUMENT ME! * * @return DOCUMENT ME! 
*/ public ChainsawStatusBar getStatusBar() { return statusBar; } void showApplicationPreferences() { applicationPreferenceModelPanel.updateModel(); preferencesFrame.show(); } void showAboutBox() { if (aboutBox == null) { aboutBox = new ChainsawAbout(this); } aboutBox.setVisible(true); } Map getPanels() { Map m = new HashMap(); Set panelSet = getPanelMap().entrySet(); Iterator iter = panelSet.iterator(); while (iter.hasNext()) { Map.Entry entry = (Map.Entry) iter.next(); m.put( entry.getKey(), new Boolean(((DockablePanel) entry.getValue()).isDocked())); } return m; } void displayPanel(String panelName, boolean display) { Object o = getPanelMap().get(panelName); if (o instanceof LogPanel) { LogPanel p = (LogPanel) o; int index = getTabbedPane().indexOfTab(panelName); if ((index == -1) && display) { getTabbedPane().addTab(panelName, p); } if ((index > -1) && !display) { getTabbedPane().removeTabAt(index); } } } /** * Shutsdown by ensuring the Appender gets a chance to close. */ private void shutdown() { JWindow progress = new JWindow(); final ProgressPanel panel = new ProgressPanel(1, 3, "Shutting down"); progress.getContentPane().add(panel); progress.pack(); Point p = new Point(getLocation()); p.move((int) getSize().getWidth() >> 1, (int) getSize().getHeight() >> 1); progress.setLocation(p); progress.setVisible(true); Runnable runnable = new Runnable() { public void run() { try { int progress = 1; final int delay = 25; handler.close(); panel.setProgress(progress++); Thread.sleep(delay); PluginRegistry.stopAllPlugins(); panel.setProgress(progress++); Thread.sleep(delay); panel.setProgress(progress++); Thread.sleep(delay); } catch (Exception e) { e.printStackTrace(); } fireShutdownEvent(); performShutdownAction(); } }; new Thread(runnable).start(); } /** * Ensures all the registered ShutdownListeners are notified. 
*/ private void fireShutdownEvent() { ShutdownListener[] listeners = (ShutdownListener[]) shutdownListenerList.getListeners( ShutdownListener.class); for (int i = 0; i < listeners.length; i++) { listeners[i].shuttingDown(); } } /** * Configures LogUI's with an action to execute when the user requests to * exit the application, the default action is to exit the VM. This Action is * called AFTER all the ShutdownListeners have been notified * * @param shutdownAction */ public final void setShutdownAction(Action shutdownAction) { this.shutdownAction = shutdownAction; } /** * Using the current thread, calls the registed Shutdown action's * actionPerformed(...) method. * */ private void performShutdownAction() { LogLog.debug("Calling the shutdown Action. Goodbye!"); shutdownAction.actionPerformed( new ActionEvent(this, ActionEvent.ACTION_PERFORMED, "Shutting Down")); } /** * Returns the currently selected LogPanel, if there is one, otherwise null * * @return */ LogPanel getCurrentLogPanel() { Component selectedTab = getTabbedPane().getSelectedComponent(); if (selectedTab instanceof LogPanel) { return (LogPanel) selectedTab; } else { // System.out.println(selectedTab); } return null; } void removeStatusBar() { SwingUtilities.invokeLater( new Runnable() { public void run() { getContentPane().remove(statusBar); getContentPane().validate(); getContentPane().repaint(); } }); } boolean isStatusBarVisible() { return getContentPane().isAncestorOf(statusBar); } void addStatusBar() { removeStatusBar(); SwingUtilities.invokeLater( new Runnable() { public void run() { getContentPane().add(statusBar, BorderLayout.SOUTH); getContentPane().validate(); getContentPane().repaint(); } }); } /** * DOCUMENT ME! * * @return DOCUMENT ME! 
*/ public String getActiveTabName() { int index = getTabbedPane().getSelectedIndex(); if (index == -1) { return null; } else { return getTabbedPane().getTitleAt(index); } } /** * Formats the individual elements of an LoggingEvent by ensuring that there * are no null bits, replacing them with EMPTY_STRING * * @param v * @return */ private Vector formatFields(Vector v) { for (int i = 0; i < v.size(); i++) { if (v.get(i) == null) { v.set(i, ChainsawConstants.EMPTY_STRING); } } return v; } /** * Modify the saved Look And Feel - does not update the currently used Look * And Feel * * @param lookAndFeelClassName * The FQN of the LookAndFeel */ public void setLookAndFeel(String lookAndFeelClassName) { this.lookAndFeelClassName = lookAndFeelClassName; JOptionPane.showMessageDialog( getContentPane(), "Restart application for the new Look and Feel to take effect.", "Look and Feel Updated", JOptionPane.INFORMATION_MESSAGE); } /** * Changes the currently used Look And Feel of the App * * @param lookAndFeelClassName * The FQN of the LookANdFeel */ private void applyLookAndFeel(String lookAndFeelClassName) { if ( UIManager.getLookAndFeel().getClass().getName().equals( lookAndFeelClassName)) { LogLog.debug("No need to change L&F, already the same"); return; } LogLog.debug("Setting L&F -> " + lookAndFeelClassName); try { UIManager.setLookAndFeel(lookAndFeelClassName); SwingUtilities.updateComponentTreeUI(this); SwingUtilities.updateComponentTreeUI(preferencesFrame); } catch (Exception e) { LogLog.error("Failed to change L&F", e); } } /** * Causes the Welcome Panel to become visible, and shows the URL specified as * it's contents * * @param url * for content to show */ void showHelp(URL url) { removeWelcomePanel(); addWelcomePanel(); // TODO ensure the Welcome Panel is the selected tab getWelcomePanel().setURL(url); } /** * DOCUMENT ME! * * @return */ private WelcomePanel getWelcomePanel() { return welcomePanel; } /** * DOCUMENT ME! 
* * @return */ public boolean isLogTreePanelVisible() { if (getCurrentLogPanel() == null) { return false; } return getCurrentLogPanel().isLogTreePanelVisible(); } /* * (non-Javadoc) * * @see org.apache.log4j.chainsaw.EventBatchListener#getInterestedIdentifier() */ /** * DOCUMENT ME! * * @return DOCUMENT ME! */ public String getInterestedIdentifier() { // this instance is interested in ALL event batches, as we determine how to // route things return null; } // public Map getEntryMap() { // return entryMap; // public Map getScrollMap() { // return scrollMap; /** * DOCUMENT ME! * * @return DOCUMENT ME! */ public Map getPanelMap() { return panelMap; } // public Map getLevelMap() { // return levelMap; /** * DOCUMENT ME! * * @return DOCUMENT ME! */ public SettingsManager getSettingsManager() { return sm; } /** * DOCUMENT ME! * * @return DOCUMENT ME! */ public List getFilterableColumns() { return filterableColumns; } /** * DOCUMENT ME! * * @param tbms * DOCUMENT ME! */ public void setToolBarAndMenus(ChainsawToolBarAndMenus tbms) { this.tbms = tbms; } /** * DOCUMENT ME! * * @return DOCUMENT ME! */ public ChainsawToolBarAndMenus getToolBarAndMenus() { return tbms; } /** * DOCUMENT ME! * * @return DOCUMENT ME! */ public Map getTableMap() { return tableMap; } /** * DOCUMENT ME! * * @return DOCUMENT ME! */ public Map getTableModelMap() { return tableModelMap; } /** * DOCUMENT ME! * * @param tabbedPane * DOCUMENT ME! */ public void setTabbedPane(ChainsawTabbedPane tabbedPane) { this.tabbedPane = tabbedPane; } /** * DOCUMENT ME! * * @return DOCUMENT ME! */ public ChainsawTabbedPane getTabbedPane() { return tabbedPane; } /** * DOCUMENT ME! 
*/ public void setupTutorial() { SwingUtilities.invokeLater( new Runnable() { public void run() { Dimension screen = Toolkit.getDefaultToolkit().getScreenSize(); setLocation(0, getLocation().y); double chainsawwidth = 0.7; double tutorialwidth = 1 - chainsawwidth; setSize((int) (screen.width * chainsawwidth), getSize().height); invalidate(); validate(); Dimension size = getSize(); Point loc = getLocation(); tutorialFrame.setSize( (int) (screen.width * tutorialwidth), size.height); tutorialFrame.setLocation(loc.x + size.width, loc.y); tutorialFrame.setVisible(true); } }); } /** * This class handles the recption of the Event batches and creates new * LogPanels if the identifier is not in use otherwise it ignores the event * batch. * * @author Paul Smith * <psmith@apache.org> * */ private class NewTabEventBatchReceiver implements EventBatchListener { /** * DOCUMENT ME! * * @param ident * DOCUMENT ME! * @param eventBatchEntrys * DOCUMENT ME! */ public void receiveEventBatch( final String ident, final List eventBatchEntrys) { if (eventBatchEntrys.size() == 0) { return; } EventContainer tableModel; JSortTable table; HashMap map = null; if (!isGUIFullyInitialized) { synchronized (initializationLock) { while (!isGUIFullyInitialized) { System.out.println( "Wanting to add a row, but GUI not initialized, waiting..."); /** * Lets wait 1 seconds and recheck. 
*/ try { initializationLock.wait(1000); } catch (InterruptedException e) { } } } } if (!getPanelMap().containsKey(ident)) { final String eventType = ((ChainsawEventBatchEntry) eventBatchEntrys.get(0)).getEventType(); final LogPanel thisPanel = new LogPanel(getStatusBar(), ident, eventType); thisPanel.addEventCountListener(new TabIconHandler(ident)); PropertyChangeListener toolbarMenuUpdateListener = new PropertyChangeListener() { public void propertyChange(PropertyChangeEvent evt) { tbms.stateChange(); } }; thisPanel.addPropertyChangeListener(toolbarMenuUpdateListener); thisPanel.getPreferenceModel().addPropertyChangeListener( toolbarMenuUpdateListener); thisPanel.addPropertyChangeListener( "docked", new PropertyChangeListener() { public void propertyChange(PropertyChangeEvent evt) { LogPanel logPanel = (LogPanel) evt.getSource(); if (logPanel.isDocked()) { getPanelMap().put(logPanel.getIdentifier(), logPanel); getTabbedPane().addANewTab( logPanel.getIdentifier(), logPanel, null); } else { getTabbedPane().remove(logPanel); } } }); getTabbedPane().add(ident, thisPanel); getPanelMap().put(ident, thisPanel); getSettingsManager().addSettingsListener(thisPanel); getSettingsManager().configure(thisPanel); /** * Let the new LogPanel receive this batch */ thisPanel.receiveEventBatch(ident, eventBatchEntrys); /** * Now add the panel as a batch listener so it can handle it's own * batchs */ handler.addEventBatchListener(thisPanel); SwingUtilities.invokeLater( new Runnable() { public void run() { getTabbedPane().addANewTab( ident, thisPanel, new ImageIcon( ChainsawIcons.ANIM_RADIO_TOWER)); } }); String msg = "added tab " + ident; LogLog.debug(msg); statusBar.setMessage(msg); } } /* * (non-Javadoc) * * @see org.apache.log4j.chainsaw.EventBatchListener#getInterestedIdentifier() */ /** * DOCUMENT ME! * * @return DOCUMENT ME! 
*/ public String getInterestedIdentifier() { // we are interested in all batches so we can detect new identifiers return null; } } class TabIconHandler implements EventCountListener { private final String ident; private int lastCount; private int currentCount; //the tabIconHandler is associated with a new tab, and a new tab always //has new events private boolean hasNewEvents = true; ImageIcon NEW_EVENTS = new ImageIcon(ChainsawIcons.ANIM_RADIO_TOWER); ImageIcon HAS_EVENTS = new ImageIcon(ChainsawIcons.INFO); public TabIconHandler(final String ident) { this.ident = ident; new Thread( new Runnable() { public void run() { while (true) { //if this tab is active, remove the icon if ( (getTabbedPane().getSelectedIndex() > -1) && (getTabbedPane().getSelectedIndex() == getTabbedPane() .indexOfTab( ident))) { getTabbedPane().setIconAt( getTabbedPane().indexOfTab(ident), null); //reset fields so no icon will display lastCount = currentCount; hasNewEvents = false; } else { //don't process undocked tabs if (getTabbedPane().indexOfTab(ident) > -1) { //if the tab is not active and the counts don't match, set the // new events icon if (lastCount != currentCount) { getTabbedPane().setIconAt( getTabbedPane().indexOfTab(ident), NEW_EVENTS); lastCount = currentCount; hasNewEvents = true; } else { if (hasNewEvents) { getTabbedPane().setIconAt( getTabbedPane().indexOfTab(ident), HAS_EVENTS); } } } } try { Thread.sleep(handler.getQueueInterval() + 1000); } catch (InterruptedException ie) { } } } }).start(); } /** * DOCUMENT ME! * * @param currentCount * DOCUMENT ME! * @param totalCount * DOCUMENT ME! */ public void eventCountChanged(int currentCount, int totalCount) { this.currentCount = currentCount; } } /** * @return Returns the applicationPreferenceModel. */ public final ApplicationPreferenceModel getApplicationPreferenceModel() { return applicationPreferenceModel; } }
package org.jsimpledb.annotation; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import org.jsimpledb.core.DeleteAction; /** * Java annotation for creating simple fields, including reference fields that refer to other Java model object types. * * <p> * This annotation is used in two scenarios: * <ul> * <li>To describe a <b>simple</b> database field by annotating the corresponding abstract Java bean property `getter' method</li> * <li>To describe the <b>sub-field(s)</b> of a <b>complex</b> database field (i.e., set, list, or map), * that is, the collection element or map key and value. In this case this annotation nests within the corresponding * {@link JListField &#64;JListField}, {@link JSetField &#64;JSetField}, or {@link JMapField &#64;JMapField} annotation.</li> * </ul> * * <p> * Note that this annotation can be applied to superclass and interface methods to have the corresponding * field defined in all sub-types. * </p> * * <p> * This annotation is not required when auto-generation of properties is enabled (unless you need to override * defaults); see {@link JSimpleClass#autogenFields}. * </p> * * <p><b>Non-Reference Fields</b></p> * * <p> * If the field is not a reference field, the property type is inferred from the type of the annotated method or, * in the case of complex sub-fields, the generic type of the collection class. The name of the property type * must be registered in the {@link org.jsimpledb.core.FieldTypeRegistry} (perhaps via {@link JFieldType &#64;JFieldType}), * and the corresponding {@link org.jsimpledb.core.FieldType} is then used to encode/decode field values. * The type name may also be specified explicitly by {@link #name}. * </p> * * <p> * Simple fields may be {@link #indexed}; see {@link org.jsimpledb.index} for information on querying indexes. 
* </p> * * <p><b>Reference Fields</b></p> * * <p> * If the type of the field is (assignable to) a {@link JSimpleClass &#64;JsimpleClass}-annotated Java model object type, * then the field is a reference field. * </p> * * <p> * Reference fields are always indexed; the value of {@link #indexed} is ignored. * </p> * * <p><b>Delete Cascades</b></p> * * <p> * Reference fields have configurable behavior when the referring object or the referred-to object is deleted; * see {@link #onDelete} and {@link #cascadeDelete}. * </p> * * <p><b>Uniqueness Constraints</b></p> * * <p> * Fields that are not complex sub-fields may be marked as {@link #unique} to impose a uniqueness constraint on the value. * Fields with uniqueness constraints must be indexed. Uniqueness constraints are handled at the JSimpleDB layer and function as * an implicit validation constraint. In other words, the constraint is verified when the validation queue is processed * and is affected by the transaction's configured {@link org.jsimpledb.ValidationMode}. * </p> * * <p> * Optionally, specific field values may be marked as excluded from the uniqueness constraint via {@link #uniqueExclude}. * If so, the specified values may appear in multiple objects without violating the constraint. Because null values * are not allowed in annotations, {@link #uniqueExcludeNull} is provided to exclude the null value. * </p> * * <p> * In {@link org.jsimpledb.ValidationMode#AUTOMATIC}, any upgraded {@link org.jsimpledb.JObject}s are automatically * added to the validation queue, so a uniqueness constraint added in a new schema version will be automatically verified * when any object is upgraded. * * <p> * Note however, that uniqueness constraints can be added or changed on a field without a schema version change. * Therefore, after such changes, pre-existing database objects that were previously valid could become suddenly invalid. * To avoid this possibililty, change the schema version number and update them manually. 
* </p> */ @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.METHOD) @Documented public @interface JField { /** * The name of this field. * * <p> * If equal to the empty string (default value), the name is inferred from the name of the annotated Java bean getter method. * </p> * * <p> * For sub-fields of complex fields, this property must be left unset. * </p> * * @return the name of the field */ String name() default ""; /** * Optional override for the type of this field. * * <p> * If set, this must equal the name of a type registered in the {@link org.jsimpledb.core.FieldTypeRegistry} * associated with the {@link org.jsimpledb.core.Database} instance, and the annotated method's return type must match the * {@link org.jsimpledb.core.FieldType}'s {@linkplain org.jsimpledb.core.FieldType#getTypeToken supported Java type}. * * <p> * If equal to the empty string (default value), then the Java type is inferred from the return type of the getter method * and the {@link org.jsimpledb.core.FieldType} is found via * {@link org.jsimpledb.core.FieldTypeRegistry#getFieldType(com.google.common.reflect.TypeToken) * FieldTypeRegistry.getFieldType()}. * </p> * * <p> * For reference fields (i.e., methods with return value equal to a {@link JSimpleClass &#64;JSimpleClass}-annotated class), * this property must be left unset. * </p> * * <p> * For sub-fields of complex fields, this property can be used to force a primitive sub-field type instead of a * primitive wrapper type. In that case, the complex field will disallow null values. For example: * <pre> * &#64;JSetField(element = &#64;JField(<b>type = "float"</b>)) // nulls will be disallowed * public abstract List&lt;<b>Float</b>&gt; getScores(); * </pre> * * @return the name of the field's type */ String type() default ""; /** * Storage ID for this field. Value should be positive and unique within the contained class. 
* If zero, the configured {@link org.jsimpledb.StorageIdGenerator} will be consulted to auto-generate a value * unless {@link JSimpleClass#autogenFields} is false (in which case an error occurs). * * @see org.jsimpledb.StorageIdGenerator#generateFieldStorageId StorageIdGenerator.generateFieldStorageId() * @see org.jsimpledb.StorageIdGenerator#generateSetElementStorageId StorageIdGenerator.generateSetElementStorageId() * @see org.jsimpledb.StorageIdGenerator#generateListElementStorageId StorageIdGenerator.generateListElementStorageId() * @see org.jsimpledb.StorageIdGenerator#generateMapKeyStorageId StorageIdGenerator.generateMapKeyStorageId() * @see org.jsimpledb.StorageIdGenerator#generateMapValueStorageId StorageIdGenerator.generateMapValueStorageId() * * @return the field's storage ID */ int storageId() default 0; /** * Whether this field is indexed or not. * * <p> * Setting this property to true creates a simple index on this field. To have this field participate in * a composite index on multiple fields, use {@link JSimpleClass#compositeIndexes}. * </p> * * <p> * Note: reference fields are always indexed (for reference fields, this property is ignored). * </p> * * @return whether the field is indexed */ boolean indexed() default false; /** * For reference fields, configure the behavior when the referred-to object is * {@linkplain org.jsimpledb.JObject#delete deleted}. * * <p> * For non-reference fields this property must be equal to its default value. * </p> * * @return desired behavior when a referenced object is deleted * @see #cascadeDelete * @see org.jsimpledb.JObject#delete */ DeleteAction onDelete() default DeleteAction.EXCEPTION; /** * For reference fields, configure cascading behavior when the referring object is * {@linkplain org.jsimpledb.JObject#delete deleted}. If set to true, the referred-to object * is automatically deleted as well. * * <p> * For non-reference fields this property must be equal to its default value. 
* </p> * * @return whether deletion should cascade to the referred-to object * @see #onDelete * @see org.jsimpledb.JObject#delete */ boolean cascadeDelete() default false; /** * Require this field's value to be unique among all database objects. * * <p> * When set, this causes this field's value to be checked for uniqueness any time normal validation is * performed on an object containing the field. * * <p> * More precisely, a uniqueness constraint behaves like a * validation constraint with {@code groups() = }<code>{ </code>{@link javax.validation.groups.Default}{@code .class, * }{@link org.jsimpledb.UniquenessConstraints}{@code .class}<code> }</code>. Therefore, uniqueness constraints * are included in default validation, but you can also validate <i>only</i> uniqueness constraints via * {@link org.jsimpledb.JObject#revalidate myobj.revalidate(UniquenessConstraints.class)}. * * <p> * This property must be false for sub-fields of complex fields. * * @return whether the field's value should be unique * @see #uniqueExclude * @see #uniqueExcludeNull * @see org.jsimpledb.UniquenessConstraints */ boolean unique() default false; /** * Specify field value(s) which are excluded from the uniqueness constraint. * * <p> * The specified values must be valid {@link String} encodings of the associated field. For example: * <pre> * &#64;JField(indexed = true, unique = true, uniqueExclude = { "Infinity", "-Infinity" }) * public abstract float getPriority(); * </pre> * * <p> * This property must be left empty when {@link #unique} is false. * </p> * * @return values to exclude from the uniqueness constraint * @see #unique * @see #uniqueExcludeNull */ String[] uniqueExclude() default {}; /** * Specify that the null value is excluded from the uniqueness constraint. * * <p> * This property must be left false when {@link #unique} is false or the field has primitive type. 
* </p> * * @return whether null should be excluded from the uniqueness constraint * @see #unique * @see #uniqueExclude */ boolean uniqueExcludeNull() default false; }
package org.jsimpledb.util; import com.google.common.base.Function; import java.io.File; import java.lang.reflect.Method; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.URL; import java.net.URLClassLoader; import java.net.UnknownHostException; import java.nio.charset.Charset; import java.util.ArrayDeque; import java.util.Arrays; import java.util.Comparator; import java.util.HashSet; import java.util.LinkedHashSet; import org.dellroad.stuff.main.MainClass; import org.dellroad.stuff.net.TCPNetwork; import org.jsimpledb.JSimpleDB; import org.jsimpledb.JSimpleDBFactory; import org.jsimpledb.ValidationMode; import org.jsimpledb.annotation.JFieldType; import org.jsimpledb.core.Database; import org.jsimpledb.core.FieldType; import org.jsimpledb.kv.KVDatabase; import org.jsimpledb.kv.array.ArrayKVDatabase; import org.jsimpledb.kv.array.AtomicArrayKVStore; import org.jsimpledb.kv.bdb.BerkeleyKVDatabase; import org.jsimpledb.kv.fdb.FoundationKVDatabase; import org.jsimpledb.kv.leveldb.LevelDBAtomicKVStore; import org.jsimpledb.kv.leveldb.LevelDBKVDatabase; import org.jsimpledb.kv.mvcc.AtomicKVDatabase; import org.jsimpledb.kv.mvcc.AtomicKVStore; import org.jsimpledb.kv.raft.RaftKVDatabase; import org.jsimpledb.kv.rocksdb.RocksDBAtomicKVStore; import org.jsimpledb.kv.rocksdb.RocksDBKVDatabase; import org.jsimpledb.kv.simple.SimpleKVDatabase; import org.jsimpledb.kv.simple.XMLKVDatabase; import org.jsimpledb.kv.sql.MySQLKVDatabase; import org.jsimpledb.schema.SchemaModel; import org.jsimpledb.spring.JSimpleDBClassScanner; import org.jsimpledb.spring.JSimpleDBFieldTypeScanner; import org.springframework.jdbc.datasource.DriverManagerDataSource; /** * Support superclass for main entry point classes. 
*/ public abstract class AbstractMain extends MainClass { private static final File DEMO_XML_FILE = new File("demo-database.xml"); private static final File DEMO_SUBDIR = new File("demo-classes"); private static final String MYSQL_DRIVER_CLASS_NAME = "com.mysql.jdbc.Driver"; // These are like an Enum<DBType> protected final MemoryDBType memoryDBType = new MemoryDBType(); protected final FoundationDBType foundationDBType = new FoundationDBType(); protected final BerkeleyDBType berkeleyDBType = new BerkeleyDBType(); protected final XMLDBType xmlDBType = new XMLDBType(); protected final LevelDBType levelDBType = new LevelDBType(); protected final RocksDBType rocksDBType = new RocksDBType(); protected final ArrayDBType arrayDBType = new ArrayDBType(); protected final MySQLDBType mySQLType = new MySQLDBType(); protected final RaftDBType raftDBType = new RaftDBType(); // FDB config protected String fdbClusterFile; protected byte[] fdbKeyPrefix; // BDB config protected File bdbDirectory; protected String bdbDatabaseName = BerkeleyKVDatabase.DEFAULT_DATABASE_NAME; // LevelDB config protected File leveldbDirectory; // RocksDB config protected File rocksdbDirectory; // ArrayKVDatabase config protected File arraydbDirectory; // Raft config protected AtomicKVStore raftKVStore; protected File raftDirectory; protected String raftIdentity; protected String raftAddress; protected int raftPort = RaftKVDatabase.DEFAULT_TCP_PORT; protected int raftMinElectionTimeout = -1; protected int raftMaxElectionTimeout = -1; protected int raftHeartbeatTimeout = -1; // XML config protected File xmlFile; // MySQL config protected String jdbcUrl; // Schema protected int schemaVersion; protected HashSet<Class<?>> schemaClasses; protected HashSet<Class<? 
extends FieldType<?>>> fieldTypeClasses; protected boolean allowNewSchema; protected HashSet<DBType<?>> dbTypes = new HashSet<>(); protected DBType<?> dbType; protected KVDatabase kvdb; protected String databaseDescription; // Misc protected boolean verbose; protected boolean readOnly; protected boolean allowAutoDemo = true; /** * Parse command line options. * * @param params command line parameters * @return -1 to proceed, otherwise process exit value */ public int parseOptions(ArrayDeque<String> params) { // Parse options final LinkedHashSet<String> modelPackages = new LinkedHashSet<>(); final LinkedHashSet<String> typePackages = new LinkedHashSet<>(); while (!params.isEmpty() && params.peekFirst().startsWith("-")) { final String option = params.removeFirst(); if (option.equals("-h") || option.equals("--help")) { this.usageMessage(); return 0; } else if (option.equals("-ro") || option.equals("--read-only")) this.readOnly = true; else if (option.equals("-cp") || option.equals("--classpath")) { if (params.isEmpty()) this.usageError(); if (!this.appendClasspath(params.removeFirst())) return 1; } else if (option.equals("--verbose")) this.verbose = true; else if (option.equals("-v") || option.equals("--schema-version")) { if (params.isEmpty()) this.usageError(); final String vstring = params.removeFirst(); try { this.schemaVersion = Integer.parseInt(vstring); if (this.schemaVersion < 0) throw new IllegalArgumentException("schema version is negative"); } catch (Exception e) { System.err.println(this.getName() + ": invalid schema version `" + vstring + "': " + e.getMessage()); return 1; } } else if (option.equals("--model-pkg")) { if (params.isEmpty()) this.usageError(); modelPackages.add(params.removeFirst()); } else if (option.equals("--type-pkg")) { if (params.isEmpty()) this.usageError(); typePackages.add(params.removeFirst()); } else if (option.equals("-p") || option.equals("--pkg")) { if (params.isEmpty()) this.usageError(); final String packageName = 
params.removeFirst(); modelPackages.add(packageName); typePackages.add(packageName); } else if (option.equals("--new-schema")) { this.allowNewSchema = true; this.allowAutoDemo = false; } else if (option.equals("--mem")) this.dbTypes.add(this.memoryDBType); else if (option.equals("--fdb-prefix")) { if (params.isEmpty()) this.usageError(); final String value = params.removeFirst(); try { this.fdbKeyPrefix = ByteUtil.parse(value); } catch (IllegalArgumentException e) { this.fdbKeyPrefix = value.getBytes(Charset.forName("UTF-8")); } if (this.fdbKeyPrefix.length > 0) this.allowAutoDemo = false; } else if (option.equals("--fdb")) { if (params.isEmpty()) this.usageError(); this.dbTypes.add(this.foundationDBType); this.fdbClusterFile = params.removeFirst(); if (!new File(this.fdbClusterFile).exists()) { System.err.println(this.getName() + ": file `" + this.fdbClusterFile + "' does not exist"); return 1; } } else if (option.equals("--xml")) { if (params.isEmpty()) this.usageError(); this.dbTypes.add(this.xmlDBType); this.xmlFile = new File(params.removeFirst()); } else if (option.equals("--bdb")) { if (params.isEmpty()) this.usageError(); this.dbTypes.add(this.berkeleyDBType); if (!this.createDirectory(this.bdbDirectory = new File(params.removeFirst()))) return 1; } else if (option.equals("--bdb-database")) { if (params.isEmpty()) this.usageError(); this.bdbDatabaseName = params.removeFirst(); } else if (option.equals("--mysql")) { if (params.isEmpty()) this.usageError(); this.jdbcUrl = params.removeFirst(); this.dbTypes.add(this.mySQLType); } else if (option.equals("--leveldb")) { if (params.isEmpty()) this.usageError(); this.dbTypes.add(this.levelDBType); if (!this.createDirectory(this.leveldbDirectory = new File(params.removeFirst()))) return 1; } else if (option.equals("--rocksdb")) { if (params.isEmpty()) this.usageError(); this.dbTypes.add(this.rocksDBType); if (!this.createDirectory(this.rocksdbDirectory = new File(params.removeFirst()))) return 1; } else if 
(option.equals("--arraydb")) { if (params.isEmpty()) this.usageError(); this.dbTypes.add(this.arrayDBType); if (!this.createDirectory(this.arraydbDirectory = new File(params.removeFirst()))) return 1; } else if (option.equals("--raft-dir")) { if (params.isEmpty()) this.usageError(); this.dbTypes.add(this.raftDBType); if (!this.createDirectory(this.raftDirectory = new File(params.removeFirst()))) return 1; } else if (option.matches("--raft-((min|max)-election|heartbeat)-timeout")) { if (params.isEmpty()) this.usageError(); final String tstring = params.removeFirst(); final int timeout; try { timeout = Integer.parseInt(tstring); } catch (Exception e) { System.err.println(this.getName() + ": timeout value `" + tstring + "': " + e.getMessage()); return 1; } if (option.equals("--raft-min-election-timeout")) this.raftMinElectionTimeout = timeout; else if (option.equals("--raft-max-election-timeout")) this.raftMaxElectionTimeout = timeout; else if (option.equals("--raft-heartbeat-timeout")) this.raftHeartbeatTimeout = timeout; else throw new RuntimeException("internal error"); } else if (option.equals("--raft-identity")) { if (params.isEmpty()) this.usageError(); this.dbTypes.add(this.raftDBType); this.raftIdentity = params.removeFirst(); } else if (option.equals("--raft-address")) { if (params.isEmpty()) this.usageError(); final String address = params.removeFirst(); this.raftAddress = TCPNetwork.parseAddressPart(address); this.raftPort = TCPNetwork.parsePortPart(address, this.raftPort); } else if (option.equals("--raft-port")) { if (params.isEmpty()) this.usageError(); final String portString = params.removeFirst(); if ((this.raftPort = TCPNetwork.parsePortPart("x:" + portString, -1)) == -1) { System.err.println(this.getName() + ": invalid TCP port `" + portString + "'"); return 1; } } else if (option.equals(" break; else if (!this.parseOption(option, params)) { System.err.println(this.getName() + ": unknown option `" + option + "'"); this.usageError(); return 1; } } // 
Additional logic post-processing of options if (!modelPackages.isEmpty() || !typePackages.isEmpty()) this.allowAutoDemo = false; // Check database choice(s) switch (this.dbTypes.size()) { case 0: if (this.allowAutoDemo && DEMO_XML_FILE.exists() && DEMO_SUBDIR.exists()) this.configureDemoMode(); else { System.err.println(this.getName() + ": no key/value store specified; use one of `--mysql', etc."); this.usageError(); return 1; } break; case 1: if (this.dbTypes.contains(this.raftDBType)) { System.err.println(this.getName() + ": Raft requires a local peristent store; use one of `--mysql', etc."); this.usageError(); return 1; } break; default: if (this.dbTypes.size() > 2 || !this.dbTypes.contains(this.raftDBType)) { System.err.println(this.getName() + ": multiple key/value stores configured; choose only one"); this.usageError(); return 1; } break; } // Scan for model and type classes final LinkedHashSet<String> emptyPackages = new LinkedHashSet<>(); emptyPackages.addAll(modelPackages); emptyPackages.addAll(typePackages); for (String packageName : modelPackages) { if (this.scanModelClasses(packageName) > 0) emptyPackages.remove(packageName); } for (String packageName : typePackages) { if (this.scanTypeClasses(packageName) > 0) emptyPackages.remove(packageName); } // Warn if we didn't find anything for (String packageName : emptyPackages) { final boolean isModel = modelPackages.contains(packageName); final boolean isType = typePackages.contains(packageName); if (isModel && isType) this.log.warn("no Java model or custom FieldType classes found under package `" + packageName + "'"); else if (isModel) this.log.warn("no Java model classes found under package `" + packageName + "'"); else this.log.warn("no custom FieldType classes found under package `" + packageName + "'"); } // Done return -1; } public int getSchemaVersion() { return this.schemaVersion; } public boolean isAllowNewSchema() { return this.allowNewSchema; } public boolean isReadOnly() { return this.readOnly; } 
// --- accessors, scan/classpath helpers, KV database lifecycle, and per-store DBType implementations ---

    /**
     * Get a one-line description of the configured key/value database.
     *
     * @return database description; set by {@code startupKVDatabase()}, null before that
     */
    public String getDatabaseDescription() {
        return this.databaseDescription;
    }

    /**
     * Build a {@link JSimpleDBFactory} preconfigured from this instance's scanned model
     * classes and schema version.
     *
     * @param db underlying core database
     * @return configured factory
     */
    public JSimpleDBFactory getJSimpleDBFactory(Database db) {
        return new JSimpleDBFactory()
          .setModelClasses(this.schemaClasses)
          .setSchemaVersion(this.schemaVersion)
          .setDatabase(db);
    }

    /**
     * Subclass hook to parse unrecognized command line options.
     *
     * @param option command line option (starting with `-')
     * @param params subsequent command line parameters
     * @return true if successful, false otherwise
     */
    protected boolean parseOption(String option, ArrayDeque<String> params) {
        return false;
    }

    // Fall back to the bundled demo XML database and demo model classes when no store was chosen.
    protected void configureDemoMode() {

        // Configure database
        System.err.println(this.getName() + ": auto-configuring use of demo database `" + DEMO_XML_FILE + "'");
        this.dbTypes.add(this.xmlDBType);
        this.xmlFile = DEMO_XML_FILE;

        // Add demo subdirectory to class path
        this.appendClasspath(DEMO_SUBDIR.toString());

        // Scan classes
        this.scanModelClasses("org.jsimpledb.demo");
    }

    /**
     * Append path(s) to the classpath.
     *
     * <p>
     * NOTE(review): reflectively invokes {@code URLClassLoader.addURL()} on the system class
     * loader; this presumes the system class loader is a {@code URLClassLoader} (true on JDK 8
     * and earlier) — confirm the target JDK before relying on this.
     *
     * @param path classpath path component
     * @return true if successful, false if an error occured
     */
    protected boolean appendClasspath(String path) {
        this.log.trace("adding classpath `" + path + "' to system classpath");
        try {

            // Get URLClassLoader.addURL() method and make accessible
            final Method addURLMethod = URLClassLoader.class.getDeclaredMethod("addURL", URL.class);
            addURLMethod.setAccessible(true);

            // Split path and add components
            for (String file : path.split(System.getProperty("path.separator", ":"))) {
                if (file.length() == 0)
                    continue;
                addURLMethod.invoke(ClassLoader.getSystemClassLoader(), new Object[] { new File(file).toURI().toURL() });
                this.log.trace("added path component `" + file + "' to system classpath");
            }
            return true;
        } catch (Exception e) {
            this.log.error("can't append `" + path + " to classpath: " + e, e);
            return false;
        }
    }

    // Scan for @JSimpleClass model classes under the given package name(s); returns # found.
    private int scanModelClasses(String pkgname) {
        if (this.schemaClasses == null)
            this.schemaClasses = new HashSet<>();
        int count = 0;
        for (String className : new JSimpleDBClassScanner().scanForClasses(pkgname.split("[\\s,]"))) {
            this.log.debug("loading Java model class " + className);
            this.schemaClasses.add(this.loadClass(className));
            count++;
        }
        return count;
    }

    // Scan for custom FieldType classes under the given package name(s); returns # found.
    private int scanTypeClasses(String pkgname) {

        // Check types of annotated classes as we scan them
        final Function<Class<?>, Class<? extends FieldType<?>>> checkFunction
          = new Function<Class<?>, Class<? extends FieldType<?>>>() {
            @Override
            @SuppressWarnings("unchecked")
            public Class<? extends FieldType<?>> apply(Class<?> type) {
                try {
                    return (Class<? extends FieldType<?>>)type.asSubclass(FieldType.class);
                } catch (ClassCastException e) {
                    throw new IllegalArgumentException("invalid @" + JFieldType.class.getSimpleName()
                      + " annotation on " + type + ": type is not a subclass of " + FieldType.class);
                }
            }
        };

        // Scan classes
        if (this.fieldTypeClasses == null)
            this.fieldTypeClasses = new HashSet<>();
        int count = 0;
        for (String className : new JSimpleDBFieldTypeScanner().scanForClasses(pkgname.split("[\\s,]"))) {
            this.log.debug("loading custom FieldType class " + className);
            this.fieldTypeClasses.add(checkFunction.apply(this.loadClass(className)));
            count++;
        }
        return count;
    }

    // Create the directory (and parents) if needed; report and return false on any failure.
    private boolean createDirectory(File dir) {
        if (!dir.exists() && !dir.mkdirs()) {
            System.err.println(this.getName() + ": could not create directory `" + dir + "'");
            return false;
        }
        if (!dir.isDirectory()) {
            System.err.println(this.getName() + ": file `" + dir + "' is not a directory");
            return false;
        }
        return true;
    }

    /**
     * Load a class.
     *
     * @param className class name
     * @return class with name {@code className}
     * @throws RuntimeException if load fails
     */
    protected Class<?> loadClass(String className) {
        try {
            return Class.forName(className, false, Thread.currentThread().getContextClassLoader());
        } catch (ClassNotFoundException e) {
            throw new RuntimeException("failed to load class `" + className + "'", e);
        }
    }

    /**
     * Start the {@link Database} based on the configured {@link KVDatabase} and {@link #fieldTypeClasses} and return it.
     *
     * @return initialized database
     */
    protected Database startupKVDatabase() {

        // Raft requires a separate AtomicKVStore to be configured first
        final boolean raft = this.dbTypes.remove(this.raftDBType);
        this.dbType = this.dbTypes.iterator().next();
        if (raft) {
            this.raftKVStore = dbType.createAtomicKVStore();
            this.dbType = this.raftDBType;
        }

        // Create and start up database
        this.kvdb = this.dbType.createKVDatabase();
        this.databaseDescription = this.dbType.getDescription();
        AbstractMain.startKVDatabase(this.dbType, this.kvdb);
        this.log.debug("using database: " + this.databaseDescription);

        // Construct core Database
        final Database db = new Database(this.kvdb);

        // Register custom field types
        if (this.fieldTypeClasses != null)
            db.getFieldTypeRegistry().addClasses(this.fieldTypeClasses);

        // Done
        return db;
    }

    // This method exists solely to bind the generic type parameters
    private static <T extends KVDatabase> void startKVDatabase(DBType<T> dbType, KVDatabase kvdb) {
        dbType.startKVDatabase(dbType.cast(kvdb));
    }

    /**
     * Perform a test transaction.
     *
     * @param db database
     * @param schemaModel schema model
     */
    protected void performTestTransaction(final Database db, final SchemaModel schemaModel) {
        this.performTestTransaction(new Runnable() {
            @Override
            public void run() {
                db.createTransaction(schemaModel, AbstractMain.this.schemaVersion, AbstractMain.this.allowNewSchema).commit();
            }
        });
    }

    /**
     * Perform a test transaction.
     *
     * @param jdb database
     */
    protected void performTestTransaction(final JSimpleDB jdb) {
        this.performTestTransaction(new Runnable() {
            @Override
            public void run() {
                jdb.createTransaction(AbstractMain.this.allowNewSchema, ValidationMode.AUTOMATIC).commit();
            }
        });
    }

    // Run the test transaction; failures are logged as warnings, never propagated.
    private void performTestTransaction(Runnable test) {
        try {
            test.run();
        } catch (Exception e) {
            this.log.warn("test transaction failed: " + (e.getMessage() != null ? e.getMessage() : e), e);
        }
    }

    /**
     * Shutdown the {@link KVDatabase}.
     */
    protected void shutdownKVDatabase() {
        AbstractMain.stopKVDatabase(this.dbType, this.kvdb);
    }

    // This method exists solely to bind the generic type parameters
    private static <T extends KVDatabase> void stopKVDatabase(DBType<T> dbType, KVDatabase kvdb) {
        dbType.stopKVDatabase(dbType.cast(kvdb));
    }

    // Program name shown in error/usage messages.
    protected abstract String getName();

    /**
     * Output usage message flag listing.
     *
     * @param subclassOpts array containing flag and description pairs
     */
    protected void outputFlags(String[][] subclassOpts) {
        final String[][] baseOpts = new String[][] {
            { "--arraydb directory",            "Use ArrayKVDatabase in specified directory" },
            { "--classpath, -cp path",          "Append to the classpath (useful with `java -jar ...')" },
            { "--fdb file",                     "Use FoundationDB with specified cluster file" },
            { "--fdb-prefix prefix",            "FoundationDB key prefix (hex or string)" },
            { "--bdb directory",                "Use Berkeley DB Java Edition in specified directory" },
            { "--bdb-database",                 "Specify Berkeley DB database name (default `" + BerkeleyKVDatabase.DEFAULT_DATABASE_NAME + "')" },
            { "--leveldb directory",            "Use LevelDB in specified directory" },
            { "--mem",                          "Use an empty in-memory database (default)" },
            { "--mysql URL",                    "Use MySQL with the given JDBC URL" },
            { "--raft-dir directory",           "Raft local persistence directory" },
            { "--raft-min-election-timeout",    "Raft minimum election timeout in ms (default " + RaftKVDatabase.DEFAULT_MIN_ELECTION_TIMEOUT + ")" },
            { "--raft-max-election-timeout",    "Raft maximum election timeout in ms (default " + RaftKVDatabase.DEFAULT_MAX_ELECTION_TIMEOUT + ")" },
            { "--raft-heartbeat-timeout",       "Raft leader heartbeat timeout in ms (default " + RaftKVDatabase.DEFAULT_HEARTBEAT_TIMEOUT + ")" },
            { "--raft-identity",                "Raft identity" },
            { "--raft-address address",         "Specify local Raft node's IP address" },
            { "--raft-port",                    "Specify local Raft node's TCP port (default " + RaftKVDatabase.DEFAULT_TCP_PORT + ")" },
            { "--read-only, -ro",               "Disallow database modifications" },
            { "--rocksdb directory",            "Use RocksDB in specified directory" },
            { "--new-schema",                   "Allow recording of a new database schema version" },
            { "--xml file",                     "Use the specified XML flat file database" },
            { "--schema-version, -v num",       "Specify database schema version (default highest recorded)" },
            { "--model-pkg package",            "Scan for @JSimpleClass model classes under Java package (=> JSimpleDB mode)" },
            { "--type-pkg package",             "Scan for @JFieldType types under Java package to register custom types" },
            { "--pkg, -p package",              "Equivalent to `--model-pkg package --type-pkg package'" },
            { "--help, -h",                     "Show this help message" },
            { "--verbose",                      "Show verbose error messages" },
        };
        // Merge subclass flags with the base flags and sort the combined list alphabetically
        final String[][] combinedOpts = new String[baseOpts.length + subclassOpts.length][];
        System.arraycopy(baseOpts, 0, combinedOpts, 0, baseOpts.length);
        System.arraycopy(subclassOpts, 0, combinedOpts, baseOpts.length, subclassOpts.length);
        Arrays.sort(combinedOpts, new Comparator<String[]>() {
            @Override
            public int compare(String[] opt1, String[] opt2) {
                return opt1[0].compareTo(opt2[0]);
            }
        });
        // Column-align the descriptions on the widest flag
        int width = 0;
        for (String[] opt : combinedOpts)
            width = Math.max(width, opt[0].length());
        for (String[] opt : combinedOpts)
            System.err.println(String.format(" %-" + width + "s %s", opt[0], opt[1]));
    }

// DBType

    // One instance per supported key/value store; binds KVDatabase subtype T for safe casts.
    protected abstract class DBType<T extends KVDatabase> {

        private final Class<T> type;

        protected DBType(Class<T> type) {
            this.type = type;
        }

        public T cast(KVDatabase db) {
            return this.type.cast(db);
        }

        // Default: wrap this store's KVDatabase as an AtomicKVStore (used as the Raft backing store)
        public AtomicKVStore createAtomicKVStore() {
            return new AtomicKVDatabase(this.createKVDatabase());
        }

        public abstract T createKVDatabase();

        public void startKVDatabase(T db) {
            db.start();
        }

        public void stopKVDatabase(T db) {
            db.stop();
        }

        public abstract String getDescription();
    }

    protected final class MemoryDBType extends DBType<SimpleKVDatabase> {

        private MemoryDBType() {
            super(SimpleKVDatabase.class);
        }

        @Override
        public SimpleKVDatabase createKVDatabase() {
            return new SimpleKVDatabase();
        }

        @Override
        public String getDescription() {
            return "In-Memory Database";
        }
    }

    protected final class FoundationDBType extends DBType<FoundationKVDatabase> {

        private FoundationDBType() {
            super(FoundationKVDatabase.class);
        }

        @Override
        public FoundationKVDatabase createKVDatabase() {
            final FoundationKVDatabase fdb = new FoundationKVDatabase();
            fdb.setClusterFilePath(AbstractMain.this.fdbClusterFile);
            fdb.setKeyPrefix(AbstractMain.this.fdbKeyPrefix);
            return fdb;
        }

        @Override
        public String getDescription() {
            String desc = "FoundationDB " + new File(AbstractMain.this.fdbClusterFile).getName();
            if (AbstractMain.this.fdbKeyPrefix != null)
                desc += " [0x" + ByteUtil.toString(AbstractMain.this.fdbKeyPrefix) + "]";
            return desc;
        }
    }

    protected final class BerkeleyDBType extends DBType<BerkeleyKVDatabase> {

        private BerkeleyDBType() {
            super(BerkeleyKVDatabase.class);
        }

        @Override
        public BerkeleyKVDatabase createKVDatabase() {
            final BerkeleyKVDatabase bdb = new BerkeleyKVDatabase();
            bdb.setDirectory(AbstractMain.this.bdbDirectory);
            bdb.setDatabaseName(AbstractMain.this.bdbDatabaseName);
//            if (AbstractMain.this.readOnly)
//                bdb.setDatabaseConfig(bdb.getDatabaseConfig().setReadOnly(true));
            return bdb;
        }

        @Override
        public String getDescription() {
            return "BerkeleyDB " + AbstractMain.this.bdbDirectory.getName();
        }
    }

    protected final class XMLDBType extends DBType<XMLKVDatabase> {

        private XMLDBType() {
            super(XMLKVDatabase.class);
        }

        @Override
        public XMLKVDatabase createKVDatabase() {
            return new XMLKVDatabase(AbstractMain.this.xmlFile);
        }

        @Override
        public String getDescription() {
            return "XML DB " + AbstractMain.this.xmlFile.getName();
        }
    }

    protected final class LevelDBType extends DBType<LevelDBKVDatabase> {

        private LevelDBType() {
            super(LevelDBKVDatabase.class);
        }

        @Override
        public LevelDBKVDatabase createKVDatabase() {
            final LevelDBKVDatabase leveldb = new LevelDBKVDatabase();
            leveldb.setKVStore(this.createAtomicKVStore());
            return leveldb;
        }

        @Override
        public LevelDBAtomicKVStore createAtomicKVStore() {
            final LevelDBAtomicKVStore kvstore = new LevelDBAtomicKVStore();
            kvstore.setDirectory(AbstractMain.this.leveldbDirectory);
            kvstore.setCreateIfMissing(true);
            return kvstore;
        }

        @Override
        public String getDescription() {
            return "LevelDB " + AbstractMain.this.leveldbDirectory.getName();
        }
    }

    protected final class RocksDBType extends DBType<RocksDBKVDatabase> {

        private RocksDBType() {
            super(RocksDBKVDatabase.class);
        }

        @Override
        public RocksDBKVDatabase createKVDatabase() {
            final RocksDBKVDatabase rocksdb = new RocksDBKVDatabase();
            rocksdb.setKVStore(this.createAtomicKVStore());
            return rocksdb;
        }

        @Override
        public RocksDBAtomicKVStore createAtomicKVStore() {
            final RocksDBAtomicKVStore kvstore = new RocksDBAtomicKVStore();
            kvstore.setDirectory(AbstractMain.this.rocksdbDirectory);
            return kvstore;
        }

        @Override
        public String getDescription() {
            return "RocksDB " + AbstractMain.this.rocksdbDirectory.getName();
        }
    }

    protected final class ArrayDBType extends DBType<ArrayKVDatabase> {

        private ArrayDBType() {
            super(ArrayKVDatabase.class);
        }

        @Override
        public ArrayKVDatabase createKVDatabase() {
            final ArrayKVDatabase arraydb = new ArrayKVDatabase();
            arraydb.setKVStore(this.createAtomicKVStore());
            return arraydb;
        }

        @Override
        public AtomicArrayKVStore createAtomicKVStore() {
            final AtomicArrayKVStore kvstore = new AtomicArrayKVStore();
            kvstore.setDirectory(AbstractMain.this.arraydbDirectory);
            return kvstore;
        }

        @Override
        public String getDescription() {
            return "ArrayDB " + AbstractMain.this.arraydbDirectory.getName();
        }
    }

    protected final class MySQLDBType extends DBType<MySQLKVDatabase> {

        private MySQLDBType() {
            super(MySQLKVDatabase.class);
        }

        @Override
        public MySQLKVDatabase createKVDatabase() {
            // Force-load the JDBC driver so DriverManager can resolve the URL
            try {
                Class.forName(MYSQL_DRIVER_CLASS_NAME);
            } catch (RuntimeException e) {
                throw e;
            } catch (Exception e) {
                throw new RuntimeException("can't load MySQL driver class `" + MYSQL_DRIVER_CLASS_NAME + "'", e);
            }
            final MySQLKVDatabase mysql = new MySQLKVDatabase();
            mysql.setDataSource(new DriverManagerDataSource(AbstractMain.this.jdbcUrl));
            return mysql;
        }

        @Override
        public String getDescription() {
            return "MySQL";
        }
    }

    protected final class RaftDBType extends DBType<RaftKVDatabase> {

        private RaftDBType() {
            super(RaftKVDatabase.class);
        }

        @Override
        public RaftKVDatabase createKVDatabase() {

            // Setup network
            final TCPNetwork network = new TCPNetwork(RaftKVDatabase.DEFAULT_TCP_PORT);
            try {
                network.setListenAddress(AbstractMain.this.raftAddress != null ?
                  new InetSocketAddress(InetAddress.getByName(AbstractMain.this.raftAddress), AbstractMain.this.raftPort) :
                  new InetSocketAddress(AbstractMain.this.raftPort));
            } catch (UnknownHostException e) {
                throw new RuntimeException("can't resolve address `" + AbstractMain.this.raftAddress + "'", e);
            }

            // Set up Raft DB
            final RaftKVDatabase raft = new RaftKVDatabase();
            raft.setLogDirectory(AbstractMain.this.raftDirectory);
            raft.setKVStore(AbstractMain.this.raftKVStore);
            raft.setNetwork(network);
            raft.setIdentity(AbstractMain.this.raftIdentity);
            if (AbstractMain.this.raftMinElectionTimeout != -1)
                raft.setMinElectionTimeout(AbstractMain.this.raftMinElectionTimeout);
            if (AbstractMain.this.raftMaxElectionTimeout != -1)
                raft.setMaxElectionTimeout(AbstractMain.this.raftMaxElectionTimeout);
            if (AbstractMain.this.raftHeartbeatTimeout != -1)
                raft.setHeartbeatTimeout(AbstractMain.this.raftHeartbeatTimeout);

            // Done
            return raft;
        }

        @Override
        public String getDescription() {
            return "Raft " + (AbstractMain.this.raftDirectory != null ? AbstractMain.this.raftDirectory.getName() : "?");
        }
    }
}
package org.ligi.axt.helpers;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.charset.Charset;

/**
 * Convenience helper wrapping a {@link File} with simple load/save/delete operations.
 */
public class FileHelper {

    private final File file;

    public FileHelper(File file) {
        this.file = file;
    }

    /**
     * reads a file to a string
     *
     * @return the content of the file
     * @throws IOException
     */
    public String loadToString() throws IOException {
        return loadToString(Charset.defaultCharset());
    }

    /**
     * Reads the file to a string using the given charset.
     *
     * @param charset charset used to decode the file bytes
     * @return the content of the file
     * @throws IOException if the file cannot be opened or mapped
     */
    public String loadToString(Charset charset) throws IOException {
        // try-with-resources so the stream and channel are closed even when map() or size() throws
        // (the original closed them only on the success path, leaking the descriptor on error)
        try (FileInputStream stream = new FileInputStream(file);
             FileChannel fc = stream.getChannel()) {
            // The mapping stays valid after the channel is closed, matching the original behavior.
            MappedByteBuffer bb = fc.map(FileChannel.MapMode.READ_ONLY, 0, fc.size());
            return charset.decode(bb).toString();
        }
    }

    /**
     * Writes the given string to the file, replacing any previous content.
     *
     * @param string content to write
     * @return true on success, false if any I/O error occurred
     */
    public boolean writeString(String string) {
        // try-with-resources: the original leaked the FileWriter when write() threw
        try (FileWriter fileWriter = new FileWriter(file)) {
            fileWriter.write(string);
            return true;
        } catch (IOException e) {
            return false;
        }
    }

    /**
     * Recursively deletes the wrapped file's directory tree.
     *
     * @return true if the directory (and its contents) was deleted
     */
    public boolean deleteRecursive() {
        return deleteRecursive(file);
    }

    /**
     * Recursively deletes the given directory tree.
     *
     * <p>
     * NOTE(review): as in the original, a non-directory argument is NOT deleted and
     * false is returned; individual child deletions are best-effort.
     *
     * @param file2delete directory to delete
     * @return true if {@code file2delete} was ultimately deleted
     */
    public boolean deleteRecursive(File file2delete) {
        if (!file2delete.isDirectory()) {
            return false;
        }
        final String[] children = file2delete.list();
        if (children != null) { // list() returns null on I/O error; original would have thrown NPE
            for (String child : children) {
                File temp = new File(file2delete, child);
                if (temp.isDirectory()) {
                    deleteRecursive(temp);
                } else {
                    temp.delete();
                }
            }
        }
        return file2delete.delete();
    }
}
package org.xins.server; import org.apache.log4j.Logger; import org.xins.util.MandatoryArgumentChecker; import org.xins.util.text.FastStringBuffer; /** * Base class for function implementation classes. * * @version $Revision$ $Date$ * @author Ernst de Haan (<a href="mailto:znerd@FreeBSD.org">znerd@FreeBSD.org</a>) */ public abstract class Function extends Object implements DefaultResultCodes { // Class fields // Class functions /** * Checks if the specified value is <code>null</code> or an empty string. * Only if it is then <code>true</code> is returned. * * @param value * the value to check. * * @return * <code>true</code> if and only if <code>value == null || * value.length() == 0</code>. */ protected static final boolean isMissing(String value) { return value == null || value.length() == 0; } // Constructors protected Function(API api, String name, String version) throws IllegalArgumentException { // Check argument MandatoryArgumentChecker.check("api", api, "name", name, "version", version); _log = Logger.getLogger(getClass().getName()); _api = api; _name = name; _version = version; _api.functionAdded(this); } // Fields /** * The logger used by this function. This field is initialized by the * constructor and set to a non-<code>null</code> value. */ private final Logger _log; /** * The API implementation this function is part of. */ private final API _api; /** * The name of this function. */ private final String _name; /** * The version of the specification this function implements. */ private final String _version; /** * Lock object for <code>_callCount</code>. */ private final Object _callCountLock = new Object(); /** * The total number of calls executed up until now. */ private int _callCount; /** * Statistics object linked to this function. */ private final Statistics _statistics = new Statistics(); /** * Lock object for a successful call. */ private final Object _successfulCallLock = new Object(); /** * Lock object for an unsuccessful call. 
*/ private final Object _unsuccessfulCallLock = new Object(); /** * Buffer for log messages for successful calls. This field is * initialized at construction time and cannot be <code>null</code>. */ private final FastStringBuffer _successfulCallStringBuffer = new FastStringBuffer(256); /** * Buffer for log messages for unsuccessful calls. This field is * initialized at construction time and cannot be <code>null</code>. */ private final FastStringBuffer _unsuccessfulCallStringBuffer = new FastStringBuffer(256); /** * The number of successful calls executed up until now. */ private int _successfulCalls; /** * The number of unsuccessful calls executed up until now. */ private int _unsuccessfulCalls; /** * The start time of the most recent successful call. */ private long _lastSuccessfulStart; /** * The start time of the most recent unsuccessful call. */ private long _lastUnsuccessfulStart; /** * The duration of the most recent successful call. */ private long _lastSuccessfulDuration; /** * The duration of the most recent unsuccessful call. */ private long _lastUnsuccessfulDuration; /** * The total duration of all successful calls up until now. */ private long _successfulDuration; /** * The total duration of all unsuccessful calls up until now. */ private long _unsuccessfulDuration; /** * The minimum time a successful call took. */ private long _successfulMin = Long.MAX_VALUE; /** * The minimum time an unsuccessful call took. */ private long _unsuccessfulMin = Long.MAX_VALUE; /** * The maximum time a successful call took. */ private long _successfulMax; /** * The maximum time an unsuccessful call took. */ private long _unsuccessfulMax; // Methods /** * Returns the logger associated with this function. * * @return * the associated logger, constant, and cannot be <code>null</code>. */ final Logger getLogger() { return _log; } /** * Returns the name of this function. * * @return * the name, not <code>null</code>. 
*/ final String getName() { return _name; } /** * Returns the specification version for this function. * * @return * the version, not <code>null</code>. */ final String getVersion() { return _version; } /** * Returns the call statistics for this function. * * @return * the statistics, never <code>null</code>. */ final Statistics getStatistics() { return _statistics; } /** * Assigns a new call ID for the caller. Every call to this method will * return an increasing number. * * @return * the assigned call ID, &gt;= 0. */ final int assignCallID() { synchronized (_callCountLock) { return _callCount++; } } /** * Handles a call to this function. * * @param context * the context for this call, never <code>null</code>. * * @throws Throwable * if anything goes wrong. */ protected abstract void handleCall(CallContext context) throws Throwable; /** * Callback method that may be called after a call to this function. This * method will store statistics-related information. * * <p />This method does not <em>have</em> to be called. If statistics * gathering is disabled, then this method should not be called. * * @param context * the used call context, not <code>null</code>. * * @param success * indication if the call was successful. * * @param code * the function result code, or <code>null</code>. */ final void performedCall(CallContext context, boolean success, String code) { long start = context.getStart(); long duration = System.currentTimeMillis() - start; boolean debugEnabled = context.isDebugEnabled(); String message = null; if (success) { if (debugEnabled) { synchronized (_successfulCallStringBuffer) { _successfulCallStringBuffer.clear(); _successfulCallStringBuffer.append("Call succeeded. 
Duration: "); _successfulCallStringBuffer.append(String.valueOf(duration)); _successfulCallStringBuffer.append(" ms."); if (code != null) { _successfulCallStringBuffer.append(" Code: \""); _successfulCallStringBuffer.append(code); _successfulCallStringBuffer.append("\"."); } message = _successfulCallStringBuffer.toString(); } } synchronized (_successfulCallLock) { _lastSuccessfulStart = start; _lastSuccessfulDuration = duration; _successfulCalls++; _successfulDuration += duration; _successfulMin = _successfulMin > duration ? duration : _successfulMin; _successfulMax = _successfulMax < duration ? duration : _successfulMax; } } else { if (debugEnabled) { synchronized (_unsuccessfulCallStringBuffer) { _unsuccessfulCallStringBuffer.clear(); _unsuccessfulCallStringBuffer.append("Call failed. Duration: "); _unsuccessfulCallStringBuffer.append(String.valueOf(duration)); _unsuccessfulCallStringBuffer.append(" ms."); if (code != null) { _unsuccessfulCallStringBuffer.append(" Code: \""); _unsuccessfulCallStringBuffer.append(code); _unsuccessfulCallStringBuffer.append("\"."); } message = _unsuccessfulCallStringBuffer.toString(); } } synchronized (_unsuccessfulCallLock) { _lastUnsuccessfulStart = start; _lastUnsuccessfulDuration = duration; _unsuccessfulCalls++; _unsuccessfulDuration += duration; _unsuccessfulMin = _unsuccessfulMin > duration ? duration : _unsuccessfulMin; _unsuccessfulMax = _unsuccessfulMax < duration ? duration : _unsuccessfulMax; } } if (debugEnabled) { context.debug(message); } } // Inner classes /** * Call statistics pertaining to a certain function. * * @version $Revision$ $Date$ * @author Ernst de Haan (<a href="mailto:znerd@FreeBSD.org">znerd@FreeBSD.org</a>) */ final class Statistics extends Object { // Constructors /** * Constructs a new <code>Statistics</code> object. */ private Statistics() { // empty } // Fields // Methods /** * Returns the number of successful calls executed up until now. 
* * @return * the number of successful calls executed up until now. */ public int getSuccessfulCalls() { return _successfulCalls; } /** * Returns the number of unsuccessful calls executed up until now. * * @return * the number of unsuccessful calls executed up until now. */ public int getUnsuccessfulCalls() { return _unsuccessfulCalls; } /** * Returns the start time of the most recent successful call. * * @return * the start time of the most recent successful call. */ public long getLastSuccessfulStart() { return _lastSuccessfulStart; } /** * Returns the start time of the most recent unsuccessful call. * * @return * the start time of the most recent unsuccessful call. */ public long getLastUnsuccessfulStart() { return _lastUnsuccessfulStart; } /** * Returns the duration of the most recent successful call. * * @return * the duration of the most recent successful call. */ public long getLastSuccessfulDuration() { return _lastSuccessfulDuration; } /** * Returns the duration of the most recent unsuccessful call. * * @return * the duration of the most recent unsuccessful call. */ public long getLastUnsuccessfulDuration() { return _unsuccessfulDuration; } /** * Returns the total duration of all successful calls up until now. * * @return * the total duration of all successful calls up until now. */ public long getSuccessfulDuration() { return _successfulDuration; } /** * Returns the total duration of all unsuccessful calls up until now. * * @return * the total duration of all unsuccessful calls up until now. */ public long getUnsuccessfulDuration() { return _unsuccessfulDuration; } /** * Returns the minimum time a successful call took. * * @return * the minimum time a successful call took. */ public long getSuccessfulMin() { return _successfulMin; } /** * Returns the minimum time an unsuccessful call took. * * @return * the minimum time an unsuccessful call took. 
*/ public long getUnsuccessfulMin() { return _unsuccessfulMin; } /** * Returns the maximum time a successful call took. * * @return * the maximum time a successful call took. */ public long getSuccessfulMax() { return _successfulMax; } /** * Returns the maximum time an unsuccessful call took. * * @return * the maximum time an unsuccessful call took. */ public long getUnsuccessfulMax() { return _unsuccessfulMax; } } }
package org.xins.server; import org.xins.util.MandatoryArgumentChecker; /** * Base class for function implementation classes. * * @version $Revision$ $Date$ * @author Ernst de Haan (<a href="mailto:znerd@FreeBSD.org">znerd@FreeBSD.org</a>) */ public abstract class Function extends Object { // Class fields // Class functions // Constructors protected Function(API api, String name, String version) throws IllegalArgumentException { // Check argument MandatoryArgumentChecker.check("api", api, "name", name, "version", version); _api = api; _name = name; _version = version; _api.functionAdded(this); } // Fields /** * The API implementation this function is part of. */ private final API _api; /** * The name of this function. */ private final String _name; /** * The version of the specification this function implements. */ private final String _version; /** * Statistics object linked to this function. */ private final Statistics _statistics = new Statistics(); /** * Lock object for a successful call. */ private final Object _successfulCallLock = new Object(); /** * Lock object for an unsuccessful call. */ private final Object _unsuccessfulCallLock = new Object(); /** * The number of successful calls executed up until now. */ private int _successfulCalls; /** * The number of unsuccessful calls executed up until now. */ private int _unsuccessfulCalls; /** * The start time of the most recent successful call. */ private long _lastSuccessfulStart; /** * The start time of the most recent unsuccessful call. */ private long _lastUnsuccessfulStart; /** * The duration of the most recent successful call. */ private long _lastSuccessfulDuration; /** * The duration of the most recent unsuccessful call. */ private long _lastUnsuccessfulDuration; /** * The total duration of all successful calls up until now. */ private long _successfulDuration; /** * The total duration of all unsuccessful calls up until now. 
*/ private long _unsuccessfulDuration; /** * The minimum time a successful call took. */ private long _successfulMin = Long.MAX_VALUE; /** * The minimum time an unsuccessful call took. */ private long _unsuccessfulMin = Long.MAX_VALUE; /** * The maximum time a successful call took. */ private long _successfulMax; /** * The maximum time an unsuccessful call took. */ private long _unsuccessfulMax; // Methods /** * Returns the name of this function. * * @return * the name, not <code>null</code>. */ final String getName() { return _name; } /** * Returns the specification version for this function. * * @return * the version, not <code>null</code>. */ final String getVersion() { return _version; } /** * Returns the call statistics for this function. * * @return * the statistics, never <code>null</code>. */ final Statistics getStatistics() { return _statistics; } /** * Callback method that may be called after a call to this function. This * method will store statistics-related information. * * <p />This method does not <em>have</em> to be called. If statistics * gathering is disabled, then this method should not be called. * * @param start * the timestamp indicating when the call was started, as a number of * milliseconds since midnight January 1, 1970 UTC. * * @param duration * the duration of the function call, as a number of milliseconds. * * @param success * indication if the call was successful. * * @param code * the function result code, or <code>null</code>. */ final void performedCall(long start, long duration, boolean success, String code) { if (success) { synchronized (_successfulCallLock) { _lastSuccessfulStart = start; _lastSuccessfulDuration = duration; _successfulCalls++; _successfulDuration += duration; _successfulMin = _successfulMin > duration ? duration : _successfulMin; _successfulMax = _successfulMax < duration ? 
duration : _successfulMax; } } else { synchronized (_unsuccessfulCallLock) { _lastUnsuccessfulStart = start; _lastUnsuccessfulDuration = duration; _unsuccessfulCalls++; _unsuccessfulDuration += duration; _unsuccessfulMin = _unsuccessfulMin > duration ? duration : _unsuccessfulMin; _unsuccessfulMax = _unsuccessfulMax < duration ? duration : _unsuccessfulMax; } } } // Inner classes /** * Call statistics pertaining to a certain function. * * @version $Revision$ $Date$ * @author Ernst de Haan (<a href="mailto:znerd@FreeBSD.org">znerd@FreeBSD.org</a>) */ public final class Statistics extends Object { // Constructors /** * Constructs a new <code>Statistics</code> object. */ private Statistics() { // empty } // Fields // Methods /** * Returns the number of successful calls executed up until now. * * @return * the number of successful calls executed up until now. */ public int getSuccessfulCalls() { return _successfulCalls; } /** * Returns the number of unsuccessful calls executed up until now. * * @return * the number of unsuccessful calls executed up until now. */ public int getUnsuccessfulCalls() { return _unsuccessfulCalls; } /** * Returns the start time of the most recent successful call. * * @return * the start time of the most recent successful call. */ public long getLastSuccessfulStart() { return _lastSuccessfulStart; } /** * Returns the start time of the most recent unsuccessful call. * * @return * the start time of the most recent unsuccessful call. */ public long getLastUnsuccessfulStart() { return _lastUnsuccessfulStart; } /** * Returns the duration of the most recent successful call. * * @return * the duration of the most recent successful call. */ public long getLastSuccessfulDuration() { return _lastSuccessfulDuration; } /** * Returns the duration of the most recent unsuccessful call. * * @return * the duration of the most recent unsuccessful call. 
*/ public long getLastUnsuccessfulDuration() { return _unsuccessfulDuration; } /** * Returns the total duration of all successful calls up until now. * * @return * the total duration of all successful calls up until now. */ public long getSuccessfulDuration() { return _successfulDuration; } /** * Returns the total duration of all unsuccessful calls up until now. * * @return * the total duration of all unsuccessful calls up until now. */ public long getUnsuccessfulDuration() { return _unsuccessfulDuration; } /** * Returns the minimum time a successful call took. * * @return * the minimum time a successful call took. */ public long getSuccessfulMin() { return _successfulMin; } /** * Returns the minimum time an unsuccessful call took. * * @return * the minimum time an unsuccessful call took. */ public long getUnsuccessfulMin() { return _unsuccessfulMin; } /** * Returns the maximum time a successful call took. * * @return * the maximum time a successful call took. */ public long getSuccessfulMax() { return _successfulMax; } /** * Returns the maximum time an unsuccessful call took. * * @return * the maximum time an unsuccessful call took. */ public long getUnsuccessfulMax() { return _unsuccessfulMax; } } }
// $Id: BackgroundTiler.java,v 1.2 2002/05/07 03:23:46 mdb Exp $

package com.threerings.media.util;

import java.awt.Graphics;
import java.awt.Shape;
import java.awt.image.BufferedImage;

import com.threerings.media.Log;

/**
 * Used to tile a background image into regions of various sizes. The
 * source image is divided into nine quadrants (of mostly equal size)
 * which are tiled accordingly to fill whatever size background image is
 * desired.
 */
public class BackgroundTiler
{
    /**
     * Creates a background tiler with the specified source image.
     */
    public BackgroundTiler (BufferedImage src)
    {
        // make sure we were given the goods
        if (src == null) {
            Log.info("Backgrounder given null source image. Coping.");
            return;
        }

        // compute some values: each dimension is split into two edge
        // thirds (_w3/_h3) and a center chunk (_cw3/_ch3) that absorbs
        // the division remainder
        _width = src.getWidth(null);
        _w3 = _width/3;
        _cw3 = _width-2*_w3;
        _height = src.getHeight(null);
        _h3 = _height/3;
        _ch3 = _height-2*_h3;

        // create our sub-divided images (3x3 grid, row-major)
        _tiles = new BufferedImage[9];
        int[] sy = { 0, _h3, _h3+_ch3 };
        int[] thei = { _h3, _ch3, _h3 };
        for (int i = 0; i < 3; i++) {
            _tiles[3*i] = src.getSubimage(0, sy[i], _w3, thei[i]);
            _tiles[3*i+1] = src.getSubimage(_w3, sy[i], _cw3, thei[i]);
            _tiles[3*i+2] = src.getSubimage(_width-_w3, sy[i], _w3, thei[i]);
        }
    }

    /**
     * Fills the requested region with the background defined by our
     * source image.
     */
    public void paint (Graphics g, int x, int y, int width, int height)
    {
        // bail out now if we were passed a bogus source image at
        // construct time
        if (_tiles == null) {
            return;
        }

        int rwid = width-2*_w3, rhei = height-2*_h3, cy = y;
        Shape oclip = g.getClip();

        // tile the top row
        paintRow(0, g, x, cy, width);
        cy += _h3;

        // tile the (complete) intermediate rows
        int ycount = rhei/_ch3;
        for (int row = 0; row < ycount; row++) {
            paintRow(1, g, x, cy, width);
            cy += _ch3;
        }

        // set the clip and paint the clipped intermediate row (if we
        // didn't tile evenly in the vertical direction)
        int yextra = (rhei - ycount * _ch3);
        if (yextra > 0) {
            g.clipRect(x, cy, width, yextra);
            paintRow(1, g, x, cy, width);
            g.setClip(oclip);
        }

        // tile the last row
        int lasty = y + height - _h3;
        paintRow(2, g, x, lasty, width);

        // now, set the clipping rectangle and render the horizontal tiles
        // that we missed the first time around because we want to clip
        // only once instead of once per row
        int xcount = rwid/_cw3;
        int xextra = (rwid - xcount * _cw3);
        int xoff = x + width - _w3 - xextra;
        // FIX: the original tested "xextra < width" which is effectively
        // always true (xextra is a remainder smaller than _cw3) and would
        // set a zero-width clip when the row tiled evenly; test for an
        // actual remainder instead (drawing-wise equivalent, since a
        // zero-width clip draws nothing anyway)
        if (xextra > 0) {
            cy = y; // start back at the top
            g.clipRect(xoff, y, xextra, height);
            g.drawImage(_tiles[1], xoff, cy, null);
            cy += _h3;
            for (int row = 0; row < ycount; row++) {
                g.drawImage(_tiles[4], xoff, cy, null);
                cy += _ch3;
            }
            g.drawImage(_tiles[7], xoff, lasty, null);
        }

        // finally, clip the tiny region where the xextra and yextra rects
        // intersect and paint that last niggling bit (we value
        // correctness, so we're doing things properly)
        if (xextra > 0 && yextra > 0) {
            // we know the clip is still set from the xextra render, so we
            // just restrict it once again to the yextra region and the
            // intersection will happen automatically
            g.clipRect(x, cy, width, yextra);
            g.drawImage(_tiles[4], xoff, lasty - yextra, null);
        }

        // phew, we're done
        g.setClip(oclip);
    }

    /**
     * Used by {@link #paint} to render rows.
     */
    protected void paintRow (int srow, Graphics g, int x, int y, int width)
    {
        int xcount = (width-2*_w3)/_cw3;
        int tidx = 3*srow;

        // remember the left edge of the region; x is advanced as we tile
        int startx = x;

        // draw the first image in the row
        g.drawImage(_tiles[tidx++], x, y, null);
        x += _w3;

        // draw the (complete) tiled middle images
        for (int ii = 0; ii < xcount; ii++) {
            g.drawImage(_tiles[tidx], x, y, null);
            x += _cw3;
        }

        // we'll render the last (incomplete) tiled image in a final
        // cleanup render so that we only have to set the clipping region
        // once

        // draw the end image
        // FIX: the original used "x = width-_w3" which ignored the x
        // origin of the region; paint() consistently offsets by the
        // origin elsewhere (e.g. "lasty = y + height - _h3"), so a
        // non-zero x origin would have drawn the end tile in the wrong
        // place
        x = startx + width - _w3;
        g.drawImage(_tiles[++tidx], x, y, null);
    }

    /** Our nine sub-divided images. */
    protected BufferedImage[] _tiles;

    /** The width/height of our source image. */
    protected int _width, _height;

    /** One third of width/height of our source image. */
    protected int _w3, _h3;

    /** The size of the center chunk of our subdivided images. */
    protected int _cw3, _ch3;
}
package org.apache.fop.fo.flow;

// FOP
import org.apache.fop.datatypes.ColorType;
import org.apache.fop.fo.FOTreeVisitor;
import org.apache.fop.fo.FONode;
import org.apache.fop.fo.FObj;
import org.apache.fop.fo.FOInputHandler;
import org.apache.fop.fo.properties.CommonAccessibility;
import org.apache.fop.fo.properties.CommonAural;
import org.apache.fop.fo.properties.CommonBackground;
import org.apache.fop.fo.properties.CommonBorderAndPadding;
import org.apache.fop.fo.FOTreeControl;
import org.apache.fop.fonts.Font;
import org.apache.fop.fo.properties.CommonMarginInline;
import org.apache.fop.fo.properties.CommonRelativePosition;
import org.apache.fop.util.CharUtilities;

/**
 * Class modelling the fo:page-number-citation object. See Sec. 6.6.11 of the
 * XSL-FO Standard.
 * This inline fo is replaced with the text for a page number.
 * The page number used is the page that contains the start of the
 * block referenced with the ref-id attribute.
 */
public class PageNumberCitation extends FObj {

    /** FontInfo for this object; populated by {@link #setFOInputHandler}. **/
    protected FOTreeControl fontInfo = null;

    /** Fontstate for this object; populated by {@link #setup}. **/
    protected Font fontState;

    // Color components resolved from the "color" property in setup()
    private float red;
    private float green;
    private float blue;

    // Enum value of the "wrap-option" property, resolved in setup()
    private int wrapOption;

    // NOTE(review): never assigned in this class — presumably filled in by
    // the layout stage once the citation is resolved; verify against callers
    private String pageNumber;

    // Value of the mandatory "ref-id" property, resolved in setup()
    private String refId;

    // True while the cited page number is not yet known
    private boolean unresolved = false;

    /**
     * @param parent FONode that is the parent of this object
     */
    public PageNumberCitation(FONode parent) {
        super(parent);
    }

    /**
     * Stores the handler and caches its font information for later use
     * by {@link #setup}.
     *
     * @param foih the FOInputHandler object to set
     */
    public void setFOInputHandler(FOInputHandler foih) {
        super.setFOInputHandler(foih);
        fontInfo = foih.getFontInfo();
    }

    /**
     * Sums the per-character widths of the string in the current font state.
     * Requires {@link #setup} to have initialized {@code fontState} first.
     *
     * @param str string to be measured
     * @return width (in millipoints ??) of the string
     */
    public int getStringWidth(String str) {
        int width = 0;

        for (int count = 0; count < str.length(); count++) {
            width += CharUtilities.getCharWidth(str.charAt(count),
                                                fontState);
        }

        return width;
    }

    /**
     * Resolves this object's properties from the property manager:
     * common property groups, the font state, color, wrap-option and the
     * mandatory ref-id. Commented-out property names below are recognized
     * by the spec but not yet handled here.
     */
    public void setup() {

        // Common Accessibility Properties
        CommonAccessibility mAccProps = propMgr.getAccessibilityProps();

        // Common Aural Properties
        CommonAural mAurProps = propMgr.getAuralProps();

        // Common Border, Padding, and Background Properties
        CommonBorderAndPadding bap = propMgr.getBorderAndPadding();
        CommonBackground bProps = propMgr.getBackgroundProps();

        // Common Font Properties
        this.fontState = propMgr.getFontState(fontInfo);

        // Common Margin Properties-Inline
        CommonMarginInline mProps = propMgr.getMarginInlineProps();

        // Common Relative Position Properties
        CommonRelativePosition mRelProps = propMgr.getRelativePositionProps();

        // this.properties.get("alignment-adjust");
        // this.properties.get("alignment-baseline");
        // this.properties.get("baseline-shift");
        // this.properties.get("dominant-baseline");
        setupID();
        // this.properties.get("keep-with-next");
        // this.properties.get("keep-with-previous");
        // this.properties.get("letter-spacing");
        // this.properties.get("line-height");
        // this.properties.get("line-height-shift-adjustment");
        // this.properties.get("ref-id");
        // this.properties.get("score-spaces");
        // this.properties.get("text-decoration");
        // this.properties.get("text-shadow");
        // this.properties.get("text-transform");
        // this.properties.get("word-spacing");

        ColorType c = this.properties.get("color").getColorType();
        this.red = c.getRed();
        this.green = c.getGreen();
        this.blue = c.getBlue();

        this.wrapOption = this.properties.get("wrap-option").getEnum();

        this.refId = this.properties.get("ref-id").getString();

        // ref-id is mandatory per the spec, but the error is currently
        // not raised (exception intentionally left commented out)
        if (this.refId.equals("")) {
            //throw new FOPException("page-number-citation must contain \"ref-id\"");
        }
    }

    /** @return the value of the ref-id property resolved by setup(). */
    public String getRefId() {
        return refId;
    }

    /** @return true while the cited page number is not yet resolved. */
    public boolean getUnresolved() {
        return unresolved;
    }

    /** Marks whether the cited page number is still unresolved. */
    public void setUnresolved(boolean isUnresolved) {
        unresolved = isUnresolved;
    }

    /** Double-dispatch entry point for FO tree visitors. */
    public void acceptVisitor(FOTreeVisitor fotv) {
        fotv.serveVisitor(this);
    }

    /** @return the font state resolved by setup(), or null before setup(). */
    public Font getFontState() {
        return fontState;
    }
}
package org.jdesktop.swingx.image;

import java.awt.image.BufferedImage;
import java.awt.Color;

import org.jdesktop.swingx.graphics.GraphicsUtilities;

/**
 * <p>A color tint filter can be used to mix a solid color to an image. The
 * result is an image tinted by the specified color. The force of the effect
 * can be controlled with the <code>mixValue</code>, a number between 0.0 and
 * 1.0 that can be seen as the percentage of the mix (0.0 does not affect the
 * source image and 1.0 replaces all the pixels by the solid color).</p>
 * <p>The color of the pixels in the resulting image is computed as follows:</p>
 * <pre>
 * cR = cS * (1 - mixValue) + cM * mixValue
 * </pre>
 * <p>Definition of the parameters:</p>
 * <ul>
 *   <li><code>cR</code>: color of the resulting pixel</li>
 *   <li><code>cS</code>: color of the source pixel</li>
 *   <li><code>cM</code>: the solid color to mix with the source image</li>
 *   <li><code>mixValue</code>: strength of the mix, a value between 0.0 and 1.0</li>
 * </ul>
 *
 * @author Romain Guy <romain.guy@mac.com>
 */
public class ColorTintFilter extends AbstractFilter {
    private final Color mixColor;
    private final float mixValue;

    // Per-channel lookup tables: index is the source channel value (0-255),
    // entry is the pre-computed mixed result, so filtering is table lookups
    // only.
    private int[] preMultipliedAlpha;
    private int[] preMultipliedRed;
    private int[] preMultipliedGreen;
    private int[] preMultipliedBlue;

    /**
     * <p>Creates a filter that mixes {@code mixColor} into filtered images
     * with the given strength. A {@code mixValue} outside [0.0, 1.0] is
     * silently clamped to that range.</p>
     *
     * @param mixColor the solid color to mix in, cannot be null
     * @param mixValue strength of the mix, clamped to [0.0, 1.0]
     * @throws IllegalArgumentException if mixColor is null
     */
    public ColorTintFilter(Color mixColor, float mixValue) {
        if (mixColor == null) {
            throw new IllegalArgumentException("mixColor cannot be null");
        }

        this.mixColor = mixColor;
        if (mixValue < 0.0f) {
            mixValue = 0.0f;
        } else if (mixValue > 1.0f) {
            mixValue = 1.0f;
        }
        this.mixValue = mixValue;

        // Constant contribution of the mix color to each channel:
        // cM * mixValue in the formula above
        int mix_a = (int) (mixColor.getAlpha() * mixValue);
        int mix_r = (int) (mixColor.getRed() * mixValue);
        int mix_g = (int) (mixColor.getGreen() * mixValue);
        int mix_b = (int) (mixColor.getBlue() * mixValue);

        // Since we use only lookup tables to apply the filter, this filter
        // could be implemented as a LookupOp.
        float factor = 1.0f - mixValue;
        preMultipliedAlpha = new int[256];
        preMultipliedRed   = new int[256];
        preMultipliedGreen = new int[256];
        preMultipliedBlue  = new int[256];

        // table[i] = i * (1 - mixValue) + cM * mixValue; both terms are
        // <= their channel maxima, so the sum never exceeds 255
        for (int i = 0; i < 256; i++) {
            int value = (int) (i * factor);
            preMultipliedAlpha[i] = value + mix_a;
            preMultipliedRed[i]   = value + mix_r;
            preMultipliedGreen[i] = value + mix_g;
            preMultipliedBlue[i]  = value + mix_b;
        }
    }

    /**
     * <p>Returns the mix value of this filter.</p>
     *
     * @return the mix value, between 0.0 and 1.0
     */
    public float getMixValue() {
        return mixValue;
    }

    /**
     * <p>Returns the solid mix color of this filter.</p>
     *
     * @return the solid color used for mixing
     */
    public Color getMixColor() {
        return mixColor;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public BufferedImage filter(BufferedImage src, BufferedImage dst) {
        if (dst == null) {
            dst = createCompatibleDestImage(src, null);
        }

        int width = src.getWidth();
        int height = src.getHeight();

        // Pull all pixels out as packed ARGB ints, tint in place, write back
        int[] pixels = new int[width * height];
        GraphicsUtilities.getPixels(src, 0, 0, width, height, pixels);
        mixColor(pixels);
        GraphicsUtilities.setPixels(dst, 0, 0, width, height, pixels);

        return dst;
    }

    /**
     * Tints every packed-ARGB pixel in the array in place by running each
     * channel through its pre-computed lookup table.
     *
     * @param pixels packed ARGB pixels, modified in place
     */
    private void mixColor(int[] pixels) {
        for (int i = 0; i < pixels.length; i++) {
            int argb = pixels[i];
            pixels[i] = preMultipliedAlpha[(argb >> 24) & 0xFF] << 24 |
                        preMultipliedRed[(argb >> 16) & 0xFF]   << 16 |
                        preMultipliedGreen[(argb >> 8) & 0xFF]  << 8  |
                        preMultipliedBlue[argb & 0xFF];
        }
    }
}
package org.relique.jdbc.csv;

import java.io.InputStream;
import java.io.Reader;
import java.math.BigDecimal;
import java.net.URL;
import java.sql.Array;
import java.sql.Blob;
import java.sql.Clob;
import java.sql.Date;
import java.sql.DriverManager;
import java.sql.NClob;
import java.sql.ParameterMetaData;
import java.sql.PreparedStatement;
import java.sql.Ref;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.RowId;
import java.sql.SQLException;
import java.sql.SQLXML;
import java.sql.Time;
import java.sql.Timestamp;
import java.util.Calendar;

/**
 * {@link PreparedStatement} implementation for CSV files. Parameter values
 * are collected 1-based in {@link #parameters} (slot 0 is unused, matching
 * JDBC parameter indexing) and substituted into the parsed query when
 * {@link #executeQuery()} is called. Updates are not supported.
 */
public class CsvPreparedStatement extends CsvStatement implements PreparedStatement {

	/** Parameter values, 1-based; slot 0 is unused. */
	private Object[] parameters;

	/** The original SQL text with '?' placeholders. */
	private String templateQuery;

	/** Parser holding the parsed form of the template query. */
	private SqlParser parser;

	/**
	 * Parses the SQL once up front and sizes the parameter array from the
	 * placeholder count.
	 *
	 * @param connection the owning connection
	 * @param sql the SQL text with '?' placeholders
	 * @param isScrollable result-set scrollability flag passed to the parent
	 * @throws SQLException if the SQL cannot be parsed
	 */
	protected CsvPreparedStatement(CsvConnection connection, String sql,
			int isScrollable) throws SQLException {
		super(connection, isScrollable);

		parser = new SqlParser();
		try {
			parser.parse(sql);
		} catch (Exception e) {
			// FIX: chain the cause instead of printStackTrace() + losing it
			throw new SQLException("Syntax Error. " + e.getMessage(), e);
		}

		// +1 because JDBC parameter indices are 1-based
		parameters = new Object[parser.getPlaceholdersCount() + 1];
		templateQuery = sql;
	}

	public void addBatch() throws SQLException {
		// TODO Auto-generated method stub
	}

	/** Clears all parameter slots (slot 0 is unused and stays null). */
	public void clearParameters() throws SQLException {
		for (int i = 1; i < parameters.length; i++) {
			parameters[i] = null;
		}
	}

	public boolean execute() throws SQLException {
		throw new SQLException("execute() not Supported !");
	}

	/**
	 * Binds the collected parameters into the parsed query and runs it.
	 */
	public ResultSet executeQuery() throws SQLException {
		DriverManager.println("CsvJdbc - CsvStatement:executeQuery() - sql= "
				+ templateQuery);
		parser.setPlaceholdersValues(parameters);
		return executeParsedQuery(parser);
	}

	public int executeUpdate() throws SQLException {
		// TODO Auto-generated method stub
		return 0;
	}

	public ResultSetMetaData getMetaData() throws SQLException {
		// TODO Auto-generated method stub
		return null;
	}

	public ParameterMetaData getParameterMetaData() throws SQLException {
		// TODO Auto-generated method stub
		return null;
	}

	public void setArray(int arg0, Array arg1) throws SQLException {
		this.parameters[arg0] = arg1;
	}

	public void setAsciiStream(int arg0, InputStream arg1) throws SQLException {
		this.parameters[arg0] = arg1;
	}

	public void setAsciiStream(int arg0, InputStream arg1, int arg2)
			throws SQLException {
		// TODO Auto-generated method stub
	}

	public void setAsciiStream(int arg0, InputStream arg1, long arg2)
			throws SQLException {
		// TODO Auto-generated method stub
	}

	public void setBigDecimal(int arg0, BigDecimal arg1) throws SQLException {
		this.parameters[arg0] = arg1;
	}

	public void setBinaryStream(int arg0, InputStream arg1) throws SQLException {
		this.parameters[arg0] = arg1;
	}

	public void setBinaryStream(int arg0, InputStream arg1, int arg2)
			throws SQLException {
		// TODO Auto-generated method stub
	}

	public void setBinaryStream(int arg0, InputStream arg1, long arg2)
			throws SQLException {
		// TODO Auto-generated method stub
	}

	public void setBlob(int arg0, Blob arg1) throws SQLException {
		this.parameters[arg0] = arg1;
	}

	public void setBlob(int arg0, InputStream arg1) throws SQLException {
		this.parameters[arg0] = arg1;
	}

	public void setBlob(int arg0, InputStream arg1, long arg2)
			throws SQLException {
		// TODO Auto-generated method stub
	}

	public void setBoolean(int arg0, boolean arg1) throws SQLException {
		this.parameters[arg0] = Boolean.valueOf(arg1);
	}

	public void setByte(int arg0, byte arg1) throws SQLException {
		this.parameters[arg0] = Byte.valueOf(arg1);
	}

	public void setBytes(int arg0, byte[] arg1) throws SQLException {
		// FIX: was an empty stub — store the value like the other setters
		this.parameters[arg0] = arg1;
	}

	public void setCharacterStream(int arg0, Reader arg1) throws SQLException {
		this.parameters[arg0] = arg1;
	}

	public void setCharacterStream(int arg0, Reader arg1, int arg2)
			throws SQLException {
		// TODO Auto-generated method stub
	}

	public void setCharacterStream(int arg0, Reader arg1, long arg2)
			throws SQLException {
		// TODO Auto-generated method stub
	}

	public void setClob(int arg0, Clob arg1) throws SQLException {
		this.parameters[arg0] = arg1;
	}

	public void setClob(int arg0, Reader arg1) throws SQLException {
		this.parameters[arg0] = arg1;
	}

	public void setClob(int arg0, Reader arg1, long arg2) throws SQLException {
		// TODO Auto-generated method stub
	}

	public void setDate(int arg0, Date arg1) throws SQLException {
		this.parameters[arg0] = arg1;
	}

	public void setDate(int arg0, Date arg1, Calendar arg2) throws SQLException {
		// TODO Auto-generated method stub
	}

	public void setDouble(int arg0, double arg1) throws SQLException {
		// FIX: was an empty stub, silently dropping double parameters while
		// setFloat/setInt/setLong/setShort all store their values
		this.parameters[arg0] = Double.valueOf(arg1);
	}

	public void setFloat(int arg0, float arg1) throws SQLException {
		this.parameters[arg0] = Float.valueOf(arg1);
	}

	public void setInt(int arg0, int arg1) throws SQLException {
		this.parameters[arg0] = Integer.valueOf(arg1);
	}

	public void setLong(int arg0, long arg1) throws SQLException {
		this.parameters[arg0] = Long.valueOf(arg1);
	}

	public void setNCharacterStream(int arg0, Reader arg1) throws SQLException {
		this.parameters[arg0] = arg1;
	}

	public void setNCharacterStream(int arg0, Reader arg1, long arg2)
			throws SQLException {
		// TODO Auto-generated method stub
	}

	public void setNClob(int arg0, NClob arg1) throws SQLException {
		this.parameters[arg0] = arg1;
	}

	public void setNClob(int arg0, Reader arg1) throws SQLException {
		this.parameters[arg0] = arg1;
	}

	public void setNClob(int arg0, Reader arg1, long arg2) throws SQLException {
		// TODO Auto-generated method stub
	}

	public void setNString(int arg0, String arg1) throws SQLException {
		// FIX: was an empty stub — store the value, consistent with setString
		this.parameters[arg0] = arg1;
	}

	public void setNull(int arg0, int arg1) throws SQLException {
		// FIX: was an empty stub — explicitly bind SQL NULL (matches the
		// representation clearParameters() uses)
		this.parameters[arg0] = null;
	}

	public void setNull(int arg0, int arg1, String arg2) throws SQLException {
		// FIX: was an empty stub — explicitly bind SQL NULL
		this.parameters[arg0] = null;
	}

	public void setObject(int arg0, Object arg1) throws SQLException {
		this.parameters[arg0] = arg1;
	}

	public void setObject(int arg0, Object arg1, int arg2) throws SQLException {
		// TODO Auto-generated method stub
	}

	public void setObject(int arg0, Object arg1, int arg2, int arg3)
			throws SQLException {
		// TODO Auto-generated method stub
	}

	public void setRef(int arg0, Ref arg1) throws SQLException {
		this.parameters[arg0] = arg1;
	}

	public void setRowId(int arg0, RowId arg1) throws SQLException {
		this.parameters[arg0] = arg1;
	}

	public void setSQLXML(int arg0, SQLXML arg1) throws SQLException {
		this.parameters[arg0] = arg1;
	}

	public void setShort(int arg0, short arg1) throws SQLException {
		this.parameters[arg0] = Short.valueOf(arg1);
	}

	public void setString(int arg0, String arg1) throws SQLException {
		this.parameters[arg0] = arg1;
	}

	public void setTime(int arg0, Time arg1) throws SQLException {
		this.parameters[arg0] = arg1;
	}

	public void setTime(int arg0, Time arg1, Calendar arg2) throws SQLException {
		// TODO Auto-generated method stub
	}

	public void setTimestamp(int arg0, Timestamp arg1) throws SQLException {
		this.parameters[arg0] = arg1;
	}

	public void setTimestamp(int arg0, Timestamp arg1, Calendar arg2)
			throws SQLException {
		// TODO Auto-generated method stub
	}

	public void setURL(int arg0, URL arg1) throws SQLException {
		this.parameters[arg0] = arg1;
	}

	public void setUnicodeStream(int arg0, InputStream arg1, int arg2)
			throws SQLException {
		// TODO Auto-generated method stub
	}
}
package io.spine.core; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Optional; import com.google.common.base.Throwables; import com.google.protobuf.Any; import com.google.protobuf.Message; import io.spine.Identifier; import io.spine.annotation.Internal; import io.spine.base.ThrowableMessage; import static com.google.common.base.Preconditions.checkNotNull; import static io.spine.protobuf.AnyPacker.pack; import static io.spine.protobuf.AnyPacker.unpack; import static java.lang.String.format; /** * Utility class for working with rejections. * * @author Alexander Yevsyukov */ public final class Rejections { /** The name suffix for an outer class of generated rejection classes. */ public static final String OUTER_CLASS_SUFFIX = "Rejections"; /** The format string for ID of a {@link Rejection}. */ @VisibleForTesting static final String REJECTION_ID_FORMAT = "%s-reject"; /** Prevents instantiation of this utility class. */ private Rejections() {} /** * Tells whether the passed message class represents a rejection message. */ public static boolean isRejection(Class<? extends Message> messageClass) { checkNotNull(messageClass); final Class<?> enclosingClass = messageClass.getEnclosingClass(); if (enclosingClass == null) { return false; // Rejection messages are generated as inner static classes. } final boolean hasCorrectSuffix = enclosingClass.getName() .endsWith(OUTER_CLASS_SUFFIX); return hasCorrectSuffix; } /** * Extracts a rejection message if the passed instance is {@link Rejection} or {@link Any}, * otherwise returns the passed message. */ public static Message ensureMessage(Message rejectionOrMessage) { checkNotNull(rejectionOrMessage); if (rejectionOrMessage instanceof Rejection) { return getMessage((Rejection) rejectionOrMessage); } return io.spine.protobuf.Messages.ensureMessage(rejectionOrMessage); } /** * Converts this {@code ThrowableMessage} into {@link Rejection}. 
* * @param command the command which caused the rejection */ public static Rejection toRejection(ThrowableMessage throwable, Command command) { checkNotNull(throwable); checkNotNull(command); final Message rejectionMessage = throwable.getMessageThrown(); final Any packedState = pack(rejectionMessage); final RejectionContext context = createContext(throwable, command); final RejectionId id = generateId(command.getId()); final Rejection.Builder builder = Rejection.newBuilder() .setId(id) .setMessage(packedState) .setContext(context); return builder.build(); } private static RejectionContext createContext(ThrowableMessage message, Command command) { final String stacktrace = Throwables.getStackTraceAsString(message); final RejectionContext.Builder builder = RejectionContext.newBuilder() .setTimestamp(message.getTimestamp()) .setStacktrace(stacktrace) .setCommand(command); final Optional<Any> optional = message.producerId(); if (optional.isPresent()) { builder.setProducerId(optional.get()); } return builder.build(); } /** * Creates a new {@code Rejection} instance. * * @param messageOrAny the rejection message or {@code Any} containing the message * @param command the {@code Command}, which triggered the rejection. 
* @return created rejection instance */ public static Rejection createRejection(Message messageOrAny, Command command) { checkNotNull(messageOrAny); checkNotNull(command); final Any packedMessage = pack(messageOrAny); final RejectionContext context = RejectionContext.newBuilder() .setCommand(command) .build(); final Rejection result = Rejection.newBuilder() .setMessage(packedMessage) .setContext(context) .build(); return result; } /** * Generates a {@code RejectionId} based upon a {@linkplain CommandId command ID} in a format: * * <pre>{@code <commandId>-reject}</pre> * * @param id the identifier of the {@linkplain Command command}, which processing caused the * rejection **/ public static RejectionId generateId(CommandId id) { final String idValue = format(REJECTION_ID_FORMAT, id.getUuid()); return RejectionId.newBuilder() .setValue(idValue) .build(); } /** * Extracts the message from the passed {@code Rejection} instance. * * @param rejection a rejection to extract a message from * @param <M> a type of the rejection message * @return an unpacked message */ public static <M extends Message> M getMessage(Rejection rejection) { checkNotNull(rejection); final M result = unpack(rejection.getMessage()); return result; } /** * Obtains rejection producer ID from the passed {@code RejectionContext} and casts it to the * {@code <I>} type. * * @param context the rejection context to to get the producer ID * @param <I> the type of the producer ID * @return the producer ID */ public static <I> Optional<I> getProducer(RejectionContext context) { checkNotNull(context); final Any producerId = context.getProducerId(); if (Any.getDefaultInstance().equals(producerId)) { return Optional.absent(); } final I id = Identifier.unpack(producerId); return Optional.of(id); } /** * Analyzes the rejection context and determines if the rejection has been produced outside * of the current bounded context. 
* * @param context the context of rejection * @return {@code true} if the rejection is external, {@code false} otherwise */ @Internal public static boolean isExternal(RejectionContext context) { checkNotNull(context); return context.getExternal(); } }
package VASSAL.preferences;

import java.awt.Dimension;
import java.awt.Frame;
import java.awt.Toolkit;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.ComponentAdapter;
import java.awt.event.ComponentEvent;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.swing.AbstractAction;
import javax.swing.Action;
import javax.swing.Box;
import javax.swing.BoxLayout;
import javax.swing.JButton;
import javax.swing.JDialog;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JTabbedPane;
import javax.swing.WindowConstants;
import VASSAL.build.module.documentation.HelpFile;
import VASSAL.tools.BrowserSupport;
import net.miginfocom.swing.MigLayout;
import VASSAL.configure.Configurer;
import VASSAL.i18n.Resources;
import VASSAL.tools.SplashScreen;
import VASSAL.tools.WriteErrorDialog;

/**
 * A tabbed preferences dialog. {@link Configurer}s are registered per
 * category (one tab each); opening the dialog snapshots their values so that
 * Cancel can roll back, while OK fires updates for changed values and writes
 * all registered {@link Prefs} to disk.
 */
public class PrefsEditor {
  // The main preferences dialog; created lazily by initDialog().
  private JDialog dialog;
  // Configurers currently shown in the dialog.
  private List<Configurer> options = new ArrayList<>();
  // Configurers added while save() is iterating over `options`; merged in afterwards.
  private List<Configurer> extras = new ArrayList<>();
  // True only while save() walks `options`; routes concurrent addOption() calls to `extras`.
  private boolean iterating = false;
  // Values captured when the dialog opened, used to restore on cancel()
  // and to detect changes on save().
  private Map<Configurer, Object> savedValues = new HashMap<>();
  // All preference stores to be persisted by write().
  private List<Prefs> prefs = new ArrayList<>();
  // One tab per preference category.
  private JTabbedPane optionsTab = new JTabbedPane();
  // Modal dialog used for first-time setup prompts (see 3-arg addOption).
  private JDialog setupDialog;
  // NOTE(review): appears unused in this class — candidate for removal; confirm.
  private File pfile;
  // Cached action returned by getEditAction().
  private Action editAction;

  public PrefsEditor() {}

  /**
   * Lazily builds the preferences dialog (title, close handling, Help/OK/Cancel
   * buttons and the category tabs). Safe to call repeatedly; only the first
   * call constructs the dialog.
   *
   * @param parent the owner frame for the modal dialog
   */
  public void initDialog(Frame parent) {
    if (dialog == null) {
      dialog = new JDialog(parent, true);
      dialog.setTitle(Resources.getString("Prefs.preferences")); //$NON-NLS-1$
      dialog.setDefaultCloseOperation(WindowConstants.DO_NOTHING_ON_CLOSE);

      // Handle window closing correctly: closing the window is a cancel,
      // not a save.
      dialog.addWindowListener(new WindowAdapter() {
        @Override
        public void windowClosing(WindowEvent we) {
          cancel();
        }
      });

      // Help button looks up help in Preferences.html, by tab-name
      final JButton help = new JButton(Resources.getString(Resources.HELP));
      help.addActionListener(new ActionListener() {
        @Override
        public void actionPerformed(ActionEvent e) {
          int index = optionsTab.getSelectedIndex();
          // Fall back to the "top" anchor when no tab is selected.
          String tabName = (index >= 0) ? optionsTab.getTitleAt(index) : "top";
          HelpFile helpFile = HelpFile.getReferenceManualPage("Preferences.html", tabName);
          BrowserSupport.openURL(helpFile.getContents().toString());
        }
      });

      final JButton ok = new JButton(Resources.getString(Resources.OK));
      ok.addActionListener(new ActionListener() {
        @Override
        public void actionPerformed(ActionEvent e) {
          save();
        }
      });

      final JButton cancel = new JButton(Resources.getString(Resources.CANCEL));
      cancel.addActionListener(new ActionListener() {
        @Override
        public void actionPerformed(ActionEvent e) {
          cancel();
        }
      });

      dialog.setLayout(new MigLayout("insets dialog"));
      dialog.add(optionsTab, "push, grow, wrap unrelated");
      dialog.add(help, "tag help, split");
      dialog.add(ok, "tag ok, split");
      dialog.add(cancel, "tag cancel");
    }
  }

  /** @return the preferences dialog, or {@code null} before initDialog() ran */
  public JDialog getDialog() {
    return dialog;
  }

  /** Registers a preference store to be persisted by {@link #write()}. */
  public void addPrefs(Prefs p) {
    prefs.add(p);
  }

  /**
   * Adds a configurer to a category tab. When {@code prompt} is non-null the
   * configurer is additionally shown in a blocking first-time-setup dialog
   * (centered on screen, dismissed by its OK button) before being added.
   *
   * @param category tab name, or null for the general tab
   * @param c        the configurer to add
   * @param prompt   optional one-time prompt text shown to the user
   */
  public void addOption(String category, Configurer c, String prompt) {
    if (prompt != null) {
      if (setupDialog == null) {
        setupDialog = new JDialog((Frame) null, true);
        setupDialog.setTitle(Resources.getString("Prefs.initial_setup")); //$NON-NLS-1$
        setupDialog.setLayout(new BoxLayout(setupDialog.getContentPane(), BoxLayout.Y_AXIS));
        setupDialog.setDefaultCloseOperation(WindowConstants.DO_NOTHING_ON_CLOSE);
        // Keep the splash screen from hiding the setup prompt.
        setupDialog.addComponentListener(new ComponentAdapter() {
          @Override
          public void componentShown(ComponentEvent e) {
            SplashScreen.sendAllToBack();
          }
        });
      }
      JPanel p = new JPanel();
      p.add(new JLabel(prompt));
      setupDialog.add(p);
      setupDialog.add(c.getControls());
      JButton b = new JButton(Resources.getString(Resources.OK));
      b.addActionListener(new ActionListener() {
        @Override
        public void actionPerformed(ActionEvent evt) {
          setupDialog.setVisible(false);
        }
      });
      p = new JPanel();
      p.add(b);
      setupDialog.add(p);
      setupDialog.pack();
      // Center the setup dialog on the screen.
      Dimension d = Toolkit.getDefaultToolkit().getScreenSize();
      setupDialog.setLocation(
        d.width / 2 - setupDialog.getSize().width / 2,
        d.height / 2 - setupDialog.getSize().height / 2
      );
      // Modal: blocks here until the OK listener hides the dialog.
      setupDialog.setVisible(true);
      // Strip this prompt's components so the dialog can be reused.
      setupDialog.removeAll();
    }
    addOption(category, c);
  }

  /**
   * Adds a configurer's controls to the tab named {@code category}, creating
   * the tab when it does not exist yet.
   *
   * @param category tab name, or null for the general tab
   * @param c        the configurer to add
   */
  public synchronized void addOption(String category, Configurer c) {
    if (category == null) {
      category = Resources.getString("Prefs.general_tab"); //$NON-NLS-1$
    }

    JPanel pan = null;

    int i = optionsTab.indexOfTab(category);
    if (i == -1) { // No match — create a new tab for this category.
      pan = new JPanel();
      pan.setLayout(new BoxLayout(pan, BoxLayout.Y_AXIS));
      optionsTab.addTab(category, pan);
    }
    else {
      pan = (JPanel) optionsTab.getComponentAt(i);
    }

    // While save() iterates `options`, defer to `extras` to avoid a
    // ConcurrentModificationException; save() merges them afterwards.
    if (iterating) {
      extras.add(c);
    }
    else {
      options.add(c);
    }

    final Box b = Box.createHorizontalBox();
    b.add(c.getControls());
    b.add(Box.createHorizontalGlue());
    pan.add(b);
  }

  /**
   * Snapshots every configurer's current value (for later rollback) and
   * freezes them so edits in the dialog don't fire updates prematurely.
   */
  private synchronized void storeValues() {
    savedValues.clear();

    for (Configurer c : options) {
      c.setFrozen(true);
      if (c.getValue() != null) {
        savedValues.put(c, c.getValue());
      }
    }
  }

  /** Restores the snapshotted values, unfreezes, and hides the dialog. */
  protected synchronized void cancel() {
    for (Configurer c : options) {
      Object o = savedValues.get(c);
      if (o != null) {
        c.setValue(o);
      }
      c.setFrozen(false);
    }
    dialog.setVisible(false);
  }

  /**
   * Fires updates for configurers whose value changed since storeValues(),
   * unfreezes all, persists every registered {@link Prefs}, and hides the
   * dialog. Options registered during the iteration are merged in afterwards.
   */
  protected synchronized void save() {
    iterating = true;
    for (Configurer c : options) {
      // Changed when: previously null and now non-null, or values differ.
      if ((savedValues.get(c) == null && c.getValue() != null) || (savedValues.get(c) != null && !savedValues.get(c).equals(c.getValue()))) {
        c.fireUpdate();
      }
      c.setFrozen(false);
    }
    iterating = false;
    options.addAll(extras);
    extras.clear();
    write();
    dialog.setVisible(false);
  }

  /**
   * @return a (cached) action that snapshots current values and shows the
   *         preferences dialog, centered horizontally at the top of the screen
   */
  public Action getEditAction() {
    if (editAction == null) {
      editAction = new AbstractAction(
          Resources.getString("Prefs.edit_preferences")) { //$NON-NLS-1$
        private static final long serialVersionUID = 1L;

        @Override
        public void actionPerformed(ActionEvent e) {
          storeValues();
          dialog.pack();
          final Dimension d = Toolkit.getDefaultToolkit().getScreenSize();
          dialog.setLocation(d.width / 2 - dialog.getWidth() / 2, 0);
          dialog.setVisible(true);
        }
      };
      // FIXME: setting mnemonic from first letter could cause collisions in
      // some languages
      editAction.putValue(Action.MNEMONIC_KEY, (int) Resources.getString("Prefs.edit_preferences").charAt(0));
    }
    return editAction;
  }

  /** Persists every registered preference store, reporting write failures. */
  public void write() {
    for (Prefs p : prefs) {
      try {
        p.save();
      }
      catch (IOException e) {
        WriteErrorDialog.error(e, p.getFile());
      }
    }
  }

  /** Flushes preferences to disk. */
  public void close() {
    write();
  }
}
package main.swapship.screens; import main.swapship.GameInfo.Level; import main.swapship.GameInfo.ShipType; import main.swapship.SwapShipGame; import main.swapship.common.Constants; import com.badlogic.gdx.scenes.scene2d.Actor; import com.badlogic.gdx.scenes.scene2d.ui.ButtonGroup; import com.badlogic.gdx.scenes.scene2d.ui.ImageButton; import com.badlogic.gdx.scenes.scene2d.ui.Label; import com.badlogic.gdx.scenes.scene2d.ui.Table; import com.badlogic.gdx.scenes.scene2d.ui.TextButton; import com.badlogic.gdx.scenes.scene2d.utils.ChangeListener; /** * Screen used to select the ship and level to play * @author Brandon * */ public class SelectScreen extends AbstractScreen { private ButtonGroup shipGroup; private ButtonGroup levelGroup; public SelectScreen(final SwapShipGame game) { super(game); } @Override public void setUp() { Table table = new Table(); table.setFillParent(true); stage.addActor(table); table.debug(); setUpShipChoice(table); table.row(); setUpLevelChoice(table); table.row(); final TextButton go = new TextButton("Go!", game.skin, Constants.UI.OPTION_BUTTON); table.add(go); go.addListener(new ChangeListener() { @Override public void changed(ChangeEvent event, Actor actor) { // Make sure they've checked something if (game.gameInfo.level != Level.UNCHOSEN && game.gameInfo.ship != ShipType.UNCHOSEN) { game.setScreen(new GameScreen(game)); return; } go.setChecked(false); } }); // Set the defaults as checked shipGroup.getButtons().get(0).setChecked(true); levelGroup.getButtons().get(0).setChecked(true); } // Sets up the buttons used to select the ship private void setUpShipChoice(Table table) { // The label for ships final Label shipLabel = new Label("SHIPS", game.skin, Constants.UI.TITLE_LABEL); table.add(shipLabel); table.row(); // Button to choose the artemis ship final ImageButton artemisButton = new ImageButton(game.skin, "artemis-button"); table.add(artemisButton); artemisButton.addListener(new ChangeListener() { @Override public void 
changed(ChangeEvent event, Actor actor) { game.gameInfo.ship = ShipType.ARTEMIS; } }); // Button to choose the ganymede ship final ImageButton ganymedeButton = new ImageButton(game.skin, "ganymede-button"); table.add(ganymedeButton); ganymedeButton.addListener(new ChangeListener() { @Override public void changed(ChangeEvent event, Actor actor) { game.gameInfo.ship = ShipType.GANAMEDE; } }); // only allow one to be selected shipGroup = new ButtonGroup(artemisButton, ganymedeButton); } private void setUpLevelChoice(Table table) { // Add a label for the options final Label levelLabel = new Label("LEVELS", game.skin, Constants.UI.TITLE_LABEL); table.add(levelLabel); table.row(); final TextButton l1 = new TextButton("Level 1", game.skin, Constants.UI.OPTION_BUTTON); table.add(l1); l1.addListener(new ChangeListener() { @Override public void changed(ChangeEvent event, Actor actor) { game.gameInfo.level = Level.ONE; } }); table.row(); final TextButton l2 = new TextButton("Level 2", game.skin, Constants.UI.OPTION_BUTTON); table.add(l2); l2.addListener(new ChangeListener() { @Override public void changed(ChangeEvent event, Actor actor) { game.gameInfo.level = Level.TWO; } }); table.row(); final TextButton l3 = new TextButton("Level 3", game.skin, Constants.UI.OPTION_BUTTON); table.add(l3); l3.addListener(new ChangeListener() { @Override public void changed(ChangeEvent event, Actor actor) { game.gameInfo.level = Level.THREE; } }); table.row(); final TextButton infinite = new TextButton("Infinite", game.skin, Constants.UI.OPTION_BUTTON); table.add(infinite); infinite.addListener(new ChangeListener() { @Override public void changed(ChangeEvent event, Actor actor) { game.gameInfo.level = Level.INFINITE; } }); levelGroup = new ButtonGroup(l1, l2, l3, infinite); } }
package ly.count.android.api; import android.app.Activity; import android.os.Bundle; public class CountlyActivity extends Activity { /** Called when the activity is first created. */ @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.main); Countly.sharedInstance().init(this, "http://YOUR_SERVER", "YOUR_APP_KEY"); } @Override public void onStart() { super.onStart(); Countly.sharedInstance().onStart(); } @Override public void onStop() { Countly.sharedInstance().onStop(); super.onStop(); } }
package fi.hu.cs.titokone; /** This data class contains all the register information. The registers are accessed via index numbers which are defined in the TTK91CPU interface. */ public class Registers { /** This field contains the register values. 0..4 are CU registers, 5..12 are general-purpose registers. */ private int[] registerValues; /** Returns the value of a register. The index numbers are available from the TTK91CPU interface. @param registerId Identifying number of the register. @return Value stored in the register. */ public int getRegister(int registerId) {} /** Returns the value of a register. @param registerName The name of the register. @return Value stored in the register. */ public int getRegister(String registerName) {} /** Sets a new value to a register. @param registerId The identifying number of the register. @param value New value to set. */ public void setRegister(int registerId, int value) {} /** Sets a new value to a register. @param registerName The name of the register. @param value New value to set. */ public void setRegister(String registerName, int value) {} }
package BlueTurtle.parsers;

import java.io.File;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;

import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;

import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;

import BlueTurtle.warnings.FindBugsWarning;
import BlueTurtle.warnings.Warning;

/**
 * This class can be used to parse a FindBugs XML output file.
 *
 * @author BlueTurtle.
 *
 */
public class FindBugsXMLParser extends XMLParser {

	/**
	 * Parse a FindBugs report file.
	 *
	 * @param xmlFilePath
	 *            the location of the FindBugs report.
	 * @param categoryInfo
	 *            maps a FindBugs rule name ("type" attribute) to its
	 *            classification string.
	 * @return a list of FindBugs warnings.
	 */
	@Override
	public List<Warning> parseFile(String xmlFilePath, HashMap<String, String> categoryInfo) {
		// List to store the warnings.
		List<Warning> findBugsWarnings = new LinkedList<Warning>();
		try {
			// Instantiate things that are necessary for the parser.
			File inputFile = new File(xmlFilePath);
			DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
			DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
			// Parse the file.
			Document doc = dBuilder.parse(inputFile);
			// Normalize the elements of the document.
			doc.getDocumentElement().normalize();
			// Get the list of file path of the project.
			NodeList pathsList = doc.getElementsByTagName("Project");
			// Fallback: document-wide SrcDir nodes; replaced below when a
			// <Project> element is present.
			NodeList srcList = doc.getElementsByTagName("SrcDir");
			String pathFront = "";
			if (pathsList != null && pathsList.getLength() > 0) {
				Element pathElement = (Element) pathsList.item(0);
				srcList = pathElement.getElementsByTagName("SrcDir");
			}
			// Get all list of files where there are warnings.
			NodeList nList = doc.getElementsByTagName("file");
			for (int i = 0; i < nList.getLength(); i++) {
				// Get the file from the list.
				Node file = nList.item(i);
				if (file.getNodeType() == Node.ELEMENT_NODE) {
					// Convert the node to an element.
					Element fileElement = (Element) file;
					// Get the class name where the warning is from.
					String className = fileElement.getAttribute("classname");
					// split the class name into a string array.
					String [] classArray = className.split("\\.");
					// the last one is the class name (strip the package).
					className = classArray[classArray.length - 1];
					// Simple-name based file name, e.g. "Foo.java".
					String fileN = className + ".java";
					// NOTE(review): no-op — fileN never contains a separator
					// at this point, so the substring returns it unchanged.
					fileN = fileN.substring(fileN.lastIndexOf(File.separatorChar) + 1, fileN.length());
					// get the file path from the file name.
					// NOTE(review): resolves against the current working
					// directory, not the source tree — confirm this is intended.
					String filePath = new File(fileN).getCanonicalPath();
					// Get the class name where the warning is from.
					// NOTE(review): re-reads the same "classname" attribute
					// that was read above; kept here un-split.
					String classN = fileElement.getAttribute("classname");
					// replace the . with \\ in the file name.
					// NOTE(review): backslash separators are Windows-specific;
					// this will not produce valid paths on Unix — confirm.
					String cN = classN.replaceAll("\\.", "\\\\");
					// First <SrcDir> text is used as the path prefix.
					pathFront = srcList.item(0).getTextContent();
					// concatenate the source path with the class name.
					String fileConcate = pathFront + "\\" + cN + ".java";
					// get the absoluteFilePath.
					String absoluteFilePath = new File(fileConcate).getAbsolutePath();
					// Get the name of the file where the warning is from.
					String fileName = filePath.substring(filePath.lastIndexOf(File.separatorChar) + 1, filePath.length());
					// Get all the warnings.
					NodeList warningList = fileElement.getElementsByTagName("BugInstance");
					for (int j = 0; j < warningList.getLength(); j++) {
						// Get the warning from the list of warnings.
						Node warning = warningList.item(j);
						if (warning.getNodeType() == Node.ELEMENT_NODE) {
							// Convert the node to an element.
							Element warningElement = (Element) warning;
							// message of warning
							String message = warningElement.getAttribute("message");
							// category of warning
							String category = warningElement.getAttribute("category");
							// priority of warning
							String priority = warningElement.getAttribute("priority");
							// line number where the warning is located.
							int line = Integer.parseInt(warningElement.getAttribute("lineNumber"));
							// Get the category of the warning.
							String ruleName = warningElement.getAttribute("type");
							// May be null when the rule is not in categoryInfo.
							String classification = categoryInfo.get(ruleName);
							FindBugsWarning fbw = new FindBugsWarning(filePath, fileName, line, message, category, priority, ruleName, classification);
							fbw.setAbsoluteFilePath(absoluteFilePath);
							// Add warning to the list of warnings.
							findBugsWarnings.add(fbw);
						}
					}
				}
			}
		} catch (Exception e) {
			// NOTE(review): broad catch; any parse/IO error yields a partial
			// (possibly empty) result with only a stack trace as diagnostics.
			e.printStackTrace();
		}
		return findBugsWarnings;
	}
}
package asa.controller; import asa.bean.Appointment; import asa.service.AppointmentService; import org.springframework.web.bind.annotation.*; import org.springframework.http.MediaType; import org.springframework.beans.factory.annotation.Autowired; import java.util.*; @RestController @RequestMapping("/appointment") public class AppointmentController { @Autowired private AppointmentService appointmentService; @RequestMapping(method = RequestMethod.GET) public Collection<Object> view(){ System.out.println("Inside AppointmentController"); Collection<Object> list= appointmentService.get(); System.out.println("Length :"list.size()); return list; } @RequestMapping(method = RequestMethod.POST, consumes = MediaType.APPLICATION_JSON_VALUE) public @ResponseBody Map<String,String> add(@RequestBody Appointment appointment){ Map<String,String> map=new HashMap<>(); if(appointmentService.add(appointment)){ map.put("result","success"); } else{ map.put("result","failed"); } return map; } /* @RequestMapping(value="/test",method = RequestMethod.GET) public @ResponseBody Map<String,String> addTest(){ System.out.println("Inside AppointmentController POST TEST"); Map<String,String> map=new HashMap<>(); if(appointmentService.add(appointment)){ map.put("result","success"); } else{ map.put("result","failed"); } return map; } */ }
package Application; import java.sql.Connection; import java.sql.ResultSet; import java.sql.Statement; import org.springframework.stereotype.Controller; //import org.springframework.web.bind.annotation.ModelAttribute; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.ResponseBody; import com.heroku.sdk.jdbc.DatabaseUrl; import Logic.CombatLogic; import Logic.MarketLogic; import Models.Player; import dataTransfer.LoginData; import dataTransfer.LoginValidation; import dataTransfer.RegisterData; //import Models.RegisterUserCredentials; import dataTransfer.ValidationCodes; import dto.CombatObject; import dto.MarketObject; @Controller public class HomeController { @RequestMapping("/") public String home() { return "index"; } @RequestMapping("/loginPage") public String loginPage() { return "users/login"; } @RequestMapping("/townPage") public String townPage() { return "town"; } @RequestMapping("/registerPage") public String registerPage() { return "users/register"; } @RequestMapping("/worldPage") public String worldPage() { return "world"; } @RequestMapping("/battlePage") public String battlePage() { return "battle"; } @RequestMapping(value = "/login", method = RequestMethod.POST) public @ResponseBody LoginValidation loginGet(@RequestBody LoginData data) { LoginValidation code = new LoginValidation(); try { Connection connection = DatabaseUrl.extract().getConnection(); Statement stmtCount = connection.createStatement(); Statement stmt = connection.createStatement(); ResultSet user = stmtCount.executeQuery("SELECT count(*) FROM Users WHERE username = '" + data.userName + "' AND password = '" + data.password + "'"); while (user.next()) { if(user.getInt(1) == 0) { code.IncorrectUsernameOrPassword = true; } } if(!code.IncorrectUsernameOrPassword) { ResultSet userInfo = stmt.executeQuery("SELECT * 
FROM Users WHERE username = '" + data.userName + "' AND password = '" + data.password + "'"); while (userInfo.next()) { if(userInfo.getInt(1) != 0) { code.userId = userInfo.getInt(1); code.userName = userInfo.getString(2); } } } } catch (Exception e) { code.databaseError = true; } return code; // return new ResponseEntity<String>(HttpStatus.ACCEPTED); } @RequestMapping(value = "/register", method = RequestMethod.POST) public @ResponseBody ValidationCodes registerTransfer(@RequestBody RegisterData data) { ValidationCodes code = new ValidationCodes(); try { Connection connection = DatabaseUrl.extract().getConnection(); Statement stmtUser = connection.createStatement(); Statement stmtEmail = connection.createStatement(); Statement stmtInsert = connection.createStatement(); Statement stmtCharacterCreate = connection.createStatement(); Statement stmtUserData = connection.createStatement(); Statement stmtCharacterData = connection.createStatement(); Statement stmtCharacterRelate = connection.createStatement(); ResultSet userName = stmtUser.executeQuery("SELECT count(*) FROM Users WHERE username = '" + data.userName + "'"); ResultSet email = stmtEmail.executeQuery("SELECT count(*) FROM Users WHERE email = '" + data.email + "'"); while (userName.next()) { if(userName.getInt(1) != 0) { code.UsernameTaken = true; } } while (email.next()) { if(email.getInt(1) != 0) { code.EmailTaken = true; } } String newPassword = data.password; String newConfirmPassword = data.confirmPassword; if(!newPassword.equals(newConfirmPassword)) { code.PasswordMismatch = true; } if(!code.UsernameTaken && !code.EmailTaken && !code.PasswordMismatch) { stmtInsert.execute("Insert into Users (username, email, password) values ('" + data.userName + "','" + data.email + "','" + data.password + "')"); stmtCharacterCreate.execute("Insert into Characters (charactername, attack, maxhealth, currenthealth) values ('" + data.userName + "', 10, 10, 10)"); ResultSet userData = stmtUserData.executeQuery("SELECT 
userID FROM users WHERE username = '" + data.userName + "'"); ResultSet characterData = stmtCharacterData.executeQuery("SELECT characterID FROM Characters WHERE charactername = '" + data.userName + "'"); int userID = -1; int characterID = -1; while (userData.next()) { userID = userData.getInt(1); } while (characterData.next()) { characterID = characterData.getInt(1); } stmtCharacterRelate.execute("Insert into userCharacters (userid, characterid) values (" + userID + ", " + characterID + ")"); } } catch (Exception e) { code.databaseError = true; } return code; } // @RequestMapping(value = "/validate", method = RequestMethod.POST) // public String registerPost(@ModelAttribute("registerUserCredentials") RegisterUserCredentials userCredentials, BindingResult result) { // try { // URI dbUri = new URI("postgres://fghhopulwiaynq:OfvO_N_KLpwGqwbOZY7wEwKfL_@ec2-54-221-201-165.compute-1.amazonaws.com:5432/df02650vnkne80"); // String dbusername = dbUri.getUserInfo().split(":")[0]; // String dbpassword = dbUri.getUserInfo().split(":")[1]; // String dbUrl = "jdbc:postgresql://" + dbUri.getHost() + ':' + dbUri.getPort() + dbUri.getPath(); // System.out.println(userCredentials.getUserName()); // System.out.println(userCredentials.getEmail()); // System.out.println(userCredentials.getPassword()); // System.out.println(userCredentials.getConfirmPassword()); // //"Select * from Users // return "redirect:login"; // } catch (Exception e) { // return registerTransfer(); // @ModelAttribute("registerUserCredentials") // public RegisterUserCredentials getRegisterUserCredentials() { // return new RegisterUserCredentials(); @RequestMapping(value = "/buy/Attack", method = RequestMethod.POST) public @ResponseBody MarketObject increaseAttack(@RequestBody MarketObject data) { data = MarketLogic.buyAttack(data); try { Connection connection = DatabaseUrl.extract().getConnection(); Statement stmtUser = connection.createStatement(); stmtUser.execute("UPDATE Characters SET zeni = '" + 
data.getPlayer().getMoney() + "' WHERE characterid = '" + data.getPlayer().getCharacterID() + "'"); stmtUser.execute("UPDATE Characters SET attack = '" + data.getPlayer().getAttack() + "' WHERE characterid = '" + data.getPlayer().getCharacterID() + "'"); } catch (Exception e) { System.out.println(e); } return data; } @RequestMapping(value = "/buy/Health", method = RequestMethod.POST) public @ResponseBody MarketObject increaseHealth(@RequestBody MarketObject data) { data = MarketLogic.buyHealth(data); try { Connection connection = DatabaseUrl.extract().getConnection(); Statement stmtUser = connection.createStatement(); stmtUser.execute("UPDATE Characters SET zeni = '" + data.getPlayer().getMoney() + "' WHERE characterid = '" + data.getPlayer().getCharacterID() + "'"); stmtUser.execute("UPDATE Characters SET currenthealth = '" + data.getPlayer().getCurrentHealth() + "' WHERE characterid = '" + data.getPlayer().getCharacterID() + "'"); stmtUser.execute("UPDATE Characters SET maxhealth = '" + data.getPlayer().getMaxHealth() + "' WHERE characterid = '" + data.getPlayer().getCharacterID() + "'"); } catch (Exception e) { System.out.println(e); } return data; } @RequestMapping(value = "/buy/Potion", method = RequestMethod.POST) public @ResponseBody MarketObject addPotions(@RequestBody MarketObject data) { data = MarketLogic.buyPotion(data); try { Connection connection = DatabaseUrl.extract().getConnection(); Statement stmtUser = connection.createStatement(); stmtUser.execute("UPDATE Characters SET zeni = '" + data.getPlayer().getMoney() + "' WHERE characterid = '" + data.getPlayer().getCharacterID() + "'"); stmtUser.execute("UPDATE Characters SET healItems = '" + data.getPlayer().getHealItems() + "' WHERE characterid = '" + data.getPlayer().getCharacterID() + "'"); } catch (Exception e) { System.out.println(e); } return data; } @RequestMapping(value = "/combat/pve/Attack", method = RequestMethod.POST) public @ResponseBody CombatObject increaseAttack(@RequestBody 
CombatObject data) { data = CombatLogic.playerAttack(data); data = CombatLogic.enemyAttack(data); if(data.getThePlayer().getCurrentHealth() > 0 && data.getTheEnemy().getHealth() <= 0) { data = CombatLogic.survivingPlayer(data); } if(data.getThePlayer().getCurrentHealth() <= 0) { data = CombatLogic.dieingPlayer(data); } try { Connection connection = DatabaseUrl.extract().getConnection(); Statement stmtUser = connection.createStatement(); stmtUser.execute("UPDATE Characters SET currenthealth = '" + data.getThePlayer().getCurrentHealth() + "' WHERE characterid = '" + data.getThePlayer().getCharacterID() + "'"); stmtUser.execute("UPDATE Characters SET zeni = '" + data.getThePlayer().getMoney() + "' WHERE characterid = '" + data.getThePlayer().getCharacterID() + "'"); } catch (Exception e) { System.out.println(e); } return data; } @RequestMapping(value = "/combat/pve/Heal", method = RequestMethod.POST) public @ResponseBody CombatObject increaseHealth(@RequestBody CombatObject data) { data = CombatLogic.healPlayer(data); data = CombatLogic.enemyAttack(data); if(data.getThePlayer().getCurrentHealth() > 0 && data.getTheEnemy().getHealth() <= 0) { data = CombatLogic.survivingPlayer(data); } if(data.getThePlayer().getCurrentHealth() <= 0) { data = CombatLogic.dieingPlayer(data); } try { Connection connection = DatabaseUrl.extract().getConnection(); Statement stmtUser = connection.createStatement(); //Table that relates characters to users: usercharacters //Columns: characterid chactername health attack healingitems zeni //DataTypes: int string int int int int //ResultSet character = stmtUser.executeQuery("SELECT health FROM Characters WHERE characterid = '" + data.getThePlayer().getCharacterID() + "'"); stmtUser.execute("UPDATE Characters SET currenthealth = '" + data.getThePlayer().getCurrentHealth() + "' WHERE characterid = '" + data.getThePlayer().getCharacterID() + "'"); stmtUser.execute("UPDATE Characters SET healthitems = '" + data.getThePlayer().getHealItems() + "' 
WHERE characterid = '" + data.getThePlayer().getCharacterID() + "'"); stmtUser.execute("UPDATE Characters SET zeni = '" + data.getThePlayer().getMoney() + "' WHERE characterid = '" + data.getThePlayer().getCharacterID() + "'"); //while(character.next()) //int health = character.getInt(1); } catch (Exception e) { System.out.println(e); } return data; } @RequestMapping(value = "/combat/pve", method = RequestMethod.POST) public @ResponseBody CombatObject startCombat(@RequestBody Player data) { CombatObject combat = new CombatObject(data, CombatLogic.createEnemy(data)); return combat; } }
package au.com.southsky.jfreesane; import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import java.io.IOException; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.List; import java.util.Set; import java.util.logging.Logger; import java.util.stream.Collectors; /** * This class represents a SANE device option. An option may be active or inactive (see * {@link #isActive}). Active options may be read (see {@link #isReadable}) and modified (see * {@link #isWriteable}). * * <p> * Options have a type (see {@link #getType}), in order to read or write an option's value, you must * call the getter or setter method corresponding to the option's type. For example, for an option * of type {@link OptionValueType#STRING}, you will call {@link #setStringValue} or * {@link #getStringValue}. * * <p> * Options may have constraints that impose restrictions on the range of values the option may take. * Constraints have a type which may be obtained using {@link #getConstraintType}. You may read the * actual constraints by calling the constraint getter method corresponding to the constraint type. * For example, an option of type {@link OptionValueType#INT} may have a constraint of type * {@link OptionValueConstraintType#VALUE_LIST_CONSTRAINT}, which you may obtain by calling * {@link #getIntegerValueListConstraint}. 
* * @author James Ring (sjr@jdns.org) */ public final class SaneOption { private static final Logger logger = Logger.getLogger(SaneOption.class.getName()); private enum OptionAction implements SaneEnum { GET_VALUE(0), SET_VALUE(1), SET_AUTO(2); private final int actionNo; OptionAction(int actionNo) { this.actionNo = actionNo; } @Override public int getWireValue() { return actionNo; } } /** * Instances of this enum are returned by {@link SaneOption#getUnits} indicating what units, if * any, the value has. */ public enum OptionUnits implements SaneEnum { /** * The option has no units. */ UNIT_NONE(0), /** * The option unit is pixels. */ UNIT_PIXEL(1), /** * The option unit is bits. */ UNIT_BIT(2), /** * The option unit is millimeters. */ UNIT_MM(3), /** * The option unit is dots per inch. */ UNIT_DPI(4), /** * The option unit is a percentage. */ UNIT_PERCENT(5), /** * The option unit is microseconds. */ UNIT_MICROSECOND(6); private final int wireValue; OptionUnits(int wireValue) { this.wireValue = wireValue; } @Override public int getWireValue() { return wireValue; } } /** * Represents the information that the SANE daemon returns about the effect of modifying an * option. */ public enum OptionWriteInfo implements SaneEnum { /** * The value passed to SANE was accepted, but the SANE daemon has chosen a different value than * the one specified. */ INEXACT(1), /** * Setting the option may have resulted in changes to other options and the client should * re-read options whose values it needs. */ RELOAD_OPTIONS(2), /** * Setting the option may have caused a parameter set by the user to have changed. 
*/ RELOAD_PARAMETERS(4); private final int wireValue; OptionWriteInfo(int wireValue) { this.wireValue = wireValue; } @Override public int getWireValue() { return wireValue; } } private final SaneDevice device; private final int optionNumber; private final SaneOptionDescriptor descriptor; SaneOption(SaneDevice device, int optionNumber, SaneOptionDescriptor descriptor) { this.device = device; this.optionNumber = optionNumber; this.descriptor = descriptor; if (descriptor.getGroup() != null && getValueType() != OptionValueType.GROUP) { descriptor.getGroup().addOption(this); } } static List<SaneOption> optionsFor(SaneDevice device) throws IOException { Preconditions.checkState(device.isOpen(), "you must open() the device first"); SaneSession session = device.getSession(); SaneInputStream inputStream = session.getInputStream(); SaneOutputStream outputStream = session.getOutputStream(); // send SANE_NET_GET_OPTION_DESCRIPTORS outputStream.write(SaneRpcCode.SANE_NET_GET_OPTION_DESCRIPTORS); // select device outputStream.write(device.getHandle().getHandle()); outputStream.flush(); // first word of response is number of option entries int length = inputStream.readWord().integerValue() - 1; if (length <= 0) { return ImmutableList.of(); } ImmutableList.Builder<SaneOption> options = new ImmutableList.Builder<>(); for (int i = 0; i <= length; i++) { SaneOption option = SaneOption.fromStream(inputStream, device, i); if (option.getValueType() == OptionValueType.GROUP) { device.addOptionGroup(option.getGroup()); } else { // The first option always has an empty name. Sometimes we see options after the first option // that have empty names. 
Elsewhere we assume that option names are unique, so this option is // omitted if (i > 0 && Strings.isNullOrEmpty(option.getName())) { logger.fine(String.format("ignoring null or empty option with id %d: %s", i, option)); continue; } if (option.isWriteable() && option.isHardSelectable()) { // This option is invalid, it can't be both hardware and software selectable. continue; } if (option.isWriteable() && !option.isReadable()) { // Can't have a write-only option. continue; } if (!(option.isWriteable() || option.isReadable() || option.isHardSelectable())) { // Useless option, skip it. continue; } options.add(option); } } return options.build(); } private static SaneOption fromStream( SaneInputStream inputStream, SaneDevice device, int optionNumber) throws IOException { return new SaneOption(device, optionNumber, inputStream.readOptionDescriptor()); } public SaneDevice getDevice() { return device; } public String getName() { return descriptor.getName(); } public String getTitle() { return descriptor.getTitle(); } public String getDescription() { return descriptor.getDescription(); } public OptionGroup getGroup() { return descriptor.getGroup(); } public OptionValueType getType() { return descriptor.getValueType(); } public OptionUnits getUnits() { return descriptor.getUnits(); } public int getSize() { return descriptor.getSize(); } public int getValueCount() { switch (descriptor.getValueType()) { case BOOLEAN: case STRING: return 1; case INT: case FIXED: return getSize() / SaneWord.SIZE_IN_BYTES; case BUTTON: case GROUP: throw new IllegalStateException( "Option type '" + descriptor.getValueType() + "' has no value count"); default: throw new IllegalStateException("Option type '" + descriptor.getValueType() + "' unknown"); } } /** * Returns {@code true} if this option has a constraint other than * {@link OptionValueConstraintType#NO_CONSTRAINT}. 
*/ public boolean isConstrained() { return !OptionValueConstraintType.NO_CONSTRAINT.equals(descriptor.getConstraintType()); } public OptionValueConstraintType getConstraintType() { return descriptor.getConstraintType(); } public RangeConstraint getRangeConstraints() { return descriptor.getRangeConstraints(); } public List<String> getStringConstraints() { return descriptor.getStringConstraints(); } public List<SaneWord> getWordConstraints() { return descriptor.getWordConstraints(); } public List<Integer> getIntegerValueListConstraint() { return descriptor .getWordConstraints() .stream() .map(SaneWord::integerValue) .collect(Collectors.toList()); } public List<Double> getFixedValueListConstraint() { return descriptor .getWordConstraints() .stream() .map(SaneWord::fixedPrecisionValue) .collect(Collectors.toList()); } @Override public String toString() { return String.format( "Option: %s, %s, value type: %s, units: %s", descriptor.getName(), descriptor.getTitle(), descriptor.getValueType(), descriptor.getUnits()); } private OptionValueType getValueType() { return descriptor.getValueType(); } /** * Reads the current boolean value option. This option must be of type * {@link OptionValueType#BOOLEAN}. * * @throws IOException if a problem occurred while talking to SANE */ public boolean getBooleanValue() throws IOException, SaneException { Preconditions.checkState(getValueType() == OptionValueType.BOOLEAN, "option is not a boolean"); Preconditions.checkState(getValueCount() == 1, "option is a boolean array, not boolean"); ControlOptionResult result = readOption(); return SaneWord.fromBytes(result.getValue()).integerValue() != 0; } /** * Reads the current Integer value option. We do not cache value from previous get or set * operations so each get involves a round trip to the server. 
* * TODO: consider caching the returned value for "fast read" later * * @return the value of the option * @throws IOException if a problem occurred while talking to SANE */ public int getIntegerValue() throws IOException, SaneException { // check for type agreement Preconditions.checkState(getValueType() == OptionValueType.INT, "option is not an integer"); Preconditions.checkState(getValueCount() == 1, "option is an integer array, not integer"); // Send RCP corresponding to: // SANE_Status sane_control_option (SANE_Handle h, SANE_Int n, // SANE_Action a, void *v, // SANE_Int * i); ControlOptionResult result = readOption(); Preconditions.checkState(result.getType() == OptionValueType.INT); Preconditions.checkState( result.getValueSize() == SaneWord.SIZE_IN_BYTES, "unexpected value size " + result.getValueSize() + ", expecting " + SaneWord.SIZE_IN_BYTES); // TODO: handle resource authorisation // TODO: check status -- may have to reload options!! return SaneWord.fromBytes(result.getValue()).integerValue(); // the // value } public List<Integer> getIntegerArrayValue() throws IOException, SaneException { ControlOptionResult result = readOption(); Preconditions.checkState(result.getType() == OptionValueType.INT); List<Integer> values = Lists.newArrayList(); for (int i = 0; i < result.getValueSize(); i += SaneWord.SIZE_IN_BYTES) { values.add(SaneWord.fromBytes(result.getValue(), i).integerValue()); } return values; } /** * Returns the value of this option interpreted as a LATIN-1 (SANE's default encoding) encoded * string. 
* * @throws IOException if a problem occurs reading the value from the SANE backend */ public String getStringValue() throws IOException, SaneException { return getStringValue(StandardCharsets.ISO_8859_1); } public String getStringValue(Charset encoding) throws IOException, SaneException { Preconditions.checkState(getValueType() == OptionValueType.STRING, "option is not a string"); ControlOptionResult result = readOption(); byte[] value = result.getValue(); // string is null terminated int length = 0; while (length < value.length && value[length] != 0) { length++; } // trim the trailing null character return new String(result.getValue(), 0, length, encoding); } public double getFixedValue() throws IOException, SaneException { Preconditions.checkState( getValueType() == OptionValueType.FIXED, "option is not of fixed precision type"); ControlOptionResult result = readOption(); return SaneWord.fromBytes(result.getValue()).fixedPrecisionValue(); } public List<Double> getFixedArrayValue() throws IOException, SaneException { ControlOptionResult result = readOption(); Preconditions.checkState(result.getType() == OptionValueType.FIXED); List<Double> values = new ArrayList<>(); for (int i = 0; i < result.getValueSize(); i += SaneWord.SIZE_IN_BYTES) { values.add(SaneWord.fromBytes(result.getValue(), i).fixedPrecisionValue()); } return values; } private ControlOptionResult readOption() throws IOException, SaneException { // check that this option is readable Preconditions.checkState(isReadable(), "option is not readable"); Preconditions.checkState(isActive(), "option is not active"); SaneOutputStream out = device.getSession().getOutputStream(); out.write(SaneRpcCode.SANE_NET_CONTROL_OPTION); out.write(device.getHandle().getHandle()); out.write(SaneWord.forInt(optionNumber)); out.write(OptionAction.GET_VALUE); out.write(getValueType()); out.write(SaneWord.forInt(getSize())); int elementCount; switch (getValueType()) { case BOOLEAN: case FIXED: case INT: elementCount = 
getSize() / SaneWord.SIZE_IN_BYTES; break; case STRING: elementCount = getSize(); break; default: throw new IllegalStateException("Unsupported type " + getValueType()); } out.write(SaneWord.forInt(elementCount)); for (int i = 0; i < getSize(); i++) { out.write(0); // why do we need to provide a value // buffer in an RPC call ??? } out.flush(); // read result return ControlOptionResult.fromSession(device.getSession()); } /** * Sets the value of the current option to the supplied boolean value. Option value must be of * boolean type. SANE may ignore your preference, so if you need to ensure the value has been set * correctly, you should examine the return value of this method. * * @return the value that the option now has according to SANE */ public boolean setBooleanValue(boolean value) throws IOException, SaneException { ControlOptionResult result = writeOption(SaneWord.forInt(value ? 1 : 0)); Preconditions.checkState(result.getType() == OptionValueType.BOOLEAN); return SaneWord.fromBytes(result.getValue()).integerValue() != 0; } public void setButtonValue() throws IOException, SaneException { writeButtonOption(); } /** * Sets the value of the current option to the supplied fixed-precision value. Option value must * be of fixed-precision type. 
*/ public double setFixedValue(double value) throws IOException, SaneException { Preconditions.checkArgument( value >= -32768 && value <= 32767.9999, "value %d is out of range", value); SaneWord wordValue = SaneWord.forFixedPrecision(value); ControlOptionResult result = writeOption(wordValue); Preconditions.checkState( result.getType() == OptionValueType.FIXED, "setFixedValue is not appropriate for option of type " + result.getType()); return SaneWord.fromBytes(result.getValue()).fixedPrecisionValue(); } private static SaneWord fixedValueToWord(double value) { Preconditions.checkArgument( value >= -32768 && value <= 32767.9999, "value %f is out of range", value); return SaneWord.forFixedPrecision(value); } /** * Sets the value of the current option to the supplied list of fixed-precision values. Option * value must be of fixed-precision type and {@link #getValueCount} must be more than 1. */ public List<Double> setFixedValue(List<Double> value) throws IOException, SaneException { List<SaneWord> wordValues = value.stream().map(SaneOption::fixedValueToWord).collect(Collectors.toList()); ControlOptionResult result = writeWordListOption(wordValues); List<Double> newValues = Lists.newArrayListWithCapacity(result.getValueSize() / SaneWord.SIZE_IN_BYTES); for (int i = 0; i < result.getValueSize(); i += SaneWord.SIZE_IN_BYTES) { newValues.add(SaneWord.fromBytes(result.getValue(), i).fixedPrecisionValue()); } return newValues; } public String setStringValue(String newValue) throws IOException, SaneException { // check for type agreement Preconditions.checkState(getValueType() == OptionValueType.STRING); Preconditions.checkState(getValueCount() == 1); Preconditions.checkState(isWriteable()); // new value must be STRICTLY less than size(), as SANE includes the // trailing null // that we will add later in its size Preconditions.checkState( newValue.length() < getSize(), "string value '" + newValue + "' (length=" + newValue.length() + ") exceeds maximum size of " + (getSize() 
- 1) + " byte(s) for option " + getName()); ControlOptionResult result = writeOption(newValue); Preconditions.checkState(result.getType() == OptionValueType.STRING); // TODO(sjr): maybe this should go somewhere common? String optionValueFromServer = new String(result.getValue(), 0, result.getValueSize() - 1, StandardCharsets.ISO_8859_1); Preconditions.checkState( result.getInfo().contains(OptionWriteInfo.INEXACT) ^ newValue.equals(optionValueFromServer), "new option value does not match when it should"); return optionValueFromServer; } /** * Set the value of the current option to the supplied value. Option value must be of integer type * * TODO: consider caching the returned value for "fast read" later * * @param newValue for the option * @return the value actually set * @throws IOException */ public int setIntegerValue(int newValue) throws IOException, SaneException { Preconditions.checkState(getValueCount() == 1, "option is an array"); // check that this option is readable Preconditions.checkState(isWriteable()); // Send RPC corresponding to: // SANE_Status sane_control_option (SANE_Handle h, SANE_Int n, // SANE_Action a, void *v, // SANE_Int * i); ControlOptionResult result = writeOption(ImmutableList.of(newValue)); Preconditions.checkState(result.getType() == OptionValueType.INT); Preconditions.checkState(result.getValueSize() == SaneWord.SIZE_IN_BYTES); return SaneWord.fromBytes(result.getValue()).integerValue(); } public List<Integer> setIntegerValue(List<Integer> newValue) throws IOException, SaneException { ControlOptionResult result = writeOption(newValue); List<Integer> newValues = Lists.newArrayListWithCapacity(result.getValueSize() / SaneWord.SIZE_IN_BYTES); for (int i = 0; i < result.getValueSize(); i += SaneWord.SIZE_IN_BYTES) { newValues.add(SaneWord.fromBytes(result.getValue(), i).integerValue()); } return newValues; } private ControlOptionResult writeWordListOption(List<SaneWord> value) throws IOException, SaneException { 
Preconditions.checkState(isWriteable(), "option is not writeable"); Preconditions.checkState(isActive(), "option is not active"); SaneOutputStream out = device.getSession().getOutputStream(); out.write(SaneRpcCode.SANE_NET_CONTROL_OPTION); out.write(device.getHandle().getHandle()); out.write(SaneWord.forInt(optionNumber)); out.write(SaneWord.forInt(OptionAction.SET_VALUE.getWireValue())); out.write(getValueType()); out.write(SaneWord.forInt(value.size() * SaneWord.SIZE_IN_BYTES)); // Write the pointer to the words out.write(SaneWord.forInt(value.size())); for (SaneWord element : value) { // and the words themselves out.write(element); } out.flush(); ControlOptionResult result = handleWriteResponse(); if (result.getInfo().contains(OptionWriteInfo.RELOAD_OPTIONS) || result.getInfo().contains(OptionWriteInfo.RELOAD_PARAMETERS)) { device.invalidateOptions(); device.listOptions(); } return result; } private ControlOptionResult writeOption(String value) throws IOException, SaneException { Preconditions.checkState(getValueType() == OptionValueType.STRING); SaneOutputStream out = device.getSession().getOutputStream(); out.write(SaneRpcCode.SANE_NET_CONTROL_OPTION); out.write(SaneWord.forInt(device.getHandle().getHandle().integerValue())); out.write(SaneWord.forInt(this.optionNumber)); out.write(SaneWord.forInt(OptionAction.SET_VALUE.getWireValue())); out.write(getValueType()); // even if the string is empty, we still write out at least 1 byte (null // terminator) out.write(SaneWord.forInt(value.length() + 1)); // write(String) takes care of writing the size for us out.write(value); out.flush(); return handleWriteResponse(); } private ControlOptionResult writeOption(SaneWord word) throws IOException, SaneException { return writeWordListOption(ImmutableList.of(word)); } private ControlOptionResult writeOption(List<Integer> value) throws IOException, SaneException { Preconditions.checkState(isActive(), "option %s is not active", getName()); 
Preconditions.checkState(isWriteable(), "option %s is not writeable", getName()); Preconditions.checkState( getValueType() == OptionValueType.INT, "option %s is %s-typed, you must use the corresponding methods to set the value", getName(), getValueType()); SaneOutputStream out = device.getSession().getOutputStream(); out.write(SaneRpcCode.SANE_NET_CONTROL_OPTION); out.write(device.getHandle().getHandle()); out.write(SaneWord.forInt(optionNumber)); out.write(OptionAction.SET_VALUE); out.write(getValueType()); out.write(SaneWord.forInt(getSize())); out.write(SaneWord.forInt(value.size())); for (Integer element : value) { out.write(SaneWord.forInt(element)); } out.flush(); return handleWriteResponse(); } private ControlOptionResult writeButtonOption() throws IOException, SaneException { Preconditions.checkState(getValueType() == OptionValueType.BUTTON); SaneOutputStream out = device.getSession().getOutputStream(); out.write(SaneRpcCode.SANE_NET_CONTROL_OPTION); out.write(device.getHandle().getHandle()); out.write(SaneWord.forInt(this.optionNumber)); out.write(OptionAction.SET_VALUE); out.write(getValueType()); out.write(SaneWord.forInt(0)); out.write(SaneWord.forInt(0)); // only one value follows out.flush(); return handleWriteResponse(); } private ControlOptionResult handleWriteResponse() throws IOException, SaneException { ControlOptionResult result = ControlOptionResult.fromSession(device.getSession()); if (result.getInfo().contains(OptionWriteInfo.RELOAD_OPTIONS)) { device.invalidateOptions(); } return result; } public boolean isActive() { return !descriptor.getOptionCapabilities().contains(OptionCapability.INACTIVE); } public boolean isReadable() { return descriptor.getOptionCapabilities().contains(OptionCapability.SOFT_DETECT); } public boolean isWriteable() { return descriptor.getOptionCapabilities().contains(OptionCapability.SOFT_SELECT); } boolean isHardSelectable() { return descriptor.getOptionCapabilities().contains(OptionCapability.HARD_SELECT); } /** * 
Represents the result of calling {@code SANE_NET_CONTROL_OPTION} (RPC code 5). */ private static final class ControlOptionResult { private final int status; private final Set<OptionWriteInfo> info; private final OptionValueType type; private final int valueSize; private final byte[] value; private final String resource; private ControlOptionResult( int status, int info, OptionValueType type, int valueSize, byte[] value, String resource) { this.status = status; this.info = SaneEnums.enumSet(OptionWriteInfo.class, info); this.type = type; this.valueSize = valueSize; this.value = value; this.resource = resource; } private static ControlOptionResult fromSession(SaneSession session) throws IOException, SaneException { SaneInputStream stream = session.getInputStream(); // Expected record format: // SANE_Status status // SANE_Word info // SANE_Word value_type // SANE_Word value_size // void *value // SANE_String *resource // See http://sane-project.org/html/doc017.html#s5.2.6. SaneWord status = stream.readWord(); if (status.integerValue() != 0) { throw SaneException.fromStatusWord(status); } int info = stream.readWord().integerValue(); OptionValueType type = SaneEnums.valueOf(OptionValueType.class, stream.readWord().integerValue()); int valueSize = stream.readWord().integerValue(); // read the pointer int pointer = stream.readWord().integerValue(); byte[] value = null; if (pointer != 0) { value = new byte[valueSize]; if (ByteStreams.readAllBytes(stream, value) != valueSize) { throw new IOException("truncated read while getting value"); } } String resource = stream.readString(); if (!resource.isEmpty()) { if (!session.authorize(resource)) { throw new SaneException(SaneStatus.STATUS_ACCESS_DENIED); } status = stream.readWord(); info = stream.readWord().integerValue(); type = SaneEnums.valueOf(OptionValueType.class, stream.readWord().integerValue()); valueSize = stream.readWord().integerValue(); // read the pointer pointer = stream.readWord().integerValue(); value = null; if 
(pointer != 0) { value = new byte[valueSize]; if (stream.read(value) != valueSize) { throw new IOException("truncated read while getting value"); } } if (status.integerValue() != 0) { throw SaneException.fromStatusWord(status); } } return new ControlOptionResult(status.integerValue(), info, type, valueSize, value, resource); } public int getStatus() { return status; } public Set<OptionWriteInfo> getInfo() { return Sets.immutableEnumSet(info); } public OptionValueType getType() { return type; } public int getValueSize() { return valueSize; } public byte[] getValue() { return value; } public String getResource() { return resource; } } }
import java.net.Socket; import java.net.InetSocketAddress; import java.net.ServerSocket; import java.net.InetAddress; import java.rmi.Naming; import java.rmi.RemoteException; import java.rmi.registry.LocateRegistry; import java.rmi.registry.Registry; import java.rmi.server.UnicastRemoteObject; import java.rmi.server.RMISocketFactory; import java.rmi.server.RMIClientSocketFactory; import java.rmi.server.RMIServerSocketFactory; class RmiServer { private interface MyClientSocketFactory extends RMIClientSocketFactory{InetSocketAddress getHost();} private static RMIClientSocketFactory getClientFactory(final InetSocketAddress host, final Integer clientTimeout) { if (clientTimeout == null){ // By default Java knows how to establish connection with rmi server, so in most cases this is the best choice! return null; } else if (clientTimeout.equals(0)){ // Teaching rmi client how to establish connection to this specific rmi server. return RMISocketFactory.getDefaultSocketFactory(); } // This fancy way is used to itemize socket details for client, like dead-server timeout, SO_LINGER, etc. 
return new MyClientSocketFactory(){ private InetSocketAddress thisHost = host; private int timeoutMillis = clientTimeout; public InetSocketAddress getHost(){return thisHost;} @Override public Socket createSocket(String host, int port) throws java.io.IOException { Socket socket = new Socket(); socket.setSoTimeout( timeoutMillis ); // socket.setSoLinger( false, 0 ); socket.connect( new InetSocketAddress( host, port ), timeoutMillis ); return socket; } @Override public boolean equals(Object obj) { return (getClass() == obj.getClass() && ((MyClientSocketFactory)obj).getHost().equals(thisHost)); } }; } private interface MyServerSocketFactory extends RMIServerSocketFactory{InetAddress getHost();} private static RMIServerSocketFactory getServerFactory(final InetAddress host) { return new MyServerSocketFactory(){ private InetAddress thisHost = host; public InetAddress getHost(){return thisHost;} @Override public ServerSocket createServerSocket(int port) throws java.io.IOException { return new java.net.ServerSocket(port, 0, thisHost); } @Override public boolean equals(Object obj) { return (getClass() == obj.getClass() && ((MyServerSocketFactory)obj).getHost().equals(thisHost)); } }; } public static void main(String argv[]) throws Exception { System.out.println("Starting server..."); ServerSocket welcomeSocket = null; String serverRmiSeviceHost = "localhost"; String serverRmiSevicePort = "7777"; String serverRmiSeviceName = "RmiTest"; if (argv.length==2){ serverRmiSeviceHost = argv[0]; serverRmiSevicePort = argv[1]; }else if (argv.length==1){ serverRmiSevicePort = argv[0]; } final InetAddress host = InetAddress.getByName(serverRmiSeviceHost); final Integer port = Integer.parseInt(serverRmiSevicePort); final InetSocketAddress clientSocketAddr = new InetSocketAddress(serverRmiSeviceHost, port); final Integer clientTimeout = null; // "null" - use client's socket; "0" - create default socket; or set any timeout in ms to create a fancy socket. 
System.out.println("Starting RMI Service on [" + "rmi://" + serverRmiSeviceHost + ":" + serverRmiSevicePort + "/" + serverRmiSeviceName + "]..."); RmiService rmiService = null; Registry rmiRegistry = null; if (argv.length==2){ if ( serverRmiSeviceHost.equals("localhost") ){ System.out.println("Assembling to run all packets through single port on all interfaces " + serverRmiSevicePort); rmiRegistry = LocateRegistry.createRegistry(port); // Registry Port. rmiService = new RmiService(port); // Remote Objects Port. } else { System.out.println("Assembling to run all packets through single port on single interface " + serverRmiSeviceHost + ":" + serverRmiSevicePort); RMIClientSocketFactory clientSocketFactory = getClientFactory(clientSocketAddr, clientTimeout); System.setProperty("java.rmi.server.hostname", serverRmiSeviceHost); // Remote Objects network interface. RMIServerSocketFactory serverSocketFactory = getServerFactory(host); // Registry network interface. rmiRegistry = LocateRegistry.createRegistry(port, null, serverSocketFactory); // Registry Port. rmiService = new RmiService(port, // Remote Objects Port. clientSocketFactory, serverSocketFactory); // For non-UnicastRemoteObject use // if (UnicastRemoteObject.unexportObject(rmiService, true)) // rmiService = (RmiService)UnicastRemoteObject.exportObject(new RmiService(), port, clientSocketFactory, serverSocketFactory); } } else if (argv.length==1){ System.out.println("Remote Object will be returned on a random port between 0 and 65535 and random interface."); rmiRegistry = LocateRegistry.createRegistry(port); // Registry Port is where we connect to. rmiService = new RmiService(); } rmiRegistry.rebind(serverRmiSeviceName, rmiService); System.out.println("RMI Service is ready."); } }
package be.isach.samaritan.stream;

import com.google.common.util.concurrent.Futures;
import com.mb3364.twitch.api.Twitch;
import com.mb3364.twitch.api.handlers.StreamResponseHandler;
import com.mb3364.twitch.api.models.Channel;
import com.mb3364.twitch.api.models.Stream;
import net.dv8tion.jda.JDA;
import net.dv8tion.jda.entities.TextChannel;
import pro.beam.api.BeamAPI;
import pro.beam.api.resource.BeamUser;
import pro.beam.api.resource.channel.BeamChannel;
import pro.beam.api.response.users.UserSearchResponse;
import pro.beam.api.services.impl.ChannelsService;
import pro.beam.api.services.impl.UsersService;
import pro.beam.api.util.ResponseHandler;

import java.util.HashMap;
import java.util.Map;
import java.util.TimerTask;

/**
 * Stream module that polls the Beam API for channel online status and
 * announces channels that transition from offline to online.
 */
public class BeamModule extends StreamModule {

    public BeamModule(JDA jda, StreamData streamData) {
        super(jda, streamData);
        initChannels();
    }

    /**
     * Seeds {@code streamersMap} with the channel's current online status
     * (asynchronously; the map is updated when the search callback fires).
     */
    @Override
    public void initialize(String channel) {
        BeamAPI beam = new BeamAPI();
        Futures.addCallback(beam.use(UsersService.class).search(channel), new ResponseHandler<UserSearchResponse>() {
            @Override
            public void onSuccess(UserSearchResponse response) {
                if (response.size() > 0) {
                    BeamChannel beamChannel = response.get(0).channel;
                    // A missing channel counts as OFFLINE.
                    Status currentStatus = beamChannel == null
                            ? Status.OFFLINE
                            : beamChannel.online ? Status.ONLINE : Status.OFFLINE;
                    streamersMap.put(channel, currentStatus);
                }
            }
        });
    }

    /**
     * Re-checks the channel's status and broadcasts when it goes live
     * (offline -> online transition).
     */
    @Override
    void check(String channel) {
        BeamAPI beam = new BeamAPI();
        Futures.addCallback(beam.use(UsersService.class).search(channel), new ResponseHandler<UserSearchResponse>() {
            @Override
            public void onSuccess(UserSearchResponse response) {
                if (response.size() > 0) {
                    Status lastStatus = streamersMap.get(channel);
                    BeamChannel beamChannel = response.get(0).channel;
                    Status currentStatus = beamChannel == null
                            ? Status.OFFLINE
                            : beamChannel.online ? Status.ONLINE : Status.OFFLINE;
                    System.out.println("BEAM | Checking " + channel + ": " + currentStatus);
                    // Build the StreamerChannel only on the offline->online transition.
                    // (The original dereferenced beamChannel.name unconditionally and
                    // threw NPE whenever beamChannel was null.) ONLINE implies
                    // beamChannel != null by the status computation above.
                    if (currentStatus == Status.ONLINE && lastStatus == Status.OFFLINE) {
                        broadcastLive(new StreamerChannel(null, beamChannel.name, channel));
                    }
                    streamersMap.put(channel, currentStatus);
                }
            }
        });
    }

    /** Announces the live channel in the configured Discord channel (French copy). */
    @Override
    void broadcastLive(StreamerChannel channel) {
        sendMessage("Hey! " + channel.getChannelName() + " est en live sur Beam !");
        sendMessage(channel.getDisplayName() + " | https://beam.pro/" + channel.getChannelName());
    }
}
package application;

import javafx.application.Platform;
import javafx.concurrent.Task;
import javafx.fxml.FXML;
import javafx.fxml.Initializable;
import javafx.scene.control.*;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.stage.DirectoryChooser;
import org.apache.commons.net.ftp.FTPClient;
import org.apache.commons.net.ftp.FTPFile;
import org.apache.commons.net.ftp.FTPReply;

import java.io.*;
import java.net.InetAddress;
import java.net.URL;
import java.text.SimpleDateFormat;
import java.time.Duration;
import java.util.*;

/**
 * JavaFX controller for a small FTP sync tool: logs in to an FTP server,
 * shows the remote file tree, and downloads files newer than a configurable
 * age limit into a chosen local directory. Already-downloaded files are
 * remembered in a serialized ledger under the {@code settings} directory so
 * they are not fetched twice.
 */
public class MainController implements Initializable {

    // UI elements (injected from FXML)
    @FXML private TreeView<String> fileTreeView;
    @FXML private TextField addressTF;
    @FXML private TextField usernameTF;
    @FXML private PasswordField passwordPF;
    @FXML private Button loginBT;
    @FXML private TextArea logTA;
    @FXML private Button outputDirBT;
    @FXML private Label outputDirLB;
    @FXML private TextField fileAgeLimitTF;

    // state
    private FTPClient client = new FTPClient();
    private InetAddress address;
    private DirectoryChooser directoryChooser = new DirectoryChooser();
    private File outputDir;
    private SimpleDateFormat ft = new SimpleDateFormat("HH:mm:ss MMM d");
    private File settingsDir;
    // names of files already downloaded, persisted via Java serialization
    private HashSet<String> syncedFileLedger = new HashSet<>();
    private String syncedFileLedgerName = "syncedFiles.dat";
    private long defaultDaysLimit = 6;
    private long daysLimit;
    private boolean outputDirSelected;
    private boolean isDownloadingFiles;
    private Image dirIcon = new Image(getClass().getResourceAsStream("/icons/directory_icon.png"));

    public void initialize(URL location, ResourceBundle resources) {
        // make the settings directory (and load the ledger if it already exists)
        settingsDir = new File("settings");
        if (settingsDir.exists()) {
            loadSyncedFileLedger();
        } else {
            settingsDir.mkdir();
            System.out.println("Settings folder not found, recreating.");
        }

        // set up directory chooser
        directoryChooser.setTitle("Select Download Location");

        // set default age limit
        fileAgeLimitTF.setText(String.valueOf(defaultDaysLimit));
        daysLimit = defaultDaysLimit;

        // set up file tree
        TreeItem<String> rootItem = new TreeItem<>("Root: /", new ImageView(dirIcon));
        rootItem.setExpanded(true);
        fileTreeView.setRoot(rootItem);

        // NOTE(review): hard-coded test credentials — remove before release;
        // this leaks a real server address and username into the UI.
        addressTF.setText("ftp.vaultfortress.net");
        usernameTF.setText("ross@vaultfortress.net");
    } // initialize()

    /** Validates the login form and attempts to connect to the FTP server. */
    @FXML
    void loginButtonClick_OnAction() {
        System.out.println("Login Click");
        logTA.setText("Logging in...");

        // reset the file tree
        TreeItem<String> rootItem = new TreeItem<>("Root: /", new ImageView(dirIcon));
        rootItem.setExpanded(true);
        fileTreeView.setRoot(rootItem);

        // validate inputs before attempting to connect
        if (addressTF.getCharacters().length() < 3) {
            logTA.appendText("\nError, enter ftp server address.");
            return;
        }
        if (usernameTF.getCharacters().length() < 1) {
            logTA.appendText("\nError, enter Username.");
            return;
        }
        if (passwordPF.getCharacters().length() < 1) {
            logTA.appendText("\nError, enter Password.");
            return;
        }
        if (!outputDirSelected) {
            logTA.appendText("\nError, please select an output directory for downloaded files.");
            return;
        }

        connectToServer(this.addressTF.getText(), this.usernameTF.getText(), this.passwordPF.getText());
    } // loginButtonClick()

    /** Lets the user pick the local download directory. */
    @FXML
    void outputDirBT_OnAction() {
        outputDir = directoryChooser.showDialog(null);
        if (outputDir != null) {
            outputDirSelected = true;
            outputDirLB.setText(outputDir.getAbsolutePath());
        }
    } // outputDirBT_OnAction()

    /** Starts a background sync of eligible remote files into the output directory. */
    @FXML
    void syncFilesBT_OnAction() {
        if (!client.isConnected()) {
            logTA.appendText("\nError, not logged in. Cannot sync files.");
            return;
        }

        isDownloadingFiles = true;

        // update the days limit; fall back to the default instead of letting a
        // NumberFormatException kill the event handler on bad input
        try {
            daysLimit = Long.parseLong(fileAgeLimitTF.getText().trim());
        } catch (NumberFormatException nfe) {
            daysLimit = defaultDaysLimit;
            logTA.appendText("\nInvalid age limit, using default of " + defaultDaysLimit + " days.");
        }

        // download in a background thread so the UI stays responsive
        Task<Boolean> downloadTask = new Task<Boolean>() {
            @Override
            protected Boolean call() {
                Platform.runLater(() -> logTA.appendText("\nStarting to download files . . ."));
                try {
                    syncFiles(client);
                    disconnectServer();
                } catch (Exception ex) {
                    Platform.runLater(() -> logTA.appendText("\nError Downloading files!"));
                    disconnectServer();
                    isDownloadingFiles = false;
                    ex.printStackTrace();
                    return false;
                }
                Platform.runLater(() -> logTA.appendText("\nFinished downloading files."));
                isDownloadingFiles = false;
                saveSyncedFileLedger();
                return true;
            } // call()
        };
        new Thread(downloadTask).start();
    } // syncFilesBT_OnAction()

    /** Loads the set of already-synced file names from disk, if present. */
    @SuppressWarnings("unchecked")
    private void loadSyncedFileLedger() {
        System.out.println("Loading synced file ledger.");
        // try-with-resources: the original leaked both streams
        try (ObjectInputStream ois = new ObjectInputStream(
                new FileInputStream(settingsDir.getName() + File.separator + syncedFileLedgerName))) {
            syncedFileLedger = (HashSet<String>) ois.readObject();
        } catch (FileNotFoundException fnf) {
            System.out.println("Error, syncedFileLedger file not found.");
        } catch (Exception e) {
            e.printStackTrace();
        }
    } // loadSyncedFileLedger()

    /** Persists the set of already-synced file names to disk. */
    private void saveSyncedFileLedger() {
        System.out.println("Saving synced file ledger.");
        // try-with-resources: the original never closed (or flushed) the stream,
        // so the ledger could be written truncated or not at all
        try (ObjectOutputStream oos = new ObjectOutputStream(
                new FileOutputStream(settingsDir.getName() + File.separator + syncedFileLedgerName))) {
            oos.writeObject(syncedFileLedger);
        } catch (Exception e) {
            System.out.println("Error saving synced file ledger");
            e.printStackTrace();
        }
    } // saveSyncedFileLedger()

    /** Connects and logs in to the FTP server, then populates the file tree. */
    private void connectToServer(String serverAddress, String username, String password) {
        try {
            this.address = InetAddress.getByName(serverAddress);
            client.connect(address, 21);
            client.login(username, password);
            if (client.isConnected()) {
                System.out.print(client.getReplyString());
                if (!FTPReply.isPositiveCompletion(client.getReplyCode())) {
                    logTA.appendText("\nError: " + client.getReplyString());
                    client.disconnect();
                    return;
                }
                // passive mode: the client opens the data connection (NAT/firewall friendly)
                client.enterLocalPassiveMode();
                logTA.appendText("\n" + client.getReplyString());
                buildFileTree(fileTreeView.getRoot(), client);
            }
        } catch (Exception e) {
            System.out.println("Error: " + e.getMessage());
            logTA.appendText("\nError: " + e.getMessage());
            disconnectServer();
        }
    } // connectToServer()

    /** Disconnects from the FTP server, logging any failure. */
    private void disconnectServer() {
        try {
            client.disconnect();
            logTA.appendText("\nDisconnecting.");
            System.out.println("Disconnecting");
        } catch (Exception e) {
            logTA.appendText("\nError Disconnecting.");
        }
    } // disconnectServer()

    /**
     * Recursively walks the remote directory tree, adding files and
     * directories under the given tree node. Hidden entries (names starting
     * with '.') are skipped.
     */
    private void buildFileTree(TreeItem treeNode, FTPClient client) throws Exception {
        // files in the current remote working directory
        FTPFile[] files = client.listFiles("", FTPFile::isFile);
        for (FTPFile file : files) {
            if (!file.getName().startsWith(".")) {
                System.out.println("File: " + file.getName());
                treeNode.getChildren().add(new TreeItem<>(
                        file.getName() + " | " + ft.format(file.getTimestamp().getTime())));
            }
        } // for

        // recurse into sub-directories
        FTPFile[] directories = client.listDirectories();
        for (FTPFile dir : directories) {
            if (!dir.getName().startsWith(".")) {
                client.changeWorkingDirectory(dir.getName());
                TreeItem newDir = new TreeItem<>(dir.getName(), new ImageView(dirIcon));
                treeNode.getChildren().add(newDir);
                logTA.appendText("\nDiscovering Files in: " + client.printWorkingDirectory());
                System.out.println("Discovering Files in: " + client.printWorkingDirectory());
                buildFileTree(newDir, client);
                // back to parent once finished in this directory
                client.changeToParentDirectory();
            }
        } // for
    } // buildFileTree()

    /**
     * Recursively downloads every remote file that is newer than the age limit
     * and not already recorded in the synced-file ledger.
     */
    private void syncFiles(FTPClient client) throws Exception {
        long daysOld;
        String pwd;

        FTPFile[] files = client.listFiles("", FTPFile::isFile);
        if (files.length > 0) {
            pwd = client.printWorkingDirectory();
            System.out.println("Downloading Files in: " + client.printWorkingDirectory());
            Platform.runLater(() -> logTA.appendText("\nDownloading Files in: " + pwd));
        }

        for (FTPFile file : files) {
            if (!file.getName().startsWith(".")) {
                daysOld = Duration.between(file.getTimestamp().toInstant(),
                        Calendar.getInstance().toInstant()).toDays();
                System.out.println("File is " + daysOld + " days old");
                // download only recent files that have not been synced before
                if (daysOld < daysLimit && !syncedFileLedger.contains(file.getName())) {
                    System.out.println("Downloading: " + file.getName());
                    Platform.runLater(() -> logTA.appendText("\nDownloading: " + file.getName()));
                    boolean ok;
                    // try-with-resources: the original leaked the stream on failure
                    try (OutputStream out = new FileOutputStream(
                            outputDir.getAbsoluteFile() + File.separator + file.getName())) {
                        ok = client.retrieveFile(file.getName(), out);
                    }
                    // only mark the file synced when the transfer actually succeeded,
                    // otherwise a failed download would never be retried
                    if (ok) {
                        syncedFileLedger.add(file.getName());
                    } else {
                        Platform.runLater(() -> logTA.appendText("\nFailed to download: " + file.getName()));
                    }
                }
            }
        } // for

        // recurse into sub-directories
        FTPFile[] directories = client.listDirectories();
        for (FTPFile dir : directories) {
            if (!dir.getName().startsWith(".")) {
                client.changeWorkingDirectory(dir.getName());
                syncFiles(client);
                client.changeToParentDirectory();
            }
        } // for
    } // syncFiles()
} // class
package cn.gyyx.gy4j.cache.redis; import org.apache.log4j.Logger; /** * @Author : east.Fu * @Description : * @Date : Created in 2017/10/5 21:54 */ public class RedisHelper { private static final Logger LOG = Logger.getLogger(RedisHelper.class); public boolean clear(){ return null; } }
package cn.com.lemon.framework; import java.util.List; import cn.com.lemon.framework.model.BaseEntity; /** * The <code>Dao</code> interface is the basic data persistence api. * <p> * Support the third utilities(e.g.<code>Mybatis</code>) implement this * interface * * @see List * @author shellpo shih * @version 1.0 */ interface Dao<T extends BaseEntity> { T get(int id); List<T> list(String... values); List<T> listAll(); List<T> listPage(int start, int size, String... values); int count(String... values); int save(T t); boolean delete(int id); boolean update(T t); }
package co.mewf.humpty.servlets; import co.mewf.humpty.Pipeline; import co.mewf.humpty.config.HumptyBootstrap; import java.io.IOException; import java.io.PrintWriter; import java.io.Reader; import javax.servlet.Filter; import javax.servlet.FilterChain; import javax.servlet.FilterConfig; import javax.servlet.ServletException; import javax.servlet.ServletRequest; import javax.servlet.ServletResponse; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.commons.io.IOUtils; public class HumptyFilter implements Filter { private Pipeline pipeline; @Override public void init(FilterConfig filterConfig) throws ServletException { pipeline = createPipeline(); } protected Pipeline createPipeline() { return new HumptyBootstrap().createPipeline(); } @Override public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException { HttpServletRequest httpRequest = (HttpServletRequest) request; String assetUri = httpRequest.getServletPath(); HttpServletResponse httpResponse = ((HttpServletResponse) response); httpResponse.setContentType(httpRequest.getRequestURI().endsWith(".js") ? "text/javascript" : "text/css"); assetUri = assetUri.substring(assetUri.lastIndexOf('/') + 1); Reader processedAsset = pipeline.process(assetUri, httpRequest, httpResponse); PrintWriter responseWriter = httpResponse.getWriter(); IOUtils.copy(processedAsset, responseWriter); processedAsset.close(); } @Override public void destroy() {} }
package com.amee.domain.sheet;

import com.amee.base.utils.UidGen;
import com.amee.base.utils.XMLUtils;
import com.amee.domain.ValueType;
import org.json.JSONException;
import org.json.JSONObject;
import org.w3c.dom.Document;
import org.w3c.dom.Element;

import java.io.Serializable;
import java.util.Date;

/**
 * A single cell of a {@link Sheet}, addressed by its {@link Column} and
 * {@link Row}, holding a typed value and a UID.
 * <p/>
 * Constructors purposefully verbose to ensure identity in collections after add() operations.
 * <p/>
 * TODO: convert this to use generics.
 */
public class Cell implements Serializable, Comparable {

    private Sheet sheet;
    private Column column;
    private Row row;
    private ValueType valueType = ValueType.TEXT;
    private String uid = "";
    private Object value = "";

    private Cell() {
        super();
    }

    public Cell(Column column, Row row) {
        this();
        setSheet(column.getSheet());
        setColumn(column);
        setRow(row);
        setValue("");
        setValueType();
        setUid(UidGen.INSTANCE_12.getUid());
        add();
    }

    public Cell(Column column, Row row, Object value) {
        this();
        setSheet(column.getSheet());
        setColumn(column);
        setRow(row);
        setValue(value);
        setValueType();
        setUid(UidGen.INSTANCE_12.getUid());
        add();
    }

    public Cell(Column column, Row row, Object value, ValueType valueType) {
        this();
        setSheet(column.getSheet());
        setColumn(column);
        setRow(row);
        setValue(value);
        setUid(UidGen.INSTANCE_12.getUid());
        setValueType(valueType);
        add();
    }

    public Cell(Column column, Row row, Object value, String uid) {
        this();
        setSheet(column.getSheet());
        setColumn(column);
        setRow(row);
        setValue(value);
        setValueType();
        setUid(uid);
        add();
    }

    public Cell(Column column, Row row, Object value, String uid, ValueType valueType) {
        this();
        setSheet(column.getSheet());
        setColumn(column);
        setRow(row);
        setValue(value);
        setUid(uid);
        setValueType(valueType);
        add();
    }

    /** JSON representation: uid, type, column name and string value. */
    public JSONObject getJSONObject() throws JSONException {
        JSONObject obj = new JSONObject();
        obj.put("uid", getUid());
        obj.put("type", getValueType());
        obj.put("name", getColumn().getName());
        obj.put("value", getValueAsString());
        return obj;
    }

    /** XML representation mirroring {@link #getJSONObject()}. */
    public Element getElement(Document document) {
        Element cellElement = document.createElement("Cell");
        // BUG FIX: the uid attribute was previously set to the ValueType
        // (copy-paste of the line below) instead of the cell's UID.
        cellElement.setAttribute("uid", getUid());
        cellElement.setAttribute("type", getValueType().toString());
        cellElement.appendChild(XMLUtils.getElement(document, "Name", getColumn().getName()));
        cellElement.appendChild(XMLUtils.getElement(document, "Value", getValueAsString()));
        return cellElement;
    }

    /** Registers this cell with its column, row and sheet. */
    public void add() {
        getColumn().add(this);
        getRow().add(this);
        getSheet().add(this);
    }

    /** Unregisters this cell from its column and sheet prior to removal. */
    public void beforeRemove() {
        getColumn().remove(this);
        getSheet().remove(this);
    }

    /** Cells are equal when identical or when their UIDs match (case-insensitive). */
    public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof Cell)) return false;
        Cell other = (Cell) o;
        return getUid().equalsIgnoreCase(other.getUid());
    }

    /**
     * Compares by value according to this cell's {@link ValueType}; unparseable
     * values are pushed to the end of the list. Inverted when the column sort
     * order is descending. Assumes {@code o} is a Cell of the same ValueType.
     */
    public int compareTo(Object o) {
        if (this.equals(o)) return 0;
        Cell other = (Cell) o;
        int result = 0; // default to 'greater than' to push stuff to the end of the list
        switch (getValueType()) {
            case INTEGER: {
                Integer a = null;
                Integer b = null;
                try {
                    a = Integer.valueOf(getValueAsString());
                } catch (NumberFormatException e) {
                    // swallow, push non integer to the end of list
                    result = 1;
                }
                try {
                    b = Integer.valueOf(other.getValueAsString());
                } catch (NumberFormatException e) {
                    // swallow, push non integer to the end of list
                    result = -1;
                }
                if ((a != null) && (b != null)) {
                    result = a.compareTo(b);
                } else if ((a == null) && (b == null)) {
                    // neither value is an Integer so just compare text values instead
                    result = getValueAsString().compareToIgnoreCase(other.getValueAsString());
                }
                break;
            }
            case BOOLEAN: {
                Boolean a = Boolean.valueOf(getValueAsString());
                Boolean b = Boolean.valueOf(other.getValueAsString());
                result = a.compareTo(b);
                break;
            }
            case DOUBLE: {
                Double a = null;
                Double b = null;
                try {
                    a = Double.valueOf(getValueAsString());
                } catch (NumberFormatException e) {
                    // swallow, push non double to the end of list
                    result = 1;
                }
                try {
                    b = Double.valueOf(other.getValueAsString());
                } catch (NumberFormatException e) {
                    // swallow, push non double to the end of list
                    result = -1;
                }
                if ((a != null) && (b != null)) {
                    result = a.compareTo(b);
                } else if ((a == null) && (b == null)) {
                    // neither value is a Double so just compare text values instead
                    result = getValueAsString().compareToIgnoreCase(other.getValueAsString());
                }
                break;
            }
            case DATE: {
                Date a = getValueAsDate();
                Date b = other.getValueAsDate();
                if (a == null) {
                    // push non Date to the end of list
                    result = 1;
                }
                if (b == null) {
                    // push non Date to the end of list
                    result = -1;
                }
                if ((a != null) && (b != null)) {
                    result = a.compareTo(b);
                } else if ((a == null) && (b == null)) {
                    // neither value is a Date so just compare text values instead
                    result = getValueAsString().compareToIgnoreCase(other.getValueAsString());
                }
                break;
            }
            case TEXT:
            case UNSPECIFIED:
            default: {
                result = getValueAsString().compareToIgnoreCase(other.getValueAsString());
                break;
            }
        }
        if (getColumn().getSortOrder().equals(SortOrder.DESC)) {
            return result * -1; // invert
        } else {
            return result;
        }
    }

    public int hashCode() {
        return getUid().hashCode();
    }

    public String toString() {
        return getValueAsString();
    }

    public Sheet getSheet() {
        return sheet;
    }

    protected void setSheet(Sheet sheet) {
        if (sheet != null) {
            this.sheet = sheet;
        }
    }

    public Column getColumn() {
        return column;
    }

    protected void setColumn(Column column) {
        if (column != null) {
            this.column = column;
        }
    }

    public Row getRow() {
        return row;
    }

    protected void setRow(Row row) {
        if (row != null) {
            this.row = row;
        }
    }

    public ValueType getValueType() {
        return valueType;
    }

    protected void setValueType(ValueType valueType) {
        if (valueType != null) {
            this.valueType = valueType;
        }
    }

    /** Derives the ValueType from the current value. */
    protected void setValueType() {
        setValueType(ValueType.getValueType(getValue()));
    }

    public String getUid() {
        return uid;
    }

    protected void setUid(String uid) {
        if (uid == null) {
            uid = "";
        }
        this.uid = uid;
    }

    public Object getValue() {
        return value;
    }

    public String getValueAsString() {
        return value.toString();
    }

    /** @return the value as an Integer, or 0 when it cannot be parsed */
    public Integer getValueAsInteger() {
        if (value instanceof Integer) {
            return (Integer) value;
        } else {
            try {
                return Integer.valueOf(value.toString());
            } catch (NumberFormatException e) {
                return 0;
            }
        }
    }

    /** @return the value as a Double, or 0.0 when it cannot be parsed */
    public Double getValueAsDouble() {
        if (value instanceof Double) {
            return (Double) value;
        } else {
            try {
                return Double.valueOf(value.toString());
            } catch (NumberFormatException e) {
                return Double.valueOf(0.0);
            }
        }
    }

    public Boolean getValueAsBoolean() {
        if (value instanceof Boolean) {
            return (Boolean) value;
        } else {
            return Boolean.valueOf(value.toString());
        }
    }

    /** @return the value as a Date, or null when it is not a Date instance */
    public Date getValueAsDate() {
        if (value instanceof Date) {
            return (Date) value;
        } else {
            // TODO: could try parsing string value
            return null;
        }
    }

    /** Null values are normalised to the empty string with TEXT type. */
    public void setValue(Object value) {
        if (value == null) {
            value = "";
            setValueType(ValueType.TEXT);
        }
        this.value = value;
    }
}
package com.amee.platform.science;

import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.Transformer;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.joda.time.DateTime;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;

/**
 * A class representing a series of {@link DataPoint} values. Provides various mathematical operations
 * such as plus, subtract and multiply along with the crucial integrate method.
 */
public class DataSeries {

    private final Log log = LogFactory.getLog("science");

    private List<DataPoint> dataPoints = new ArrayList<DataPoint>();

    // These dates will be used to define a query window on the series.
    private DateTime seriesStartDate;
    private DateTime seriesEndDate;

    /**
     * Construct an empty series.
     */
    public DataSeries() {
        this(new ArrayList<DataPoint>());
    }

    /**
     * Construct a series from the list of {@link DataPoint} values.
     *
     * @param dataPoints - the list of {@link DataPoint} values
     */
    public DataSeries(List<DataPoint> dataPoints) {
        this.dataPoints = new ArrayList<DataPoint>(dataPoints);
    }

    /**
     * A copy constructor.
     *
     * @param dataSeries to copy
     */
    protected DataSeries(DataSeries dataSeries) {
        for (DataPoint dataPoint : dataSeries.dataPoints) {
            this.addDataPoint(new DataPoint(dataPoint));
        }
    }

    /**
     * Return a copy of this object.
     *
     * @return a copy
     */
    public DataSeries copy() {
        return new DataSeries(this);
    }

    public String toString() {
        try {
            return getJSONObject().toString();
        } catch (JSONException e) {
            throw new RuntimeException("Caught JSONException: " + e.getMessage(), e);
        }
    }

    public JSONObject getJSONObject() throws JSONException {
        JSONObject obj = new JSONObject();
        JSONArray arr = new JSONArray();
        for (DataPoint dataPoint : dataPoints) {
            arr.put(dataPoint.getJSONArray());
        }
        obj.put("dataPoints", arr);
        if (seriesStartDate != null) {
            obj.put("seriesStartDate", seriesStartDate.toString());
        }
        if (seriesEndDate != null) {
            obj.put("seriesEndDate", seriesEndDate.toString());
        }
        return obj;
    }

    /**
     * The length of the effective query window in milliseconds, or ZERO when
     * the series is empty.
     */
    protected Decimal getSeriesTimeInMillis() {
        if (dataPoints.isEmpty()) {
            return Decimal.ZERO;
        }
        DateTime seriesStart = getSeriesStartDate();
        DateTime seriesEnd = getSeriesEndDate();
        return new Decimal(seriesEnd.getMillis() - seriesStart.getMillis());
    }

    // Effective window start: the later of the explicit start date and the first point.
    private DateTime getSeriesStartDate() {
        if (!dataPoints.isEmpty()) {
            DateTime first = dataPoints.get(0).getDateTime();
            return (seriesStartDate != null) && seriesStartDate.isAfter(first) ? seriesStartDate : first;
        } else {
            return null;
        }
    }

    // Effective window end: the explicit end date, else the last point.
    private DateTime getSeriesEndDate() {
        if (!dataPoints.isEmpty()) {
            return (seriesEndDate != null) ? seriesEndDate : dataPoints.get(dataPoints.size() - 1).getDateTime();
        } else {
            return null;
        }
    }

    // Combine this DataSeries with another DataSeries using the given Operation.
    @SuppressWarnings("unchecked")
    private DataSeries combine(DataSeries series, Operation operation) {
        // Create a union of all DateTime points in the two DataSeries and sort the resultant collection (DESC).
        List<DateTime> dateTimePoints = (List) CollectionUtils.union(getDateTimePoints(), series.getDateTimePoints());
        Collections.sort(dateTimePoints);
        // For each DateTime point, find the nearest corresponding DataPoint in each series and apply the desired
        // Operation.
        List<DataPoint> combinedSeries = new ArrayList<DataPoint>();
        for (DateTime dateTimePoint : dateTimePoints) {
            DataPoint lhs = getDataPoint(dateTimePoint);
            DataPoint rhs = series.getDataPoint(dateTimePoint);
            operation.setOperands(lhs, rhs);
            combinedSeries.add(new DataPoint(dateTimePoint, operation.operate().getValue()));
        }
        return new DataSeries(combinedSeries);
    }

    /**
     * Add a DataSeries to this DataSeries.
     *
     * @param series - the DataSeries to add
     * @return a new DataSeries representing the addition of the two DataSeries
     */
    public DataSeries plus(DataSeries series) {
        return combine(series, new PlusOperation());
    }

    /**
     * Add a DataPoint to this DataSeries.
     *
     * @param dataPoint - the DataPoint to add
     * @return a new DataSeries representing the addition of the DataSeries and the DataPoint
     */
    public DataSeries plus(DataPoint dataPoint) {
        DataSeries series = new DataSeries();
        series.addDataPoint(dataPoint);
        return plus(series);
    }

    /**
     * Add a float value to this DataSeries.
     *
     * @param f - the float value to add
     * @return a new DataSeries representing the addition of the float value and the DataSeries
     */
    public DataSeries plus(float f) {
        List<DataPoint> combinedDataPoints = new ArrayList<DataPoint>();
        for (DataPoint dp : dataPoints) {
            combinedDataPoints.add(dp.plus(f));
        }
        return new DataSeries(combinedDataPoints);
    }

    /**
     * Subtract a DataSeries from this DataSeries.
     *
     * @param series - the DataSeries to subtract
     * @return a new DataSeries representing the subtraction of the DataSeries from this DataSeries
     */
    public DataSeries subtract(DataSeries series) {
        return combine(series, new SubtractOperation());
    }

    /**
     * Subtract a DataPoint from this DataSeries.
     *
     * @param dataPoint - the DataPoint to subtract
     * @return a new DataSeries representing the subtraction of the DataPoint from this DataSeries
     */
    public DataSeries subtract(DataPoint dataPoint) {
        DataSeries series = new DataSeries();
        series.addDataPoint(dataPoint);
        return subtract(series);
    }

    /**
     * Subtract a float value from this DataSeries.
     *
     * @param f - the float value to subtract
     * @return a new DataSeries representing the subtraction of the float value from this DataSeries
     */
    public DataSeries subtract(float f) {
        List<DataPoint> combinedDataPoints = new ArrayList<DataPoint>();
        for (DataPoint dp : dataPoints) {
            combinedDataPoints.add(dp.subtract(f));
        }
        return new DataSeries(combinedDataPoints);
    }

    /**
     * Divide this DataSeries by another DataSeries.
     *
     * @param series - the DataSeries value by which to divide this DataSeries
     * @return a new DataSeries representing the division of this DataSeries by the DataSeries
     */
    public DataSeries divide(DataSeries series) {
        return combine(series, new DivideOperation());
    }

    /**
     * Divide this DataSeries by a DataPoint.
     *
     * @param dataPoint - the DataPoint value by which to divide this DataSeries
     * @return a new DataSeries representing the division of this DataSeries by the DataPoint
     */
    public DataSeries divide(DataPoint dataPoint) {
        DataSeries series = new DataSeries();
        series.addDataPoint(dataPoint);
        return divide(series);
    }

    /**
     * Divide this DataSeries by a float value.
     *
     * @param f - the float value by which to divide this DataSeries
     * @return a new DataSeries representing the division of this DataSeries by the float value
     */
    public DataSeries divide(float f) {
        List<DataPoint> combinedDataPoints = new ArrayList<DataPoint>();
        for (DataPoint dp : dataPoints) {
            combinedDataPoints.add(dp.divide(f));
        }
        return new DataSeries(combinedDataPoints);
    }

    /**
     * Multiply this DataSeries by another DataSeries.
     *
     * @param series - the DataSeries to multiply this DataSeries
     * @return a new DataSeries representing the multiplication of the two DataSeries
     */
    public DataSeries multiply(DataSeries series) {
        return combine(series, new MultiplyOperation());
    }

    /**
     * Multiply this DataSeries by a DataPoint.
     *
     * @param dataPoint - the DataPoint value to multiply this DataPoint
     * @return a new DataSeries representing the multiplication of the DataSeries and the DataPoint
     */
    public DataSeries multiply(DataPoint dataPoint) {
        DataSeries series = new DataSeries();
        series.addDataPoint(dataPoint);
        return multiply(series);
    }

    /**
     * Multiply this DataSeries by a float value.
     *
     * @param f - the float value to multiply this DataSeries
     * @return a new DataSeries representing the multiplication of the DataSeries and the float value
     */
    public DataSeries multiply(float f) {
        List<DataPoint> combinedDataPoints = new ArrayList<DataPoint>();
        for (DataPoint dp : dataPoints) {
            combinedDataPoints.add(dp.multiply(f));
        }
        return new DataSeries(combinedDataPoints);
    }

    /**
     * Get the single-valued average of the DataPoints within the DataSeries that occur during the
     * specified query time-period.
     * <p/>
     * If there is no time-period (the query time-period is zero) then the result will be zero.
     *
     * @return - the average as a {@link Decimal} value
     */
    public Decimal integrate() {
        Decimal integral = Decimal.ZERO;
        Decimal seriesTimeInMillis = getSeriesTimeInMillis();
        if (!seriesTimeInMillis.equals(Decimal.ZERO)) {
            Collections.sort(dataPoints);
            for (int i = 0; i < dataPoints.size(); i++) {
                // Work out the segment covered by this point: from the point's
                // own time (clamped to the window start) up to the next point's
                // time (clamped to the window end).
                DataPoint current = dataPoints.get(i);
                DateTime end;
                if (i == (dataPoints.size() - 1)) {
                    end = getSeriesEndDate();
                } else {
                    DataPoint next = dataPoints.get(i + 1);
                    end = getSeriesEndDate().isBefore(next.getDateTime()) ? getSeriesEndDate() : next.getDateTime();
                }
                DateTime start = getSeriesStartDate().isAfter(current.getDateTime()) ? getSeriesStartDate() : current.getDateTime();
                // The filtering should have removed points after the end of the
                // window of interest, but in case it hasn't (and for direct
                // testing not via internal value) skip inverted segments.
                // Guard hoisted above the arithmetic: the original computed and
                // logged a (negative) weighted average before skipping it.
                if (start.isAfter(end)) continue;
                Decimal segmentInMillis = new Decimal(end.getMillis() - start.getMillis());
                // Add weighted average value.
                Decimal weightedAverage = current.getValue().multiply(segmentInMillis).divide(seriesTimeInMillis);
                // Demoted from log.error: these are developer diagnostics, not errors.
                log.debug("Diagnostics from integrate()" + weightedAverage + "," + current.getValue() + "," + i + "," + dataPoints.size() +
                        "," + segmentInMillis.divide(seriesTimeInMillis));
                integral = integral.add(weightedAverage);
            }
        }
        return integral;
    }

    /**
     * Get the Collection of {@link org.joda.time.DateTime} points in the DataSeries.
     *
     * @return the Collection of {@link org.joda.time.DateTime} points in the DataSeries
     */
    @SuppressWarnings("unchecked")
    public Collection<DateTime> getDateTimePoints() {
        return (Collection<DateTime>) CollectionUtils.collect(dataPoints, new Transformer() {
            public Object transform(Object input) {
                DataPoint dataPoint = (DataPoint) input;
                return dataPoint.getDateTime();
            }
        });
    }

    /**
     * Get the active {@link DataPoint} at a specific point in time.
     *
     * @param dateTime - the point in time for which to return the {@link DataPoint}
     * @return the {@link DataPoint} at dateTime
     */
    public DataPoint getDataPoint(DateTime dateTime) {
        DataPoint selected = DataPoint.NULL;
        // dataPoints is ordered; take the latest point not after dateTime.
        for (DataPoint dataPoint : dataPoints) {
            if (!dataPoint.getDateTime().isAfter(dateTime)) {
                selected = dataPoint;
            } else {
                break;
            }
        }
        return selected;
    }

    /**
     * Add a {@link DataPoint} to this series.
     *
     * @param dataPoint - the {@link DataPoint} to add to this series.
     */
    public void addDataPoint(DataPoint dataPoint) {
        dataPoints.add(dataPoint);
    }

    /**
     * Set the start of the query window.
     *
     * @param seriesStartDate - the start of the query window
     */
    public void setSeriesStartDate(DateTime seriesStartDate) {
        if (seriesStartDate == null) return;
        this.seriesStartDate = seriesStartDate;
    }

    /**
     * Set the end of the query window.
     *
     * @param seriesEndDate - the end of the query window
     */
    public void setSeriesEndDate(DateTime seriesEndDate) {
        if (seriesEndDate == null) return;
        this.seriesEndDate = seriesEndDate;
    }
}

/**
 * Represents an abstract mathematical operation
 * one would want to perform on a pair of {@link DataPoint} values.
 */
abstract class Operation {

    protected DataPoint lhs;
    protected DataPoint rhs;

    void setOperands(DataPoint lhs, DataPoint rhs) {
        this.lhs = lhs;
        this.rhs = rhs;
    }

    abstract DataPoint operate();
}

class PlusOperation extends Operation {
    public DataPoint operate() {
        return lhs.plus(rhs);
    }
}

class SubtractOperation extends Operation {
    public DataPoint operate() {
        return lhs.subtract(rhs);
    }
}

class DivideOperation extends Operation {
    public DataPoint operate() {
        return lhs.divide(rhs);
    }
}

class MultiplyOperation extends Operation {
    public DataPoint operate() {
        return lhs.multiply(rhs);
    }
}
package com.github.underscore; public class Trie { public static class TrieNode { // Initialize your data structure here. TrieNode[] children; boolean isWord; public TrieNode() { children = new TrieNode[1071]; } } private final TrieNode root; private boolean startWith; public Trie() { root = new TrieNode(); } // Inserts a word into the trie. public void insert(String word) { insert(word, root, 0); } private void insert(String word, TrieNode root, int idx) { if (idx == word.length()) { root.isWord = true; return; } int index = word.charAt(idx) - ' '; if (root.children[index] == null) { root.children[index] = new TrieNode(); } insert(word, root.children[index], idx + 1); } // Returns if the word is in the trie. public boolean search(String word) { return search(word, root, 0); } public boolean search(String word, TrieNode root, int idx) { if (idx == word.length()) { startWith = true; return root.isWord; } int index = word.charAt(idx) - ' '; if (root.children[index] == null) { startWith = false; return false; } return search(word, root.children[index], idx + 1); } // Returns if there is any word in the trie // that starts with the given prefix. public boolean startsWith(String prefix) { search(prefix); return startWith; } }
package com.google.sps.data; import com.google.appengine.api.datastore.Entity; import com.google.appengine.api.datastore.Key; import com.google.appengine.api.datastore.KeyFactory; import com.google.appengine.api.datastore.DatastoreService; import com.google.appengine.api.datastore.DatastoreServiceFactory; public final class Folder { private String folderName; private String folderDefaultLanguage; private String folderKey; private String parentKey; public Folder( String folderName, String folderDefaultLanguage) { this.folderName = folderName; this.folderDefaultLanguage = folderDefaultLanguage; this.folderKey = "null"; } public Folder(Entity entity, String key) { this.folderName = (String) entity.getProperty("folderName"); this.folderDefaultLanguage = (String) entity.getProperty("folderDefaultLanguage"); this.folderKey = key; } public String getFolderName() { return this.folderName; } public String getFolderDefaultLanguage() { return this.folderDefaultLanguage; } public String getFolderKey() { return this.folderKey; } public void setFolderName(String newFolderName) { this.folderName = newFolderName; } public void setFolderKey(String folderKey) { this.folderKey = folderKey; } public void setParentKey(String key) { this.parentKey = key; } public Entity createEntity() { Entity folder = new Entity("Folder", KeyFactory.stringToKey(this.parentKey)); folder.setProperty("folderName", this.folderName); folder.setProperty("folderDefaultLanguage", this.folderDefaultLanguage); return folder; } }
package com.commafeed.backend.dao;

import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.Transaction;
import org.hibernate.context.internal.ManagedSessionContext;

/**
 * Template for executing a piece of work inside its own Hibernate session and
 * transaction. Subclasses implement {@link #runInSession()}; {@link #run()}
 * opens a session, binds it to the managed context, wraps the work in a
 * transaction, and guarantees cleanup on every path.
 */
public abstract class UnitOfWork<T> {

	private SessionFactory sessionFactory;

	public UnitOfWork(SessionFactory sessionFactory) {
		this.sessionFactory = sessionFactory;
	}

	/** The work to perform while the session is bound and a transaction is open. */
	protected abstract T runInSession() throws Exception;

	/**
	 * Opens a session, runs the work transactionally and returns its result.
	 * Any exception thrown by the work is rethrown unchanged (unchecked).
	 */
	public T run() {
		final Session session = sessionFactory.openSession();
		// Nested units of work are not supported: refuse to proceed when a
		// session is already bound for this factory.
		if (ManagedSessionContext.hasBind(sessionFactory)) {
			throw new IllegalStateException("Already in a unit of work!");
		}
		T result = null;
		try {
			ManagedSessionContext.bind(session);
			session.beginTransaction();
			try {
				result = runInSession();
				commit(session);
			} catch (Exception cause) {
				rollback(session);
				this.<RuntimeException> rethrow(cause);
			}
		} finally {
			// Always release the session and unbind it, even on failure.
			session.close();
			ManagedSessionContext.unbind(sessionFactory);
		}
		return result;
	}

	/** Rolls back the session's current transaction if one is active. */
	private void rollback(Session session) {
		final Transaction tx = session.getTransaction();
		if (tx != null && tx.isActive()) {
			tx.rollback();
		}
	}

	/** Commits the session's current transaction if one is active. */
	private void commit(Session session) {
		final Transaction tx = session.getTransaction();
		if (tx != null && tx.isActive()) {
			tx.commit();
		}
	}

	/** Sneaky-throw helper: rethrows any exception without wrapping it. */
	@SuppressWarnings("unchecked")
	private <E extends Exception> void rethrow(Exception e) throws E {
		throw (E) e;
	}
}
package com.heroku.sdk;

import static java.lang.String.format;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Reader;
import java.io.StringReader;
import java.math.BigInteger;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.KeyPair;
import java.security.KeyStore;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
import java.security.SecureRandom;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import java.util.Locale;
import java.util.function.Consumer;

import org.bouncycastle.cert.X509CertificateHolder;
import org.bouncycastle.cert.jcajce.JcaX509CertificateConverter;
import org.bouncycastle.openssl.PEMKeyPair;
import org.bouncycastle.openssl.PEMParser;
import org.bouncycastle.openssl.jcajce.JcaPEMKeyConverter;

/**
 * This class is used to create a java.security.KeyStore from environment variables.
 *
 * @author Joe Kutner
 */
public class EnvKeyStore {

  /**
   * Create a KeyStore representation from environment variables.
   *
   * @param keyEnvVar The environment variable name of the key
   * @param certEnvVar The environment variable name of the certificate
   * @param passwordEnvVar The environment variable name of the password
   * @return an EnvKeyStore with a loaded KeyStore
   * @throws CertificateException
   * @throws NoSuchAlgorithmException
   * @throws KeyStoreException
   * @throws IOException
   */
  public static EnvKeyStore create(final String keyEnvVar, final String certEnvVar, final String passwordEnvVar)
      throws CertificateException, NoSuchAlgorithmException, KeyStoreException, IOException {
    return new EnvKeyStore(
        System.getenv(keyEnvVar),
        System.getenv(certEnvVar),
        System.getenv(passwordEnvVar)
    );
  }

  /**
   * Create a TrustStore representation from an environment variable.
   *
   * @param trustEnvVar The environment variable name of the certificate
   * @param passwordEnvVar The environment variable name of the password
   * @return an EnvKeyStore with a loaded TrustStore
   * @throws CertificateException
   * @throws NoSuchAlgorithmException
   * @throws KeyStoreException
   * @throws IOException
   */
  public static EnvKeyStore create(final String trustEnvVar, final String passwordEnvVar)
      throws CertificateException, NoSuchAlgorithmException, KeyStoreException, IOException {
    return new EnvKeyStore(
        System.getenv(trustEnvVar),
        System.getenv(passwordEnvVar)
    );
  }

  /**
   * Create a KeyStore representation from PEM strings.
   *
   * @param key The PEM-encoded private key as a string
   * @param cert The PEM-encoded certificate as a string
   * @param password The password as a string
   * @return an EnvKeyStore with a loaded KeyStore
   * @throws CertificateException
   * @throws NoSuchAlgorithmException
   * @throws KeyStoreException
   * @throws IOException
   */
  public static EnvKeyStore createFromPEMStrings(final String key, final String cert, final String password)
      throws CertificateException, NoSuchAlgorithmException, KeyStoreException, IOException {
    return new EnvKeyStore(key, cert, password);
  }

  /**
   * Create a TrustStore representation from a PEM string.
   *
   * @param trust The PEM-encoded certificate as a string
   * @param password The password as a string
   * @return an EnvKeyStore with a loaded TrustStore
   * @throws CertificateException
   * @throws NoSuchAlgorithmException
   * @throws KeyStoreException
   * @throws IOException
   */
  public static EnvKeyStore createFromPEMStrings(final String trust, final String password)
      throws CertificateException, NoSuchAlgorithmException, KeyStoreException, IOException {
    return new EnvKeyStore(trust, password);
  }

  /**
   * Create a KeyStore representation from environment variables, protected by a
   * randomly generated password.
   *
   * @param keyEnvVar The environment variable name of the key
   * @param certEnvVar The environment variable name of the certificate
   * @return an EnvKeyStore with a loaded KeyStore
   * @throws CertificateException
   * @throws NoSuchAlgorithmException
   * @throws KeyStoreException
   * @throws IOException
   */
  public static EnvKeyStore createWithRandomPassword(final String keyEnvVar, final String certEnvVar)
      throws CertificateException, NoSuchAlgorithmException, KeyStoreException, IOException {
    return new EnvKeyStore(
        System.getenv(keyEnvVar),
        System.getenv(certEnvVar),
        // 130 random bits rendered in base 32 -- a throwaway password for in-process use.
        new BigInteger(130, new SecureRandom()).toString(32)
    );
  }

  /**
   * Create a TrustStore representation from an environment variable, protected
   * by a randomly generated password.
   *
   * @param trustEnvVar The environment variable name of the certificate
   * @return an EnvKeyStore with a loaded TrustStore
   * @throws CertificateException
   * @throws NoSuchAlgorithmException
   * @throws KeyStoreException
   * @throws IOException
   */
  public static EnvKeyStore createWithRandomPassword(final String trustEnvVar)
      throws CertificateException, NoSuchAlgorithmException, KeyStoreException, IOException {
    return new EnvKeyStore(
        System.getenv(trustEnvVar),
        new BigInteger(130, new SecureRandom()).toString(32)
    );
  }

  private final String password;
  private final KeyStore keystore;

  private static final String DEFAULT_TYPE = "PKCS12";

  EnvKeyStore(String key, String cert, String password)
      throws CertificateException, NoSuchAlgorithmException, KeyStoreException, IOException {
    this.password = password;
    this.keystore = createKeyStore(
        new StringReader(key),
        new StringReader(cert),
        password
    );
  }

  EnvKeyStore(String cert, String password)
      throws CertificateException, NoSuchAlgorithmException, KeyStoreException, IOException {
    this.password = password;
    this.keystore = createTrustStore(
        new StringReader(cert)
    );
  }

  public String password() {
    return this.password;
  }

  public KeyStore keyStore() {
    return this.keystore;
  }

  public String type() {
    return DEFAULT_TYPE;
  }

  public InputStream toInputStream()
      throws CertificateException, NoSuchAlgorithmException, KeyStoreException, IOException {
    return new ByteArrayInputStream(toBytes());
  }

  public byte[] toBytes()
      throws CertificateException, NoSuchAlgorithmException, KeyStoreException, IOException {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    this.store(bos);
    bos.close();
    return bos.toByteArray();
  }

  /** Serializes the keystore to the given stream, protected by {@link #password()}. */
  public void store(OutputStream out)
      throws CertificateException, NoSuchAlgorithmException, KeyStoreException, IOException {
    this.keystore.store(out, password.toCharArray());
  }

  public void store(Path path)
      throws CertificateException, NoSuchAlgorithmException, KeyStoreException, IOException {
    Files.write(path, toBytes());
  }

  /**
   * Writes the keystore to a temporary file and returns it. The caller is
   * responsible for deleting the file (or use {@link #asFile(Consumer)}).
   */
  public File storeTemp()
      throws IOException, CertificateException, NoSuchAlgorithmException, KeyStoreException {
    // Fixed: prepend "." so the suffix is a real file extension, and use
    // Locale.ROOT to avoid locale-sensitive lowercasing (e.g. Turkish dotless i).
    File temp = File.createTempFile("env-keystore", "." + type().toLowerCase(Locale.ROOT));
    store(temp.toPath());
    return temp;
  }

  /**
   * Writes the keystore to a temp file, hands it to the consumer, and deletes
   * it afterwards. Fixed: deletion now happens in a finally block, so the temp
   * file no longer leaks when the consumer throws.
   */
  public void asFile(Consumer<File> c)
      throws IOException, CertificateException, NoSuchAlgorithmException, KeyStoreException {
    File temp = storeTemp();
    try {
      c.accept(temp);
    } finally {
      Files.delete(temp.toPath());
    }
  }

  /**
   * Builds a PKCS12 KeyStore holding the private key and its certificate under
   * the entry name "alias".
   */
  private static KeyStore createKeyStore(final Reader keyReader, final Reader certReader, final String password)
      throws IOException, CertificateException, KeyStoreException, NoSuchAlgorithmException {
    // Fixed: try-with-resources closes each parser (and its underlying reader)
    // even when the PEM content is malformed and parsing throws.
    PrivateKey key;
    try (PEMParser pem = new PEMParser(keyReader)) {
      PEMKeyPair pemKeyPair = (PEMKeyPair) pem.readObject();
      KeyPair keyPair = new JcaPEMKeyConverter().getKeyPair(pemKeyPair);
      key = keyPair.getPrivate();
    }

    X509Certificate certificate;
    try (PEMParser parser = new PEMParser(certReader)) {
      certificate = parseCert(parser);
    }

    KeyStore ks = KeyStore.getInstance(DEFAULT_TYPE);
    ks.load(null);
    ks.setKeyEntry("alias", key, password.toCharArray(), new X509Certificate[]{certificate});
    return ks;
  }

  /**
   * Builds a PKCS12 TrustStore containing every certificate found in the PEM
   * stream, under entry names "alias0", "alias1", ...
   */
  private static KeyStore createTrustStore(final Reader certReader)
      throws IOException, KeyStoreException, NoSuchAlgorithmException, CertificateException {
    KeyStore ks = KeyStore.getInstance(DEFAULT_TYPE);
    ks.load(null);
    // Fixed: the parser is now closed even if certificate parsing throws.
    try (PEMParser parser = new PEMParser(certReader)) {
      int i = 0;
      X509Certificate certificate;
      while ((certificate = parseCert(parser)) != null) {
        ks.setCertificateEntry(format("alias%d", i), certificate);
        i += 1;
      }
    }
    return ks;
  }

  /** Reads the next certificate from the PEM stream, or returns null at end of input. */
  private static X509Certificate parseCert(PEMParser parser) throws IOException, CertificateException {
    X509CertificateHolder certHolder = (X509CertificateHolder) parser.readObject();
    if (certHolder == null) {
      return null;
    }
    return new JcaX509CertificateConverter().getCertificate(certHolder);
  }
}
package com.conveyal.r5.transit;

import com.conveyal.gtfs.GTFSFeed;
import com.conveyal.gtfs.model.*;
import com.conveyal.r5.api.util.TransitModes;
import com.google.common.base.Strings;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import gnu.trove.list.TIntList;
import gnu.trove.list.array.TIntArrayList;
import gnu.trove.map.TIntIntMap;
import gnu.trove.map.TObjectIntMap;
import gnu.trove.map.hash.TIntIntHashMap;
import gnu.trove.map.hash.TObjectIntHashMap;
import com.conveyal.r5.streets.StreetRouter;
import java.time.LocalDate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.Serializable;
import java.time.DateTimeException;
import java.time.ZoneId;
import java.time.zone.ZoneRulesException;
import java.util.*;

/**
 * The transit portion of a TransportNetwork: stops, trip patterns, schedules and the
 * indexes connecting them, built from one or more GTFS feeds.
 *
 * A key simplifying factor is that we don't handle overnight trips. This is fine for analysis at usual times of day.
 */
public class TransitLayer implements Serializable, Cloneable {

    /** Distance limit for stop trees, meters. Set to 3.5 km to match OTP GraphIndex.MAX_WALK_METERS */
    public static final int STOP_TREE_DISTANCE_LIMIT = 3500;

    /**
     * Distance limit for transfers, meters. Set to 1km which is slightly above OTP's 600m (which was specified as
     * 1 m/s with 600s max time, which is actually somewhat less than 600m due to extra costs due to steps etc.
     */
    public static final int TRANSFER_DISTANCE_LIMIT = 1000;

    private static final Logger LOG = LoggerFactory.getLogger(TransitLayer.class);

    //TransportNetwork timezone. It is read from GTFS agency. If it is invalid it is GMT
    protected ZoneId timeZone;

    // Do we really need to store this? It does serve as a key into the GTFS MapDB.
    // It contains information that is temporarily also held in stopForIndex.
    public List<String> stopIdForIndex = new ArrayList<>();

    // Inverse map of stopIdForIndex, reconstructed from that list (not serialized). No-entry value is -1.
    public transient TObjectIntMap<String> indexForStopId;

    // This is used as an initial size estimate for many lists.
    public static final int TYPICAL_NUMBER_OF_STOPS_PER_TRIP = 30;

    public List<TripPattern> tripPatterns = new ArrayList<>();

    // Maybe we need a StopStore that has (streetVertexForStop, transfers, flags, etc.)
    public TIntList streetVertexForStop = new TIntArrayList();

    // Inverse map of streetVertexForStop, and reconstructed from that list.
    public transient TIntIntMap stopForStreetVertex;

    // For each stop, a packed list of transfers to other stops
    // FIXME we may currently be storing weight or time to reach other stop, which we did to avoid floating point division. Instead, store distances in millimeters, and divide by speed in mm/sec.
    public List<TIntList> transfersForStop = new ArrayList<>();

    /** Information about a route */
    public List<RouteInfo> routes = new ArrayList<>();

    /** The names of the stops */
    public List<String> stopNames = new ArrayList<>();

    // Built by rebuildTransientIndexes(): for each stop, the indexes of patterns serving it.
    public List<TIntList> patternsForStop;

    public List<Service> services = new ArrayList<>();

    /** If true at index stop allows boarding with wheelchairs **/
    public BitSet stopsWheelchair;

    // TODO there is probably a better way to do this, but for now we need to retain stop object for linking to streets
    public transient List<Stop> stopForIndex = new ArrayList<>();

    // The coordinates of a place roughly in the center of the transit network, for centering maps and coordinate systems.
    public double centerLon;
    public double centerLat;

    /** does this TransitLayer have any frequency-based trips? */
    public boolean hasFrequencies = false;

    /** Does this TransitLayer have any schedules */
    public boolean hasSchedules = false;

    /**
     * For each transit stop, an int->int map giving the distance of every reachable street intersection from the
     * origin stop. This is the result of running a distance-constrained street search from every stop in the graph.
     */
    public transient List<TIntIntMap> stopTrees;

    /**
     * The TransportNetwork containing this TransitLayer. This link up the object tree also allows us to access the
     * StreetLayer associated with this TransitLayer in the same TransportNetwork without maintaining bidirectional
     * references between the two layers.
     */
    public TransportNetwork parentNetwork = null;

    /** Load a GTFS feed with full load level */
    public void loadFromGtfs (GTFSFeed gtfs) {
        loadFromGtfs(gtfs, LoadLevel.FULL);
    }

    /**
     * Load data from a GTFS feed. Call multiple times to load multiple feeds.
     *
     * Loads stops, services, trips (grouped into patterns), chains block trips
     * together (interlining), computes the network center, and sets the layer
     * time zone from the feed's agencies.
     */
    public void loadFromGtfs (GTFSFeed gtfs, LoadLevel level) {
        // Load stops.
        // ID is the GTFS string ID, stopIndex is the zero-based index, stopVertexIndex is the index in the street layer.
        TObjectIntMap<String> indexForUnscopedStopId = new TObjectIntHashMap<>();
        stopsWheelchair = new BitSet(gtfs.stops.size());
        for (Stop stop : gtfs.stops.values()) {
            int stopIndex = stopIdForIndex.size();
            String scopedStopId = String.join(":", stop.feed_id, stop.stop_id);
            // This is only used while building the TransitNetwork to look up StopTimes from the same feed.
            indexForUnscopedStopId.put(stop.stop_id, stopIndex);
            stopIdForIndex.add(scopedStopId);
            stopForIndex.add(stop);
            if (stop.wheelchair_boarding != null && stop.wheelchair_boarding.trim().equals("1")) {
                stopsWheelchair.set(stopIndex);
            }
            if (level == LoadLevel.FULL) {
                stopNames.add(stop.stop_name);
            }
        }
        // Load service periods, assigning integer codes which will be referenced by trips and patterns.
        TObjectIntMap<String> serviceCodeNumber = new TObjectIntHashMap<>(20, 0.5f, -1);
        gtfs.services.forEach((serviceId, service) -> {
            int serviceIndex = services.size();
            services.add(service);
            serviceCodeNumber.put(serviceId, serviceIndex);
            LOG.debug("Service {} has ID {}", serviceIndex, serviceId);
        });
        // Group trips by stop pattern (including pickup/dropoff type) and fill stop times into patterns.
        // Also group trips by the blockId they belong to, and chain them together if they allow riders to stay on board
        // the vehicle from one trip to the next, even if it changes routes or directions. This is called "interlining".
        LOG.info("Grouping trips by stop pattern and block, and creating trip schedules.");
        // These are temporary maps used only for grouping purposes.
        Map<TripPatternKey, TripPattern> tripPatternForStopSequence = new HashMap<>();
        Multimap<String, TripSchedule> tripsForBlock = HashMultimap.create();
        TObjectIntMap<Route> routeIndexForRoute = new TObjectIntHashMap<>();
        int nTripsAdded = 0;
        TRIPS: for (String tripId : gtfs.trips.keySet()) {
            Trip trip = gtfs.trips.get(tripId);
            // Construct the stop pattern and schedule for this trip
            // Should we really be resolving to an object reference for Route?
            // That gets in the way of GFTS persistence.
            String scopedRouteId = String.join(":", trip.route.feed_id, trip.route.route_id);
            TripPatternKey tripPatternKey = new TripPatternKey(scopedRouteId);
            TIntList arrivals = new TIntArrayList(TYPICAL_NUMBER_OF_STOPS_PER_TRIP);
            TIntList departures = new TIntArrayList(TYPICAL_NUMBER_OF_STOPS_PER_TRIP);
            TIntList stopSequences = new TIntArrayList(TYPICAL_NUMBER_OF_STOPS_PER_TRIP);
            int previousDeparture = Integer.MIN_VALUE;
            int nStops = 0;
            Iterable<StopTime> stopTimes;
            try {
                stopTimes = gtfs.getInterpolatedStopTimesForTrip(tripId);
            } catch (GTFSFeed.FirstAndLastStopsDoNotHaveTimes e) {
                LOG.warn("First and last stops do not both have times specified on trip {} on route {}, skipping this as interpolation is impossible", trip.trip_id, trip.route.route_id);
                continue TRIPS;
            }
            for (StopTime st : stopTimes) {
                tripPatternKey.addStopTime(st, indexForUnscopedStopId);
                arrivals.add(st.arrival_time);
                departures.add(st.departure_time);
                stopSequences.add(st.stop_sequence);
                // Reject the whole trip if time ever goes backwards along it.
                if (previousDeparture > st.arrival_time || st.arrival_time > st.departure_time) {
                    LOG.warn("Negative-time travel at stop {} on trip {} on route {}, skipping this trip as it will wreak havoc with routing", st.stop_id, trip.trip_id, trip.route.route_id);
                    continue TRIPS;
                }
                if (previousDeparture == st.arrival_time) {
                    LOG.warn("Zero-length hop at stop {} on trip {} on route {} {}", st.stop_id, trip.trip_id, trip.route.route_id, trip.route.route_short_name);
                }
                previousDeparture = st.departure_time;
                nStops++;
            }
            if (nStops == 0) {
                LOG.warn("Trip {} on route {} has no stops, it will not be used", trip.trip_id, trip.route.route_id);
                continue;
            }
            TripPattern tripPattern = tripPatternForStopSequence.get(tripPatternKey);
            if (tripPattern == null) {
                tripPattern = new TripPattern(tripPatternKey);
                // if we haven't seen the route yet _from this feed_ (as IDs are only feed-unique)
                // create it.
                if (level == LoadLevel.FULL) {
                    if (!routeIndexForRoute.containsKey(trip.route)) {
                        int routeIndex = routes.size();
                        RouteInfo ri = new RouteInfo(trip.route);
                        routes.add(ri);
                        routeIndexForRoute.put(trip.route, routeIndex);
                    }
                    tripPattern.routeIndex = routeIndexForRoute.get(trip.route);
                }
                tripPatternForStopSequence.put(tripPatternKey, tripPattern);
                tripPattern.originalId = tripPatterns.size();
                tripPatterns.add(tripPattern);
            }
            tripPattern.setOrVerifyDirection(trip.direction_id);
            int serviceCode = serviceCodeNumber.get(trip.service.service_id);
            // TODO there's no reason why we can't just filter trips like this, correct?
            // TODO this means that invalid trips still have empty patterns created
            TripSchedule tripSchedule = TripSchedule.create(trip, arrivals.toArray(), departures.toArray(), stopSequences.toArray(), serviceCode);
            if (tripSchedule == null) continue;
            tripPattern.addTrip(tripSchedule);
            // A null headwaySeconds marks a scheduled (timetabled) trip; non-null marks frequency-based.
            this.hasFrequencies = this.hasFrequencies || tripSchedule.headwaySeconds != null;
            this.hasSchedules = this.hasSchedules || tripSchedule.headwaySeconds == null;
            nTripsAdded += 1;
            // Record which block this trip belongs to, if any.
            if ( ! Strings.isNullOrEmpty(trip.block_id)) {
                tripsForBlock.put(trip.block_id, tripSchedule);
            }
        }
        LOG.info("Done creating {} trips on {} patterns.", nTripsAdded, tripPatternForStopSequence.size());
        LOG.info("Chaining trips together according to blocks to model interlining...");
        // Chain together trips served by the same vehicle that allow transfers by simply staying on board.
        // Elsewhere this is done by grouping by (serviceId, blockId) but this is not supported by the spec.
        // Discussion started on gtfs-changes.
        tripsForBlock.asMap().forEach((blockId, trips) -> {
            TripSchedule[] schedules = trips.toArray(new TripSchedule[trips.size()]);
            Arrays.sort(schedules); // Sorts on first departure time
            for (int i = 0; i < schedules.length - 1; i++) {
                schedules[i].chainTo(schedules[i + 1]);
            }
        });
        LOG.info("Done chaining trips together according to blocks.");
        LOG.info("Sorting trips on each pattern");
        for (TripPattern tripPattern : tripPatternForStopSequence.values()) {
            Collections.sort(tripPattern.tripSchedules);
        }
        LOG.info("done sorting");
        LOG.info("Finding the approximate center of the transport network...");
        findCenter(gtfs.stops.values());
        //Set transportNetwork timezone
        //If there are no agencies (which is strange) it is GMT
        //Otherwise it is set to first valid agency timezone and warning is shown if agencies have different timezones
        if (gtfs.agency.size() == 0) {
            timeZone = ZoneId.of("GMT");
            LOG.warn("graph contains no agencies; API request times will be interpreted as GMT.");
        } else {
            for (Agency agency : gtfs.agency.values()) {
                if (agency.agency_timezone == null) {
                    LOG.warn("Agency {} is without timezone", agency.agency_name);
                    continue;
                }
                ZoneId tz;
                try {
                    tz = ZoneId.of(agency.agency_timezone);
                } catch (ZoneRulesException z) {
                    LOG.error("Agency {} in GTFS with timezone '{}' wasn't found in timezone database reason: {}", agency.agency_name, agency.agency_timezone, z.getMessage());
                    //timezone will be set to GMT if it is still empty after for loop
                    continue;
                } catch (DateTimeException dt) {
                    LOG.error("Agency {} in GTFS has timezone in wrong format:'{}'. Expected format: area/city ", agency.agency_name, agency.agency_timezone);
                    //timezone will be set to GMT if it is still empty after for loop
                    continue;
                }
                //First time setting timezone
                if (timeZone == null) {
                    LOG.info("TransportNetwork time zone set to {} from agency '{}' and agency_timezone:{}", tz, agency.agency_name, agency.agency_timezone);
                    timeZone = tz;
                } else if (!timeZone.equals(tz)) {
                    LOG.error("agency time zone {} differs from TransportNetwork time zone: {}. This will be problematic.", tz, timeZone);
                }
            }
            //This can only happen if all agencies have empty timezones
            if (timeZone == null) {
                timeZone = ZoneId.of("GMT");
                LOG.warn("No agency in graph had valid timezone; API request times will be interpreted as GMT.");
            }
        }
        // Will be useful in naming patterns.
//        LOG.info("Finding topology of each route/direction...");
//        Multimap<T2<String, Integer>, TripPattern> patternsForRouteDirection = HashMultimap.create();
//        tripPatterns.forEach(tp -> patternsForRouteDirection.put(new T2(tp.routeId, tp.directionId), tp));
//        for (T2<String, Integer> routeAndDirection : patternsForRouteDirection.keySet()) {
//            RouteTopology topology = new RouteTopology(routeAndDirection.first, routeAndDirection.second, patternsForRouteDirection.get(routeAndDirection));
    }

    // The median of all stopTimes would be best but that involves sorting a huge list of numbers.
    // So we just use the mean of all stops for now.
    // NOTE(review): divides by stops.size() -- a feed with zero stops would yield NaN
    // for centerLat/centerLon. Confirm callers guarantee a non-empty stop collection.
    private void findCenter (Collection<Stop> stops) {
        double lonSum = 0;
        double latSum = 0;
        for (Stop stop : stops) {
            latSum += stop.stop_lat;
            lonSum += stop.stop_lon;
        }
        // Stops is a HashMap so size() is fast. If it ever becomes a MapDB BTree, we may want to do this differently.
        centerLat = latSum / stops.size();
        centerLon = lonSum / stops.size();
    }

    /** (Re-)build transient indexes of this TripPattern, connecting stops to patterns etc. */
    public void rebuildTransientIndexes () {
        // 1. Which patterns pass through each stop?
        // We could store references to patterns rather than indexes.
        int nStops = stopIdForIndex.size();
        patternsForStop = new ArrayList<>(nStops);
        for (int i = 0; i < nStops; i++) {
            patternsForStop.add(new TIntArrayList());
        }
        int p = 0;
        for (TripPattern pattern : tripPatterns) {
            for (int stopIndex : pattern.stops) {
                // Guard against duplicates: a pattern may visit the same stop more than once.
                if (!patternsForStop.get(stopIndex).contains(p)) {
                    patternsForStop.get(stopIndex).add(p);
                }
            }
            p++;
        }
        // 2. What street vertex represents each transit stop? Invert the serialized map.
        stopForStreetVertex = new TIntIntHashMap(streetVertexForStop.size(), 0.5f, -1, -1);
        for (int s = 0; s < streetVertexForStop.size(); s++) {
            stopForStreetVertex.put(streetVertexForStop.get(s), s);
        }
        // 3. What is the integer index for each GTFS stop ID?
        indexForStopId = new TObjectIntHashMap<>(stopIdForIndex.size(), 0.5f, -1);
        for (int s = 0; s < stopIdForIndex.size(); s++) {
            indexForStopId.put(stopIdForIndex.get(s), s);
        }
    }

    /**
     * Run a distance-constrained street search from every transit stop in the graph.
     * Store the distance to every reachable street vertex for each of these origin stops.
     */
    public void buildStopTrees() {
        LOG.info("Building stop trees (cached distances between transit stops and street intersections).");
        // Allocate a new empty array of stop trees, releasing any existing ones.
        stopTrees = new ArrayList<>(getStopCount());
        for (int stop = 0; stop < getStopCount(); stop++) {
            buildOneStopTree(stop);
        }
        LOG.info("Done building stop trees.");
    }

    /**
     * Perform a single on-street search from the specified transit stop. Store the distance in millimeters to every reached
     * street vertex.
     * @param stop the internal integer stop ID for which to build a stop tree.
     */
    public void buildOneStopTree(int stop) {
        // Lists do not auto-grow if you try to add an element past their end.
        // So until we need different behavior, we only support adding a stop tree to the end of the list,
        // not updating an existing one or adding one out past the end of the list.
        if (stopTrees.size() != stop) {
            throw new RuntimeException("New stop trees can only be added to the end of the list.");
        }
        int originVertex = streetVertexForStop.get(stop);
        if (originVertex == -1) {
            // -1 indicates that this stop is not linked to the street network.
            LOG.warn("Stop {} has not been linked to the street network, cannot build stop tree.", stop);
            stopTrees.add(null);
            return;
        }
        StreetRouter router = new StreetRouter(parentNetwork.streetLayer);
        router.distanceLimitMeters = STOP_TREE_DISTANCE_LIMIT;
        // Dominate based on distance in millimeters, since (a) we're using a hard distance limit, and (b) we divide
        // by a speed to get time when we use the stop trees.
        router.dominanceVariable = StreetRouter.State.RoutingVariable.DISTANCE_MILLIMETERS;
        router.setOrigin(originVertex);
        router.route();
        // This will return distance in millimeters since that is our dominance function
        stopTrees.add(router.getReachedVertices());
    }

    /** Builds a TransitLayer by loading every GTFS file in the given list, in order. */
    public static TransitLayer fromGtfs (List<String> files) {
        TransitLayer transitLayer = new TransitLayer();
        for (String file : files) {
            GTFSFeed gtfs = GTFSFeed.fromFile(file);
            transitLayer.loadFromGtfs(gtfs);
            //Makes sure that temporary mapdb files are deleted after they aren't needed
            gtfs.close();
        }
        return transitLayer;
    }

    public int getStopCount () {
        return stopIdForIndex.size();
    }

    // Mark all services that are active on the given day. Trips on inactive services will not be used in the search.
    public BitSet getActiveServicesForDate (LocalDate date) {
        BitSet activeServices = new BitSet();
        int s = 0;
        for (Service service : services) {
            if (service.activeOn(date)) {
                activeServices.set(s);
            }
            s++;
        }
        return activeServices;
    }

    // TODO setStreetLayer which automatically links and records the streetLayer ID in a field for use elsewhere?

    // Shallow copy. CloneNotSupportedException cannot actually occur since this class implements Cloneable.
    public TransitLayer clone() {
        try {
            return (TransitLayer) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new RuntimeException(e);
        }
    }

    /** How much information should we load/save? */
    public enum LoadLevel {
        /** Load only information required for analytics, leaving out route names, etc. */
        BASIC,
        /** Load enough information for customer facing trip planning */
        FULL
    }

    /**
     * Maps a GTFS/TPEG extended route_type code to a TransitModes value.
     * TPEG ranges (100+) are checked first, then the original GTFS codes 0-7.
     * NOTE(review): the exception messages below lack a space before the route
     * type (e.g. "Air transport not supported1101").
     */
    public static TransitModes getTransitModes(int routeType) {
        if (routeType >= 100 && routeType < 200) { // Railway Service
            return TransitModes.RAIL;
        } else if (routeType >= 200 && routeType < 300) { //Coach Service
            return TransitModes.BUS;
        } else if (routeType >= 300 && routeType < 500) { //Suburban Railway Service and Urban Railway service
            return TransitModes.RAIL;
        } else if (routeType >= 500 && routeType < 700) { //Metro Service and Underground Service
            return TransitModes.SUBWAY;
        } else if (routeType >= 700 && routeType < 900) { //Bus Service and Trolleybus service
            return TransitModes.BUS;
        } else if (routeType >= 900 && routeType < 1000) { //Tram service
            return TransitModes.TRAM;
        } else if (routeType >= 1000 && routeType < 1100) { //Water Transport Service
            return TransitModes.FERRY;
        } else if (routeType >= 1100 && routeType < 1200) { //Air Service
            throw new IllegalArgumentException("Air transport not supported" + routeType);
        } else if (routeType >= 1200 && routeType < 1300) { //Ferry Service
            return TransitModes.FERRY;
        } else if (routeType >= 1300 && routeType < 1400) { //Telecabin Service
            return TransitModes.GONDOLA;
        } else if (routeType >= 1400 && routeType < 1500) { //Funicalar Service
            return TransitModes.FUNICULAR;
        } else if (routeType >= 1500 && routeType < 1600) { //Taxi Service
            throw new IllegalArgumentException("Taxi service not supported" + routeType);
        }
        //Is this really needed?
        /**else if (routeType >= 1600 && routeType < 1700){ //Self drive
            return TransitModes.CAR;
        }*/
        /* Original GTFS route types. Should these be checked before TPEG types? */
        switch (routeType) {
        case 0: return TransitModes.TRAM;
        case 1: return TransitModes.SUBWAY;
        case 2: return TransitModes.RAIL;
        case 3: return TransitModes.BUS;
        case 4: return TransitModes.FERRY;
        case 5: return TransitModes.CABLE_CAR;
        case 6: return TransitModes.GONDOLA;
        case 7: return TransitModes.FUNICULAR;
        default: throw new IllegalArgumentException("unknown gtfs route type " + routeType);
        }
    }

    /**
     * @return a semi-shallow copy of this transit layer for use when applying scenarios.
     */
    public TransitLayer scenarioCopy(TransportNetwork newScenarioNetwork) {
        TransitLayer copy = this.clone();
        copy.parentNetwork = newScenarioNetwork;
        // Protectively copy all the lists that will be affected by adding new stops to the network
        // See: StopSpec.materializeOne()
        // We would really only need to do this for modifications that create new stops.
        copy.stopIdForIndex = new ArrayList<>(this.stopIdForIndex);
        copy.stopNames = new ArrayList<>(this.stopNames);
        copy.streetVertexForStop = new TIntArrayList(this.streetVertexForStop);
        copy.stopTrees = new ArrayList<>(this.stopTrees);
        copy.transfersForStop = new ArrayList<>(this.transfersForStop);
        return copy;
    }
}
/* Created in support of Team 2465 (Kauaibots). Go Thunderchicken!           */
/* Open Source Software - may be modified and shared by FRC teams. Any      */
/* modifications must be accompanied by the license file                    */
/* in the root directory of the project.                                    */

package com.kauailabs.nav6.frc;

import java.util.Arrays;

import com.kauailabs.nav6.IMUProtocol;

import edu.wpi.first.wpilibj.PIDSource;
import edu.wpi.first.wpilibj.PIDSourceType;
import edu.wpi.first.wpilibj.SensorBase;
import edu.wpi.first.wpilibj.SerialPort;
import edu.wpi.first.wpilibj.Timer;
import edu.wpi.first.wpilibj.livewindow.LiveWindowSendable;
import edu.wpi.first.wpilibj.tables.ITable;

/**
 * The IMU class provides a simplified interface to the KauaiLabs nav6 IMU.
 *
 * The IMU class enables access to basic connectivity and state information,
 * as well as key orientation information (yaw, pitch, roll, compass heading).
 *
 * A background thread (started from the constructor) continuously reads the
 * serial port, decodes yaw/pitch/roll update packets and stream-configuration
 * responses, and publishes the decoded values through volatile fields so that
 * caller threads always see fresh data.
 *
 * Advanced capabilities of the nav6 IMU may be accessed via the IMUAdvanced
 * class.
 * @author Scott
 */
public class IMU extends SensorBase implements PIDSource, LiveWindowSendable, Runnable {

    static final int YAW_HISTORY_LENGTH = 10;
    static final byte DEFAULT_UPDATE_RATE_HZ = 100;
    static final short DEFAULT_ACCEL_FSR_G = 2;
    static final short DEFAULT_GYRO_FSR_DPS = 2000;

    SerialPort serialPort;
    // Ring buffer of recent yaw samples; written only by the reader thread,
    // averaged by zeroYaw() on the caller's thread.
    float yawHistory[];
    int nextYawHistoryIndex;
    // Written by zeroYaw() (caller thread), read by getYaw() (any thread).
    // volatile so a new offset is immediately visible across threads.
    volatile double userYawOffset;
    ITable m_table;
    Thread m_thread;
    protected byte updateRateHz;

    volatile float yaw;
    volatile float pitch;
    volatile float roll;
    volatile float compassHeading;
    volatile int updateCount = 0;
    volatile int byteCount = 0;

    volatile float nav6YawOffsetDegrees;
    volatile short accelFsrG;
    volatile short gyroFsrDps;
    volatile short flags;

    // Timestamp of the most recent valid update; written by the reader
    // thread, read by isConnected(). volatile for cross-thread visibility.
    volatile double lastUpdateTime;
    // Loop-exit flag for the reader thread. volatile so that a write from
    // another thread is guaranteed to be observed by the run() loop; without
    // it the thread could spin forever on a cached 'false'.
    volatile boolean stop = false;

    private IMUProtocol.YPRUpdate yprUpdateData;
    protected byte updateType = IMUProtocol.MSGID_YPR_UPDATE;

    /**
     * Constructs the IMU class, overriding the default update rate
     * with a custom rate which may be from 4 to 100, representing
     * the number of updates per second sent by the nav6 IMU.
     *
     * Note that increasing the update rate may increase the
     * CPU utilization.
     * @param serialPort BufferingSerialPort object to use
     * @param updateRateHz Custom Update Rate (Hz)
     */
    public IMU(SerialPort serialPort, byte updateRateHz) {
        yprUpdateData = new IMUProtocol.YPRUpdate();
        this.updateRateHz = updateRateHz;
        flags = 0;
        accelFsrG = DEFAULT_ACCEL_FSR_G;
        gyroFsrDps = DEFAULT_GYRO_FSR_DPS;
        this.serialPort = serialPort;
        yawHistory = new float[YAW_HISTORY_LENGTH];
        yaw = (float) 0.0;
        pitch = (float) 0.0;
        roll = (float) 0.0;
        try {
            serialPort.reset();
        } catch (RuntimeException ex) {
            ex.printStackTrace();
        }
        initIMU();
        // NOTE(review): starting a thread from a constructor lets 'this'
        // escape before construction completes; kept as-is because subclasses
        // (e.g. IMUAdvanced) rely on the existing construction sequence.
        m_thread = new Thread(this);
        m_thread.start();
    }

    /**
     * Constructs the IMU class, using the default update rate.
     *
     * @param serial_port BufferingSerialPort object to use
     */
    public IMU(SerialPort serial_port) {
        this(serial_port, DEFAULT_UPDATE_RATE_HZ);
    }

    /**
     * Initializes internal state and sends the initial stream command that
     * places the nav6 into the desired update mode.
     */
    protected void initIMU() {
        // The nav6 IMU serial port configuration is 8 data bits, no parity,
        // one stop bit. No flow control is used. Conveniently, these are the
        // defaults used by the WPILib's SerialPort class.
        //
        // In addition, the WPILib's SerialPort class also defaults to:
        //
        // Timeout period of 5 seconds
        // Termination ('\n' character)
        // Transmit immediately
        initializeYawHistory();
        userYawOffset = 0;

        // Set the nav6 into the desired update mode.
        byte streamCommandBuffer[] = new byte[256];
        int packetLength = IMUProtocol.encodeStreamCommand(streamCommandBuffer, updateType, updateRateHz);
        try {
            serialPort.write(streamCommandBuffer, packetLength);
        } catch (RuntimeException ex) {
            ex.printStackTrace();
        }
    }

    /**
     * Records the configuration values reported by the nav6 in response to a
     * stream command.
     * @param response the decoded stream response packet
     */
    protected void setStreamResponse(IMUProtocol.StreamResponse response) {
        flags = response.flags;
        nav6YawOffsetDegrees = response.yawOffsetDegrees;
        accelFsrG = response.accelFsrG;
        gyroFsrDps = response.gyroFsrDps;
        updateRateHz = (byte) response.updateRateHz;
    }

    /** Clears the yaw history buffer and the last-update timestamp. */
    private void initializeYawHistory() {
        Arrays.fill(yawHistory, 0);
        nextYawHistoryIndex = 0;
        lastUpdateTime = 0.0;
    }

    /**
     * Publishes a freshly-decoded orientation sample and records the yaw
     * value into the rolling history buffer.
     */
    private void setYawPitchRoll(float yaw, float pitch, float roll, float compassHeading) {
        this.yaw = yaw;
        this.pitch = pitch;
        this.roll = roll;
        this.compassHeading = compassHeading;
        updateYawHistory(this.yaw);
    }

    /**
     * Appends a yaw sample to the circular history buffer and stamps the
     * time of the update (used by isConnected()).
     * @param currYaw the yaw value to record
     */
    protected void updateYawHistory(float currYaw) {
        if (nextYawHistoryIndex >= YAW_HISTORY_LENGTH) {
            nextYawHistoryIndex = 0;
        }
        yawHistory[nextYawHistoryIndex] = currYaw;
        lastUpdateTime = Timer.getFPGATimestamp();
        nextYawHistoryIndex++;
    }

    /** @return the arithmetic mean of the samples in the yaw history buffer */
    private double getAverageFromYawHistory() {
        double yawHistorySum = 0.0;
        for (int i = 0; i < YAW_HISTORY_LENGTH; i++) {
            yawHistorySum += yawHistory[i];
        }
        double yawHistoryAvg = yawHistorySum / YAW_HISTORY_LENGTH;
        return yawHistoryAvg;
    }

    /**
     * Returns the current pitch value (in degrees, from -180 to 180)
     * reported by the nav6 IMU.
     * @return The current pitch value in degrees (-180 to 180).
     */
    public float getPitch() {
        return pitch;
    }

    /**
     * Returns the current roll value (in degrees, from -180 to 180)
     * reported by the nav6 IMU.
     * @return The current roll value in degrees (-180 to 180).
     */
    public float getRoll() {
        return roll;
    }

    /**
     * Returns the current yaw value (in degrees, from -180 to 180)
     * reported by the nav6 IMU.
     *
     * Note that the returned yaw value will be offset by a user-specified
     * offset value; this user-specified offset value is set by
     * invoking the zeroYaw() method.
     * @return The current yaw value in degrees (-180 to 180).
     */
    public float getYaw() {
        float calculatedYaw = (float) (this.yaw - userYawOffset);
        // Re-normalize into [-180, 180] after subtracting the offset.
        if (calculatedYaw < -180) {
            calculatedYaw += 360;
        }
        if (calculatedYaw > 180) {
            calculatedYaw -= 360;
        }
        return calculatedYaw;
    }

    /**
     * Returns the current tilt-compensated compass heading
     * value (in degrees, from 0 to 360) reported by the nav6 IMU.
     *
     * Note that this value is sensed by the nav6 magnetometer,
     * which can be affected by nearby magnetic fields (e.g., the
     * magnetic fields generated by nearby motors).
     * @return The current tilt-compensated compass heading, in degrees (0-360).
     */
    public float getCompassHeading() {
        return compassHeading;
    }

    /**
     * Sets the user-specified yaw offset to the current
     * yaw value reported by the nav6 IMU.
     *
     * This user-specified yaw offset is automatically
     * subtracted from subsequent yaw values reported by
     * the getYaw() method.
     */
    public void zeroYaw() {
        // Average recent samples rather than a single reading to reduce noise.
        userYawOffset = getAverageFromYawHistory();
    }

    /**
     * Indicates whether the nav6 IMU is currently connected
     * to the host computer. A connection is considered established
     * whenever a value update packet has been received from the
     * nav6 IMU within the last second.
     * @return Returns true if a valid update has been received within the last second.
     */
    public boolean isConnected() {
        double timeSinceLastUpdate = Timer.getFPGATimestamp() - this.lastUpdateTime;
        return timeSinceLastUpdate <= 1.0;
    }

    /**
     * Returns the count in bytes of data received from the
     * nav6 IMU. This count can be useful for diagnosing
     * connectivity issues.
     *
     * If the byte count is increasing, but the update count
     * (see getUpdateCount()) is not, this indicates a software
     * misconfiguration.
     * @return The number of bytes received from the nav6 IMU.
     */
    public double getByteCount() {
        return byteCount;
    }

    /**
     * Returns the count of valid update packets which have
     * been received from the nav6 IMU. This count should increase
     * at the same rate indicated by the configured update rate.
     * @return The number of valid updates received from the nav6 IMU.
     */
    public double getUpdateCount() {
        return updateCount;
    }

    /**
     * Returns true if the nav6 IMU is currently performing automatic
     * calibration. Automatic calibration occurs when the nav6 IMU
     * is initially powered on, during which time the nav6 IMU should
     * be held still.
     *
     * During this automatic calibration, the yaw, pitch and roll
     * values returned may not be accurate.
     *
     * Once complete, the nav6 IMU will automatically remove an internal
     * yaw offset value from all reported values.
     * @return Returns true if the nav6 IMU is currently calibrating.
     */
    public boolean isCalibrating() {
        short calibrationState = (short) (this.flags & IMUProtocol.NAV6_FLAG_MASK_CALIBRATION_STATE);
        return (calibrationState != IMUProtocol.NAV6_CALIBRATION_STATE_COMPLETE);
    }

    /**
     * Returns the current yaw value reported by the nav6 IMU. This
     * yaw value is useful for implementing features including "auto rotate
     * to a known angle".
     * @return The current yaw angle in degrees (-180 to 180).
     */
    public double pidGet() {
        return getYaw();
    }

    /** Pushes the current yaw value to the SmartDashboard table, if attached. */
    public void updateTable() {
        if (m_table != null) {
            m_table.putNumber("Value", getYaw());
        }
    }

    /** No-op: LiveWindow mode requires no special handling for this sensor. */
    public void startLiveWindowMode() {
    }

    /** No-op: LiveWindow mode requires no special handling for this sensor. */
    public void stopLiveWindowMode() {
    }

    public void initTable(ITable itable) {
        m_table = itable;
        updateTable();
    }

    public ITable getTable() {
        return m_table;
    }

    public String getSmartDashboardType() {
        return "Gyro";
    }

    /**
     * Invoked when a new packet is received; returns the packet length if the
     * packet is valid, based upon IMU Protocol definitions; otherwise, returns 0.
     */
    protected int decodePacketHandler(byte[] receivedData, int offset, int bytesRemaining) {
        int packetLength = IMUProtocol.decodeYPRUpdate(receivedData, offset, bytesRemaining, yprUpdateData);
        if (packetLength > 0) {
            setYawPitchRoll(yprUpdateData.yaw, yprUpdateData.pitch, yprUpdateData.roll, yprUpdateData.compass_heading);
        }
        return packetLength;
    }

    /**
     * IMU Class thread run method. Configures the serial port, issues the
     * stream command, then loops: reading bytes, scanning for valid packets,
     * and re-sending the stream command if no configuration response arrives
     * within three seconds.
     */
    public void run() {
        stop = false;
        boolean streamResponseReceived = false;
        double lastStreamCommandSentTimestamp = 0.0;
        try {
            serialPort.setReadBufferSize(512);
            serialPort.setTimeout(1.0);
            serialPort.enableTermination('\n');
            serialPort.flush();
            serialPort.reset();
        } catch (RuntimeException ex) {
            ex.printStackTrace();
        }
        IMUProtocol.StreamResponse response = new IMUProtocol.StreamResponse();

        byte[] streamCommand = new byte[256];
        int cmdPacketLength = IMUProtocol.encodeStreamCommand(streamCommand, updateType, updateRateHz);
        try {
            serialPort.reset();
            serialPort.write(streamCommand, cmdPacketLength);
            serialPort.flush();
            lastStreamCommandSentTimestamp = Timer.getFPGATimestamp();
        } catch (RuntimeException ex) {
            ex.printStackTrace();
        }

        while (!stop) {
            try {
                // Wait, with delays to conserve CPU resources, until
                // bytes have arrived.
                while (!stop && (serialPort.getBytesReceived() < 1)) {
                    Timer.delay(0.1);
                }
                int packetsReceived = 0;
                byte[] receivedData = serialPort.read(256);
                int bytesRead = receivedData.length;
                if (bytesRead > 0) {
                    byteCount += bytesRead;
                    int i = 0;
                    // Scan the buffer looking for valid packets.
                    while (i < bytesRead) {
                        // Attempt to decode a packet.
                        int bytesRemaining = bytesRead - i;
                        int packetLength = decodePacketHandler(receivedData, i, bytesRemaining);
                        if (packetLength > 0) {
                            packetsReceived++;
                            updateCount++;
                            i += packetLength;
                        } else {
                            packetLength = IMUProtocol.decodeStreamResponse(receivedData, i, bytesRemaining, response);
                            if (packetLength > 0) {
                                packetsReceived++;
                                setStreamResponse(response);
                                streamResponseReceived = true;
                                i += packetLength;
                            } else {
                                // Current index is not the start of a valid packet; increment.
                                i++;
                            }
                        }
                    }
                    if (packetsReceived == 0 && bytesRead == 256) {
                        // Workaround for issue found in Java SerialPort implementation:
                        // No packets received and 256 bytes received; this
                        // condition occurs in the Java SerialPort. In this case,
                        // reset the serial port.
                        serialPort.reset();
                    }
                    // If a stream configuration response has not been received within
                    // three seconds of operation, (re)send a stream configuration request.
                    if (!streamResponseReceived && ((Timer.getFPGATimestamp() - lastStreamCommandSentTimestamp) > 3.0)) {
                        cmdPacketLength = IMUProtocol.encodeStreamCommand(streamCommand, updateType, updateRateHz);
                        try {
                            lastStreamCommandSentTimestamp = Timer.getFPGATimestamp();
                            serialPort.write(streamCommand, cmdPacketLength);
                            serialPort.flush();
                        } catch (RuntimeException ex2) {
                            ex2.printStackTrace();
                        }
                    } else {
                        // If no bytes remain in the buffer, and not awaiting a response, sleep a bit.
                        if (streamResponseReceived && serialPort.getBytesReceived() == 0) {
                            Timer.delay(1.0 / updateRateHz);
                        }
                    }
                }
            } catch (RuntimeException ex) {
                // This exception typically indicates a Timeout.
                streamResponseReceived = false;
                ex.printStackTrace();
            }
        }
    }

    @Override
    public void setPIDSourceType(PIDSourceType pidSource) {
        // Intentionally unimplemented: this IMU only supplies yaw (displacement)
        // via pidGet(), so the source type is not configurable.
    }

    @Override
    public PIDSourceType getPIDSourceType() {
        // Intentionally returns null; see setPIDSourceType.
        return null;
    }
}
package com.cotescu.radu.commons;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.zip.GZIPInputStream;

/**
 * Class providing static methods for extracting GZIP compressed text files.
 * @author Radu Cotescu
 *
 */
public class GZIPHandler {
    private static final int BUFFER_SIZE = 2048;

    /**
     * This method extracts a GZIP compressed text file.
     * @param gzipArchive the File representing the GZIP archive
     * @param outputFile the full path wrapped in a File object to the extracted file
     * @param shouldAppend flag that sets if the extraction should add the extracted contents to an already existing file with the same name as the desired file name
     * @param shouldDeleteArchive flag that sets if the archive should be deleted after a successful extraction
     * @throws IOException if any I/O error is detected
     */
    public static void extract(File gzipArchive, File outputFile,
            boolean shouldAppend, boolean shouldDeleteArchive) throws IOException {
        // try-with-resources guarantees every stream is closed even when an
        // I/O error occurs; the previous implementation leaked all four
        // streams (and always leaked them on exception paths).
        //
        // NOTE(review): reader and writer use the platform default charset,
        // matching the original behavior — confirm UTF-8 is not required.
        try (FileInputStream fis = new FileInputStream(gzipArchive);
                GZIPInputStream gis = new GZIPInputStream(fis);
                InputStreamReader isr = new InputStreamReader(gis);
                FileWriter fw = new FileWriter(outputFile, shouldAppend)) {
            char[] charBuffer = new char[BUFFER_SIZE];
            int read;
            while ((read = isr.read(charBuffer)) != -1) {
                fw.write(charBuffer, 0, read);
            }
            fw.flush();
        }
        if (shouldDeleteArchive) {
            // The delete() result is intentionally ignored, preserving the
            // original best-effort semantics: a failed delete is not an error.
            gzipArchive.delete();
        }
    }
}
package org.nick.wwwjdic.hkr;

import java.util.Arrays;
import java.util.List;

import org.nick.wwwjdic.Analytics;
import org.nick.wwwjdic.Constants;
import org.nick.wwwjdic.R;
import org.nick.wwwjdic.WebServiceBackedActivity;
import org.nick.wwwjdic.ocr.WeOcrClient;

import android.content.Intent;
import android.content.SharedPreferences;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Matrix;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.preference.PreferenceManager;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.Toast;

/**
 * Activity for handwritten kanji recognition: the user draws strokes on a
 * canvas and the activity submits them either to a stroke-based kanji
 * recognizer (HKR) or, rasterized, to a WeOCR server. Recognition candidates
 * are forwarded to {@link HkrCandidates}.
 */
public class RecognizeKanjiActivity extends WebServiceBackedActivity implements
        OnClickListener {

    private static final String TAG = RecognizeKanjiActivity.class
            .getSimpleName();

    private static final String KR_DEFAULT_URL = "http://kanji.sljfaq.org/kanji16/kanji-0.016.cgi";
    private static final String PREF_KR_URL_KEY = "pref_kr_url";
    private static final String PREF_KR_TIMEOUT_KEY = "pref_kr_timeout";
    private static final String PREF_KR_ANNOTATE = "pref_kr_annotate";
    private static final String PREF_KR_ANNOTATE_MIDWAY = "pref_kr_annotate_midway";

    private static final String WEOCR_DEFAULT_URL = "http://maggie.ocrgrid.org/cgi-bin/weocr/nhocr.cgi";
    private static final String PREF_WEOCR_URL_KEY = "pref_weocr_url";
    private static final String PREF_WEOCR_TIMEOUT_KEY = "pref_weocr_timeout";

    private static final int OCR_IMAGE_WIDTH = 128;
    private static final int NUM_OCR_CANDIDATES = 20;

    private static final int HKR_RESULT = 1;

    private Button clearButton;
    private Button recognizeButton;
    private Button ocrButton;
    private CheckBox lookAheadCb;

    private KanjiDrawView drawView;

    @Override
    protected void activityOnCreate(Bundle savedInstanceState) {
        setContentView(R.layout.kanji_draw);
        setTitle(R.string.hkr);

        findViews();

        clearButton.setOnClickListener(this);
        recognizeButton.setOnClickListener(this);
        ocrButton.setOnClickListener(this);

        drawView.setAnnotateStrokes(isAnnotateStrokes());
        drawView.setAnnotateStrokesMidway(isAnnotateStrokesMidway());
        drawView.requestFocus();
    }

    @Override
    protected void onStart() {
        super.onStart();
        Analytics.startSession(this);
    }

    @Override
    protected void onStop() {
        super.onStop();
        Analytics.endSession(this);
    }

    @Override
    protected void onResume() {
        // Fix: lifecycle overrides must call through to super, otherwise
        // Android throws SuperNotCalledException at runtime.
        super.onResume();
        drawView.setAnnotateStrokes(isAnnotateStrokes());
        drawView.setAnnotateStrokesMidway(isAnnotateStrokesMidway());
    }

    /** Caches references to the views declared in the kanji_draw layout. */
    private void findViews() {
        drawView = (KanjiDrawView) this.findViewById(R.id.kanji_draw_view);
        clearButton = (Button) findViewById(R.id.clear_canvas_button);
        recognizeButton = (Button) findViewById(R.id.recognize_button);
        ocrButton = (Button) findViewById(R.id.ocr_button);
        lookAheadCb = (CheckBox) findViewById(R.id.lookAheadCb);
    }

    /**
     * Handler receiving recognition results on the UI thread. On success the
     * candidates are sent to the dictionary; on failure a toast is shown.
     */
    public static class RecognizeKanjiHandler extends WsResultHandler {

        public RecognizeKanjiHandler(RecognizeKanjiActivity krActivity) {
            super(krActivity);
        }

        @Override
        public void handleMessage(Message msg) {
            if (activity == null) {
                // we are in the process of rotating the screen, defer handling
                Message newMsg = obtainMessage(msg.what, msg.arg1, msg.arg2);
                newMsg.obj = msg.obj;
                sendMessageDelayed(newMsg, 500);

                return;
            }

            RecognizeKanjiActivity krActivity = (RecognizeKanjiActivity) activity;

            switch (msg.what) {
            case HKR_RESULT:
                krActivity.dismissProgressDialog();
                if (msg.arg1 == 1) {
                    String[] results = (String[]) msg.obj;
                    krActivity.sendToDictionary(results);
                } else {
                    Toast t = Toast.makeText(krActivity, R.string.hkr_failed,
                            Toast.LENGTH_SHORT);
                    t.show();
                }
                break;
            default:
                super.handleMessage(msg);
            }
        }
    }

    @Override
    protected WsResultHandler createHandler() {
        return new RecognizeKanjiHandler(this);
    }

    /** Background task that submits the drawn strokes to the KR web service. */
    class HkrTask implements Runnable {

        private List<Stroke> strokes;
        private Handler handler;

        public HkrTask(List<Stroke> strokes, Handler handler) {
            this.strokes = strokes;
            this.handler = handler;
        }

        @Override
        public void run() {
            try {
                KanjiRecognizerClient krClient = new KanjiRecognizerClient(
                        getKrUrl(), getKrTimeout());

                String[] results = krClient
                        .recognize(strokes, isUseLookahead());
                Log.i(TAG, "go KR result " + Arrays.asList(results));

                Message msg = handler.obtainMessage(HKR_RESULT, 1, 0);
                msg.obj = results;
                handler.sendMessage(msg);
            } catch (Exception e) {
                // Fix: was Log.e("TAG", ...) — the literal string broke
                // logcat filtering by the class tag.
                Log.e(TAG, "Character recognition failed", e);

                Message msg = handler.obtainMessage(HKR_RESULT, 0, 0);
                handler.sendMessage(msg);
            }
        }
    }

    private int getKrTimeout() {
        SharedPreferences preferences = PreferenceManager
                .getDefaultSharedPreferences(this);
        String timeoutStr = preferences.getString(PREF_KR_TIMEOUT_KEY, "10");

        return Integer.parseInt(timeoutStr) * 1000;
    }

    private String getKrUrl() {
        SharedPreferences preferences = PreferenceManager
                .getDefaultSharedPreferences(this);

        return preferences.getString(PREF_KR_URL_KEY, KR_DEFAULT_URL);
    }

    private boolean isAnnotateStrokesMidway() {
        SharedPreferences preferences = PreferenceManager
                .getDefaultSharedPreferences(this);

        return preferences.getBoolean(PREF_KR_ANNOTATE_MIDWAY, false);
    }

    // Renamed from the misspelled isAnnoateStrokes (private, so no external
    // callers are affected).
    private boolean isAnnotateStrokes() {
        SharedPreferences preferences = PreferenceManager
                .getDefaultSharedPreferences(this);

        return preferences.getBoolean(PREF_KR_ANNOTATE, true);
    }

    private boolean isUseLookahead() {
        return lookAheadCb.isChecked();
    }

    @Override
    public void onClick(View v) {
        switch (v.getId()) {
        case R.id.clear_canvas_button:
            clear();
            break;
        case R.id.recognize_button:
            recognizeKanji();
            break;
        case R.id.ocr_button:
            ocrKanji();
            break;
        default:
            // do nothing
        }
    }

    /** Rasterizes the current drawing and submits it to the WeOCR service. */
    private void ocrKanji() {
        Bitmap bitmap = drawingToBitmap();

        OcrTask task = new OcrTask(bitmap, handler);
        String message = getResources().getString(R.string.doing_hkr);
        submitWsTask(task, message);

        Analytics.event("recognizeKanjiOcr", this);
    }

    /** Background task that submits a rasterized drawing to the WeOCR server. */
    class OcrTask implements Runnable {

        private Bitmap bitmap;
        private Handler handler;

        public OcrTask(Bitmap b, Handler h) {
            bitmap = b;
            handler = h;
        }

        @Override
        public void run() {
            try {
                WeOcrClient client = new WeOcrClient(getWeocrUrl(),
                        getWeocrTimeout());
                String[] candidates = client.sendCharacterOcrRequest(bitmap,
                        NUM_OCR_CANDIDATES);

                if (candidates != null) {
                    Message msg = handler.obtainMessage(HKR_RESULT, 1, 0);
                    msg.obj = candidates;
                    handler.sendMessage(msg);
                } else {
                    // Fix: was Log.d("TAG", ...) — literal tag string.
                    Log.d(TAG, "OCR failed: null returned");

                    Message msg = handler.obtainMessage(HKR_RESULT, 0, 0);
                    handler.sendMessage(msg);
                }
            } catch (Exception e) {
                // Fix: was Log.e("TAG", ...) — literal tag string.
                Log.e(TAG, "OCR failed", e);

                Message msg = handler.obtainMessage(HKR_RESULT, 0, 0);
                handler.sendMessage(msg);
            }
        }
    }

    private int getWeocrTimeout() {
        SharedPreferences preferences = PreferenceManager
                .getDefaultSharedPreferences(this);
        String timeoutStr = preferences
                .getString(PREF_WEOCR_TIMEOUT_KEY, "10");

        return Integer.parseInt(timeoutStr) * 1000;
    }

    private String getWeocrUrl() {
        SharedPreferences preferences = PreferenceManager
                .getDefaultSharedPreferences(this);

        return preferences.getString(PREF_WEOCR_URL_KEY, WEOCR_DEFAULT_URL);
    }

    /**
     * Renders the drawing into a square bitmap (white strokes on gray) and
     * scales it down to OCR_IMAGE_WIDTH for submission to the OCR service.
     */
    private Bitmap drawingToBitmap() {
        // NOTE(review): both dimensions use getWidth(), producing a square
        // bitmap — presumably intentional for the OCR input; confirm.
        Bitmap b = Bitmap.createBitmap(drawView.getWidth(),
                drawView.getWidth(), Bitmap.Config.ARGB_8888);
        Canvas c = new Canvas(b);
        // Temporarily strip stroke annotations and recolor for OCR contrast,
        // then restore the view's on-screen appearance.
        boolean annotate = drawView.isAnnotateStrokes();
        drawView.setAnnotateStrokes(false);
        drawView.setBackgroundColor(0xff888888);
        drawView.setStrokePaintColor(Color.BLACK);
        drawView.draw(c);
        drawView.setAnnotateStrokes(annotate);
        drawView.setBackgroundColor(Color.BLACK);
        drawView.setStrokePaintColor(Color.WHITE);

        int width = drawView.getWidth();
        int newWidth = OCR_IMAGE_WIDTH;
        float scale = ((float) newWidth) / width;
        Matrix matrix = new Matrix();
        matrix.postScale(scale, scale);
        c.scale(scale, scale);
        Bitmap resized = Bitmap.createBitmap(b, 0, 0, width, width, matrix,
                true);

        return resized;
    }

    private void clear() {
        drawView.clear();
    }

    /** Submits the current strokes to the handwriting recognizer. */
    private void recognizeKanji() {
        List<Stroke> strokes = drawView.getStrokes();

        HkrTask task = new HkrTask(strokes, handler);
        String message = getResources().getString(R.string.doing_hkr);
        submitWsTask(task, message);

        Analytics.event("recognizeKanji", this);
    }

    /** Opens the candidate-selection screen with the recognition results. */
    public void sendToDictionary(String[] results) {
        Intent intent = new Intent(this, HkrCandidates.class);
        Bundle extras = new Bundle();
        extras.putStringArray(Constants.HKR_CANDIDATES_KEY, results);
        intent.putExtras(extras);

        startActivity(intent);
    }
}
package com.facebook.litho; import javax.annotation.Nullable; import java.util.Arrays; import java.util.LinkedList; import java.util.List; import android.content.res.Resources; import android.content.res.TypedArray; import android.graphics.Color; import android.graphics.drawable.Drawable; import android.graphics.Rect; import android.support.annotation.AttrRes; import android.support.annotation.ColorInt; import android.support.annotation.DimenRes; import android.support.annotation.Dimension; import android.support.annotation.DrawableRes; import android.support.annotation.Px; import android.support.annotation.StringRes; import android.support.v4.view.ViewCompat; import android.text.TextUtils; import android.util.SparseArray; import com.facebook.R; import com.facebook.litho.config.ComponentsConfiguration; import com.facebook.litho.reference.ColorDrawableReference; import com.facebook.litho.reference.Reference; import com.facebook.litho.reference.ResourceDrawableReference; import com.facebook.infer.annotation.ThreadConfined; import com.facebook.yoga.YogaAlign; import com.facebook.yoga.YogaBaselineFunction; import com.facebook.yoga.YogaFlexDirection; import com.facebook.yoga.YogaJustify; import com.facebook.yoga.YogaDirection; import com.facebook.yoga.YogaPositionType; import com.facebook.yoga.YogaWrap; import com.facebook.yoga.YogaEdge; import com.facebook.yoga.YogaConstants; import com.facebook.yoga.YogaMeasureFunction; import com.facebook.yoga.YogaNode; import com.facebook.yoga.YogaNodeAPI; import com.facebook.yoga.YogaOverflow; import com.facebook.yoga.Spacing; import static android.os.Build.VERSION.SDK_INT; import static android.os.Build.VERSION_CODES.ICE_CREAM_SANDWICH; import static android.os.Build.VERSION_CODES.JELLY_BEAN; import static android.os.Build.VERSION_CODES.JELLY_BEAN_MR1; import static android.support.annotation.Dimension.DP; import static com.facebook.litho.ComponentContext.NULL_LAYOUT; import static com.facebook.yoga.YogaEdge.ALL; import static 
com.facebook.yoga.YogaEdge.BOTTOM; import static com.facebook.yoga.YogaEdge.END; import static com.facebook.yoga.YogaEdge.HORIZONTAL; import static com.facebook.yoga.YogaEdge.LEFT; import static com.facebook.yoga.YogaEdge.RIGHT; import static com.facebook.yoga.YogaEdge.START; import static com.facebook.yoga.YogaEdge.TOP; import static com.facebook.yoga.YogaEdge.VERTICAL; /** * Internal class representing both a {@link ComponentLayout} and a * {@link com.facebook.litho.ComponentLayout.ContainerBuilder}. */ @ThreadConfined(ThreadConfined.ANY) class InternalNode implements ComponentLayout, ComponentLayout.ContainerBuilder { // Used to check whether or not the framework can use style IDs for // paddingStart/paddingEnd due to a bug in some Android devices. private static final boolean SUPPORTS_RTL = (SDK_INT >= JELLY_BEAN_MR1); // When this flag is set, layoutDirection style was explicitly set on this node. private static final long PFLAG_LAYOUT_DIRECTION_IS_SET = 1L << 0; // When this flag is set, alignSelf was explicitly set on this node. private static final long PFLAG_ALIGN_SELF_IS_SET = 1L << 1; // When this flag is set, position type was explicitly set on this node. private static final long PFLAG_POSITION_TYPE_IS_SET = 1L << 2; // When this flag is set, flex was explicitly set on this node. private static final long PFLAG_FLEX_IS_SET = 1L << 3; // When this flag is set, flex grow was explicitly set on this node. private static final long PFLAG_FLEX_GROW_IS_SET = 1L << 4; // When this flag is set, flex shrink was explicitly set on this node. private static final long PFLAG_FLEX_SHRINK_IS_SET = 1L << 5; // When this flag is set, flex basis was explicitly set on this node. private static final long PFLAG_FLEX_BASIS_IS_SET = 1L << 6; // When this flag is set, importantForAccessibility was explicitly set on this node. 
private static final long PFLAG_IMPORTANT_FOR_ACCESSIBILITY_IS_SET = 1L << 7; // When this flag is set, duplicateParentState was explicitly set on this node. private static final long PFLAG_DUPLICATE_PARENT_STATE_IS_SET = 1L << 8; // When this flag is set, margin was explicitly set on this node. private static final long PFLAG_MARGIN_IS_SET = 1L << 9; // When this flag is set, padding was explicitly set on this node. private static final long PFLAG_PADDING_IS_SET = 1L << 10; // When this flag is set, position was explicitly set on this node. private static final long PFLAG_POSITION_IS_SET = 1L << 11; // When this flag is set, width was explicitly set on this node. private static final long PFLAG_WIDTH_IS_SET = 1L << 12; // When this flag is set, minWidth was explicitly set on this node. private static final long PFLAG_MIN_WIDTH_IS_SET = 1L << 13; // When this flag is set, maxWidth was explicitly set on this node. private static final long PFLAG_MAX_WIDTH_IS_SET = 1L << 14; // When this flag is set, height was explicitly set on this node. private static final long PFLAG_HEIGHT_IS_SET = 1L << 15; // When this flag is set, minHeight was explicitly set on this node. private static final long PFLAG_MIN_HEIGHT_IS_SET = 1L << 16; // When this flag is set, maxHeight was explicitly set on this node. private static final long PFLAG_MAX_HEIGHT_IS_SET = 1L << 17; // When this flag is set, background was explicitly set on this node. private static final long PFLAG_BACKGROUND_IS_SET = 1L << 18; // When this flag is set, foreground was explicitly set on this node. private static final long PFLAG_FOREGROUND_IS_SET = 1L << 19; // When this flag is set, visibleHandler was explicitly set on this node. private static final long PFLAG_VISIBLE_HANDLER_IS_SET = 1L << 20; // When this flag is set, focusedHandler was explicitly set on this node. 
private static final long PFLAG_FOCUSED_HANDLER_IS_SET = 1L << 21; // When this flag is set, fullImpressionHandler was explicitly set on this node. private static final long PFLAG_FULL_IMPRESSION_HANDLER_IS_SET = 1L << 22; // When this flag is set, invisibleHandler was explicitly set on this node. private static final long PFLAG_INVISIBLE_HANDLER_IS_SET = 1L << 23; // When this flag is set, touch expansion was explicitly set on this node. private static final long PFLAG_TOUCH_EXPANSION_IS_SET = 1L << 24; // When this flag is set, border width was explicitly set on this node. private static final long PFLAG_BORDER_WIDTH_IS_SET = 1L << 25; // When this flag is set, aspectRatio was explicitly set on this node. private static final long PFLAG_ASPECT_RATIO_IS_SET = 1L << 26; // When this flag is set, transitionKey was explicitly set on this node. private static final long PFLAG_TRANSITION_KEY_IS_SET = 1L << 27; // When this flag is set, border color was explicitly set on this node. private static final long PFLAG_BORDER_COLOR_IS_SET = 1L << 28; private final ResourceResolver mResourceResolver = new ResourceResolver(); YogaNodeAPI mYogaNode; private ComponentContext mComponentContext; private Resources mResources; private Component mComponent; private int mImportantForAccessibility = ViewCompat.IMPORTANT_FOR_ACCESSIBILITY_AUTO; private boolean mDuplicateParentState; private boolean mIsNestedTreeHolder; private InternalNode mNestedTree; private InternalNode mNestedTreeHolder; private long mPrivateFlags; private Reference<? extends Drawable> mBackground; private Reference<? 
extends Drawable> mForeground; private int mBorderColor = Color.TRANSPARENT; private NodeInfo mNodeInfo; private boolean mForceViewWrapping; private String mTransitionKey; private EventHandler mVisibleHandler; private EventHandler mFocusedHandler; private EventHandler mFullImpressionHandler; private EventHandler mInvisibleHandler; private String mTestKey; private Spacing mTouchExpansion; private Spacing mNestedTreePadding; private Spacing mNestedTreeBorderWidth; private boolean[] mIsPaddingPercent; private float mResolvedTouchExpansionLeft = YogaConstants.UNDEFINED; private float mResolvedTouchExpansionRight = YogaConstants.UNDEFINED; private float mResolvedX = YogaConstants.UNDEFINED; private float mResolvedY = YogaConstants.UNDEFINED; private float mResolvedWidth = YogaConstants.UNDEFINED; private float mResolvedHeight = YogaConstants.UNDEFINED; private int mLastWidthSpec = DiffNode.UNSPECIFIED; private int mLastHeightSpec = DiffNode.UNSPECIFIED; private float mLastMeasuredWidth = DiffNode.UNSPECIFIED; private float mLastMeasuredHeight = DiffNode.UNSPECIFIED; private DiffNode mDiffNode; private boolean mCachedMeasuresValid; private TreeProps mPendingTreeProps; void init(YogaNodeAPI yogaNode, ComponentContext componentContext, Resources resources) { yogaNode.setData(this); yogaNode.setOverflow(YogaOverflow.HIDDEN); yogaNode.setMeasureFunction(null); // YogaNode is the only version of YogaNodeAPI with this support; if (yogaNode instanceof YogaNode) { yogaNode.setBaselineFunction(null); } mYogaNode = yogaNode; mComponentContext = componentContext; mResources = resources; mResourceResolver.init( mComponentContext, componentContext.getResourceCache()); } @Px @Override public int getX() { if (YogaConstants.isUndefined(mResolvedX)) { mResolvedX = mYogaNode.getLayoutX(); } return (int) mResolvedX; } @Px @Override public int getY() { if (YogaConstants.isUndefined(mResolvedY)) { mResolvedY = mYogaNode.getLayoutY(); } return (int) mResolvedY; } @Px @Override public int 
// --- Resolved layout getters, measure-cache accessors, and flex/margin/padding style setters. ---
// getWidth()/getHeight() lazily cache the Yoga layout size in mResolvedWidth/mResolvedHeight
// (YogaConstants.isUndefined acts as the "not yet resolved" sentinel); padding getters round
// the float Yoga padding to int pixels via FastMath.round. (Flattened source; bytes preserved.)
getWidth() { if (YogaConstants.isUndefined(mResolvedWidth)) { mResolvedWidth = mYogaNode.getLayoutWidth(); } return (int) mResolvedWidth; } @Px @Override public int getHeight() { if (YogaConstants.isUndefined(mResolvedHeight)) { mResolvedHeight = mYogaNode.getLayoutHeight(); } return (int) mResolvedHeight; } @Px @Override public int getPaddingLeft() { return FastMath.round(mYogaNode.getLayoutPadding(LEFT)); } @Px @Override public int getPaddingTop() { return FastMath.round(mYogaNode.getLayoutPadding(TOP)); } @Px @Override public int getPaddingRight() { return FastMath.round(mYogaNode.getLayoutPadding(RIGHT)); } @Px @Override public int getPaddingBottom() { return FastMath.round(mYogaNode.getLayoutPadding(BOTTOM)); } public Reference<? extends Drawable> getBackground() { return mBackground; } public Reference<? extends Drawable> getForeground() { return mForeground; } public void setCachedMeasuresValid(boolean valid) { mCachedMeasuresValid = valid; } public int getLastWidthSpec() { return mLastWidthSpec; } public void setLastWidthSpec(int widthSpec) { mLastWidthSpec = widthSpec; } public int getLastHeightSpec() { return mLastHeightSpec; } public void setLastHeightSpec(int heightSpec) { mLastHeightSpec = heightSpec; } public boolean hasVisibilityHandlers() { return mVisibleHandler != null || mFocusedHandler != null || mFullImpressionHandler != null || mInvisibleHandler != null; } /** * The last value the measure function associated with this node {@link Component} returned * for the width. This is used together with {@link InternalNode#getLastWidthSpec()} * to implement measure caching. */ float getLastMeasuredWidth() { return mLastMeasuredWidth; } /** * Sets the last value the measure function associated with this node {@link Component} returned * for the width. */ void setLastMeasuredWidth(float lastMeasuredWidth) { mLastMeasuredWidth = lastMeasuredWidth; } /** * The last value the measure function associated with this node {@link Component} returned * for the height. 
This is used together with {@link InternalNode#getLastHeightSpec()} * to implement measure caching. */ float getLastMeasuredHeight() { return mLastMeasuredHeight; } /** * Sets the last value the measure function associated with this node {@link Component} returned * for the height. */ void setLastMeasuredHeight(float lastMeasuredHeight) { mLastMeasuredHeight = lastMeasuredHeight; } DiffNode getDiffNode() { return mDiffNode; } boolean areCachedMeasuresValid() { return mCachedMeasuresValid; } void setDiffNode(DiffNode diffNode) { mDiffNode = diffNode; } /** * Mark this node as a nested tree root holder. */ void markIsNestedTreeHolder(TreeProps currentTreeProps) { mIsNestedTreeHolder = true; mPendingTreeProps = TreeProps.copy(currentTreeProps); } /** * @return Whether this node is holding a nested tree or not. The decision was made during * tree creation {@link ComponentLifecycle#createLayout(ComponentContext, Component, boolean)}. */ boolean isNestedTreeHolder() { return mIsNestedTreeHolder; } @Override public YogaDirection getResolvedLayoutDirection() { return mYogaNode.getLayoutDirection(); } @Override public InternalNode layoutDirection(YogaDirection direction) { mPrivateFlags |= PFLAG_LAYOUT_DIRECTION_IS_SET; mYogaNode.setDirection(direction); return this; } @Override public InternalNode flexDirection(YogaFlexDirection direction) { mYogaNode.setFlexDirection(direction); return this; } @Override public InternalNode wrap(YogaWrap wrap) { mYogaNode.setWrap(wrap); return this; } @Override public InternalNode justifyContent(YogaJustify justifyContent) { mYogaNode.setJustifyContent(justifyContent); return this; } @Override public InternalNode alignItems(YogaAlign alignItems) { mYogaNode.setAlignItems(alignItems); return this; } @Override public InternalNode alignContent(YogaAlign alignContent) { mYogaNode.setAlignContent(alignContent); return this; } @Override public InternalNode alignSelf(YogaAlign alignSelf) { mPrivateFlags |= PFLAG_ALIGN_SELF_IS_SET; 
// (continuation of alignSelf) Each builder-style setter below records a PFLAG_*_IS_SET bit in
// mPrivateFlags so later code (e.g. copyInto) can distinguish explicit values from defaults.
mYogaNode.setAlignSelf(alignSelf); return this; } @Override public InternalNode positionType(YogaPositionType positionType) { mPrivateFlags |= PFLAG_POSITION_TYPE_IS_SET; mYogaNode.setPositionType(positionType); return this; } @Override public InternalNode flex(float flex) { mPrivateFlags |= PFLAG_FLEX_IS_SET; mYogaNode.setFlex(flex); return this; } @Override public InternalNode flexGrow(float flexGrow) { mPrivateFlags |= PFLAG_FLEX_GROW_IS_SET; mYogaNode.setFlexGrow(flexGrow); return this; } @Override public InternalNode flexShrink(float flexShrink) { mPrivateFlags |= PFLAG_FLEX_SHRINK_IS_SET; mYogaNode.setFlexShrink(flexShrink); return this; } @Override public InternalNode flexBasisPx(@Px int flexBasis) { mPrivateFlags |= PFLAG_FLEX_BASIS_IS_SET; mYogaNode.setFlexBasis(flexBasis); return this; } @Override public InternalNode flexBasisPercent(float percent) { mPrivateFlags |= PFLAG_FLEX_BASIS_IS_SET; mYogaNode.setFlexBasisPercent(percent); return this; } @Override public InternalNode flexBasisAttr(@AttrRes int resId, @DimenRes int defaultResId) { return flexBasisPx(mResourceResolver.resolveDimenOffsetAttr(resId, defaultResId)); } @Override public InternalNode flexBasisAttr(@AttrRes int resId) { return flexBasisAttr(resId, 0); } @Override public InternalNode flexBasisRes(@DimenRes int resId) { return flexBasisPx(mResourceResolver.resolveDimenOffsetRes(resId)); } @Override public InternalNode flexBasisDip(@Dimension(unit = DP) int flexBasis) { return flexBasisPx(mResourceResolver.dipsToPixels(flexBasis)); } @Override public InternalNode importantForAccessibility(int importantForAccessibility) { mPrivateFlags |= PFLAG_IMPORTANT_FOR_ACCESSIBILITY_IS_SET; mImportantForAccessibility = importantForAccessibility; return this; } @Override public InternalNode duplicateParentState(boolean duplicateParentState) { mPrivateFlags |= PFLAG_DUPLICATE_PARENT_STATE_IS_SET; mDuplicateParentState = duplicateParentState; return this; } @Override public InternalNode marginPx(YogaEdge 
// margin setters: record PFLAG_MARGIN_IS_SET then delegate to the Yoga node; padding setters
// additionally route through mNestedTreePadding when this node is a nested-tree holder.
edge, @Px int margin) { mPrivateFlags |= PFLAG_MARGIN_IS_SET; mYogaNode.setMargin(edge, margin); return this; } @Override public InternalNode marginPercent(YogaEdge edge, float percent) { mPrivateFlags |= PFLAG_MARGIN_IS_SET; mYogaNode.setMarginPercent(edge, percent); return this; } @Override public InternalNode marginAuto(YogaEdge edge) { mPrivateFlags |= PFLAG_MARGIN_IS_SET; mYogaNode.setMarginAuto(edge); return this; } @Override public InternalNode marginAttr( YogaEdge edge, @AttrRes int resId, @DimenRes int defaultResId) { return marginPx(edge, mResourceResolver.resolveDimenOffsetAttr(resId, defaultResId)); } @Override public InternalNode marginAttr( YogaEdge edge, @AttrRes int resId) { return marginAttr(edge, resId, 0); } @Override public InternalNode marginRes(YogaEdge edge, @DimenRes int resId) { return marginPx(edge, mResourceResolver.resolveDimenOffsetRes(resId)); } @Override public InternalNode marginDip(YogaEdge edge, @Dimension(unit = DP) int margin) { return marginPx(edge, mResourceResolver.dipsToPixels(margin)); } @Override public InternalNode paddingPx(YogaEdge edge, @Px int padding) { mPrivateFlags |= PFLAG_PADDING_IS_SET; if (mIsNestedTreeHolder) { if (mNestedTreePadding == null) { mNestedTreePadding = ComponentsPools.acquireSpacing(); } mNestedTreePadding.set(edge.intValue(), padding); setIsPaddingPercent(edge, false); } else { mYogaNode.setPadding(edge, padding); } return this; } @Override public InternalNode paddingPercent(YogaEdge edge, float percent) { mPrivateFlags |= PFLAG_PADDING_IS_SET; if (mIsNestedTreeHolder) { if (mNestedTreePadding == null) { mNestedTreePadding = ComponentsPools.acquireSpacing(); } mNestedTreePadding.set(edge.intValue(), percent); setIsPaddingPercent(edge, true); } else { mYogaNode.setPaddingPercent(edge, percent); } return this; } @Override public InternalNode paddingAttr( YogaEdge edge, @AttrRes int resId, @DimenRes int defaultResId) { return paddingPx(edge, mResourceResolver.resolveDimenOffsetAttr(resId, 
defaultResId)); } @Override public InternalNode paddingAttr( YogaEdge edge, @AttrRes int resId) { return paddingAttr(edge, resId, 0); } @Override public InternalNode paddingRes(YogaEdge edge, @DimenRes int resId) { return paddingPx(edge, mResourceResolver.resolveDimenOffsetRes(resId)); } @Override public InternalNode paddingDip(YogaEdge edge, @Dimension(unit = DP) int padding) { return paddingPx(edge, mResourceResolver.dipsToPixels(padding)); } @Override public InternalNode borderWidthPx(YogaEdge edge, @Px int borderWidth) { mPrivateFlags |= PFLAG_BORDER_WIDTH_IS_SET; if (mIsNestedTreeHolder) { if (mNestedTreeBorderWidth == null) { mNestedTreeBorderWidth = ComponentsPools.acquireSpacing(); } mNestedTreeBorderWidth.set(edge.intValue(), borderWidth); } else { mYogaNode.setBorder(edge, borderWidth); } return this; } @Override public InternalNode borderWidthAttr( YogaEdge edge, @AttrRes int resId, @DimenRes int defaultResId) { return borderWidthPx(edge, mResourceResolver.resolveDimenOffsetAttr(resId, defaultResId)); } @Override public InternalNode borderWidthAttr( YogaEdge edge, @AttrRes int resId) { return borderWidthAttr(edge, resId, 0); } @Override public InternalNode borderWidthRes(YogaEdge edge, @DimenRes int resId) { return borderWidthPx(edge, mResourceResolver.resolveDimenOffsetRes(resId)); } @Override public InternalNode borderWidthDip( YogaEdge edge, @Dimension(unit = DP) int borderWidth) { return borderWidthPx(edge, mResourceResolver.dipsToPixels(borderWidth)); } @Override public Builder borderColor(@ColorInt int borderColor) { mPrivateFlags |= PFLAG_BORDER_COLOR_IS_SET; mBorderColor = borderColor; return this; } @Override public InternalNode positionPx(YogaEdge edge, @Px int position) { mPrivateFlags |= PFLAG_POSITION_IS_SET; mYogaNode.setPosition(edge, position); return this; } @Override public InternalNode positionPercent(YogaEdge edge, float percent) { mPrivateFlags |= PFLAG_POSITION_IS_SET; mYogaNode.setPositionPercent(edge, percent); return this; } 
@Override public InternalNode positionAttr( YogaEdge edge, @AttrRes int resId, @DimenRes int defaultResId) { return positionPx(edge, mResourceResolver.resolveDimenOffsetAttr(resId, defaultResId)); } @Override public InternalNode positionAttr(YogaEdge edge, @AttrRes int resId) { return positionAttr(edge, resId, 0); } @Override public InternalNode positionRes(YogaEdge edge, @DimenRes int resId) { return positionPx(edge, mResourceResolver.resolveDimenOffsetRes(resId)); } @Override public InternalNode positionDip( YogaEdge edge, @Dimension(unit = DP) int position) { return positionPx(edge, mResourceResolver.dipsToPixels(position)); } @Override public InternalNode widthPx(@Px int width) { mPrivateFlags |= PFLAG_WIDTH_IS_SET; mYogaNode.setWidth(width); return this; } @Override public InternalNode widthPercent(float percent) { mPrivateFlags |= PFLAG_WIDTH_IS_SET; mYogaNode.setWidthPercent(percent); return this; } @Override public InternalNode widthRes(@DimenRes int resId) { return widthPx(mResourceResolver.resolveDimenSizeRes(resId)); } @Override public InternalNode widthAttr(@AttrRes int resId, @DimenRes int defaultResId) { return widthPx(mResourceResolver.resolveDimenSizeAttr(resId, defaultResId)); } @Override public InternalNode widthAttr(@AttrRes int resId) { return widthAttr(resId, 0); } @Override public InternalNode widthDip(@Dimension(unit = DP) int width) { return widthPx(mResourceResolver.dipsToPixels(width)); } @Override public InternalNode minWidthPx(@Px int minWidth) { mPrivateFlags |= PFLAG_MIN_WIDTH_IS_SET; mYogaNode.setMinWidth(minWidth); return this; } @Override public InternalNode minWidthPercent(float percent) { mPrivateFlags |= PFLAG_MIN_WIDTH_IS_SET; mYogaNode.setMinWidthPercent(percent); return this; } @Override public InternalNode minWidthAttr(@AttrRes int resId, @DimenRes int defaultResId) { return minWidthPx(mResourceResolver.resolveDimenSizeAttr(resId, defaultResId)); } @Override public InternalNode minWidthAttr(@AttrRes int resId) { return 
minWidthAttr(resId, 0); } @Override public InternalNode minWidthRes(@DimenRes int resId) { return minWidthPx(mResourceResolver.resolveDimenSizeRes(resId)); } @Override public InternalNode minWidthDip(@Dimension(unit = DP) int minWidth) { return minWidthPx(mResourceResolver.dipsToPixels(minWidth)); } @Override public InternalNode maxWidthPx(@Px int maxWidth) { mPrivateFlags |= PFLAG_MAX_WIDTH_IS_SET; mYogaNode.setMaxWidth(maxWidth); return this; } @Override public InternalNode maxWidthPercent(float percent) { mPrivateFlags |= PFLAG_MAX_WIDTH_IS_SET; mYogaNode.setMaxWidthPercent(percent); return this; } @Override public InternalNode maxWidthAttr(@AttrRes int resId, @DimenRes int defaultResId) { return maxWidthPx(mResourceResolver.resolveDimenSizeAttr(resId, defaultResId)); } @Override public InternalNode maxWidthAttr(@AttrRes int resId) { return maxWidthAttr(resId, 0); } @Override public InternalNode maxWidthRes(@DimenRes int resId) { return maxWidthPx(mResourceResolver.resolveDimenSizeRes(resId)); } @Override public InternalNode maxWidthDip(@Dimension(unit = DP) int maxWidth) { return maxWidthPx(mResourceResolver.dipsToPixels(maxWidth)); } @Override public InternalNode heightPx(@Px int height) { mPrivateFlags |= PFLAG_HEIGHT_IS_SET; mYogaNode.setHeight(height); return this; } @Override public InternalNode heightPercent(float percent) { mPrivateFlags |= PFLAG_HEIGHT_IS_SET; mYogaNode.setHeightPercent(percent); return this; } @Override public InternalNode heightRes(@DimenRes int resId) { return heightPx(mResourceResolver.resolveDimenSizeRes(resId)); } @Override public InternalNode heightAttr(@AttrRes int resId, @DimenRes int defaultResId) { return heightPx(mResourceResolver.resolveDimenSizeAttr(resId, defaultResId)); } @Override public InternalNode heightAttr(@AttrRes int resId) { return heightAttr(resId, 0); } @Override public InternalNode heightDip(@Dimension(unit = DP) int height) { return heightPx(mResourceResolver.dipsToPixels(height)); } @Override public 
InternalNode minHeightPx(@Px int minHeight) { mPrivateFlags |= PFLAG_MIN_HEIGHT_IS_SET; mYogaNode.setMinHeight(minHeight); return this; } @Override public InternalNode minHeightPercent(float percent) { mPrivateFlags |= PFLAG_MIN_HEIGHT_IS_SET; mYogaNode.setMinHeightPercent(percent); return this; } @Override public InternalNode minHeightAttr(@AttrRes int resId, @DimenRes int defaultResId) { return minHeightPx(mResourceResolver.resolveDimenSizeAttr(resId, defaultResId)); } @Override public InternalNode minHeightAttr(@AttrRes int resId) { return minHeightAttr(resId, 0); } @Override public InternalNode minHeightRes(@DimenRes int resId) { return minHeightPx(mResourceResolver.resolveDimenSizeRes(resId)); } @Override public InternalNode minHeightDip(@Dimension(unit = DP) int minHeight) { return minHeightPx(mResourceResolver.dipsToPixels(minHeight)); } @Override public InternalNode maxHeightPx(@Px int maxHeight) { mPrivateFlags |= PFLAG_MAX_HEIGHT_IS_SET; mYogaNode.setMaxHeight(maxHeight); return this; } @Override public InternalNode maxHeightPercent(float percent) { mPrivateFlags |= PFLAG_MAX_HEIGHT_IS_SET; mYogaNode.setMaxHeightPercent(percent); return this; } @Override public InternalNode maxHeightAttr(@AttrRes int resId, @DimenRes int defaultResId) { return maxHeightPx(mResourceResolver.resolveDimenSizeAttr(resId, defaultResId)); } @Override public InternalNode maxHeightAttr(@AttrRes int resId) { return maxHeightAttr(resId, 0); } @Override public InternalNode maxHeightRes(@DimenRes int resId) { return maxHeightPx(mResourceResolver.resolveDimenSizeRes(resId)); } @Override public InternalNode maxHeightDip(@Dimension(unit = DP) int maxHeight) { return maxHeightPx(mResourceResolver.dipsToPixels(maxHeight)); } @Override public InternalNode aspectRatio(float aspectRatio) { mPrivateFlags |= PFLAG_ASPECT_RATIO_IS_SET; if (mYogaNode instanceof YogaNode) { ((YogaNode) mYogaNode).setAspectRatio(aspectRatio); return this; } else { throw new IllegalStateException("Aspect ration 
requires using YogaNode not YogaNodeDEPRECATED"); } } private boolean shouldApplyTouchExpansion() { return mTouchExpansion != null && mNodeInfo != null && mNodeInfo.hasTouchEventHandlers(); } boolean hasTouchExpansion() { return ((mPrivateFlags & PFLAG_TOUCH_EXPANSION_IS_SET) != 0L); } Spacing getTouchExpansion() { return mTouchExpansion; } int getTouchExpansionLeft() { if (!shouldApplyTouchExpansion()) { return 0; } if (YogaConstants.isUndefined(mResolvedTouchExpansionLeft)) { mResolvedTouchExpansionLeft = resolveHorizontalSpacing(mTouchExpansion, Spacing.LEFT); } return FastMath.round(mResolvedTouchExpansionLeft); } int getTouchExpansionTop() { if (!shouldApplyTouchExpansion()) { return 0; } return FastMath.round(mTouchExpansion.get(Spacing.TOP)); } int getTouchExpansionRight() { if (!shouldApplyTouchExpansion()) { return 0; } if (YogaConstants.isUndefined(mResolvedTouchExpansionRight)) { mResolvedTouchExpansionRight = resolveHorizontalSpacing(mTouchExpansion, Spacing.RIGHT); } return FastMath.round(mResolvedTouchExpansionRight); } int getTouchExpansionBottom() { if (!shouldApplyTouchExpansion()) { return 0; } return FastMath.round(mTouchExpansion.get(Spacing.BOTTOM)); } @Override public InternalNode touchExpansionPx(YogaEdge edge, @Px int touchExpansion) { if (mTouchExpansion == null) { mTouchExpansion = ComponentsPools.acquireSpacing(); } mPrivateFlags |= PFLAG_TOUCH_EXPANSION_IS_SET; mTouchExpansion.set(edge.intValue(), touchExpansion); return this; } @Override public InternalNode touchExpansionAttr( YogaEdge edge, @AttrRes int resId, @DimenRes int defaultResId) { return touchExpansionPx( edge, mResourceResolver.resolveDimenOffsetAttr(resId, defaultResId)); } @Override public InternalNode touchExpansionAttr( YogaEdge edge, @AttrRes int resId) { return touchExpansionAttr(edge, resId, 0); } @Override public InternalNode touchExpansionRes(YogaEdge edge, @DimenRes int resId) { return touchExpansionPx(edge, mResourceResolver.resolveDimenOffsetRes(resId)); } 
// --- Child management, background/foreground drawable references, click/visibility/a11y
// handler setters, Yoga layout plumbing, and copyInto (truncated below). Flattened source;
// bytes preserved, comments only added. ---
// NOTE(review): child(Component<?>) below chains .flexShrink(0) three times; a single call
// should suffice — verify against upstream and deduplicate.
@Override public InternalNode touchExpansionDip( YogaEdge edge, @Dimension(unit = DP) int touchExpansion) { return touchExpansionPx(edge, mResourceResolver.dipsToPixels(touchExpansion)); } @Override public InternalNode child(ComponentLayout child) { if (child != null && child != NULL_LAYOUT) { addChildAt((InternalNode) child, mYogaNode.getChildCount()); } return this; } @Override public InternalNode child(ComponentLayout.Builder child) { if (child != null && child != NULL_LAYOUT) { child(child.build()); } return this; } @Override public InternalNode child(Component<?> child) { if (child != null) { child(Layout.create(mComponentContext, child).flexShrink(0).flexShrink(0).flexShrink(0)); } return this; } @Override public InternalNode child(Component.Builder<?> child) { if (child != null) { child(child.build()); } return this; } @Override public InternalNode background(Reference<? extends Drawable> background) { mPrivateFlags |= PFLAG_BACKGROUND_IS_SET; mBackground = background; setPaddingFromDrawableReference(background); return this; } @Override public InternalNode background(Reference.Builder<? extends Drawable> builder) { return background(builder.build()); } @Override public InternalNode backgroundAttr(@AttrRes int resId, @DrawableRes int defaultResId) { return backgroundRes(mResourceResolver.resolveResIdAttr(resId, defaultResId)); } @Override public InternalNode backgroundAttr(@AttrRes int resId) { return backgroundAttr(resId, 0); } @Override public InternalNode backgroundRes(@DrawableRes int resId) { if (resId == 0) { return background((Reference<Drawable>) null); } return background( ResourceDrawableReference.create(mComponentContext) .resId(resId) .build()); } @Override public InternalNode backgroundColor(@ColorInt int backgroundColor) { return background( ColorDrawableReference.create(mComponentContext) .color(backgroundColor) .build()); } @Override public InternalNode foreground(Reference<? 
extends Drawable> foreground) { mPrivateFlags |= PFLAG_FOREGROUND_IS_SET; mForeground = foreground; return this; } @Override public InternalNode foreground(Reference.Builder<? extends Drawable> builder) { return foreground(builder.build()); } @Override public InternalNode foregroundAttr(@AttrRes int resId, @DrawableRes int defaultResId) { return foregroundRes(mResourceResolver.resolveResIdAttr(resId, defaultResId)); } @Override public InternalNode foregroundAttr(@AttrRes int resId) { return foregroundAttr(resId, 0); } @Override public InternalNode foregroundRes(@DrawableRes int resId) { if (resId == 0) { return foreground((Reference<Drawable>) null); } return foreground( ResourceDrawableReference.create(mComponentContext) .resId(resId) .build()); } @Override public InternalNode foregroundColor(@ColorInt int foregroundColor) { return foreground( ColorDrawableReference.create(mComponentContext) .color(foregroundColor) .build()); } @Override public InternalNode wrapInView() { mForceViewWrapping = true; return this; } boolean isForceViewWrapping() { return mForceViewWrapping; } @Override public InternalNode clickHandler(EventHandler clickHandler) { getOrCreateNodeInfo().setClickHandler(clickHandler); return this; } @Override public InternalNode longClickHandler(EventHandler longClickHandler) { getOrCreateNodeInfo().setLongClickHandler(longClickHandler); return this; } @Override public InternalNode touchHandler(EventHandler touchHandler) { getOrCreateNodeInfo().setTouchHandler(touchHandler); return this; } @Override public ContainerBuilder focusable(boolean isFocusable) { getOrCreateNodeInfo().setFocusable(isFocusable); return this; } @Override public InternalNode visibleHandler(EventHandler visibleHandler) { mPrivateFlags |= PFLAG_VISIBLE_HANDLER_IS_SET; mVisibleHandler = visibleHandler; return this; } EventHandler getVisibleHandler() { return mVisibleHandler; } @Override public InternalNode focusedHandler(EventHandler focusedHandler) { mPrivateFlags |= 
PFLAG_FOCUSED_HANDLER_IS_SET; mFocusedHandler = focusedHandler; return this; } EventHandler getFocusedHandler() { return mFocusedHandler; } @Override public InternalNode fullImpressionHandler(EventHandler fullImpressionHandler) { mPrivateFlags |= PFLAG_FULL_IMPRESSION_HANDLER_IS_SET; mFullImpressionHandler = fullImpressionHandler; return this; } EventHandler getFullImpressionHandler() { return mFullImpressionHandler; } @Override public InternalNode invisibleHandler(EventHandler invisibleHandler) { mPrivateFlags |= PFLAG_INVISIBLE_HANDLER_IS_SET; mInvisibleHandler = invisibleHandler; return this; } EventHandler getInvisibleHandler() { return mInvisibleHandler; } @Override public InternalNode contentDescription(CharSequence contentDescription) { getOrCreateNodeInfo().setContentDescription(contentDescription); return this; } @Override public InternalNode contentDescription(@StringRes int stringId) { return contentDescription(mResources.getString(stringId)); } @Override public InternalNode contentDescription(@StringRes int stringId, Object... 
formatArgs) { return contentDescription(mResources.getString(stringId, formatArgs)); } @Override public InternalNode viewTag(Object viewTag) { getOrCreateNodeInfo().setViewTag(viewTag); return this; } @Override public InternalNode viewTags(SparseArray<Object> viewTags) { getOrCreateNodeInfo().setViewTags(viewTags); return this; } @Override public InternalNode testKey(String testKey) { mTestKey = testKey; return this; } @Override public InternalNode dispatchPopulateAccessibilityEventHandler( EventHandler<DispatchPopulateAccessibilityEventEvent> dispatchPopulateAccessibilityEventHandler) { getOrCreateNodeInfo().setDispatchPopulateAccessibilityEventHandler( dispatchPopulateAccessibilityEventHandler); return this; } @Override public InternalNode onInitializeAccessibilityEventHandler( EventHandler<OnInitializeAccessibilityEventEvent> onInitializeAccessibilityEventHandler) { getOrCreateNodeInfo().setOnInitializeAccessibilityEventHandler( onInitializeAccessibilityEventHandler); return this; } @Override public InternalNode onInitializeAccessibilityNodeInfoHandler( EventHandler<OnInitializeAccessibilityNodeInfoEvent> onInitializeAccessibilityNodeInfoHandler) { getOrCreateNodeInfo().setOnInitializeAccessibilityNodeInfoHandler( onInitializeAccessibilityNodeInfoHandler); return this; } @Override public InternalNode onPopulateAccessibilityEventHandler( EventHandler<OnPopulateAccessibilityEventEvent> onPopulateAccessibilityEventHandler) { getOrCreateNodeInfo().setOnPopulateAccessibilityEventHandler( onPopulateAccessibilityEventHandler); return this; } @Override public InternalNode onRequestSendAccessibilityEventHandler( EventHandler<OnRequestSendAccessibilityEventEvent> onRequestSendAccessibilityEventHandler) { getOrCreateNodeInfo().setOnRequestSendAccessibilityEventHandler( onRequestSendAccessibilityEventHandler); return this; } @Override public InternalNode performAccessibilityActionHandler( EventHandler<PerformAccessibilityActionEvent> performAccessibilityActionHandler) { 
// (inside performAccessibilityActionHandler) all a11y setters delegate to the lazily-created NodeInfo.
getOrCreateNodeInfo().setPerformAccessibilityActionHandler(performAccessibilityActionHandler); return this; } @Override public InternalNode sendAccessibilityEventHandler( EventHandler<SendAccessibilityEventEvent> sendAccessibilityEventHandler) { getOrCreateNodeInfo().setSendAccessibilityEventHandler(sendAccessibilityEventHandler); return this; } @Override public InternalNode sendAccessibilityEventUncheckedHandler( EventHandler<SendAccessibilityEventUncheckedEvent> sendAccessibilityEventUncheckedHandler) { getOrCreateNodeInfo().setSendAccessibilityEventUncheckedHandler( sendAccessibilityEventUncheckedHandler); return this; } @Override public ContainerBuilder transitionKey(String key) { if (SDK_INT >= ICE_CREAM_SANDWICH) { mPrivateFlags |= PFLAG_TRANSITION_KEY_IS_SET; mTransitionKey = key; wrapInView(); } return this; } String getTransitionKey() { return mTransitionKey; } /** * A unique identifier which may be set for retrieving a component and its bounds when testing. */ String getTestKey() { return mTestKey; } void setMeasureFunction(YogaMeasureFunction measureFunction) { mYogaNode.setMeasureFunction(measureFunction); } void setBaselineFunction(YogaBaselineFunction baselineFunction) { // YogaNode is the only version of YogaNodeAPI with this support; if (mYogaNode instanceof YogaNode) { mYogaNode.setBaselineFunction(baselineFunction); } } boolean hasNewLayout() { return mYogaNode.hasNewLayout(); } void markLayoutSeen() { mYogaNode.markLayoutSeen(); } float getStyleWidth() { return mYogaNode.getWidth().value; } float getMinWidth() { return mYogaNode.getMinWidth().value; } float getMaxWidth() { return mYogaNode.getMaxWidth().value; } float getStyleHeight() { return mYogaNode.getHeight().value; } float getMinHeight() { return mYogaNode.getMinHeight().value; } float getMaxHeight() { return mYogaNode.getMaxHeight().value; } void calculateLayout(float width, float height) { final ComponentTree tree = mComponentContext == null ? 
null : mComponentContext.getComponentTree(); final ComponentsStethoManager stethoManager = tree == null ? null : tree.getStethoManager(); if (stethoManager != null) { applyOverridesRecursive(stethoManager, this); } mYogaNode.calculateLayout(width, height); } private static void applyOverridesRecursive( ComponentsStethoManager stethoManager, InternalNode node) { stethoManager.applyOverrides(node); for (int i = 0, count = node.getChildCount(); i < count; i++) { applyOverridesRecursive(stethoManager, node.getChildAt(i)); } if (node.hasNestedTree()) { applyOverridesRecursive(stethoManager, node.getNestedTree()); } } void calculateLayout() { calculateLayout(YogaConstants.UNDEFINED, YogaConstants.UNDEFINED); } int getChildCount() { return mYogaNode.getChildCount(); } com.facebook.yoga.YogaDirection getStyleDirection() { return mYogaNode.getStyleDirection(); } InternalNode getChildAt(int index) { if (mYogaNode.getChildAt(index) == null) { return null; } return (InternalNode) mYogaNode.getChildAt(index).getData(); } int getChildIndex(InternalNode child) { for (int i = 0, count = mYogaNode.getChildCount(); i < count; i++) { if (mYogaNode.getChildAt(i) == child.mYogaNode) { return i; } } return -1; } InternalNode getParent() { if (mYogaNode == null || mYogaNode.getParent() == null) { return null; } return (InternalNode) mYogaNode.getParent().getData(); } void addChildAt(InternalNode child, int index) { mYogaNode.addChildAt(child.mYogaNode, index); } InternalNode removeChildAt(int index) { return (InternalNode) mYogaNode.removeChildAt(index).getData(); } @Override public ComponentLayout build() { return this; } private float resolveHorizontalSpacing(Spacing spacing, int index) { final boolean isRtl = (mYogaNode.getLayoutDirection() == YogaDirection.RTL); final int resolvedIndex; switch (index) { case Spacing.LEFT: resolvedIndex = (isRtl ? Spacing.END : Spacing.START); break; case Spacing.RIGHT: resolvedIndex = (isRtl ? 
Spacing.START : Spacing.END); break; default: throw new IllegalArgumentException("Not an horizontal padding index: " + index); } float result = spacing.getRaw(resolvedIndex); if (YogaConstants.isUndefined(result)) { result = spacing.get(index); } return result; } ComponentContext getContext() { return mComponentContext; } Component getComponent() { return mComponent; } int getBorderColor() { return mBorderColor; } boolean shouldDrawBorders() { return mBorderColor != Color.TRANSPARENT && (mYogaNode.getLayoutBorder(LEFT) != 0 || mYogaNode.getLayoutBorder(TOP) != 0 || mYogaNode.getLayoutBorder(RIGHT) != 0 || mYogaNode.getLayoutBorder(BOTTOM) != 0); } void setComponent(Component component) { mComponent = component; } boolean hasNestedTree() { return mNestedTree != null; } @Nullable InternalNode getNestedTree() { return mNestedTree; } InternalNode getNestedTreeHolder() { return mNestedTreeHolder; } /** * Set the nested tree before measuring it in order to transfer over important information * such as layout direction needed during measurement. 
*/ void setNestedTree(InternalNode nestedTree) { nestedTree.mNestedTreeHolder = this; mNestedTree = nestedTree; } NodeInfo getNodeInfo() { return mNodeInfo; } void copyInto(InternalNode node) { if (mNodeInfo != null) { if (node.mNodeInfo == null) { node.mNodeInfo = mNodeInfo.acquireRef(); } else { node.mNodeInfo.updateWith(mNodeInfo); } } if ((node.mPrivateFlags & PFLAG_LAYOUT_DIRECTION_IS_SET) == 0L || node.getResolvedLayoutDirection() == YogaDirection.INHERIT) { node.layoutDirection(getResolvedLayoutDirection()); } if ((node.mPrivateFlags & PFLAG_IMPORTANT_FOR_ACCESSIBILITY_IS_SET) == 0L || node.mImportantForAccessibility == ViewCompat.IMPORTANT_FOR_ACCESSIBILITY_AUTO) { node.mImportantForAccessibility = mImportantForAccessibility; } if ((mPrivateFlags & PFLAG_DUPLICATE_PARENT_STATE_IS_SET) != 0L) { node.mDuplicateParentState = mDuplicateParentState; } if ((mPrivateFlags & PFLAG_BACKGROUND_IS_SET) != 0L) { node.mBackground = mBackground; } if ((mPrivateFlags & PFLAG_FOREGROUND_IS_SET) != 0L) { node.mForeground = mForeground; } if (mForceViewWrapping) { node.mForceViewWrapping = true; } if ((mPrivateFlags & PFLAG_VISIBLE_HANDLER_IS_SET) != 0L) { node.mVisibleHandler = mVisibleHandler; } if ((mPrivateFlags & PFLAG_FOCUSED_HANDLER_IS_SET) != 0L) { node.mFocusedHandler = mFocusedHandler; } if ((mPrivateFlags & PFLAG_FULL_IMPRESSION_HANDLER_IS_SET) != 0L) { node.mFullImpressionHandler = mFullImpressionHandler; }
package jlibs.xml.sax.binding;

import jlibs.xml.sax.binding.impl.Registry;
import jlibs.xml.sax.binding.impl.Relation;
import jlibs.xml.sax.binding.impl.processor.BindingAnnotationProcessor;

import javax.xml.namespace.QName;

/**
 * Registry that maps element qnames to their annotation-processor-generated
 * binding implementations. For each registered class, the generated companion
 * class (named via {@link BindingAnnotationProcessor#FORMAT}) is loaded
 * reflectively and its singleton binding is installed in the registry.
 *
 * @author Santhosh Kumar T
 */
@SuppressWarnings({"unchecked"})
public class BindingRegistry{
    Registry registry = new Registry();

    /** Registers every given class under the qname declared by its generated binding. */
    public BindingRegistry(Class... classes){
        for(Class clazz: classes)
            register(clazz);
    }

    /** Registers a single class under an explicitly supplied qname. */
    public BindingRegistry(QName qname, Class clazz){
        register(qname, clazz);
    }

    /** Registers {@code clazz} using the qname from its generated ELEMENT constant. */
    public void register(Class clazz){
        register(null, clazz);
    }

    /**
     * Registers {@code clazz} under {@code qname} (or, when {@code qname} is null,
     * under the ELEMENT qname declared by the generated binding class).
     *
     * @throws IllegalArgumentException if no qname was supplied and none is declared
     * @throws RuntimeException wrapping any reflection failure while loading the binding
     */
    public void register(QName qname, Class clazz){
        try{
            // Derive the generated implementation's fully-qualified name from the template.
            String pkg = clazz.getPackage()!=null ? clazz.getPackage().getName() : "";
            String generatedName = BindingAnnotationProcessor.FORMAT
                    .replace("${package}", pkg)
                    .replace("${class}", clazz.getSimpleName());
            if(generatedName.startsWith(".")) // default package
                generatedName = generatedName.substring(1);

            Class generated = clazz.getClassLoader().loadClass(generatedName);
            if(qname==null)
                qname = (QName)generated.getDeclaredField("ELEMENT").get(null);
            if(qname==null)
                throw new IllegalArgumentException("can't find qname for: "+generated);

            jlibs.xml.sax.binding.impl.Binding binding =
                    (jlibs.xml.sax.binding.impl.Binding)generated.getDeclaredField("INSTANCE").get(null);
            registry.register(qname, 0, binding, 0, Relation.DO_NOTHING);
        }catch(ClassNotFoundException ex){
            throw new RuntimeException(ex);
        }catch(NoSuchFieldException ex){
            throw new RuntimeException(ex);
        }catch(IllegalAccessException ex){
            throw new RuntimeException(ex);
        }
    }
}
package com.timepath.hex;

import com.timepath.curses.Multiplexer;
import com.timepath.curses.Terminal;
import com.timepath.io.BitBuffer;

import javax.swing.*;
import java.awt.*;
import java.awt.event.*;
import java.beans.*;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.BufferUnderflowException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * A terminal-grid hex editor component composed of several {@link Terminal} panes multiplexed
 * onto one Swing component: a column header, address column, hex-byte grid, ASCII grid, a
 * bit-shift indicator and a value-interpretation panel.
 * <p>
 * Data comes from either a {@link RandomAccessFile} ({@link #setData(RandomAccessFile)}) or a
 * {@link ByteBuffer} ({@link #setData(ByteBuffer)}); only one visible page (cols * rows bytes,
 * by default 16x16) is loaded into {@code bitBuffer} at a time. Caret and mark positions are
 * managed via bound/constrained bean properties so moves can be vetoed when out of range.
 * <p>
 * NOTE(review): {@code metrics} used in painting is inherited from the curses superclass —
 * presumably per-character cell dimensions; confirm against {@code Multiplexer}.
 */
public class HexEditor extends Multiplexer implements KeyListener, MouseMotionListener, MouseListener, MouseWheelListener {

    /** Constrained/bound property name for the caret (cursor) byte offset. */
    protected static final String PROP_CARETLOCATION = "PROP_CARETLOCATION";
    /** Constrained/bound property name for the selection mark byte offset. */
    protected static final String PROP_MARKLOCATION = "PROP_MARKLOCATION";
    private static final Logger LOG = Logger.getLogger(HexEditor.class.getName());
    // Persistent selections created with Enter; a transient one (mark..caret) is drawn in paint().
    protected final List<Selection> tags = new LinkedList<>();
    protected final Terminal termData;    // hex byte grid
    protected final Terminal termText;    // ASCII rendering of the same bytes
    protected final Terminal termLines;   // address column (8 hex digits per row)
    protected final Terminal termHeader;  // column-offset header row
    protected final Terminal termShift;   // single-cell bit-shift indicator
    protected final Terminal termCalc;    // 8/16/32-bit value interpretation panel
    protected PropertyChangeSupport propertyChangeSupport = new PropertyChangeSupport(this);
    protected VetoableChangeSupport vetoableChangeSupport = new VetoableChangeSupport(this);
    protected BitBuffer bitBuffer;        // current page of data, possibly bit-shifted
    protected ByteBuffer sourceBuf;       // backing buffer, when buffer-sourced
    protected long caretLocation;
    protected int cols = 16;
    protected int limit;                  // last addressable byte offset (inclusive)
    protected long markLocation;          // -1 when unset (see reset())
    protected long offset;                // first byte offset of the visible page
    protected RandomAccessFile sourceRAF; // backing file, when file-sourced
    protected int rows = 16;
    @SuppressWarnings("BooleanVariableAlwaysNegated")
    protected boolean selecting;          // true while Shift is held: caret moves keep the mark
    protected int bitShift;               // sub-byte view shift, always in [0, 8)

    /**
     * Builds the terminal panes, lays them out, wires input listeners and the
     * caret-property plumbing, then resets caret/mark state.
     */
    public HexEditor() {
        termData = new Terminal(cols * 3, rows);
        termData.xPos = 9;
        termData.yPos = 1;
        termText = new Terminal(cols, rows);
        termText.xPos = 9 + cols * 3;
        termText.yPos = 1;
        termCalc = new Terminal(54, 6);
        termCalc.yPos = 1 + rows + 1;
        termHeader = new Terminal(( 3 * cols ) - 1, 1);
        termHeader.xPos = 9;
        initColumns();
        termLines = new Terminal(8, rows);
        termLines.yPos = 1;
        Arrays.fill(termLines.fgBuf, Color.GREEN);
        Arrays.fill(termLines.bgBuf, Color.DARK_GRAY);
        termShift = new Terminal(1, 1);
        Arrays.fill(termShift.fgBuf, Color.CYAN);
        Arrays.fill(termShift.bgBuf, Color.BLACK);
        setBackground(Color.BLACK);
        add(termData, termText, termLines, termHeader, termShift, termCalc);
        addKeyListener(this);
        addMouseMotionListener(this);
        addMouseListener(this);
        addMouseWheelListener(this);
        // Reject caret moves that would land outside [0, limit].
        vetoableChangeSupport.addVetoableChangeListener(PROP_CARETLOCATION, new VetoableChangeListener() {
            @Override
            public void vetoableChange(PropertyChangeEvent evt) throws PropertyVetoException {
                long v = (Long) evt.getNewValue();
                if(( v < 0 ) || ( v > limit )) {
                    throw new PropertyVetoException("Caret would be out of bounds", evt);
                }
            }
        });
        // After a caret move: page the view if the caret left the visible window, and
        // drag the mark along unless a Shift-selection is in progress.
        propertyChangeSupport.addPropertyChangeListener(PROP_CARETLOCATION, new PropertyChangeListener() {
            @Override
            public void propertyChange(PropertyChangeEvent evt) {
                long newPos = (Long) evt.getNewValue();
                if(newPos < offset) { // on previous page
                    skip(-( cols * rows ));
                } else if(newPos >= ( offset + ( cols * rows ) )) { // on next page
                    skip(cols * rows);
                }
                if(!selecting) {
                    try {
                        setMarkLocation(newPos);
                    } catch(PropertyVetoException ex) {
                        LOG.log(Level.SEVERE, null, ex);
                    }
                }
            }
        });
        setFocusable(true);
        reset();
    }

    /** Writes the " 00 01 ... 0F" column-offset header in black-on-white. */
    protected void initColumns() {
        Arrays.fill(termHeader.bgBuf, Color.WHITE);
        Arrays.fill(termHeader.fgBuf, Color.BLACK);
        StringBuilder sb = new StringBuilder(cols * 3);
        for(int i = 0; i < cols; i++) {
            sb.append(String.format(" %02X", i & 0xFF));
        }
        termHeader.position(0, 0);
        // substring(1) drops the leading space so the header is (3*cols - 1) chars wide.
        termHeader.write(sb.substring(1));
    }

    /** Scrolls the view by {@code delta} bytes relative to the current page offset. */
    public void skip(long delta) {
        seek(offset + delta);
    }

    /**
     * Repositions the visible page to start at {@code seek} (clamped to a row boundary
     * within the data) and reloads {@code bitBuffer} from whichever source is active.
     */
    public void seek(long seek) {
        seek = Math.max(Math.min(seek, limit - ( limit % cols )), 0);
        if(sourceRAF != null) {
            try {
                sourceRAF.seek(seek);
                byte[] array = new byte[(int) Math.min(cols * rows, sourceRAF.length() - seek)];
                // NOTE(review): read() may return fewer bytes than requested; readFully()
                // would guarantee a full page — confirm whether partial reads matter here.
                sourceRAF.read(array);
                bitBuffer = new BitBuffer(ByteBuffer.wrap(array));
                bitBuffer.position(0, bitShift);
                offset = seek;
            } catch(IOException ex) {
                LOG.log(Level.SEVERE, null, ex);
            }
        } else if(sourceBuf != null) {
            sourceBuf.position((int) seek);
            bitBuffer = new BitBuffer(getSlice(sourceBuf));
            bitBuffer.position(0, bitShift);
            offset = seek;
        }
    }

    /** Slices everything remaining in {@code source}; see {@link #getSlice(ByteBuffer, int)}. */
    protected ByteBuffer getSlice(ByteBuffer source) {
        return getSlice(source, source.remaining());
    }

    /**
     * Returns a little-endian slice of {@code length} bytes starting at the source's
     * current position, advancing the source past the slice. The source's limit is
     * restored before returning.
     */
    protected ByteBuffer getSlice(ByteBuffer source, int length) {
        int originalLimit = source.limit();
        source.limit(source.position() + length);
        ByteBuffer sub = source.slice();
        source.position(source.limit());
        source.limit(originalLimit);
        sub.order(ByteOrder.LITTLE_ENDIAN);
        return sub;
    }

    /** Clears selection state: mark unset (-1), caret at 0. */
    protected void reset() {
        markLocation = -1;
        caretLocation = 0;
    }

    /** Opens {@code file} for read/write access. */
    public static RandomAccessFile mapFile(File file) throws FileNotFoundException {
        return new RandomAccessFile(file, "rw");
    }

    /**
     * @return the markLocation (byte offset of the selection mark, -1 when unset)
     */
    public long getMarkLocation() {
        return markLocation;
    }

    /**
     * Sets the selection mark as a constrained bean property; no-op when unchanged.
     *
     * @param markLocation the markLocation to set
     * @throws java.beans.PropertyVetoException if a vetoable listener rejects the change
     */
    public void setMarkLocation(long markLocation) throws PropertyVetoException {
        long oldMarkLocation = this.markLocation;
        if(oldMarkLocation == markLocation) return;
        vetoableChangeSupport.fireVetoableChange(PROP_MARKLOCATION, oldMarkLocation, markLocation);
        this.markLocation = markLocation;
        propertyChangeSupport.firePropertyChange(PROP_MARKLOCATION, oldMarkLocation, markLocation);
    }

    @Override
    public void keyTyped(KeyEvent e) {
    }

    /**
     * Keyboard navigation: arrows move the caret, Home/End jump within the row
     * (or to start/end of data with Ctrl), PgUp/PgDn scroll one row, Shift begins a
     * selection, Enter commits the current selection as a persistent red tag.
     * Vetoed (out-of-range) moves are silently logged at FINER.
     */
    @Override
    public void keyPressed(KeyEvent e) {
        try {
            boolean update = true;
            switch(e.getKeyCode()) {
                case KeyEvent.VK_UP:
                    setCaretLocation(caretLocation - cols);
                    break;
                case KeyEvent.VK_DOWN:
                    setCaretLocation(Math.min(caretLocation + cols, limit));
                    break;
                case KeyEvent.VK_LEFT:
                    setCaretLocation(caretLocation - 1);
                    break;
                case KeyEvent.VK_RIGHT:
                    setCaretLocation(caretLocation + 1);
                    break;
                case KeyEvent.VK_SHIFT:
                    selecting = true;
                    break;
                case KeyEvent.VK_HOME:
                    if(e.isControlDown()) {
                        seek(0);
                    } else {
                        setCaretLocation(caretLocation - ( caretLocation % cols ));
                    }
                    break;
                case KeyEvent.VK_END:
                    if(e.isControlDown()) {
                        // NOTE(review): divides by rows; row-count math presumably intends
                        // division by cols — only equivalent because cols == rows == 16. Verify.
                        int rowsTotal = ( ( limit + cols ) - 1 ) / rows;
                        seek(( cols * rowsTotal ) - ( cols * rows ));
                    } else {
                        setCaretLocation(Math.min(( caretLocation + cols ) - 1 - ( caretLocation % cols ), limit));
                    }
                    break;
                case KeyEvent.VK_PAGE_DOWN:
                    skip(cols);
                    break;
                case KeyEvent.VK_PAGE_UP:
                    skip(-cols);
                    break;
                case KeyEvent.VK_ENTER:
                    tags.add(new Selection(markLocation, caretLocation, Color.RED));
                    break;
                default:
                    update = false;
                    break;
            }
            if(update) update();
        } catch(PropertyVetoException ex) {
            LOG.log(Level.FINER, null, ex);
        }
    }

    /** Releasing Shift ends selection mode. */
    @Override
    public void keyReleased(KeyEvent e) {
        if(e.getKeyCode() == KeyEvent.VK_SHIFT) {
            selecting = false;
        }
    }

    /**
     * @return the caretLocation (byte offset of the cursor)
     */
    long getCaretLocation() {
        return caretLocation;
    }

    /**
     * Sets the caret as a constrained bean property; no-op when unchanged. The registered
     * listeners handle paging and mark-following (see the constructor).
     *
     * @param caretLocation the caretLocation to set
     * @throws java.beans.PropertyVetoException if the new position is out of bounds
     */
    public void setCaretLocation(long caretLocation) throws PropertyVetoException {
        long oldCaretLocation = this.caretLocation;
        if(oldCaretLocation == caretLocation) return;
        vetoableChangeSupport.fireVetoableChange(PROP_CARETLOCATION, oldCaretLocation, caretLocation);
        this.caretLocation = caretLocation;
        propertyChangeSupport.firePropertyChange(PROP_CARETLOCATION, oldCaretLocation, caretLocation);
    }

    /** Dragging with the left button behaves like repeated presses (extends the caret). */
    @Override
    public void mouseDragged(MouseEvent e) {
        if(SwingUtilities.isLeftMouseButton(e)) {
            mousePressed(e);
        }
    }

    @Override
    public void mouseMoved(MouseEvent e) {
    }

    @Override
    public void mouseClicked(MouseEvent e) {
    }

    /**
     * Left click places the caret under the pointer, hit-testing first the hex grid
     * (3 cells per byte, skipping the separator column) and then the ASCII grid
     * (1 cell per byte). Vetoed moves are ignored.
     */
    @Override
    public void mousePressed(MouseEvent e) {
        requestFocusInWindow();
        if(SwingUtilities.isLeftMouseButton(e)) {
            int cell;
            if(( cell = termData.viewToCell(e.getPoint()) ) >= 0) {
                // Every third cell is the space between byte pairs; ignore hits on it.
                if(( ( cell + 1 ) % ( cols * 3 ) ) != 0) {
                    int i = ( cell + 1 ) / 3;
                    try {
                        setCaretLocation(offset + i);
                    } catch(PropertyVetoException ignored) {
                    }
                }
            }
            if(( cell = termText.viewToCell(e.getPoint()) ) >= 0) {
                try {
                    setCaretLocation(offset + cell);
                } catch(PropertyVetoException ignored) {
                }
            }
            update();
        }
    }

    /** Refreshes every pane from current state and repaints. */
    public void update() {
        updateRows();
        termShift.position(0, 0);
        termShift.write(bitShift);
        updateData();
        try {
            updateStats();
        } catch(BufferUnderflowException ignored) {
            // Stats panel is best-effort near the end of the data.
        }
        repaint();
    }

    /** Rewrites the 8-hex-digit address labels for each visible row. */
    protected void updateRows() {
        for(int i = 0; i < rows; i++) {
            String address = String.format("%08X", ( i * cols ) + offset);
            termLines.position(0, i);
            termLines.write(address);
        }
    }

    /** Renders the current page into the hex grid and the ASCII grid, row by row. */
    protected void updateData() {
        termData.clear();
        termText.clear();
        if(bitBuffer == null) return;
        bitBuffer.position(0, bitShift);
        int row = 0;
        byte[] bytes = new byte[cols];
        while(bitBuffer.hasRemaining()) {
            int read = Math.min(bitBuffer.remaining(), bytes.length);
            bitBuffer.get(bytes, 0, read);
            StringBuilder sb = new StringBuilder(read * 3);
            for(int i = 0; i < read; i++) {
                sb.append(String.format(" %02X", bytes[i] & 0xFF));
            }
            termData.position(0, row);
            termData.write(sb.substring(1));
            StringBuilder sb2 = new StringBuilder(read);
            for(int i = 0; i < read; i++) {
                sb2.append(displayChar(bytes[i] & 0xFF));
            }
            termText.position(0, row);
            termText.write(sb2.toString());
            if(++row >= rows) break;
        }
    }

    /** Maps a byte value to its printable form; whitespace/control chars become '.'. */
    protected String displayChar(int i) {
        return String.valueOf(( Character.isWhitespace(i) || Character.isISOControl(i) ) ? '.' : (char) i);
    }

    /**
     * Fills the interpretation panel with the bytes at the caret decoded as unsigned and
     * signed 8/16/32-bit values (16/32-bit shown in both little- and big-endian), plus a
     * binary dump of up to four bytes. Does nothing when the caret is off-page.
     */
    protected void updateStats() {
        int pos = (int) ( caretLocation - offset );
        termCalc.clear();
        if(( bitBuffer == null ) || ( pos > bitBuffer.limit() ) || ( pos < 0 )) return;
        bitBuffer.position(pos, bitShift);
        byte[] temp = new byte[Math.min(bitBuffer.remaining(), 4)];
        bitBuffer.get(temp);
        bitBuffer.position(pos, bitShift);
        ByteBuffer calcBuf = ByteBuffer.wrap(temp);
        // Column x-offsets: labels, little-endian values, big-endian values / binary dump.
        int[] idx = { 0, 6, 18 };
        int yOff = 0;
        termCalc.position(idx[0], yOff);
        termCalc.write(" 8");
        termCalc.position(idx[0], yOff + 1);
        termCalc.write("± 8");
        termCalc.position(idx[0], yOff + 2);
        termCalc.write(" 16");
        termCalc.position(idx[0], yOff + 3);
        termCalc.write("± 16");
        termCalc.position(idx[0], yOff + 4);
        termCalc.write(" 32");
        termCalc.position(idx[0], yOff + 5);
        termCalc.write("± 32");
        // byte
        calcBuf.position(0);
        long value = calcBuf.get();
        termCalc.position(idx[1], yOff);
        termCalc.write(value & 0xFF);
        // Negative values shift left one cell to make room for the '-' sign.
        termCalc.position(idx[1] + ( ( value < 0 ) ? -1 : 0 ), yOff + 1);
        termCalc.write(value);
        // binary
        for(int i = 0; i < temp.length; i++) {
            termCalc.position(idx[2] + ( i * 9 ), yOff);
            termCalc.write(new StringBuilder(binaryDump(temp[i] & 0xFF)));
        }
        // short
        calcBuf.position(0);
        calcBuf.order(ByteOrder.LITTLE_ENDIAN);
        value = calcBuf.getShort();
        termCalc.position(idx[1], yOff + 2);
        termCalc.write(value & 0xFFFF);
        termCalc.position(idx[1] + ( ( value < 0 ) ? -1 : 0 ), yOff + 3);
        termCalc.write(value);
        calcBuf.position(0);
        calcBuf.order(ByteOrder.BIG_ENDIAN);
        value = calcBuf.getShort();
        termCalc.position(idx[2], yOff + 2);
        termCalc.write(value & 0xFFFF);
        termCalc.position(idx[2] + ( ( value < 0 ) ? -1 : 0 ), yOff + 3);
        termCalc.write(value);
        // int
        calcBuf.position(0);
        calcBuf.order(ByteOrder.LITTLE_ENDIAN);
        value = calcBuf.getInt();
        termCalc.position(idx[1], yOff + 4);
        termCalc.write(value & 0xFFFFFFFFL);
        termCalc.position(idx[1] + ( ( value < 0 ) ? -1 : 0 ), yOff + 5);
        termCalc.write(value);
        calcBuf.position(0);
        calcBuf.order(ByteOrder.BIG_ENDIAN);
        value = calcBuf.getInt();
        termCalc.position(idx[2], yOff + 4);
        termCalc.write(value & 0xFFFFFFFFL);
        termCalc.position(idx[2] + ( ( value < 0 ) ? -1 : 0 ), yOff + 5);
        termCalc.write(value);
    }

    /** Renders {@code l} as an 8-character zero-padded binary string. */
    protected String binaryDump(long l) {
        return String.format("%8s", Long.toBinaryString(l)).replace(' ', '0');
    }

    @Override
    public void mouseReleased(MouseEvent e) {
    }

    @Override
    public void mouseEntered(MouseEvent e) {
    }

    @Override
    public void mouseExited(MouseEvent e) {
    }

    /**
     * Sets the sub-byte bit shift, normalizing into [0, 8) by moving the caret one byte
     * in the direction of the overflow.
     */
    public void setBitShift(int bitShift) {
        if(( bitShift < 0 ) || ( bitShift >= 8 )) {
            // Shifting off current byte
            try {
                setCaretLocation(caretLocation + Math.round(Math.signum(bitShift)));
            } catch(PropertyVetoException ignored) {
            }
            // Bring back into acceptable range
            bitShift += 8;
            bitShift %= 8;
        }
        this.bitShift = bitShift;
    }

    /** Wheel scrolls one row per unit; Ctrl+wheel adjusts the bit shift instead. */
    @Override
    public void mouseWheelMoved(MouseWheelEvent e) {
        if(e.getScrollType() == MouseWheelEvent.WHEEL_UNIT_SCROLL) {
            if(e.isControlDown()) {
                if(e.getWheelRotation() > 0) {
                    setBitShift(bitShift + 1);
                } else if(e.getWheelRotation() < 0) {
                    setBitShift(bitShift - 1);
                }
            } else {
                skip(e.getUnitsToScroll() * cols);
            }
            update();
        }
    }

    /**
     * Paints the terminals, then overlays: each tagged selection plus the live
     * (mark..caret) selection as outline polygons in both grids, the mark cell in
     * yellow and the caret cell in white when on the visible page.
     */
    @Override
    public void paint(Graphics g) {
        super.paint(g);
        Graphics2D g2 = (Graphics2D) g;
        // Index == tags.size() is the transient, uncommitted selection.
        for(int i = 0; i < ( tags.size() + 1 ); i++) {
            Selection sel = ( i == tags.size() ) ? new Selection(markLocation, caretLocation, Color.RED) : tags.get(i);
            g2.setColor(sel.getColor());
            if(sel.getMark() >= 0) {
                Polygon p = calcPolygon(termData, sel.getMark(), sel.getCaret(), 2, 1);
                g2.drawPolygon(p);
                p = calcPolygon(termText, sel.getMark(), sel.getCaret(), 1, 0);
                g2.drawPolygon(p);
            }
        }
        long markLoc = markLocation;
        if(( markLoc >= offset ) && ( markLoc < ( offset + ( cols * rows ) ) )) {
            g2.setColor(Color.YELLOW);
            g2.draw(getCellRect(termData, markLoc, 2, 1));
            g2.draw(getCellRect(termText, markLoc, 1, 0));
        }
        long caretLoc = caretLocation;
        if(( caretLoc >= offset ) && ( caretLoc < ( offset + ( cols * rows ) ) )) {
            g2.setColor(Color.WHITE);
            g2.draw(getCellRect(termData, caretLoc, 2, 1));
            g2.draw(getCellRect(termText, caretLoc, 1, 0));
        }
    }

    /**
     * Pixel rectangle of one byte cell in {@code term}.
     *
     * @param width   cells per byte in this terminal (2 for hex digits, 1 for ASCII)
     * @param spacing separator cells between bytes (1 for hex, 0 for ASCII)
     */
    protected Shape getCellRect(Terminal term, long address, int width, int spacing) {
        address -= offset;
        Point p = term.cellToView(address * ( width + spacing ));
        return new Rectangle(p.x, p.y, metrics.width * width, metrics.height);
    }

    /**
     * Builds the outline polygon of a (possibly multi-row) selection from mark to caret
     * in terminal-local pixel space, then translates it to component space. Endpoints are
     * clamped to the visible page; width/spacing as in {@link #getCellRect}.
     */
    protected Polygon calcPolygon(Terminal term, long markIdx, long caretIdx, int width, int spacing) {
        caretIdx -= offset;
        long caretRow = caretIdx / cols;
        if(caretIdx < 0) {
            caretIdx = 0;
        } else if(caretIdx > ( cols * rows )) {
            caretIdx = cols * rows - 1;
        }
        Point caretPos = term.cellToView(caretIdx * ( width + spacing ));
        caretPos.translate(-term.xPos * metrics.width, -term.yPos * metrics.height);
        markIdx -= offset;
        long markRow = markIdx / cols;
        if(markIdx < 0) {
            markIdx = 0;
        } else if(markIdx > ( cols * rows )) {
            markIdx = cols * rows - 1;
        }
        Point markPos = term.cellToView(markIdx * ( width + spacing ));
        markPos.translate(-term.xPos * metrics.width, -term.yPos * metrics.height);
        // rel encodes caret position relative to mark: x = column delta, y = row delta.
        Point rel = new Point((int) ( caretIdx - markIdx ), (int) ( caretRow - markRow ));
        if(rel.x >= 0) { // further right
            caretPos.x += metrics.width * width;
        } else {
            markPos.x += metrics.width * width;
        }
        if(rel.y >= 0) { // further down
            caretPos.y += metrics.height;
        } else {
            markPos.y += metrics.height;
        }
        Polygon p = new Polygon();
        p.addPoint(markPos.x, markPos.y);
        // Top edge: wrap along the right margin when the caret is on a later row,
        // along the left margin when earlier, straight across when on the same row.
        if(rel.y > 0) {
            p.addPoint(( ( cols * ( width + spacing ) ) - spacing ) * metrics.width, markPos.y);
            p.addPoint(( ( cols * ( width + spacing ) ) - spacing ) * metrics.width, caretPos.y - metrics.height);
            p.addPoint(caretPos.x, caretPos.y - metrics.height);
        } else if(rel.y < 0) {
            p.addPoint(0, markPos.y);
            p.addPoint(0, caretPos.y + metrics.height);
            p.addPoint(caretPos.x, caretPos.y + metrics.height);
        } else {
            p.addPoint(caretPos.x, markPos.y);
        }
        p.addPoint(caretPos.x, caretPos.y);
        // Bottom edge mirrors the top edge back toward the mark.
        if(rel.y > 0) {
            p.addPoint(0, caretPos.y);
            p.addPoint(0, markPos.y + metrics.height);
            p.addPoint(markPos.x, markPos.y + metrics.height);
        } else if(rel.y < 0) {
            p.addPoint(( ( cols * ( width + spacing ) ) - spacing ) * metrics.width, caretPos.y);
            p.addPoint(( ( cols * ( width + spacing ) ) - spacing ) * metrics.width, markPos.y - metrics.height);
            p.addPoint(markPos.x, markPos.y - metrics.height);
        } else {
            p.addPoint(markPos.x, caretPos.y);
        }
        p.translate(term.xPos * metrics.width, term.yPos * metrics.height);
        return p;
    }

    /**
     * Switches the editor to a file source (or detaches it when {@code rf} is null),
     * then rewinds to offset 0 and refreshes.
     */
    public void setData(RandomAccessFile rf) {
        reset();
        sourceRAF = rf;
        if(rf != null) {
            try {
                limit = (int) rf.length() - 1;
            } catch(IOException ex) {
                LOG.log(Level.SEVERE, null, ex);
            }
        }
        seek(0);
        update();
    }

    /**
     * Switches the editor to a buffer source (or detaches it when {@code buf} is null),
     * then rewinds to offset 0 and refreshes.
     */
    public void setData(ByteBuffer buf) {
        reset();
        sourceBuf = buf;
        if(buf != null) {
            bitBuffer = new BitBuffer(buf);
            limit = bitBuffer.capacity() - 1;
        }
        seek(0);
        update();
    }
}
package com.xpn.xwiki.web;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;

import javax.servlet.http.HttpServletResponse;

import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import com.xpn.xwiki.XWikiContext;
import com.xpn.xwiki.XWikiException;
import com.xpn.xwiki.doc.XWikiAttachment;
import com.xpn.xwiki.doc.XWikiDocument;
import com.xpn.xwiki.plugin.fileupload.FileUploadPlugin;

/**
 * Action that handles uploading document attachments. It saves all the uploaded files whose fieldname start with
 * {@code filepath}.
 *
 * @version $Id$
 */
public class UploadAction extends XWikiAction
{
    /** Logging helper object. */
    private static final Log LOG = LogFactory.getLog(UploadAction.class);

    /** The prefix of the accepted file input field name. */
    private static final String FILE_FIELD_NAME = "filepath";

    /** The prefix of the corresponding filename input field name. */
    private static final String FILENAME_FIELD_NAME = "filename";

    /**
     * {@inheritDoc}
     * <p>
     * Collects every multipart field named {@code filepath*}, resolves each one's attachment name, saves them on a
     * clone of the current document one by one, then either prints "ok" (AJAX mode) or redirects to the attachments
     * page. Returns {@code true} only on the oversized-upload error path, where an error message has been set for
     * rendering; returns {@code false} otherwise (including after a successful upload).
     *
     * @see XWikiAction#action(XWikiContext)
     */
    @Override
    public boolean action(XWikiContext context) throws XWikiException
    {
        XWikiResponse response = context.getResponse();
        Object exception = context.get("exception");
        boolean ajax = ((Boolean) context.get("ajax")).booleanValue();

        // check Exception File upload is large
        if (exception != null) {
            if (exception instanceof XWikiException) {
                XWikiException exp = (XWikiException) exception;
                if (exp.getCode() == XWikiException.ERROR_XWIKI_APP_FILE_EXCEPTION_MAXSIZE) {
                    response.setStatus(HttpServletResponse.SC_REQUEST_ENTITY_TOO_LARGE);
                    context.put("message", "fileuploadislarge");
                    return true;
                }
            }
        }

        // CSRF prevention
        if (!csrfTokenCheck(context)) {
            return false;
        }

        // Work on a clone so the context document itself is not mutated while saving.
        XWikiDocument doc = context.getDoc().clone();
        // The document is saved for each attachment in the group.
        FileUploadPlugin fileupload = (FileUploadPlugin) context.get("fileuploadplugin");
        // Maps resolved attachment name -> originating form field name.
        Map<String, String> fileNames = new HashMap<String, String>();
        List<String> wrongFileNames = new ArrayList<String>();
        List<String> failedFiles = new ArrayList<String>();
        for (String fieldName : fileupload.getFileItemNames(context)) {
            try {
                if (fieldName.startsWith(FILE_FIELD_NAME)) {
                    String fileName = getFileName(fieldName, fileupload, context);
                    if (fileName != null) {
                        fileNames.put(fileName, fieldName);
                    }
                }
            } catch (Exception ex) {
                // Invalid name: remember it for reporting, keep processing the other files.
                wrongFileNames.add(fileupload.getFileName(fieldName, context));
            }
        }

        for (Entry<String, String> file : fileNames.entrySet()) {
            try {
                uploadAttachment(file.getValue(), file.getKey(), fileupload, doc, context);
            } catch (Exception ex) {
                LOG.warn("Saving uploaded file failed", ex);
                failedFiles.add(file.getKey());
            }
        }

        LOG.debug("Found files to upload: " + fileNames);
        LOG.debug("Failed attachments: " + failedFiles);
        LOG.debug("Wrong attachment names: " + wrongFileNames);

        if (ajax) {
            try {
                response.getOutputStream().println("ok");
            } catch (IOException ex) {
                LOG.error("Unhandled exception writing output:", ex);
            }
            return false;
        }

        // Forward to the attachment page
        String redirect = fileupload.getFileItemAsString("xredirect", context);
        if (StringUtils.isEmpty(redirect)) {
            redirect = context.getDoc().getURL("attach", true, context);
        }
        sendRedirect(response, redirect);
        return false;
    }

    /**
     * Attach a file to the current document.
     *
     * @param fieldName the target file field
     * @param filename the name under which the file is attached to the document
     * @param fileupload the {@link FileUploadPlugin} holding the form data
     * @param doc the target document
     * @param context the current request context
     * @return {@code false} when the attachment was saved normally; {@code true} only when saving failed with a
     *         Java-heap-space error, in which case an error message and HTTP 500 status have been set on the
     *         response (note: despite its name, this is an error flag, not a success flag)
     * @throws XWikiException if the form data cannot be accessed, or if the database operation failed
     */
    public boolean uploadAttachment(String fieldName, String filename, FileUploadPlugin fileupload, XWikiDocument doc,
        XWikiContext context) throws XWikiException
    {
        XWikiResponse response = context.getResponse();
        String username = context.getUser();

        // Read XWikiAttachment: reuse an existing attachment of the same name (new version) or create one.
        XWikiAttachment attachment = doc.getAttachment(filename);
        if (attachment == null) {
            attachment = new XWikiAttachment();
            doc.getAttachmentList().add(attachment);
        }
        try {
            attachment.setContent(fileupload.getFileItemInputStream(fieldName, context));
        } catch (IOException e) {
            throw new XWikiException(XWikiException.MODULE_XWIKI_APP,
                XWikiException.ERROR_XWIKI_APP_UPLOAD_FILE_EXCEPTION, "Exception while reading uploaded parsed file",
                e);
        }
        attachment.setFilename(filename);
        attachment.setAuthor(username);
        // Add the attachment to the document
        attachment.setDoc(doc);
        doc.setAuthor(username);
        if (doc.isNew()) {
            doc.setCreator(username);
        }
        // Adding a comment with a link to the download URL
        String comment;
        String nextRev = attachment.getNextVersion();
        ArrayList<String> params = new ArrayList<String>();
        params.add(filename);
        params.add(doc.getAttachmentRevisionURL(filename, nextRev, context));
        if (attachment.isImage(context)) {
            comment = context.getMessageTool().get("core.comment.uploadImageComment", params);
        } else {
            comment = context.getMessageTool().get("core.comment.uploadAttachmentComment", params);
        }

        // Save the document.
        try {
            context.getWiki().saveDocument(doc, comment, context);
        } catch (XWikiException e) {
            // check Exception is ERROR_XWIKI_APP_JAVA_HEAP_SPACE when saving Attachment
            if (e.getCode() == XWikiException.ERROR_XWIKI_APP_JAVA_HEAP_SPACE) {
                response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
                context.put("message", "javaheapspace");
                return true;
            }
            throw e;
        }
        return false;
    }

    /**
     * Extract the corresponding attachment name for a given file field. It can either be specified in a separate form
     * input field, or it is extracted from the original filename.
     *
     * @param fieldName the target file field
     * @param fileupload the {@link FileUploadPlugin} holding the form data
     * @param context the current request context
     * @return a valid attachment name, or {@code null} when the file field was left empty
     * @throws XWikiException if the form data cannot be accessed, or if the specified filename is invalid
     */
    protected String getFileName(String fieldName, FileUploadPlugin fileupload, XWikiContext context)
        throws XWikiException
    {
        String filenameField = FILENAME_FIELD_NAME + fieldName.substring(FILE_FIELD_NAME.length());
        String filename = null;

        // Try to use the name provided by the user
        filename = fileupload.getFileItemAsString(filenameField, context);
        if (!StringUtils.isBlank(filename)) {
            // TODO These should be supported, the URL should just contain escapes.
            if (filename.indexOf("/") != -1 || filename.indexOf("\\") != -1 || filename.indexOf(";") != -1) {
                throw new XWikiException(XWikiException.MODULE_XWIKI_APP, XWikiException.ERROR_XWIKI_APP_INVALID_CHARS,
                    "Invalid filename: " + filename);
            }
        }

        if (StringUtils.isBlank(filename)) {
            // Try to get the actual filename on the client; strip any client-side path components.
            String fname = fileupload.getFileName(fieldName, context);
            if (StringUtils.indexOf(fname, "/") >= 0) {
                fname = StringUtils.substringAfterLast(fname, "/");
            }
            if (StringUtils.indexOf(fname, "\\") >= 0) {
                fname = StringUtils.substringAfterLast(fname, "\\");
            }
            filename = fname;
        }
        // Sometimes spaces are replaced with '+' by the browser.
        filename = filename.replaceAll("\\+", " ");

        if (StringUtils.isBlank(filename)) {
            // The file field was left empty, ignore this
            return null;
        }

        // Issues fixed by the clearName :
        // 1) Attaching images with a name containing special characters generates bugs
        // (image are not displayed), XWIKI-2090.
        // 2) Attached files that we can't delete or link in the Wiki pages, XWIKI-2087.
        filename = context.getWiki().clearName(filename, false, true, context);
        return filename;
    }

    /**
     * {@inheritDoc}
     * <p>
     * In AJAX mode, writes the localized error message (set under the {@code message} context key) directly to the
     * response and returns {@code null}; otherwise renders the standard exception template.
     *
     * @see XWikiAction#render(XWikiContext)
     */
    @Override
    public String render(XWikiContext context) throws XWikiException
    {
        boolean ajax = ((Boolean) context.get("ajax")).booleanValue();
        if (ajax) {
            try {
                context.getResponse().getOutputStream().println(
                    "error: " + context.getMessageTool().get((String) context.get("message")));
            } catch (IOException ex) {
                LOG.error("Unhandled exception writing output:", ex);
            }
            return null;
        }
        return "exception";
    }
}
package com.github.semres.gui; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.SerializationFeature; import com.github.semres.*; import com.github.semres.babelnet.BabelNetManager; import com.github.semres.babelnet.BabelNetSynset; import com.github.semres.user.UserEdge; import com.teamdev.jxbrowser.chromium.*; import com.teamdev.jxbrowser.chromium.events.FinishLoadingEvent; import com.teamdev.jxbrowser.chromium.events.LoadAdapter; import com.teamdev.jxbrowser.chromium.events.ScriptContextAdapter; import com.teamdev.jxbrowser.chromium.events.ScriptContextEvent; import com.teamdev.jxbrowser.chromium.javafx.BrowserView; import javafx.application.Platform; import javafx.event.ActionEvent; import javafx.fxml.FXML; import javafx.fxml.FXMLLoader; import javafx.fxml.Initializable; import javafx.scene.Parent; import javafx.scene.Scene; import javafx.scene.control.Menu; import javafx.scene.control.MenuBar; import javafx.scene.control.MenuItem; import javafx.scene.layout.AnchorPane; import javafx.stage.FileChooser; import javafx.stage.Modality; import javafx.stage.Stage; import org.apache.commons.beanutils.PropertyUtils; import org.apache.commons.lang.StringUtils; import org.apache.log4j.Logger; import org.eclipse.rdf4j.rio.RDFFormat; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.lang.reflect.InvocationTargetException; import java.net.URL; import java.util.*; import java.util.stream.Collectors; public class MainController extends Controller implements Initializable { static Logger log = Logger.getRootLogger(); @FXML private MenuBar menuBar; @FXML private MenuItem turtleMenuItem; @FXML private MenuItem nTriplesMenuItem; @FXML private AnchorPane boardPane; @FXML private Menu viewMenu; @FXML private Menu babelNetMenu; @FXML private Menu exportSubmenu; @FXML private MenuItem saveMenuItem; @FXML private MenuItem updateMenuItem; @FXML private 
MenuItem searchBabelNetMenuItem; private BrowserView boardView; private Board board; private Browser browser; private BabelNetManager babelNetManager; private DatabasesManager databasesManager; private String newApiKey; @Override public void initialize(URL location, ResourceBundle resources) { // Add developer tools BrowserPreferences.setChromiumSwitches("--remote-debugging-port=9222"); babelNetManager = new BabelNetManager(); browser = new Browser(); boardView = new BrowserView(browser); AnchorPane.setTopAnchor(boardView, 0.0); AnchorPane.setBottomAnchor(boardView, 0.0); AnchorPane.setLeftAnchor(boardView, 0.0); AnchorPane.setRightAnchor(boardView, 0.0); boardPane.getChildren().add(boardView); // Add javaApp object to javascript. browser.addScriptContextListener(new ScriptContextAdapter() { @Override public void onScriptContextCreated(ScriptContextEvent event) { Browser browser = event.getBrowser(); JSValue window = browser.executeJavaScriptAndReturnValue("window"); window.asObject().setProperty("javaApp", new JavaApp()); } }); // Enable loading resources from inside jar file. BrowserContext browserContext = browser.getContext(); ProtocolService protocolService = browserContext.getProtocolService(); protocolService.setProtocolHandler("jar", new JarProtocolHandler()); // updateMenuItem should be disabled if there are unsaved changes on the board or there's no api key. // searchBabelNetMenuItem should be disabled if there's no api key babelNetMenu.setOnShowing(e -> { searchBabelNetMenuItem.setDisable(board == null || StringUtils.isEmpty(BabelNetManager.getApiKey())); updateMenuItem.setDisable(board == null || board.isBoardEdited() || StringUtils.isEmpty(BabelNetManager.getApiKey())); }); } void setBoard(Board board) { this.board = board; board.setBabelNetManager(babelNetManager); browser.loadURL(getClass().getResource("/html/board.html").toExternalForm()); // Disable option to update synsets if there's no api key. 
if (StringUtils.isEmpty(BabelNetManager.getApiKey())) { browser.addLoadListener(new LoadAdapter() { @Override public void onFinishLoadingFrame(FinishLoadingEvent event) { if (event.isMainFrame()) { browser.executeJavaScript("disableUpdates()"); } } }); } String remoteDebuggingURL = browser.getRemoteDebuggingURL(); log.info("Remote debugging URL: " + remoteDebuggingURL); // Enable some options. saveMenuItem.setDisable(false); exportSubmenu.setDisable(false); viewMenu.setDisable(false); } public String getNewApiKey() { return newApiKey; } public void save() { board.save(); } public void export(ActionEvent event) { MenuItem clickedMenuItem = ((MenuItem) event.getSource()); RDFFormat format; FileChooser.ExtensionFilter extensionFilter; if (clickedMenuItem == turtleMenuItem) { format = RDFFormat.TURTLE; extensionFilter = new FileChooser.ExtensionFilter("Turtle text files (*.ttl)", "*.ttl"); } else if (clickedMenuItem == nTriplesMenuItem) { format = RDFFormat.NTRIPLES; extensionFilter = new FileChooser.ExtensionFilter("N-Triples text files (*.nt)", "*.ttl"); } else { format = RDFFormat.RDFXML; extensionFilter = new FileChooser.ExtensionFilter("RDF/XML text files (*.nt)", "*.rdf"); } String content = board.export(format); FileChooser fileChooser = new FileChooser(); fileChooser.getExtensionFilters().add(extensionFilter); File file = fileChooser.showSaveDialog(menuBar.getScene().getWindow()); if (file != null) { try { saveFile(content, file); } catch (IOException e) { Utils.showError("Could not save file."); } } } private void saveFile(String content, File file) throws IOException { FileWriter fileWriter = new FileWriter(file); fileWriter.write(content); fileWriter.close(); } void setBabelNetApiKey(String key) throws IOException { BabelNetManager.setApiKey(key); newApiKey = key; } void addSynset(Synset synset) { addSynsetToBoard(synset); addSynsetToView(synset); } void addSynsetToBoard(Synset synset) { board.addSynset(synset); } void addSynsetToView(Synset synset) { 
// (continuation of addSynsetToView, opened above)
        Collection<Edge> edges = synset.getOutgoingEdges().values();
        // Resolve the synset on the far end of every outgoing edge so the view
        // can draw the full neighbourhood in a single JS call.
        List<Synset> pointedSynsets = new ArrayList<>();
        for (Edge edge : edges) {
            pointedSynsets.add(board.getSynset(edge.getPointedSynset()));
        }
        browser.executeJavaScript(String.format("addSynset(%s, %s, %s)",
                synsetToJson(synset), synsetsToJson(pointedSynsets), edgesToJson(edges)));
    }

    /** Adds {@code edge} to the board model and mirrors it in the browser view. */
    void addEdge(Edge edge) {
        board.addEdge(edge);
        addEdgeToView(edge);
    }

    /** Draws an already-stored edge in the graph view via the JS bridge. */
    void addEdgeToView(Edge edge) {
        browser.executeJavaScript("addEdge(" + edgeToJson(edge) + ");");
    }

    /** Replaces {@code oldSynset} with {@code editedSynset} in the model and refreshes the view node. */
    void editSynset(Synset oldSynset, Synset editedSynset) {
        board.editSynset(oldSynset, editedSynset);
        browser.executeJavaScript("updateSynset(" + synsetToJson(editedSynset) + ");");
    }

    /** Replaces {@code oldEdge} with {@code editedEdge} in the model and refreshes the view edge. */
    public void editEdge(UserEdge oldEdge, UserEdge editedEdge) {
        board.editEdge(oldEdge, editedEdge);
        browser.executeJavaScript("updateEdge(" + edgeToJson(editedEdge) + ");");
    }

    // --- thin delegations to the board model -------------------------------

    Synset getSynset(String id) {
        return board.getSynset(id);
    }

    Collection<Synset> loadSynsets(String searchPhrase) {
        return board.loadSynsets(searchPhrase);
    }

    Synset loadSynset(String id) {
        return board.loadSynset(id);
    }

    Collection<Edge> loadEdges(String synsetId) {
        return board.loadEdges(synsetId);
    }

    Collection<Edge> downloadBabelNetEdges(String synsetId) throws IOException {
        return board.downloadBabelNetEdges(synsetId);
    }

    List<SynsetUpdate> checkForUpdates() throws IOException {
        return board.checkForUpdates();
    }

    public List<SynsetUpdate> checkForUpdates(String checkedSynsetId) throws IOException {
        return board.checkForUpdates(checkedSynsetId);
    }

    /** Applies the given updates to the model and redraws the whole graph. */
    public void update(List<SynsetUpdate> updates) {
        board.update(updates);
        redrawNodes();
    }

    /**
     * Clears the JS view and re-adds every node it previously displayed.
     * NOTE(review): relies on the JS "clear()" call returning the ids of the
     * nodes that were on screen — verify against the page script.
     */
    private void redrawNodes() {
        JSArray synsetIds = browser.executeJavaScriptAndReturnValue("clear()").asArray();
        for (int i = 0; i < synsetIds.length(); ++i) {
            String id = synsetIds.get(i).getStringValue();
            Synset synset = board.getSynset(id);
            if (synset != null) {
                addSynsetToView(synset);
            }
        }
    }

    boolean synsetExists(String id) {
        return board.isIdAlreadyTaken(id);
    }

    List<BabelNetSynset> searchBabelNet(String searchPhrase) throws IOException {
        return babelNetManager.getSynsets(searchPhrase);
    }

    // --- modal child windows ----------------------------------------------

    public void openDatabasesWindow() throws IOException {
        openNewWindow("/fxml/databases-list.fxml", "Databases");
    }

    public void openLoadSynsetWindow() throws IOException {
        openNewWindow("/fxml/load-synset.fxml", "Load synset");
    }

    public void openSearchBabelNetWindow() throws IOException {
        openNewWindow("/fxml/search-babelnet.fxml", "Search BabelNet");
    }

    public void openUpdatesWindow() throws IOException {
        openNewWindow("/fxml/updates-list.fxml", "BabelNet updates");
    }

    /**
     * Opens the updates window pre-focused on one synset. Unlike
     * {@link #openNewWindow(String, String)} this wires the checked synset id
     * into the controller before showing the stage.
     */
    public void openUpdatesWindow(String checkedSynsetId) throws IOException {
        FXMLLoader loader = new FXMLLoader(getClass().getResource("/fxml/updates-list.fxml"));
        Parent root;
        try {
            root = loader.load();
        } catch (Exception e) {
            e.printStackTrace();
            return;
        }
        Stage newStage = new Stage();
        newStage.setTitle("BabelNet updates");
        newStage.setScene(new Scene(root));
        newStage.sizeToScene();
        newStage.initOwner(menuBar.getScene().getWindow());
        newStage.initModality(Modality.WINDOW_MODAL);
        UpdatesListController updatesListController = loader.getController();
        updatesListController.setCheckedSynsetId(checkedSynsetId);
        updatesListController.setParent(MainController.this);
        blockBrowserView(newStage);
        newStage.show();
    }

    public void openApiKeyWindow() throws IOException {
        openNewWindow("/fxml/edit-api-key.fxml", "BabelNet API key");
    }

    /**
     * Loads an FXML resource into a new window-modal stage owned by the main
     * window, wires this controller as the child's parent, and shows it.
     *
     * @return the child controller created by the FXML loader
     * @throws IOException if the FXML resource cannot be loaded
     */
    private Controller openNewWindow(String fxmlPath, String title) throws IOException {
        FXMLLoader loader = new FXMLLoader(getClass().getResource(fxmlPath));
        Parent root = loader.load();
        Stage newStage = new Stage();
        newStage.setTitle(title);
        newStage.setScene(new Scene(root));
        newStage.sizeToScene();
        newStage.initOwner(menuBar.getScene().getWindow());
        newStage.initModality(Modality.WINDOW_MODAL);
        ChildController childController = loader.getController();
        childController.setParent(MainController.this);
        blockBrowserView(newStage);
        newStage.show();
        return childController;
    }

    // Block input on BrowserView and return to default handlers when window is closed.
    private void blockBrowserView(Stage stage) {
        boardView.setMouseEventsHandler((e) -> true);
        boardView.setScrollEventsHandler((e) -> true);
        boardView.setGestureEventsHandler((e) -> true);
        boardView.setKeyEventsHandler((e) -> true);
        stage.setOnHidden((e) -> {
            boardView.setMouseEventsHandler(null);
            boardView.setScrollEventsHandler(null);
            boardView.setGestureEventsHandler(null);
            boardView.setKeyEventsHandler(null);
        }
        );
    }

    /**
     * Converts a synset to a bean-property map for JSON serialisation.
     * "outgoingEdges" is removed because edges are serialised separately.
     */
    private Map<String, Object> synsetToMap(Synset synset) {
        Map<String, Object> synsetMap;
        try {
            synsetMap = PropertyUtils.describe(synset);
        } catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException e) {
            throw new RuntimeException(e);
        }
        synsetMap.remove("outgoingEdges");
        return synsetMap;
    }

    /** Serialises one synset to JSON; returns null if serialisation fails. */
    private String synsetToJson(Synset synset) {
        ObjectMapper mapper = getMapper();
        String jsonSynset = null;
        try {
            jsonSynset = mapper.writeValueAsString(synsetToMap(synset));
        } catch (JsonProcessingException e) {
            e.printStackTrace();
        }
        return jsonSynset;
    }

    /** Serialises a collection of synsets to a JSON array; returns null on failure. */
    private String synsetsToJson(Collection<? extends Synset> synsets) {
        List<Map<String, Object>> synsetMaps =
                synsets.stream().map(this::synsetToMap).collect(Collectors.toList());
        ObjectMapper mapper = getMapper();
        String jsonSynsets = null;
        try {
            jsonSynsets = mapper.writeValueAsString(synsetMaps);
        } catch (JsonProcessingException e) {
            e.printStackTrace();
        }
        return jsonSynsets;
    }

    /**
     * Flattens an edge into the map shape the JS graph expects, embedding the
     * source and target synsets resolved from the board.
     */
    private Map<String, Object> edgeToMap(Edge edge) {
        Map<String, Object> edgeMap = new HashMap<>();
        edgeMap.put("id", edge.getId());
        edgeMap.put("description", edge.getDescription());
        edgeMap.put("weight", edge.getWeight());
        edgeMap.put("relationType", edge.getRelationType().toString().toLowerCase());
        edgeMap.put("targetSynset", synsetToMap(board.getSynset(edge.getPointedSynset())));
        edgeMap.put("sourceSynset", synsetToMap(board.getSynset(edge.getOriginSynset())));
        edgeMap.put("lastEditedTime", edge.getLastEditedTime());
        edgeMap.put("class", edge.getClass().getCanonicalName());
        return edgeMap;
    }

    /** Serialises one edge to JSON; returns null if serialisation fails. */
    private String edgeToJson(Edge edge) {
        String jsonEdge = null;
        ObjectMapper mapper = getMapper();
        try {
            jsonEdge = mapper.writeValueAsString(edgeToMap(edge));
        } catch (JsonProcessingException e) {
            e.printStackTrace();
        }
        return jsonEdge;
    }

    /** Serialises a collection of edges to a JSON array; returns null on failure. */
    private String edgesToJson(Collection<? extends Edge> edges) {
        List<Map<String, Object>> edgeMaps =
                edges.stream().map(this::edgeToMap).collect(Collectors.toList());
        ObjectMapper mapper = getMapper();
        String jsonEdges = null;
        try {
            jsonEdges = mapper.writeValueAsString(edgeMaps);
        } catch (JsonProcessingException e) {
            e.printStackTrace();
        }
        return jsonEdges;
    }

    /** @return a fresh mapper configured to write java.time values as ISO strings. */
    private ObjectMapper getMapper() {
        ObjectMapper mapper = new ObjectMapper();
        // Configure mapper to properly parse LocalDateTime
        mapper.findAndRegisterModules();
        mapper.configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false);
        return mapper;
    }

    /** Releases the embedded browser's native resources. */
    public void dispose() {
        browser.dispose();
    }

    public DatabasesManager getDatabasesManager() {
        return databasesManager;
    }

    public void setDatabasesManager(DatabasesManager databasesManager) {
        this.databasesManager = databasesManager;
    }

    public BabelNetManager getBabelNetManager() {
        return babelNetManager;
    }

    public void setBabelNetManager(BabelNetManager babelNetManager) {
        this.babelNetManager = babelNetManager;
    }

    /** Closes the main window. */
    public void exit() {
        Stage stage = (Stage) menuBar.getScene().getWindow();
        stage.close();
    }

    public Collection<RelationType> getRelationTypes() {
        return board.getRelationTypes();
    }

    public void addRelationType(RelationType relationType) {
        board.addRelationType(relationType);
    }

    public void removeRelationType(RelationType relationType) {
        board.removeRelationType(relationType);
    }

    /**
     * Bridge object exposed to the embedded page's JavaScript. Methods here
     * are invoked from the browser thread, so UI work is marshalled back to
     * the JavaFX thread via {@code Platform.runLater}.
     */
    public class JavaApp {

        public void openNewSynsetWindow() {
            Platform.runLater(() -> {
                try {
                    openNewWindow("/fxml/add-synset.fxml","Add synset");
                } catch (IOException e) {
                    Utils.showError(e.getMessage());
                }
            });
        }

        public void openNewEdgeWindow(String originSynsetId, String destinationSynsetId) {
            Platform.runLater(() -> {
                try {
                    AddingEdgeController childController =
                            (AddingEdgeController) openNewWindow("/fxml/add-edge.fxml", "Edge details");
                    childController.setOriginSynset(board.getSynset(originSynsetId));
                    childController.setDestinationSynset(board.getSynset(destinationSynsetId));
                } catch (IOException e) {
                    Utils.showError(e.getMessage());
                }
            });
        }

        public void openSynsetDetailsWindow(String synsetId) {
            Platform.runLater(() -> {
                try {
                    SynsetDetailsController childController =
                            (SynsetDetailsController) openNewWindow("/fxml/synset-details.fxml", "Synset details");
                    childController.setSynset(board.getSynset(synsetId));
                } catch (IOException e) {
                    Utils.showError(e.getMessage());
                }
            });
        }

        public void openEdgeDetailsWindow(String edgeId) {
            Platform.runLater(() -> {
                try {
                    EdgeDetailsController childController =
                            (EdgeDetailsController) openNewWindow("/fxml/edge-details.fxml", "Edge details");
                    childController.setEdge(board.getEdge(edgeId));
                } catch (IOException e) {
                    Utils.showError(e.getMessage());
                }
            });
        }

        public void removeSynset(String id) {
            board.removeSynset(id);
        }

        public void removeEdge(String id) {
            board.removeEdge(id);
        }

        /**
         * Expands a synset in the view: uses cached outgoing edges when the
         * synset is already expanded, otherwise loads them from storage.
         */
        public void loadEdges(String synsetId) {
            Synset synset = board.getSynset(synsetId);
            Collection<Edge> edges;
            if (synset.isExpanded()) {
                edges = synset.getOutgoingEdges().values();
            } else {
                edges = MainController.this.loadEdges(synsetId);
            }
            List<Synset> pointedSynsets = new ArrayList<>();
            for (Edge edge : edges) {
                pointedSynsets.add(board.getSynset(edge.getPointedSynset()));
            }
            browser.executeJavaScript(String.format("expandSynset(\"%s\", %s, %s);",
                    synsetId, synsetsToJson(pointedSynsets), edgesToJson(edges)));
        }

        /** Downloads BabelNet edges for one synset and adds them to the view. */
        public void downloadEdgesFromBabelNet(String synsetId) {
            Collection<Edge> edges;
            try {
                edges = MainController.this.downloadBabelNetEdges(synsetId);
            } catch (IOException e) {
                Utils.showError(e.getMessage());
                return;
            }
            List<Synset> pointedSynsets = new ArrayList<>();
            for (Edge edge : edges) {
                pointedSynsets.add(board.getSynset(edge.getPointedSynset()));
            }
            browser.executeJavaScript(String.format("addBabelNetEdges(\"%s\", %s, %s);",
                    synsetId, synsetsToJson(pointedSynsets), edgesToJson(edges)));
        }

        public void checkForUpdates(String synsetId) {
            Platform.runLater(() -> {
                try {
                    openUpdatesWindow(synsetId);
                } catch (IOException e) {
                    Utils.showError(e.getMessage());
                }
            });
        }
    }
}
package elucent.roots;

import java.util.Random;

import net.minecraft.block.Block;
import net.minecraft.block.BlockCrops;
import net.minecraft.block.BlockNetherWart;
import net.minecraft.client.renderer.Tessellator;
import net.minecraft.entity.item.EntityItem;
import net.minecraft.init.Blocks;
import net.minecraft.item.ItemStack;
import net.minecraft.util.ResourceLocation;
import net.minecraftforge.client.event.TextureStitchEvent;
import net.minecraftforge.event.entity.EntityEvent;
import net.minecraftforge.event.entity.living.LivingDropsEvent;
import net.minecraftforge.event.entity.living.LivingEvent.LivingUpdateEvent;
import net.minecraftforge.event.entity.living.LivingExperienceDropEvent;
import net.minecraftforge.event.entity.living.LivingHurtEvent;
import net.minecraftforge.event.entity.player.PlayerInteractEvent;
import net.minecraftforge.event.world.BlockEvent;
import net.minecraftforge.event.world.BlockEvent.BreakEvent;
import net.minecraftforge.event.world.BlockEvent.HarvestDropsEvent;
import net.minecraftforge.fml.common.eventhandler.SubscribeEvent;
import net.minecraftforge.fml.common.gameevent.TickEvent;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;

/**
 * Forge event handlers for the Roots mod: bonus harvest drops, effect-based
 * interaction blocking, and NBT-driven entity tweaks.
 */
public class EventManager {

    Random random = new Random();

    /** Adds rare bonus drops when certain blocks are harvested. */
    @SubscribeEvent
    public void onBlockHarvested(HarvestDropsEvent event) {
        Block block = event.getState().getBlock();
        // 1-in-40 old root from tall grass.
        if (block == Blocks.TALLGRASS && random.nextInt(40) == 0) {
            event.getDrops().add(new ItemStack(RegistryManager.oldRoot, 1));
        }
        // 1-in-30 verdant sprig from fully grown crops.
        boolean isCrop = block == Blocks.WHEAT || block == Blocks.CARROTS
                || block == Blocks.POTATOES || block == Blocks.BEETROOTS;
        if (isCrop && ((BlockCrops) block).isMaxAge(event.getState())
                && random.nextInt(30) == 0) {
            event.getDrops().add(new ItemStack(RegistryManager.verdantSprig, 1));
        }
        // 1-in-20 infernal stem from mature (meta 2) nether wart.
        if (block == Blocks.NETHER_WART
                && ((BlockNetherWart) block).getMetaFromState(event.getState()) == 2
                && random.nextInt(20) == 0) {
            event.getDrops().add(new ItemStack(RegistryManager.infernalStem, 1));
        }
        // 1-in-10 dragon's eye from chorus flowers.
        if (block == Blocks.CHORUS_FLOWER && random.nextInt(10) == 0) {
            event.getDrops().add(new ItemStack(RegistryManager.dragonsEye, 1));
        }
    }

    /** While under the "allium" effect, right-clicks fail 3 times out of 4. */
    @SubscribeEvent
    public void onPlayerInteract(PlayerInteractEvent.RightClickBlock event) {
        if (!PlayerManager.hasEffect(event.getEntityPlayer(), "allium")) {
            return;
        }
        if (random.nextInt(4) != 0) {
            event.setCanceled(true);
        }
    }

    /** Skips entity updates while the RMOD_skipTicks counter is positive, decrementing it each tick. */
    @SubscribeEvent
    public void onLivingTick(LivingUpdateEvent event) {
        if (!event.getEntityLiving().getEntityData().hasKey("RMOD_skipTicks")) {
            return;
        }
        int remaining = event.getEntityLiving().getEntityData().getInteger("RMOD_skipTicks");
        if (remaining <= 0) {
            return;
        }
        event.getEntityLiving().getEntityData().setInteger("RMOD_skipTicks", remaining - 1);
        if (remaining - 1 <= 0) {
            // Counter exhausted: remove the tag so normal updates resume next tick.
            event.getEntityLiving().getEntityData().removeTag("RMOD_skipTicks");
        }
        event.setCanceled(true);
    }

    /** Suppresses item drops for entities explicitly flagged RMOD_dropItems=false. */
    @SubscribeEvent
    public void onLivingDrops(LivingDropsEvent event) {
        if (event.getEntityLiving().getEntityData().hasKey("RMOD_dropItems")
                && !event.getEntityLiving().getEntityData().getBoolean("RMOD_dropItems")) {
            event.setCanceled(true);
        }
    }

    /** Suppresses XP drops for entities explicitly flagged RMOD_dropItems=false. */
    @SubscribeEvent
    public void onLivingXP(LivingExperienceDropEvent event) {
        if (event.getEntityLiving().getEntityData().hasKey("RMOD_dropItems")
                && !event.getEntityLiving().getEntityData().getBoolean("RMOD_dropItems")) {
            event.setCanceled(true);
        }
    }

    /** Applies a one-shot vulnerability multiplier stored in RMOD_vuln, then clears it. */
    @SubscribeEvent
    public void onLivingDamage(LivingHurtEvent event) {
        if (event.getEntityLiving().getEntityData().hasKey("RMOD_vuln")) {
            double vuln = event.getEntityLiving().getEntityData().getDouble("RMOD_vuln");
            event.setAmount((float) (event.getAmount() * (1.0 + vuln)));
            event.getEntityLiving().getEntityData().removeTag("RMOD_vuln");
        }
    }

    /** Ticks the player-effect bookkeeping once per server tick. */
    @SubscribeEvent
    public void onTick(TickEvent.ServerTickEvent event) {
        PlayerManager.updateEffects();
    }

    /** Registers the magic-particle sprite on the client texture map. */
    @SideOnly(Side.CLIENT)
    @SubscribeEvent
    public void onTextureStitch(TextureStitchEvent event) {
        ResourceLocation magicParticleRL = new ResourceLocation("roots:entity/magicParticle");
        event.getMap().registerSprite(magicParticleRL);
    }
}
package filter.expression;

import java.util.ArrayList;
import java.util.List;
import java.util.function.Predicate;

import backend.interfaces.IModel;
import backend.resource.TurboIssue;
import filter.MetaQualifierInfo;
import filter.QualifierApplicationException;

/**
 * A filter expression representing the logical negation of another expression:
 * an issue satisfies this expression exactly when it does not satisfy the
 * wrapped one.
 */
public class Negation implements FilterExpression {

    private FilterExpression expr;

    public Negation(FilterExpression expr) {
        this.expr = expr;
    }

    /**
     * This method is used to serialise qualifiers. Thus whatever form returned
     * should be syntactically valid.
     */
    @Override
    public String toString() {
        return "NOT " + expr;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        Negation other = (Negation) o;
        if (expr == null) {
            return other.expr == null;
        }
        return expr.equals(other.expr);
    }

    @Override
    public int hashCode() {
        return expr == null ? 0 : expr.hashCode();
    }

    /** Satisfied precisely when the wrapped expression is not. */
    @Override
    public boolean isSatisfiedBy(IModel model, TurboIssue issue, MetaQualifierInfo info) {
        return !expr.isSatisfiedBy(model, issue, info);
    }

    /** A negation never describes a concrete state that could be applied. */
    @Override
    public boolean canBeAppliedToIssue() {
        return false;
    }

    @Override
    public void applyTo(TurboIssue issue, IModel model) throws QualifierApplicationException {
        // Unreachable by contract: callers must check canBeAppliedToIssue() first.
        assert false;
    }

    @Override
    public List<String> getQualifierNames() {
        return expr.getQualifierNames();
    }

    @Override
    public FilterExpression filter(Predicate<Qualifier> pred) {
        FilterExpression filtered = this.expr.filter(pred);
        // Negating an empty expression collapses to empty rather than "NOT empty".
        return filtered == Qualifier.EMPTY ? Qualifier.EMPTY : new Negation(filtered);
    }

    @Override
    public List<Qualifier> find(Predicate<Qualifier> pred) {
        // Defensive copy of the inner expression's matches.
        return new ArrayList<>(this.expr.find(pred));
    }
}
package com.jcabi.aspects.aj;

import com.jcabi.log.Logger;
import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
import javax.validation.ConstraintViolation;
import javax.validation.ConstraintViolationException;
import javax.validation.Path;
import javax.validation.Valid;
import javax.validation.Validation;
import javax.validation.Validator;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Pattern;
import javax.validation.metadata.ConstraintDescriptor;
import org.aspectj.lang.JoinPoint;
import org.aspectj.lang.annotation.AfterReturning;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Before;
import org.aspectj.lang.reflect.ConstructorSignature;
import org.aspectj.lang.reflect.MethodSignature;

/**
 * AspectJ aspect that validates method/constructor arguments and method
 * return values against JSR-303 annotations ({@code @NotNull},
 * {@code @Valid}, {@code @Pattern}).
 *
 * <p>If no JSR-303 provider is on the classpath, {@link #build()} returns
 * {@code null} and argument validation is silently skipped (the advice
 * methods check for this). Return-value checking of {@code @NotNull} does
 * not need the provider and always runs.
 */
@Aspect
@SuppressWarnings("PMD.CyclomaticComplexity")
public final class MethodValidator {

    /**
     * JSR-303 Validator.
     */
    private final transient Validator validator = MethodValidator.build();

    /**
     * Validate arguments of a method.
     * @param point Join point
     * @checkstyle LineLength (3 lines)
     */
    @Before("execution(* *(.., @(javax.validation.* || javax.validation.constraints.*) (*), ..))")
    public void beforeMethod(final JoinPoint point) {
        if (this.validator != null) {
            this.validate(
                point,
                MethodSignature.class.cast(point.getSignature())
                    .getMethod()
                    .getParameterAnnotations()
            );
        }
    }

    /**
     * Validate arguments of constructor.
     * @param point Join point
     * @checkstyle LineLength (3 lines)
     */
    @Before("initialization(*.new(.., @(javax.validation.* || javax.validation.constraints.*) (*), ..))")
    public void beforeCtor(final JoinPoint point) {
        if (this.validator != null) {
            this.validate(
                point,
                ConstructorSignature.class.cast(point.getSignature())
                    .getConstructor()
                    .getParameterAnnotations()
            );
        }
    }

    /**
     * Validate method response.
     *
     * <p>Only {@code @NotNull} on the method itself is enforced here; other
     * return-value constraints are not checked by this advice.
     * @param point Join point
     * @param result Result of the method
     * @checkstyle LineLength (4 lines)
     * @since 0.7.11
     */
    @AfterReturning(
        pointcut = "execution(@(javax.validation.* || javax.validation.constraints.*) * *(..))",
        returning = "result"
    )
    public void after(final JoinPoint point, final Object result) {
        final Method method = MethodSignature.class.cast(
            point.getSignature()
        ).getMethod();
        if (method.isAnnotationPresent(NotNull.class) && result == null) {
            throw new ConstraintViolationException(
                new HashSet<ConstraintViolation<?>>(
                    Arrays.<ConstraintViolation<?>>asList(
                        MethodValidator.violation(
                            result,
                            method.getAnnotation(NotNull.class).message()
                        )
                    )
                )
            );
        }
    }

    /**
     * Validate method at the given point.
     *
     * <p>Collects violations across all parameters and throws once, so the
     * caller sees every failing argument in a single exception.
     * @param point Join point
     * @param params Parameters (their annotations)
     */
    private void validate(final JoinPoint point, final Annotation[][] params) {
        final Set<ConstraintViolation<?>> violations =
            new HashSet<ConstraintViolation<?>>();
        for (int pos = 0; pos < params.length; ++pos) {
            violations.addAll(
                this.validate(pos, point.getArgs()[pos], params[pos])
            );
        }
        if (!violations.isEmpty()) {
            throw new ConstraintViolationException(
                MethodValidator.pack(violations),
                violations
            );
        }
    }

    /**
     * Validate one method argument against its annotations.
     *
     * <p>Any annotation other than {@code NotNull}, {@code Valid}, or
     * {@code Pattern} triggers an {@link IllegalStateException} — see the
     * todo below.
     * @param pos Position of the argument in method signature
     * @param arg The argument
     * @param annotations Array of annotations
     * @return A set of violations
     * @todo #61 It's a temporary design, which enables only NotNull,
     *  Valid, and Pattern annotations. In the future we should use
     *  JSR-303 Validator, when they implement validation of values (see
     *  their appendix C).
     */
    private Set<ConstraintViolation<?>> validate(final int pos,
        final Object arg, final Annotation[] annotations) {
        final Set<ConstraintViolation<?>> violations =
            new HashSet<ConstraintViolation<?>>();
        for (Annotation antn : annotations) {
            if (antn.annotationType().equals(NotNull.class)) {
                if (arg == null) {
                    violations.add(
                        MethodValidator.violation(
                            String.format("param #%d", pos),
                            NotNull.class.cast(antn).message()
                        )
                    );
                }
            } else if (antn.annotationType().equals(Valid.class)) {
                // Delegates cascaded validation of the whole object graph
                // to the JSR-303 provider.
                violations.addAll(this.validator.validate(arg));
            } else if (antn.annotationType().equals(Pattern.class)) {
                // Note: a null argument silently passes @Pattern; combine
                // with @NotNull to also forbid null.
                if (arg != null && !arg.toString()
                    .matches(Pattern.class.cast(antn).regexp())) {
                    violations.add(
                        MethodValidator.violation(
                            String.format("param #%d '%s'", pos, arg),
                            Pattern.class.cast(antn).message()
                        )
                    );
                }
            } else {
                throw new IllegalStateException(
                    Logger.format(
                        "%[type]s annotation is not supported at the moment",
                        antn
                    )
                );
            }
        }
        return violations;
    }

    /**
     * Create one simple violation.
     *
     * <p>Most accessor methods of this ad-hoc implementation return
     * {@code null} placeholders; only the message and invalid value are
     * meaningful.
     * @param arg The argument passed
     * @param msg Error message to show
     * @return The violation
     */
    private static ConstraintViolation<?> violation(final Object arg,
        final String msg) {
        // @checkstyle AnonInnerLength (50 lines)
        return new ConstraintViolation<String>() {
            @Override
            public String toString() {
                return String.format("%s %s", arg, msg);
            }
            @Override
            public ConstraintDescriptor<?> getConstraintDescriptor() {
                return null;
            }
            @Override
            public Object getInvalidValue() {
                return arg;
            }
            @Override
            public Object getLeafBean() {
                return null;
            }
            @Override
            public String getMessage() {
                return msg;
            }
            @Override
            public String getMessageTemplate() {
                return msg;
            }
            @Override
            public Path getPropertyPath() {
                return null;
            }
            @Override
            public String getRootBean() {
                return "";
            }
            @Override
            public Class<String> getRootBeanClass() {
                return String.class;
            }
            @Override
            public Object[] getExecutableParameters() {
                return new Object[] {};
            }
            @Override
            public Object getExecutableReturnValue() {
                return null;
            }
            @Override
            public <U> U unwrap(final Class<U> type) {
                return null;
            }
        };
    }

    /**
     * Pack violations into string.
     * @param errs All violations
     * @return The full text
     */
    private static String pack(final Collection<ConstraintViolation<?>> errs) {
        final StringBuilder text = new StringBuilder();
        for (ConstraintViolation<?> violation : errs) {
            if (text.length() > 0) {
                text.append("; ");
            }
            text.append(violation.getMessage());
        }
        return text.toString();
    }

    /**
     * Build validator.
     *
     * <p>Returns {@code null} (disabling argument validation) when no
     * JSR-303 provider is available, logging instead of failing so the
     * application can still start.
     * @return Validator to use in the singleton
     */
    @SuppressWarnings("PMD.AvoidCatchingThrowable")
    private static Validator build() {
        Validator val = null;
        try {
            val = Validation.buildDefaultValidatorFactory().getValidator();
        } catch (javax.validation.ValidationException ex) {
            Logger.error(
                MethodValidator.class,
                "JSR-303 validator failed to initialize: %s",
                ex.getMessage()
            );
        } catch (Throwable ex) {
            Logger.error(
                MethodValidator.class,
                "JSR-303 validator thrown during initialization: %[exception]s",
                ex
            );
        }
        return val;
    }
}
package gl8080.lifegame.logic; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Objects; public class Cell { /** * * @return */ public static Cell dead() { return new Cell(false); } /** * * @return */ public static Cell alive() { return new Cell(true); } private boolean alive; private Boolean nextStatus; private List<Cell> neighbors = Collections.emptyList(); private Cell(boolean alive) { this.alive = alive; } /** * * @return {@code true} */ public boolean isAlive() { return this.alive; } public void reserveNextStatus() { long liveCellCount = neighbors.stream().filter(Cell::isAlive).count(); if (4 <= liveCellCount) { this.nextStatus = false; } else if (liveCellCount == 3) { this.nextStatus = true; } else if (liveCellCount == 2) { this.nextStatus = this.alive; } else { this.nextStatus = false; } } public void stepNextStatus() { if (this.nextStatus == null) { throw new IllegalStateException(""); } this.alive = this.nextStatus; this.nextStatus = null; } /** * * * @return {@code true} * @throws */ boolean toBeAlive() { if (this.nextStatus == null) { throw new IllegalStateException(""); } return this.nextStatus; } /** * * * @param neighbors * @throws NullPointerException {@code null} */ void setNeighbors(List<Cell> neighbors) { Objects.requireNonNull(neighbors, " null "); this.neighbors = new ArrayList<>(neighbors); } /** * * <p> * <br> * * * @return */ public List<Cell> getNeighbors() { return new ArrayList<>(this.neighbors); } }
package io.mycat.buffer;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.concurrent.ConcurrentLinkedQueue;

import org.apache.log4j.Logger;

/**
 * Pool of fixed-size direct {@link ByteBuffer}s with an optional per-thread
 * cache: threads whose names start with {@link #LOCAL_BUF_THREAD_PREX} keep a
 * thread-local free list and fall back to the shared queue when it is empty.
 *
 * <p>NOTE(review): {@code newCreated} and {@code sharedOptsCount} are plain
 * fields mutated without synchronization, so their values are best-effort
 * statistics under concurrent use — confirm this is intentional.
 *
 * @author wuzh
 */
public final class BufferPool {
    // this value not changed ,isLocalCacheThread use it
    public static final String LOCAL_BUF_THREAD_PREX = "$_";
    // Per-thread free lists, used only by threads matching the prefix above.
    private final ThreadLocalBufferPool localBufferPool;
    private static final Logger LOGGER = Logger.getLogger(BufferPool.class);
    // Size of a regular pooled chunk, in bytes.
    private final int chunkSize;
    // Size of a connection-read buffer, in bytes (separate, larger pool).
    private final int conReadBuferChunk;
    // Shared free list of regular chunks.
    private final ConcurrentLinkedQueue<ByteBuffer> items = new ConcurrentLinkedQueue<ByteBuffer>();
    /**
     * Free list of larger direct buffers used for connection reads.
     * (Original comment was garbled; presumably each connection socket gets
     * one dedicated read buffer of conReadBuferChunk bytes — verify against
     * callers of allocateConReadBuffer.)
     */
    private final ConcurrentLinkedQueue<ByteBuffer> conReadBuferQueue = new ConcurrentLinkedQueue<ByteBuffer>();
    // Count of shared-queue operations (statistics only, not synchronized).
    private long sharedOptsCount;
    // Count of chunks created beyond the initial capacity (not synchronized).
    private int newCreated;
    // Maximum number of chunks a thread-local free list may hold.
    private final long threadLocalCount;
    // Number of chunks pre-allocated at construction ("capacity", sic).
    private final long capactiy;

    /**
     * Pre-allocates {@code ceil(bufferSize / chunkSize)} direct chunks and
     * reserves {@code threadLocalPercent}% of them as the per-thread cache
     * limit.
     */
    public BufferPool(long bufferSize, int chunkSize, int conReadBuferChunk, int threadLocalPercent) {
        this.chunkSize = chunkSize;
        this.conReadBuferChunk = conReadBuferChunk;
        long size = bufferSize / chunkSize;
        // Round up so the requested total byte size is covered.
        size = (bufferSize % chunkSize == 0) ? size : size + 1;
        this.capactiy = size;
        threadLocalCount = threadLocalPercent * capactiy / 100;
        for (long i = 0; i < capactiy; i++) {
            items.offer(createDirectBuffer(chunkSize));
        }
        localBufferPool = new ThreadLocalBufferPool(threadLocalCount);
    }

    // True when the current thread's name starts with "$_" (cache-enabled thread).
    private static final boolean isLocalCacheThread() {
        final String thname = Thread.currentThread().getName();
        return (thname.length() < LOCAL_BUF_THREAD_PREX.length()) ? false
                : (thname.charAt(0) == '$' && thname.charAt(1) == '_');
    }

    public int getConReadBuferChunk() {
        return conReadBuferChunk;
    }

    public int getChunkSize() {
        return chunkSize;
    }

    public long getSharedOptsCount() {
        return sharedOptsCount;
    }

    /** @return current number of free chunks in the shared queue. */
    public long size() {
        return this.items.size();
    }

    /** @return initial capacity plus any chunks created on demand. */
    public long capacity() {
        return capactiy + newCreated;
    }

    /** Takes a connection-read buffer from its queue, creating one if empty. */
    public ByteBuffer allocateConReadBuffer() {
        ByteBuffer result = conReadBuferQueue.poll();
        if (result != null) {
            return result;
        } else {
            return createDirectBuffer(conReadBuferChunk);
        }
    }

    public BufferArray allocateArray() {
        return new BufferArray(this);
    }

    /**
     * Takes one chunk: thread-local cache first (for "$_" threads), then the
     * shared queue, and finally a freshly created direct buffer.
     */
    public ByteBuffer allocate() {
        ByteBuffer node = null;
        if (isLocalCacheThread()) {
            // allocate from threadlocal
            node = localBufferPool.get().poll();
            if (node != null) {
                return node;
            }
        }
        node = items.poll();
        if (node == null) {
            newCreated++;
            node = this.createDirectBuffer(chunkSize);
        }
        return node;
    }

    /**
     * Rejects null, heap-backed, or wrong-size buffers; clears valid ones so
     * they are ready for reuse.
     */
    private boolean checkValidBuffer(ByteBuffer buffer) {
        if (buffer == null || !buffer.isDirect()) {
            return false;
        } else if (buffer.capacity() != chunkSize) {
            LOGGER.warn("cant' recycle a buffer not equals my pool chunksize "
                    + chunkSize + "  he is " + buffer.capacity());
            return false;
        }
        buffer.clear();
        return true;
    }

    /** Returns a connection-read buffer to its queue (wrong-size buffers are dropped). */
    public void recycleConReadBuffer(ByteBuffer buffer) {
        if (buffer == null || !buffer.isDirect()) {
            return;
        } else if (buffer.capacity() != conReadBuferChunk) {
            LOGGER.warn("cant' recycle a buffer not equals my pool con read chunksize "
                    + buffer.capacity());
        } else {
            buffer.clear();
            this.conReadBuferQueue.add(buffer);
        }
    }

    /**
     * Returns a regular chunk to the pool. Cache-enabled threads put it on
     * their local list; when that list is full, three quarters of it are
     * flushed back to the shared queue.
     */
    public void recycle(ByteBuffer buffer) {
        if (!checkValidBuffer(buffer)) {
            return;
        }
        if (isLocalCacheThread()) {
            BufferQueue localQueue = localBufferPool.get();
            if (localQueue.snapshotSize() < threadLocalCount) {
                localQueue.put(buffer);
            } else {
                // recyle 3/4 thread local buffer
                items.addAll(localQueue.removeItems(threadLocalCount * 3 / 4));
                items.offer(buffer);
                sharedOptsCount++;
            }
        } else {
            sharedOptsCount++;
            items.offer(buffer);
        }
    }

    /** Linear scan of the shared queue for reference identity (debug helper). */
    public boolean testIfDuplicate(ByteBuffer buffer) {
        for (ByteBuffer exists : items) {
            if (exists == buffer) {
                return true;
            }
        }
        return false;
    }

    // Heap buffer for oversized one-off requests; never enters the pool.
    private ByteBuffer createTempBuffer(int size) {
        return ByteBuffer.allocate(size);
    }

    private ByteBuffer createDirectBuffer(int size) {
        // for performance
        return ByteBuffer.allocateDirect(size);
    }

    /**
     * Allocates {@code size} bytes: a pooled chunk when it fits, otherwise a
     * temporary heap buffer (with a warning).
     */
    public ByteBuffer allocate(int size) {
        if (size <= this.chunkSize) {
            return allocate();
        } else {
            LOGGER.warn("allocate buffer size large than default chunksize:"
                    + this.chunkSize + " he want " + size);
            return createTempBuffer(size);
        }
    }

    // Ad-hoc smoke test: exhausts the pool past capacity and recycles everything.
    public static void main(String[] args) {
        BufferPool pool = new BufferPool(1024 * 5, 1024, 1024 * 3, 2);
        long i = pool.capacity();
        ArrayList<ByteBuffer> all = new ArrayList<ByteBuffer>();
        for (long j = 0; j <= i; j++) {
            all.add(pool.allocate());
        }
        for (ByteBuffer buf : all) {
            pool.recycle(buf);
        }
        System.out.println(pool.size());
    }
}
package com.laytonsmith.core; import com.laytonsmith.PureUtilities.Vector3D; import com.laytonsmith.abstraction.MCBannerMeta; import com.laytonsmith.abstraction.MCBlockStateMeta; import com.laytonsmith.abstraction.MCBookMeta; import com.laytonsmith.abstraction.MCBrewerInventory; import com.laytonsmith.abstraction.MCColor; import com.laytonsmith.abstraction.MCCreatureSpawner; import com.laytonsmith.abstraction.MCEnchantment; import com.laytonsmith.abstraction.MCEnchantmentStorageMeta; import com.laytonsmith.abstraction.MCFireworkBuilder; import com.laytonsmith.abstraction.MCFireworkEffect; import com.laytonsmith.abstraction.MCFireworkEffectMeta; import com.laytonsmith.abstraction.MCFireworkMeta; import com.laytonsmith.abstraction.MCFurnaceInventory; import com.laytonsmith.abstraction.MCFurnaceRecipe; import com.laytonsmith.abstraction.MCInventory; import com.laytonsmith.abstraction.MCInventoryHolder; import com.laytonsmith.abstraction.MCItemFactory; import com.laytonsmith.abstraction.MCItemMeta; import com.laytonsmith.abstraction.MCItemStack; import com.laytonsmith.abstraction.MCLeatherArmorMeta; import com.laytonsmith.abstraction.MCLivingEntity; import com.laytonsmith.abstraction.MCLocation; import com.laytonsmith.abstraction.MCMapMeta; import com.laytonsmith.abstraction.MCMetadataValue; import com.laytonsmith.abstraction.MCPattern; import com.laytonsmith.abstraction.MCPlugin; import com.laytonsmith.abstraction.MCPotionData; import com.laytonsmith.abstraction.MCPotionMeta; import com.laytonsmith.abstraction.MCRecipe; import com.laytonsmith.abstraction.MCShapedRecipe; import com.laytonsmith.abstraction.MCShapelessRecipe; import com.laytonsmith.abstraction.MCSkullMeta; import com.laytonsmith.abstraction.MCTropicalFishBucketMeta; import com.laytonsmith.abstraction.MCWorld; import com.laytonsmith.abstraction.StaticLayer; import com.laytonsmith.abstraction.blocks.MCBlockState; import com.laytonsmith.abstraction.blocks.MCMaterial; import 
com.laytonsmith.abstraction.blocks.MCBanner; import com.laytonsmith.abstraction.blocks.MCBrewingStand; import com.laytonsmith.abstraction.blocks.MCChest; import com.laytonsmith.abstraction.blocks.MCDispenser; import com.laytonsmith.abstraction.blocks.MCDropper; import com.laytonsmith.abstraction.blocks.MCFurnace; import com.laytonsmith.abstraction.blocks.MCHopper; import com.laytonsmith.abstraction.blocks.MCShulkerBox; import com.laytonsmith.abstraction.entities.MCTropicalFish; import com.laytonsmith.abstraction.enums.MCDyeColor; import com.laytonsmith.abstraction.enums.MCEntityType; import com.laytonsmith.abstraction.enums.MCFireworkType; import com.laytonsmith.abstraction.enums.MCItemFlag; import com.laytonsmith.abstraction.enums.MCPatternShape; import com.laytonsmith.abstraction.enums.MCPotionEffectType; import com.laytonsmith.abstraction.enums.MCPotionType; import com.laytonsmith.abstraction.enums.MCRecipeType; import com.laytonsmith.core.constructs.CArray; import com.laytonsmith.core.constructs.CBoolean; import com.laytonsmith.core.constructs.CDouble; import com.laytonsmith.core.constructs.CInt; import com.laytonsmith.core.constructs.CNull; import com.laytonsmith.core.constructs.CString; import com.laytonsmith.core.constructs.Construct; import com.laytonsmith.core.constructs.Target; import com.laytonsmith.core.environments.Environment; import com.laytonsmith.core.exceptions.CRE.AbstractCREException; import com.laytonsmith.core.exceptions.CRE.CRECastException; import com.laytonsmith.core.exceptions.CRE.CREEnchantmentException; import com.laytonsmith.core.exceptions.CRE.CREFormatException; import com.laytonsmith.core.exceptions.CRE.CREInvalidWorldException; import com.laytonsmith.core.exceptions.CRE.CRENotFoundException; import com.laytonsmith.core.exceptions.CRE.CRERangeException; import com.laytonsmith.core.exceptions.ConfigRuntimeException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; 
/**
 * This file is responsible for converting CH objects into server objects, and vice versa
 */
public class ObjectGenerator {

    // Lazily-created singleton instance.
    private static ObjectGenerator pog = null;

    /**
     * Returns the shared ObjectGenerator instance, creating it on first use.
     * NOTE(review): lazy init is not thread-safe (no synchronization); at worst
     * two stateless instances are created — confirm callers are single-threaded
     * at init time.
     */
    public static ObjectGenerator GetGenerator() {
        if(pog == null) {
            pog = new ObjectGenerator();
        }
        return pog;
    }

    /**
     * Gets a Location Object, given a MCLocation. Yaw and pitch are included.
     *
     * @param l the location to convert
     * @return a location array (both numeric and associative keys)
     */
    public CArray location(MCLocation l) {
        return location(l, true);
    }

    /**
     * Gets a Location Object, optionally with yaw and pitch, given a MCLocation.
     * Each component is stored twice: under numeric index ("0".."5") and under
     * its string key ("x","y","z","world","yaw","pitch") for compatibility.
     *
     * @param l the location to convert
     * @param includeYawAndPitch whether to add the yaw/pitch entries
     * @return a location array
     */
    public CArray location(MCLocation l, boolean includeYawAndPitch) {
        CArray ca = CArray.GetAssociativeArray(Target.UNKNOWN);
        Construct x = new CDouble(l.getX(), Target.UNKNOWN);
        Construct y = new CDouble(l.getY(), Target.UNKNOWN);
        Construct z = new CDouble(l.getZ(), Target.UNKNOWN);
        Construct world = new CString(l.getWorld().getName(), Target.UNKNOWN);
        ca.set("0", x, Target.UNKNOWN);
        ca.set("1", y, Target.UNKNOWN);
        ca.set("2", z, Target.UNKNOWN);
        ca.set("3", world, Target.UNKNOWN);
        ca.set("x", x, Target.UNKNOWN);
        ca.set("y", y, Target.UNKNOWN);
        ca.set("z", z, Target.UNKNOWN);
        ca.set("world", world, Target.UNKNOWN);
        if(includeYawAndPitch) {
            // guarantee yaw in the 0 - 359.9~ range
            float yawRaw = l.getYaw() % 360.0f;
            if(yawRaw < 0.0f) {
                yawRaw += 360.0f;
            }
            Construct yaw = new CDouble(yawRaw, Target.UNKNOWN);
            Construct pitch = new CDouble(l.getPitch(), Target.UNKNOWN);
            ca.set("4", yaw, Target.UNKNOWN);
            ca.set("5", pitch, Target.UNKNOWN);
            ca.set("yaw", yaw, Target.UNKNOWN);
            ca.set("pitch", pitch, Target.UNKNOWN);
        }
        return ca;
    }

    /**
     * Given a Location Object, returns a MCLocation. If the optional world is not specified in the object, the world
     * provided is used instead. Location "objects" are MethodScript arrays that represent a location in game.
     * There are
     * 4 usages: <ul> <li>(x, y, z)</li> <li>(x, y, z, world)</li> <li>(x, y, z, yaw, pitch)</li> <li>(x, y, z, world,
     * yaw, pitch)</li> </ul> In all cases, the pitch and yaw default to 0, and the world defaults to the specified
     * world. <em>More conveniently: ([world], x, y, z, [yaw, pitch])</em>
     */
    public MCLocation location(Construct c, MCWorld w, Target t) {
        if(!(c instanceof CArray)) {
            throw new CREFormatException("Expecting an array, received " + c.getCType(), t);
        }
        CArray array = (CArray) c;
        MCWorld world = w;
        double x = 0;
        double y = 0;
        double z = 0;
        float yaw = 0;
        float pitch = 0;
        if(!array.inAssociativeMode()) {
            // Positional form: dispatch on array length per the javadoc above.
            if(array.size() == 3) {
                //Just the xyz, with default yaw and pitch, and given world
                x = Static.getNumber(array.get(0, t), t);
                y = Static.getNumber(array.get(1, t), t);
                z = Static.getNumber(array.get(2, t), t);
            } else if(array.size() == 4) {
                //x, y, z, world
                x = Static.getNumber(array.get(0, t), t);
                y = Static.getNumber(array.get(1, t), t);
                z = Static.getNumber(array.get(2, t), t);
                world = Static.getServer().getWorld(array.get(3, t).val());
            } else if(array.size() == 5) {
                //x, y, z, yaw, pitch, with given world
                x = Static.getNumber(array.get(0, t), t);
                y = Static.getNumber(array.get(1, t), t);
                z = Static.getNumber(array.get(2, t), t);
                yaw = (float) Static.getNumber(array.get(3, t), t);
                pitch = (float) Static.getNumber(array.get(4, t), t);
            } else if(array.size() == 6) {
                //All have been given
                x = Static.getNumber(array.get(0, t), t);
                y = Static.getNumber(array.get(1, t), t);
                z = Static.getNumber(array.get(2, t), t);
                world = Static.getServer().getWorld(array.get(3, t).val());
                yaw = (float) Static.getNumber(array.get(4, t), t);
                pitch = (float) Static.getNumber(array.get(5, t), t);
            } else {
                throw new CREFormatException("Expecting a Location array, but the array did not meet the format specifications", t);
            }
        } else {
            // Associative form: each component is optional and defaults to 0 /
            // the provided world.
            if(array.containsKey("x")) {
                x = Static.getNumber(array.get("x", t), t);
            }
            if(array.containsKey("y")) {
                y = Static.getNumber(array.get("y", t), t);
            }
            if(array.containsKey("z")) {
                z = Static.getNumber(array.get("z", t), t);
            }
            if(array.containsKey("world")) {
                world = Static.getServer().getWorld(array.get("world", t).val());
            }
            if(array.containsKey("yaw")) {
                yaw = (float) Static.getDouble(array.get("yaw", t), t);
            }
            if(array.containsKey("pitch")) {
                pitch = (float) Static.getDouble(array.get("pitch", t), t);
            }
        }
        //If world is still null at this point, it's an error
        if(world == null) {
            throw new CREInvalidWorldException("The specified world doesn't exist, or no world was provided", t);
        }
        return StaticLayer.GetLocation(world, x, y, z, yaw, pitch);
    }

    /**
     * An Item Object consists of data about a particular item stack. Information included is: recipeType, data, qty,
     * and an array of enchantment objects (labeled enchants): erecipeType (enchantment recipeType) and elevel
     * (enchantment level). For backwards compatibility, this information is also listed in numerical slots as well as
     * associative slots. If the MCItemStack is null, or the underlying item is nonexistant (or air) CNull is returned.
     *
     * @param is the item stack to convert
     * @return An item array or CNull
     */
    public Construct item(MCItemStack is, Target t) {
        if(is.isEmpty()) {
            return CNull.NULL;
        }
        CArray ret = CArray.GetAssociativeArray(t);
        ret.set("name", new CString(is.getType().getName(), t), t);
        ret.set("data", new CInt(is.getDurability(), t), t);
        ret.set("qty", new CInt(is.getAmount(), t), t);
        ret.set("meta", itemMeta(is, t), t);
        return ret;
    }

    /**
     * Gets an MCItemStack from a given item "object".
Supports both the old and new formats currently
     *
     * @param i the item array (or CNull for an empty item)
     * @param t the code target, for error reporting
     * @return An abstract item stack
     */
    public MCItemStack item(Construct i, Target t) {
        return item(i, t, false);
    }

    /**
     * Gets an MCItemStack from an item array, optionally forcing legacy
     * (pre-1.13 id:data) interpretation. Legacy mode is also triggered when the
     * array contains a "type" key.
     *
     * @param i the item array (or CNull for an empty item)
     * @param t the code target, for error reporting
     * @param legacy force legacy id/data conversion
     * @return An abstract item stack
     */
    public MCItemStack item(Construct i, Target t, boolean legacy) {
        if(i instanceof CNull) {
            return EmptyItem();
        }
        if(!(i instanceof CArray)) {
            throw new CREFormatException("Expected an array!", t);
        }
        CArray item = (CArray) i;
        String mat;
        MCItemStack ret;
        int data = 0;
        int qty = 1;
        if(item.containsKey("qty")) {
            qty = Static.getInt32(item.get("qty", t), t);
            if(qty <= 0) {
                // Zero or negative quantity means "no item".
                return EmptyItem();
            }
        }
        if(item.containsKey("data")) {
            data = Static.getInt32(item.get("data", t), t);
        }
        legacy = legacy || item.containsKey("type");
        if(legacy) {
            // Do legacy item conversion
            MCMaterial material;
            if(item.containsKey("name")) {
                mat = item.get("name", t).val();
                if(mat.equals("MAP")) {
                    // special handling, ignore data until later
                    material = StaticLayer.GetMaterialFromLegacy(mat, 0);
                } else {
                    material = StaticLayer.GetMaterialFromLegacy(mat, data);
                }
            } else {
                Construct type = item.get("type", t);
                if(type instanceof CString) {
                    // Support the "id:data" string form; split it into the
                    // numeric id and data value.
                    int seperatorIndex = type.val().indexOf(':');
                    if(seperatorIndex != -1) {
                        try {
                            data = Integer.parseInt(type.val().substring(seperatorIndex + 1));
                        } catch (NumberFormatException e) {
                            throw new CRERangeException("The item data \"" + type.val().substring(seperatorIndex + 1) + "\" is not a valid integer.", t);
                        }
                        type = new CString(type.val().substring(0, seperatorIndex), t);
                    }
                }
                mat = type.val();
                int id = Static.getInt32(type, t);
                if(id == 358) {
                    // special map handling, ignore data until later
                    // (358 is the legacy FILLED_MAP id; data becomes the map id
                    // via MCMapMeta further below)
                    material = StaticLayer.GetMaterialFromLegacy(id, 0);
                } else {
                    material = StaticLayer.GetMaterialFromLegacy(id, data);
                }
            }
            if(material == null) {
                throw new CREFormatException("Could not convert legacy item from " + mat + ":" + data, t);
            }
            // convert legacy meta to material: pre-1.13 spawn eggs encoded the
            // entity in meta "spawntype" rather than in the material itself.
            if(material.getName().equals("PIG_SPAWN_EGG") && item.containsKey("meta")) {
                Construct meta = item.get("meta", t);
                if(meta instanceof CArray && ((CArray) meta).containsKey("spawntype")) {
                    Construct spawntype = ((CArray) meta).get("spawntype", t);
                    if(!(spawntype instanceof CNull)) {
                        MCMaterial newmaterial;
                        String entityName = spawntype.val().toUpperCase();
                        if(entityName.equals("MUSHROOM_COW")) {
                            newmaterial = StaticLayer.GetMaterial("MOOSHROOM_SPAWN_EGG");
                        } else if(entityName.equals("PIG_ZOMBIE")) {
                            newmaterial = StaticLayer.GetMaterial("ZOMBIE_PIGMAN_SPAWN_EGG");
                        } else {
                            newmaterial = StaticLayer.GetMaterial(entityName + "_SPAWN_EGG");
                        }
                        if(newmaterial != null) {
                            material = newmaterial;
                        }
                    }
                }
            }
            ret = StaticLayer.GetItemStack(material, data, qty);
            CHLog.GetLogger().w(CHLog.Tags.DEPRECATION, "Converted \"" + mat + ":" + data + "\"" + " to " + material.getName(), t);
        } else if(item.containsKey("name")) {
            mat = item.get("name", t).val();
            if(data > 0) {
                ret = StaticLayer.GetItemStack(mat, data, qty);
            } else {
                ret = StaticLayer.GetItemStack(mat, qty);
            }
        } else {
            throw new CREFormatException("Could not find item name key in array!", t);
        }
        if(ret == null) {
            throw new CREFormatException("Could not find item material from \"" + mat + "\"", t);
        }
        if(item.containsKey("meta")) {
            ret.setItemMeta(itemMeta(item.get("meta", t), ret.getType(), t));
        }
        if(legacy) {
            // convert legacy data to meta: for maps the old "data" value is
            // really the map id.
            if(ret.getType().getName().equals("FILLED_MAP")) {
                MCMapMeta meta = (MCMapMeta) ret.getItemMeta();
                meta.setMapId(data);
                ret.setItemMeta(meta);
            }
        }
        // Deprecated fallback to enchants in item array if not in meta
        if(item.containsKey("enchants")) {
            try {
                Map<MCEnchantment, Integer> enchants = enchants((CArray) item.get("enchants", t), t);
                for(Map.Entry<MCEnchantment, Integer> entry : enchants.entrySet()) {
                    ret.addUnsafeEnchantment(entry.getKey(), entry.getValue());
                }
            } catch (ClassCastException ex) {
                throw new CREFormatException("Enchants must be an array of enchantment arrays.", t);
            }
        }
        return ret;
    }

    /** An empty item stack: one unit of AIR. */
    private static MCItemStack EmptyItem() {
        return StaticLayer.GetItemStack("AIR", 1);
    }

    /**
     * Converts an item stack's ItemMeta to a meta array, or CNull when the
     * stack carries no meta. Common keys (display, lore, enchants, repair,
     * flags, unbreakable) are always set; type-specific keys are added per the
     * concrete meta subtype below.
     */
    public Construct itemMeta(MCItemStack is, Target t) {
        if(!is.hasItemMeta()) {
            return CNull.NULL;
        } else {
            Construct display;
            Construct lore;
            CArray ma = CArray.GetAssociativeArray(t);
            MCItemMeta meta = is.getItemMeta();
            if(meta.hasDisplayName()) {
                display = new CString(meta.getDisplayName(), t);
            } else {
                display = CNull.NULL;
            }
            if(meta.hasLore()) {
                lore = new CArray(t);
                for(String l : meta.getLore()) {
                    ((CArray) lore).push(new CString(l, t), t);
                }
            } else {
                lore = CNull.NULL;
            }
            ma.set("display", display, t);
            ma.set("lore", lore, t);
            ma.set("enchants", enchants(meta.getEnchants(), t), t);
            ma.set("repair", new CInt(meta.getRepairCost(), t), t);
            Set<MCItemFlag> itemFlags = meta.getItemFlags();
            CArray flagArray = new CArray(t);
            if(itemFlags.size() > 0) {
                for(MCItemFlag flag : itemFlags) {
                    flagArray.push(new CString(flag.name(), t), t);
                }
            }
            ma.set("flags", flagArray, t);
            ma.set("unbreakable", CBoolean.get(meta.isUnbreakable()), t);
            // Specific ItemMeta
            if(meta instanceof MCBlockStateMeta) {
                MCBlockState bs = ((MCBlockStateMeta) meta).getBlockState();
                if(bs instanceof MCShulkerBox || bs instanceof MCChest || bs instanceof MCDispenser
                        || bs instanceof MCDropper || bs instanceof MCHopper) {
                    // Handle InventoryHolders with inventory slots that do not have a special meaning.
                    MCInventory inv = ((MCInventoryHolder) bs).getInventory();
                    CArray box = CArray.GetAssociativeArray(t);
                    for(int i = 0; i < inv.getSize(); i++) {
                        Construct item = ObjectGenerator.GetGenerator().item(inv.getItem(i), t);
                        if(!(item instanceof CNull)) {
                            // Only occupied slots are stored.
                            box.set(i, item, t);
                        }
                    }
                    ma.set("inventory", box, t);
                } else if(bs instanceof MCBanner) {
                    MCBanner banner = (MCBanner) bs;
                    CArray patterns = new CArray(t, banner.numberOfPatterns());
                    for(MCPattern p : banner.getPatterns()) {
                        CArray pattern = CArray.GetAssociativeArray(t);
                        pattern.set("shape", new CString(p.getShape().toString(), t), t);
                        pattern.set("color", new CString(p.getColor().toString(), t), t);
                        patterns.push(pattern, t);
                    }
                    ma.set("patterns", patterns, t);
                } else if(bs instanceof MCCreatureSpawner) {
                    MCCreatureSpawner mccs = (MCCreatureSpawner) bs;
                    // NOTE(review): unlike sibling calls this uses a two-arg
                    // set(String, String) overload with no Target — confirm it
                    // is intended rather than set("spawntype", new CString(...), t).
                    ma.set("spawntype", mccs.getSpawnedType().name());
                    ma.set("delay", new CInt(mccs.getDelay(), t), t);
                } else if(bs instanceof MCBrewingStand) {
                    MCBrewingStand brewStand = (MCBrewingStand) bs;
                    ma.set("brewtime", new CInt(brewStand.getBrewingTime(), t), t);
                    ma.set("fuel", new CInt(brewStand.getFuelLevel(), t), t);
                    MCBrewerInventory inv = brewStand.getInventory();
                    CArray invData = CArray.GetAssociativeArray(t);
                    // Amount 0 means the slot is empty — omit it.
                    if(inv.getFuel().getAmount() != 0) {
                        invData.set("fuel", ObjectGenerator.GetGenerator().item(inv.getFuel(), t), t);
                    }
                    if(inv.getIngredient().getAmount() != 0) {
                        invData.set("ingredient", ObjectGenerator.GetGenerator().item(inv.getIngredient(), t), t);
                    }
                    if(inv.getLeftBottle().getAmount() != 0) {
                        invData.set("leftbottle", ObjectGenerator.GetGenerator().item(inv.getLeftBottle(), t), t);
                    }
                    if(inv.getMiddleBottle().getAmount() != 0) {
                        invData.set("middlebottle", ObjectGenerator.GetGenerator().item(inv.getMiddleBottle(), t), t);
                    }
                    if(inv.getRightBottle().getAmount() != 0) {
                        invData.set("rightbottle", ObjectGenerator.GetGenerator().item(inv.getRightBottle(), t), t);
                    }
                    ma.set("inventory", invData, t);
                } else if(bs instanceof MCFurnace) {
                    MCFurnace furnace = (MCFurnace) bs;
                    ma.set("burntime", new CInt(furnace.getBurnTime(), t), t);
                    ma.set("cooktime", new CInt(furnace.getCookTime(), t), t);
                    MCFurnaceInventory inv = furnace.getInventory();
                    CArray invData = CArray.GetAssociativeArray(t);
                    if(inv.getResult().getAmount() != 0) {
                        invData.set("result", ObjectGenerator.GetGenerator().item(inv.getResult(), t), t);
                    }
                    if(inv.getFuel().getAmount() != 0) {
                        invData.set("fuel", ObjectGenerator.GetGenerator().item(inv.getFuel(), t), t);
                    }
                    if(inv.getSmelting().getAmount() != 0) {
                        invData.set("smelting", ObjectGenerator.GetGenerator().item(inv.getSmelting(), t), t);
                    }
                    ma.set("inventory", invData, t);
                }
            } else if(meta instanceof MCFireworkEffectMeta) {
                MCFireworkEffectMeta mcfem = (MCFireworkEffectMeta) meta;
                MCFireworkEffect effect = mcfem.getEffect();
                if(effect == null) {
                    ma.set("effect", CNull.NULL, t);
                } else {
                    ma.set("effect", fireworkEffect(effect, t), t);
                }
            } else if(meta instanceof MCFireworkMeta) {
                MCFireworkMeta mcfm = (MCFireworkMeta) meta;
                CArray firework = CArray.GetAssociativeArray(t);
                firework.set("strength", new CInt(mcfm.getStrength(), t), t);
                CArray fe = new CArray(t);
                for(MCFireworkEffect effect : mcfm.getEffects()) {
                    fe.push(fireworkEffect(effect, t), t);
                }
                firework.set("effects", fe, t);
                ma.set("firework", firework, t);
            } else if(meta instanceof MCLeatherArmorMeta) {
                CArray color = color(((MCLeatherArmorMeta) meta).getColor(), t);
                ma.set("color", color, t);
            } else if(meta instanceof MCBookMeta) {
                Construct title;
                Construct author;
                Construct pages;
                if(((MCBookMeta) meta).hasTitle()) {
                    title = new CString(((MCBookMeta) meta).getTitle(), t);
                } else {
                    title = CNull.NULL;
                }
                if(((MCBookMeta) meta).hasAuthor()) {
                    author = new CString(((MCBookMeta) meta).getAuthor(), t);
                } else {
                    author = CNull.NULL;
                }
                if(((MCBookMeta) meta).hasPages()) {
                    pages = new CArray(t);
                    for(String p : ((MCBookMeta) meta).getPages()) {
                        ((CArray) pages).push(new CString(p, t), t);
                    }
                } else {
                    pages = CNull.NULL;
                }
                ma.set("title", title, t);
                ma.set("author", author, t);
                ma.set("pages", pages, t);
            } else if(meta instanceof MCSkullMeta) {
                if(((MCSkullMeta) meta).hasOwner()) {
                    ma.set("owner", new CString(((MCSkullMeta) meta).getOwner(), t), t);
                } else {
                    ma.set("owner", CNull.NULL, t);
                }
            } else if(meta instanceof MCEnchantmentStorageMeta) {
                Construct stored;
                if(((MCEnchantmentStorageMeta) meta).hasStoredEnchants()) {
                    stored = enchants(((MCEnchantmentStorageMeta) meta).getStoredEnchants(), t);
                } else {
                    stored = CNull.NULL;
                }
                ma.set("stored", stored, t);
            } else if(meta instanceof MCPotionMeta) {
                MCPotionMeta potionmeta = (MCPotionMeta) meta;
                CArray effects = potions(potionmeta.getCustomEffects(), t);
                ma.set("potions", effects, t);
                MCPotionData potiondata = potionmeta.getBasePotionData();
                if(potiondata != null) {
                    ma.set("base", potionData(potiondata, t), t);
                }
            } else if(meta instanceof MCBannerMeta) {
                MCBannerMeta bannermeta = (MCBannerMeta) meta;
                CArray patterns = new CArray(t, bannermeta.numberOfPatterns());
                for(MCPattern p : bannermeta.getPatterns()) {
                    CArray pattern = CArray.GetAssociativeArray(t);
                    pattern.set("shape", new CString(p.getShape().toString(), t), t);
                    pattern.set("color", new CString(p.getColor().toString(), t), t);
                    patterns.push(pattern, t);
                }
                ma.set("patterns", patterns, t);
            } else if(meta instanceof MCMapMeta) {
                MCMapMeta mm = ((MCMapMeta) meta);
                MCColor mapcolor = mm.getColor();
                if(mapcolor == null) {
                    ma.set("color", CNull.NULL, t);
                } else {
                    ma.set("color", color(mapcolor, t), t);
                }
                if(mm.hasMapId()) {
                    ma.set("mapid", new CInt(mm.getMapId(), t), t);
                } else {
                    ma.set("mapid", CNull.NULL, t);
                }
            } else if(meta instanceof MCTropicalFishBucketMeta) {
                MCTropicalFishBucketMeta fm = (MCTropicalFishBucketMeta) meta;
                if(fm.hasVariant()) {
                    ma.set("fishcolor", new CString(fm.getBodyColor().name(), t), t);
                    ma.set("fishpatterncolor", new CString(fm.getPatternColor().name(), t), t);
                    ma.set("fishpattern", new CString(fm.getPattern().name(), t), t);
                } else {
                    // No variant data: emit explicit nulls for all three keys.
                    ma.set("fishcolor", CNull.NULL, t);
                    ma.set("fishpatterncolor", CNull.NULL, t);
ma.set("fishpattern", CNull.NULL, t); } } return ma; } } public MCItemMeta itemMeta(Construct c, MCMaterial mat, Target t) throws ConfigRuntimeException { MCItemFactory itemFactory = Static.getServer().getItemFactory(); if(itemFactory == null) { throw new CRENotFoundException("Could not find the internal MCItemFactory object (are you running in cmdline mode?)", t); } MCItemMeta meta = itemFactory.getItemMeta(mat); if(c instanceof CNull) { return meta; } CArray ma; if(c instanceof CArray) { ma = (CArray) c; try { if(ma.containsKey("display")) { Construct dni = ma.get("display", t); if(!(dni instanceof CNull)) { meta.setDisplayName(dni.val()); } } if(ma.containsKey("lore")) { Construct li = ma.get("lore", t); if(li instanceof CNull) { //do nothing } else if(li instanceof CString) { List<String> ll = new ArrayList<>(); ll.add(li.val()); meta.setLore(ll); } else if(li instanceof CArray) { CArray la = (CArray) li; List<String> ll = new ArrayList<>(); for(int j = 0; j < la.size(); j++) { ll.add(la.get(j, t).val()); } meta.setLore(ll); } else { throw new CREFormatException("Lore was expected to be an array or a string.", t); } } if(ma.containsKey("enchants")) { Construct enchants = ma.get("enchants", t); if(enchants instanceof CArray) { for(Map.Entry<MCEnchantment, Integer> ench : enchants((CArray) enchants, t).entrySet()) { meta.addEnchant(ench.getKey(), ench.getValue(), true); } } else { throw new CREFormatException("Enchants field was expected to be an array of Enchantment arrays", t); } } if(ma.containsKey("repair") && !(ma.get("repair", t) instanceof CNull)) { meta.setRepairCost(Static.getInt32(ma.get("repair", t), t)); } if(ma.containsKey("flags")) { Construct flags = ma.get("flags", t); if(flags instanceof CArray) { CArray flagArray = (CArray) flags; for(int i = 0; i < flagArray.size(); i++) { Construct flag = flagArray.get(i, t); meta.addItemFlags(MCItemFlag.valueOf(flag.getValue().toUpperCase())); } } else { throw new CREFormatException("Itemflags was expected to 
be an array of flags.", t); } } if(ma.containsKey("unbreakable")) { meta.setUnbreakable(Static.getBoolean(ma.get("unbreakable", t), t)); } // Specific ItemMeta if(meta instanceof MCBlockStateMeta) { MCBlockStateMeta bsm = (MCBlockStateMeta) meta; MCBlockState bs = bsm.getBlockState(); if(bs instanceof MCShulkerBox || bs instanceof MCChest || bs instanceof MCDispenser || bs instanceof MCDropper || bs instanceof MCHopper) { if(ma.containsKey("inventory")) { MCInventory inv = ((MCInventoryHolder) bs).getInventory(); Construct cInvRaw = ma.get("inventory", t); if(cInvRaw instanceof CArray) { CArray cinv = (CArray) cInvRaw; for(String key : cinv.stringKeySet()) { try { int index = Integer.parseInt(key); if(index < 0 || index >= inv.getSize()) { ConfigRuntimeException.DoWarning("Out of range value (" + index + ") found" + " in " + bs.getClass().getSimpleName().replaceFirst("MC", "") + " inventory array, so ignoring."); } MCItemStack is = ObjectGenerator.GetGenerator().item(cinv.get(key, t), t); inv.setItem(index, is); } catch (NumberFormatException ex) { ConfigRuntimeException.DoWarning("Expecting integer value for key in " + bs.getClass().getSimpleName().replaceFirst("MC", "") + " inventory array, but \"" + key + "\" was found. 
Ignoring."); } } bsm.setBlockState(bs); } else if(!(cInvRaw instanceof CNull)) { throw new CREFormatException(bs.getClass().getSimpleName().replaceFirst("MC", "") + " inventory expected to be an array or null.", t); } } } else if(bs instanceof MCBanner) { MCBanner banner = (MCBanner) bs; if(ma.containsKey("patterns")) { CArray array = ArgumentValidation.getArray(ma.get("patterns", t), t); for(String key : array.stringKeySet()) { CArray pattern = ArgumentValidation.getArray(array.get(key, t), t); MCPatternShape shape = MCPatternShape.valueOf(pattern.get("shape", t).val().toUpperCase()); String color = pattern.get("color", t).val().toUpperCase(); try { MCDyeColor dyecolor = MCDyeColor.valueOf(color); banner.addPattern(StaticLayer.GetConvertor().GetPattern(dyecolor, shape)); } catch (IllegalArgumentException ex) { if(color.equals("SILVER")) { // convert old DyeColor banner.addPattern(StaticLayer.GetConvertor().GetPattern(MCDyeColor.LIGHT_GRAY, shape)); } else { throw ex; } } } } banner.update(); bsm.setBlockState(banner); } else if(bs instanceof MCCreatureSpawner) { MCCreatureSpawner mccs = (MCCreatureSpawner) bs; if(ma.containsKey("spawntype")) { MCEntityType type = MCEntityType.valueOf(ma.get("spawntype", t).val().toUpperCase()); mccs.setSpawnedType(type); } if(ma.containsKey("delay")) { int delay = Static.getInt32(ma.get("delay", t), t); mccs.setDelay(delay); } bsm.setBlockState(bs); } else if(bs instanceof MCBrewingStand) { MCBrewingStand brewStand = (MCBrewingStand) bs; if(ma.containsKey("brewtime")) { brewStand.setBrewingTime(ArgumentValidation.getInt32(ma.get("brewtime", t), t)); } if(ma.containsKey("fuel")) { brewStand.setFuelLevel(ArgumentValidation.getInt32(ma.get("fuel", t), t)); } if(ma.containsKey("inventory")) { CArray invData = ArgumentValidation.getArray(ma.get("inventory", t), t); MCBrewerInventory inv = brewStand.getInventory(); if(invData.containsKey("fuel")) { inv.setFuel(ObjectGenerator.GetGenerator().item(invData.get("fuel", t), t)); } 
if(invData.containsKey("ingredient")) { inv.setIngredient(ObjectGenerator.GetGenerator().item(invData.get("ingredient", t), t)); } if(invData.containsKey("leftbottle")) { inv.setLeftBottle(ObjectGenerator.GetGenerator().item(invData.get("leftbottle", t), t)); } if(invData.containsKey("middlebottle")) { inv.setMiddleBottle(ObjectGenerator.GetGenerator().item(invData.get("middlebottle", t), t)); } if(invData.containsKey("rightbottle")) { inv.setRightBottle(ObjectGenerator.GetGenerator().item(invData.get("rightbottle", t), t)); } } bsm.setBlockState(bs); } else if(bs instanceof MCFurnace) { MCFurnace furnace = (MCFurnace) bs; if(ma.containsKey("burntime")) { furnace.setBurnTime(ArgumentValidation.getInt16(ma.get("burntime", t), t)); } if(ma.containsKey("cooktime")) { furnace.setCookTime(ArgumentValidation.getInt16(ma.get("cooktime", t), t)); } if(ma.containsKey("inventory")) { CArray invData = ArgumentValidation.getArray(ma.get("inventory", t), t); MCFurnaceInventory inv = furnace.getInventory(); if(invData.containsKey("result")) { inv.setResult(ObjectGenerator.GetGenerator().item(invData.get("result", t), t)); } if(invData.containsKey("fuel")) { inv.setFuel(ObjectGenerator.GetGenerator().item(invData.get("fuel", t), t)); } if(invData.containsKey("smelting")) { inv.setSmelting(ObjectGenerator.GetGenerator().item(invData.get("smelting", t), t)); } } bsm.setBlockState(bs); } } else if(meta instanceof MCFireworkEffectMeta) { MCFireworkEffectMeta femeta = (MCFireworkEffectMeta) meta; if(ma.containsKey("effect")) { Construct cfem = ma.get("effect", t); if(cfem instanceof CArray) { femeta.setEffect(fireworkEffect((CArray) cfem, t)); } else if(!(cfem instanceof CNull)) { throw new CREFormatException("FireworkCharge effect was expected to be an array or null.", t); } } } else if(meta instanceof MCFireworkMeta) { MCFireworkMeta fmeta = (MCFireworkMeta) meta; if(ma.containsKey("firework")) { Construct construct = ma.get("firework", t); if(construct instanceof CArray) { CArray 
firework = (CArray) construct; if(firework.containsKey("strength")) { fmeta.setStrength(Static.getInt32(firework.get("strength", t), t)); } if(firework.containsKey("effects")) { // New style (supports multiple effects) Construct effects = firework.get("effects", t); if(effects instanceof CArray) { for(Construct effect : ((CArray) effects).asList()) { if(effect instanceof CArray) { fmeta.addEffect(fireworkEffect((CArray) effect, t)); } else { throw new CREFormatException("Firework effect was expected to be an array.", t); } } } else { throw new CREFormatException("Firework effects was expected to be an array.", t); } } else { // Old style (supports only one effect) fmeta.addEffect(fireworkEffect(firework, t)); } } else { throw new CREFormatException("Firework was expected to be an array.", t); } } } else if(meta instanceof MCLeatherArmorMeta) { if(ma.containsKey("color")) { Construct ci = ma.get("color", t); if(ci instanceof CNull) { //nothing } else if(ci instanceof CArray) { ((MCLeatherArmorMeta) meta).setColor(color((CArray) ci, t)); } else { throw new CREFormatException("Color was expected to be an array.", t); } } } else if(meta instanceof MCBookMeta) { if(ma.containsKey("title")) { Construct title = ma.get("title", t); if(!(title instanceof CNull)) { ((MCBookMeta) meta).setTitle(title.val()); } } if(ma.containsKey("author")) { Construct author = ma.get("author", t); if(!(author instanceof CNull)) { ((MCBookMeta) meta).setAuthor(author.val()); } } if(ma.containsKey("pages")) { Construct pages = ma.get("pages", t); if(pages instanceof CNull) { //nothing } else if(pages instanceof CArray) { CArray pa = (CArray) pages; List<String> pl = new ArrayList<>(); for(int j = 0; j < pa.size(); j++) { pl.add(pa.get(j, t).val()); } ((MCBookMeta) meta).setPages(pl); } else { throw new CREFormatException("Pages field was expected to be an array.", t); } } } else if(meta instanceof MCSkullMeta) { if(ma.containsKey("owner")) { Construct owner = ma.get("owner", t); if(!(owner 
instanceof CNull)) { ((MCSkullMeta) meta).setOwner(owner.val()); } } } else if(meta instanceof MCEnchantmentStorageMeta) { if(ma.containsKey("stored")) { Construct stored = ma.get("stored", t); if(stored instanceof CNull) { //Still doing nothing } else if(stored instanceof CArray) { for(Map.Entry<MCEnchantment, Integer> ench : enchants((CArray) stored, t).entrySet()) { ((MCEnchantmentStorageMeta) meta).addStoredEnchant(ench.getKey(), ench.getValue(), true); } } else { throw new CREFormatException("Stored field was expected to be an array of Enchantment arrays", t); } } } else if(meta instanceof MCPotionMeta) { if(ma.containsKey("potions")) { Construct effects = ma.get("potions", t); if(effects instanceof CArray) { for(MCLivingEntity.MCEffect e : potions((CArray) effects, t)) { ((MCPotionMeta) meta).addCustomEffect(e.getPotionEffectType(), e.getStrength(), e.getTicksRemaining(), e.isAmbient(), e.hasParticles(), e.showIcon(), true, t); } } else { throw new CREFormatException("Effects was expected to be an array of potion arrays.", t); } } if(ma.containsKey("base")) { Construct potiondata = ma.get("base", t); if(potiondata instanceof CArray) { CArray pd = (CArray) potiondata; ((MCPotionMeta) meta).setBasePotionData(potionData((CArray) potiondata, t)); } } } else if(meta instanceof MCBannerMeta) { if(ma.containsKey("patterns")) { CArray array = ArgumentValidation.getArray(ma.get("patterns", t), t); for(String key : array.stringKeySet()) { CArray pattern = ArgumentValidation.getArray(array.get(key, t), t); MCPatternShape shape = MCPatternShape.valueOf(pattern.get("shape", t).val().toUpperCase()); MCDyeColor color = MCDyeColor.valueOf(pattern.get("color", t).val().toUpperCase()); ((MCBannerMeta) meta).addPattern(StaticLayer.GetConvertor().GetPattern(color, shape)); } } } else if(meta instanceof MCMapMeta) { if(ma.containsKey("color")) { Construct ci = ma.get("color", t); if(ci instanceof CArray) { ((MCMapMeta) meta).setColor(color((CArray) ci, t)); } else if(!(ci 
instanceof CNull)) { throw new CREFormatException("Color was expected to be an array.", t); } } if(ma.containsKey("mapid")) { Construct cid = ma.get("mapid", t); if(!(cid instanceof CNull)) { ((MCMapMeta) meta).setMapId(Static.getInt32(cid, t)); } } } else if(meta instanceof MCTropicalFishBucketMeta) { if(ma.containsKey("fishpatterncolor")) { Construct patterncolor = ma.get("fishpatterncolor", t); if(!(patterncolor instanceof CNull)) { MCDyeColor color = MCDyeColor.valueOf(patterncolor.val().toUpperCase()); ((MCTropicalFishBucketMeta) meta).setPatternColor(color); } } if(ma.containsKey("fishcolor")) { Construct fishcolor = ma.get("fishcolor", t); if(!(fishcolor instanceof CNull)) { MCDyeColor color = MCDyeColor.valueOf(fishcolor.val().toUpperCase()); ((MCTropicalFishBucketMeta) meta).setBodyColor(color); } } if(ma.containsKey("fishpattern")) { Construct pa = ma.get("fishpattern", t); if(!(pa instanceof CNull)) { MCTropicalFish.MCPattern pattern = MCTropicalFish.MCPattern.valueOf(pa.val().toUpperCase()); ((MCTropicalFishBucketMeta) meta).setPattern(pattern); } } } } catch (Exception ex) { throw new CREFormatException(ex.getMessage(), t, ex); } } else { throw new CREFormatException("An array was expected but received " + c + " instead.", t); } return meta; } public CArray exception(ConfigRuntimeException e, Environment env, Target t) { AbstractCREException ex = AbstractCREException.getAbstractCREException(e); return ex.getExceptionObject(); } public AbstractCREException exception(CArray exception, Target t) throws ClassNotFoundException { return AbstractCREException.getFromCArray(exception, t); } /** * Returns a CArray given an MCColor. 
It will be in the format array(r: 0, g: 0, b: 0) * * @param color * @param t * @return */ public CArray color(MCColor color, Target t) { CArray ca = CArray.GetAssociativeArray(t); ca.set("r", new CInt(color.getRed(), t), t); ca.set("g", new CInt(color.getGreen(), t), t); ca.set("b", new CInt(color.getBlue(), t), t); return ca; } /** * Returns an MCColor given a colorArray, which supports the following three format recipeTypes (in this order of * priority) array(r: 0, g: 0, b: 0) array(red: 0, green: 0, blue: 0) array(0, 0, 0) * * @param color * @param t * @return */ public MCColor color(CArray color, Target t) { int red; int green; int blue; if(color.containsKey("r")) { red = Static.getInt32(color.get("r", t), t); } else if(color.containsKey("red")) { red = Static.getInt32(color.get("red", t), t); } else { red = Static.getInt32(color.get(0, t), t); } if(color.containsKey("g")) { green = Static.getInt32(color.get("g", t), t); } else if(color.containsKey("green")) { green = Static.getInt32(color.get("green", t), t); } else { green = Static.getInt32(color.get(1, t), t); } if(color.containsKey("b")) { blue = Static.getInt32(color.get("b", t), t); } else if(color.containsKey("blue")) { blue = Static.getInt32(color.get("blue", t), t); } else { blue = Static.getInt32(color.get(2, t), t); } try { return StaticLayer.GetConvertor().GetColor(red, green, blue); } catch (IllegalArgumentException ex) { throw new CRERangeException(ex.getMessage(), t, ex); } } /** * Gets a vector object, given a Vector. * * @param vector the Vector * @return the vector array */ public CArray vector(Vector3D vector) { return vector(vector, Target.UNKNOWN); } /** * Gets a vector object, given a Vector and a Target. 
* * @param vector the Vector * @param t the Target * @return the vector array */ public CArray vector(Vector3D vector, Target t) { CArray ca = CArray.GetAssociativeArray(t); //Integral keys first ca.set(0, new CDouble(vector.X(), t), t); ca.set(1, new CDouble(vector.Y(), t), t); ca.set(2, new CDouble(vector.Z(), t), t); //Then string keys ca.set("x", new CDouble(vector.X(), t), t); ca.set("y", new CDouble(vector.Y(), t), t); ca.set("z", new CDouble(vector.Z(), t), t); return ca; } /** * Gets a Vector, given a vector object. * * A vector has three parts: the X, Y, and Z. If the vector object is missing the Z part, then we will assume it is * zero. If the vector object is missing the X and/or Y part, then we will assume it is not a vector. * * Furthermore, the string keys ("x", "y" and "z") take precedence over the integral ones. For example, in a case of * <code>array(0, 1, 2, x: 3, y: 4, z: 5)</code>, the resultant Vector will be of the value * <code>Vector(3, 4, 5)</code>. * * For consistency, the method will accept any Construct, but it requires a CArray. * * @param c the vector array * @param t the target * @return the Vector */ public Vector3D vector(Construct c, Target t) { return vector(Vector3D.ZERO, c, t); } /** * Modifies an existing vector using a given vector object. Because Vector3D is immutable, this method does not * actually modify the existing vector, but creates a new one. 
* * @param v the original vector * @param c the vector array * @param t the target * @return the Vector */ public Vector3D vector(Vector3D v, Construct c, Target t) { if(c instanceof CArray) { CArray va = (CArray) c; double x = v.X(); double y = v.Y(); double z = v.Z(); if(!va.isAssociative()) { if(va.size() == 3) { // 3rd dimension vector x = Static.getNumber(va.get(0, t), t); y = Static.getNumber(va.get(1, t), t); z = Static.getNumber(va.get(2, t), t); } else if(va.size() == 2) { // 2nd dimension vector x = Static.getNumber(va.get(0, t), t); y = Static.getNumber(va.get(1, t), t); } else if(va.size() == 1) { x = Static.getNumber(va.get(0, t), t); } } else { if(va.containsKey("x")) { x = Static.getNumber(va.get("x", t), t); } if(va.containsKey("y")) { y = Static.getNumber(va.get("y", t), t); } if(va.containsKey("z")) { z = Static.getNumber(va.get("z", t), t); } } return new Vector3D(x, y, z); } else if(c instanceof CNull) { // fulfilling the todo? return v; } else { throw new CREFormatException("Expecting an array, received " + c.getCType(), t); } } public CArray enchants(Map<MCEnchantment, Integer> map, Target t) { CArray ret = CArray.GetAssociativeArray(t); for(Map.Entry<MCEnchantment, Integer> entry : map.entrySet()) { ret.set(entry.getKey().getKey(), new CInt(entry.getValue(), t), t); } return ret; } public Map<MCEnchantment, Integer> enchants(CArray enchantArray, Target t) { Map<MCEnchantment, Integer> ret = new HashMap<>(); for(String key : enchantArray.stringKeySet()) { if(enchantArray.isAssociative()) { MCEnchantment etype = StaticLayer.GetEnchantmentByName(key); if(etype == null) { throw new CREEnchantmentException("Unknown enchantment type: " + key, t); } int elevel = Static.getInt32(enchantArray.get(key, t), t); ret.put(etype, elevel); } else { // legacy CArray ea = Static.getArray(enchantArray.get(key, t), t); String setype = ea.get("etype", t).val(); MCEnchantment etype = StaticLayer.GetEnchantmentByName(setype); int elevel = 
Static.getInt32(ea.get("elevel", t), t); if(etype == null) { if(setype.equals("SWEEPING")) { // data from 1.11.2, changed in 1.12 etype = StaticLayer.GetEnchantmentByName("SWEEPING_EDGE"); } if(etype == null) { throw new CREEnchantmentException("Unknown enchantment type at " + key, t); } } ret.put(etype, elevel); } } return ret; } public CArray potions(List<MCLivingEntity.MCEffect> effectList, Target t) { CArray ea = CArray.GetAssociativeArray(t); for(MCLivingEntity.MCEffect eff : effectList) { CArray effect = CArray.GetAssociativeArray(t); effect.set("id", new CInt(eff.getPotionEffectType().getId(), t), t); effect.set("strength", new CInt(eff.getStrength(), t), t); effect.set("seconds", new CDouble(eff.getTicksRemaining() / 20.0, t), t); effect.set("ambient", CBoolean.get(eff.isAmbient()), t); effect.set("particles", CBoolean.get(eff.hasParticles()), t); effect.set("icon", CBoolean.get(eff.showIcon()), t); ea.set(eff.getPotionEffectType().name().toLowerCase(), effect, t); } return ea; } public List<MCLivingEntity.MCEffect> potions(CArray ea, Target t) { List<MCLivingEntity.MCEffect> ret = new ArrayList<>(); for(String key : ea.stringKeySet()) { if(ea.get(key, t) instanceof CArray) { CArray effect = (CArray) ea.get(key, t); MCPotionEffectType type; int strength = 0; double seconds = 30.0; boolean ambient = false; boolean particles = true; boolean icon = true; try { if(ea.isAssociative()) { type = MCPotionEffectType.valueOf(key.toUpperCase()); } else if(effect.containsKey("id")) { type = MCPotionEffectType.getById(Static.getInt32(effect.get("id", t), t)); } else { throw new CREFormatException("No potion type was given.", t); } } catch (IllegalArgumentException ex) { throw new CREFormatException(ex.getMessage(), t); } if(effect.containsKey("strength")) { strength = Static.getInt32(effect.get("strength", t), t); } if(effect.containsKey("seconds")) { seconds = Static.getDouble(effect.get("seconds", t), t); if(seconds < 0.0) { throw new CRERangeException("Seconds cannot 
be less than 0", t); } else if(seconds * 20 > Integer.MAX_VALUE) { throw new CRERangeException("Seconds cannot be greater than 107374182", t); } } if(effect.containsKey("ambient")) { ambient = Static.getBoolean(effect.get("ambient", t), t); } if(effect.containsKey("particles")) { particles = Static.getBoolean(effect.get("particles", t), t); } if(effect.containsKey("icon")) { icon = Static.getBoolean(effect.get("icon", t), t); } ret.add(new MCLivingEntity.MCEffect(type, strength, (int) (seconds * 20), ambient, particles, icon)); } else { throw new CREFormatException("Expected a potion array at index" + key, t); } } return ret; } public CArray potionData(MCPotionData mcpd, Target t) { CArray base = CArray.GetAssociativeArray(t); base.set("type", mcpd.getType().name(), t); base.set("extended", CBoolean.get(mcpd.isExtended()), t); base.set("upgraded", CBoolean.get(mcpd.isUpgraded()), t); return base; } public MCPotionData potionData(CArray pd, Target t) { MCPotionType type; try { type = MCPotionType.valueOf(pd.get("type", t).val().toUpperCase()); } catch (IllegalArgumentException ex) { throw new CREFormatException("Invalid potion type: " + pd.get("type", t).val(), t); } boolean extended = false; boolean upgraded = false; if(pd.containsKey("extended")) { Construct cext = pd.get("extended", t); if(cext instanceof CBoolean) { extended = ((CBoolean) cext).getBoolean(); } else { throw new CREFormatException( "Expected potion value for key \"extended\" to be a boolean", t); } } if(pd.containsKey("upgraded")) { Construct cupg = pd.get("upgraded", t); if(cupg instanceof CBoolean) { upgraded = ((CBoolean) cupg).getBoolean(); } else { throw new CREFormatException( "Expected potion value for key \"upgraded\" to be a boolean", t); } } try { return StaticLayer.GetPotionData(type, extended, upgraded); } catch (IllegalArgumentException ex) { throw new CREFormatException(ex.getMessage(), t, ex); } } public CArray fireworkEffect(MCFireworkEffect mcfe, Target t) { CArray fe = 
CArray.GetAssociativeArray(t);
	// (continuation of fireworkEffect(MCFireworkEffect, Target) started above)
	fe.set("flicker", CBoolean.get(mcfe.hasFlicker()), t);
	fe.set("trail", CBoolean.get(mcfe.hasTrail()), t);
	MCFireworkType type = mcfe.getType();
	if(type != null) {
		fe.set("type", new CString(mcfe.getType().name(), t), t);
	} else {
		fe.set("type", CNull.NULL, t);
	}
	CArray colors = new CArray(t);
	for(MCColor c : mcfe.getColors()) {
		colors.push(ObjectGenerator.GetGenerator().color(c, t), t);
	}
	fe.set("colors", colors, t);
	CArray fadeColors = new CArray(t);
	for(MCColor c : mcfe.getFadeColors()) {
		fadeColors.push(ObjectGenerator.GetGenerator().color(c, t), t);
	}
	fe.set("fade", fadeColors, t);
	return fe;
}

/**
 * Converts a firework effect array back into a firework effect. Colors may be given as color arrays,
 * color names, a "name|name" delimited string, or a single flat array(r, g, b). Missing colors default
 * to white.
 *
 * @param fe the firework effect array
 * @param t the code target
 * @return the built firework effect
 * @throws CREFormatException if a color or the type cannot be parsed
 */
public MCFireworkEffect fireworkEffect(CArray fe, Target t) {
	MCFireworkBuilder builder = StaticLayer.GetConvertor().GetFireworkBuilder();
	if(fe.containsKey("flicker")) {
		builder.setFlicker(Static.getBoolean(fe.get("flicker", t), t));
	}
	if(fe.containsKey("trail")) {
		builder.setTrail(Static.getBoolean(fe.get("trail", t), t));
	}
	if(fe.containsKey("colors")) {
		Construct colors = fe.get("colors", t);
		if(colors instanceof CArray) {
			CArray ccolors = (CArray) colors;
			if(ccolors.size() == 0) {
				builder.addColor(MCColor.WHITE);
			} else {
				for(Construct color : ccolors.asList()) {
					MCColor mccolor;
					if(color instanceof CString) {
						mccolor = StaticLayer.GetConvertor().GetColor(color.val(), t);
					} else if(color instanceof CArray) {
						mccolor = color((CArray) color, t);
					} else if(color instanceof CInt && ccolors.size() == 3) {
						// Appears to be a single color
						builder.addColor(color(ccolors, t));
						break;
					} else {
						throw new CREFormatException("Expecting individual color to be an array or string, but found "
								+ color.typeof(), t);
					}
					builder.addColor(mccolor);
				}
			}
		} else if(colors instanceof CString) {
			String split[] = colors.val().split("\\|");
			if(split.length == 0) {
				builder.addColor(MCColor.WHITE);
			} else {
				for(String s : split) {
					builder.addColor(StaticLayer.GetConvertor().GetColor(s, t));
				}
			}
		} else {
			throw new CREFormatException("Expecting an array or string for colors parameter, but found "
					+ colors.typeof(), t);
		}
	} else {
		builder.addColor(MCColor.WHITE);
	}
	if(fe.containsKey("fade")) {
		Construct colors = fe.get("fade", t);
		if(colors instanceof CArray) {
			CArray ccolors = (CArray) colors;
			for(Construct color : ccolors.asList()) {
				MCColor mccolor;
				if(color instanceof CArray) {
					mccolor = color((CArray) color, t);
				} else if(color instanceof CString) {
					mccolor = StaticLayer.GetConvertor().GetColor(color.val(), t);
				} else if(color instanceof CInt && ccolors.size() == 3) {
					// Appears to be a single color
					builder.addFadeColor(color(ccolors, t));
					break;
				} else {
					throw new CREFormatException("Expecting individual color to be an array or string, but found "
							+ color.typeof(), t);
				}
				builder.addFadeColor(mccolor);
			}
		} else if(colors instanceof CString) {
			String split[] = colors.val().split("\\|");
			for(String s : split) {
				builder.addFadeColor(StaticLayer.GetConvertor().GetColor(s, t));
			}
		} else {
			throw new CREFormatException("Expecting an array or string for fade parameter, but found "
					+ colors.typeof(), t);
		}
	}
	if(fe.containsKey("type")) {
		try {
			builder.setType(MCFireworkType.valueOf(fe.get("type", t).val().toUpperCase()));
		} catch (IllegalArgumentException ex) {
			throw new CREFormatException(ex.getMessage(), t, ex);
		}
	}
	return builder.build();
}

/**
 * Converts a recipe into its array form (type/result plus recipe-specific fields).
 *
 * @param r the recipe, may be null
 * @param t the code target
 * @return the recipe array, or CNull if the recipe was null
 */
public Construct recipe(MCRecipe r, Target t) {
	if(r == null) {
		return CNull.NULL;
	}
	CArray ret = CArray.GetAssociativeArray(t);
	ret.set("type", new CString(r.getRecipeType().name(), t), t);
	ret.set("result", item(r.getResult(), t), t);
	if(r instanceof MCFurnaceRecipe) {
		MCFurnaceRecipe furnace = (MCFurnaceRecipe) r;
		ret.set("input", item(furnace.getInput(), t), t);
		ret.set("key", furnace.getKey(), t);
	} else if(r instanceof MCShapelessRecipe) {
		MCShapelessRecipe shapeless = (MCShapelessRecipe) r;
		CArray il = new CArray(t);
		for(MCItemStack i : shapeless.getIngredients()) {
			il.push(item(i, t), t);
		}
		ret.set("ingredients", il, t);
		ret.set("key", shapeless.getKey(), t);
	} else if(r instanceof MCShapedRecipe) {
		MCShapedRecipe shaped = (MCShapedRecipe) r;
		CArray shape = new CArray(t);
		for(String line : shaped.getShape()) {
			shape.push(new CString(line, t), t);
		}
		CArray imap = CArray.GetAssociativeArray(t);
		for(Map.Entry<Character, MCItemStack> entry : shaped.getIngredientMap().entrySet()) {
			imap.set(entry.getKey().toString(), item(entry.getValue(), t), t);
		}
		ret.set("shape", shape, t);
		ret.set("ingredients", imap, t);
		ret.set("key", shaped.getKey(), t);
	}
	return ret;
}

/**
 * Converts a recipe array back into a recipe. Supports SHAPED (shape + keyed ingredients),
 * SHAPELESS (ingredient list), and FURNACE (single input) recipe types.
 *
 * @param c the recipe array
 * @param t the code target
 * @return the recipe
 * @throws CRECastException if c is not an array
 * @throws CREFormatException if the type, shape, or any ingredient is invalid
 */
public MCRecipe recipe(Construct c, Target t) {
	if(!(c instanceof CArray)) {
		throw new CRECastException("Expected array but received " + c.getCType().name(), t);
	}
	CArray recipe = (CArray) c;
	String recipeKey = null;
	if(recipe.containsKey("key")) {
		recipeKey = recipe.get("key", t).val();
	}
	MCRecipeType recipeType;
	try {
		recipeType = MCRecipeType.valueOf(recipe.get("type", t).val());
	} catch (IllegalArgumentException e) {
		throw new CREFormatException("Invalid recipe type.", t);
	}
	MCItemStack result = item(recipe.get("result", t), t);
	MCRecipe ret;
	try {
		ret = StaticLayer.GetNewRecipe(recipeKey, recipeType, result);
	} catch (IllegalArgumentException ex) {
		throw new CREFormatException(ex.getMessage(), t);
	}
	switch(recipeType) {
		case SHAPED:
			CArray shaped = Static.getArray(recipe.get("shape", t), t);
			String[] shape = new String[(int) shaped.size()];
			if(shaped.size() < 1 || shaped.size() > 3 || shaped.inAssociativeMode()) {
				throw new CREFormatException("Shape array is invalid.", t);
			}
			int i = 0;
			for(Construct row : shaped.asList()) {
				if(row instanceof CString && row.val().length() >= 1 && row.val().length() <= 3) {
					shape[i] = row.val();
					i++;
				} else {
					throw new CREFormatException("Shape array is invalid.", t);
				}
			}
			((MCShapedRecipe) ret).setShape(shape);
			CArray shapedIngredients = Static.getArray(recipe.get("ingredients", t), t);
			if(!shapedIngredients.inAssociativeMode()) {
				throw new CREFormatException("Ingredients array is invalid.", t);
			}
			for(String key : shapedIngredients.stringKeySet()) {
				MCMaterial mat = null;
				Construct ingredient = shapedIngredients.get(key, t);
				if(ingredient instanceof CString) {
					mat = StaticLayer.GetMaterial(ingredient.val());
					if(mat == null) { // maybe legacy item format
						try {
							if(ingredient.val().contains(":")) {
								String[] split = ingredient.val().split(":");
								mat = StaticLayer.GetMaterialFromLegacy(Integer.valueOf(split[0]), Integer.valueOf(split[1]));
							} else {
								mat = StaticLayer.GetConvertor().getMaterial(Integer.valueOf(ingredient.val()));
							}
							// FIX: balanced the parenthesis in this deprecation message
							CHLog.GetLogger().w(CHLog.Tags.DEPRECATION, "Numeric item formats (eg. \"0:0\") are deprecated.", t);
						} catch (NumberFormatException ex) {}
					}
				} else if(ingredient instanceof CInt) {
					mat = StaticLayer.GetConvertor().getMaterial(Static.getInt32(ingredient, t));
					CHLog.GetLogger().w(CHLog.Tags.DEPRECATION, "Numeric item ingredients are deprecated.", t);
				} else if(ingredient instanceof CArray) {
					mat = item(ingredient, t).getType();
				}
				if(mat == null) {
					throw new CREFormatException("Ingredient is invalid: " + ingredient.val(), t);
				}
				((MCShapedRecipe) ret).setIngredient(key.charAt(0), mat);
			}
			return ret;
		case SHAPELESS:
			CArray ingredients = Static.getArray(recipe.get("ingredients", t), t);
			if(ingredients.inAssociativeMode()) {
				throw new CREFormatException("Ingredients array is invalid.", t);
			}
			for(Construct ingredient : ingredients.asList()) {
				if(ingredient instanceof CString) {
					MCMaterial mat = StaticLayer.GetMaterial(ingredient.val());
					if(mat == null) { // maybe legacy item format
						try {
							if(ingredient.val().contains(":")) {
								String[] split = ingredient.val().split(":");
								mat = StaticLayer.GetMaterialFromLegacy(Integer.valueOf(split[0]), Integer.valueOf(split[1]));
							} else {
								mat = StaticLayer.GetConvertor().getMaterial(Integer.valueOf(ingredient.val()));
							}
							// FIX: balanced the parenthesis in this deprecation message
							CHLog.GetLogger().w(CHLog.Tags.DEPRECATION, "Numeric item formats (eg. \"0:0\") are deprecated.", t);
						} catch (NumberFormatException ex) {}
						if(mat == null) {
							throw new CREFormatException("Ingredient is invalid: " + ingredient.val(), t);
						}
					}
					((MCShapelessRecipe) ret).addIngredient(mat);
				} else if(ingredient instanceof CArray) {
					((MCShapelessRecipe) ret).addIngredient(item(ingredient, t));
				} else {
					throw new CREFormatException("Item was not found", t);
				}
			}
			return ret;
		case FURNACE:
			Construct input = recipe.get("input", t);
			if(input instanceof CString) {
				MCMaterial mat = StaticLayer.GetMaterial(input.val());
				if(mat == null) {
					throw new CREFormatException("Furnace input is invalid: " + input.val(), t);
				}
				((MCFurnaceRecipe) ret).setInput(mat);
			} else if(input instanceof CArray) {
				((MCFurnaceRecipe) ret).setInput(item(input, t));
			} else {
				throw new CREFormatException("Item was not found", t);
			}
			return ret;
		default:
			throw new CREFormatException("Could not find valid recipe type.", t);
	}
}

/**
 * Resolves a material by (case-insensitive) name.
 *
 * @param name the material name
 * @param t the code target
 * @return the material
 * @throws CREFormatException if the name is unknown
 */
public MCMaterial material(String name, Target t) {
	MCMaterial mat = StaticLayer.GetMaterial(name.toUpperCase());
	if(mat == null) {
		throw new CREFormatException("Unknown material type: " + name, t);
	}
	return mat;
}

/**
 * Resolves a material from a construct's string value.
 *
 * @param name the construct holding the material name
 * @param t the code target
 * @return the material
 */
public MCMaterial material(Construct name, Target t) {
	return material(name.val(), t);
}

/**
 * Gets a MetadataValue, given a construct and a plugin.
 *
 * @param value
 * @param plugin
 * @return
 */
public MCMetadataValue metadataValue(Construct value, MCPlugin plugin) {
	return metadataValue(Static.getJavaObject(value), plugin);
}

/**
 * Gets a MetadataValue, given an object and a plugin.
 *
 * @param value
 * @param plugin
 * @return
 */
public MCMetadataValue metadataValue(Object value, MCPlugin plugin) {
	return StaticLayer.GetMetadataValue(value, plugin);
}
}
package kalang.compiler; import kalang.MethodNotFoundException; import kalang.FieldNotFoundException; import kalang.AmbiguousMethodException; import kalang.AstNotFoundException; import javax.annotation.Nonnull; import kalang.ast.ClassNode; import kalang.ast.ElementExpr; import kalang.ast.AssignExpr; import kalang.ast.Statement; import kalang.ast.MultiStmtExpr; import kalang.ast.MethodNode; import kalang.ast.ContinueStmt; import kalang.ast.VarObject; import kalang.ast.ExprNode; import kalang.ast.AstNode; import kalang.ast.LoopStmt; import kalang.ast.InvocationExpr; import kalang.ast.ExprStmt; import kalang.ast.BinaryExpr; import kalang.ast.VarExpr; import kalang.ast.ThisExpr; import kalang.ast.ConstExpr; import kalang.ast.TryStmt; import kalang.ast.ThrowStmt; import kalang.ast.CatchBlock; import kalang.ast.CastExpr; import kalang.ast.BlockStmt; import kalang.ast.BreakStmt; import kalang.ast.UnaryExpr; import kalang.ast.NewArrayExpr; import kalang.ast.IfStmt; import kalang.ast.FieldExpr; import kalang.ast.ReturnStmt; import kalang.util.AstUtil; import java.lang.reflect.Modifier; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; import kalang.antlr.KalangParser; import kalang.antlr.KalangParser.BlockStmtContext; import kalang.antlr.KalangParser.BreakStatContext; import kalang.antlr.KalangParser.CastExprContext; import kalang.antlr.KalangParser.ClassBodyContext; import kalang.antlr.KalangParser.CompilationUnitContext; import kalang.antlr.KalangParser.ContinueStatContext; import kalang.antlr.KalangParser.DoWhileStatContext; import kalang.antlr.KalangParser.ExprAssignContext; import kalang.antlr.KalangParser.ExprGetArrayElementContext; import kalang.antlr.KalangParser.ExprGetFieldContext; import kalang.antlr.KalangParser.ExprIdentifierContext; import 
kalang.antlr.KalangParser.ExprInvocationContext; import kalang.antlr.KalangParser.ExprLiteralContext; import kalang.antlr.KalangParser.ExprMemberInvocationContext; import kalang.antlr.KalangParser.ExprMidOpContext; import kalang.antlr.KalangParser.ExprParenContext; import kalang.antlr.KalangParser.ExprSelfOpPreContext; import kalang.antlr.KalangParser.ExprSelfRefContext; import kalang.antlr.KalangParser.ExprStatContext; import kalang.antlr.KalangParser.ExpressionContext; import kalang.antlr.KalangParser.ExpressionsContext; import kalang.antlr.KalangParser.FieldDeclContext; import kalang.antlr.KalangParser.ForStatContext; import kalang.antlr.KalangParser.IfStatContext; import kalang.antlr.KalangParser.ImportDeclContext; import kalang.antlr.KalangParser.LiteralContext; import kalang.antlr.KalangParser.MethodDeclContext; import kalang.antlr.KalangParser.NewExprContext; import kalang.antlr.KalangParser.QualifiedNameContext; import kalang.antlr.KalangParser.ReturnStatContext; import kalang.antlr.KalangParser.StatContext; import kalang.antlr.KalangParser.TryStatContext; import kalang.antlr.KalangParser.TypeContext; import kalang.antlr.KalangParser.VarDeclContext; import kalang.antlr.KalangParser.VarDeclStatContext; import kalang.antlr.KalangParser.VarModifierContext; import kalang.antlr.KalangParser.WhileStatContext; import kalang.antlr.KalangVisitor; import kalang.core.VarTable; import javax.annotation.Nullable; import kalang.antlr.KalangParser.LocalVarDeclContext; import kalang.ast.AnnotationNode; import kalang.ast.ArrayLengthExpr; import kalang.ast.AssignableExpr; import kalang.ast.ClassReference; import kalang.ast.CompareExpr; import kalang.ast.ErrorousExpr; import kalang.ast.FieldNode; import kalang.ast.IncrementExpr; import kalang.ast.InstanceOfExpr; import kalang.ast.LocalVarNode; import kalang.ast.LogicExpr; import kalang.ast.MathExpr; import kalang.ast.MultiStmt; import kalang.ast.NewObjectExpr; import kalang.ast.ObjectFieldExpr; import 
kalang.ast.ObjectInvokeExpr; import kalang.ast.ParameterExpr; import kalang.ast.ParameterNode; import kalang.ast.StaticFieldExpr; import kalang.ast.StaticInvokeExpr; import kalang.ast.SuperExpr; import kalang.ast.UnknownFieldExpr; import kalang.ast.UnknownInvocationExpr; import kalang.ast.VarDeclStmt; import kalang.core.ArrayType; import kalang.core.ClassType; import kalang.core.GenericType; import kalang.core.MethodDescriptor; import kalang.core.NullableKind; import kalang.core.ParameterizedType; import kalang.core.PrimitiveType; import kalang.core.Type; import kalang.core.Types; import kalang.core.WildcardType; import kalang.exception.Exceptions; import kalang.util.BoxUtil; import kalang.util.ClassTypeUtil; import kalang.util.MethodUtil; import kalang.util.ModifierUtil; import kalang.util.NameUtil; import kalang.util.OffsetRangeHelper; import kalang.util.StringLiteralUtil; import org.antlr.v4.runtime.ParserRuleContext; import org.antlr.v4.runtime.Token; import org.antlr.v4.runtime.TokenStream; import org.antlr.v4.runtime.tree.AbstractParseTreeVisitor; import org.antlr.v4.runtime.tree.ParseTree; import org.antlr.v4.runtime.tree.TerminalNode; /** * build ast from antlr parse tree * * @author Kason Yang */ public class AstBuilder extends AbstractParseTreeVisitor implements KalangVisitor { public static final int PARSING_PHASE_META = 1, PARSING_PHASE_ALL = 2; private int parsingPhase=0; //static String DEFAULT_VAR_TYPE;// = "java.lang.Object"; //short name to full name @Nonnull private final Map<String, String> fullNames = new HashMap<>(); @Nonnull private final List<String> importPaths = new LinkedList<>(); @Nonnull private final ClassNode thisClazz = new ClassNode(); @Nonnull private final ClassType thisType = Types.getClassType(thisClazz); private MethodNode method; private VarTable<VarObject,Type> overrideTypes = new VarTable(); protected BlockStmt currentBlock = null; private final HashMap<MethodNode,BlockStmtContext> methodBodys = new HashMap<>(); @Nonnull 
private AstLoader astLoader;
// parse tree root, set on the first compile() pass
private ParserRuleContext compilationContext;
@Nonnull
private TokenStream tokenStream;
@Nonnull
private final String className;
@Nonnull
private String classPath;
//@Nonnull
//private VarTable<String, LocalVarNode> vtb;
@Nonnull
private KalangParser parser;
// default error handler just prints; callers may replace it via setErrorHandler
private CompileErrorHandler errorHandler = (error) ->{ System.err.println(error); };
private final CompilationUnit compilationUnit;
private List<String> methodDeclared = new ArrayList<>();
// generic type parameters declared by the class currently being built, by name
private final Map<String,GenericType> declarededGenericTypes = new HashMap<>();

// Pushes a new scope for flow-sensitive type overrides (e.g. after an instanceof check).
private void newOverrideTypeStack(){
    overrideTypes = new VarTable(overrideTypes);
}

// Pops the innermost override-type scope, restoring the parent's view.
private void popOverrideTypeStack(){
    overrideTypes = overrideTypes.getParent();
}

private void removeOverrideType(ExprNode expr){
    //TODO impl removeOverrideType and call on assign
    //overrideTypes.
}

// Records a narrowed type for the variable/parameter/field behind expr, if expr is one of those.
// Other expression kinds cannot be tracked and are ignored.
private void changeTypeTemporarilyIfCould(ExprNode expr,Type type){
    VarObject key ;
    if(expr instanceof VarExpr){
        key = ((VarExpr) expr).getVar();
    }else if(expr instanceof ParameterExpr){
        key = ((ParameterExpr) expr).getParameter();
    }else if(expr instanceof FieldExpr){
        key = ((FieldExpr) expr).getField().getFieldNode();
    }else{
        key = null;
    }
    if(key!=null){
        overrideTypes.put(key, type);
    }
}

// Narrows nullability after a null comparison: on the branch where the comparison proves
// the expression null (or non-null), override its class type's NullableKind accordingly.
// onTrue: whether we are on the branch where the condition held; isEQ: whether the comparison was ==.
private void onNull(ExprNode expr,boolean onTrue,boolean isEQ){
    boolean isNull = (onTrue && isEQ) || (!onTrue && !isEQ);
    NullableKind nullable = isNull ?
NullableKind.NULLABLE : NullableKind.NONNULL;
    Type type = expr.getType();
    if(type instanceof ClassType){
        ClassType newType = Types.getClassType((ClassType)type, nullable);
        changeTypeTemporarilyIfCould(expr,newType);
    }
}

// Applies flow-sensitive type narrowing for a branch condition:
// instanceof narrows to the tested class, null comparisons narrow nullability,
// unary negation flips the branch, and &&/|| recurse when the branch outcome fixes both operands.
protected void onIf(ExprNode expr,boolean onTrue){
    if(expr instanceof InstanceOfExpr && onTrue){
        InstanceOfExpr ie = (InstanceOfExpr) expr;
        changeTypeTemporarilyIfCould(ie.getExpr(),Types.getClassType(ie.getTarget().getReferencedClassNode()));
    }
    if(expr instanceof CompareExpr){
        CompareExpr ce = (CompareExpr) expr;
        ExprNode e1 = ce.getExpr1();
        ExprNode e2 = ce.getExpr2();
        boolean isEQ = ce.getOperation().equals(CompareExpr.OP_EQ);
        if(e1.getType().equals(Types.NULL_TYPE)){
            onNull(e2, onTrue,isEQ);
        }else if(e2.getType().equals(Types.NULL_TYPE)){
            onNull(e1,onTrue,isEQ);
        }
    }
    if(expr instanceof UnaryExpr){
        onIf(((UnaryExpr) expr).getExpr(),!onTrue);
    }
    if(expr instanceof LogicExpr){
        LogicExpr le = (LogicExpr) expr;
        if(le.getOperation().equals(LogicExpr.OP_LOGIC_AND)){
            if(onTrue){
                onIf(le.getExpr1(),true);
                onIf(le.getExpr2(),true);
            }
        }else if(le.getOperation().equals(LogicExpr.OP_LOGIC_OR)){
            if(!onTrue){
                onIf(le.getExpr1(),false);
                onIf(le.getExpr2(),false);
            }
        }
    }
}

public CompileErrorHandler getErrorHandler() {
    return errorHandler;
}

public void setErrorHandler(CompileErrorHandler errorHandler) {
    this.errorHandler = errorHandler;
}

public ParserRuleContext getParseTree(){
    return compilationContext;
}

@Nonnull
public String getClassName() {
    return className;
}

// Creates an anonymous local variable of the given type (no name assigned).
LocalVarNode createTempVar(Type type){
    LocalVarNode v = new LocalVarNode();
    v.type = type;
    return v;
}

// Resolves the class named by the token to a ClassReference; reports a syntax error
// (via requireAst) and returns null if it cannot be loaded.
@Nullable
private ClassReference requireClassReference(@Nonnull Token token){
    ClassNode ast = requireAst(token);
    if(ast==null) return null;
    return new ClassReference(ast);
}

@Nullable
private ClassType requireClassType(@Nonnull Token token){
    return requireClassType(token.getText(),token);
}

// Resolves the named class to a ClassType; the token is only used for error reporting.
@Nullable
private ClassType requireClassType(@Nonnull String id,@Nonnull Token token){
    ClassNode ast =
requireAst(id, token); if(ast==null) return null; return Types.getClassType(ast); } public void compile(){ compile(PARSING_PHASE_ALL,null); } public void compile(int targetPhase){ compile(targetPhase, null); } public void compile(int targetPhase,@Nullable AstLoader astLoader) { if(astLoader==null){ this.astLoader = new AstLoader(); }else{ this.astLoader = astLoader; } if(targetPhase>=PARSING_PHASE_META && parsingPhase < PARSING_PHASE_META){ parsingPhase = PARSING_PHASE_META; this.compilationContext = parser.compilationUnit(); visit(compilationContext); if(!AstUtil.containsConstructor(thisClazz)){ AstUtil.createEmptyConstructor(thisClazz); } } if(targetPhase>=PARSING_PHASE_ALL && parsingPhase < PARSING_PHASE_ALL){ parsingPhase = PARSING_PHASE_ALL; for(MethodNode m:thisClazz.getDeclaredMethodNodes()){ BlockStmtContext body = methodBodys.get(m); if(body!=null){ method = m; m.body = requireBlock(body); if(m.body!=null && AstUtil.isConstructor(m)){ @SuppressWarnings("null") List<Statement> bodyStmts = m.body.statements; if(!AstUtil.hasConstructorCallStatement(bodyStmts)){ try { bodyStmts.add(0, AstUtil.createDefaultSuperConstructorCall(thisClazz)); } catch (MethodNotFoundException|AmbiguousMethodException ex) { AstBuilder.this.handleSyntaxError("default constructor not found", body.start); } } } } } } } public AstBuilder(@Nonnull CompilationUnit compilationUnit, @Nonnull KalangParser parser) { this.compilationUnit = compilationUnit; this.className = compilationUnit.getSource().getClassName(); thisClazz.name = className; this.classPath = ""; this.parser = parser; tokenStream = parser.getTokenStream(); if (className.contains(".")) { classPath = className.substring(0, className.lastIndexOf('.')); } } @Override public String toString(){ return getClass().getName() + ":" + className; } @Nonnull public AstLoader getAstLoader() { return astLoader; } @Nonnull public TokenStream getTokenStream() { return tokenStream; } @Nonnull public KalangParser getParser() { return parser; } 
// Resolves the AST for the class named by the token's text, reporting an error when missing.
@Nullable private ClassNode requireAst(Token token){ return requireAst(token.getText(),token); }
/**
 * Loads the AST of a class, reporting a syntax error at {@code token} when it is unavailable.
 *
 * @param id class name (may be unqualified; expanded via imports and the current package)
 * @param token source location used for error reporting
 * @return the loaded ClassNode, or null when the class cannot be found
 */
@Nullable private ClassNode requireAst(String id,Token token) { id = expandClassName(id); try { return astLoader.loadAst(id); } catch (AstNotFoundException ex) { AstBuilder.this.handleSyntaxError("ast not found:" + id, token); return null; } }
// Quiet lookup: returns the AST when the loader already knows it, null otherwise; no error reported.
@Nullable private ClassNode getAst(String id){ if(id==null || id.length()==0) return null; id =expandClassName(id); return astLoader.getAst(id); }
// Parses a class type reference, honoring the nullable marker and any type arguments.
// A raw name matching a declared generic type parameter resolves to that GenericType directly;
// a generic class used without arguments gets the root type for every argument.
@Nullable private ClassType parseClassType(KalangParser.ClassTypeContext ctx){ NullableKind nullable = ctx.nullable==null ? NullableKind.NONNULL : NullableKind.NULLABLE; Token rawTypeToken = ctx.rawClass; String rawType = rawTypeToken.getText(); GenericType gt = declarededGenericTypes.get(rawType); if(gt!=null) return gt; ClassType clazzType = requireClassType(rawTypeToken); if(clazzType==null) return null; ClassNode clazzNode = clazzType.getClassNode(); GenericType[] clzDeclaredGenericTypes = clazzNode.getGenericTypes(); if(clzDeclaredGenericTypes!=null && clzDeclaredGenericTypes.length>0){ Type[] typeArguments = new Type[clzDeclaredGenericTypes.length]; List<KalangParser.ParameterizedElementTypeContext> parameterTypes = ctx.parameterTypes; if(parameterTypes!=null && parameterTypes.size()>0){ if(clzDeclaredGenericTypes.length!=parameterTypes.size()){ this.handleSyntaxError("wrong number of type arguments",ctx); return null; } for(int i=0;i<typeArguments.length;i++){ typeArguments[i] = parseParameterizedElementType(parameterTypes.get(i)); //TODO should return null?
// (continuation of parseClassType: validate each parsed type argument)
if(typeArguments[i]==null) return null; } }else{ for(int i=0;i<typeArguments.length;i++){
//TODO here should get bounded type,not root type
typeArguments[i] = Types.getRootType(); } } return Types.getParameterizedType(clazzType, typeArguments,nullable); }else{ return Types.getClassType(clazzType, nullable); } }
// A single (non-array) type: either a class type or a primitive named by its keyword.
@Nullable private Type parseSingleType(KalangParser.SingleTypeContext ctx){ if(ctx.classType()!=null){ return parseClassType(ctx.classType()); }else{ return Types.getPrimitiveType(ctx.getText()); } }
// Full type: a single type, or recursively an array type with optional nullability marker.
@Nullable private Type parseType(TypeContext ctx) { if(ctx.singleType()!=null){ return parseSingleType(ctx.singleType()); }else{ NullableKind nullable = ctx.nullable!=null ? NullableKind.NULLABLE : NullableKind.NONNULL; Type ct = parseType(ctx.type()); if(ct==null) return null; return Types.getArrayType(ct,nullable); } }
// Defensive wrapper: logs and returns null for a null tree instead of throwing.
@Override public Object visit(ParseTree tree) { if (tree == null) { System.err.println("visit null"); return null; } return super.visit(tree); }
// Adds a package to the wildcard-import search path.
public void importPackage(@Nonnull String packageName) { this.importPaths.add(packageName); }
// Wraps the given statements in a fresh BlockStmt (scope is pushed and immediately popped).
BlockStmt wrapBlock(Statement... statms){ BlockStmt bs = newBlock(); bs.statements.addAll(Arrays.asList(statms)); popBlock(); return bs; }
// Pushes and returns a new block scope whose parent is the current block.
BlockStmt newBlock(){ BlockStmt bs = new BlockStmt(currentBlock); currentBlock = bs; return bs; }
// Pops the current block scope.
void popBlock(){ currentBlock = currentBlock.getParentBlock(); }
// The class node being built by this builder.
@Nonnull public ClassNode getAst() { return this.thisClazz; }
// Record source offsets on an AST node from a parse rule / a single token.
private void mapAst(@Nonnull AstNode node,@Nonnull ParserRuleContext tree){ node.offset = OffsetRangeHelper.getOffsetRange(tree); }
private void mapAst(@Nonnull AstNode node,@Nonnull Token token){ node.offset = OffsetRangeHelper.getOffsetRange(token); }
@Override public ThrowStmt visitThrowStat(KalangParser.ThrowStatContext ctx) { ThrowStmt ts = new ThrowStmt(visitExpression(ctx.expression())); mapAst(ts, ctx); return ts; }
// Map literal: lowered to a temp map variable plus one put() invocation per entry.
@Override public MultiStmtExpr visitMapExpr(KalangParser.MapExprContext ctx) { Type keyType = ctx.keyType!=null ?
// (continuation of visitMapExpr: resolve key/value element types, defaulting to the root type)
requireClassType(ctx.keyType) : Types.getRootType(); Type valueType = ctx.valueType!=null ? requireClassType(ctx.valueType) : Types.getRootType(); if(keyType==null || valueType == null) return null; LocalVarNode vo = createTempVar(Types.getParameterizedType(Types.getMapImplClassType(),new Type[]{keyType,valueType})); VarDeclStmt vds = new VarDeclStmt(vo); NewObjectExpr newExpr; try { newExpr = new NewObjectExpr(Types.getMapImplClassType()); } catch (MethodNotFoundException|AmbiguousMethodException ex) { throw new RuntimeException(ex); } List<Statement> stmts = new LinkedList<>(); stmts.add(vds); stmts.add(new ExprStmt(new AssignExpr(new VarExpr(vo), newExpr))); VarExpr ve = new VarExpr(vo); List<TerminalNode> ids = ctx.Identifier(); for (int i = 0; i < ids.size(); i++) { ExpressionContext e = ctx.expression(i); ExprNode v = (ExprNode) visit(e); ConstExpr k = new ConstExpr(ctx.Identifier(i).getText()); ExprNode[] args = new ExprNode[]{k,v}; InvocationExpr iv; try { iv = ObjectInvokeExpr.create(ve, "put",args); } catch (MethodNotFoundException|AmbiguousMethodException ex) { throw new RuntimeException(ex); } ExprStmt es = new ExprStmt(iv); stmts.add(es); } MultiStmtExpr mse = new MultiStmtExpr(stmts, ve); mapAst(mse,ctx);
//TODO set generic toType
return mse; }
// List literal: lowered to a temp list variable plus one add() invocation per element.
@Override public MultiStmtExpr visitListExpr(KalangParser.ListExprContext ctx) { List<Statement> stmts = new LinkedList<>(); Type valueType = ctx.Identifier()!=null ?requireClassType(ctx.Identifier().getSymbol()) :Types.getRootType(); if(valueType==null) return null; LocalVarNode vo = createTempVar(Types.getParameterizedType(Types.getListImplClassType(),new Type[]{valueType})); VarDeclStmt vds = new VarDeclStmt(vo); NewObjectExpr newExpr; try { newExpr = new NewObjectExpr(Types.getListImplClassType()); } catch (MethodNotFoundException|AmbiguousMethodException ex) { throw new RuntimeException(ex); } stmts.add(vds); VarExpr ve = new VarExpr(vo); stmts.add(new ExprStmt(new AssignExpr(ve, newExpr))); for
(ExpressionContext e : ctx.expression()) { InvocationExpr iv; try { iv = ObjectInvokeExpr.create(ve,"add",new ExprNode[]{visitExpression(e)}); } catch (MethodNotFoundException|AmbiguousMethodException ex) { throw new RuntimeException(ex); } stmts.add(new ExprStmt(iv)); } MultiStmtExpr mse = new MultiStmtExpr(stmts, ve); //TODO set generic toType mapAst(mse,ctx); return mse; } @Override public ExprNode visitExprNewArray(KalangParser.ExprNewArrayContext ctx) { Type type = parseSingleType(ctx.singleType()); ExprNode ret; if(ctx.size!=null){ ExprNode size = visitExpression(ctx.size); ret = new NewArrayExpr(type,size); }else{ ExprNode[] initExprs = new ExprNode[ctx.initExpr.size()]; for(int i=0;i<initExprs.length;i++){ initExprs[i] = visitExpression(ctx.initExpr.get(i)); } ret= BoxUtil.createInitializedArray(type, initExprs); } mapAst(ret, ctx); return ret; } @Override public AstNode visitExprQuestion(KalangParser.ExprQuestionContext ctx) { List<Statement> stmts = new LinkedList<>(); LocalVarNode vo = new LocalVarNode(); VarDeclStmt vds = new VarDeclStmt(vo); stmts.add(vds); VarExpr ve = new VarExpr(vo); ExprNode conditionExpr = (ExprNode) visit(ctx.expression(0)); ExprNode trueExpr = (ExprNode) visit(ctx.expression(1)); ExprNode falseExpr = (ExprNode) visit(ctx.expression(2)); IfStmt is = new IfStmt(conditionExpr ,wrapBlock(new ExprStmt(new AssignExpr(ve, trueExpr))) ,wrapBlock(new ExprStmt(new AssignExpr(ve,falseExpr))) ); Type trueType = trueExpr.getType(); Type falseType = falseExpr.getType(); if(trueType.equals(falseType)){ vo.type = trueType; }else{ //TODO get common type vo.type = Types.getRootType(); } stmts.add(is); MultiStmtExpr mse = new MultiStmtExpr(stmts, ve); mapAst(ve, ctx); return mse; } @Override public AstNode visitPostIfStmt(KalangParser.PostIfStmtContext ctx) { ExprNode leftExpr = visitExpression(ctx.expression(0)); if (!(leftExpr instanceof AssignExpr)) { this.handleSyntaxError("AssignExpr required", ctx); } AssignExpr assignExpr = (AssignExpr) 
// (continuation of visitPostIfStmt: unpack the assignment and guard it with the condition)
leftExpr; AssignableExpr to = assignExpr.getTo(); ExprNode from = assignExpr.getFrom(); ExprNode cond = visitExpression(ctx.expression(1)); Token op = ctx.op; if (op != null) { String opStr = op.getText(); BinaryExpr be = createBinaryExpr(to, cond, opStr); cond = be; } AssignExpr as = new AssignExpr(to, from); IfStmt is = new IfStmt(cond,wrapBlock(new ExprStmt(as)),null); mapAst(is,ctx); return is; }
// Root rule: names the class and visits all children.
@Override public AstNode visitCompilationUnit(CompilationUnitContext ctx) { thisClazz.name = this.className; visitChildren(ctx); return null; }
@Override public AstNode visitClassBody(ClassBodyContext ctx) { this.visitChildren(ctx); mapAst(thisClazz, ctx); return null; }
// Field declarations: creates a private backing field per declarator, queues initializers
// into the static/instance init statement lists, and synthesizes getter/setter when absent.
@Override public Void visitFieldDecl(FieldDeclContext ctx) { int mdf = this.parseModifier(ctx.varModifier()); for(VarDeclContext vd:ctx.varDecl()){ FieldNode fieldNode = thisClazz.createField(); fieldNode.modifier =ModifierUtil.setPrivate(mdf); if(vd.expression()!=null){ ExprNode initExpr = visitExpression(vd.expression()); varDecl(vd,fieldNode,initExpr.getType());
//TODO simplify it
if(AstUtil.isStatic(fieldNode.modifier)){ thisClazz.staticInitStmts.add(new ExprStmt(new AssignExpr(new StaticFieldExpr(new ClassReference(thisClazz), fieldNode), initExpr))); }else{ thisClazz.initStmts.add(new ExprStmt(new AssignExpr(new ObjectFieldExpr(new ThisExpr(thisType), fieldNode), initExpr))); } }else{ varDecl(vd,fieldNode, Types.getRootType()); } if(!AstUtil.hasGetter(thisClazz, fieldNode)){ AstUtil.createGetter(thisClazz, fieldNode, mdf); } if(!AstUtil.hasSetter(thisClazz, fieldNode)){ AstUtil.createSetter(thisClazz, fieldNode, mdf); } } return null; }
// Method declaration: builds the MethodNode (name/type/params), checks duplicate and
// override consistency, and defers the body (stored in methodBodys) to the ALL phase.
@Override public AstNode visitMethodDecl(MethodDeclContext ctx) { String name; Type type; boolean isOverriding = ctx.OVERRIDE() != null; int mdf = parseModifier(ctx.varModifier()); if (ctx.prefix != null && ctx.prefix.getText().equals("constructor")) { type = Types.VOID_TYPE; name = "<init>"; } else { if (ctx.type() == null) { type = Types.VOID_TYPE; } else { type
// (continuation of visitMethodDecl: explicit return type and the rest of the declaration)
= parseType(ctx.type()); } name = ctx.name.getText(); } method = thisClazz.createMethodNode(); method.annotations.addAll(getAnnotations(ctx.annotation())); method.modifier = mdf; method.type = type; method.name = name; if (ctx.varDecl() != null) { for(VarDeclContext vd:ctx.varDecl()){ ParameterNode pn = ParameterNode.create(method); varDecl(vd, pn, Types.getRootType()); method.parameters.add(pn); } }
//check method duplicated before generate java stub
String mStr = MethodUtil.getDeclarationKey(method); if (methodDeclared.contains(mStr)) {
//TODO should remove the duplicated method
handleSyntaxError("declare method duplicately:"+mStr, ctx); return null; } methodDeclared.add(mStr); MethodDescriptor overriddenMd = ClassTypeUtil.getMethodDescriptor(thisClazz.superType, mStr, thisClazz, true); if(overriddenMd==null){ overriddenMd = ClassTypeUtil.getMethodDescriptor(thisClazz.interfaces.toArray(new ClassType[thisClazz.interfaces.size()]), mStr, thisClazz, true); } if(isOverriding && overriddenMd==null){ handleSyntaxError("method does not override any method", ctx); } if(!isOverriding && overriddenMd!=null){ handleSyntaxError("method override a method but not declare", ctx); } if (ctx.blockStmt() != null) { methodBodys.put(method, ctx.blockStmt()); } if (ctx.exceptionTypes != null) { for (Token et : ctx.exceptionTypes) { ClassType exType = requireClassType(et); if(exType!=null){ method.exceptionTypes.add(exType); } } } mapAst(method, ctx); return method; }
@Override public AstNode visitType(TypeContext ctx) {
//do nothing
return null; }
// Visits every context in order and collects the results (nulls included).
public List<Object> visitAll(List<?
// (continuation of visitAll's parameter list)
extends ParserRuleContext> list) { List<Object> ret = new ArrayList<>(list.size()); for (ParserRuleContext i : list) { ret.add(visit(i)); } return ret; }
// if/else: each branch gets its own override-type scope so flow typing (onIf) stays local.
@Override public AstNode visitIfStat(IfStatContext ctx) { ExprNode expr = visitExpression(ctx.expression()); BlockStmt trueBody = null; BlockStmt falseBody = null; if (ctx.trueStmt != null) { newOverrideTypeStack(); onIf(expr, true); trueBody=requireBlock(ctx.trueStmt); popOverrideTypeStack();
//TODO pop block state
} if (ctx.falseStmt != null) { newOverrideTypeStack(); onIf(expr,false); falseBody=requireBlock(ctx.falseStmt); popOverrideTypeStack();
//TODO pop block state
} IfStmt ifStmt = new IfStmt(expr,trueBody,falseBody); mapAst(ifStmt,ctx); return ifStmt; }
// Visits an expression context, wrapping non-expression results in ErrorousExpr so
// downstream code always receives a non-null ExprNode.
private ExprNode visitExpression(ExpressionContext expression) { Object node = visit(expression); if(node instanceof ExprNode){ return (ExprNode) node; }else if(node instanceof AstNode){ return new ErrorousExpr((AstNode)node); }else{ return new ErrorousExpr(); } }
@Override public Statement visitStat(StatContext ctx) { return (Statement) visit(ctx.getChild(0)); }
@Override public AstNode visitReturnStat(ReturnStatContext ctx) { ReturnStmt rs = new ReturnStmt(); if (ctx.expression() != null) { rs.expr = visitExpression(ctx.expression()); } mapAst(rs,ctx); return rs; }
@Override public Statement visitVarDeclStat(VarDeclStatContext ctx) { Statement vars = visitLocalVarDecl(ctx.localVarDecl()); mapAst(vars,ctx); return vars; }
// Bare varDecl contexts are always handled via the varDecl(...) helper below.
@Override public VarObject visitVarDecl(VarDeclContext ctx) { throw new UnsupportedOperationException(); }
// Shared declarator logic for fields, parameters and locals: resolves the declared or
// inferred type, rejects name clashes, and defaults the type to the root type.
private void varDecl(VarDeclContext ctx,VarObject vds,Type inferedType){ String name = ctx.name.getText(); TypeContext type = null; if (ctx.varType != null) { type = ctx.varType; } else if (ctx.type() != null) { type = ctx.type(); } Type declType = type != null ?
// (continuation of varDecl: an explicit declared type wins over the inferred one)
parseType(type) : inferedType; if (isDefindedId(name)) { AstBuilder.this.handleSyntaxError("the name is definded:" + name, ctx); } vds.name = name; vds.type = declType;
// if (ctx.expression() != null) {
// vds.initExpr = visitExpression(ctx.expression());
// if(vds.type==null && vds.initExpr!=null){
// vds.type = vds.initExpr.getType();
if(vds.type==null){ vds.type = Types.getRootType(); } mapAst(vds,ctx); }
// Error-reporting helpers for method-resolution failures.
public void methodIsAmbiguous(Token token , AmbiguousMethodException ex){ AstBuilder.this.handleSyntaxError(ex.getMessage(), token); }
public void methodNotFound(Token token , String className,String methodName,ExprNode[] params){ Type[] types = AstUtil.getExprTypes(params); AstBuilder.this.handleSyntaxError("method not found:" + MethodUtil.toString(className,methodName, types), token); }
public void handleSyntaxError(String msg, Token token) {
//TODO what does EMPTY means?
handleSyntaxError(msg, (ParserRuleContext.EMPTY), token, token); }
public void handleSyntaxError(String msg,ParserRuleContext tree) { handleSyntaxError(msg, tree, tree.start, tree.stop); }
// All syntax errors funnel through here into the compilation unit's error handler.
public void handleSyntaxError(String desc,ParserRuleContext rule,Token start,Token stop){ SyntaxError syntaxError = new SyntaxError(desc, compilationUnit, rule, start,stop); errorHandler.handleCompileError(syntaxError); }
@Override public AstNode visitBreakStat(BreakStatContext ctx) { BreakStmt bs = new BreakStmt(); mapAst(bs,ctx); return bs; }
@Override public AstNode visitContinueStat(ContinueStatContext ctx) { ContinueStmt cs = new ContinueStmt(); mapAst(cs,ctx); return cs; }
// while: loop with a pre-condition only.
@Override public AstNode visitWhileStat(WhileStatContext ctx) { ExprNode preConditionExpr = visitExpression(ctx.expression()); BlockStmt loopBody = null; if (ctx.stat() != null) { loopBody = requireBlock(ctx.stat()); } LoopStmt ws = new LoopStmt(loopBody,preConditionExpr,null); mapAst(ws,ctx); return ws; }
// do-while: loop with a post-condition only.
@Override public AstNode visitDoWhileStat(DoWhileStatContext ctx) { BlockStmt loopBody = null; if (ctx.blockStmt() != null) {
// (continuation of visitDoWhileStat)
loopBody = requireBlock(ctx.blockStmt()); } ExprNode postConditionExpr = visitExpression(ctx.expression()); LoopStmt ls = new LoopStmt(loopBody,null,postConditionExpr); mapAst(ls,ctx); return ls; }
// for(init;cond;update): the outer block holds the init declarations; the inner block
// holds the body statements followed by the update expressions.
@Override public AstNode visitForStat(ForStatContext ctx) {
//It seems that here lacks of var stack
BlockStmt forStmt = newBlock(); if(ctx.localVarDecl()!=null){ Statement vars = visitLocalVarDecl(ctx.localVarDecl()); forStmt.statements.add(vars); } ExprNode preConditionExpr = (ExprNode) visit(ctx.expression()); BlockStmt bs =newBlock(); if (ctx.stat() != null) { Statement st = visitStat(ctx.stat()); if(st instanceof BlockStmt){ bs.statements.addAll(((BlockStmt)st).statements); } } if(ctx.expressions()!=null){ bs.statements.addAll(visitExpressions(ctx.expressions())); } popBlock(); LoopStmt ls = new LoopStmt(bs, preConditionExpr, null); mapAst(ls,ctx); forStmt.statements.add(ls); popBlock(); return forStmt; }
// Expression list -> one ExprStmt per expression. NOTE(review): raw `new LinkedList()` —
// should be `new LinkedList<>()`; left unchanged here.
@Override public List<Statement> visitExpressions(ExpressionsContext ctx) { List<Statement> list = new LinkedList(); for (ExpressionContext e : ctx.expression()) { ExprNode expr = visitExpression(e); list.add(new ExprStmt(expr)); } return list; }
@Override public AstNode visitExprStat(ExprStatContext ctx) { ExprNode expr = visitExpression(ctx.expression()); ExprStmt es = new ExprStmt(expr); mapAst(es,ctx); return es; }
// Unqualified call: this(...)/super(...) map to <init>; everything else resolves against
// this class (static or instance) via getImplicitInvokeExpr.
@Override public ExprNode visitExprMemberInvocation(ExprMemberInvocationContext ctx) { String methodName; if (ctx.key != null) { methodName = ctx.key.getText(); } else { methodName = ctx.Identifier().getText(); } if(methodName.equals("this")){ methodName = "<init>"; }else if(methodName.equals("super")){ methodName = "<init>"; } ExprNode[] args = visitAll(ctx.params).toArray(new ExprNode[0]); ExprNode ie = getImplicitInvokeExpr(methodName,args,ctx); return ie; }
// Chooses the binary AST node by operator: comparison, logic, or math.
private BinaryExpr createBinaryExpr(ExprNode expr1,ExprNode expr2,String op){ switch(op){ case "==": case "!=": case ">": case ">=": case "<": case "<=": return new CompareExpr(expr1, expr2,
// (continuation of createBinaryExpr)
op); case "&&": case "||": return new LogicExpr(expr1, expr2, op); default: return new MathExpr(expr1, expr2, op); } }
// Field access/assignment. `.` resolves the field statically (object or static field);
// `->` goes through dynamic getter/setter invocations. fromCtx==null means read, else write.
protected ExprNode createFieldExpr(ExprGetFieldContext to,@Nullable ExpressionContext fromCtx){ String refKey = to.refKey.getText(); ExpressionContext exp = to.expression(); String fname = to.Identifier().getText(); AssignableExpr toExpr; Object expr = visit(exp); if(refKey.equals(".")){ ExprNode fieldExpr; if(expr instanceof ExprNode){ ExprNode exprNode = (ExprNode) expr; fieldExpr = getObjectFieldLikeExpr(exprNode,fname,to); }else if(expr instanceof ClassReference){ fieldExpr = getStaticFieldExpr((ClassReference)expr, fname, to); }else{ throw new UnknownError("unknown node:" + expr); } if(fromCtx==null){ return fieldExpr; }else{ if(fieldExpr instanceof AssignableExpr){ toExpr = (AssignableExpr) fieldExpr; }else{ AstBuilder.this.handleSyntaxError("unsupported", to); return null; } return new AssignExpr(toExpr,visitExpression(fromCtx)); } }else if(refKey.equals("->")){
//ClassNode ast = getAst("kalang.runtime.dynamic.FieldVisitor");
//if(ast==null) throw new UnknownError();
ExprNode[] params; String methodName; if(fromCtx==null){ params = new ExprNode[0]; methodName = "get" + NameUtil.firstCharToUpperCase(fname); }else{ params = new ExprNode[1]; methodName = "set" + NameUtil.firstCharToUpperCase(fname); } if(expr instanceof ExprNode){
//params[0] = (ExprNode) expr;
if(fromCtx!=null) params[0] = visitExpression(fromCtx); return getObjectInvokeExpr((ExprNode)expr, methodName, params, to); }else{
//TODO handle static property
throw Exceptions.unsupportedTypeException(expr); }
//return getStaticInvokeExpr(new ClassReference(classAst),methodName,params, to);
}else{ throw new UnsupportedOperationException(refKey); } }
// Assignment: field targets are delegated to createFieldExpr; compound operators (+=, ...)
// are expanded to `to = to op from`.
@Override public ExprNode visitExprAssign(ExprAssignContext ctx) { String assignOp = ctx.getChild(1).getText(); ExpressionContext toCtx = ctx.expression(0); ExpressionContext fromCtx = ctx.expression(1); if(toCtx instanceof
// (continuation of visitExprAssign)
ExprGetFieldContext){ return createFieldExpr((ExprGetFieldContext)toCtx,fromCtx); } ExprNode to = visitExpression(toCtx); ExprNode from = visitExpression(fromCtx); if (assignOp.length() > 1) { String op = assignOp.substring(0, assignOp.length() - 1); from = createBinaryExpr(to, from, op); } AssignableExpr toExpr; if(to instanceof AssignableExpr){ toExpr = (AssignableExpr) to; AssignExpr aexpr = new AssignExpr(toExpr,from); mapAst(aexpr, ctx); return aexpr; }else{ AstBuilder.this.handleSyntaxError("unsupported assign statement",ctx); return null; } }
// Infix binary operator. Cases, in order: both primitive -> direct binary node; both
// numeric (boxed) -> unbox/promote to the common math type; ==/!= on references; `+`
// with a non-numeric operand -> string concatenation via String.concat.
@Override public AstNode visitExprMidOp(ExprMidOpContext ctx) { String op = ctx.getChild(1).getText(); ExprNode expr1 = visitExpression(ctx.expression(0)); ExprNode expr2 = visitExpression(ctx.expression(1)); Type type1 = expr1.getType(); Type type2 = expr2.getType(); boolean isPrimitive1 = (type1 instanceof PrimitiveType); boolean isPrimitive2 = (type2 instanceof PrimitiveType); ExprNode expr; if(isPrimitive1 && isPrimitive2){ BinaryExpr be = createBinaryExpr(expr1,expr2,op); expr = be; }else if(Types.isNumber(type1) && Types.isNumber(type2)){ PrimitiveType t = SemanticAnalyzer.getMathType(type1, type2, op); expr1 = BoxUtil.assign(expr1, type1, t); expr2 = BoxUtil.assign(expr2, type2, t); if(expr1==null || expr2 == null){ throw new UnknownError("cast fail"); } expr = createBinaryExpr(expr1, expr2, op); }else if(op.equals("==") || op.equals("!=")){ expr = createBinaryExpr(expr1, expr2, op); }else if(op.equals("+")){ if(!Types.getStringClassType().equals(type1)){ expr1 = BoxUtil.castToString(expr1); } if(!Types.getStringClassType().equals(type2)){ expr2 = BoxUtil.castToString(expr2); } if(expr1==null || expr2 == null){ AstBuilder.this.handleSyntaxError("unsupported types", ctx); return null; } InvocationExpr ie; try { ie = ObjectInvokeExpr.create(expr1, "concat",new ExprNode[]{expr2}); } catch (MethodNotFoundException|AmbiguousMethodException ex) { throw new RuntimeException(ex); } expr = ie; }else{ throw new
// (continuation of visitExprMidOp: unrecognized operator/type combination)
UnknownError("unknown binary expression"); } mapAst(expr, ctx); return expr; }
// Unqualified invocation against this class: dispatches statically or virtually depending
// on the resolved method; unresolved names become UnknownInvocationExpr for later handling.
private ExprNode getImplicitInvokeExpr(String methodName,ExprNode[] args, ParserRuleContext ctx){ ExprNode expr; try { ClassType clazzType = thisType; InvocationExpr.MethodSelection ms = InvocationExpr.applyMethod(clazzType, methodName, args,clazzType.getMethodDescriptors(thisClazz, true)); if(Modifier.isStatic(ms.selectedMethod.getModifier())){ expr = new StaticInvokeExpr(new ClassReference(thisClazz), ms.selectedMethod, ms.appliedArguments); }else{ expr = new ObjectInvokeExpr(new ThisExpr(thisType), ms.selectedMethod, ms.appliedArguments); } } catch (MethodNotFoundException ex) { expr = new UnknownInvocationExpr(null, methodName, args); } catch (AmbiguousMethodException ex) { methodIsAmbiguous(ctx.start, ex); return null; } mapAst(expr, ctx); return expr; }
// Convenience overload: visit the argument contexts first.
private ExprNode getObjectInvokeExpr(ExprNode target,String methodName,List<ExpressionContext> argumentsCtx,ParserRuleContext ctx){ ExprNode[] args = visitAll(argumentsCtx).toArray(new ExprNode[0]); return getObjectInvokeExpr(target, methodName, args, ctx); }
// Instance invocation; a method whose declared return type is a generic type parameter is
// wrapped in a cast to the resolved class type.
private ExprNode getObjectInvokeExpr(ExprNode target,String methodName,ExprNode[] args,ParserRuleContext ctx){ if("<init>".equals(methodName)){ throw new UnsupportedOperationException("don't get constructor by this method"); } ExprNode expr; try { ObjectInvokeExpr invoke = ObjectInvokeExpr.create(target, methodName, args,thisClazz); if(invoke.getMethod().getMethodNode().type instanceof GenericType){ Type invokeType = invoke.getType(); if(invokeType instanceof ClassType){ expr = new CastExpr(invokeType, invoke); }else{ expr = invoke; } }else{ expr = invoke; } } catch (MethodNotFoundException ex) { expr= new UnknownInvocationExpr(target,methodName,args); } catch(AmbiguousMethodException ex){ methodIsAmbiguous(ctx.start,ex); return null; } mapAst(expr, ctx); return expr; }
private ExprNode getStaticInvokeExpr(ClassReference clazz,String methodName,List<ExpressionContext>
// (continuation of getStaticInvokeExpr: list-of-contexts overload)
argumentsCtx,ParserRuleContext ctx){ return getStaticInvokeExpr(clazz, methodName, visitAll(argumentsCtx).toArray(new ExprNode[0]), ctx); }
// Static invocation; unresolved names become UnknownInvocationExpr.
private ExprNode getStaticInvokeExpr(ClassReference clazz,String methodName, ExprNode[] argumentsCtx,ParserRuleContext ctx){ ExprNode[] args = argumentsCtx; ExprNode expr; try { expr = StaticInvokeExpr.create(clazz, methodName, args); } catch (MethodNotFoundException ex) { expr = new UnknownInvocationExpr(clazz, methodName , args); } catch(AmbiguousMethodException ex){ methodIsAmbiguous(ctx.start, ex); return null; } mapAst(expr, ctx); return expr; }
// Qualified invocation: `.` dispatches statically/virtually by target kind; `->` routes
// through the runtime MethodDispatcher (dynamic invocation), requiring the kalang runtime.
@Override public AstNode visitExprInvocation(ExprInvocationContext ctx) { Object target = visit(ctx.target); if(target==null) return null; String mdName = ctx.Identifier().getText(); String refKey = ctx.refKey.getText(); if(refKey.equals(".")){ if(target instanceof ClassReference){ return getStaticInvokeExpr((ClassReference) target, mdName,ctx.params, ctx); }else if(target instanceof ExprNode){ return getObjectInvokeExpr((ExprNode) target, mdName, ctx.params,ctx); }else{ throw new UnknownError("unknown node:"+ target); } }else if(refKey.equals("->")){ ExprNode[] invokeArgs = new ExprNode[3]; ExprNode[] params = new ExprNode[ctx.params.size()]; if(target instanceof ClassReference){ invokeArgs[0] = new ConstExpr(null); }else if(target instanceof ExprNode){ invokeArgs[0] = ((ExprNode) target); } invokeArgs[1] = new ConstExpr(mdName); for(int i=0;i<params.length;i++){ params[i] = visitExpression(ctx.params.get(i)); } invokeArgs[2] = BoxUtil.createInitializedArray(Types.getRootType(), params); ClassNode dispatcherAst = getAst("kalang.runtime.invoke.MethodDispatcher"); if(dispatcherAst==null){ throw new RuntimeException("Runtime library is required!"); } return getStaticInvokeExpr(new ClassReference(dispatcherAst), "invokeMethod", invokeArgs, ctx); }else{ throw new UnsupportedOperationException(refKey); } }
// Field read (`a.b` / `a->b`): delegate to createFieldExpr with no source expression.
@Override public ExprNode visitExprGetField(ExprGetFieldContext ctx) { return
// (continuation of visitExprGetField)
createFieldExpr(ctx, null); }
// Prefix unary operator (e.g. -, !, ~).
@Override public UnaryExpr visitExprSelfOpPre(ExprSelfOpPreContext ctx) { String op = ctx.getChild(0).getText(); UnaryExpr ue = new UnaryExpr( visitExpression( ctx.expression() ) , op ); mapAst(ue, ctx); return ue; }
// Array element access a[i].
@Override public ElementExpr visitExprGetArrayElement(ExprGetArrayElementContext ctx) { ElementExpr ee = new ElementExpr( visitExpression(ctx.expression(0)) ,visitExpression(ctx.expression(1)) ); mapAst(ee, ctx); return ee; }
/**
 * Expands a class name to a fully qualified one when possible, consulting explicit
 * imports first, then the current package and the wildcard-import paths.
 *
 * @param id simple or qualified class name
 * @return the fully qualified name, or {@code id} unchanged when no match is found
 */
private String expandClassName(String id){ if (fullNames.containsKey(id)) { return fullNames.get(id); } else { List<String> paths = new ArrayList<>(importPaths.size()+1); paths.add(classPath); paths.addAll(importPaths); for (String p : paths) { String clsName; if(p!=null && p.length()>0){ clsName = p + "." + id; }else{ clsName = id; } ClassNode cls = astLoader.getAst(clsName); if (cls != null) { return clsName; } } } return id; }
// True when the name is already taken by a class or a visible variable/parameter/field.
private boolean isDefindedId(String id){ if(isClassId(id)) return true; if(getNodeById(id,null)!=null) return true; return false; }
private boolean isClassId(String name){ String id = expandClassName(name); ClassNode targetClass = astLoader.getAst(id); return (targetClass!=null); }
// Resolves a name or reports "<name> is undefined!".
private AstNode requireNameDefined(String name,Token token){ AstNode n = getNodeById(name, token); if(n==null){ AstBuilder.this.handleSyntaxError(name + " is undefined!", token); return null; } return n; }
protected VarTable<String, LocalVarNode> requireVarTable(){ return currentBlock.getScopeVarTable(); }
@Nullable protected VarTable<String, LocalVarNode> getVarTable(){ return currentBlock==null ?
// (continuation of getVarTable: null outside any block scope)
null : currentBlock.getScopeVarTable(); }
// Resolves an identifier, in order: class name -> local variable -> method parameter ->
// field of this class (static or instance). Returns null when nothing matches.
@Nullable private AstNode getNodeById(@Nonnull String name,@Nullable Token token) { if(isClassId(name)){ ClassReference clsRef = new ClassReference(requireAst(name,token)); if(token!=null) mapAst(clsRef, token); return clsRef; } VarTable<String, LocalVarNode> vtb = getVarTable(); if (vtb!=null && vtb.exist(name)) { LocalVarNode var = vtb.get(name);
//vars.indexOf(vo);
VarExpr ve = new VarExpr(var,overrideTypes.get(var)); if(token!=null) mapAst(ve, token); return ve; } else {
//find parameters
if (method != null && method.parameters != null) { for (ParameterNode p : method.parameters) { if (p.name.equals(name)) { ParameterExpr ve = new ParameterExpr(p,overrideTypes.get(p)); if(token!=null) mapAst(ve, token); return ve; } } } if (thisClazz.fields != null) { for (FieldNode f : thisClazz.fields) { if (f.name!=null && f.name.equals(name)) {
//TODO override field type
FieldExpr fe; if(Modifier.isStatic(f.modifier)){ fe = new StaticFieldExpr(new ClassReference(thisClazz), f); }else{ fe = new ObjectFieldExpr(new ThisExpr(thisType), f); } if(token!=null) mapAst(fe, token); return fe; } } } } return null; }
// Literal constants. NOTE(review): CharacterLiteral takes the raw second character, so an
// escaped char like '\n' would yield '\\' here — confirm the grammar forbids escapes.
@Override public ConstExpr visitLiteral(LiteralContext ctx) { String t = ctx.getText(); Object v; if (ctx.IntegerLiteral() != null) { v = ( Integer.parseInt(t)); } else if (ctx.FloatingPointLiteral() != null) { v = ( Float.parseFloat(t)); } else if (ctx.BooleanLiteral() != null) { v = ( Boolean.parseBoolean(t)); } else if (ctx.CharacterLiteral() != null) { char[] chars = t.toCharArray(); v = ( chars[1]); } else if (ctx.StringLiteral() != null) { v = (StringLiteralUtil.parse(t.substring(1, t.length() - 1))); }else if(ctx.Identifier()!=null){ ClassReference cr = requireClassReference(ctx.Identifier().getSymbol()); v = (cr); } else if(ctx.getText().equals("null")) { v = null; }else{ throw new UnknownError("unknown literal:"+ctx.getText()); } ConstExpr ce = new ConstExpr(v); mapAst(ce,ctx); return ce; }
@Override public AstNode
// Import declaration: handles relative roots, aliases, and wildcard (`*`) imports.
visitImportDecl(ImportDeclContext ctx) { String name = ctx.name.getText(); String delim = ctx.delim.getText(); String prefix = ""; if("\\".equals(delim)){ boolean relative = ctx.root == null || ctx.root.getText().length() == 0; if (relative && this.classPath.length() > 0) { prefix = this.classPath + "."; } } if (ctx.path != null) { for (Token p : ctx.path) { prefix += p.getText() + "."; } } if (name.equals("*")) { this.importPaths.add(prefix.substring(0, prefix.length() - 1)); } else { String key = name; if (ctx.alias != null) { key = ctx.alias.getText(); } this.fullNames.put(key, prefix + name); } return null; }
@Override public AstNode visitQualifiedName(QualifiedNameContext ctx) {
//do nothing
return null; }
// Modifier list -> java.lang.reflect.Modifier bits; access defaults to public.
private int parseModifier(VarModifierContext modifier) { if (modifier == null) { return Modifier.PUBLIC; } int m = 0; int access = 0; for (ParseTree c : modifier.children) { String s = c.getText(); switch (s) { case "public": access = Modifier.PUBLIC; break; case "protected": access = Modifier.PROTECTED; break; case "private": access = Modifier.PRIVATE; break; case "static": m += Modifier.STATIC; break; case "final": m += Modifier.FINAL; break; default: break; } } if (access == 0) { access = Modifier.PUBLIC; } return m + access; }
// new T(args): constructor-resolution failures are reported as syntax errors, not thrown.
@Override public AstNode visitNewExpr(NewExprContext ctx) { ClassType clsType = parseClassType(ctx.classType()); if(clsType==null) return null; ExprNode[] params = visitAll(ctx.params).toArray(new ExprNode[0]); NewObjectExpr newExpr; try { newExpr = new NewObjectExpr(clsType,params); mapAst(newExpr,ctx); return newExpr; } catch (MethodNotFoundException ex) { methodNotFound(ctx.classType().rawClass, clsType.getName(), "<init>", params); return null; } catch(AmbiguousMethodException ex){ methodIsAmbiguous(ctx.classType().rawClass ,ex); return null; } }
// Explicit cast expression.
@Override public AstNode visitCastExpr(CastExprContext ctx) { ExprNode expr = visitExpression(ctx.expression()); Type toType = parseType(ctx.type()); CastExpr ce = new
// (continuation of visitCastExpr)
CastExpr(toType,expr); mapAst(ce,ctx); return ce; }
// try/catch/finally: each catch clause declares a local for its exception variable.
@Override public AstNode visitTryStat(TryStatContext ctx) { BlockStmt tryExecStmt = requireBlock(ctx.exec); List<CatchBlock> tryCatchBlocks = new LinkedList<>(); if (ctx.catchTypes != null) { for (int i = 0; i < ctx.catchTypes.size(); i++) { String vName = ctx.catchVarNames.get(i).getText(); String vType = ctx.catchTypes.get(i).getText(); LocalVarNode vo = new LocalVarNode(); vo.name = vName; vo.type = requireClassType(vType, ctx.catchTypes.get(i).start); BlockStmt catchExecStmt = requireBlock(ctx.catchExec.get(i)); CatchBlock catchStmt = new CatchBlock(vo,catchExecStmt); tryCatchBlocks.add(catchStmt); } } BlockStmt tryFinallyStmt = null; if (ctx.finallyExec != null) { tryFinallyStmt = requireBlock(ctx.finallyExec); } TryStmt tryStmt = new TryStmt(tryExecStmt,tryCatchBlocks,tryFinallyStmt); mapAst(tryStmt,ctx); return tryStmt; }
// Local declarations: one VarDeclStmt per declarator plus an assignment when initialized;
// each local is registered in the current block's variable table.
@Override public Statement visitLocalVarDecl(LocalVarDeclContext ctx) {
//TODO create statements
MultiStmt ms = new MultiStmt();
//List<LocalVarNode> list = new LinkedList();
for (VarDeclContext v : ctx.varDecl()) { ExprNode initExpr = null; if(v.expression()!=null){ initExpr = visitExpression(v.expression()); } LocalVarNode localVar = new LocalVarNode(); VarDeclStmt vds = new VarDeclStmt(localVar); ms.statements.add(vds); if(initExpr==null){ varDecl(v, localVar, Types.getRootType()); }else{ varDecl(v, localVar,initExpr.getType()); AssignExpr assignExpr = new AssignExpr(new VarExpr(localVar), initExpr); mapAst(assignExpr, v); ms.statements.add(new ExprStmt(assignExpr)); } mapAst(localVar,ctx);
//list.add(localVar);
requireVarTable().put(localVar.name, localVar); } return ms; }
// Identifier expression: resolved via getNodeById; undefined names are reported.
@Override public AstNode visitExprIdentifier(ExprIdentifierContext ctx) { String name = ctx.Identifier().getText(); AstNode expr = this.getNodeById(name,ctx.Identifier().getSymbol()); if (expr == null) { this.handleSyntaxError(name + " is undefined!", ctx); return null; } mapAst(expr,ctx); return expr; }
@Override
public AstNode visitExprLiteral(ExprLiteralContext ctx) { return visitLiteral(ctx.literal()); } @Override public AstNode visitExprParen(ExprParenContext ctx) { return visitExpression(ctx.expression()); } @Override public AstNode visitBlockStmt(BlockStmtContext ctx) { BlockStmt bs =newBlock(); if (ctx.stat() == null) { return bs; } for (StatContext s : ctx.stat()) { bs.statements.add(visitStat(s)); } mapAst(bs,ctx); popBlock(); return bs; } @Override public AstNode visitVarModifier(VarModifierContext ctx) { throw new UnsupportedOperationException(); } @Override public AstNode visitExprSelfRef(ExprSelfRefContext ctx) { String key = ctx.ref.getText(); AstNode expr; if(key.equals("this")){ expr = new ThisExpr(thisType); }else if(key.equals("super")){ expr = new SuperExpr(thisClazz); }else{ throw new UnknownError(); } mapAst(expr, ctx); return expr; } @Override public Object visitSingleType(KalangParser.SingleTypeContext ctx) { throw new UnsupportedOperationException(); } @Override public Object visitPrimitiveType(KalangParser.PrimitiveTypeContext ctx) { throw new UnsupportedOperationException("Not supported yet."); } @Override public IncrementExpr visitExprInc(KalangParser.ExprIncContext ctx) { return getIncrementExpr(ctx.expression(), ctx.op.getText(), false); } @Override public IncrementExpr visitExprIncPre(KalangParser.ExprIncPreContext ctx) { return getIncrementExpr(ctx.expression(), ctx.op.getText(), true); } public IncrementExpr getIncrementExpr(ExpressionContext expressionContext,String op,boolean isPrefix){ ExprNode expr = visitExpression(expressionContext); if(!(expr instanceof AssignableExpr)){ AstBuilder.this.handleSyntaxError("require assignable expression", expressionContext); return null; } boolean isDesc = op.equals(" return new IncrementExpr((AssignableExpr) expr, isDesc, isPrefix); } private ExprNode checkBox(ExprNode expr1, Type fromType, Type toType,Token token) { ExprNode expr = BoxUtil.assign(expr1,fromType,toType); if(expr==null){ 
AstBuilder.this.handleSyntaxError("unable to cast " + fromType + " to " + toType, token); } return expr; } protected ExprNode getObjectFieldLikeExpr(ExprNode expr,String fieldName,ParserRuleContext rule){ ExprNode ret; Type type = expr.getType(); if(!(type instanceof ClassType)){ AstBuilder.this.handleSyntaxError("unsupported type", rule); return null; } ClassType exprType = (ClassType) type; if ((exprType instanceof ArrayType) && fieldName.equals("length")) { ret = new ArrayLengthExpr(expr); } else { try { ret = ObjectFieldExpr.create(expr, fieldName,thisClazz); } catch (FieldNotFoundException ex) { ret = new UnknownFieldExpr(expr,exprType.getClassNode(),fieldName); } } mapAst(ret, rule); return ret; } protected AssignableExpr getStaticFieldExpr(ClassReference clazz,String fieldName,ParserRuleContext rule){ AssignableExpr ret; try { ret = StaticFieldExpr.create(clazz,fieldName,thisClazz); } catch (FieldNotFoundException ex) { ret = new UnknownFieldExpr(clazz, clazz.getReferencedClassNode(), fieldName); } mapAst(ret, rule); return ret; } @Override public Object visitErrorousStat(KalangParser.ErrorousStatContext ctx) { handleSyntaxError("missing ';'", ctx, ctx.start , ctx.stop); return null; } @Override public Object visitErrorousMemberExpr(KalangParser.ErrorousMemberExprContext ctx) { handleSyntaxError("identifier excepted", ctx, ctx.stop , ctx.stop); return null; } @Override public Object visitExprInstanceOf(KalangParser.ExprInstanceOfContext ctx) { ExprNode expr = visitExpression(ctx.expression()); Token ts = ctx.Identifier().getSymbol(); AstNode tnode = getNodeById(ts.getText(), ts); if(tnode instanceof ClassReference){ InstanceOfExpr ie = new InstanceOfExpr(expr, (ClassReference)tnode); mapAst(ie, ctx); return ie; }else{ AstBuilder.this.handleSyntaxError("unsupported type", ts); return null; } } @Override public Object visitScriptDef(KalangParser.ScriptDefContext ctx) { thisClazz.modifier = Modifier.PUBLIC; thisClazz.superType = Types.getRootType(); 
List<MethodDeclContext> mds = ctx.methodDecl(); if(mds!=null){ for(MethodDeclContext m:mds){ visit(m); } } MethodNode mm = thisClazz.createMethodNode(); mm.name = "main"; mm.modifier = Modifier.PUBLIC + Modifier.STATIC; mm.type = Types.VOID_TYPE; mm.exceptionTypes = Collections.singletonList(Types.getExceptionClassType()); ParameterNode pn = ParameterNode.create(mm); pn.name = "args"; pn.type = Types.getArrayType(Types.getStringClassType()); mm.parameters = Collections.singletonList(pn); method = mm; List<StatContext> stats = ctx.stat(); List<Statement> ss = new LinkedList<>(); BlockStmt body = newBlock(); if(stats!=null){ for(StatContext s:stats){ Object statement = visit(s); if(statement!=null){ ss.add((Statement)statement); } } } popBlock(); body.statements.addAll(ss); mm.body = body; return null; } protected List<AnnotationNode> getAnnotations(@Nullable List<KalangParser.AnnotationContext> ctxs){ List<AnnotationNode> list = new LinkedList<>(); if(ctxs!=null){ for(KalangParser.AnnotationContext an:ctxs){ AnnotationNode anNode = visitAnnotation(an); if(anNode!=null) list.add(anNode); } } return list; } @Override public Object visitClassDef(KalangParser.ClassDefContext ctx) { thisClazz.annotations.addAll(getAnnotations(ctx.annotation())); thisClazz.modifier = parseModifier(ctx.varModifier()); Token classKind = ctx.classKind; if(classKind!=null){ if (classKind.getText().equals("interface")) { thisClazz.isInterface = true; } } List<Token> gnrTypes = ctx.genericTypes; if(gnrTypes!=null && !gnrTypes.isEmpty()){ for(Token g:gnrTypes){ GenericType gt = new GenericType(g.getText(),new Type[]{Types.getRootType()},NullableKind.NONNULL); this.declarededGenericTypes.put(gt.getName(),gt); thisClazz.declareGenericType(gt); } } if (ctx.parentClass != null) { ClassType parentClass = parseClassType(ctx.parentClass); if(parentClass!=null){ thisClazz.superType = parentClass; } }else{ thisClazz.superType = Types.getRootType(); } if (ctx.interfaces != null && ctx.interfaces.size() > 
0) { for (KalangParser.ClassTypeContext itf : ctx.interfaces) { ClassType itfClz = parseClassType(itf); if(itfClz!=null){ thisClazz.interfaces.add(itfClz); } } } visit(ctx.classBody()); mapAst(thisClazz, ctx); return null; } @Override public AnnotationNode visitAnnotation(KalangParser.AnnotationContext ctx) { ClassNode anType = requireAst(ctx.annotationType); if(anType==null) return null; List<Token> vk = ctx.annotationValueKey; LiteralContext dv = ctx.annotationDefaultValue; AnnotationNode anNode = new AnnotationNode(anType); if(vk!=null && vk.size()>0){ List<LiteralContext> anValues = ctx.annotationValue; int ksize = vk.size(); for(int i=0;i<ksize;i++){ String kname = vk.get(i).getText(); ConstExpr value = visitLiteral(anValues.get(i)); anNode.values.put(kname, value); } }else if(dv!=null){ ConstExpr defaultValue = visitLiteral(dv); anNode.values.put("value", defaultValue); } //TODO validate annotation's values return anNode; } private BlockStmt requireBlock(ParserRuleContext stmt) { if(stmt instanceof BlockStmtContext){ return (BlockStmt)visit(stmt); }else{ BlockStmt bs = newBlock(); bs.statements.add((Statement)visit(stmt)); popBlock(); return bs; } } @Override public Object visitClassType(KalangParser.ClassTypeContext ctx) { return null; } @Override public Object visitParameterizedElementType(KalangParser.ParameterizedElementTypeContext ctx) { return null; } private Type parseParameterizedElementType(KalangParser.ParameterizedElementTypeContext ctx){ if(ctx.Identifier()!=null){ String id = ctx.Identifier().getText(); if(declarededGenericTypes.containsKey(id)){ return declarededGenericTypes.get(id); } return requireClassType(ctx.Identifier().getSymbol()); }else{ return parseWildcardType(ctx.wildcardType()); } } @Override public Object visitWildcardType(KalangParser.WildcardTypeContext ctx) { return null; } private Type parseWildcardType(KalangParser.WildcardTypeContext ctx){ ClassType classType = parseClassType(ctx.classType()); if(classType==null) return null; 
Type[] bounds = new Type[]{classType}; String boundKind = ctx.boundKind.getText(); if(boundKind.equals("super")){ return new WildcardType(new Type[]{Types.getRootType()},bounds); }else{ return new WildcardType(bounds,null); } } }
package com.mollie.api.resource;

import java.net.URISyntaxException;
import org.apache.http.client.utils.URIBuilder;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.lang.reflect.ParameterizedType;
import java.util.List;
import java.util.ArrayList;
import java.util.Locale;
import java.util.Map;
import java.util.LinkedHashMap;
import com.google.gson.Gson;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import com.mollie.api.MollieClient;
import com.mollie.api.MollieException;

/**
 * Base class for Mollie REST resources.
 *
 * Provides generic create/read/list helpers that perform HTTP calls through
 * the shared {@link MollieClient} and decode the JSON responses into the
 * concrete resource type {@code T} using Gson.
 *
 * @param <T> concrete resource type handled by the subclass
 */
abstract public class BaseResource <T> {
    public static final String REST_CREATE = MollieClient.HTTP_POST;
    public static final String REST_UPDATE = MollieClient.HTTP_POST;
    public static final String REST_READ = MollieClient.HTTP_GET;
    public static final String REST_LIST = MollieClient.HTTP_GET;
    public static final String REST_DELETE = MollieClient.HTTP_DELETE;

    /**
     * Default number of objects to retrieve when listing all objects.
     */
    public static final int DEFAULT_LIMIT = 10;

    protected MollieClient _api;

    public BaseResource(MollieClient api) {
        _api = api;
    }

    /**
     * @return Default resource name used by this resource when performing api calls.
     *         Derived from the simple class name, lowercased.
     */
    protected String getResourceName() {
        String className = getClass().getName();
        String resourceName = className;
        String [] elements = className.split("\\.");

        if (elements.length > 0)
            resourceName = elements[elements.length-1];

        // Locale.ROOT: the result is used as a URL path segment, so the
        // lowercasing must not depend on the JVM's default locale
        // (e.g. Turkish dotless-i).
        return resourceName.toLowerCase(Locale.ROOT);
    }

    /**
     * @return the concrete class bound to {@code T} by the direct subclass.
     */
    @SuppressWarnings("unchecked")
    protected Class<T> returnedClass() {
        ParameterizedType parameterizedType = (ParameterizedType)getClass()
                                                .getGenericSuperclass();
        return (Class<T>) parameterizedType.getActualTypeArguments()[0];
    }

    /**
     * Convenience method to copy all public, non-static, non-final properties
     * from a src object into a dst object of the same type.
     *
     * Best-effort: reflection failures are logged to stderr and the copy is
     * aborted, leaving dst partially filled.
     *
     * @param src Source object to copy properties from
     * @param dst Target object
     */
    protected void copyInto(T src, T dst) {
        Field[] fromFields = returnedClass().getDeclaredFields();

        try {
            for (Field field : fromFields) {
                int modifiers = field.getModifiers();

                // Only plain public instance fields are considered part of
                // the resource's data.
                if (Modifier.isPublic(modifiers)
                        && !Modifier.isFinal(modifiers)
                        && !Modifier.isStatic(modifiers)) {
                    field.set(dst, field.get(src));
                }
            }
        } catch (Exception e) {
            // Deliberately best-effort; keep the historical behavior of not
            // propagating reflection errors to callers.
            e.printStackTrace();
        }
    }

    /**
     * Retrieve all objects of a certain resource.
     *
     * @return list of fetched objects
     * @throws MollieException when the client is unable to fetch the objects
     *                         from the server
     *
     * @see #all(Map options)
     * @see #all(int offset, int limit)
     * @see #all(int offset, int limit, Map options)
     */
    public List<T> all() throws MollieException {
        return this.all(0, 0);
    }

    /**
     * Retrieve all objects of a certain resource.
     *
     * @param options additional options to include when fetching objects.
     * @return list of fetched objects
     * @throws MollieException when the client is unable to fetch the objects
     *                         from the server
     *
     * @see #all()
     * @see #all(int offset, int limit)
     * @see #all(int offset, int limit, Map options)
     */
    public List<T> all(Map<String,String> options) throws MollieException {
        return this.all(0, 0, options);
    }

    /**
     * Retrieve all objects of a certain resource.
     *
     * @param offset page offset of the objects to retrieve
     * @param limit maximum number of objects to retrieve
     * @return list of fetched objects
     * @throws MollieException when the client is unable to fetch the objects
     *                         from the server
     *
     * @see #all()
     * @see #all(Map options)
     * @see #all(int offset, int limit, Map options)
     */
    public List<T> all(int offset, int limit) throws MollieException {
        return this.all(offset, limit, null);
    }

    /**
     * Retrieve all objects of a certain resource.
     *
     * @param offset page offset of the objects to retrieve
     * @param limit maximum number of objects to retrieve
     * @param options additional options to include when fetching objects.
     * @return list of fetched objects
     * @throws MollieException when the client is unable to fetch the objects
     *                         from the server
     *
     * @see #all()
     * @see #all(Map options)
     * @see #all(int offset, int limit)
     */
    public List<T> all(int offset, int limit, Map<String,String> options)
        throws MollieException
    {
        return this.rest_list(this.getResourceName(), offset, limit, options);
    }

    /**
     * Retrieve information on a single resource from Mollie.
     *
     * Will throw a MollieException if the resource cannot be found.
     *
     * @param resourceId Id of the object to retrieve.
     * @return object
     * @throws MollieException if the client is unable to get a resource from
     *                         the server.
     */
    public T get(String resourceId) throws MollieException {
        return this.rest_read(this.getResourceName(), resourceId);
    }

    /**
     * Create a resource with the remote API.
     *
     * @param data an object containing details on the resource. Fields supported
     *             depend on the resource created.
     * @return object on success or null on failure.
     * @throws MollieException when the client is unable to create a resource
     *                         on the server.
     */
    public T create(Object data) throws MollieException {
        Gson gson = new Gson();
        String encoded = gson.toJson(data);

        if (encoded != null)
            return this.rest_create(this.getResourceName(), encoded);
        else
            return null;
    }

    /**
     * Creates a resource with the REST API.
     *
     * @param restResource resource name
     * @param body contents used for creation of object
     * @return object on success or null on failure.
     * @throws MollieException when the client is unable to create a resource
     *                         on the server.
     */
    private T rest_create(String restResource, String body)
        throws MollieException
    {
        JsonObject result = this.performApiCall(REST_CREATE, restResource, body);

        if (result != null) {
            Gson gson = new Gson();
            return gson.fromJson(result, returnedClass());
        }

        return null;
    }

    /**
     * Retrieves a single object from the REST API.
     *
     * @param restResource resource name
     * @param id Id of the object to retrieve
     * @return object
     * @throws MollieException when the client is unable to read the results
     *                         from the server.
     */
    private T rest_read(String restResource, String id) throws MollieException {
        String method = restResource + "/" + id;
        JsonObject result = this.performApiCall(REST_READ, method);

        if (result != null) {
            Gson gson = new Gson();
            return gson.fromJson(result, returnedClass());
        }

        return null;
    }

    /**
     * Creates a valid query string from the supplied options that can be used
     * as url parameters. The method returns null if there was an error building
     * the query string.
     *
     * @param options options to build a query string from
     * @return a valid query string or null.
     */
    private String buildQueryFromMap(Map<String,String> options) {
        String queryString = null;

        try {
            URIBuilder ub = new URIBuilder();

            for (Map.Entry<String, String> entry : options.entrySet()) {
                ub.addParameter(entry.getKey(), entry.getValue());
            }

            queryString = ub.build().getQuery();
        } catch (URISyntaxException e) {
            e.printStackTrace();
        }

        return queryString;
    }

    /**
     * Get a collection of objects from the REST API.
     *
     * @param restResource resource name
     * @param offset page offset of the objects to retrieve
     * @param limit maximum number of objects to retrieve
     * @param options additional options
     * @return list of decoded objects; empty when the response carried no data
     * @throws MollieException if there was a problem fetching the objects
     */
    private List<T> rest_list(String restResource, int offset, int limit,
                              Map<String,String> options) throws MollieException
    {
        if (options == null) {
            options = new LinkedHashMap<String,String>();
        }

        if (!options.containsKey("offset")) {
            options.put("offset", Integer.toString(offset));
        }

        if (!options.containsKey("count")) {
            options.put("count", Integer.toString(limit));
        }

        String query = buildQueryFromMap(options);
        String apiPath = restResource + (query != null ? "?" + query : "");

        JsonObject result = this.performApiCall(REST_LIST, apiPath);
        ArrayList<T> arraylist = new ArrayList<T>();

        if (result != null) {
            // Guard against responses without a "data" array: previously this
            // would throw a NullPointerException instead of returning empty.
            JsonElement data = result.get("data");

            if (data != null && data.isJsonArray()) {
                Gson gson = new Gson();

                for (JsonElement object : data.getAsJsonArray())
                    arraylist.add(gson.fromJson(object, returnedClass()));
            }
        }

        return arraylist;
    }

    /**
     * Perform an API call with an empty body contents, interpret the results
     * and convert them to the correct object type.
     *
     * @param httpMethod the http method to use
     * @param apiMethod the api method to call
     *
     * @return object {@link JsonObject} or null if there was a problem with
     *         the api call.
     * @throws MollieException if there was an error performing the call or if
     *                         the results could not be decoded into a {@link JsonObject}
     *
     * @see #performApiCall(String httpMethod, String apiMethod)
     */
    protected JsonObject performApiCall(String httpMethod, String apiMethod)
        throws MollieException
    {
        return performApiCall(httpMethod, apiMethod, null);
    }

    /**
     * Perform an API call, interpret the results and convert them to the
     * correct object type.
     *
     * @param httpMethod the http method to use
     * @param apiMethod the api method to call
     * @param httpBody the contents to send to the server.
     *
     * @return object {@link JsonObject} or null if there was a problem with
     *         the api call
     * @throws MollieException if there was an error performing the call or if
     *                         the results could not be decoded into a {@link JsonObject}
     *
     * @see #performApiCall(String httpMethod, String apiMethod)
     */
    protected JsonObject performApiCall(String httpMethod, String apiMethod,
                                        String httpBody) throws MollieException
    {
        String result = _api.performHttpCall(httpMethod, apiMethod, httpBody);
        JsonParser parser = new JsonParser();
        JsonElement element = null;
        JsonObject object = null;

        try {
            if ((element = parser.parse(result)) != null) {
                if (element.isJsonObject()) {
                    object = element.getAsJsonObject();
                }
            }
        } catch (com.google.gson.JsonParseException ignored) {
            // Intentionally swallowed: a non-JSON body leaves object == null,
            // which is reported as a decode failure just below.
        }

        if (object == null) {
            throw new MollieException("Unable to decode Mollie response: \""+result+"\"");
        } else {
            if (object.get("error") != null) {
                MollieException exception = null;
                JsonObject error = object.get("error").getAsJsonObject();
                String type = error.get("type").getAsString();
                String message = error.get("message").getAsString();

                exception = new MollieException("Error executing API call (" + type +"): " + message + ".");

                if (error.has("field")) {
                    exception.setField(error.get("field").getAsString());
                }

                throw exception;
            }
        }

        return object;
    }
}
package kembe.sim.runner;

import fj.*;
import fj.control.parallel.Actor;
import fj.control.parallel.Strategy;
import fj.function.Effect1;
import kembe.sim.Timed;
import kembe.util.Actors;
import kembe.util.Order;
import org.joda.time.DateTime;

import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.ConcurrentSkipListSet;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;

/**
 * Runs {@link SchedulerTask}s at simulated or wall-clock times.
 *
 * Two factory implementations are provided:
 * - {@link #realtimeScheduler()} delays each task until its DateTime arrives
 *   (wall clock, via {@link java.util.Timer}).
 * - {@link #instantScheduler()} runs tasks as soon as they are submitted, but
 *   in time order, on a single-threaded ordered actor.
 */
public abstract class Scheduler {

    /**
     * Scheduler that executes each task at its wall-clock time using a
     * {@link java.util.Timer}. Exceptions thrown by a task are printed and
     * otherwise dropped so the shared Timer thread is not killed.
     */
    public static Scheduler realtimeScheduler() {
        return new Scheduler() {
            Timer service = new Timer();

            @Override
            public void scheduleAt(final DateTime time, final SchedulerTask task) {
                service.schedule( new TimerTask() {
                    @Override
                    public void run() {
                        try {
                            task.run( time );
                        } catch (Exception e) {
                            e.printStackTrace();
                            ; // NOTE(review): stray empty statement, harmless
                        }
                    }
                }, time.toDate() );
            }
        };
    }

    /**
     * Scheduler that runs tasks immediately (no wall-clock delay) but in
     * timestamp order, serialized on a single-threaded executor via an
     * ordered actor.
     */
    public static Scheduler instantScheduler() {
        return new Scheduler() {
            // Messages are Timed tasks; the actor drains them in Timed order.
            Actor<Timed<SchedulerTask>> actor = orderedActor( Strategy.<Unit>executorStrategy( Executors.newSingleThreadExecutor() ), Timed.<SchedulerTask>timedOrd(), (Effect1<Timed<SchedulerTask>>) task -> task.value.run( task.time ) );

            @Override
            public void scheduleAt(DateTime time, SchedulerTask t) {
                actor.act( new Timed<>( time, t ) );
            }
        };
    }

    /**
     * Builds an actor that processes messages one at a time in {@code ord}
     * order (ties broken by arrival sequence number, so ordering is stable).
     *
     * Concurrency protocol: messages go into a ConcurrentSkipListSet mailbox;
     * a CAS on {@code suspended} ensures at most one processor is running.
     * After draining, the processor re-checks the mailbox (via work()) to
     * close the race where a message arrives between "mailbox empty" and
     * "suspended set to true".
     *
     * NOTE(review): the outer queue actor is built with seqStrategy() while
     * the processor re-submits itself via the supplied strategy {@code s} —
     * presumably intentional (enqueue inline, process on s), but confirm.
     */
    private static <T> Actor<T> orderedActor(final Strategy<Unit> s, final Ord<T> ord, final Effect1<T> ea) {
        return Actors.stackSafeQueueActor(Strategy.<Unit>seqStrategy(), new Effect1<T>() {
            // Lock to ensure the actor only acts on one message at a time
            AtomicBoolean suspended = new AtomicBoolean(true);
            // Monotonic arrival counter; makes the mailbox ordering total.
            AtomicLong atomicLong = new AtomicLong(0);
            // Priority mailbox: ordered by (ord, arrival number).
            ConcurrentSkipListSet<Numbered<T>> mbox = new ConcurrentSkipListSet<>(Order.toComparator(numberedOrd(ord)));

            // Product so the actor can use its strategy (to act on messages in other threads,
            // to handle exceptions, etc.)
            P1<Unit> processor = new P1<Unit>() {
                @Override
                public Unit _1() {
                    try {
                        // get next item from queue
                        Numbered<T> a = mbox.pollFirst();
                        // if there is one, process it
                        if (a != null) {
                            ea.f(a.value);
                            // try again, in case there are more messages
                            s.par(this);
                        } else {
                            // clear the lock
                            suspended.set(true);
                            // work again, in case someone else queued up a message while we were holding the lock
                            work();
                        }
                    } catch (Exception e) {
                        e.printStackTrace();//Nothing else to do here
                    }
                    return Unit.unit();
                }
            };

            // Effect's body -- queues up a message and tries to unsuspend the actor
            @Override
            public void f(T a) {
                mbox.add(new Numbered<>(atomicLong.incrementAndGet(), a));
                work();
            }

            // If there are pending messages, use the strategy to run the processor
            protected void work() {
                if (!mbox.isEmpty() && suspended.compareAndSet(true, false)) {
                    s.par(processor);
                }
            }
        });
    }

    /**
     * Lifts an Ord on T to an Ord on Numbered&lt;T&gt; by comparing
     * (value, arrival number) pairs — value first, then sequence number.
     */
    private static <T> Ord<Numbered<T>> numberedOrd(Ord<T> ordT) {
        return Ord.p2Ord( ordT, Ord.longOrd ).contramap( new F<Numbered<T>, P2<T, Long>>() {
            @Override
            public P2<T, Long> f(Numbered<T> tNumbered) {
                return P.p( tNumbered.value, tNumbered.number );
            }
        } );
    }

    /** Convenience overload: unpacks a Timed task and delegates to scheduleAt. */
    public void schedule(Timed<SchedulerTask> timedT) {
        scheduleAt( timedT.time, timedT.value );
    }

    /** Adapts this scheduler to an Effect1 for use in effectful pipelines. */
    public Effect1<Timed<SchedulerTask>> toEffect() {
        return this::schedule;
    }

    public abstract void scheduleAt(DateTime time, SchedulerTask t);

    /** Pairs a mailbox entry with its arrival sequence number (for stable ordering). */
    static class Numbered<T> {
        public final long number;
        public final T value;

        Numbered(long number, T value) {
            this.number = number;
            this.value = value;
        }
    }
}
package com.ociweb.iot.grove.oled;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.ociweb.iot.maker.FogCommandChannel;
import com.ociweb.iot.maker.image.FogBitmap;
import com.ociweb.iot.maker.image.FogBitmapLayout;
import com.ociweb.iot.maker.image.FogColorSpace;
import com.ociweb.iot.maker.image.FogPixelScanner;
import com.ociweb.pronghorn.iot.schema.I2CCommandSchema;
import com.ociweb.pronghorn.pipe.DataOutputBlobWriter;

/**
 * Base driver for monochrome ("binary") OLED displays attached over I2C.
 *
 * Each I2C payload starts with a mode byte: {@link #COMMAND_MODE} (0x80) for
 * controller commands, {@link #DATA_MODE} (0x40) for pixel data. Payloads
 * larger than {@link #BATCH_SIZE} are split into batches and sent via
 * tail-recursive helpers. Display-specific behavior (init, clear, text,
 * scrolling, ...) is left to the abstract methods at the bottom.
 */
public abstract class BinaryOLED {
    // NOTE(review): could be private static final per convention; left as-is.
    Logger logger = LoggerFactory.getLogger((BinaryOLED.class));
    protected final FogCommandChannel ch;      // channel used for all I2C traffic
    protected final int[] data_out;            // default buffer for pixel data
    protected final int[] cmd_out;             // default buffer for command bytes
    protected final int i2c_address;           // target device address
    protected static final int BATCH_SIZE = 50; // max payload units sent per I2C write
    public static final int COMMAND_MODE = 0x80; // prefix byte: next byte is a command
    public static final int DATA_MODE = 0x40;    // prefix byte: following bytes are data

    /**
     * @param ch command channel to send I2C traffic on
     * @param data_out default data buffer used by the no-array sendData overloads
     * @param cmd_out default command buffer used by the no-array sendCommands overloads
     * @param i2c_address I2C address of the display
     */
    protected BinaryOLED(FogCommandChannel ch, int[] data_out, int[]cmd_out, int i2c_address){
        this.ch = ch;
        this.data_out = data_out;
        this.cmd_out = cmd_out;
        this.i2c_address = i2c_address;
        ch.ensureI2CWriting(16, BATCH_SIZE);
    }

    /** @return an empty bitmap matching this display's layout (see createBmpLayout). */
    public FogBitmap createEmptyBmp() {
        return new FogBitmap(createBmpLayout());
    }

    /**
     * Sends a "data" identifier byte followed by the user-supplied byte over the i2c.
     * @return true if the command byte and the supplied byte were successfully sent, false otherwise.
     */
    protected boolean sendData(){
        return sendData(0, data_out.length);
    }

    /** Sends the whole supplied array as display data. */
    protected boolean sendData(int[] data){
        return sendData(data, 0, data.length);
    }

    /**
     * If no data is supplied, we are using the default data_out held by this object
     * @param start
     * @param length
     * @return true
     */
    protected boolean sendData(int start, int length){
        return sendData(data_out, start,length);
    }

    /**
     * Send an array of data
     * Implemented by calling {@link #sendData(int[], int, int, int)}, which recursively calls itself
     * exactly 'm' times, where 'm' is the number of batches required to send the data array specified by the start and length.
     * Implemented to use an array of passed-in data instead of defaulting to this.data_out so that one doesn't have
     * to go through the trouble of copying the entire data array if the data array is already constructed
     * @param start
     * @param length
     * @return true if the i2c bus is ready, false otherwise.
     */
    protected boolean sendData(int[] data, int start, int length){
        if (!ch.i2cIsReady()){
            return false;
        }
        //call the helper method to recursively send batches
        return sendData(data, start,BATCH_SIZE, start+length);
    }

    /**
     * Batch helper: writes one DATA_MODE payload, then recurses until
     * finalTargetIndex (exclusive end of the overall range) is reached.
     */
    protected boolean sendData(int [] data, int start, int length, int finalTargetIndex){
        DataOutputBlobWriter<I2CCommandSchema> i2cPayloadWriter = ch.i2cCommandOpen(i2c_address);
        i2cPayloadWriter.write(DATA_MODE);
        int i;
        // NOTE(review): upper bound is (start + length - 1) here, but
        // (start + length) in the sendCommands helper — presumably the
        // DATA_MODE prefix byte counts against the batch size; confirm.
        for (i = start; i < Math.min(start + length - 1, finalTargetIndex); i++){
            i2cPayloadWriter.write(data[i]);
        }
        ch.i2cCommandClose();
        ch.i2cFlushBatch();
        if (i == finalTargetIndex){
            return true;
        }
        return sendData(data, i, BATCH_SIZE, finalTargetIndex); //calls itself recursively until we reach finalTargetIndex
    }

    /**
     * Sends a single command byte (prefixed with COMMAND_MODE).
     * NOTE(review): unlike the batch senders, this does not call
     * i2cFlushBatch() after closing — confirm that is intentional.
     * @return true if the bus was ready and the bytes were queued.
     */
    protected boolean sendCommand(int b){
        if (!ch.i2cIsReady()){
            return false;
        }
        DataOutputBlobWriter<I2CCommandSchema> i2cPayloadWriter = ch.i2cCommandOpen(i2c_address);
        i2cPayloadWriter.write(COMMAND_MODE);
        i2cPayloadWriter.write(b);
        ch.i2cCommandClose();
        return true;
    }

    /**
     * Unlike send data, sendCommands makes the assumption that the call is not sending more than one batch worth of commands.
     * Each command involves two bytes. So if the caller is trying to send a command array of size 5, they are really sending
     * 10 bytes.
     * @param start
     * @param length
     * @return true
     */
    protected boolean sendCommands(int start, int length){
        if (!ch.i2cIsReady()){
            return false;
        }
        /*
        DataOutputBlobWriter<I2CCommandSchema> i2cPayloadWriter = ch.i2cCommandOpen(i2c_address);
        assert(length*2 <= BATCH_SIZE);
        for (int i = start; i < start + length; i++){
            i2cPayloadWriter.write(COMMAND_MODE);
            i2cPayloadWriter.write(cmd_out[i]);
        }
        ch.i2cCommandClose();
        ch.i2cFlushBatch();
        return true;
        */
        return sendCommands(cmd_out,start,length);
    }

    /** Sends a range of the supplied command array, batching as needed. */
    protected boolean sendCommands(int[] cmd, int start, int length){
        if (!ch.i2cIsReady()){
            return false;
        }
        //call the helper method to recursively send batches
        return sendCommands(cmd, start,BATCH_SIZE, start+length);
    }

    /**
     * Batch helper for commands: each command costs two bytes on the wire
     * (COMMAND_MODE prefix + command byte), hence the length/2 adjustment.
     */
    private boolean sendCommands(int [] cmd, int start, int length, int finalTargetIndex){
        DataOutputBlobWriter<I2CCommandSchema> i2cPayloadWriter = ch.i2cCommandOpen(i2c_address);
        length = length / 2; //we need to send two bytes for each command
        int i;
        for (i = start; i < Math.min(start + length, finalTargetIndex); i++){
            i2cPayloadWriter.write(COMMAND_MODE);
            i2cPayloadWriter.write(cmd[i]);
        }
        ch.i2cCommandClose();
        ch.i2cFlushBatch();
        if (i == finalTargetIndex){
            return true;
        }
        return sendCommands(cmd, i, BATCH_SIZE, finalTargetIndex); //calls itself recursively until we reach finalTargetIndex
    }

    // Display-specific operations implemented by concrete drivers.
    protected abstract boolean init();
    public abstract boolean clear();
    public abstract boolean cleanClear();
    public abstract boolean displayOn();
    public abstract boolean displayOff();
    public abstract boolean inverseOn();
    public abstract boolean inverseOff();
    public abstract boolean setContrast(int contrast);
    public abstract boolean setTextRowCol(int row, int col);
    public abstract boolean printCharSequence(CharSequence s);
    public abstract boolean printCharSequenceAt(CharSequence s, int row, int col);
    public abstract boolean activateScroll();
    public abstract boolean deactivateScroll();
    public abstract boolean setUpScroll();
    public abstract boolean display(int[][] raw_image);
    public abstract boolean display(int[][] raw_image, int pixelDepth);
    public abstract boolean setHorizontalMode();
    public abstract boolean setVerticalMode();
    public abstract boolean display(FogPixelScanner scanner);
    public abstract FogBitmapLayout createBmpLayout();
}
package mooklabs.laputamod;

import java.util.Iterator;
import mooklabs.laputamod.blocks.VoluciteBlock;
import mooklabs.laputamod.blocks.VoluciteIngot;
import mooklabs.laputamod.blocks.VoluciteOre;
import mooklabs.laputamod.items.NecklaceString;
import mooklabs.laputamod.items.VoluciteNecklace;
import mooklabs.laputamod.items.VolucitePendant;
import mooklabs.laputamod.items.tools.LArmor;
import mooklabs.laputamod.items.tools.LAxe;
import mooklabs.laputamod.items.tools.LPickaxe;
import mooklabs.laputamod.items.tools.LShovel;
import mooklabs.laputamod.items.tools.LSword;
import mooklabs.laputamod.proxy.CommonProxy;
import mooklabs.mookcore.MMod;
import mooklabs.mookcore.ToxicWorldGenerator;
import mooklabs.nausicaamod.proxy.GuiHandlerNausicaa;
import net.minecraft.block.Block;
import net.minecraft.creativetab.CreativeTabs;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityList;
import net.minecraft.entity.EntityList.EntityEggInfo;
import net.minecraft.entity.EntityLiving;
import net.minecraft.entity.EnumCreatureType;
import net.minecraft.item.Item;
import net.minecraft.item.Item.ToolMaterial;
import net.minecraft.item.ItemArmor.ArmorMaterial;
import net.minecraft.item.ItemStack;
import net.minecraft.world.biome.BiomeGenBase;
import net.minecraftforge.common.util.EnumHelper;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import cpw.mods.fml.common.FMLLog;
import cpw.mods.fml.common.Loader;
import cpw.mods.fml.common.Mod;
import cpw.mods.fml.common.Mod.EventHandler;
import cpw.mods.fml.common.Mod.Instance;
import cpw.mods.fml.common.SidedProxy;
import cpw.mods.fml.common.event.FMLInitializationEvent;
import cpw.mods.fml.common.event.FMLInterModComms;
import cpw.mods.fml.common.event.FMLInterModComms.IMCEvent;
import cpw.mods.fml.common.event.FMLInterModComms.IMCMessage;
import cpw.mods.fml.common.event.FMLPostInitializationEvent;
import cpw.mods.fml.common.event.FMLPreInitializationEvent;
import cpw.mods.fml.common.registry.EntityRegistry;
import cpw.mods.fml.common.registry.GameRegistry;
import cpw.mods.fml.common.registry.LanguageRegistry;

/**
 * Main entry point for LaputaMod. Declares the mod's blocks, items, tools,
 * armor and creative tab as static fields (so they exist before Forge's
 * pre-init phase runs), then registers names, recipes and smelting in the
 * standard Forge lifecycle hooks (preInit / load / postInit).
 * <p>
 * Not dependent on nausicaamod.
 * (Original note: spacegeek did work on it — AND THEN SHE FOUND TEA.)
 *
 * @author mooklabs(emiAndVic)
 */
// The @Mod annotation tells Forge this class is a mod entry point.
@Mod(modid = LapMain.modid, version = LapMain.VERSION, name = LapMain.name)
public class LapMain extends MMod {

    // Mod identity constants, referenced by the @Mod annotation above.
    public static final String modid = "laputamod";
    public static final String VERSION = "0.0.01";
    public static final String name = "LaputaMod";
    // Asset folder prefix used when building texture names ("laputamod:textureName").
    public final static String itemfold = "laputamod";

    // Says where the client and server 'proxy' code is loaded from;
    // Forge injects the side-appropriate instance into this field.
    @SidedProxy(clientSide = "mooklabs.laputamod.proxy.ClientProxy", serverSide = "mooklabs.laputamod.proxy.CommonProxy")
    public static CommonProxy proxy;

    // The instance of the mod that Forge uses (the object, not the static class).
    // NOTE(review): @Instance normally lets Forge inject the instance it constructs;
    // also assigning `new LapMain()` here means two instances may exist — confirm
    // which one Forge actually dispatches events to before relying on `instance`.
    @Instance("laputamod")
    public static LapMain instance = new LapMain();

    // GUI ids later

    // {{ Creative tab madness!
    // The tab icon is resolved lazily via getTabIconItem(), so referencing
    // `volucite` (initialized further down) is safe here.
    public static CreativeTabs tabLap = new CreativeTabs("tabNLaputa") {
        @Override
        public Item getTabIconItem() {
            return LapMain.volucite;
        }
    };

    // {{ Enums: tool and armor materials and their stats.
    // For reference: DIAMOND is (3, 1561, 8.0F, 3.0F, 10); IRON is (2, 250, 6.0F, 2.0F, 14)
    // (harvestLevel, maxUses, efficiency, damage, enchantability)
    public static ToolMaterial weakVolucite = EnumHelper.addToolMaterial("crystal", 3, 1600, 40F, 7F, 50);
    // For reference: DIAMOND is (33, {3, 8, 6, 3}, 10); IRON is {2, 6, 5, 2}
    // (String name, int durability, int[] reductionAmounts, int enchantability)
    public static ArmorMaterial weakVoluciteArmor = EnumHelper.addArmorMaterial("weakVolucite", 50, new int[] {6,10,8,6}, 50); //very enchantable

    //{{ Tools
    public static final Item voluciteSword = new LSword(weakVolucite).setCreativeTab(tabLap).setUnlocalizedName("voluciteSword").setTextureName(LapMain.itemfold + ":voluciteSword");
    public static final Item volucitePickaxe = new LPickaxe(weakVolucite).setCreativeTab(tabLap).setUnlocalizedName("volucitePickaxe").setTextureName(LapMain.itemfold + ":volucitePickaxe");
    public static final Item voluciteAxe = new LAxe(weakVolucite).setCreativeTab(tabLap).setUnlocalizedName("voluciteAxe").setTextureName(LapMain.itemfold + ":voluciteAxe");
    public static final Item voluciteShovel = new LShovel(weakVolucite).setCreativeTab(tabLap).setUnlocalizedName("voluciteShovel").setTextureName(LapMain.itemfold + ":voluciteShovel");

    // Armor — second LArmor argument is a render index, third is the armor slot
    // (0 = helmet .. 3 = boots), matching vanilla's ItemArmor convention.
    public static final Item unfiredCeramicHelmet = new LArmor(weakVoluciteArmor, 1, 0).setCreativeTab(tabLap);
    public static final Item unfiredCeramicChestplate = new LArmor(weakVoluciteArmor, 1, 1).setCreativeTab(tabLap);
    public static final Item unfiredCeramicLegs = new LArmor(weakVoluciteArmor, 1, 2).setCreativeTab(tabLap);
    public static final Item unfiredCeramicBoots = new LArmor(weakVoluciteArmor, 1, 3).setCreativeTab(tabLap);

    // {{ blocks and items
    // Since the constructor isn't public in the base classes, these dedicated
    // subclasses (VoluciteOre etc.) supply the construction.
    public final static Block voluciteBlock = new VoluciteOre().setCreativeTab(tabLap);
    public final static Block solidVoluciteBlock = new VoluciteBlock().setCreativeTab(tabLap);
    public final static Item volucite = new VoluciteIngot().setCreativeTab(tabLap).setUnlocalizedName("voluciteCrystal").setTextureName(LapMain.itemfold + ":voluciteCrystal");
    public final static Item volucitePendant = new VolucitePendant().setCreativeTab(tabLap).setUnlocalizedName("volucitePendant").setTextureName(LapMain.itemfold + ":volucitePendant");
    public final static Item voluciteNecklace = new VoluciteNecklace().setCreativeTab(tabLap).setUnlocalizedName("voluciteNecklace").setTextureName(LapMain.itemfold + ":voluciteNecklace");
    public final static Item string = new NecklaceString().setCreativeTab(tabLap).setUnlocalizedName("necklaceString").setTextureName(LapMain.itemfold + ":necklaceString");
    //public final static Item clasp = new hdfsa().setCreativeTab(tabLap);

    // worldgen
    // This just changes some stuff; it is applied after chunk gen and is the
    // mod's world-generation hook.
    public static ToxicWorldGenerator modifyWorldGen = new ToxicWorldGenerator();

    public static final Logger logger = LogManager.getLogger("LaputaMod");

    /**
     * Forge pre-initialization: registers renderers, block/item names, and all
     * crafting/smelting recipes for the mod.
     *
     * @param event the Forge pre-init event (unused directly)
     */
    @EventHandler
    public void preInit(FMLPreInitializationEvent event) {
        init(this.name); // MMod base-class init — presumably sets up shared state; see mookcore
        proxy.registerRenderers(); // does more than just renders — quite important
        itemBlockNameReg();

        //{{ EVERYTHING
        // {{ ///////////////////////////// ITEMSTACKS ////////////////////////////////////
        // ItemStacks are what you have in your inventory slots; up to 64 of an
        // item, with metadata. These are the recipe ingredients/outputs below.
        ItemStack voluciteStack = new ItemStack(volucite);
        ItemStack voluciteBlockStack = new ItemStack(voluciteBlock);
        ItemStack volucitePendantStack = new ItemStack(volucitePendant);
        ItemStack voluciteNecklaceStack = new ItemStack(voluciteNecklace);
        ItemStack necklaceStringStack = new ItemStack(string);

        // {{ ///////////////////////////// RECIPES ////////////////////////////////////
        /** shapeless recipes */
        // GameRegistry.addShapelessRecipe([output],[ingredients],[more ingredients etc]);
        GameRegistry.addShapelessRecipe(voluciteNecklaceStack, volucitePendantStack, necklaceStringStack);

        /** shaped recipes */
        // Example — a diamond pick would be:
        // GameRegistry.addRecipe(diamondPickStack, "ddd", " s ", " s ", 'd', diamondStack, 's', stickStack);
        // Pendant: a hollow diamond of volucite crystals.
        GameRegistry.addRecipe(volucitePendantStack, " x ", "x x", " x ", 'x', volucite);

        // SMELTING
        // GameRegistry.addSmelting(inputStack, outputStack, float valueOfExpFromSmelting);
        GameRegistry.addSmelting(voluciteBlockStack, voluciteStack, 5);
    }

    // GUI handler instance; NOTE(review): borrowed from nausicaamod — confirm
    // this cross-mod dependency is intentional given the "not dependent on
    // nausicaamod" class comment.
    GuiHandlerNausicaa guiHandler = new GuiHandlerNausicaa();

    /**
     * Forge initialization: logs the banner and prepares entity spawning data
     * (entity registration itself is currently commented out).
     *
     * @param event the Forge init event (unused directly)
     */
    @EventHandler
    public void load(FMLInitializationEvent event) {
        logger.info("\n************************\nWelcome to LaputaMod!!!\nBlow up the World!...\n..if you can!\n***************************");
        // {{ entities
        BiomeGenBase[] biomesToSpawnIn = { BiomeGenBase.forest, BiomeGenBase.jungle, BiomeGenBase.desert, BiomeGenBase.taiga };
        //registerEntity(Ohmu.class, "Ohmu", 0xeaeaea, 0x111111);
        //addSpawn(Ohmu.class, 1, 1, 1, biomesToSpawnIn);
    }

    // /////////////Mobs!////////////
    /**
     * Registers an entity class globally and gives it a spawn egg.
     *
     * @param entityClass an Entity subclass, e.g. {@code Ohmu.class}
     * @param entityName  name of the entity
     * @param bkEggColor  background color of the spawn egg
     * @param fgEggColor  foreground (spots) color of the spawn egg
     */
    @Override
    public void registerEntity(Class<? extends Entity> entityClass, String entityName, int bkEggColor, int fgEggColor) {
        int id = EntityRegistry.findGlobalUniqueEntityId();
        EntityRegistry.registerGlobalEntityID(entityClass, entityName, id);
        EntityList.entityEggs.put(Integer.valueOf(id), new EntityEggInfo(id, bkEggColor, fgEggColor));
    }

    /**
     * Adds natural spawning for a living entity in the given biomes.
     * A non-positive {@code spawnProb} disables spawning entirely.
     *
     * @param entityClass the entity to spawn
     * @param spawnProb   spawn weight; skipped if &lt;= 0
     * @param min         minimum group size
     * @param max         maximum group size
     * @param biomes      biomes the entity may spawn in
     */
    @Override
    public void addSpawn(Class<? extends EntityLiving> entityClass, int spawnProb, int min, int max, BiomeGenBase[] biomes) {
        if (spawnProb > 0) {
            EntityRegistry.addSpawn(entityClass, spawnProb, min, max, EnumCreatureType.creature, biomes);
        }
    }

    /**
     * Forge post-initialization: checks whether the optional NausicaaModTech
     * addon is present and logs the outcome. Best-effort — failures are only
     * logged, never rethrown.
     *
     * @param event the Forge post-init event (unused directly)
     */
    @EventHandler
    public void postInit(FMLPostInitializationEvent event) {
        try {
            if (Loader.isModLoaded("nausicaamodtech"))
                FMLLog.info("[NausicaaMod]: Loaded NausicaaModTech addon(Good Job!)");
            else
                FMLLog.warning("[NausicaaMod]: DID NOT LOAD NausicaaModTech addon\nYou did something wrong, like not download and put it in the right folder :P");
        } catch (Exception e) {
            FMLLog.severe("[NausicaaMod]: Something went wrong when checking for a mod being loaded");
        }
    }

    /**
     * Registers every block and item of the mod with Forge's game registry and
     * gives each a display name, then sends a demo inter-mod message.
     */
    private void itemBlockNameReg() {
        // {{ block registration
        registerBlock(voluciteBlock, "Infused Stone");
        registerBlock(solidVoluciteBlock, "Solid Volucite Block"); //very powerful!
        //more blocks

        // {{ items
        registerItem(string, "String");
        registerItem(volucitePendant, "Volucite Pendant");
        registerItem(voluciteNecklace, "Volucite Necklace");
        registerItem(volucite, "Volucite Crystal");
        //more items

        // {{ ////////////// tools ////////////////////////////////////
        registerItem(voluciteSword, "Volucite Sword");
        registerItem(volucitePickaxe, "Volucite Pickaxe");
        registerItem(voluciteAxe, "Volucite Axe");
        registerItem(voluciteShovel, "Volucite Shovel");

        // {{ //////////////// armor ////////////////////////
        registerItem(unfiredCeramicHelmet, "Volucite Helmet");
        registerItem(unfiredCeramicChestplate, "Volucite Chestplate");
        registerItem(unfiredCeramicLegs, "Volucite Greaves");
        registerItem(unfiredCeramicBoots, "Volucite Boots");

        logger.warn("Don't Let tinkers Take over! ");
        // Demo inter-mod message, received by messageRecieve() below.
        FMLInterModComms.sendMessage(LapMain.modid, "boo", "Release the Omhu!");
        logger.warn("*menacingly* I'll use the Crystal.");
    }

    // /////////// block reg again ////////////
    /**
     * Tells Forge (and MC) that the block exists and sets its display name.
     * Registry key is the block's unlocalized name.
     *
     * @param block the block to register
     * @param name  human-readable display name
     */
    public static void registerBlock(Block block, String name) {
        GameRegistry.registerBlock(block, block.getUnlocalizedName());
        LanguageRegistry.addName(block, name);
    }

    /**
     * Registers an item with Forge and sets its display name.
     * NOTE(review): the registry key concatenates the mod id with the raw
     * unlocalized name (which includes the "item." prefix), yielding keys like
     * "laputamoditem.voluciteSword". Changing this would break existing world
     * saves, so it is documented rather than fixed.
     *
     * @param item the item to register
     * @param name human-readable display name
     */
    public static void registerItem(Item item, String name) {
        GameRegistry.registerItem(item, modid + item.getUnlocalizedName());
        LanguageRegistry.addName(item, name);
    }

    /**
     * Receives inter-mod messages and logs each sender/value pair.
     * This is more for future reference than anything else.
     *
     * @param event the IMC event carrying queued messages
     */
    @Override
    @EventHandler
    public void messageRecieve(IMCEvent event) {
        Iterator<IMCMessage> itr = event.getMessages().iterator();
        while (itr.hasNext()) {
            IMCMessage element = itr.next();
            logger.info("Sender: " + element.getSender() + "Value: " + element.getStringValue() + " ");
        }
        System.out.println();
    }
}
package com.opentable.logging; import java.io.IOException; import java.util.Map.Entry; import java.util.concurrent.atomic.AtomicLong; import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.SerializationFeature; import com.fasterxml.jackson.databind.node.ObjectNode; import com.fasterxml.jackson.databind.util.TokenBuffer; import ch.qos.logback.classic.spi.ILoggingEvent; import ch.qos.logback.core.encoder.EncoderBase; /** * This class encodes log fields as a JSON object, and writes each as a separate line to the outputStream. * <p> * You can use this encoder by adding the following phrase to any appender: * <pre> * &lt;encoder class="com.opentable.logging.JsonLogEncoder"&gt; * </pre> */ public class JsonLogEncoder extends EncoderBase<ILoggingEvent> { private static final AtomicLong LOG_SEQUENCE_NUMBER = new AtomicLong(0); private final ObjectMapper mapper; public JsonLogEncoder() { // TODO: This sucks - - won't get the mapper customizations. Find a way to inject this. this.mapper = new ObjectMapper().disable(SerializationFeature.WRITE_NULL_MAP_VALUES).setSerializationInclusion(Include.NON_NULL); } @Override public void doEncode(ILoggingEvent event) throws IOException { final ObjectNode logLine; if (event instanceof HttpLogFields) { final TokenBuffer buf = new TokenBuffer(mapper, false); mapper.writerWithType(HttpLogFields.class).writeValue(buf, event); logLine = mapper.readTree(buf.asParser()); } else { logLine = mapper.valueToTree(new ApplicationLogEvent(event)); } for (Entry<String, String> e : event.getMDCPropertyMap().entrySet()) { if (!logLine.has(e.getKey())) { logLine.put(e.getKey(), e.getValue()); } } logLine.put("sequencenumber", LOG_SEQUENCE_NUMBER.incrementAndGet()); synchronized (outputStream) { mapper.writeValue(outputStream, logLine); outputStream.write('\n'); } } @Override public void close() throws IOException { // Nothing to do here } }
package com.pump.swing.io;

import java.awt.Component;
import java.awt.Dimension;
import java.awt.Point;
import java.awt.Rectangle;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.awt.image.BufferedImage;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import javax.swing.ImageIcon;
import javax.swing.JLabel;
import javax.swing.JList;
import javax.swing.JToolTip;
import javax.swing.ListCellRenderer;
import javax.swing.ListModel;
import javax.swing.ListSelectionModel;
import javax.swing.SwingUtilities;
import javax.swing.Timer;

import com.pump.io.location.IOLocation;
import com.pump.io.location.IOLocationFilter;
import com.pump.plaf.ThumbnailLabelUI;
import com.pump.swing.NavigationListener;

/**
 * A {@link JList} of {@link IOLocation}s rendered as fixed-size thumbnail
 * tiles. Supports type-to-select, Enter/double-click "commit" notifications to
 * {@link NavigationListener}s, per-location tooltips, and asynchronous
 * thumbnail updates via a {@code GraphicCache}.
 */
public class IOLocationTileList extends JList<IOLocation> {
    private static final long serialVersionUID = 1L;

    // Milliseconds of typing inactivity before the type-to-select buffer is cleared.
    protected int TYPING_THRESHOLD = 750;

    /**
     * Accumulates typed characters into a search prefix and selects the first
     * matching location. The buffer is purged once the user pauses longer than
     * TYPING_THRESHOLD.
     */
    protected KeyListener typingListener = new KeyAdapter() {
        StringBuffer sb = new StringBuffer();
        long lastType = -1;

        // Polls every 50 ms while active; clears the buffer (and stops itself)
        // once the pause since the last keystroke exceeds the threshold.
        Timer purgeText = new Timer(50, new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                if (System.currentTimeMillis() - lastType > TYPING_THRESHOLD) {
                    sb.delete(0, sb.length());
                    purgeText.stop();
                }
            }
        });

        @Override
        public void keyTyped(KeyEvent e) {
            char c = e.getKeyChar();
            if (Character.isDefined(c)) {
                sb.append(c);
                stringTyped(sb.toString());
                lastType = System.currentTimeMillis();
                purgeText.start();
                e.consume();
            }
        }
    };

    /**
     * Fires a KEY-type selection event to NavigationListeners when the user
     * presses Enter (or the dedicated Accept key) on the current selection.
     */
    protected KeyListener commitKeyListener = new KeyAdapter() {
        @Override
        public void keyPressed(KeyEvent e) {
            if (e.getKeyCode() == KeyEvent.VK_ENTER
                    || e.getKeyCode() == KeyEvent.VK_ACCEPT) {
                fireIOLocationListListeners(
                        NavigationListener.ListSelectionType.KEY,
                        getSelectedLocations());
                e.consume();
            }
        }
    };

    /**
     * Selects the first location whose name starts with the typed prefix
     * (case-insensitive). Scans the model in order; no-op if nothing matches.
     *
     * @param s the accumulated type-to-select prefix
     */
    protected void stringTyped(String s) {
        s = s.toLowerCase();
        ListModel m = getModel();
        int index = 0;
        // NOTE(review): synchronizing on the model presumably guards against
        // concurrent model mutation by loader threads — confirm the model is
        // actually locked elsewhere under the same monitor.
        synchronized (m) {
            while (index < m.getSize()) {
                IOLocation loc = (IOLocation) m.getElementAt(index);
                if (loc.getName().toLowerCase().startsWith(s)) {
                    setSelectedIndex(index);
                    return;
                }
                index++;
            }
        }
    }

    /** Repaints the tile of one location on the EDT; created by graphicListener. */
    private class RepaintLocationRunnable implements Runnable {
        IOLocation loc;
        boolean isThumbnail;

        RepaintLocationRunnable(IOLocation l, boolean t) {
            loc = l;
            isThumbnail = t;
        }

        public void run() {
            repaint(loc, isThumbnail);
        }
    }

    /**
     * Repaints the cell currently displaying {@code loc}, if it is in the model.
     * NOTE(review): the {@code thumbnail} parameter is unused here — the whole
     * cell is repainted either way.
     *
     * @param loc       the location whose tile should be repainted
     * @param thumbnail whether the change was a thumbnail (vs. icon) update (unused)
     */
    protected void repaint(IOLocation loc, boolean thumbnail) {
        int size = getModel().getSize();
        for (int index = 0; index < size; index++) {
            if (getModel().getElementAt(index) == loc) {
                Rectangle bounds = getUI().getCellBounds(this, index, index);
                repaint(bounds);
                return;
            }
        }
    }

    // TODO: can this logic (copied and pasted from LocationBrowserUI be neatly
    // wrapped in one interfaced/abstract model?
    /**
     * Listens to the renderer's GraphicCache: when a thumbnail or icon finishes
     * loading for a location, schedules a repaint of that location's tile on
     * the EDT (cache callbacks may arrive on worker threads).
     */
    private PropertyChangeListener graphicListener = new PropertyChangeListener() {
        public void propertyChange(PropertyChangeEvent evt) {
            if (evt.getPropertyName().equals(GraphicCache.THUMBNAIL_PROPERTY)) {
                IOLocation loc = (IOLocation) evt.getSource();
                SwingUtilities.invokeLater(new RepaintLocationRunnable(loc, true));
            } else if (evt.getPropertyName().equals(GraphicCache.ICON_PROPERTY)) {
                IOLocation loc = (IOLocation) evt.getSource();
                SwingUtilities.invokeLater(new RepaintLocationRunnable(loc, false));
            }
        }
    };

    // Optional filter used by renderers to gray out unsupported locations; see setFilter().
    IOLocationFilter filter = null;

    // NavigationListeners notified of click/key commits; see fireIOLocationListListeners().
    List<NavigationListener<IOLocation>> listeners = new ArrayList<NavigationListener<IOLocation>>();

    /**
     * Translates single/double mouse clicks into SINGLE_CLICK/DOUBLE_CLICK
     * notifications for NavigationListeners.
     */
    protected MouseListener commitMouseListener = new MouseAdapter() {
        @Override
        public void mouseClicked(MouseEvent e) {
            /*
             * Give other listeners a chance to react to this mouse event by
             * making this an invoke-later runnable:
             */
            class ClickRunnable implements Runnable {
                MouseEvent mouseEvent;

                ClickRunnable(MouseEvent e) {
                    mouseEvent = e;
                }

                public void run() {
                    // NOTE(review): setSelectedIndex(-1) appears intended to
                    // clear any stale selection before reporting; with the
                    // default selection model a -1 index is ignored — confirm
                    // the intended effect against actual click behavior.
                    setSelectedIndex(-1);
                    if (mouseEvent.getClickCount() == 1) {
                        fireIOLocationListListeners(
                                NavigationListener.ListSelectionType.SINGLE_CLICK,
                                getSelectedLocations());
                    } else if (mouseEvent.getClickCount() == 2) {
                        fireIOLocationListListeners(
                                NavigationListener.ListSelectionType.DOUBLE_CLICK,
                                getSelectedLocations());
                    }
                }
            }
            SwingUtilities.invokeLater(new ClickRunnable(e));
        }
    };

    /** Creates an empty tile list. */
    public IOLocationTileList() {
        super();
        initialize();
    }

    /** Creates a tile list backed by the given model. */
    public IOLocationTileList(ListModel<IOLocation> dataModel) {
        super(dataModel);
        initialize();
    }

    /** Creates a tile list over a fixed array of locations. */
    public IOLocationTileList(IOLocation[] listData) {
        super(listData);
        initialize();
    }

    // Shared constructor logic: renderer, fixed 100x100 tiles, wrapping layout,
    // and the commit/typing listeners.
    private void initialize() {
        JLabel label = new JLabel();
        label.setUI(new ThumbnailLabelUI());
        setCellRenderer(new BasicTileCellRenderer(new GraphicCache(), label));
        setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
        setFixedCellWidth(100);
        setFixedCellHeight(100);
        setVisibleRowCount(-1);
        setLayoutOrientation(JList.HORIZONTAL_WRAP);
        addKeyListener(commitKeyListener);
        addKeyListener(typingListener);
        addMouseListener(commitMouseListener);
    }

    /** Returns the filter used to show unsupported locations as disabled, or null. */
    public IOLocationFilter getFilter() {
        return filter;
    }

    /**
     * This filter may be used by renderers to show unsupported locations as
     * disabled. (For example, see the BasicTileCellRenderer.)
     * <p>
     * Note this does not actually filter elements out of this list, for that
     * you need to call: <code>IOLocationFilter.filter(srcList)</code>
     *
     * @param filter the new filter (may be null); triggers a repaint on change
     */
    public void setFilter(IOLocationFilter filter) {
        if (this.filter == filter)
            return;
        this.filter = filter;
        repaint();
    }

    /** Registers a listener for click/key commits; duplicates are ignored. */
    public void addIOLocationListListener(
            NavigationListener<IOLocation> listener) {
        if (listeners.contains(listener))
            return;
        listeners.add(listener);
    }

    /** Removes a previously registered commit listener. */
    public void removeIOLocationListListener(
            NavigationListener<IOLocation> listener) {
        listeners.remove(listener);
    }

    /**
     * Notifies listeners in order; stops at the first listener that returns
     * true (i.e. consumes the event). Each listener gets its own defensive
     * copy of the locations array, and listener exceptions are logged but do
     * not interrupt the chain.
     *
     * @param type      how the selection was committed (click, double-click, key)
     * @param locations the selected locations at commit time
     */
    protected void fireIOLocationListListeners(
            NavigationListener.ListSelectionType type, IOLocation... locations) {
        for (NavigationListener<IOLocation> listener : listeners) {
            try {
                if (listener.elementsSelected(type,
                        Arrays.copyOf(locations, locations.length)))
                    return;
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Installs a renderer, moving the graphicListener subscription from the
     * old renderer's GraphicCache to the new one (when either is a
     * BasicTileCellRenderer) so thumbnail updates keep triggering repaints.
     */
    @Override
    public void setCellRenderer(ListCellRenderer cellRenderer) {
        ListCellRenderer oldRenderer = this.getCellRenderer();
        if (oldRenderer instanceof BasicTileCellRenderer) {
            BasicTileCellRenderer b = (BasicTileCellRenderer) oldRenderer;
            b.graphicCache.removePropertyChangeListener(graphicListener);
        }
        super.setCellRenderer(cellRenderer);
        if (cellRenderer instanceof BasicTileCellRenderer) {
            BasicTileCellRenderer b = (BasicTileCellRenderer) cellRenderer;
            b.graphicCache.addPropertyChangeListener(graphicListener);
        }
    }

    /** Returns the current selection as a typed array (never null). */
    public IOLocation[] getSelectedLocations() {
        List<IOLocation> obj = getSelectedValuesList();
        IOLocation[] selection = new IOLocation[obj.size()];
        for (int a = 0; a < obj.size(); a++) {
            selection[a] = (IOLocation) obj.get(a);
        }
        return selection;
    }

    /**
     * Default tile renderer: shows the location's name plus its cached
     * thumbnail (or a generic folder/file image while the real thumbnail
     * loads), and disables tiles the list's filter rejects.
     */
    public static class BasicTileCellRenderer implements
            ListCellRenderer<IOLocation> {
        protected GraphicCache graphicCache;
        protected IOLocationFilter filter;
        protected JLabel thumbnail;

        /**
         * @param graphicCache source of thumbnails; a fresh cache is created if null
         * @param thumbnail    the reusable label painted for every cell
         */
        public BasicTileCellRenderer(GraphicCache graphicCache, JLabel thumbnail) {
            if (graphicCache == null)
                graphicCache = new GraphicCache();
            this.graphicCache = graphicCache;
            this.thumbnail = thumbnail;
        }

        public GraphicCache getGraphicCache() {
            return graphicCache;
        }

        public JLabel getThumbnail() {
            return thumbnail;
        }

        public Component getListCellRendererComponent(JList list, IOLocation l,
                int index, boolean isSelected, boolean cellHasFocus) {
            String text = "";
            BufferedImage image;
            text = l.getName();
            // requestThumbnail may return null while the thumbnail is still
            // loading; the cache fires a property change when it's ready.
            image = getGraphicCache().requestThumbnail(l);
            if (image == null) {
                if (l.isDirectory()) {
                    image = LocationPane.FOLDER_THUMBNAIL;
                } else {
                    image = LocationPane.FILE_THUMBNAIL;
                }
            }
            // Gray out locations the owning list's filter rejects.
            IOLocationFilter f = null;
            if (list instanceof IOLocationTileList) {
                IOLocationTileList t = (IOLocationTileList) list;
                f = t.getFilter();
            }
            boolean enabled = f == null ? true : f.filter(l) != null;
            JLabel label = getThumbnail();
            label.setEnabled(enabled);
            label.setText(text);
            label.setIcon(new ImageIcon(image));
            // "selected" client property is read by ThumbnailLabelUI to paint selection.
            label.putClientProperty("selected", new Boolean(isSelected));
            format(l, label);
            return label;
        }

        /**
         * This lets subclasses format this label. The default implementation
         * does nothing, but subclasses can override this to meddle with the
         * text/icon/etc before the renderer returns this label.
         *
         * @param loc   the location being rendered
         * @param label the label about to be returned by the renderer
         */
        protected void format(IOLocation loc, JLabel label) {
        }
    }

    /** Tooltip text is the name of the location under the mouse, if any. */
    @Override
    public String getToolTipText(MouseEvent event) {
        if (event != null) {
            Point p = event.getPoint();
            int index = locationToIndex(p);
            if (index != -1) {
                IOLocation loc = (IOLocation) getModel().getElementAt(index);
                return loc.getName();
            }
        }
        return super.getToolTipText(event);
    }

    /**
     * Centers the tooltip horizontally over the hovered cell, vertically
     * aligned with the label's text when the ThumbnailLabelUI can report its
     * text rectangle (falls back to the cell's vertical midpoint).
     */
    @Override
    public Point getToolTipLocation(MouseEvent event) {
        if (event != null) {
            Point p = event.getPoint();
            int index = locationToIndex(p);
            ListCellRenderer r = getCellRenderer();
            Rectangle cellBounds;
            if (index != -1 && (r instanceof BasicTileCellRenderer)
                    && (cellBounds = getCellBounds(index, index)) != null
                    && cellBounds.contains(p.x, p.y)) {
                // Build a throwaway tooltip just to measure its preferred size.
                String text = getToolTipText(event);
                JToolTip tip = new JToolTip();
                tip.setTipText(text);
                Dimension tipSize = tip.getPreferredSize();

                BasicTileCellRenderer btcr = (BasicTileCellRenderer) r;
                int yOffset = cellBounds.height / 2;
                if (btcr.thumbnail.getUI() instanceof ThumbnailLabelUI) {
                    ThumbnailLabelUI ui = (ThumbnailLabelUI) btcr.thumbnail
                            .getUI();
                    yOffset = ui.getTextRect().y;
                }
                return new Point(cellBounds.x + cellBounds.width / 2
                        - tipSize.width / 2, cellBounds.y + yOffset);
            }
        }
        return super.getToolTipLocation(event);
    }

    // Cached grid dimensions so updateVisibleRowCount only resizes on change.
    private int lastRows = -1;
    private int lastColumns = -1;

    /**
     * This calculates the number of required rows based on a width and the
     * fixed cell width. Then this calls <code>setVisibleRowCount(rows)</code>
     * <p>
     * NOTE(review): the implementation actually sets the preferred size and
     * calls setVisibleRowCount(-1) — confirm the javadoc or the code is the
     * intended behavior.
     *
     * @param width the available width, in pixels
     */
    public void updateVisibleRowCount(int width) {
        int size = getModel().getSize();
        int columns = Math.max(1, width / getFixedCellWidth());
        int rows = (size - 1) / columns + 1;
        if (lastRows != rows || lastColumns != columns) {
            setPreferredSize(new Dimension(columns * getFixedCellWidth(), rows
                    * getFixedCellHeight()));
            setVisibleRowCount(-1);
        }
        lastRows = rows;
        lastColumns = columns;
    }
}
package nuclibook.models; import com.j256.ormlite.field.DatabaseField; import com.j256.ormlite.table.DatabaseTable; import nuclibook.entity_utils.ActionLogger; import nuclibook.server.Renderable; import org.joda.time.DateTime; import java.util.HashMap;

/**
 * Model to represent an entry in the action log.
 */
@DatabaseTable(tableName = "action_log")
public class ActionLog implements Renderable {

    @DatabaseField(generatedId = true)
    private Integer id;

    // Stored as epoch milliseconds so ORMLite can persist it without a custom type;
    // converted to/from joda DateTime by the accessors.
    @DatabaseField(canBeNull = false)
    private long when;

    @DatabaseField(foreign = true, foreignAutoRefresh = true, columnName = "staff_id")
    private Staff staff;

    @DatabaseField
    private Integer action;

    // Nullable in the schema, so every read must tolerate null.
    @DatabaseField(canBeNull = true, columnName = "associated_id")
    private Integer associatedId;

    @DatabaseField
    private String note;

    /**
     * Initialise action log with no fields (required by ORMLite).
     */
    public ActionLog() {
    }

    /**
     * Initialise action log.
     *
     * @param staff        Staff who performed the action.
     * @param when         Date the action was performed.
     * @param action       The action (constant from entity_utils.ActionLogger).
     * @param associatedId The ID of the object that the action was performed on.
     */
    public ActionLog(Staff staff, DateTime when, Integer action, Integer associatedId) {
        this.staff = staff;
        this.action = action;
        this.associatedId = associatedId;
        setWhen(when);
    }

    /**
     * Initialise action log with a free-text note.
     *
     * @param staff        Staff who performed the action.
     * @param when         Date the action was performed.
     * @param action       The action (constant from entity_utils.ActionLogger).
     * @param associatedId The ID of the object that the action was performed on.
     * @param note         Free-text note associated with the action.
     */
    public ActionLog(Staff staff, DateTime when, Integer action, Integer associatedId, String note) {
        this.staff = staff;
        this.action = action;
        this.associatedId = associatedId;
        this.note = note;
        setWhen(when);
    }

    /**
     * Get the ID of the action.
     *
     * @return The ID of the action.
     */
    public Integer getId() {
        return id;
    }

    /**
     * Set the ID of the action.
     *
     * @param id The ID of the action.
     */
    public void setId(Integer id) {
        this.id = id;
    }

    /**
     * Get the date the action was performed.
     *
     * @return The date the action was performed.
     */
    public DateTime getWhen() {
        return new DateTime(when);
    }

    /**
     * Set the date the action was performed.
     *
     * @param when The date the action was performed.
     */
    public void setWhen(DateTime when) {
        this.when = when.getMillis();
    }

    /**
     * Get the member of staff who performed the action.
     *
     * @return The member of staff who performed the action.
     */
    public Staff getStaff() {
        return staff;
    }

    /**
     * Set the member of staff who performed the action.
     *
     * @param staff The member of staff who performed the action.
     */
    public void setStaff(Staff staff) {
        this.staff = staff;
    }

    /**
     * Get the action that was performed.
     *
     * @return The action that was performed.
     */
    public Integer getAction() {
        return action;
    }

    /**
     * Set the action that was performed.
     *
     * @param action The action that was performed.
     */
    public void setAction(Integer action) {
        this.action = action;
    }

    /**
     * Get the ID of the object that the action was performed on.
     *
     * @return The ID of the object that the action was performed on.
     */
    public Integer getAssociatedId() {
        return associatedId;
    }

    /**
     * Set the ID of the object that the action was performed on.
     *
     * @param associatedId The ID of the object that the action was performed on.
     */
    public void setAssociatedId(Integer associatedId) {
        this.associatedId = associatedId;
    }

    /**
     * Get the note associated with the action.
     *
     * @return The note associated with the action.
     */
    public String getNote() {
        return note;
    }

    /**
     * Set the note associated with the action.
     *
     * @param note The note associated with the action.
     */
    public void setNote(String note) {
        this.note = note;
    }

    /**
     * Render this entry as a key/value map for display.
     * <p>
     * Null-safe: {@code id} (unsaved entities) and {@code associatedId}
     * (nullable column) are rendered as "N/A" instead of throwing a
     * NullPointerException — the previous unboxing comparison
     * {@code getAssociatedId() == 0} NPE'd whenever the column was null.
     *
     * @return map of field names to display strings.
     */
    @Override
    public HashMap<String, String> getHashMap() {
        return new HashMap<String, String>() {{
            put("id", getId() == null ? "N/A" : getId().toString());
            put("staff", (getStaff()) == null ? "N/A" : getStaff().getName());
            put("when", getWhen().toString("YYYY-MM-dd HH:mm:ss"));
            Object desc = ActionLogger.actionDescription.get(getAction());
            put("action", desc == null ? "Unknown" : desc.toString());
            Integer assoc = getAssociatedId();
            // Treat both null and the legacy 0 sentinel as "no associated object".
            put("associated-id", (assoc == null || assoc == 0) ? "N/A" : assoc.toString());
            put("notes", getNote());
        }};
    }
}
package com.suse.salt.netapi.calls; import static com.suse.salt.netapi.utils.ClientUtils.parameterizedType; import com.suse.salt.netapi.AuthModule; import com.suse.salt.netapi.calls.runner.Jobs; import com.suse.salt.netapi.client.SaltClient; import com.suse.salt.netapi.datatypes.Batch; import com.suse.salt.netapi.datatypes.target.SSHTarget; import com.suse.salt.netapi.datatypes.Event; import com.suse.salt.netapi.datatypes.target.Target; import com.suse.salt.netapi.errors.GenericError; import com.suse.salt.netapi.event.EventListener; import com.suse.salt.netapi.event.EventStream; import com.suse.salt.netapi.event.JobReturnEvent; import com.suse.salt.netapi.event.RunnerReturnEvent; import com.suse.salt.netapi.exception.SaltException; import com.suse.salt.netapi.results.Result; import com.suse.salt.netapi.results.Return; import com.suse.salt.netapi.results.SSHResult; import com.google.gson.reflect.TypeToken; import com.suse.salt.netapi.utils.ClientUtils; import com.suse.salt.netapi.utils.FunctionE; import javax.websocket.CloseReason; import java.lang.reflect.Type; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; import java.util.stream.Collectors; /** * Class representing a function call of a salt execution module. 
* * @param <R> the return type of the called function */ public class LocalCall<R> implements Call<R> { private final String functionName; private final Optional<List<?>> arg; private final Optional<Map<String, ?>> kwarg; private final TypeToken<R> returnType; private final Optional<?> metadata; private final Optional<Integer> timeout; private final Optional<Integer> gatherJobTimeout; public LocalCall(String functionName, Optional<List<?>> arg, Optional<Map<String, ?>> kwarg, TypeToken<R> returnType, Optional<?> metadata, Optional<Integer> timeout, Optional<Integer> gatherJobTimeout) { this.functionName = functionName; this.arg = arg; this.kwarg = kwarg; this.returnType = returnType; this.metadata = metadata; this.timeout = timeout; this.gatherJobTimeout = gatherJobTimeout; } public LocalCall(String functionName, Optional<List<?>> arg, Optional<Map<String, ?>> kwarg, TypeToken<R> returnType, Optional<Integer> timeout, Optional<Integer> gatherJobTimeout) { this(functionName, arg, kwarg, returnType, Optional.empty(), timeout, gatherJobTimeout); } public LocalCall(String functionName, Optional<List<?>> arg, Optional<Map<String, ?>> kwarg, TypeToken<R> returnType, Optional<?> metadata) { this(functionName, arg, kwarg, returnType, metadata, Optional.empty(), Optional.empty()); } public LocalCall(String functionName, Optional<List<?>> arg, Optional<Map<String, ?>> kwarg, TypeToken<R> returnType) { this(functionName, arg, kwarg, returnType, Optional.empty()); } public LocalCall<R> withMetadata(Object metadata) { return new LocalCall<>(functionName, arg, kwarg, returnType, Optional.of(metadata)); } public LocalCall<R> withoutMetadata() { return new LocalCall<>(functionName, arg, kwarg, returnType, Optional.empty()); } public TypeToken<R> getReturnType() { return returnType; } /** * {@inheritDoc} */ @Override public Map<String, Object> getPayload() { HashMap<String, Object> payload = new HashMap<>(); payload.put("fun", functionName); arg.ifPresent(arg -> payload.put("arg", 
arg)); kwarg.ifPresent(kwarg -> payload.put("kwarg", kwarg)); metadata.ifPresent(m -> payload.put("metadata", m)); timeout.ifPresent(timeout -> payload.put("timeout", timeout)); gatherJobTimeout.ifPresent(gatherJobTimeout -> payload.put("gather_job_timeout", gatherJobTimeout)); return payload; } /** * Calls a execution module function on the given target asynchronously and * returns information about the scheduled job that can be used to query the result. * Authentication is done with the token therefore you have to login prior * to using this function. * * @param client SaltClient instance * @param target the target for the function * @return information about the scheduled job * @throws SaltException if anything goes wrong */ public LocalAsyncResult<R> callAsync(final SaltClient client, Target<?> target) throws SaltException { Map<String, Object> customArgs = new HashMap<>(); customArgs.putAll(getPayload()); customArgs.put("tgt", target.getTarget()); customArgs.put("expr_form", target.getType()); Return<List<LocalAsyncResult<R>>> wrapper = client.call( this, Client.LOCAL_ASYNC, "/", Optional.of(customArgs), new TypeToken<Return<List<LocalAsyncResult<R>>>>(){}); LocalAsyncResult<R> result = wrapper.getResult().get(0); result.setType(getReturnType()); return result; } /** * Calls this salt call via the async client and returns the results * as they come in via the event stream. * * @param client SaltClient instance * @param target the target for the function * @param username username for authentication * @param password password for authentication * @param authModule authentication module to use * @param events the event stream to use * @param cancel future to cancel the action * @return a map from minion id to future of the result. 
* @throws SaltException if anything goes wrong */ public Map<String, CompletionStage<Result<R>>> callAsync( SaltClient client, Target<?> target, String username, String password, AuthModule authModule, EventStream events, CompletionStage<GenericError> cancel) throws SaltException { return callAsync( localCall -> localCall.callAsync(client, target, username, password, authModule), runnerCall -> runnerCall.callAsync(client, username, password, authModule), events, cancel ); } /** * Calls this salt call via the async client and returns the results * as they come in via the event stream. * * @param client SaltClient instance * @param target the target for the function * @param events the event stream to use * @param cancel future to cancel the action * @return a map from minion id to future of the result. * @throws SaltException if anything goes wrong */ public Map<String, CompletionStage<Result<R>>> callAsync( SaltClient client, Target<?> target, EventStream events, CompletionStage<GenericError> cancel) throws SaltException { return callAsync( localCall -> localCall.callAsync(client, target), runnerCall -> runnerCall.callAsync(client), events, cancel ); } /** * Calls this salt call via the async client and returns the results * as they come in via the event stream. * * @param localAsync function providing callAsync for LocalCalls * @param runnerAsync function providing callAsync for RunnerCalls * @param events the event stream to use * @param cancel future to cancel the action * @return a map from minion id to future of the result. 
 * @throws SaltException if anything goes wrong
 */
public Map<String, CompletionStage<Result<R>>> callAsync(
        FunctionE<LocalCall<R>, LocalAsyncResult<R>> localAsync,
        FunctionE<RunnerCall<Map<String, R>>, RunnerAsyncResult<Map<String, R>>> runnerAsync,
        EventStream events,
        CompletionStage<GenericError> cancel)
        throws SaltException {
    // Kick off the async call; the returned descriptor carries the job id
    // (jid) and the list of minions the job was scheduled on.
    LocalAsyncResult<R> lar = localAsync.apply(this);

    // Build a TypeToken<Result<R>> so event payloads can be deserialized.
    TypeToken<R> returnTypeToken = this.getReturnType();
    Type result = ClientUtils.parameterizedType(null,
            Result.class, returnTypeToken.getType());
    @SuppressWarnings("unchecked")
    TypeToken<Result<R>> typeToken = (TypeToken<Result<R>>) TypeToken.get(result);

    // One future per targeted minion; each is completed when that minion's
    // return shows up on the event stream (or on cancel/stream close).
    Map<String, CompletableFuture<Result<R>>> futures =
            lar.getMinions().stream().collect(Collectors.toMap(
                mid -> mid,
                mid -> new CompletableFuture<>()
            )
    );

    // Listener that routes job-return and runner-return events for our jid
    // into the per-minion futures.
    EventListener listener = new EventListener() {
        @Override
        public void notify(Event event) {
            Optional<JobReturnEvent> jobReturnEvent = JobReturnEvent.parse(event);
            if (jobReturnEvent.isPresent()) {
                jobReturnEvent.ifPresent(e ->
                        onJobReturn(lar.getJid(), e, typeToken, futures)
                );
            } else {
                RunnerReturnEvent.parse(event).ifPresent(e ->
                        onRunnerReturn(lar.getJid(), e, typeToken, futures)
                );
            }
        }

        @Override
        public void eventStreamClosed(CloseReason closeReason) {
            // No more events can arrive: resolve every outstanding future
            // with an error so callers are not left hanging.
            Result<R> error = Result.error(
                    new GenericError(
                            "EventStream closed with reason "
                                    + closeReason));
            futures.values().forEach(f -> f.complete(error));
        }
    };

    CompletableFuture<Void> allResolves = CompletableFuture.allOf(
            futures.entrySet().stream().map(entry ->
                // mask errors since CompletableFuture.allOf resolves on first error
                entry.getValue().<Integer>handle((v, e) -> 0)
            ).toArray(CompletableFuture[]::new)
    );

    // Once every minion future has settled, the listener is no longer needed.
    allResolves.whenComplete((v, e) ->
            events.removeEventListener(listener)
    );

    // Propagate an external cancellation: a value resolves the futures with
    // that error, an exception fails them exceptionally.
    cancel.whenComplete((v, e) -> {
        if (v != null) {
            Result<R> error = Result.error(v);
            futures.values().forEach(f -> f.complete(error));
        } else if (e != null) {
            futures.values().forEach(f -> f.completeExceptionally(e));
        }
    });

    events.addEventListener(listener);

    // fire off lookup to get a result event for minions that already finished
    // before we installed the listeners
    runnerAsync.apply(Jobs.lookupJid(lar));

    return futures.entrySet().stream().collect(Collectors.toMap(
        Map.Entry::getKey,
        e -> (CompletionStage<Result<R>>) e.getValue()
    ));
}

/**
 * Calls a execution module function on the given target asynchronously and
 * returns information about the scheduled job that can be used to query the result.
 * Authentication is done with the given credentials no session token is created.
 *
 * @param client SaltClient instance
 * @param target the target for the function
 * @param username username for authentication
 * @param password password for authentication
 * @param authModule authentication module to use
 * @return information about the scheduled job
 * @throws SaltException if anything goes wrong
 */
public LocalAsyncResult<R> callAsync(final SaltClient client, Target<?> target,
        String username, String password, AuthModule authModule)
        throws SaltException {
    // Credential-based auth: the credentials and the target are passed as
    // additional payload arguments to the /run endpoint.
    Map<String, Object> customArgs = new HashMap<>();
    customArgs.putAll(getPayload());
    customArgs.put("username", username);
    customArgs.put("password", password);
    customArgs.put("eauth", authModule.getValue());
    customArgs.put("tgt", target.getTarget());
    customArgs.put("expr_form", target.getType());

    Return<List<LocalAsyncResult<R>>> wrapper = client.call(
            this, Client.LOCAL_ASYNC, "/run", Optional.of(customArgs),
            new TypeToken<Return<List<LocalAsyncResult<R>>>>(){});
    // The API wraps the single job descriptor in a one-element list.
    LocalAsyncResult<R> result = wrapper.getResult().get(0);
    result.setType(getReturnType());
    return result;
}

/**
 * Calls a execution module function on the given target and synchronously
 * waits for the result. Authentication is done with the token therefore you
 * have to login prior to using this function.
* * @param client SaltClient instance * @param target the target for the function * @return a map containing the results with the minion name as key * @throws SaltException if anything goes wrong */ public Map<String, Result<R>> callSync(final SaltClient client, Target<?> target) throws SaltException { return callSyncHelper(client, target, Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty()).get(0); } /** * Calls a execution module function on the given target with batching and * synchronously waits for the result. Authentication is done with the token * therefore you have to login prior to using this function. * * @param client SaltClient instance * @param target the target for the function * @param batch the batch specification * @return A list of maps with each list representing each batch, and maps containing * the results with the minion names as keys. * @throws SaltException if anything goes wrong */ public List<Map<String, Result<R>>> callSync(final SaltClient client, Target<?> target, Batch batch) throws SaltException { return callSyncHelper(client, target, Optional.empty(), Optional.empty(), Optional.empty(), Optional.of(batch)); } /** * Calls a execution module function on the given target and synchronously * waits for the result. Authentication is done with the given credentials * no session token is created. 
* * @param client SaltClient instance * @param target the target for the function * @param username username for authentication * @param password password for authentication * @param authModule authentication module to use * @return a map containing the results with the minion name as key * @throws SaltException if anything goes wrong */ public Map<String, Result<R>> callSync( final SaltClient client, Target<?> target, String username, String password, AuthModule authModule) throws SaltException { return callSyncHelper(client, target, Optional.of(username), Optional.of(password), Optional.of(authModule), Optional.empty()).get(0); } /** * Calls a execution module function on the given target with batching and * synchronously waits for the result. Authentication is done with the given * credentials no session token is created. * * @param client SaltClient instance * @param target the target for the function * @param username username for authentication * @param password password for authentication * @param authModule authentication module to use * @param batch the batch specification * @return A list of maps with each list representing each batch, and maps containing * the results with the minion names as keys. * @throws SaltException if anything goes wrong */ public List<Map<String, Result<R>>> callSync( final SaltClient client, Target<?> target, String username, String password, AuthModule authModule, Batch batch) throws SaltException { return callSyncHelper(client, target, Optional.of(username), Optional.of(password), Optional.of(authModule), Optional.of(batch)); } /** * Helper to call an execution module function on the given target for batched or * unbatched while also providing an option to use the given credentials or to use a * prior created token. Synchronously waits for the result. 
 *
 * @param client SaltClient instance
 * @param target the target for the function
 * @param username username for authentication, empty for token auth
 * @param password password for authentication, empty for token auth
 * @param authModule authentication module to use, empty for token auth
 * @param batch the batch parameter, empty for unbatched
 * @return A list of maps with each list representing each batch, and maps containing
 * the results with the minion names as keys. The first list is the entire
 * output for unbatched input.
 * @throws SaltException if anything goes wrong
 */
private List<Map<String, Result<R>>> callSyncHelper(
        final SaltClient client, Target<?> target,
        Optional<String> username, Optional<String> password,
        Optional<AuthModule> authModule, Optional<Batch> batch)
        throws SaltException {
    // Assemble the request payload: function payload + target, plus
    // credentials and batch size only when supplied.
    Map<String, Object> customArgs = new HashMap<>();
    customArgs.putAll(getPayload());
    customArgs.put("tgt", target.getTarget());
    customArgs.put("expr_form", target.getType());
    username.ifPresent(v -> customArgs.put("username", v));
    password.ifPresent(v -> customArgs.put("password", v));
    authModule.ifPresent(v -> customArgs.put("eauth", v.getValue()));
    batch.ifPresent(v -> customArgs.put("batch", v.toString()));

    Client clientType = batch.isPresent() ? Client.LOCAL_BATCH : Client.LOCAL;
    // We need a different endpoint for credentials vs token auth
    String endPoint = username.isPresent() ? "/run" : "/";

    // Build the reflective type Return<List<Map<String, Result<R>>>> so the
    // JSON response can be deserialized with the right generics.
    Type xor = parameterizedType(null, Result.class, getReturnType().getType());
    Type map = parameterizedType(null, Map.class, String.class, xor);
    Type listType = parameterizedType(null, List.class, map);
    Type wrapperType = parameterizedType(null, Return.class, listType);

    @SuppressWarnings("unchecked")
    Return<List<Map<String, Result<R>>>> wrapper = client.call(this,
            clientType, endPoint,
            Optional.of(customArgs),
            (TypeToken<Return<List<Map<String, Result<R>>>>>)
            TypeToken.get(wrapperType));
    return wrapper.getResult();
}

/**
 * Call an execution module function on the given target via salt-ssh and synchronously
 * wait for the result.
 *
 * @param client SaltClient instance
 * @param target the target for the function
 * @param cfg Salt SSH configuration object
 * @return a map containing the results with the minion name as key
 * @throws SaltException if anything goes wrong
 */
public Map<String, Result<SSHResult<R>>> callSyncSSH(final SaltClient client,
        SSHTarget<?> target, SaltSSHConfig cfg) throws SaltException {
    Map<String, Object> args = new HashMap<>();
    args.putAll(getPayload());
    args.put("tgt", target.getTarget());
    args.put("expr_form", target.getType());
    // Translate the SSH config object into request arguments.
    SaltSSHUtils.mapConfigPropsToArgs(cfg, args);

    // Build Return<List<Map<String, Result<SSHResult<R>>>>> reflectively;
    // salt-ssh wraps each minion result in an additional SSHResult layer.
    Type xor = parameterizedType(null, Result.class,
            parameterizedType(null, SSHResult.class, getReturnType().getType()));
    Type map = parameterizedType(null, Map.class, String.class, xor);
    Type listType = parameterizedType(null, List.class, map);
    Type wrapperType = parameterizedType(null, Return.class, listType);

    @SuppressWarnings("unchecked")
    Return<List<Map<String, Result<SSHResult<R>>>>> wrapper =
            client.call(this, Client.SSH, "/run",
            Optional.of(args),
            (TypeToken<Return<List<Map<String, Result<SSHResult<R>>>>>>)
            TypeToken.get(wrapperType));
    return wrapper.getResult().get(0);
}

/**
 * Completes per-minion futures from a runner return event, i.e. the reply to
 * the jobs.lookup_jid call fired off by callAsync for minions that finished
 * before the event listener was installed.
 */
private static <R> void onRunnerReturn(
        String jid,
        RunnerReturnEvent rre,
        TypeToken<Result<R>> tt,
        Map<String, CompletableFuture<Result<R>>> targets
) {
    final RunnerReturnEvent.Data data = rre.getData();
    if (data.getFun().contentEquals("runner.jobs.list_job")) {
        Jobs.Info result = data.getResult(Jobs.Info.class);
        // Only react to the lookup of our own job id.
        if (result.getJid().equals(jid)) {
            targets.forEach((mid, f) -> {
                // Complete only minions that already have a result recorded.
                result.getResult(mid, tt).ifPresent(f::complete);
            });
        }
    }
}

/**
 * Completes the future of a single minion when its job return event for the
 * given job id arrives.
 */
private static <R> void onJobReturn(
        String jid,
        JobReturnEvent jre,
        TypeToken<Result<R>> tt,
        Map<String, CompletableFuture<Result<R>>> targets
) {
    if (jre.getJobId().contentEquals(jid)) {
        CompletableFuture<Result<R>> f = targets.get(jre.getMinionId());
        // Events for minions we did not target are ignored.
        if (f != null) {
            f.complete(jre.getData().getResult(tt));
        }
    }
}
}
package com.thebuerkle.mcclient.model; import com.google.common.base.Objects; /** * A 16x16 region of the map. Consists of 16 chunks stacked vertically. */ public class Region { private final Chunk[] _chunks = new Chunk[16]; private final int _x; private final int _z; public Region(IntVec3 position) { _x = position.x; _z = position.z; } public int getX() { return _x; } public int getZ() { return _z; } public Chunk get(int y) { return _chunks[y / 16]; } public void add(Chunk chunk) { // _chunks[chunk.getPosition().y / 16] = chunk; } public void remove(Chunk chunk) { // _chunks[chunk.getPosition().y / 16] = null; } @Override() public boolean equals(Object o) { if (o instanceof Region) { Region r = (Region) o; return _x == r._x && _z == r._z; } return false; } @Override() public int hashCode() { return 31 * (31 + _x) + _z; } @Override() public String toString() { return Objects.toStringHelper(this). add("X", _x). add("Z", _z). toString(); } }
package com.treetank.access;

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

import com.treetank.api.IAxis;
import com.treetank.api.IWriteTransaction;
import com.treetank.axis.DescendantAxis;
import com.treetank.exception.TreetankException;
import com.treetank.exception.TreetankIOException;
import com.treetank.exception.TreetankUsageException;
import com.treetank.node.AbstractNode;
import com.treetank.node.AttributeNode;
import com.treetank.node.ElementNode;
import com.treetank.node.NamespaceNode;
import com.treetank.page.UberPage;
import com.treetank.settings.EFixed;
import com.treetank.utils.TypedValue;

/**
 * <h1>WriteTransaction</h1>
 *
 * <p>
 * Single-threaded instance of only write transaction per session.
 * </p>
 */
public final class WriteTransaction extends ReadTransaction implements
        IWriteTransaction {

    /** Maximum number of node modifications before auto commit. */
    private final int mMaxNodeCount;

    /** Scheduler to commit after mMaxTime seconds. */
    private ScheduledExecutorService mCommitScheduler;

    /** Modification counter. */
    private long mModificationCount;

    /**
     * Constructor.
     *
     * @param transactionID
     *            ID of transaction.
     * @param sessionState
     *            State of the session.
     * @param transactionState
     *            State of this transaction.
     * @param maxNodeCount
     *            Maximum number of node modifications before auto commit.
     * @param maxTime
     *            Maximum number of seconds before auto commit.
     * @throws TreetankException
     *             if a count argument is negative.
     */
    protected WriteTransaction(final long transactionID,
            final SessionState sessionState,
            final WriteTransactionState transactionState,
            final int maxNodeCount, final int maxTime)
            throws TreetankException {
        super(transactionID, sessionState, transactionState);

        // Do not accept negative values.
        if ((maxNodeCount < 0) || (maxTime < 0)) {
            throw new TreetankUsageException(
                    "Negative arguments are not accepted.");
        }

        // Only auto commit by node modifications if it is more then 0.
        mMaxNodeCount = maxNodeCount;
        mModificationCount = 0L;

        // Only auto commit by time if the time is more than 0 seconds.
        if (maxTime > 0) {
            mCommitScheduler = Executors.newScheduledThreadPool(1);
            // Periodic task: commit whenever uncommitted modifications exist.
            mCommitScheduler.scheduleAtFixedRate(new Runnable() {
                public final void run() {
                    if (mModificationCount > 0) {
                        try {
                            commit();
                        } catch (final TreetankException exc) {
                            throw new IllegalStateException(exc);
                        }
                    }
                }
            }, 0, maxTime, TimeUnit.SECONDS);
        } else {
            mCommitScheduler = null;
        }
    }

    /**
     * {@inheritDoc}
     */
    public synchronized long insertElementAsFirstChild(final String name,
            final String uri) throws TreetankException {
        // New element: parent = current node, no siblings yet, first child of
        // the new node is the current node's former first child.
        return insertFirstChild(((WriteTransactionState) getTransactionState())
                .createElementNode(getCurrentNode().getNodeKey(),
                        (Long) EFixed.NULL_NODE_KEY.getStandardProperty(),
                        (Long) EFixed.NULL_NODE_KEY.getStandardProperty(),
                        getCurrentNode().getFirstChildKey(),
                        ((WriteTransactionState) getTransactionState())
                                .createNameKey(name),
                        ((WriteTransactionState) getTransactionState())
                                .createNameKey(uri),
                        ((WriteTransactionState) getTransactionState())
                                .createNameKey("xs:untyped")));
    }

    /**
     * {@inheritDoc}
     */
    public synchronized long insertTextAsFirstChild(final int valueType,
            final byte[] value) throws TreetankException {
        return insertFirstChild(((WriteTransactionState) getTransactionState())
                .createTextNode(getCurrentNode().getNodeKey(),
                        (Long) EFixed.NULL_NODE_KEY.getStandardProperty(),
                        getCurrentNode().getFirstChildKey(), valueType, value));
    }

    /**
     * {@inheritDoc}
     */
    public synchronized long insertTextAsFirstChild(final String value)
            throws TreetankException {
        // Convenience overload: defaults the value type to xs:untyped.
        return insertTextAsFirstChild(
                ((WriteTransactionState) getTransactionState())
                        .createNameKey("xs:untyped"), TypedValue
                        .getBytes(value));
    }

    /**
     * {@inheritDoc}
     */
    public synchronized long insertElementAsRightSibling(final String name,
            final String uri) throws TreetankException {
        // New element shares the current node's parent and is linked between
        // the current node and its former right sibling.
        return insertRightSibling(((WriteTransactionState) getTransactionState())
                .createElementNode(getCurrentNode().getParentKey(),
                        (Long) EFixed.NULL_NODE_KEY.getStandardProperty(),
                        getCurrentNode().getNodeKey(), getCurrentNode()
                                .getRightSiblingKey(),
                        ((WriteTransactionState) getTransactionState())
                                .createNameKey(name),
                        ((WriteTransactionState) getTransactionState())
                                .createNameKey(uri),
                        ((WriteTransactionState) getTransactionState())
                                .createNameKey("xs:untyped")));
    }

    /**
     * {@inheritDoc}
     */
    public synchronized long insertTextAsRightSibling(final int valueType,
            final byte[] value) throws TreetankException {
        return insertRightSibling(((WriteTransactionState) getTransactionState())
                .createTextNode(getCurrentNode().getParentKey(),
                        getCurrentNode().getNodeKey(), getCurrentNode()
                                .getRightSiblingKey(), valueType, value));
    }

    /**
     * {@inheritDoc}
     */
    public synchronized long insertTextAsRightSibling(final String value)
            throws TreetankException {
        // Convenience overload: defaults the value type to xs:untyped.
        return insertTextAsRightSibling(
                ((WriteTransactionState) getTransactionState())
                        .createNameKey("xs:untyped"), TypedValue
                        .getBytes(value));
    }

    /**
     * {@inheritDoc}
     */
    public synchronized long insertAttribute(final String name,
            final String uri, final int valueType, final byte[] value)
            throws TreetankException {
        return insertAttribute(((WriteTransactionState) getTransactionState())
                .createAttributeNode(getCurrentNode().getNodeKey(),
                        ((WriteTransactionState) getTransactionState())
                                .createNameKey(name),
                        ((WriteTransactionState) getTransactionState())
                                .createNameKey(uri),
                        ((WriteTransactionState) getTransactionState())
                                .createNameKey("xs:untypedAtomic"), value));
    }

    /**
     * {@inheritDoc}
     */
    public synchronized long insertAttribute(final String name,
            final String uri, final String value) throws TreetankException {
        // Convenience overload: defaults the value type to xs:untypedAtomic.
        return insertAttribute(name, uri,
                ((WriteTransactionState) getTransactionState())
                        .createNameKey("xs:untypedAtomic"), TypedValue
                        .getBytes(value));
    }

    /**
     * {@inheritDoc}
     */
    public synchronized long insertNamespace(final String uri,
            final String prefix) throws TreetankException {
        return insertNamespace(((WriteTransactionState) getTransactionState())
                .createNamespaceNode(getCurrentNode().getNodeKey(),
                        ((WriteTransactionState) getTransactionState())
                                .createNameKey(uri),
                        ((WriteTransactionState) getTransactionState())
                                .createNameKey(prefix)));
    }

    /**
     * {@inheritDoc}
     */
    public synchronized void remove() throws TreetankException {
        assertNotClosed();
        mModificationCount++;
        final AbstractNode node = (AbstractNode) getCurrentNode();

        if (getCurrentNode().isDocumentRoot()) {
            throw new TreetankUsageException("Root node can not be removed.");
        } else if (getCurrentNode().isElement() || getCurrentNode().isText()) {
            // Unlink the node from its siblings/parent first.
            adaptNeighbours(node, null);

            // removing subtree
            final IAxis desc = new DescendantAxis(this);
            while (desc.hasNext()) {
                desc.next();
                ((WriteTransactionState) getTransactionState())
                        .removeNode((AbstractNode) this.getCurrentNode());
            }

            // removing attributes
            moveTo(node.getNodeKey());
            for (int i = 0; i < node.getAttributeCount(); i++) {
                moveTo(node.getAttributeKey(i));
                ((WriteTransactionState) getTransactionState())
                        .removeNode((AbstractNode) this.getCurrentNode());
            }

            // removing namespaces
            moveTo(node.getNodeKey());
            for (int i = 0; i < node.getNamespaceCount(); i++) {
                moveTo(node.getNamespaceKey(i));
                ((WriteTransactionState) getTransactionState())
                        .removeNode((AbstractNode) this.getCurrentNode());
            }

            // Remove old node.
            ((WriteTransactionState) getTransactionState()).removeNode(node);

            // Set current node: prefer right sibling, then left sibling,
            // otherwise fall back to the parent.
            if (node.hasRightSibling()) {
                moveTo(node.getRightSiblingKey());
                return;
            }
            if (node.hasLeftSibling()) {
                moveTo(node.getLeftSiblingKey());
                return;
            }
            moveTo(node.getParentKey());
        } else if (getCurrentNode().isAttribute()) {
            // Attributes are detached from their owning element only.
            moveToParent();
            AbstractNode parent = setUpNodeModification(getCurrentNode()
                    .getNodeKey());
            ((ElementNode) parent).removeAttribute(node.getNodeKey());
            tearDownNodeModification(parent);
        }
    }

    /**
     * {@inheritDoc}
     */
    public synchronized void setName(final String name)
            throws TreetankIOException {
        assertNotClosed();
        mModificationCount++;
        final AbstractNode node = setUpNodeModification(getCurrentNode()
                .getNodeKey());
        node.setNameKey(((WriteTransactionState) getTransactionState())
                .createNameKey(name));
        setCurrentNode(node);
        tearDownNodeModification(node);
    }

    /**
     * {@inheritDoc}
     */
    public synchronized void setURI(final String uri)
            throws TreetankIOException {
        assertNotClosed();
        mModificationCount++;
        final AbstractNode node = setUpNodeModification(getCurrentNode()
                .getNodeKey());
        node.setURIKey(((WriteTransactionState) getTransactionState())
                .createNameKey(uri));
        setCurrentNode(node);
        tearDownNodeModification(node);
    }

    /**
     * {@inheritDoc}
     */
    public synchronized void setValue(final int valueType, final byte[] value)
            throws TreetankIOException {
        assertNotClosed();
        mModificationCount++;
        final AbstractNode node = setUpNodeModification(getCurrentNode()
                .getNodeKey());
        node.setValue(valueType, value);
        setCurrentNode(node);
        tearDownNodeModification(node);
    }

    /**
     * {@inheritDoc}
     */
    public synchronized void setValue(final String value)
            throws TreetankIOException {
        // Convenience overload: defaults the value type to xs:untyped.
        setValue(((WriteTransactionState) getTransactionState())
                .createNameKey("xs:untyped"), TypedValue.getBytes(value));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public synchronized void close() throws TreetankException {
        if (!isClosed()) {
            // Make sure to commit all dirty data.
            if (mModificationCount > 0) {
                throw new TreetankUsageException(
                        "Must commit/abort transaction first");
            }
            // Make sure to cancel the periodic commit task if it was started.
            if (mCommitScheduler != null) {
                mCommitScheduler.shutdownNow();
                mCommitScheduler = null;
            }
            // Release all state immediately.
            getTransactionState().close();
            getSessionState().closeWriteTransaction(getTransactionID());
            setSessionState(null);
            setTransactionState(null);
            setCurrentNode(null);
            // Remember that we are closed.
            setClosed();
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void revertTo(final long revision) throws TreetankException {
        assertNotClosed();
        getSessionState().assertValidRevision(revision);
        getTransactionState().close();
        // Reset internal transaction state to new uber page.
        setTransactionState(getSessionState().createWriteTransactionState(
                getTransactionID(), revision, getRevisionNumber() - 1));
        // Reset modification counter.
        mModificationCount = 0L;
        moveToDocumentRoot();
    }

    /**
     * {@inheritDoc}
     */
    public synchronized void commit() throws TreetankException {
        assertNotClosed();

        // Commit uber page.
        UberPage uberPage = ((WriteTransactionState) getTransactionState())
                .commit();

        // Remember succesfully committed uber page in session state.
        getSessionState().setLastCommittedUberPage(uberPage);

        // Reset modification counter.
        mModificationCount = 0L;

        getTransactionState().close();
        // Reset internal transaction state to new uber page.
        setTransactionState(getSessionState().createWriteTransactionState(
                getTransactionID(), getRevisionNumber(), getRevisionNumber()));
    }

    /**
     * {@inheritDoc}
     */
    public synchronized void abort() throws TreetankIOException {
        assertNotClosed();

        // Reset modification counter.
        mModificationCount = 0L;

        getTransactionState().close();

        // A bootstrap uber page has no previous revision to fall back to.
        long revisionToSet = 0;
        if (!getTransactionState().getUberPage().isBootstrap()) {
            revisionToSet = getRevisionNumber() - 1;
        }

        // Reset internal transaction state to last committed uber page.
        setTransactionState(getSessionState().createWriteTransactionState(
                getTransactionID(), revisionToSet, revisionToSet));
    }

    // Commits automatically once the modification count exceeds the
    // configured maximum (0 disables auto commit by count).
    private void intermediateCommitIfRequired() throws TreetankException {
        assertNotClosed();
        if ((mMaxNodeCount > 0) && (mModificationCount > mMaxNodeCount)) {
            commit();
        }
    }

    // Inserts a freshly created node as first child of the current node and
    // makes it the new current node.
    private long insertFirstChild(final AbstractNode node)
            throws TreetankException {

        assertNotClosed();
        mModificationCount++;
        intermediateCommitIfRequired();

        setCurrentNode(node);

        updateParentAfterInsert(true);
        updateRightSibling();

        return node.getNodeKey();
    }

    // Inserts a freshly created node as right sibling of the current node and
    // makes it the new current node.
    private long insertRightSibling(final AbstractNode node)
            throws TreetankException {

        assertNotClosed();
        mModificationCount++;
        intermediateCommitIfRequired();

        if (getCurrentNode().getNodeKey() == (Long) EFixed.ROOT_NODE_KEY
                .getStandardProperty()) {
            throw new TreetankUsageException("Root node can not have siblings.");
        }

        setCurrentNode(node);

        updateParentAfterInsert(false);
        updateLeftSibling();
        updateRightSibling();

        return node.getNodeKey();
    }

    // Registers a freshly created attribute node with its owning element.
    private long insertAttribute(final AttributeNode node)
            throws TreetankException {

        assertNotClosed();
        mModificationCount++;
        intermediateCommitIfRequired();

        if (!getCurrentNode().isElement()) {
            throw new IllegalStateException(
                    "Only element nodes can have attributes.");
        }

        setCurrentNode(node);

        final AbstractNode parentNode = setUpNodeModification(node
                .getParentKey());
        parentNode.insertAttribute(node.getNodeKey());
        tearDownNodeModification(parentNode);

        return node.getNodeKey();
    }

    // Registers a freshly created namespace node with its owning element.
    private long insertNamespace(final NamespaceNode node)
            throws TreetankException {

        assertNotClosed();
        mModificationCount++;
        intermediateCommitIfRequired();

        if (!getCurrentNode().isElement()) {
            throw new IllegalStateException(
                    "Only element nodes can have namespaces.");
        }

        setCurrentNode(node);

        final AbstractNode parentNode = setUpNodeModification(node
                .getParentKey());
        parentNode.insertNamespace(node.getNodeKey());
        tearDownNodeModification(parentNode);

        return node.getNodeKey();
    }

    // Increments the parent's child count and, for first-child inserts, makes
    // the current node the parent's first child.
    private void updateParentAfterInsert(final boolean updateFirstChild)
            throws TreetankIOException {
        final AbstractNode parentNode = setUpNodeModification(getCurrentNode()
                .getParentKey());
        parentNode.incrementChildCount();
        if (updateFirstChild) {
            parentNode.setFirstChildKey(getCurrentNode().getNodeKey());
        }
        tearDownNodeModification(parentNode);

    }

    // Points the right sibling (if any) back at the current node.
    private void updateRightSibling() throws TreetankIOException {
        if (getCurrentNode().hasRightSibling()) {

            final AbstractNode rightSiblingNode = setUpNodeModification(getCurrentNode()
                    .getRightSiblingKey());
            rightSiblingNode.setLeftSiblingKey(getCurrentNode().getNodeKey());
            tearDownNodeModification(rightSiblingNode);

        }
    }

    // Points the left sibling forward at the current node.
    private void updateLeftSibling() throws TreetankIOException {
        final AbstractNode leftSiblingNode = setUpNodeModification(getCurrentNode()
                .getLeftSiblingKey());
        leftSiblingNode.setRightSiblingKey(getCurrentNode().getNodeKey());
        tearDownNodeModification(leftSiblingNode);
    }

    // Rewires siblings, parent, children, attributes and namespaces of
    // oldNode to newNode; with newNode == null the node is unlinked instead.
    // NOTE: relies on the transaction cursor; move order is significant.
    private void adaptNeighbours(final AbstractNode oldNode,
            final AbstractNode newNode) throws TreetankIOException {

        // Remember all related nodes.
        AbstractNode leftSibling = null;
        AbstractNode rightSibling = null;
        AbstractNode parent = null;
        AbstractNode firstChild = null;

        // getting the neighbourhood
        if (oldNode.hasLeftSibling()) {
            moveToLeftSibling();
            leftSibling = (AbstractNode) getCurrentNode();
            moveToRightSibling();
        }
        if (oldNode.hasRightSibling()) {
            moveToRightSibling();
            rightSibling = (AbstractNode) getCurrentNode();
            moveToLeftSibling();
        }
        if (!moveToParent()) {
            throw new IllegalStateException("Node has no parent!");
        }
        parent = (AbstractNode) getCurrentNode();
        moveTo(oldNode.getNodeKey());
        if (oldNode.hasFirstChild()) {
            moveToFirstChild();
            firstChild = (AbstractNode) getCurrentNode();
        }
        moveTo(oldNode.getNodeKey());

        // Adapt left sibling node if there is one.
        if (leftSibling != null) {
            leftSibling = setUpNodeModification(leftSibling.getNodeKey());
            if (newNode == null) {
                if (rightSibling != null) {
                    leftSibling.setRightSiblingKey(rightSibling.getNodeKey());
                } else {
                    leftSibling.setRightSiblingKey((Long) EFixed.NULL_NODE_KEY
                            .getStandardProperty());
                }
            } else {
                leftSibling.setRightSiblingKey(newNode.getNodeKey());
                newNode.setLeftSiblingKey(leftSibling.getNodeKey());
            }
            tearDownNodeModification(leftSibling);
        }

        // Adapt right sibling node if there is one.
        if (rightSibling != null) {
            rightSibling = setUpNodeModification(rightSibling.getNodeKey());
            if (newNode == null) {
                if (leftSibling != null) {
                    rightSibling.setLeftSiblingKey(leftSibling.getNodeKey());
                } else {
                    rightSibling.setLeftSiblingKey((Long) EFixed.NULL_NODE_KEY
                            .getStandardProperty());
                }
            } else {
                rightSibling.setLeftSiblingKey(newNode.getNodeKey());
                newNode.setRightSiblingKey(rightSibling.getNodeKey());
            }
            tearDownNodeModification(rightSibling);
        }

        // Adapt parent.
        parent = setUpNodeModification(parent.getNodeKey());
        if (newNode == null) {
            parent.decrementChildCount();
        }
        if (parent.getFirstChildKey() == oldNode.getNodeKey()) {
            if (newNode == null) {
                if (rightSibling != null) {
                    parent.setFirstChildKey(rightSibling.getNodeKey());
                } else {
                    parent.setFirstChildKey((Long) EFixed.NULL_NODE_KEY
                            .getStandardProperty());
                }
            } else {
                parent.setFirstChildKey(newNode.getNodeKey());
            }
        }
        tearDownNodeModification(parent);

        // adapt associated nodes
        if (newNode != null) {
            if (firstChild != null) {
                // Re-parent every child of oldNode to newNode.
                newNode.setFirstChildKey(firstChild.getNodeKey());
                AbstractNode node = firstChild;
                do {
                    node = setUpNodeModification(node.getNodeKey());
                    node.setParentKey(newNode.getNodeKey());
                    tearDownNodeModification(node);
                } while (moveToRightSibling());
            }
            // setting the attributes and namespaces
            for (int i = 0; i < oldNode.getAttributeCount(); i++) {
                newNode.insertAttribute(oldNode.getAttributeKey(i));
                AbstractNode node = setUpNodeModification(oldNode
                        .getAttributeKey(i));
                node.setParentKey(newNode.getNodeKey());
                tearDownNodeModification(node);
            }
            for (int i = 0; i < oldNode.getNamespaceCount(); i++) {
                newNode.insertNamespace(oldNode.getNamespaceKey(i));
                AbstractNode node = setUpNodeModification(oldNode
                        .getNamespaceKey(i));
                node.setParentKey(newNode.getNodeKey());
                tearDownNodeModification(node);
            }
            newNode.setChildCount(oldNode.getChildCount());
        }

    }

    // Fetches a modifiable copy of the node with the given key.
    private AbstractNode setUpNodeModification(final long nodeKey)
            throws TreetankIOException {
        final AbstractNode modNode = ((WriteTransactionState) getTransactionState())
                .prepareNodeForModification(nodeKey);
        return modNode;
    }

    // Writes a modified node back through the transaction state.
    private void tearDownNodeModification(final AbstractNode node)
            throws TreetankIOException {
        ((WriteTransactionState) getTransactionState())
                .finishNodeModification(node);
    }

}
/* P0006
 *
 * Word Ladder
 * Given two words (start and end), and a dictionary, find the length of
 * shortest transformation sequence from start to end, such that
 *
 * 1) Only one letter can be changed at a time.
 * 2) Each intermediate word must exist in the dictionary.
 *
 * For example,
 *
 * Given:
 * start = "hit"
 * end = "cog"
 * dict = ["hot", "dot", "dog", "lot", "log"]
 * As one shortest transformation is
 * "hit" -> "hot" -> "lot" -> "log" -> "cog",
 * return its length 5.
 *
 * Note:
 * Return 0 if there is no such transformation sequence.
 * All words have the same length.
 * All words contain only lowercase alphabetic characters.
 *
 */

import java.util.HashSet;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.Queue;
import java.util.Iterator;

public class P0006 {

    /**
     * Returns the number of words in the shortest transformation chain from
     * {@code start} to {@code end} (both endpoints included), changing one
     * letter per step through words of {@code dict}; 0 if no chain exists.
     *
     * Fixes over the original draft:
     *  - "level[i]" was missing its decrement ("level[i]--;"), which did not
     *    even compile and broke the level bookkeeping;
     *  - "return ++res" was off by one: a candidate found while expanding
     *    level res sits on level res + 1 and end is one step further, so the
     *    chain length is res + 2;
     *  - a start word directly adjacent to end was never detected, because
     *    only dictionary candidates are tested against end.
     */
    @SuppressWarnings("unchecked")
    public static int wordLadder(String start, String end, HashSet<String> dict) {
        // Degenerate cases not reachable through the BFS below.
        if (start.equals(end)) {
            return 1;                       // chain is just [start]
        }
        if (isValidTrans(start, end)) {
            return 2;                       // start -> end, no intermediate
        }

        // res is the 1-based BFS level currently being expanded;
        // level 1 contains only start, so any chain has at least one step.
        int res = 1;
        // use a copy of the dictionary so the caller's set is not consumed
        dict = (HashSet<String>) dict.clone();
        Queue<String> queue = new LinkedList<String>();
        queue.offer(start);

        // level[i] = words still queued on the current level,
        // level[j] = words enqueued so far for the next level.
        int[] level = new int[2];
        int i = res % 2;        // index of the level we are dequeuing from
        level[i] = 1;
        int j = (i + 1) % 2;    // index of the level we are enqueuing into

        while (queue.size() > 0) {
            String current = queue.poll();
            level[i]--;  // one fewer word left on the current level
            for (Iterator<String> iter = dict.iterator(); iter.hasNext(); ) {
                String cand = iter.next();
                if (isValidTrans(current, cand)) {
                    queue.offer(cand);
                    iter.remove();  // each dictionary word is visited once
                    level[j]++;
                    if (isValidTrans(end, cand)) {
                        // cand is on level res + 1, end one step beyond it.
                        return res + 2;
                    }
                }
            }
            if (level[i] == 0) {
                // Current level exhausted: advance to the next one. The new
                // level[j] slot is already back at 0 after the decrements.
                res++;
                i = res % 2;
                j = (i + 1) % 2;
            }
        }
        return 0;
    }

    /**
     * A transformation is valid iff the two words differ in exactly one
     * position. Assumes equal-length words (guaranteed by the problem).
     */
    private static boolean isValidTrans(String src, String tgt) {
        int len = src.length();
        int mismatchCount = 0;
        for (int i = 0; i < len; i++) {
            if (src.charAt(i) != tgt.charAt(i))
                mismatchCount++;
            if (mismatchCount > 1)
                return false;
        }
        return mismatchCount == 1;
    }

    public static void main(String[] args) {
        String start = "hit", end = "cog";
        //String[] dictArray = new String[]{"hot", "dot", "dog", "lot", "log"};
        String[] dictArray = new String[]{"hot", "dot", "lot"};
        HashSet<String> dict = new HashSet<String>(Arrays.asList(dictArray));
        int res = P0006.wordLadder(start, end, dict);
        System.out.println(res);
    }
}
package org.basex.query.func;

import static org.basex.query.func.Function.*;
import static org.basex.query.util.Err.*;
import static org.basex.util.Token.*;

import java.io.*;

import org.basex.io.*;
import org.basex.io.in.*;
import org.basex.io.out.*;
import org.basex.io.serial.*;
import org.basex.query.*;
import org.basex.query.expr.*;
import org.basex.query.iter.*;
import org.basex.query.up.primitives.*;
import org.basex.query.util.*;
import org.basex.query.util.Err.ErrType;
import org.basex.query.value.*;
import org.basex.query.value.item.*;
import org.basex.query.value.node.*;
import org.basex.query.value.type.*;
import org.basex.util.*;
import org.basex.util.list.*;

/**
 * Implementations of general-purpose standard functions (data, collection,
 * doc, unparsed-text, put, parse-xml, serialize, ...). The concrete function
 * to evaluate is selected by the inherited {@code sig} field.
 */
public final class FNGen extends StandardFunc {
  /**
   * Constructor.
   * @param ii input info
   * @param f function definition
   * @param e arguments
   */
  public FNGen(final InputInfo ii, final Function f, final Expr[] e) {
    super(ii, f, e);
  }

  @Override
  public Iter iter(final QueryContext ctx) throws QueryException {
    // dispatch to the functions that produce (possibly lazy) iterators
    switch(sig) {
      case DATA:                return data(ctx);
      case COLLECTION:          return collection(ctx).iter();
      case URI_COLLECTION:      return uriCollection(ctx);
      case UNPARSED_TEXT_LINES: return unparsedTextLines(ctx);
      default:                  return super.iter(ctx);
    }
  }

  @Override
  public Item item(final QueryContext ctx, final InputInfo ii) throws QueryException {
    // dispatch to the functions that yield at most a single item
    switch(sig) {
      case DOC:                     return doc(ctx);
      case DOC_AVAILABLE:           return docAvailable(ctx);
      case UNPARSED_TEXT:           return unparsedText(ctx, false);
      case UNPARSED_TEXT_AVAILABLE: return unparsedText(ctx, true);
      case PUT:                     return put(ctx);
      case PARSE_XML:               return parseXml(ctx);
      case SERIALIZE:               return serialize(ctx);
      default:                      return super.item(ctx, ii);
    }
  }

  @Override
  public Value value(final QueryContext ctx) throws QueryException {
    switch(sig) {
      // collection() can be materialized as a whole value directly
      case COLLECTION: return collection(ctx);
      default:         return super.value(ctx);
    }
  }

  @Override
  public Expr comp(final QueryContext ctx) throws QueryException {
    // compile-time type refinement for fn:data with one argument:
    // node input atomizes to untyped atomics, anything else keeps its type
    if(sig == Function.DATA && expr.length == 1) {
      final SeqType t = expr[0].type();
      type = t.type.isNode() ? SeqType.get(AtomType.ATM, t.occ) : t;
    }
    return this;
  }

  /**
   * Performs the data function: atomizes the argument (or the context item
   * if no argument is given), rejecting function items.
   * @param ctx query context
   * @return resulting iterator
   * @throws QueryException query exception
   */
  private Iter data(final QueryContext ctx) throws QueryException {
    final Iter ir = ctx.iter(expr.length != 0 ? expr[0] : checkCtx(ctx));
    return new Iter() {
      @Override
      public Item next() throws QueryException {
        final Item it = ir.next();
        if(it == null) return null;
        // function items cannot be atomized (err:FOTY0013)
        if(it.type.isFunction()) FIATOM.thrw(info, FNGen.this);
        return atom(it, info);
      }
    };
  }

  /**
   * Performs the collection function.
   * @param ctx query context
   * @return result
   * @throws QueryException query exception
   */
  private Value collection(final QueryContext ctx) throws QueryException {
    // return default collection
    if(expr.length == 0) return ctx.resource.collection(info);

    // check if reference is valid
    final byte[] in = checkEStr(expr[0].item(ctx, info));
    if(!Uri.uri(in).isValid()) INVCOLL.thrw(info, in);
    return ctx.resource.collection(string(in), info);
  }

  /**
   * Performs the uri-collection function: yields the base URIs of the
   * collection's documents instead of the documents themselves.
   * @param ctx query context
   * @return result
   * @throws QueryException query exception
   */
  private Iter uriCollection(final QueryContext ctx) throws QueryException {
    final Iter coll = collection(ctx).iter();
    return new Iter() {
      @Override
      public Item next() throws QueryException {
        final Item it = coll.next();
        // all items will be nodes, so the cast to ANode is safe
        return it == null ? null : Uri.uri(((ANode) it).baseURI(), false);
      }
    };
  }

  /**
   * Performs the put function: registers a serialization of a document or
   * element node to the given URI as an update primitive. Produces no
   * immediate result; the write happens when pending updates are applied.
   * @param ctx query context
   * @return result (always {@code null}, i.e. the empty sequence)
   * @throws QueryException query exception
   */
  private Item put(final QueryContext ctx) throws QueryException {
    checkCreate(ctx);
    final byte[] file = checkEStr(expr[1], ctx);
    final ANode nd = checkNode(checkNoEmpty(expr[0].item(ctx, info)));
    // only document and element nodes may be stored
    if(nd == null || nd.type != NodeType.DOC && nd.type != NodeType.ELM)
      UPFOTYPE.thrw(info, expr[0]);

    final Uri u = Uri.uri(file);
    if(u == Uri.EMPTY || !u.isValid()) UPFOURI.thrw(info, file);
    final DBNode target = ctx.updates.determineDataRef(nd, ctx);

    // check if all target paths are unique
    final String uri = new IOFile(u.toJava()).path();
    if(ctx.updates.putPaths.add(token(uri)) < 0) UPURIDUP.thrw(info, uri);

    ctx.updates.add(new Put(info, target.pre, target.data, uri), ctx);
    return null;
  }

  /**
   * Performs the doc function.
   * @param ctx query context
   * @return result ({@code null} for an empty-sequence argument)
   * @throws QueryException query exception
   */
  private ANode doc(final QueryContext ctx) throws QueryException {
    final Item it = expr[0].item(ctx, info);
    if(it == null) return null;
    final byte[] in = checkEStr(it);
    if(!Uri.uri(in).isValid()) INVDOC.thrw(info, in);
    return ctx.resource.doc(new QueryInput(string(in)), info);
  }

  /**
   * Performs the doc-available function: tries to open the document and maps
   * the "resource not found"-style errors (FODC0002/FODC0004) to false;
   * any other error is propagated.
   * @param ctx query context
   * @return result
   * @throws QueryException query exception
   */
  private Bln docAvailable(final QueryContext ctx) throws QueryException {
    try {
      return Bln.get(doc(ctx) != null);
    } catch(final QueryException ex) {
      final Err err = ex.err();
      if(err != null && err.type == ErrType.FODC &&
          (err.num == 2 || err.num == 4)) return Bln.FALSE;
      throw ex;
    }
  }

  /**
   * Performs the unparsed-text function.
   * @param ctx query context
   * @param check only check if text is available (unparsed-text-available)
   * @return content, or a boolean when {@code check} is set
   * @throws QueryException query exception
   */
  private Item unparsedText(final QueryContext ctx, final boolean check)
      throws QueryException {
    checkCreate(ctx);
    final byte[] path = checkStr(expr[0], ctx);
    final String enc = encoding(1, WHICHENC, ctx);
    // relative paths are resolved against the static base URI
    final IO base = ctx.sc.baseIO();
    if(base == null) throw STBASEURI.thrw(info);
    try {
      final String p = string(path);
      // fragment identifiers and invalid URIs are rejected up front
      if(p.indexOf('#') != -1) FRAGID.thrw(info, p);
      if(!Uri.uri(token(p)).isValid()) INVURL.thrw(info, p);

      IO io = base.merge(p);
      // a declared query resource may remap the resolved path
      final String rp = ctx.resource.resources.get(io.path());
      if(rp != null) io = IO.get(rp);
      if(!io.exists()) throw RESNF.thrw(info, p);

      // NOTE(review): `is` is opened separately from the TextInput built on
      // `io`; only `is` is closed in the finally block — confirm TextInput
      // releases its own stream, otherwise this looks like a leak.
      final InputStream is = io.inputStream();
      try {
        // `valid(true)` makes the reader verify character validity
        final TextInput ti = new TextInput(io).valid(true);
        if(enc != null) ti.encoding(enc);
        if(!check) return Str.get(ti.content());
        // availability check: read through the input to validate it
        while(ti.read() != -1);
        return Bln.TRUE;
      } finally {
        is.close();
      }
    } catch(final QueryException ex) {
      // unparsed-text-available maps any failure to false()
      if(check) return Bln.FALSE;
      throw ex;
    } catch(final IOException ex) {
      if(check) return Bln.FALSE;
      // distinguish invalid characters from an undecodable/unknown encoding
      if(ex instanceof EncodingException) INVCHARS.thrw(info, ex);
      if(ex instanceof InputException && enc == null) WHICHCHARS.thrw(info);
      throw SERANY.thrw(info, ex);
    }
  }

  /**
   * Performs the unparsed-text-lines function.
   * @param ctx query context
   * @return result
   * @throws QueryException query exception
   */
  Iter unparsedTextLines(final QueryContext ctx) throws QueryException {
    return textIter(unparsedText(ctx, false).string(info));
  }

  /**
   * Returns the specified text as lines.
   * @param str text input
   * @return result
   */
  static Iter textIter(final byte[] str) {
    // no I/O exception expected, as input is a main-memory array
    try {
      final NewlineInput nli = new NewlineInput(new ArrayInput(str));
      final TokenBuilder tb = new TokenBuilder();
      return new Iter() {
        @Override
        public Item next() {
          try {
            // readLine() returning null signals end of input
            return nli.readLine(tb) == null ? null : Str.get(tb.finish());
          } catch(final IOException ex) {
            // cannot happen for an in-memory source; treat as internal error
            throw Util.notexpected(ex);
          }
        }
      };
    } catch(final IOException ex) {
      throw Util.notexpected(ex);
    }
  }

  /**
   * Performs the parse-xml function.
   * @param ctx query context
   * @return result
   * @throws QueryException query exception
   */
  private ANode parseXml(final QueryContext ctx) throws QueryException {
    final byte[] cont = checkEStr(expr[0], ctx);
    Uri base = ctx.sc.baseURI();
    // optional second argument overrides the static base URI
    if(expr.length == 2) {
      base = Uri.uri(checkEStr(expr[1], ctx));
      if(!base.isValid()) BASEINV.thrw(info, base);
    }
    final IO io = new IOContent(cont, string(base.string()));
    try {
      return new DBNode(io, ctx.context.prop);
    } catch(final IOException ex) {
      // parse failures surface as err:FODC0006
      throw SAXERR.thrw(info, ex);
    }
  }

  /**
   * Performs the serialize function: serializes the first argument using
   * the (optional) serialization parameters in the second argument.
   * @param ctx query context
   * @return result
   * @throws QueryException query exception
   */
  private Str serialize(final QueryContext ctx) throws QueryException {
    final ArrayOutput ao = new ArrayOutput();
    try {
      // run serialization
      Item it = expr.length > 1 ? expr[1].item(ctx, info) : null;
      final Serializer ser = Serializer.get(ao, FuncParams.serializerProp(it));
      final Iter ir = expr[0].iter(ctx);
      while((it = ir.next()) != null) ser.serialize(it);
      ser.close();
    } catch(final SerializerException ex) {
      // unwrap the query exception carried by the serializer
      throw ex.getCause(info);
    } catch(final IOException ex) {
      SERANY.thrw(info, ex);
    }
    // carriage returns are dropped from the serialized output
    return Str.get(delete(ao.toArray(), '\r'));
  }

  @Override
  public boolean uses(final Use u) {
    // constructor/updating/XQuery 3.0/context-dependency flags per function
    return u == Use.CNS && sig == PARSE_XML ||
        u == Use.UPD && sig == PUT ||
        u == Use.X30 && (sig == DATA && expr.length == 0 ||
            oneOf(sig, UNPARSED_TEXT, UNPARSED_TEXT_LINES,
                UNPARSED_TEXT_AVAILABLE, PARSE_XML, URI_COLLECTION, SERIALIZE)) ||
        u == Use.CTX && (sig == DATA && expr.length == 0 || sig == PUT) ||
        super.uses(u);
  }

  @Override
  public boolean databases(final StringList db) {
    if(oneOf(sig, DOC, COLLECTION)) {
      // [JE] XQuery: check how to handle default collection()
      // only statically known string arguments can be mapped to a database
      if(expr.length == 0 || !(expr[0] instanceof Str)) return false;
      final QueryInput qi = new QueryInput(string(((Str) expr[0]).string()));
      if(qi.db == null) return false;
      db.add(qi.db);
      return true;
    }
    return super.databases(db);
  }

  @Override
  public boolean iterable() {
    // collections will never yield duplicates
    return sig == COLLECTION || super.iterable();
  }
}