package hudson.plugins.promoted_builds; import antlr.ANTLRException; import hudson.Util; import hudson.model.AbstractBuild; import hudson.model.AbstractProject; import hudson.model.Action; import hudson.model.Cause; import hudson.model.Cause.LegacyCodeCause; import hudson.model.DependencyGraph; import hudson.model.Descriptor; import hudson.model.FreeStyleProject; import hudson.model.Hudson; import hudson.model.JDK; import hudson.model.Job; import hudson.model.Label; import hudson.model.ParametersAction; import hudson.model.PermalinkProjectAction.Permalink; import hudson.model.Queue.Item; import hudson.model.Run; import hudson.model.Saveable; import hudson.model.labels.LabelAtom; import hudson.model.labels.LabelExpression; import hudson.tasks.BuildStep; import hudson.tasks.BuildStepDescriptor; import hudson.tasks.Builder; import hudson.tasks.Publisher; import hudson.util.DescribableList; import net.sf.json.JSONObject; import org.kohsuke.stapler.StaplerRequest; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.concurrent.Future; /** * A dummy {@link AbstractProject} to carry out promotion operations. * * @author Kohsuke Kawaguchi */ public final class PromotionProcess extends AbstractProject<PromotionProcess,Promotion> implements Saveable { /** * {@link PromotionCondition}s. All have to be met for a build to be promoted. */ public final DescribableList<PromotionCondition,PromotionConditionDescriptor> conditions = new DescribableList<PromotionCondition, PromotionConditionDescriptor>(this); /** * The icon that represents this promotion process. This is the name of * the GIF icon that can be found in ${rootURL}/plugin/promoted-builds/icons/16x16/ * and ${rootURL}/plugin/promoted-builds/icons/32x32/, e.g. <code>"star-gold"</code>. */ public String icon; /** * The label that promotion process can be run on. */ public String assignedLabel; private List<BuildStep> buildSteps = new ArrayList<BuildStep>(); /*package*/ PromotionProcess(JobPropertyImpl property, String name) { super(property, name); } /*package*/ void configure(StaplerRequest req, JSONObject c) throws Descriptor.FormException, IOException { // apply configuration conditions.rebuild(req,c, PromotionCondition.all()); buildSteps = (List)Descriptor.newInstancesFromHeteroList( req, c, "buildStep", (List) PromotionProcess.getAll()); icon = c.getString("icon"); if (c.has("hasAssignedLabel")) { JSONObject j = c.getJSONObject("hasAssignedLabel"); assignedLabel = Util.fixEmptyAndTrim(j.getString("labelString")); } else { assignedLabel = null; } save(); } /** * Returns the root project value. * * @return the root project value. */ @Override public AbstractProject getRootProject() { return getParent().getOwner().getRootProject(); } @Override public JobPropertyImpl getParent() { return (JobPropertyImpl)super.getParent(); } /** * Gets the owner {@link AbstractProject} that configured {@link JobPropertyImpl} as * a job property. */ public AbstractProject<?,?> getOwner() { return getParent().getOwner(); } /** * Get the promotion condition by referencing it fully qualified class name */ public PromotionCondition getPromotionCondition(String promotionClassName) { for (PromotionCondition condition : conditions) { if (condition.getClass().getName().equals(promotionClassName)) { return condition; } } return null; } public DescribableList<Publisher, Descriptor<Publisher>> getPublishersList() { // TODO: extract from the buildsSteps field? Or should I separate builders and publishers? 
return new DescribableList<Publisher,Descriptor<Publisher>>(this); } protected Class<Promotion> getBuildClass() { return Promotion.class; } public List<BuildStep> getBuildSteps() { return buildSteps; } /** * Gets the textual representation of the assigned label as it was entered by the user. */ @Override public String getAssignedLabelString() { if (assignedLabel == null) return null; try { LabelExpression.parseExpression(assignedLabel); return assignedLabel; } catch (ANTLRException e) { // must be old label or host name that includes whitespace or other unsafe chars return LabelAtom.escape(assignedLabel); } } @Override public Label getAssignedLabel() { // Really would like to run on the exact node that the promoted build ran on, // not just the same label.. but at least this works if job is tied to one node: if (assignedLabel == null) return getOwner().getAssignedLabel(); return Hudson.getInstance().getLabel(assignedLabel); } @Override public JDK getJDK() { return getOwner().getJDK(); } /** * Gets the customWorkspace of the owner project. * * Support for FreeStyleProject only. * @return customWorkspace */ public String getCustomWorkspace() { AbstractProject<?, ?> p = getOwner(); if (p instanceof FreeStyleProject) return ((FreeStyleProject) p).getCustomWorkspace(); return null; } /** * Get the icon name, without the extension. It will always return a non null * and non empty string, as <code>"star-gold"</code> is used for compatibility * for older promotions configurations. * * @return the icon name */ public String getIcon() { return getIcon(icon); } /** * Handle compatibility with pre-1.8 configs. * * @param sIcon * the name of the icon used by this promotion; if null or empty, * we return the gold icon for compatibility with previous releases * @return the icon file name for this promotion */ private static String getIcon(String sIcon) { if ((sIcon == null) || sIcon.equals("")) return "star-gold"; else return sIcon; } /** * Get the badges of conditions that were passed for this promotion for the build */ public List<PromotionBadge> getMetQualifications(AbstractBuild<?,?> build) { List<PromotionBadge> badges = new ArrayList<PromotionBadge>(); for (PromotionCondition cond : conditions) { PromotionBadge b = cond.isMet(this, build); if (b != null) badges.add(b); } return badges; } /** * Get the conditions that have not been met for this promotion for the build */ public List<PromotionCondition> getUnmetConditions(AbstractBuild<?,?> build) { List<PromotionCondition> unmetConditions = new ArrayList<PromotionCondition>(); for (PromotionCondition cond : conditions) { if (cond.isMet(this, build) == null) unmetConditions.add(cond); } return unmetConditions; } /** * Checks if all the conditions to promote a build is met. * * @return * null if promotion conditions are not met. * otherwise returns a list of badges that record how the promotion happened. */ public Status isMet(AbstractBuild<?,?> build) { List<PromotionBadge> badges = new ArrayList<PromotionBadge>(); for (PromotionCondition cond : conditions) { PromotionBadge b = cond.isMet(this, build); if(b==null) return null; badges.add(b); } return new Status(this,badges); } /** * @deprecated * Use {@link #considerPromotion2(AbstractBuild)} */ public boolean considerPromotion(AbstractBuild<?,?> build) throws IOException { return considerPromotion2(build)!=null; } /** * Checks if the build is promotable, and if so, promote it. * * @return * null if the build was not promoted, otherwise Future that kicks in when the build is completed. 
*/ public Future<Promotion> considerPromotion2(AbstractBuild<?,?> build) throws IOException { PromotedBuildAction a = build.getAction(PromotedBuildAction.class); // if it's already promoted, no need to do anything. if(a!=null && a.contains(this)) return null; Status qualification = isMet(build); if(qualification==null) return null; // not this time return promote2(build, new LegacyCodeCause(), qualification); // TODO: define promotion cause } public void promote(AbstractBuild<?,?> build, Cause cause, PromotionBadge... badges) throws IOException { promote2(build,cause,new Status(this,Arrays.asList(badges))); } /** * @deprecated * Use {@link #promote2(AbstractBuild, Cause, Status)} */ public void promote(AbstractBuild<?,?> build, Cause cause, Status qualification) throws IOException { promote2(build,cause,qualification); } /** * Promote the given build by using the given qualification. * * @param cause * Why the build is promoted? */ public Future<Promotion> promote2(AbstractBuild<?,?> build, Cause cause, Status qualification) throws IOException { PromotedBuildAction a = build.getAction(PromotedBuildAction.class); // build is qualified for a promotion. if(a!=null) { a.add(qualification); } else { build.addAction(new PromotedBuildAction(build,qualification)); build.save(); } // schedule promotion activity. return scheduleBuild2(build,cause); } /** * @deprecated * You need to be using {@link #scheduleBuild(AbstractBuild)} */ public boolean scheduleBuild() { return super.scheduleBuild(); } public boolean scheduleBuild(AbstractBuild<?,?> build) { return scheduleBuild(build,new LegacyCodeCause()); } /** * @deprecated * Use {@link #scheduleBuild2(AbstractBuild, Cause)} */ public boolean scheduleBuild(AbstractBuild<?,?> build, Cause cause) { return scheduleBuild2(build,cause)!=null; } public Future<Promotion> scheduleBuild2(AbstractBuild<?,?> build, Cause cause) { assert build.getProject()==getOwner(); // Get the parameters, if any, used in the target build and make these // available as part of the promotion steps List<ParametersAction> parameters = build.getActions(ParametersAction.class); // Create list of actions to pass to scheduled build List<Action> actions = new ArrayList<Action>(); actions.addAll(parameters); actions.add(new PromotionTargetAction(build)); // remember what build we are promoting return super.scheduleBuild2(0,cause,actions.toArray(new Action[actions.size()])); } public boolean isInQueue(AbstractBuild<?,?> build) { for (Item item : Hudson.getInstance().getQueue().getItems(this)) if (item.getAction(PromotionTargetAction.class).resolve()==build) return true; return false; } // these are dummy implementations to implement abstract methods. // need to think about what the implications are. public boolean isFingerprintConfigured() { throw new UnsupportedOperationException(); } protected void buildDependencyGraph(DependencyGraph graph) { throw new UnsupportedOperationException(); } public static List<Descriptor<? extends BuildStep>> getAll() { List<Descriptor<? extends BuildStep>> list = new ArrayList<Descriptor<? extends BuildStep>>(); addTo(Builder.all(), list); addTo(Publisher.all(), list); return list; } private static void addTo(List<? extends Descriptor<? extends BuildStep>> source, List<Descriptor<? extends BuildStep>> list) { for (Descriptor<? 
extends BuildStep> d : source) { if (d instanceof BuildStepDescriptor) { BuildStepDescriptor bsd = (BuildStepDescriptor) d; if(bsd.isApplicable(PromotionProcess.class)) list.add(d); } } } public Permalink asPermalink() { return new Permalink() { @Override public String getDisplayName() { return Messages.PromotionProcess_PermalinkDisplayName(PromotionProcess.this.getDisplayName()); } @Override public String getId() { return PromotionProcess.this.getName(); } @Override public Run<?, ?> resolve(Job<?, ?> job) { String id = getId(); for( Run<?,?> build : job.getBuilds() ) { PromotedBuildAction a = build.getAction(PromotedBuildAction.class); if(a!=null && a.contains(id)) return build; } return null; } }; } }
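/*
 * Illustrative usage sketch, not part of the plugin source above. It assumes a configured
 * PromotionProcess and a completed build of its owner project are already in hand; how they
 * are obtained (e.g. via JobPropertyImpl) is outside this sketch.
 */
class PromotionProcessUsageSketch {
    java.util.concurrent.Future<hudson.plugins.promoted_builds.Promotion> checkAndPromote(
            hudson.plugins.promoted_builds.PromotionProcess process,
            hudson.model.AbstractBuild<?, ?> build) throws java.io.IOException {
        // considerPromotion2(build) returns null if the build is already promoted by this
        // process or its conditions are not yet met; otherwise it schedules the promotion
        // and returns a Future that completes when the Promotion build finishes.
        return process.considerPromotion2(build);
    }
}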
package io.kimia.prototype;

import java.lang.annotation.*;

/**
 * <p>
 * Marks a class as the prototype of another class that is auto-generated by the Prototype framework.
 * </p>
 *
 * <p>
 * The names of classes annotated with this annotation must start with an underscore character (_).
 * </p>
 *
 * <p>
 * The auto-generated classes are produced as follows:
 * </p>
 * <ol>
 * <li>They will be produced in the same package as the prototype.</li>
 * <li>They will be declared {@code public final}.</li>
 * <li>Their name will be the same as the prototype's without the leading underscore.</li>
 * <li>
 * They will have members of the same types and names as the prototype,
 * all {@code private final} except for those annotated with
 * {@link io.kimia.prototype.Mutable} (which are just private).
 * </li>
 * <li>
 * A constructor with all the immutable members as parameters will be created.
 * The order of the parameters will be the same as their appearance in the prototype.
 * </li>
 * <li>Getters (and setters for mutable fields) will be created.</li>
 * <li>{@code toString}, {@code equals} and {@code hashCode} will be created according to the members.</li>
 * <li>
 * All members that might be null will be validated as not null when assigned,
 * unless they are annotated with {@link io.kimia.prototype.Nullable}.
 * </li>
 * </ol>
 *
 * <p>
 * For example:
 * </p>
 * <pre>
 * package com.example;
 *
 * &#064;Prototype class _Pet {
 *     String name;
 *     &#064;Mutable int age;
 *     &#064;Nullable String voice;
 * }
 * </pre>
 *
 * <p>
 * Will produce:
 * </p>
 * <pre>
 * package com.example;
 *
 * public final class Pet {
 *     private final String name;
 *     private int age;
 *     private final String voice;
 *
 *     public Pet(String name, String voice) {...}
 *     public String getName() {...}
 *     public int getAge() {...}
 *     public void setAge(int age) {...}
 *     public String getVoice() {...}
 *     &#064;Override public int hashCode() {...}
 *     &#064;Override public boolean equals(Object o) {...}
 *     &#064;Override public String toString() {...}
 * }
 * </pre>
 *
 * @see io.kimia.prototype.Mutable
 * @see io.kimia.prototype.Nullable
 */
@Retention(RetentionPolicy.SOURCE)
@Target(ElementType.TYPE)
@Documented
public @interface Prototype {
}
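/*
 * Illustrative sketch, not part of the annotation source above: using the Pet class that the
 * Javadoc example says would be generated from _Pet. It assumes the Prototype processor has
 * run and produced com.example.Pet exactly as described (constructor over the immutable
 * members name and voice, a setter only for the @Mutable age field, @Nullable voice).
 */
class PrototypeUsageSketch {
    String describe() {
        // voice is @Nullable, so null is accepted; a null name would be rejected on assignment.
        com.example.Pet pet = new com.example.Pet("Rex", null);
        pet.setAge(3); // only the @Mutable member gets a setter
        return pet.getName() + " is " + pet.getAge() + " years old";
    }
}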
package ifc.i18n; import lib.MultiMethodTest; import com.sun.star.i18n.CalendarDisplayIndex; import com.sun.star.i18n.CalendarFieldIndex; import com.sun.star.i18n.CalendarItem; import com.sun.star.i18n.XCalendar; import com.sun.star.i18n.XLocaleData; import com.sun.star.lang.Locale; import com.sun.star.lang.XMultiServiceFactory; import com.sun.star.uno.UnoRuntime; /** * Testing <code>com.sun.star.i18n.XCalendar</code> * interface methods : * <ul> * <li><code> loadDefaultCalendar()</code></li> * <li><code> loadCalendar()</code></li> * <li><code> getLoadedCalendar()</code></li> * <li><code> getAllCalendars()</code></li> * <li><code> getUniqueID()</code></li> * <li><code> setDateTime()</code></li> * <li><code> getDateTime()</code></li> * <li><code> setValue()</code></li> * <li><code> getValue()</code></li> * <li><code> isValid()</code></li> * <li><code> addValue()</code></li> * <li><code> getFirstDayOfWeek()</code></li> * <li><code> setFirstDayOfWeek()</code></li> * <li><code> setMinimumNumberOfDaysForFirstWeek()</code></li> * <li><code> getMinimumNumberOfDaysForFirstWeek()</code></li> * <li><code> getNumberOfMonthsInYear()</code></li> * <li><code> getNumberOfDaysInWeek()</code></li> * <li><code> getMonths()</code></li> * <li><code> getDays()</code></li> * <li><code> getDisplayName()</code></li> * </ul> <p> * Test is <b> NOT </b> multithread compilant. <p> * @see com.sun.star.i18n.XCalendar */ public class _XCalendar extends MultiMethodTest { private boolean debug = false; public XCalendar oObj = null; public String[][] calendars; public int[] count; public double newDTime = 1000.75; public short newValue = 2; public short firstDay = 2; public short mdfw = 3; double aOriginalDTime = 0; Locale[] installed_locales; public void before() { XLocaleData locData = null; try { locData = (XLocaleData) UnoRuntime.queryInterface( XLocaleData.class, ((XMultiServiceFactory)tParam.getMSF()).createInstance( "com.sun.star.i18n.LocaleData")); } catch (com.sun.star.uno.Exception e) { } installed_locales = locData.getAllInstalledLocaleNames(); calendars = new String[installed_locales.length][]; count = new int[installed_locales.length]; oObj.loadDefaultCalendar(installed_locales[0]); aOriginalDTime = oObj.getDateTime(); debug = tParam.getBool("DebugIsActive"); } /** * Restore the changed time during the test to the original value of the * machine: has to be correct for the following interface tests. */ public void after() { oObj.loadDefaultCalendar(installed_locales[0]); oObj.setDateTime(aOriginalDTime); } /** * Loads default calendar for different locales. <p> * Has <b> OK </b> status if method loads calendar, that is * default for a given locale. */ public void _loadDefaultCalendar() { boolean res = true; for (int i=0; i<installed_locales.length; i++) { String lang = "Language: "+installed_locales[i].Language + ", Country: "+ installed_locales[i].Country + ", Variant: "+ installed_locales[i].Country; oObj.loadDefaultCalendar(installed_locales[i]); if (oObj.getLoadedCalendar().Default) { //log.println(lang + " ... OK"); } else { log.println(lang + " ... FAILED"); } res &= oObj.getLoadedCalendar().Default; } tRes.tested("loadDefaultCalendar()", res); } /** * Tries to obtain calendars for a number of locales. <p> * Has <b> OK </b> status if the method returns more than zero calendars for * every locale. 
*/ public void _getAllCalendars() { boolean res = true; for (int i=0; i<installed_locales.length; i++) { String lang = "Language: "+installed_locales[i].Language + ", Country: "+ installed_locales[i].Country + ", Variant: "+ installed_locales[i].Country; calendars[i] = oObj.getAllCalendars(installed_locales[i]); count[i] = calendars[i].length-1; if (calendars[i].length > 0) { //log.println(lang + " ... OK"); } else { log.println(lang + " ... FAILED"); } res &= (calendars[i].length > 0); } tRes.tested("getAllCalendars()", res); } /** * Loads calendars for a number of locales. <p> * Has <b> OK </b> status if loaded calendar names are equal to gotten * calendar names after loading.<p> * The following method tests are to be completed successfully before : * <ul> * <li> <code> getAllCalendars() </code> : gets all calendars for a given * locale </li> * </ul> */ public void _loadCalendar() { boolean res = true; requiredMethod("getAllCalendars()"); for (int i=0; i<installed_locales.length; i++) { String lang = "Language: "+installed_locales[i].Language + ", Country: "+ installed_locales[i].Country + ", Variant: "+ installed_locales[i].Country; oObj.loadCalendar(calendars[i][0], installed_locales[i]); if (calendars[i][0].equals(oObj.getLoadedCalendar().Name)) { //log.println(lang + " ... OK"); } else { log.println(lang + " ... FAILED"); } res &= calendars[i][0].equals(oObj.getLoadedCalendar().Name); } tRes.tested("loadCalendar()", res); } /** * Test calls the method, then result is checked. <p> * Has <b> OK </b> status if loaded calendar names are equal to gotten * calendar names after loading.<p> * The following method tests are to be completed successfully before : * <ul> * <li> <code> loadCalendar() </code> : loads calendar using a given name * and locale </li> * </ul> */ public void _getLoadedCalendar() { boolean res = true; requiredMethod("loadCalendar()"); for (int i=0; i<installed_locales.length; i++) { String lang = "Language: "+installed_locales[i].Language + ", Country: "+ installed_locales[i].Country + ", Variant: "+ installed_locales[i].Country; oObj.loadCalendar(calendars[i][0], installed_locales[i]); if (calendars[i][0].equals(oObj.getLoadedCalendar().Name)) { //log.println(lang + " ... OK"); } else { log.println(lang + " ... FAILED"); } res &= calendars[i][0].equals(oObj.getLoadedCalendar().Name); } tRes.tested("getLoadedCalendar()", res); } /** * Test calls the method, then result is checked. <p> * Has <b> OK </b> status if the method returns value that's equal to a * calendar name. <p> * The following method tests are to be completed successfully before : * <ul> * <li> <code> loadCalendar() </code> : loads calendar using a given name * and locale </li> * </ul> */ public void _getUniqueID() { boolean res = true; for (int i=0; i<installed_locales.length; i++) { String lang = "Language: "+installed_locales[i].Language + ", Country: "+ installed_locales[i].Country + ", Variant: "+ installed_locales[i].Country; oObj.loadCalendar(calendars[i][0], installed_locales[i]); String uID = oObj.getUniqueID(); if (uID.equals(calendars[i][0])) { //log.println(lang + " ... OK"); } else { log.println(lang + " ... FAILED"); } res &= uID.equals(calendars[i][0]); } tRes.tested("getUniqueID()",res); } /** * Test calls the method, then result is checked. <p> * Has <b> OK </b> status if the method returns value, that's equal to * value set before. 
<p> */ public void _setDateTime() { boolean res = true; for (int i=0; i<installed_locales.length; i++) { String lang = "Language: "+installed_locales[i].Language + ", Country: "+ installed_locales[i].Country + ", Variant: "+ installed_locales[i].Country; oObj.setDateTime(newDTime); double aDTime = oObj.getDateTime(); if (aDTime == newDTime) { //log.println(lang + " ... OK"); } else { log.println(lang + " ... FAILED"); } res &= (aDTime == newDTime); } tRes.tested("setDateTime()", res); } /** * Test calls the method, then result is checked. <p> * Has <b> OK </b> status if the method returns value, that's equal to * value set before. <p> */ public void _getDateTime() { boolean res = true; for (int i=0; i<installed_locales.length; i++) { String lang = "Language: "+installed_locales[i].Language + ", Country: "+ installed_locales[i].Country + ", Variant: "+ installed_locales[i].Country; oObj.setDateTime(newDTime); double aDTime = oObj.getDateTime(); if (aDTime == newDTime) { //log.println(lang + " ... OK"); } else { log.println(lang + " ... FAILED"); } res &= (aDTime == newDTime); } tRes.tested("getDateTime()", res); } /** * Test calls the method, then result is checked. <p> * Has <b> OK </b> status if the method returns value, that's equal to * value set before. <p> */ public void _setValue() { boolean res = true; for (int i=0; i<installed_locales.length; i++) { String error = ""; String lang = "Language: "+installed_locales[i].Language + ", Country: "+ installed_locales[i].Country + ", Variant: "+ installed_locales[i].Variant + ", Name: "+calendars[i][count[i]]; String[] names = new String[]{"DAY_OF_MONTH", "HOUR","MINUTE","SECOND","MILLISECOND", "YEAR","MONTH"}; oObj.loadCalendar(calendars[i][count[i]],installed_locales[i]); short[] fields = new short[]{CalendarFieldIndex.DAY_OF_MONTH, CalendarFieldIndex.HOUR, CalendarFieldIndex.MINUTE, CalendarFieldIndex.SECOND, CalendarFieldIndex.MILLISECOND, CalendarFieldIndex.YEAR, CalendarFieldIndex.MONTH }; for (int k=0; k<fields.length;k++) { oObj.setDateTime(0.0); // save the current values for debug purposes short[] oldValues = new short[fields.length]; for (int n=0; n < oldValues.length; n++){ oldValues[n] = oObj.getValue(fields[n]); } short set = oObj.getValue(fields[k]); if (fields[k] == CalendarFieldIndex.MONTH) set = newValue; oObj.setValue(fields[k],set); short get = oObj.getValue(fields[k]); if (get != set) { if (debug) log.println("ERROR occure: tried to set " + names[k] + " to value " + set); log.println("list of values BEFORE set " + names[k] + " to value " + set + ":"); for (int n=0; n < oldValues.length; n++){ log.println(names[n] + ":" + oldValues[n]); } log.println("list of values AFTER set " + names[k] + " to value " + set + ":"); for (int n=0; n < fields.length;n++){ log.println(names[n] + ":" + oObj.getValue(fields[n])); } error += "failed for "+names[k]+" expected "+ set+" gained "+get+" ; \n"; } } if (error.equals("")) { log.println(lang + " ... OK"); } else { log.println("*** "+lang + " ... FAILED ***"); log.println(error); } res &= (error.equals("")); } tRes.tested("setValue()", res); } /** * Test calls the method, then result is checked. <p> * Has <b> OK </b> status if the method returns value, that's equal to * value set before. 
<p> */ public void _getValue() { boolean res = true; requiredMethod("setValue()"); short aValue = oObj.getValue(CalendarFieldIndex.MONTH); res &= (aValue == newValue); if (!res){ log.println("the returned value is not the expected value:"); log.println("expexted: " + newValue + " returned value: " + aValue); } tRes.tested("getValue()", res); } /** * Test calls the method, then result is checked. <p> * Has <b> OK </b> status if value, added by the method is greater than * previously defined "newValue". * <p> * The following method tests are to be completed successfully before : * <ul> * <li> <code> getValue() </code> : gets the value of a field </li> * </ul> */ public void _addValue() { boolean res = true; requiredMethod("getValue()"); oObj.addValue(CalendarFieldIndex.MONTH, 1); short aValue = oObj.getValue(CalendarFieldIndex.MONTH); res &= (aValue > newValue); if (!res){ log.println("the returned value is not the expected value:"); log.println("expexted: " + newValue + " returned value: " + aValue); } tRes.tested("addValue()", res); } /** * Test calls the method. <p> * Has <b> OK </b> status if the method successfully returns * and no exceptions were thrown. */ public void _setFirstDayOfWeek() { boolean res = true; oObj.setFirstDayOfWeek(firstDay); res &= true; tRes.tested("setFirstDayOfWeek()", res); } /** * Test calls the method, then result is checked. <p> * Has <b> OK </b> status if the method returns value that is equal to * value set before. <p> * The following method tests are to be completed successfully before : * <ul> * <li> <code> setFirstDayOfWeek() </code> : set the first day of a * week</li> * </ul> */ public void _getFirstDayOfWeek() { boolean res = true; requiredMethod("setFirstDayOfWeek()"); short aFirstDayOfWeek = oObj.getFirstDayOfWeek(); res &= (aFirstDayOfWeek == firstDay); tRes.tested("getFirstDayOfWeek()", res); } /** * Test calls the method. <p> * Has <b> OK </b> status if the method successfully returns * and no exceptions were thrown. */ public void _setMinimumNumberOfDaysForFirstWeek() { boolean res = true; oObj.setMinimumNumberOfDaysForFirstWeek(mdfw); res &= true; tRes.tested("setMinimumNumberOfDaysForFirstWeek()", res); } /** * Test calls the method, then result is checked. <p> * Has <b> OK </b> status if the method returns value that is equal to * value set before. <p> * The following method tests are to be completed successfully before : * <ul> * <li> <code> setMinimumNumberOfDaysForFirstWeek() </code> : sets how * many days of a week must reside in the first week of a year</li> * </ul> */ public void _getMinimumNumberOfDaysForFirstWeek() { boolean res = true; requiredMethod("setMinimumNumberOfDaysForFirstWeek()"); short aShort = oObj.getMinimumNumberOfDaysForFirstWeek(); res &= (aShort == mdfw); tRes.tested("getMinimumNumberOfDaysForFirstWeek()", res); } /** * Test calls the method, then result is checked. <p> * Has <b> OK </b> status if the method returns 12. */ public void _getNumberOfMonthsInYear() { boolean res = true; short aShort = oObj.getNumberOfMonthsInYear(); res &= (aShort == (short) 12); tRes.tested("getNumberOfMonthsInYear()", res); } /** * Test calls the method, then result is checked. <p> * Has <b> OK </b> status if the method returns 7. */ public void _getNumberOfDaysInWeek() { boolean res = true; short aShort = oObj.getNumberOfDaysInWeek(); res &= (aShort == (short) 7); tRes.tested("getNumberOfDaysInWeek()", res); } /** * Test calls the method, then result is checked. 
<p> * Has <b> OK </b> status if length of array, returned by the method is 12. */ public void _getMonths() { boolean res = true; CalendarItem[] months = oObj.getMonths(); res &= (months.length == 12); tRes.tested("getMonths()", res); } /** * Test calls the method, then result is checked. <p> * Has <b> OK </b> status if length of array, returned by the method is 7. */ public void _getDays() { boolean res = true; CalendarItem[] Days = oObj.getDays(); res &= (Days.length == 7); tRes.tested("getDays()", res); } /** * After loading calendar, test calls the method, then result is checked.<p> * Has <b> OK </b> status if length of string, returned by the method is 3. */ public void _getDisplayName() { boolean res = true; oObj.loadCalendar(calendars[0][0],installed_locales[0]); String DisplayName = oObj.getDisplayName(CalendarDisplayIndex.MONTH, newValue, (short) 0); res &= (DisplayName.length() == 3); tRes.tested("getDisplayName()", res); } /** * The test sets obviously wrong value, then calls a method. After that the * test sets correct value, and again calls a method. <p> * Has <b> OK </b> status if the method returns true when valid month is * set, and if the method returns false when set month is not valid. */ public void _isValid() { boolean res = true; oObj.loadDefaultCalendar(installed_locales[0]); oObj.setValue(CalendarFieldIndex.MONTH, (short) 37); res &= !oObj.isValid(); oObj.setValue(CalendarFieldIndex.MONTH, (short) 10); res &= oObj.isValid(); tRes.tested("isValid()", res); } /** * Method returns locale for a given language and country. * @param localeIndex index of needed locale. */ /* public Locale getLocale(int localeIndex) { return new Locale(languages[localeIndex], countries[localeIndex], ""); }*/ }
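/*
 * Illustrative sketch, not part of the test source above: the call pattern that
 * _getDisplayName() exercises for reading a month name from an already loaded
 * com.sun.star.i18n.XCalendar (the calendar/locale setup done in before() is assumed).
 */
class XCalendarDisplayNameSketch {
    String monthName(com.sun.star.i18n.XCalendar calendar, short monthIndex) {
        // Same arguments as in the test: display index MONTH, the month value, and
        // name type 0, for which the test above expects a three-character result.
        return calendar.getDisplayName(com.sun.star.i18n.CalendarDisplayIndex.MONTH,
                                       monthIndex, (short) 0);
    }
}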
package org.spine3.examples.todolist.c.aggregate; import com.google.protobuf.Message; import com.google.protobuf.Timestamp; import org.spine3.base.CommandContext; import org.spine3.change.StringChange; import org.spine3.change.TimestampChange; import org.spine3.change.ValueMismatch; import org.spine3.examples.todolist.LabelId; import org.spine3.examples.todolist.PriorityChange; import org.spine3.examples.todolist.TaskDefinition; import org.spine3.examples.todolist.TaskDetails; import org.spine3.examples.todolist.TaskId; import org.spine3.examples.todolist.TaskLabels; import org.spine3.examples.todolist.TaskPriority; import org.spine3.examples.todolist.TaskStatus; import org.spine3.examples.todolist.c.commands.CompleteTask; import org.spine3.examples.todolist.c.commands.CreateBasicTask; import org.spine3.examples.todolist.c.commands.CreateDraft; import org.spine3.examples.todolist.c.commands.DeleteTask; import org.spine3.examples.todolist.c.commands.FinalizeDraft; import org.spine3.examples.todolist.c.commands.ReopenTask; import org.spine3.examples.todolist.c.commands.RestoreDeletedTask; import org.spine3.examples.todolist.c.commands.UpdateTaskDescription; import org.spine3.examples.todolist.c.commands.UpdateTaskDueDate; import org.spine3.examples.todolist.c.commands.UpdateTaskPriority; import org.spine3.examples.todolist.c.events.DeletedTaskRestored; import org.spine3.examples.todolist.c.events.LabelledTaskRestored; import org.spine3.examples.todolist.c.events.TaskCompleted; import org.spine3.examples.todolist.c.events.TaskCreated; import org.spine3.examples.todolist.c.events.TaskDeleted; import org.spine3.examples.todolist.c.events.TaskDescriptionUpdated; import org.spine3.examples.todolist.c.events.TaskDraftCreated; import org.spine3.examples.todolist.c.events.TaskDraftFinalized; import org.spine3.examples.todolist.c.events.TaskDueDateUpdated; import org.spine3.examples.todolist.c.events.TaskPriorityUpdated; import org.spine3.examples.todolist.c.events.TaskReopened; import org.spine3.examples.todolist.c.failures.CannotCompleteTask; import org.spine3.examples.todolist.c.failures.CannotCreateDraft; import org.spine3.examples.todolist.c.failures.CannotCreateTaskWithInappropriateDescription; import org.spine3.examples.todolist.c.failures.CannotDeleteTask; import org.spine3.examples.todolist.c.failures.CannotFinalizeDraft; import org.spine3.examples.todolist.c.failures.CannotReopenTask; import org.spine3.examples.todolist.c.failures.CannotRestoreDeletedTask; import org.spine3.examples.todolist.c.failures.CannotUpdateTaskDescription; import org.spine3.examples.todolist.c.failures.CannotUpdateTaskDueDate; import org.spine3.examples.todolist.c.failures.CannotUpdateTaskPriority; import org.spine3.examples.todolist.c.failures.CannotUpdateTaskWithInappropriateDescription; import org.spine3.server.aggregate.AggregatePart; import org.spine3.server.aggregate.Apply; import org.spine3.server.command.Assign; import java.util.Collections; import java.util.List; import static com.google.common.collect.Lists.newLinkedList; import static org.spine3.examples.todolist.c.aggregate.MismatchHelper.of; import static org.spine3.examples.todolist.c.aggregate.TaskFlowValidator.ensureCompleted; import static org.spine3.examples.todolist.c.aggregate.TaskFlowValidator.ensureDeleted; import static org.spine3.examples.todolist.c.aggregate.TaskFlowValidator.ensureNeitherCompletedNorDeleted; import static org.spine3.examples.todolist.c.aggregate.TaskFlowValidator.isValidCreateDraftCommand; import static 
org.spine3.examples.todolist.c.aggregate.TaskFlowValidator.isValidTransition; import static org.spine3.examples.todolist.c.aggregate.TaskFlowValidator.isValidUpdateTaskDueDateCommand; import static org.spine3.examples.todolist.c.aggregate.TaskFlowValidator.isValidUpdateTaskPriorityCommand; import static org.spine3.examples.todolist.c.aggregate.failures.TaskDefinitionPartFailures.ChangeStatusFailures.throwCannotCompleteTask; import static org.spine3.examples.todolist.c.aggregate.failures.TaskDefinitionPartFailures.ChangeStatusFailures.throwCannotDeleteTask; import static org.spine3.examples.todolist.c.aggregate.failures.TaskDefinitionPartFailures.ChangeStatusFailures.throwCannotFinalizeDraft; import static org.spine3.examples.todolist.c.aggregate.failures.TaskDefinitionPartFailures.ChangeStatusFailures.throwCannotReopenTask; import static org.spine3.examples.todolist.c.aggregate.failures.TaskDefinitionPartFailures.ChangeStatusFailures.throwCannotRestoreDeletedTask; import static org.spine3.examples.todolist.c.aggregate.failures.TaskDefinitionPartFailures.TaskCreationFailures.throwCannotCreateDraftFailure; import static org.spine3.examples.todolist.c.aggregate.failures.TaskDefinitionPartFailures.TaskCreationFailures.throwCannotCreateTaskWithInappropriateDescriptionFailure; import static org.spine3.examples.todolist.c.aggregate.failures.TaskDefinitionPartFailures.UpdateFailures.throwCannotUpdateDescription; import static org.spine3.examples.todolist.c.aggregate.failures.TaskDefinitionPartFailures.UpdateFailures.throwCannotUpdateTaskDescription; import static org.spine3.examples.todolist.c.aggregate.failures.TaskDefinitionPartFailures.UpdateFailures.throwCannotUpdateTaskDueDate; import static org.spine3.examples.todolist.c.aggregate.failures.TaskDefinitionPartFailures.UpdateFailures.throwCannotUpdateTaskPriority; import static org.spine3.examples.todolist.c.aggregate.failures.TaskDefinitionPartFailures.UpdateFailures.throwCannotUpdateTooShortDescription; import static org.spine3.time.Time.getCurrentTime; import static org.spine3.time.Timestamps2.compare; /** * The aggregate managing the state of a {@link TaskDefinition}. * * @author Illia Shepilov */ @SuppressWarnings({"ClassWithTooManyMethods", /* Task definition cannot be separated and should process all commands and events related to it according to the domain model. The {@code AggregatePart} does it with methods annotated as {@code Assign} and {@code Apply}. In that case class has too many methods.*/ "OverlyCoupledClass"}) /* As each method needs dependencies necessary to perform execution that class also overly coupled.*/ public class TaskDefinitionPart extends AggregatePart<TaskId, TaskDefinition, TaskDefinition.Builder, TaskAggregateRoot> { private static final int MIN_DESCRIPTION_LENGTH = 3; private static final String DEFAULT_DRAFT_DESCRIPTION = "Task description goes here."; /** * {@inheritDoc} * * @param root */ public TaskDefinitionPart(TaskAggregateRoot root) { super(root); } @Assign List<? extends Message> handle(CreateBasicTask cmd, CommandContext ctx) throws CannotCreateTaskWithInappropriateDescription { validateCommand(cmd, ctx); final TaskId taskId = cmd.getId(); final TaskDetails.Builder taskDetails = TaskDetails.newBuilder() .setDescription(cmd.getDescription()); final TaskCreated result = TaskCreated.newBuilder() .setId(taskId) .setDetails(taskDetails) .build(); return Collections.singletonList(result); } @Assign List<? 
extends Message> handle(UpdateTaskDescription cmd, CommandContext ctx) throws CannotUpdateTaskDescription, CannotUpdateTaskWithInappropriateDescription { validateCommand(cmd, ctx); final TaskDefinition state = getState(); final StringChange change = cmd.getDescriptionChange(); final String actualDescription = state.getDescription(); final String expectedDescription = change.getPreviousValue(); final boolean isEquals = actualDescription.equals(expectedDescription); final TaskId taskId = cmd.getId(); if (!isEquals) { final String newDescription = change.getNewValue(); final ValueMismatch mismatch = of(expectedDescription, actualDescription, newDescription, getVersion()); throwCannotUpdateDescription(cmd, ctx, mismatch); } final TaskDescriptionUpdated taskDescriptionUpdated = TaskDescriptionUpdated.newBuilder() .setTaskId(taskId) .setDescriptionChange(change) .build(); final List<? extends Message> result = Collections.singletonList(taskDescriptionUpdated); return result; } @Assign List<? extends Message> handle(UpdateTaskDueDate cmd, CommandContext ctx) throws CannotUpdateTaskDueDate { final TaskDefinition state = getState(); final TaskStatus taskStatus = state.getTaskStatus(); final boolean isValid = isValidUpdateTaskDueDateCommand(taskStatus); final TaskId taskId = cmd.getId(); if (!isValid) { throwCannotUpdateTaskDueDate(cmd, ctx); } final TimestampChange change = cmd.getDueDateChange(); final Timestamp actualDueDate = state.getDueDate(); final Timestamp expectedDueDate = change.getPreviousValue(); final boolean isEquals = compare(actualDueDate, expectedDueDate) == 0; if (!isEquals) { final Timestamp newDueDate = change.getNewValue(); final ValueMismatch mismatch = of(expectedDueDate, actualDueDate, newDueDate, getVersion()); throwCannotUpdateTaskDueDate(cmd, ctx, mismatch); } final TaskDueDateUpdated taskDueDateUpdated = TaskDueDateUpdated.newBuilder() .setTaskId(taskId) .setDueDateChange(cmd.getDueDateChange()) .build(); final List<? extends Message> result = Collections.singletonList(taskDueDateUpdated); return result; } @Assign List<? extends Message> handle(UpdateTaskPriority cmd, CommandContext ctx) throws CannotUpdateTaskPriority { final TaskDefinition state = getState(); final TaskStatus taskStatus = state.getTaskStatus(); final boolean isValid = isValidUpdateTaskPriorityCommand(taskStatus); final TaskId taskId = cmd.getId(); if (!isValid) { throwCannotUpdateTaskPriority(cmd, ctx); } final PriorityChange priorityChange = cmd.getPriorityChange(); final TaskPriority actualPriority = state.getPriority(); final TaskPriority expectedPriority = priorityChange.getPreviousValue(); boolean isEquals = actualPriority.equals(expectedPriority); if (!isEquals) { final TaskPriority newPriority = priorityChange.getNewValue(); final ValueMismatch mismatch = of(expectedPriority, actualPriority, newPriority, getVersion()); throwCannotUpdateTaskPriority(cmd, ctx, mismatch); } final TaskPriorityUpdated taskPriorityUpdated = TaskPriorityUpdated.newBuilder() .setTaskId(taskId) .setPriorityChange( priorityChange) .build(); final List<? extends Message> result = Collections.singletonList(taskPriorityUpdated); return result; } @Assign List<? 
extends Message> handle(ReopenTask cmd, CommandContext ctx) throws CannotReopenTask { final TaskDefinition state = getState(); final TaskStatus currentStatus = state.getTaskStatus(); final boolean isValid = ensureCompleted(currentStatus); final TaskId taskId = cmd.getId(); if (!isValid) { throwCannotReopenTask(cmd, ctx); } final TaskReopened taskReopened = TaskReopened.newBuilder() .setTaskId(taskId) .build(); final List<TaskReopened> result = Collections.singletonList(taskReopened); return result; } @Assign List<? extends Message> handle(DeleteTask cmd, CommandContext ctx) throws CannotDeleteTask { final TaskDefinition state = getState(); final TaskStatus currentStatus = state.getTaskStatus(); final TaskStatus newStatus = TaskStatus.DELETED; final TaskId taskId = cmd.getId(); final boolean isValid = isValidTransition(currentStatus, newStatus); if (!isValid) { throwCannotDeleteTask(cmd, ctx); } final TaskDeleted taskDeleted = TaskDeleted.newBuilder() .setTaskId(taskId) .build(); final List<TaskDeleted> result = Collections.singletonList(taskDeleted); return result; } @Assign List<? extends Message> handle(CompleteTask cmd, CommandContext ctx) throws CannotCompleteTask { final TaskDefinition state = getState(); final TaskStatus currentStatus = state.getTaskStatus(); final TaskStatus newStatus = TaskStatus.COMPLETED; final TaskId taskId = cmd.getId(); final boolean isValid = isValidTransition(currentStatus, newStatus); if (!isValid) { throwCannotCompleteTask(cmd, ctx); } final TaskCompleted taskCompleted = TaskCompleted.newBuilder() .setTaskId(taskId) .build(); final List<TaskCompleted> result = Collections.singletonList(taskCompleted); return result; } @Assign List<? extends Message> handle(CreateDraft cmd, CommandContext ctx) throws CannotCreateDraft { final TaskId taskId = cmd.getId(); final boolean isValid = isValidCreateDraftCommand(getState().getTaskStatus()); if (!isValid) { throwCannotCreateDraftFailure(cmd, ctx); } final TaskDraftCreated draftCreated = TaskDraftCreated.newBuilder() .setId(taskId) .setDraftCreationTime(getCurrentTime()) .setDetails(TaskDetails.newBuilder() .setDescription(DEFAULT_DRAFT_DESCRIPTION)) .build(); final List<TaskDraftCreated> result = Collections.singletonList(draftCreated); return result; } @Assign List<? extends Message> handle(FinalizeDraft cmd, CommandContext ctx) throws CannotFinalizeDraft { final TaskStatus currentStatus = getState().getTaskStatus(); final TaskStatus newStatus = TaskStatus.FINALIZED; final TaskId taskId = cmd.getId(); final boolean isValid = isValidTransition(currentStatus, newStatus); if (!isValid) { throwCannotFinalizeDraft(cmd, ctx); } final TaskDraftFinalized taskDraftFinalized = TaskDraftFinalized.newBuilder() .setTaskId(taskId) .build(); final List<TaskDraftFinalized> result = Collections.singletonList(taskDraftFinalized); return result; } @Assign List<? 
extends Message> handle(RestoreDeletedTask cmd, CommandContext ctx) throws CannotRestoreDeletedTask { final TaskStatus currentStatus = getState().getTaskStatus(); final TaskId taskId = cmd.getId(); final boolean isValid = ensureDeleted(currentStatus); if (!isValid) { throwCannotRestoreDeletedTask(cmd, ctx); } final DeletedTaskRestored deletedTaskRestored = DeletedTaskRestored.newBuilder() .setTaskId(taskId) .build(); final List<Message> result = newLinkedList(); result.add(deletedTaskRestored); final TaskLabels taskLabels = getPartState(TaskLabels.class); final List<LabelId> labelIdsList = taskLabels.getLabelIdsList() .getIdsList(); for (LabelId labelId : labelIdsList) { final LabelledTaskRestored labelledTaskRestored = LabelledTaskRestored.newBuilder() .setTaskId(taskId) .setLabelId(labelId) .build(); result.add(labelledTaskRestored); } return result; } @Apply private void taskCreated(TaskCreated event) { final TaskDetails taskDetails = event.getDetails(); getBuilder().setId(event.getId()) .setCreated(getCurrentTime()) .setDescription(taskDetails.getDescription()) .setPriority(taskDetails.getPriority()) .setTaskStatus(TaskStatus.FINALIZED); } @Apply private void taskDescriptionUpdated(TaskDescriptionUpdated event) { final String newDescription = event.getDescriptionChange() .getNewValue(); getBuilder().setDescription(newDescription); } @Apply private void taskDueDateUpdated(TaskDueDateUpdated event) { final Timestamp newDueDate = event.getDueDateChange() .getNewValue(); getBuilder().setDueDate(newDueDate); } @Apply private void taskPriorityUpdated(TaskPriorityUpdated event) { final TaskPriority newPriority = event.getPriorityChange() .getNewValue(); getBuilder().setPriority(newPriority); } @Apply private void taskReopened(TaskReopened event) { getBuilder().setTaskStatus(TaskStatus.OPEN); } @Apply private void taskDeleted(TaskDeleted event) { getBuilder().setTaskStatus(TaskStatus.DELETED); } @Apply private void deletedTaskRestored(DeletedTaskRestored event) { getBuilder().setTaskStatus(TaskStatus.OPEN); } @Apply private void labelledTaskRestored(LabelledTaskRestored event) { getBuilder().setTaskStatus(TaskStatus.OPEN); } @Apply private void taskCompleted(TaskCompleted event) { getBuilder().setTaskStatus(TaskStatus.COMPLETED); } @Apply private void taskDraftFinalized(TaskDraftFinalized event) { getBuilder().setTaskStatus(TaskStatus.FINALIZED); } @Apply private void draftCreated(TaskDraftCreated event) { getBuilder().setId(event.getId()) .setCreated(event.getDraftCreationTime()) .setDescription(event.getDetails() .getDescription()) .setTaskStatus(TaskStatus.DRAFT); } private static void validateCommand(CreateBasicTask cmd, CommandContext ctx) throws CannotCreateTaskWithInappropriateDescription { final String description = cmd.getDescription(); if (description != null && description.length() < MIN_DESCRIPTION_LENGTH) { throwCannotCreateTaskWithInappropriateDescriptionFailure(cmd, ctx); } } private void validateCommand(UpdateTaskDescription cmd, CommandContext ctx) throws CannotUpdateTaskDescription, CannotUpdateTaskWithInappropriateDescription { final String description = cmd.getDescriptionChange() .getNewValue(); if (description != null && description.length() < MIN_DESCRIPTION_LENGTH) { throwCannotUpdateTooShortDescription(cmd, ctx); } boolean isValid = ensureNeitherCompletedNorDeleted(getState().getTaskStatus()); if (!isValid) { throwCannotUpdateTaskDescription(cmd, ctx); } } }
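/*
 * Illustrative sketch, not part of the aggregate source above: building the CreateBasicTask
 * command that the first handler accepts. The builder calls mirror the getters the handler
 * uses (cmd.getId(), cmd.getDescription()); how a TaskId is created and how the command is
 * posted to the bounded context are assumptions left outside this sketch.
 */
class CreateBasicTaskSketch {
    org.spine3.examples.todolist.c.commands.CreateBasicTask newCommand(
            org.spine3.examples.todolist.TaskId taskId, String description) {
        // Descriptions shorter than MIN_DESCRIPTION_LENGTH (3) are rejected by the handler
        // with CannotCreateTaskWithInappropriateDescription.
        return org.spine3.examples.todolist.c.commands.CreateBasicTask.newBuilder()
                                                                      .setId(taskId)
                                                                      .setDescription(description)
                                                                      .build();
    }
}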
package hudson.plugins.promoted_builds; import antlr.ANTLRException; import hudson.Util; import hudson.model.AbstractBuild; import hudson.model.AbstractProject; import hudson.model.Action; import hudson.model.Cause; import hudson.model.Cause.LegacyCodeCause; import hudson.model.DependencyGraph; import hudson.model.Descriptor; import hudson.model.FreeStyleProject; import hudson.model.Hudson; import hudson.model.JDK; import hudson.model.Job; import hudson.model.Label; import hudson.model.ParametersAction; import hudson.model.PermalinkProjectAction.Permalink; import hudson.model.Queue.Item; import hudson.model.Run; import hudson.model.Saveable; import hudson.model.labels.LabelAtom; import hudson.model.labels.LabelExpression; import hudson.tasks.BuildStep; import hudson.tasks.BuildStepDescriptor; import hudson.tasks.Builder; import hudson.tasks.Publisher; import hudson.util.DescribableList; import net.sf.json.JSONObject; import org.kohsuke.stapler.StaplerRequest; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; /** * A dummy {@link AbstractProject} to carry out promotion operations. * * @author Kohsuke Kawaguchi */ public final class PromotionProcess extends AbstractProject<PromotionProcess,Promotion> implements Saveable { /** * {@link PromotionCondition}s. All have to be met for a build to be promoted. */ public final DescribableList<PromotionCondition,PromotionConditionDescriptor> conditions = new DescribableList<PromotionCondition, PromotionConditionDescriptor>(this); /** * The icon that represents this promotion process. This is the name of * the GIF icon that can be found in ${rootURL}/plugin/promoted-builds/icons/16x16/ * and ${rootURL}/plugin/promoted-builds/icons/32x32/, e.g. <code>"star-gold"</code>. */ public String icon; /** * The label that promotion process can be run on. */ public String assignedLabel; private List<BuildStep> buildSteps = new ArrayList<BuildStep>(); /*package*/ PromotionProcess(JobPropertyImpl property, String name) { super(property, name); } /*package*/ void configure(StaplerRequest req, JSONObject c) throws Descriptor.FormException, IOException { // apply configuration conditions.rebuild(req,c, PromotionCondition.all()); buildSteps = (List)Descriptor.newInstancesFromHeteroList( req, c, "buildStep", (List) PromotionProcess.getAll()); icon = c.getString("icon"); if (c.has("hasAssignedLabel")) { JSONObject j = c.getJSONObject("hasAssignedLabel"); assignedLabel = Util.fixEmptyAndTrim(j.getString("labelString")); } else { assignedLabel = null; } save(); } /** * Returns the root project value. * * @return the root project value. */ @Override public AbstractProject getRootProject() { return getParent().getOwner().getRootProject(); } @Override public JobPropertyImpl getParent() { return (JobPropertyImpl)super.getParent(); } /** * Gets the owner {@link AbstractProject} that configured {@link JobPropertyImpl} as * a job property. */ public AbstractProject<?,?> getOwner() { return getParent().getOwner(); } /** * Get the promotion condition by referencing it fully qualified class name */ public PromotionCondition getPromotionCondition(String promotionClassName) { for (PromotionCondition condition : conditions) { if (condition.getClass().getName().equals(promotionClassName)) { return condition; } } return null; } public DescribableList<Publisher, Descriptor<Publisher>> getPublishersList() { // TODO: extract from the buildsSteps field? Or should I separate builders and publishers? 
return new DescribableList<Publisher,Descriptor<Publisher>>(this); } protected Class<Promotion> getBuildClass() { return Promotion.class; } public List<BuildStep> getBuildSteps() { return buildSteps; } /** * Gets the textual representation of the assigned label as it was entered by the user. */ @Override public String getAssignedLabelString() { if (assignedLabel == null) return null; try { LabelExpression.parseExpression(assignedLabel); return assignedLabel; } catch (ANTLRException e) { // must be old label or host name that includes whitespace or other unsafe chars return LabelAtom.escape(assignedLabel); } } @Override public Label getAssignedLabel() { // Really would like to run on the exact node that the promoted build ran on, // not just the same label.. but at least this works if job is tied to one node: if (assignedLabel == null) return getOwner().getAssignedLabel(); return Hudson.getInstance().getLabel(assignedLabel); } @Override public JDK getJDK() { return getOwner().getJDK(); } /** * Gets the customWorkspace of the owner project. * * Support for FreeStyleProject only. * @return customWorkspace */ public String getCustomWorkspace() { AbstractProject<?, ?> p = getOwner(); if (p instanceof FreeStyleProject) return ((FreeStyleProject) p).getCustomWorkspace(); return null; } /** * Get the icon name, without the extension. It will always return a non null * and non empty string, as <code>"star-gold"</code> is used for compatibility * for older promotions configurations. * * @return the icon name */ public String getIcon() { return getIcon(icon); } /** * Handle compatibility with pre-1.8 configs. * * @param sIcon * the name of the icon used by this promotion; if null or empty, * we return the gold icon for compatibility with previous releases * @return the icon file name for this promotion */ private static String getIcon(String sIcon) { if ((sIcon == null) || sIcon.equals("")) return "star-gold"; else return sIcon; } /** * Get the badges of conditions that were passed for this promotion for the build */ public List<PromotionBadge> getMetQualifications(AbstractBuild<?,?> build) { List<PromotionBadge> badges = new ArrayList<PromotionBadge>(); for (PromotionCondition cond : conditions) { PromotionBadge b = cond.isMet(this, build); if (b != null) badges.add(b); } return badges; } /** * Get the conditions that have not been met for this promotion for the build */ public List<PromotionCondition> getUnmetConditions(AbstractBuild<?,?> build) { List<PromotionCondition> unmetConditions = new ArrayList<PromotionCondition>(); for (PromotionCondition cond : conditions) { if (cond.isMet(this, build) == null) unmetConditions.add(cond); } return unmetConditions; } /** * Checks if all the conditions to promote a build is met. * * @return * null if promotion conditions are not met. * otherwise returns a list of badges that record how the promotion happened. */ public Status isMet(AbstractBuild<?,?> build) { List<PromotionBadge> badges = new ArrayList<PromotionBadge>(); for (PromotionCondition cond : conditions) { PromotionBadge b = cond.isMet(this, build); if(b==null) return null; badges.add(b); } return new Status(this,badges); } /** * Checks if the build is promotable, and if so, promote it. * * @return * true if the build was promoted. */ public boolean considerPromotion(AbstractBuild<?,?> build) throws IOException { if (isDisabled()) return false; PromotedBuildAction a = build.getAction(PromotedBuildAction.class); // if it's already promoted, no need to do anything. 
if(a!=null && a.contains(this)) return false; Status qualification = isMet(build); if(qualification==null) return false; // not this time promote(build,new LegacyCodeCause(),qualification); // TODO: define promotion cause return true; } public void promote(AbstractBuild<?,?> build, Cause cause, PromotionBadge... badges) throws IOException { promote(build,cause,new Status(this,Arrays.asList(badges))); } /** * Promote the given build by using the given qualification. * * @param cause * Why the build is promoted? */ public void promote(AbstractBuild<?,?> build, Cause cause, Status qualification) throws IOException { PromotedBuildAction a = build.getAction(PromotedBuildAction.class); // build is qualified for a promotion. if(a!=null) { a.add(qualification); } else { build.addAction(new PromotedBuildAction(build,qualification)); build.save(); } // schedule promotion activity. scheduleBuild(build,cause); } /** * @deprecated * You need to be using {@link #scheduleBuild(AbstractBuild)} */ public boolean scheduleBuild() { return super.scheduleBuild(); } public boolean scheduleBuild(AbstractBuild<?,?> build) { return scheduleBuild(build,new LegacyCodeCause()); } public boolean scheduleBuild(AbstractBuild<?,?> build, Cause cause) { assert build.getProject()==getOwner(); // Get the parameters, if any, used in the target build and make these // available as part of the promotion steps List<ParametersAction> parameters = build.getActions(ParametersAction.class); // Create list of actions to pass to scheduled build List<Action> actions = new ArrayList<Action>(); actions.addAll(parameters); actions.add(new PromotionTargetAction(build)); // remember what build we are promoting return super.scheduleBuild(0,cause,actions.toArray(new Action[actions.size()])); } public boolean isInQueue(AbstractBuild<?,?> build) { for (Item item : Hudson.getInstance().getQueue().getItems(this)) if (item.getAction(PromotionTargetAction.class).resolve()==build) return true; return false; } // these are dummy implementations to implement abstract methods. // need to think about what the implications are. public boolean isFingerprintConfigured() { throw new UnsupportedOperationException(); } protected void buildDependencyGraph(DependencyGraph graph) { throw new UnsupportedOperationException(); } public static List<Descriptor<? extends BuildStep>> getAll() { List<Descriptor<? extends BuildStep>> list = new ArrayList<Descriptor<? extends BuildStep>>(); addTo(Builder.all(), list); addTo(Publisher.all(), list); return list; } private static void addTo(List<? extends Descriptor<? extends BuildStep>> source, List<Descriptor<? extends BuildStep>> list) { for (Descriptor<? extends BuildStep> d : source) { if (d instanceof BuildStepDescriptor) { BuildStepDescriptor bsd = (BuildStepDescriptor) d; if(bsd.isApplicable(PromotionProcess.class)) list.add(d); } } } public Permalink asPermalink() { return new Permalink() { @Override public String getDisplayName() { return Messages.PromotionProcess_PermalinkDisplayName(PromotionProcess.this.getDisplayName()); } @Override public String getId() { return PromotionProcess.this.getName(); } @Override public Run<?, ?> resolve(Job<?, ?> job) { String id = getId(); for( Run<?,?> build : job.getBuilds() ) { PromotedBuildAction a = build.getAction(PromotedBuildAction.class); if(a!=null && a.contains(id)) return build; } return null; } }; } }
package io.paymenthighway; import io.paymenthighway.connect.PaymentAPIConnection; import io.paymenthighway.exception.AuthenticationException; import io.paymenthighway.model.request.*; import io.paymenthighway.model.response.*; import org.apache.http.client.HttpResponseException; import org.apache.http.impl.client.CloseableHttpClient; import java.io.Closeable; import java.io.IOException; import java.security.KeyManagementException; import java.security.NoSuchAlgorithmException; import java.util.UUID; /** * Payment Highway Payment API Service. */ public class PaymentAPI implements Closeable { private PaymentAPIConnection paymentApi = null; /** * Payment API with default HTTP client * @param serviceUrl Production or Sandbox base URL * @param signatureKeyId The signature key's ID or name * @param signatureSecret The secret signature key * @param account Payment Highway account name * @param merchant Payment Highway merchant name. One account might have multiple merchants. */ public PaymentAPI(String serviceUrl, String signatureKeyId, String signatureSecret, String account, String merchant) { CloseableHttpClient httpClient = null; try { httpClient = PaymentAPIConnection.defaultHttpClient(); } catch(NoSuchAlgorithmException | KeyManagementException exception) { // If TLSv1.2 is not supported. Hides exceptions for backwards compatibility. exception.printStackTrace(); } paymentApi = new PaymentAPIConnection(serviceUrl, signatureKeyId, signatureSecret, account, merchant, httpClient); } /** * Payment API with customizable HTTP client * Pay attention to closing if sharing the http client between multiple instances! * @param serviceUrl Production or Sandbox base URL * @param signatureKeyId The signature key's ID or name * @param signatureSecret The secret signature key * @param account Payment Highway account name * @param merchant Payment Highway merchant name. One account might have multiple merchants. * @param httpClient The underlying HTTP client. */ public PaymentAPI( String serviceUrl, String signatureKeyId, String signatureSecret, String account, String merchant, CloseableHttpClient httpClient ) { paymentApi = new PaymentAPIConnection(serviceUrl, signatureKeyId, signatureSecret, account, merchant, httpClient); } /** * @param httpClient The underlying HTTP client * @deprecated Use the constructor to inject httpClient instead. 
*/ public void setHttpClient(CloseableHttpClient httpClient) { this.paymentApi.setHttpClient(httpClient); } /** * Payment Highway Init Transaction * * @return InitTransactionResponse from Payment Highway * @throws HttpResponseException Exception * @throws AuthenticationException Exception * @throws IOException Exception */ public InitTransactionResponse initTransaction() throws IOException { return paymentApi.initTransactionHandle(); } /** * Payment Highway Debit Transaction * * @param transactionId Transaction id * @param request Transaction request * @return Transaction response * @throws HttpResponseException Exception * @throws AuthenticationException Exception * @throws IOException Exception */ public TransactionResponse debitTransaction(UUID transactionId, TransactionRequest request) throws IOException { return paymentApi.debitTransaction(transactionId, request); } /** * Payment Highway Masterpass Debit Transaction * * @param transactionId Transaction id * @param request Masterpass Debit Transaction request * @return Transaction response * @throws HttpResponseException Exception * @throws AuthenticationException Exception * @throws IOException Exception */ public TransactionResponse debitMasterpassTransaction(UUID transactionId, MasterpassTransactionRequest request) throws IOException { return paymentApi.debitMasterpassTransaction(transactionId, request); } /** * Payment Highway Apple Pay Transaction * * @param transactionId Transaction id * @param request Apple Pay Transaction request * @return Transaction response * @throws HttpResponseException Exception * @throws AuthenticationException Exception * @throws IOException Exception */ public TransactionResponse debitApplePayTransaction(UUID transactionId, ApplePayTransactionRequest request) throws IOException { return paymentApi.debitApplePayTransaction(transactionId, request); } /** * MobilePay app switch request. 
* * @param request MobilePay init request * @return MobilePayInit response * @throws IOException Exception */ public MobilePayInitResponse initMobilePaySession(MobilePayInitRequest request) throws IOException { return paymentApi.initMobilePaySession(request); } /** * MobilePay session status * * @param sessionToken Session token * @return MobilePayStatus response * @throws IOException Exception */ public MobilePayStatusResponse mobilePaySessionStatus(String sessionToken) throws IOException { return paymentApi.mobilePaySessionStatus(sessionToken); } /** * Payment Highway Revert Transaction * * @param transactionId Transaction id * @return Transaction response * @throws HttpResponseException Exception * @throws AuthenticationException Exception * @throws IOException Exception */ public TransactionResponse revertTransaction(UUID transactionId) throws IOException { RevertTransactionRequest revertRequest = new RevertTransactionRequest(); return paymentApi.revertTransaction(transactionId, revertRequest); } /** * Payment Highway Revert Siirto Transaction * * @param transactionId Transaction id * @param referenceNumber Reference number * @return Transaction response * @throws HttpResponseException Exception * @throws AuthenticationException Exception * @throws IOException Exception */ public TransactionResponse revertSiirtoTransaction(UUID transactionId, String referenceNumber) throws IOException { RevertSiirtoTransactionRequest revertRequest = new RevertSiirtoTransactionRequest(referenceNumber); return paymentApi.revertSiirtoTransaction(transactionId, revertRequest); } /** * Payment Highway Revert Pivo Transaction without prefilled reference number * * @param transactionId Transaction id * @return Transaction response * @throws HttpResponseException Exception * @throws AuthenticationException Exception * @throws IOException Exception */ public TransactionResponse revertPivoTransaction(UUID transactionId) throws IOException { RevertPivoTransactionRequest revertRequest = new RevertPivoTransactionRequest(); return paymentApi.revertPivoTransaction(transactionId, revertRequest); } /** * Payment Highway Revert Pivo Transaction * * @param transactionId Transaction id * @param referenceNumber Reference number * @return Transaction response * @throws HttpResponseException Exception * @throws AuthenticationException Exception * @throws IOException Exception */ public TransactionResponse revertPivoTransaction(UUID transactionId, String referenceNumber) throws IOException { RevertPivoTransactionRequest revertRequest = new RevertPivoTransactionRequest(referenceNumber); return paymentApi.revertPivoTransaction(transactionId, revertRequest); } /** * Payment Highway Revert Transaction with amount * * @param transactionId Transaction id * @param amount Amount to revert * @return Transaction response * @throws HttpResponseException Exception * @throws AuthenticationException Exception * @throws IOException Exception */ public TransactionResponse revertTransaction(UUID transactionId, String amount) throws IOException { RevertTransactionRequest revertRequest = new RevertTransactionRequest(amount); return paymentApi.revertTransaction(transactionId, revertRequest); } /** * Payment Highway Revert Siirto Transaction with amount * * @param transactionId Transaction id * @param referenceNumber Reference number * @param amount Amount to revert * @return Transaction response * @throws HttpResponseException Exception * @throws AuthenticationException Exception * @throws IOException Exception */ public TransactionResponse 
revertSiirtoTransaction(UUID transactionId, String referenceNumber, Long amount) throws IOException { RevertSiirtoTransactionRequest revertRequest = new RevertSiirtoTransactionRequest(referenceNumber, amount); return paymentApi.revertSiirtoTransaction(transactionId, revertRequest); } /** * Payment Highway Revert Pivo Transaction with amount, but without prefilled reference number * * @param transactionId Transaction id * @param amount Amount to revert * @return Transaction response * @throws HttpResponseException Exception * @throws AuthenticationException Exception * @throws IOException Exception */ public TransactionResponse revertPivoTransaction(UUID transactionId, Long amount) throws IOException { RevertPivoTransactionRequest revertRequest = new RevertPivoTransactionRequest(amount); return paymentApi.revertPivoTransaction(transactionId, revertRequest); } /** * Payment Highway Revert Pivo Transaction with amount * * @param transactionId Transaction id * @param referenceNumber Reference number * @param amount Amount to revert * @return Transaction response * @throws HttpResponseException Exception * @throws AuthenticationException Exception * @throws IOException Exception */ public TransactionResponse revertPivoTransaction(UUID transactionId, String referenceNumber, Long amount) throws IOException { RevertPivoTransactionRequest revertRequest = new RevertPivoTransactionRequest(referenceNumber, amount); return paymentApi.revertPivoTransaction(transactionId, revertRequest); } /** * Payment Highway Transaction Status Request * * @param transactionId Transaction id * @return Transaction status response * @throws HttpResponseException Exception * @throws AuthenticationException Exception * @throws IOException Exception */ public TransactionStatusResponse transactionStatus(UUID transactionId) throws IOException { return paymentApi.transactionStatus(transactionId); } /** * Payment Highway Pivo Transaction Status Request * * @param transactionId Transaction id * @return Transaction status response * @throws HttpResponseException Exception * @throws AuthenticationException Exception * @throws IOException Exception */ public PivoTransactionStatusResponse pivoTransactionStatus(UUID transactionId) throws IOException { return paymentApi.pivoTransactionStatus(transactionId); } /** * Payment Highway Siirto Transaction Status Request * * @param transactionId Transaction id * @return Transaction status response * @throws HttpResponseException Exception * @throws AuthenticationException Exception * @throws IOException Exception */ public SiirtoTransactionStatusResponse siirtoTransactionStatus(UUID transactionId) throws IOException { return paymentApi.siirtoTransactionStatus(transactionId); } /** * Payment Highway Order Status Request * * @param order The ID of the order whose transactions should be searched for * @return Order search response * @throws IOException Exception */ public OrderSearchResponse searchOrders(String order) throws IOException { return paymentApi.searchOrders(order); } /** * Payment Highway Transaction Commit Request * Used to commit (capture) the transaction. * In order to find out the result of the transaction without committing it, use Transaction Result request instead. 
* * @param transactionId Transaction id * @param amount The amount to commit; must be less than or equal to the initial transaction amount * @param currency The original transaction currency * @return Commit transaction response * @throws HttpResponseException Exception * @throws AuthenticationException Exception * @throws IOException Exception */ public CommitTransactionResponse commitTransaction(UUID transactionId, String amount, String currency) throws IOException { CommitTransactionRequest commitRequest = new CommitTransactionRequest(amount, currency); return paymentApi.commitTransaction(transactionId, commitRequest); } /** * Payment Highway User profile information * * Currently used to fetch user information from Masterpass, e.g. the shipping address * * @param transactionId Transaction id * @return User profile response * @throws HttpResponseException Exception * @throws AuthenticationException Exception * @throws IOException Exception */ public UserProfileResponse userProfile(UUID transactionId) throws IOException { return paymentApi.userProfile(transactionId); } /** * Payment Highway Transaction Result Request * Used to find out whether or not an uncommitted transaction succeeded, without actually committing (capturing) it. * * @param transactionId Transaction id * @return Transaction result response * @throws HttpResponseException Exception * @throws AuthenticationException Exception * @throws IOException Exception */ public TransactionResultResponse transactionResult(UUID transactionId) throws IOException { return paymentApi.transactionResult(transactionId); } /** * Payment Highway Pivo Transaction Result Request * Used to find out whether or not a Pivo transaction succeeded. * * @param transactionId Transaction id * @return Transaction result response * @throws HttpResponseException Exception * @throws AuthenticationException Exception * @throws IOException Exception */ public PivoTransactionResultResponse pivoTransactionResult(UUID transactionId) throws IOException { return paymentApi.pivoTransactionResult(transactionId); } /** * Payment Highway Siirto Transaction Result Request * Used to find out whether or not a Siirto transaction succeeded. * * @param transactionId Transaction id * @return Transaction result response * @throws HttpResponseException Exception * @throws AuthenticationException Exception * @throws IOException Exception */ public SiirtoTransactionResultResponse siirtoTransactionResult(UUID transactionId) throws IOException { return paymentApi.siirtoTransactionResult(transactionId); } /** * Payment Highway Tokenize Request * * @param tokenizationId Tokenization id * @return Tokenization response * @throws HttpResponseException Exception * @throws AuthenticationException Exception * @throws IOException Exception */ public TokenizationResponse tokenize(UUID tokenizationId) throws IOException { return paymentApi.tokenization(tokenizationId); } /** * Payment Highway Daily Report Request * * @param date The date to fetch the report for * @return Report response * @throws HttpResponseException Exception * @throws AuthenticationException Exception * @throws IOException Exception */ public ReportResponse fetchDailyReport(String date) throws IOException { return paymentApi.fetchReport(date); } /** * Payment Highway Reconciliation Report Request * * @param date The date to fetch the reconciliation report for.
* @return Reconciliation report response * @throws HttpResponseException Exception * @throws AuthenticationException Exception * @throws IOException Exception */ public ReconciliationReportResponse fetchReconciliationReport(String date) throws IOException { return paymentApi.fetchReconciliationReport(date); } /** * Payment Highway Reconciliation Report Request * * Deprecated: the default behaviour (useDateProcessed=false) is encouraged instead * * @param date The date to fetch the reconciliation report for. * @param useDateProcessed True for using the Euroline processing date (legacy style), instead of the report's fetching date. May result in changes in the past. * @return Reconciliation report response * @throws HttpResponseException Exception * @throws AuthenticationException Exception * @throws IOException Exception */ @Deprecated public ReconciliationReportResponse fetchReconciliationReport(String date, Boolean useDateProcessed) throws IOException { return paymentApi.fetchReconciliationReport(date, useDateProcessed); } /** * Closes the underlying connection instances. Be careful when a custom HTTP client is shared between multiple instances! * @throws IOException Exception */ @Override public void close() throws IOException { if (paymentApi != null) { paymentApi.close(); } } }
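A brief usage sketch of the API above (not from the library itself); the service URL and credentials are placeholders, and getId() on the init response is an assumed accessor for the transaction id.

// Illustrative flow only: init a transaction, then commit it.
void exampleFlow() throws IOException {
    try (PaymentAPI api = new PaymentAPI(
            "https://example.invalid/payment-highway", // placeholder service URL
            "signatureKeyId", "signatureSecret", "account", "merchant")) {
        InitTransactionResponse init = api.initTransaction();
        UUID transactionId = init.getId();             // assumed accessor on the response
        api.commitTransaction(transactionId, "1990", "EUR");
    }
}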
package de.ub0r.android.websms.connector.common; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.UnsupportedEncodingException; import java.net.URLEncoder; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.ArrayList; import java.util.List; import java.util.zip.GZIPInputStream; import org.apache.http.Header; import org.apache.http.HeaderElement; import org.apache.http.HttpEntity; import org.apache.http.HttpException; import org.apache.http.HttpRequest; import org.apache.http.HttpResponse; import org.apache.http.HttpResponseInterceptor; import org.apache.http.client.CookieStore; import org.apache.http.client.entity.UrlEncodedFormEntity; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpRequestBase; import org.apache.http.conn.scheme.PlainSocketFactory; import org.apache.http.conn.scheme.Scheme; import org.apache.http.conn.scheme.SchemeRegistry; import org.apache.http.cookie.Cookie; import org.apache.http.entity.HttpEntityWrapper; import org.apache.http.entity.StringEntity; import org.apache.http.impl.client.DefaultHttpClient; import org.apache.http.impl.conn.tsccm.ThreadSafeClientConnManager; import org.apache.http.message.BasicHeader; import org.apache.http.message.BasicNameValuePair; import org.apache.http.params.BasicHttpParams; import org.apache.http.params.HttpParams; import org.apache.http.protocol.HTTP; import org.apache.http.protocol.HttpContext; import org.json.JSONObject; import android.app.Notification; import android.app.NotificationManager; import android.app.PendingIntent; import android.content.Context; import android.content.Intent; import android.content.SharedPreferences; import android.net.Uri; import android.preference.PreferenceManager; import android.text.TextUtils; /** * General utility methods. * * @author flx */ public final class Utils { /** Tag for output. */ private static final String TAG = "utl"; /** Standard buffer size. */ public static final int BUFSIZE = 32768; /** Gzip. */ private static final String GZIP = "gzip"; /** Accept-Encoding. */ private static final String ACCEPT_ENCODING = "Accept-Encoding"; /** Default port for HTTP. */ private static final int PORT_HTTP = 80; /** Default port for HTTPS. */ private static final int PORT_HTTPS = 443; /** Preference's name: use default sender. */ public static final String PREFS_USE_DEFAULT_SENDER = "use_default_sender"; /** Preference's name: custom sender. */ public static final String PREFS_CUSTOM_SENDER = "custom_sender"; /** Return only the matching line in stream2str(). */ public static final int ONLY_MATCHING_LINE = -2; /** Common {@link DefaultHttpClient}. */ private static DefaultHttpClient httpClient = null; /** * Options passed to getHttpClient(). * * @author flx */ public static class HttpOptions { /** URL to open. */ public String url = null; /** Cookies to transmit. */ public ArrayList<Cookie> cookies = null; /** HTTP POST data. */ public HttpEntity postData = null; /** Additional HTTP headers. */ public ArrayList<Header> headers = null; /** User agent. */ public String userAgent = null; /** Referer. */ public String referer = null; /** Encoding. Default is "ISO-8859-15". */ public final String encoding; /** Trust all SSL certificates; only used on first call! */ public boolean trustAll = false; /** * Fingerprints that are known to be valid; only used on first call!
* Only used if {@code trustAll == false} */ public String[] knownFingerprints = null; /** Default Constructor. */ public HttpOptions() { this(null); } /** * Default Constructor. * * @param e * encoding */ public HttpOptions(final String e) { if (TextUtils.isEmpty(e)) { this.encoding = "ISO-8859-15"; } else { this.encoding = e; } } /** Clear {@link HttpOptions} to reuse it. */ public void clear() { this.url = null; this.postData = null; } /** * Add HTTP basic auth header to list of headers. * * @param user * user * @param password * password * @return HTTP basic auth header */ public final Header addBasicAuthHeader(final String user, final String password) { Header h = new BasicHeader("Authorization", "Basic " + Base64Coder.encodeString(user + ":" + password)); if (this.headers == null) { this.headers = new ArrayList<Header>(); } this.headers.add(h); return h; } /** * Add form data as {@link HttpEntity}. * * @param formData * form data * @return {@link HttpEntity} * @throws UnsupportedEncodingException * UnsupportedEncodingException */ public final HttpEntity addFormParameter( final List<BasicNameValuePair> formData) throws UnsupportedEncodingException { HttpEntity he = null; if (formData != null) { he = new UrlEncodedFormEntity(formData, this.encoding); this.postData = he; } return he; } /** * Add {@link JSONObject} as {@link HttpEntity}. * * @param json * {@link JSONObject} form data * @return {@link HttpEntity} * @throws UnsupportedEncodingException * UnsupportedEncodingException */ public final HttpEntity addJson(final JSONObject json) throws UnsupportedEncodingException { StringEntity he = new StringEntity(json.toString(), this.encoding); he.setContentEncoding(new BasicHeader(HTTP.CONTENT_TYPE, "application/json")); if (this.headers == null) { this.headers = new ArrayList<Header>(); } this.headers .add(new BasicHeader("Content-Type", "application/json")); this.postData = he; return he; } } /** * {@link HttpEntityWrapper} to wrap gzipped content. * * @author flx */ public static final class GzipDecompressingEntity extends HttpEntityWrapper { /** * Default Constructor. * * @param entity * {@link HttpEntity} */ public GzipDecompressingEntity(final HttpEntity entity) { super(entity); } /** * {@inheritDoc} */ @Override public InputStream getContent() throws IOException { Log.d(TAG, "unzip content"); InputStream wrappedin = this.wrappedEntity.getContent(); return new GZIPInputStream(wrappedin); } /** * {@inheritDoc} */ @Override public long getContentLength() { return -1; } } /** * Utility class; no constructor needed. */ private Utils() { return; } /** * Get the custom sender chosen by the user in the preferences; otherwise the * default sender is returned. * * @param context * {@link Context} * @param defSender * default Sender * @return selected Sender */ public static String getSender(final Context context, final String defSender) { if (context == null) { return defSender; } final SharedPreferences p = PreferenceManager .getDefaultSharedPreferences(context); if (p.getBoolean(PREFS_USE_DEFAULT_SENDER, true)) { return defSender; } final String s = p.getString(PREFS_CUSTOM_SENDER, ""); if (s == null || s.length() == 0) { return defSender; } return s; } /** * Get the custom sender number chosen by the user in the preferences; otherwise * the default sender is returned.
* * @param context * {@link Context} * @param defSender * default Sender * @return selected Sender */ public static String getSenderNumber(final Context context, final String defSender) { if (context == null) { return defSender; } final SharedPreferences p = PreferenceManager .getDefaultSharedPreferences(context); if (p.getBoolean(PREFS_USE_DEFAULT_SENDER, true)) { return defSender; } final String s = p.getString(PREFS_CUSTOM_SENDER, ""); if (s == null || s.length() == 0) { return defSender; } final String sn = s.replaceAll("(\\+|[0-9])", ""); if (sn.length() > 0) { Log.d(TAG, "fall back to default number: " + sn); return defSender; } return s; } /** * Parse a String of "name <number>, name <number>, number, ..." to an array * of "name <number>". * * @param recipients * recipients * @return array of recipients */ public static String[] parseRecipients(final String recipients) { String s = recipients.trim(); if (s.endsWith(",")) { s = s.substring(0, s.length() - 1); } ArrayList<String> ret = new ArrayList<String>(); String[] ss = s.split(","); String r = null; for (String rr : ss) { if (r == null) { r = rr; } else { r += "," + rr; } if (rr.contains("0") || rr.contains("1") || rr.contains("2") || rr.contains("3") || rr.contains("4") || rr.contains("5") || rr.contains("6") || rr.contains("7") || rr.contains("8") || rr.contains("9")) { r = r.trim(); final String na = getRecipientsName(r); final String nu = cleanRecipient(getRecipientsNumber(r)); if (na != null && na.trim().length() > 0) { r = na + " <" + nu + ">"; } else { r = nu; } ret.add(r); r = null; } } return ret.toArray(new String[0]); } /** * Join an array of recipients, separated by the given separator. * * @param recipients * recipients * @param separator * separator * @return joined recipients */ public static String joinRecipients(final String[] recipients, final String separator) { if (recipients == null) { return null; } final int e = recipients.length; if (e == 0) { return null; } final StringBuilder buf = new StringBuilder(recipients[0]); for (int i = 1; i < e; i++) { buf.append(separator); buf.append(recipients[i]); } return buf.toString(); } /** * Join an array of recipients, separated by the given separator, stripped to only * contain numbers. * * @param recipients * recipients * @param separator * separator * @param oldFormat * Use old international format. E.g. 0049, not +49. * @return joined recipients */ public static String joinRecipientsNumbers(final String[] recipients, final String separator, final boolean oldFormat) { if (recipients == null) { return null; } final int e = recipients.length; if (e == 0) { return null; } final StringBuilder buf = new StringBuilder(); if (oldFormat) { buf.append(international2oldformat( getRecipientsNumber(recipients[0]))); } else { buf.append(getRecipientsNumber(recipients[0])); } for (int i = 1; i < e; i++) { buf.append(separator); if (oldFormat) { buf.append(international2oldformat( getRecipientsNumber(recipients[i]))); } else { buf.append(getRecipientsNumber(recipients[i])); } } return buf.toString(); } /** * Get a recipient's number. * * @param recipient * recipient * @return recipient's number */ public static String getRecipientsNumber(final String recipient) { final int i = recipient.lastIndexOf('<'); if (i != -1) { final int j = recipient.indexOf('>', i); if (j > 0) { return recipient.substring(i + 1, j); } } return recipient; } /** * Get a recipient's name.
* * @param recipient * recipient * @return recipient's name */ public static String getRecipientsName(final String recipient) { final int i = recipient.lastIndexOf('<'); if (i != -1) { return recipient.substring(0, i - 1).trim(); } return recipient; } /** * Clean recipient's phone number from [ -.()<>]. * * @param recipient * recipient's mobile number * @return clean number */ public static String cleanRecipient(final String recipient) { if (TextUtils.isEmpty(recipient)) { return ""; } String n; int i = recipient.indexOf("<"); int j = recipient.indexOf(">"); if (i != -1 && i < j) { n = recipient.substring(i, j); } else { n = recipient; } // strip everything except digits and dialling symbols ("+", "*", "#"), then drop a stray "*"/"#" prefix return n.replaceAll("[^*#+0-9]", "") .replaceAll("^[*#]+", ""); } /** * Convert international number to national. * * @param defPrefix * default prefix * @param number * international number * @return national number */ public static String international2national(final String defPrefix, final String number) { if (number.startsWith(defPrefix)) { return '0' + number.substring(defPrefix.length()); } else if (number.startsWith("00" + defPrefix.substring(1))) { return '0' + number.substring(defPrefix.length() + 1); } return number; } /** * Convert national number to international. Old format international numbers are * converted to the new format. * * @param defPrefix * default prefix * @param number * national number * @return international number */ public static String national2international(final String defPrefix, final String number) { if (number.startsWith("+")) { return number; } else if (number.startsWith("00")) { return "+" + number.substring(2); } else if (number.startsWith("0")) { return defPrefix + number.substring(1); } else if (defPrefix.length() > 1 && number.startsWith(defPrefix.substring(1))) { return "+" + number; } return defPrefix + number; } /** * Convert national number to international. * * @param defPrefix * default prefix * @param number * national numbers * @return international numbers */ public static String[] national2international(final String defPrefix, final String[] number) { if (number == null || number.length == 0) { return null; } final int l = number.length; String[] n = new String[l]; for (int i = 0; i < l; i++) { if (!TextUtils.isEmpty(number[i])) { n[i] = national2international(defPrefix, getRecipientsNumber(number[i])); } } return n; } /** * Convert international number to old format. E.g. +49123 to 0049123 * * @param number * international number starting with + * @return international number in old format starting with 00 */ public static String international2oldformat(final String number) { if (number.startsWith("+")) { return "00" + number.substring(1); } return number; } /** * Print all cookies from {@link CookieStore} to {@link String}. * * @param client * {@link DefaultHttpClient} * @return {@link Cookie}s formatted for debug output */ private static String getCookies(final DefaultHttpClient client) { String ret = "cookies:"; for (Cookie cookie : client.getCookieStore().getCookies()) { ret += "\n" + cookie.getName() + ": " + cookie.getValue(); } ret += "\nend of cookies"; return ret; } /** * Print all {@link Header}s from {@link HttpRequest} to {@link String}. * * @param request * {@link HttpRequest} * @return {@link Header}s formatted for debug output */ private static String getHeaders(final HttpRequest request) { String ret = "headers:"; for (Header h : request.getAllHeaders()) { ret += "\n" + h.getName() + ": " + h.getValue(); } ret += "\nend of headers"; return ret; } /** * Get {@link Cookie}s stored in static {@link CookieStore}.
* * @return {@link ArrayList} of {@link Cookie}s */ public static ArrayList<Cookie> getCookies() { if (httpClient == null) { return null; } List<Cookie> cookies = httpClient.getCookieStore().getCookies(); if (cookies == null || cookies.size() == 0) { return null; } ArrayList<Cookie> ret = new ArrayList<Cookie>(cookies.size()); ret.addAll(cookies); return ret; } /** * Get the number of {@link Cookie}s stored in static {@link CookieStore}. * * @return number of {@link Cookie}s */ public static int getCookieCount() { if (httpClient == null) { return 0; } List<Cookie> cookies = httpClient.getCookieStore().getCookies(); if (cookies == null) { return 0; } return cookies.size(); } /** * Get cookies as {@link String}. * * @return cookies */ public static String getCookiesAsString() { if (httpClient == null) { return null; } return getCookies(httpClient); } /** * Clear internal cookie cache. */ public static void clearCookies() { if (httpClient != null) { final CookieStore cs = httpClient.getCookieStore(); if (cs != null) { cs.clear(); } } } /** * Get a fresh HTTP-Connection. * * @param o * {@link HttpOptions} * @return the connection * @throws IOException * IOException */ public static HttpResponse getHttpClient(final HttpOptions o) throws IOException { Log.d(TAG, "HTTPClient URL: " + o.url); SchemeRegistry registry = null; if (httpClient == null) { if (o.trustAll || ( o.knownFingerprints != null && o.knownFingerprints.length > 0)) { registry = new SchemeRegistry(); registry.register(new Scheme("http", new PlainSocketFactory(), PORT_HTTP)); final FakeSocketFactory httpsSocketFactory; if (o.trustAll) { httpsSocketFactory = new FakeSocketFactory(); } else { httpsSocketFactory = new FakeSocketFactory( o.knownFingerprints); } registry.register(new Scheme("https", httpsSocketFactory, PORT_HTTPS)); HttpParams params = new BasicHttpParams(); httpClient = new DefaultHttpClient( new ThreadSafeClientConnManager(params, registry), params); } else { httpClient = new DefaultHttpClient(); } httpClient.addResponseInterceptor(new HttpResponseInterceptor() { public void process(final HttpResponse response, final HttpContext context) throws HttpException, IOException { HttpEntity entity = response.getEntity(); Header contentEncodingHeader = entity.getContentEncoding(); if (contentEncodingHeader != null) { HeaderElement[] codecs = contentEncodingHeader .getElements(); for (HeaderElement codec : codecs) { if (codec.getName().equalsIgnoreCase(GZIP)) { response.setEntity(new GzipDecompressingEntity( response.getEntity())); return; } } } } }); } if (o.cookies != null && o.cookies.size() > 0) { final int l = o.cookies.size(); CookieStore cs = httpClient.getCookieStore(); for (int i = 0; i < l; i++) { cs.addCookie(o.cookies.get(i)); } } // . Log.d(TAG, getCookies(httpClient)); HttpRequestBase request; if (o.postData == null) { request = new HttpGet(o.url); } else { HttpPost pr = new HttpPost(o.url); pr.setEntity(o.postData); // . 
Log.d(TAG, "HTTPClient POST: " + o.postData); request = pr; } request.addHeader("Accept", "*/*"); request.addHeader(ACCEPT_ENCODING, GZIP); if (o.referer != null) { request.setHeader("Referer", o.referer); Log.d(TAG, "HTTPClient REF: " + o.referer); } if (o.userAgent != null) { request.setHeader("User-Agent", o.userAgent); Log.d(TAG, "HTTPClient AGENT: " + o.userAgent); } addHeaders(request, o.headers); Log.d(TAG, "HTTP " + request.getMethod() + " " + request.getURI()); Log.d(TAG, getHeaders(request)); if (request instanceof HttpPost) { Log.d(TAG, ""); Log.d(TAG, ((HttpPost) request).getEntity().getContent()); } return httpClient.execute(request); } /** * Add headers to the Request. * * @param request * Request to be added headers to * @param headers * Headers to add */ private static void addHeaders(final HttpRequestBase request, final ArrayList<Header> headers) { if (headers == null) { return; } for (Header h : headers) { request.addHeader(h); } } /** * Shutdown and forget the cached HttpClient. This causes the client to be * re-created with the given settings on the next call to the * getHttpClient()-family of functions. The caller has to use this method * before calling a getHttpClient() with new trustAll or fingerprints. * Calling this function while no cached client exists does nothing. */ public static void resetHttpClient() { if (httpClient == null) { return; } httpClient.getConnectionManager().shutdown(); httpClient = null; } /** * Read {@link InputStream} and convert it into {@link String}. * * @param is * {@link InputStream} to read from * @return {@link String} holding all the bytes from the {@link InputStream} * @throws IOException * IOException */ public static String stream2str(final InputStream is) throws IOException { return stream2str(is, 0, -1, null); } /** * Read {@link InputStream} and convert it into {@link String}. * * @param is * {@link InputStream} to read from * @param charset * charset to be used to read {@link InputStream}. Can be null. * @return {@link String} holding all the bytes from the {@link InputStream} * @throws IOException * IOException */ public static String stream2str(final InputStream is, final String charset) throws IOException { return stream2str(is, charset, 0, -1, null); } /** * Read {@link InputStream} and convert it into {@link String}. * * @param is * {@link InputStream} to read from * @param start * first characters of stream that should be fetched. Set to 0, * if nothing should be skipped. * @param end * last characters of stream that should be fetched. This method * might read some more characters. Set to -1 if all characters * should be read. * @return {@link String} holding all the bytes from the {@link InputStream} * @throws IOException * IOException */ public static String stream2str(final InputStream is, final int start, final int end) throws IOException { return stream2str(is, null, start, end); } /** * Read {@link InputStream} and convert it into {@link String}. * * @param is * {@link InputStream} to read from * @param charset * charset to be used to read {@link InputStream}. Can be null. * @param start * first characters of stream that should be fetched. Set to 0, * if nothing should be skipped. * @param end * last characters of stream that should be fetched. This method * might read some more characters. Set to -1 if all characters * should be read.
* @return {@link String} holding all the bytes from the {@link InputStream} * @throws IOException * IOException */ public static String stream2str(final InputStream is, final String charset, final int start, final int end) throws IOException { return stream2str(is, charset, start, end, null); } /** * Read {@link InputStream} and convert it into {@link String}. * * @param is * {@link InputStream} to read from * @param start * first characters of stream that should be fetched. Set to 0, * if nothing should be skipped. * @param end * last characters of stream that should be fetched. This method * might read some more characters. Set to -1 if all characters * should be read. * @param pattern * start reading at this pattern, set end = -2 to return only the * line, matching this pattern * @return {@link String} holding all the bytes from the {@link InputStream} * @throws IOException * IOException */ public static String stream2str(final InputStream is, final int start, final int end, final String pattern) throws IOException { return stream2str(is, null, start, end, pattern); } /** * Read {@link InputStream} and convert it into {@link String}. * * @param is * {@link InputStream} to read from * @param charset * charset to be used to read {@link InputStream}. Can be null. * @param start * first characters of stream that should be fetched. Set to 0, * if nothing should be skipped. * @param end * last characters of stream that should be fetched. This method * might read some more characters. Set to -1 if all characters * should be read. * @param pattern * start reading at this pattern, set end = -2 to return only the * line, matching this pattern * @return {@link String} holding all the bytes from the {@link InputStream} * @throws IOException * IOException */ public static String stream2str(final InputStream is, final String charset, final int start, final int end, final String pattern) throws IOException { boolean foundPattern = false; if (pattern == null) { foundPattern = true; } InputStreamReader r; if (charset == null) { r = new InputStreamReader(is); } else { r = new InputStreamReader(is, charset); } final BufferedReader bufferedReader = new BufferedReader(r, BUFSIZE); final StringBuilder data = new StringBuilder(); String line = null; long totalSkipped = 0; long skipped = 0; while (start > totalSkipped) { skipped = bufferedReader.skip(start - totalSkipped); if (skipped == 0) { break; } totalSkipped += skipped; } skipped = 0; while ((line = bufferedReader.readLine()) != null) { skipped += line.length() + 1; if (!foundPattern) { if (line.indexOf(pattern) >= 0) { if (end == ONLY_MATCHING_LINE) { return line; } foundPattern = true; Log.d(TAG, "skipped: " + skipped); } } if (foundPattern) { data.append(line + "\n"); } if (end >= 0 && skipped > (end - start)) { break; } } bufferedReader.close(); if (!foundPattern) { return null; } return data.toString(); } /** * Generate MD5 Hash from String. 
* * @param s * input * @return hash */ public static String md5(final String s) { try { // Create MD5 Hash final MessageDigest digest = java.security.MessageDigest .getInstance("MD5"); digest.update(s.getBytes()); final byte[] messageDigest = digest.digest(); // Create Hex String final StringBuilder hexString = new StringBuilder(32); int b; for (byte bt : messageDigest) { b = 0xFF & bt; if (b < 0x10) { hexString.append('0' + Integer.toHexString(b)); } else { hexString.append(Integer.toHexString(b)); } } return hexString.toString(); } catch (final NoSuchAlgorithmException e) { Log.e(TAG, null, e); } return ""; } /** * Get HTTP GET parameters. * * @param url * base URL * @param params * parameters as {@link BasicNameValuePair} * @param encoding * encoding * @return URL with parameters added * @throws UnsupportedEncodingException * UnsupportedEncodingException */ public static String httpGetParams(final String url, final List<BasicNameValuePair> params, final String encoding) throws UnsupportedEncodingException { Log.d(TAG, "httpGetParams(" + url + "," + params + ")"); final StringBuilder u = new StringBuilder(url); u.append("?"); final int l = params.size(); for (int i = 0; i < l; i++) { final BasicNameValuePair nv = params.get(i); if (!TextUtils.isEmpty(nv.getName()) && !TextUtils.isEmpty(nv.getValue())) { u.append(nv.getName()); u.append("="); u.append(URLEncoder.encode(nv.getValue(), encoding)); u.append("&"); } } String ret = u.toString(); if (ret.endsWith("?") || ret.endsWith("&")) { ret = ret.substring(0, ret.length() - 1); } Log.d(TAG, "new url: " + ret); return ret; } /** * Show update notification. * * @param context * {@link Context} * @param pkg * package */ public static void showUpdateNotification(final Context context, final String pkg) { Notification n = new Notification(android.R.drawable.stat_sys_warning, context.getString(R.string.update_title), 0); n.flags = Notification.FLAG_AUTO_CANCEL; PendingIntent pi = PendingIntent.getActivity(context, 0, new Intent( Intent.ACTION_VIEW, Uri.parse("market://details?id=" + pkg)), PendingIntent.FLAG_UPDATE_CURRENT); n.setLatestEventInfo(context, context.getString(R.string.update_title), context.getString(R.string.update_message), pi); NotificationManager nm = (NotificationManager) context .getSystemService(Context.NOTIFICATION_SERVICE); nm.notify(0, n); } }
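A short usage sketch of the HTTP helpers above (hypothetical caller, not part of the connector); the endpoint and form field are placeholders.

// Hypothetical caller: POST a form and read the response body as a string.
static String postExample() throws IOException {
    Utils.HttpOptions o = new Utils.HttpOptions("UTF-8");
    o.url = "https://example.invalid/send";            // placeholder endpoint
    List<BasicNameValuePair> form = new ArrayList<BasicNameValuePair>();
    form.add(new BasicNameValuePair("text", "hello"));
    o.addFormParameter(form);                          // sets o.postData
    HttpResponse response = Utils.getHttpClient(o);
    return Utils.stream2str(response.getEntity().getContent());
}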
package org.spine3.examples.todolist.c.aggregates; import org.spine3.examples.todolist.TaskStatus; /** * Validates task commands and state transitions. * * @author Illia Shepilov */ class TaskFlowValidator { private TaskFlowValidator() { } /** * Checks whether the transition from the current task status to the new status is allowed. * * @param currentStatus current task status * @param newStatus new task status * @return {@code true} if the transition is allowed, {@code false} otherwise */ static boolean isValidTransition(TaskStatus currentStatus, TaskStatus newStatus) { final boolean isValid = TaskStatusTransition.isValid(currentStatus, newStatus); return isValid; } static boolean isValidUpdateTaskPriorityCommand(TaskStatus currentStatus) { final boolean isValid = ensureNeitherCompletedNorDeleted(currentStatus); return isValid; } static boolean isValidUpdateTaskDueDateCommand(TaskStatus currentStatus) { final boolean isValid = ensureNeitherCompletedNorDeleted(currentStatus); return isValid; } static boolean isValidRemoveLabelFromTaskCommand(TaskStatus currentStatus) { final boolean isValid = ensureNeitherCompletedNorDeleted(currentStatus); return isValid; } static boolean isValidAssignLabelToTaskCommand(TaskStatus currentStatus) { final boolean isValid = ensureNeitherCompletedNorDeleted(currentStatus); return isValid; } static boolean isValidCreateDraftCommand(TaskStatus currentStatus) { final boolean isValid = ensureNeitherCompletedNorDeleted(currentStatus); return isValid; } /** Returns {@code true} if the task is neither completed nor deleted. */ static boolean ensureNeitherCompletedNorDeleted(TaskStatus currentStatus) { boolean isDeleted = ensureNotDeleted(currentStatus); boolean isCompleted = ensureNotCompleted(currentStatus); final boolean result = !isDeleted && !isCompleted; return result; } /** Returns {@code true} when the task is completed. */ private static boolean ensureNotCompleted(TaskStatus currentStatus) { final boolean isCompleted = currentStatus == TaskStatus.COMPLETED; return isCompleted; } /** Returns {@code true} when the task is deleted. */ private static boolean ensureNotDeleted(TaskStatus currentStatus) { final boolean isDeleted = currentStatus == TaskStatus.DELETED; return isDeleted; } }
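A small sketch of how a command handler might use the validator (hypothetical caller, not part of the aggregate code):

// Hypothetical guard: reject priority updates on completed or deleted tasks.
static void checkCanUpdatePriority(TaskStatus currentStatus) {
    if (!TaskFlowValidator.isValidUpdateTaskPriorityCommand(currentStatus)) {
        throw new IllegalStateException(
                "Cannot update the priority of a completed or deleted task");
    }
}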
package com.capitalone.dashboard.service; import com.capitalone.dashboard.misc.HygieiaException; import com.capitalone.dashboard.model.Collector; import com.capitalone.dashboard.model.GenericCollectorItem; import com.capitalone.dashboard.repository.CollectorRepository; import com.capitalone.dashboard.repository.GenericCollectorItemRepository; import com.capitalone.dashboard.request.GenericCollectorItemCreateRequest; import org.bson.types.ObjectId; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; @Service public class GenericCollectorItemServiceImpl implements GenericCollectorItemService { private final GenericCollectorItemRepository genericCollectorItemRepository; private final CollectorRepository collectorRepository; @Autowired public GenericCollectorItemServiceImpl(GenericCollectorItemRepository genericCollectorItemRepository, CollectorRepository collectorRepository) { this.genericCollectorItemRepository = genericCollectorItemRepository; this.collectorRepository = collectorRepository; } @Override public String create(GenericCollectorItemCreateRequest request) throws HygieiaException { Collector collector = collectorRepository.findByName(request.getToolName()); if (collector == null) { throw new HygieiaException("No collector for tool name " + request.getToolName(), HygieiaException.BAD_DATA); } GenericCollectorItem item = genericCollectorItemRepository.findByToolNameAndRawDataAndRelatedCollectorItem(request.getToolName(), request.getRawData(), new ObjectId(request.getRelatedCollectorItemId())); if(item == null) { item = new GenericCollectorItem(); item.setCreationTime(System.currentTimeMillis()); item.setRawData(request.getRawData()); item.setToolName(request.getToolName()); } item.setCollectorId(collector.getId()); item.setSource(request.getSource()); item.setProcessTime(0); try { item.setRelatedCollectorItem(new ObjectId(request.getRelatedCollectorItemId())); item.setBuildId(new ObjectId(request.getBuildId())); } catch (IllegalArgumentException ie) { throw new HygieiaException("Bad relatedItemId: " + ie.getMessage(), HygieiaException.BAD_DATA); } item = genericCollectorItemRepository.save(item); return item.getId().toString(); } }
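As a usage sketch, a hypothetical caller (for example a REST controller) simply hands the request to the service and translates the checked exception:

// Hypothetical caller of the service defined above.
String createItem(GenericCollectorItemService service,
                  GenericCollectorItemCreateRequest request) {
    try {
        return service.create(request); // returns the id of the stored item
    } catch (HygieiaException e) {
        throw new IllegalArgumentException(
                "Could not create collector item: " + e.getMessage(), e);
    }
}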
package io.usethesource.vallang.util; import java.lang.ref.ReferenceQueue; import java.lang.ref.WeakReference; import java.util.ArrayList; import java.util.List; import java.util.WeakHashMap; import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.atomic.AtomicReferenceArray; import java.util.concurrent.locks.StampedLock; import java.util.function.Function; /** * <p> * A cache that stores the key as a weak reference, and optionally also the value. After either one of the references is cleared, the entry is dropped from the cache. * The cache is thread safe, meaning the {@link #get} method can be called without any locking requirements. * With the following guarantees: * <p> * <ul> * <li> As long as there are strong references to both the key and the value, all calls to {@link #get} will return the value reference for something that {@link Object#equals} key (not just reference equality, as Caffeine and WeakHashMap do) * <li> There will only be one entry for a given key, there is no point in time where you can get two different values for the same key (as long as there was a strong reference to key between two calls) * <li> If the key is in the cache, getting the value will rarely block (it can block in case the cache is being resized and at the same time some keys/values were cleared). * </ul> * * <p> * Warning: use this class only if you know that both the key and value will also be kept around somewhere. * Also check the {@link WeakHashMap} that keeps a strong reference to the Value, however it uses reference equality on the key. * </p> * @author Davy Landman * * @param <K> * @param <V> */ public class WeakReferenceCache<K,V> { private volatile AtomicReferenceArray<Entry<K,V>> table; private volatile int count; private final ReferenceQueue<Object> cleared; private final StampedLock lock; private static final int MINIMAL_CAPACITY = 1 << 4; private static final int MAX_CAPACITY = 1 << 30; private final EntryChildConstructor<Entry<K,V>> keyBuilder; private final EntryChildConstructor<Entry<K,V>> valueBuilder; public WeakReferenceCache() { this(true, true); } public WeakReferenceCache(boolean weakKeys, boolean weakValues) { table = new AtomicReferenceArray<>(MINIMAL_CAPACITY); count = 0; cleared = new ReferenceQueue<>(); lock = new StampedLock(); keyBuilder = weakKeys ? WeakChild::new : StrongChild::new; valueBuilder = weakValues ? 
WeakChild::new : StrongChild::new; } public V get(K key, Function<K, V> generateValue) { if (key == null) { throw new IllegalArgumentException(); } cleanup(); int hash = key.hashCode(); AtomicReferenceArray<Entry<K,V>> table = this.table; int bucket = bucket(table.length(), hash); Entry<K, V> bucketHead = table.get(bucket); // remember the chain head, so insertIfPossible can detect concurrent changes to this bucket V found = lookup(key, hash, bucketHead, null); if (found != null) { return found; } // not found resize(); return insertIfPossible(key, hash, bucketHead, generateValue.apply(key)); } private int bucket(int tableLength,int hash) { // since we are only using the last bits, take the msb and add them to the mix return (hash ^ (hash >> 16)) & (tableLength - 1); } private V insertIfPossible(final K key, final int hash, Entry<K, V> notFoundIn, final V result) { final Entry<K, V> toInsert = new Entry<>(key, result, hash, keyBuilder, valueBuilder, cleared); while (true) { final AtomicReferenceArray<Entry<K, V>> table = this.table; int bucket = bucket(table.length(), hash); Entry<K, V> currentBucketHead = table.get(bucket); if (currentBucketHead != notFoundIn) { // the head of the chain has changed, so it might be that now the key is there, so we have to lookup again, but we stop when we find the head again (or null) V otherResult = lookup(key, hash, currentBucketHead, notFoundIn); if (otherResult != null) { return otherResult; } notFoundIn = currentBucketHead; } toInsert.next.set(currentBucketHead); long stamp = lock.readLock(); // we get a read lock on the table, so we can put something in it, to protect against a table resize in process try { if (table == this.table && table.compareAndSet(bucket, currentBucketHead, toInsert)) { count++; return result; } } finally { lock.unlockRead(stamp); } } } private V lookup(K key, int hash, Entry<K, V> bucketEntry, Entry<K,V> stopAfter) { while (bucketEntry != null && bucketEntry != stopAfter) { if (bucketEntry.hash == hash) { Object other = bucketEntry.key.get(); if (other != null && key.equals(other)) { @SuppressWarnings("unchecked") V result = (V) bucketEntry.value.get(); if (result != null) { return result; } } } bucketEntry = bucketEntry.next.get(); } return null; } @SuppressWarnings("unchecked") private void cleanup() { WeakChild<Entry<K,V>> clearedReference = (WeakChild<Entry<K, V>>) cleared.poll(); if (clearedReference != null) { int totalCleared = 0; long stamp = lock.readLock(); // we get a read lock on the table, so we can remove some stuff, to protect against a table resize in process try { // quickly consume the whole cleared pool, to avoid too many threads trying to do a cleanup List<WeakChild<Entry<K,V>>> toClear = new ArrayList<>(); while (clearedReference != null) { toClear.add(clearedReference); clearedReference = (WeakChild<Entry<K,V>>) cleared.poll(); } final AtomicReferenceArray<Entry<K, V>> table = this.table; final int currentLength = table.length(); for (WeakChild<Entry<K,V>> e: toClear) { Entry<K, V> mapNode = e.getParent(); if (mapNode != null) { // avoid multiple threads clearing the same entry (for example the key and value both got cleared) synchronized (mapNode) { mapNode = e.getParent(); // make sure it wasn't cleared since we got the lock if (mapNode != null) { int bucket = bucket(currentLength, mapNode.hash); while (true) { Entry<K,V> prev = null; Entry<K,V> cur = table.get(bucket); while (cur != mapNode) { prev = cur; cur = cur.next.get(); assert cur != null; // the entry has to be in this bucket } if (prev == null) { // at the head, so we can just replace the head if
(table.compareAndSet(bucket, mapNode, mapNode.next.get())) { break; // we replaced the head, so we are done with this entry } } else { if (prev.next.compareAndSet(mapNode, mapNode.next.get())) { break; // managed to replace the next pointer in the chain } } } count--; totalCleared++; // keep the next pointer intact, in case someone is following this chain. // we do clear the rest mapNode.key.clear(); mapNode.key.setParent(null); // marks the fact that the node has been cleared already mapNode.value.clear(); mapNode.value.setParent(null); } } } } } finally { lock.unlockRead(stamp); } if (totalCleared > 1024) { // let's check for a resize resize(); } } } private void resize() { final AtomicReferenceArray<Entry<K, V>> table = this.table; int newSize = calculateNewSize(table); if (newSize != table.length()) { // We have to resize, so we have to lock the table against new inserts. long stamp = lock.writeLock(); try { final AtomicReferenceArray<Entry<K, V>> oldTable = this.table; final int oldLength = oldTable.length(); if (oldTable != table) { // someone else already changed the table newSize = calculateNewSize(oldTable); if (newSize == oldLength) { return; } } final AtomicReferenceArray<Entry<K,V>> newTable = new AtomicReferenceArray<>(newSize); for (int i = 0; i < oldLength; i++) { Entry<K,V> current = oldTable.get(i); while (current != null) { int newBucket = bucket(newSize, current.hash); newTable.set(newBucket, new Entry<>(current.key, current.value, current.hash, newTable.get(newBucket))); current = current.next.get(); } } this.table = newTable; } finally { lock.unlockWrite(stamp); } } } private int calculateNewSize(final AtomicReferenceArray<Entry<K, V>> table) { int newSize = table.length(); int newCount = this.count + 1; if (newCount > newSize * 0.8) { newSize <<= 1; } else if (newSize != MINIMAL_CAPACITY && newCount < (newSize >> 2)) { // shrank quite a bit, so it makes sense to resize // find the smallest power of two that still fits the current count newSize = Integer.highestOneBit(newCount - 1) << 1; } if (newSize < 0 || newSize > MAX_CAPACITY) { newSize = MAX_CAPACITY; } else if (newSize == 0) { newSize = MINIMAL_CAPACITY; } return newSize; } private interface EntryChild<P> { Object get(); void clear(); P getParent(); void setParent(P parent); } @FunctionalInterface interface EntryChildConstructor<P> { EntryChild<P> construct(Object reference, P parent, ReferenceQueue<? super Object> clearQue); } private static final class WeakChild<P> extends WeakReference<Object> implements EntryChild<P> { private volatile P parent; public WeakChild(Object referent, P parent, ReferenceQueue<? super Object> q) { super(referent, q); this.parent = parent; } @Override public P getParent() { return parent; } @Override public void setParent(P parent) { this.parent = parent; } } private static final class StrongChild<P> implements EntryChild<P> { private volatile Object ref; public StrongChild(Object referent, P parent, ReferenceQueue<?
super Object> q) { this.ref = referent; } @Override public Object get() { return ref; } @Override public void clear() { ref = null; } @Override public P getParent() { throw new RuntimeException("Should never be called"); } @Override public void setParent(P parent) { // no-op to keep the code simpler } } private static final class Entry<K, V> { private final int hash; private final AtomicReference<Entry<K,V>> next; private final EntryChild<Entry<K,V>> key; private final EntryChild<Entry<K,V>> value; public Entry(K key, V value, int hash, EntryChildConstructor<Entry<K,V>> keyBuilder, EntryChildConstructor<Entry<K,V>> valueBuilder, ReferenceQueue<? super Object> q) { this.hash = hash; this.key = keyBuilder.construct(key, this, q); this.value = (key == value) && (keyBuilder == valueBuilder) ? this.key : valueBuilder.construct(value, this, q); // save a reference in case of an identity cache (key == value) this.next = new AtomicReference<>(null); } public Entry(EntryChild<Entry<K,V>> key, EntryChild<Entry<K,V>> value, int hash, Entry<K,V> next) { this.hash = hash; this.key = key; this.value = value; this.next = new AtomicReference<>(next); key.setParent(this); value.setParent(this); } } }
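A brief usage sketch: the cache can act as an interning table where equal keys map to one canonical instance, as long as callers keep strong references to what they get back.

// Illustrative interner built on the cache above (both key and value held weakly).
static void internExample() {
    WeakReferenceCache<String, String> interner = new WeakReferenceCache<>(true, true);
    String canonical = interner.get(new String("hello"), k -> k);
    // An equal (but not identical) key yields the same canonical instance.
    assert canonical == interner.get(new String("hello"), k -> k);
}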
/** * Calculator.java */ package it.scompo.testapp.lib.api.calculator; /** * Defines the methods a {@link Calculator} should have. * * @author mscomparin */ public interface Calculator { /** * {@link Operands} defines the supported operations. * * @author mscomparin * */ enum Operands { /** * Defines a sum. */ PLUS, /** * Defines a subtraction. */ MINUS } /** * Applies the given operand to the two values. * * @param operand * the operand to apply. * * @param first * the first {@link Integer}. * * @param second * the second {@link Integer}. * * @return the result of the operation. */ Integer calculate(Operands operand, Integer first, Integer second); }
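A minimal sketch of an implementation (not part of the library); it simply switches on the operand:

// Hypothetical implementation of the Calculator interface above.
public final class SimpleCalculator implements Calculator {
    @Override
    public Integer calculate(Operands operand, Integer first, Integer second) {
        switch (operand) {
            case PLUS:
                return first + second;
            case MINUS:
                return first - second;
            default:
                throw new IllegalArgumentException("Unsupported operand: " + operand);
        }
    }
}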
package me.ferrybig.javacoding.teamspeakconnector; import io.netty.util.concurrent.Future; import java.util.Objects; /** * This class represents a Teamspeak server group. A server group has a type; * this type says what purpose the group has. Most implementations should * filter on the regular type, as that is the one used by normal clients. * * @see Type */ public class Group extends UnresolvedGroup { private final int sortId; private final int icon; private final boolean savedb; private final String name; private final int memberRemovePrivilege; private final int memberAddPrivilege; private final int modifyPrivilege; private final int namemode; private final Type type; /** * Creates a new Teamspeak group; should only be used by the internal api. * * @param con Teamspeak connection that created this object * @param serverGroupId The id of this group * @param sortId The sort id used when ordering groups for display * @param icon Icon id of this group, or -1 for no icon * @param savedb unknown * @param name Name of the group * @param memberRemovePrivilege Privilege to remove a member from this group * @param memberAddPrivilege Privilege to add a member to this group * @param modifyPrivilege Privilege to modify this group * @param namemode unknown * @param type Type of the group */ public Group(TeamspeakConnection con, int serverGroupId, int sortId, int icon, boolean savedb, String name, int memberRemovePrivilege, int memberAddPrivilege, int modifyPrivilege, int namemode, Type type) { super(con, serverGroupId); this.sortId = sortId; this.icon = icon; this.savedb = savedb; this.name = Objects.requireNonNull(name, "name"); this.memberRemovePrivilege = memberRemovePrivilege; this.memberAddPrivilege = memberAddPrivilege; this.modifyPrivilege = modifyPrivilege; this.namemode = namemode; this.type = type; } @Override public boolean isResolved() { return true; } /** * Returns the sort id. When displaying server groups, they should be sorted * on this value first, before sorting on the id.
* * @return the sort id */ public int getSortId() { return sortId; } /** * Returns the id of the icon used * * @return the id of the icon */ public int getIcon() { return icon; } /** * Unknown * * @return an unknown value observed to be 0 or 1 */ public boolean isSavedb() { return savedb; } /** * Returns the name of the group * * @return the name */ public String getName() { return name; } /** * Gets the privilege to remove a member of the group * * @return the value of privilege needed to remove members */ public int getMemberRemovePrivilege() { return memberRemovePrivilege; } /** * Gets the privilege level needed to add a member to the group * * @return the value of privilege needed to add a member to the group */ public int getMemberAddPrivilege() { return memberAddPrivilege; } /** * Gets the privilege needed to modify this group * * @return the value of privilege needed to modify the group */ public int getModifyPrivilege() { return modifyPrivilege; } /** * Unknown * * @return unknown */ public int getNamemode() { return namemode; } /** * Returns the type of the group * * @return the type */ public Type getType() { return type; } @Override public String toString() { return "Group{" + "serverGroupId=" + getServerGroupId() + ", icon=" + icon + ", savedb=" + savedb + ", name=" + name + ", memberRemovePrivilege=" + memberRemovePrivilege + ", memberAddPrivilege=" + memberAddPrivilege + ", modifyPrivilege=" + modifyPrivilege + ", namemode=" + namemode + ", type=" + type + '}'; } @Override public Future<Group> resolve() { return con.io().getCompletedFuture(this); } /** * Type of server groups observed in Teamspeak */ public enum Type { /** * 0: template group (used for new virtual servers) */ TEMPLATE(0), /** * 1: regular group (used for regular clients) */ REGULAR(1), /** * 2: global query group (used for ServerQuery clients) */ SERVERQUERY(2); private static final int MAP_LENGTH = 3; private static final Type[] map; static { map = new Type[MAP_LENGTH]; for (Type type : values()) { map[type.id] = type; } } private final int id; private Type(int id) { this.id = id; } /** * Gets the internal id of the type * * @return the id */ public int getId() { return id; } public static Type getById(int id) { if (id >= MAP_LENGTH || id < 0 || map[id] == null) { throw new IllegalArgumentException("No type found for id " + id); } return map[id]; } }; }
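A small filtering sketch (hypothetical caller) following the class comment's advice to look only at groups used by normal clients:

// Hypothetical helper: keep only the groups regular clients can be members of.
static boolean isClientGroup(Group group) {
    return group.getType() == Group.Type.REGULAR;
}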
package com.nestedworld.nestedworld.ui.fight.battle; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import android.support.v7.widget.RecyclerView; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.ImageView; import android.widget.TextView; import com.bumptech.glide.Glide; import com.nestedworld.nestedworld.R; import com.nestedworld.nestedworld.database.models.Monster; import com.nestedworld.nestedworld.helpers.log.LogHelper; import java.util.ArrayList; import java.util.List; import butterknife.BindView; import butterknife.ButterKnife; import static com.orm.util.ContextUtil.getContext; public class BattleMonsterAdapter extends RecyclerView.Adapter<BattleMonsterAdapter.BattleMonsterViewHolder> { private final List<Monster> mMonsters = new ArrayList<>(); /* ** Constructor */ public BattleMonsterAdapter() { } /* ** Life cycle */ @Override public BattleMonsterViewHolder onCreateViewHolder(ViewGroup parent, int viewType) { // create a new view View v = LayoutInflater.from(parent.getContext()).inflate(R.layout.item_fight_battlemonster, parent, false); BattleMonsterViewHolder battleMonsterViewHolder = new BattleMonsterViewHolder(v); ButterKnife.bind(battleMonsterViewHolder, v); return battleMonsterViewHolder; } @Override public void onBindViewHolder(BattleMonsterViewHolder holder, int position) { //Get selectedMonster Monster monster = mMonsters.get(position); if (monster != null) { populateMonsterInfo(holder, monster); } } @Override public int getItemCount() { return mMonsters.size(); } /* ** Public method */ public void add(@Nullable final Monster monster) { mMonsters.add(monster); notifyItemInserted(mMonsters.size() - 1); } public void addAll(@NonNull final List<Monster> monsters) { mMonsters.addAll(monsters); // the new items were appended, so notify an insertion of exactly that range notifyItemRangeInserted(mMonsters.size() - monsters.size(), monsters.size()); } public void clear() { int oldSize = mMonsters.size(); mMonsters.clear(); notifyItemRangeRemoved(0, oldSize); } /* ** Internal method */ private void populateMonsterInfo(@NonNull final BattleMonsterViewHolder holder, @NonNull final Monster monster) { holder.textViewMonsterName.setText(monster.name); //Display monster picture Glide.with(getContext()) .load(monster.sprite) .placeholder(R.drawable.default_monster) .centerCrop() .into(holder.imageViewMonster); } public class BattleMonsterViewHolder extends RecyclerView.ViewHolder { @BindView(R.id.textview_monster_name) TextView textViewMonsterName; @BindView(R.id.imageView_monster) ImageView imageViewMonster; @BindView(R.id.imageView_monster_status) ImageView imageViewMonsterStatus; public BattleMonsterViewHolder(View itemView) { super(itemView); } } }
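// Usage sketch: wiring the adapter to a RecyclerView from an Activity or Fragment.
// The RecyclerView instance and the List<Monster> are supplied by the caller and are
// placeholders here; only the adapter's own add()/addAll()/clear() methods plus the
// standard RecyclerView/LinearLayoutManager API are used.
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import java.util.List;

class BattleMonsterListBinding {

    static BattleMonsterAdapter bind(RecyclerView list, List<Monster> monsters) {
        list.setLayoutManager(new LinearLayoutManager(list.getContext()));
        BattleMonsterAdapter adapter = new BattleMonsterAdapter();
        list.setAdapter(adapter);
        adapter.addAll(monsters); // triggers the insert notification for the whole range
        return adapter;
    }
}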
package nl.infcomtec.jparticle; import java.io.BufferedReader; import java.io.DataOutputStream; import java.io.IOException; import java.io.InputStreamReader; import java.net.MalformedURLException; import java.net.ProtocolException; import java.net.URL; import java.net.URLEncoder; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.Date; import java.util.TreeMap; import java.util.UUID; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; import javax.net.ssl.HttpsURLConnection; /** * * @author walter */ public class Cloud { /** * Date and Time parser */ private static final DateFormat fmt = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSSZ"); /** * Handles asynchronous calls to the cloud. */ private static final ExecutorService pool = Executors.newWorkStealingPool(); /** * Handles polling variables or functions */ private static final ScheduledExecutorService poll = Executors.newScheduledThreadPool(0); /** * Convert a Particle date-time to a Java Date. * * @param dateString As found in the JSON data. * @return A Java Date object. */ public static Date parseDateTime(String dateString) { synchronized (fmt) { if (dateString == null) { return null; } if (dateString.contains("T")) { dateString = dateString.replace('T', ' '); } if (dateString.contains("Z")) { dateString = dateString.replace("Z", "+0000"); } else { dateString = dateString.substring(0, dateString.lastIndexOf(':')) + dateString.substring(dateString.lastIndexOf(':') + 1); } try { return fmt.parse(dateString); } catch (Exception e) { e.printStackTrace(); return null; } } } /** * Your accessToken */ public final String accessToken; /** * Currently registered subscribers */ private final TreeMap<UUID, DeviceEvent> callBacks = new TreeMap<>(); /** * Currently registered poll call-backs */ private final TreeMap<UUID, ScheduledFuture<?>> polls = new TreeMap<>(); /** * Your devices */ public final TreeMap<String, Device> devices = new TreeMap<>(); /** * Constructor. * * @param accessToken Your access token. * @param readMine If true will start a thread to collect publications from * your devices. * @param readAll If true will start a thread to collect publications of all * devices -- not recommended. */ public Cloud(String accessToken, boolean readMine, boolean readAll) { if (accessToken.startsWith("Bearer ")) { this.accessToken = accessToken; } else { this.accessToken = "Bearer " + accessToken; } try { for (Device d : Device.getDevices(this.accessToken)) { devices.put(d.name, d); } } catch (Exception ex) { ex.printStackTrace(); throw new RuntimeException(ex); } if (readMine) { new Thread(new PublishedReader(true)).start(); } if (readAll) { new Thread(new PublishedReader(false)).start(); } } /** * Call a function on a device. * * @param device Device eventName. * @param funcName Function eventName. * @param funcArgs Argument(s) for the function call. * @return The value from the function on the device. */ public int call(String device, String funcName, String funcArgs) { try { return devices.get(device).callFunction(funcName, funcArgs, accessToken); } catch (Exception ex) { ex.printStackTrace(); throw new RuntimeException(ex); } } /** * Async call to a function on a device. * * @param device Device eventName. 
* @param funcName Function eventName. * @param funcArgs Argument(s) for the function call. * @return A Future to obtain the value from. */ public Future<Integer> callF(final String device, final String funcName, final String funcArgs) { return pool.submit(new Callable<Integer>() { @Override public Integer call() throws Exception { return Cloud.this.call(device, funcName, funcArgs); } }); } /** * Async call to a function on a device. This version ignores the function * result. * * @param device Device eventName. * @param funcName Function eventName. * @param funcArgs Argument(s) for the function call. */ public void callTask(final String device, final String funcName, final String funcArgs) { pool.submit(new Runnable() { @Override public void run() { Cloud.this.call(device, funcName, funcArgs); } }); } /** * Get a value for a variable from a device. * * @param device Device eventName. * @param varName Name of the variable. * @return Value of the variable or null on errors. */ public Double getDouble(String device, String varName) { try { return devices.get(device).readDouble(varName, accessToken); } catch (Exception ex) { ex.printStackTrace(); throw new RuntimeException(ex); } } /** * Async version to get a value for a variable from a device. * * @param device Device eventName. * @param varName Name of the variable. * @return Value of the variable or null on errors. */ public Future<Double> getDoubleF(final String device, final String varName) { return pool.submit(new Callable<Double>() { @Override public Double call() throws Exception { return getDouble(device, varName); } }); } /** * Get a value for a variable from a device. * * @param device Device eventName. * @param varName Name of the variable. * @return Value of the variable or null on errors. */ public Boolean getBoolean(String device, String varName) { try { return devices.get(device).readBoolean(varName, accessToken); } catch (Exception ex) { ex.printStackTrace(); throw new RuntimeException(ex); } } /** * Async version to get a value for a variable from a device. * * @param device Device eventName. * @param varName Name of the variable. * @return Value of the variable or null on errors. */ public Future<Boolean> getBooleanF(final String device, final String varName) { return pool.submit(new Callable<Boolean>() { @Override public Boolean call() throws Exception { return getBoolean(device, varName); } }); } /** * Get a value for a variable from a device. * * @param device Device eventName. * @param varName Name of the variable. * @return Value of the variable or null on errors. */ public Integer getInt(String device, String varName) { try { return devices.get(device).readInt(varName, accessToken); } catch (Exception ex) { ex.printStackTrace(); throw new RuntimeException(ex); } } /** * Async version to get a value for a variable from a device. * * @param device Device eventName. * @param varName Name of the variable. * @return Value of the variable or null on errors. */ public Future<Integer> getIntF(final String device, final String varName) { return pool.submit(new Callable<Integer>() { @Override public Integer call() throws Exception { return getInt(device, varName); } }); } /** * Get a value for a variable from a device. * * @param device Device eventName. * @param varName Name of the variable. * @return Value of the variable or null on errors. 
*/ public String getString(String device, String varName) { try { return devices.get(device).readString(varName, accessToken); } catch (Exception ex) { ex.printStackTrace(); throw new RuntimeException(ex); } } /** * Async version to get a value for a variable from a device. * * @param device Device eventName. * @param varName Name of the variable. * @return Value of the variable or null on errors. */ public Future<String> getStringF(final String device, final String varName) { return pool.submit(new Callable<String>() { @Override public String call() throws Exception { return getString(device, varName); } }); } /** * Publish an event. * * @param name Name for the event. * @param data Content for the event. */ public void publish(String name, String data) { try { publishTask(name, data, false, 60); } catch (Exception ex) { ex.printStackTrace(); throw new RuntimeException(ex); } } /** * Publish a private event. * * @param name Name for the event. * @param data Content for the event. */ public void publishPrivate(String name, String data) { try { publishTask(name, data, true, 60); } catch (Exception ex) { ex.printStackTrace(); throw new RuntimeException(ex); } } /** * Publish an event. * * @param name Name for the event. * @param data Content for the event. * @param priv Private event. * @param ttl Time to live. * */ public void publishTask(final String name, final String data, final boolean priv, final int ttl) { pool.submit(new Runnable() { @Override public void run() { try { URL url = new URL("https://api.particle.io/v1/devices/events"); HttpsURLConnection conn = (HttpsURLConnection) url.openConnection(); conn.setRequestMethod("POST"); conn.setRequestProperty("Authorization", accessToken); conn.setDoOutput(true); try (DataOutputStream wr = new DataOutputStream(conn.getOutputStream())) { wr.writeBytes("name="); wr.writeBytes(URLEncoder.encode(name, "UTF-8")); wr.writeBytes("&data="); wr.writeBytes(URLEncoder.encode(data, "UTF-8")); wr.writeBytes("&private="); wr.writeBytes(URLEncoder.encode(Boolean.toString(priv), "UTF-8")); wr.writeBytes("&ttl="); wr.writeBytes(URLEncoder.encode(Integer.toString(ttl), "UTF-8")); wr.flush(); } conn.getResponseCode(); } catch (Exception ex) { ex.printStackTrace(); throw new RuntimeException(ex); } } }); } /** * Refresh the status of your devices. */ public void refresh() { pool.submit(new Runnable() { @Override public void run() { try { for (Device d : devices.values()) { devices.put(d.name, d.refresh(accessToken)); } } catch (Exception ex) { ex.printStackTrace(); throw new RuntimeException(ex); } } }); } /** * Subscribe to an event. * * @param cb The call-back object holds all needed parameters. */ public void subscribe(DeviceEvent cb) { synchronized (callBacks) { callBacks.put(cb.uuid(), cb); } } /** * Un-subscribe from an event. * * @param cb The call-back object holds all needed parameters. */ public void unSubscribe(DeviceEvent cb) { synchronized (callBacks) { callBacks.remove(cb.uuid()); } } /** * Poll something, * * @param poller This is called at the appropriate intervals, as defined by * the callback object. Probably you would use this to read a variable or * call a function. */ public void poll(DevicePoll poller) { synchronized (polls) { ScheduledFuture<?> handle = poll.scheduleWithFixedDelay(poller, 0, poller.interval(), TimeUnit.MILLISECONDS); polls.put(poller.uuid(), handle); } } /** * Cancel calling the poller. * * @param poller The poller to cancel. 
*/ public void cancel(DevicePoll poller) { synchronized (polls) { ScheduledFuture<?> handle = polls.remove(poller.uuid()); if (null != handle) { handle.cancel(true); } } } /** * Background task to process published events. */ private class PublishedReader implements Runnable { private final boolean mine; public AtomicReference<Event> unclaimedEvent = new AtomicReference<>(); public PublishedReader(boolean mine) { this.mine = mine; } @Override public void run() { Thread.currentThread().setName("PublishedReader:" + mine); while (true) { try { // Normally the below should keep runnning but of course // we CAN have an interruption in our network service. // If so we briefly wait and start again. doIO(); } catch (Exception ex) { ex.printStackTrace(); try { Thread.sleep(5000); } catch (InterruptedException done) { // this probably means we should really stop System.exit(4); // EINTR 4 Interrupted system call } } } } private void doIO() throws IOException, ProtocolException, MalformedURLException { URL url = new URL(mine ? "https: HttpsURLConnection conn = (HttpsURLConnection) url.openConnection(); conn.setRequestMethod("GET"); conn.setRequestProperty("Authorization", Cloud.this.accessToken); conn.setDoOutput(false); try (BufferedReader bfr = new BufferedReader(new InputStreamReader(conn.getInputStream()))) { String s; while (null != (s = bfr.readLine())) { if (s.startsWith("event: ")) { String eventName = s.substring(7); String data = bfr.readLine(); //System.out.println(eventName + " " + data); if (data.startsWith("data: ")) { synchronized (callBacks) { AnyJSON aj = new AnyJSON(data.substring(6)); final Event e = new Event(devices, eventName, aj.getObject()); //System.out.println(Thread.currentThread().getName()+" "+e); for (final DeviceEvent cb : callBacks.values()) { if (null != cb.forDeviceName() && cb.forDeviceName().equals(e.deviceName)) { if (null == cb.forEventName() || e.eventName.equals(cb.forEventName())) { pool.submit(new Runnable() { @Override public void run() { cb.event(e); } }); } } else if (null != cb.forDeviceId() && cb.forDeviceId().equals(e.coreId)) { if (null == cb.forEventName() || e.eventName.equals(cb.forEventName())) { pool.submit(new Runnable() { @Override public void run() { cb.event(e); } }); } } else if (null == cb.forDeviceName() && null == cb.forDeviceId()) { if (null == cb.forEventName() || e.eventName.equals(cb.forEventName())) { pool.submit(new Runnable() { @Override public void run() { cb.event(e); } }); } } } } } } } } } } }
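// Usage sketch: basic, blocking use of the Cloud facade defined above. The access
// token, device names, function and variable names are placeholders; the calls
// themselves (constructor, call(), getDouble(), publishPrivate()) are the public API
// declared in this class.
class CloudUsageExample {

    public static void main(String[] args) {
        // readMine=true starts the background reader for events from your own devices
        Cloud cloud = new Cloud("YOUR_ACCESS_TOKEN", true, false);
        int rc = cloud.call("garage-door", "open", "");             // invoke a device function
        Double temp = cloud.getDouble("greenhouse", "temperature"); // read a cloud variable
        cloud.publishPrivate("greenhouse/alert", "temp=" + temp);   // fire a private event
        System.out.println("call returned " + rc + ", temperature " + temp);
    }
}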
package me.normanmaurer.niosmtp.impl; import java.io.ByteArrayInputStream; import java.io.InputStream; import java.net.InetSocketAddress; import java.util.Arrays; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.concurrent.ExecutionException; import java.util.concurrent.Executors; import org.jboss.netty.bootstrap.ClientBootstrap; import org.jboss.netty.channel.ChannelFuture; import org.jboss.netty.channel.ChannelFutureListener; import org.jboss.netty.channel.ChannelPipelineFactory; import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory; import me.normanmaurer.niosmtp.DeliveryRecipientStatus; import me.normanmaurer.niosmtp.SMTPClient; import me.normanmaurer.niosmtp.SMTPClientConfig; import me.normanmaurer.niosmtp.SMTPClientFuture; import me.normanmaurer.niosmtp.SMTPCommand; import me.normanmaurer.niosmtp.impl.internal.ChannelLocalSupport; import me.normanmaurer.niosmtp.impl.internal.SMTPClientConfigImpl; import me.normanmaurer.niosmtp.impl.internal.SMTPClientFutureImpl; import me.normanmaurer.niosmtp.impl.internal.SMTPClientPipelineFactory; /** * {@link SMTPClient} implementation which will create a new Connection for * every * {@link #deliver(InetSocketAddress, String, List, InputStream, SMTPClientConfig)} * call. * * So no pooling is active * * @author Norman Maurer * */ public class UnpooledSMTPClient implements SMTPClient, ChannelLocalSupport { protected final NioClientSocketChannelFactory socketFactory = new NioClientSocketChannelFactory(Executors.newCachedThreadPool(), Executors.newCachedThreadPool()); protected ChannelPipelineFactory createChannelPipelineFactory() { return new SMTPClientPipelineFactory(); } /* * (non-Javadoc) * @see me.normanmaurer.niosmtp.SMTPClient#deliver(java.net.InetSocketAddress, java.lang.String, java.util.List, java.io.InputStream, me.normanmaurer.niosmtp.SMTPClientConfig) */ public SMTPClientFuture deliver(InetSocketAddress host, final String mailFrom, final List<String> recipients, final InputStream msg, final SMTPClientConfig config) { final SMTPClientFutureImpl future = new SMTPClientFutureImpl(); ClientBootstrap bootstrap = new ClientBootstrap(socketFactory); bootstrap.setOption("connectTimeoutMillis", config.getConnectionTimeout() * 1000); bootstrap.setPipelineFactory(createChannelPipelineFactory()); InetSocketAddress local = config.getLocalAddress(); bootstrap.connect(host, local).addListener(new ChannelFutureListener() { @Override public void operationComplete(ChannelFuture cf) throws Exception { if (cf.isSuccess()) { Map<String, Object> attrs = new HashMap<String, Object>(); attrs.put(FUTURE_KEY, future); attrs.put(NEXT_COMMAND_KEY, SMTPCommand.HELO); attrs.put(MAIL_FROM_KEY, mailFrom); attrs.put(RECIPIENTS_KEY, new LinkedList<String>(recipients)); attrs.put(MSG_KEY, msg); attrs.put(SMTP_CONFIG_KEY, config); ATTRIBUTES.set(cf.getChannel(), attrs); } // Set the channel so we can close it for cancel later future.setChannel(cf.getChannel()); } }); return future; } /** * Call this method to destroy the {@link SMTPClient} and release all resources */ public void destroy() { socketFactory.releaseExternalResources(); } public static void main(String[] args) throws InterruptedException, ExecutionException { UnpooledSMTPClient client = new UnpooledSMTPClient(); SMTPClientFuture future = client.deliver(new InetSocketAddress("192.168.0.254", 25), "test@test.de", Arrays.asList("nm@normanmaurer.me", "nm2@normanmaurer.me"), new 
ByteArrayInputStream("Subject: test\r\n\r\ntest".getBytes()), new SMTPClientConfigImpl()); Iterator<DeliveryRecipientStatus> statusIt = future.get().getRecipientStatus(); while(statusIt.hasNext()) { DeliveryRecipientStatus rs = statusIt.next(); System.out.println(rs.getAddress() + "=> " + rs.getResponse().getCode() + " " + rs.getResponse().getLines().toString()); } } }
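// Usage sketch: the main() method above never releases the NIO resources it allocates.
// In application code, destroy() should be called once delivery has finished. Host,
// sender, recipient and message content below are placeholders; the types reuse the
// imports of the class above.
class SmtpShutdownExample {

    static void deliverAndShutdown() throws Exception {
        UnpooledSMTPClient client = new UnpooledSMTPClient();
        try {
            SMTPClientFuture future = client.deliver(
                    new InetSocketAddress("mail.example.com", 25),
                    "sender@example.com",
                    Arrays.asList("rcpt@example.com"),
                    new ByteArrayInputStream("Subject: hi\r\n\r\nhello".getBytes()),
                    new SMTPClientConfigImpl());
            future.get(); // block until the delivery result is available
        } finally {
            client.destroy(); // release the Netty socket factory's external resources
        }
    }
}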
package jodd.http; import jodd.datetime.TimeUtil; import jodd.http.up.ByteArrayUploadable; import jodd.http.up.FileUploadable; import jodd.http.up.Uploadable; import jodd.io.FastCharArrayWriter; import jodd.io.FileNameUtil; import jodd.io.StreamUtil; import jodd.upload.FileUpload; import jodd.upload.MultipartStreamParser; import jodd.util.MimeTypes; import jodd.util.RandomStringUtil; import jodd.util.StringPool; import jodd.util.StringUtil; import java.io.BufferedReader; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.IOException; import java.io.OutputStream; import java.io.StringWriter; import java.io.UnsupportedEncodingException; import java.util.Map; import static jodd.util.StringPool.CRLF; /** * Base class for {@link HttpRequest} and {@link HttpResponse}. */ @SuppressWarnings("unchecked") public abstract class HttpBase<T> { public static final String HEADER_ACCEPT = "Accept"; public static final String HEADER_ACCEPT_ENCODING = "Accept-Encoding"; public static final String HEADER_CONTENT_TYPE = "Content-Type"; public static final String HEADER_CONTENT_LENGTH = "Content-Length"; public static final String HEADER_CONTENT_ENCODING = "Content-Encoding"; public static final String HEADER_HOST = "Host"; public static final String HEADER_ETAG = "ETag"; public static final String HEADER_CONNECTION = "Connection"; public static final String HEADER_KEEP_ALIVE = "Keep-Alive"; public static final String HEADER_CLOSE = "Close"; public static final String HTTP_1_0 = "HTTP/1.0"; public static final String HTTP_1_1 = "HTTP/1.1"; protected String httpVersion = HTTP_1_1; protected HttpValuesMap headers = new HttpValuesMap(); protected HttpValuesMap form; // holds form data (when used) protected String body; // holds raw body string (always) /** * Returns HTTP version string. By default it's "HTTP/1.1". */ public String httpVersion() { return httpVersion; } /** * Sets the HTTP version string. Must be formed like "HTTP/1.1". */ public T httpVersion(String httpVersion) { this.httpVersion = httpVersion; return (T) this; } /** * Returns value of header parameter. * If multiple headers with the same names exist, * the first value will be returned. Returns <code>null</code> * if header doesn't exist. */ public String header(String name) { String key = name.trim().toLowerCase(); Object value = headers.getFirst(key); if (value == null) { return null; } return value.toString(); } /** * Returns all values for given header name. */ public String[] headers(String name) { String key = name.trim().toLowerCase(); return headers.getStrings(key); } /** * Removes all header parameters for given name. */ public void removeHeader(String name) { String key = name.trim().toLowerCase(); headers.remove(key); } /** * Adds header parameter. If a header with the same name exist, * it will not be overwritten, but the new header with the same * name is going to be added. * The order of header parameters is preserved. * Also detects 'Content-Type' header and extracts * {@link #mediaType() media type} and {@link #charset() charset} * values. */ public T header(String name, String value) { return header(name, value, false); } /** * Adds or sets header parameter. 
* @see #header(String, String) */ public T header(String name, String value, boolean overwrite) { String key = name.trim().toLowerCase(); value = value.trim(); if (key.equalsIgnoreCase(HEADER_CONTENT_TYPE)) { mediaType = HttpUtil.extractMediaType(value); charset = HttpUtil.extractContentTypeCharset(value); } if (overwrite == true) { headers.set(key, value); } else { headers.add(key, value); } return (T) this; } /** * Internal direct header setting. */ protected void _header(String name, String value, boolean overwrite) { String key = name.trim().toLowerCase(); value = value.trim(); if (overwrite) { headers.set(key, value); } else { headers.add(key, value); } } /** * Adds <code>int</code> value as header parameter, * @see #header(String, String) */ public T header(String name, int value) { _header(name, String.valueOf(value), false); return (T) this; } /** * Adds date value as header parameter. * @see #header(String, String) */ public T header(String name, long millis) { _header(name, TimeUtil.formatHttpDate(millis), false); return (T) this; } /** * Returns {@link jodd.http.HttpValuesMap all headers}. Returned * instance is the same as one used in request/response object. */ // public HttpValuesMap headers() { // return headers; protected String charset; /** * Returns charset, as defined by 'Content-Type' header. * If not set, returns <code>null</code> - indicating * the default charset (ISO-8859-1). */ public String charset() { return charset; } /** * Defines just content type charset. Setting this value to * <code>null</code> will remove the charset information from * the header. */ public T charset(String charset) { this.charset = null; contentType(null, charset); return (T) this; } protected String mediaType; /** * Returns media type, as defined by 'Content-Type' header. * If not set, returns <code>null</code> - indicating * the default media type, depending on request/response. */ public String mediaType() { return mediaType; } /** * Defines just content media type. * Setting this value to <code>null</code> will * not have any effects. */ public T mediaType(String mediaType) { contentType(mediaType, null); return (T) this; } /** * Returns full "Content-Type" header. * It consists of {@link #mediaType() media type} * and {@link #charset() charset}. */ public String contentType() { return header(HEADER_CONTENT_TYPE); } /** * Sets full "Content-Type" header. Both {@link #mediaType() media type} * and {@link #charset() charset} are overridden. */ public T contentType(String contentType) { header(HEADER_CONTENT_TYPE, contentType, true); return (T) this; } /** * Sets "Content-Type" header by defining media-type and/or charset parameter. * This method may be used to update media-type and/or charset by passing * non-<code>null</code> value for changes. * <p> * Important: if Content-Type header has some other parameters, they will be removed! */ public T contentType(String mediaType, String charset) { if (mediaType == null) { mediaType = this.mediaType; } else { this.mediaType = mediaType; } if (charset == null) { charset = this.charset; } else { this.charset = charset; } String contentType = mediaType; if (charset != null) { contentType += ";charset=" + charset; } _header(HEADER_CONTENT_TYPE, contentType, true); return (T) this; } /** * Defines "Connection" header as "Keep-Alive" or "Close". * Existing value is overwritten. 
*/ public T connectionKeepAlive(boolean keepAlive) { if (keepAlive) { header(HEADER_CONNECTION, HEADER_KEEP_ALIVE, true); } else { header(HEADER_CONNECTION, HEADER_CLOSE, true); } return (T) this; } /** * Returns <code>true</code> if connection is persistent. * If "Connection" header does not exist, returns <code>true</code> * for HTTP 1.1 and <code>false</code> for HTTP 1.0. If * "Connection" header exist, checks if it is equal to "Close". * <p> * In HTTP 1.1, all connections are considered persistent unless declared otherwise. * Under HTTP 1.0, there is no official specification for how keepalive operates. */ public boolean isConnectionPersistent() { String connection = header(HEADER_CONNECTION); if (connection == null) { return !httpVersion.equalsIgnoreCase(HTTP_1_0); } return !connection.equalsIgnoreCase(HEADER_CLOSE); } /** * Returns full "Content-Length" header or * <code>null</code> if not set. */ public String contentLength() { return header(HEADER_CONTENT_LENGTH); } /** * Sets the full "Content-Length" header. */ public T contentLength(int value) { _header(HEADER_CONTENT_LENGTH, String.valueOf(value), true); return (T) this; } /** * Returns "Content-Encoding" header. */ public String contentEncoding() { return header(HEADER_CONTENT_ENCODING); } /** * Returns "Accept" header. */ public String accept() { return header(HEADER_ACCEPT); } /** * Sets "Accept" header. */ public T accept(String encodings) { header(HEADER_ACCEPT, encodings, true); return (T) this; } /** * Returns "Accept-Encoding" header. */ public String acceptEncoding() { return header(HEADER_ACCEPT_ENCODING); } /** * Sets "Accept-Encoding" header. */ public T acceptEncoding(String encodings) { header(HEADER_ACCEPT_ENCODING, encodings, true); return (T) this; } protected void initForm() { if (form == null) { form = new HttpValuesMap(); } } /** * Wraps non-Strings form values with {@link jodd.http.up.Uploadable uploadable content}. * Detects invalid types and throws an exception. So all uploadable values * are of the same type. */ protected Object wrapFormValue(Object value) { if (value == null) { return null; } if (value instanceof CharSequence) { return value.toString(); } if (value instanceof File) { return new FileUploadable((File) value); } if (value instanceof byte[]) { return new ByteArrayUploadable((byte[]) value, null); } if (value instanceof Uploadable) { return value; } throw new HttpException("Unsupported value type: " + value.getClass().getName()); } /** * Adds the form parameter. Existing parameter will not be overwritten. */ public T form(String name, Object value) { initForm(); value = wrapFormValue(value); form.add(name, value); return (T) this; } /** * Sets form parameter. Optionally overwrite existing one. */ public T form(String name, Object value, boolean overwrite) { initForm(); value = wrapFormValue(value); if (overwrite) { form.set(name, value); } else { form.add(name, value); } return (T) this; } /** * Sets many form parameters at once. */ public T form(String name, Object value, Object... parameters) { initForm(); form(name, value); for (int i = 0; i < parameters.length; i += 2) { name = parameters[i].toString(); form(name, parameters[i + 1]); } return (T) this; } /** * Sets many form parameters at once. */ public T form(Map<String, Object> formMap) { initForm(); for (Map.Entry<String, Object> entry : formMap.entrySet()) { form(entry.getKey(), entry.getValue()); } return (T) this; } /** * Return map of form parameters. 
* Note that all uploadable values are wrapped with {@link jodd.http.up.Uploadable}. */ public Map<String, Object[]> form() { return form; } protected String formEncoding = JoddHttp.defaultFormEncoding; /** * Defines encoding for forms parameters. Default value is * copied from {@link JoddHttp#defaultFormEncoding}. * It is overridden by {@link #charset() charset} value. */ public T formEncoding(String encoding) { this.formEncoding = encoding; return (T) this; } /** * Returns <b>raw</b> body as received or set (always in ISO-8859-1 encoding). * If body content is a text, use {@link #bodyText()} to get it converted. * Returns <code>null</code> if body is not specified! */ public String body() { return body; } /** * Returns <b>raw</b> body bytes. Returns <code>null</code> if body is not specified. */ public byte[] bodyBytes() { if (body == null) { return null; } try { return body.getBytes(StringPool.ISO_8859_1); } catch (UnsupportedEncodingException ignore) { return null; } } /** * Returns {@link #body() body content} as text. If {@link #charset() charset parameter} * of "Content-Type" header is defined, body string charset is converted, otherwise * the same raw body content is returned. Never returns <code>null</code>. */ public String bodyText() { if (body == null) { return StringPool.EMPTY; } if (charset != null) { return StringUtil.convertCharset(body, StringPool.ISO_8859_1, charset); } return body(); } /** * Sets <b>raw</b> body content and discards all form parameters. * Important: body string is in RAW format, meaning, ISO-8859-1 encoding. * Also sets "Content-Length" parameter. However, "Content-Type" is not set * and it is expected from user to set this one. */ public T body(String body) { this.body = body; this.form = null; contentLength(body.length()); return (T) this; } /** * Defines body text and content type (as media type and charset). * Body string will be converted to {@link #body(String) raw body string} * and "Content-Type" header will be set. */ public T bodyText(String body, String mediaType, String charset) { body = StringUtil.convertCharset(body, charset, StringPool.ISO_8859_1); contentType(mediaType, charset); body(body); return (T) this; } /** * Defines {@link #bodyText(String, String, String) body text content} * that will be encoded in {@link JoddHttp#defaultBodyEncoding default body encoding}. */ public T bodyText(String body, String mediaType) { return bodyText(body, mediaType, JoddHttp.defaultBodyEncoding); } /** * Defines {@link #bodyText(String, String, String) body text content} * that will be encoded as {@link JoddHttp#defaultBodyMediaType default body media type} * in {@link JoddHttp#defaultBodyEncoding default body encoding}. */ public T bodyText(String body) { return bodyText(body, JoddHttp.defaultBodyMediaType, JoddHttp.defaultBodyEncoding); } /** * Sets <b>raw</b> body content and discards form parameters. * Also sets "Content-Length" and "Content-Type" parameter. * @see #body(String) */ public T body(byte[] content, String contentType) { String body = null; try { body = new String(content, StringPool.ISO_8859_1); } catch (UnsupportedEncodingException ignore) { } contentType(contentType); return body(body); } protected boolean multipart = false; /** * Returns <code>true</code> if form contains {@link jodd.http.up.Uploadable}. 
*/ protected boolean isFormMultipart() { if (multipart) { return true; } for (Object[] values : form.values()) { if (values == null) { continue; } for (Object value : values) { if (value instanceof Uploadable) { return true; } } } return false; } /** * Creates form {@link jodd.http.Buffer buffer} and sets a few headers. */ protected Buffer formBuffer() { Buffer buffer = new Buffer(); if (form == null || form.isEmpty()) { return buffer; } if (!isFormMultipart()) { // determine form encoding String formEncoding = charset; if (formEncoding == null) { formEncoding = this.formEncoding; } // encode String formQueryString = HttpUtil.buildQuery(form, formEncoding); contentType("application/x-www-form-urlencoded", null); contentLength(formQueryString.length()); buffer.append(formQueryString); return buffer; } String boundary = StringUtil.repeat('-', 10) + RandomStringUtil.randomAlphaNumeric(10); for (Map.Entry<String, Object[]> entry : form.entrySet()) { buffer.append("--"); buffer.append(boundary); buffer.append(CRLF); String name = entry.getKey(); Object[] values = entry.getValue(); for (Object value : values) { if (value instanceof String) { String string = (String) value; buffer.append("Content-Disposition: form-data; name=\"").append(name).append('"').append(CRLF); buffer.append(CRLF); buffer.append(string); } else if (value instanceof Uploadable) { Uploadable uploadable = (Uploadable) value; String fileName = uploadable.getFileName(); if (fileName == null) { fileName = name; } buffer.append("Content-Disposition: form-data; name=\"").append(name); buffer.append("\"; filename=\"").append(fileName).append('"').append(CRLF); String mimeType = uploadable.getMimeType(); if (mimeType == null) { mimeType = MimeTypes.getMimeType(FileNameUtil.getExtension(fileName)); } buffer.append(HEADER_CONTENT_TYPE).append(": ").append(mimeType).append(CRLF); buffer.append("Content-Transfer-Encoding: binary").append(CRLF); buffer.append(CRLF); buffer.append(uploadable); //byte[] bytes = uploadable.getBytes(); //for (byte b : bytes) { //buffer.append(CharUtil.toChar(b)); } else { // should never happen! throw new HttpException("Unsupported type"); } buffer.append(CRLF); } } buffer.append("--").append(boundary).append("--"); // the end contentType("multipart/form-data; boundary=" + boundary); contentLength(buffer.size()); return buffer; } /** * Returns string representation of this request or response. */ public String toString() { return toString(true); } /** * Returns the full request/response, or just the headers. * Useful for debugging. */ public String toString(boolean fullResponse) { Buffer buffer = buffer(fullResponse); StringWriter stringWriter = new StringWriter(); try { buffer.writeTo(stringWriter); } catch (IOException ioex) { throw new HttpException(ioex); } return stringWriter.toString(); } /** * Returns byte array of request or response. */ public byte[] toByteArray() { Buffer buffer = buffer(true); ByteArrayOutputStream baos = new ByteArrayOutputStream(buffer.size()); try { buffer.writeTo(baos); } catch (IOException ioex) { throw new HttpException(ioex); } return baos.toByteArray(); } /** * Creates {@link jodd.http.Buffer buffer} ready to be consumed. * The buffer can, optionally, contain just the headers. */ protected abstract Buffer buffer(boolean full); protected HttpProgressListener httpProgressListener; /** * Sends request or response to output stream.
*/ public void sendTo(OutputStream out) throws IOException { Buffer buffer = buffer(true); if (httpProgressListener == null) { buffer.writeTo(out); } else { buffer.writeTo(out, httpProgressListener); } out.flush(); } /** * Parses headers. */ protected void readHeaders(BufferedReader reader) { while (true) { String line; try { line = reader.readLine(); } catch (IOException ioex) { throw new HttpException(ioex); } if (StringUtil.isBlank(line)) { break; } int ndx = line.indexOf(':'); if (ndx != -1) { header(line.substring(0, ndx), line.substring(ndx + 1)); } else { throw new HttpException("Invalid header: " + line); } } } /** * Parses body. */ protected void readBody(BufferedReader reader) { String bodyString = null; // content length String contentLen = contentLength(); int contentLenValue = -1; if (contentLen != null) { contentLenValue = Integer.parseInt(contentLen); if (contentLenValue > 0) { FastCharArrayWriter fastCharArrayWriter = new FastCharArrayWriter(contentLenValue); try { StreamUtil.copy(reader, fastCharArrayWriter, contentLenValue); } catch (IOException ioex) { throw new HttpException(ioex); } bodyString = fastCharArrayWriter.toString(); } } // chunked encoding String transferEncoding = header("Transfer-Encoding"); if (transferEncoding != null && transferEncoding.equalsIgnoreCase("chunked")) { FastCharArrayWriter fastCharArrayWriter = new FastCharArrayWriter(); try { while (true) { String line = reader.readLine(); int len = Integer.parseInt(line, 16); if (len > 0) { StreamUtil.copy(reader, fastCharArrayWriter, len); reader.readLine(); } else { // end reached, read trailing headers, if there is any readHeaders(reader); break; } } } catch (IOException ioex) { throw new HttpException(ioex); } bodyString = fastCharArrayWriter.toString(); } // no body yet - special case if (bodyString == null && contentLenValue != 0) { // body ends when stream closes FastCharArrayWriter fastCharArrayWriter = new FastCharArrayWriter(); try { StreamUtil.copy(reader, fastCharArrayWriter); } catch (IOException ioex) { throw new HttpException(ioex); } bodyString = fastCharArrayWriter.toString(); } // BODY READY - PARSE BODY String charset = this.charset; if (charset == null) { charset = StringPool.ISO_8859_1; } body = bodyString; String mediaType = mediaType(); if (mediaType == null) { mediaType = StringPool.EMPTY; } else { mediaType = mediaType.toLowerCase(); } if (mediaType.equals("application/x-www-form-urlencoded")) { form = HttpUtil.parseQuery(bodyString, true); return; } if (mediaType.equals("multipart/form-data")) { form = new HttpValuesMap(); MultipartStreamParser multipartParser = new MultipartStreamParser(); try { byte[] bodyBytes = bodyString.getBytes(StringPool.ISO_8859_1); ByteArrayInputStream bin = new ByteArrayInputStream(bodyBytes); multipartParser.parseRequestStream(bin, charset); } catch (IOException ioex) { throw new HttpException(ioex); } // string parameters for (String paramName : multipartParser.getParameterNames()) { String[] values = multipartParser.getParameterValues(paramName); if (values.length == 1) { form.add(paramName, values[0]); } else { form.put(paramName, values); } } // file parameters for (String paramName : multipartParser.getFileParameterNames()) { FileUpload[] values = multipartParser.getFiles(paramName); if (values.length == 1) { form.add(paramName, values[0]); } else { form.put(paramName, values); } } return; } // body is a simple content form = null; } }
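// Usage sketch: how the fluent methods declared in HttpBase are typically combined via
// the concrete HttpRequest/HttpResponse subclasses named in the class Javadoc. The
// static HttpRequest.get()/post() factories and send() are assumed from the jodd-http
// API; header(), form() and bodyText() are the methods defined above. The URLs and the
// uploaded file are placeholders.
class HttpBaseUsageExample {

    static String pingAndUpload() {
        HttpResponse ping = HttpRequest.get("http://example.com/api/ping")
                .header("Accept", "application/json")
                .send();
        HttpResponse upload = HttpRequest.post("http://example.com/api/upload")
                .form("name", "report", "file", new java.io.File("report.txt")) // becomes multipart
                .send();
        return ping.bodyText() + upload.bodyText();
    }
}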
package me.prettyprint.cassandra.model; import me.prettyprint.cassandra.model.ConsistencyLevelPolicy.OperationType; import me.prettyprint.cassandra.service.CassandraClient; import me.prettyprint.cassandra.service.Cluster; import me.prettyprint.cassandra.service.Keyspace; import me.prettyprint.cassandra.utils.Assert; public /*final*/ class KeyspaceOperator { private ConsistencyLevelPolicy consistencyLevelPolicy; private final Cluster cluster; private final String keyspace; public KeyspaceOperator(String keyspace, Cluster cluster, ConsistencyLevelPolicy consistencyLevelPolicy) { Assert.noneNull(keyspace, cluster, consistencyLevelPolicy); this.keyspace = keyspace; this.cluster = cluster; this.consistencyLevelPolicy = consistencyLevelPolicy; } public void setConsistencyLevelPolicy(ConsistencyLevelPolicy cp) { this.consistencyLevelPolicy = cp; } public Cluster getCluster() { return cluster; } @Override public String toString() { return "KeyspaceOperator(" + keyspace + "," + cluster + ")"; } public long createTimestamp() { return cluster.createTimestamp(); } /*package*/ <T> ExecutionResult<T> doExecute(KeyspaceOperationCallback<T> koc) throws HectorException { CassandraClient c = null; Keyspace ks = null; try { c = cluster.borrowClient(); ks = c.getKeyspace(keyspace, consistencyLevelPolicy.get(OperationType.READ)); return koc.doInKeyspaceAndMeasure(ks); } finally { if (ks != null) { cluster.releaseClient(ks.getClient()); } else if (c != null) { // getKeyspace() failed after the client was borrowed; still return it to the pool cluster.releaseClient(c); } } } }
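// Usage sketch: constructing the operator and swapping its consistency policy at
// runtime. How the Cluster and the ConsistencyLevelPolicy instances are obtained is
// out of scope here; only the constructor and setConsistencyLevelPolicy() declared
// above are exercised, and the keyspace name is a placeholder.
class KeyspaceOperatorUsageExample {

    static KeyspaceOperator create(Cluster cluster,
                                   ConsistencyLevelPolicy defaultPolicy,
                                   ConsistencyLevelPolicy strictPolicy) {
        KeyspaceOperator op = new KeyspaceOperator("Keyspace1", cluster, defaultPolicy);
        op.setConsistencyLevelPolicy(strictPolicy); // e.g. tighten consistency for a critical batch
        return op;
    }
}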
package me.devsaki.hentoid.fragments.preferences; import android.app.Activity; import android.content.Intent; import android.net.Uri; import android.os.Bundle; import android.os.Handler; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.CheckBox; import android.widget.ProgressBar; import android.widget.RadioButton; import android.widget.TextView; import androidx.annotation.IntDef; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.documentfile.provider.DocumentFile; import androidx.fragment.app.DialogFragment; import androidx.fragment.app.FragmentManager; import com.annimon.stream.Optional; import com.google.android.material.snackbar.BaseTransientBottomBar; import com.google.android.material.snackbar.Snackbar; import org.threeten.bp.Instant; import java.io.IOException; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import io.reactivex.Observable; import io.reactivex.Single; import io.reactivex.android.schedulers.AndroidSchedulers; import io.reactivex.disposables.Disposable; import io.reactivex.disposables.Disposables; import io.reactivex.schedulers.Schedulers; import me.devsaki.hentoid.HentoidApp; import me.devsaki.hentoid.R; import me.devsaki.hentoid.database.CollectionDAO; import me.devsaki.hentoid.database.ObjectBoxDAO; import me.devsaki.hentoid.database.domains.Content; import me.devsaki.hentoid.database.domains.ErrorRecord; import me.devsaki.hentoid.enums.ErrorType; import me.devsaki.hentoid.enums.Site; import me.devsaki.hentoid.enums.StatusContent; import me.devsaki.hentoid.json.JsonContentCollection; import me.devsaki.hentoid.util.Consts; import me.devsaki.hentoid.util.ContentHelper; import me.devsaki.hentoid.util.FileHelper; import me.devsaki.hentoid.util.Helper; import me.devsaki.hentoid.util.JsonHelper; import me.devsaki.hentoid.util.Preferences; import timber.log.Timber; import static androidx.core.view.ViewCompat.requireViewById; import static com.google.android.material.snackbar.BaseTransientBottomBar.LENGTH_LONG; public class LibImportDialogFragment extends DialogFragment { private static int RQST_PICK_IMPORT_FILE = 4; @IntDef({Result.OK, Result.CANCELED, Result.INVALID_FOLDER, Result.OTHER}) @Retention(RetentionPolicy.SOURCE) public @interface Result { int OK = 0; int CANCELED = 1; int INVALID_FOLDER = 2; int OTHER = 3; } private ViewGroup rootView; private View selectFileBtn; private TextView progressTxt; private ProgressBar progressBar; private CheckBox libraryChk; private CheckBox queueChk; private View runBtn; // Variable used during the selection process private Uri selectedFileUri; // Variable used during the import process private CollectionDAO dao; private int totalBooks; private int currentProgress; private int nbSuccess; private Map<Site, DocumentFile> siteFoldersCache = null; private Map<Site, List<DocumentFile>> bookFoldersCache = new HashMap<>(); // Disposable for RxJava private Disposable importDisposable = Disposables.empty(); public static void invoke(@NonNull final FragmentManager fragmentManager) { LibImportDialogFragment fragment = new LibImportDialogFragment(); fragment.show(fragmentManager, null); } @Nullable @Override public View onCreateView(@NonNull LayoutInflater inflater, ViewGroup container, Bundle savedState) { return inflater.inflate(R.layout.dialog_prefs_import, container, false); } @Override public void 
onViewCreated(@NonNull View rootView, @Nullable Bundle savedInstanceState) { super.onViewCreated(rootView, savedInstanceState); if (rootView instanceof ViewGroup) this.rootView = (ViewGroup) rootView; progressTxt = requireViewById(rootView, R.id.import_progress_text); progressBar = requireViewById(rootView, R.id.import_progress_bar); selectFileBtn = requireViewById(rootView, R.id.import_select_file_btn); selectFileBtn.setOnClickListener(v -> askFile()); } private void askFile() { Intent intent = new Intent(Intent.ACTION_OPEN_DOCUMENT); intent.addCategory(Intent.CATEGORY_OPENABLE); intent.setType(JsonHelper.JSON_MIME_TYPE); intent.putExtra("android.content.extra.SHOW_ADVANCED", true); HentoidApp.LifeCycleListener.disable(); // Prevents the app from displaying the PIN lock when returning from the SAF dialog startActivityForResult(intent, RQST_PICK_IMPORT_FILE); } @Override public void onActivityResult(int requestCode, int resultCode, Intent data) { super.onActivityResult(requestCode, resultCode, data); HentoidApp.LifeCycleListener.enable(); // Restores autolock on app going to background @Result int result = processPickerResult(requestCode, resultCode, data); switch (result) { case Result.OK: // File selected DocumentFile doc = DocumentFile.fromSingleUri(requireContext(), selectedFileUri); if (null == doc) return; selectFileBtn.setVisibility(View.GONE); checkFile(doc); break; case Result.CANCELED: Snackbar.make(rootView, R.string.import_canceled, BaseTransientBottomBar.LENGTH_LONG).show(); break; case Result.INVALID_FOLDER: Snackbar.make(rootView, R.string.import_invalid, BaseTransientBottomBar.LENGTH_LONG).show(); break; case Result.OTHER: Snackbar.make(rootView, R.string.import_other, BaseTransientBottomBar.LENGTH_LONG).show(); break; default: // Nothing should happen here } } private @Result int processPickerResult( int requestCode, int resultCode, final Intent data) { HentoidApp.LifeCycleListener.enable(); // Restores autolock on app going to background // Return from the SAF picker if (requestCode == RQST_PICK_IMPORT_FILE && resultCode == Activity.RESULT_OK) { // Get Uri from Storage Access Framework Uri fileUri = data.getData(); if (fileUri != null) { selectedFileUri = fileUri; return Result.OK; } else return Result.INVALID_FOLDER; } else if (resultCode == Activity.RESULT_CANCELED) { return Result.CANCELED; } else return Result.OTHER; } private void checkFile(@NonNull DocumentFile jsonFile) { // TODO display an indefinite progress bar just in case ? 
importDisposable = Single.fromCallable(() -> deserialiseJson(jsonFile)) .subscribeOn(Schedulers.io()) .observeOn(AndroidSchedulers.mainThread()) .subscribe( c -> onFileDeserialized(c, jsonFile), Timber::w ); } private void onFileDeserialized(Optional<JsonContentCollection> collectionOptional, DocumentFile jsonFile) { importDisposable.dispose(); TextView errorTxt = requireViewById(rootView, R.id.import_file_invalid_text); if (collectionOptional.isEmpty()) { errorTxt.setText(getResources().getString(R.string.import_file_invalid, jsonFile.getName())); errorTxt.setVisibility(View.VISIBLE); } else { selectFileBtn.setVisibility(View.GONE); errorTxt.setVisibility(View.GONE); JsonContentCollection collection = collectionOptional.get(); libraryChk = requireViewById(rootView, R.id.import_file_library_chk); int librarySize = collection.getLibrary().size(); if (librarySize > 0) { libraryChk.setText(getResources().getQuantityString(R.plurals.import_file_library, librarySize, librarySize)); libraryChk.setOnCheckedChangeListener((buttonView, isChecked) -> refreshDisplay()); libraryChk.setVisibility(View.VISIBLE); } queueChk = requireViewById(rootView, R.id.import_file_queue_chk); int queueSize = collection.getQueue().size(); if (queueSize > 0) { queueChk.setText(getResources().getQuantityString(R.plurals.import_file_queue, queueSize, queueSize)); queueChk.setOnCheckedChangeListener((buttonView, isChecked) -> refreshDisplay()); queueChk.setVisibility(View.VISIBLE); } requireViewById(rootView, R.id.import_warning_img).setVisibility(View.VISIBLE); requireViewById(rootView, R.id.import_file_help_text).setVisibility(View.VISIBLE); runBtn = requireViewById(rootView, R.id.import_run_btn); runBtn.setVisibility(View.VISIBLE); runBtn.setEnabled(false); RadioButton addChk = requireViewById(rootView, R.id.import_mode_add); runBtn.setOnClickListener(v -> runImport(collection, addChk.isChecked(), libraryChk.isChecked(), queueChk.isChecked())); } } // Gray out run button if no option is selected // TODO create a custom style to visually gray out the button when it's disabled private void refreshDisplay() { runBtn.setEnabled(queueChk.isChecked() || libraryChk.isChecked()); } private Optional<JsonContentCollection> deserialiseJson(@NonNull DocumentFile jsonFile) { JsonContentCollection result; try { result = JsonHelper.jsonToObject(requireContext(), jsonFile, JsonContentCollection.class); } catch (IOException e) { Timber.w(e); return Optional.empty(); } return Optional.of(result); } private void runImport(@NonNull JsonContentCollection collection, boolean add, boolean importLibrary, boolean importQueue) { requireViewById(rootView, R.id.import_mode).setEnabled(false); libraryChk.setEnabled(false); queueChk.setEnabled(false); runBtn.setVisibility(View.GONE); setCancelable(false); dao = new ObjectBoxDAO(requireContext()); if (!add) { if (importLibrary) dao.deleteAllLibraryBooks(); if (importQueue) dao.deleteAllQueuedBooks(); } List<Content> all = new ArrayList<>(); if (importLibrary) all.addAll(collection.getLibrary()); if (importQueue) all.addAll(collection.getQueue()); totalBooks = all.size(); currentProgress = 0; nbSuccess = 0; progressBar.setMax(totalBooks); importDisposable = Observable.fromIterable(all) .observeOn(Schedulers.io()) .map(c -> importContent(c, dao)) .observeOn(AndroidSchedulers.mainThread()) .subscribe( this::nextOK, this::nextKO, this::finish ); } private boolean importContent(@NonNull Content c, CollectionDAO dao) { // Try to map the imported content to an existing book in the downloads folder // 
Folder names can be formatted in many ways _but_ they always contain the book unique ID ! if (null == siteFoldersCache) siteFoldersCache = getSiteFolders(); DocumentFile siteFolder = siteFoldersCache.get(c.getSite()); if (siteFolder != null) mapToContent(c, siteFolder); Content duplicate = dao.selectContentBySourceAndUrl(c.getSite(), c.getUrl()); if (null == duplicate) dao.insertContent(c); return true; } private void mapToContent(@NonNull Content c, @NonNull DocumentFile siteFolder) { List<DocumentFile> bookfolders; if (bookFoldersCache.containsKey(c.getSite())) bookfolders = bookFoldersCache.get(c.getSite()); else { bookfolders = FileHelper.listFolders(requireContext(), siteFolder); bookFoldersCache.put(c.getSite(), bookfolders); } boolean filesFound = false; if (bookfolders != null) { // Look for the book ID c.populateUniqueSiteId(); for (DocumentFile f : bookfolders) if (f.getName() != null && f.getName().contains("[" + c.getUniqueSiteId() + "]")) { // Cache folder Uri c.setStorageUri(f.getUri().toString()); // Cache JSON Uri DocumentFile json = FileHelper.findFile(requireContext(), f, Consts.JSON_FILE_NAME_V2); if (json != null) c.setJsonUri(json.getUri().toString()); // Create the images from detected files c.setImageFiles(ContentHelper.createImageListFromFolder(requireContext(), f)); filesFound = true; break; } } // If no local storage found for the book, it goes in the errors queue if (!filesFound) { c.setStatus(StatusContent.ERROR); List<ErrorRecord> errors = new ArrayList<>(); errors.add(new ErrorRecord(ErrorType.IMPORT, "", "Book", "No local images found when importing - Please redownload", Instant.now())); c.setErrorLog(errors); } } private Map<Site, DocumentFile> getSiteFolders() { Helper.assertNonUiThread(); Map<Site, DocumentFile> result = new HashMap<>(); if (!Preferences.getStorageUri().isEmpty()) { Uri rootUri = Uri.parse(Preferences.getStorageUri()); DocumentFile rootFolder = DocumentFile.fromTreeUri(requireContext(), rootUri); if (rootFolder != null && rootFolder.exists()) { List<DocumentFile> subfolders = FileHelper.listFolders(requireContext(), rootFolder); String folderName; for (DocumentFile f : subfolders) if (f.getName() != null) { folderName = f.getName().toLowerCase(); for (Site s : Site.values()) { if (folderName.equals(s.getFolder().toLowerCase())) { result.put(s, f); break; } } } } } return result; } private void nextOK(boolean dummy) { nbSuccess++; updateProgress(); } private void nextKO(Throwable e) { Timber.w(e); updateProgress(); } private void updateProgress() { currentProgress++; progressTxt.setText(getResources().getString(R.string.book_progress, currentProgress, totalBooks)); progressBar.setProgress(currentProgress); progressTxt.setVisibility(View.VISIBLE); progressBar.setVisibility(View.VISIBLE); } private void finish() { importDisposable.dispose(); Snackbar.make(rootView, getResources().getString(R.string.import_result, nbSuccess), LENGTH_LONG).show(); // Dismiss after 3s, for the user to be able to see the snackbar new Handler().postDelayed(this::dismiss, 3000); } }
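// Usage sketch: the dialog is meant to be opened through its static invoke() helper.
// The preferences activity and the click callback are placeholders;
// getSupportFragmentManager() is the standard AndroidX accessor.
class PrefsActivityExample extends androidx.appcompat.app.AppCompatActivity {

    void onImportSettingClicked() {
        LibImportDialogFragment.invoke(getSupportFragmentManager());
    }
}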
package mingzuozhibi.service.amazon; import mingzuozhibi.persist.disc.Disc; import mingzuozhibi.persist.disc.Disc.UpdateType; import mingzuozhibi.persist.disc.Sakura; import mingzuozhibi.support.Dao; import org.hibernate.Session; import org.hibernate.criterion.Restrictions; import org.hibernate.exception.DataException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import java.time.LocalDateTime; import java.util.*; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; import java.util.stream.Stream; import static mingzuozhibi.service.amazon.AmazonTaskScheduler.AmazonFetchStatus.startFullUpdate; @Component public class AmazonTaskScheduler { enum AmazonFetchStatus { waitingForUpdate, startFullUpdate } private static final Logger LOGGER = LoggerFactory.getLogger(AmazonTaskScheduler.class); private AmazonFetchStatus amazonFetchStatus = AmazonFetchStatus.waitingForUpdate; @Autowired private AmazonTaskService service; @Autowired private Dao dao; public void fetchData() { if (amazonFetchStatus == AmazonFetchStatus.waitingForUpdate) { checkAmazonHotData(); } } private void checkAmazonHotData() { LOGGER.debug("[Amzon(Hot)]"); Set<Disc> discs = new LinkedHashSet<>(); dao.execute(session -> { findActiveSakura(session).forEach(sakura -> { findAmazonDiscs(sakura).sorted().limit(5).forEach(discs::add); }); }); LOGGER.debug("[Amzon(Hot)][{}]", discs.size()); AtomicInteger updateCount = new AtomicInteger(discs.size()); discs.forEach(disc -> { service.createRankTask(disc.getAsin(), checkHotCB(updateCount, disc)); }); } private Consumer<AmazonTask> checkHotCB(AtomicInteger updateCount, Disc disc) { return task -> { updateCount.decrementAndGet(); AtomicReference<Integer> newRank = new AtomicReference<>(); getRank(task).ifPresent(rank -> { newRank.set(rank); if (!rank.equals(disc.getThisRank())) { amazonFetchStatus = startFullUpdate; } }); LOGGER.debug("[Amzon(Hot)][{}][{}->{}][{}][disc={}]", Objects.equals(disc.getThisRank(), newRank.get()) ? 
"" : "", disc.getThisRank(), newRank.get(), updateCount.get(), disc.getTitle()); if (updateCount.get() == 0) { service.printFetchers(); if (amazonFetchStatus == startFullUpdate) { startFullUpdate(); } } }; } private Optional<Integer> getRank(AmazonTask task) { if (task.isDone()) { String rankText = DocumentReader.getText(task.getDocument(), "Items", "Item", "SalesRank"); return Optional.ofNullable(rankText).map(Integer::parseInt); } else { return Optional.empty(); } } @SuppressWarnings("unchecked") private List<Sakura> findActiveSakura(Session session) { return (List<Sakura>) session.createCriteria(Sakura.class) .add(Restrictions.eq("enabled", true)) .add(Restrictions.ne("key", "9999-99")) .list(); } private Stream<Disc> findAmazonDiscs(Sakura sakura) { return sakura.getDiscs().stream().filter(disc -> { UpdateType updateType = disc.getUpdateType(); return updateType == UpdateType.Amazon || updateType == UpdateType.Both; }); } private void startFullUpdate() { LOGGER.info("[Amzon(ALL)]"); LocalDateTime startTime = LocalDateTime.now().withNano(0); LinkedHashSet<Disc> discs = new LinkedHashSet<>(); LinkedHashMap<String, Integer> results = new LinkedHashMap<>(); dao.execute(session -> { findActiveSakura(session).forEach(sakura -> { findAmazonDiscs(sakura).forEach(discs::add); }); }); AtomicInteger updateCount = new AtomicInteger(discs.size()); LOGGER.info("[Amzon(ALL)][{}]", discs.size()); discs.stream().sorted().forEach(disc -> { service.createRankTask(disc.getAsin(), fullUpdateCB(startTime, discs, updateCount, results)); }); } private Consumer<AmazonTask> fullUpdateCB(LocalDateTime startTime, LinkedHashSet<Disc> discs, AtomicInteger updateCount, LinkedHashMap<String, Integer> results) { return task -> { updateCount.decrementAndGet(); getRank(task).ifPresent(rank -> { results.put(task.getAsin(), rank); }); if (updateCount.get() % 5 == 0 || updateCount.get() < 10) { LOGGER.info("[Amzon(ALL)][{}]", updateCount.get()); } else { LOGGER.debug("[Amzon(ALL)][{}]", updateCount.get()); } if (updateCount.get() == 0) { finishTheUpdate(discs, startTime, results); } }; } private void finishTheUpdate(LinkedHashSet<Disc> discs, LocalDateTime startTime, LinkedHashMap<String, Integer> results) { LOGGER.info("[Amzon(ALL)]]"); discs.forEach(disc -> { for (int i = 0; i < 3; i++) { try { Integer rank = results.get(disc.getAsin()); dao.refresh(disc); disc.setPrevRank(disc.getThisRank()); if (rank != null) { disc.setThisRank(rank); } disc.setModifyTime(startTime); dao.update(disc); break; } catch (DataException ignore) { } } }); LOGGER.info("[Amzon(ALL)]"); service.printFetchers(); amazonFetchStatus = AmazonFetchStatus.waitingForUpdate; } }
package net.bingosoft.oss.imclient.utils; import net.bingosoft.oss.imclient.model.ObjectType; import net.bingosoft.oss.imclient.model.SendMessage; import net.bingosoft.oss.imclient.model.msg.Content; import net.bingosoft.oss.imclient.model.msg.FileContent; import net.bingosoft.oss.imclient.model.msg.Text; import java.util.HashSet; import java.util.Set; import java.util.UUID; /** * Builder for {@link SendMessage} instances. * * @author kael. */ public class MessageBuilder { protected String taskId; protected String msgId; protected int msgType; protected Content content; protected int fromType; protected String fromId; protected String fromName; protected String fromCompany; protected int toType; protected String toId; protected String toName; protected String toCompany; protected boolean recReceipt=false; protected boolean isCountUnread=true; protected boolean isDeleteAfterRead=false; protected boolean isNeedReadReceipt=false; protected Set<String> toDeviceTypes = new HashSet<String>(); protected Set<String> atUserIds = new HashSet<String>(); public static MessageBuilder custom(){ return new MessageBuilder(); } public static MessageBuilder userMessage(){ return new MessageBuilder().setFromType(ObjectType.USER); } public static MessageBuilder snoMessage(){ return new MessageBuilder().setFromType(ObjectType.SNO); } public static MessageBuilder textMessage(String text){ return new MessageBuilder().setTextContent(text); } public static <T extends FileContent> MessageBuilder fileMessage(T content){ return new MessageBuilder().setContent(content); } public MessageBuilder setTaskId(String taskId){ this.taskId = taskId; return this; } public MessageBuilder setMsgId(String msgId){ this.msgId = msgId; return this; } public MessageBuilder setMsgType(int msgType){ this.msgType = msgType; return this; } public MessageBuilder setContent(Content content){ this.content = content; return this; } public MessageBuilder setTextContent(String text){ return setContent(new Text(text)); } public <T extends FileContent> MessageBuilder setFileContent(T content){ return setContent(content); } public MessageBuilder setFromType(int fromType){ this.fromType = fromType; return this; } public MessageBuilder setFromId(String fromId){ this.fromId = fromId; return this; } public MessageBuilder setFromName(String fromName){ this.fromName = fromName; return this; } public MessageBuilder setFromCompany(String fromCompany){ this.fromCompany = fromCompany; return this; } public MessageBuilder setToType(int toType){ this.toType = toType; return this; } public MessageBuilder setToId(String toId){ this.toId = toId; return this; } public MessageBuilder setToName(String toName){ this.toName = toName; return this; } public MessageBuilder setToCompany(String toCompany){ this.toCompany = toCompany; return this; } public MessageBuilder addToDeviceTypes(String toDeviceTypes){ this.toDeviceTypes.add(toDeviceTypes); return this; } public MessageBuilder setRecReceipt(boolean recReceipt){ this.recReceipt = recReceipt; return this; } public MessageBuilder setIsCountUnread(boolean isCountUnread){ this.isCountUnread = isCountUnread; return this; } public MessageBuilder setIsDeleteAfterRead(boolean isDeleteAfterRead){ this.isDeleteAfterRead = isDeleteAfterRead; return this; } public MessageBuilder setIsNeedReadReceipt(boolean isNeedReadReceipt){ this.isNeedReadReceipt = isNeedReadReceipt; return this; } public MessageBuilder addAtUser(String userId){ atUserIds.add(userId); return this; } public SendMessage build(){ SendMessage message = new SendMessage();
message.setTaskId(taskId); message.setMsgId(null == msgId? UUID.randomUUID().toString():msgId); message.setMsgType(msgType); message.setContent(content); message.setFromType(fromType); message.setFromId(fromId); message.setFromName(fromName); message.setFromCompany(fromCompany); message.setToType(toType); message.setToId(toId); message.setToName(toName); message.setToCompany(toCompany); message.setRecReceipt(recReceipt); message.setIsCountUnread(isCountUnread); message.setIsDeleteAfterRead(isDeleteAfterRead); message.setIsNeedReadReceipt(isNeedReadReceipt); if(toDeviceTypes.size() > 0){ StringBuilder dt = new StringBuilder(); for (String s : toDeviceTypes){ dt.append(s+","); } dt.deleteCharAt(dt.length()-1); message.setToDeviceTypes(dt.toString()); } if(atUserIds.size() > 0){ StringBuilder atIds = new StringBuilder(); for (String s : atUserIds){ atIds.append(s+","); } atIds.deleteCharAt(atIds.length()-1); message.setAtUserIds(atIds.toString()); } return message; } }
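/*
 * Hedged usage sketch (not part of the original sources): builds a text message with
 * the builder above. ObjectType and SendMessage come from the imports already used in
 * MessageBuilder; the sender/receiver ids and the "PC" device type are made up, and
 * how the resulting SendMessage is actually dispatched is outside this snippet.
 */
package net.bingosoft.oss.imclient.utils;

import net.bingosoft.oss.imclient.model.ObjectType;
import net.bingosoft.oss.imclient.model.SendMessage;

public class MessageBuilderExample {
    public static void main(String[] args) {
        SendMessage msg = MessageBuilder.textMessage("hello")
                .setFromType(ObjectType.USER)
                .setFromId("user-1")
                .setToType(ObjectType.USER)
                .setToId("user-2")
                .addToDeviceTypes("PC")
                .addAtUser("user-2")
                .build();
        // build() fills in a random UUID as msgId when none was set explicitly.
        System.out.println(msg);
    }
}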
package org.extratrees; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Random; import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; public abstract class AbstractTrees<E extends AbstractBinaryTree> { Matrix input; Random random = new Random(); double[] weights; boolean useWeights; boolean hasNaN = false; int[] bagSizes = null; int[][] bagElems = null; protected final static double zero=1e-7; /** value to be returned when there is no answer, i.e., not available */ protected final static double NA = Double.NaN; /** for multi-task learning, stores task indeces (null if not present) */ int[] tasks; /** number of tasks: (tasks are indexed from 0 to (nTasks-1) */ int nTasks; ArrayList<E> trees; /** number of threads */ int numThreads = 1; /** number of times task cut is tried */ int numRandomTaskCuts = 1; /** probability of trying task cutting (lambda) */ double probOfTaskCuts = 1.0; /** number of random cuts tried for each feature */ int numRandomCuts = 1; /** whether random cuts are totally uniform or evenly uniform */ boolean evenCuts = false; /** later shuffled and used for choosing random columns at each node: */ ArrayList<Integer> cols; public boolean isHasNaN() { return hasNaN; } public void setHasNaN(boolean hasNaN) { this.hasNaN = hasNaN; } public int getNumThreads() { return numThreads; } public void setNumThreads(int numThreads) { this.numThreads = numThreads; } public int getNumRandomTaskCuts() { return numRandomTaskCuts; } /** sets the number of times random cut for tasks is tried (assuming multitask setting). * default is 1. * */ public void setNumRandomTaskCuts(int numRandomTaskCuts) { this.numRandomTaskCuts = numRandomTaskCuts; } public double getProbOfTaskCuts() { return probOfTaskCuts; } /** * sets the probability of trying task cutting. * @param probOfTaskCuts */ public void setProbOfTaskCuts(double probOfTaskCuts) { this.probOfTaskCuts = probOfTaskCuts; } /** * @return number of trees used in ExtraTrees */ public int getNumTrees() { return trees.size(); } public boolean isEvenCuts() { return evenCuts; } /** * @param evenCuts - whether the random cuts (if more than 1) are * sampled from fixed even intervals (true) * or just sampled ordinary uniform way (false) */ public void setEvenCuts(boolean evenCuts) { this.evenCuts = evenCuts; } public int getNumRandomCuts() { return numRandomCuts; } public void setNumRandomCuts(int numRandomCuts) { this.numRandomCuts = numRandomCuts; } public void setInput(Matrix input) { this.input = input; // making cols list for later use: this.cols = new ArrayList<Integer>(input.ncols); for (int i=0; i<input.ncols; i++) { cols.add(i); } } public void setTasks(int[] tasks) { // making a list of tasks: this.tasks = tasks; this.nTasks = 1; if (this.tasks!=null) { for (int i=0; i<tasks.length; i++) { //taskNames.add(tasks[i]); if (nTasks < tasks[i] + 1) { nTasks = tasks[i] + 1; } } } } public void setWeights(double[] weights) { if (weights != null && input.nrows != weights.length) { throw(new IllegalArgumentException("Input and weights do not have the same number of data points.")); } this.weights = weights; this.useWeights = (weights!=null); } /** * Sets bag size to bagSize, so each tree is only built with bagSize (randomly selected) samples. 
* @param bagSize */ public void setBagging(int bagSize) { if (bagSize > input.nrows) { throw( new IllegalArgumentException("Supplied bagSize exceeds the number of samples.") ); } this.bagSizes = new int[]{ bagSize }; } /** * Sets bag sizes for each bag label group. * @param bagSizes int[] size of the bag for each label * @param bagLabels int[] bag label for each sample, all from 0 to (Nbags - 1) */ public void setBagging(int[] bagSizes, int[] bagLabels) { if (bagLabels.length != input.nrows) { throw( new IllegalArgumentException("size of bagLabels has to equal the number of input rows.") ); } this.bagSizes = bagSizes; int[] counts = new int[bagSizes.length]; for (int i=0; i<input.nrows; i++) { counts[ bagLabels[i] ]++; } // making sure all bags have enough elements: this.bagElems = new int[counts.length][]; for (int bag=0; bag < counts.length; bag++) { if (counts[bag] < bagSizes[bag]) { throw( new IllegalArgumentException( String.format("Bag %d has fewer elements (%d) than requested by bag size (%d).", bag, counts[bag], bagSizes[bag] ))); } this.bagElems[bag] = new int[ counts[bag] ]; } // adding elements to the appropriate bags: for (int i=0; i < input.nrows; i++) { int bag = bagLabels[i]; int j = this.bagElems[bag].length - counts[bag]; counts[bag]--; this.bagElems[bag][j] = i; } } /** * @param col_min * @param diff * @param repeat only used when evenCuts==true. * @return random cut from col_min to col_min+diff. */ protected double getRandomCut(double col_min, double diff, int repeat) { double t; if (evenCuts) { double iStart = col_min + repeat*diff/numRandomCuts; double iStop = col_min + (repeat+1)*diff/numRandomCuts; t = getRandom()*(iStop-iStart) + iStart; } else { t = getRandom()*diff + col_min; } return t; } /** * @param ids * @param col * @param input * @return array of min and max values. */ protected static double[] getRange(int[] ids, int col, Matrix input) { double[] range = new double[2]; range[0] = Double.POSITIVE_INFINITY; range[1] = Double.NEGATIVE_INFINITY; for (int n=0; n<ids.length; n++) { double v = input.get(ids[n], col); if ( v<range[0] ) { range[0] = v; } if ( v>range[1] ) { range[1] = v; } } return range; } /** * @param input * @return array of size two: min and max values. */ protected static double[] getRange(double[] input) { double[] range = new double[2]; range[0] = Double.POSITIVE_INFINITY; range[1] = Double.NEGATIVE_INFINITY; for (int n=0; n<input.length; n++) { if ( input[n]<range[0] ) { range[0] = input[n]; } if ( input[n]>range[1] ) { range[1] = input[n]; } } return range; } static protected class CutResult { double score; boolean leftConst; boolean rightConst; int countLeft; int countRight; double nanWeigth; public CutResult() {} public int getCountLeft() { return countLeft; } public void setCountLeft(int countLeft) { this.countLeft = countLeft; } public int getCountRight() { return countRight; } public void setCountRight(int countRight) { this.countRight = countRight; } public boolean isLeftConstant() { return leftConst; } public void setLeftConstant(boolean leftConstant) { this.leftConst = leftConstant; } public boolean isRightConstant() { return rightConst; } public void setRightConstant(boolean rightConstant) { this.rightConst = rightConstant; } public double getScore() { return score; } public void setScore(double score) { this.score = score; } } abstract public E makeLeaf(int[] ids, Set<Integer> leftTaskSet); /** * Same as buildTrees() except computes in parallel.
* @param nmin * @param K * @param nTrees * @return */ public ArrayList<E> buildTreesParallel(int nmin, int K, int nTrees) { // creating a thread pool and using it to compute nTrees: ExecutorService executor = Executors.newFixedThreadPool(numThreads); List<TreeCallable> callables = new ArrayList<TreeCallable>(); // adding tasks: using the same task for each tree TreeCallable task = new TreeCallable(nmin, K); for (int i=0; i<nTrees; i++) { callables.add( task ); } // computing and fetching results: List<Future<E>> results; try { results = executor.invokeAll(callables); } catch (InterruptedException e) { // not solving this error here: throw new RuntimeException(e); } // fetching all BinaryTrees and storing them: ArrayList<E> trees = new ArrayList<E>(nTrees); for (Future<E> f : results) { try { trees.add( f.get() ); } catch (InterruptedException e) { throw new RuntimeException(e); } catch (ExecutionException e) { throw new RuntimeException(e); } } executor.shutdown(); return trees; } /** Nested class for making BinaryTrees */ public class TreeCallable implements Callable<E> { int nmin, K; public TreeCallable(int nmin, int K) { this.nmin = nmin; this.K = K; } @Override public E call() throws Exception { return AbstractTrees.this.buildTree(nmin, K); } } /** * good values: * n_min = 2 (size of tree element) * K = 5 (# of random choices) * M = 50 (# of trees) * if n_min is chosen by CV, then we have pruned version * @param nmin - size of tree element * @param K - # of random choices * @param nTrees - # of trees * Single threaded computation. * @return learned trees */ public ArrayList<E> buildTrees(int nmin, int K, int nTrees) { ArrayList<E> trees = new ArrayList<E>(nTrees); // single-threading: for (int t=0; t<nTrees; t++) { trees.add( this.buildTree(nmin, K) ); } return trees; } /** * stores trees with the AbstractTrees object. * Uses multiple threads if set. * @param nmin * @param K * @param nTrees */ public void learnTrees(int nmin, int K, int nTrees) { if (numThreads<=1) { //if (numThreads<=1) { this.trees = buildTrees(nmin, K, nTrees); } else { this.trees = buildTreesParallel(nmin, K, nTrees); } } /** * @param n * @return int array of [0, 1, ..., n-1]. */ public static int[] seq(int n) { int[] seq = new int[n]; for (int i=0; i < n; i++) { seq[i] = i; } return seq; } /** * @param nStart * @param nEnd * @return int array of [nStart, nStart+1, ..., nEnd-1]. */ public static int[] seq(int nStart, int nEnd) { int[] seq = new int[nEnd - nStart]; for (int i=nStart; i < nEnd; i++) { seq[i-nStart] = i; } return seq; } /** * Main method that performs tree training. 
* @param nmin * @param K * @return */ public E buildTree(int nmin, int K) { int[] ids = getInitialSamples(); ShuffledIterator<Integer> cols = new ShuffledIterator<Integer>(this.cols); // finding task set: HashSet<Integer> taskSet = getSequenceSet(nTasks); return buildTree(nmin, K, ids, cols, taskSet ); } protected static ArrayList<Integer> arrayToList(int[] array) { ArrayList<Integer> list = new ArrayList<Integer>(array.length); for (int value : array) { list.add(value); } return list; } protected static int[] listToArray(ArrayList<Integer> list) { int[] a = new int[ list.size() ]; for (int i=0; i < a.length; i++) { a[i] = list.get(i); } return a; } protected int[] getInitialSamples() { if (bagSizes == null) { return seq( input.nrows ); } if (bagSizes.length == 1) { ArrayList<Integer> allIds = arrayToList( seq( input.nrows ) ); ShuffledIterator<Integer> shuffle = new ShuffledIterator<Integer>(allIds); int[] bag = new int[ bagSizes[0] ]; for (int i=0; i < bag.length; i++) { bag[i] = shuffle.next(); } return bag; } // selecting random samples from each bag: int[] bag = new int[ sum(bagSizes) ]; int i = 0; for (int b=0; b < bagSizes.length; b++) { ArrayList<Integer> ids = arrayToList( bagElems[b] ); ShuffledIterator<Integer> shuffle = new ShuffledIterator<Integer>(ids); // filling with elements from bag[b]: for (int n = i + bagSizes[b]; i < n; i++) { bag[i] = shuffle.next(); } } return bag; } /** @return penalty for NaN values for the given ids */ protected abstract double get1NaNScore(int[] ids); protected abstract void calculateCutScore(int[] ids, int col, double t, CutResult result); /** * @param ids * @param bestScore * @return array of booleans specifying the cut (true for left tree, false for left). * Returns null if no cut better (smaller score) than bestScore was found. */ protected abstract TaskCutResult getTaskCut(int[] ids, Set<Integer> tasks, double bestScore); /** * @param m data matrix * @param ids row numbers to be split * @param dim which dimension to use for splitting * @param cut splitting value * @return filters ids <b>ids</b> into two arrays, one whose values in data values * {@code m[id, dim]} are below cut and others whose values are higher. */ public static int[][] splitIds(Matrix m, int[] ids, int dim, double cut) { int[][] out = new int[2][]; int lenLower = 0; for (int i=0; i<ids.length; i++) { if (m.get(ids[i], dim)<cut) { lenLower++; } } // two vectors: lower and higher out[0] = new int[lenLower]; out[1] = new int[ids.length - lenLower]; int i0 = 0; int i1 = 0; for (int i=0; i<ids.length; i++) { if (m.get(ids[i], dim)<cut) { out[0][i0] = ids[i]; i0++; } else { out[1][i1] = ids[i]; i1++; } } return out; } /** * * @param ids * @param leftTasks * @return filters <b>ids</b> into two arrays, one whose tasks[i] is is * in leftTasks and others whose values are not. 
*/ public int[][] splitIdsByTask(int[] ids, Set<Integer> leftTasks) { int[][] out = new int[2][]; int lenLower = 0; for (int i=0; i<ids.length; i++) { if ( leftTasks.contains(tasks[ids[i]]) ) { lenLower++; } } // two vectors: lower and higher out[0] = new int[lenLower]; out[1] = new int[ids.length - lenLower]; int i0 = 0; int i1 = 0; for (int i=0; i<ids.length; i++) { if ( leftTasks.contains(tasks[ids[i]]) ) { out[0][i0] = ids[i]; i0++; } else { out[1][i1] = ids[i]; i1++; } } return out; } public static int sum(int[] array) { int s = 0; for (int i=0; i<array.length; i++) { s += array[i]; } return s; } public static double sum(double[] array) { double s = 0; for (int i=0; i<array.length; i++) { s += array[i]; } return s; } public static HashSet<Integer> getSequenceSet(int n) { HashSet<Integer> taskSet; taskSet = new HashSet<Integer>(n); for (int i=0; i<n; i++) { taskSet.add(i); } return taskSet; } /** * @return random between 0.0 and 1.0. */ protected double getRandom() { return random.nextDouble(); } public void setSeed(long seed) { this.random = new Random(seed); } /** * @param xmin * @param xmax * @return uniformly random value between xmin and xmax. */ protected double getRandom(double xmin, double xmax) { return xmin + getRandom()*(xmax - xmin); } abstract protected E makeFilledTree(E leftTree, E rightTree, int col_best, double t_best, int nSuccessors); /** * * @param nmin * @param K * @param ids * @param randomCols - passed to save memory (maybe not needed) * @return */ public E buildTree(int nmin, int K, int[] ids, ShuffledIterator<Integer> randomCols, Set<Integer> taskSet) { if (ids.length<nmin) { return makeLeaf(ids, taskSet); } // doing a shuffle of cols: randomCols.reset(); // trying K trees or the number of non-constant columns, // whichever is smaller: int k = 0, col_best=-1; double t_best=Double.NaN; double nanPenalty = hasNaN ?get1NaNScore(ids) :0; CutResult bestResult = new CutResult(); bestResult.score = Double.POSITIVE_INFINITY; loopThroughColumns: while( randomCols.hasNext() ) { int col = randomCols.next(); // calculating columns min and max: double[] range = getRange(ids, col, input); if (range[1]-range[0] < zero) { // skipping, because column is constant continue; } // picking random test point numRepeatTries: double diff = (range[1]-range[0]); for (int repeat=0; repeat<this.numRandomCuts; repeat++) { double t; t = getRandomCut(range[0], diff, repeat); CutResult result = new CutResult(); calculateCutScore(ids, col, t, result); if (hasNaN) { result.score += result.nanWeigth * nanPenalty; } if (result.score < bestResult.score) { // found a better scoring cut col_best = col; t_best = t; bestResult.score = result.score; bestResult.leftConst = result.leftConst; bestResult.rightConst = result.rightConst; bestResult.countLeft = result.countLeft; bestResult.countRight = result.countRight; if (bestResult.leftConst && bestResult.rightConst && ! 
hasNaN) { // the result cannot be improved: break loopThroughColumns; } } } k++; if (k>=K) { // checked enough columns, stopping: break; } } // multi-task learning: TaskCutResult taskCutResult = null; if (taskSet.size() > 1) { // checking whether to perform task splitting if ( probOfTaskCuts > getRandom() ) { // calculating task order: taskCutResult = getTaskCut(ids, taskSet, bestResult.score); } } // outputting the tree using the best score cut: int[] idsLeft = new int[bestResult.countLeft]; int[] idsRight = new int[bestResult.countRight]; if (col_best < 0 && taskCutResult==null) { // no feature or task split found return makeLeaf(ids, taskSet); } int[][] split; Set<Integer> leftTaskSet, rightTaskSet; if (taskCutResult!=null) { // task cut: split = splitIdsByTask(ids, taskCutResult.leftTasks); leftTaskSet = taskCutResult.leftTasks; rightTaskSet = taskCutResult.rightTasks; col_best = -1; t_best = Double.NaN; } else { // splitting according to feature: split = splitIds(input, ids, col_best, t_best); leftTaskSet = taskSet; rightTaskSet = taskSet; } idsLeft = split[0]; idsRight = split[1]; E leftTree, rightTree; if (bestResult.leftConst) { // left child's output is constant leftTree = makeLeaf(idsLeft, leftTaskSet); } else { leftTree = this.buildTree(nmin, K, idsLeft, randomCols, leftTaskSet); } if (bestResult.rightConst) { // right child's output is constant rightTree = makeLeaf(idsRight, rightTaskSet); } else { rightTree = this.buildTree(nmin, K, idsRight, randomCols, rightTaskSet); } E bt = makeFilledTree(leftTree, rightTree, col_best, t_best, ids.length); bt.tasks = taskSet; return bt; } }
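/*
 * Hedged sketch (not part of the original sources): AbstractTrees is abstract and its
 * training entry points need a Matrix implementation and a concrete subclass that are
 * not shown here, so this example only exercises the self-contained static helpers to
 * illustrate their contracts. The example class name is hypothetical.
 */
package org.extratrees;

import java.util.Arrays;
import java.util.HashSet;

public class AbstractTreesHelpersExample {
    public static void main(String[] args) {
        // seq(n) -> [0, 1, ..., n-1]; seq(a, b) -> [a, ..., b-1]
        System.out.println(Arrays.toString(AbstractTrees.seq(4)));      // [0, 1, 2, 3]
        System.out.println(Arrays.toString(AbstractTrees.seq(2, 5)));   // [2, 3, 4]

        // sum over int[] and double[]
        System.out.println(AbstractTrees.sum(new int[] {1, 2, 3}));     // 6
        System.out.println(AbstractTrees.sum(new double[] {0.5, 1.5})); // 2.0

        // getSequenceSet(n) -> the task set {0, ..., n-1} used for multi-task splits
        HashSet<Integer> tasks = AbstractTrees.getSequenceSet(3);
        System.out.println(tasks.size());                               // 3

        // getRange(double[]) -> {min, max} (protected static, visible in this package)
        double[] range = AbstractTrees.getRange(new double[] {3.0, -1.0, 2.0});
        System.out.println(Arrays.toString(range));                     // [-1.0, 3.0]
    }
}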
package controllers; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.io.UnsupportedEncodingException; import java.net.HttpURLConnection; import java.net.URL; import java.net.URLEncoder; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.HashMap; import java.util.Map; import net.sourceforge.jwbf.core.RequestBuilder; import net.sourceforge.jwbf.core.actions.util.HttpAction; import net.sourceforge.jwbf.mediawiki.ApiRequestBuilder; import net.sourceforge.jwbf.mediawiki.actions.util.MWAction; import net.sourceforge.jwbf.mediawiki.bots.MediaWikiBot; import org.apache.commons.dbutils.DbUtils; import org.spout.cereal.config.ConfigurationNode; import org.spout.cereal.config.yaml.YamlConfiguration; import play.Logger; import play.mvc.Result; import play.mvc.Results; import com.afforess.assembly.util.DatabaseAccess; import com.afforess.assembly.util.Utils; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.limewoodMedia.nsapi.NationStates; public class NSWikiController extends NationStatesController { private final String nswikiAdmin; private final String nswikiPass; public NSWikiController(DatabaseAccess access, YamlConfiguration config, NationStates api) { super(access, config, api); ConfigurationNode nswiki = getConfig().getChild("nswiki"); nswikiAdmin = nswiki.getChild("admin").getString(null); nswikiPass = nswiki.getChild("password").getString(null); } public Result verifyNationLogin() throws IOException, SQLException { Result ret = Utils.validateRequest(request(), response(), getAPI(), getDatabase(), false); if (ret != null) { return ret; } String nation = Utils.getPostValue(request(), "nation"); String password = Utils.getPostValue(request(), "password"); if (password == null || password.isEmpty() || password.length() < 8) { Logger.warn("User [" + nation + "] attempted an invalid password: [" + password + "]"); return Results.badRequest("Invalid password"); } final String title; Connection conn = null; PreparedStatement select = null; ResultSet set = null; try { conn = getConnection(); select = conn.prepareStatement("SELECT title FROM assembly.nation WHERE name = ?"); select.setString(1, Utils.sanitizeName(nation)); set = select.executeQuery(); set.next(); title = set.getString(1); } finally { DbUtils.closeQuietly(conn); DbUtils.closeQuietly(select); DbUtils.closeQuietly(set); } if (doesNSWikiUserExist(title)) { Logger.info("Updating password for " + title); changePassword(title, password); return Results.ok(); } return createNSWikiUser(title, password); } private static void changePassword(String user, String password) throws IOException { Process cmdProc = Runtime.getRuntime().exec(new String[] {"php", "/etc/mediawiki/maintenance/changePassword.php", "--user=" + user + "", "--password=" + password + ""}); BufferedReader stdoutReader = new BufferedReader(new InputStreamReader(cmdProc.getInputStream())); String line; while ((line = stdoutReader.readLine()) != null) { Logger.info("[NSWIKI PASSWORD] " + line); } BufferedReader stderrReader = new BufferedReader(new InputStreamReader(cmdProc.getErrorStream())); while ((line = stderrReader.readLine()) != null) { Logger.warn("[NSWIKI PASSWORD] " + line); } } private static boolean doesNSWikiUserExist(String user) throws IOException { URL userPage = new URL("http://nswiki.org/index.php?title=User:" + URLEncoder.encode(user, "UTF-8")); 
HttpURLConnection conn = (HttpURLConnection) userPage.openConnection(); conn.connect(); return conn.getResponseCode() / 100 == 2; } private Result createNSWikiUser(String nation, String password) throws IOException { MediaWikiBot wikibot = new MediaWikiBot("http://nswiki.org/"); wikibot.login(nswikiAdmin, nswikiPass); String result = wikibot.performAction(new CreateUser(nation, password)); if (result.contains("success")) { return Results.ok(); } else { Logger.warn("Unable to create NS Wiki user: " + result); return Results.internalServerError(); } } private static class CreateUser extends MWAction { private String token = null; private int count = 0; private final String name; private final String password; public CreateUser(String name, String password) { this.name = name; this.password = password; } @Override public HttpAction getNextMessage() { try { RequestBuilder rb = new ApiRequestBuilder().action("createaccount").param("format", "json").param("name", URLEncoder.encode(name, "UTF-8")).param("password", URLEncoder.encode(password, "UTF-8")); if (token != null) { rb.param("token", token); } return rb.buildPost(); } catch (UnsupportedEncodingException e) { throw new RuntimeException(e); } } @SuppressWarnings("unchecked") @Override public String processAllReturningText(final String s) { try { if (token == null) { Map<String, Object> result = new ObjectMapper().readValue(s, new TypeReference<HashMap<String,Object>>() {}); token = ((Map<String, String>)result.get("createaccount")).get("token"); } } catch (Exception e) { throw new RuntimeException(e); } return s; } @Override public boolean hasMoreMessages() { if (++count < 3) { return true; } return false; } } }
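/*
 * Hedged sketch (not part of the original sources): the CreateUser action above lets
 * JWBF drive MediaWiki's two-step "createaccount" handshake -- the first POST (without
 * a token) returns a token inside the "createaccount" JSON object, and the same request
 * is then repeated with that token. The plain-Java outline below only illustrates that
 * handshake; the API URL, parameter encoding and error handling are simplified
 * assumptions, not the controller's actual transport code.
 */
package controllers;

import java.io.InputStream;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
import java.util.HashMap;
import java.util.Map;
import java.util.Scanner;

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;

public class CreateAccountFlowSketch {

    @SuppressWarnings("unchecked")
    public static void createAccount(String apiUrl, String name, String password) throws Exception {
        String params = "action=createaccount&format=json"
                + "&name=" + URLEncoder.encode(name, "UTF-8")
                + "&password=" + URLEncoder.encode(password, "UTF-8");

        // First call: no token yet; the response carries createaccount.token.
        String first = post(apiUrl, params);
        Map<String, Object> parsed = new ObjectMapper().readValue(first,
                new TypeReference<HashMap<String, Object>>() {});
        String token = ((Map<String, String>) parsed.get("createaccount")).get("token");

        // Second call: same parameters plus the token completes the account creation.
        String second = post(apiUrl, params + "&token=" + URLEncoder.encode(token, "UTF-8"));
        System.out.println(second); // expected to mention "success" on the happy path
    }

    private static String post(String apiUrl, String body) throws Exception {
        HttpURLConnection conn = (HttpURLConnection) new URL(apiUrl).openConnection();
        conn.setRequestMethod("POST");
        conn.setDoOutput(true);
        try (OutputStream out = conn.getOutputStream()) {
            out.write(body.getBytes("UTF-8"));
        }
        try (InputStream in = conn.getInputStream()) {
            return new Scanner(in, "UTF-8").useDelimiter("\\A").next();
        }
    }
}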
package net.darkhax.bookshelf.handler; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import net.minecraft.creativetab.CreativeTabs; import net.minecraft.entity.EntityLivingBase; import net.minecraft.entity.passive.EntityHorse; import net.minecraft.item.ItemStack; import net.minecraftforge.event.AnvilUpdateEvent; import net.minecraftforge.event.entity.EntityEvent; import net.minecraftforge.event.entity.EntityJoinWorldEvent; import net.minecraftforge.event.entity.living.LivingEvent.LivingUpdateEvent; import net.minecraftforge.event.entity.living.LivingHurtEvent; import cpw.mods.fml.common.eventhandler.SubscribeEvent; import net.darkhax.bookshelf.asm.ASMHelper; import net.darkhax.bookshelf.buff.BuffEffect; import net.darkhax.bookshelf.buff.BuffHelper; import net.darkhax.bookshelf.common.BookshelfRegistry; import net.darkhax.bookshelf.common.EntityProperties; import net.darkhax.bookshelf.event.CreativeTabEvent; import net.darkhax.bookshelf.event.PotionCuredEvent; import net.darkhax.bookshelf.items.ItemHorseArmor; import net.darkhax.bookshelf.lib.Constants; import net.darkhax.bookshelf.lib.util.*; public class ForgeEventHandler { @SubscribeEvent public void onAnvilUsed (AnvilUpdateEvent event) { for (BookshelfRegistry.AnvilRecipe recipe : BookshelfRegistry.getAnvilRecipes()) { if (recipe != null && ItemStackUtils.isValidStack(recipe.output) && ItemStackUtils.areStacksSimilarWithSize(event.left, recipe.inputLeft) && ItemStackUtils.areStacksSimilarWithSize(event.right, recipe.inputRight)) { event.cost = recipe.getExperienceCost(event.left, event.right, event.name); event.materialCost = recipe.getMaterialCost(event.left, event.right, event.name); if (recipe.nameTaxt != null && !recipe.nameTaxt.isEmpty()) { if (recipe.nameTaxt.equalsIgnoreCase(event.name)) event.output = recipe.getOutput(event.left, event.right, event.name); return; } event.output = recipe.getOutput(event.left, event.right, event.name); return; } } } @SubscribeEvent public void onPotionsCured (PotionCuredEvent event) { BuffHelper.cureBuffs(event.entityLiving, event.stack); } @SubscribeEvent public void afterCreativeTabLoaded (CreativeTabEvent.Post event) { if (event.tab == CreativeTabs.tabDecorations) for (ItemStack stack : SkullUtils.getMHFSkulls()) event.itemList.add(stack); } @SubscribeEvent public void onEntityUpdate (LivingUpdateEvent event) { if (!ASMHelper.isASMEnabled) Constants.LOG.warn("The ASM has not been initialized, there is an error with your setup!"); else if (event.entity instanceof EntityHorse) { EntityHorse horse = (EntityHorse) event.entity; ItemStack customArmor = Utilities.getCustomHorseArmor(horse); if (customArmor != null && customArmor.getItem() instanceof ItemHorseArmor) { ItemHorseArmor armor = (ItemHorseArmor) customArmor.getItem(); armor.onHorseUpdate(horse, customArmor); } } EntityLivingBase entity = event.entityLiving; List<BuffEffect> destroyBuff = new ArrayList<BuffEffect>(); List<BuffEffect> list = BuffHelper.getEntityEffects(entity); for (BuffEffect buff:list) { if (buff.getBuff().canUpdate()) buff.getBuff().onBuffTick(entity.worldObj, entity, buff.duration, buff.power); buff.duration--; if (!entity.worldObj.isRemote) { if (buff.duration <= 0) { destroyBuff.add(buff); } } } if(!destroyBuff.isEmpty()){ for (BuffEffect buff:destroyBuff) { buff.getBuff().onEffectEnded(); EntityProperties.getProperties(entity).remove(buff, false); } } } @SubscribeEvent public void onEntityHurt (LivingHurtEvent event) { if (!ASMHelper.isASMEnabled) Constants.LOG.warn("The ASM has not been 
initialized, there is an error with your setup!"); else if (event.entity instanceof EntityHorse) { EntityHorse horse = (EntityHorse) event.entity; ItemStack customArmor = Utilities.getCustomHorseArmor(horse); if (customArmor != null && customArmor.getItem() instanceof ItemHorseArmor) { ItemHorseArmor armor = (ItemHorseArmor) customArmor.getItem(); event.setCanceled(armor.onHorseDamaged(horse, customArmor, event.source, event.ammount)); } } } @SubscribeEvent public void onEntityConstructing (EntityEvent.EntityConstructing event) { if (event.entity instanceof EntityLivingBase && !EntityProperties.hasProperties((EntityLivingBase) event.entity)) EntityProperties.setProperties((EntityLivingBase) event.entity); } @SubscribeEvent public void onEntityJoinWorld (EntityJoinWorldEvent event) { if (event.entity instanceof EntityLivingBase && !event.entity.worldObj.isRemote && EntityProperties.hasProperties((EntityLivingBase) event.entity)) EntityProperties.getProperties((EntityLivingBase) event.entity).sync(false); } }
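/*
 * Hedged sketch (not part of the original sources): the handler above only reacts to
 * events once it is registered on the Forge event bus. In a 1.7.x-style mod (matching
 * the cpw.mods.fml imports used above) that registration typically happens during mod
 * initialization, roughly as below; the mod class and modid are hypothetical.
 */
package net.darkhax.bookshelf.handler;

import cpw.mods.fml.common.Mod;
import cpw.mods.fml.common.event.FMLInitializationEvent;
import net.minecraftforge.common.MinecraftForge;

@Mod(modid = "examplemod")
public class ExampleModRegistration {

    @Mod.EventHandler
    public void init(FMLInitializationEvent event) {
        // AnvilUpdateEvent, LivingUpdateEvent, etc. are fired on the main Forge bus.
        MinecraftForge.EVENT_BUS.register(new ForgeEventHandler());
    }
}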
package org.csstudio.scan.data; import java.io.PrintStream; import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.List; import org.csstudio.scan.util.TextTable; /** Iterate over {@link ScanData} as spreadsheet-type table * * <p>Each column represents the samples for a device. * {@link ScanSample} serials are used to correlate samples. * Each "line" of the spreadsheet contains samples for the * same serial. * Values for a device are repeated on following lines until * a sample with new serial is received. * * @author Kay Kasemir */ @SuppressWarnings("nls") public class ScanDataIterator { /** "Comma Separator" is actually comma, * but could in the future be changed to '\t' or other */ final private static String CSV_SEPARATOR = ","; /** Device names, i.e. columns in spreadsheet */ final private String[] device_names; /** Raw data for each device */ final private List<ScanSample>[] data; /** Index within data for each device */ final private int[] index; /** Timestamp of current spreadsheet line */ private Date timestamp; /** Values for current spreadsheet line */ final private ScanSample[] value; /** Initialize for all devices in the {@link ScanData} * @param scan_data Scan data */ public ScanDataIterator(final ScanData scan_data) { // Determine for which devices we have samples this(scan_data, scan_data.getDevices()); } /** Initialize for specific devices * @param scan_data Scan data * @param device_names Devices that must be in the scan data */ @SuppressWarnings("unchecked") public ScanDataIterator(final ScanData scan_data, final String... device_names) { this.device_names = device_names; final int N = device_names.length; data = new List[N]; value = new ScanSample[N]; index = new int[N]; for (int i=0; i<N; ++i) { data[i] = scan_data.getSamples(device_names[i]); if (data[i] == null) data[i] = Collections.emptyList(); index[i] = 0; value[i] = null; } } /** @param i Device index * @return data[index[i]] */ private ScanSample getCurrentSample(final int i) { if (data[i].size() > index[i]) return data[i].get(index[i]); return null; } /** @return Device names, i.e. spreadsheet columns */ public String[] getDevices() { return device_names; } /** @return <code>true</code> if there is another line in the spreadsheet */ public boolean hasNext() { // Find oldest serial final int N = device_names.length; long oldest = Long.MAX_VALUE; timestamp = null; for (int i=0; i<N; ++i) { final ScanSample sample = getCurrentSample(i); if (sample == null) continue; if (sample.getSerial() < oldest) oldest = sample.getSerial(); } if (oldest == Long.MAX_VALUE) return false; // 'oldest' now defines the current spreadsheet line. // Determine value for that line. for (int i=0; i<N; ++i) { final ScanSample sample = getCurrentSample(i); if (sample == null) { // No more data for device // Leave value[i] as is, "still valid" continue; } // Device #i has data if (sample.getSerial() <= oldest) { // Use that as the 'value' for this line value[i] = sample; // Set index to the next sample, which will // be used as 'value' once we reach that time slot. 
++index[i]; } // else: sample[i] already points to a sample // _after_ the current line, so leave value[i] as is // For time stamp, use the newest stamp on current line if (timestamp == null || timestamp.before(sample.getTimestamp())) timestamp = sample.getTimestamp(); } return true; } /** @return Time stamp of the current spreadsheet line */ public Date getTimestamp() { return timestamp; } /** @return Samples on the current spreadsheet line */ public ScanSample[] getSamples() { // Copy because value[] will be overridden with next line return Arrays.copyOf(value, value.length); } /** Write spreadsheet to stream with fixed-sized columns * @param out {@link PrintStream} */ public void printTable(final PrintStream out) { final TextTable table = new TextTable(out); // Header table.addColumn("Time"); for (String device : getDevices()) table.addColumn(device); // Iterate over device data in 'spreadsheet' manner while (hasNext()) { table.addCell(ScanSampleFormatter.format(getTimestamp())); // Print current line for (ScanSample sample : getSamples()) { if (sample == null) table.addCell(""); else table.addCell(ScanSampleFormatter.asString(sample)); } } table.flush(); } /** Write spreadsheet to stream in CSV format * @param out {@link PrintStream} */ public void printCSV(final PrintStream out) { // Header out.append("Time"); for (String device : getDevices()) out.append(CSV_SEPARATOR).append(device); out.println(); // Iterate over device data in 'spreadsheet' manner while (hasNext()) { out.append(ScanSampleFormatter.format(getTimestamp())); // Print current line for (ScanSample sample : getSamples()) { out.append(CSV_SEPARATOR); if (sample == null) out.append(""); else out.append(ScanSampleFormatter.asString(sample)); } out.println(); } } }
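/*
 * Hedged usage sketch (not part of the original sources): dumps a ScanData instance as
 * a fixed-width table and as CSV using the iterator above. Where the ScanData comes
 * from (a scan server, a file, ...) is outside this snippet, so it is passed in as a
 * parameter rather than constructed here; the device names "xpos" and "ypos" are made up.
 */
package org.csstudio.scan.data;

public class ScanDataDumpExample {
    public static void dump(final ScanData scan_data) {
        // All devices found in the data become spreadsheet columns.
        new ScanDataIterator(scan_data).printTable(System.out);

        // Restrict to specific devices; devices without samples simply yield empty cells.
        new ScanDataIterator(scan_data, "xpos", "ypos").printCSV(System.out);
    }
}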
package org.goldenorb.event; public class OrbEvent { final static int LOST_MEMBER = 1; final static int NEW_MEMBER = 2; final static int LEADERSHIP_CHANGE = 3; final static int NEW_JOB = 4; final static int JOB_COMPLETE = 5; final static int JOB_DEATH = 6; final static int ORB_EXCEPTION = 8; int type; public OrbEvent(int type){ this.type = type; } public int getType(){ return type; } public void setType(int type){ this.type = type; } }
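/*
 * Hedged sketch (not part of the original sources): the event-type constants above are
 * package-private, so code that inspects them has to live in org.goldenorb.event. The
 * dispatch below is purely illustrative; how events are actually routed in GoldenOrb
 * is not shown in this file.
 */
package org.goldenorb.event;

public class OrbEventExample {
    public static void main(String[] args) {
        OrbEvent event = new OrbEvent(OrbEvent.NEW_JOB);
        switch (event.getType()) {
            case OrbEvent.NEW_JOB:
                System.out.println("a new job was submitted");
                break;
            case OrbEvent.JOB_COMPLETE:
                System.out.println("a job finished");
                break;
            default:
                System.out.println("unhandled event type: " + event.getType());
        }
    }
}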
package net.imglib2.meta; /** * @author Barry DeZonia */ public class CombinedCalibratedRealInterval<A extends CalibratedAxis, S extends CalibratedRealInterval<A>> extends CombinedRealInterval<A, S> implements CalibratedRealInterval<A> { // TODO - these methods will need some TLC. Maybe this class will store its // own copy of calibration values and units. And then setUnit() and // setCalibration() on an axis does a unit converted scaling of existing axes // cal values. Pulling values out of this interval will use views and sampling // as needed to get values along unit/calibration converted points of the // underlying axes. @Override public void setUnit(String unit, int d) { axis(d).setUnit(unit); } @Override public String unit(int d) { return axis(d).unit(); } @Override public double calibration(int d) { return axis(d).calibration(); } @Override public void calibration(double[] cal) { for (int i = 0; i < cal.length; i++) cal[i] = calibration(i); } @Override public void calibration(float[] cal) { for (int i = 0; i < cal.length; i++) cal[i] = (float) calibration(i); } @Override public void setCalibration(double cal, int d) { // TODO: we could throw an UnsupportedOperationException. But this class // is already broken. Update this method later. axis(d).setCalibration(cal); } @Override public void setCalibration(double[] cal) { for (int i = 0; i < cal.length; i++) setCalibration(cal[i], i); } @Override public void setCalibration(float[] cal) { for (int i = 0; i < cal.length; i++) setCalibration(cal[i], i); } }
package org.jenetics; import static org.jenetics.util.object.checkProbability; import static org.jenetics.util.object.eq; import static org.jenetics.util.object.hashCodeOf; public abstract class AbstractAlterer<G extends Gene<?, G>> implements Alterer<G> { /** * Return an alterer which does nothing. * * @return an alterer which does nothing. */ public static final <G extends Gene<?, G>> Alterer<G> Null() { return new Alterer<G>() { @Override public <C extends Comparable<? super C>> int alter( final Population<G, C> population, final int generation ) { return 0; } @Override public int hashCode() { return hashCodeOf(getClass()).value(); } @Override public boolean equals(final Object obj) { if (obj == this) { return true; } if (obj == null) { return false; } return obj.getClass() == getClass(); } @Override public String toString() { return "Alterer.Null"; } }; } public static final double DEFAULT_ALTER_PROBABILITY = 0.2; /** * The altering probability. */ protected final double _probability; protected AbstractAlterer(final double probability) { _probability = checkProbability(probability); } /** * Return the recombination/alter probability for this alterer. * * @return The recombination probability. */ public double getProbability() { return _probability; } @Override public int hashCode() { return hashCodeOf(getClass()).and(_probability).value(); } @Override public boolean equals(final Object obj) { if (obj == this) { return true; } if (!(obj instanceof AbstractAlterer<?>)) { return false; } final AbstractAlterer<?> alterer = (AbstractAlterer<?>)obj; return eq(_probability, alterer._probability); } }
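/*
 * Hedged sketch (not part of the original sources): AbstractAlterer is abstract, but
 * its static Null() factory is directly usable as a do-nothing Alterer. The BitGene
 * type parameter below is an assumption about the surrounding Jenetics version; any
 * Gene implementation would do. Passing null for the population is acceptable here
 * only because the null alterer ignores its arguments entirely.
 */
package org.jenetics;

public class NullAltererExample {
    public static void main(String[] args) {
        Alterer<BitGene> none = AbstractAlterer.Null();
        // The null alterer reports zero altered genes and never touches the population.
        System.out.println(none.<Double>alter(null, 1)); // 0
        System.out.println(none);                        // "Alterer.Null"
    }
}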
package pacman.modele; import java.util.Scanner; import pacman.carte.Labyrinthe; import pacman.personnages.Pacman; public class MainClass { public MainClass(){ Pacman pacman = new Pacman(0, 0); Labyrinthe laby = new Labyrinthe(5, 5, pacman); Modele m = new Modele(laby); boolean exit = false; Scanner sc = new Scanner(System.in); while(!exit){ System.out.println("Enter a command (L/R/U/D/S) \n"); String entree = sc.nextLine(); switch(entree){ case "L": if(!m.murAGauche()){ m.deplacerPacmanGauche(); System.out.println("L"); } break; case "R": if(!m.murADroite()){ m.deplacerPacmanDroite(); System.out.println("R"); } break; case "U": if(!m.murEnHaut()){ m.deplacerPacmanHaut(); System.out.println("U"); } break; case "D": if(!m.murEnBas()){ m.deplacerPacmanBas(); System.out.println("D"); } break; case "S": System.out.println("S"); exit = true; break; default: break; } } sc.close(); } public static void main(String[] args) { new MainClass(); } }
package org.lightmare.jpa; import java.io.IOException; import java.net.URL; import java.util.Collection; import java.util.Enumeration; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import javax.naming.Context; import javax.naming.NamingException; import javax.persistence.EntityManagerFactory; import javax.persistence.Persistence; import org.apache.log4j.Logger; import org.lightmare.cache.ConnectionSemaphore; import org.lightmare.jndi.NamingUtils; import org.lightmare.jpa.jta.HibernateConfig; import org.lightmare.libraries.LibraryLoader; import org.lightmare.utils.ObjectUtils; /** * Creates and caches {@link EntityManagerFactory} for each ejb bean * {@link Class}'s appropriate field (annotated by @PersistenceContext) * * @author Levan * */ public class JPAManager { // Keeps unique EntityManagerFactories builded by unit names private static final ConcurrentMap<String, ConnectionSemaphore> CONNECTIONS = new ConcurrentHashMap<String, ConnectionSemaphore>(); private List<String> classes; private String path; private URL url; private Map<Object, Object> properties; private boolean swapDataSource; private boolean scanArchives; private ClassLoader loader; public static boolean pooledDataSource; private static final Logger LOG = Logger.getLogger(JPAManager.class); private JPAManager() { } public static boolean checkForEmf(String unitName) { boolean check = ObjectUtils.available(unitName); if (check) { check = CONNECTIONS.containsKey(unitName); } return check; } public static ConnectionSemaphore getSemaphore(String unitName) { return CONNECTIONS.get(unitName); } private static ConnectionSemaphore createSemaphore(String unitName) { ConnectionSemaphore semaphore = CONNECTIONS.get(unitName); ConnectionSemaphore current = null; if (semaphore == null) { semaphore = new ConnectionSemaphore(); semaphore.setUnitName(unitName); semaphore.setInProgress(Boolean.TRUE); semaphore.setCached(Boolean.TRUE); current = CONNECTIONS.putIfAbsent(unitName, semaphore); } if (current == null) { current = semaphore; } current.incrementUser(); return current; } public static ConnectionSemaphore setSemaphore(String unitName, String jndiName) { ConnectionSemaphore semaphore = null; if (ObjectUtils.available(unitName)) { semaphore = createSemaphore(unitName); if (ObjectUtils.available(jndiName)) { ConnectionSemaphore existent = CONNECTIONS.putIfAbsent( jndiName, semaphore); if (existent == null) { semaphore.setJndiName(jndiName); } } } return semaphore; } private static void awaitConnection(ConnectionSemaphore semaphore) { synchronized (semaphore) { boolean inProgress = semaphore.isInProgress() && !semaphore.isBound(); while (inProgress) { try { semaphore.wait(); inProgress = semaphore.isInProgress() && !semaphore.isBound(); } catch (InterruptedException ex) { inProgress = Boolean.FALSE; LOG.error(ex.getMessage(), ex); } } } } public static boolean isInProgress(String jndiName) { ConnectionSemaphore semaphore = CONNECTIONS.get(jndiName); boolean inProgress = ObjectUtils.notNull(semaphore); if (inProgress) { inProgress = semaphore.isInProgress() && !semaphore.isBound(); if (inProgress) { awaitConnection(semaphore); } } return inProgress; } private void addTransactionManager() { if (properties == null) { properties = new HashMap<Object, Object>(); } properties.put(HibernateConfig.FACTORY_KEY, HibernateConfig.FACTORY_VALUE); properties.put(HibernateConfig.PLATFORM_KEY, HibernateConfig.PLATFORM_VALUE); } /** * Creates 
{@link EntityManagerFactory} by hibernate or by extended builder * {@link Ejb3ConfigurationImpl} if entity classes or persistence.xml file * path are provided * * @see Ejb3ConfigurationImpl#configure(String, Map) and * Ejb3ConfigurationImpl#createEntityManagerFactory() * * @param unitName * @return {@link EntityManagerFactory} */ @SuppressWarnings("deprecation") private EntityManagerFactory buildEntityManagerFactory(String unitName) throws IOException { EntityManagerFactory emf; Ejb3ConfigurationImpl cfg; boolean pathCheck = ObjectUtils.available(path); boolean urlCheck = checkForURL(); Ejb3ConfigurationImpl.Builder builder = new Ejb3ConfigurationImpl.Builder(); if (loader == null) { loader = LibraryLoader.getContextClassLoader(); } if (ObjectUtils.available(classes)) { builder.setClasses(classes); // Loads entity classes to current ClassLoader instance LibraryLoader.loadClasses(classes, loader); } if (pathCheck || urlCheck) { Enumeration<URL> xmls; ConfigLoader configLoader = new ConfigLoader(); if (pathCheck) { xmls = configLoader.readFile(path); } else { xmls = configLoader.readURL(url); } builder.setXmls(xmls); String shortPath = configLoader.getShortPath(); builder.setShortPath(shortPath); } builder.setSwapDataSource(swapDataSource); builder.setScanArchives(scanArchives); builder.setOverridenClassLoader(loader); cfg = builder.build(); if (ObjectUtils.isFalse(swapDataSource)) { addTransactionManager(); } Ejb3ConfigurationImpl configured = cfg.configure(unitName, properties); emf = ObjectUtils.notNull(configured) ? configured .buildEntityManagerFactory() : null; return emf; } /** * Checks if entity persistence.xml {@link URL} is provided * * @return boolean */ private boolean checkForURL() { return ObjectUtils.notNull(url) && ObjectUtils.available(url.toString()); } /** * Checks if entity classes or persistence.xml path are provided * * @param classes * @return boolean */ private boolean checkForBuild() { return ObjectUtils.available(classes) || ObjectUtils.available(path) || checkForURL() || swapDataSource || scanArchives; } /** * Checks if entity classes or persistence.xml file path are provided to * create {@link EntityManagerFactory} * * @see #buildEntityManagerFactory(String, String, Map, List) * * @param unitName * @param properties * @param path * @param classes * @return {@link EntityManagerFactory} * @throws IOException */ private EntityManagerFactory createEntityManagerFactory(String unitName) throws IOException { EntityManagerFactory emf; if (checkForBuild()) { emf = buildEntityManagerFactory(unitName); } else if (properties == null) { emf = Persistence.createEntityManagerFactory(unitName); } else { emf = Persistence.createEntityManagerFactory(unitName, properties); } return emf; } /** * Binds {@link EntityManagerFactory} to {@link javax.naming.InitialContext} * * @param jndiName * @param unitName * @param emf * @throws IOException */ private void bindJndiName(ConnectionSemaphore semaphore) throws IOException { boolean bound = semaphore.isBound(); if (!bound) { String jndiName = semaphore.getJndiName(); if (ObjectUtils.available(jndiName)) { NamingUtils namingUtils = new NamingUtils(); try { Context context = namingUtils.getContext(); String fullJndiName = NamingUtils .createJpaJndiName(jndiName); if (context.lookup(fullJndiName) == null) { namingUtils.getContext().rebind(fullJndiName, semaphore.getEmf()); } semaphore.setBound(Boolean.TRUE); } catch (NamingException ex) { throw new IOException(String.format( "could not bind connection %s", semaphore.getUnitName()), ex); } } 
else { semaphore.setBound(Boolean.TRUE); } } } public void setConnection(String unitName) throws IOException { ConnectionSemaphore semaphore = CONNECTIONS.get(unitName); if (semaphore.isInProgress()) { EntityManagerFactory emf = createEntityManagerFactory(unitName); semaphore.setEmf(emf); semaphore.setInProgress(Boolean.FALSE); bindJndiName(semaphore); } else if (semaphore.getEmf() == null) { throw new IOException(String.format( "Connection %s was not in progress", unitName)); } else { bindJndiName(semaphore); } } /** * Gets {@link ConnectionSemaphore} from cache, awaits if connection * instantiation is in progress * * @param unitName * @return {@link ConnectionSemaphore} * @throws IOException */ public static ConnectionSemaphore getConnection(String unitName) throws IOException { ConnectionSemaphore semaphore = CONNECTIONS.get(unitName); if (ObjectUtils.notNull(semaphore)) { awaitConnection(semaphore); } return semaphore; } /** * Gets {@link EntityManagerFactory} from {@link ConnectionSemaphore}, * awaits if connection * * @param unitName * @return {@link EntityManagerFactory} * @throws IOException */ public static EntityManagerFactory getEntityManagerFactory(String unitName) throws IOException { EntityManagerFactory emf = null; ConnectionSemaphore semaphore = CONNECTIONS.get(unitName); if (ObjectUtils.notNull(semaphore)) { awaitConnection(semaphore); emf = semaphore.getEmf(); } return emf; } /** * Unbinds connection from {@link javax.naming.Context} * * @param semaphore */ private static void unbindConnection(ConnectionSemaphore semaphore) { String jndiName = semaphore.getJndiName(); if (ObjectUtils.notNull(jndiName) && semaphore.isBound()) { NamingUtils namingUtils = new NamingUtils(); try { Context context = namingUtils.getContext(); String fullJndiName = NamingUtils.createJpaJndiName(jndiName); if (ObjectUtils.notNull(context.lookup(fullJndiName))) { context.unbind(fullJndiName); } } catch (NamingException ex) { LOG.error(String.format( "Could not unbind jndi name %s cause %s", jndiName, ex.getMessage()), ex); } catch (IOException ex) { LOG.error(String.format( "Could not unbind jndi name %s cause %s", jndiName, ex.getMessage()), ex); } } } /** * Closes connection ({@link EntityManagerFactory}) in passed * {@link ConnectionSemaphore} * * @param semaphore */ private static void closeConnection(ConnectionSemaphore semaphore) { int users = semaphore.decrementUser(); if (users <= 0) { EntityManagerFactory emf = semaphore.getEmf(); closeEntityManagerFactory(emf); unbindConnection(semaphore); CONNECTIONS.remove(semaphore.getUnitName()); String jndiName = semaphore.getJndiName(); if (ObjectUtils.available(jndiName)) { CONNECTIONS.remove(jndiName); semaphore.setBound(Boolean.FALSE); semaphore.setCached(Boolean.FALSE); } } } /** * Removes {@link ConnectionSemaphore} from cache and unbinds name from * {@link javax.naming.Context} * * @param unitName */ public static void removeConnection(String unitName) { ConnectionSemaphore semaphore = CONNECTIONS.get(unitName); if (ObjectUtils.notNull(semaphore)) { awaitConnection(semaphore); unbindConnection(semaphore); closeConnection(semaphore); } } /** * Closes passed {@link EntityManagerFactory} * * @param emf */ private static void closeEntityManagerFactory(EntityManagerFactory emf) { if (ObjectUtils.notNull(emf) && emf.isOpen()) { emf.close(); } } /** * Closes all existing {@link EntityManagerFactory} instances kept in cache */ public static void closeEntityManagerFactories() { Collection<ConnectionSemaphore> semaphores = CONNECTIONS.values(); 
EntityManagerFactory emf; for (ConnectionSemaphore semaphore : semaphores) { emf = semaphore.getEmf(); closeEntityManagerFactory(emf); } CONNECTIONS.clear(); } /** * Builder class to create {@link JPAManager} class object * * @author Levan * */ public static class Builder { private JPAManager manager; public Builder() { manager = new JPAManager(); manager.scanArchives = Boolean.TRUE; } public Builder setClasses(List<String> classes) { manager.classes = classes; return this; } public Builder setURL(URL url) { manager.url = url; return this; } public Builder setPath(String path) { manager.path = path; return this; } public Builder setProperties(Map<Object, Object> properties) { manager.properties = properties; return this; } public Builder setSwapDataSource(boolean swapDataSource) { manager.swapDataSource = swapDataSource; return this; } public Builder setScanArchives(boolean scanArchives) { manager.scanArchives = scanArchives; return this; } public Builder setDataSourcePooledType(boolean dsPooledType) { JPAManager.pooledDataSource = dsPooledType; return this; } public Builder setClassLoader(ClassLoader loader) { manager.loader = loader; return this; } public JPAManager build() { return manager; } } }
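/*
 * Hedged usage sketch (not part of the original sources): the cache above is keyed by
 * unit name, and a ConnectionSemaphore has to be registered before setConnection() is
 * called, otherwise CONNECTIONS.get(unitName) returns null. The unit name, JNDI name
 * and persistence.xml path below are made up, and error handling is reduced to
 * propagating IOException.
 */
package org.lightmare.jpa;

import java.io.IOException;

import javax.persistence.EntityManagerFactory;

public class JPAManagerExample {
    public static EntityManagerFactory connect() throws IOException {
        String unitName = "example-unit";

        // 1. Register (or re-use) the semaphore that guards this unit.
        JPAManager.setSemaphore(unitName, "example-jndi");

        // 2. Build the manager and let it create, cache and (optionally) bind the EMF.
        new JPAManager.Builder()
                .setPath("META-INF/persistence.xml")
                .setSwapDataSource(false)
                .build()
                .setConnection(unitName);

        // 3. Later callers block until the connection is ready, then share the EMF.
        return JPAManager.getEntityManagerFactory(unitName);
    }
}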
package org.animotron.graph.traverser; import org.animotron.graph.handler.GraphHandler; import org.animotron.graph.serializer.CachedSerializer; import org.animotron.manipulator.PFlow; import org.animotron.manipulator.QCAVector; import org.animotron.statement.Prefix; import org.animotron.statement.Statement; import org.animotron.statement.ml.ELEMENT; import org.animotron.statement.ml.MLOperator; import org.animotron.statement.ml.NS; import org.animotron.statement.ml.QNAME; import org.animotron.statement.value.VALUE; import org.neo4j.graphdb.Relationship; import java.io.IOException; /** * @author <a href="mailto:shabanovd@gmail.com">Dmitriy Shabanov</a> * @author <a href="mailto:gazdovskyd@gmail.com">Evgeny Gazdovsky</a> * */ public class MLResultTraverser extends ResultTraverser { public static MLResultTraverser _ = new MLResultTraverser(); protected MLResultTraverser() {} @Override protected void process(GraphHandler handler, PFlow pf, Statement s, Statement parent, QCAVector rr, int level, boolean isOne, int pos, boolean isLast) throws IOException { if (s != null) { if (s instanceof MLOperator || s instanceof VALUE) { if (s instanceof Prefix) { node = rr.getClosest().getEndNode(); It it = new It(node); String[] param = {null, null}; try { if (it.hasNext()) { Object p = it.next(); param[0] = param(pf, p); if (!(s instanceof ELEMENT)) { param[1] = param(pf, it); if (param[1] == null) { if (s instanceof NS) { if (QNAME._.name().equals(p instanceof String ? p : ((Relationship) p).getType().name())) { param[1] = ""; } else { param[1] = param[0]; param[0] = ""; } } } } handler.start(s, parent, param, level++, isOne, pos, isLast); iterate(handler, pf, rr, s, it, level); handler.end(s, parent, param, --level, isOne, pos, isLast); } } finally { it.remove(); } } else if (!(s instanceof VALUE) || (s instanceof VALUE && level > 0)) { String param = CachedSerializer.STRING.serialize(pf, rr); handler.start(s, parent, param, level++, isOne, pos, isLast); handler.end(s, parent, param, --level, isOne, pos, isLast); } } else { super.process(handler, pf, s, parent, rr, level, isOne, pos, isLast); } } } private String param(PFlow pf, It it) throws IOException { if (it.hasNext()) { return param(pf, it.next()); } return null; } private String param(PFlow pf, Object o) throws IOException { return o instanceof Relationship ? CachedSerializer.STRING.serialize(pf, (Relationship) o) : (String) node.getProperty((String) o); } }
package org.lightmare.jpa; import java.io.IOException; import java.net.URL; import java.util.Enumeration; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.persistence.EntityManager; import javax.persistence.EntityManagerFactory; import javax.persistence.Persistence; import org.apache.log4j.Logger; import org.lightmare.cache.ConnectionContainer; import org.lightmare.cache.ConnectionSemaphore; import org.lightmare.config.Configuration; import org.lightmare.jndi.JndiManager; import org.lightmare.jpa.jta.HibernateConfig; import org.lightmare.libraries.LibraryLoader; import org.lightmare.utils.NamingUtils; import org.lightmare.utils.ObjectUtils; import org.lightmare.utils.StringUtils; /** * Creates and caches {@link EntityManagerFactory} for each EJB bean * {@link Class}'s appropriate field (annotated by @PersistenceContext) * * @author Levan * */ public class JpaManager { private List<String> classes; private String path; private URL url; private Map<Object, Object> properties; private boolean swapDataSource; private boolean scanArchives; private ClassLoader loader; private static final String COULD_NOT_BIND_JNDI_ERROR = "could not bind connection"; private static final Logger LOG = Logger.getLogger(JpaManager.class); private JpaManager() { } /** * Checks if entity persistence.xml {@link URL} is provided * * @return boolean */ private boolean checkForURL() { return ObjectUtils.notNull(url) && ObjectUtils.available(url.toString()); } /** * Checks if entity classes or persistence.xml path are provided * * @param classes * @return boolean */ private boolean checkForBuild() { return ObjectUtils.available(classes) || ObjectUtils.available(path) || checkForURL() || swapDataSource || scanArchives; } /** * Added transaction properties for JTA data sources */ private void addTransactionManager() { if (properties == null) { properties = new HashMap<Object, Object>(); } HibernateConfig[] hibernateConfigs = HibernateConfig.values(); for (HibernateConfig hibernateConfig : hibernateConfigs) { properties.put(hibernateConfig.key, hibernateConfig.value); } } /** * Creates {@link EntityManagerFactory} by hibernate or by extended builder * {@link Ejb3ConfigurationImpl} if entity classes or persistence.xml file * path are provided * * @see Ejb3ConfigurationImpl#configure(String, Map) and * Ejb3ConfigurationImpl#createEntityManagerFactory() * * @param unitName * @return {@link EntityManagerFactory} */ @SuppressWarnings("deprecation") private EntityManagerFactory buildEntityManagerFactory(String unitName) throws IOException { EntityManagerFactory emf; Ejb3ConfigurationImpl cfg; boolean pathCheck = ObjectUtils.available(path); boolean urlCheck = checkForURL(); Ejb3ConfigurationImpl.Builder builder = new Ejb3ConfigurationImpl.Builder(); if (loader == null) { loader = LibraryLoader.getContextClassLoader(); } if (ObjectUtils.available(classes)) { builder.setClasses(classes); // Loads entity classes to current ClassLoader instance LibraryLoader.loadClasses(classes, loader); } if (pathCheck || urlCheck) { Enumeration<URL> xmls; ConfigLoader configLoader = new ConfigLoader(); if (pathCheck) { xmls = configLoader.readFile(path); } else { xmls = configLoader.readURL(url); } builder.setXmls(xmls); String shortPath = configLoader.getShortPath(); builder.setShortPath(shortPath); } builder.setSwapDataSource(swapDataSource); builder.setScanArchives(scanArchives); builder.setOverridenClassLoader(loader); cfg = builder.build(); if (ObjectUtils.notTrue(swapDataSource)) { addTransactionManager(); } 
Ejb3ConfigurationImpl configured = cfg.configure(unitName, properties); emf = ObjectUtils.notNull(configured) ? configured .buildEntityManagerFactory() : null; return emf; } /** * Checks if entity classes or persistence.xml file path are provided to * create {@link EntityManagerFactory} * * @see #buildEntityManagerFactory(String, String, Map, List) * * @param unitName * @param properties * @param path * @param classes * @return {@link EntityManagerFactory} * @throws IOException */ private EntityManagerFactory createEntityManagerFactory(String unitName) throws IOException { EntityManagerFactory emf; if (checkForBuild()) { emf = buildEntityManagerFactory(unitName); } else if (properties == null) { emf = Persistence.createEntityManagerFactory(unitName); } else { emf = Persistence.createEntityManagerFactory(unitName, properties); } return emf; } /** * Binds {@link EntityManagerFactory} to {@link javax.naming.InitialContext} * * @param jndiName * @param unitName * @param emf * @throws IOException */ private void bindJndiName(ConnectionSemaphore semaphore) throws IOException { boolean bound = semaphore.isBound(); if (ObjectUtils.notTrue(bound)) { String jndiName = semaphore.getJndiName(); if (ObjectUtils.available(jndiName)) { JndiManager jndiManager = new JndiManager(); try { String fullJndiName = NamingUtils .createJpaJndiName(jndiName); if (jndiManager.lookup(fullJndiName) == null) { jndiManager.rebind(fullJndiName, semaphore.getEmf()); } } catch (IOException ex) { LOG.error(ex.getMessage(), ex); String errorMessage = StringUtils.concat( COULD_NOT_BIND_JNDI_ERROR, semaphore.getUnitName()); throw new IOException(errorMessage, ex); } } } semaphore.setBound(Boolean.TRUE); } /** * Builds connection, wraps it in {@link ConnectionSemaphore} locks and * caches appropriate instance * * @param unitName * @throws IOException */ public void setConnection(String unitName) throws IOException { ConnectionSemaphore semaphore = ConnectionContainer .getSemaphore(unitName); if (semaphore.isInProgress()) { EntityManagerFactory emf = createEntityManagerFactory(unitName); semaphore.setEmf(emf); semaphore.setInProgress(Boolean.FALSE); bindJndiName(semaphore); } else if (semaphore.getEmf() == null) { throw new IOException(String.format( "Connection %s was not in progress", unitName)); } else { bindJndiName(semaphore); } } /** * Closes passed {@link EntityManagerFactory} * * @param emf */ public static void closeEntityManagerFactory(EntityManagerFactory emf) { if (ObjectUtils.notNull(emf) && emf.isOpen()) { emf.close(); } } public static void closeEntityManager(EntityManager em) { if (ObjectUtils.notNull(em) && em.isOpen()) { em.close(); } } /** * Builder class to create {@link JpaManager} class object * * @author Levan * */ public static class Builder { private JpaManager manager; public Builder() { manager = new JpaManager(); manager.scanArchives = Boolean.TRUE; } /** * Sets {@link javax.persistence.Entity} class names to initialize * * @param classes * @return {@link Builder} */ public Builder setClasses(List<String> classes) { manager.classes = classes; return this; } /** * Sets {@link URL} for persistence.xml file * * @param url * @return {@link Builder} */ public Builder setURL(URL url) { manager.url = url; return this; } /** * Sets path for persistence.xml file * * @param path * @return {@link Builder} */ public Builder setPath(String path) { manager.path = path; return this; } /** * Sets additional persistence properties * * @param properties * @return {@link Builder} */ public Builder 
setProperties(Map<Object, Object> properties) { manager.properties = properties; return this; } /** * Sets boolean check property to swap jta data source value with non * jta data source value * * @param swapDataSource * @return {@link Builder} */ public Builder setSwapDataSource(boolean swapDataSource) { manager.swapDataSource = swapDataSource; return this; } /** * Sets boolean check to scan deployed archive files for * {@link javax.persistence.Entity} annotated classes * * @param scanArchives * @return {@link Builder} */ public Builder setScanArchives(boolean scanArchives) { manager.scanArchives = scanArchives; return this; } /** * Sets {@link ClassLoader} for persistence classes * * @param loader * @return {@link Builder} */ public Builder setClassLoader(ClassLoader loader) { manager.loader = loader; return this; } public Builder configure(Configuration configuration) { setPath(configuration.getPersXmlPath()) .setProperties(configuration.getPersistenceProperties()) .setSwapDataSource(configuration.isSwapDataSource()) .setScanArchives(configuration.isScanArchives()); return this; } public JpaManager build() { return manager; } } }
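/*
 * Added usage sketch (not part of the original sources): shows how the JpaManager.Builder API
 * above is typically driven. The persistence.xml path and the unit name are placeholder values,
 * and only methods declared in JpaManager and JpaManager.Builder above are used; a
 * ConnectionSemaphore for the unit is assumed to have been registered as "in progress" in
 * ConnectionContainer before setConnection is called.
 */
import java.io.IOException;

import org.lightmare.jpa.JpaManager;
import org.lightmare.libraries.LibraryLoader;

class JpaManagerUsageSketch {

    static void connect() throws IOException {
        // scanArchives defaults to true in the Builder constructor; the other setters are optional.
        JpaManager manager = new JpaManager.Builder()
                .setPath("META-INF/persistence.xml") // placeholder path
                .setSwapDataSource(true)
                .setClassLoader(LibraryLoader.getContextClassLoader())
                .build();

        // Builds (or reuses) the EntityManagerFactory for the unit and binds it to JNDI
        // through the cached ConnectionSemaphore.
        manager.setConnection("exampleUnit"); // placeholder unit name
    }
}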
package org.lightmare.utils; import org.apache.log4j.Logger; /** * Utility class for logging * * @author levan * @since 0.0.81-SNAPSHOT */ public class LogUtils { /** * Generates logging messages * * @param message * @param formats * @return {@link String} */ public static String logMessage(String message, Object... formats) { String logMessage; if (CollectionUtils.valid(formats)) { logMessage = String.format(message, formats); } else { logMessage = message; } return logMessage; } /** * Generated fatal log * * @param log * @param ex * @param message * @param formats */ public static void fatal(Logger log, Throwable ex, String message, Object... formats) { String logMessage = logMessage(message, formats); if (ex == null) { log.fatal(logMessage); } else { log.fatal(logMessage, ex); } } /** * Generates fatal logs * * @param log * @param message * @param formats */ public static void fatal(Logger log, String message, Object... formats) { fatal(log, null, message, formats); } /** * Generates error log * * @param log * @param ex * @param message * @param formats */ public static void error(Logger log, Throwable ex, String message, Object... formats) { String logMessage = logMessage(message, formats); if (ex == null) { log.error(logMessage); } else { log.error(logMessage, ex); } } /** * Generates error logs * * @param log * @param message * @param formats */ public static void error(Logger log, String message, Object... formats) { error(log, null, message, formats); } /** * Generates debug logs * * @param log * @param ex * @param message * @param formats */ public static void debug(Logger log, Throwable ex, String message, Object... formats) { String logMessage = logMessage(message, formats); if (ex == null) { log.debug(logMessage); } else { log.debug(logMessage, ex); } } /** * Generates debug logs * * @param log * @param message * @param formats */ public static void debug(Logger log, String message, Object... formats) { debug(log, null, message, formats); } /** * Generates info logs * * @param log * @param ex * @param message * @param formats */ public static void info(Logger log, Throwable ex, String message, Object... formats) { String logMessage = logMessage(message, formats); if (ex == null) { log.info(logMessage); } else { log.info(logMessage, ex); } } /** * Generates info logs * * @param log * @param message * @param formats */ public static void info(Logger log, String message, Object... formats) { info(log, null, message, formats); } }
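/*
 * Added usage sketch (not part of the original sources): demonstrates the String.format-style
 * message handling of LogUtils above. The logger, message text and unit name are placeholders;
 * only methods declared in LogUtils above are called.
 */
import org.apache.log4j.Logger;

import org.lightmare.utils.LogUtils;

class LogUtilsUsageSketch {

    private static final Logger LOG = Logger.getLogger(LogUtilsUsageSketch.class);

    static void report(String unitName, Throwable cause) {
        // With format arguments the message goes through String.format; without them it is logged as-is.
        LogUtils.error(LOG, cause, "could not bind connection for unit %s", unitName);
        LogUtils.info(LOG, "finished processing unit %s", unitName);
        LogUtils.debug(LOG, "plain message without format arguments");
    }
}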
package com.github.mkopylec.webbackend.authentication; import org.springframework.security.core.Authentication; import org.springframework.security.web.authentication.AuthenticationSuccessHandler; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; public class EmptyAuthenticationHandler implements AuthenticationSuccessHandler { @Override public void onAuthenticationSuccess(HttpServletRequest request, HttpServletResponse response, Authentication authentication) throws IOException, ServletException { } }
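/*
 * Added wiring sketch (not part of the original sources): registers the no-op success handler
 * above so that a successful form login returns the response as-is instead of issuing a redirect,
 * which is a common setup for AJAX/REST-style clients. The classic WebSecurityConfigurerAdapter
 * style of Spring Security Java configuration is assumed here.
 */
import org.springframework.context.annotation.Configuration;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;

import com.github.mkopylec.webbackend.authentication.EmptyAuthenticationHandler;

@Configuration
@EnableWebSecurity
class EmptyAuthenticationHandlerWiringSketch extends WebSecurityConfigurerAdapter {

    @Override
    protected void configure(HttpSecurity http) throws Exception {
        // The handler deliberately does nothing, so the client only sees the login response status.
        http.formLogin().successHandler(new EmptyAuthenticationHandler());
    }
}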
package com.intellij.xml.util; import com.intellij.xml.XmlElementDescriptor; import com.intellij.xml.util.documentation.HtmlDescriptorsTable; import com.intellij.psi.xml.*; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiReference; import com.intellij.psi.PsiFile; import com.intellij.psi.filters.ElementFilter; import com.intellij.psi.scope.PsiScopeProcessor; import com.intellij.psi.impl.source.resolve.reference.PsiReferenceProvider; import com.intellij.psi.impl.source.resolve.reference.ReferenceType; import com.intellij.psi.impl.source.resolve.reference.impl.providers.FileReferenceSet; import com.intellij.psi.html.HtmlTag; import com.intellij.openapi.fileTypes.StdFileTypes; import com.intellij.openapi.util.Key; import java.util.*; import gnu.trove.THashSet; public class HtmlUtil { private HtmlUtil() {} private static final String EMPTY_TAGS[] = { "base","hr","meta","link","frame","br","basefont","param","img","area","input","isindex","col" }; private static final Set<String> EMPTY_TAGS_MAP = new THashSet<String>(); private static final String OPTIONAL_END_TAGS[] = { //"html", "head", //"body", "p", "li", "dd", "dt", "thead", "tfoot", "tbody", "colgroup", "tr", "th", "td", "option" }; private static final Set<String> OPTIONAL_END_TAGS_MAP = new THashSet<String>(); private static final String BLOCK_TAGS[] = { "p", "h1", "h2", "h3", "h4", "h5", "h6", "ul", "ol", "dir", "menu", "pre", "dl", "div", "center", "noscript", "noframes", "blockquote", "form", "isindex", "hr", "table", "fieldset", "address", // nonexplicitly specified "map", // flow elements "body", "object", "applet", "ins", "del", "dd", "li", "button", "th", "td", "iframe" }; private static final Set<String> BLOCK_TAGS_MAP = new THashSet<String>(); private static final String INLINE_ELEMENTS_CONTAINER[] = { "p", "h1", "h2", "h3", "h4", "h5", "h6", "pre", "dt" }; private static final Set<String> INLINE_ELEMENTS_CONTAINER_MAP = new THashSet<String>(); private static final String EMPTY_ATTRS[] = { "nowrap", "compact", "disabled", "readonly", "selected", "multiple", "nohref", "ismap", "declare", "noshade" }; private static final Set<String> EMPTY_ATTRS_MAP = new THashSet<String>(); static { for(int i=0;i<EMPTY_TAGS.length;++i) { EMPTY_TAGS_MAP.add(EMPTY_TAGS[i]); } for(int i=0;i<EMPTY_ATTRS.length;++i) { EMPTY_ATTRS_MAP.add(EMPTY_ATTRS[i]); } for (int i = 0; i < OPTIONAL_END_TAGS.length; i++) { String optionalEndTag = OPTIONAL_END_TAGS[i]; OPTIONAL_END_TAGS_MAP.add(optionalEndTag); } for (int i = 0; i < BLOCK_TAGS.length; i++) { String blockTag = BLOCK_TAGS[i]; BLOCK_TAGS_MAP.add(blockTag); } for (int i = 0; i < INLINE_ELEMENTS_CONTAINER.length; i++) { String blockTag = INLINE_ELEMENTS_CONTAINER[i]; INLINE_ELEMENTS_CONTAINER_MAP.add(blockTag); } } public static final boolean isSingleHtmlTag(String tagName) { return EMPTY_TAGS_MAP.contains(tagName.toLowerCase()); } public static final boolean isOptionalEndForHtmlTag(String tagName) { return OPTIONAL_END_TAGS_MAP.contains(tagName.toLowerCase()); } public static boolean isSingleHtmlAttribute(String attrName) { return EMPTY_ATTRS_MAP.contains(attrName.toLowerCase()); } public static boolean isHtmlBlockTag(String tagName) { return BLOCK_TAGS_MAP.contains(tagName.toLowerCase()); } public static boolean isInlineTagContainer(String tagName) { return INLINE_ELEMENTS_CONTAINER_MAP.contains(tagName.toLowerCase()); } public static void addHtmlSpecificCompletions(final XmlElementDescriptor descriptor, final XmlTag element, final List<XmlElementDescriptor> variants) { // add html block 
completions for tags with optional ends! String name = descriptor.getName(element); if (isOptionalEndForHtmlTag(name)) { PsiElement parent = element.getParent(); if (parent!=null) { /* we need grand parent since completion already uses parent's descriptor */ parent = parent.getParent(); } if (parent instanceof HtmlTag) { final XmlElementDescriptor parentDescriptor = ((HtmlTag)parent).getDescriptor(); if (parentDescriptor!=descriptor && parentDescriptor!=null) { final XmlElementDescriptor[] elementsDescriptors = parentDescriptor.getElementsDescriptors((XmlTag)parent); for (int i = 0; i < elementsDescriptors.length; i++) { final XmlElementDescriptor elementsDescriptor = elementsDescriptors[i]; if (isHtmlBlockTag(elementsDescriptor.getName())) { variants.add(elementsDescriptor); } } } } } } public static class HtmlReferenceProvider implements PsiReferenceProvider { private static final Key<PsiReference[]> cachedReferencesKey = Key.create("html.cachedReferences"); public ElementFilter getFilter() { return new ElementFilter() { public boolean isAcceptable(Object _element, PsiElement context) { PsiElement element = (PsiElement) _element; PsiFile file = element.getContainingFile(); if (file.getFileType() == StdFileTypes.HTML || file.getFileType() == StdFileTypes.XHTML || file.getFileType() == StdFileTypes.JSPX || file.getFileType() == StdFileTypes.JSP ) { final PsiElement parent = ((PsiElement)element).getParent(); if (parent instanceof XmlAttribute) { XmlAttribute xmlAttribute = (XmlAttribute) parent; final String attrName = xmlAttribute.getName(); XmlTag tag = xmlAttribute.getParent(); final String tagName = tag.getName(); return ( attrName.equalsIgnoreCase("src") && (tagName.equalsIgnoreCase("img") || tagName.equalsIgnoreCase("script") ) ) || ( attrName.equalsIgnoreCase("href") && tagName.equalsIgnoreCase("a") ); } } return false; } public boolean isClassAcceptable(Class hintClass) { return true; } }; } public PsiReference[] getReferencesByElement(PsiElement element) { PsiReference[] refs = element.getUserData(cachedReferencesKey); if (refs == null) { String text = element.getText(); int offset = 0; if (text.charAt(0) == '"' || text.charAt(0) == '\'') ++offset; text = text.substring(offset,text.length() - offset); int ind = text.lastIndexOf('#'); if (ind != -1) text = text.substring(0,ind); ind = text.lastIndexOf('?'); if (ind!=-1) text = text.substring(0,ind); if (text.length() > 0 && !text.startsWith("http://") && !text.startsWith("mailto:") && !text.startsWith("javascript:") ) { refs = new FileReferenceSet(text, element, offset, ReferenceType.FILE_TYPE, this).getAllReferences(); } else { refs = PsiReference.EMPTY_ARRAY; } element.putUserData(cachedReferencesKey,refs); } return refs; } public PsiReference[] getReferencesByElement(PsiElement element, ReferenceType type) { return PsiReference.EMPTY_ARRAY; } public PsiReference[] getReferencesByString(String str, PsiElement position, ReferenceType type, int offsetInPosition) { return PsiReference.EMPTY_ARRAY; } public void handleEmptyContext(PsiScopeProcessor processor, PsiElement position) { } } public static String[] getHtmlTagNames() { return HtmlDescriptorsTable.getHtmlTagNames(); } }
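/*
 * Added usage sketch (not part of the original sources): the classification helpers above are
 * plain case-insensitive set lookups, so they can be exercised without any IDE/PSI context.
 */
import com.intellij.xml.util.HtmlUtil;

class HtmlUtilUsageSketch {

    static void demo() {
        boolean single = HtmlUtil.isSingleHtmlTag("BR");                 // true: <br> has no closing tag
        boolean optionalEnd = HtmlUtil.isOptionalEndForHtmlTag("li");    // true: </li> may be omitted
        boolean block = HtmlUtil.isHtmlBlockTag("table");                // true: block-level tag
        boolean valueless = HtmlUtil.isSingleHtmlAttribute("disabled");  // true: value-less attribute
        System.out.println(single + " " + optionalEnd + " " + block + " " + valueless);
    }
}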
package net.sf.iwant.wsdef; import java.util.Arrays; import java.util.List; import java.util.SortedSet; import java.util.TreeSet; import net.sf.iwant.api.EclipseSettings; import net.sf.iwant.api.EmmaCoverage; import net.sf.iwant.api.EmmaInstrumentation; import net.sf.iwant.api.EmmaReport; import net.sf.iwant.api.FromRepository; import net.sf.iwant.api.IwantWorkspace; import net.sf.iwant.api.JavaBinModule; import net.sf.iwant.api.JavaClassesAndSources; import net.sf.iwant.api.JavaModule; import net.sf.iwant.api.JavaSrcModule; import net.sf.iwant.api.JavaSrcModule.IwantSrcModuleSpex; import net.sf.iwant.api.Path; import net.sf.iwant.api.SideEffect; import net.sf.iwant.api.SideEffectDefinitionContext; import net.sf.iwant.api.Source; import net.sf.iwant.api.Target; import net.sf.iwant.api.TestedIwantDependencies; public class WorkspaceForIwant implements IwantWorkspace { @Override public List<? extends Target> targets() { return Arrays.asList(emmaCoverageReport()); } @Override public List<? extends SideEffect> sideEffects( SideEffectDefinitionContext ctx) { return Arrays.asList(EclipseSettings.with().name("eclipse-settings") .modules(ctx.wsdefdefJavaModule(), ctx.wsdefJavaModule()) .modules(allModules()).end()); } private static IwantSrcModuleSpex iwantSrcModule(String subName) { String fullName = "iwant-" + subName; return JavaSrcModule.with().name(fullName) .locationUnderWsRoot(fullName).mainJava("src/main/java") .testJava("src/test/java"); } private static SortedSet<JavaModule> allModules() { return new TreeSet<JavaModule>(Arrays.asList(commonsMath(), iwantDistillery(), iwantDistillery2(), iwantDocs(), iwantWxampleWsdef(), junit(), iwantMockWsroot(), iwantTestarea(), iwantTestrunner(), iwantTutorialWsdefs())); } // the targets private static Path emma() { return TestedIwantDependencies.emma(); } private static Target emmaCoverageReport() { // TODO enable distillery when it passes return EmmaReport .with() .name("emma-coverage-report") .emma(emma()) .instrumentations(distilleryEmmaInstrumentation(), distillery2EmmaInstrumentation(), testareaEmmaInstrumentation(), testrunnerEmmaInstrumentation()) .coverages(distilleryEmmaCoverage(), distillery2EmmaCoverage(), testrunnerEmmaCoverage()).end(); } private static EmmaCoverage distilleryEmmaCoverage() { return EmmaCoverage .with() .name("iwant-distillery.emmacoverage") .antJars(TestedIwantDependencies.antJar(), TestedIwantDependencies.antLauncherJar()) .emma(emma()) .instrumentations(distilleryEmmaInstrumentation(), testareaEmmaInstrumentation()) .nonInstrumentedClasses(iwantDistillery().testArtifact(), iwantDistilleryClasspathMarker().mainArtifact(), iwantDistilleryTestResources().mainArtifact(), junit().mainArtifact(), iwantTestareaClassdir().mainArtifact()) .mainClassAndArguments("org.junit.runner.JUnitCore", "net.sf.iwant.IwantDistillerySuite").end(); } private static EmmaInstrumentation distilleryEmmaInstrumentation() { JavaSrcModule mod = iwantDistillery(); return EmmaInstrumentation.of( new JavaClassesAndSources(mod.mainArtifact(), mod .mainJavasAsPaths())).using(emma()); } private static EmmaCoverage distillery2EmmaCoverage() { return EmmaCoverage .with() .name("iwant-distillery2.emmacoverage") .antJars(TestedIwantDependencies.antJar(), TestedIwantDependencies.antLauncherJar()) .emma(emma()) .instrumentations(distilleryEmmaInstrumentation(), distillery2EmmaInstrumentation(), testareaEmmaInstrumentation()) .nonInstrumentedClasses( iwantDistilleryClasspathMarker().mainArtifact(), iwantDistillery2().testArtifact(), junit().mainArtifact(), 
iwantTestareaClassdir().mainArtifact(), iwantTestrunner().testArtifact()) .mainClassAndArguments("org.junit.runner.JUnitCore", "net.sf.iwant.IwantDistillery2Suite").end(); } private static EmmaInstrumentation distillery2EmmaInstrumentation() { JavaSrcModule mod = iwantDistillery2(); return EmmaInstrumentation.of( new JavaClassesAndSources(mod.mainArtifact(), mod .mainJavasAsPaths())).using(emma()); } private static EmmaCoverage testrunnerEmmaCoverage() { return EmmaCoverage .with() .name("iwant-testrunner.emmacoverage") .antJars(TestedIwantDependencies.antJar(), TestedIwantDependencies.antLauncherJar()) .emma(emma()) .instrumentations(testrunnerEmmaInstrumentation()) .nonInstrumentedClasses(junit().mainArtifact(), iwantTestrunner().testArtifact()) .mainClassAndArguments("org.junit.runner.JUnitCore", "net.sf.iwant.testrunner.IwantTestRunnerTest").end(); } private static EmmaInstrumentation testareaEmmaInstrumentation() { JavaSrcModule mod = iwantTestarea(); return EmmaInstrumentation.of( new JavaClassesAndSources(mod.mainArtifact(), mod .mainJavasAsPaths())).using(emma()); } private static EmmaInstrumentation testrunnerEmmaInstrumentation() { JavaSrcModule mod = iwantTestrunner(); return EmmaInstrumentation.of( new JavaClassesAndSources(mod.mainArtifact(), mod .mainJavasAsPaths())).using(emma()); } // the modules private static JavaModule commonsMath() { return JavaBinModule.providing(FromRepository.ibiblio() .group("commons-math").name("commons-math").version("1.2")); } private static JavaSrcModule iwantDistillery() { return iwantSrcModule("distillery") .mainJava("as-some-developer/with/java") .testResources("src/test/resources") .mainDeps(junit(), iwantTestarea()) .testDeps(iwantDistilleryClasspathMarker()).end(); } private static JavaBinModule iwantDistilleryClasspathMarker() { return JavaBinModule.providing(Source .underWsroot("iwant-distillery/classpath-marker")); } private static JavaBinModule iwantDistilleryTestResources() { return JavaBinModule.providing(Source .underWsroot("iwant-distillery/src/test/resources")); } private static JavaSrcModule iwantDistillery2() { return iwantSrcModule("distillery2").mainDeps(iwantDistillery()) .testDeps(junit(), iwantTestarea()).end(); } private static JavaModule iwantDocs() { return iwantSrcModule("docs").end(); } private static JavaModule iwantWxampleWsdef() { return iwantSrcModule("example-wsdef").noTestJava() .mainDeps(iwantDistillery2()).end(); } private static JavaModule iwantMockWsroot() { IwantSrcModuleSpex mod = iwantSrcModule("mock-wsroot").noMainJava() .noTestJava(); mod.mainJava("iwant-distillery/src/main/java"); mod.mainJava("iwant-testrunner/src/main/java"); mod.mainJava("iwant-testarea/src/main/java"); mod.mainJava("iwant-distillery/src/test/java"); mod.mainJava("iwant-distillery2/src/test/java"); mod.mainJava("iwant-distillery2/src/main/java"); mod.mainJava("iwant-distillery/as-some-developer/with/java"); return mod.mainDeps(junit()).end(); } private static JavaSrcModule iwantTestarea() { return iwantSrcModule("testarea").noTestJava() .mainDeps(iwantTestareaClassdir()).end(); } private static JavaBinModule iwantTestareaClassdir() { return JavaBinModule.providing(Source .underWsroot("iwant-testarea/testarea-classdir")); } private static JavaSrcModule iwantTestrunner() { return iwantSrcModule("testrunner").mainDeps(junit()).end(); } private static JavaModule iwantTutorialWsdefs() { return iwantSrcModule("tutorial-wsdefs").noMainJava().noTestJava() .mainJava("src").mainDeps(commonsMath(), iwantDistillery2()) .end(); } private static 
JavaModule junit() { return JavaBinModule.providing(FromRepository.ibiblio().group("junit") .name("junit").version("4.8.2")); } }
package autodiff.reasoning.deductions; import static autodiff.reasoning.deductions.Basics.*; import static autodiff.reasoning.expressions.Expressions.*; import static autodiff.reasoning.proofs.ElementaryVerification.*; import static autodiff.reasoning.tactics.Auto.*; import static autodiff.reasoning.tactics.PatternMatching.match; import static autodiff.reasoning.tactics.Stack.*; import static multij.tools.Tools.array; import static multij.tools.Tools.ignore; import java.util.Map; import autodiff.reasoning.proofs.Deduction; import autodiff.reasoning.proofs.ElementaryVerification; import autodiff.reasoning.proofs.Substitution; import autodiff.reasoning.tactics.Auto; import autodiff.reasoning.tactics.Stack.AbortException; import autodiff.rules.SimpleRule; import autodiff.rules.TryRule; import autodiff.rules.Variable; import multij.tools.IllegalInstantiationException; /** * @author codistmonk (creation 2016-08-31) */ public final class ScalarAlgebra { private ScalarAlgebra() { throw new IllegalInstantiationException(); } public static final Auto.Simplifier CANONICALIZER = new Simplifier(Simplifier.Mode.DEFINE) .add(newElementarySimplificationRule2()) .add(newAdditionSimplificationRule()) .add(newIgnoreRule()); public static final Object[] NUMERIC_TYPES = { N, Z, Q, R }; public static final Object[] RELATIVE_TYPES = { Z, Q, R }; public static final Object[] RATIONAL_TYPES = { Q, R }; public static final void load() { Sets.load(); for (final Object type : NUMERIC_TYPES) { for (final Object operator : array($("+"), $("*"))) { final Object _x = $new("x"); final Object _y = $new("y"); suppose("stability_of_" + operator + "_in_" + type, $(FORALL, _x, ",", _y, IN, type, $($(_x, operator, _y), IN, type))); } } for (final Object type : RELATIVE_TYPES) { for (final Object operator : array($("-"))) { final Object _x = $new("x"); final Object _y = $new("y"); suppose("stability_of_" + operator + "_in_" + type, $(FORALL, _x, ",", _y, IN, type, $($(_x, operator, _y), IN, type))); } } for (final Object type : RATIONAL_TYPES) { for (final Object operator : array($("/"))) { final Object _x = $new("x"); final Object _y = $new("y"); suppose("stability_of_" + operator + "_in_" + type, $(FORALL, _x, ",", _y, IN, type, $rule(notZero(_y), $($(_x, operator, _y), IN, type)))); } } for (final Object type : array(Z)) { for (final Object operator : array($("/"))) { final Object _x = $new("x"); final Object _y = $new("y"); suppose("stability_of_" + operator + "_in_" + type, $(FORALL, _x, ",", _y, IN, type, $rule($($(_x, "%", _y), "=", 0), $($(_x, operator, _y), IN, type)))); } } for (final Object operator : array($("+"), $("*"))) { { final Object _x = $new("x"); final Object _y = $new("y"); suppose("commutativity_of_" + operator + "_in_" + R, $(FORALL, _x, ",", _y, IN, R, $($(_x, operator, _y), "=", $(_y, operator, _x)))); } { final Object _x = $new("x"); final Object _y = $new("y"); final Object _z = $new("z"); suppose("associativity_of_" + operator + "_" + operator + "_in_" + R, $(FORALL, _x, ",", _y, ",", _z, IN, R, $($(_x, operator, $(_y, operator, _z)), "=", $($(_x, operator, _y), operator, _z)))); } } { final Object _x = $new("x"); final Object _y = $new("y"); suppose("definition_of_-_in_" + R, $(FORALL, _x, ",", _y, IN, R, $($(_x, "-", _y), "=", $(_x, "+", $("-", _y))))); } { final Object _x = $new("x"); final Object _y = $new("y"); suppose("definition_of_/_in_" + R, $(FORALL, _x, ",", _y, IN, R, $rule(notZero(_y), $($(_x, "/", _y), "=", $(_x, "*", $(_y, "^", -1)))))); } { final Object _x = $new("x"); 
suppose("conversion_from_" + Z + "_to_" + N, $(FORALL, _x, IN, Z, $rule($(0, LE, _x), $(_x, IN, N)))); } { final Object _x = $new("x"); suppose("definition_of_x_^_0", $(FORALL, _x, IN, R, $(_x, "^", 0), "=", 1)); } { final Object _x = $new("x"); suppose("definition_of_x_^_1", $(FORALL, _x, IN, R, $(_x, "^", 1), "=", _x)); } { final Object _x = $new("x"); final Object _a = $new("a"); final Object _b = $new("b"); suppose("simplification_of_nonzero_x_^_a_*_x_^_b", $(FORALL, _x, IN, R, $(FORALL, _a, ",", _b, IN, Z, $rule(notZero(_x), $($(_x, "^", _a), "*", $(_x, "^", _b)), "=", $(_x, "^", $(_a, "+", _b)))))); } { final Object _x = $new("x"); final Object _a = $new("a"); final Object _b = $new("b"); suppose("simplification_of_x_^_a_*_x_^_b", $(FORALL, _x, IN, R, $(FORALL, _a, ",", _b, IN, N, $($(_x, "^", _a), "*", $(_x, "^", _b)), "=", $(_x, "^", $(_a, "+", _b))))); } { final Object _x = $new("x"); final Object _a = $new("a"); final Object _b = $new("b"); suppose("simplification_of_nonzero_x_^_a_^_b", $(FORALL, _x, IN, R, $(FORALL, _a, ",", _b, IN, Z, $rule(notZero(_x), $($($(_x, "^", _a), "^", _b), "=", $(_x, "^", $(_a, "*", _b))))))); } { final Object _x = $new("x"); final Object _a = $new("a"); final Object _b = $new("b"); suppose("simplification_of_x_^_a_^_b", $(FORALL, _x, IN, R, $(FORALL, _a, ",", _b, IN, N, $($($(_x, "^", _a), "^", _b), "=", $(_x, "^", $(_a, "*", _b)))))); } { final Object _x = $new("x"); final Object _a = $new("a"); final Object _b = $new("b"); suppose("simplification_of_a_*_x_+_b_*_x", $(FORALL, _x, ",", _a, ",", _b, IN, R, $($($(_a, "*", _x), "+", $(_b, "*", _x)), "=", $($(_a, "+", _b), "*", _x)))); } { final Object _x = $new("x"); suppose("neutrality_of_0", $(FORALL, _x, IN, R, $($(_x, "+", 0), "=", _x))); } { final Object _x = $new("x"); suppose("neutrality_of_1", $(FORALL, _x, IN, R, $($(1, "*", _x), "=", _x))); } { final Object _x = $new("x"); suppose("absorbingness_of_0", $(FORALL, _x, IN, R, $($(0, "*", _x), "=", 0))); } loadAutoHints(); } public static final Object notZero(final Object _x) { return $(LNOT, $(_x, "=", 0)); } public static final void loadAutoHints() { { final Variable vx = new Variable("x"); final Variable vy = new Variable("y"); hintAutodeduce(tryMatch($($(vx, "+", vy), IN, R), (e, m) -> { subdeduction(); autobind("stability_of_+_in_" + R, vx.get(), vy.get()); autoapply(name(-1)); conclude(); return true; })); } { final Variable vx = new Variable("x"); final Variable vy = new Variable("y"); hintAutodeduce(tryMatch($($(vx, "-", vy), IN, R), (e, m) -> { subdeduction(); autobind("stability_of_-_in_" + R, vx.get(), vy.get()); autoapply(name(-1)); conclude(); return true; })); } { final Variable vx = new Variable("x"); final Variable vy = new Variable("y"); hintAutodeduce(tryMatch($($(vx, "*", vy), IN, R), (e, m) -> { subdeduction(); autobind("stability_of_*_in_" + R, vx.get(), vy.get()); autoapply(name(-1)); conclude(); return true; })); } { final Variable vx = new Variable("x"); final Variable vy = new Variable("y"); hintAutodeduce(tryMatch($($(vx, "/", vy), IN, R), (e, m) -> { subdeduction(); autobind("stability_of_/_in_" + R, vx.get(), vy.get()); autoapply(name(-1)); conclude(); return true; })); } } public static final SimpleRule<Object, Boolean> newElementarySimplificationRule() { return new SimpleRule<>((e, m) -> { try { final Object f = ElementaryVerification.Evaluator.INSTANCE.apply(e); return !f.equals(e) && !Substitution.deepContains(f, null); } catch (final AbortException exception) { throw exception; } catch (final Exception exception) { 
ignore(exception); } return false; }, (e, m) -> { try { final Object f = ElementaryVerification.Evaluator.INSTANCE.apply(e); verifyElementaryProposition($(e, "=", f)); return true; } catch (final AbortException exception) { throw exception; } catch (final Exception exception) { ignore(exception); } return false; }); } public static final TryRule<Object> newIgnoreRule() { return new TryRule<Object>() { @Override public final boolean test(final Object object, final Map<Variable, Object> mapping) { return true; } @Override public final Boolean applyTo(final Object object, final Map<Variable, Object> mapping) { return false; } private static final long serialVersionUID = 5917717573706684334L; }; } public static final TryRule<Object> newElementarySimplificationRule2() { return (e, m) -> { try { final Object f = ElementaryVerification.Evaluator.INSTANCE.apply(e); if (!f.equals(e) && !Substitution.deepContains(f, null)) { verifyElementaryProposition($(e, "=", f)); return true; } } catch (final AbortException exception) { throw exception; } catch (final Exception exception) { ignore(exception); } return false; }; } public static final TryRule<Object> newAdditionSimplificationRule() { return (e, m) -> { final Variable vx = new Variable("x"); final Variable va = new Variable("a"); final Variable vb = new Variable("b"); if (match($($(va, "*", vx), "+", $(vb, "*", vx)), e)) { final Object _x = vx.get(); final Object _a = va.get(); final Object _b = vb.get(); if (_a instanceof Number && _b instanceof Number) { try { autobindTrim("simplification_of_a_*_x_+_b_*_x", _x, _a, _b); return true; } catch (final AbortException exception) { throw exception; } catch (final Exception exception) { ignore(exception); } } } if (match($(vx, "+", $(vb, "*", vx)), e)) { final Object _x = vx.get(); final Object _b = vb.get(); final Deduction deduction = subdeduction(); if (_b instanceof Number) { try { bind("identity", e); autobindTrim("neutrality_of_1", _x); rewriteRight(name(-2), name(-1), 2); autobindTrim("simplification_of_a_*_x_+_b_*_x", _x, 1, _b); rewrite(name(-2), name(-1)); conclude(); return true; } catch (final AbortException exception) { throw exception; } catch (final Exception exception) { ignore(exception); popTo(deduction.getParent()); } } } if (match($($(va, "*", vx), "+", vx), e)) { final Object _x = vx.get(); final Object _a = va.get(); final Deduction deduction = subdeduction(); if (_a instanceof Number) { try { bind("identity", e); autobindTrim("neutrality_of_1", _x); rewriteRight(name(-2), name(-1), 3); autobindTrim("simplification_of_a_*_x_+_b_*_x", _x, _a, 1); rewrite(name(-2), name(-1)); conclude(); return true; } catch (final AbortException exception) { throw exception; } catch (final Exception exception) { ignore(exception); popTo(deduction.getParent()); } } } if (match($(vx, "+", vx), e)) { final Object _x = vx.get(); final Deduction deduction = subdeduction(); try { bind("identity", e); autobindTrim("neutrality_of_1", _x); rewriteRight(name(-2), name(-1), 2, 3); autobindTrim("simplification_of_a_*_x_+_b_*_x", _x, 1, 1); rewrite(name(-2), name(-1)); conclude(); return true; } catch (final AbortException exception) { throw exception; } catch (final Exception exception) { ignore(exception); popTo(deduction.getParent()); } } return false; }; } }
package com.asakusafw.testdriver; import java.io.File; import java.io.IOException; import java.util.List; import com.asakusafw.testtools.TestUtils; /** * Base class for test drivers that use the asakusa-test-tools API. */ public class TestDriverTestToolsBase extends TestDriverBase { /** Test utility used to prepare and verify test data. */ protected TestUtils testUtils; /** Test data definition files passed to {@link TestUtils} (testDataFileList). */ protected List<File> testDataFileList; /** Directory that holds the test data for the current test class and method. */ protected File testDataDir; /** * Creates a new instance that resolves its test data from the default test data directory. * * @throws RuntimeException if the test environment cannot be initialized */ public TestDriverTestToolsBase() { super(null); } /** * Creates a new instance that uses the given test data definition files. * * @param testDataFileList test data definition {@link File} list * @throws RuntimeException if the test environment cannot be initialized */ public TestDriverTestToolsBase(List<File> testDataFileList) { super(null); this.testDataFileList = testDataFileList; } @Override protected void initialize() { super.initialize(); try { System.setProperty("ASAKUSA_TESTTOOLS_CONF", buildProperties.getProperty("asakusa.jdbc.conf")); System.setProperty("ASAKUSA_TEMPLATEGEN_OUTPUT_DIR", buildProperties.getProperty("asakusa.testdatasheet.output")); String testDataDirPath = buildProperties.getProperty("asakusa.testdriver.testdata.dir"); if (testDataDirPath == null) { testDataDirPath = TestDriverBase.TESTDATA_DIR_DEFAULT; } if (testDataFileList == null) { testDataDir = new File(testDataDirPath + System.getProperty("file.separator") + driverContext.getClassName() + System.getProperty("file.separator") + driverContext.getMethodName()); testUtils = new TestUtils(testDataDir); } else { testUtils = new TestUtils(testDataFileList); } } catch (IOException e) { throw new RuntimeException(e); } } }
package org.pkcs11.jacknji11.jna; import org.pkcs11.jacknji11.CKA; import org.pkcs11.jacknji11.CKM; import org.pkcs11.jacknji11.CK_C_INITIALIZE_ARGS; import org.pkcs11.jacknji11.CK_INFO; import org.pkcs11.jacknji11.CK_MECHANISM_INFO; import org.pkcs11.jacknji11.CK_NOTIFY; import org.pkcs11.jacknji11.CK_SESSION_INFO; import org.pkcs11.jacknji11.CK_SLOT_INFO; import org.pkcs11.jacknji11.CK_TOKEN_INFO; import org.pkcs11.jacknji11.LongRef; import org.pkcs11.jacknji11.NativePointer; import org.pkcs11.jacknji11.NativeProvider; import org.pkcs11.jacknji11.ULong; import com.sun.jna.NativeLong; import com.sun.jna.Pointer; import com.sun.jna.ptr.NativeLongByReference; /** * JNA PKCS#11 provider. Does mapping between jacknji11 structs and * JNA structs and calls through to {@link JNANativeI} native methods. * @author Joel Hockey (joel.hockey@gmail.com) */ public class JNA implements NativeProvider { { // set ULong size ULong.ULONG_SIZE = NativeLong.SIZE == 4 ? ULong.ULongSize.ULONG4 : ULong.ULongSize.ULONG8; } private JNANativeI jnaNative = null; public JNA(){ this("cryptoki"); } public JNA(String customLibrary) { jnaNative = (JNANativeI) com.sun.jna.Native.loadLibrary(customLibrary, JNANativeI.class); } public long C_Initialize(CK_C_INITIALIZE_ARGS pInitArgs) { if(pInitArgs.createMutex == null && pInitArgs.destroyMutex == null && pInitArgs.lockMutex == null && pInitArgs.unlockMutex == null) return jnaNative.C_Initialize(null); return jnaNative.C_Initialize(new JNA_CK_C_INITIALIZE_ARGS(pInitArgs)); } public long C_Finalize(NativePointer pReserved) { return jnaNative.C_Finalize(new Pointer(pReserved.getAddress())); } public long C_GetInfo(CK_INFO pInfo) { JNA_CK_INFO jna_pInfo = new JNA_CK_INFO().readFrom(pInfo); long rv = jnaNative.C_GetInfo(jna_pInfo); jna_pInfo.writeTo(pInfo); return rv; } public long C_GetSlotList(boolean tokenPresent, long[] pSlotList, LongRef pulCount) { LongArray jna_pSlotList = new LongArray(pSlotList); NativeLongByReference jna_pulCount = NLP(pulCount.value); long rv = jnaNative.C_GetSlotList(tokenPresent ? 
(byte)1 : (byte)0, jna_pSlotList, jna_pulCount); jna_pSlotList.update(); pulCount.value = jna_pulCount.getValue().longValue(); return rv; } public long C_GetSlotInfo(long slotID, CK_SLOT_INFO pInfo) { JNA_CK_SLOT_INFO jna_pInfo = new JNA_CK_SLOT_INFO().readFrom(pInfo); long rv = jnaNative.C_GetSlotInfo(NL(slotID), jna_pInfo); jna_pInfo.writeTo(pInfo); return rv; } public long C_GetTokenInfo(long slotID, CK_TOKEN_INFO pInfo) { JNA_CK_TOKEN_INFO jna_pInfo = new JNA_CK_TOKEN_INFO().readFrom(pInfo); long rv = jnaNative.C_GetTokenInfo(NL(slotID), jna_pInfo); jna_pInfo.writeTo(pInfo); return rv; } public long C_WaitForSlotEvent(long flags, LongRef pSlot, NativePointer pReserved) { NativeLongByReference jna_pSlot = NLP(pSlot.value); Pointer jna_pReserved = new Pointer(pReserved.getAddress()); long rv = jnaNative.C_WaitForSlotEvent(NL(flags), jna_pSlot, jna_pReserved); pSlot.value = jna_pSlot.getValue().longValue(); pReserved.setAddress(Pointer.nativeValue(jna_pReserved)); return rv; } public long C_GetMechanismList(long slotID, long[] pMechanismList, LongRef pulCount) { LongArray jna_pMechanismList = new LongArray(pMechanismList); NativeLongByReference jna_pulCount = NLP(pulCount.value); long rv = jnaNative.C_GetMechanismList(NL(slotID), jna_pMechanismList, jna_pulCount); jna_pMechanismList.update(); pulCount.value = jna_pulCount.getValue().longValue(); return rv; } public long C_GetMechanismInfo(long slotID, long type, CK_MECHANISM_INFO pInfo) { JNA_CK_MECHANISM_INFO jna_pInfo = new JNA_CK_MECHANISM_INFO().readFrom(pInfo); long rv = jnaNative.C_GetMechanismInfo(NL(slotID), NL(type), jna_pInfo); jna_pInfo.writeTo(pInfo); return rv; } public long C_InitToken(long slotID, byte[] pPin, long ulPinLen, byte[] pLabel32) { return jnaNative.C_InitToken(NL(slotID), pPin, NL(ulPinLen), pLabel32); } public long C_InitPIN(long hSession, byte[] pPin, long ulPinLen) { return jnaNative.C_InitPIN(NL(hSession), pPin, NL(ulPinLen)); } public long C_SetPIN(long hSession, byte[] pOldPin, long ulOldLen, byte[] pNewPin, long ulNewLen) { return jnaNative.C_SetPIN(NL(hSession), pOldPin, NL(ulOldLen), pNewPin, NL(ulNewLen)); } public long C_OpenSession(long slotID, long flags, NativePointer application, final CK_NOTIFY notify, LongRef phSession) { Pointer jna_application = new Pointer(application.getAddress()); final JNA_CK_NOTIFY jna_notify; if (notify == null) { jna_notify = null; } else { jna_notify = new JNA_CK_NOTIFY() { public NativeLong invoke(NativeLong hSession, NativeLong event, Pointer pApplication) { return NL(notify.invoke(hSession.longValue(), event.longValue(), new NativePointer(Pointer.nativeValue(pApplication)))); } }; } NativeLongByReference jna_phSession = NLP(phSession.value); long rv = jnaNative.C_OpenSession(NL(slotID), NL(flags), jna_application, jna_notify, jna_phSession); application.setAddress(Pointer.nativeValue(jna_application)); phSession.value = jna_phSession.getValue().longValue(); return rv; } public long C_CloseSession(long hSession) { return jnaNative.C_CloseSession(NL(hSession)); } public long C_CloseAllSessions(long slotID) { return jnaNative.C_CloseAllSessions(NL(slotID)); } public long C_GetSessionInfo(long hSession, CK_SESSION_INFO pInfo) { JNA_CK_SESSION_INFO jna_pInfo = new JNA_CK_SESSION_INFO().readFrom(pInfo); long rv = jnaNative.C_GetSessionInfo(NL(hSession), jna_pInfo); jna_pInfo.writeTo(pInfo); return rv; } public long C_GetOperationState(long hSession, byte[] pOperationState, LongRef pulOperationStateLen) { NativeLongByReference jna_pulOperationStateLen = 
NLP(pulOperationStateLen.value); long rv = jnaNative.C_GetOperationState(NL(hSession), pOperationState, jna_pulOperationStateLen); pulOperationStateLen.value = jna_pulOperationStateLen.getValue().longValue(); return rv; } public long C_SetOperationState(long hSession, byte[] pOperationState, long ulOperationStateLen, long hEncryptionKey, long hAuthenticationKey) { return jnaNative.C_SetOperationState(NL(hSession), pOperationState, NL(ulOperationStateLen), NL(hEncryptionKey), NL(hAuthenticationKey)); } public long C_Login(long hSession, long userType, byte[] pPin, long ulPinLen) { return jnaNative.C_Login(NL(hSession), NL(userType), pPin, NL(ulPinLen)); } public long C_Logout(long hSession) { return jnaNative.C_Logout(NL(hSession)); } public long C_CreateObject(long hSession, CKA[] pTemplate, long ulCount, LongRef phObject) { Template jna_pTemplate = new Template(pTemplate); NativeLongByReference jna_phObject = NLP(phObject.value); long rv = jnaNative.C_CreateObject(NL(hSession), jna_pTemplate, NL(ulCount), jna_phObject); jna_pTemplate.update(); phObject.value = jna_phObject.getValue().longValue(); return rv; } public long C_CopyObject(long hSession, long hObject, CKA[] pTemplate, long ulCount, LongRef phNewObject) { Template jna_pTemplate = new Template(pTemplate); NativeLongByReference jna_phNewObject = NLP(phNewObject.value); long rv = jnaNative.C_CopyObject(NL(hSession), NL(hObject), jna_pTemplate, NL(ulCount), jna_phNewObject); jna_pTemplate.update(); phNewObject.value = jna_phNewObject.getValue().longValue(); return rv; } public long C_DestroyObject(long hSession, long hObject) { return jnaNative.C_DestroyObject(NL(hSession), NL(hObject)); } public long C_GetObjectSize(long hSession, long hObject, LongRef pulSize) { NativeLongByReference jna_pulSize = NLP(pulSize.value); long rv = jnaNative.C_GetObjectSize(NL(hSession), NL(hObject), jna_pulSize); pulSize.value = jna_pulSize.getValue().longValue(); return rv; } public long C_GetAttributeValue(long hSession, long hObject, CKA[] pTemplate, long ulCount) { Template jna_pTemplate = new Template(pTemplate); long rv = jnaNative.C_GetAttributeValue(NL(hSession), NL(hObject), jna_pTemplate, NL(ulCount)); jna_pTemplate.update(); return rv; } public long C_SetAttributeValue(long hSession, long hObject, CKA[] pTemplate, long ulCount) { Template jna_pTemplate = new Template(pTemplate); long rv = jnaNative.C_SetAttributeValue(NL(hSession), NL(hObject), jna_pTemplate, NL(ulCount)); jna_pTemplate.update(); return rv; } public long C_FindObjectsInit(long hSession, CKA[] pTemplate, long ulCount) { Template jna_pTemplate = new Template(pTemplate); long rv = jnaNative.C_FindObjectsInit(NL(hSession), jna_pTemplate, NL(ulCount)); jna_pTemplate.update(); return rv; } public long C_FindObjects(long hSession, long[] phObject, long ulMaxObjectCount, LongRef pulObjectCount) { LongArray jna_phObject = new LongArray(phObject); NativeLongByReference jna_pulObjectCOunt = NLP(pulObjectCount.value); long rv = jnaNative.C_FindObjects(NL(hSession), jna_phObject, NL(ulMaxObjectCount), jna_pulObjectCOunt); jna_phObject.update(); pulObjectCount.value = jna_pulObjectCOunt.getValue().longValue(); return rv; } public long C_FindObjectsFinal(long hSession) { return jnaNative.C_FindObjectsFinal(NL(hSession)); } public long C_EncryptInit(long hSession, CKM pMechanism, long hKey) { JNA_CKM jna_pMechanism = new JNA_CKM().readFrom(pMechanism); return jnaNative.C_EncryptInit(NL(hSession), jna_pMechanism, NL(hKey)); } public long C_Encrypt(long hSession, byte[] pData, long 
ulDataLen, byte[] pEncryptedData, LongRef pulEncryptedDataLen) { NativeLongByReference jna_pulEncryptedDataLen = NLP(pulEncryptedDataLen.value); long rv = jnaNative.C_Encrypt(NL(hSession), pData, NL(ulDataLen), pEncryptedData, jna_pulEncryptedDataLen); pulEncryptedDataLen.value = jna_pulEncryptedDataLen.getValue().longValue(); return rv; } public long C_EncryptUpdate(long hSession, byte[] pPart, long ulPartLen, byte[] pEncryptedPart, LongRef pulEncryptedPartLen) { NativeLongByReference jna_pulEncryptedPartLen = NLP(pulEncryptedPartLen.value); long rv = jnaNative.C_EncryptUpdate(NL(hSession), pPart, NL(ulPartLen), pEncryptedPart, jna_pulEncryptedPartLen); pulEncryptedPartLen.value = jna_pulEncryptedPartLen.getValue().longValue(); return rv; } public long C_EncryptFinal(long hSession, byte[] pLastEncryptedPart, LongRef pulLastEncryptedPartLen) { NativeLongByReference jna_pulLastEncryptedPartLen = NLP(pulLastEncryptedPartLen.value); long rv = jnaNative.C_EncryptFinal(NL(hSession), pLastEncryptedPart, jna_pulLastEncryptedPartLen); pulLastEncryptedPartLen.value = jna_pulLastEncryptedPartLen.getValue().longValue(); return rv; } public long C_DecryptInit(long hSession, CKM pMechanism, long hKey) { JNA_CKM jna_pMechanism = new JNA_CKM().readFrom(pMechanism); return jnaNative.C_DecryptInit(NL(hSession), jna_pMechanism, NL(hKey)); } public long C_Decrypt(long hSession, byte[] pEncryptedData, long ulEncryptedDataLen, byte[] pData, LongRef pulDataLen) { NativeLongByReference jna_pulDataLen = NLP(pulDataLen.value); long rv = jnaNative.C_Decrypt(NL(hSession), pEncryptedData, NL(ulEncryptedDataLen), pData, jna_pulDataLen); pulDataLen.value= jna_pulDataLen.getValue().longValue(); return rv; } public long C_DecryptUpdate(long hSession, byte[] pEncryptedPart, long ulEncryptedPartLen, byte[] pData, LongRef pulDataLen) { NativeLongByReference jna_pulDataLen = NLP(pulDataLen.value); long rv = jnaNative.C_DecryptUpdate(NL(hSession), pEncryptedPart, NL(ulEncryptedPartLen), pData, jna_pulDataLen); pulDataLen.value = jna_pulDataLen.getValue().longValue(); return rv; } public long C_DecryptFinal(long hSession, byte[] pLastPart, LongRef pulLastPartLen) { NativeLongByReference jna_pulLastPartLen = NLP(pulLastPartLen.value); long rv = jnaNative.C_DecryptFinal(NL(hSession), pLastPart, jna_pulLastPartLen); pulLastPartLen.value = jna_pulLastPartLen.getValue().longValue(); return rv; } public long C_DigestInit(long hSession, CKM pMechanism) { JNA_CKM jna_pMechanism = new JNA_CKM().readFrom(pMechanism); return jnaNative.C_DigestInit(NL(hSession), jna_pMechanism); } public long C_Digest(long hSession, byte[] pData, long ulDataLen, byte[] pDigest, LongRef pulDigestLen) { NativeLongByReference jna_pulDigestLen = NLP(pulDigestLen.value); long rv = jnaNative.C_Digest(NL(hSession), pData, NL(ulDataLen), pDigest, jna_pulDigestLen); pulDigestLen.value = jna_pulDigestLen.getValue().longValue(); return rv; } public long C_DigestUpdate(long hSession, byte[] pPart, long ulPartLen) { return jnaNative.C_DigestUpdate(NL(hSession), pPart, NL(ulPartLen)); } public long C_DigestKey(long hSession, long hKey) { return jnaNative.C_DigestKey(NL(hSession), NL(hKey)); } public long C_DigestFinal(long hSession, byte[] pDigest, LongRef pulDigestLen) { NativeLongByReference jna_pulDigestLen = NLP(pulDigestLen.value); long rv = jnaNative.C_DigestFinal(NL(hSession), pDigest, jna_pulDigestLen); pulDigestLen.value = jna_pulDigestLen.getValue().longValue(); return rv; } public long C_SignInit(long hSession, CKM pMechanism, long hKey) { JNA_CKM 
jna_pMechanism = new JNA_CKM().readFrom(pMechanism); return jnaNative.C_SignInit(NL(hSession), jna_pMechanism, NL(hKey)); } public long C_Sign(long hSession, byte[] pData, long ulDataLen, byte[] pSignature, LongRef pulSignatureLen) { NativeLongByReference jna_pulSignatureLen = NLP(pulSignatureLen.value); long rv = jnaNative.C_Sign(NL(hSession), pData, NL(ulDataLen), pSignature, jna_pulSignatureLen); pulSignatureLen.value = jna_pulSignatureLen.getValue().longValue(); return rv; } public long C_SignUpdate(long hSession, byte[] pPart, long ulPartLen) { return jnaNative.C_SignUpdate(NL(hSession), pPart, NL(ulPartLen)); } public long C_SignFinal(long hSession, byte[] pSignature, LongRef pulSignatureLen) { NativeLongByReference jna_pulSignatureLen = NLP(pulSignatureLen.value); long rv = jnaNative.C_SignFinal(NL(hSession), pSignature, jna_pulSignatureLen); pulSignatureLen.value = jna_pulSignatureLen.getValue().longValue(); return rv; } public long C_SignRecoverInit(long hSession, CKM pMechanism, long hKey) { JNA_CKM jna_pMechanism = new JNA_CKM().readFrom(pMechanism); return jnaNative.C_SignRecoverInit(NL(hSession), jna_pMechanism, NL(hKey)); } public long C_SignRecover(long hSession, byte[] pData, long ulDataLen, byte[] pSignature, LongRef pulSignatureLen) { NativeLongByReference jna_pulSignatureLen = NLP(pulSignatureLen.value); long rv = jnaNative.C_SignRecover(NL(hSession), pData, NL(ulDataLen), pSignature, jna_pulSignatureLen); pulSignatureLen.value = jna_pulSignatureLen.getValue().longValue(); return rv; } public long C_VerifyInit(long hSession, CKM pMechanism, long hKey) { JNA_CKM jna_pMechanism = new JNA_CKM().readFrom(pMechanism); return jnaNative.C_VerifyInit(NL(hSession), jna_pMechanism, NL(hKey)); } public long C_Verify(long hSession, byte[] pData, long ulDataLen, byte[] pSignature, long ulSignatureLen) { return jnaNative.C_Verify(NL(hSession), pData, NL(ulDataLen), pSignature, NL(ulSignatureLen)); } public long C_VerifyUpdate(long hSession, byte[] pPart, long ulPartLen) { return jnaNative.C_VerifyUpdate(NL(hSession), pPart, NL(ulPartLen)); } public long C_VerifyFinal(long hSession, byte[] pSignature, long ulSignatureLen) { return jnaNative.C_VerifyFinal(NL(hSession), pSignature, NL(ulSignatureLen)); } public long C_VerifyRecoverInit(long hSession, CKM pMechanism, long hKey) { JNA_CKM jna_pMechanism = new JNA_CKM().readFrom(pMechanism); return jnaNative.C_VerifyRecoverInit(NL(hSession), jna_pMechanism, NL(hKey)); } public long C_VerifyRecover(long hSession, byte[] pSignature, long ulSignatureLen, byte[] pData, LongRef pulDataLen) { NativeLongByReference jna_pulDataLen = NLP(pulDataLen.value); long rv = jnaNative.C_VerifyRecover(NL(hSession), pSignature, NL(ulSignatureLen), pData, jna_pulDataLen); pulDataLen.value = jna_pulDataLen.getValue().longValue(); return rv; } public long C_DigestEncryptUpdate(long hSession, byte[] pPart, long ulPartLen, byte[] pEncryptedPart, LongRef pulEncryptedPartLen) { NativeLongByReference jna_pulEncryptedPartLen = NLP(pulEncryptedPartLen.value); long rv = jnaNative.C_DigestEncryptUpdate(NL(hSession), pPart, NL(ulPartLen), pEncryptedPart, jna_pulEncryptedPartLen); pulEncryptedPartLen.value = jna_pulEncryptedPartLen.getValue().longValue(); return rv; } public long C_DecryptDigestUpdate(long hSession, byte[] pEncryptedPart, long ulEncryptedPartLen, byte[] pPart, LongRef pulPartLen) { NativeLongByReference jna_pulPartLen = NLP(pulPartLen.value); long rv = jnaNative.C_DecryptDigestUpdate(NL(hSession), pEncryptedPart, NL(ulEncryptedPartLen), pPart, 
jna_pulPartLen); pulPartLen.value = jna_pulPartLen.getValue().longValue(); return rv; } public long C_SignEncryptUpdate(long hSession, byte[] pPart, long ulPartLen, byte[] pEncryptedPart, LongRef pulEncryptedPartLen) { NativeLongByReference jna_pulEncryptPartLen = NLP(pulEncryptedPartLen.value); long rv = jnaNative.C_SignEncryptUpdate(NL(hSession), pPart, NL(ulPartLen), pEncryptedPart, jna_pulEncryptPartLen); pulEncryptedPartLen.value = jna_pulEncryptPartLen.getValue().longValue(); return rv; } public long C_DecryptVerifyUpdate(long hSession, byte[] pEncryptedPart, long ulEncryptedPartLen, byte[] pPart, LongRef pulPartLen) { NativeLongByReference jna_pulPartLen = NLP(pulPartLen.value); long rv = jnaNative.C_DecryptVerifyUpdate(NL(hSession), pEncryptedPart, NL(ulEncryptedPartLen), pPart, jna_pulPartLen); pulPartLen.value = jna_pulPartLen.getValue().longValue(); return rv; } public long C_GenerateKey(long hSession, CKM pMechanism, CKA[] pTemplate, long ulCount, LongRef phKey) { JNA_CKM jna_pMechanism = new JNA_CKM().readFrom(pMechanism); Template jna_pTemplate = new Template(pTemplate); NativeLongByReference jna_phKey = NLP(phKey.value); long rv = jnaNative.C_GenerateKey(NL(hSession), jna_pMechanism, jna_pTemplate, NL(ulCount), jna_phKey); phKey.value = jna_phKey.getValue().longValue(); return rv; } public long C_GenerateKeyPair(long hSession, CKM pMechanism, CKA[] pPublicKeyTemplate, long ulPublicKeyAttributeCount, CKA[] pPrivateKeyTemplate, long ulPrivateKeyAttributeCount, LongRef phPublicKey, LongRef phPrivateKey) { JNA_CKM jna_pMechanism = new JNA_CKM().readFrom(pMechanism); Template jna_pPublicKeyTemplate = new Template(pPublicKeyTemplate); Template jna_pPrivateKeyTemplate = new Template(pPrivateKeyTemplate); NativeLongByReference jna_phPublicKey = NLP(phPublicKey.value); NativeLongByReference jna_phPrivateKey = NLP(phPrivateKey.value); long rv = jnaNative.C_GenerateKeyPair(NL(hSession), jna_pMechanism, jna_pPublicKeyTemplate, NL(ulPublicKeyAttributeCount), jna_pPrivateKeyTemplate, NL(ulPrivateKeyAttributeCount), jna_phPublicKey, jna_phPrivateKey); phPublicKey.value = jna_phPublicKey.getValue().longValue(); phPrivateKey.value = jna_phPrivateKey.getValue().longValue(); return rv; } public long C_WrapKey(long hSession, CKM pMechanism, long hWrappingKey, long hKey, byte[] pWrappedKey, LongRef pulWrappedKeyLen) { JNA_CKM jna_pMechanism = new JNA_CKM().readFrom(pMechanism); NativeLongByReference jna_pulWrappedKeyLen = NLP(pulWrappedKeyLen.value); long rv = jnaNative.C_WrapKey(NL(hSession), jna_pMechanism, NL(hWrappingKey), NL(hKey), pWrappedKey, jna_pulWrappedKeyLen); pulWrappedKeyLen.value = jna_pulWrappedKeyLen.getValue().longValue(); return rv; } public long C_UnwrapKey(long hSession, CKM pMechanism, long hUnwrappingKey, byte[] pWrappedKey, long ulWrappedKeyLen, CKA[] pTemplate, long ulAttributeCount, LongRef phKey) { JNA_CKM jna_pMechanism = new JNA_CKM().readFrom(pMechanism); Template jna_pTemplate = new Template(pTemplate); NativeLongByReference jna_phKey = NLP(phKey.value); long rv = jnaNative.C_UnwrapKey(NL(hSession), jna_pMechanism, NL(hUnwrappingKey), pWrappedKey, NL(ulWrappedKeyLen), jna_pTemplate, NL(ulAttributeCount), jna_phKey); phKey.value = jna_phKey.getValue().longValue(); return rv; } public long C_DeriveKey(long hSession, CKM pMechanism, long hBaseKey, CKA[] pTemplate, long ulAttributeCount, LongRef phKey) { JNA_CKM jna_pMechanism = new JNA_CKM().readFrom(pMechanism); Template jna_pTemplate = new Template(pTemplate); NativeLongByReference jna_phKey = NLP(phKey.value); long 
rv = jnaNative.C_DeriveKey(NL(hSession), jna_pMechanism, NL(hBaseKey), jna_pTemplate, NL(ulAttributeCount), jna_phKey); phKey.value = jna_phKey.getValue().longValue(); return rv; } public long C_SeedRandom(long hSession, byte[] pSeed, long ulSeedLen) { return jnaNative.C_SeedRandom(NL(hSession), pSeed, NL(ulSeedLen)); } public long C_GenerateRandom(long hSession, byte[] pRandomData, long ulRandomLen) { return jnaNative.C_GenerateRandom(NL(hSession), pRandomData, NL(ulRandomLen)); } public long C_GetFunctionStatus(long hSession) { return jnaNative.C_GetFunctionStatus(NL(hSession)); } public long C_CancelFunction(long hSession) { return jnaNative.C_CancelFunction(NL(hSession)); } private static NativeLong NL(long l) { return new NativeLong(l); } private static NativeLongByReference NLP(long l) { return new NativeLongByReference(new NativeLong(l)); } }
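/*
 * Added usage sketch (not part of the original sources): loads a PKCS#11 module through the JNA
 * provider above and initializes/finalizes the library. The library name is a placeholder, and the
 * no-argument CK_C_INITIALIZE_ARGS constructor (leaving the mutex callbacks null) and the
 * NativePointer(long) constructor are assumptions about the jacknji11 struct classes.
 */
import org.pkcs11.jacknji11.CK_C_INITIALIZE_ARGS;
import org.pkcs11.jacknji11.NativePointer;
import org.pkcs11.jacknji11.NativeProvider;
import org.pkcs11.jacknji11.jna.JNA;

class JnaProviderUsageSketch {

    static void initializeAndFinalize() {
        // Pass a library name or path; the no-arg constructor loads the default "cryptoki" library.
        NativeProvider provider = new JNA("softhsm2"); // placeholder module name

        // Null mutex callbacks make the wrapper pass a null args pointer to the native C_Initialize.
        long rv = provider.C_Initialize(new CK_C_INITIALIZE_ARGS()); // assumed no-arg constructor
        System.out.println("C_Initialize rv=" + rv);                 // rv is the CKR_* return code

        provider.C_Finalize(new NativePointer(0)); // assumed NativePointer(long address) constructor
    }
}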
package org.rakam; import com.google.auto.service.AutoService; import com.google.common.base.Splitter; import com.google.inject.Binder; import io.airlift.configuration.Config; import io.sentry.Sentry; import io.sentry.SentryClient; import io.sentry.jul.SentryHandler; import org.rakam.plugin.RakamModule; import org.rakam.util.RakamClient; import java.util.Arrays; import java.util.logging.Level; import java.util.logging.LogManager; import java.util.logging.Logger; @AutoService(RakamModule.class) public class LogModule extends RakamModule { private static final String SENTRY_DSN = "https://76daa36329be422ab9b592ab7239c2aa@sentry.io/1290994"; @Override protected void setup(Binder binder) { LogManager manager = LogManager.getLogManager(); LogConfig logConfig = buildConfigObject(LogConfig.class); if (logConfig.getLogActive()) { if (!Arrays.stream(manager.getLogger("").getHandlers()) .anyMatch(e -> e instanceof SentryHandler)) { Logger rootLogger = manager.getLogger(""); SentryClient client = Sentry.init(SENTRY_DSN); if (logConfig.getTags() != null) { for (String item : Splitter.on(',').split(logConfig.getTags())) { String[] split = item.split("=", 2); client.addTag(split[0], split.length > 1 ? split[1] : "true"); } } client.setRelease(RakamClient.RELEASE); SentryHandler sentryHandler = new SentryHandler(); sentryHandler.setLevel(Level.SEVERE); rootLogger.addHandler(sentryHandler); } } } @Override public String name() { return null; } @Override public String description() { return null; } public static class LogConfig { private boolean logActive = true; private String tags; public boolean getLogActive() { return logActive; } @Config("log-active") public LogConfig setLogActive(boolean logActive) { this.logActive = logActive; return this; } public String getTags() { return tags; } @Config("log-identifier") public LogConfig setTags(String tags) { this.tags = tags; return this; } } }
package org.pujun.correl; import org.apache.commons.math3.stat.correlation.PearsonsCorrelation; import org.pujun.interp.InterpPm; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.*; import static java.lang.Math.abs; public class Correlation { private double pearsonsResult; private double[] lagResult = {0,0}; /** * Input two time series and get the Pearson correlation value in return * * @param x * @param y * @return Pearson correlation coefficient */ public double getPearsonsResult(double[] x, double[] y){ PearsonsCorrelation pearsonsCorrelation = new PearsonsCorrelation(); pearsonsResult = pearsonsCorrelation.correlation(x,y); return pearsonsResult; } /** * Input two time series then get the best lag and its correlation value in return * * @param x * @param y * @return lagResult[0]: best lag, lagResult[1]: correlation value at that lag */ public double[] getLagResult(double[] x, double[] y){ /* shift x against y by i samples and keep the lag with the strongest absolute correlation */ double thisLagResult = 0; for (int i = 0; i < x.length/2; i++) { double[] xShifted = Arrays.copyOfRange(x, i, x.length); double[] yTrimmed = Arrays.copyOfRange(y, 0, y.length - i); PearsonsCorrelation pearsonsCorrelation = new PearsonsCorrelation(); thisLagResult = pearsonsCorrelation.correlation(xShifted, yTrimmed); if (abs(thisLagResult) > abs(lagResult[1])){ lagResult[0] = i; lagResult[1] = thisLagResult; } } return lagResult; } public double[] getInterpPM25TimeSeries(double lat, double lon, String startTime, String endTime) throws ParseException { Date startDate, endDate; SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); df.setCalendar(new GregorianCalendar(new SimpleTimeZone(0, "GMT"))); startDate = df.parse(startTime); endDate = df.parse(endTime); ArrayList<Double> interpPm25ResultList = new ArrayList<Double>(); InterpPm interpPm = new InterpPm(df.format(startDate)); Calendar cal = Calendar.getInstance(); cal.setTime(startDate); while(startDate.before(endDate)) { interpPm.date = startDate; double n = interpPm.pm25(lat, lon); interpPm25ResultList.add(n); cal.add(Calendar.HOUR, 1); startDate = cal.getTime(); } double[] interpPm25Result = new double[interpPm25ResultList.size()]; for (int i = 0; i < interpPm25ResultList.size(); i++) { interpPm25Result[i] = interpPm25ResultList.get(i); } return interpPm25Result; } public static void main(String[] args) throws ParseException { double[] x,y; Correlation correlation = new Correlation(); x = correlation.getInterpPM25TimeSeries(40.0239, 116.2202, "2015-02-08 00:00:00", "2015-02-11 09:00:00"); y = correlation.getInterpPM25TimeSeries(38.1006, 114.4995, "2015-02-08 00:00:00", "2015-02-11 09:00:00"); double[] lag = correlation.getLagResult(y, x); System.out.println(lag[0]); System.out.println(lag[1]); } }
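/*
 * Added usage sketch (not part of the original sources): feeds two small, made-up time series into
 * the Correlation helpers above, avoiding the InterpPm-backed data access used in main(). The
 * sample values are placeholders; getLagResult shifts x against y and reports the lag with the
 * strongest absolute correlation.
 */
import org.pujun.correl.Correlation;

class CorrelationUsageSketch {

    public static void main(String[] args) {
        double[] x = {1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0};
        double[] y = {0.9, 2.1, 2.8, 4.2, 5.1, 5.9, 7.2, 8.1};

        Correlation correlation = new Correlation();
        System.out.println("pearson = " + correlation.getPearsonsResult(x, y));

        double[] lag = correlation.getLagResult(x, y);
        System.out.println("best lag = " + (int) lag[0] + ", correlation = " + lag[1]);
    }
}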
package ae3.service.experiment; import ae3.dao.GeneSolrDAO; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import uk.ac.ebi.gxa.data.*; import uk.ac.ebi.gxa.utils.Pair; import uk.ac.ebi.microarray.atlas.model.UpDownCondition; import uk.ac.ebi.microarray.atlas.model.UpDownExpression; import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Set; /** * This class is used to populate the best genes table on the experiment page * * @author Robert Petryszak */ public class AtlasExperimentAnalyticsViewService { private static final Logger log = LoggerFactory.getLogger(AtlasExperimentAnalyticsViewService.class); private GeneSolrDAO geneSolrDAO; public void setGeneSolrDAO(GeneSolrDAO geneSolrDAO) { this.geneSolrDAO = geneSolrDAO; } /** * Returns the list of top design elements found in the experiment. The search is based on the pre-calculated * statistic values (e.g. T-value, P-value): the better the statistics, the higher the element in the list. * <p/> * A note regarding geneIds, factors and factorValues parameters: * - If all parameters are empty the search is done for all data (design elements, ef and efv pairs); * - Filling any parameter narrows one of the search dimensions. * * @param geneIds a list of gene ids of interest * @param factorValues a list of Pairs of factor-factor value to find best statistics for * Note also that ee don't currently allow search for best design elements by either just an ef * or just an efv - both need to be specified * @param upDownCondition an up/down expression filter * @param offset Start position within the result set * @param limit how many design elements to return * @return an instance of {@link ae3.service.experiment.BestDesignElementsResult} * @throws uk.ac.ebi.gxa.data.AtlasDataException, * StatisticsNotFoundException if data could not be ready from ncdf */ public BestDesignElementsResult findBestGenesForExperiment( final @Nonnull ExperimentPart expPart, final @Nonnull List<Long> geneIds, final @Nonnull Set<Pair<String, String>> factorValues, final @Nonnull UpDownCondition upDownCondition, final int offset, final int limit) throws AtlasDataException, StatisticsNotFoundException { final BestDesignElementsResult result = new BestDesignElementsResult(); result.setArrayDesignAccession(expPart.getArrayDesign().getAccession()); long startTime = System.currentTimeMillis(); // Set bounds of the window through the matching design elements int deCount = 1; int from = Math.max(1, offset); int to = offset + limit - 1; // Retrieved data from ncdf long startTime1 = System.currentTimeMillis(); List<Long> allGeneIds = new ArrayList<Long>(expPart.getGeneIds()); final List<KeyValuePair> uEFVs = expPart.getUniqueEFVs(); final TwoDFloatArray pvals = expPart.getPValues(); final TwoDFloatArray tstat = expPart.getTStatistics(); String[] designElementAccessions = expPart.getDesignElementAccessions(); log.debug("Retrieved data from ncdf in: " + (System.currentTimeMillis() - startTime1) + " ms"); boolean factorValuesSpecified = !factorValues.isEmpty(); boolean genesSpecified = !geneIds.isEmpty(); // Retrieve qualifying BestDesignElementCandidate's startTime1 = System.currentTimeMillis(); List<BestDesignElementCandidate> candidates = new ArrayList<BestDesignElementCandidate>(); for (int i = 0; i < pvals.getRowCount(); i++) { BestDesignElementCandidate bestSoFar = null; if (!designElementQualifies(genesSpecified, allGeneIds, geneIds, i)) continue; for (int j = 0; j < uEFVs.size(); j++) { if 
(!efvQualifies(factorValuesSpecified, uEFVs.get(j), factorValues)) continue; final UpDownExpression expression = UpDownExpression.valueOf(pvals.get(i, j), tstat.get(i, j)); if (upDownCondition.apply(expression)) { BestDesignElementCandidate current = new BestDesignElementCandidate(pvals.get(i, j), tstat.get(i, j), i, j); if (bestSoFar == null || current.compareTo(bestSoFar) < 0) bestSoFar = current; } } if (bestSoFar != null) candidates.add(bestSoFar); } log.debug("Loaded " + candidates.size() + " candidates in: " + (System.currentTimeMillis() - startTime1) + " ms"); // Sort BestDesignElementCandidate's by pVal/tStat startTime1 = System.currentTimeMillis(); Collections.sort(candidates); log.debug("Sorted DE candidates in: " + (System.currentTimeMillis() - startTime1) + " ms"); startTime1 = System.currentTimeMillis(); for (BestDesignElementCandidate candidate : candidates.subList(from, to)) { final int deIndex = candidate.getDEIndex(); final int uEfvIndex = candidate.getUEFVIndex(); final KeyValuePair efv = uEFVs.get(uEfvIndex); result.add( geneSolrDAO.getGeneById(allGeneIds.get(deIndex)).getGene(), deIndex, designElementAccessions[deIndex], pvals.get(deIndex, uEfvIndex), tstat.get(deIndex, uEfvIndex), efv.key, efv.value); } log.debug("Assembled BestDesignElementsResult in: " + (System.currentTimeMillis() - startTime1) + " ms"); result.setTotalSize(candidates.size()); log.info("Finished findBestGenesForExperiment in: " + (System.currentTimeMillis() - startTime) + " ms"); return result; } private boolean efvQualifies( boolean factorValuesSpecified, final @Nonnull KeyValuePair efv, final @Nonnull Set<Pair<String, String>> factorValues) { return !factorValuesSpecified || factorValues.contains(Pair.create(efv.key, efv.value)) || factorValues.contains(Pair.create(efv.key, null)); // allow search by factor only } private boolean designElementQualifies( boolean genesSpecified, final List<Long> allGeneIds, final List<Long> geneIds, int deIndex) { return allGeneIds.get(deIndex) > 0 && (!genesSpecified || geneIds.contains(allGeneIds.get(deIndex))); } }
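/*
 * An illustrative, self-contained sketch of the selection pattern used in
 * findBestGenesForExperiment() above: keep only the best (lowest p-value) candidate
 * per row, sort the per-row winners globally, then return one page of results.
 * The Candidate class, the plain double[][] input and the paging arithmetic are
 * simplified stand-ins for BestDesignElementCandidate and the Atlas data classes,
 * not the Atlas API itself.
 */
package ae3.service.experiment;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class BestPerRowSketch {

    // Simplified stand-in for BestDesignElementCandidate: a lower p-value ranks higher
    static class Candidate implements Comparable<Candidate> {
        final double pValue;
        final int row;
        final int column;

        Candidate(double pValue, int row, int column) {
            this.pValue = pValue;
            this.row = row;
            this.column = column;
        }

        @Override
        public int compareTo(Candidate other) {
            return Double.compare(pValue, other.pValue);
        }

        @Override
        public String toString() {
            return "row=" + row + " col=" + column + " p=" + pValue;
        }
    }

    // Keep the best candidate of each row, sort them, return the [offset, offset + limit) window
    static List<Candidate> bestPerRow(double[][] pValues, int offset, int limit) {
        List<Candidate> winners = new ArrayList<Candidate>();
        for (int i = 0; i < pValues.length; i++) {
            Candidate best = null;
            for (int j = 0; j < pValues[i].length; j++) {
                Candidate current = new Candidate(pValues[i][j], i, j);
                if (best == null || current.compareTo(best) < 0) {
                    best = current;
                }
            }
            if (best != null) {
                winners.add(best);
            }
        }
        Collections.sort(winners);
        int from = Math.min(offset, winners.size());
        int to = Math.min(offset + limit, winners.size());
        return winners.subList(from, to);
    }

    public static void main(String[] args) {
        double[][] pValues = {
                {0.20, 0.01, 0.90},
                {0.05, 0.50, 0.30},
                {0.70, 0.60, 0.02},
        };
        for (Candidate c : bestPerRow(pValues, 0, 2)) {
            System.out.println(c);
        }
    }
}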
package org.eclipse.birt.report.designer.ui.cubebuilder.page; import org.eclipse.birt.report.designer.data.ui.property.AbstractTitlePropertyDialog; import org.eclipse.birt.report.designer.data.ui.property.PropertyNode; import org.eclipse.birt.report.designer.internal.ui.util.IHelpContextIds; import org.eclipse.birt.report.designer.internal.ui.util.UIUtil; import org.eclipse.birt.report.designer.ui.cubebuilder.nls.Messages; import org.eclipse.birt.report.model.api.DesignElementHandle; import org.eclipse.birt.report.model.api.activity.NotificationEvent; import org.eclipse.birt.report.model.api.olap.CubeHandle; import org.eclipse.birt.report.model.api.olap.TabularCubeHandle; import org.eclipse.jface.preference.IPreferencePageContainer; import org.eclipse.jface.preference.IPreferenceStore; import org.eclipse.swt.graphics.Point; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Control; import org.eclipse.swt.widgets.Shell; public class CubeBuilder extends AbstractTitlePropertyDialog implements IPreferencePageContainer { // public static final String MEASURESPAGE = // "org.eclipse.birt.datasource.editor.cubebuilder.measurespage"; public static final String GROUPPAGE = "org.eclipse.birt.datasource.editor.cubebuilder.grouppage"; public static final String DATASETSELECTIONPAGE = "org.eclipse.birt.datasource.editor.cubebuilder.datasetselectionpage"; public static final String LINKGROUPSPAGE = "org.eclipse.birt.datasource.editor.cubebuilder.linkgroupspage"; public CubeBuilder( Shell parentShell, TabularCubeHandle input ) { super( parentShell, input ); addCommonPage( input ); } private DatasetSelectionPage datasetPage = null; private GroupsPage groupsPage = null; private LinkGroupsPage linkGroupsPage = null; private void addCommonPage( TabularCubeHandle model ) { datasetNode = new PropertyNode( DATASETSELECTIONPAGE, Messages.getString( "DatasetPage.Title" ), null, datasetPage = new DatasetSelectionPage( this, model ) ); groupsNode = new PropertyNode( GROUPPAGE, Messages.getString( "GroupsPage.Title" ), null, groupsPage = new GroupsPage( this, model ) ); linkGroupNode = new PropertyNode( LINKGROUPSPAGE, Messages.getString( "LinkGroupsPage.Title" ), null, linkGroupsPage = new LinkGroupsPage( this, model ) ); addNodeTo( "/", datasetNode ); addNodeTo( "/", groupsNode ); addNodeTo( "/", linkGroupNode ); } private String showNodeId; public void showPage( String nodeId ) { this.showNodeId = nodeId; } public boolean performCancel( ) { return true; } public boolean performOk( ) { return true; } protected Control createContents( Composite parent ) { String title = Messages.getString( "CubeBuilder.Title" ); getShell( ).setText( title ); if ( showNodeId != null ) { setDefaultNode( showNodeId ); } Control control = super.createContents( parent ); return control; } private boolean okEnable = true; private PropertyNode datasetNode; private PropertyNode groupsNode; private PropertyNode linkGroupNode; public void setOKEnable( boolean okEnable ) { this.okEnable = okEnable; if ( getOkButton( ) != null ) getOkButton( ).setEnabled( this.okEnable ); } protected void createButtonsForButtonBar( Composite parent ) { super.createButtonsForButtonBar( parent ); getOkButton( ).setEnabled( this.okEnable ); } public void elementChanged( DesignElementHandle focus, NotificationEvent ev ) { if ( getOkButton( ) != null ) { if ( ( (CubeHandle) getModel( ) ).getName( ) != null && !( (CubeHandle) getModel( ) ).getName( ) .trim( ) .equals( "" ) ) { getOkButton( ).setEnabled( true ); } else getOkButton( ).setEnabled( 
false ); } } public IPreferenceStore getPreferenceStore( ) { // TODO Auto-generated method stub return null; } public void updateButtons( ) { // TODO Auto-generated method stub } public void updateMessage( ) { // TODO Auto-generated method stub } public void updateTitle( ) { // TODO Auto-generated method stub } protected Point getDefaultSize( ) { return new Point( 820, 600 ); } public PropertyNode getLinkGroupNode( ) { return linkGroupNode; } public PropertyNode getDatasetNode( ) { return datasetNode; } public PropertyNode getGroupsNode( ) { return groupsNode; } }
package be.ugent.oomo.groep12.studgent.view; import android.content.Context; import android.graphics.Canvas; import android.graphics.Color; import android.graphics.Paint; import android.view.View; import be.ugent.oomo.groep12.studgent.common.IPointOfInterest; public class POIView extends View implements Comparable<POIView>{ private IPointOfInterest poi; private Paint cpaint; private Paint tpaint; private int screenHeight; private int circleRadius = 35; private int minWidth; public POIView(Context context) { super(context); init(context); } public POIView(Context context, IPointOfInterest poi) { super(context); init(context); this.poi = poi; } /* * Initializes all the needed variables used for proper drawing of the View * * @param context The current application context */ private void init(Context context) { // Paint object for POI marker cpaint = new Paint(); cpaint.setAntiAlias(true); cpaint.setColor(Color.GRAY); cpaint.setStrokeWidth(7); cpaint.setStyle(Paint.Style.STROKE); // Paint object for POI text tpaint = new Paint(); tpaint.setAntiAlias(true); tpaint.setColor(Color.GRAY); tpaint.setTextSize(18.0f); // Get the device's screen height screenHeight = context.getResources().getDisplayMetrics().heightPixels; // Make invisible until we're on screen this.setVisibility(View.INVISIBLE); } /* * The onDraw method gets called when this View gets drawn by its parent View. * Here the POI marker and text gets drawn. * * @param canvas The Canvas object on which this View will be drawn. * * (non-Javadoc) * @see android.view.View#onDraw(android.graphics.Canvas) */ @Override public void onDraw(Canvas canvas) { int circleWidth = (int) (circleRadius*2 + cpaint.getStrokeWidth()*2); int textWidth = (int) (tpaint.measureText(poi.getName())); int yPos = (int) ((screenHeight /2) - ((tpaint.descent() + tpaint.ascent()) / 2)) + 48; if (circleWidth > textWidth) { // Circle is biggest int xPos = (circleWidth /2) - (textWidth / 2); canvas.drawCircle((circleWidth / 2), (screenHeight / 2), circleRadius, cpaint); canvas.drawText(poi.getName(), xPos, yPos, tpaint); } else { // Text is biggest int xPos = (textWidth / 2); canvas.drawCircle(xPos, (screenHeight / 2), circleRadius, cpaint); canvas.drawText(poi.getName(), 0, yPos, tpaint); } } /* * The onMeasure method reports the width and height of this View to the parent * for layout drawing * * @param widthMeasureSpec The requested width for this View, encoded as MeasureSpec * @param heightMeasureSpec The requested height for this View, encoded as MeasureSpec * * (non-Javadoc) * @see android.view.View#onMeasure(int, int) */ @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { minWidth = 0; int minHeight = 0; int textWidth = (int) tpaint.measureText(poi.getName()); int iconWidth = (int) (circleRadius*2 + cpaint.getStrokeWidth()*2); minWidth = Math.max(textWidth, iconWidth); minHeight = screenHeight; setMeasuredDimension(MeasureSpec.makeMeasureSpec(minWidth, MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(minHeight, MeasureSpec.EXACTLY)); } public IPointOfInterest getPoi() { return poi; } public void setPoi(IPointOfInterest poi) { this.poi = poi; } public int getMinWidth() { return minWidth; } /* * Custom compareTo method to make POIViews sortable * * (non-Javadoc) * @see java.lang.Comparable#compareTo(java.lang.Object) */ @Override public int compareTo(POIView another) { // TODO Auto-generated method stub return ((Double)poi.getDistance()).compareTo(another.getPoi().getDistance()); } }
package org.spigotmc.builder; import com.google.common.base.Charsets; import com.google.common.base.Predicate; import com.google.common.base.Throwables; import com.google.common.collect.Iterables; import com.google.common.hash.Hasher; import com.google.common.hash.Hashing; import com.google.common.io.ByteStreams; import com.google.common.io.Files; import com.google.common.io.Resources; import difflib.DiffUtils; import difflib.Patch; import java.io.BufferedOutputStream; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.File; import java.io.FileDescriptor; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.FileWriter; import java.io.FilenameFilter; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.PrintStream; import java.net.URL; import java.security.KeyManagementException; import java.security.NoSuchAlgorithmException; import java.security.SecureRandom; import java.security.cert.X509Certificate; import java.util.Arrays; import java.util.Date; import java.util.Enumeration; import java.util.List; import java.util.zip.ZipEntry; import java.util.zip.ZipFile; import javax.net.ssl.HostnameVerifier; import javax.net.ssl.HttpsURLConnection; import javax.net.ssl.SSLContext; import javax.net.ssl.SSLSession; import javax.net.ssl.TrustManager; import javax.net.ssl.X509TrustManager; import lombok.RequiredArgsConstructor; import org.apache.commons.io.FileUtils; import org.apache.commons.io.output.TeeOutputStream; import org.eclipse.jgit.api.Git; import org.eclipse.jgit.api.ResetCommand; import org.eclipse.jgit.api.errors.GitAPIException; import org.eclipse.jgit.revwalk.RevCommit; public class Builder { public static final String LOG_FILE = "BuildTools.log.txt"; public static final boolean IS_WINDOWS = System.getProperty( "os.name" ).startsWith( "Windows" ); public static final boolean IS_MAC = System.getProperty( "os.name" ).startsWith( "Mac" ); public static final File CWD = new File( "." ); public static final String MC_VERSION = "1.8"; private static boolean dontUpdate; private static boolean skipCompile; public static void main(String[] args) throws Exception { for ( String s : args ) { if ( "--disable-certificate-check".equals( s ) ) { disableHttpsCertificateCheck(); } if ( "--dont-update".equals( s ) ) { dontUpdate = true; } if ( "--skip-compile".equals( s ) ) { skipCompile = true; } } logOutput(); if ( IS_MAC && !Boolean.getBoolean( "mac.supported" ) ) { System.out.println( "Sorry, but Macintosh is not currently a supported platform for compilation at this time." ); System.out.println( "If you feel like testing Macintosh support please run this script with the -Dmac.supported=true option." ); System.out.println( "Else please run this script on a Windows or Linux PC and then copy the jars to this computer." ); System.exit( 1 ); } try { runProcess( CWD, "bash", "-c", "exit" ); } catch ( Exception ex ) { System.out.println( "You must run this jar through bash (msysgit)" ); System.exit( 1 ); } try { runProcess( CWD, "git", "config", "--global", "user.name" ); } catch ( Exception ex ) { System.out.println( "Git name not set, setting it to default value." ); runProcess( CWD, "git", "config", "--global", "user.name", "BuildTools" ); } try { runProcess( CWD, "git", "config", "--global", "user.email" ); } catch ( Exception ex ) { System.out.println( "Git email not set, setting it to default value."
); runProcess( CWD, "git", "config", "--global", "user.email", "unconfigured@null.spigotmc.org" ); } File workDir = new File( "work" ); workDir.mkdir(); File bukkit = new File( "Bukkit" ); if ( !bukkit.exists() ) { clone( "https://hub.spigotmc.org/stash/scm/spigot/bukkit.git", bukkit ); } File craftBukkit = new File( "CraftBukkit" ); if ( !craftBukkit.exists() ) { clone( "https://hub.spigotmc.org/stash/scm/spigot/craftbukkit.git", craftBukkit ); } File spigot = new File( "Spigot" ); if ( !spigot.exists() ) { clone( "https://hub.spigotmc.org/stash/scm/spigot/spigot.git", spigot ); } File buildData = new File( "BuildData" ); if ( !buildData.exists() ) { clone( "https://hub.spigotmc.org/stash/scm/spigot/builddata.git", buildData ); } File maven = new File( "apache-maven-3.2.3" ); if ( !maven.exists() ) { System.out.println( "Maven does not exist, downloading. Please wait." ); File mvnTemp = new File( "mvn.zip" ); mvnTemp.deleteOnExit(); download( "http://static.spigotmc.org/maven/apache-maven-3.2.3-bin.zip", mvnTemp ); unzip( mvnTemp, new File( "." ) ); } String mvn = maven.getAbsolutePath() + "/bin/mvn"; Git bukkitGit = Git.open( bukkit ); Git craftBukkitGit = Git.open( craftBukkit ); Git spigotGit = Git.open( spigot ); Git buildGit = Git.open( buildData ); if ( !dontUpdate ) { pull( bukkitGit ); pull( craftBukkitGit ); pull( spigotGit ); pull( buildGit ); } File vanillaJar = new File( workDir, "minecraft_server." + MC_VERSION + ".jar" ); if ( !vanillaJar.exists() ) { download( String.format( "https://s3.amazonaws.com/Minecraft.Download/versions/%1$s/minecraft_server.%1$s.jar", MC_VERSION ), vanillaJar ); } Iterable<RevCommit> mappings = buildGit.log() .addPath( "mappings/bukkit-1.8.at" ) .addPath( "mappings/bukkit-1.8-cl.csrg" ) .addPath( "mappings/bukkit-1.8-members.csrg" ) .addPath( "mappings/package.srg" ) .setMaxCount( 1 ).call(); Hasher mappingsHash = Hashing.md5().newHasher(); for ( RevCommit rev : mappings ) { mappingsHash.putString( rev.getName(), Charsets.UTF_8 ); } String mappingsVersion = mappingsHash.hash().toString().substring( 24 ); // Last 8 chars File finalMappedJar = new File( workDir, "mapped." + mappingsVersion + ".jar" ); if ( !finalMappedJar.exists() ) { System.out.println( "Final mapped jar: " + finalMappedJar + " does not exist, creating!" 
); File clMappedJar = new File( finalMappedJar + "-cl" ); File mMappedJar = new File( finalMappedJar + "-m" ); runProcess( CWD, "java", "-jar", "BuildData/bin/SpecialSource.jar", "-i", vanillaJar.getPath(), "-m", "BuildData/mappings/bukkit-1.8-cl.csrg", "-o", clMappedJar.getPath() ); runProcess( CWD, "java", "-jar", "BuildData/bin/SpecialSource-2.jar", "map", "-i", clMappedJar.getPath(), "-m", "BuildData/mappings/bukkit-1.8-members.csrg", "-o", mMappedJar.getPath() ); runProcess( CWD, "java", "-jar", "BuildData/bin/SpecialSource.jar", "-i", mMappedJar.getPath(), "--access-transformer", "BuildData/mappings/bukkit-1.8.at", "-m", "BuildData/mappings/package.srg", "-o", finalMappedJar.getPath() ); } runProcess( CWD, "sh", mvn, "install:install-file", "-Dfile=" + finalMappedJar, "-Dpackaging=jar", "-DgroupId=org.spigotmc", "-DartifactId=minecraft-server", "-Dversion=1.8-SNAPSHOT" ); File decompileDir = new File( workDir, "decompile-" + mappingsVersion ); if ( !decompileDir.exists() ) { decompileDir.mkdir(); File clazzDir = new File( decompileDir, "classes" ); unzip( finalMappedJar, clazzDir, new Predicate<String>() { @Override public boolean apply(String input) { return input.startsWith( "net/minecraft/server" ); } } ); runProcess( CWD, "java", "-jar", "BuildData/bin/fernflower.jar", "-dgs=1", "-hdc=0", "-rbr=0", "-asc=1", clazzDir.getPath(), decompileDir.getPath() ); } System.out.println( "Applying CraftBukkit Patches" ); File nmsDir = new File( craftBukkit, "src/main/java/net" ); if ( nmsDir.exists() ) { System.out.println( "Backing up NMS dir" ); FileUtils.moveDirectory( nmsDir, new File( workDir, "nms.old." + System.currentTimeMillis() ) ); } File patchDir = new File( craftBukkit, "nms-patches" ); for ( File file : patchDir.listFiles() ) { String targetFile = "net/minecraft/server/" + file.getName().replaceAll( ".patch", ".java" ); File clean = new File( decompileDir, targetFile ); File t = new File( nmsDir.getParentFile(), targetFile ); t.getParentFile().mkdirs(); System.out.println( "Patching with " + file.getName() ); List<String> readFile = Files.readLines( file, Charsets.UTF_8 ); // Manually append prelude if it is not found in the first few lines. 
boolean preludeFound = false; for ( int i = 0; i < Math.min( 3, readFile.size() ); i++ ) { if ( readFile.get( i ).startsWith( "+++" ) ) { preludeFound = true; break; } } if ( !preludeFound ) { readFile.add( 0, "+++" ); } Patch parsedPatch = DiffUtils.parseUnifiedDiff( readFile ); List<?> modifiedLines = DiffUtils.patch( Files.readLines( clean, Charsets.UTF_8 ), parsedPatch ); BufferedWriter bw = new BufferedWriter( new FileWriter( t ) ); for ( String line : (List<String>) modifiedLines ) { bw.write( line ); bw.newLine(); } bw.close(); } File tmpNms = new File( craftBukkit, "tmp-nms" ); FileUtils.copyDirectory( nmsDir, tmpNms ); craftBukkitGit.branchDelete().setBranchNames( "patched" ).setForce( true ).call(); craftBukkitGit.checkout().setCreateBranch( true ).setForce( true ).setName( "patched" ).call(); craftBukkitGit.add().addFilepattern( "src/main/java/net/" ).call(); craftBukkitGit.commit().setMessage( "CraftBukkit $ " + new Date() ).call(); craftBukkitGit.checkout().setName( "master" ).call(); FileUtils.moveDirectory( tmpNms, nmsDir ); File spigotApi = new File( spigot, "Bukkit" ); if ( !spigotApi.exists() ) { clone( "file://" + bukkit.getAbsolutePath(), spigotApi ); } File spigotServer = new File( spigot, "CraftBukkit" ); if ( !spigotServer.exists() ) { clone( "file://" + craftBukkit.getAbsolutePath(), spigotServer ); } // Git spigotApiGit = Git.open( spigotApi ); // Git spigotServerGit = Git.open( spigotServer ); if ( !skipCompile ) { System.out.println( "Compiling Bukkit" ); runProcess( bukkit, "sh", mvn, "clean", "install" ); System.out.println( "Compiling CraftBukkit" ); runProcess( craftBukkit, "sh", mvn, "clean", "install" ); } try { runProcess( spigot, "bash", "applyPatches.sh" ); System.out.println( "*** Spigot patches applied!" ); System.out.println( "Compiling Spigot & Spigot-API" ); if ( !skipCompile ) { runProcess( spigot, "sh", mvn, "clean", "install" ); } } catch ( Exception ex ) { System.err.println( "Error compiling Spigot, are you running this jar via msysgit?" ); ex.printStackTrace(); System.exit( 1 ); } for ( int i = 0; i < 35; i++ ) { System.out.println( " " ); } System.out.println( "Success! Everything compiled successfully. Copying final .jar files now." ); copyJar( "CraftBukkit/target", "craftbukkit", "craftbukkit-" + MC_VERSION + ".jar" ); copyJar( "Spigot/Spigot-Server/target", "spigot", "spigot-" + MC_VERSION + ".jar" ); } public static void copyJar(String path, final String jarPrefix, String outJarName) throws Exception { File[] files = new File( path ).listFiles( new FilenameFilter() { @Override public boolean accept(File dir, String name) { return name.startsWith( jarPrefix ) && name.endsWith( ".jar" ); } } ); for ( File file : files ) { System.out.println( "Copying " + file.getName() + " to " + CWD.getAbsolutePath() ); Files.copy( file, new File( CWD, outJarName ) ); System.out.println( " - Saved as " + outJarName ); } } public static void pull(Git repo) throws Exception { System.out.println( "Pulling updates for " + repo.getRepository().getDirectory() ); repo.reset().setRef( "origin/master" ).setMode( ResetCommand.ResetType.HARD ).call(); boolean result = repo.pull().call().isSuccessful(); if ( !result ) { throw new RuntimeException( "Could not pull updates!" ); } System.out.println( "Successfully pulled updates!" ); } public static int runProcess(File workDir, String... 
command) throws Exception { ProcessBuilder pb = new ProcessBuilder( command ); pb.directory( workDir ); pb.environment().put( "JAVA_HOME", System.getProperty( "java.home" ) ); if ( !pb.environment().containsKey( "MAVEN_OPTS" ) ) { pb.environment().put( "MAVEN_OPTS", "-Xmx1024M" ); } final Process ps = pb.start(); new Thread( new StreamRedirector( ps.getInputStream(), System.out ) ).start(); new Thread( new StreamRedirector( ps.getErrorStream(), System.err ) ).start(); int status = ps.waitFor(); if ( status != 0 ) { throw new RuntimeException( "Error running command, return status !=0: " + Arrays.toString( command ) ); } return status; } @RequiredArgsConstructor private static class StreamRedirector implements Runnable { private final InputStream in; private final PrintStream out; @Override public void run() { BufferedReader br = new BufferedReader( new InputStreamReader( in ) ); try { String line; while ( ( line = br.readLine() ) != null ) { out.println( line ); } } catch ( IOException ex ) { throw Throwables.propagate( ex ); } } } public static void unzip(File zipFile, File targetFolder) throws IOException { unzip( zipFile, targetFolder, null ); } public static void unzip(File zipFile, File targetFolder, Predicate<String> filter) throws IOException { targetFolder.mkdir(); ZipFile zip = new ZipFile( zipFile ); for ( Enumeration<? extends ZipEntry> entries = zip.entries(); entries.hasMoreElements(); ) { ZipEntry entry = entries.nextElement(); if ( filter != null ) { if ( !filter.apply( entry.getName() ) ) { continue; } } File outFile = new File( targetFolder, entry.getName() ); if ( entry.isDirectory() ) { outFile.mkdirs(); continue; } if ( outFile.getParentFile() != null ) { outFile.getParentFile().mkdirs(); } InputStream is = zip.getInputStream( entry ); OutputStream os = new FileOutputStream( outFile ); try { ByteStreams.copy( is, os ); } finally { is.close(); os.close(); } System.out.println( "Extracted: " + outFile ); } } public static void clone(String url, File target) throws GitAPIException { System.out.println( "Starting clone of " + url + " to " + target ); Git result = Git.cloneRepository().setURI( url ).setDirectory( target ).call(); try { System.out.println( "Cloned git repository " + url + " to " + target.getAbsolutePath() + ". 
Current HEAD: " + commitHash( result ) ); } finally { result.close(); } } public static String commitHash(Git repo) throws GitAPIException { return Iterables.getOnlyElement( repo.log().setMaxCount( 1 ).call() ).getName(); } public static File download(String url, File target) throws IOException { System.out.println( "Starting download of " + url ); byte[] bytes = Resources.toByteArray( new URL( url ) ); System.out.println( "Downloaded file: " + target + " with md5: " + Hashing.md5().hashBytes( bytes ).toString() ); Files.write( bytes, target ); return target; } public static void disableHttpsCertificateCheck() { // This globally disables certificate checking try { TrustManager[] trustAllCerts = new TrustManager[] { new X509TrustManager() { @Override public java.security.cert.X509Certificate[] getAcceptedIssuers() { return null; } @Override public void checkClientTrusted(X509Certificate[] certs, String authType) { } @Override public void checkServerTrusted(X509Certificate[] certs, String authType) { } } }; // Trust SSL certs SSLContext sc = SSLContext.getInstance( "SSL" ); sc.init( null, trustAllCerts, new SecureRandom() ); HttpsURLConnection.setDefaultSSLSocketFactory( sc.getSocketFactory() ); // Trust host names HostnameVerifier allHostsValid = new HostnameVerifier() { @Override public boolean verify(String hostname, SSLSession session) { return true; } }; HttpsURLConnection.setDefaultHostnameVerifier( allHostsValid ); } catch ( NoSuchAlgorithmException ex ) { System.out.println( "Failed to disable https certificate check" ); ex.printStackTrace( System.err ); } catch ( KeyManagementException ex ) { System.out.println( "Failed to disable https certificate check" ); ex.printStackTrace( System.err ); } } public static void logOutput() { try { final OutputStream logOut = new BufferedOutputStream( new FileOutputStream( LOG_FILE ) ); Runtime.getRuntime().addShutdownHook( new Thread() { @Override public void run() { System.setOut( new PrintStream( new FileOutputStream( FileDescriptor.out ) ) ); System.setErr( new PrintStream( new FileOutputStream( FileDescriptor.err ) ) ); try { logOut.close(); } catch ( IOException ex ) { // We're shutting the jvm down anyway. } } } ); System.setOut( new PrintStream( new TeeOutputStream( System.out, logOut ) ) ); System.setErr( new PrintStream( new TeeOutputStream( System.err, logOut ) ) ); } catch ( FileNotFoundException ex ) { System.err.println( "Failed to create log file: " + LOG_FILE ); } } }
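/*
 * A self-contained sketch (not part of BuildTools) of the difflib calls used in the
 * CraftBukkit patching loop above: parse a unified diff with DiffUtils.parseUnifiedDiff()
 * and apply it to the original lines with DiffUtils.patch(). The file name, file content
 * and diff shown here are made up for illustration only.
 */
package org.spigotmc.builder;

import difflib.DiffUtils;
import difflib.Patch;

import java.util.Arrays;
import java.util.List;

public class PatchSketch {

    public static void main(String[] args) throws Exception {
        List<String> original = Arrays.asList( "line one", "line two", "line three" );

        // Minimal unified diff: replace the middle line
        List<String> diff = Arrays.asList(
                "--- Foo.java",
                "+++ Foo.java",
                "@@ -1,3 +1,3 @@",
                " line one",
                "-line two",
                "+line two patched",
                " line three" );

        Patch patch = DiffUtils.parseUnifiedDiff( diff );
        List<?> patched = DiffUtils.patch( original, patch );

        for ( Object line : patched ) {
            System.out.println( line );
        }
    }
}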
package org.apereo.cas.configuration.model.core.web; import org.apereo.cas.configuration.support.RequiresModule; import java.io.Serializable; import java.nio.charset.StandardCharsets; import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; /** * Configuration properties class for message.bundle. * * @author Dmitriy Kopylenko * @since 5.0.0 */ @RequiresModule(name = "cas-server-core-web", automated = true) public class MessageBundleProperties implements Serializable { private static final long serialVersionUID = 3769733438559663237L; /** * Message bundle character encoding. */ private String encoding = StandardCharsets.UTF_8.name(); /** * Cache size. */ private int cacheSeconds = 180; /** * Flag that controls whether to fallback to the default system locale if no locale is specified explicitly. */ private boolean fallbackSystemLocale; /** * Flag that controls whether to use code message. */ private boolean useCodeMessage = true; /** * A list of strings representing base names for this message bundle. */ private List<String> baseNames = Stream.of("classpath:custom_messages", "classpath:messages") .collect(Collectors.toList()); /** * A list of strings representing common names for this message bundle. * <p> * Entries in last common names override first values (as opposed to baseNames used in message bundles). */ private List<String> commonNames = Stream.of("classpath:common_messages.properties", "file:/etc/cas/config/common_messages.properties") .collect(Collectors.toList()); public String getEncoding() { return encoding; } public void setEncoding(final String encoding) { this.encoding = encoding; } public int getCacheSeconds() { return cacheSeconds; } public void setCacheSeconds(final int cacheSeconds) { this.cacheSeconds = cacheSeconds; } public boolean isFallbackSystemLocale() { return fallbackSystemLocale; } public void setFallbackSystemLocale(final boolean fallbackSystemLocale) { this.fallbackSystemLocale = fallbackSystemLocale; } public boolean isUseCodeMessage() { return useCodeMessage; } public void setUseCodeMessage(final boolean useCodeMessage) { this.useCodeMessage = useCodeMessage; } public List<String> getBaseNames() { return baseNames; } public void setBaseNames(final List<String> baseNames) { this.baseNames = baseNames; } public List<String> getCommonNames() { return commonNames; } public void setCommonNames(final List<String> commonNames) { this.commonNames = commonNames; } }
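/*
 * A minimal sketch (not from the CAS codebase) showing the settings exposed by
 * MessageBundleProperties being populated programmatically. In a real deployment
 * these values would normally come from the CAS configuration properties rather
 * than from code like this; the values below are illustrative only.
 */
package org.apereo.cas.configuration.model.core.web;

import java.util.Arrays;

public class MessageBundlePropertiesSketch {

    public static void main(String[] args) {
        MessageBundleProperties props = new MessageBundleProperties();
        props.setEncoding("UTF-8");
        props.setCacheSeconds(300);            // cache resolved messages for 5 minutes
        props.setFallbackSystemLocale(false);
        props.setUseCodeMessage(true);
        props.setBaseNames(Arrays.asList("classpath:custom_messages", "classpath:messages"));
        props.setCommonNames(Arrays.asList(
                "classpath:common_messages.properties",
                "file:/etc/cas/config/common_messages.properties"));

        System.out.println("bundle base names: " + props.getBaseNames());
    }
}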
package org.bouncycastle.crypto.tls; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.security.SecureRandom; import java.util.Enumeration; import java.util.Hashtable; import org.bouncycastle.util.Arrays; import org.bouncycastle.util.Integers; public class DTLSProtocolHandler { private static final Integer EXT_RenegotiationInfo = Integers .valueOf(ExtensionType.renegotiation_info); private static final byte[] EMPTY_BYTES = new byte[0]; private final SecureRandom secureRandom; public DTLSProtocolHandler(SecureRandom secureRandom) { if (secureRandom == null) throw new IllegalArgumentException("'secureRandom' cannot be null"); this.secureRandom = secureRandom; } public DTLSTransport connect(TlsClient client, DatagramTransport transport) throws IOException { if (client == null) throw new IllegalArgumentException("'client' cannot be null"); if (transport == null) throw new IllegalArgumentException("'transport' cannot be null"); HandshakeState state = new HandshakeState(); TlsClientContextImpl clientContext = createClientContext(); client.init(clientContext); DTLSRecordLayer recordLayer = new DTLSRecordLayer(transport, clientContext, ContentType.handshake); DTLSReliableHandshake handshake = new DTLSReliableHandshake(recordLayer); byte[] clientHello = generateClientHello(state, clientContext, client); handshake.sendMessage(HandshakeType.client_hello, clientHello); DTLSReliableHandshake.Message serverMessage = handshake.receiveMessage(); // NOTE: After receiving a record from the server, we discover the version it chose ProtocolVersion server_version = recordLayer.getDiscoveredServerVersion(); if (server_version.getFullVersion() < clientContext.getClientVersion().getFullVersion()) { // TODO Alert } clientContext.setServerVersion(server_version); client.notifyServerVersion(server_version); if (serverMessage.getType() == HandshakeType.hello_verify_request) { byte[] cookie = parseHelloVerifyRequest(clientContext, serverMessage.getBody()); byte[] patched = patchClientHelloWithCookie(clientHello, cookie); handshake.sendMessage(HandshakeType.client_hello, patched); serverMessage = handshake.receiveMessage(); } if (serverMessage.getType() != HandshakeType.server_hello) { // TODO Alert } parseServerHello(state, clientContext, client, serverMessage.getBody()); short previousMessageType = HandshakeType.server_hello; while (serverMessage.getType() != HandshakeType.server_hello_done) { // TODO Process server message previousMessageType = serverMessage.getType(); serverMessage = handshake.receiveMessage(); } // ServerHelloDone should have empty body if (serverMessage.getBody().length != 0) { // TODO Alert } // TODO Lots more handshake messages... 
handshake.finish(); // TODO Needs to be attached to the record layer using ContentType.application_data return new DTLSTransport(recordLayer); } private void assertEmpty(ByteArrayInputStream is) throws IOException { if (is.available() > 0) { // throw new TlsFatalAlert(AlertDescription.decode_error); // TODO ALert } } private TlsClientContextImpl createClientContext() { SecurityParameters securityParameters = new SecurityParameters(); securityParameters.clientRandom = new byte[32]; secureRandom.nextBytes(securityParameters.clientRandom); TlsUtils.writeGMTUnixTime(securityParameters.clientRandom, 0); return new TlsClientContextImpl(secureRandom, securityParameters); } private byte[] generateClientHello(HandshakeState state, TlsClientContextImpl clientContext, TlsClient client) throws IOException { ByteArrayOutputStream buf = new ByteArrayOutputStream(); ProtocolVersion client_version = client.getClientVersion(); if (!client_version.isDTLS()) { // TODO Alert } clientContext.setClientVersion(client_version); TlsUtils.writeVersion(client_version, buf); buf.write(clientContext.getSecurityParameters().getClientRandom()); // Length of Session id TlsUtils.writeUint8((short) 0, buf); // Length of cookie TlsUtils.writeUint8((short) 0, buf); /* * Cipher suites */ state.offeredCipherSuites = client.getCipherSuites(); for (int cipherSuite : state.offeredCipherSuites) { switch (cipherSuite) { case CipherSuite.TLS_RSA_EXPORT_WITH_RC4_40_MD5: case CipherSuite.TLS_RSA_WITH_RC4_128_MD5: case CipherSuite.TLS_RSA_WITH_RC4_128_SHA: case CipherSuite.TLS_DH_anon_EXPORT_WITH_RC4_40_MD5: case CipherSuite.TLS_DH_anon_WITH_RC4_128_MD5: case CipherSuite.TLS_PSK_WITH_RC4_128_SHA: case CipherSuite.TLS_DHE_PSK_WITH_RC4_128_SHA: case CipherSuite.TLS_RSA_PSK_WITH_RC4_128_SHA: case CipherSuite.TLS_ECDH_ECDSA_WITH_RC4_128_SHA: case CipherSuite.TLS_ECDHE_ECDSA_WITH_RC4_128_SHA: case CipherSuite.TLS_ECDH_RSA_WITH_RC4_128_SHA: case CipherSuite.TLS_ECDHE_RSA_WITH_RC4_128_SHA: case CipherSuite.TLS_ECDH_anon_WITH_RC4_128_SHA: // TODO Alert throw new IllegalStateException( "Client offered an RC4 cipher suite: RC4 MUST NOT be used with DTLS"); } } // Integer -> byte[] state.clientExtensions = client.getClientExtensions(); // Cipher Suites (and SCSV) { /* * RFC 5746 3.4. The client MUST include either an empty "renegotiation_info" extension, * or the TLS_EMPTY_RENEGOTIATION_INFO_SCSV signaling cipher suite value in the * ClientHello. Including both is NOT RECOMMENDED. 
*/ boolean noRenegExt = state.clientExtensions == null || state.clientExtensions.get(EXT_RenegotiationInfo) == null; int count = state.offeredCipherSuites.length; if (noRenegExt) { // Note: 1 extra slot for TLS_EMPTY_RENEGOTIATION_INFO_SCSV ++count; } TlsUtils.writeUint16(2 * count, buf); TlsUtils.writeUint16Array(state.offeredCipherSuites, buf); if (noRenegExt) { TlsUtils.writeUint16(CipherSuite.TLS_EMPTY_RENEGOTIATION_INFO_SCSV, buf); } } // Compression methods state.offeredCompressionMethods = client.getCompressionMethods(); TlsUtils.writeUint8((short) state.offeredCompressionMethods.length, buf); TlsUtils.writeUint8Array(state.offeredCompressionMethods, buf); // Extensions if (state.clientExtensions != null) { ByteArrayOutputStream ext = new ByteArrayOutputStream(); Enumeration keys = state.clientExtensions.keys(); while (keys.hasMoreElements()) { Integer extType = (Integer) keys.nextElement(); TlsProtocolHandler.writeExtension(ext, extType, (byte[]) state.clientExtensions.get(extType)); } TlsUtils.writeOpaque16(ext.toByteArray(), buf); } return buf.toByteArray(); } private byte[] parseHelloVerifyRequest(TlsClientContextImpl clientContext, byte[] body) throws IOException { ByteArrayInputStream buf = new ByteArrayInputStream(body); ProtocolVersion server_version = TlsUtils.readVersion(buf); if (!server_version.equals(clientContext.getServerVersion())) { // TODO Alert } byte[] cookie = TlsUtils.readOpaque8(buf); assertEmpty(buf); if (cookie.length < 1 || cookie.length > 32) { // TODO Alert } return cookie; } private void parseServerHello(HandshakeState state, TlsClientContextImpl clientContext, TlsClient client, byte[] body) throws IOException { ByteArrayInputStream buf = new ByteArrayInputStream(body); ProtocolVersion server_version = TlsUtils.readVersion(buf); if (!server_version.equals(clientContext.getServerVersion())) { // TODO Alert } byte[] server_random = new byte[32]; TlsUtils.readFully(server_random, buf); clientContext.getSecurityParameters().serverRandom = server_random; byte[] sessionID = TlsUtils.readOpaque8(buf); if (sessionID.length > 32) { // TODO Alert } client.notifySessionID(sessionID); int selectedCipherSuite = TlsUtils.readUint16(buf); if (!TlsProtocolHandler.arrayContains(state.offeredCipherSuites, selectedCipherSuite) || selectedCipherSuite == CipherSuite.TLS_EMPTY_RENEGOTIATION_INFO_SCSV) { // TODO Alert } client.notifySelectedCipherSuite(selectedCipherSuite); short selectedCompressionMethod = TlsUtils.readUint8(buf); if (!TlsProtocolHandler.arrayContains(state.offeredCompressionMethods, selectedCompressionMethod)) { // TODO Alert } client.notifySelectedCompressionMethod(selectedCompressionMethod); /* * RFC3546 2.2 The extended server hello message format MAY be sent in place of the server * hello message when the client has requested extended functionality via the extended * client hello message specified in Section 2.1. ... Note that the extended server hello * message is only sent in response to an extended client hello message. This prevents the * possibility that the extended server hello message could "break" existing TLS 1.0 * clients. */ /* * TODO RFC 3546 2.3 If [...] the older session is resumed, then the server MUST ignore * extensions appearing in the client hello, and send a server hello containing no * extensions. 
*/ // Integer -> byte[] Hashtable serverExtensions = new Hashtable(); if (buf.available() > 0) { // Process extensions from extended server hello byte[] extBytes = TlsUtils.readOpaque16(buf); ByteArrayInputStream ext = new ByteArrayInputStream(extBytes); while (ext.available() > 0) { Integer extType = Integers.valueOf(TlsUtils.readUint16(ext)); byte[] extValue = TlsUtils.readOpaque16(ext); /* * RFC 5746 Note that sending a "renegotiation_info" extension in response to a * ClientHello containing only the SCSV is an explicit exception to the prohibition * in RFC 5246, Section 7.4.1.4, on the server sending unsolicited extensions and is * only allowed because the client is signaling its willingness to receive the * extension via the TLS_EMPTY_RENEGOTIATION_INFO_SCSV SCSV. TLS implementations * MUST continue to comply with Section 7.4.1.4 for all other extensions. */ if (!extType.equals(EXT_RenegotiationInfo) && state.clientExtensions.get(extType) == null) { /* * RFC 3546 2.3 Note that for all extension types (including those defined in * future), the extension type MUST NOT appear in the extended server hello * unless the same extension type appeared in the corresponding client hello. * Thus clients MUST abort the handshake if they receive an extension type in * the extended server hello that they did not request in the associated * (extended) client hello. */ // TODO Alert } if (serverExtensions.containsKey(extType)) { /* * RFC 3546 2.3 Also note that when multiple extensions of different types are * present in the extended client hello or the extended server hello, the * extensions may appear in any order. There MUST NOT be more than one extension * of the same type. */ // TODO Alert } serverExtensions.put(extType, extValue); } } assertEmpty(buf); /* * RFC 5746 3.4. When a ServerHello is received, the client MUST check if it includes the * "renegotiation_info" extension: */ { boolean secure_negotiation = serverExtensions.containsKey(EXT_RenegotiationInfo); /* * If the extension is present, set the secure_renegotiation flag to TRUE. The client * MUST then verify that the length of the "renegotiated_connection" field is zero, and * if it is not, MUST abort the handshake (by sending a fatal handshake_failure alert). */ if (secure_negotiation) { byte[] renegExtValue = (byte[]) serverExtensions.get(EXT_RenegotiationInfo); if (!Arrays.constantTimeAreEqual(renegExtValue, TlsProtocolHandler.createRenegotiationInfo(EMPTY_BYTES))) { // TODO Alert } } client.notifySecureRenegotiation(secure_negotiation); } if (state.clientExtensions != null) { client.processServerExtensions(serverExtensions); } state.keyExchange = client.getKeyExchange(); } private byte[] patchClientHelloWithCookie(byte[] clientHello, byte[] cookie) throws IOException { int sessionIDPos = 34; int sessionIDLength = TlsUtils.readUint8(clientHello, sessionIDPos); int cookieLengthPos = sessionIDPos + 1 + sessionIDLength; int cookiePos = cookieLengthPos + 1; byte[] patched = new byte[clientHello.length + cookie.length]; System.arraycopy(clientHello, 0, patched, 0, cookieLengthPos); TlsUtils.writeUint8((short) cookie.length, patched, cookieLengthPos); System.arraycopy(cookie, 0, patched, cookiePos, cookie.length); System.arraycopy(clientHello, cookiePos, patched, cookiePos + cookie.length, clientHello.length - cookiePos); return patched; } private static class HandshakeState { int[] offeredCipherSuites = null; Hashtable clientExtensions = null; short[] offeredCompressionMethods = null; TlsKeyExchange keyExchange = null; } }
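/*
 * A self-contained illustration (not BouncyCastle API) of the byte splicing done in
 * patchClientHelloWithCookie() above: the cookie-length byte sits at
 * 2 (version) + 32 (client random) + 1 + sessionIdLength; it is overwritten with the
 * new cookie length and the cookie bytes are spliced in after it, shifting the rest of
 * the hello to the right. The hello layout built in main() is a toy stand-in, not a
 * real ClientHello message.
 */
package org.bouncycastle.crypto.tls;

public class CookieSpliceSketch {

    static byte[] insertCookie(byte[] clientHello, byte[] cookie) {
        int sessionIDPos = 2 + 32;                          // version + client random
        int sessionIDLength = clientHello[sessionIDPos] & 0xFF;
        int cookieLengthPos = sessionIDPos + 1 + sessionIDLength;
        int cookiePos = cookieLengthPos + 1;

        byte[] patched = new byte[clientHello.length + cookie.length];
        System.arraycopy(clientHello, 0, patched, 0, cookieLengthPos);
        patched[cookieLengthPos] = (byte) cookie.length;    // new cookie length
        System.arraycopy(cookie, 0, patched, cookiePos, cookie.length);
        System.arraycopy(clientHello, cookiePos, patched, cookiePos + cookie.length,
                clientHello.length - cookiePos);
        return patched;
    }

    public static void main(String[] args) {
        // Toy "hello": 2-byte version, 32-byte random, empty session id, empty cookie, 2 trailing bytes
        byte[] hello = new byte[2 + 32 + 1 + 1 + 2];
        byte[] cookie = {0x0A, 0x0B, 0x0C};

        byte[] patched = insertCookie(hello, cookie);
        System.out.println("before: " + hello.length + " bytes, after: " + patched.length + " bytes");
    }
}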
package bitronix.tm.resource.jms; import bitronix.tm.BitronixTransaction; import bitronix.tm.internal.BitronixSystemException; import bitronix.tm.internal.BitronixRollbackSystemException; import bitronix.tm.utils.Decoder; import bitronix.tm.resource.common.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.jms.*; import javax.transaction.SystemException; import javax.transaction.xa.XAResource; import java.io.Serializable; import java.util.*; /** * JMS Session wrapper that will send calls to either a XASession or to a non-XA Session depending on the calling * context. * <p>&copy; Bitronix 2005, 2006, 2007</p> * * @author lorban */ public class DualSessionWrapper extends AbstractXAResourceHolder implements Session, StateChangeListener { private final static Logger log = LoggerFactory.getLogger(DualSessionWrapper.class); private JmsPooledConnection pooledConnection; private boolean transacted; private int acknowledgeMode; private XASession xaSession; private Session session; private XAResource xaResource; private MessageListener listener; //TODO: shouldn't producers/consumers/subscribers be separated between XA and non-XA session ? private Map messageProducers = new HashMap(); private Map messageConsumers = new HashMap(); private Map topicSubscribers = new HashMap(); public DualSessionWrapper(JmsPooledConnection pooledConnection, boolean transacted, int acknowledgeMode) { this.pooledConnection = pooledConnection; this.transacted = transacted; this.acknowledgeMode = acknowledgeMode; if (log.isDebugEnabled()) log.debug("getting session handle from " + pooledConnection); setState(STATE_ACCESSIBLE); addStateChangeEventListener(this); } public PoolingConnectionFactory getPoolingConnectionFactory() { return pooledConnection.getPoolingConnectionFactory(); } public Session getSession() throws JMSException { return getSession(false); } public Session getSession(boolean forceXa) throws JMSException { if (getState() == STATE_CLOSED) throw new JMSException("session handle is closed"); if (forceXa) { if (log.isDebugEnabled()) log.debug("choosing XA session (forced)"); return createXASession(); } else { BitronixTransaction currentTransaction = TransactionContextHelper.currentTransaction(); if (currentTransaction != null) { if (log.isDebugEnabled()) log.debug("choosing XA session"); return createXASession(); } if (log.isDebugEnabled()) log.debug("choosing non-XA session"); return createNonXASession(); } } private Session createNonXASession() throws JMSException { // non-XA if (session == null) { session = pooledConnection.getXAConnection().createSession(transacted, acknowledgeMode); if (listener != null) { session.setMessageListener(listener); if (log.isDebugEnabled()) log.debug("get non-XA session registered message listener: " + listener); } } return session; } private Session createXASession() throws JMSException { if (xaSession == null) { xaSession = pooledConnection.getXAConnection().createXASession(); if (listener != null) { xaSession.setMessageListener(listener); if (log.isDebugEnabled()) log.debug("get XA session registered message listener: " + listener); } xaResource = xaSession.getXAResource(); } return xaSession.getSession(); } public String toString() { return "a DualSessionWrapper in state " + Decoder.decodeXAStatefulHolderState(getState()) + " of " + pooledConnection; } /* wrapped Session methods that have special XA semantics */ public void close() throws JMSException { if (getState() != STATE_ACCESSIBLE) { if (log.isDebugEnabled()) log.debug("not closing already closed 
" + this); return; } if (log.isDebugEnabled()) log.debug("closing " + this); //TODO: even if delisting fails, requeuing should be done or we'll have a session leak here // delisting try { TransactionContextHelper.delistFromCurrentTransaction(this, pooledConnection.getPoolingConnectionFactory()); } catch (BitronixRollbackSystemException ex) { throw (JMSException) new JMSException("unilateral rollback of " + xaResourceHolderState).initCause(ex); } catch (SystemException ex) { throw (JMSException) new JMSException("error delisting " + xaResourceHolderState).initCause(ex); } // requeuing try { TransactionContextHelper.requeue(this, pooledConnection.getPoolingConnectionFactory()); } catch (BitronixSystemException ex) { throw (JMSException) new JMSException("error delisting " + xaResourceHolderState).initCause(ex); } } public Date getLastReleaseDate() { return null; } /* * When the session is closed (directly or deferred) the action is to change its state to IN_POOL. * There is no such state for JMS sessions, this just means that it has been closed -> force a * state switch to CLOSED then clean up. */ public void stateChanged(XAStatefulHolder source, int oldState, int newState) { if (newState == STATE_IN_POOL) { setState(STATE_CLOSED); } else if (newState == STATE_CLOSED) { if (log.isDebugEnabled()) log.debug("session state changing to CLOSED, cleaning it up: " + this); if (xaSession != null) { try { xaSession.close(); } catch (JMSException ex) { log.error("error closing XA session", ex); } xaSession = null; xaResource = null; } if (session != null) { try { session.close(); } catch (JMSException ex) { log.error("error closing session", ex); } session = null; } Iterator it = messageProducers.entrySet().iterator(); while (it.hasNext()) { Map.Entry entry = (Map.Entry) it.next(); MessageProducerWrapper messageProducerWrapper = (MessageProducerWrapper) entry.getValue(); try { messageProducerWrapper.close(); } catch (JMSException ex) { log.error("error closing message producer", ex); } } messageProducers.clear(); it = messageConsumers.entrySet().iterator(); while (it.hasNext()) { Map.Entry entry = (Map.Entry) it.next(); MessageConsumerWrapper messageConsumerWrapper = (MessageConsumerWrapper) entry.getValue(); try { messageConsumerWrapper.close(); } catch (JMSException ex) { log.error("error closing message consumer", ex); } } messageConsumers.clear(); } // if newState == STATE_CLOSED } public void stateChanging(XAStatefulHolder source, int currentState, int futureState) { } public MessageProducer createProducer(Destination destination) throws JMSException { MessageProducerConsumerKey key = new MessageProducerConsumerKey(destination); if (log.isDebugEnabled()) log.debug("looking for producer based on " + key); MessageProducerWrapper messageProducer = (MessageProducerWrapper) messageProducers.get(key); if (messageProducer == null) { if (log.isDebugEnabled()) log.debug("found no producer based on " + key + ", creating it"); messageProducer = new MessageProducerWrapper(getSession().createProducer(destination), this, pooledConnection.getPoolingConnectionFactory()); if (pooledConnection.getPoolingConnectionFactory().getCacheProducersConsumers()) { if (log.isDebugEnabled()) log.debug("caching producer via key " + key); messageProducers.put(key, messageProducer); } } else if (log.isDebugEnabled()) log.debug("found producer based on " + key + ", recycling it: " + messageProducer); return messageProducer; } public MessageConsumer createConsumer(Destination destination) throws JMSException { 
MessageProducerConsumerKey key = new MessageProducerConsumerKey(destination); if (log.isDebugEnabled()) log.debug("looking for consumer based on " + key); MessageConsumerWrapper messageConsumer = (MessageConsumerWrapper) messageConsumers.get(key); if (messageConsumer == null) { if (log.isDebugEnabled()) log.debug("found no consumer based on " + key + ", creating it"); messageConsumer = new MessageConsumerWrapper(getSession().createConsumer(destination), this, pooledConnection.getPoolingConnectionFactory()); if (pooledConnection.getPoolingConnectionFactory().getCacheProducersConsumers()) { if (log.isDebugEnabled()) log.debug("caching consumer via key " + key); messageConsumers.put(key, messageConsumer); } } else if (log.isDebugEnabled()) log.debug("found consumer based on " + key + ", recycling it: " + messageConsumer); return messageConsumer; } public MessageConsumer createConsumer(Destination destination, String messageSelector) throws JMSException { MessageProducerConsumerKey key = new MessageProducerConsumerKey(destination, messageSelector); if (log.isDebugEnabled()) log.debug("looking for consumer based on " + key); MessageConsumerWrapper messageConsumer = (MessageConsumerWrapper) messageConsumers.get(key); if (messageConsumer == null) { if (log.isDebugEnabled()) log.debug("found no consumer based on " + key + ", creating it"); messageConsumer = new MessageConsumerWrapper(getSession().createConsumer(destination, messageSelector), this, pooledConnection.getPoolingConnectionFactory()); if (pooledConnection.getPoolingConnectionFactory().getCacheProducersConsumers()) { if (log.isDebugEnabled()) log.debug("caching consumer via key " + key); messageConsumers.put(key, messageConsumer); } } else if (log.isDebugEnabled()) log.debug("found consumer based on " + key + ", recycling it: " + messageConsumer); return messageConsumer; } public MessageConsumer createConsumer(Destination destination, String messageSelector, boolean noLocal) throws JMSException { MessageProducerConsumerKey key = new MessageProducerConsumerKey(destination, messageSelector, noLocal); if (log.isDebugEnabled()) log.debug("looking for consumer based on " + key); MessageConsumerWrapper messageConsumer = (MessageConsumerWrapper) messageConsumers.get(key); if (messageConsumer == null) { if (log.isDebugEnabled()) log.debug("found no consumer based on " + key + ", creating it"); messageConsumer = new MessageConsumerWrapper(getSession().createConsumer(destination, messageSelector, noLocal), this, pooledConnection.getPoolingConnectionFactory()); if (pooledConnection.getPoolingConnectionFactory().getCacheProducersConsumers()) { if (log.isDebugEnabled()) log.debug("caching consumer via key " + key); messageConsumers.put(key, messageConsumer); } } else if (log.isDebugEnabled()) log.debug("found consumer based on " + key + ", recycling it: " + messageConsumer); return messageConsumer; } public TopicSubscriber createDurableSubscriber(Topic topic, String name) throws JMSException { MessageProducerConsumerKey key = new MessageProducerConsumerKey(topic); if (log.isDebugEnabled()) log.debug("looking for durable subscriber based on " + key); TopicSubscriberWrapper topicSubscriber = (TopicSubscriberWrapper) topicSubscribers.get(key); if (topicSubscriber == null) { if (log.isDebugEnabled()) log.debug("found no durable subscriber based on " + key + ", creating it"); topicSubscriber = new TopicSubscriberWrapper(getSession().createDurableSubscriber(topic, name), this, pooledConnection.getPoolingConnectionFactory()); if 
(pooledConnection.getPoolingConnectionFactory().getCacheProducersConsumers()) { if (log.isDebugEnabled()) log.debug("caching durable subscriber via key " + key); topicSubscribers.put(key, topicSubscriber); } } else if (log.isDebugEnabled()) log.debug("found durable subscriber based on " + key + ", recycling it: " + topicSubscriber); return topicSubscriber; } public TopicSubscriber createDurableSubscriber(Topic topic, String name, String messageSelector, boolean noLocal) throws JMSException { MessageProducerConsumerKey key = new MessageProducerConsumerKey(topic, messageSelector, noLocal); if (log.isDebugEnabled()) log.debug("looking for durable subscriber based on " + key); TopicSubscriberWrapper topicSubscriber = (TopicSubscriberWrapper) topicSubscribers.get(key); if (topicSubscriber == null) { if (log.isDebugEnabled()) log.debug("found no durable subscriber based on " + key + ", creating it"); topicSubscriber = new TopicSubscriberWrapper(getSession().createDurableSubscriber(topic, name, messageSelector, noLocal), this, pooledConnection.getPoolingConnectionFactory()); if (pooledConnection.getPoolingConnectionFactory().getCacheProducersConsumers()) { if (log.isDebugEnabled()) log.debug("caching durable subscriber via key " + key); topicSubscribers.put(key, topicSubscriber); } } else if (log.isDebugEnabled()) log.debug("found durable subscriber based on " + key + ", recycling it: " + topicSubscriber); return topicSubscriber; } public MessageListener getMessageListener() throws JMSException { return listener; } public void setMessageListener(MessageListener listener) throws JMSException { if (getState() == STATE_CLOSED) throw new JMSException("session handle is closed"); if (session != null) session.setMessageListener(listener); if (xaSession != null) xaSession.setMessageListener(listener); this.listener = listener; } public void run() { try { Session session = getSession(true); if (log.isDebugEnabled()) log.debug("running XA session " + session); session.run(); } catch (JMSException ex) { log.error("error getting session", ex); } } /* XAResourceHolder implementation */ public XAResource getXAResource() { return xaResource; } /* dumb wrapping of Session methods */ public boolean getTransacted() throws JMSException { return getSession().getTransacted(); } public int getAcknowledgeMode() throws JMSException { return getSession().getAcknowledgeMode(); } public void commit() throws JMSException { getSession().commit(); } public void rollback() throws JMSException { getSession().rollback(); } public BytesMessage createBytesMessage() throws JMSException { return getSession().createBytesMessage(); } public MapMessage createMapMessage() throws JMSException { return getSession().createMapMessage(); } public Message createMessage() throws JMSException { return getSession().createMessage(); } public ObjectMessage createObjectMessage() throws JMSException { return getSession().createObjectMessage(); } public ObjectMessage createObjectMessage(Serializable serializable) throws JMSException { return getSession().createObjectMessage(serializable); } public StreamMessage createStreamMessage() throws JMSException { return getSession().createStreamMessage(); } public TextMessage createTextMessage() throws JMSException { return getSession().createTextMessage(); } public TextMessage createTextMessage(String text) throws JMSException { return getSession().createTextMessage(text); } public void recover() throws JMSException { getSession().recover(); } public javax.jms.Queue createQueue(String queueName) throws 
JMSException { return getSession().createQueue(queueName); } public Topic createTopic(String topicName) throws JMSException { return getSession().createTopic(topicName); } public QueueBrowser createBrowser(javax.jms.Queue queue) throws JMSException { return getSession().createBrowser(queue); } public QueueBrowser createBrowser(javax.jms.Queue queue, String messageSelector) throws JMSException { return getSession().createBrowser(queue, messageSelector); } public TemporaryQueue createTemporaryQueue() throws JMSException { return getSession().createTemporaryQueue(); } public TemporaryTopic createTemporaryTopic() throws JMSException { return getSession().createTemporaryTopic(); } public void unsubscribe(String name) throws JMSException { getSession().unsubscribe(name); } public List getXAResourceHolders() { List holders = new ArrayList(1); holders.add(this); return holders; } public Object getConnectionHandle() throws Exception { return null; } }
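The session wrapper above recycles MessageConsumerWrapper and TopicSubscriberWrapper instances by keying them on a MessageProducerConsumerKey whenever the pooling connection factory has producer/consumer caching enabled. The stand-alone sketch below shows the same cache-or-create pattern in isolation; CachingFactory and its type parameters are invented stand-ins and are not part of the wrapper's API.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;

// Illustrative only: K stands in for MessageProducerConsumerKey, W for the
// wrapper type (consumer, producer or subscriber wrapper).
final class CachingFactory<K, W> {

    private final Map<K, W> cache = new ConcurrentHashMap<K, W>();
    private final boolean cachingEnabled;
    private final Function<K, W> creator;

    CachingFactory(boolean cachingEnabled, Function<K, W> creator) {
        this.cachingEnabled = cachingEnabled;
        this.creator = creator;
    }

    W get(K key) {
        if (!cachingEnabled) {
            // Caching disabled: always hand out a fresh wrapper.
            return creator.apply(key);
        }
        // Caching enabled: recycle the existing wrapper for this key,
        // or create and remember a new one.
        return cache.computeIfAbsent(key, creator);
    }
}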
package org.jasig.portal; import org.xml.sax.DocumentHandler; import org.xml.sax.ext.LexicalHandler; import org.xml.sax.SAXException; import java.util.Vector; import java.util.Enumeration; /** * SAXBufferImpl acts like a SAXFilter, but it can also buffer * SAX events to be released at a later time. * @author Peter Kharchenko * @version $Revision$ */ public class SAXBufferImpl implements DocumentHandler, LexicalHandler { protected DocumentHandler outDocumentHandler; protected LexicalHandler outLexicalHandler; protected Vector eventTypes; protected Vector eventArguments; protected boolean buffering; // types of SAX events public static final int STARTDOCUMENT = 0; public static final int ENDDOCUMENT = 1; public static final int STARTELEMENT = 2; public static final int ENDELEMENT = 3; public static final int CHARACTERS = 4; public static final int IGNORABLEWHITESPACE = 5; public static final int PROCESSINGINSTRUCTION = 6; public static final int SETDOCUMENTLOCATOR = 7; public static final int STARTDTD = 8; public static final int ENDDTD = 9; public static final int STARTENTITY = 10; public static final int ENDENTITY = 11; public static final int STARTCDATA = 12; public static final int ENDCDATA = 13; public static final int COMMENT = 14; /** * Empty constructor */ public SAXBufferImpl () { buffering = true; eventTypes = new Vector (); eventArguments = new Vector (); } /** * Constructor with a defined DocumentHandler. * @param handler output DocumentHandler */ public SAXBufferImpl (DocumentHandler handler) { buffering = false; this.outDocumentHandler = handler; if(handler instanceof LexicalHandler) this.outLexicalHandler=(LexicalHandler) handler; } public SAXBufferImpl (DocumentHandler handler, boolean bufferSetting) { this (handler); this.buffering = bufferSetting; } public synchronized void startBuffering () { buffering=true; } public synchronized boolean isEmpty() { return eventTypes.isEmpty(); } public synchronized void outputBuffer(DocumentHandler dh) throws SAXException { this.setDocumentHandler(dh); this.outputBuffer(); } public synchronized void outputBuffer() throws SAXException { // unqueue all of the buffered events if (outDocumentHandler != null) { Enumeration args = eventArguments.elements (); for (Enumeration types = eventTypes.elements (); types.hasMoreElements () ;) { int type = ((Integer)types.nextElement ()).intValue (); switch (type) { case STARTDOCUMENT: outDocumentHandler.startDocument (); break; case ENDDOCUMENT: outDocumentHandler.endDocument (); break; case STARTELEMENT: StartElementData sed = (StartElementData) args.nextElement (); outDocumentHandler.startElement (sed.getName (), sed.getAtts ()); break; case ENDELEMENT: String elname = (String) args.nextElement (); outDocumentHandler.endElement (elname); break; case CHARACTERS: CharactersData cd = (CharactersData) args.nextElement (); outDocumentHandler.characters (cd.getCh (), cd.getStart (), cd.getLength ()); break; case IGNORABLEWHITESPACE: CharactersData ws = (CharactersData) args.nextElement (); outDocumentHandler.ignorableWhitespace (ws.getCh (), ws.getStart (), ws.getLength ()); break; case PROCESSINGINSTRUCTION: ProcessingInstructionData pid = (ProcessingInstructionData) args.nextElement (); outDocumentHandler.processingInstruction (pid.getTarget (), pid.getData ()); break; case SETDOCUMENTLOCATOR: org.xml.sax.Locator loc = (org.xml.sax.Locator) args.nextElement (); outDocumentHandler.setDocumentLocator (loc); break; case STARTDTD: if(outLexicalHandler!=null) { StartDTDData dd=(StartDTDData) 
args.nextElement(); outLexicalHandler.startDTD(dd.getName(),dd.getPublicId(),dd.getSystemId()); } else Logger.log(Logger.WARN,"SAXBufferImpl.stopBuffering() : trying to output lexical events while LexicalHandler is null"); break; case ENDDTD: if(outLexicalHandler!=null) { outLexicalHandler.endDTD(); } else Logger.log(Logger.WARN,"SAXBufferImpl.stopBuffering() : trying to output lexical events while LexicalHandler is null"); break; case STARTENTITY: if(outLexicalHandler!=null) { String n=(String) args.nextElement(); outLexicalHandler.startEntity(n); } else Logger.log(Logger.WARN,"SAXBufferImpl.stopBuffering() : trying to output lexical events while LexicalHandler is null"); break; case ENDENTITY: if(outLexicalHandler!=null) { String n=(String) args.nextElement(); outLexicalHandler.endEntity(n); } else Logger.log(Logger.WARN,"SAXBufferImpl.stopBuffering() : trying to output lexical events while LexicalHandler is null"); break; case STARTCDATA: if(outLexicalHandler!=null) { outLexicalHandler.startCDATA(); } else Logger.log(Logger.WARN,"SAXBufferImpl.stopBuffering() : trying to output lexical events while LexicalHandler is null"); break; case ENDCDATA: if(outLexicalHandler!=null) { outLexicalHandler.endCDATA(); } else Logger.log(Logger.WARN,"SAXBufferImpl.stopBuffering() : trying to output lexical events while LexicalHandler is null"); break; case COMMENT: if(outLexicalHandler!=null) { CharactersData ccd = (CharactersData) args.nextElement (); outLexicalHandler.comment (ccd.getCh (), ccd.getStart (), ccd.getLength ()); } else Logger.log(Logger.WARN,"SAXBufferImpl.stopBuffering() : trying to output lexical events while LexicalHandler is null"); break; } } } else Logger.log (Logger.ERROR, "SAXBufferImpl:stopBuffering() : trying to ouput buffer to a null DocumentHandler."); } public synchronized void clearBuffer() { // clean out the vectors eventTypes.clear(); eventArguments.clear(); } public synchronized void stopBuffering () throws SAXException { this.outputBuffer(); this.clearBuffer(); buffering = false; } public DocumentHandler getDocumentHandler () { return outDocumentHandler; } public void setDocumentHandler (DocumentHandler handler) { this.outDocumentHandler=handler; if(handler instanceof LexicalHandler) this.outLexicalHandler=(LexicalHandler) handler; } public LexicalHandler getLexicalHandler() { return outLexicalHandler; } public void setLexicalHandler(LexicalHandler handler) { this.outLexicalHandler=handler; } public void characters (char ch[], int start, int length) throws SAXException { if (buffering) { eventTypes.add (new Integer (CHARACTERS)); eventArguments.add (new CharactersData (ch, start, length)); } else { if (outDocumentHandler != null) { outDocumentHandler.characters (ch, start, length); } } } public void startDocument () throws SAXException { if (buffering) { eventTypes.add (new Integer (STARTDOCUMENT)); } else { if (outDocumentHandler != null) { outDocumentHandler.startDocument (); } } } public void endDocument () throws SAXException { if (buffering) { eventTypes.add (new Integer (ENDDOCUMENT)); } else { if (outDocumentHandler != null) outDocumentHandler.endDocument (); } } public void startElement (java.lang.String name, org.xml.sax.AttributeList atts) throws SAXException { if (buffering) { eventTypes.add (new Integer (STARTELEMENT)); eventArguments.add (new StartElementData (name, atts)); } else { if (outDocumentHandler != null) outDocumentHandler.startElement (name, atts); } } public void endElement (java.lang.String name) throws SAXException { if (buffering) { 
eventTypes.add (new Integer (ENDELEMENT)); eventArguments.add (new String(name)); } else { if (outDocumentHandler != null) outDocumentHandler.endElement (name); } } public void ignorableWhitespace (char[] ch, int start, int length) throws SAXException { if (buffering) { eventTypes.add (new Integer (IGNORABLEWHITESPACE)); eventArguments.add (new CharactersData (ch, start, length)); } else { if (outDocumentHandler != null) outDocumentHandler.ignorableWhitespace (ch, start, length); } } public void processingInstruction (java.lang.String target, java.lang.String data) throws SAXException { if (buffering) { eventTypes.add (new Integer (PROCESSINGINSTRUCTION)); eventArguments.add (new ProcessingInstructionData (target, data)); } else { if (outDocumentHandler != null) outDocumentHandler.processingInstruction (target, data); } } public void setDocumentLocator (org.xml.sax.Locator locator) { if (buffering) { eventTypes.add (new Integer (SETDOCUMENTLOCATOR)); eventArguments.add (locator); } else { if (outDocumentHandler != null) outDocumentHandler.setDocumentLocator (locator); } } public void startDTD (String name, String publicId, String systemId) throws SAXException { if (buffering) { eventTypes.add(new Integer(STARTDTD)); eventArguments.add(new StartDTDData(name,publicId,systemId)); } else { if(outLexicalHandler!=null) outLexicalHandler.startDTD(name,publicId,systemId); } } public void endDTD () throws SAXException { if (buffering) { eventTypes.add(new Integer(ENDDTD)); } else { if(outLexicalHandler!=null) outLexicalHandler.endDTD(); } } public void startEntity (String name) throws SAXException { if (buffering) { eventTypes.add(new Integer(STARTENTITY)); eventArguments.add(new String(name)); } else { if(outLexicalHandler!=null) outLexicalHandler.startEntity(name); } } public void endEntity (String name) throws SAXException { if (buffering) { eventTypes.add(new Integer(ENDENTITY)); eventArguments.add(new String(name)); } else { if(outLexicalHandler!=null) outLexicalHandler.endEntity(name); } } public void startCDATA () throws SAXException { if (buffering) { eventTypes.add(new Integer(STARTCDATA)); } else { if(outLexicalHandler!=null) outLexicalHandler.startCDATA(); } } public void endCDATA () throws SAXException { if (buffering) { eventTypes.add(new Integer(ENDCDATA)); } else { if(outLexicalHandler!=null) outLexicalHandler.endCDATA(); } } public void comment (char ch[], int start, int length) throws SAXException { if (buffering) { eventTypes.add (new Integer (COMMENT)); eventArguments.add (new CharactersData (ch, start, length)); } else { if(outLexicalHandler!=null) outLexicalHandler.comment(ch,start,length); } } // supporting utility classes private class ProcessingInstructionData { public String s_target; public String s_data; ProcessingInstructionData (String target, String data) { this.s_target = target; this.s_data = data; } public String getTarget () { return s_target; } public String getData () { return s_data; } } private class CharactersData { public char[] ca_ch; public int i_start; public int i_length; CharactersData (char ch[],int start, int length) { this.ca_ch=ch; this.i_start=start; this.i_length=length; } public char[] getCh () { return ca_ch; } public int getStart () { return i_start; } public int getLength () { return i_length; } } private class StartElementData { private String s_name; private org.xml.sax.AttributeList al_atts; org.xml.sax.helpers.AttributeListImpl li; StartElementData (String name, org.xml.sax.AttributeList atts) { this.s_name = name; li = new 
org.xml.sax.helpers.AttributeListImpl (atts); } public String getName () {return s_name; } public org.xml.sax.AttributeList getAtts () { return li; } } private class StartDTDData { public String s_name; public String s_publicId; public String s_systemId; StartDTDData (String name, String publicId, String systemId) { this.s_name=name; this.s_publicId=publicId; this.s_systemId=systemId; } public String getName () { return s_name; } public String getPublicId () { return s_publicId; } public String getSystemId () { return s_systemId; } } }
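SAXBufferImpl, defined above, queues SAX 1 events while buffering is on and replays them once a DocumentHandler is attached. A minimal usage sketch follows; the element name and text are invented, and "downstream" stands for whatever DocumentHandler the caller already has (for example an XSLT filter or a serializer).

import org.xml.sax.DocumentHandler;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.AttributeListImpl;

import org.jasig.portal.SAXBufferImpl;

// Illustrative usage only.
public class SAXBufferExample {

    public static void bufferAndReplay(DocumentHandler downstream) throws SAXException {
        SAXBufferImpl buffer = new SAXBufferImpl();   // no handler yet: buffering is on
        buffer.startDocument();
        buffer.startElement("greeting", new AttributeListImpl());
        char[] text = "hello".toCharArray();
        buffer.characters(text, 0, text.length);
        buffer.endElement("greeting");
        buffer.endDocument();

        buffer.setDocumentHandler(downstream);        // pick the output target
        buffer.stopBuffering();                       // replay the queued events, then pass through
    }
}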
package org.zeropage;

import java.util.Set;

public class CacheLinkSource extends LinkSourceDecorator {

    CacheStorage cache;

    public CacheLinkSource(LinkSource innerObject, CacheStorage cacheStorage) {
        this.innerSource = innerObject;
        this.cache = cacheStorage;
    }

    @Override
    public Set<String> getLinks(String from) throws Exception {
        // Serve from the cache when it holds a non-null entry for this key.
        if (this.cache.hasKey(from)) {
            Set<String> result = this.cache.getData(from);
            if (result != null) {
                return result;
            }
        }
        // Cache miss (or null entry): fall back to the wrapped source, if any.
        // Note that nothing here writes fetched links back into the cache.
        if (innerSource == null) {
            return null;
        } else {
            return innerSource.getLinks(from);
        }
    }
}
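CacheLinkSource only reads from its CacheStorage; the cache must be populated elsewhere. As an illustration only, using hypothetical stand-in types rather than the real LinkSource and CacheStorage interfaces, a read-through variant that also fills the cache on a miss could look like this:

import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

// Hypothetical sketch, not part of the org.zeropage sources.
interface SimpleLinkSource {
    Set<String> getLinks(String from) throws Exception;
}

class ReadThroughLinkSource implements SimpleLinkSource {

    private final SimpleLinkSource inner;
    private final Map<String, Set<String>> cache = new ConcurrentHashMap<String, Set<String>>();

    ReadThroughLinkSource(SimpleLinkSource inner) {
        this.inner = inner;
    }

    @Override
    public Set<String> getLinks(String from) throws Exception {
        Set<String> cached = cache.get(from);
        if (cached != null) {
            return cached;                    // cache hit
        }
        Set<String> fetched = inner.getLinks(from);
        if (fetched != null) {
            cache.put(from, fetched);         // populate the cache on a miss
        }
        return fetched;
    }
}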
package com.mutualmobile.barricade.compiler; import com.mutualmobile.barricade.IBarricadeConfig; import com.mutualmobile.barricade.response.BarricadeResponse; import com.mutualmobile.barricade.response.BarricadeResponseSet; import com.squareup.javapoet.ClassName; import com.squareup.javapoet.FieldSpec; import com.squareup.javapoet.JavaFile; import com.squareup.javapoet.MethodSpec; import com.squareup.javapoet.ParameterizedTypeName; import com.squareup.javapoet.TypeSpec; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import javax.annotation.processing.Messager; import javax.annotation.processing.ProcessingEnvironment; import javax.tools.Diagnostic; import static com.squareup.javapoet.JavaFile.builder; import static com.squareup.javapoet.TypeSpec.classBuilder; import static javax.lang.model.element.Modifier.FINAL; import static javax.lang.model.element.Modifier.PRIVATE; import static javax.lang.model.element.Modifier.PUBLIC; import static javax.lang.model.element.Modifier.STATIC; /** * Generates code for a Barricade configuration. */ final class CodeGenerator { private static final String CLASS_NAME = "BarricadeConfig"; private static final String ENDPOINTS_CLASS_NAME = "Endpoints"; private static final String RESPONSES_CLASS_NAME = "Responses"; private static final String PACKAGE_NAME = "com.mutualmobile.barricade"; private static final ClassName TYPE_BARRICADE_RESPONSE_SET = ClassName.get(BarricadeResponseSet.class); private static final ParameterizedTypeName TYPE_CONFIG = ParameterizedTypeName.get(ClassName.get(HashMap.class), ClassName.get(String.class), ClassName.get(BarricadeResponseSet.class)); private CodeGenerator() { } /** * Generates the code for the Barricade configuration based on the annotations found. 
* * @param processingEnv Processing environment * @param configs Configuration detected by annotation processing * @param messager Messager to print logs * @throws IOException */ public static void generateClass(ProcessingEnvironment processingEnv, HashMap<String, BarricadeResponseSet> configs, Messager messager) throws IOException { messager.printMessage(Diagnostic.Kind.NOTE, "Generating configuration code..."); TypeSpec.Builder classBuilder = classBuilder(CLASS_NAME).addModifiers(PUBLIC, FINAL); FieldSpec valuesField = FieldSpec.builder(TYPE_CONFIG, "configs").addModifiers(PRIVATE).build(); FieldSpec instanceField = FieldSpec.builder(ClassName.get(PACKAGE_NAME, CLASS_NAME), "barricadeConfig") .addModifiers(PRIVATE, STATIC) .build(); MethodSpec.Builder instanceMethodBuilder = generateGetInstanceMethodBuilder(); MethodSpec.Builder constructorMethodBuilder = generateConstructorBuilder(configs, messager); MethodSpec.Builder valuesMethod = generateGetConfigsMethodBuilder(); MethodSpec.Builder getResponseMethodBuilder = generateGetResponseMethodBuilder(); classBuilder.addType(generateEndpointsInnerClass(configs.keySet())); classBuilder.addType(generateResponsesInnerClass(configs)); classBuilder.addField(instanceField); classBuilder.addField(valuesField); classBuilder.addMethod(instanceMethodBuilder.build()); classBuilder.addMethod(constructorMethodBuilder.build()); classBuilder.addMethod(valuesMethod.build()); classBuilder.addMethod(getResponseMethodBuilder.build()); classBuilder.addSuperinterface(IBarricadeConfig.class); JavaFile.Builder javaFileBuilder = builder(PACKAGE_NAME, classBuilder.build()); JavaFile javaFile = javaFileBuilder.build(); javaFile.writeTo(processingEnv.getFiler()); messager.printMessage(Diagnostic.Kind.NOTE, "Code generation complete!"); } private static TypeSpec generateEndpointsInnerClass(Set<String> endPoints) { TypeSpec.Builder classBuilder = classBuilder(ENDPOINTS_CLASS_NAME).addModifiers(PUBLIC, STATIC, FINAL); for (String endPoint : endPoints) { FieldSpec valuesField = FieldSpec.builder(String.class, endPoint.toUpperCase().replace(" ", "")) .addModifiers(PUBLIC, STATIC, FINAL) .initializer("$S", endPoint) .build(); classBuilder.addField(valuesField); } return classBuilder.build(); } private static TypeSpec generateResponsesInnerClass( HashMap<String, BarricadeResponseSet> configs) { TypeSpec.Builder classBuilder = classBuilder(RESPONSES_CLASS_NAME).addModifiers(PUBLIC, STATIC, FINAL); for (String endpoint : configs.keySet()) { classBuilder.addType( generateEndpointsResponsesInnerClass(endpoint, configs.get(endpoint).responses)); } return classBuilder.build(); } private static TypeSpec generateEndpointsResponsesInnerClass(String endpoint, List<BarricadeResponse> responses) { TypeSpec.Builder classBuilder = classBuilder(StringUtils.toCamelCase(endpoint)).addModifiers(PUBLIC, STATIC, FINAL); int count = 0; for (BarricadeResponse response : responses) { FieldSpec valuesField = FieldSpec.builder(int.class, response.responseFileName.toUpperCase().replace(" ","")) .addModifiers(PUBLIC, STATIC, FINAL) .initializer("$L", count) .build(); classBuilder.addField(valuesField); count++; } return classBuilder.build(); } private static MethodSpec.Builder generateGetConfigsMethodBuilder() { return MethodSpec.methodBuilder("getConfigs") .returns(TYPE_CONFIG) .addModifiers(PUBLIC) .addStatement("return configs"); } private static MethodSpec.Builder generateConstructorBuilder( HashMap<String, BarricadeResponseSet> values, Messager messager) { MethodSpec.Builder methodBuilder = 
MethodSpec.constructorBuilder().addModifiers(PUBLIC); methodBuilder.addStatement("configs = new HashMap<>()"); for (Map.Entry<String, BarricadeResponseSet> entry : values.entrySet()) { BarricadeResponseSet barricadeResponseSet = entry.getValue(); String listName = "barricadeResponsesFor" + entry.getKey(); methodBuilder.addStatement("$T<$T> " + listName + " = new $T<>()", List.class, BarricadeResponse.class, ArrayList.class); for (BarricadeResponse barricadeResponse : barricadeResponseSet.responses) { methodBuilder.addStatement(listName + ".add(new $T($L, $S, $S))", BarricadeResponse.class, barricadeResponse.statusCode, barricadeResponse.responseFileName, barricadeResponse.contentType); } methodBuilder.addStatement( "configs.put($S, new $T(" + listName + ", " + barricadeResponseSet.defaultIndex + "))", entry.getKey(), TYPE_BARRICADE_RESPONSE_SET); } return methodBuilder; } private static MethodSpec.Builder generateGetInstanceMethodBuilder() { return MethodSpec.methodBuilder("getInstance") .returns(ClassName.get(PACKAGE_NAME, CLASS_NAME)) .addModifiers(PUBLIC, STATIC) .addStatement("return barricadeConfig = barricadeConfig != null? barricadeConfig:" + " new BarricadeConfig()"); } private static MethodSpec.Builder generateGetResponseMethodBuilder() { return MethodSpec.methodBuilder("getResponseForEndpoint") .addModifiers(PUBLIC) .addParameter(String.class, "endpoint") .returns(BarricadeResponse.class) .addStatement("$T responseSet = configs.get(endpoint)", BarricadeResponseSet.class) .addStatement("return responseSet.responses.get(responseSet.defaultIndex)"); } }
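For a concrete picture of what CodeGenerator emits, the sketch below approximates the generated BarricadeConfig for a hypothetical "login" endpoint with two canned responses. The endpoint name, response file names, status codes and content type are invented for illustration, and the exact generated formatting will differ.

package com.mutualmobile.barricade;

import com.mutualmobile.barricade.response.BarricadeResponse;
import com.mutualmobile.barricade.response.BarricadeResponseSet;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

public final class BarricadeConfig implements IBarricadeConfig {

  public static final class Endpoints {
    public static final String LOGIN = "login";
  }

  public static final class Responses {
    public static final class Login {
      public static final int LOGIN_SUCCESS = 0;
      public static final int LOGIN_FAILURE = 1;
    }
  }

  private static BarricadeConfig barricadeConfig;
  private HashMap<String, BarricadeResponseSet> configs;

  public static BarricadeConfig getInstance() {
    return barricadeConfig = barricadeConfig != null ? barricadeConfig : new BarricadeConfig();
  }

  public BarricadeConfig() {
    configs = new HashMap<>();
    List<BarricadeResponse> barricadeResponsesForlogin = new ArrayList<>();
    barricadeResponsesForlogin.add(new BarricadeResponse(200, "login_success", "application/json"));
    barricadeResponsesForlogin.add(new BarricadeResponse(401, "login_failure", "application/json"));
    configs.put("login", new BarricadeResponseSet(barricadeResponsesForlogin, 0));
  }

  public HashMap<String, BarricadeResponseSet> getConfigs() {
    return configs;
  }

  public BarricadeResponse getResponseForEndpoint(String endpoint) {
    BarricadeResponseSet responseSet = configs.get(endpoint);
    return responseSet.responses.get(responseSet.defaultIndex);
  }
}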
package org.ovirt.engine.api.restapi.resource;

import javax.ws.rs.core.Response;

import org.ovirt.engine.api.model.GlusterVolume;
import org.ovirt.engine.api.model.GlusterVolumes;
import org.ovirt.engine.api.resource.GlusterVolumeResource;
import org.ovirt.engine.api.resource.GlusterVolumesResource;
import org.ovirt.engine.core.common.action.CreateGlusterVolumeParameters;
import org.ovirt.engine.core.common.action.VdcActionType;
import org.ovirt.engine.core.common.action.VdcReturnValueBase;
import org.ovirt.engine.core.common.action.VdsGroupParametersBase;
import org.ovirt.engine.core.common.businessentities.GlusterVolumeEntity;
import org.ovirt.engine.core.compat.Guid;

public class BackendGlusterVolumesResource
        extends AbstractBackendCollectionResource<GlusterVolume, org.ovirt.engine.core.common.businessentities.GlusterVolumeEntity>
        implements GlusterVolumesResource {

    static final String[] SUB_COLLECTIONS = { "bricks", "options" };

    private String clusterId;

    public BackendGlusterVolumesResource(String clusterId) {
        super(GlusterVolume.class,
                org.ovirt.engine.core.common.businessentities.GlusterVolumeEntity.class,
                SUB_COLLECTIONS);
        setClusterId(clusterId);
    }

    @Override
    public GlusterVolumes list() {
        VdcReturnValueBase result = backend.RunAction(VdcActionType.ListGlusterVolumes,
                new VdsGroupParametersBase(Guid.createGuidFromString(getClusterId())));
        GlusterVolumeEntity[] volumes = (GlusterVolumeEntity[]) result.getActionReturnValue();
        return mapCollection(volumes);
    }

    protected GlusterVolumes mapCollection(GlusterVolumeEntity[] entities) {
        GlusterVolumes collection = new GlusterVolumes();
        for (GlusterVolumeEntity entity : entities) {
            collection.getGlusterVolumes().add(populate(map(entity), entity));
        }
        return collection;
    }

    @Override
    @SingleEntityResource
    public GlusterVolumeResource getGlusterVolumeSubResource(String id) {
        return inject(new BackendGlusterVolumeResource(id, this));
    }

    @Override
    public Response add(GlusterVolume volume) {
        validateParameters(volume, "volumeName", "volumeType", "bricks");
        try {
            GlusterVolumeEntity volumeEntity =
                    getMapper(GlusterVolume.class, GlusterVolumeEntity.class).map(volume, null);
            return performAction(VdcActionType.CreateGlusterVolume,
                    new CreateGlusterVolumeParameters(Guid.createGuidFromString(getClusterId()), volumeEntity));
        } catch (Exception e) {
            return handleError(e, false);
        }
    }

    public String getClusterId() {
        return clusterId;
    }

    public void setClusterId(String clusterId) {
        this.clusterId = clusterId;
    }

    @Override
    protected Response performRemove(String id) {
        // TODO Invoke VDSM to remove the volume
        return null;
    }
}
package com.exedio.cope; public class ReportTest extends DatabaseLibTest { private static final String TABLE1 = "SumItem"; private static final String TABLE1X = "SumItemX"; private static final String COLUMN1 = "num2"; private static final String COLUMN1X = "num2X"; public static final int CHECK = ReportConstraint.TYPE_CHECK; public static final int PK = ReportConstraint.TYPE_PRIMARY_KEY; public static final int FK = ReportConstraint.TYPE_FOREIGN_KEY; public static final int UNIQUE = ReportConstraint.TYPE_UNIQUE; public void testReport() { final String column1Type; { final Report report = model.reportDatabase(); final ReportTable table = report.getTable(TABLE1); assertNotNull(table); assertEquals(true, table.required()); assertEquals(true, table.exists()); assertEquals(null, table.getError()); assertEquals(Report.COLOR_OK, table.getParticularColor()); final ReportColumn column = table.getColumn(COLUMN1); assertEquals(true, column.required()); assertEquals(true, column.exists()); assertEquals(null, column.getError()); assertEquals(Report.COLOR_OK, column.getParticularColor()); column1Type = column.getType(); assertNotNull(column1Type); column.renameTo(COLUMN1X); } // COLUMN RENAMED { final Report report = model.reportDatabase(); final ReportTable table = report.getTable(TABLE1); assertNotNull(table); assertEquals(true, table.required()); assertEquals(true, table.exists()); assertEquals(null, table.getError()); assertEquals(Report.COLOR_OK, table.getParticularColor()); { final ReportColumn column = table.getColumn(COLUMN1); assertEquals(true, column.required()); assertEquals(false, column.exists()); assertEquals("missing", column.getError()); assertEquals(Report.COLOR_ERROR, column.getParticularColor()); assertEquals(column1Type, column.getType()); } { final ReportColumn columnX = table.getColumn(COLUMN1X); assertEquals(false, columnX.required()); assertEquals(true, columnX.exists()); assertEquals("not used", columnX.getError()); assertEquals(Report.COLOR_WARNING, columnX.getParticularColor()); assertEquals(column1Type, columnX.getType()); columnX.renameTo(COLUMN1); } } { final Report report = model.reportDatabase(); final ReportTable table = report.getTable(TABLE1); assertNotNull(table); assertEquals(true, table.required()); assertEquals(true, table.exists()); assertEquals(null, table.getError()); assertEquals(Report.COLOR_OK, table.getParticularColor()); final ReportColumn column = table.getColumn(COLUMN1); assertEquals(true, column.required()); assertEquals(true, column.exists()); assertEquals(null, column.getError()); assertEquals(Report.COLOR_OK, column.getParticularColor()); assertEquals(column1Type, column.getType()); column.drop(); } // COLUMN DROPPED { final Report report = model.reportDatabase(); final ReportTable table = report.getTable(TABLE1); assertNotNull(table); assertEquals(true, table.required()); assertEquals(true, table.exists()); assertEquals(null, table.getError()); assertEquals(Report.COLOR_OK, table.getParticularColor()); final ReportColumn column = table.getColumn(COLUMN1); assertEquals(true, column.required()); assertEquals(false, column.exists()); assertEquals("missing", column.getError()); assertEquals(Report.COLOR_ERROR, column.getParticularColor()); assertEquals(column1Type, column.getType()); column.create(); } { final Report report = model.reportDatabase(); final ReportTable table = report.getTable(TABLE1); assertNotNull(table); assertEquals(true, table.required()); assertEquals(true, table.exists()); assertEquals(null, table.getError()); 
assertEquals(Report.COLOR_OK, table.getParticularColor()); final ReportColumn column = table.getColumn(COLUMN1); assertEquals(true, column.required()); assertEquals(true, column.exists()); assertEquals(null, column.getError()); assertEquals(Report.COLOR_OK, column.getParticularColor()); assertEquals(column1Type, column.getType()); table.renameTo(TABLE1X); } // TABLE RENAMED { final Report report = model.reportDatabase(); { final ReportTable table = report.getTable(TABLE1); assertNotNull(table); assertEquals(true, table.required()); assertEquals(false, table.exists()); assertEquals("MISSING !!!", table.getError()); assertEquals(Report.COLOR_ERROR, table.getParticularColor()); final ReportColumn column = table.getColumn(COLUMN1); assertEquals(true, column.required()); assertEquals(false, column.exists()); assertEquals("missing", column.getError()); assertEquals(Report.COLOR_ERROR, column.getParticularColor()); assertEquals(column1Type, column.getType()); } { final ReportTable tableX = report.getTable(TABLE1X); assertNotNull(tableX); assertEquals(false, tableX.required()); assertEquals(true, tableX.exists()); assertEquals("not used", tableX.getError()); assertEquals(Report.COLOR_WARNING, tableX.getParticularColor()); final ReportColumn column = tableX.getColumn(COLUMN1); assertEquals(false, column.required()); assertEquals(true, column.exists()); assertEquals("not used", column.getError()); assertEquals(Report.COLOR_WARNING, column.getParticularColor()); assertEquals(column1Type, column.getType()); tableX.renameTo(TABLE1); } } { final Report report = model.reportDatabase(); final ReportTable table = report.getTable(TABLE1); assertNotNull(table); assertEquals(true, table.required()); assertEquals(true, table.exists()); assertEquals(null, table.getError()); assertEquals(Report.COLOR_OK, table.getParticularColor()); final ReportColumn column = table.getColumn(COLUMN1); assertEquals(true, column.required()); assertEquals(true, column.exists()); assertEquals(null, column.getError()); assertEquals(Report.COLOR_OK, column.getParticularColor()); assertEquals(column1Type, column.getType()); table.drop(); } // TABLE DROPPED { final Report report = model.reportDatabase(); { final ReportTable table = report.getTable(TABLE1); assertNotNull(table); assertEquals(true, table.required()); assertEquals(false, table.exists()); assertEquals("MISSING !!!", table.getError()); assertEquals(Report.COLOR_ERROR, table.getParticularColor()); final ReportColumn column = table.getColumn(COLUMN1); assertEquals(true, column.required()); assertEquals(false, column.exists()); assertEquals("missing", column.getError()); assertEquals(Report.COLOR_ERROR, column.getParticularColor()); assertEquals(column1Type, column.getType()); table.create(); } } { final Report report = model.reportDatabase(); final ReportTable table = report.getTable(TABLE1); assertNotNull(table); assertEquals(true, table.required()); assertEquals(true, table.exists()); assertEquals(null, table.getError()); assertEquals(Report.COLOR_OK, table.getParticularColor()); final ReportColumn column = table.getColumn(COLUMN1); assertEquals(true, column.required()); assertEquals(true, column.exists()); assertEquals(null, column.getError()); assertEquals(Report.COLOR_OK, column.getParticularColor()); assertEquals(column1Type, column.getType()); } { assertEquals(!mysql, model.supportsCheckConstraints()); final Report report = model.reportDatabase(); final ReportTable attributeItem = report.getTable("AttributeItem"); assertNotNull(attributeItem); assertEquals(null, 
attributeItem.getError()); assertEquals(Report.COLOR_OK, attributeItem.getParticularColor()); assertConstraint(attributeItem, CHECK, "AttrItem_somNotNullStr_Ck", protect("someNotNullString")+" IS NOT NULL"); assertConstraint(attributeItem, CHECK, "AttribuItem_someBoolea_Ck", "("+protect("someBoolean")+" IN (0,1)) OR ("+protect("someBoolean")+" IS NULL)"); assertConstraint(attributeItem, CHECK, "AttrItem_somNotNullBoo_Ck", "("+protect("someNotNullBoolean")+" IS NOT NULL) AND ("+protect("someNotNullBoolean")+" IN (0,1))"); assertConstraint(attributeItem, CHECK, "AttribuItem_someEnumer_Ck", "("+protect("someEnumeration")+" IN (100,200,300)) OR ("+protect("someEnumeration")+" IS NULL)"); assertConstraint(attributeItem, CHECK, "AttrItem_somNotNullEnu_Ck", "("+protect("someNotNullEnumeration")+" IS NOT NULL) AND ("+protect("someNotNullEnumeration")+" IN (100,200,300))"); assertConstraint(attributeItem, CHECK, "AttriItem_someDataMajo_Ck", "((LENGTH("+protect("someDataMajor")+")>=1) AND (LENGTH("+protect("someDataMajor")+")<=30)) OR ("+protect("someDataMajor")+" IS NULL)"); assertConstraint(attributeItem, PK, "AttributeItem_Pk", null); assertConstraint(attributeItem, FK, "AttributeItem_someItem_Fk", null); final ReportTable uniqueItem = report.getTable("ItemWithSingleUnique"); assertNotNull(uniqueItem); assertEquals(null, uniqueItem.getError()); assertEquals(Report.COLOR_OK, uniqueItem.getParticularColor()); assertConstraint(uniqueItem, UNIQUE, "ItemWithSingUni_unStr_Unq", "("+protect("uniqueString")+")"); final ReportTable doubleUniqueItem = report.getTable("ItemWithDoubleUnique"); assertNotNull(doubleUniqueItem); assertEquals(null, doubleUniqueItem.getError()); assertEquals(Report.COLOR_OK, doubleUniqueItem.getParticularColor()); assertConstraint(doubleUniqueItem, UNIQUE, "ItemWithDoubUni_doUni_Unq", "("+protect("string")+","+protect("integer")+")"); final ReportTable stringItem = report.getTable("StringItem"); assertNotNull(stringItem); assertEquals(null, stringItem.getError()); assertEquals(Report.COLOR_OK, stringItem.getParticularColor()); final ReportColumn min4Max8 = stringItem.getColumn("min4Max8"); assertEquals(null, min4Max8.getError()); assertEquals(Report.COLOR_OK, min4Max8.getParticularColor()); if(hsqldb) assertEquals("varchar(8)", min4Max8.getType()); else if(mysql) assertEquals("varchar(8) binary", min4Max8.getType()); else assertEquals("VARCHAR2(8)", min4Max8.getType()); assertConstraint(stringItem, CHECK, "StringItem_min4_Ck", "(LENGTH("+protect("min4")+")>=4) OR ("+protect("min4")+" IS NULL)"); assertConstraint(stringItem, CHECK, "StringItem_max4_Ck", "(LENGTH("+protect("max4")+")<=4) OR ("+protect("max4")+" IS NULL)"); assertConstraint(stringItem, CHECK, "StringItem_min4Max8_Ck", "((LENGTH("+protect("min4Max8")+")>=4) AND (LENGTH("+protect("min4Max8")+")<=8)) OR ("+protect("min4Max8")+" IS NULL)"); } } private void assertConstraint(final ReportTable table, final int constraintType, final String constraintName, final String requiredCondition) { final ReportConstraint constraint = table.getConstraint(constraintName); if(model.supportsCheckConstraints() || constraintType!=CHECK) { assertNotNull("no such constraint "+constraintName+", but has "+table.getConstraints(), constraint); assertEquals(constraintName, constraintType, constraint.type); assertEquals(constraintName, requiredCondition, constraint.requiredCondition); assertEquals(constraintName, null, constraint.getError()); assertEquals(constraintName, Report.COLOR_OK, constraint.getParticularColor()); } else 
assertEquals(constraintName, null, constraint); } private final String protect(final String name) { return model.getDatabase().protectName(name); } }
package whelk.importer; import io.prometheus.client.Counter; import se.kb.libris.util.marc.Datafield; import se.kb.libris.util.marc.Field; import se.kb.libris.util.marc.MarcRecord; import whelk.Document; import whelk.IdGenerator; import whelk.JsonLd; import whelk.Whelk; import whelk.component.ElasticSearch; import whelk.component.PostgreSQLComponent; import whelk.converter.MarcJSONConverter; import whelk.converter.marc.MarcFrameConverter; import whelk.exception.TooHighEncodingLevelException; import whelk.filter.LinkFinder; import whelk.util.LegacyIntegrationTools; import whelk.util.PropertyLoader; import whelk.triples.*; import java.io.IOException; import java.io.UncheckedIOException; import java.sql.*; import java.util.*; class XL { private static final String ENC_PRELIMINARY_STATUS = "marc:PartialPreliminaryLevel"; private static final String ENC_PREPUBLICATION_STATUS = "marc:PrepublicationLevel"; private static final String ENC_ABBREVIVATED_STATUS = "marc:AbbreviatedLevel"; private static final String ENC_MINMAL_STATUS = "marc:MinimalLevel"; private Whelk m_whelk; private LinkFinder m_linkfinder; private Parameters m_parameters; private Properties m_properties; private MarcFrameConverter m_marcFrameConverter; private static boolean verbose = false; // The predicates listed here are those that must always be represented as lists in jsonld, even if the list // has only a single member. private Set<String> m_repeatableTerms; private final static String IMPORT_SYSTEM_CODE = "batch import"; XL(Parameters parameters) throws IOException { m_parameters = parameters; verbose = m_parameters.getVerbose(); m_properties = PropertyLoader.loadProperties("secret"); m_whelk = Whelk.createLoadedSearchWhelk(m_properties); m_repeatableTerms = m_whelk.getJsonld().getRepeatableTerms(); m_marcFrameConverter = m_whelk.createMarcFrameConverter(); m_linkfinder = new LinkFinder(m_whelk.getStorage()); } /** * Write a ISO2709 MarcRecord to LibrisXL. returns a resource ID if the resulting document (merged or new) was in "bib". * This ID should then be passed (as 'relatedWithBibResourceId') when importing any subsequent related holdings post. * Returns null when supplied a hold post. */ String importISO2709(MarcRecord incomingMarcRecord, String relatedWithBibResourceId, Counter importedBibRecords, Counter importedHoldRecords, Counter enrichedBibRecords, Counter enrichedHoldRecords, Counter encounteredMulBibs) throws Exception { String collection = "bib"; // assumption if (incomingMarcRecord.getLeader(6) == 'u' || incomingMarcRecord.getLeader(6) == 'v' || incomingMarcRecord.getLeader(6) == 'x' || incomingMarcRecord.getLeader(6) == 'y') collection = "hold"; Set<String> duplicateIDs = getDuplicates(incomingMarcRecord, collection, relatedWithBibResourceId); String resultingResourceId = null; //System.err.println("Incoming [" + collection + "] document had: " + duplicateIDs.size() + " existing duplicates:\n" + duplicateIDs); // If an incoming holding record is marked deleted, attempt to find any duplicates for it in Libris and delete them. 
if (collection.equals("hold") && incomingMarcRecord.getLeader(5) == 'd') { for (String id : duplicateIDs) m_whelk.remove(id, IMPORT_SYSTEM_CODE, null); return null; } if (duplicateIDs.size() == 0) // No coinciding documents, simple import { resultingResourceId = importNewRecord(incomingMarcRecord, collection, relatedWithBibResourceId, null); if (collection.equals("bib")) importedBibRecords.inc(); else importedHoldRecords.inc(); } else if (duplicateIDs.size() == 1) // Enrich ("merge") or replace { if (collection.equals("bib")) { if ( m_parameters.getReplaceBib() ) { String idToReplace = duplicateIDs.iterator().next(); resultingResourceId = importNewRecord(incomingMarcRecord, collection, relatedWithBibResourceId, idToReplace); importedBibRecords.inc(); } else // Merge bib { resultingResourceId = enrichRecord((String) duplicateIDs.toArray()[0], incomingMarcRecord, collection, relatedWithBibResourceId); enrichedBibRecords.inc(); } } else // collection = hold { if ( m_parameters.getReplaceHold() ) // Replace hold { String idToReplace = duplicateIDs.iterator().next(); resultingResourceId = importNewRecord(incomingMarcRecord, collection, relatedWithBibResourceId, idToReplace); importedHoldRecords.inc(); } else // Merge hold { resultingResourceId = enrichRecord((String) duplicateIDs.toArray()[0], incomingMarcRecord, collection, relatedWithBibResourceId); enrichedHoldRecords.inc(); } } } else { // Multiple coinciding documents. encounteredMulBibs.inc(); if (m_parameters.getEnrichMulDup()) { for (String id : duplicateIDs) { enrichRecord( id, incomingMarcRecord, collection, relatedWithBibResourceId ); } } if (collection.equals("bib")) { // In order to keep the program deterministic, the bib post to which subsequent holdings should attach // when there are multiple duplicates is defined as the one with the "lowest" alpha numeric id. List<String> duplicateList = new ArrayList<>(duplicateIDs); Collections.sort(duplicateList); String selectedDuplicateId = duplicateList.get(0); if (!selectedDuplicateId.startsWith(Document.getBASE_URI().toString())) selectedDuplicateId = Document.getBASE_URI().toString() + selectedDuplicateId; resultingResourceId = m_whelk.getStorage().getThingId(selectedDuplicateId); } else resultingResourceId = null; } return resultingResourceId; } private String importNewRecord(MarcRecord marcRecord, String collection, String relatedWithBibResourceId, String replaceSystemId) { String incomingId = IdGenerator.generate(); if (replaceSystemId != null) incomingId = replaceSystemId; Document rdfDoc = convertToRDF(marcRecord, incomingId); if (collection.equals("hold")) rdfDoc.setHoldingFor(relatedWithBibResourceId); if (!m_parameters.getReadOnly()) { rdfDoc.setRecordStatus(ENC_PRELIMINARY_STATUS); // Doing a replace (but preserving old IDs) if (replaceSystemId != null) { try { m_whelk.getStorage().storeAtomicUpdate(replaceSystemId, false, IMPORT_SYSTEM_CODE, null, (Document doc) -> { String existingEncodingLevel = doc.getEncodingLevel(); String newEncodingLevel = rdfDoc.getEncodingLevel(); if (existingEncodingLevel == null || !mayOverwriteExistingEncodingLevel(existingEncodingLevel, newEncodingLevel)) throw new TooHighEncodingLevelException(); List<String> recordIDs = doc.getRecordIdentifiers(); List<String> thingIDs = doc.getThingIdentifiers(); doc.data = rdfDoc.data; // The mainID must remain unaffected. 
doc.deepPromoteId(recordIDs.get(0)); for (String recordID : recordIDs) doc.addRecordIdentifier(recordID); for (String thingID : thingIDs) doc.addThingIdentifier(thingID); }); } catch (TooHighEncodingLevelException e) { if ( verbose ) { System.out.println("info: Not replacing id: " + replaceSystemId + ", because it no longer has encoding level marc:PartialPreliminaryLevel"); } } } else { // Doing simple "new" m_whelk.createDocument(rdfDoc, IMPORT_SYSTEM_CODE, null, collection, false); } } else { if ( verbose ) { System.out.println("info: Would now (if --live had been specified) have written the following json-ld to whelk as a new record:\n" + rdfDoc.getDataAsString()); } } if (collection.equals("bib")) return rdfDoc.getThingIdentifiers().get(0); return null; } private String enrichRecord(String ourId, MarcRecord incomingMarcRecord, String collection, String relatedWithBibResourceId) throws IOException { Document rdfDoc = convertToRDF(incomingMarcRecord, ourId); if (collection.equals("hold")) rdfDoc.setHoldingFor(relatedWithBibResourceId); if (!m_parameters.getReadOnly()) { try { m_whelk.storeAtomicUpdate(ourId, false, IMPORT_SYSTEM_CODE, null, (Document doc) -> { if (collection.equals("bib")) { String existingEncodingLevel = doc.getEncodingLevel(); String newEncodingLevel = rdfDoc.getEncodingLevel(); if (existingEncodingLevel == null || !mayOverwriteExistingEncodingLevel(existingEncodingLevel, newEncodingLevel)) throw new TooHighEncodingLevelException(); } enrich( doc, rdfDoc ); }); } catch (TooHighEncodingLevelException e) { if ( verbose ) { System.out.println("info: Not enriching id: " + ourId + ", because it no longer has encoding level marc:PartialPreliminaryLevel"); } } } else { Document doc = m_whelk.getStorage().load( ourId ); enrich( doc, rdfDoc ); if ( verbose ) { System.out.println("info: Would now (if --live had been specified) have written the following (merged) json-ld to whelk:\n"); System.out.println("id:\n" + doc.getShortId()); System.out.println("data:\n" + doc.getDataAsString()); } } if (collection.equals("bib")) return rdfDoc.getThingIdentifiers().get(0); return null; } private boolean mayOverwriteExistingEncodingLevel(String existingEncodingLevel, String newEncodingLevel) { switch (newEncodingLevel) { case ENC_PRELIMINARY_STATUS: if (existingEncodingLevel.equals(ENC_PRELIMINARY_STATUS)) return true; break; case ENC_PREPUBLICATION_STATUS: if (existingEncodingLevel.equals(ENC_PRELIMINARY_STATUS) || existingEncodingLevel.equals(ENC_PREPUBLICATION_STATUS)) // 5 || 8 return true; break; case ENC_ABBREVIVATED_STATUS: if (existingEncodingLevel.equals(ENC_PRELIMINARY_STATUS) || existingEncodingLevel.equals(ENC_PREPUBLICATION_STATUS)) // 5 || 8 return true; break; case ENC_MINMAL_STATUS: if (existingEncodingLevel.equals(ENC_PRELIMINARY_STATUS) || existingEncodingLevel.equals(ENC_PREPUBLICATION_STATUS)) // 5 || 8 return true; break; } return false; } private void enrich(Document mutableDocument, Document withDocument) { JsonldSerializer serializer = new JsonldSerializer(); List<String[]> withTriples = serializer.deserialize(withDocument.data); List<String[]> originalTriples = serializer.deserialize(mutableDocument.data); Graph originalGraph = new Graph(originalTriples); Graph withGraph = new Graph(withTriples); // This is temporary, these special rules should not be hardcoded here, but rather obtained from (presumably) // whelk-core's marcframe.json. 
Map<String, Graph.PREDICATE_RULES> specialRules = new HashMap<>(); for (String term : m_repeatableTerms) specialRules.put(term, Graph.PREDICATE_RULES.RULE_AGGREGATE); specialRules.put("created", Graph.PREDICATE_RULES.RULE_PREFER_ORIGINAL); specialRules.put("controlNumber", Graph.PREDICATE_RULES.RULE_PREFER_ORIGINAL); specialRules.put("modified", Graph.PREDICATE_RULES.RULE_PREFER_INCOMING); specialRules.put("marc:encLevel", Graph.PREDICATE_RULES.RULE_PREFER_ORIGINAL); originalGraph.enrichWith(withGraph, specialRules); Map enrichedData = JsonldSerializer.serialize(originalGraph.getTriples(), m_repeatableTerms); boolean deleteUnreferencedData = true; JsonldSerializer.normalize(enrichedData, mutableDocument.getShortId(), deleteUnreferencedData); mutableDocument.data = enrichedData; } private Document convertToRDF(MarcRecord marcRecord, String id) { while (marcRecord.getControlfields("001").size() > 0) marcRecord.getFields().remove(marcRecord.getControlfields("001").get(0)); marcRecord.addField(marcRecord.createControlfield("001", id)); Map convertedData = m_marcFrameConverter.convert(MarcJSONConverter.toJSONMap(marcRecord), id); Document convertedDocument = new Document(convertedData); convertedDocument.deepReplaceId(Document.getBASE_URI().toString()+id); m_linkfinder.normalizeIdentifiers(convertedDocument); return convertedDocument; } private Set<String> getDuplicates(MarcRecord marcRecord, String collection, String relatedWithBibResourceId) throws SQLException { switch (collection) { case "bib": return getBibDuplicates(marcRecord); case "hold": return getHoldDuplicates(marcRecord, relatedWithBibResourceId); default: return new HashSet<>(); } } private Set<String> getHoldDuplicates(MarcRecord marcRecord, String relatedWithBibResourceId) throws SQLException { Set<String> duplicateIDs = new HashSet<>(); // Assumes the post being imported carries a valid libris id in 001, and "SE-LIBR" or "LIBRIS" in 003 duplicateIDs.addAll(getDuplicatesOnLibrisID(marcRecord, "hold")); duplicateIDs.addAll(getDuplicatesOnHeldByHoldingFor(marcRecord, relatedWithBibResourceId)); return duplicateIDs; } private Set<String> getBibDuplicates(MarcRecord marcRecord) throws SQLException { Set<String> duplicateIDs = new HashSet<>(); for (Parameters.DUPLICATION_TYPE dupType : m_parameters.getDuplicationTypes()) { switch (dupType) { case DUPTYPE_ISBNA: // International Standard Book Number (only from subfield A) for (Field field : marcRecord.getFields("020")) { String isbn = DigId.grepIsbna( (Datafield) field ); if (isbn != null) { duplicateIDs.addAll(getDuplicatesOnISBN( isbn.toUpperCase() )); } } break; case DUPTYPE_ISBNZ: // International Standard Book Number (only from subfield Z) for (Field field : marcRecord.getFields("020")) { String isbn = DigId.grepIsbnz( (Datafield) field ); if (isbn != null) { duplicateIDs.addAll(getDuplicatesOnISBN( isbn.toUpperCase() )); } } break; case DUPTYPE_ISSNA: // International Standard Serial Number (only from marc 022_A) for (Field field : marcRecord.getFields("022")) { String issn = DigId.grepIssn( (Datafield) field, 'a' ); if (issn != null) { duplicateIDs.addAll(getDuplicatesOnISSN( issn.toUpperCase() )); } } break; case DUPTYPE_ISSNZ: // International Standard Serial Number (only from marc 022_Z) for (Field field : marcRecord.getFields("022")) { String issn = DigId.grepIssn( (Datafield) field, 'z' ); if (issn != null) { duplicateIDs.addAll(getDuplicatesOnISSN( issn.toUpperCase() )); } } break; case DUPTYPE_035A: // Unique id number in another system. 
duplicateIDs.addAll(getDuplicatesOn035a(marcRecord)); break; case DUPTYPE_LIBRISID: // Assumes the post being imported carries a valid libris id in 001, and "SE-LIBR" or "LIBRIS" in 003 duplicateIDs.addAll(getDuplicatesOnLibrisID(marcRecord, "bib")); break; } } return duplicateIDs; } private List<String> getDuplicatesOnLibrisID(MarcRecord marcRecord, String collection) throws SQLException { String librisId = DigId.grepLibrisId(marcRecord); if (librisId == null) return new ArrayList<>(); // completely numeric? = classic voyager id. // In theory an xl id could (though insanely unlikely) also be numeric :( if (librisId.matches("[0-9]+")) { librisId = "http://libris.kb.se/"+collection+"/"+librisId; } else if ( ! librisId.startsWith(Document.getBASE_URI().toString())) { librisId = Document.getBASE_URI().toString() + librisId; } try(Connection connection = m_whelk.getStorage().getConnection(); PreparedStatement statement = getOnId_ps(connection, librisId); ResultSet resultSet = statement.executeQuery()) { return collectIDs(resultSet); } } private List<String> getDuplicatesOn035a(MarcRecord marcRecord) throws SQLException { List<String> results = new ArrayList<>(); for (Field field : marcRecord.getFields("035")) { String systemNumber = DigId.grep035a( (Datafield) field ); try(Connection connection = m_whelk.getStorage().getConnection(); PreparedStatement statement = getOnSystemNumber_ps(connection, systemNumber); ResultSet resultSet = statement.executeQuery()) { results.addAll( collectIDs(resultSet) ); } } return results; } private List<String> getDuplicatesOnISBN(String isbn) throws SQLException { if (isbn == null) return new ArrayList<>(); String numericIsbn = isbn.replaceAll("-", ""); try(Connection connection = m_whelk.getStorage().getConnection(); PreparedStatement statement = getOnISBN_ps(connection, numericIsbn); ResultSet resultSet = statement.executeQuery()) { return collectIDs(resultSet); } } private List<String> getDuplicatesOnISSN(String issn) throws SQLException { if (issn == null) return new ArrayList<>(); try(Connection connection = m_whelk.getStorage().getConnection(); PreparedStatement statement = getOnISSN_ps(connection, issn); ResultSet resultSet = statement.executeQuery()) { return collectIDs(resultSet); } } private List<String> getDuplicatesOnHeldByHoldingFor(MarcRecord marcRecord, String relatedWithBibResourceId) throws SQLException { if (marcRecord.getFields("852").size() < 1) return new ArrayList<>(); Datafield df = (Datafield) marcRecord.getFields("852").get(0); if (df.getSubfields("b").size() < 1) return new ArrayList<>(); String sigel = df.getSubfields("b").get(0).getData(); String library = LegacyIntegrationTools.legacySigelToUri(sigel); try(Connection connection = m_whelk.getStorage().getConnection(); PreparedStatement statement = getOnHeldByHoldingFor_ps(connection, library, relatedWithBibResourceId); ResultSet resultSet = statement.executeQuery()) { return collectIDs(resultSet); } } private PreparedStatement getOnId_ps(Connection connection, String id) throws SQLException { String query = "SELECT id FROM lddb__identifiers WHERE iri = ?"; PreparedStatement statement = connection.prepareStatement(query); statement.setString(1, id); return statement; } /** * "System number" is our ld equivalent of marc's 035a */ private PreparedStatement getOnSystemNumber_ps(Connection connection, String systemNumber) throws SQLException { String query = "SELECT id FROM lddb WHERE data#>'{@graph,0,identifiedBy}' @> ?"; PreparedStatement statement = connection.prepareStatement(query); 
statement.setObject(1, "[{\"@type\": \"SystemNumber\", \"value\": \"" + systemNumber + "\"}]", java.sql.Types.OTHER); return statement; } private PreparedStatement getOnISBN_ps(Connection connection, String isbn) throws SQLException { // required to be completely numeric (base 11, 0-9+x). if (!isbn.matches("[\\dxX]+")) isbn = "0"; String query = "SELECT id FROM lddb WHERE data#>'{@graph,1,identifiedBy}' @> ?"; PreparedStatement statement = connection.prepareStatement(query); statement.setObject(1, "[{\"@type\": \"ISBN\", \"value\": \"" + isbn + "\"}]", java.sql.Types.OTHER); return statement; } private PreparedStatement getOnISSN_ps(Connection connection, String issn) throws SQLException { // (base 11, 0-9+x and SINGLE hyphens only). if (!issn.matches("^(-[xX\\d]|[xX\\d])+$")) issn = "0"; String query = "SELECT id FROM lddb WHERE data#>'{@graph,1,identifiedBy}' @> ?"; PreparedStatement statement = connection.prepareStatement(query); statement.setObject(1, "[{\"@type\": \"ISSN\", \"value\": \"" + issn + "\"}]", java.sql.Types.OTHER); return statement; } private PreparedStatement getOnHeldByHoldingFor_ps(Connection connection, String heldBy, String holdingForId) throws SQLException { String libraryUri = LegacyIntegrationTools.legacySigelToUri(heldBy); // Here be dragons. The always-works query is this: /*String query = "SELECT lddb.id from lddb " + "INNER JOIN lddb__identifiers id1 ON lddb.data#>>'{@graph,1,itemOf,@id}' = id1.iri " + "INNER JOIN lddb__identifiers id2 ON id1.id = id2.id " + "WHERE " + "data#>>'{@graph,1,heldBy,@id}' = ? " + "AND " + "id2.iri = ?";*/ // This query REQUIRES that links be on the primary ID only. This works beacuse of link-finding step2, but if // that should ever change this query would break. String query = "SELECT id from lddb WHERE data#>>'{@graph,1,heldBy,@id}' = ? AND data#>>'{@graph,1,itemOf,@id}' = ? AND deleted = false"; PreparedStatement statement = connection.prepareStatement(query); statement.setString(1, libraryUri); statement.setString(2, holdingForId); return statement; } private List<String> collectIDs(ResultSet resultSet) throws SQLException { List<String> ids = new ArrayList<>(); while (resultSet.next()) { ids.add(resultSet.getString("id")); } return ids; } //private class TooHighEncodingLevelException extends RuntimeException {} }
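The javadoc on importISO2709 states that the resource id returned for a bib record should be passed as relatedWithBibResourceId when importing its related holdings. The sketch below shows that call pattern only; construction of the XL instance (which needs a Parameters object) and of the MarcRecord objects is elided, the Prometheus counter names are invented, and the class is assumed to sit in the same package because XL is package-private.

package whelk.importer;   // assumed: XL is package-private, so a caller must share its package

import io.prometheus.client.Counter;
import java.util.List;
import se.kb.libris.util.marc.MarcRecord;

// Illustrative call pattern only.
class BatchImportSketch {

    private static Counter counter(String name) {
        return Counter.build().name(name).help(name).register();
    }

    private static final Counter IMPORTED_BIB = counter("imported_bib_records");
    private static final Counter IMPORTED_HOLD = counter("imported_hold_records");
    private static final Counter ENRICHED_BIB = counter("enriched_bib_records");
    private static final Counter ENRICHED_HOLD = counter("enriched_hold_records");
    private static final Counter MUL_BIBS = counter("encountered_mul_bibs");

    static void importBibWithHoldings(XL xl, MarcRecord bib, List<MarcRecord> holdings) throws Exception {
        // The bib import returns the resource id that related holdings should attach to.
        String bibResourceId = xl.importISO2709(bib, null,
                IMPORTED_BIB, IMPORTED_HOLD, ENRICHED_BIB, ENRICHED_HOLD, MUL_BIBS);

        for (MarcRecord hold : holdings) {
            // Returns null for hold posts; bibResourceId links each holding to its bib.
            xl.importISO2709(hold, bibResourceId,
                    IMPORTED_BIB, IMPORTED_HOLD, ENRICHED_BIB, ENRICHED_HOLD, MUL_BIBS);
        }
    }
}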
package org.jfree.chart.fx; import java.awt.Graphics2D; import java.awt.Rectangle; import java.awt.geom.Point2D; import java.awt.geom.Rectangle2D; import java.util.ArrayList; import java.util.List; import javafx.scene.canvas.Canvas; import javafx.scene.control.Tooltip; import javafx.scene.input.MouseEvent; import javafx.scene.input.ScrollEvent; import org.jfree.chart.ChartMouseEvent; import org.jfree.chart.ChartRenderingInfo; import org.jfree.chart.JFreeChart; import org.jfree.chart.entity.ChartEntity; import org.jfree.chart.event.ChartChangeEvent; import org.jfree.chart.event.ChartChangeListener; import org.jfree.chart.fx.interaction.AnchorHandlerFX; import org.jfree.chart.fx.interaction.DispatchHandlerFX; import org.jfree.chart.fx.interaction.ChartMouseEventFX; import org.jfree.chart.fx.interaction.ChartMouseListenerFX; import org.jfree.chart.fx.interaction.TooltipHandlerFX; import org.jfree.chart.fx.interaction.ScrollHandlerFX; import org.jfree.chart.fx.interaction.PanHandlerFX; import org.jfree.chart.fx.interaction.MouseHandlerFX; import org.jfree.chart.plot.PlotRenderingInfo; import org.jfree.chart.util.ParamChecks; /** * A canvas for displaying a {@link JFreeChart} in JavaFX. You can use the * canvas directly to display charts, but usually the {@link ChartViewer} * class (which embeds a canvas) is a better option. * <p> * The canvas installs several default mouse handlers, if you don't like the * behaviour provided by these you can retrieve the handler by ID and * disable or remove it (the IDs are "tooltip", "scroll", "anchor", "pan" and * "dispatch"). * * <p>THE API FOR THIS CLASS IS SUBJECT TO CHANGE IN FUTURE RELEASES. This is * so that we can incorporate feedback on the (new) JavaFX support in * JFreeChart.</p> * * @since 1.0.18 */ public class ChartCanvas extends Canvas implements ChartChangeListener { /** The chart being displayed in the canvas (never null). */ private JFreeChart chart; /** * The graphics drawing context (will be an instance of FXGraphics2D). */ private Graphics2D g2; /** * The anchor point (can be null) is usually updated to reflect the most * recent mouse click and is used during chart rendering to update * crosshairs belonging to the chart. */ private Point2D anchor; /** The chart rendering info from the most recent drawing of the chart. */ private ChartRenderingInfo info; /** The tooltip object for the canvas (can be null). */ private Tooltip tooltip; /** * A flag that controls whether or not tooltips will be generated from the * chart as the mouse pointer moves over it. */ private boolean tooltipEnabled; /** Storage for registered chart mouse listeners. */ private transient List<ChartMouseListenerFX> chartMouseListeners; /** The current live handler (can be null). */ private MouseHandlerFX liveHandler; /** * The list of available live mouse handlers (can be empty but not null). */ private List<MouseHandlerFX> availableMouseHandlers; /** The auxiliary mouse handlers (can be empty but not null). */ private List<MouseHandlerFX> auxiliaryMouseHandlers; /** * Creates a new canvas to display the supplied chart in JavaFX. * * @param chart the chart ({@code null} not permitted). 
*/ public ChartCanvas(JFreeChart chart) { ParamChecks.nullNotPermitted(chart, "chart"); this.chart = chart; this.chart.addChangeListener(this); this.tooltip = null; this.tooltipEnabled = true; this.chartMouseListeners = new ArrayList<ChartMouseListenerFX>(); widthProperty().addListener(evt -> draw()); heightProperty().addListener(evt -> draw()); this.g2 = new FXGraphics2D(getGraphicsContext2D()); this.liveHandler = null; this.availableMouseHandlers = new ArrayList<MouseHandlerFX>(); this.availableMouseHandlers.add(new PanHandlerFX("pan", true, false, false, false)); this.auxiliaryMouseHandlers = new ArrayList<MouseHandlerFX>(); this.auxiliaryMouseHandlers.add(new TooltipHandlerFX("tooltip")); this.auxiliaryMouseHandlers.add(new ScrollHandlerFX("scroll")); this.auxiliaryMouseHandlers.add(new AnchorHandlerFX("anchor")); this.auxiliaryMouseHandlers.add(new DispatchHandlerFX("dispatch")); setOnMouseMoved((MouseEvent e) -> { handleMouseMoved(e); }); setOnMouseClicked((MouseEvent e) -> { handleMouseClicked(e); }); setOnMousePressed((MouseEvent e) -> { handleMousePressed(e); }); setOnMouseDragged((MouseEvent e) -> { handleMouseDragged(e); }); setOnMouseReleased((MouseEvent e) -> { handleMouseReleased(e); }); setOnScroll((ScrollEvent event) -> { handleScroll(event); }); } /** * Returns the chart that is being displayed by this node. * * @return The chart (never {@code null}). */ public JFreeChart getChart() { return this.chart; } /** * Sets the chart to be displayed by this node. * * @param chart the chart ({@code null} not permitted). */ public void setChart(JFreeChart chart) { ParamChecks.nullNotPermitted(chart, "chart"); this.chart.removeChangeListener(this); this.chart = chart; this.chart.addChangeListener(this); draw(); } /** * Returns the rendering info from the most recent drawing of the chart. * * @return The rendering info (possibly {@code null}). */ public ChartRenderingInfo getRenderingInfo() { return this.info; } /** * Returns the flag that controls whether or not tooltips are enabled. * The default value is {@code true}. The {@link TooltipHandlerFX} * class will only update the tooltip if this flag is set to * {@code true}. * * @return The flag. */ public boolean isTooltipEnabled() { return this.tooltipEnabled; } /** * Sets the flag that controls whether or not tooltips are enabled. * * @param tooltipEnabled the new flag value. */ public void setTooltipEnabled(boolean tooltipEnabled) { this.tooltipEnabled = tooltipEnabled; } /** * Set the anchor point and forces a redraw of the chart (the anchor point * is used to determine the position of the crosshairs on the chart, if * they are visible). * * @param anchor the anchor ({@code null} permitted). */ public void setAnchor(Point2D anchor) { this.anchor = anchor; this.chart.setNotify(true); // force a redraw } /** * Registers a listener to receive {@link ChartMouseEvent} notifications. * * @param listener the listener ({@code null} not permitted). */ public void addChartMouseListener(ChartMouseListenerFX listener) { ParamChecks.nullNotPermitted(listener, "listener"); this.chartMouseListeners.add(listener); } /** * Removes a listener from the list of objects listening for chart mouse * events. * * @param listener the listener. */ public void removeChartMouseListener(ChartMouseListenerFX listener) { this.chartMouseListeners.remove(listener); } /** * Returns the mouse handler with the specified ID, or {@code null} if * there is no handler with that ID. This method will look for handlers * in both the regular and auxiliary handler lists. 
* * @param id the ID ({@code null} not permitted). * * @return The handler with the specified ID */ public MouseHandlerFX getMouseHandler(String id) { for (MouseHandlerFX h: this.availableMouseHandlers) { if (h.getID().equals(id)) { return h; } } for (MouseHandlerFX h: this.auxiliaryMouseHandlers) { if (h.getID().equals(id)) { return h; } } return null; } /** * Adds a mouse handler to the list of available handlers (handlers that * are candidates to take the position of live handler). The handler must * have an ID that uniquely identifies it amongst the handlers registered * with this canvas. * * @param handler the handler ({@code null} not permitted). */ public void addMouseHandler(MouseHandlerFX handler) { if (!this.hasUniqueID(handler)) { throw new IllegalArgumentException( "There is already a handler with that ID (" + handler.getID() + ")."); } this.availableMouseHandlers.add(handler); } /** * Removes a handler from the list of available handlers. * * @param handler the handler ({@code null} not permitted). */ public void removeMouseHandler(MouseHandlerFX handler) { this.availableMouseHandlers.remove(handler); } /** * Validates that the specified handler has an ID that uniquely identifies * it amongst the existing handlers for this canvas. * * @param handler the handler ({@code null} not permitted). * * @return A boolean. */ private boolean hasUniqueID(MouseHandlerFX handler) { for (MouseHandlerFX h: this.availableMouseHandlers) { if (handler.getID().equals(h.getID())) { return false; } } for (MouseHandlerFX h: this.auxiliaryMouseHandlers) { if (handler.getID().equals(h.getID())) { return false; } } return true; } /** * Clears the current live handler. This method is intended for use by the * handlers themselves, you should not call it directly. */ public void clearLiveHandler() { this.liveHandler = null; } /** * Draws the content of the canvas and updates the * {@code renderingInfo} attribute with the latest rendering * information. */ public final void draw() { getGraphicsContext2D().save(); double width = getWidth(); double height = getHeight(); if (width > 0 && height > 0) { this.info = new ChartRenderingInfo(); this.chart.draw(this.g2, new Rectangle((int) width, (int) height), this.anchor, this.info); } getGraphicsContext2D().restore(); this.anchor = null; } /** * Returns the data area (the area inside the axes) for the plot or subplot. * * @param point the selection point (for subplot selection). * * @return The data area. */ public Rectangle2D findDataArea(Point2D point) { PlotRenderingInfo plotInfo = this.info.getPlotInfo(); Rectangle2D result; if (plotInfo.getSubplotCount() == 0) { result = plotInfo.getDataArea(); } else { int subplotIndex = plotInfo.getSubplotIndex(point); if (subplotIndex == -1) { return null; } result = plotInfo.getSubplotInfo(subplotIndex).getDataArea(); } return result; } /** * Return {@code true} to indicate the canvas is resizable. * * @return {@code true}. */ @Override public boolean isResizable() { return true; } /** * Sets the tooltip text, with the (x, y) location being used for the * anchor. If the text is {@code null}, no tooltip will be displayed. * This method is intended for calling by the {@link TooltipHandlerFX} * class, you won't normally call it directly. * * @param text the text ({@code null} permitted). * @param x the x-coordinate of the mouse pointer. * @param y the y-coordinate of the mouse pointer. 
     */
    public void setTooltip(String text, double x, double y) {
        if (text != null) {
            if (this.tooltip == null) {
                this.tooltip = new Tooltip(text);
                Tooltip.install(this, this.tooltip);
            } else {
                this.tooltip.setText(text);
                this.tooltip.setAnchorX(x);
                this.tooltip.setAnchorY(y);
            }
        } else {
            Tooltip.uninstall(this, this.tooltip);
            this.tooltip = null;
        }
    }

    /**
     * Handles a mouse pressed event by (1) selecting a live handler if one
     * is not already selected, (2) passing the event to the live handler if
     * there is one, and (3) passing the event to all enabled auxiliary
     * handlers.
     *
     * @param e the mouse event.
     */
    private void handleMousePressed(MouseEvent e) {
        if (this.liveHandler == null) {
            for (MouseHandlerFX handler: this.availableMouseHandlers) {
                if (handler.isEnabled() && handler.hasMatchingModifiers(e)) {
                    this.liveHandler = handler;
                }
            }
        }
        if (this.liveHandler != null) {
            this.liveHandler.handleMousePressed(this, e);
        }
        // pass on the event to the auxiliary handlers
        for (MouseHandlerFX handler: this.auxiliaryMouseHandlers) {
            if (handler.isEnabled()) {
                handler.handleMousePressed(this, e);
            }
        }
    }

    /**
     * Handles a mouse moved event by passing it on to the registered handlers.
     *
     * @param e the mouse event.
     */
    private void handleMouseMoved(MouseEvent e) {
        if (this.liveHandler != null && this.liveHandler.isEnabled()) {
            this.liveHandler.handleMouseMoved(this, e);
        }
        for (MouseHandlerFX handler: this.auxiliaryMouseHandlers) {
            if (handler.isEnabled()) {
                handler.handleMouseMoved(this, e);
            }
        }
    }

    /**
     * Handles a mouse dragged event by passing it on to the registered
     * handlers.
     *
     * @param e the mouse event.
     */
    private void handleMouseDragged(MouseEvent e) {
        if (this.liveHandler != null && this.liveHandler.isEnabled()) {
            this.liveHandler.handleMouseDragged(this, e);
        }
        // pass on the event to the auxiliary handlers
        for (MouseHandlerFX handler: this.auxiliaryMouseHandlers) {
            if (handler.isEnabled()) {
                handler.handleMouseDragged(this, e);
            }
        }
    }

    /**
     * Handles a mouse released event by passing it on to the registered
     * handlers.
     *
     * @param e the mouse event.
     */
    private void handleMouseReleased(MouseEvent e) {
        if (this.liveHandler != null && this.liveHandler.isEnabled()) {
            this.liveHandler.handleMouseReleased(this, e);
        }
        // pass on the event to the auxiliary handlers
        for (MouseHandlerFX handler: this.auxiliaryMouseHandlers) {
            if (handler.isEnabled()) {
                handler.handleMouseReleased(this, e);
            }
        }
    }

    /**
     * Handles a mouse clicked event by passing it on to the registered
     * handlers.
     *
     * @param e the mouse event.
     */
    private void handleMouseClicked(MouseEvent e) {
        if (this.liveHandler != null && this.liveHandler.isEnabled()) {
            this.liveHandler.handleMouseClicked(this, e);
        }
        // pass on the event to the auxiliary handlers
        for (MouseHandlerFX handler: this.auxiliaryMouseHandlers) {
            if (handler.isEnabled()) {
                handler.handleMouseClicked(this, e);
            }
        }
    }

    /**
     * Handles a scroll event by passing it on to the registered handlers.
     *
     * @param e the scroll event.
     */
    protected void handleScroll(ScrollEvent e) {
        if (this.liveHandler != null && this.liveHandler.isEnabled()) {
            this.liveHandler.handleScroll(this, e);
        }
        for (MouseHandlerFX handler: this.auxiliaryMouseHandlers) {
            if (handler.isEnabled()) {
                handler.handleScroll(this, e);
            }
        }
    }

    /**
     * Receives a notification from the chart that it has been changed and
     * responds by redrawing the chart entirely.
     *
     * @param event event information.
     */
    @Override
    public void chartChanged(ChartChangeEvent event) {
        draw();
    }

    /**
     * Dispatches a mouse moved event to all registered chart mouse listeners,
     * wrapping the chart entity (if any) found at the given point.
     *
     * @param point the canvas location of the mouse pointer.
     * @param e the mouse event.
     */
    public void dispatchMouseMovedEvent(Point2D point, MouseEvent e) {
        double x = point.getX();
        double y = point.getY();
        ChartEntity entity = this.info.getEntityCollection().getEntity(x, y);
        ChartMouseEventFX event = new ChartMouseEventFX(this.chart, e, entity);
        for (ChartMouseListenerFX listener : this.chartMouseListeners) {
            listener.chartMouseMoved(event);
        }
    }

    /**
     * Dispatches a mouse clicked event to all registered chart mouse
     * listeners, wrapping the chart entity (if any) found at the given point.
     *
     * @param point the canvas location of the mouse pointer.
     * @param e the mouse event.
     */
    public void dispatchMouseClickedEvent(Point2D point, MouseEvent e) {
        double x = point.getX();
        double y = point.getY();
        ChartEntity entity = this.info.getEntityCollection().getEntity(x, y);
        ChartMouseEventFX event = new ChartMouseEventFX(this.chart, e, entity);
        for (ChartMouseListenerFX listener : this.chartMouseListeners) {
            listener.chartMouseClicked(event);
        }
    }
}
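// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original source). It shows how a
// ChartCanvas might be placed in a JavaFX scene, how the default handlers are
// looked up by their IDs ("tooltip", "scroll", "anchor", "pan", "dispatch"),
// and how a ChartMouseListenerFX is registered. The chart construction via
// ChartFactory/DefaultPieDataset is an assumption made only to keep the
// example self-contained, and ChartMouseListenerFX is assumed to declare just
// the two callbacks used in this class. To actually run it, declare the class
// as a public top-level class and start it with Application.launch(...).
class ChartCanvasUsageSketch extends javafx.application.Application {

    @Override
    public void start(javafx.stage.Stage stage) {
        org.jfree.data.general.DefaultPieDataset dataset
                = new org.jfree.data.general.DefaultPieDataset();
        dataset.setValue("A", 60.0);
        dataset.setValue("B", 40.0);
        JFreeChart chart = org.jfree.chart.ChartFactory.createPieChart(
                "Demo", dataset, true, true, false);

        ChartCanvas canvas = new ChartCanvas(chart);
        canvas.setWidth(600);
        canvas.setHeight(400);
        canvas.setTooltipEnabled(false);                           // silence the "tooltip" handler
        canvas.removeMouseHandler(canvas.getMouseHandler("pan"));  // drop panning entirely
        canvas.addChartMouseListener(new ChartMouseListenerFX() {
            @Override
            public void chartMouseClicked(ChartMouseEventFX event) {
                System.out.println("chart clicked");
            }
            @Override
            public void chartMouseMoved(ChartMouseEventFX event) {
                // not interested in move events
            }
        });

        stage.setScene(new javafx.scene.Scene(new javafx.scene.Group(canvas)));
        stage.show();
    }
}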
package prope.reporting;

import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;

public final class ReportWriter {

    private final Report report;

    private final String excelDelimiter = ";";

    private final String rDelimiter = ",";

    public ReportWriter(Report report) {
        this.report = report;
    }

    public void printToConsole() {
        for (ReportEntry entry : report) {
            System.out.println(entry);
        }
    }

    public void writeToExcelFile(String outputFile) {
        if (report.getEntries().isEmpty()) {
            System.out.println("There is nothing to write... Did you use the correct analysis type?");
            return;
        }
        writeToFile(outputFile, excelDelimiter);
    }

    private void writeToFile(String outputFile, String delimiter) {
        try (PrintWriter writer = new PrintWriter(new OutputStreamWriter(
                new FileOutputStream(outputFile), "UTF-8"))) {
            printHeader(writer, delimiter);
            printBody(writer, delimiter);
        } catch (IOException e) {
            System.err.println("Could not write to file " + outputFile + ": "
                    + e.getMessage());
            e.printStackTrace();
        }
    }

    private void printHeader(PrintWriter writer, String delimiter) {
        writer.print("filename" + delimiter);
        for (String varName : report.getEntries().get(0).getVariableNames()) {
            writer.print(varName + delimiter);
        }
        writer.println();
    }

    private void printBody(PrintWriter writer, String delimiter) {
        for (ReportEntry entry : report) {
            writer.println(entry.toStringWithSeparator(delimiter));
        }
    }

    public void writeToRFile(String outputFile) {
        writeToFile(outputFile, rDelimiter);
    }
}
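// Illustrative usage sketch (not part of the original source). Report and
// ReportEntry come from the same prope.reporting package; the analysis that
// produces the Report instance is assumed to happen elsewhere.
class ReportWriterUsageSketch {

    static void export(Report report) {
        ReportWriter writer = new ReportWriter(report);
        writer.printToConsole();                 // quick look on stdout
        writer.writeToExcelFile("report.csv");   // ';'-delimited, Excel-friendly
        writer.writeToRFile("report_r.csv");     // ','-delimited, e.g. for R's read.csv
    }
}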
package be.iminds.iot.dianne.things.output; import java.util.Hashtable; import java.util.UUID; import org.osgi.framework.BundleContext; import org.osgi.framework.ServiceRegistration; import be.iminds.iot.dianne.api.nn.module.ModuleException; import be.iminds.iot.dianne.tensor.Tensor; import be.iminds.iot.dianne.tensor.TensorOps; import be.iminds.iot.input.joystick.api.JoystickEvent; import be.iminds.iot.input.joystick.api.JoystickListener; import be.iminds.iot.input.keyboard.api.KeyboardEvent; import be.iminds.iot.input.keyboard.api.KeyboardListener; import be.iminds.iot.robot.api.Arm; import be.iminds.iot.robot.api.OmniDirectional; public class YoubotOutput extends ThingOutput implements JoystickListener, KeyboardListener { private enum Mode { IGNORE, DISCRETE, CONTINUOUS, STOCHASTIC, ANY } private OmniDirectional base; private Arm arm; private float speed = 0.1f; private float gripThreshold = 0.01f; private float ignoreGripThreshold = -0.02f; private float vx = 0; private float vy = 0; private float va = 0; private boolean grip = false; private Tensor sample = new Tensor(3); private volatile boolean skip = false; private volatile boolean stop = false; private ServiceRegistration registration; private Mode mode = Mode.ANY; public YoubotOutput(UUID id, String name, BundleContext context){ super(id, name, "Youbot"); String s = context.getProperty("be.iminds.iot.dianne.youbot.speed"); if(s!=null){ speed = Float.parseFloat(s); } s = context.getProperty("be.iminds.iot.dianne.youbot.gripThreshold"); if(s!=null){ gripThreshold = Float.parseFloat(s); } s = context.getProperty("be.iminds.iot.dianne.youbot.ignoreGripThreshold"); if(s!=null){ ignoreGripThreshold = Float.parseFloat(s); } } public void setBase(OmniDirectional b){ this.base = b; } public void setArm(Arm a){ this.arm = a; } @Override public void onForward(final UUID moduleId, final Tensor output, final String... tags) { if(output.dim() != 1){ System.out.println("Wrong output dimensions"); return; } if(mode == Mode.IGNORE){ return; } if(skip || stop){ return; } // TODO this code is replicated from the Environment to have same behavior // Should this somehow be merged together? 
int outputs = output.size(0); if(outputs == 7 && (mode == Mode.DISCRETE || mode == Mode.ANY)){ // treat as discrete outputs int action = TensorOps.argmax(output); if(action == 6 && output.get(6) < ignoreGripThreshold){ action = TensorOps.argmax(output.narrow(0, 5)); } grip = false; switch(action){ case 0: vx = 0; vy = speed; va = 0; break; case 1: vx = 0; vy = -speed; va = 0; break; case 2: vx = speed; vy = 0; va = 0; break; case 3: vx = -speed; vy = 0; va = 0; break; case 4: vx = 0; vy = 0; va = 2*speed; break; case 5: vx = 0; vy = 0; va = -2*speed; break; case 6: grip = true; } } else if(outputs == 3 && (mode == Mode.CONTINUOUS || mode == Mode.ANY)) { float[] action = output.get(); // treat as continuous outputs if(TensorOps.dot(output, output) < gripThreshold){ // grip grip = true; } else { // move grip = false; vx = action[0]*speed; vy = action[1]*speed; va = action[2]*speed*2; } } else if(outputs == 6 && (mode == Mode.STOCHASTIC || mode == Mode.ANY)) { sample.randn(); TensorOps.cmul(sample, sample, output.narrow(0, 3, 3)); TensorOps.add(sample, sample, output.narrow(0, 0, 3)); float[] action = sample.get(); // treat as continuous outputs if(TensorOps.dot(sample, sample) < gripThreshold){ // grip grip = true; } else { // move grip = false; vx = action[0]*speed; vy = action[1]*speed; va = action[2]*speed*2; } } if(grip){ base.stop(); arm.openGripper() .then(p -> arm.setPositions(2.92f, 0.0f, 0.0f, 0.0f, 2.875f)) .then(p -> arm.setPositions(2.92f, 1.76f, -1.37f, 2.55f)) .then(p -> arm.closeGripper()) .then(p -> arm.setPositions(0.01f, 0.8f)) .then(p -> arm.setPositions(0.01f, 0.8f, -1f, 2.9f)) .then(p -> arm.openGripper()) .then(p -> arm.setPosition(1, -1.3f)) .then(p -> arm.reset()).then(p -> {skip = false; return null;}); skip = true; } else { base.move(vx, vy, va); } } @Override public void onError(UUID moduleId, ModuleException e, String... 
			tags) {
	}

	public void connect(UUID nnId, UUID outputId, BundleContext context){
		if(!isConnected()){
			stop = false;
			registration = context.registerService(new String[]{JoystickListener.class.getName(),KeyboardListener.class.getName()}, this, new Hashtable<>());
		}
		super.connect(nnId, outputId, context);
	}

	public void disconnect(UUID moduleId, UUID outputId){
		// stop youbot on disconnect
		super.disconnect(moduleId, outputId);
		if(!isConnected()){
			stop = true;
			base.stop();
			arm.stop();
			registration.unregister();
		}
	}

	@Override
	public void onEvent(JoystickEvent e) {
		switch(e.type){
		case BUTTON_X_PRESSED:
			base.stop();
			mode = Mode.IGNORE;
			System.out.println("Ignore any neural net robot control signals");
			break;
		case BUTTON_Y_PRESSED:
			mode = Mode.DISCRETE;
			System.out.println("Accept only discrete neural net robot control signals");
			break;
		case BUTTON_A_PRESSED:
			mode = Mode.CONTINUOUS;
			System.out.println("Accept only continuous neural net robot control signals");
			break;
		case BUTTON_B_PRESSED:
			mode = Mode.STOCHASTIC;
			System.out.println("Accept only stochastic continuous neural net robot control signals");
			break;
		default:
		}
	}

	@Override
	public void onEvent(KeyboardEvent e) {
		if(e.type!=KeyboardEvent.Type.PRESSED)
			return;

		switch(e.key){
		case "1":
			base.stop();
			mode = Mode.IGNORE;
			System.out.println("Ignore any neural net robot control signals");
			break;
		case "2":
			mode = Mode.DISCRETE;
			System.out.println("Accept only discrete neural net robot control signals");
			break;
		case "3":
			mode = Mode.CONTINUOUS;
			System.out.println("Accept only continuous neural net robot control signals");
			break;
		case "4":
			mode = Mode.STOCHASTIC;
			System.out.println("Accept only stochastic continuous neural net robot control signals");
			break;
		case "0":
			mode = Mode.ANY;
			System.out.println("Accept any robot control signals");
			break;
		default:
			break;
		}
	}
}
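// Illustrative sketch (not part of the original source). It mirrors how
// onForward() interprets a 6-element "stochastic" output: the first three
// values are treated as the mean and the last three as the standard deviation
// of a Gaussian from which the (vx, vy, va) command is sampled; a sample with
// a small squared norm triggers the grip sequence instead of a move. The class
// and method names below are hypothetical and exist only for this example.
class StochasticActionSketch {

	static float[] sampleAction(Tensor output, float speed, float gripThreshold) {
		// output layout assumed: [mean_vx, mean_vy, mean_va, std_vx, std_vy, std_va]
		Tensor sample = new Tensor(3);
		sample.randn();                                          // unit Gaussian noise
		TensorOps.cmul(sample, sample, output.narrow(0, 3, 3));  // scale by the stdev part
		TensorOps.add(sample, sample, output.narrow(0, 0, 3));   // shift by the mean part

		if (TensorOps.dot(sample, sample) < gripThreshold) {
			return null;                                         // caller should run the grip sequence
		}
		float[] a = sample.get();
		return new float[]{ a[0] * speed, a[1] * speed, a[2] * speed * 2 };
	}
}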
package com.yahoo.vespa.hosted.controller.restapi.application; import com.yahoo.component.Version; import com.yahoo.config.provision.InstanceName; import com.yahoo.config.provision.zone.ZoneId; import com.yahoo.container.jdisc.HttpResponse; import com.yahoo.vespa.hosted.controller.api.application.v4.model.DeployOptions; import com.yahoo.vespa.hosted.controller.api.integration.configserver.ConfigServerException; import com.yahoo.vespa.hosted.controller.api.integration.deployment.ApplicationVersion; import com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType; import com.yahoo.vespa.hosted.controller.application.ApplicationPackage; import com.yahoo.vespa.hosted.controller.deployment.ApplicationPackageBuilder; import com.yahoo.vespa.hosted.controller.deployment.DeploymentTester; import org.json.JSONException; import org.json.JSONObject; import org.junit.Test; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.net.URI; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.time.Duration; import java.time.Instant; import java.util.Optional; import static com.yahoo.vespa.hosted.controller.api.integration.configserver.ConfigServerException.ErrorCode.INVALID_APPLICATION_PACKAGE; import static com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType.devAwsUsEast2a; import static com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType.devUsEast1; import static com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType.productionUsCentral1; import static com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType.productionUsEast3; import static com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType.productionUsWest1; import static com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType.stagingTest; import static com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType.systemTest; import static com.yahoo.vespa.hosted.controller.api.integration.deployment.JobType.testUsCentral1; import static com.yahoo.vespa.hosted.controller.api.integration.deployment.TesterCloud.Status.FAILURE; import static com.yahoo.vespa.hosted.controller.deployment.DeploymentContext.applicationPackage; import static com.yahoo.vespa.hosted.controller.deployment.RunStatus.deploymentFailed; import static com.yahoo.vespa.hosted.controller.deployment.RunStatus.installationFailed; import static com.yahoo.vespa.hosted.controller.deployment.RunStatus.running; import static com.yahoo.vespa.hosted.controller.deployment.RunStatus.testFailure; import static org.junit.Assert.assertEquals; /** * @author jonmv * @author freva */ public class JobControllerApiHandlerHelperTest { @Test public void testResponses() { ApplicationPackage applicationPackage = new ApplicationPackageBuilder() .region("us-central-1") .test("us-central-1") .parallel("us-west-1", "us-east-3") .build(); DeploymentTester tester = new DeploymentTester(); var app = tester.newDeploymentContext(); tester.clock().setInstant(Instant.EPOCH); // Revision 1 gets deployed everywhere. app.submit(applicationPackage).deploy(); ApplicationVersion revision1 = app.lastSubmission().get(); assertEquals(1000, tester.application().projectId().getAsLong()); tester.clock().advance(Duration.ofMillis(1000)); // Revision 2 gets deployed everywhere except in us-east-3. 
ApplicationVersion revision2 = app.submit(applicationPackage).lastSubmission().get(); app.runJob(systemTest); app.runJob(stagingTest); app.runJob(productionUsCentral1); app.runJob(testUsCentral1); tester.triggerJobs(); // us-east-3 eats the deployment failure and fails before deployment, while us-west-1 fails after. tester.configServer().throwOnNextPrepare(new ConfigServerException(URI.create("url"), "ERROR!", INVALID_APPLICATION_PACKAGE, null)); tester.runner().run(); assertEquals(deploymentFailed, tester.jobs().last(app.instanceId(), productionUsEast3).get().status()); ZoneId usWest1 = productionUsWest1.zone(tester.controller().system()); tester.configServer().convergeServices(app.instanceId(), usWest1); tester.configServer().convergeServices(app.testerId().id(), usWest1); tester.setEndpoints(app.instanceId(), usWest1); tester.setEndpoints(app.testerId().id(), usWest1); tester.runner().run(); tester.cloud().set(FAILURE); tester.runner().run(); assertEquals(testFailure, tester.jobs().last(app.instanceId(), productionUsWest1).get().status()); assertEquals(revision2, app.deployment(productionUsCentral1.zone(tester.controller().system())).applicationVersion()); assertEquals(revision1, app.deployment(productionUsEast3.zone(tester.controller().system())).applicationVersion()); assertEquals(revision2, app.deployment(productionUsWest1.zone(tester.controller().system())).applicationVersion()); tester.clock().advance(Duration.ofMillis(1000)); // Revision 3 starts. app.submit(applicationPackage) .runJob(systemTest).runJob(stagingTest); tester.triggerJobs(); // Starts runs for us-central-1 and a new staging test run. tester.runner().run(); assertEquals(running, tester.jobs().last(app.instanceId(), productionUsCentral1).get().status()); assertEquals(running, tester.jobs().last(app.instanceId(), stagingTest).get().status()); // Staging deployment expires and the job fails, and is immediately retried. tester.controller().applications().deactivate(app.instanceId(), stagingTest.zone(tester.controller().system())); tester.runner().run(); assertEquals(installationFailed, tester.jobs().last(app.instanceId(), stagingTest).get().status()); // Staging deployment expires again, the job fails for the second time, and won't be retried immediately. tester.clock().advance(Duration.ofMillis(100_000)); // Advance time to avoid immediate retry tester.triggerJobs(); tester.runner().run(); assertEquals(running, tester.jobs().last(app.instanceId(), stagingTest).get().status()); tester.controller().applications().deactivate(app.instanceId(), stagingTest.zone(tester.controller().system())); tester.runner().run(); assertEquals(installationFailed, tester.jobs().last(app.instanceId(), stagingTest).get().status()); tester.triggerJobs(); assertEquals(installationFailed, tester.jobs().last(app.instanceId(), stagingTest).get().status()); // System upgrades to a new version, which won't yet start. Version platform = new Version("7.1"); tester.controllerTester().upgradeSystem(platform); tester.upgrader().maintain(); tester.triggerJobs(); // us-central-1 has started, deployed, and is installing. Deployment is not yet verified. // us-east-3 is waiting for the failed staging test and us-central-1, while us-west-1 is waiting only for us-central-1. // Only us-east-3 is verified, on revision1. // staging-test has 5 runs: one success without sources on revision1, one success from revision1 to revision2, // one success from revision2 to revision3 and two failures from revision1 to revision3. 
assertResponse(JobControllerApiHandlerHelper.runResponse(tester.jobs().runs(app.instanceId(), stagingTest), URI.create("https://some.url:43/root")), "staging-runs.json"); assertResponse(JobControllerApiHandlerHelper.runDetailsResponse(tester.jobs(), tester.jobs().last(app.instanceId(), productionUsEast3).get().id(), "0"), "us-east-3-log-without-first.json"); assertResponse(JobControllerApiHandlerHelper.jobTypeResponse(tester.controller(), app.instanceId(), URI.create("https://some.url:43/root/")), "overview.json"); var userApp = tester.newDeploymentContext(app.instanceId().tenant().value(), app.instanceId().application().value(), "user"); userApp.runJob(devAwsUsEast2a, applicationPackage); assertResponse(JobControllerApiHandlerHelper.runResponse(tester.jobs().runs(userApp.instanceId(), devAwsUsEast2a), URI.create("https://some.url:43/root")), "dev-aws-us-east-2a-runs.json"); assertResponse(JobControllerApiHandlerHelper.jobTypeResponse(tester.controller(), userApp.instanceId(), URI.create("https://some.url:43/root/")), "overview-user-instance.json"); } @Test public void testDevResponses() { DeploymentTester tester = new DeploymentTester(); var app = tester.newDeploymentContext(); tester.clock().setInstant(Instant.EPOCH); ZoneId zone = JobType.devUsEast1.zone(tester.controller().system()); tester.jobs().deploy(app.instanceId(), JobType.devUsEast1, Optional.empty(), applicationPackage); tester.configServer().setLogStream("1554970337.935104\t17491290-v6-1.ostk.bm2.prod.ne1.yahoo.com\t5480\tcontainer\tstdout\tinfo\tERROR: Bundle canary-application [71] Unable to get module class path. (java.lang.NullPointerException)\n"); assertResponse(JobControllerApiHandlerHelper.runDetailsResponse(tester.jobs(), tester.jobs().last(app.instanceId(), devUsEast1).get().id(), null), "dev-us-east-1-log-first-part.json"); tester.configServer().setLogStream("Nope, this won't be logged"); tester.configServer().convergeServices(app.instanceId(), zone); tester.setEndpoints(app.instanceId(), zone); tester.runner().run(); assertResponse(JobControllerApiHandlerHelper.jobTypeResponse(tester.controller(), app.instanceId(), URI.create("https://some.url:43/root")), "dev-overview.json"); assertResponse(JobControllerApiHandlerHelper.runDetailsResponse(tester.jobs(), tester.jobs().last(app.instanceId(), devUsEast1).get().id(), "9"), "dev-us-east-1-log-second-part.json"); } @Test public void testResponsesWithDirectDeployment() { var tester = new DeploymentTester(); var app = tester.newDeploymentContext(); tester.clock().setInstant(Instant.EPOCH); var region = "us-west-1"; var applicationPackage = new ApplicationPackageBuilder().region(region).build(); // Deploy directly to production zone, like integration tests. 
tester.controller().applications().deploy(tester.instance().id(), ZoneId.from("prod", region), Optional.of(applicationPackage), new DeployOptions(true, Optional.empty(), false, false)); assertResponse(JobControllerApiHandlerHelper.jobTypeResponse(tester.controller(), app.instanceId(), URI.create("https://some.url:43/root/")), "jobs-direct-deployment.json"); } private void compare(HttpResponse response, String expected) throws JSONException, IOException { ByteArrayOutputStream baos = new ByteArrayOutputStream(); response.render(baos); JSONObject actualJSON = new JSONObject(new String(baos.toByteArray())); JSONObject expectedJSON = new JSONObject(expected); assertEquals(expectedJSON.toString(), actualJSON.toString()); } private void assertResponse(HttpResponse response, String fileName) { try { Path path = Paths.get("src/test/java/com/yahoo/vespa/hosted/controller/restapi/application/responses/").resolve(fileName); String expected = Files.readString(path); compare(response, expected); } catch (Exception e) { throw new RuntimeException(e); } } }
package org.enner.flatbuffers.bench; import org.openjdk.jmh.annotations.*; import org.openjdk.jmh.runner.Runner; import org.openjdk.jmh.runner.RunnerException; import org.openjdk.jmh.runner.options.Options; import org.openjdk.jmh.runner.options.OptionsBuilder; import org.openjdk.jmh.runner.options.VerboseMode; import java.nio.ByteBuffer; import java.util.concurrent.TimeUnit; /** * @author Florian Enner < florian @ hebirobotics.com > * @since 23 Jan 2015 */ @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.MICROSECONDS) @Fork(2) @Warmup(iterations = 10) @Measurement(iterations = 10) @State(Scope.Thread) public class BenchmarkComparison { public static void main(String[] args) throws RunnerException { Options options = new OptionsBuilder() .include(".*" + BenchmarkComparison.class.getSimpleName() + ".*") .verbosity(VerboseMode.NORMAL) .build(); new Runner(options).run(); } /** * OS: Windows 8.1 * JDK: Oracle JDK 1.8.0_20-b26 * CPU: Intel i5-3427U @ 1.80 Ghz * * Benchmark Mode Samples Score Score error Units * o.e.f.b.BenchmarkComparison.flatEncodeDirect avgt 20 1.137 0.036 us/op * o.e.f.b.BenchmarkComparison.flatUseDirect avgt 20 0.639 0.013 us/op * o.e.f.b.BenchmarkComparison.flatDecodeDirect avgt 20 0.009 0.001 us/op (included in use) * <p/> * o.e.f.b.BenchmarkComparison.flatEncodeHeap avgt 20 1.576 0.030 us/op * o.e.f.b.BenchmarkComparison.flatUseHeap avgt 20 0.732 0.029 us/op (included in use) * o.e.f.b.BenchmarkComparison.flatDecodeHeap avgt 20 0.011 0.000 us/op * <p/> * o.e.f.b.BenchmarkComparison.protoEncode avgt 20 1.652 0.049 us/op * o.e.f.b.BenchmarkComparison.protoUse avgt 20 0.037 0.001 us/op * o.e.f.b.BenchmarkComparison.protoDecode avgt 20 6.903 0.084 us/op */ public BenchmarkComparison() { flatBench.encode(heapReadBuffer); flatBench.encode(directReadBuffer); this.position = heapReadBuffer.position(); } ProtoBench protoBench = new ProtoBench(); FlatBench flatBench = new FlatBench(); ByteBuffer heapReadBuffer = ByteBuffer.allocate(1024); ByteBuffer directReadBuffer = ByteBuffer.allocateDirect(1024); int position; ByteBuffer heapWriteBuffer = ByteBuffer.allocate(1024); ByteBuffer directWriteBuffer = ByteBuffer.allocateDirect(1024); @Benchmark public Object protoEncode() { return protoBench.encode(); } @Benchmark public Object protoDecode() { return protoBench.decode(); } @Benchmark public long protoUse() { return protoBench.use(); } @Benchmark public Object flatEncodeHeap() { return flatBench.encode(heapWriteBuffer); } @Benchmark public Object flatDecodeHeap() { heapReadBuffer.position(position); return flatBench.decode(heapReadBuffer); } @Benchmark public long flatUseHeap() { heapReadBuffer.position(position); return flatBench.use(heapReadBuffer); } @Benchmark public Object flatEncodeDirect() { return flatBench.encode(directWriteBuffer); } @Benchmark public Object flatDecodeDirect() { directReadBuffer.position(position); return flatBench.decode(directReadBuffer); } @Benchmark public long flatUseDirect() { directReadBuffer.position(position); return flatBench.use(directReadBuffer); } }
package rsc.subscriber;

import org.reactivestreams.Subscriber;

import rsc.state.Backpressurable;
import rsc.state.Cancellable;
import rsc.state.Introspectable;
import rsc.state.Requestable;
import rsc.util.ExceptionHelper;
import rsc.util.SubscriptionHelper;
import rsc.util.UnsignalledExceptions;

/**
 * Interface to receive generated signals from the callback function.
 * <p>
 * At least one of the methods should be called per invocation of the generator function.
 *
 * @param <T> the output value type
 */
public interface SignalEmitter<T> extends Backpressurable, Introspectable, Cancellable, Requestable {

    /**
     * Signal the completion of the sequence.
     * @see Subscriber#onComplete()
     */
    void complete();

    /**
     * Signal the next value in the sequence.
     * @param t the value to signal, not-null
     * @see Subscriber#onNext(Object)
     * @return the result of the emission, see {@link Emission} enum.
     */
    Emission emit(T t);

    /**
     * Signal and terminate the sequence with an error.
     * @param e the Throwable instance, not-null
     * @see Subscriber#onError(Throwable)
     */
    void fail(Throwable e);

    /**
     * Indicate there won't be any further signals delivered by
     * the generator and the operator will stop calling it.
     * <p>
     * Call to this method will also trigger the state consumer.
     */
    void stop();

    /**
     * Try emitting or throw an unchecked exception.
     * @param t the value to emit.
     * @see #emit(Object)
     * @throws RuntimeException if the emission failed for a reason other than
     * backpressure or cancellation.
     */
    default void tryEmit(T t) {
        Emission emission = emit(t);
        if (emission.isOk()) {
            return;
        }
        if (emission.isBackpressured()) {
            SubscriptionHelper.reportMoreProduced();
            return;
        }
        if (emission.isCancelled()) {
            UnsignalledExceptions.onNextDropped(t);
            return;
        }
        if (getError() != null) {
            throw ExceptionHelper.bubble(getError());
        }
        throw new IllegalStateException("Emission has failed");
    }

    /**
     * An acknowledgement signal returned by {@link #emit}.
     * {@link SignalEmitter.Emission#isOk()} is the only successful signal; the others
     * identify the cause of the emission failure.
     */
    enum Emission {
        FAILED, BACKPRESSURED, OK, CANCELLED;

        public boolean isBackpressured() {
            return this == BACKPRESSURED;
        }

        public boolean isCancelled() {
            return this == CANCELLED;
        }

        public boolean isFailed() {
            return this == FAILED;
        }

        public boolean isOk() {
            return this == OK;
        }
    }
}
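// Illustrative sketch (not part of the original source). It shows how a
// generator callback might inspect the Emission result of emit() instead of
// relying on tryEmit(); the emitter instance is assumed to be handed in by the
// surrounding operator.
class SignalEmitterUsageSketch {

    static void emitAll(SignalEmitter<String> emitter, Iterable<String> values) {
        for (String v : values) {
            SignalEmitter.Emission result = emitter.emit(v);
            if (result.isBackpressured() || result.isCancelled()) {
                return;             // downstream can't or won't take more values
            }
            if (result.isFailed()) {
                emitter.stop();     // give up; the operator stops calling the generator
                return;
            }
        }
        emitter.complete();
    }
}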
package com.phonegap; import java.util.TimeZone; import android.content.Context; import android.content.IntentFilter; import android.hardware.SensorManager; import android.net.Uri; import android.os.Vibrator; import android.telephony.TelephonyManager; import android.webkit.WebView; import android.media.Ringtone; import android.media.RingtoneManager; public class PhoneGap{ private static final String LOG_TAG = "PhoneGap"; /* * UUID, version and availability */ public boolean droid = true; public static String version = "0.8.0"; public static String platform = "Android"; public static String uuid; private Context mCtx; private WebView mAppView; AudioHandler audio; public PhoneGap(Context ctx, WebView appView) { this.mCtx = ctx; this.mAppView = appView; audio = new AudioHandler("/sdcard/tmprecording.mp3", ctx); uuid = getUuid(); } public void beep(long pattern) { Uri ringtone = RingtoneManager.getDefaultUri(RingtoneManager.TYPE_NOTIFICATION); Ringtone notification = RingtoneManager.getRingtone(mCtx, ringtone); for (long i = 0; i < pattern; ++i) { notification.play(); } } public void vibrate(long pattern){ // Start the vibration, 0 defaults to half a second. if (pattern == 0) pattern = 500; Vibrator vibrator = (Vibrator) mCtx.getSystemService(Context.VIBRATOR_SERVICE); vibrator.vibrate(pattern); } public String getPlatform() { return this.platform; } public String getUuid() { TelephonyManager operator = (TelephonyManager) mCtx.getSystemService(Context.TELEPHONY_SERVICE); String uuid = operator.getDeviceId(); return uuid; } public void init() { mAppView.loadUrl("javascript:Device.setData('Android','" + version + "','" + this.getUuid() + "')"); } public String getModel() { String model = android.os.Build.MODEL; return model; } public String getProductName() { String productname = android.os.Build.PRODUCT; return productname; } public String getOSVersion() { String osversion = android.os.Build.VERSION.RELEASE; return osversion; } public String getSDKVersion() { String sdkversion = android.os.Build.VERSION.SDK; return sdkversion; } public String getVersion() { return version; } public void httpGet(String url, String file) /** * grabs a file from specified url and saves it to a name and location * the base directory /sdcard is abstracted so that paths may be the same from one mobile OS to another * TODO: JavaScript call backs and error handling */ { HttpHandler http = new HttpHandler(); http.get(url, file); } /** * AUDIO * TODO: Basic functions done but needs more work on error handling and call backs, remove record hack */ public void startRecordingAudio(String file) { /* for this to work the recording needs to be specified in the constructor, * a hack to get around this, I'm moving the recording after it's complete */ audio.startRecording(file); } public void stopRecordingAudio() { audio.stopRecording(); } public void startPlayingAudio(String file) { audio.startPlaying(file); } public void stopPlayingAudio() { audio.stopPlaying(); } public long getCurrentPositionAudio() { System.out.println(audio.getCurrentPosition()); return(audio.getCurrentPosition()); } public long getDurationAudio(String file) { System.out.println(audio.getDuration(file)); return(audio.getDuration(file)); } public void setAudioOutputDevice(int output){ audio.setAudioOutputDevice(output); } public int getAudioOutputDevice(){ return audio.getAudioOutputDevice(); } public String getLine1Number() { TelephonyManager tm = (TelephonyManager)mCtx.getSystemService(Context.TELEPHONY_SERVICE); return(tm.getLine1Number()); } public 
String getVoiceMailNumber() { TelephonyManager tm = (TelephonyManager)mCtx.getSystemService(Context.TELEPHONY_SERVICE); return(tm.getVoiceMailNumber()); } public String getNetworkOperatorName(){ TelephonyManager tm = (TelephonyManager)mCtx.getSystemService(Context.TELEPHONY_SERVICE); return(tm.getNetworkOperatorName()); } public String getSimCountryIso(){ TelephonyManager tm = (TelephonyManager)mCtx.getSystemService(Context.TELEPHONY_SERVICE); return(tm.getSimCountryIso()); } public String getTimeZoneID() { TimeZone tz = TimeZone.getDefault(); return(tz.getID()); } }
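// Illustrative sketch (not part of the original source). PhoneGap exposes its
// methods to the page through a WebView JavaScript interface; the host
// activity, the asset path and the "PhoneGap" interface name below are
// assumptions made only to show how the bridge could be wired up.
class PhoneGapHostActivitySketch extends android.app.Activity {

    @Override
    protected void onCreate(android.os.Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        android.webkit.WebView appView = new android.webkit.WebView(this);
        appView.getSettings().setJavaScriptEnabled(true);

        PhoneGap gap = new PhoneGap(this, appView);
        appView.addJavascriptInterface(gap, "PhoneGap");   // reachable as window.PhoneGap in page JS

        setContentView(appView);
        appView.loadUrl("file:///android_asset/www/index.html");
        gap.init();   // pushes platform, version and device UUID into the page via Device.setData
    }
}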
package org.fenixedu.a3es.ui.strategy; import java.util.ArrayList; import java.util.Base64; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; import javax.ws.rs.client.Client; import javax.ws.rs.client.ClientBuilder; import javax.ws.rs.client.Entity; import javax.ws.rs.client.WebTarget; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import org.fenixedu.a3es.domain.CurricularUnitFile; import org.fenixedu.a3es.domain.DegreeFile; import org.fenixedu.a3es.domain.TeacherActivity; import org.fenixedu.a3es.domain.TeacherFile; import org.fenixedu.a3es.domain.util.ExportDegreeProcessBean; import org.fenixedu.academic.domain.CurricularCourse; import org.fenixedu.academic.domain.ExecutionSemester; import org.fenixedu.academic.domain.ExecutionYear; import org.fenixedu.academic.domain.curricularPeriod.CurricularPeriod; import org.fenixedu.academic.domain.degreeStructure.Context; import org.fenixedu.academic.domain.degreeStructure.CourseGroup; import org.fenixedu.bennu.A3esSpringConfiguration; import org.fenixedu.bennu.core.i18n.BundleUtil; import org.fenixedu.commons.i18n.I18N; import org.fenixedu.commons.i18n.LocalizedString; import org.json.simple.JSONArray; import org.json.simple.JSONObject; import org.json.simple.JSONValue; import org.springframework.context.MessageSource; import com.google.common.base.Strings; public class MigrationStrategy { protected static final int TEACHER_SERVICE_ITEMS = 10; private static final Locale PT = new Locale("pt"); private static final Locale UK = Locale.UK; private static final LocalizedString NO_GROUP = new LocalizedString(PT, "Sem Grupo").with(UK, "No group"); private static final String API_PROCESS = "api_process"; private static final String API_FORM = "api_form"; private static final String API_FOLDER = "api_folder"; private static final String API_ANNEX = "api_annex"; protected String base64Hash; protected String formId; private MessageSource messageSource; protected String competenceCoursesFolderIndex = "6.2.1."; public enum AccreditationType { PROGRAM_ACCREDITATION, NEW_PROGRAM_ACCREDITATION, ACCREDITATION_RENEWAL, INSTITUTIONAL_EVALUATION; @Override public String toString() { return getLocalizedName().getContent(); } public LocalizedString getLocalizedName() { return BundleUtil.getLocalizedString(A3esSpringConfiguration.BUNDLE, this.getClass().getName() + "." 
+ name()); } } public static MigrationStrategy getStrategy(ExportDegreeProcessBean form) { if (form.getAccreditationType().equals(AccreditationType.INSTITUTIONAL_EVALUATION)) { return new InstitutionalMigrationStrategy(); } else if (form.getAccreditationType().equals(AccreditationType.ACCREDITATION_RENEWAL)) { return new AccreditationRenewalMigrationStrategy(); } else if (form.getAccreditationType().equals(AccreditationType.NEW_PROGRAM_ACCREDITATION)) { return new NewProgramAccreditationMigrationStrategy(); } return new MigrationStrategy(); } public List<String> exportCurricularUnitFilesToA3es(ExportDegreeProcessBean form, MessageSource messageSource) { this.messageSource = messageSource; initialize(form); List<String> output = uploadCompetenceCourses(form); return output; } public List<String> exportTeacherUnitFilesToA3es(ExportDegreeProcessBean form, MessageSource messageSource) { this.messageSource = messageSource; initialize(form); List<String> output = uploadTeacherCurriculum(form); return output; } public List<String> exportDegreeStudyPlanToA3es(ExportDegreeProcessBean form, MessageSource messageSource) { this.messageSource = messageSource; initialize(form); List<String> output = uploadDegreeStudyPlan(form); return output; } private void initialize(ExportDegreeProcessBean form) { initializeHash(form); initializeFormId(form); } protected void initializeHash(ExportDegreeProcessBean form) { base64Hash = form.getBase64Hash(); if (Strings.isNullOrEmpty(form.getBase64Hash())) { base64Hash = new String(Base64.getEncoder().encode((form.getUser() + ":" + form.getPassword()).getBytes())); form.setBase64Hash(base64Hash); } } protected void initializeFormId(ExportDegreeProcessBean form) { formId = form.getFormId(); if (Strings.isNullOrEmpty(form.getFormId())) { JSONArray processes = null; try { processes = invokeToArray(webResource().path(API_PROCESS)); } catch (javax.ws.rs.NotAuthorizedException e) { throw new RuntimeException(messageSource.getMessage("error.not.authorized", null, I18N.getLocale())); } JSONObject json = (JSONObject) processes.iterator().next(); String id = (String) json.get("id"); String name = (String) json.get("name"); String degreeCode = form.getDegreeFile().getDegreeCode(); if (!Strings.isNullOrEmpty(degreeCode) && degreeCode.trim().equalsIgnoreCase(name)) { JSONArray forms = invokeToArray(webResource().path(API_FORM).queryParam("processId", id)); for (Object object : forms) { JSONObject jsonForm = (JSONObject) object; if (form.getProcessFolderName().equals(jsonForm.get("name"))) { formId = (String) jsonForm.get("id"); form.setFormId(formId); return; } } throw new RuntimeException(message("error.process.without.evaluation.form", name)); } else { throw new RuntimeException(message("error.invalid.degree.code", degreeCode)); } } } public String getProcessFolderName() { return "Guião para a auto-avaliação"; } protected WebTarget webResource() { Client client = ClientBuilder.newClient(); return client.target(A3esSpringConfiguration.getConfiguration().a3esURL()); } protected JSONObject invoke(WebTarget resource) { return (JSONObject) JSONValue.parse(resource.request(MediaType.APPLICATION_JSON) .header("Authorization", "Basic " + base64Hash).get(String.class)); } protected JSONArray invokeToArray(WebTarget resource) { return (JSONArray) ((JSONObject) JSONValue.parse(resource.request(MediaType.APPLICATION_JSON) .header("Authorization", "Basic " + base64Hash).get(String.class))).get("list"); } protected Response post(WebTarget resource, String arg) { return 
resource.request(MediaType.APPLICATION_JSON).header("Authorization", "Basic " + base64Hash) .buildPost(Entity.text(arg)).invoke(); } protected Response delete(WebTarget resource) { return resource.request(MediaType.APPLICATION_JSON).header("Authorization", "Basic " + base64Hash).buildDelete().invoke(); } protected List<String> uploadCompetenceCourses(ExportDegreeProcessBean form) { competenceCoursesFolderIndex = form.getCompetenceCoursesFolderIndex(); List<String> output = new ArrayList<String>(); for (Object object : invokeToArray(webResource().path(API_FOLDER).queryParam("formId", formId))) { JSONObject folder = (JSONObject) object; if (form.getCompetenceCoursesFolderName().equals(folder.get("name"))) { String competencesId = (String) folder.get(getCompetenceCourseId()); for (Object annexObj : invokeToArray(webResource().path(API_ANNEX).queryParam("formId", formId) .queryParam("folderId", competencesId))) { JSONObject annex = (JSONObject) annexObj; delete(webResource().path(API_ANNEX).path((String) annex.get("id")).queryParam("formId", formId) .queryParam("folderId", competencesId)); } for (Entry<JSONObject, String> json : buildCompetenceCoursesJson(form.getDegreeFile()).entrySet()) { Response response = post(webResource().path(API_ANNEX).queryParam("formId", formId).queryParam("folderId", competencesId), json.getKey().toJSONString()); int status = response.getStatus(); if (status == 201) { output.add(status + ": " + ((JSONObject) json.getKey().get(getCompetenceCoursesFieldKey("1.1"))).get("pt") + ": " + json.getValue()); } else { output.add(status + ": " + ((JSONObject) json.getKey().get(getCompetenceCoursesFieldKey("1.1"))).get("pt") + ": " + response.getEntity() + " input: " + json.getKey().toJSONString()); } } break; } } return output; } public String getCompetenceCoursesFolderIndex() { return competenceCoursesFolderIndex; } public String getCompetenceCoursesFolderName() { return "6.2.1. Ficha das unidades curriculares"; } protected String getCompetenceCourseId() { return "id"; } protected String getCompetenceCoursesFieldKey(String keyIndex) { return "q-" + getCompetenceCoursesFolderIndex() + keyIndex; } protected String getCompetenceCoursesFieldKeyII(String keyIndex) { return "q-II." 
+ getCompetenceCoursesFolderIndex() + keyIndex; } protected void setCompetenceCoursesField(JSONObject json, String keyIndex, Object value) { json.put(getCompetenceCoursesFieldKey(keyIndex), value); json.put(getCompetenceCoursesFieldKeyII(keyIndex), value); } protected Map<JSONObject, String> buildCompetenceCoursesJson(DegreeFile degreeFile) { Map<JSONObject, String> jsons = new HashMap<JSONObject, String>(); degreeFile.getCurricularUnitFileSet().forEach( curricularUnitFile -> { JSONObject json = new JSONObject(); StringBuilder output = new StringBuilder(); String ukLanguage = " (" + UK.getDisplayLanguage() + ")"; String ptLanguage = " (" + PT.getDisplayLanguage() + ")"; setCompetenceCoursesField(json, "1.1", curricularUnitFile.getFileName()); JSONObject q62111 = new JSONObject(); q62111.put( "en", cut(message("label.curricularUnitName") + ukLanguage, curricularUnitFile.getCurricularUnitName() .getContent(UK), output, 1000)); q62111.put( "pt", cut(message("label.curricularUnitName") + ptLanguage, curricularUnitFile.getCurricularUnitName() .getContent(PT), output, 1000)); setCompetenceCoursesField(json,"1.1", q62111); setCompetenceCoursesField(json,"1.2", cut(message("label.scientificAreaAcronym"), curricularUnitFile.getScientificArea(), output, 100)); setCompetenceCoursesField(json,"1.3", cut(message("label.courseRegime"), curricularUnitFile.getCourseRegime(), output, 100)); setCompetenceCoursesField(json,"1.4", cut(message("label.workingHours"), curricularUnitFile.getWorkingHours(), output, 100)); setCompetenceCoursesField(json,"1.5", cut(message("label.contactHours"), curricularUnitFile.getContactHours(), output, 100)); setCompetenceCoursesField(json,"1.6", cut(message("label.ects"), curricularUnitFile.getEcts(), output, 100)); JSONObject q62117 = new JSONObject(); q62117.put( "en", cut(message("label.observations") + ukLanguage, curricularUnitFile.getObservations().getContent(UK), output, 1000)); q62117.put( "pt", cut(message("label.observations") + ptLanguage, curricularUnitFile.getObservations().getContent(PT), output, 1000)); setCompetenceCoursesField(json,"1.7", q62117); setCompetenceCoursesField(json,"2", cut(message("label.responsibleTeacherAndTeachingHours"), curricularUnitFile.getResponsibleTeacherAndTeachingHours(), output, 100)); setCompetenceCoursesField(json,"3", curricularUnitFile.getOtherTeachersAndTeachingHours()); JSONObject q6214 = new JSONObject(); q6214.put( "en", cut(message("label.learningOutcomes") + ukLanguage, curricularUnitFile.getLearningOutcomes() .getContent(UK), output, 1000)); q6214.put( "pt", cut(message("label.learningOutcomes") + ptLanguage, curricularUnitFile.getLearningOutcomes() .getContent(PT), output, 1000)); setCompetenceCoursesField(json,"4", q6214); JSONObject q6215 = new JSONObject(); q6215.put( "en", cut(message("label.syllabus") + ukLanguage, curricularUnitFile.getSyllabus().getContent(UK), output, 1000)); q6215.put( "pt", cut(message("label.syllabus") + ptLanguage, curricularUnitFile.getSyllabus().getContent(PT), output, 1000)); setCompetenceCoursesField(json,"5", q6215); JSONObject q6216 = new JSONObject(); q6216.put( "en", cut(message("label.syllabusDemonstration") + ukLanguage, curricularUnitFile .getSyllabusDemonstration().getContent(UK), output, 1000)); q6216.put( "pt", cut(message("label.syllabusDemonstration") + ptLanguage, curricularUnitFile .getSyllabusDemonstration().getContent(PT), output, 1000)); setCompetenceCoursesField(json,"6", q6216); JSONObject q6217 = new JSONObject(); q6217.put( "en", 
cut(message("label.teachingMethodologies") + ukLanguage, curricularUnitFile .getTeachingMethodologies().getContent(UK), output, 1000)); q6217.put( "pt", cut(message("label.teachingMethodologies") + ptLanguage, curricularUnitFile .getTeachingMethodologies().getContent(PT), output, 1000)); setCompetenceCoursesField(json,"7", q6217); JSONObject q6218 = new JSONObject(); q6218.put( "en", cut(message("label.teachingMethodologiesDemonstration") + ukLanguage, curricularUnitFile .getTeachingMethodologiesDemonstration().getContent(UK), output, 3000)); q6218.put( "pt", cut(message("label.teachingMethodologiesDemonstration") + ptLanguage, curricularUnitFile .getTeachingMethodologiesDemonstration().getContent(PT), output, 3000)); setCompetenceCoursesField(json,"8", q6218); setCompetenceCoursesField(json, "9", cut(message("label.bibliographicReferences"), curricularUnitFile.getBibliographicReferences(), output, 1000)); jsons.put(json, output.toString()); }); return jsons; } protected List<String> uploadTeacherCurriculum(ExportDegreeProcessBean form) { List<String> output = new ArrayList<String>(); for (Object object : getTeacherCurriculumFolders()) { JSONObject folder = (JSONObject) object; if (isTeacherCurriculumnFolder(form.getTeacherCurriculumnFolderName(), folder)) { String teacherCurriculumId = (String) folder.get(getTeacherCurriculumId()); for (Object annexObj : invokeToArray(webResource().path(API_ANNEX).queryParam("formId", formId) .queryParam("folderId", teacherCurriculumId))) { JSONObject annex = (JSONObject) annexObj; delete(webResource().path(API_ANNEX).path((String) annex.get("id")).queryParam("formId", formId) .queryParam("folderId", teacherCurriculumId)); } for (Entry<JSONObject, String> json : buildTeacherCurriculumJson(form.getDegreeFile()).entrySet()) { Response response = post(webResource().path(API_ANNEX).queryParam("formId", formId) .queryParam("folderId", teacherCurriculumId), json.getKey().toJSONString()); int status = response.getStatus(); if (status == 201) { output.add("201 Created: " + json.getKey().get("q-cf-name") + ": " + json.getValue()); } else { output.add(status + ": " + json.getKey().get("q-cf-name") + ": " + response.getEntity() + " input: " + json.getKey().toJSONString()); } } break; } } return output; } public String getTeacherCurriculumnFolderName() { return "3.2. 
Fichas curriculares dos docentes do ciclo de estudos"; } protected String getTeacherCurriculumId() { return "id"; } protected boolean isTeacherCurriculumnFolder(String teacherCurriculumnFolderName, JSONObject folder) { return teacherCurriculumnFolderName.equals(folder.get("name")); } protected JSONArray getTeacherCurriculumFolders() { return invokeToArray(webResource().path(API_FOLDER).queryParam("formId", formId)); } protected Map<JSONObject, String> buildTeacherCurriculumJson(DegreeFile degreeFile) { Map<JSONObject, String> jsons = new HashMap<JSONObject, String>(); degreeFile.getTeacherFileSet() .forEach( teacherFile -> { JSONObject toplevel = new JSONObject(); StringBuilder output = new StringBuilder(); toplevel.put("q-cf-name", teacherFile.getFileName()); toplevel.put("q-new-cf-name", teacherFile.getFileName()); toplevel.put("q-cf-ies", teacherFile.getInstitution()); toplevel.put("q-new-cf-ies", teacherFile.getInstitution()); toplevel.put("q-cf-uo", teacherFile.getOrganicUnit()); toplevel.put("q-new-cf-uo", teacherFile.getOrganicUnit()); if (teacherFile.getA3esTeacherCategory() != null) { toplevel.put( "q-cf-cat", teacherFile.getA3esTeacherCategory().getName().getContent() .replaceAll(" ou equivalente", "")); toplevel.put( "q-new-cf-cat", teacherFile.getA3esTeacherCategory().getName().getContent() .replaceAll(" ou equivalente", "")); } else { output.append(message("message.fieldName.empty", "q-cf-cat")); } if (teacherFile.getRegime() != null) { toplevel.put("q-cf-time", Float.valueOf(teacherFile.getRegime())); toplevel.put("q-new-cf-time", Float.valueOf(teacherFile.getRegime())); } else { output.append(message("message.fieldName.empty", "q-cf-time")); } JSONObject file = new JSONObject(); { file.put("name", cut("nome", teacherFile.getTeacherName(), output, 200)); file.put("ies", cut("ies", teacherFile.getInstitution(), output, 200)); file.put("uo", cut("uo", teacherFile.getOrganicUnit(), output, 200)); file.put("research_center", cut("research_center", teacherFile.getResearchUnit(), output, 200)); if (teacherFile.getA3esTeacherCategory() != null) { file.put( "cat", teacherFile.getA3esTeacherCategory().getName().getContent() .replaceAll(" ou equivalente", "")); } else { output.append(message("message.fieldName.empty", "cat")); } if (teacherFile.getA3esDegreeType() != null) { file.put("deg", teacherFile.getA3esDegreeType().toString()); } else { output.append(message("message.fieldName.empty", "deg")); } if (teacherFile.getDegreeScientificArea() != null) { file.put("degarea", cut("area cientifica", teacherFile.getDegreeScientificArea(), output, 200)); } else { output.append(message("message.fieldName.empty", "degarea")); } if (teacherFile.getDegreeYear() != null) { file.put("ano_grau", teacherFile.getDegreeYear()); } else { output.append(message("message.fieldName.empty", "ano_grau")); } if (teacherFile.getDegreeInstitution() != null) { file.put("instituicao_conferente", cut("instituicao_conferente", teacherFile.getDegreeInstitution(), output, 200)); } else { output.append(message("message.fieldName.empty", "instituicao_conferente")); } if (teacherFile.getRegime() != null) { file.put("regime", Float.valueOf(teacherFile.getRegime())); } else { output.append(message("message.fieldName.empty", "regime")); } JSONArray academicArray = new JSONArray(); teacherFile.getA3esQualifications().forEach( qualification -> { JSONObject academic = new JSONObject(); if (qualification.getYear() != null) { academic.put("year", qualification.getYear()); } else { 
output.append(message("message.fieldName.empty", "year")); } if (qualification.getDegree() != null) { academic.put("degree", cut("degree", qualification.getDegree(), output, 30)); } else { output.append(message("message.fieldName.empty", "degree")); } if (qualification.getArea() != null) { academic.put("area", cut("area", qualification.getArea(), output, 100)); } else { output.append(message("message.fieldName.empty", "area")); } if (qualification.getInstitution() != null) { academic.put("ies", cut("ies", qualification.getInstitution(), output, 100)); } else { output.append(message("message.fieldName.empty", "ies")); } if (qualification.getClassification() != null) { academic.put("rank", cut("classificação", qualification.getClassification(), output, 30)); } else { output.append(message("message.fieldName.empty", "rank")); } academicArray.add(academic); }); file.put("form-academic", academicArray); file.put( "form-investigation", getJsonActivities(teacherFile.getScientificActivitySet(), "investigation", "investigation", 500, output)); file.put( "form-highlevelactivities", getJsonActivities(teacherFile.getDevelopmentActivitySet(), "highlevelactivities", "actividade", 200, output)); file.put( "form-otherpublications", getJsonActivities(teacherFile.getOtherPublicationActivitySet(), "otherpublications", "outras publicações", 500, output)); file.put( "form-professional", getJsonActivities(teacherFile.getOtherProfessionalActivitySet(), "profession", "profession", 200, output)); setTeachingService(degreeFile, teacherFile, output, file); } toplevel.put("q-cf-cfile", file); toplevel.put("q-new-cf-cfile", file); jsons.put(toplevel, output.toString()); }); return jsons; } protected void setTeachingService(DegreeFile degreeFile, TeacherFile teacherFile, StringBuilder output, JSONObject file) { JSONArray insideLectures = new JSONArray(); teacherFile.getA3esTeachingService().forEach(teachingService -> { JSONObject lecture = new JSONObject(); lecture.put("curricularUnit", cut("curricularUnit", teachingService.getCurricularUnitName(), output, 100)); lecture.put("studyCycle", cut("studyCycle", teachingService.getStudyCycles(), output, 200)); lecture.put("type", cut("type", teachingService.getMethodologyTypes(), output, 30)); lecture.put("hoursPerWeek", teachingService.getTotalHours()); insideLectures.add(lecture); }); file.put("form-unit", insideLectures.subList(0, Math.min(TEACHER_SERVICE_ITEMS, insideLectures.size()))); } private JSONArray getJsonActivities(Set<? 
extends TeacherActivity> teacherActivities, String jsonObject, String outputLabel, int size, StringBuilder output) { final JSONArray researchArray = new JSONArray(); teacherActivities.forEach(activity -> { JSONObject research = new JSONObject(); research.put(jsonObject, cut(outputLabel, activity.getActivity(), output, size)); researchArray.add(research); }); return researchArray; } private List<String> uploadDegreeStudyPlan(ExportDegreeProcessBean form) { List<String> output = new ArrayList<String>(); for (Object object : invokeToArray(webResource().path(API_FOLDER).queryParam("formId", formId))) { JSONObject folder = (JSONObject) object; if (form.getDegreeStudyPlanFolderName().equals(folder.get("name"))) { String competencesId = (String) folder.get("id"); for (Object annexObj : invokeToArray(webResource().path(API_ANNEX).queryParam("formId", formId) .queryParam("folderId", competencesId))) { JSONObject annex = (JSONObject) annexObj; delete(webResource().path(API_ANNEX).path((String) annex.get("id")).queryParam("formId", formId) .queryParam("folderId", competencesId)); } for (Entry<JSONObject, String> json : buildDegreeStudyPlanJson(form.getDegreeFile()).entrySet()) { Response response = post(webResource().path(API_ANNEX).queryParam("formId", formId).queryParam("folderId", competencesId), json.getKey().toJSONString()); int status = response.getStatus(); if (status == 201) { output.add("201 Created: " + ((JSONObject) json.getKey().get(getDegreeStudyPlanFieldKey("1"))).get("pt") + ": " + json.getValue()); } else { output.add(status + ": " + ((JSONObject) json.getKey().get(getDegreeStudyPlanFieldKey("1"))).get("pt") + ": " + response.getEntity() + " input: " + json.getKey().toJSONString()); } }; } } return output; } private Map<JSONObject, String> buildDegreeStudyPlanJson(DegreeFile degreeFile) { String ukLanguage = " (" + UK.getDisplayLanguage() + ")"; String ptLanguage = " (" + PT.getDisplayLanguage() + ")"; ExecutionYear executionYear = degreeFile.getAccreditationProcess().getExecutionYear(); Map<LocalizedString, Map<String, Set<CurricularUnitFile>>> jsonMap = new HashMap<LocalizedString, Map<String, Set<CurricularUnitFile>>>(); degreeFile.getCurricularUnitFileSet().forEach(cf -> { if (cf.getCurricularCourse() == null) { addCurricularUnitFileToMap(jsonMap, cf, NO_GROUP, null); } else { for (Context context : contextsFor(cf.getCurricularCourse(), executionYear)) { LocalizedString group = groupFor(context, executionYear); ExecutionSemester executionSemester = getExecutionSemester(context, executionYear); String q432 = context.getCurricularYear() + " / " + executionSemester.getName(); addCurricularUnitFileToMap(jsonMap, cf, group, q432); } } }); Map<JSONObject, String> result = new HashMap<JSONObject, String>(); for (LocalizedString group : jsonMap.keySet()) { Map<String, Set<CurricularUnitFile>> groupByPeriod = jsonMap.get(group); for (String q432 : groupByPeriod.keySet()) { StringBuilder output = new StringBuilder(); JSONObject json = new JSONObject(); JSONObject q431 = new JSONObject(); q431.put("en", cutBegining(getDegreeStudyPlanFieldKey("1") + ukLanguage, group.getContent(UK), output, 100)); q431.put("pt", cutBegining(getDegreeStudyPlanFieldKey("1") + ptLanguage, group.getContent(PT), output, 100)); json.put(getDegreeStudyPlanFieldKey("1"), q431); json.put(getDegreeStudyPlanFieldKey("2"), cut(getDegreeStudyPlanFieldKey("2"), q432, output, 100)); JSONArray curricularUnitsArray = new JSONArray(); for (CurricularUnitFile cf : groupByPeriod.get(q432)) { JSONObject curricularUnit = new 
JSONObject(); curricularUnit.put("curricularUnit", cf.getCurricularUnitName().getContent()); curricularUnit.put("scientificArea", cf.getScientificArea()); curricularUnit.put("type", cf.getCourseRegime()); curricularUnit.put("totalWorkingHours", cf.getWorkingHours()); curricularUnit.put("totalContactHours", cf.getContactHours()); curricularUnit.put("credits", cf.getEcts()); curricularUnit.put("observations", cf.getObservations().getContent()); curricularUnitsArray.add(curricularUnit); } json.put("grid", curricularUnitsArray); result.put(json, output.toString()); } } return result; } private void addCurricularUnitFileToMap(Map<LocalizedString, Map<String, Set<CurricularUnitFile>>> jsonMap, CurricularUnitFile cf, LocalizedString group, String executionPeriod) { Map<String, Set<CurricularUnitFile>> jsonGroup = jsonMap.get(group); if (jsonGroup == null) { jsonGroup = new HashMap<String, Set<CurricularUnitFile>>(); } Set<CurricularUnitFile> curricularUnitFiles = jsonGroup.get(executionPeriod); if (curricularUnitFiles == null) { curricularUnitFiles = new HashSet<CurricularUnitFile>(); } curricularUnitFiles.add(cf); jsonGroup.put(executionPeriod, curricularUnitFiles); jsonMap.put(group, jsonGroup); } private ExecutionSemester getExecutionSemester(final Context context, final ExecutionYear executionYear) { final CurricularPeriod curricularPeriod = context.getCurricularPeriod(); if (curricularPeriod.getAcademicPeriod().getName().equals("SEMESTER")) { return (curricularPeriod.getChildOrder() == 1) ? executionYear.getFirstExecutionPeriod() : executionYear .getLastExecutionPeriod(); } else { return executionYear.getFirstExecutionPeriod(); } } public String getDegreeStudyPlanFolderName() { return "4.3 Plano de estudos"; } public String getDegreeStudyPlanFolderIndex() { return "4.3."; } protected String getDegreeStudyPlanFieldKey(String keyIndex) { return "q-" + getDegreeStudyPlanFolderIndex() + keyIndex; } private LocalizedString groupFor(final Context context, final ExecutionYear executionYear) { LocalizedString groupFor = groupFor(context.getParentCourseGroup(), executionYear, new LocalizedString(PT, "").with(UK, "")); return groupFor != null ? groupFor : NO_GROUP; } private LocalizedString groupFor(final CourseGroup group, final ExecutionYear executionYear, final LocalizedString groupName) { if (group == null || group.isCycleCourseGroup()) { return groupName; } final LocalizedString newName = group.getNameI18N(executionYear).append(groupName, ", "); return group.getParentContextsSet().stream().filter(c -> c.isOpen(executionYear)) .map(c -> groupFor(c.getParentCourseGroup(), executionYear, newName)).findAny().orElse(newName); } private Set<Context> contextsFor(final CurricularCourse curricularCourse, final ExecutionYear executionYear) { return curricularCourse.getParentContextsSet().stream().filter(c -> c.isOpen(executionYear)).collect(Collectors.toSet()); } protected String cut(String field, String content, StringBuilder output, int size) { if (content == null) { output.append(message("message.fieldName.empty", field)); } else { if (content.length() > size) { output.append(message("message.field.cutted", field, size)); return content.substring(0, size - 4) + " ..."; } } return content; } protected String cutBegining(String field, String content, StringBuilder output, int size) { if (content == null) { output.append(message("message.fieldName.empty", field)); } else { if (content.length() > size) { output.append(message("message.field.cutted", field, size)); return "..." 
+ content.substring((content.length() - size) + 3, content.length()); } } return content; } private String message(String code, Object... args) { return messageSource.getMessage(code, args, I18N.getLocale()); } }
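// Illustrative sketch (standalone, hypothetical class and names): the cut(...) and cutBegining(...)
// helpers above trim field values to the A3ES form limits, appending or prepending "..." so the
// result is exactly `size` characters long, and record a warning message for the user. The tiny
// class below mirrors that truncation rule in isolation so it can be checked by hand; it is not
// part of the export code above and uses no message bundle.
final class TruncationSketch {

    // Mirrors cut(...): keep the first (size - 4) characters and append " ...".
    static String cutEnd(String content, int size) {
        if (content == null || content.length() <= size) {
            return content;
        }
        return content.substring(0, size - 4) + " ...";
    }

    // Mirrors cutBegining(...): keep the last (size - 3) characters and prepend "...".
    static String cutStart(String content, int size) {
        if (content == null || content.length() <= size) {
            return content;
        }
        return "..." + content.substring(content.length() - size + 3);
    }

    public static void main(String[] args) {
        String value = "A fairly long academic qualification description";
        System.out.println(cutEnd(value, 20));   // "A fairly long ac ..."  (20 characters)
        System.out.println(cutStart(value, 20)); // "...ation description"  (20 characters)
    }
}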
package edu.gemini.spModel.target.env; import static edu.gemini.spModel.target.env.OptionsList.UpdateOps.appendAsPrimary; import edu.gemini.shared.util.immutable.*; import edu.gemini.spModel.guide.GuideProbe; import edu.gemini.spModel.guide.GuideProbeMap; import edu.gemini.spModel.pio.Param; import edu.gemini.spModel.pio.ParamSet; import edu.gemini.spModel.pio.Pio; import edu.gemini.spModel.pio.PioFactory; import edu.gemini.spModel.target.SPTarget; import java.io.Serializable; import java.util.*; // NOTE: Caching the active guiders here feels wrong. It is expensive to // calculate though (see GuideSync). This information was previously stored // in each GuideProbeTargets instance, via an enabled/disabled flag so this // seems like an improvement at least. // NOTE2: It's debatable whether this object should exist or just be merged // with TargetEnvironment. On the one hand, it is nice to have a single // container for all the guide target information. On the other, it is // inconvenient whenever the information is accessed or updated to go through // another layer of hierarchy that is, after all, pretty thin. /** * A GuideEnvironment tracks the active guiders and a collection of * {@link GuideGroup} options. It is meant to contain all target information * related to guiding. */ public final class GuideEnvironment implements Serializable, TargetContainer, OptionsList<GuideGroup> { /** * An empty GuideEnvironment. */ public static GuideEnvironment EMPTY = create(OptionsListImpl.create(GuideGroup.EMPTY_LIST)); /** * Creates a GuideEnvironment with the given set of GuideGroups and no * available guiders. */ public static GuideEnvironment create(OptionsList<GuideGroup> guideGroups) { Set<GuideProbe> empty = Collections.emptySet(); return new GuideEnvironment(empty, guideGroups); } /** * Creates a GuideEnvironment with the given available guiders and * set of GuideGroups. */ public static GuideEnvironment create(Collection<GuideProbe> availableGuiders, OptionsList<GuideGroup> guideGroups) { Set<GuideProbe> copyAct = newGuideProbeSet(availableGuiders); return new GuideEnvironment(copyAct, guideGroups); } private static Set<GuideProbe> cpGuideProbes(Collection<GuideProbe> elements) { final Set<GuideProbe> s = new TreeSet<>(GuideProbe.KeyComparator.instance); s.addAll(elements); return Collections.unmodifiableSet(s); } private static Set<GuideProbe> newGuideProbeSet(Collection<GuideProbe> elements) { return (elements.size() == 0) ? Collections.<GuideProbe>emptySet() : cpGuideProbes(elements); } private final Set<GuideProbe> activeGuiders; private final OptionsList<GuideGroup> guideGroups; private GuideEnvironment(Set<GuideProbe> activeGuiders, OptionsList<GuideGroup> guideGroups) { if (activeGuiders == null) { throw new IllegalArgumentException("availableGuiders == null"); } if (guideGroups == null) { throw new IllegalArgumentException("guideGroups == null"); } this.activeGuiders = activeGuiders; this.guideGroups = guideGroups; } /** * Gets an immutable set of the {@link GuideProbe}s that should be * considered active in this GuideEnvironment. */ public Set<GuideProbe> getActiveGuiders() { return activeGuiders; } /** * Sets the set of {@link GuideProbe} that should be considered active in * the new GuideEnvironment that is created and returned. 
* * @return a new GuideEnvironment, identical to this one, but with the * given activeGuiders */ public GuideEnvironment setActiveGuiders(Set<GuideProbe> activeGuiders) { if (activeGuiders.equals(this.activeGuiders)) return this; // reset the selection whenever the set of active guiders changes return new GuideEnvironment(newGuideProbeSet(activeGuiders), guideGroups); } /** * Gets all the {@link GuideProbe}s referenced by all the * {@link GuideProbeTargets} in all the {@link GuideGroup}s in this * environment. They are sorted using the * {@link edu.gemini.spModel.guide.GuideProbe.KeyComparator}. */ public SortedSet<GuideProbe> getReferencedGuiders() { final SortedSet<GuideProbe> res = new TreeSet<>(GuideProbe.KeyComparator.instance); guideGroups.getOptions().foreach(new ApplyOp<GuideGroup>() { @Override public void apply(GuideGroup guideGroup) { res.addAll(guideGroup.getReferencedGuiders()); } }); return res; } @Override public ImList<SPTarget> getTargets() { return guideGroups.getOptions().flatMap(TargetContainer.EXTRACT_TARGET); } @Override public boolean containsTarget(SPTarget target) { return guideGroups.getOptions().exists(new TargetMatch(target)); } private GuideEnvironment updateGuideGroups(UpdateOp<GuideGroup> f) { ImList<GuideGroup> updatedList = guideGroups.getOptions().map(f); Option<Integer> primary = guideGroups.getPrimaryIndex(); OptionsListImpl<GuideGroup> updated = OptionsListImpl.create(primary, updatedList); return new GuideEnvironment(activeGuiders, updated); } @Override public GuideEnvironment cloneTargets() { return updateGuideGroups(GuideGroup.CLONE_TARGETS); } @Override public GuideEnvironment removeTarget(SPTarget target) { return updateGuideGroups(GuideGroup.removeTargetUpdate(target)); } public GuideEnvironment removeGroup(GuideGroup group) { return setOptions(getOptions().remove(group)); } @Override public Iterator<GuideGroup> iterator() { return guideGroups.iterator(); } @Override public Option<GuideGroup> getPrimary() { return guideGroups.getPrimary(); } @Override public GuideEnvironment selectPrimary(Option<GuideGroup> primary) { return new GuideEnvironment(activeGuiders, guideGroups.selectPrimary(primary)); } @Override public GuideEnvironment selectPrimary(GuideGroup primary) { return new GuideEnvironment(activeGuiders, guideGroups.selectPrimary(primary)); } @Override public GuideEnvironment setPrimary(GuideGroup primary) { return new GuideEnvironment(activeGuiders, guideGroups.setPrimary(primary)); } @Override public Option<Integer> getPrimaryIndex() { return guideGroups.getPrimaryIndex(); } @Override public GuideEnvironment setPrimaryIndex(Option<Integer> primary) { return new GuideEnvironment(activeGuiders, guideGroups.setPrimaryIndex(primary)); } @Override public GuideEnvironment setPrimaryIndex(int primary) { return new GuideEnvironment(activeGuiders, guideGroups.setPrimaryIndex(primary)); } @Override public ImList<GuideGroup> getOptions() { return guideGroups.getOptions(); } @Override public GuideEnvironment setOptions(ImList<GuideGroup> newList) { return new GuideEnvironment(activeGuiders, guideGroups.setOptions(newList)); } @Override public GuideEnvironment update(Option<Integer> primaryIndex, ImList<GuideGroup> options) { return new GuideEnvironment(activeGuiders, guideGroups.update(primaryIndex, options)); } @Override public GuideEnvironment update(Op<GuideGroup> op) { return new GuideEnvironment(activeGuiders, guideGroups.update(op)); } public String mkString(String prefix, String sep, String suffix) { return guideGroups.mkString(prefix, sep, 
suffix); } @Override public String toString() { return mkString("[", ", ", "]"); } /** * A convenience method for updating a guide group's GuideProbeTargets. * Returns a copy of this GuideEnvironment after having included or updated * the given GuideProbeTargets in the indicated GuideGroup. */ public GuideEnvironment putGuideProbeTargets(GuideGroup grp, GuideProbeTargets gpt) { // Find the index of the group in the list of options, if it exists in // the guide environment. Otherwise, will be -1. int index = getOptions().indexOf(grp); // Update the group with the new guide probe targets. grp = grp.put(gpt); // Update the guide environment. return (index == -1) ? update(appendAsPrimary(grp)) : setOptions(getOptions().updated(index, grp)); } public static final String PARAM_SET_NAME = "guideEnv"; public ParamSet getParamSet(PioFactory factory) { ParamSet paramSet = factory.createParamSet(PARAM_SET_NAME); // Active guide probes Param active = factory.createParam("active"); for (GuideProbe g : activeGuiders) { active.addValue(g.getKey()); } paramSet.addParam(active); // Guide groups. Option<Integer> primary = guideGroups.getPrimaryIndex(); if (!primary.isEmpty()) { Pio.addIntParam(factory, paramSet, "primary", primary.getValue()); } for (GuideGroup grp : guideGroups) { paramSet.addParamSet(grp.getParamSet(factory)); } return paramSet; } public static GuideEnvironment fromParamSet(ParamSet parent) { // Active guide probes. Set<GuideProbe> active = new TreeSet<>(GuideProbe.KeyComparator.instance); Param activeParam = parent.getParam("active"); if (activeParam != null) { for (String guiderKey : activeParam.getValues()) { GuideProbe gp = GuideProbeMap.instance.get(guiderKey); if (gp != null) active.add(gp); } } // Guide groups. int primary = Pio.getIntValue(parent, "primary", -1); Option<Integer> primaryOpt = (primary < 0) ? None.INTEGER : new Some<>(primary); List<GuideGroup> groups = new ArrayList<>(); for (ParamSet gps : parent.getParamSets()) { groups.add(GuideGroup.fromParamSet(gps)); } return create(active, OptionsListImpl.create(primaryOpt, DefaultImList.create(groups))); } }
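// Illustrative usage sketch (hypothetical class, not part of the Gemini sources above). It shows the
// two idioms GuideEnvironment is built around: updating a group's GuideProbeTargets through
// putGuideProbeTargets, which returns a new immutable environment, and round-tripping an environment
// through its ParamSet form. The GuideGroup/GuideProbeTargets instances and the concrete PioFactory
// are assumed to be supplied by the caller; their construction is not shown here.
package edu.gemini.spModel.target.env;

import edu.gemini.spModel.pio.PioFactory;

final class GuideEnvironmentUsageSketch {
    private GuideEnvironmentUsageSketch() {}

    static GuideEnvironment withTargets(GuideEnvironment env, GuideGroup group, GuideProbeTargets gpt) {
        // GuideEnvironment is immutable: the call returns an updated copy and leaves `env` unchanged.
        // If `group` is already one of the options it is replaced in place; otherwise the updated
        // group is appended and becomes the primary group (see putGuideProbeTargets above).
        return env.putGuideProbeTargets(group, gpt);
    }

    static GuideEnvironment roundTrip(GuideEnvironment env, PioFactory factory) {
        // getParamSet/fromParamSet preserve the active guiders, the guide groups and the primary index.
        return GuideEnvironment.fromParamSet(env.getParamSet(factory));
    }
}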
package seedu.tasklist.ui; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.Calendar; import java.util.Date; import static com.google.common.base.Preconditions.*; import com.google.common.eventbus.Subscribe; import javafx.beans.property.SimpleStringProperty; import javafx.beans.property.StringProperty; import javafx.beans.property.StringPropertyBase; import javafx.fxml.FXML; import javafx.scene.Node; import javafx.scene.Scene; import javafx.scene.control.Label; import javafx.scene.control.MenuItem; import javafx.scene.input.KeyCombination; import javafx.scene.layout.AnchorPane; import javafx.scene.layout.VBox; import javafx.stage.Stage; import seedu.tasklist.commons.core.Config; import seedu.tasklist.commons.core.EventsCenter; import seedu.tasklist.commons.core.GuiSettings; import seedu.tasklist.commons.events.TickEvent; import seedu.tasklist.commons.events.ui.ExitAppRequestEvent; import seedu.tasklist.logic.Logic; import seedu.tasklist.model.UserPrefs; /** * The Main Window. Provides the basic application layout containing * a menu bar and space where other JavaFX elements can be placed. */ public class MainWindow extends UiPart { private static final String ICON = "/images/smart_scheduler.png"; private static final String FXML = "MainWindow.fxml"; public static final int MIN_HEIGHT = 600; public static final int MIN_WIDTH = 450; private Logic logic; // Independent Ui parts residing in this Ui container private CategoryPanel categoryPanel; private TaskListPanel taskListPanel; private ResultDisplay resultDisplay; private StatusBarFooter statusBarFooter; private CommandBox commandBox; private Config config; private UserPrefs userPrefs; // Handles to elements of this Ui container private VBox rootLayout; private Scene scene; private String taskListName; @FXML private Label dateTimeLabel; @FXML private AnchorPane categoryPanelPlaceholder; @FXML private AnchorPane commandBoxPlaceholder; @FXML private MenuItem helpMenuItem; @FXML private AnchorPane personListPanelPlaceholder; @FXML private AnchorPane resultDisplayPlaceholder; @FXML private AnchorPane statusbarPlaceholder; //@FXML //private SplitPane dateTimePlaceholder; public MainWindow() { super(); } @Override public void setNode(Node node) { rootLayout = (VBox) node; } @Override public String getFxmlPath() { return FXML; } public static MainWindow load(Stage primaryStage, Config config, UserPrefs prefs, Logic logic) { MainWindow mainWindow = UiPartLoader.loadUiPart(primaryStage, new MainWindow()); mainWindow.configure(config.getAppTitle(), config.getTaskListName(), config, prefs, logic); EventsCenter.getInstance().registerHandler(mainWindow); return mainWindow; } @Subscribe private void handleTickEvent(TickEvent tickEvent){ SimpleDateFormat dateFormatter = new SimpleDateFormat("dd\'"+getDateSuffix(Integer.parseInt(new SimpleDateFormat("dd").format(new Date())))+"\' MMMMMMMMM, yyyy | h:mm a"); dateTimeLabel.setText(dateFormatter.format(new Date())); } private void configure(String appTitle, String taskListName, Config config, UserPrefs prefs, Logic logic) { //Set dependencies this.logic = logic; this.taskListName = taskListName; this.config = config; this.userPrefs = prefs; //Configure the UI setTitle(appTitle); setIcon(ICON); setWindowMinSize(); setWindowDefaultSize(prefs); scene = new Scene(rootLayout); primaryStage.setScene(scene); setAccelerators(); } private void setAccelerators() { helpMenuItem.setAccelerator(KeyCombination.valueOf("F1")); } void fillInnerParts() { categoryPanel = 
CategoryPanel.load(primaryStage, getCategoryPanelPlaceholder(), logic.getTaskCounter()); taskListPanel = TaskListPanel.load(primaryStage, getTaskListPlaceholder(), logic.getFilteredTaskList()); resultDisplay = ResultDisplay.load(primaryStage, getResultDisplayPlaceholder()); statusBarFooter = StatusBarFooter.load(primaryStage, getStatusbarPlaceholder(), config.getTaskListFilePath()); commandBox = CommandBox.load(primaryStage, getCommandBoxPlaceholder(), resultDisplay, logic); setLabelText(); System.out.println(); } private AnchorPane getCommandBoxPlaceholder() { return commandBoxPlaceholder; } private AnchorPane getStatusbarPlaceholder() { return statusbarPlaceholder; } private AnchorPane getResultDisplayPlaceholder() { return resultDisplayPlaceholder; } public AnchorPane getTaskListPlaceholder() { return personListPanelPlaceholder; } public AnchorPane getCategoryPanelPlaceholder() { return categoryPanelPlaceholder; } public void hide() { primaryStage.hide(); } private void setTitle(String appTitle) { primaryStage.setTitle(appTitle); } /** * Sets the default size based on user preferences. */ protected void setWindowDefaultSize(UserPrefs prefs) { primaryStage.setHeight(prefs.getGuiSettings().getWindowHeight()); primaryStage.setWidth(prefs.getGuiSettings().getWindowWidth()); if (prefs.getGuiSettings().getWindowCoordinates() != null) { primaryStage.setX(prefs.getGuiSettings().getWindowCoordinates().getX()); primaryStage.setY(prefs.getGuiSettings().getWindowCoordinates().getY()); } } private void setWindowMinSize() { primaryStage.setMinHeight(MIN_HEIGHT); primaryStage.setMinWidth(MIN_WIDTH); } /** * Returns the current size and the position of the main Window. */ public GuiSettings getCurrentGuiSetting() { return new GuiSettings(primaryStage.getWidth(), primaryStage.getHeight(), (int) primaryStage.getX(), (int) primaryStage.getY()); } @FXML public void handleHelp() { HelpWindow helpWindow = HelpWindow.load(primaryStage); helpWindow.show(); } public void show() { primaryStage.show(); } /** * Closes the application. */ @FXML private void handleExit() { raise(new ExitAppRequestEvent()); } public TaskListPanel getTaskListPanel() { return this.taskListPanel; } // public void loadTaskPage(ReadOnlyTask task) { // browserPanel.loadTaskPage(task); public void releaseResources() { // browserPanel.freeResources(); } public void setLabelText() { assert dateTimeLabel != null; SimpleDateFormat dateFormatter = new SimpleDateFormat("dd\'"+getDateSuffix(Integer.parseInt(new SimpleDateFormat("dd").format(new Date())))+"\' MMMMMMMMM, yyyy | h:mm a"); dateTimeLabel.setText(dateFormatter.format(new Date())); } private String getDateSuffix(int date) { checkArgument(date >= 1 && date <= 31, "illegal day of month: " + date); if (date >= 11 && date <= 13) { return "th"; } switch (date % 10) { case 1: return "st"; case 2: return "nd"; case 3: return "rd"; default: return "th"; } } }
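// Illustrative sketch (standalone, hypothetical class): the ordinal-suffix rule used by
// MainWindow#getDateSuffix above when building the date/time label. Days 11-13 are special-cased
// to "th"; otherwise the suffix follows the last digit (1 -> "st", 2 -> "nd", 3 -> "rd", else "th").
package seedu.tasklist.ui;

final class DateSuffixSketch {

    static String suffixFor(int dayOfMonth) {
        if (dayOfMonth >= 11 && dayOfMonth <= 13) {
            return "th";
        }
        switch (dayOfMonth % 10) {
        case 1:
            return "st";
        case 2:
            return "nd";
        case 3:
            return "rd";
        default:
            return "th";
        }
    }

    public static void main(String[] args) {
        System.out.println(1 + suffixFor(1));   // 1st
        System.out.println(12 + suffixFor(12)); // 12th (11-13 override the last-digit rule)
        System.out.println(22 + suffixFor(22)); // 22nd
        System.out.println(30 + suffixFor(30)); // 30th
    }
}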
package dr.evomodel.continuous; import dr.app.util.Arguments; import dr.evolution.tree.MultivariateTraitTree; import dr.evolution.tree.NodeRef; import dr.evomodel.branchratemodel.BranchRateModel; import dr.evomodel.tree.TreeStatistic; import dr.geo.math.SphericalPolarCoordinates; import dr.inference.model.Statistic; import dr.math.distributions.MultivariateNormalDistribution; import dr.stats.DiscreteStatistics; import dr.xml.*; import java.util.*; import java.io.IOException; /** * @author Marc Suchard * @author Philippe Lemey * @author Andrew Rambaut */ public class DiffusionRateStatistic extends Statistic.Abstract { public static final String DIFFUSION_RATE_STATISTIC = "diffusionRateStatistic"; public static final String TREE_DISPERSION_STATISTIC = "treeDispersionStatistic"; public static final String BOOLEAN_DIS_OPTION = "greatCircleDistance"; public static final String MODE = "mode"; public static final String MEDIAN = "median"; public static final String AVERAGE = "average"; // average over all branches public static final String WEIGHTED_AVERAGE = "weightedAverage"; // weighted average (=total distance/total time) public static final String COEFFICIENT_OF_VARIATION = "coefficientOfVariation"; // weighted average (=total distance/total time) public static final String STATISTIC = "statistic"; public static final String DIFFUSION_RATE = "diffusionRate"; // weighted average (=total distance/total time) public static final String WAVEFRONT_DISTANCE = "wavefrontDistance"; // weighted average (=total distance/total time) public static final String WAVEFRONT_RATE = "wavefrontRate"; // weighted average (=total distance/total time) public static final String DIFFUSION_COEFFICIENT = "diffusionCoefficient"; // public static final String DIFFUSIONCOEFFICIENT = "diffusionCoefficient"; // weighted average (=total distance/total time) // public static final String BOOLEAN_DC_OPTION = "diffusionCoefficient"; public static final String HEIGHT_UPPER = "heightUpper"; public static final String HEIGHT_LOWER = "heightLower"; public static final String HEIGHT_LOWER_SERIE = "heightLowerSerie"; public static final String CUMULATIVE = "cumulative"; public DiffusionRateStatistic(String name, List<AbstractMultivariateTraitLikelihood> traitLikelihoods, boolean option, Mode mode, summaryStatistic statistic, double heightUpper, double heightLower, double[] lowerHeights, boolean cumulative) { super(name); this.traitLikelihoods = traitLikelihoods; this.useGreatCircleDistances = option; summaryMode = mode; summaryStat = statistic; this.heightUpper = heightUpper; if (lowerHeights == null){ heightLowers = new double[]{heightLower}; } else { heightLowers = extractUnique(lowerHeights); Arrays.sort(heightLowers); reverse(heightLowers); } this.cumulative = cumulative; } public int getDimension() { return heightLowers.length; } public double getStatisticValue(int dim) { String traitName = traitLikelihoods.get(0).getTraitName(); double treelength = 0; double treeDistance = 0; double maxDistanceFromRoot = 0; double maxDistanceOverTimeFromRoot = 0; //double[] rates = null; List<Double> rates = new ArrayList<Double>(); //double[] diffusionCoefficients = null; List<Double> diffusionCoefficients = new ArrayList<Double>(); double waDiffusionCoefficient = 0; double lowerHeight = heightLowers[dim]; double upperHeight = Double.MAX_VALUE; if (heightLowers.length == 1){ upperHeight = heightUpper; } else { if (dim > 0) { if (!cumulative) { upperHeight = heightLowers[dim -1]; } } } // System.out.println("dim = "+dim+", heightLower = 
"+lowerHeight+", heightUpper = "+upperHeight); for (AbstractMultivariateTraitLikelihood traitLikelihood : traitLikelihoods) { MultivariateTraitTree tree = traitLikelihood.getTreeModel(); BranchRateModel branchRates = traitLikelihood.getBranchRateModel(); for (int i = 0; i < tree.getNodeCount(); i++) { NodeRef node = tree.getNode(i); if (node != tree.getRoot()) { NodeRef parentNode = tree.getParent(node); if ((tree.getNodeHeight(parentNode) > lowerHeight) && (tree.getNodeHeight(node) < upperHeight)) { double[] trait = traitLikelihood.getTraitForNode(tree, node, traitName); double[] parentTrait = traitLikelihood.getTraitForNode(tree, parentNode, traitName); double[] traitUp = parentTrait; double[] traitLow = trait; double timeUp = tree.getNodeHeight(parentNode); double timeLow = tree.getNodeHeight(node); double rate = (branchRates != null ? branchRates.getBranchRate(tree, node) : 1.0); MultivariateDiffusionModel diffModel = traitLikelihood.diffusionModel; double[] precision = diffModel.getPrecisionParameter().getParameterValues(); if (tree.getNodeHeight(parentNode) > upperHeight) { timeUp = upperHeight; //TODO: implement TrueNoise?? traitUp = imputeValue(trait, parentTrait, upperHeight, tree.getNodeHeight(node), tree.getNodeHeight(parentNode), precision, rate, false); } if (tree.getNodeHeight(node) < lowerHeight) { timeLow = lowerHeight; traitLow = imputeValue(trait, parentTrait, lowerHeight, tree.getNodeHeight(node), tree.getNodeHeight(parentNode), precision, rate, false); } double time = timeUp - timeLow; treelength += time; double[] rootTrait = traitLikelihood.getTraitForNode(tree, tree.getRoot(), traitName); if (useGreatCircleDistances && (trait.length == 2)) { // Great Circle distance SphericalPolarCoordinates coord1 = new SphericalPolarCoordinates(traitLow[0], traitLow[1]); SphericalPolarCoordinates coord2 = new SphericalPolarCoordinates(traitUp[0], traitUp[1]); double distance = coord1.distance(coord2); treeDistance += distance; double dc = Math.pow(distance,2)/(4*time); diffusionCoefficients.add(dc); waDiffusionCoefficient += dc*time; rates.add(distance/time); SphericalPolarCoordinates rootCoord = new SphericalPolarCoordinates(rootTrait[0], rootTrait[1]); double tempDistanceFromRoot = rootCoord.distance(coord2); if (tempDistanceFromRoot > maxDistanceFromRoot){ maxDistanceFromRoot = tempDistanceFromRoot; maxDistanceOverTimeFromRoot = tempDistanceFromRoot/(tree.getNodeHeight(tree.getRoot()) - timeLow); //distance between traitLow and traitUp for maxDistanceFromRoot if (timeUp == upperHeight) { maxDistanceFromRoot = distance; maxDistanceOverTimeFromRoot = distance/time; } } } else { double distance = getNativeDistance(traitLow, traitUp); treeDistance += distance; double dc = Math.pow(distance,2)/(4*time); diffusionCoefficients.add(dc); waDiffusionCoefficient += dc*time; rates.add(distance/time); double tempDistanceFromRoot = getNativeDistance(traitLow, rootTrait); if (tempDistanceFromRoot > maxDistanceFromRoot){ maxDistanceFromRoot = tempDistanceFromRoot; maxDistanceOverTimeFromRoot = tempDistanceFromRoot/(tree.getNodeHeight(tree.getRoot()) - timeLow); //distance between traitLow and traitUp for maxDistanceFromRoot if (timeUp == upperHeight) { maxDistanceFromRoot = distance; maxDistanceOverTimeFromRoot = distance/time; } } } } } } } if (summaryStat == summaryStatistic.DIFFUSION_RATE){ if (summaryMode == Mode.AVERAGE) { return DiscreteStatistics.mean(toArray(rates)); } else if (summaryMode == Mode.MEDIAN) { return DiscreteStatistics.median(toArray(rates)); } else if (summaryMode == 
Mode.COEFFICIENT_OF_VARIATION) { // don't compute mean twice final double mean = DiscreteStatistics.mean(toArray(rates)); return Math.sqrt(DiscreteStatistics.variance(toArray(rates), mean)) / mean; } else { return treeDistance / treelength; } } else if (summaryStat == summaryStatistic.DIFFUSION_COEFFICIENT) { if (summaryMode == Mode.AVERAGE) { return DiscreteStatistics.mean(toArray(diffusionCoefficients)); } else if (summaryMode == Mode.MEDIAN) { return DiscreteStatistics.median(toArray(diffusionCoefficients)); } else if (summaryMode == Mode.COEFFICIENT_OF_VARIATION) { // don't compute mean twice final double mean = DiscreteStatistics.mean(toArray(diffusionCoefficients)); return Math.sqrt(DiscreteStatistics.variance(toArray(diffusionCoefficients), mean)) / mean; } else { return waDiffusionCoefficient/treelength; } } else if (summaryStat == summaryStatistic.WAVEFRONT_DISTANCE) { return maxDistanceFromRoot; } else { return maxDistanceOverTimeFromRoot; } } // private double getNativeDistance(double[] location1, double[] location2) { // return Math.sqrt(Math.pow((location2[0] - location1[0]), 2.0) + Math.pow((location2[1] - location1[1]), 2.0)); private double getNativeDistance(double[] location1, double[] location2) { int traitDimension = location1.length; double sum = 0; for (int i = 0; i < traitDimension; i++) { sum += Math.pow((location2[i] - location1[i]),2); // System.out.println(sum); } return Math.sqrt(sum); } private double[] toArray(List<Double> list) { double[] returnArray = new double[list.size()]; for (int i = 0; i < list.size(); i++) { returnArray[i] = Double.valueOf(list.get(i).toString()); } return returnArray; } private double[] imputeValue(double[] nodeValue, double[] parentValue, double time, double nodeHeight, double parentHeight, double[] precisionArray, double rate, boolean trueNoise) { final double scaledTimeChild = (time - nodeHeight) * rate; final double scaledTimeParent = (parentHeight - time) * rate; final double scaledWeightTotal = 1.0 / scaledTimeChild + 1.0 / scaledTimeParent; final int dim = nodeValue.length; double[][] precision = new double[dim][dim]; int counter = 0; for (int a = 0; a < dim; a++){ for (int b = 0; b < dim; b++){ precision[a][b] = precisionArray[counter]; counter++ ; } } if (scaledTimeChild == 0) return nodeValue; if (scaledTimeParent == 0) return parentValue; // Find mean value, weighted average double[] mean = new double[dim]; double[][] scaledPrecision = new double[dim][dim]; for (int i = 0; i < dim; i++) { mean[i] = (nodeValue[i] / scaledTimeChild + parentValue[i] / scaledTimeParent) / scaledWeightTotal; if (trueNoise) { for (int j = i; j < dim; j++) scaledPrecision[j][i] = scaledPrecision[i][j] = precision[i][j] * scaledWeightTotal; } } // System.out.print(time+"\t"+nodeHeight+"\t"+parentHeight+"\t"+scaledTimeChild+"\t"+scaledTimeParent+"\t"+scaledWeightTotal+"\t"+mean[0]+"\t"+mean[1]+"\t"+scaledPrecision[0][0]+"\t"+scaledPrecision[0][1]+"\t"+scaledPrecision[1][0]+"\t"+scaledPrecision[1][1]); if (trueNoise) { mean = MultivariateNormalDistribution.nextMultivariateNormalPrecision(mean, scaledPrecision); } // System.out.println("\t"+mean[0]+"\t"+mean[1]+"\r"); double[] result = new double[dim]; for (int i = 0; i < dim; i++) result[i] = mean[i]; return result; } public static double[] parseVariableLengthDoubleArray(String inString) throws Arguments.ArgumentException { List<Double> returnList = new ArrayList<Double>(); StringTokenizer st = new StringTokenizer(inString, ","); while (st.hasMoreTokens()) { try { 
returnList.add(Double.parseDouble(st.nextToken())); } catch (NumberFormatException e) { throw new Arguments.ArgumentException(); } } if (returnList.size() > 0) { double[] doubleArray = new double[returnList.size()]; for (int i = 0; i < doubleArray.length; i++) doubleArray[i] = returnList.get(i); return doubleArray; } return null; } @Override public String getDimensionName(int dim) { if (getDimension() == 1) { return getStatisticName(); } else { return getStatisticName() +".height"+ heightLowers[dim]; } } public static void reverse(double[] array) { if (array == null) { return; } int i = 0; int j = array.length - 1; double tmp; while (j > i) { tmp = array[j]; array[j] = array[i]; array[i] = tmp; j--; i++; } } public static double[] extractUnique(double[] array){ Set<Double> tmp = new LinkedHashSet<Double>(); for (Double each : array) { tmp.add(each); } double [] output = new double[tmp.size()]; int i = 0; for (Double each : tmp) { output[i++] = each; } return output; } enum Mode { AVERAGE, WEIGHTED_AVERAGE, MEDIAN, COEFFICIENT_OF_VARIATION } enum summaryStatistic { DIFFUSION_RATE, DIFFUSION_COEFFICIENT, WAVEFRONT_DISTANCE, WAVEFRONT_RATE, } public static XMLObjectParser PARSER = new AbstractXMLObjectParser() { public String getParserName() { return DIFFUSION_RATE_STATISTIC; } @Override public String[] getParserNames() { return new String[]{getParserName(), TREE_DISPERSION_STATISTIC}; } public Object parseXMLObject(XMLObject xo) throws XMLParseException { String name = xo.getAttribute(NAME, xo.getId()); boolean option = xo.getAttribute(BOOLEAN_DIS_OPTION, true); // Default value is true Mode averageMode; String mode = xo.getAttribute(MODE, WEIGHTED_AVERAGE); if (mode.equals(AVERAGE)) { averageMode = Mode.AVERAGE; } else if (mode.equals(MEDIAN)) { averageMode = Mode.MEDIAN; } else if (mode.equals(COEFFICIENT_OF_VARIATION)) { averageMode = Mode.COEFFICIENT_OF_VARIATION; } else if (mode.equals(WEIGHTED_AVERAGE)) { averageMode = Mode.WEIGHTED_AVERAGE; } else { System.err.println("Unknown mode: "+mode+". Reverting to weighted average"); averageMode = Mode.WEIGHTED_AVERAGE; } // boolean diffCoeff = xo.getAttribute(BOOLEAN_DC_OPTION, false); // Default value is false summaryStatistic summaryStat; String statistic = xo.getAttribute(STATISTIC, DIFFUSION_RATE); if (statistic.equals(DIFFUSION_RATE)) { summaryStat = summaryStatistic.DIFFUSION_RATE; } else if (statistic.equals(WAVEFRONT_DISTANCE)) { summaryStat = summaryStatistic.WAVEFRONT_DISTANCE; } else if (statistic.equals(WAVEFRONT_RATE)) { summaryStat = summaryStatistic.WAVEFRONT_RATE; } else if (statistic.equals(DIFFUSION_COEFFICIENT)) { summaryStat = summaryStatistic.DIFFUSION_COEFFICIENT; } else { System.err.println("Unknown statistic: "+statistic+". 
Reverting to diffusion rate"); summaryStat = summaryStatistic.DIFFUSION_COEFFICIENT; } final double upperHeight = xo.getAttribute(HEIGHT_UPPER, Double.MAX_VALUE); final double lowerHeight = xo.getAttribute(HEIGHT_LOWER, 0.0); double[] lowerHeights = null; if (xo.hasAttribute(HEIGHT_LOWER_SERIE)){ String lowerHeightsString = xo.getStringAttribute(HEIGHT_LOWER_SERIE); try { lowerHeights = parseVariableLengthDoubleArray(lowerHeightsString); } catch (Arguments.ArgumentException e) { System.err.println("Error reading " + HEIGHT_LOWER_SERIE); System.exit(1); } } boolean cumulative = xo.getAttribute(CUMULATIVE, false); List<AbstractMultivariateTraitLikelihood> traitLikelihoods = new ArrayList<AbstractMultivariateTraitLikelihood>(); for (int i = 0; i < xo.getChildCount(); i++) { if (xo.getChild(i) instanceof AbstractMultivariateTraitLikelihood) { AbstractMultivariateTraitLikelihood amtl = (AbstractMultivariateTraitLikelihood) xo.getChild(i); traitLikelihoods.add(amtl); } } return new DiffusionRateStatistic(name, traitLikelihoods, option, averageMode, summaryStat, upperHeight, lowerHeight, lowerHeights, cumulative); }
package org.gbif.ws.server.filter; import java.net.HttpURLConnection; import java.net.URI; import java.util.UUID; import javax.ws.rs.core.Response; import com.sun.jersey.spi.container.ContainerRequest; import com.sun.jersey.spi.container.ContainerResponse; import com.sun.jersey.spi.container.ContainerResponseFilter; /** * Filter that updates http headers when a new resource is successfully created via a POST request unless * the response returns 204 No Content. * * The following headers are added or replaced if they existed: * <ul> * <li>Http response code 201</li> * <li>Location header is set accordingly based on returned key</li> * </ul> */ public class CreatedResponseFilter implements ContainerResponseFilter { @Override public ContainerResponse filter(ContainerRequest request, ContainerResponse response) { if (request.getMethod() != null && "post".equalsIgnoreCase(request.getMethod()) && response.getStatusType() != null && !Response.Status.NO_CONTENT.equals(response.getStatusType()) && response.getStatusType().getFamily() == Response.Status.Family.SUCCESSFUL) { response.setStatus(HttpURLConnection.HTTP_CREATED); // if response contains the key, also set Location if (response.getEntity() != null) { Object key = response.getEntity(); // we use POSTs also for non Create method which can return large objects, e.g. a list of parsed names // only set the location header if the object is one of the following simple primary key data types if (key instanceof Number || key instanceof UUID || key instanceof String) { // allow POSTing to resource with or without trailing slash URI location = request.getRequestUriBuilder().path(key.toString()).build(); response.getHttpHeaders().putSingle("Location", location.toString()); } } } return response; } }
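// Illustrative sketch (hypothetical class, not part of the filter above): for a successful POST
// whose entity is a simple key (Number, UUID or String), the filter sets the status to 201 and
// builds the Location header by appending the key to the request URI, i.e.
// request.getRequestUriBuilder().path(key.toString()).build(). The snippet reproduces that URI
// construction with a plain JAX-RS UriBuilder; the base URI and key are made up, and running it
// requires a JAX-RS implementation (e.g. Jersey, which this filter targets) on the classpath.
package org.gbif.ws.server.filter;

import java.net.URI;
import java.util.UUID;
import javax.ws.rs.core.UriBuilder;

final class CreatedResponseFilterSketch {

    public static void main(String[] args) {
        UUID key = UUID.fromString("38f91579-ff44-4e8e-a43a-3d7ee31bb7a1");

        // Same rule the filter applies: append the returned key as a path segment of the request URI.
        URI location = UriBuilder.fromUri("http://api.example.org/v1/dataset").path(key.toString()).build();

        System.out.println("HTTP 201 Created");
        System.out.println("Location: " + location);
        // -> Location: http://api.example.org/v1/dataset/38f91579-ff44-4e8e-a43a-3d7ee31bb7a1
    }
}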
package ch.idsia.mario.simulation; import ch.idsia.ai.agents.IAgent; import ch.idsia.mario.engine.MarioComponent; import ch.idsia.utils.SmartBool; import ch.idsia.utils.SmartInt; public class SimulationOptions { protected IAgent agent; protected MarioComponent marioComponent = null; protected SmartInt levelType = new SmartInt(); //int protected SmartInt levelDifficulty = new SmartInt(); //int protected SmartInt levelLength = new SmartInt(); //int protected SmartInt levelRandSeed = new SmartInt(); //int protected SmartBool visualization = new SmartBool(); //boolean protected SmartBool pauseWorld = new SmartBool(); // boolean protected SmartBool powerRestoration = new SmartBool(); //boolean protected SmartBool stopSimulationIfWin = new SmartBool(); //boolean //TODO: SK handle this common with all the rest options public int maxAttempts; public static int currentAttempt = 1; public SimulationOptions getBasicSimulatorOptions() { SimulationOptions ret = new SimulationOptions(); ret.setAgent(agent); ret.setLevelDifficulty(levelDifficulty.getValue()); ret.setLevelLength(levelLength.getValue()); ret.setLevelRandSeed(levelRandSeed.getValue()); ret.setLevelType(levelType.getValue()); ret.setMarioComponent(marioComponent); ret.setVisualization(visualization.getValue()); ret.setPauseWorld(pauseWorld.getValue()); ret.setPowerRestoration(powerRestoration.getValue()); ret.maxAttempts = this.maxAttempts; return ret; } public IAgent getAgent() { return agent; } public void setAgent(IAgent agent) { this.agent = agent; } public MarioComponent getMarioComponent() { return marioComponent; } public void setMarioComponent(MarioComponent marioComponent) { this.marioComponent = marioComponent; } public int getLevelType() { return levelType.getValue(); } public void setLevelType(int levelType) { this.levelType.setValue(levelType); } public int getLevelDifficulty() { return levelDifficulty.getValue(); } public void setLevelDifficulty(int levelDifficulty) { this.levelDifficulty.setValue(levelDifficulty); } public int getLevelLength() { return levelLength.getValue(); } public void setLevelLength(int levelLength) { this.levelLength.setValue(levelLength); } public int getLevelRandSeed() { return levelRandSeed.getValue(); } public void setLevelRandSeed(int levelRandSeed) { this.levelRandSeed.setValue(levelRandSeed); } public boolean isVisualization() { return visualization.getValue(); } public void setVisualization(boolean visualization) { this.visualization.setValue(visualization); } public void setPauseWorld(boolean pauseWorld) { this.pauseWorld.setValue(pauseWorld); } public Boolean isPauseWorld() { return pauseWorld.getValue(); } public Boolean isPowerRestoration() { return powerRestoration.getValue(); } public void setPowerRestoration(boolean powerRestoration) { this.powerRestoration.setValue(powerRestoration); } public Boolean isStopSimulationIfWin() { return stopSimulationIfWin.getValue(); } public void setStopSimulationIfWin(boolean stopSimulationIfWin) { this.stopSimulationIfWin.setValue(stopSimulationIfWin); } }
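// Illustrative usage sketch (hypothetical class): a master SimulationOptions instance is filled in
// once and getBasicSimulatorOptions() then produces a copy carrying the agent, level and
// visualization settings for a single run (stopSimulationIfWin is not among the fields copied by
// that method as written above, so it would need to be set on the copy if required). The concrete
// option values below are made up.
package ch.idsia.mario.simulation;

import ch.idsia.ai.agents.IAgent;

final class SimulationOptionsSketch {

    static SimulationOptions forSingleRun(IAgent agent) {
        SimulationOptions options = new SimulationOptions();
        options.setAgent(agent);
        options.setLevelType(0);
        options.setLevelDifficulty(3);
        options.setLevelLength(320);
        options.setLevelRandSeed(42);
        options.setVisualization(false);
        options.setPauseWorld(false);
        options.setPowerRestoration(false);
        options.maxAttempts = 1;

        // Copy of the configured options for one simulation run.
        return options.getBasicSimulatorOptions();
    }
}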
package seedu.todo.models; import java.io.IOException; import java.time.LocalDateTime; import java.util.ArrayList; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; import java.util.Set; import seedu.todo.commons.exceptions.CannotRedoException; import seedu.todo.commons.exceptions.CannotUndoException; import seedu.todo.commons.util.DateUtil; import seedu.todo.storage.JsonStorage; import seedu.todo.storage.Storage; /** * This class holds the entire persistent database for the TodoList app. * <ul> * <li>This is a singleton class. For obvious reasons, the TodoList app should * not be working with multiple DB instances simultaneously.</li> * <li>Object to object dynamic references should not be expected to survive * serialization.</li> * </ul> * * @author louietyj * */ public class TodoListDB { private static TodoListDB instance = null; private static Storage storage = new JsonStorage(); private Set<Task> tasks = new LinkedHashSet<Task>(); private Set<Event> events = new LinkedHashSet<Event>(); protected TodoListDB() { // Prevent instantiation. } /** * Get a list of Tasks in the DB. * * @return tasks */ public List<Task> getAllTasks() { return new ArrayList<Task>(tasks); } /** * Count tasks which are not marked as complete, where {@code isComplete} is false. * * @return Number of incomplete tasks */ public int countIncompleteTasks() { int count = 0; for (Task task : tasks) { if (!task.isCompleted()) { count++; } } return count; } /** * Count tasks which are overdue, where {@code dueDate} is before the time now. * * @return Number of overdue tasks */ public int countOverdueTasks() { LocalDateTime now = LocalDateTime.now(); int count = 0; for (Task task : tasks) { LocalDateTime dueDate = task.getDueDate(); if (!task.isCompleted() && dueDate != null && dueDate.compareTo(now) < 0) { count++; } } return count; } /** * Get a list of Events in the DB. * * @return events */ public List<Event> getAllEvents() { return new ArrayList<Event>(events); } /** * Count events which are in the future, where {@code startDate} is after the time now. * * @return Number of future events */ public int countFutureEvents() { LocalDateTime now = LocalDateTime.now(); int count = 0; for (Event event : events) { LocalDateTime startDate = event.getStartDate(); if (startDate != null && startDate.compareTo(now) >= 0) { count++; } } return count; } /** * Create a new Task in the DB and return it.<br> * <i>The new record is not persisted until <code>save</code> is explicitly * called.</i> * * @return task */ public Task createTask() { Task task = new Task(); tasks.add(task); return task; } /** * Destroys a Task in the DB and persists the commit. * * @param task * @return true if the save was successful, false otherwise */ public boolean destroyTask(Task task) { tasks.remove(task); return save(); } /** * Create a new Event in the DB and return it.<br> * <i>The new record is not persisted until <code>save</code> is explicitly * called.</i> * * @return event */ public Event createEvent() { Event event = new Event(); events.add(event); return event; } /** * Destroys an Event in the DB and persists the commit. * * @param event * @return true if the save was successful, false otherwise */ public boolean destroyEvent(Event event) { events.remove(event); return save(); } /** * Gets the singleton instance of the TodoListDB. 
* * @return TodoListDB */ public static TodoListDB getInstance() { if (instance == null) { instance = new TodoListDB(); } return instance; } /** * Explicitly persists the database to disk. * * @return true if the save was successful, false otherwise */ public boolean save() { try { storage.save(this); return true; } catch (IOException e) { return false; } } /** * Explicitly reloads the database from disk. * * @return true if the load was successful, false otherwise */ public boolean load() { try { instance = storage.load(); return true; } catch (IOException e) { return false; } } public void move(String newPath) throws IOException { storage.move(newPath); } /** * Returns the maximum possible number of undos. * * @return undoSize */ public int undoSize() { return storage.undoSize(); } /** * Rolls back the DB by one commit. * * @return true if the rollback was successful, false otherwise */ public boolean undo() { try { instance = storage.undo(); return true; } catch (CannotUndoException | IOException e) { return false; } } /** * Returns the maximum possible number of redos. * * @return redoSize */ public int redoSize() { return storage.redoSize(); } /** * Rolls forward the DB by one undo commit. * * @return true if the redo was successful, false otherwise */ public boolean redo() { try { instance = storage.redo(); return true; } catch (CannotRedoException | IOException e) { return false; } } public List<Event> getAllCurrentEvents() { ArrayList<Event> currentEvents = new ArrayList<Event>(); Iterator<Event> iterator = events.iterator(); while (iterator.hasNext()) { Event currEvent = iterator.next(); if (!currEvent.isOver()) { currentEvents.add(currEvent); } } return currentEvents; } /** * Get a list of incomplete Tasks in the DB, together with completed tasks that are due today. * * @return tasks */ public List<Task> getIncompleteTasksAndTaskWithTodayDate() { ArrayList<Task> incompleteTasks = new ArrayList<Task>(); Iterator<Task> iterator = tasks.iterator(); LocalDateTime todayDate = DateUtil.floorDate(LocalDateTime.now()); while (iterator.hasNext()) { Task currTask = iterator.next(); if (!currTask.isCompleted()) { // not completed yet incompleteTasks.add(currTask); } else { if (currTask.getDueDate() != null && DateUtil.floorDate(currTask.getDueDate()).equals(todayDate)) { incompleteTasks.add(currTask); } } } return incompleteTasks; } /** * Get a list of Task in the DB filtered by status and one date. * * @return list of tasks */ public List<Task> getTaskByDate(LocalDateTime givenDate, boolean isCompleted, boolean listAllStatus) { ArrayList<Task> taskByDate = new ArrayList<Task>(); Iterator<Task> iterator = tasks.iterator(); while (iterator.hasNext()) { Task currTask = iterator.next(); LocalDateTime currTaskDueDate = DateUtil.floorDate(currTask.getDueDate()); if (currTaskDueDate == null) { currTaskDueDate = LocalDateTime.MIN; } if (listAllStatus) { if (currTaskDueDate.equals(givenDate)) { taskByDate.add(currTask); } } else { if (currTaskDueDate.equals(givenDate) && currTask.isCompleted() == isCompleted) { taskByDate.add(currTask); } } } return taskByDate; } /** * Get a list of Task in the DB filtered by status and range of date. 
* * @return list of tasks */ public List<Task> getTaskByRange (LocalDateTime fromDate , LocalDateTime toDate, boolean isCompleted, boolean listAllStatus) { ArrayList<Task> taskByRange = new ArrayList<Task>(); Iterator<Task> iterator = tasks.iterator(); if (fromDate == null) { fromDate = LocalDateTime.MIN; } if (toDate == null) { toDate = LocalDateTime.MAX; } while (iterator.hasNext()) { Task currTask = iterator.next(); LocalDateTime currTaskDueDate = DateUtil.floorDate(currTask.getDueDate()); if (currTaskDueDate == null) { currTaskDueDate = LocalDateTime.MIN; } if (listAllStatus) { if (currTaskDueDate.compareTo(fromDate) >= 0 && currTaskDueDate.compareTo(toDate) <= 0) { taskByRange.add(currTask); } } else { if (currTaskDueDate.compareTo(fromDate) >= 0 && currTaskDueDate.compareTo(toDate) <= 0 && currTask.isCompleted() == isCompleted) { taskByRange.add(currTask); } } } return taskByRange; } /** * Get a list of Event in the DB filtered by status and one date. * * @return list of events */ public List<Event> getEventbyDate(LocalDateTime givenDate) { ArrayList<Event> eventByDate = new ArrayList<Event>(); Iterator<Event> iterator = events.iterator(); while (iterator.hasNext()) { Event currEvent = iterator.next(); if (DateUtil.floorDate(currEvent.getCalendarDT()).equals(givenDate)) { eventByDate.add(currEvent); } } return eventByDate; } /** * Get a list of Event in the DB filtered by status and range of date. * * @return list of events */ public List<Event> getEventByRange (LocalDateTime fromDate , LocalDateTime toDate) { ArrayList<Event> eventByRange = new ArrayList<Event>(); Iterator<Event> iterator = events.iterator(); //if either date are null, set it to min or max if (fromDate == null) { fromDate = LocalDateTime.MIN; } if (toDate == null) { toDate = LocalDateTime.MAX; } while (iterator.hasNext()) { Event currEvent = iterator.next(); if (DateUtil.floorDate(currEvent.getStartDate()).compareTo(fromDate) >= 0 && DateUtil.floorDate(currEvent.getStartDate()).compareTo(toDate) <= 0) { eventByRange.add(currEvent); } } return eventByRange; } }
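// Illustrative usage sketch (hypothetical class, not part of the sources above): TodoListDB is a
// singleton, newly created records are only persisted when save() is called explicitly, and
// undo()/redo() roll the persisted state through the underlying Storage. Note that undo(), redo()
// and load() replace the singleton instance, so getInstance() should be called again afterwards
// rather than reusing an old reference. Task fields are left untouched here because Task's setters
// are not shown in this file.
package seedu.todo.models;

final class TodoListDBUsageSketch {

    public static void main(String[] args) {
        TodoListDB db = TodoListDB.getInstance();

        // createTask() adds the task to the in-memory set but does not persist it yet.
        Task task = db.createTask();
        System.out.println("created " + task + ", incomplete tasks: " + db.countIncompleteTasks());

        // Persist the change; save() returns false if writing to disk failed.
        System.out.println("saved: " + db.save() + ", undo steps available: " + db.undoSize());

        // Roll back the last persisted commit (returns false if there is nothing to undo),
        // then pick up the replacement singleton instance.
        System.out.println("undo ok: " + db.undo());
        db = TodoListDB.getInstance();
        System.out.println("tasks after undo: " + db.getAllTasks().size());
    }
}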
package dr.inference.mcmc; import dr.inference.loggers.Logger; import dr.inference.loggers.MCLogger; import dr.inference.markovchain.MarkovChain; import dr.inference.markovchain.MarkovChainListener; import dr.inference.model.Model; import dr.inference.model.PathLikelihood; import dr.inference.operators.CombinedOperatorSchedule; import dr.inference.operators.OperatorAnalysisPrinter; import dr.inference.operators.OperatorSchedule; import dr.inference.prior.Prior; import dr.util.Identifiable; import dr.xml.*; import org.apache.commons.math.MathException; import org.apache.commons.math.distribution.BetaDistributionImpl; /** * Estimates the marginal likelihood of a model by running a series of MCMC chains over power posteriors along a path between the posterior and the reference distribution (path sampling), as described in Baele et al. 2012. * * @author Andrew Rambaut * @author Alex Alekseyenko */ public class MarginalLikelihoodEstimator implements Runnable, Identifiable { public MarginalLikelihoodEstimator(String id, int chainLength, int burninLength, int pathSteps, double fixedRunValue, // boolean linear, boolean lacing, PathScheme scheme, PathLikelihood pathLikelihood, OperatorSchedule schedule, MCLogger logger) { this.id = id; this.chainLength = chainLength; this.pathSteps = pathSteps; this.scheme = scheme; this.schedule = schedule; this.fixedRunValue = fixedRunValue; // deprecated // this.linear = (scheme == PathScheme.LINEAR); // this.lacing = false; // Was not such a good idea this.burninLength = burninLength; MCMCCriterion criterion = new MCMCCriterion(); pathDelta = 1.0 / pathSteps; pathParameter = 1.0; this.pathLikelihood = pathLikelihood; pathLikelihood.setPathParameter(pathParameter); mc = new MarkovChain(Prior.UNIFORM_PRIOR, pathLikelihood, schedule, criterion, 0, 0, true); this.logger = logger; } private void setDefaultBurnin() { if (burninLength == -1) { burnin = (int) (0.1 * chainLength); } else { burnin = burninLength; } } public void integrate(Integrator scheme) { setDefaultBurnin(); mc.setCurrentLength(burnin); scheme.init(); for (pathParameter = scheme.nextPathParameter(); pathParameter >= 0; pathParameter = scheme.nextPathParameter()) { pathLikelihood.setPathParameter(pathParameter); reportIteration(pathParameter, chainLength, burnin); long cl = mc.getCurrentLength(); mc.setCurrentLength(0); mc.runChain(burnin, false); mc.setCurrentLength(cl); mc.runChain(chainLength, false); (new OperatorAnalysisPrinter(schedule)).showOperatorAnalysis(System.out); ((CombinedOperatorSchedule) schedule).reset(); } } public abstract class Integrator { protected int step; protected int pathSteps; protected Integrator(int pathSteps) { this.pathSteps = pathSteps; } public void init() { step = 0; } abstract double nextPathParameter(); } public class FixedThetaRun extends Integrator { private double value; public FixedThetaRun(double value) { super(1); this.value = value; } double nextPathParameter() { if (step == 0) { step++; return value; } else { return -1.0; } } } public class LinearIntegrator extends Integrator { public LinearIntegrator(int pathSteps) { super(pathSteps); } double nextPathParameter() { if (step > pathSteps) { return -1; } double pathParameter = 1.0 - (double)step / (double)(pathSteps - 1); step = step + 1; return pathParameter; } } public class SigmoidIntegrator extends Integrator { private double alpha; public SigmoidIntegrator(double alpha, int pathSteps) { super(pathSteps); this.alpha = alpha; } double nextPathParameter() { if (step == 0) { step++; return 1.0; } else if (step == pathSteps) { step++; return 0.0; } else if (step > pathSteps) { return -1.0; 
} else { double xvalue = ((pathSteps - step)/((double)pathSteps)) - 0.5; step++; return Math.exp(alpha*xvalue)/(Math.exp(alpha*xvalue) + Math.exp(-alpha*xvalue)); } } } public class BetaQuantileIntegrator extends Integrator { private double alpha; public BetaQuantileIntegrator(double alpha, int pathSteps) { super(pathSteps); this.alpha = alpha; } double nextPathParameter() { if (step > pathSteps) return -1; double result = Math.pow((pathSteps - step)/((double)pathSteps), 1.0/alpha); step++; return result; } } public class BetaIntegrator extends Integrator { private BetaDistributionImpl betaDistribution; public BetaIntegrator(double alpha, double beta, int pathSteps) { super(pathSteps); this.betaDistribution = new BetaDistributionImpl(alpha, beta); } double nextPathParameter() { if (step > pathSteps) return -1; if (step == 0) { step += 1; return 1.0; } else if (step + 1 < pathSteps) { double ratio = (double) step / (double) (pathSteps - 1); try { step += 1; return 1.0 - betaDistribution.inverseCumulativeProbability(ratio); } catch (MathException e) { e.printStackTrace(); } } step += 1; return 0.0; } } public class GeometricIntegrator extends Integrator { public GeometricIntegrator(int pathSteps) { super(pathSteps); } double nextPathParameter() { if (step > pathSteps) { return -1; } if (step == pathSteps) { //pathSteps instead of pathSteps - 1 step += 1; return 0; } step += 1; return Math.pow(2, -(step - 1)); } } /*public void linearIntegration() { setDefaultBurnin(); mc.setCurrentLength(0); for (int step = 0; step < pathSteps; step++) { pathLikelihood.setPathParameter(pathParameter); reportIteration(pathParameter, chainLength, burnin); //mc.runChain(chainLength + burnin, false, 0); mc.runChain(chainLength + burnin, false); pathParameter -= pathDelta; } pathLikelihood.setPathParameter(0.0); reportIteration(pathParameter, chainLength, burnin); //mc.runChain(chainLength + burnin, false, 0); mc.runChain(chainLength + burnin, false); }*/ /*public void betaIntegration(double alpha, double beta) { setDefaultBurnin(); mc.setCurrentLength(0); BetaDistributionImpl betaDistribution = new BetaDistributionImpl(alpha, beta); for (int step = 0; step < pathSteps; step++) { if (step == 0) { pathParameter = 1.0; } else if (step + 1 < pathSteps) { double ratio = (double) step / (double) (pathSteps - 1); try { pathParameter = 1.0 - betaDistribution.inverseCumulativeProbability(ratio); } catch (MathException e) { e.printStackTrace(); } } else { pathParameter = 0.0; } pathLikelihood.setPathParameter(pathParameter); reportIteration(pathParameter, chainLength, burnin); //mc.runChain(chainLength + burnin, false, 0); mc.runChain(chainLength + burnin, false); (new OperatorAnalysisPrinter(schedule)).showOperatorAnalysis(System.out); ((CombinedOperatorSchedule) schedule).reset(); } }*/ private void reportIteration(double pathParameter, long chainLength, long burnin) { System.out.println("Attempting theta = " + pathParameter + " for " + chainLength + " iterations + " + burnin + " burnin."); } public void run() { logger.startLogging(); mc.addMarkovChainListener(chainListener); switch (scheme) { case FIXED: integrate(new FixedThetaRun(fixedRunValue)); break; case LINEAR: integrate(new LinearIntegrator(pathSteps)); break; case GEOMETRIC: integrate(new GeometricIntegrator(pathSteps)); break; case ONE_SIDED_BETA: integrate(new BetaIntegrator(1.0, betaFactor, pathSteps)); break; case BETA: integrate(new BetaIntegrator(alphaFactor, betaFactor, pathSteps)); break; case BETA_QUANTILE: integrate(new 
BetaQuantileIntegrator(alphaFactor, pathSteps)); break; case SIGMOID: integrate(new SigmoidIntegrator(alphaFactor, pathSteps)); break; default: throw new RuntimeException("Illegal path scheme"); } mc.removeMarkovChainListener(chainListener); } private final MarkovChainListener chainListener = new MarkovChainListener() { // for receiving messages from subordinate MarkovChain /** * Called to update the current model keepEvery states. */ public void currentState(long state, Model currentModel) { currentState = state; if (currentState >= burnin) { logger.log(state); } } /** * Called when a new new best posterior state is found. */ public void bestState(long state, Model bestModel) { currentState = state; } /** * cleans up when the chain finishes (possibly early). */ public void finished(long chainLength) { currentState = chainLength; (new OperatorAnalysisPrinter(schedule)).showOperatorAnalysis(System.out); // logger.log(currentState); logger.stopLogging(); } }; /** * @return the current state of the MCMC analysis. */ public boolean getSpawnable() { return spawnable; } private boolean spawnable = true; public void setSpawnable(boolean spawnable) { this.spawnable = spawnable; } public void setAlphaFactor(double alpha) { alphaFactor = alpha; } public void setBetaFactor(double beta) { betaFactor = beta; } public double getAlphaFactor() { return alphaFactor; } public double getBetaFactor() { return betaFactor; } public static XMLObjectParser PARSER = new AbstractXMLObjectParser() { public String getParserName() { return MARGINAL_LIKELIHOOD_ESTIMATOR; } /** * @return a tree object based on the XML element it was passed. */ public Object parseXMLObject(XMLObject xo) throws XMLParseException { PathLikelihood pathLikelihood = (PathLikelihood) xo.getChild(PathLikelihood.class); MCLogger logger = (MCLogger) xo.getChild(MCLogger.class); int chainLength = xo.getIntegerAttribute(CHAIN_LENGTH); int pathSteps = xo.getIntegerAttribute(PATH_STEPS); int burninLength = -1; if (xo.hasAttribute(BURNIN)) { burninLength = xo.getIntegerAttribute(BURNIN); } int prerunLength = -1; if (xo.hasAttribute(PRERUN)) { prerunLength = xo.getIntegerAttribute(PRERUN); } double fixedRunValue = -1.0; if (xo.hasAttribute(FIXED_VALUE)) { fixedRunValue = xo.getDoubleAttribute(FIXED_VALUE); } // deprecated boolean linear = xo.getAttribute(LINEAR, true); // boolean lacing = xo.getAttribute(LACING,false); PathScheme scheme; if (linear) { scheme = PathScheme.LINEAR; } else { scheme = PathScheme.GEOMETRIC; } // new approach if (xo.hasAttribute(PATH_SCHEME)) { // change to: getAttribute once deprecated approach removed scheme = PathScheme.parseFromString(xo.getAttribute(PATH_SCHEME, PathScheme.LINEAR.getText())); } for (int i = 0; i < xo.getChildCount(); i++) { Object child = xo.getChild(i); if (child instanceof Logger) { } } CombinedOperatorSchedule os = new CombinedOperatorSchedule(); XMLObject mcmcXML = xo.getChild(MCMC); for (int i = 0; i < mcmcXML.getChildCount(); ++i) { if (mcmcXML.getChild(i) instanceof MCMC) { MCMC mcmc = (MCMC) mcmcXML.getChild(i); if (prerunLength > 0) { java.util.logging.Logger.getLogger("dr.inference").info("Path Sampling Marginal Likelihood Estimator:\n\tEquilibrating chain " + mcmc.getId() + " for " + prerunLength + " iterations."); for (Logger log : mcmc.getLoggers()) { // Stop the loggers, so nothing gets written to normal output log.stopLogging(); } mcmc.getMarkovChain().runChain(prerunLength, false); } os.addOperatorSchedule(mcmc.getOperatorSchedule()); } } if (os.getScheduleCount() == 0) { 
System.err.println("Error: no mcmc objects provided in construction. Bayes Factor estimation will likely fail."); } MarginalLikelihoodEstimator mle = new MarginalLikelihoodEstimator(MARGINAL_LIKELIHOOD_ESTIMATOR, chainLength, burninLength, pathSteps, fixedRunValue, scheme, pathLikelihood, os, logger); if (!xo.getAttribute(SPAWN, true)) mle.setSpawnable(false); if (xo.hasAttribute(ALPHA)) { mle.setAlphaFactor(xo.getAttribute(ALPHA, 0.5)); } if (xo.hasAttribute(BETA)) { mle.setBetaFactor(xo.getAttribute(BETA, 0.5)); } String alphaBetaText = ""; if (scheme == PathScheme.ONE_SIDED_BETA) { alphaBetaText += "(1," + mle.getBetaFactor() + ")"; } else if (scheme == PathScheme.BETA) { alphaBetaText += "(" + mle.getAlphaFactor() + "," + mle.getBetaFactor() + ")"; } else if (scheme == PathScheme.BETA_QUANTILE) { alphaBetaText += "(" + mle.getAlphaFactor() + ")"; } else if (scheme == PathScheme.SIGMOID) { alphaBetaText += "(" + mle.getAlphaFactor() + ")"; } java.util.logging.Logger.getLogger("dr.inference").info("\nCreating the Marginal Likelihood Estimator chain:" + "\n chainLength=" + chainLength + "\n pathSteps=" + pathSteps + "\n pathScheme=" + scheme.getText() + alphaBetaText + "\n If you use these results, please cite:" + "\n Guy Baele, Philippe Lemey, Trevor Bedford, Andrew Rambaut, Marc A. Suchard, and Alexander V. Alekseyenko." + "\n 2012. Improving the accuracy of demographic and molecular clock model comparison while accommodating " + "\n phylogenetic uncertainty. Mol. Biol. Evol. (in press)."); return mle; } /** * this markov chain does most of the work. */ private final MarkovChain mc; private OperatorSchedule schedule; private String id = null; private long currentState; private final long chainLength; private long burnin; private final long burninLength; private int pathSteps; // private final boolean linear; // private final boolean lacing; private final PathScheme scheme; private double alphaFactor = 0.5; private double betaFactor = 0.5; private double fixedRunValue = -1.0; private final double pathDelta; private double pathParameter; private final MCLogger logger; private final PathLikelihood pathLikelihood; public static final String MARGINAL_LIKELIHOOD_ESTIMATOR = "marginalLikelihoodEstimator"; public static final String CHAIN_LENGTH = "chainLength"; public static final String PATH_STEPS = "pathSteps"; public static final String FIXED = "fixed"; public static final String LINEAR = "linear"; public static final String LACING = "lacing"; public static final String SPAWN = "spawn"; public static final String BURNIN = "burnin"; public static final String MCMC = "samplers"; public static final String PATH_SCHEME = "pathScheme"; public static final String FIXED_VALUE = "fixedValue"; public static final String ALPHA = "alpha"; public static final String BETA = "beta"; public static final String PRERUN = "prerun"; }
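/*
 * Hedged illustration (not part of the BEAST source above): a minimal, standalone sketch of
 * how the BETA_QUANTILE path scheme spaces the path parameter theta between the posterior
 * (theta = 1) and the prior (theta = 0). The class name PathSchemeDemo and the printing loop
 * are hypothetical; only the formula mirrors BetaQuantileIntegrator.nextPathParameter().
 */
public class PathSchemeDemo {

    /** theta_k = ((pathSteps - k) / pathSteps)^(1 / alpha), for k = 0 .. pathSteps. */
    static double betaQuantileTheta(int step, int pathSteps, double alpha) {
        return Math.pow((pathSteps - step) / (double) pathSteps, 1.0 / alpha);
    }

    public static void main(String[] args) {
        int pathSteps = 10;
        double alpha = 0.3; // a small alpha concentrates power-posterior steps near the prior
        for (int k = 0; k <= pathSteps; k++) {
            System.out.printf("step %2d: theta = %.4f%n", k, betaQuantileTheta(k, pathSteps, alpha));
        }
    }
}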
package net.bytebuddy.implementation; import net.bytebuddy.build.HashCodeAndEqualsPlugin; import net.bytebuddy.description.field.FieldDescription; import net.bytebuddy.description.method.MethodDescription; import net.bytebuddy.description.type.TypeDescription; import net.bytebuddy.description.type.TypeList; import net.bytebuddy.dynamic.scaffold.FieldLocator; import net.bytebuddy.dynamic.scaffold.InstrumentedType; import net.bytebuddy.implementation.bytecode.ByteCodeAppender; import net.bytebuddy.implementation.bytecode.StackManipulation; import net.bytebuddy.implementation.bytecode.assign.Assigner; import net.bytebuddy.implementation.bytecode.collection.ArrayFactory; import net.bytebuddy.implementation.bytecode.constant.MethodConstant; import net.bytebuddy.implementation.bytecode.member.FieldAccess; import net.bytebuddy.implementation.bytecode.member.MethodInvocation; import net.bytebuddy.implementation.bytecode.member.MethodReturn; import net.bytebuddy.implementation.bytecode.member.MethodVariableAccess; import net.bytebuddy.utility.RandomString; import org.objectweb.asm.MethodVisitor; import org.objectweb.asm.Opcodes; import java.lang.reflect.InvocationHandler; import java.util.ArrayList; import java.util.List; import static net.bytebuddy.matcher.ElementMatchers.genericFieldType; import static net.bytebuddy.matcher.ElementMatchers.named; /** * An adapter for adapting an {@link java.lang.reflect.InvocationHandler}. The adapter allows the invocation handler * to also intercept method calls to non-interface methods. */ @HashCodeAndEqualsPlugin.Enhance public abstract class InvocationHandlerAdapter implements Implementation { /** * A type description of the {@link InvocationHandler}. */ private static final TypeDescription.Generic INVOCATION_HANDLER_TYPE = TypeDescription.Generic.OfNonGenericType.ForLoadedType.of(InvocationHandler.class); /** * Indicates that a value should not be cached. */ private static final boolean UNCACHED = false; /** * Indicates that a {@link java.lang.reflect.Method} should be cached. */ private static final boolean CACHED = true; /** * Indicates that a lookup of a method constant should be looked up using an {@link java.security.AccessController}. */ private static final boolean PRIVILEGED = true; /** * The name of the field for storing an invocation handler. */ protected final String fieldName; /** * The assigner that is used for assigning the return invocation handler's return value to the * intercepted method's return value. */ protected final Assigner assigner; /** * Determines if the {@link java.lang.reflect.Method} instances that are handed to the intercepted methods are * cached in {@code static} fields. */ protected final boolean cached; /** * Determines if the {@link java.lang.reflect.Method} instances are retrieved by using an {@link java.security.AccessController}. */ protected final boolean privileged; /** * Creates a new invocation handler for a given field. * * @param fieldName The name of the field. * @param cached Determines if the {@link java.lang.reflect.Method} instances that are handed to the * intercepted methods are cached in {@code static} fields. * @param privileged Determines if the {@link java.lang.reflect.Method} instances are retrieved by using an {@link java.security.AccessController}. * @param assigner The assigner to apply when defining this implementation. 
*/ protected InvocationHandlerAdapter(String fieldName, boolean cached, boolean privileged, Assigner assigner) { this.fieldName = fieldName; this.cached = cached; this.privileged = privileged; this.assigner = assigner; } /** * Creates an implementation for any instance of an {@link java.lang.reflect.InvocationHandler} that delegates * all method interceptions to the given instance which will be stored in a {@code static} field. * * @param invocationHandler The invocation handler to which all method calls are delegated. * @return An implementation that delegates all method interceptions to the given invocation handler. */ public static InvocationHandlerAdapter of(InvocationHandler invocationHandler) { return of(invocationHandler, ForInstance.PREFIX + "$" + RandomString.hashOf(invocationHandler.hashCode())); } /** * Creates an implementation for any instance of an {@link java.lang.reflect.InvocationHandler} that delegates * all method interceptions to the given instance which will be stored in a {@code static} field. * * @param invocationHandler The invocation handler to which all method calls are delegated. * @param fieldName The name of the field. * @return An implementation that delegates all method interceptions to the given invocation handler. */ public static InvocationHandlerAdapter of(InvocationHandler invocationHandler, String fieldName) { return new ForInstance(fieldName, CACHED, PRIVILEGED, Assigner.DEFAULT, invocationHandler); } /** * Creates an implementation for any {@link java.lang.reflect.InvocationHandler} that delegates * all method interceptions to a field with the given name. This field has to be of a subtype of invocation * handler and needs to be set before any invocations are intercepted. Otherwise, a {@link java.lang.NullPointerException} * will be thrown. * * @param name The name of the field. * @return An implementation that delegates all method interceptions to an instance field of the given name. */ public static InvocationHandlerAdapter toField(String name) { return toField(name, FieldLocator.ForClassHierarchy.Factory.INSTANCE); } /** * Creates an implementation for any {@link java.lang.reflect.InvocationHandler} that delegates * all method interceptions to a field with the given name. This field has to be of a subtype of invocation * handler and needs to be set before any invocations are intercepted. Otherwise, a {@link java.lang.NullPointerException} * will be thrown. * * @param name The name of the field. * @param fieldLocatorFactory The field locator factory * @return An implementation that delegates all method interceptions to an instance field of the given name. */ public static InvocationHandlerAdapter toField(String name, FieldLocator.Factory fieldLocatorFactory) { return new ForField(name, CACHED, PRIVILEGED, Assigner.DEFAULT, fieldLocatorFactory); } /** * Returns a list of stack manipulations that loads all arguments of an instrumented method. * * @param instrumentedMethod The method that is instrumented. * @return A list of stack manipulation that loads all arguments of an instrumented method. 
*/ private List<StackManipulation> argumentValuesOf(MethodDescription instrumentedMethod) { TypeList.Generic parameterTypes = instrumentedMethod.getParameters().asTypeList(); List<StackManipulation> instruction = new ArrayList<StackManipulation>(parameterTypes.size()); int currentIndex = 1; for (TypeDescription.Generic parameterType : parameterTypes) { instruction.add(new StackManipulation.Compound( MethodVariableAccess.of(parameterType).loadFrom(currentIndex), assigner.assign(parameterType, TypeDescription.Generic.OBJECT, Assigner.Typing.STATIC))); currentIndex += parameterType.getStackSize().getSize(); } return instruction; } /** * By default, any {@link java.lang.reflect.Method} instance that is handed over to an * {@link java.lang.reflect.InvocationHandler} is cached in a static field. By invoking this method, * this feature can be disabled. * * @return A similar invocation handler adapter that applies caching. */ public abstract AssignerConfigurable withoutMethodCache(); /** * Determines if the {@link java.lang.reflect.Method} instances that are supplied to the invocation handler should be retrieved * by using an {@link java.security.AccessController}. This is the default configuration. * * @param privileged {@code true} if the lookup should be privileged. * @return This invocation handler adapter with the specified privilege setting. */ public abstract InvocationHandlerAdapter withPrivilegedMethodLookup(boolean privileged); /** * Applies an implementation that delegates to a invocation handler. * * @param methodVisitor The method visitor for writing the byte code to. * @param implementationContext The implementation context for the current implementation. * @param instrumentedMethod The method that is instrumented. * @param preparingManipulation A stack manipulation that applies any preparation to the operand stack. * @param fieldDescription The field that contains the value for the invocation handler. * @return The size of the applied assignment. */ protected ByteCodeAppender.Size apply(MethodVisitor methodVisitor, Context implementationContext, MethodDescription instrumentedMethod, StackManipulation preparingManipulation, FieldDescription fieldDescription) { if (instrumentedMethod.isStatic()) { throw new IllegalStateException("It is not possible to apply an invocation handler onto the static method " + instrumentedMethod); } MethodConstant.CanCache methodConstant = privileged ? MethodConstant.ofPrivileged(instrumentedMethod.asDefined()) : MethodConstant.of(instrumentedMethod.asDefined()); StackManipulation.Size stackSize = new StackManipulation.Compound( preparingManipulation, FieldAccess.forField(fieldDescription).read(), MethodVariableAccess.loadThis(), cached ? methodConstant.cached() : methodConstant, ArrayFactory.forType(TypeDescription.Generic.OBJECT).withValues(argumentValuesOf(instrumentedMethod)), MethodInvocation.invoke(INVOCATION_HANDLER_TYPE.getDeclaredMethods().getOnly()), assigner.assign(TypeDescription.Generic.OBJECT, instrumentedMethod.getReturnType(), Assigner.Typing.DYNAMIC), MethodReturn.of(instrumentedMethod.getReturnType()) ).apply(methodVisitor, implementationContext); return new ByteCodeAppender.Size(stackSize.getMaximalSize(), instrumentedMethod.getStackSize()); } /** * Allows for the configuration of an {@link net.bytebuddy.implementation.bytecode.assign.Assigner} * of an {@link net.bytebuddy.implementation.InvocationHandlerAdapter}. 
*/ public interface AssignerConfigurable extends Implementation { /** * Configures an assigner to use with this invocation handler adapter. * * @param assigner The assigner to apply when defining this implementation. * @return This instrumentation with the given {@code assigner} configured. */ Implementation withAssigner(Assigner assigner); } /** * An implementation of an {@link net.bytebuddy.implementation.InvocationHandlerAdapter} that delegates method * invocations to an adapter that is stored in a static field. */ @HashCodeAndEqualsPlugin.Enhance protected static class ForInstance extends InvocationHandlerAdapter implements AssignerConfigurable { /** * The prefix for field that are created for storing the instrumented value. */ private static final String PREFIX = "invocationHandler"; /** * The invocation handler to which method interceptions are to be delegated. */ protected final InvocationHandler invocationHandler; /** * Creates a new invocation handler adapter for delegating invocations to an invocation handler that is stored * in a static field. * * @param fieldName The name of the field. * @param cached Determines if the {@link java.lang.reflect.Method} instances that are handed to the * intercepted methods are cached in {@code static} fields. * @param privileged Determines if the {@link java.lang.reflect.Method} instances are retrieved by * using an {@link java.security.AccessController}. * @param assigner The assigner to apply when defining this implementation. * @param invocationHandler The invocation handler to which all method calls are delegated. */ protected ForInstance(String fieldName, boolean cached, boolean privileged, Assigner assigner, InvocationHandler invocationHandler) { super(fieldName, cached, privileged, assigner); this.invocationHandler = invocationHandler; } @Override public AssignerConfigurable withoutMethodCache() { return new ForInstance(fieldName, UNCACHED, privileged, assigner, invocationHandler); } @Override public InvocationHandlerAdapter withPrivilegedMethodLookup(boolean privileged) { return new ForInstance(fieldName, cached, privileged, assigner, invocationHandler); } @Override public Implementation withAssigner(Assigner assigner) { return new ForInstance(fieldName, cached, privileged, assigner, invocationHandler); } @Override public InstrumentedType prepare(InstrumentedType instrumentedType) { return instrumentedType .withField(new FieldDescription.Token(fieldName, Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC | Opcodes.ACC_VOLATILE | Opcodes.ACC_SYNTHETIC, INVOCATION_HANDLER_TYPE)) .withInitializer(new LoadedTypeInitializer.ForStaticField(fieldName, invocationHandler)); } @Override public ByteCodeAppender appender(Target implementationTarget) { return new Appender(implementationTarget.getInstrumentedType()); } /** * An appender for implementing the {@link ForInstance}. */ @HashCodeAndEqualsPlugin.Enhance(includeSyntheticFields = true) protected class Appender implements ByteCodeAppender { /** * The instrumented type for which the methods are being intercepted. */ private final TypeDescription instrumentedType; /** * Creates a new appender. * * @param instrumentedType The type that is instrumented. 
*/ protected Appender(TypeDescription instrumentedType) { this.instrumentedType = instrumentedType; } @Override public Size apply(MethodVisitor methodVisitor, Context implementationContext, MethodDescription instrumentedMethod) { return ForInstance.this.apply(methodVisitor, implementationContext, instrumentedMethod, StackManipulation.Trivial.INSTANCE, instrumentedType.getDeclaredFields().filter(named(fieldName).and(genericFieldType(INVOCATION_HANDLER_TYPE))).getOnly()); } } } /** * An implementation of an {@link net.bytebuddy.implementation.InvocationHandlerAdapter} that delegates method * invocations to an adapter that is stored in an instance field. */ @HashCodeAndEqualsPlugin.Enhance protected static class ForField extends InvocationHandlerAdapter implements AssignerConfigurable { /** * The field locator factory to use. */ private final FieldLocator.Factory fieldLocatorFactory; /** * Creates a new invocation handler adapter that loads its value from a field. * * @param fieldName The name of the field. * @param cached Determines if the {@link java.lang.reflect.Method} instances that are handed to the * intercepted methods are cached in {@code static} fields. * @param privileged Determines if the {@link java.lang.reflect.Method} instances are retrieved by using * an {@link java.security.AccessController}. * @param assigner The assigner to apply when defining this implementation. * @param fieldLocatorFactory The field locator factory to use. */ protected ForField(String fieldName, boolean cached, boolean privileged, Assigner assigner, FieldLocator.Factory fieldLocatorFactory) { super(fieldName, cached, privileged, assigner); this.fieldLocatorFactory = fieldLocatorFactory; } @Override public AssignerConfigurable withoutMethodCache() { return new ForField(fieldName, UNCACHED, privileged, assigner, fieldLocatorFactory); } @Override public InvocationHandlerAdapter withPrivilegedMethodLookup(boolean privileged) { return new ForField(fieldName, cached, privileged, assigner, fieldLocatorFactory); } @Override public Implementation withAssigner(Assigner assigner) { return new ForField(fieldName, cached, privileged, assigner, fieldLocatorFactory); } @Override public InstrumentedType prepare(InstrumentedType instrumentedType) { return instrumentedType; } @Override public ByteCodeAppender appender(Target implementationTarget) { FieldLocator.Resolution resolution = fieldLocatorFactory.make(implementationTarget.getInstrumentedType()).locate(fieldName); if (!resolution.isResolved()) { throw new IllegalStateException("Could not find a field named '" + fieldName + "' for " + implementationTarget.getInstrumentedType()); } else if (!resolution.getField().getType().asErasure().isAssignableTo(InvocationHandler.class)) { throw new IllegalStateException("Field " + resolution.getField() + " does not declare a type that is assignable to invocation handler"); } return new Appender(resolution.getField()); } /** * An appender for implementing the {@link ForField}. */ @HashCodeAndEqualsPlugin.Enhance(includeSyntheticFields = true) protected class Appender implements ByteCodeAppender { /** * The field that contains the invocation handler. */ private final FieldDescription fieldDescription; /** * Creates a new appender. * * @param fieldDescription The field that contains the invocation handler. 
*/ protected Appender(FieldDescription fieldDescription) { this.fieldDescription = fieldDescription; } @Override public Size apply(MethodVisitor methodVisitor, Context implementationContext, MethodDescription instrumentedMethod) { return ForField.this.apply(methodVisitor, implementationContext, instrumentedMethod, fieldDescription.isStatic() ? StackManipulation.Trivial.INSTANCE : MethodVariableAccess.loadThis(), fieldDescription); } } } }
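/*
 * Hedged usage sketch (not part of the adapter source above): how InvocationHandlerAdapter.of()
 * is typically wired into the Byte Buddy fluent API. The class name InvocationHandlerAdapterDemo
 * is hypothetical, and the call chain assumes the standard ByteBuddy/ElementMatchers entry
 * points; the only adapter-specific call is InvocationHandlerAdapter.of(...).
 */
import net.bytebuddy.ByteBuddy;
import net.bytebuddy.implementation.InvocationHandlerAdapter;
import net.bytebuddy.matcher.ElementMatchers;

public class InvocationHandlerAdapterDemo {

    public static void main(String[] args) throws Exception {
        // Delegate toString() of a generated subclass to a plain JDK InvocationHandler.
        Object proxy = new ByteBuddy()
                .subclass(Object.class)
                .method(ElementMatchers.named("toString"))
                .intercept(InvocationHandlerAdapter.of(
                        (instance, method, arguments) -> "handled " + method.getName()))
                .make()
                .load(InvocationHandlerAdapterDemo.class.getClassLoader())
                .getLoaded()
                .getDeclaredConstructor()
                .newInstance();

        System.out.println(proxy); // expected to print "handled toString"
    }
}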
/* Open Source Software - may be modified and shared by FRC teams. The code */ /* the project. */ package edu.wpi.first.wpilibj.templates; import edu.wpi.first.wpilibj.SimpleRobot; import edu.wpi.first.wpilibj.CANJaguar; import edu.wpi.first.wpilibj.DriverStation; import edu.wpi.first.wpilibj.DriverStationLCD; import edu.wpi.first.wpilibj.DigitalInput; import edu.wpi.first.wpilibj.Victor; import edu.wpi.first.wpilibj.Jaguar; import edu.wpi.first.wpilibj.Joystick; import edu.wpi.first.wpilibj.Talon; import edu.wpi.first.wpilibj.Timer; import edu.wpi.first.wpilibj.Servo; import edu.wpi.first.wpilibj.can.CANTimeoutException; import edu.wpi.first.wpilibj.camera.AxisCamera; import edu.wpi.first.wpilibj.camera.AxisCameraException; import edu.wpi.first.wpilibj.image.BinaryImage; import edu.wpi.first.wpilibj.image.ColorImage; import edu.wpi.first.wpilibj.image.CriteriaCollection; import edu.wpi.first.wpilibj.image.NIVision; import edu.wpi.first.wpilibj.image.NIVisionException; import edu.wpi.first.wpilibj.image.ParticleAnalysisReport; import team1517.aerialassist.mecanum.MecanumDrive; import team1517.aerialassist.io.DriverLCD; /** * The VM is configured to automatically run this class, and to call the * functions corresponding to each mode, as described in the SimpleRobot * documentation. If you change the name of this class or the package after * creating this project, you must also update the manifest file in the resource * directory. */ public class RobotTemplate extends SimpleRobot { boolean catapultArmed = false; final int AREA_MINIMUM = 100; double tiltValue = 0.5, rotValue = 0.85, winchPower = -1; AxisCamera camera; CriteriaCollection cc; CANJaguar aF, aB, bF, bB; DriverStation driverStation; DigitalInput armedSwitch; Victor rotRod1, rotRod2, angle1, rot2rod2, rot2rot2, angle2; Talon winchMotor; Servo tiltServo, rotServo; Joystick xyStick, steerStick, auxStick; DriverLCD lcd; MecanumDrive mDrive; public RobotTemplate() { //camera = AxisCamera.getInstance(); cc = new CriteriaCollection(); // create the criteria for the particle filter cc.addCriteria(NIVision.MeasurementType.IMAQ_MT_AREA, AREA_MINIMUM, 215472, false); armedSwitch = new DigitalInput(1); driverStation = DriverStation.getInstance(); rotRod1 = new Victor(8); rotRod2 = new Victor(9); angle1 = new Victor(3); // rot2Rod1 = new Victor() // rot2Rod2 = new Victor() // angle2 = new Victor() winchMotor = new Talon(4); tiltServo = new Servo(5); rotServo = new Servo(6); xyStick = new Joystick(1); steerStick = new Joystick(2); auxStick = new Joystick(3); lcd = new DriverLCD(); initCANJaguars(); } /** * This function is called once each time the robot enters autonomous mode. */ public void autonomous() { boolean isLowGoal = driverStation.getDigitalIn(1); Timer aTimer = new Timer(); aTimer.start(); tiltServo.set(0.5); rotServo.set(0.5); if(isLowGoal) { Timer.delay(0.7);//Delays a amount of time in order for the hot goal vision targets to rotate into position. 
boolean isHotGoalStarting = false;//getHotGoal(); try { if(!isHotGoalStarting) { Timer.delay(2); } while(Math.abs(bF.getPosition()) < 10.18 && aTimer.get() < 10) { mDrive.drive(0, -0.7, 0); lcd.println(1, 1, "" + aF.getPosition()); lcd.updateLCD(); } mDrive.drive(0, 0, 0); } catch(CANTimeoutException ex) { ex.printStackTrace(); initCANJaguars(); } } else { aTimer.reset(); while(aTimer.get() < 0.2) { angle1.set(0.3); } angle1.set(0); /*try { while(Math.abs(bF.getPosition()) < 8.12 && aTimer.get() < 10) { mDrive.drive(0, -0.7, 0); } mDrive.drive(0, 0, 0); if(!armedSwitch.get()) { while(!armedSwitch.get()) { armCatapult(); } } fireCatapult(); } catch(CANTimeoutException ex) { ex.printStackTrace(); initCANJaguars(); }*/ } aTimer.stop(); } /** * This function is called once each time the robot enters operator control. */ public void operatorControl() { boolean exceptionFree = true; double x = 0, y = 0, t = 0; while(isOperatorControl() && isEnabled()) { if(xyStick.getRawButton(1)) { try { aF.setX(xyStick.getTwist()); bB.setX(xyStick.getTwist()); aB.setX(-1 * xyStick.getTwist()); bF.setX(-1 * xyStick.getTwist()); exceptionFree = true; } catch(CANTimeoutException ex) { ex.printStackTrace(); exceptionFree = false; } } else { /* * Controls the drive base and also handles exceptions. */ x = filterJoystickInput(steerStick.getX()); y = filterJoystickInput(xyStick.getY()); t = filterJoystickInput(xyStick.getTwist()); exceptionFree = tDrive(x, y, t); } if(!exceptionFree) { initCANJaguars(); } /* * Sets the output to the angle motor of the rot rods to the value of the y axis of the auxStick scaled by a factor of 0.7. */ angle1.set(auxStick.getY()); /* * Controls the rotation of the rot rods. */ if(auxStick.getRawButton(3)) { rotRod1.set(-0.7); rotRod2.set(0.7); } else if(auxStick.getRawButton(5)) { rotRod1.set(0.7); rotRod2.set(-0.7); } else { rotRod1.set(0); rotRod2.set(0); } /* * Manual control of the catapult winch. */ if(auxStick.getRawButton(2)) { winchMotor.set(-1); } else if(auxStick.getRawButton(4)) { armCatapult(); } else if(auxStick.getRawButton(6)) { fireCatapult(); } else { winchMotor.set(0); } if(auxStick.getRawAxis(6) > 0 && winchPower <= 0.95) { winchPower = winchPower + 0.05; } else if(auxStick.getRawAxis(6) < 0 && winchPower >= -0.95) { winchPower = winchPower - 0.05; } /* * Sets the output values of the camera axis servos. */ tiltServo.set(tiltValue); rotServo.set(rotValue); /* * Allows the user to adjust the value set to the rotServo. */ if(auxStick.getRawAxis(5) > 0 && rotValue <= 0.95) { rotValue = rotValue + 0.05; } else if(auxStick.getRawAxis(5) < 0 && rotValue >= 0.05) { rotValue = rotValue - 0.05; } lcd.println(1, 1, "winch " + winchPower + " "); lcd.println(2, 1, "cam rot " + rotValue); lcd.println(3, 1, "input2" + armedSwitch.get() + " "); lcd.updateLCD(); Timer.delay(0.01); } } /** * This function is called once each time the robot enters test mode. */ public void test() { while(isTest() && isEnabled()) { if(auxStick.getRawButton(1)) { lcd.println(1, 1, " " + getHotGoal()); } if(auxStick.getRawButton(2)) { lcd.println(2, 1, " " + getVisionDistance()); } lcd.updateLCD(); } } /** * Moves the catapult into armed position. */ private void armCatapult() { if(!armedSwitch.get()) { winchMotor.set(-0.7); } else { winchMotor.set(0); } } /** * Fires the catapult. 
*/ private void fireCatapult() { Timer timer = new Timer(); timer.start(); if(!armedSwitch.get()) { while(!armedSwitch.get() && timer.get() < 0.5) { winchMotor.set(-0.3); } } timer.reset(); while(armedSwitch.get() && timer.get() < 1) { winchMotor.set(-0.3); } winchMotor.set(0); timer.stop(); } /** * Added to abstract the drive method so that CAN can be switched to PWM easier and more simply. * @param mX The X value of the drive vector. * @param mY The Y value of the drive vector. * @param twist The turn added to the output of the drive vector. * @return True if successful, false if exceptions are thrown. */ private boolean tDrive(double mX, double mY, double twist) { try { return mDrive.drive(mX, mY, twist); } catch(NullPointerException ex) { ex.printStackTrace(); return true; } } /** * Detects whether the hot goal is visible. * @return True if the hot goal is visible. False if the hot goal is not visible or an exception has been thrown. */ private boolean getHotGoal() { try { ColorImage image = camera.getImage(); BinaryImage thresholdImage = image.thresholdHSV(80, 140, 165, 255, 200, 255); image.free(); BinaryImage hulledImage = thresholdImage.convexHull(false); thresholdImage.free(); if(hulledImage.getNumberParticles() > 0) { lcd.println(2, 1, "" + hulledImage.getNumberParticles()); lcd.updateLCD(); ParticleAnalysisReport report; for(int i = 0; i < hulledImage.getNumberParticles(); i++) { report = hulledImage.getParticleAnalysisReport(i); if((report.boundingRectHeight / report.boundingRectWidth) < 1) { return true; } } report = null; } hulledImage.free(); } catch (AxisCameraException ex) { ex.printStackTrace(); return false; } catch (NIVisionException ex) { ex.printStackTrace(); return false; } return false; } /** * Used to initialize the CANJaguars. It can also be called to reinitialize them if an exception is thrown. * @return Success */ private boolean initCANJaguars() { boolean successful = true; mDrive = null; while(aF == null || bF == null || aB == null || bB == null) { try { aF = null; bF = null; aB = null; bB = null; aF = new CANJaguar(1); bF = new CANJaguar(2); aB = new CANJaguar(3); bB = new CANJaguar(4); aF.changeControlMode(CANJaguar.ControlMode.kPercentVbus); bF.changeControlMode(CANJaguar.ControlMode.kPercentVbus); aB.changeControlMode(CANJaguar.ControlMode.kPercentVbus); bB.changeControlMode(CANJaguar.ControlMode.kPercentVbus); aF.configNeutralMode(CANJaguar.NeutralMode.kBrake); bF.configNeutralMode(CANJaguar.NeutralMode.kBrake); aB.configNeutralMode(CANJaguar.NeutralMode.kBrake); bB.configNeutralMode(CANJaguar.NeutralMode.kBrake); aF.setSpeedReference(CANJaguar.SpeedReference.kQuadEncoder); bF.setSpeedReference(CANJaguar.SpeedReference.kQuadEncoder); aB.setSpeedReference(CANJaguar.SpeedReference.kQuadEncoder); bB.setSpeedReference(CANJaguar.SpeedReference.kQuadEncoder); aF.setPositionReference(CANJaguar.PositionReference.kQuadEncoder); bF.setPositionReference(CANJaguar.PositionReference.kQuadEncoder); aB.setPositionReference(CANJaguar.PositionReference.kQuadEncoder); bB.setPositionReference(CANJaguar.PositionReference.kQuadEncoder); aF.configEncoderCodesPerRev(100); bF.configEncoderCodesPerRev(100); aB.configEncoderCodesPerRev(100); bB.configEncoderCodesPerRev(100); //aF.setX(0); //bF.setX(0); //aB.setX(0); //bB.setX(0); } catch(CANTimeoutException ex) { ex.printStackTrace(); successful = true; } } mDrive = new MecanumDrive(aF, aB, bF, bB); return successful; } /** * Detects whether one or more of the CANJaguars has lost and then regained power. 
* @return True if power to one or more of the CANJaguars has been cycled or if a timeout exception has occurred. False otherwise. */ private boolean getCANJaguarsPowerCycled() { try { if(aF.getPowerCycled() || aB.getPowerCycled() || bF.getPowerCycled() || bB.getPowerCycled()) { return true; } } catch(CANTimeoutException ex) { ex.printStackTrace(); return true; } return false; } /** * Filters out noise from the input of the joysticks. * @param joystickValue The raw input value from the joystick. * @return The filtered value. */ double filterJoystickInput(double joystickValue) { if(Math.abs(joystickValue) > 0.1) { return (joystickValue * joystickValue * joystickValue); } else { if(xyStick.getTwist() != 0) { return 0.0000000000001; } else { return 0; } } } double getVisionDistance() { try { ColorImage image = camera.getImage(); BinaryImage thresholdImage = image.thresholdHSV(80, 140, 165, 255, 200, 255); image.free(); BinaryImage hulledImage = thresholdImage.convexHull(false); thresholdImage.free(); if(hulledImage.getNumberParticles() > 0) { ParticleAnalysisReport report; for(int i = 0; i < hulledImage.getNumberParticles(); i++) { report = hulledImage.getParticleAnalysisReport(i); if(report.boundingRectWidth / report.boundingRectHeight < 1) //1 can be reduced. { //do distance calculations. //return distance. return report.center_mass_y_normalized + 1; } } } } catch (AxisCameraException ex) { ex.printStackTrace(); return -2; } catch (NIVisionException ex) { ex.printStackTrace(); return -3; } return -1; } }
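/*
 * Hedged illustration (not part of the robot code above): a standalone sketch of the cubic
 * deadband filter that filterJoystickInput() applies to joystick axes. Inputs inside the
 * +/-0.1 deadband are zeroed; inputs outside it are cubed, which keeps fine control near the
 * centre while still allowing full power at the extremes. The class name JoystickFilterDemo
 * is hypothetical, and the tiny non-zero return used for the twist axis in the original
 * method is omitted here for clarity.
 */
public class JoystickFilterDemo {

    static double filter(double raw) {
        return Math.abs(raw) > 0.1 ? raw * raw * raw : 0.0;
    }

    public static void main(String[] args) {
        double[] samples = {0.05, 0.2, 0.5, 0.8, 1.0, -0.8};
        for (double s : samples) {
            System.out.printf("raw % .2f -> filtered % .4f%n", s, filter(s));
        }
    }
}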
package org.jfree.chart.renderer.xy; import java.awt.AlphaComposite; import java.awt.Color; import java.awt.Composite; import java.awt.Graphics2D; import java.awt.Paint; import java.awt.Stroke; import java.awt.geom.Line2D; import java.awt.geom.Rectangle2D; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.Serializable; import org.jfree.chart.axis.ValueAxis; import org.jfree.chart.ui.RectangleEdge; import org.jfree.chart.util.PaintUtilities; import org.jfree.chart.util.PublicCloneable; import org.jfree.chart.entity.EntityCollection; import org.jfree.chart.event.RendererChangeEvent; import org.jfree.chart.labels.HighLowItemLabelGenerator; import org.jfree.chart.labels.XYToolTipGenerator; import org.jfree.chart.plot.CrosshairState; import org.jfree.chart.plot.PlotOrientation; import org.jfree.chart.plot.PlotRenderingInfo; import org.jfree.chart.plot.XYPlot; import org.jfree.chart.util.SerialUtilities; import org.jfree.data.Range; import org.jfree.data.xy.IntervalXYDataset; import org.jfree.data.xy.OHLCDataset; import org.jfree.data.xy.XYDataset; /** * A renderer that draws candlesticks on an {@link XYPlot} (requires a * {@link OHLCDataset}). The example shown here is generated * by the <code>CandlestickChartDemo1.java</code> program included in the * JFreeChart demo collection: * <br><br> * <img src="../../../../../images/CandlestickRendererSample.png" * alt="CandlestickRendererSample.png" /> * <P> * This renderer does not include code to calculate the crosshair point for the * plot. */ public class CandlestickRenderer extends AbstractXYItemRenderer implements XYItemRenderer, Cloneable, PublicCloneable, Serializable { /** For serialization. */ private static final long serialVersionUID = 50390395841817121L; /** The average width method. */ public static final int WIDTHMETHOD_AVERAGE = 0; /** The smallest width method. */ public static final int WIDTHMETHOD_SMALLEST = 1; /** The interval data method. */ public static final int WIDTHMETHOD_INTERVALDATA = 2; /** The method of automatically calculating the candle width. */ private int autoWidthMethod = WIDTHMETHOD_AVERAGE; /** * The number (generally between 0.0 and 1.0) by which the available space * automatically calculated for the candles will be multiplied to determine * the actual width to use. */ private double autoWidthFactor = 4.5 / 7; /** The minimum gap between one candle and the next */ private double autoWidthGap = 0.0; /** The candle width. */ private double candleWidth; /** The maximum candlewidth in milliseconds. */ private double maxCandleWidthInMilliseconds = 1000.0 * 60.0 * 60.0 * 20.0; /** Temporary storage for the maximum candle width. */ private double maxCandleWidth; /** * The paint used to fill the candle when the price moved up from open to * close. */ private transient Paint upPaint; /** * The paint used to fill the candle when the price moved down from open * to close. */ private transient Paint downPaint; /** A flag controlling whether or not volume bars are drawn on the chart. */ private boolean drawVolume; /** * The paint used to fill the volume bars (if they are visible). Once * initialised, this field should never be set to <code>null</code>. * * @since 1.0.7 */ private transient Paint volumePaint; /** Temporary storage for the maximum volume. */ private transient double maxVolume; /** * A flag that controls whether or not the renderer's outline paint is * used to draw the outline of the candlestick. 
The default value is * <code>false</code> to avoid a change of behaviour for existing code. * * @since 1.0.5 */ private boolean useOutlinePaint; /** * Creates a new renderer for candlestick charts. */ public CandlestickRenderer() { this(-1.0); } /** * Creates a new renderer for candlestick charts. * <P> * Use -1 for the candle width if you prefer the width to be calculated * automatically. * * @param candleWidth The candle width. */ public CandlestickRenderer(double candleWidth) { this(candleWidth, true, new HighLowItemLabelGenerator()); } /** * Creates a new renderer for candlestick charts. * <P> * Use -1 for the candle width if you prefer the width to be calculated * automatically. * * @param candleWidth the candle width. * @param drawVolume a flag indicating whether or not volume bars should * be drawn. * @param toolTipGenerator the tool tip generator. <code>null</code> is * none. */ public CandlestickRenderer(double candleWidth, boolean drawVolume, XYToolTipGenerator toolTipGenerator) { super(); setDefaultToolTipGenerator(toolTipGenerator); this.candleWidth = candleWidth; this.drawVolume = drawVolume; this.volumePaint = Color.GRAY; this.upPaint = Color.GREEN; this.downPaint = Color.RED; this.useOutlinePaint = false; // false preserves the old behaviour // prior to introducing this flag } /** * Returns the width of each candle. * * @return The candle width. * * @see #setCandleWidth(double) */ public double getCandleWidth() { return this.candleWidth; } /** * Sets the candle width and sends a {@link RendererChangeEvent} to all * registered listeners. * <P> * If you set the width to a negative value, the renderer will calculate * the candle width automatically based on the space available on the chart. * * @param width The width. * @see #setAutoWidthMethod(int) * @see #setAutoWidthGap(double) * @see #setAutoWidthFactor(double) * @see #setMaxCandleWidthInMilliseconds(double) */ public void setCandleWidth(double width) { if (width != this.candleWidth) { this.candleWidth = width; fireChangeEvent(); } } /** * Returns the maximum width (in milliseconds) of each candle. * * @return The maximum candle width in milliseconds. * * @see #setMaxCandleWidthInMilliseconds(double) */ public double getMaxCandleWidthInMilliseconds() { return this.maxCandleWidthInMilliseconds; } /** * Sets the maximum candle width (in milliseconds) and sends a * {@link RendererChangeEvent} to all registered listeners. * * @param millis The maximum width. * * @see #getMaxCandleWidthInMilliseconds() * @see #setCandleWidth(double) * @see #setAutoWidthMethod(int) * @see #setAutoWidthGap(double) * @see #setAutoWidthFactor(double) */ public void setMaxCandleWidthInMilliseconds(double millis) { this.maxCandleWidthInMilliseconds = millis; fireChangeEvent(); } /** * Returns the method of automatically calculating the candle width. * * @return The method of automatically calculating the candle width. * * @see #setAutoWidthMethod(int) */ public int getAutoWidthMethod() { return this.autoWidthMethod; } /** * Sets the method of automatically calculating the candle width and * sends a {@link RendererChangeEvent} to all registered listeners. 
* <p> * <code>WIDTHMETHOD_AVERAGE</code>: Divides the entire display (ignoring * scale factor) by the number of items, and uses this as the available * width.<br> * <code>WIDTHMETHOD_SMALLEST</code>: Checks the interval between each * item, and uses the smallest as the available width.<br> * <code>WIDTHMETHOD_INTERVALDATA</code>: Assumes that the dataset supports * the IntervalXYDataset interface, and uses the startXValue - endXValue as * the available width. * <br> * * @param autoWidthMethod The method of automatically calculating the * candle width. * * @see #WIDTHMETHOD_AVERAGE * @see #WIDTHMETHOD_SMALLEST * @see #WIDTHMETHOD_INTERVALDATA * @see #getAutoWidthMethod() * @see #setCandleWidth(double) * @see #setAutoWidthGap(double) * @see #setAutoWidthFactor(double) * @see #setMaxCandleWidthInMilliseconds(double) */ public void setAutoWidthMethod(int autoWidthMethod) { if (this.autoWidthMethod != autoWidthMethod) { this.autoWidthMethod = autoWidthMethod; fireChangeEvent(); } } /** * Returns the factor by which the available space automatically * calculated for the candles will be multiplied to determine the actual * width to use. * * @return The width factor (generally between 0.0 and 1.0). * * @see #setAutoWidthFactor(double) */ public double getAutoWidthFactor() { return this.autoWidthFactor; } /** * Sets the factor by which the available space automatically calculated * for the candles will be multiplied to determine the actual width to use. * * @param autoWidthFactor The width factor (generally between 0.0 and 1.0). * * @see #getAutoWidthFactor() * @see #setCandleWidth(double) * @see #setAutoWidthMethod(int) * @see #setAutoWidthGap(double) * @see #setMaxCandleWidthInMilliseconds(double) */ public void setAutoWidthFactor(double autoWidthFactor) { if (this.autoWidthFactor != autoWidthFactor) { this.autoWidthFactor = autoWidthFactor; fireChangeEvent(); } } /** * Returns the amount of space to leave on the left and right of each * candle when automatically calculating widths. * * @return The gap. * * @see #setAutoWidthGap(double) */ public double getAutoWidthGap() { return this.autoWidthGap; } /** * Sets the amount of space to leave on the left and right of each candle * when automatically calculating widths and sends a * {@link RendererChangeEvent} to all registered listeners. * * @param autoWidthGap The gap. * * @see #getAutoWidthGap() * @see #setCandleWidth(double) * @see #setAutoWidthMethod(int) * @see #setAutoWidthFactor(double) * @see #setMaxCandleWidthInMilliseconds(double) */ public void setAutoWidthGap(double autoWidthGap) { if (this.autoWidthGap != autoWidthGap) { this.autoWidthGap = autoWidthGap; fireChangeEvent(); } } /** * Returns the paint used to fill candles when the price moves up from open * to close. * * @return The paint (possibly <code>null</code>). * * @see #setUpPaint(Paint) */ public Paint getUpPaint() { return this.upPaint; } /** * Sets the paint used to fill candles when the price moves up from open * to close and sends a {@link RendererChangeEvent} to all registered * listeners. * * @param paint the paint (<code>null</code> permitted). * * @see #getUpPaint() */ public void setUpPaint(Paint paint) { this.upPaint = paint; fireChangeEvent(); } /** * Returns the paint used to fill candles when the price moves down from * open to close. * * @return The paint (possibly <code>null</code>). 
* * @see #setDownPaint(Paint) */ public Paint getDownPaint() { return this.downPaint; } /** * Sets the paint used to fill candles when the price moves down from open * to close and sends a {@link RendererChangeEvent} to all registered * listeners. * * @param paint The paint (<code>null</code> permitted). */ public void setDownPaint(Paint paint) { this.downPaint = paint; fireChangeEvent(); } /** * Returns a flag indicating whether or not volume bars are drawn on the * chart. * * @return A boolean. * * @since 1.0.5 * * @see #setDrawVolume(boolean) */ public boolean getDrawVolume() { return this.drawVolume; } /** * Sets a flag that controls whether or not volume bars are drawn in the * background and sends a {@link RendererChangeEvent} to all registered * listeners. * * @param flag the flag. * * @see #getDrawVolume() */ public void setDrawVolume(boolean flag) { if (this.drawVolume != flag) { this.drawVolume = flag; fireChangeEvent(); } } /** * Returns the paint that is used to fill the volume bars if they are * visible. * * @return The paint (never <code>null</code>). * * @see #setVolumePaint(Paint) * * @since 1.0.7 */ public Paint getVolumePaint() { return this.volumePaint; } /** * Sets the paint used to fill the volume bars, and sends a * {@link RendererChangeEvent} to all registered listeners. * * @param paint the paint (<code>null</code> not permitted). * * @see #getVolumePaint() * @see #getDrawVolume() * * @since 1.0.7 */ public void setVolumePaint(Paint paint) { if (paint == null) { throw new IllegalArgumentException("Null 'paint' argument."); } this.volumePaint = paint; fireChangeEvent(); } /** * Returns the flag that controls whether or not the renderer's outline * paint is used to draw the candlestick outline. The default value is * <code>false</code>. * * @return A boolean. * * @since 1.0.5 * * @see #setUseOutlinePaint(boolean) */ public boolean getUseOutlinePaint() { return this.useOutlinePaint; } /** * Sets the flag that controls whether or not the renderer's outline * paint is used to draw the candlestick outline, and sends a * {@link RendererChangeEvent} to all registered listeners. * * @param use the new flag value. * * @since 1.0.5 * * @see #getUseOutlinePaint() */ public void setUseOutlinePaint(boolean use) { if (this.useOutlinePaint != use) { this.useOutlinePaint = use; fireChangeEvent(); } } /** * Returns the range of values the renderer requires to display all the * items from the specified dataset. * * @param dataset the dataset (<code>null</code> permitted). * * @return The range (<code>null</code> if the dataset is <code>null</code> * or empty). */ @Override public Range findRangeBounds(XYDataset dataset) { return findRangeBounds(dataset, true); } /** * Initialises the renderer then returns the number of 'passes' through the * data that the renderer will require (usually just one). This method * will be called before the first item is rendered, giving the renderer * an opportunity to initialise any state information it wants to maintain. * The renderer can do nothing if it chooses. * * @param g2 the graphics device. * @param dataArea the area inside the axes. * @param plot the plot. * @param dataset the data. * @param info an optional info collection object to return data back to * the caller. * * @return The number of passes the renderer requires. */ @Override public XYItemRendererState initialise(Graphics2D g2, Rectangle2D dataArea, XYPlot plot, XYDataset dataset, PlotRenderingInfo info) { // calculate the maximum allowed candle width from the axis... 
ValueAxis axis = plot.getDomainAxis(); double x1 = axis.getLowerBound(); double x2 = x1 + this.maxCandleWidthInMilliseconds; RectangleEdge edge = plot.getDomainAxisEdge(); double xx1 = axis.valueToJava2D(x1, dataArea, edge); double xx2 = axis.valueToJava2D(x2, dataArea, edge); this.maxCandleWidth = Math.abs(xx2 - xx1); // Absolute value, since the relative x // positions are reversed for horizontal orientation // calculate the highest volume in the dataset... if (this.drawVolume) { OHLCDataset highLowDataset = (OHLCDataset) dataset; this.maxVolume = 0.0; for (int series = 0; series < highLowDataset.getSeriesCount(); series++) { for (int item = 0; item < highLowDataset.getItemCount(series); item++) { double volume = highLowDataset.getVolumeValue(series, item); if (volume > this.maxVolume) { this.maxVolume = volume; } } } } return new XYItemRendererState(info); } /** * Draws the visual representation of a single data item. * * @param g2 the graphics device. * @param state the renderer state. * @param dataArea the area within which the plot is being drawn. * @param info collects info about the drawing. * @param plot the plot (can be used to obtain standard color * information etc). * @param domainAxis the domain axis. * @param rangeAxis the range axis. * @param dataset the dataset. * @param series the series index (zero-based). * @param item the item index (zero-based). * @param crosshairState crosshair information for the plot * (<code>null</code> permitted). * @param pass the pass index. */ @Override public void drawItem(Graphics2D g2, XYItemRendererState state, Rectangle2D dataArea, PlotRenderingInfo info, XYPlot plot, ValueAxis domainAxis, ValueAxis rangeAxis, XYDataset dataset, int series, int item, CrosshairState crosshairState, int pass) { boolean horiz; PlotOrientation orientation = plot.getOrientation(); if (orientation == PlotOrientation.HORIZONTAL) { horiz = true; } else if (orientation == PlotOrientation.VERTICAL) { horiz = false; } else { return; } // setup for collecting optional entity info... EntityCollection entities = null; if (info != null) { entities = info.getOwner().getEntityCollection(); } OHLCDataset highLowData = (OHLCDataset) dataset; double x = highLowData.getXValue(series, item); double yHigh = highLowData.getHighValue(series, item); double yLow = highLowData.getLowValue(series, item); double yOpen = highLowData.getOpenValue(series, item); double yClose = highLowData.getCloseValue(series, item); RectangleEdge domainEdge = plot.getDomainAxisEdge(); double xx = domainAxis.valueToJava2D(x, dataArea, domainEdge); RectangleEdge edge = plot.getRangeAxisEdge(); double yyHigh = rangeAxis.valueToJava2D(yHigh, dataArea, edge); double yyLow = rangeAxis.valueToJava2D(yLow, dataArea, edge); double yyOpen = rangeAxis.valueToJava2D(yOpen, dataArea, edge); double yyClose = rangeAxis.valueToJava2D(yClose, dataArea, edge); double volumeWidth; double stickWidth; if (this.candleWidth > 0) { // These are deliberately not bounded to minimums/maxCandleWidth to // retain old behaviour. 
volumeWidth = this.candleWidth; stickWidth = this.candleWidth; } else { double xxWidth = 0; int itemCount; switch (this.autoWidthMethod) { case WIDTHMETHOD_AVERAGE: itemCount = highLowData.getItemCount(series); if (horiz) { xxWidth = dataArea.getHeight() / itemCount; } else { xxWidth = dataArea.getWidth() / itemCount; } break; case WIDTHMETHOD_SMALLEST: // Note: It would be nice to pre-calculate this per series itemCount = highLowData.getItemCount(series); double lastPos = -1; xxWidth = dataArea.getWidth(); for (int i = 0; i < itemCount; i++) { double pos = domainAxis.valueToJava2D( highLowData.getXValue(series, i), dataArea, domainEdge); if (lastPos != -1) { xxWidth = Math.min(xxWidth, Math.abs(pos - lastPos)); } lastPos = pos; } break; case WIDTHMETHOD_INTERVALDATA: IntervalXYDataset intervalXYData = (IntervalXYDataset) dataset; double startPos = domainAxis.valueToJava2D( intervalXYData.getStartXValue(series, item), dataArea, plot.getDomainAxisEdge()); double endPos = domainAxis.valueToJava2D( intervalXYData.getEndXValue(series, item), dataArea, plot.getDomainAxisEdge()); xxWidth = Math.abs(endPos - startPos); break; } xxWidth -= 2 * this.autoWidthGap; xxWidth *= this.autoWidthFactor; xxWidth = Math.min(xxWidth, this.maxCandleWidth); volumeWidth = Math.max(Math.min(1, this.maxCandleWidth), xxWidth); stickWidth = Math.max(Math.min(3, this.maxCandleWidth), xxWidth); } Paint p = getItemPaint(series, item); Paint outlinePaint = null; if (this.useOutlinePaint) { outlinePaint = getItemOutlinePaint(series, item); } Stroke s = getItemStroke(series, item); g2.setStroke(s); if (this.drawVolume) { int volume = (int) highLowData.getVolumeValue(series, item); double volumeHeight = volume / this.maxVolume; double min, max; if (horiz) { min = dataArea.getMinX(); max = dataArea.getMaxX(); } else { min = dataArea.getMinY(); max = dataArea.getMaxY(); } double zzVolume = volumeHeight * (max - min); g2.setPaint(getVolumePaint()); Composite originalComposite = g2.getComposite(); g2.setComposite(AlphaComposite.getInstance( AlphaComposite.SRC_OVER, 0.3f)); if (horiz) { g2.fill(new Rectangle2D.Double(min, xx - volumeWidth / 2, zzVolume, volumeWidth)); } else { g2.fill(new Rectangle2D.Double(xx - volumeWidth / 2, max - zzVolume, volumeWidth, zzVolume)); } g2.setComposite(originalComposite); } if (this.useOutlinePaint) { g2.setPaint(outlinePaint); } else { g2.setPaint(p); } double yyMaxOpenClose = Math.max(yyOpen, yyClose); double yyMinOpenClose = Math.min(yyOpen, yyClose); double maxOpenClose = Math.max(yOpen, yClose); double minOpenClose = Math.min(yOpen, yClose); // draw the upper shadow if (yHigh > maxOpenClose) { if (horiz) { g2.draw(new Line2D.Double(yyHigh, xx, yyMaxOpenClose, xx)); } else { g2.draw(new Line2D.Double(xx, yyHigh, xx, yyMaxOpenClose)); } } // draw the lower shadow if (yLow < minOpenClose) { if (horiz) { g2.draw(new Line2D.Double(yyLow, xx, yyMinOpenClose, xx)); } else { g2.draw(new Line2D.Double(xx, yyLow, xx, yyMinOpenClose)); } } // draw the body Rectangle2D body; Rectangle2D hotspot; double length = Math.abs(yyHigh - yyLow); double base = Math.min(yyHigh, yyLow); if (horiz) { body = new Rectangle2D.Double(yyMinOpenClose, xx - stickWidth / 2, yyMaxOpenClose - yyMinOpenClose, stickWidth); hotspot = new Rectangle2D.Double(base, xx - stickWidth / 2, length, stickWidth); } else { body = new Rectangle2D.Double(xx - stickWidth / 2, yyMinOpenClose, stickWidth, yyMaxOpenClose - yyMinOpenClose); hotspot = new Rectangle2D.Double(xx - stickWidth / 2, base, stickWidth, length); } if (yClose > yOpen) { 
if (this.upPaint != null) { g2.setPaint(this.upPaint); } else { g2.setPaint(p); } g2.fill(body); } else { if (this.downPaint != null) { g2.setPaint(this.downPaint); } else { g2.setPaint(p); } g2.fill(body); } if (this.useOutlinePaint) { g2.setPaint(outlinePaint); } else { g2.setPaint(p); } g2.draw(body); // add an entity for the item... if (entities != null) { addEntity(entities, hotspot, dataset, series, item, 0.0, 0.0); } } /** * Tests this renderer for equality with another object. * * @param obj the object (<code>null</code> permitted). * * @return <code>true</code> or <code>false</code>. */ @Override public boolean equals(Object obj) { if (obj == this) { return true; } if (!(obj instanceof CandlestickRenderer)) { return false; } CandlestickRenderer that = (CandlestickRenderer) obj; if (this.candleWidth != that.candleWidth) { return false; } if (!PaintUtilities.equal(this.upPaint, that.upPaint)) { return false; } if (!PaintUtilities.equal(this.downPaint, that.downPaint)) { return false; } if (this.drawVolume != that.drawVolume) { return false; } if (this.maxCandleWidthInMilliseconds != that.maxCandleWidthInMilliseconds) { return false; } if (this.autoWidthMethod != that.autoWidthMethod) { return false; } if (this.autoWidthFactor != that.autoWidthFactor) { return false; } if (this.autoWidthGap != that.autoWidthGap) { return false; } if (this.useOutlinePaint != that.useOutlinePaint) { return false; } if (!PaintUtilities.equal(this.volumePaint, that.volumePaint)) { return false; } return super.equals(obj); } /** * Returns a clone of the renderer. * * @return A clone. * * @throws CloneNotSupportedException if the renderer cannot be cloned. */ @Override public Object clone() throws CloneNotSupportedException { return super.clone(); } /** * Provides serialization support. * * @param stream the output stream. * * @throws IOException if there is an I/O error. */ private void writeObject(ObjectOutputStream stream) throws IOException { stream.defaultWriteObject(); SerialUtilities.writePaint(this.upPaint, stream); SerialUtilities.writePaint(this.downPaint, stream); SerialUtilities.writePaint(this.volumePaint, stream); } /** * Provides serialization support. * * @param stream the input stream. * * @throws IOException if there is an I/O error. * @throws ClassNotFoundException if there is a classpath problem. */ private void readObject(ObjectInputStream stream) throws IOException, ClassNotFoundException { stream.defaultReadObject(); this.upPaint = SerialUtilities.readPaint(stream); this.downPaint = SerialUtilities.readPaint(stream); this.volumePaint = SerialUtilities.readPaint(stream); } }
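/*
 * Hedged usage sketch (not part of the renderer source above): wiring the CandlestickRenderer
 * into a chart with automatic candle widths. The class name CandlestickUsageDemo and the tiny
 * hard-coded dataset are hypothetical; ChartFactory.createCandlestickChart and
 * DefaultHighLowDataset are assumed to have their usual JFreeChart signatures.
 */
import java.awt.Color;
import java.util.Date;

import org.jfree.chart.ChartFactory;
import org.jfree.chart.JFreeChart;
import org.jfree.chart.plot.XYPlot;
import org.jfree.chart.renderer.xy.CandlestickRenderer;
import org.jfree.data.xy.DefaultHighLowDataset;

public class CandlestickUsageDemo {

    public static void main(String[] args) {
        Date[] dates = {new Date(0L), new Date(86_400_000L), new Date(172_800_000L)};
        double[] high   = {12.0, 13.0, 14.0};
        double[] low    = { 9.0, 10.0, 11.0};
        double[] open   = {10.0, 12.0, 11.5};
        double[] close  = {11.5, 10.5, 13.5};
        double[] volume = {100.0, 150.0, 120.0};
        DefaultHighLowDataset dataset = new DefaultHighLowDataset(
                "Series 1", dates, high, low, open, close, volume);

        JFreeChart chart = ChartFactory.createCandlestickChart(
                "Demo", "Time", "Price", dataset, false);

        CandlestickRenderer renderer = new CandlestickRenderer(-1.0); // -1 => automatic width
        renderer.setAutoWidthMethod(CandlestickRenderer.WIDTHMETHOD_SMALLEST);
        renderer.setAutoWidthGap(1.0);
        renderer.setUpPaint(Color.GREEN);
        renderer.setDownPaint(Color.RED);
        renderer.setDrawVolume(true);

        ((XYPlot) chart.getPlot()).setRenderer(renderer);
    }
}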
package sklearn.impute;

import java.util.ArrayList;
import java.util.List;

import org.dmg.pmml.MissingValueTreatmentMethod;
import org.jpmml.converter.Feature;
import org.jpmml.sklearn.ClassDictUtil;
import org.jpmml.sklearn.SkLearnEncoder;

import sklearn.HasNumberOfFeatures;
import sklearn.Transformer;

public class SimpleImputer extends Transformer implements HasNumberOfFeatures {

	public SimpleImputer(String module, String name){
		super(module, name);
	}

	@Override
	public int getNumberOfFeatures(){
		int[] shape = getStatisticsShape();

		return shape[0];
	}

	@Override
	public List<Feature> encodeFeatures(List<Feature> features, SkLearnEncoder encoder){
		Object missingValues = getMissingValues();
		List<?> statistics = getStatistics();
		String strategy = getStrategy();

		ClassDictUtil.checkSize(features, statistics);

		if((Double.valueOf(Double.NaN)).equals(missingValues)){
			missingValues = null;
		}

		MissingValueTreatmentMethod missingValueTreatment = parseStrategy(strategy);

		List<Feature> result = new ArrayList<>();

		for(int i = 0; i < features.size(); i++){
			Feature feature = features.get(i);
			Object statistic = statistics.get(i);

			feature = ImputerUtil.encodeFeature(feature, missingValues, statistic, missingValueTreatment, encoder);

			result.add(feature);
		}

		return result;
	}

	public Object getMissingValues(){
		return getOptionalObject("missing_values");
	}

	public List<?> getStatistics(){
		return getArray("statistics_");
	}

	public int[] getStatisticsShape(){
		return getArrayShape("statistics_", 1);
	}

	public String getStrategy(){
		return getString("strategy");
	}

	static
	private MissingValueTreatmentMethod parseStrategy(String strategy){

		switch(strategy){
			case "constant":
				return MissingValueTreatmentMethod.AS_VALUE;
			case "mean":
				return MissingValueTreatmentMethod.AS_MEAN;
			case "median":
				return MissingValueTreatmentMethod.AS_MEDIAN;
			case "most_frequent":
				return MissingValueTreatmentMethod.AS_MODE;
			default:
				throw new IllegalArgumentException(strategy);
		}
	}
}
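/*
 * Hedged illustration (not part of the converter source above): the strategy-to-PMML mapping
 * that SimpleImputer.parseStrategy() applies, shown as a standalone sketch since that method
 * is private. The class name ImputerStrategyDemo is hypothetical; the enum constants are the
 * org.dmg.pmml.MissingValueTreatmentMethod values referenced above.
 */
import org.dmg.pmml.MissingValueTreatmentMethod;

public class ImputerStrategyDemo {

    public static void main(String[] args) {
        String[] strategies = {"constant", "mean", "median", "most_frequent"};
        MissingValueTreatmentMethod[] treatments = {
            MissingValueTreatmentMethod.AS_VALUE,
            MissingValueTreatmentMethod.AS_MEAN,
            MissingValueTreatmentMethod.AS_MEDIAN,
            MissingValueTreatmentMethod.AS_MODE
        };
        for (int i = 0; i < strategies.length; i++) {
            System.out.println("sklearn strategy '" + strategies[i] + "' -> PMML " + treatments[i]);
        }
    }
}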
package org.eclipse.birt.chart.render; import java.util.ArrayList; import java.util.Enumeration; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import org.eclipse.birt.chart.computation.DataPointHints; import org.eclipse.birt.chart.computation.DataSetIterator; import org.eclipse.birt.chart.computation.IConstants; import org.eclipse.birt.chart.computation.LegendEntryRenderingHints; import org.eclipse.birt.chart.computation.LegendItemHints; import org.eclipse.birt.chart.computation.LegendItemRenderingHints; import org.eclipse.birt.chart.computation.LegendLayoutHints; import org.eclipse.birt.chart.computation.Methods; import org.eclipse.birt.chart.computation.withaxes.PlotWithAxes; import org.eclipse.birt.chart.computation.withoutaxes.Coordinates; import org.eclipse.birt.chart.computation.withoutaxes.PlotWithoutAxes; import org.eclipse.birt.chart.device.IDeviceRenderer; import org.eclipse.birt.chart.device.IDisplayServer; import org.eclipse.birt.chart.device.IPrimitiveRenderer; import org.eclipse.birt.chart.device.IStructureDefinitionListener; import org.eclipse.birt.chart.device.ITextMetrics; import org.eclipse.birt.chart.engine.i18n.Messages; import org.eclipse.birt.chart.event.BlockGenerationEvent; import org.eclipse.birt.chart.event.EventObjectCache; import org.eclipse.birt.chart.event.InteractionEvent; import org.eclipse.birt.chart.event.LineRenderEvent; import org.eclipse.birt.chart.event.Polygon3DRenderEvent; import org.eclipse.birt.chart.event.PolygonRenderEvent; import org.eclipse.birt.chart.event.PrimitiveRenderEvent; import org.eclipse.birt.chart.event.RectangleRenderEvent; import org.eclipse.birt.chart.event.StructureSource; import org.eclipse.birt.chart.event.StructureType; import org.eclipse.birt.chart.event.TextRenderEvent; import org.eclipse.birt.chart.event.WrappedInstruction; import org.eclipse.birt.chart.event.WrappedStructureSource; import org.eclipse.birt.chart.exception.ChartException; import org.eclipse.birt.chart.factory.RunTimeContext; import org.eclipse.birt.chart.log.ILogger; import org.eclipse.birt.chart.log.Logger; import org.eclipse.birt.chart.model.Chart; import org.eclipse.birt.chart.model.ChartWithAxes; import org.eclipse.birt.chart.model.ChartWithoutAxes; import org.eclipse.birt.chart.model.DialChart; import org.eclipse.birt.chart.model.attribute.ActionType; import org.eclipse.birt.chart.model.attribute.Anchor; import org.eclipse.birt.chart.model.attribute.Bounds; import org.eclipse.birt.chart.model.attribute.ChartDimension; import org.eclipse.birt.chart.model.attribute.ColorDefinition; import org.eclipse.birt.chart.model.attribute.Direction; import org.eclipse.birt.chart.model.attribute.Fill; import org.eclipse.birt.chart.model.attribute.HorizontalAlignment; import org.eclipse.birt.chart.model.attribute.Insets; import org.eclipse.birt.chart.model.attribute.LegendBehaviorType; import org.eclipse.birt.chart.model.attribute.LegendItemType; import org.eclipse.birt.chart.model.attribute.LineAttributes; import org.eclipse.birt.chart.model.attribute.Location; import org.eclipse.birt.chart.model.attribute.Location3D; import org.eclipse.birt.chart.model.attribute.MultipleFill; import org.eclipse.birt.chart.model.attribute.Orientation; import org.eclipse.birt.chart.model.attribute.Palette; import org.eclipse.birt.chart.model.attribute.Position; import org.eclipse.birt.chart.model.attribute.Size; import org.eclipse.birt.chart.model.attribute.TextAlignment; import org.eclipse.birt.chart.model.attribute.TooltipValue; 
import org.eclipse.birt.chart.model.attribute.TriggerCondition; import org.eclipse.birt.chart.model.attribute.URLValue; import org.eclipse.birt.chart.model.attribute.VerticalAlignment; import org.eclipse.birt.chart.model.attribute.impl.BoundsImpl; import org.eclipse.birt.chart.model.attribute.impl.ColorDefinitionImpl; import org.eclipse.birt.chart.model.attribute.impl.LineAttributesImpl; import org.eclipse.birt.chart.model.attribute.impl.LocationImpl; import org.eclipse.birt.chart.model.attribute.impl.SeriesValueImpl; import org.eclipse.birt.chart.model.attribute.impl.SizeImpl; import org.eclipse.birt.chart.model.attribute.impl.TextAlignmentImpl; import org.eclipse.birt.chart.model.attribute.impl.TextImpl; import org.eclipse.birt.chart.model.attribute.impl.URLValueImpl; import org.eclipse.birt.chart.model.component.Axis; import org.eclipse.birt.chart.model.component.Label; import org.eclipse.birt.chart.model.component.Series; import org.eclipse.birt.chart.model.component.impl.LabelImpl; import org.eclipse.birt.chart.model.component.impl.SeriesImpl; import org.eclipse.birt.chart.model.data.SeriesDefinition; import org.eclipse.birt.chart.model.data.Trigger; import org.eclipse.birt.chart.model.data.impl.ActionImpl; import org.eclipse.birt.chart.model.data.impl.TriggerImpl; import org.eclipse.birt.chart.model.layout.Block; import org.eclipse.birt.chart.model.layout.ClientArea; import org.eclipse.birt.chart.model.layout.LabelBlock; import org.eclipse.birt.chart.model.layout.Legend; import org.eclipse.birt.chart.model.layout.Plot; import org.eclipse.birt.chart.model.layout.TitleBlock; import org.eclipse.birt.chart.plugin.ChartEnginePlugin; import org.eclipse.birt.chart.script.ScriptHandler; import org.eclipse.birt.chart.util.ChartUtil; import org.eclipse.birt.chart.util.PluginSettings; import org.eclipse.emf.common.util.EList; /** * Provides a generic framework that initiates the rendering sequence of the * various chart components. Series type extensions could subclass this class if * they plan on rendering everything for themselves in the plot area. */ public abstract class BaseRenderer implements ISeriesRenderer { /** * This key is to reference the location array of last stacked series. */ protected final static String STACKED_SERIES_LOCATION_KEY = "stacked_series_location_key"; //$NON-NLS-1$ /** * This key is to reference the fixed location array of last stacked series. */ protected final static String FIXED_STACKED_SERIES_LOCATION_KEY = "fixed_stacked_series_location_key"; //$NON-NLS-1$ /** * This key is to reference the fixed index value of last stacked series. */ protected final static String FIXED_STACKED_SERIES_INDEX_KEY = "fixed_stacked_series_index_key"; //$NON-NLS-1$ /** The key is to reference the size information of stacked cone or triangle series. 
*/ protected final static String STACKED_SERIES_SIZE_KEY = "stacked_series_size_key"; //$NON-NLS-1$ protected static final String TIMER = "T"; //$NON-NLS-1$ protected ISeriesRenderingHints srh; protected IDisplayServer xs; protected IDeviceRenderer ir; protected DeferredCache dc; protected Chart cm; protected Object oComputations; protected Series se; protected SeriesDefinition sd; /** * All renders associated with the chart provided for convenience and * inter-series calculations */ protected BaseRenderer[] brna; /** * Identifies the series sequence # in the list of series renders */ protected transient int iSeriesIndex = -1; /** * Identifies the series count in the list of series renders */ protected transient int iSeriesCount = 1; /** * Internally used to simulate a translucent shadow */ public static final ColorDefinition SHADOW = ColorDefinitionImpl.create( 64, 64, 64, 127 ); /** * Internally used to darken a tiled image with a translucent dark grey * color */ protected static final ColorDefinition DARK_GLASS = ColorDefinitionImpl.create( 64, 64, 64, 127 ); /** * Internally used to brighten a tiled image with a translucent light grey * color */ protected static final ColorDefinition LIGHT_GLASS = ColorDefinitionImpl.create( 196, 196, 196, 127 ); /** * Transparency for translucent color. Should between 0 and 100. */ protected static double OVERRIDE_TRANSPARENCY = 50; /** * The associated runtimeContext. */ protected transient RunTimeContext rtc = null; /** The mananger assures correct paint z-order of series for 2D case. */ protected DeferredCacheManager fDeferredCacheManager; private static ILogger logger = Logger.getLogger( "org.eclipse.birt.chart.engine/render" ); //$NON-NLS-1$ /** * The internal constructor that must be defined as public * * @param _ir * @param _cm */ public BaseRenderer( ) { } /** * Sets the context infomation for current renderer. * * @param _cm * @param _o * @param _se * @param _ax * @param _sd */ public void set( Chart _cm, Object _oComputation, Series _se, SeriesDefinition _sd ) { cm = _cm; oComputations = _oComputation; se = _se; sd = _sd; } /** * Sets the deferred cache used by current renderer. */ public final void set( DeferredCache _dc ) { dc = _dc; } /** * Sets the device renderer for current renderer. */ public final void set( IDeviceRenderer _ir ) { ir = _ir; } /** * Sets the diplay server for current renderer. */ public final void set( IDisplayServer _xs ) { xs = _xs; } /** * Sets the series rendering hints for current renderer. */ public final void set( ISeriesRenderingHints _srh ) { srh = _srh; } /** * Setes all associated renderers used for current chart rendering. */ public final void set( BaseRenderer[] _brna ) { brna = _brna; } /** * Sets the runtime context object for current renderer. */ public final void set( RunTimeContext _rtc ) { rtc = _rtc; } /** * @return Returns the series rendering hints for current renderer. */ public final ISeriesRenderingHints getSeriesRenderingHints( ) { return srh; } /** * @return Returns the display server for current renderer. */ public final IDisplayServer getXServer( ) { return xs; } /** * @return Returns the scale of current device against standard 72dpi * (X/72). */ public final double getDeviceScale( ) { return xs.getDpiResolution( ) / 72d; } /** * @return Returns the series definition associated with current renderer. */ public final SeriesDefinition getSeriesDefinition( ) { return sd; } /** * Identifies the series sequence # in the list of series renders(start from * 0). 
* * @return The index of the Series being rendered */ public final int getSeriesIndex( ) { return iSeriesIndex; } /** * @return Returns the series count for current chart rendering. */ public final int getSeriesCount( ) { return iSeriesCount; } /** * @return Returns the deferred cache associated with current renderer. */ public final DeferredCache getDeferredCache( ) { return dc; } /** * Provides access to any other renderer in the group that participates in * chart rendering * * @param iIndex * @return */ public final BaseRenderer getRenderer( int iIndex ) { return brna[iIndex]; } /** * @return Returns the runtime context associated with current renderer. */ public final RunTimeContext getRunTimeContext( ) { return rtc; } /** * Renders all blocks using the appropriate block z-order and the * containment hierarchy. * * @param bo */ public void render( Map htRenderers, Bounds bo ) throws ChartException { final boolean bFirstInSequence = ( iSeriesIndex == 0 ); final boolean bLastInSequence = ( iSeriesIndex == iSeriesCount - 1 ); boolean bStarted = bFirstInSequence; long lTimer = System.currentTimeMillis( ); Block bl = cm.getBlock( ); final Enumeration e = bl.children( true ); final BlockGenerationEvent bge = new BlockGenerationEvent( this ); final IDeviceRenderer idr = getDevice( ); final ScriptHandler sh = getRunTimeContext( ).getScriptHandler( ); if ( bFirstInSequence ) { // ALWAYS RENDER THE OUTERMOST BLOCK FIRST ScriptHandler.callFunction( sh, ScriptHandler.BEFORE_DRAW_BLOCK, bl, getRunTimeContext( ).getScriptContext( ) ); getRunTimeContext( ).notifyStructureChange( IStructureDefinitionListener.BEFORE_DRAW_BLOCK, bl ); bge.updateBlock( bl ); renderChartBlock( idr, bl, StructureSource.createChartBlock( bl ) ); ScriptHandler.callFunction( sh, ScriptHandler.AFTER_DRAW_BLOCK, bl, getRunTimeContext( ).getScriptContext( ) ); getRunTimeContext( ).notifyStructureChange( IStructureDefinitionListener.AFTER_DRAW_BLOCK, bl ); } // RENDER ALL BLOCKS EXCEPT FOR THE LEGEND IN THIS ITERATIVE LOOP while ( e.hasMoreElements( ) ) { bl = (Block) e.nextElement( ); bge.updateBlock( bl ); if ( bl instanceof Plot ) { ScriptHandler.callFunction( sh, ScriptHandler.BEFORE_DRAW_BLOCK, bl, getRunTimeContext( ).getScriptContext( ) ); getRunTimeContext( ).notifyStructureChange( IStructureDefinitionListener.BEFORE_DRAW_BLOCK, bl ); renderPlot( ir, (Plot) bl ); ScriptHandler.callFunction( sh, ScriptHandler.AFTER_DRAW_BLOCK, bl, getRunTimeContext( ).getScriptContext( ) ); getRunTimeContext( ).notifyStructureChange( IStructureDefinitionListener.AFTER_DRAW_BLOCK, bl ); if ( bFirstInSequence && !bLastInSequence ) { break; } if ( !bStarted ) { bStarted = true; } } else if ( bl instanceof TitleBlock && bStarted ) { ScriptHandler.callFunction( sh, ScriptHandler.BEFORE_DRAW_BLOCK, bl, getRunTimeContext( ).getScriptContext( ) ); getRunTimeContext( ).notifyStructureChange( IStructureDefinitionListener.BEFORE_DRAW_BLOCK, bl ); renderTitle( ir, (TitleBlock) bl ); ScriptHandler.callFunction( sh, ScriptHandler.AFTER_DRAW_BLOCK, bl, getRunTimeContext( ).getScriptContext( ) ); getRunTimeContext( ).notifyStructureChange( IStructureDefinitionListener.AFTER_DRAW_BLOCK, bl ); } else if ( bl instanceof LabelBlock && bStarted ) { ScriptHandler.callFunction( sh, ScriptHandler.BEFORE_DRAW_BLOCK, bl, getRunTimeContext( ).getScriptContext( ) ); getRunTimeContext( ).notifyStructureChange( IStructureDefinitionListener.BEFORE_DRAW_BLOCK, bl ); renderLabel( ir, bl, StructureSource.createUnknown( bl ) ); ScriptHandler.callFunction( sh, 
ScriptHandler.AFTER_DRAW_BLOCK, bl, getRunTimeContext( ).getScriptContext( ) ); getRunTimeContext( ).notifyStructureChange( IStructureDefinitionListener.AFTER_DRAW_BLOCK, bl ); } else if ( bl instanceof Legend && bStarted && bLastInSequence ) { ScriptHandler.callFunction( sh, ScriptHandler.BEFORE_DRAW_BLOCK, bl, getRunTimeContext( ).getScriptContext( ) ); getRunTimeContext( ).notifyStructureChange( IStructureDefinitionListener.BEFORE_DRAW_BLOCK, bl ); renderLegend( idr, (Legend) bl, htRenderers ); ScriptHandler.callFunction( sh, ScriptHandler.AFTER_DRAW_BLOCK, bl, getRunTimeContext( ).getScriptContext( ) ); getRunTimeContext( ).notifyStructureChange( IStructureDefinitionListener.AFTER_DRAW_BLOCK, bl ); } else if ( bStarted ) { ScriptHandler.callFunction( sh, ScriptHandler.BEFORE_DRAW_BLOCK, bl, getRunTimeContext( ).getScriptContext( ) ); getRunTimeContext( ).notifyStructureChange( IStructureDefinitionListener.BEFORE_DRAW_BLOCK, bl ); renderBlock( ir, bl, StructureSource.createUnknown( bl ) ); ScriptHandler.callFunction( sh, ScriptHandler.AFTER_DRAW_BLOCK, bl, getRunTimeContext( ).getScriptContext( ) ); getRunTimeContext( ).notifyStructureChange( IStructureDefinitionListener.AFTER_DRAW_BLOCK, bl ); } } lTimer = System.currentTimeMillis( ) - lTimer; if ( htRenderers.containsKey( TIMER ) ) { final Long l = (Long) htRenderers.get( TIMER ); htRenderers.put( TIMER, new Long( l.longValue( ) + lTimer ) ); } else { htRenderers.put( TIMER, new Long( lTimer ) ); } if ( bLastInSequence ) { try { fDeferredCacheManager.flushAll( ); // FLUSH DEFERRED CACHE } catch ( ChartException ex ) { // NOTE: RENDERING EXCEPTION ALREADY BEING THROWN throw new ChartException( ChartEnginePlugin.ID, ChartException.RENDERING, ex ); } logger.log( ILogger.INFORMATION, Messages.getString( "info.elapsed.render.time", //$NON-NLS-1$ new Object[]{ new Long( lTimer ) }, rtc.getULocale( ) ) ); htRenderers.remove( TIMER ); } } /** * Renders the legend block based on the legend rendering rules. 
* * @param ipr * @param lg * @param htRenderers * * @throws ChartException */ public void renderLegend( IPrimitiveRenderer ipr, Legend lg, Map htRenderers ) throws ChartException { if ( !lg.isVisible( ) ) // CHECK VISIBILITY { return; } renderBlock( ipr, lg, StructureSource.createLegend( lg ) ); final IDisplayServer xs = getDevice( ).getDisplayServer( ); final double dScale = getDeviceScale( ); Bounds bo = lg.getBounds( ).scaledInstance( dScale ); Size sz = null; // TODO Refactoring: create function double dX, dY; if ( lg.getPosition( ) != Position.INSIDE_LITERAL ) { try { sz = lg.getPreferredSize( xs, cm, rtc ); } catch ( Exception ex ) { throw new ChartException( ChartEnginePlugin.ID, ChartException.RENDERING, ex ); } sz.scale( dScale ); // USE ANCHOR IN POSITIONING THE LEGEND CLIENT AREA WITHIN THE BLOCK // SLACK SPACE dX = bo.getLeft( ) + ( bo.getWidth( ) - sz.getWidth( ) ) / 2; dY = 0; if ( lg.isSetAnchor( ) ) { int iAnchor = lg.getAnchor( ).getValue( ); // swap west/east if ( isRightToLeft( ) ) { if ( iAnchor == Anchor.EAST ) { iAnchor = Anchor.WEST; } else if ( iAnchor == Anchor.NORTH_EAST ) { iAnchor = Anchor.NORTH_WEST; } else if ( iAnchor == Anchor.SOUTH_EAST ) { iAnchor = Anchor.SOUTH_WEST; } else if ( iAnchor == Anchor.WEST ) { iAnchor = Anchor.EAST; } else if ( iAnchor == Anchor.NORTH_WEST ) { iAnchor = Anchor.NORTH_EAST; } else if ( iAnchor == Anchor.SOUTH_WEST ) { iAnchor = Anchor.SOUTH_EAST; } } switch ( iAnchor ) { case Anchor.NORTH : case Anchor.NORTH_EAST : case Anchor.NORTH_WEST : dY = bo.getTop( ); break; case Anchor.SOUTH : case Anchor.SOUTH_EAST : case Anchor.SOUTH_WEST : dY = bo.getTop( ) + bo.getHeight( ) - sz.getHeight( ); break; default : // CENTERED dY = bo.getTop( ) + ( bo.getHeight( ) - sz.getHeight( ) ) / 2; break; } switch ( iAnchor ) { case Anchor.WEST : case Anchor.NORTH_WEST : case Anchor.SOUTH_WEST : dX = bo.getLeft( ); break; case Anchor.EAST : case Anchor.SOUTH_EAST : case Anchor.NORTH_EAST : dX = bo.getLeft( ) + bo.getWidth( ) - sz.getWidth( ); break; default : // CENTERED dX = bo.getLeft( ) + ( bo.getWidth( ) - sz.getWidth( ) ) / 2; break; } } else { dX = bo.getLeft( ) + ( bo.getWidth( ) - sz.getWidth( ) ) / 2; dY = bo.getTop( ) + ( bo.getHeight( ) - sz.getHeight( ) ) / 2; } } else { // USE PREVIOUSLY COMPUTED POSITION IN THE GENERATOR FOR LEGEND // 'INSIDE' PLOT dX = bo.getLeft( ); dY = bo.getTop( ); sz = SizeImpl.create( bo.getWidth( ), bo.getHeight( ) ); } // get cached legend info. final LegendLayoutHints lilh = rtc.getLegendLayoutHints( ); if ( lilh == null ) { throw new ChartException( ChartEnginePlugin.ID, ChartException.RENDERING, "exception.null.legend.layout.hints", //$NON-NLS-1$ Messages.getResourceBundle( rtc.getULocale( ) ) ); } // consider legend title size. 
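// NOTE: a visible legend title reserves space on one edge of the legend block:
// xOffset/yOffset shift the client-area origin and wOffset/hOffset shrink its size
// by the cached title extent; for right-to-left locales the LEFT/RIGHT positions
// are swapped before the offsets are computed.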
Label lgTitle = lg.getTitle( ); double lgTitleWidth = 0, lgTitleHeight = 0; double yOffset = 0, xOffset = 0, wOffset = 0, hOffset = 0; final boolean bRenderLegendTitle = lgTitle != null && lgTitle.isSetVisible( ) && lgTitle.isVisible( ); int iTitlePos = Position.ABOVE; if ( bRenderLegendTitle ) { lgTitle = LabelImpl.copyInstance( lgTitle ); // handle external resource string final String sPreviousValue = lgTitle.getCaption( ).getValue( ); lgTitle.getCaption( ) .setValue( rtc.externalizedMessage( sPreviousValue ) ); // use cached value Size titleSize = lilh.getTitleSize( ); lgTitleWidth = titleSize.getWidth( ); lgTitleHeight = titleSize.getHeight( ); iTitlePos = lg.getTitlePosition( ).getValue( ); // swap left/right if ( isRightToLeft( ) ) { if ( iTitlePos == Position.LEFT ) { iTitlePos = Position.RIGHT; } else if ( iTitlePos == Position.RIGHT ) { iTitlePos = Position.LEFT; } } switch ( iTitlePos ) { case Position.ABOVE : yOffset = lgTitleHeight; hOffset = -yOffset; break; case Position.BELOW : hOffset = -lgTitleHeight; break; case Position.LEFT : xOffset = lgTitleWidth; wOffset = -xOffset; break; case Position.RIGHT : wOffset = -lgTitleWidth; break; } } // RENDER THE LEGEND CLIENT AREA final ClientArea ca = lg.getClientArea( ); final Insets lgIns = lg.getInsets( ).scaledInstance( dScale ); LineAttributes lia = ca.getOutline( ); bo = BoundsImpl.create( dX, dY, sz.getWidth( ), sz.getHeight( ) ); bo = bo.adjustedInstance( lgIns ); dX = bo.getLeft( ); dY = bo.getTop( ); // Adjust bounds. bo.delta( xOffset, yOffset, wOffset, hOffset ); dX = bo.getLeft( ); dY = bo.getTop( ); final double dBaseX = dX; final double dBaseY = dY; final RectangleRenderEvent rre = (RectangleRenderEvent) ( (EventObjectCache) ir ).getEventObject( StructureSource.createLegend( lg ), RectangleRenderEvent.class ); // render client area shadow. if ( ca.getShadowColor( ) != null ) { rre.setBounds( bo.translateInstance( 3, 3 ) ); rre.setBackground( ca.getShadowColor( ) ); ipr.fillRectangle( rre ); } // render client area rre.setBounds( bo ); rre.setOutline( lia ); rre.setBackground( ca.getBackground( ) ); ipr.fillRectangle( rre ); ipr.drawRectangle( rre ); lia = LineAttributesImpl.copyInstance( lia ); lia.setVisible( true ); // SEPARATOR LINES MUST BE VISIBLE LineAttributes liSep = lg.getSeparator( ) == null ? 
lia : lg.getSeparator( ); final SeriesDefinition[] seda = cm.getSeriesForLegend( ); // INITIALIZATION OF VARS USED IN FOLLOWING LOOPS final Orientation o = lg.getOrientation( ); final Direction d = lg.getDirection( ); final Label la = LabelImpl.create( ); la.setCaption( TextImpl.copyInstance( lg.getText( ) ) ); la.getCaption( ).setValue( "X" ); //$NON-NLS-1$ final ITextMetrics itm = xs.getTextMetrics( la ); try { final double dItemHeight = itm.getFullHeight( ); final double dHorizontalSpacing = 4; Insets insCA = ca.getInsets( ).scaledInstance( dScale ); Series seBase; List al; LegendItemRenderingHints lirh; Palette pa; int iPaletteCount; EList elPaletteEntries; Fill fPaletteEntry; final boolean bPaletteByCategory = ( cm.getLegend( ) .getItemType( ) .getValue( ) == LegendItemType.CATEGORIES ); // COMPUTATIONS HERE MUST BE IN SYNC WITH THE ACTUAL RENDERER String strNeedInvert = (String)rtc.getState( "[Legend]bNeedInvert" ); boolean bNeedInvert = Boolean.valueOf(strNeedInvert).booleanValue( ); if ( o.getValue( ) == Orientation.VERTICAL ) { if ( bPaletteByCategory ) { SeriesDefinition sdBase = null; if ( cm instanceof ChartWithAxes ) { // ONLY SUPPORT 1 BASE AXIS FOR NOW final Axis axPrimaryBase = ( (ChartWithAxes) cm ).getBaseAxes( )[0]; if ( axPrimaryBase.getSeriesDefinitions( ).isEmpty( ) ) { // NOTHING TO RENDER (BASE AXIS HAS NO SERIES // DEFINITIONS) return; } // OK TO ASSUME THAT 1 BASE SERIES DEFINITION EXISTS sdBase = (SeriesDefinition) axPrimaryBase.getSeriesDefinitions( ) .get( 0 ); } else if ( cm instanceof ChartWithoutAxes ) { if ( ( (ChartWithoutAxes) cm ).getSeriesDefinitions( ) .isEmpty( ) ) { // NOTHING TO RENDER (BASE AXIS HAS NO SERIES // DEFINITIONS) return; } // OK TO ASSUME THAT 1 BASE SERIES DEFINITION EXISTS sdBase = (SeriesDefinition) ( (ChartWithoutAxes) cm ).getSeriesDefinitions( ) .get( 0 ); } // OK TO ASSUME THAT 1 BASE RUNTIME SERIES EXISTS seBase = (Series) sdBase.getRunTimeSeries( ).get( 0 ); pa = sdBase.getSeriesPalette( ); elPaletteEntries = pa.getEntries( ); iPaletteCount = elPaletteEntries.size( ); if ( lilh.getLegendItemHints( ) == null ) { throw new ChartException( ChartEnginePlugin.ID, ChartException.RENDERING, "exception.null.legend.item.hints", //$NON-NLS-1$ Messages.getResourceBundle( rtc.getULocale( ) ) ); } // use cached value LegendItemHints[] liha = lilh.getLegendItemHints( ); LegendItemHints lih; Map columnCache = searchMaxColumnWidth( liha ); for ( int i = 0; i < liha.length; i++ ) { // render each legend item. 
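// Each LEGEND_ENTRY hint carries its own text, offsets and category index; the
// fill is taken as elPaletteEntries.get( categoryIndex % iPaletteCount ), so the
// palette wraps around when there are more categories than palette entries.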
lih = liha[i]; if ( ( lih.getType( ) & IConstants.LEGEND_ENTRY ) == IConstants.LEGEND_ENTRY ) { la.getCaption( ).setValue( lih.getText( ) ); // CYCLE THROUGH THE PALETTE fPaletteEntry = (Fill) elPaletteEntries.get( lih.getCategoryIndex( ) % iPaletteCount ); lirh = (LegendItemRenderingHints) htRenderers.get( seBase ); double columnWidth = bo.getWidth( ); Double cachedWidth = (Double) columnCache.get( lih ); if ( cachedWidth != null ) { columnWidth = cachedWidth.doubleValue( ) + 3 * dItemHeight / 2 + 2 * insCA.getLeft( ); } renderLegendItem( ipr, lg, la, null, dBaseX + lih.getLeft( ), dBaseY + lih.getTop( ) + insCA.getTop( ), lih.getWidth( ), dItemHeight, lih.getHeight( ), 0, columnWidth, insCA.getLeft( ), dHorizontalSpacing, seBase, fPaletteEntry, lirh, lih.getCategoryIndex( ), dScale ); } } } else if ( d.getValue( ) == Direction.TOP_BOTTOM ) { if ( lilh.getLegendItemHints( ) == null ) { throw new ChartException( ChartEnginePlugin.ID, ChartException.RENDERING, "exception.null.legend.item.hints", //$NON-NLS-1$ Messages.getResourceBundle( rtc.getULocale( ) ) ); } LegendItemHints[] liha = lilh.getLegendItemHints( ); LegendItemHints lih; int k = 0; Map columnCache = searchMaxColumnWidth( liha ); for ( int j = 0; j < seda.length; j++ ) { int iSedaId = bNeedInvert ? seda.length - 1 - j : j; al = seda[iSedaId].getRunTimeSeries( ); pa = seda[iSedaId].getSeriesPalette( ); elPaletteEntries = pa.getEntries( ); iPaletteCount = elPaletteEntries.size( ); for ( int i = 0; i < al.size( ); i++ ) { seBase = (Series) al.get( i ); if ( !seBase.isVisible( ) ) { continue; } lirh = (LegendItemRenderingHints) htRenderers.get( seBase ); if ( k < liha.length ) { lih = liha[k++]; if ( lih.getType( ) == IConstants.LEGEND_ENTRY ) { la.getCaption( ).setValue( lih.getText( ) ); Label valueLa = null; if ( lg.isShowValue( ) ) { valueLa = LabelImpl.copyInstance( seBase.getLabel( ) ); valueLa.getCaption( ) .setValue( lih.getExtraText( ) ); // Bugzilla #185885, make sure the label // will be drawn valueLa.setVisible( true ); } // CYCLE THROUGH THE PALETTE fPaletteEntry = (Fill) elPaletteEntries.get( lih.getCategoryIndex( ) % iPaletteCount ); double columnWidth = bo.getWidth( ); Double cachedWidth = (Double) columnCache.get( lih ); if ( cachedWidth != null ) { columnWidth = cachedWidth.doubleValue( ) + 3 * dItemHeight / 2 + 2 * insCA.getLeft( ); } renderLegendItem( ipr, lg, la, valueLa, dBaseX + lih.getLeft( ), dBaseY + lih.getTop( ) + insCA.getTop( ), lih.getWidth( ), dItemHeight, lih.getHeight( ), lih.getExtraHeight( ), columnWidth, insCA.getLeft( ), dHorizontalSpacing, seBase, fPaletteEntry, lirh, i, dScale ); } } } if ( j < seda.length - 1 && k < liha.length ) { lih = liha[k]; if ( lih.getType( ) == IConstants.LEGEND_SEPERATOR ) { k++; renderSeparator( ipr, lg, liSep, dBaseX + lih.getLeft( ), dBaseY + lih.getTop( ), lih.getWidth( ), Orientation.HORIZONTAL_LITERAL ); } } } } else if ( d.getValue( ) == Direction.LEFT_RIGHT ) { if ( lilh.getLegendItemHints( ) == null ) { throw new ChartException( ChartEnginePlugin.ID, ChartException.RENDERING, "exception.null.legend.item.hints", //$NON-NLS-1$ Messages.getResourceBundle( rtc.getULocale( ) ) ); } LegendItemHints[] liha = lilh.getLegendItemHints( ); LegendItemHints lih; int k = 0; Map columnCache = searchMaxColumnWidth( liha ); for ( int j = 0; j < seda.length; j++ ) { int iSedaId = bNeedInvert ? 
seda.length - 1 - j : j; al = seda[iSedaId].getRunTimeSeries( ); pa = seda[iSedaId].getSeriesPalette( ); elPaletteEntries = pa.getEntries( ); iPaletteCount = elPaletteEntries.size( ); for ( int i = 0; i < al.size( ); i++ ) { seBase = (Series) al.get( i ); if ( !seBase.isVisible( ) ) { continue; } lirh = (LegendItemRenderingHints) htRenderers.get( seBase ); if ( k < liha.length ) { lih = liha[k++]; if ( lih.getType( ) == IConstants.LEGEND_ENTRY ) { la.getCaption( ).setValue( lih.getText( ) ); Label valueLa = null; if ( lg.isShowValue( ) ) { valueLa = LabelImpl.copyInstance( seBase.getLabel( ) ); valueLa.getCaption( ) .setValue( lih.getExtraText( ) ); // Bugzilla #185885, make sure the label // will be drawn valueLa.setVisible( true ); } // CYCLE THROUGH THE PALETTE fPaletteEntry = (Fill) elPaletteEntries.get( lih.getCategoryIndex( ) % iPaletteCount ); double columnWidth = bo.getWidth( ); Double cachedWidth = (Double) columnCache.get( lih ); if ( cachedWidth != null ) { columnWidth = cachedWidth.doubleValue( ) + 3 * dItemHeight / 2 + 2 * insCA.getLeft( ); } renderLegendItem( ipr, lg, la, valueLa, dBaseX + lih.getLeft( ), dBaseY + lih.getTop( ) + insCA.getTop( ), lih.getWidth( ), dItemHeight, lih.getHeight( ), lih.getExtraHeight( ), columnWidth, insCA.getLeft( ), dHorizontalSpacing, seBase, fPaletteEntry, lirh, i, dScale ); } } } if ( j < seda.length - 1 && k < liha.length ) { lih = liha[k]; if ( lih.getType( ) == IConstants.LEGEND_SEPERATOR ) { k++; renderSeparator( ipr, lg, liSep, dBaseX + lih.getLeft( ), dBaseY + lih.getTop( ), bo.getHeight( ), Orientation.VERTICAL_LITERAL ); } } } } else { throw new ChartException( ChartEnginePlugin.ID, ChartException.RENDERING, "exception.illegal.legend.direction", //$NON-NLS-1$ new Object[]{ d.getName( ) }, Messages.getResourceBundle( rtc.getULocale( ) ) ); } } else if ( o.getValue( ) == Orientation.HORIZONTAL ) { if ( bPaletteByCategory ) { SeriesDefinition sdBase = null; if ( cm instanceof ChartWithAxes ) { // ONLY SUPPORT 1 BASE AXIS FOR NOW final Axis axPrimaryBase = ( (ChartWithAxes) cm ).getBaseAxes( )[0]; if ( axPrimaryBase.getSeriesDefinitions( ).isEmpty( ) ) { // NOTHING TO RENDER (BASE AXIS HAS NO SERIES // DEFINITIONS) return; } // OK TO ASSUME THAT 1 BASE SERIES DEFINITION EXISTS sdBase = (SeriesDefinition) axPrimaryBase.getSeriesDefinitions( ) .get( 0 ); } else if ( cm instanceof ChartWithoutAxes ) { if ( ( (ChartWithoutAxes) cm ).getSeriesDefinitions( ) .isEmpty( ) ) { // NOTHING TO RENDER (BASE AXIS HAS NO SERIES // DEFINITIONS) return; } // OK TO ASSUME THAT 1 BASE SERIES DEFINITION EXISTS sdBase = (SeriesDefinition) ( (ChartWithoutAxes) cm ).getSeriesDefinitions( ) .get( 0 ); } // OK TO ASSUME THAT 1 BASE RUNTIME SERIES EXISTS seBase = (Series) sdBase.getRunTimeSeries( ).get( 0 ); pa = sdBase.getSeriesPalette( ); elPaletteEntries = pa.getEntries( ); iPaletteCount = elPaletteEntries.size( ); if ( lilh.getLegendItemHints( ) == null ) { throw new ChartException( ChartEnginePlugin.ID, ChartException.RENDERING, "exception.null.legend.item.hints", //$NON-NLS-1$ Messages.getResourceBundle( rtc.getULocale( ) ) ); } // use cached value LegendItemHints[] liha = lilh.getLegendItemHints( ); LegendItemHints lih; for ( int i = 0; i < liha.length; i++ ) { // render each legend item. 
lih = liha[i]; if ( ( lih.getType( ) & IConstants.LEGEND_ENTRY ) == IConstants.LEGEND_ENTRY ) { la.getCaption( ).setValue( lih.getText( ) ); // CYCLE THROUGH THE PALETTE fPaletteEntry = (Fill) elPaletteEntries.get( lih.getCategoryIndex( ) % iPaletteCount ); lirh = (LegendItemRenderingHints) htRenderers.get( seBase ); renderLegendItem( ipr, lg, la, null, dBaseX + lih.getLeft( ), dBaseY + lih.getTop( ) + insCA.getTop( ), lih.getWidth( ), dItemHeight, lih.getHeight( ), 0, lih.getWidth( ) + 3 * dItemHeight / 2 + 2 * insCA.getLeft( ), insCA.getLeft( ), dHorizontalSpacing, seBase, fPaletteEntry, lirh, lih.getCategoryIndex( ), dScale ); } } } else if ( d.getValue( ) == Direction.TOP_BOTTOM ) { if ( lilh.getLegendItemHints( ) == null ) { throw new ChartException( ChartEnginePlugin.ID, ChartException.RENDERING, "exception.null.legend.item.hints", //$NON-NLS-1$ Messages.getResourceBundle( rtc.getULocale( ) ) ); } LegendItemHints[] liha = lilh.getLegendItemHints( ); LegendItemHints lih; int k = 0; for ( int j = 0; j < seda.length; j++ ) { int iSedaId = bNeedInvert ? seda.length - 1 - j : j; al = seda[iSedaId].getRunTimeSeries( ); pa = seda[iSedaId].getSeriesPalette( ); elPaletteEntries = pa.getEntries( ); iPaletteCount = elPaletteEntries.size( ); for ( int i = 0; i < al.size( ); i++ ) { seBase = (Series) al.get( i ); if ( !seBase.isVisible( ) ) { continue; } lirh = (LegendItemRenderingHints) htRenderers.get( seBase ); if ( k < liha.length ) { lih = liha[k++]; if ( lih.getType( ) == IConstants.LEGEND_ENTRY ) { la.getCaption( ).setValue( lih.getText( ) ); Label valueLa = null; if ( lg.isShowValue( ) ) { valueLa = LabelImpl.copyInstance( seBase.getLabel( ) ); valueLa.getCaption( ) .setValue( lih.getExtraText( ) ); // Bugzilla #185885, make sure the label // will be drawn valueLa.setVisible( true ); } // CYCLE THROUGH THE PALETTE fPaletteEntry = (Fill) elPaletteEntries.get( lih.getCategoryIndex( ) % iPaletteCount ); renderLegendItem( ipr, lg, la, valueLa, dBaseX + lih.getLeft( ), dBaseY + lih.getTop( ) + insCA.getTop( ), lih.getWidth( ), dItemHeight, lih.getHeight( ), lih.getExtraHeight( ), lih.getWidth( ) + 3 * dItemHeight / 2 + 2 * insCA.getLeft( ), insCA.getLeft( ), dHorizontalSpacing, seBase, fPaletteEntry, lirh, i, dScale ); } } } if ( j < seda.length - 1 && k < liha.length ) { lih = liha[k]; if ( lih.getType( ) == IConstants.LEGEND_SEPERATOR ) { k++; renderSeparator( ipr, lg, liSep, dBaseX + lih.getLeft( ), dBaseY + lih.getTop( ), bo.getWidth( ), Orientation.HORIZONTAL_LITERAL ); } } } } else if ( d.getValue( ) == Direction.LEFT_RIGHT ) { if ( lilh.getLegendItemHints( ) == null ) { throw new ChartException( ChartEnginePlugin.ID, ChartException.RENDERING, "exception.null.legend.item.hints", //$NON-NLS-1$ Messages.getResourceBundle( rtc.getULocale( ) ) ); } LegendItemHints[] liha = lilh.getLegendItemHints( ); LegendItemHints lih; int k = 0; for ( int j = 0; j < seda.length; j++ ) { int iSedaId = bNeedInvert ? 
seda.length - 1 - j : j; al = seda[iSedaId].getRunTimeSeries( ); pa = seda[iSedaId].getSeriesPalette( ); elPaletteEntries = pa.getEntries( ); iPaletteCount = elPaletteEntries.size( ); for ( int i = 0; i < al.size( ); i++ ) { seBase = (Series) al.get( i ); if ( !seBase.isVisible( ) ) { continue; } lirh = (LegendItemRenderingHints) htRenderers.get( seBase ); if ( k < liha.length ) { lih = liha[k++]; if ( lih.getType( ) == IConstants.LEGEND_ENTRY ) { la.getCaption( ).setValue( lih.getText( ) ); Label valueLa = null; if ( lg.isShowValue( ) ) { valueLa = LabelImpl.copyInstance( seBase.getLabel( ) ); valueLa.getCaption( ) .setValue( lih.getExtraText( ) ); // Bugzilla #185885, make sure the label // will be drawn valueLa.setVisible( true ); } // CYCLE THROUGH THE PALETTE fPaletteEntry = (Fill) elPaletteEntries.get( lih.getCategoryIndex( ) % iPaletteCount ); renderLegendItem( ipr, lg, la, valueLa, dBaseX + lih.getLeft( ), dBaseY + lih.getTop( ) + insCA.getTop( ), lih.getWidth( ), dItemHeight, lih.getHeight( ), lih.getExtraHeight( ), lih.getWidth( ) + 3 * dItemHeight / 2 + 2 * insCA.getLeft( ), insCA.getLeft( ), dHorizontalSpacing, seBase, fPaletteEntry, lirh, i, dScale ); } } } if ( j < seda.length - 1 && k < liha.length ) { lih = liha[k]; if ( lih.getType( ) == IConstants.LEGEND_SEPERATOR ) { k++; renderSeparator( ipr, lg, liSep, dBaseX + lih.getLeft( ), dBaseY + lih.getTop( ), lih.getHeight( ), Orientation.VERTICAL_LITERAL ); } } } } else { throw new ChartException( ChartEnginePlugin.ID, ChartException.RENDERING, "exception.illegal.legend.direction", //$NON-NLS-1$ new Object[]{ d.getName( ) }, Messages.getResourceBundle( rtc.getULocale( ) ) ); } } else { throw new ChartException( ChartEnginePlugin.ID, ChartException.RENDERING, "exception.illegal.legend.orientation", //$NON-NLS-1$ new Object[]{ o.getName( ) }, Messages.getResourceBundle( rtc.getULocale( ) ) ); } // Render legend title if defined. 
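// The title block is positioned against the final legend bounds: centered
// horizontally and stacked outside the top/bottom edge for ABOVE/BELOW, or centered
// vertically and placed outside the left/right edge for LEFT/RIGHT, then drawn with
// RENDER_TEXT_IN_BLOCK using center/center alignment.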
if ( bRenderLegendTitle ) { double lX = bo.getLeft( ); double lY = bo.getTop( ); switch ( iTitlePos ) { case Position.ABOVE : lX = bo.getLeft( ) + ( bo.getWidth( ) - lgTitleWidth ) / 2d; lY = bo.getTop( ) - lgTitleHeight; break; case Position.BELOW : lX = bo.getLeft( ) + ( bo.getWidth( ) - lgTitleWidth ) / 2d; lY = bo.getTop( ) + bo.getHeight( ); break; case Position.LEFT : lX = bo.getLeft( ) - lgTitleWidth; lY = bo.getTop( ) + ( bo.getHeight( ) - lgTitleHeight ) / 2d; break; case Position.RIGHT : lX = bo.getLeft( ) + bo.getWidth( ); lY = bo.getTop( ) + ( bo.getHeight( ) - lgTitleHeight ) / 2d; break; } final TextRenderEvent tre = (TextRenderEvent) ( (EventObjectCache) ir ).getEventObject( WrappedStructureSource.createLegendTitle( lg, lgTitle ), TextRenderEvent.class ); tre.setBlockBounds( BoundsImpl.create( lX, lY, lgTitleWidth, lgTitleHeight ) ); TextAlignment ta = TextAlignmentImpl.create( ); ta.setHorizontalAlignment( HorizontalAlignment.CENTER_LITERAL ); ta.setVerticalAlignment( VerticalAlignment.CENTER_LITERAL ); tre.setBlockAlignment( ta ); tre.setLabel( lgTitle ); tre.setAction( TextRenderEvent.RENDER_TEXT_IN_BLOCK ); ipr.drawText( tre ); } } finally { itm.dispose( ); // DISPOSE RESOURCES AFTER USE } } /** * Internally used to render a legend item separator * * @param ipr * @param lg * @param dX * @param dY * @param dLength * @param o */ protected static final void renderSeparator( IPrimitiveRenderer ipr, Legend lg, LineAttributes lia, double dX, double dY, double dLength, Orientation o ) throws ChartException { if ( o.getValue( ) == Orientation.HORIZONTAL ) { final LineRenderEvent lre = (LineRenderEvent) ( (EventObjectCache) ipr ).getEventObject( StructureSource.createLegend( lg ), LineRenderEvent.class ); lre.setLineAttributes( lia ); lre.setStart( LocationImpl.create( dX, dY ) ); lre.setEnd( LocationImpl.create( dX + dLength, dY ) ); ipr.drawLine( lre ); } else if ( o.getValue( ) == Orientation.VERTICAL ) { final LineRenderEvent lre = (LineRenderEvent) ( (EventObjectCache) ipr ).getEventObject( StructureSource.createLegend( lg ), LineRenderEvent.class ); lre.setLineAttributes( lia ); lre.setStart( LocationImpl.create( dX, dY ) ); lre.setEnd( LocationImpl.create( dX, dY + dLength ) ); ipr.drawLine( lre ); } } /** * Search the width for each column when legend is vertical. 
* * @param liha * @return */ protected Map searchMaxColumnWidth( LegendItemHints[] liha ) { HashMap rt = new HashMap( ); int start = -1; double x = 0; double maxWidth = 0; for ( int i = 0; i < liha.length; i++ ) { if ( liha[i].getType( ) == IConstants.LEGEND_SEPERATOR ) { continue; } if ( start < 0 ) { start = i; x = liha[i].getLeft( ); maxWidth = liha[i].getWidth( ); } else if ( liha[i].getLeft( ) != x ) { for ( int j = start; j < i; j++ ) { rt.put( liha[j], new Double( maxWidth ) ); } start = i; x = liha[i].getLeft( ); maxWidth = liha[i].getWidth( ); } else { maxWidth = Math.max( maxWidth, liha[i].getWidth( ) ); } } for ( int j = Math.max( start, 0 ); j < liha.length; j++ ) { rt.put( liha[j], new Double( maxWidth ) ); } return rt; } /** * Internally provided to render a single legend entry * * @param ipr * @param lg * @param la * @param dX * @param dY * @param dW * @param dItemHeight * @param dLeftInset * @param dHorizontalSpacing * @param se * @param fPaletteEntry * @param lirh * @param i * data row index * * @throws RenderingException */ protected final void renderLegendItem( IPrimitiveRenderer ipr, Legend lg, Label la, Label valueLa, double dX, double dY, double dW, double dItemHeight, double dFullHeight, double dExtraHeight, double dColumnWidth, double dLeftInset, double dHorizontalSpacing, Series se, Fill fPaletteEntry, LegendItemRenderingHints lirh, int dataIndex, double dScale ) throws ChartException { // Copy correct font setting into current legend item label. if ( la != null && la.getCaption( ) != null && valueLa != null && valueLa.getCaption( ) != null ) { valueLa.getCaption( ).setFont( la.getCaption( ).getFont( ) ); valueLa.getCaption( ).setColor( la.getCaption( ).getColor( ) ); } LegendEntryRenderingHints lerh = new LegendEntryRenderingHints( la, valueLa, dataIndex, fPaletteEntry ); ScriptHandler sh = getRunTimeContext( ).getScriptHandler( ); // TODO replace with LegendEntryRenderingHints ScriptHandler.callFunction( sh, ScriptHandler.BEFORE_DRAW_LEGEND_ENTRY, la, getRunTimeContext( ).getScriptContext( ) ); getRunTimeContext( ).notifyStructureChange( IStructureDefinitionListener.BEFORE_DRAW_LEGEND_ENTRY, la ); final Bounds bo = lirh.getLegendGraphicBounds( ); if ( isRightToLeft( ) ) { bo.setLeft( ( dX + dColumnWidth - dLeftInset - 1 - 3 * dItemHeight / 2 ) / dScale ); } else { bo.setLeft( ( dX + dLeftInset + 1 ) / dScale ); } bo.setTop( ( dY + 1 + ( dFullHeight - dItemHeight ) / 2 ) / dScale ); bo.setWidth( ( 3 * dItemHeight / 2 ) / dScale ); bo.setHeight( ( dItemHeight - 2 ) / dScale ); ScriptHandler.callFunction( sh, ScriptHandler.BEFORE_DRAW_LEGEND_ITEM, lerh, bo, getRunTimeContext( ).getScriptContext( ) ); getRunTimeContext( ).notifyStructureChange( IStructureDefinitionListener.BEFORE_DRAW_LEGEND_ITEM, lerh ); bo.setLeft( bo.getLeft( ) * dScale ); bo.setTop( bo.getTop( ) * dScale ); bo.setWidth( bo.getWidth( ) * dScale ); bo.setHeight( bo.getHeight( ) * dScale ); final BaseRenderer br = lirh.getRenderer( ); br.renderLegendGraphic( ipr, lg, fPaletteEntry, bo ); // 1. Draw series identify label. 
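// The item label is anchored beside the legend graphic: the x anchor is mirrored
// for right-to-left locales (text flows to the LEFT of the anchor instead of the
// RIGHT), and dDeltaHeight offsets the y anchor according to the caption rotation.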
final TextRenderEvent tre = (TextRenderEvent) ( (EventObjectCache) ir ).getEventObject( StructureSource.createLegend( lg ), TextRenderEvent.class ); double dLaAngle = la.getCaption( ).getFont( ).getRotation( ); if ( isRightToLeft( ) ) { dLaAngle = -dLaAngle; } double dDeltaHeight = 0; if ( dLaAngle > 0 && dLaAngle < 90 ) { dDeltaHeight = ( bo.getHeight( ) + dFullHeight - dItemHeight ) / 2; } else if ( dLaAngle < 0 && dLaAngle > -90 ) { dDeltaHeight = ( bo.getHeight( ) - dFullHeight + dItemHeight ) / 2; } else if ( dLaAngle == 0 || dLaAngle == 90 || dLaAngle == -90 ) { dDeltaHeight = bo.getHeight( ) / 2; } if ( isRightToLeft( ) ) { tre.setLocation( LocationImpl.create( dX + dColumnWidth - dLeftInset - 3 * dItemHeight / 2 - dHorizontalSpacing, bo.getTop( ) + dDeltaHeight ) ); tre.setTextPosition( TextRenderEvent.LEFT ); } else { tre.setLocation( LocationImpl.create( dX + dLeftInset + ( 3 * dItemHeight / 2 ) + dHorizontalSpacing, bo.getTop( ) + dDeltaHeight ) ); tre.setTextPosition( TextRenderEvent.RIGHT ); } if ( la.isVisible( ) ) { tre.setLabel( la ); tre.setAction( TextRenderEvent.RENDER_TEXT_AT_LOCATION ); ipr.drawText( tre ); } // 2. Draw legend value label. if ( valueLa != null ) { final double dValueWidth = dColumnWidth - 2 * dLeftInset; Label tmpLa = LabelImpl.copyInstance( valueLa ); TextAlignment ta = TextAlignmentImpl.create( ); ta.setHorizontalAlignment( HorizontalAlignment.CENTER_LITERAL ); ta.setVerticalAlignment( VerticalAlignment.CENTER_LITERAL ); tre.setBlockAlignment( ta ); tre.setBlockBounds( BoundsImpl.create( dX + dLeftInset + 1, dY + dFullHeight + 1, dValueWidth - 2, dExtraHeight - 1 ) ); tre.setLabel( tmpLa ); tre.setAction( TextRenderEvent.RENDER_TEXT_IN_BLOCK ); ipr.drawText( tre ); } if ( isInteractivityEnabled( ) ) { // PROCESS 'SERIES LEVEL' TRIGGERS USING SOURCE='bs' Trigger tg; EList elTriggers = lg.getTriggers( ); Location[] loaHotspot = new Location[4]; // use the complete legend item area as the hotspot loaHotspot[0] = LocationImpl.create( dX + 1, dY + 1 ); loaHotspot[1] = LocationImpl.create( dX + dColumnWidth - 1, dY + 1 ); loaHotspot[2] = LocationImpl.create( dX + dColumnWidth - 1, dY + dFullHeight + dExtraHeight - 1 ); loaHotspot[3] = LocationImpl.create( dX + 1, dY + dFullHeight + dExtraHeight - 1 ); Trigger buildinTg = null; if ( cm.getInteractivity( ) != null ) { boolean customed = false; switch ( cm.getInteractivity( ).getLegendBehavior( ).getValue( ) ) { case LegendBehaviorType.HIGHLIGHT_SERIE : for ( Iterator itr = elTriggers.iterator( ); itr.hasNext( ); ) { tg = (Trigger) itr.next( ); if ( tg.getCondition( ) == TriggerCondition.ONCLICK_LITERAL || tg.getAction( ).getType( ) == ActionType.HIGHLIGHT_LITERAL ) { customed = true; } } if ( !customed ) { buildinTg = TriggerImpl.create( TriggerCondition.ONCLICK_LITERAL, ActionImpl.create( ActionType.HIGHLIGHT_LITERAL, SeriesValueImpl.create( String.valueOf( se.getSeriesIdentifier( ) ) ) ) ); } break; case LegendBehaviorType.TOGGLE_SERIE_VISIBILITY : for ( Iterator itr = elTriggers.iterator( ); itr.hasNext( ); ) { tg = (Trigger) itr.next( ); if ( tg.getCondition( ) == TriggerCondition.ONCLICK_LITERAL || tg.getAction( ).getType( ) == ActionType.TOGGLE_VISIBILITY_LITERAL ) { customed = true; } } if ( !customed ) { buildinTg = TriggerImpl.create( TriggerCondition.ONCLICK_LITERAL, ActionImpl.create( ActionType.TOGGLE_VISIBILITY_LITERAL, SeriesValueImpl.create( String.valueOf( se.getSeriesIdentifier( ) ) ) ) ); } break; case LegendBehaviorType.NONE : break; } } if ( !elTriggers.isEmpty( ) || buildinTg != null ) { 
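// Register the full legend item rectangle (loaHotspot) as the interactive hot spot:
// user-defined triggers are copied and processed first, then the built-in
// highlight / toggle-visibility trigger, and all of them are attached to a single
// InteractionEvent whose source is the series (or the data point for category legends).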
final StructureSource source; if ( this.cm.getLegend( ).getItemType( ) == LegendItemType.CATEGORIES_LITERAL ) { final DataPointHints dph = new DataPointHints( la, null, null, null, null, null, null, null, null, dataIndex, null, 0, null ); source = WrappedStructureSource.createSeriesDataPoint( se, dph ); } else { source = StructureSource.createSeries( se ); } final InteractionEvent iev = (InteractionEvent) ( (EventObjectCache) ipr ).getEventObject( source, InteractionEvent.class ); for ( int t = 0; t < elTriggers.size( ); t++ ) { tg = TriggerImpl.copyInstance( (Trigger) elTriggers.get( t ) ); processTrigger( tg, WrappedStructureSource.createLegendEntry( lg, lerh ) ); iev.addTrigger( tg ); } if ( buildinTg != null ) { processTrigger( buildinTg, WrappedStructureSource.createLegendEntry( lg, lerh ) ); iev.addTrigger( buildinTg ); } final PolygonRenderEvent pre = (PolygonRenderEvent) ( (EventObjectCache) ipr ).getEventObject( source, PolygonRenderEvent.class ); pre.setPoints( loaHotspot ); iev.setHotSpot( pre ); ipr.enableInteraction( iev ); } } ScriptHandler.callFunction( sh, ScriptHandler.AFTER_DRAW_LEGEND_ITEM, lerh, bo, getRunTimeContext( ).getScriptContext( ) ); getRunTimeContext( ).notifyStructureChange( IStructureDefinitionListener.AFTER_DRAW_LEGEND_ITEM, lerh ); ScriptHandler.callFunction( sh, ScriptHandler.AFTER_DRAW_LEGEND_ENTRY, la, getRunTimeContext( ).getScriptContext( ) ); getRunTimeContext( ).notifyStructureChange( IStructureDefinitionListener.AFTER_DRAW_LEGEND_ENTRY, la ); } /** * Renders the Plot * * @param ipr * The Primitive Renderer of a Device Renderer * @param p * The Plot to render * * @throws ChartException */ public void renderPlot( IPrimitiveRenderer ipr, Plot p ) throws ChartException { if ( !p.isVisible( ) ) // CHECK VISIBILITY { return; } final boolean bFirstInSequence = ( iSeriesIndex == 0 ); final boolean bLastInSequence = ( iSeriesIndex == iSeriesCount - 1 ); if ( bFirstInSequence ) { renderBackground( ipr, p ); } if ( getSeries( ) != null ) { ScriptHandler.callFunction( getRunTimeContext( ).getScriptHandler( ), ScriptHandler.BEFORE_DRAW_SERIES, getSeries( ), this, getRunTimeContext( ).getScriptContext( ) ); getRunTimeContext( ).notifyStructureChange( IStructureDefinitionListener.BEFORE_DRAW_SERIES, getSeries( ) ); renderSeries( ipr, p, srh ); // CALLS THE APPROPRIATE SUBCLASS // FOR ScriptHandler.callFunction( getRunTimeContext( ).getScriptHandler( ), ScriptHandler.AFTER_DRAW_SERIES, getSeries( ), this, getRunTimeContext( ).getScriptContext( ) ); getRunTimeContext( ).notifyStructureChange( IStructureDefinitionListener.AFTER_DRAW_SERIES, getSeries( ) ); } if ( bLastInSequence ) { // RENDER OVERLAYS HERE IF ANY } } /** * Renders the background. 
* * @param ipr * @param p * * @throws ChartException */ protected void renderBackground( IPrimitiveRenderer ipr, Plot p ) throws ChartException { final double dScale = getDeviceScale( ); final RectangleRenderEvent rre = (RectangleRenderEvent) ( (EventObjectCache) ipr ).getEventObject( StructureSource.createPlot( p ), RectangleRenderEvent.class ); rre.updateFrom( p, dScale ); // POINTS => PIXELS ipr.fillRectangle( rre ); ipr.drawRectangle( rre ); Object oComputations = getComputations( ); if ( oComputations instanceof PlotWithoutAxes ) { final ClientArea ca = p.getClientArea( ); Bounds cbo = rre.getBounds( ); // render client area shadow if ( ca.getShadowColor( ) != null ) { rre.setBounds( cbo.translateInstance( 3, 3 ) ); rre.setBackground( ca.getShadowColor( ) ); ipr.fillRectangle( rre ); } // render client area rre.setBounds( cbo ); rre.setBackground( ca.getBackground( ) ); ipr.fillRectangle( rre ); ipr.drawRectangle( rre ); if ( !ca.getOutline( ).isSetVisible( ) ) { throw new ChartException( ChartEnginePlugin.ID, ChartException.RENDERING, "exception.client.area.outline.visibility", //$NON-NLS-1$ Messages.getResourceBundle( rtc.getULocale( ) ) ); } if ( ca.getOutline( ).isVisible( ) ) { final Bounds bo = p.getBounds( ).scaledInstance( dScale ); // POINTS // PIXELS final PlotWithoutAxes pwoa = (PlotWithoutAxes) oComputations; final Size sz = SizeImpl.create( bo.getWidth( ) / pwoa.getColumnCount( ), bo.getHeight( ) / pwoa.getRowCount( ) ); final LineRenderEvent lre = (LineRenderEvent) ( (EventObjectCache) ipr ).getEventObject( StructureSource.createPlot( p ), LineRenderEvent.class ); lre.setLineAttributes( ca.getOutline( ) ); int colCount = pwoa.getColumnCount( ); int rowCount = pwoa.getRowCount( ); ChartWithoutAxes cwoa = pwoa.getModel( ); if ( cwoa instanceof DialChart && ( (DialChart) cwoa ).isDialSuperimposition( ) ) { colCount = 1; rowCount = 1; } for ( int i = 1; i < colCount; i++ ) { lre.setStart( LocationImpl.create( bo.getLeft( ) + i * sz.getWidth( ), bo.getTop( ) ) ); lre.setEnd( LocationImpl.create( bo.getLeft( ) + i * sz.getWidth( ), bo.getTop( ) + bo.getHeight( ) ) ); ipr.drawLine( lre ); } for ( int j = 1; j < rowCount; j++ ) { lre.setStart( LocationImpl.create( bo.getLeft( ), bo.getTop( ) + j * sz.getHeight( ) ) ); lre.setEnd( LocationImpl.create( bo.getLeft( ) + bo.getWidth( ), bo.getTop( ) + j * sz.getHeight( ) ) ); ipr.drawLine( lre ); } } } } /** * Renders the block. * * @param ipr * @param b * * @throws ChartException */ protected void renderBlock( IPrimitiveRenderer ipr, Block b, Object oSource ) throws ChartException { final double dScale = getDeviceScale( ); final RectangleRenderEvent rre = (RectangleRenderEvent) ( (EventObjectCache) ipr ).getEventObject( oSource, RectangleRenderEvent.class ); rre.updateFrom( b, dScale ); ipr.fillRectangle( rre ); ipr.drawRectangle( rre ); } /** * Renders the chart block. 
* * @param ipr * @param b * * @throws ChartException */ protected void renderChartBlock( IPrimitiveRenderer ipr, Block b, Object oSource ) throws ChartException { final double dScale = getDeviceScale( ); final RectangleRenderEvent rre = (RectangleRenderEvent) ( (EventObjectCache) ipr ).getEventObject( oSource, RectangleRenderEvent.class ); rre.updateFrom( b, dScale ); ipr.fillRectangle( rre ); ipr.drawRectangle( rre ); if ( isInteractivityEnabled( ) ) { Trigger tg; EList elTriggers = b.getTriggers( ); Location[] loaHotspot = new Location[4]; Bounds bo = b.getBounds( ).scaledInstance( dScale ); double dLeft = bo.getLeft( ); double dTop = bo.getTop( ); double dWidth = bo.getWidth( ); double dHeight = bo.getHeight( ); loaHotspot[0] = LocationImpl.create( dLeft, dTop ); loaHotspot[1] = LocationImpl.create( dLeft + dWidth, dTop ); loaHotspot[2] = LocationImpl.create( dLeft + dWidth, dTop + dHeight ); loaHotspot[3] = LocationImpl.create( dLeft, dTop + dHeight ); if ( !elTriggers.isEmpty( ) ) { final InteractionEvent iev = (InteractionEvent) ( (EventObjectCache) ipr ).getEventObject( StructureSource.createChartBlock( b ), InteractionEvent.class ); for ( int t = 0; t < elTriggers.size( ); t++ ) { tg = TriggerImpl.copyInstance( (Trigger) elTriggers.get( t ) ); processTrigger( tg, StructureSource.createChartBlock( b ) ); iev.addTrigger( tg ); } final PolygonRenderEvent pre = (PolygonRenderEvent) ( (EventObjectCache) ipr ).getEventObject( StructureSource.createChartBlock( b ), PolygonRenderEvent.class ); pre.setPoints( loaHotspot ); iev.setHotSpot( pre ); ipr.enableInteraction( iev ); } } } /** * Renders label. * * @param ipr * @param b * * @throws ChartException */ public void renderLabel( IPrimitiveRenderer ipr, Block b, Object oSource ) throws ChartException { if ( !b.isVisible( ) ) { return; } renderBlock( ipr, b, oSource ); final double dScale = getDeviceScale( ); final LabelBlock lb = (LabelBlock) b; final TextRenderEvent tre = (TextRenderEvent) ( (EventObjectCache) ipr ).getEventObject( oSource, TextRenderEvent.class ); // need backup original non-externalized value. 
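// updateFrom() swaps in the externalized caption text for rendering and returns the
// original value, which is restored afterwards so the chart model is not left holding
// the localized string.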
final String sRestoreValue = tre.updateFrom( lb, dScale, rtc ); if ( lb.getLabel( ).isVisible( ) ) { ipr.drawText( tre ); } lb.getLabel( ).getCaption( ).setValue( sRestoreValue ); } /** * Renders the Chart Title Block * * @param ipr * The Primitive Renderer of a Device Renderer * @param b * The TitleBlock to render * * @throws ChartException */ public void renderTitle( IPrimitiveRenderer ipr, TitleBlock b ) throws ChartException { // switch lable alignment TextAlignment restoreValue = b.getLabel( ) .getCaption( ) .getFont( ) .getAlignment( ); b.getLabel( ) .getCaption( ) .getFont( ) .setAlignment( switchTextAlignment( restoreValue ) ); renderLabel( ipr, b, StructureSource.createTitle( b ) ); // restore original value b.getLabel( ).getCaption( ).getFont( ).setAlignment( restoreValue ); if ( isInteractivityEnabled( ) ) { Trigger tg; EList elTriggers = b.getTriggers( ); Location[] loaHotspot = new Location[4]; final double dScale = getDeviceScale( ); Bounds bo = b.getBounds( ).scaledInstance( dScale ); double dLeft = bo.getLeft( ); double dTop = bo.getTop( ); double dWidth = bo.getWidth( ); double dHeight = bo.getHeight( ); loaHotspot[0] = LocationImpl.create( dLeft, dTop ); loaHotspot[1] = LocationImpl.create( dLeft + dWidth, dTop ); loaHotspot[2] = LocationImpl.create( dLeft + dWidth, dTop + dHeight ); loaHotspot[3] = LocationImpl.create( dLeft, dTop + dHeight ); if ( !elTriggers.isEmpty( ) ) { final InteractionEvent iev = (InteractionEvent) ( (EventObjectCache) ipr ).getEventObject( StructureSource.createTitle( b ), InteractionEvent.class ); for ( int t = 0; t < elTriggers.size( ); t++ ) { tg = TriggerImpl.copyInstance( (Trigger) elTriggers.get( t ) ); processTrigger( tg, StructureSource.createTitle( b ) ); iev.addTrigger( tg ); } final PolygonRenderEvent pre = (PolygonRenderEvent) ( (EventObjectCache) ipr ).getEventObject( StructureSource.createTitle( b ), PolygonRenderEvent.class ); pre.setPoints( loaHotspot ); iev.setHotSpot( pre ); ipr.enableInteraction( iev ); } } } /** * Creates empty renderer instance. * * @param cm * @param oComputations * @return */ private static final BaseRenderer[] createEmptyInstance( Chart cm, RunTimeContext rtc, Object oComputations ) { final BaseRenderer[] brna = new BaseRenderer[1]; final AxesRenderer ar = new EmptyWithAxes( ); ar.iSeriesIndex = 0; ar.set( cm, oComputations, null, null, null ); ar.set( rtc ); brna[0] = ar; return brna; } /** * This method returns appropriate renders for the given chart model. It * uses extension points to identify a renderer corresponding to a custom * series. 
* * @param cm * @param rtc * @param oComputations * * @return * @throws ChartException */ public static final BaseRenderer[] instances( Chart cm, RunTimeContext rtc, Object oComputations ) throws ChartException { final PluginSettings ps = PluginSettings.instance( ); BaseRenderer[] brna = null; final boolean bPaletteByCategory = ( cm.getLegend( ) .getItemType( ) .getValue( ) == LegendItemType.CATEGORIES ); if ( cm instanceof ChartWithAxes ) { final ChartWithAxes cwa = (ChartWithAxes) cm; final Axis[] axa = cwa.getPrimaryBaseAxes( ); Axis axPrimaryBase = axa[0]; Series se; AxesRenderer ar = null; List al = new ArrayList( ), alRunTimeSeries; EList elBase, elOrthogonal; SeriesDefinition sd = null; int iSI = 0; // SERIES INDEX COUNTER elBase = axPrimaryBase.getSeriesDefinitions( ); if ( elBase.isEmpty( ) ) // NO SERIES DEFINITIONS { return createEmptyInstance( cm, rtc, oComputations ); } else { // ONLY 1 SERIES DEFINITION MAY BE // ASSOCIATED // WITH THE BASE AXIS final SeriesDefinition sdBase = (SeriesDefinition) elBase.get( 0 ); alRunTimeSeries = sdBase.getRunTimeSeries( ); if ( alRunTimeSeries.isEmpty( ) ) { return createEmptyInstance( cm, rtc, oComputations ); } // ONLY 1 SERIES MAY BE // ASSOCIATED WITH THE // BASE SERIES DEFINITION se = (Series) alRunTimeSeries.get( 0 ); ar = ( se.getClass( ) == SeriesImpl.class ) ? new EmptyWithAxes( ) : (AxesRenderer) ps.getRenderer( se.getClass( ) ); // INITIALIZE THE RENDERER ar.set( cm, oComputations, se, axPrimaryBase, sdBase ); ar.set( rtc ); ar.iSeriesIndex = iSI++; al.add( ar ); final Axis[] axaOrthogonal = cwa.getOrthogonalAxes( axPrimaryBase, true ); for ( int i = 0; i < axaOrthogonal.length; i++ ) { elOrthogonal = axaOrthogonal[i].getSeriesDefinitions( ); for ( int j = 0; j < elOrthogonal.size( ); j++ ) { sd = (SeriesDefinition) elOrthogonal.get( j ); alRunTimeSeries = sd.getRunTimeSeries( ); for ( int k = 0; k < alRunTimeSeries.size( ); k++ ) { se = (Series) alRunTimeSeries.get( k ); ar = ( se.getClass( ) == SeriesImpl.class ) ? new EmptyWithAxes( ) : (AxesRenderer) ps.getRenderer( se.getClass( ) ); // INITIALIZE THE RENDERER ar.set( cm, oComputations, se, axaOrthogonal[i], bPaletteByCategory ? sdBase : sd ); ar.iSeriesIndex = iSI++; al.add( ar ); } } } // CONVERT INTO AN ARRAY AS REQUESTED brna = new BaseRenderer[iSI]; for ( int i = 0; i < iSI; i++ ) { ar = (AxesRenderer) al.get( i ); ar.iSeriesCount = iSI; brna[i] = ar; } } } else if ( cm instanceof ChartWithoutAxes ) { final ChartWithoutAxes cwoa = (ChartWithoutAxes) cm; EList elBase = cwoa.getSeriesDefinitions( ); EList elOrthogonal; SeriesDefinition sd, sdBase; List alRuntimeSeries; final Series[] sea = cwoa.getRunTimeSeries( ); Series se; final int iSeriesCount = sea.length; brna = new BaseRenderer[iSeriesCount]; int iSI = 0; // SERIES INDEX COUNTER for ( int i = 0; i < elBase.size( ); i++ ) { sdBase = (SeriesDefinition) elBase.get( i ); alRuntimeSeries = sdBase.getRunTimeSeries( ); // CHECK FOR A SINGLE BASE SERIES ONLY if ( alRuntimeSeries.size( ) != 1 ) { throw new ChartException( ChartEnginePlugin.ID, ChartException.PLUGIN, "exception.illegal.base.runtime.series.count", //$NON-NLS-1$ new Object[]{ new Integer( alRuntimeSeries.size( ) ) }, Messages.getResourceBundle( rtc.getULocale( ) ) ); } se = (Series) alRuntimeSeries.get( 0 ); brna[iSI] = ( se.getClass( ) == SeriesImpl.class ) ? 
new EmptyWithoutAxes( ) : ps.getRenderer( se.getClass( ) ); // INITIALIZE THE RENDERER brna[iSI].set( cm, oComputations, se, sdBase ); brna[iSI].set( rtc ); brna[iSI].iSeriesIndex = iSI++; elOrthogonal = ( (SeriesDefinition) elBase.get( i ) ).getSeriesDefinitions( ); for ( int j = 0; j < elOrthogonal.size( ); j++ ) { sd = (SeriesDefinition) elOrthogonal.get( j ); alRuntimeSeries = sd.getRunTimeSeries( ); for ( int k = 0; k < alRuntimeSeries.size( ); k++ ) { se = (Series) alRuntimeSeries.get( k ); brna[iSI] = ( se.getClass( ) == SeriesImpl.class ) ? new EmptyWithoutAxes( ) : ps.getRenderer( se.getClass( ) ); // INITIALIZE THE RENDERER brna[iSI].set( cm, oComputations, se, bPaletteByCategory ? sdBase : sd ); brna[iSI].iSeriesIndex = iSI++; } } } for ( int k = 0; k < iSI; k++ ) { brna[k].iSeriesCount = iSI; } } return brna; } /** * @return Returns series associated with current renderer. */ public final Series getSeries( ) { return se; } /** * @return Returns chart model associated with current renderer. */ public final Chart getModel( ) { return cm; } /** * @return Returns computation object associated with current renderer. */ public final Object getComputations( ) { return oComputations; } /** * @return Returns device renderer associated with current renderer. */ public final IDeviceRenderer getDevice( ) { return ir; } /** * Renders a 2D or extruded 2D plane as necessary for a given front surface * polygon. Takes into account the correct z-ordering of each plane and * applies basic lighting. This convenience method may be used by series * type rendering extensions if needed. * * @param ipr * A handle to the primitive rendering device * @param oSource * The object wrapped in the polygon rendering event * @param loaFront * The co-ordinates of the front face polygon * @param f * The fill color for the front face * @param lia * The edge color for the polygon * @param dSeriesThickness * The thickness or the extrusion level (for 2.5D or 3D) * * @throws RenderingException */ protected final void renderPlane( IPrimitiveRenderer ipr, Object oSource, Location[] loaFront, Fill f, LineAttributes lia, ChartDimension cd, double dSeriesThickness, boolean bDeferred ) throws ChartException { PolygonRenderEvent pre; if ( cd.getValue( ) == ChartDimension.TWO_DIMENSIONAL ) { // RENDER THE POLYGON pre = (PolygonRenderEvent) ( (EventObjectCache) ipr ).getEventObject( oSource, PolygonRenderEvent.class ); pre.setPoints( loaFront ); pre.setBackground( f ); pre.setOutline( lia ); if ( bDeferred ) { dc.addPlane( pre, PrimitiveRenderEvent.FILL | PrimitiveRenderEvent.DRAW ); } else { ipr.fillPolygon( pre ); ipr.drawPolygon( pre ); } return; } final boolean bSolidColor = f instanceof ColorDefinition; Fill fDarker = null, fBrighter = null; if ( cd.getValue( ) == ChartDimension.TWO_DIMENSIONAL_WITH_DEPTH || cd.getValue( ) == ChartDimension.THREE_DIMENSIONAL ) { fDarker = f; if ( fDarker instanceof ColorDefinition ) { fDarker = ( (ColorDefinition) fDarker ).darker( ); } fBrighter = f; if ( fBrighter instanceof ColorDefinition ) { fBrighter = ( (ColorDefinition) fBrighter ).brighter( ); } } final int nSides = loaFront.length; final Location[][] loaa = new Location[nSides + 1][]; Location[] loa; double dY, dSmallestY = 0; for ( int j, i = 0; i < nSides; i++ ) { j = i + 1; if ( j >= loaFront.length ) j = 0; loa = new Location[4]; loa[0] = LocationImpl.create( loaFront[i].getX( ), loaFront[i].getY( ) ); loa[1] = LocationImpl.create( loaFront[j].getX( ), loaFront[j].getY( ) ); loa[2] = LocationImpl.create( loaFront[j].getX( ) 
+ dSeriesThickness, loaFront[j].getY( ) - dSeriesThickness ); loa[3] = LocationImpl.create( loaFront[i].getX( ) + dSeriesThickness, loaFront[i].getY( ) - dSeriesThickness ); loaa[i] = loa; } loaa[nSides] = loaFront; // SORT ON MULTIPLE KEYS (GREATEST Y, SMALLEST X) double dI, dJ; Location[] loaI, loaJ; for ( int i = 0; i < nSides - 1; i++ ) { loaI = loaa[i]; for ( int j = i + 1; j < nSides; j++ ) { loaJ = loaa[j]; dI = getY( loaI, IConstants.AVERAGE ); dJ = getY( loaJ, IConstants.AVERAGE ); // Use fuzzy comparison here due to possible precision loss // during computation. if ( ChartUtil.mathGT( dJ, dI ) ) // SWAP { loaa[i] = loaJ; loaa[j] = loaI; loaI = loaJ; } else if ( ChartUtil.mathEqual( dJ, dI ) ) { dI = getX( loaI, IConstants.AVERAGE ); dJ = getX( loaJ, IConstants.AVERAGE ); if ( ChartUtil.mathGT( dI, dJ ) ) { loaa[i] = loaJ; loaa[j] = loaI; loaI = loaJ; } } } } int iSmallestYIndex = 0; for ( int i = 0; i < nSides; i++ ) { dY = getY( loaa[i], IConstants.AVERAGE ); if ( i == 0 ) { dSmallestY = dY; } // #192797: Use fuzzy comparison here due to possible precision // loss during computation. else if ( ChartUtil.mathGT( dSmallestY, dY ) ) { dSmallestY = dY; iSmallestYIndex = i; } } ArrayList alModel = new ArrayList( nSides + 1 ); Fill fP; for ( int i = 0; i <= nSides; i++ ) { pre = (PolygonRenderEvent) ( (EventObjectCache) ipr ).getEventObject( oSource, PolygonRenderEvent.class ); pre.setOutline( lia ); pre.setPoints( loaa[i] ); if ( i < nSides ) // OTHER SIDES (UNKNOWN ORDER) ARE DEEP { pre.setDepth( -dSeriesThickness ); } else // FRONT FACE IS NOT DEEP { pre.setDepth( 0 ); } if ( i == nSides ) { fP = f; } else if ( i == iSmallestYIndex ) { fP = fBrighter; } else { fP = fDarker; } pre.setBackground( fP ); if ( bDeferred ) { alModel.add( pre.copy( ) ); } else { ipr.fillPolygon( pre ); } if ( i == nSides ) { } else if ( i == iSmallestYIndex ) { // DRAW A TRANSLUCENT LIGHT GLASS PANE OVER THE BRIGHTER SURFACE // (IF NOT A SOLID COLOR) if ( !bSolidColor ) { pre.setBackground( LIGHT_GLASS ); } if ( bDeferred ) { alModel.add( pre.copy( ) ); } else { ipr.fillPolygon( pre ); } } else { // DRAW A TRANSLUCENT DARK GLASS PANE OVER THE DARKER SURFACE // (IF NOT A SOLID COLOR) if ( !bSolidColor ) { pre.setBackground( DARK_GLASS ); } if ( bDeferred ) { alModel.add( pre.copy( ) ); } else { ipr.fillPolygon( pre ); } } if ( !bDeferred ) { ipr.drawPolygon( pre ); } } if ( !alModel.isEmpty( ) ) { dc.addModel( new WrappedInstruction( getDeferredCache( ), alModel, PrimitiveRenderEvent.FILL ) ); } } /** * Renders planes as 3D presentation. * * @param ipr * @param oSource * @param loaFace * @param f * @param lia * @throws ChartException */ protected final void render3DPlane( IPrimitiveRenderer ipr, Object oSource, List loaFace, Fill f, LineAttributes lia ) throws ChartException { Polygon3DRenderEvent pre = (Polygon3DRenderEvent) ( (EventObjectCache) ipr ).getEventObject( oSource, Polygon3DRenderEvent.class ); pre.setDoubleSided( false ); int nSides = loaFace.size( ); for ( int i = 0; i < nSides; i++ ) { pre.setOutline( lia ); pre.setPoints3D( (Location3D[]) loaFace.get( i ) ); pre.setBackground( f ); dc.addPlane( pre, PrimitiveRenderEvent.FILL | PrimitiveRenderEvent.DRAW ); } } /** * Finds particular Y value from given location list. * * @param loa * Location list. 
* @param iProperty * This value must be one of following: * <ul> * <li>IConstants.MIN * <li>IConstants.MAX * <li>IConstants.AVERAGE * </ul> * * @return */ public static final double getY( Location[] loa, int iProperty ) { int iCount = loa.length; double dY = 0; if ( iProperty == IConstants.MIN ) { dY = loa[0].getY( ); for ( int i = 1; i < iCount; i++ ) { dY = Math.min( dY, loa[i].getY( ) ); } } else if ( iProperty == IConstants.MAX ) { dY = loa[0].getY( ); for ( int i = 1; i < iCount; i++ ) { dY = Math.max( dY, loa[i].getY( ) ); } } else if ( iProperty == IConstants.AVERAGE ) { for ( int i = 0; i < iCount; i++ ) { dY += loa[i].getY( ); } dY /= iCount; } return dY; } /** * Finds particular X value from given location list. * * @param loa * Location list. * @param iProperty * This value must be one of following: * <ul> * <li>IConstants.MIN * <li>IConstants.MAX * <li>IConstants.AVERAGE * </ul> * * @return */ public static final double getX( Location[] loa, int iProperty ) { int iCount = loa.length; double dX = 0; if ( iProperty == IConstants.MIN ) { dX = loa[0].getX( ); for ( int i = 1; i < iCount; i++ ) { dX = Math.min( dX, loa[i].getX( ) ); } } else if ( iProperty == IConstants.MAX ) { dX = loa[0].getX( ); for ( int i = 1; i < iCount; i++ ) { dX = Math.max( dX, loa[i].getX( ) ); } } else if ( iProperty == IConstants.AVERAGE ) { for ( int i = 0; i < iCount; i++ ) { dX += loa[i].getX( ); } dX /= iCount; } return dX; } /** * post-process the triggers. * * @param tg * The Trigger to modify * @param source * The StructureSource associated with the Trigger */ public void processTrigger( Trigger tg, StructureSource source ) { // use user action renderer first. IActionRenderer iar = getRunTimeContext( ).getActionRenderer( ); if ( iar != null ) { iar.processAction( tg.getAction( ), source ); } // internal processing. if ( StructureType.SERIES_DATA_POINT.equals( source.getType( ) ) ) { DataPointHints dph = (DataPointHints) source.getSource( ); if ( tg.getAction( ).getType( ) == ActionType.SHOW_TOOLTIP_LITERAL ) { // BUILD THE VALUE String toolText = ( (TooltipValue) tg.getAction( ).getValue( ) ).getText( ); // if it's null, then use DataPoint label automatically. // !!! DON'T check zero length string here, since this has a // particular meaning to avoid the tooltip. 
if ( toolText == null )// || toolText.length( ) == 0 ) { ( (TooltipValue) tg.getAction( ).getValue( ) ).setText( dph.getDisplayValue( ) ); } } else if ( tg.getAction( ).getType( ) == ActionType.URL_REDIRECT_LITERAL ) { // BUILD A URI final URLValue uv = (URLValue) tg.getAction( ).getValue( ); String sBaseURL = uv.getBaseUrl( ); if ( sBaseURL == null ) { sBaseURL = ""; //$NON-NLS-1$ } final StringBuffer sb = new StringBuffer( sBaseURL ); char c = '?'; if ( sBaseURL.indexOf( c ) != -1 ) { c = '&'; } if ( uv.getBaseParameterName( ) != null && uv.getBaseParameterName( ).length( ) > 0 ) { sb.append( c ); c = '&'; sb.append( URLValueImpl.encode( uv.getBaseParameterName( ) ) ); sb.append( '=' ); sb.append( URLValueImpl.encode( dph.getBaseDisplayValue( ) ) ); } if ( uv.getValueParameterName( ) != null && uv.getValueParameterName( ).length( ) > 0 ) { sb.append( c ); c = '&'; sb.append( URLValueImpl.encode( uv.getValueParameterName( ) ) ); sb.append( '=' ); sb.append( URLValueImpl.encode( dph.getOrthogonalDisplayValue( ) ) ); } if ( uv.getSeriesParameterName( ) != null && uv.getSeriesParameterName( ).length( ) > 0 ) { sb.append( c ); c = '&'; sb.append( URLValueImpl.encode( uv.getSeriesParameterName( ) ) ); sb.append( '=' ); sb.append( URLValueImpl.encode( dph.getSeriesDisplayValue( ) ) ); } uv.setBaseUrl( sb.toString( ) ); } } } /** * @return Returns the current cell bounds associated with current series. * @see #getCellBounds(int) */ protected final Bounds getCellBounds( ) { return getCellBounds( iSeriesIndex ); } /** * Returns the bounds of an individual cell (if the rendered model is a * ChartWithoutAxis and plot is to be split into a grid) or the entire plot * bounds (if the rendered model is a ChartWithAxis). * * @return */ protected final Bounds getCellBounds( int seriesIndex ) { Object obj = getComputations( ); Bounds bo = null; if ( obj instanceof PlotWithoutAxes ) { PlotWithoutAxes pwoa = (PlotWithoutAxes) obj; Coordinates co = pwoa.getCellCoordinates( seriesIndex - 1 ); Size sz = pwoa.getCellSize( ); bo = BoundsImpl.copyInstance( pwoa.getBounds( ) ); bo.setLeft( bo.getLeft( ) + co.getColumn( ) * sz.getWidth( ) ); bo.setTop( bo.getTop( ) + co.getRow( ) * sz.getHeight( ) ); bo.setWidth( sz.getWidth( ) ); bo.setHeight( sz.getHeight( ) ); bo = bo.adjustedInstance( pwoa.getCellInsets( ) ); } else if ( obj instanceof PlotWithAxes ) { PlotWithAxes pwa = (PlotWithAxes) obj; bo = BoundsImpl.copyInstance( pwa.getPlotBounds( ) ); bo = bo.adjustedInstance( pwa.getPlotInsets( ) ); } return bo; } /** * Returns the bounds of the plot area, NOTE this bounds has reduced the * insets. * * @return */ protected final Bounds getPlotBounds( ) { Object obj = getComputations( ); Bounds bo = null; if ( obj instanceof PlotWithoutAxes ) { PlotWithoutAxes pwoa = (PlotWithoutAxes) obj; bo = BoundsImpl.copyInstance( pwoa.getBounds( ) ); bo = bo.adjustedInstance( pwoa.getCellInsets( ) ); } else if ( obj instanceof PlotWithAxes ) { PlotWithAxes pwa = (PlotWithAxes) obj; bo = BoundsImpl.copyInstance( pwa.getPlotBounds( ) ); bo = bo.adjustedInstance( pwa.getPlotInsets( ) ); } return bo; } /** * This convenience method renders the data point label along with the * shadow If there's a need to render the data point label and the shadow * separately, each call should be made separately by calling into the * primitive rendering interface directly. 
*/ public final void renderLabel( Object oSource, int iTextRenderType, Label laDataPoint, Position lp, Location lo, Bounds bo ) throws ChartException { renderLabel( oSource, iTextRenderType, laDataPoint, lp, lo, bo, dc ); } /** * Renderer label with specified <code>DeferredCache</code>. * * @param oSource * @param iTextRenderType * @param laDataPoint * @param lp * @param lo * @param bo * @param _dc * @throws ChartException */ public final void renderLabel( Object oSource, int iTextRenderType, Label laDataPoint, Position lp, Location lo, Bounds bo, DeferredCache _dc ) throws ChartException { final IDeviceRenderer idr = getDevice( ); TextRenderEvent tre = (TextRenderEvent) ( (EventObjectCache) idr ).getEventObject( oSource, TextRenderEvent.class ); if ( iTextRenderType != TextRenderEvent.RENDER_TEXT_IN_BLOCK ) { tre.setTextPosition( Methods.getLabelPosition( lp ) ); tre.setLocation( lo ); } else { tre.setBlockBounds( bo ); tre.setBlockAlignment( null ); } tre.setLabel( laDataPoint ); tre.setAction( iTextRenderType ); if ( _dc == null ) { dc.addLabel( tre ); } else { _dc.addLabel( tre ); } } /** * This method validates the given datapoints. */ protected void validateNullDatapoint( DataPointHints[] dphs ) throws ChartException { if ( dphs == null ) { throw new ChartException( ChartEnginePlugin.ID, ChartException.VALIDATION, "exception.base.orthogonal.null.datapoint", //$NON-NLS-1$ Messages.getResourceBundle( rtc.getULocale( ) ) ); } } /** * This method validates the dataset state from given series rendering * hints. */ protected void validateDataSetCount( ISeriesRenderingHints isrh ) throws ChartException { if ( ( isrh.getDataSetStructure( ) & ISeriesRenderingHints.BASE_ORTHOGONAL_OUT_OF_SYNC ) == ISeriesRenderingHints.BASE_ORTHOGONAL_OUT_OF_SYNC ) { throw new ChartException( ChartEnginePlugin.ID, ChartException.VALIDATION, "exception.base.orthogonal.inconsistent.count", //$NON-NLS-1$ new Object[]{ new Integer( isrh.getBaseDataSet( ).size( ) ), new Integer( isrh.getOrthogonalDataSet( ).size( ) ) }, Messages.getResourceBundle( rtc.getULocale( ) ) ); } } /** * Filters the Null or invalid entry(contains NaN value) from the list. Each * entry should be a double[2] or double[3] array object. * * @param ll * @return */ protected List filterNull( List ll ) { ArrayList al = new ArrayList( ); for ( int i = 0; i < ll.size( ); i++ ) { double[] obj = (double[]) ll.get( i ); if ( obj == null || Double.isNaN( obj[0] ) || Double.isNaN( obj[1] ) ) { continue; } al.add( obj ); } return al; } /** * Filters the Null or invalid entry(contains NaN value) from the array. * * @param ll * @return */ protected Location[] filterNull( Location[] ll ) { ArrayList al = new ArrayList( ); for ( int i = 0; i < ll.length; i++ ) { if ( Double.isNaN( ll[i].getX( ) ) || Double.isNaN( ll[i].getY( ) ) ) { continue; } al.add( ll[i] ); } if ( ll instanceof Location3D[] ) { return (Location3D[]) al.toArray( new Location3D[al.size( )] ); } return (Location[]) al.toArray( new Location[al.size( )] ); } /** * Check the if the given value is NaN. * * @param value * @return */ protected static boolean isNaN( Object value ) { return ( value == null ) || ( value instanceof Number && Double.isNaN( ( (Number) value ).doubleValue( ) ) ); } /** * Returns if the right-left mode is enabled. * * @return */ public boolean isRightToLeft( ) { if ( rtc == null ) { return false; } return rtc.isRightToLeft( ); } /** * Returns if current palette is from the category series. 
* * @return */ protected boolean isPaletteByCategory( ) { return ( cm.getLegend( ).getItemType( ).getValue( ) == LegendItemType.CATEGORIES ); } /** * Switch Anchor value due to right-left setting. * * @param anchor * @return */ public Anchor switchAnchor( Anchor anchor ) { if ( anchor != null && isRightToLeft( ) ) { switch ( anchor.getValue( ) ) { case Anchor.EAST : anchor = Anchor.WEST_LITERAL; break; case Anchor.NORTH_EAST : anchor = Anchor.NORTH_WEST_LITERAL; break; case Anchor.SOUTH_EAST : anchor = Anchor.SOUTH_WEST_LITERAL; break; case Anchor.WEST : anchor = Anchor.EAST_LITERAL; break; case Anchor.NORTH_WEST : anchor = Anchor.NORTH_EAST_LITERAL; break; case Anchor.SOUTH_WEST : anchor = Anchor.SOUTH_EAST_LITERAL; break; } } return anchor; } /** * Switch Position value due to right-left setting. * * @param po * @return */ public Position switchPosition( Position po ) { if ( po != null && isRightToLeft( ) ) { if ( po == Position.RIGHT_LITERAL ) { po = Position.LEFT_LITERAL; } else if ( po == Position.LEFT_LITERAL ) { po = Position.RIGHT_LITERAL; } } return po; } /** * Switch TextAlignment value due to right-left setting. * * @param ta * @return */ public TextAlignment switchTextAlignment( TextAlignment ta ) { if ( ta != null && isRightToLeft( ) ) { if ( ta.getHorizontalAlignment( ) == HorizontalAlignment.LEFT_LITERAL ) { ta.setHorizontalAlignment( HorizontalAlignment.RIGHT_LITERAL ); } else if ( ta.getHorizontalAlignment( ) == HorizontalAlignment.RIGHT_LITERAL ) { ta.setHorizontalAlignment( HorizontalAlignment.LEFT_LITERAL ); } } return ta; } /** * Returns if interactivity is enabled on the model. * * @return */ public boolean isInteractivityEnabled( ) { return ( cm.getInteractivity( ) == null || cm.getInteractivity( ) .isEnable( ) ); } /** * Returns if the corresponding category entry is filtered as minslice in * legend. Subclass should override this method to implement their own * legend strategy. * * @return return null if no minslice applied or minslice feature is not * supported. */ public int[] getFilteredMinSliceEntry( DataSetIterator dsi ) { // no filter by default. return null; } /** * Updates the tranlucency of the fill according to series setting. * * @param fill * @param se */ public void updateTranslucency( Fill fill, Series se ) { if ( se != null && se.isTranslucent( ) ) { if ( fill instanceof ColorDefinition ) { ( (ColorDefinition) fill ).setTransparency( (int) ( OVERRIDE_TRANSPARENCY * 255d / 100d ) ); } else if ( fill instanceof MultipleFill ) { for ( int i = 0; i < ( (MultipleFill) fill ).getFills( ).size( ); i++ ) { updateTranslucency( (Fill) ( (MultipleFill) fill ).getFills( ) .get( i ), se ); } } } } /** * Set current <code>DeferredCacheManager</code> instance. * * @param dcm specified instance of <code>DeferredCacheMananger</code>. */ public void setDeferredCacheManager( DeferredCacheManager dcm ) { fDeferredCacheManager = dcm; } /** * Returns <code>DeferredCacheManager</code> instance. * * @return <code>DeferredCacheManager</code> instance. 
*/ public DeferredCacheManager getDeferredCacheManager( ) { return fDeferredCacheManager; } /** * Creates the interaction event for triggers list * * @param iSource * @param elTriggers * @param ipr * @return */ protected final InteractionEvent createEvent( StructureSource iSource, List elTriggers, IPrimitiveRenderer ipr ) { final InteractionEvent iev = new InteractionEvent( iSource ); for ( int t = 0; t < elTriggers.size( ); t++ ) { Trigger tg = TriggerImpl.copyInstance( (Trigger) elTriggers.get( t ) ); processTrigger( tg, iSource ); iev.addTrigger( tg ); } return iev; } /** * Renders the interactivity hotspot for a data point * * @param ipr * @param dph * @param pre * @throws ChartException */ protected final void renderInteractivity( IPrimitiveRenderer ipr, DataPointHints dph, PrimitiveRenderEvent pre ) throws ChartException { if ( isInteractivityEnabled( ) && dph != null ) { // PROCESS 'SERIES LEVEL' TRIGGERS USING SOURCE='bs' final EList elTriggers = getSeries( ).getTriggers( ); if ( !elTriggers.isEmpty( ) ) { final StructureSource iSource = WrappedStructureSource.createSeriesDataPoint( getSeries( ), dph ); final InteractionEvent iev = createEvent( iSource, elTriggers, ipr ); iev.setHotSpot( pre ); ipr.enableInteraction( iev ); } } } protected final boolean isFirstVisibleSeries( ) { if ( iSeriesIndex == 0 ) { return false; } for ( int i = 1; i < iSeriesCount; i++ ) { BaseRenderer renderer = getRenderer( i ); if ( renderer.getSeries( ).isVisible( ) ) { return i == iSeriesIndex; } } return false; } protected final boolean isLastVisibleSeries( ) { if ( iSeriesIndex == 0 ) { return false; } for ( int i = iSeriesCount - 1; i > 0; i-- ) { BaseRenderer renderer = getRenderer( i ); if ( renderer.getSeries( ).isVisible( ) ) { return i == iSeriesIndex; } } return false; } }
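// --- Illustrative sketch (not part of the BIRT sources above) ---
// The extruded side faces built in renderPlane() are depth-sorted on two keys:
// greatest average Y first, then smallest average X, with fuzzy comparisons to
// absorb floating-point error (BIRT uses ChartUtil.mathGT/mathEqual). The class
// below is a hypothetical, self-contained re-statement of that ordering rule on
// plain double arrays, purely to illustrate the technique.
import java.util.Arrays;
import java.util.Comparator;

class FaceDepthSortSketch {

    // assumed tolerance; stands in for ChartUtil's fuzzy comparison
    private static final double EPS = 1e-10;

    static double average(double[] values) {
        double sum = 0;
        for (double v : values) {
            sum += v;
        }
        return sum / values.length;
    }

    /** Sorts faces so that greater average Y comes first; ties broken by smaller average X. */
    static void sortFaces(double[][][] faces) {
        Arrays.sort(faces, new Comparator<double[][]>() {
            public int compare(double[][] a, double[][] b) {
                double yA = average(a[1]), yB = average(b[1]);
                if (Math.abs(yA - yB) > EPS) {
                    return yA > yB ? -1 : 1;   // greatest average Y first
                }
                double xA = average(a[0]), xB = average(b[0]);
                return Double.compare(xA, xB); // then smallest average X first
            }
        });
    }

    public static void main(String[] args) {
        // each face is { xValues, yValues }
        double[][][] faces = {
            { { 0, 1 }, { 5, 5 } },
            { { 2, 3 }, { 9, 9 } },
            { { 4, 5 }, { 9, 9 } },
        };
        sortFaces(faces);
        for (double[][] f : faces) {
            System.out.println(Arrays.deepToString(f));
        }
    }
}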
package org.innovateuk.ifs.application.repository; import org.innovateuk.ifs.application.domain.ApplicationStatistics; import org.innovateuk.ifs.application.resource.AssessorCountSummaryResource; import org.innovateuk.ifs.invite.domain.CompetitionParticipantRole; import org.innovateuk.ifs.workflow.resource.State; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; import org.springframework.data.jpa.repository.Query; import org.springframework.data.repository.PagingAndSortingRepository; import org.springframework.data.repository.query.Param; import java.util.Collection; import java.util.List; public interface ApplicationStatisticsRepository extends PagingAndSortingRepository<ApplicationStatistics, Long> { String APPLICATION_FILTER = "SELECT a FROM ApplicationStatistics a WHERE a.competition = :compId " + "AND (a.applicationProcess.activityState.state IN :states) " + "AND (str(a.id) LIKE CONCAT('%', :filter, '%'))"; List<ApplicationStatistics> findByCompetitionAndApplicationProcessActivityStateStateIn(long competitionId, Collection<State> applicationStates); @Query(APPLICATION_FILTER) Page<ApplicationStatistics> findByCompetitionAndApplicationProcessActivityStateStateIn(@Param("compId") long competitionId, @Param("states") Collection<State> applicationStates, @Param("filter") String filter, Pageable pageable); // TODO IFS-399 pass in the states as enum sets from the service, rather than hardcoding strings String REJECTED_AND_SUBMITTED_STATES_STRING = "(org.innovateuk.ifs.workflow.resource.State.REJECTED,org.innovateuk.ifs.workflow.resource.State.WITHDRAWN, org.innovateuk.ifs.workflow.resource.State.SUBMITTED)"; String NOT_ACCEPTED_OR_SUBMITTED_STATES_STRING = "(org.innovateuk.ifs.workflow.resource.State.PENDING,org.innovateuk.ifs.workflow.resource.State.REJECTED," + "org.innovateuk.ifs.workflow.resource.State.WITHDRAWN,org.innovateuk.ifs.workflow.resource.State.CREATED,org.innovateuk.ifs.workflow.resource.State.SUBMITTED)"; String SUBMITTED_STATES_STRING = "(org.innovateuk.ifs.workflow.resource.State.SUBMITTED)"; @Query("SELECT NEW org.innovateuk.ifs.application.resource.AssessorCountSummaryResource(" + " user.id, " + " concat(user.firstName, ' ', user.lastName), " + " profile.skillsAreas, " + " sum(case when activityState.state NOT IN " + REJECTED_AND_SUBMITTED_STATES_STRING + " THEN 1 ELSE 0 END), " + // total assigned " sum(case when competitionParticipant.competition.id = :compId AND activityState.state NOT IN " + REJECTED_AND_SUBMITTED_STATES_STRING + " THEN 1 ELSE 0 END), " + // assigned " sum(case when competitionParticipant.competition.id = :compId AND activityState.state NOT IN " + NOT_ACCEPTED_OR_SUBMITTED_STATES_STRING + " THEN 1 ELSE 0 END), " + // accepted " sum(case when competitionParticipant.competition.id = :compId AND activityState.state IN " + SUBMITTED_STATES_STRING + " THEN 1 ELSE 0 END) " + // submitted ") " + "FROM User user " + "JOIN CompetitionParticipant competitionParticipant ON competitionParticipant.user = user " + "JOIN Profile profile ON profile.id = user.profileId " + "LEFT JOIN Application application ON application.competition = competitionParticipant.competition " + // AND application.applicationProcess.activityState.state IN :submittedStates " + "LEFT JOIN ProcessRole processRole ON processRole.user = user AND processRole.applicationId = application.id " + "LEFT JOIN Assessment assessment ON assessment.participant = processRole.id AND assessment.target = application " + "LEFT JOIN ActivityState activityState ON 
assessment.activityState = activityState.id " + "WHERE " + " competitionParticipant.status = org.innovateuk.ifs.invite.domain.ParticipantStatus.ACCEPTED AND " + " competitionParticipant.role = 'ASSESSOR' " + "GROUP BY user " + "HAVING sum(case when competitionParticipant.competition.id = :compId THEN 1 ELSE 0 END) > 0") Page<AssessorCountSummaryResource> getAssessorCountSummaryByCompetition(@Param("compId") long competitionId, Pageable pageable); }
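// --- Hypothetical usage sketch (not part of the IFS codebase) ---
// Shows how the paged assessor-count query declared above could be called from a
// Spring-managed service. PageRequest.of(...) assumes Spring Data Commons 2.x; on
// older versions the PageRequest constructor would be used instead. The service
// class name and page size are illustrative assumptions.
import org.innovateuk.ifs.application.repository.ApplicationStatisticsRepository;
import org.innovateuk.ifs.application.resource.AssessorCountSummaryResource;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.stereotype.Service;

@Service
class AssessorCountSummaryExampleService {

    @Autowired
    private ApplicationStatisticsRepository applicationStatisticsRepository;

    /** Returns the first page of per-assessor counts for the given competition. */
    Page<AssessorCountSummaryResource> firstPage(long competitionId) {
        // 20 assessors per page; the JPQL above groups by user and filters on :compId
        return applicationStatisticsRepository.getAssessorCountSummaryByCompetition(
                competitionId, PageRequest.of(0, 20));
    }
}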
package org.jfree.data.time; import java.util.Calendar; import java.util.TimeZone; import org.jfree.data.DomainInfo; import org.jfree.data.Range; import org.jfree.data.RangeInfo; import org.jfree.data.general.SeriesChangeEvent; import org.jfree.data.xy.AbstractIntervalXYDataset; import org.jfree.data.xy.IntervalXYDataset; /** * A dynamic dataset. * <p> * Like FastTimeSeriesCollection, this class is a functional replacement * for JFreeChart's TimeSeriesCollection _and_ TimeSeries classes. * FastTimeSeriesCollection is appropriate for a fixed time range; for * real-time applications this subclass adds the ability to append new * data and discard the oldest. * In this class, the arrays used in FastTimeSeriesCollection become FIFO's. * NOTE:As presented here, all data is assumed &gt;= 0, an assumption which is * embodied only in methods associated with interface RangeInfo. */ public class DynamicTimeSeriesCollection extends AbstractIntervalXYDataset implements IntervalXYDataset, DomainInfo, RangeInfo { /** * Useful constant for controlling the x-value returned for a time * period. */ public static final int START = 0; /** * Useful constant for controlling the x-value returned for a time period. */ public static final int MIDDLE = 1; /** * Useful constant for controlling the x-value returned for a time period. */ public static final int END = 2; /** The maximum number of items for each series (can be overridden). */ private int maximumItemCount = 2000; // an arbitrary safe default value /** The history count. */ protected int historyCount; /** Storage for the series keys. */ private Comparable[] seriesKeys; /** The time period class - barely used, and could be removed (DG). */ private Class timePeriodClass = Minute.class; // default value; /** Storage for the x-values. */ protected RegularTimePeriod[] pointsInTime; /** The number of series. */ private int seriesCount; /** * A wrapper for a fixed array of float values. */ protected class ValueSequence { /** Storage for the float values. */ float[] dataPoints; /** * Default constructor: */ public ValueSequence() { this(DynamicTimeSeriesCollection.this.maximumItemCount); } /** * Creates a sequence with the specified length. * * @param length the length. */ public ValueSequence(int length) { this.dataPoints = new float[length]; for (int i = 0; i < length; i++) { this.dataPoints[i] = 0.0f; } } /** * Enters data into the storage array. * * @param index the index. * @param value the value. */ public void enterData(int index, float value) { this.dataPoints[index] = value; } /** * Returns a value from the storage array. * * @param index the index. * * @return The value. */ public float getData(int index) { return this.dataPoints[index]; } } /** An array for storing the objects that represent each series. */ protected ValueSequence[] valueHistory; /** A working calendar (to recycle) */ protected Calendar workingCalendar; /** * The position within a time period to return as the x-value (START, * MIDDLE or END). */ private int position; /** * A flag that indicates that the domain is 'points in time'. If this flag * is true, only the x-value is used to determine the range of values in * the domain, the start and end x-values are ignored. */ private boolean domainIsPointsInTime; /** index for mapping: points to the oldest valid time and data. */ private int oldestAt; // as a class variable, initializes == 0 /** Index of the newest data item. */ private int newestAt; // cached values used for interface DomainInfo: /** the # of msec by which time advances. 
*/ private long deltaTime; /** Cached domain start (for use by DomainInfo). */ private Long domainStart; /** Cached domain end (for use by DomainInfo). */ private Long domainEnd; /** Cached domain range (for use by DomainInfo). */ private Range domainRange; // Cached values used for interface RangeInfo: (note minValue pinned at 0) // A single set of extrema covers the entire SeriesCollection /** The minimum value. */ private Float minValue = 0.0f; /** The maximum value. */ private Float maxValue = null; /** The value range. */ private Range valueRange; // autoinit's to null. /** * Constructs a dataset with capacity for N series, tied to default * timezone. * * @param nSeries the number of series to be accommodated. * @param nMoments the number of TimePeriods to be spanned. */ public DynamicTimeSeriesCollection(int nSeries, int nMoments) { this(nSeries, nMoments, new Millisecond(), TimeZone.getDefault()); this.newestAt = nMoments - 1; } /** * Constructs an empty dataset, tied to a specific timezone. * * @param nSeries the number of series to be accommodated * @param nMoments the number of TimePeriods to be spanned * @param zone the timezone. */ public DynamicTimeSeriesCollection(int nSeries, int nMoments, TimeZone zone) { this(nSeries, nMoments, new Millisecond(), zone); this.newestAt = nMoments - 1; } /** * Creates a new dataset. * * @param nSeries the number of series. * @param nMoments the number of items per series. * @param timeSample a time period sample. */ public DynamicTimeSeriesCollection(int nSeries, int nMoments, RegularTimePeriod timeSample) { this(nSeries, nMoments, timeSample, TimeZone.getDefault()); } /** * Creates a new dataset. * * @param nSeries the number of series. * @param nMoments the number of items per series. * @param timeSample a time period sample. * @param zone the time zone. */ public DynamicTimeSeriesCollection(int nSeries, int nMoments, RegularTimePeriod timeSample, TimeZone zone) { // the first initialization must precede creation of the ValueSet array: this.maximumItemCount = nMoments; // establishes length of each array this.historyCount = nMoments; this.seriesKeys = new Comparable[nSeries]; // initialize the members of "seriesNames" array so they won't be null: for (int i = 0; i < nSeries; i++) { this.seriesKeys[i] = ""; } this.newestAt = nMoments - 1; this.valueHistory = new ValueSequence[nSeries]; this.timePeriodClass = timeSample.getClass(); /// Expand the following for all defined TimePeriods: if (this.timePeriodClass == Millisecond.class) { this.pointsInTime = new Millisecond[nMoments]; } else if (this.timePeriodClass == Second.class) { this.pointsInTime = new Second[nMoments]; } else if (this.timePeriodClass == Minute.class) { this.pointsInTime = new Minute[nMoments]; } else if (this.timePeriodClass == Hour.class) { this.pointsInTime = new Hour[nMoments]; } /// .. etc.... this.workingCalendar = Calendar.getInstance(zone); this.position = START; this.domainIsPointsInTime = true; } /** * Fill the pointsInTime with times using TimePeriod.next(): * Will silently return if the time array was already populated. * * Also computes the data cached for later use by * methods implementing the DomainInfo interface: * * @param start the start. * * @return ??. 
*/ public synchronized long setTimeBase(RegularTimePeriod start) { if (this.pointsInTime[0] == null) { this.pointsInTime[0] = start; for (int i = 1; i < this.historyCount; i++) { this.pointsInTime[i] = this.pointsInTime[i - 1].next(); } } long oldestL = this.pointsInTime[0].getFirstMillisecond( this.workingCalendar); long nextL = this.pointsInTime[1].getFirstMillisecond( this.workingCalendar); this.deltaTime = nextL - oldestL; this.oldestAt = 0; this.newestAt = this.historyCount - 1; findDomainLimits(); return this.deltaTime; } /** * Finds the domain limits. Note: this doesn't need to be synchronized * because it's called from within another method that already is. */ protected void findDomainLimits() { long startL = getOldestTime().getFirstMillisecond(this.workingCalendar); long endL; if (this.domainIsPointsInTime) { endL = getNewestTime().getFirstMillisecond(this.workingCalendar); } else { endL = getNewestTime().getLastMillisecond(this.workingCalendar); } this.domainStart = startL; this.domainEnd = endL; this.domainRange = new Range(startL, endL); } /** * Returns the x position type (START, MIDDLE or END). * * @return The x position type. */ public int getPosition() { return this.position; } /** * Sets the x position type (START, MIDDLE or END). * * @param position The x position type. */ public void setPosition(int position) { this.position = position; } /** * Adds a series to the dataset. Only the y-values are supplied, the * x-values are specified elsewhere. * * @param values the y-values. * @param seriesNumber the series index (zero-based). * @param seriesKey the series key. * * Use this as-is during setup only, or add the synchronized keyword around * the copy loop. */ public void addSeries(float[] values, int seriesNumber, Comparable seriesKey) { invalidateRangeInfo(); int i; if (values == null) { throw new IllegalArgumentException("TimeSeriesDataset.addSeries(): " + "cannot add null array of values."); } if (seriesNumber >= this.valueHistory.length) { throw new IllegalArgumentException("TimeSeriesDataset.addSeries(): " + "cannot add more series than specified in c'tor"); } if (this.valueHistory[seriesNumber] == null) { this.valueHistory[seriesNumber] = new ValueSequence(this.historyCount); this.seriesCount++; } // But if that series array already exists, just overwrite its contents // Avoid IndexOutOfBoundsException: int srcLength = values.length; int copyLength = this.historyCount; boolean fillNeeded = false; if (srcLength < this.historyCount) { fillNeeded = true; copyLength = srcLength; } for (i = 0; i < copyLength; i++) { // deep copy from values[], caller // can safely discard that array this.valueHistory[seriesNumber].enterData(i, values[i]); } if (fillNeeded) { for (i = copyLength; i < this.historyCount; i++) { this.valueHistory[seriesNumber].enterData(i, 0.0f); } } if (seriesKey != null) { this.seriesKeys[seriesNumber] = seriesKey; } fireSeriesChanged(); } /** * Sets the name of a series. If planning to add values individually. * * @param seriesNumber the series. * @param key the new key. */ public void setSeriesKey(int seriesNumber, Comparable key) { this.seriesKeys[seriesNumber] = key; } /** * Adds a value to a series. * * @param seriesNumber the series index. * @param index ??. * @param value the value. 
*/ public void addValue(int seriesNumber, int index, float value) { invalidateRangeInfo(); if (seriesNumber >= this.valueHistory.length) { throw new IllegalArgumentException( "TimeSeriesDataset.addValue(): series " + seriesNumber + " unspecified in c'tor" ); } if (this.valueHistory[seriesNumber] == null) { this.valueHistory[seriesNumber] = new ValueSequence(this.historyCount); this.seriesCount++; } // But if that series array already exists, just overwrite its contents //synchronized(this) this.valueHistory[seriesNumber].enterData(index, value); fireSeriesChanged(); } /** * Returns the number of series in the collection. * * @return The series count. */ @Override public int getSeriesCount() { return this.seriesCount; } /** * Returns the number of items in a series. * <p> * For this implementation, all series have the same number of items. * * @param series the series index (zero-based). * * @return The item count. */ @Override public int getItemCount(int series) { // all arrays equal length, // so ignore argument: return this.historyCount; } // Methods for managing the FIFO's: /** * Re-map an index, for use in retrieving data. * * @param toFetch the index. * * @return The translated index. */ protected int translateGet(int toFetch) { if (this.oldestAt == 0) { return toFetch; // no translation needed } // else [implicit here] int newIndex = toFetch + this.oldestAt; if (newIndex >= this.historyCount) { newIndex -= this.historyCount; } return newIndex; } /** * Returns the actual index to a time offset by "delta" from newestAt. * * @param delta the delta. * * @return The offset. */ public int offsetFromNewest(int delta) { return wrapOffset(this.newestAt + delta); } /** * Returns the actual index to a time offset by "delta" from oldestAt. * * @param delta the delta. * * @return The offset. */ public int offsetFromOldest(int delta) { return wrapOffset(this.oldestAt + delta); } /** * Wraps an index into the valid range [0, historyCount). * * @param protoIndex the index. * * @return The wrapped index. */ protected int wrapOffset(int protoIndex) { int tmp = protoIndex; if (tmp >= this.historyCount) { tmp -= this.historyCount; } else if (tmp < 0) { tmp += this.historyCount; } return tmp; } /** * Adjust the array offset as needed when a new time-period is added: * Increments the indices "oldestAt" and "newestAt", mod(array length), * zeroes the series values at newestAt, returns the new TimePeriod. * * @return The new time period. 
*/ public synchronized RegularTimePeriod advanceTime() { RegularTimePeriod nextInstant = this.pointsInTime[this.newestAt].next(); this.newestAt = this.oldestAt; // newestAt takes value previously held // by oldestAT // if the oldest data contained a maximum Y-value, invalidate the stored // Y-max and Y-range data: boolean extremaChanged = false; float oldMax = 0.0f; if (this.maxValue != null) { oldMax = this.maxValue; } for (int s = 0; s < getSeriesCount(); s++) { if (this.valueHistory[s].getData(this.oldestAt) == oldMax) { extremaChanged = true; } if (extremaChanged) { break; } } /*** If data can be < 0, add code here to check the minimum **/ if (extremaChanged) { invalidateRangeInfo(); } // wipe the next (about to be used) set of data slots float wiper = (float) 0.0; for (int s = 0; s < getSeriesCount(); s++) { this.valueHistory[s].enterData(this.newestAt, wiper); } // Update the array of TimePeriods: this.pointsInTime[this.newestAt] = nextInstant; // Now advance "oldestAt", wrapping at end of the array this.oldestAt++; if (this.oldestAt >= this.historyCount) { this.oldestAt = 0; } // Update the domain limits: long startL = this.domainStart; //(time is kept in msec) this.domainStart = startL + this.deltaTime; long endL = this.domainEnd; this.domainEnd = endL + this.deltaTime; this.domainRange = new Range(startL, endL); fireSeriesChanged(); return nextInstant; } // If data can be < 0, the next 2 methods should be modified /** * Invalidates the range info. */ public void invalidateRangeInfo() { this.maxValue = null; this.valueRange = null; } /** * Returns the maximum value. * * @return The maximum value. */ protected double findMaxValue() { double max = 0.0f; for (int s = 0; s < getSeriesCount(); s++) { for (int i = 0; i < this.historyCount; i++) { double tmp = getYValue(s, i); if (tmp > max) { max = tmp; } } } return max; } /** End, positive-data-only code **/ /** * Returns the index of the oldest data item. * * @return The index. */ public int getOldestIndex() { return this.oldestAt; } /** * Returns the index of the newest data item. * * @return The index. */ public int getNewestIndex() { return this.newestAt; } // appendData() writes new data at the index position given by newestAt/ // When adding new data dynamically, use advanceTime(), followed by this: /** * Appends new data. * * @param newData the data. */ public void appendData(float[] newData) { int nDataPoints = newData.length; if (nDataPoints > this.valueHistory.length) { throw new IllegalArgumentException( "More data than series to put them in"); } int s; // index to select the "series" for (s = 0; s < nDataPoints; s++) { // check whether the "valueHistory" array member exists; if not, // create them: if (this.valueHistory[s] == null) { this.valueHistory[s] = new ValueSequence(this.historyCount); } this.valueHistory[s].enterData(this.newestAt, newData[s]); } fireSeriesChanged(); } /** * Appends data at specified index, for loading up with data from file(s). 
* * @param newData the data * @param insertionIndex the index value at which to put it * @param refresh value of n in "refresh the display on every nth call" * (ignored if <= 0 ) */ public void appendData(float[] newData, int insertionIndex, int refresh) { int nDataPoints = newData.length; if (nDataPoints > this.valueHistory.length) { throw new IllegalArgumentException( "More data than series to put them in"); } for (int s = 0; s < nDataPoints; s++) { if (this.valueHistory[s] == null) { this.valueHistory[s] = new ValueSequence(this.historyCount); } this.valueHistory[s].enterData(insertionIndex, newData[s]); } if (refresh > 0) { insertionIndex++; if (insertionIndex % refresh == 0) { fireSeriesChanged(); } } } /** * Returns the newest time. * * @return The newest time. */ public RegularTimePeriod getNewestTime() { return this.pointsInTime[this.newestAt]; } /** * Returns the oldest time. * * @return The oldest time. */ public RegularTimePeriod getOldestTime() { return this.pointsInTime[this.oldestAt]; } /** * Returns the x-value. * * @param series the series index (zero-based). * @param item the item index (zero-based). * * @return The value. */ // getXxx() ftns can ignore the "series" argument: // Don't synchronize this!! Instead, synchronize the loop that calls it. @Override public Number getX(int series, int item) { RegularTimePeriod tp = this.pointsInTime[translateGet(item)]; return getX(tp); } /** * Returns the y-value. * * @param series the series index (zero-based). * @param item the item index (zero-based). * * @return The value. */ @Override public double getYValue(int series, int item) { // Don't synchronize this!! // Instead, synchronize the loop that calls it. ValueSequence values = this.valueHistory[series]; return values.getData(translateGet(item)); } /** * Returns the y-value. * * @param series the series index (zero-based). * @param item the item index (zero-based). * * @return The value. */ @Override public Number getY(int series, int item) { return new Float(getYValue(series, item)); } /** * Returns the start x-value. * * @param series the series index (zero-based). * @param item the item index (zero-based). * * @return The value. */ @Override public Number getStartX(int series, int item) { RegularTimePeriod tp = this.pointsInTime[translateGet(item)]; return tp.getFirstMillisecond(this.workingCalendar); } /** * Returns the end x-value. * * @param series the series index (zero-based). * @param item the item index (zero-based). * * @return The value. */ @Override public Number getEndX(int series, int item) { RegularTimePeriod tp = this.pointsInTime[translateGet(item)]; return tp.getLastMillisecond(this.workingCalendar); } /** * Returns the start y-value. * * @param series the series index (zero-based). * @param item the item index (zero-based). * * @return The value. */ @Override public Number getStartY(int series, int item) { return getY(series, item); } /** * Returns the end y-value. * * @param series the series index (zero-based). * @param item the item index (zero-based). * * @return The value. */ @Override public Number getEndY(int series, int item) { return getY(series, item); } /* // "Extras" found useful when analyzing/verifying class behavior: public Number getUntranslatedXValue(int series, int item) { return super.getXValue(series, item); } public float getUntranslatedY(int series, int item) { return super.getY(series, item); } */ /** * Returns the key for a series. * * @param series the series index (zero-based). * * @return The key. 
*/ @Override public Comparable getSeriesKey(int series) { return this.seriesKeys[series]; } /** * Sends a {@link SeriesChangeEvent} to all registered listeners. */ protected void fireSeriesChanged() { seriesChanged(new SeriesChangeEvent(this)); } // The next 3 functions override the base-class implementation of // the DomainInfo interface. Using saved limits (updated by // each updateTime() call), improves performance. /** * Returns the minimum x-value in the dataset. * * @param includeInterval a flag that determines whether or not the * x-interval is taken into account. * * @return The minimum value. */ @Override public double getDomainLowerBound(boolean includeInterval) { return this.domainStart.doubleValue(); // a Long kept updated by advanceTime() } /** * Returns the maximum x-value in the dataset. * * @param includeInterval a flag that determines whether or not the * x-interval is taken into account. * * @return The maximum value. */ @Override public double getDomainUpperBound(boolean includeInterval) { return this.domainEnd.doubleValue(); // a Long kept updated by advanceTime() } /** * Returns the range of the values in this dataset's domain. * * @param includeInterval a flag that determines whether or not the * x-interval is taken into account. * * @return The range. */ @Override public Range getDomainBounds(boolean includeInterval) { if (this.domainRange == null) { findDomainLimits(); } return this.domainRange; } /** * Returns the x-value for a time period. * * @param period the period. * * @return The x-value. */ private long getX(RegularTimePeriod period) { switch (this.position) { case (START) : return period.getFirstMillisecond(this.workingCalendar); case (MIDDLE) : return period.getMiddleMillisecond(this.workingCalendar); case (END) : return period.getLastMillisecond(this.workingCalendar); default: return period.getMiddleMillisecond(this.workingCalendar); } } // The next 3 functions implement the RangeInfo interface. // Using saved limits (updated by each updateTime() call) significantly // improves performance. WARNING: this code makes the simplifying // assumption that data is never negative. Expand as needed for the // general case. /** * Returns the minimum range value. * * @param includeInterval a flag that determines whether or not the * y-interval is taken into account. * * @return The minimum range value. */ @Override public double getRangeLowerBound(boolean includeInterval) { double result = Double.NaN; if (this.minValue != null) { result = this.minValue.doubleValue(); } return result; } /** * Returns the maximum range value. * * @param includeInterval a flag that determines whether or not the * y-interval is taken into account. * * @return The maximum range value. */ @Override public double getRangeUpperBound(boolean includeInterval) { double result = Double.NaN; if (this.maxValue != null) { result = this.maxValue.doubleValue(); } return result; } /** * Returns the value range. * * @param includeInterval a flag that determines whether or not the * y-interval is taken into account. * * @return The range. */ @Override public Range getRangeBounds(boolean includeInterval) { if (this.valueRange == null) { double max = getRangeUpperBound(includeInterval); this.valueRange = new Range(0.0, max); } return this.valueRange; } }
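// --- Hedged usage sketch for the collection above ---
// One series over a 60-sample rolling window of Second periods, showing
// setTimeBase(), addSeries(), advanceTime() and appendData() as declared in the
// class. This is an illustration written for this document, not taken from
// JFreeChart's own demos; the series key and values are arbitrary.
import org.jfree.data.time.DynamicTimeSeriesCollection;
import org.jfree.data.time.Second;

class DynamicTimeSeriesCollectionSketch {
    public static void main(String[] args) {
        DynamicTimeSeriesCollection dataset =
                new DynamicTimeSeriesCollection(1, 60, new Second());
        dataset.setTimeBase(new Second());     // fills pointsInTime and caches the domain limits

        float[] initial = new float[60];       // shorter arrays are zero-padded by addSeries()
        dataset.addSeries(initial, 0, "Samples");

        // Real-time style update: advance the window by one period, then write the newest value.
        dataset.advanceTime();
        dataset.appendData(new float[] { 42.0f });

        System.out.println("items per series: " + dataset.getItemCount(0));
        System.out.println("newest y-value: "
                + dataset.getYValue(0, dataset.getItemCount(0) - 1));
    }
}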
package summarizer.receiver; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.opencompare.api.java.AbstractFeature; import org.opencompare.api.java.Cell; import org.opencompare.api.java.Feature; import org.opencompare.api.java.PCM; import org.opencompare.api.java.PCMContainer; import org.opencompare.api.java.Product; import org.opencompare.api.java.impl.io.KMFJSONLoader; import org.opencompare.api.java.io.PCMLoader; public class Receiver { PCM pcm; public Receiver(){ this.pcm = null; } /** * * @return pcm */ public PCM getPCM(){ return pcm; } /** * Sets the pcm from the given PCM file * * @param pcmFile * @throws IOException */ public void setPCM(File pcmFile) throws IOException{ PCMLoader loader = new KMFJSONLoader(); List<PCMContainer> pcmContainers = loader.load(pcmFile); this.pcm = pcmContainers.get(0).getPcm(); } /** * Reduces the pcm by removing the features not chosen by the user * * @param featuresChooseList List of the features to keep */ public void reduceFeature(List<Integer> featuresChooseList){ HashMap<Integer, Feature> listeFeatures = new HashMap<>(); int featCpt = 0; for (Feature feature : pcm.getConcreteFeatures()) { featCpt++; listeFeatures.put(featCpt, feature); } if(featuresChooseList.size() > 0){ // Remove the features chosen by the user from the feature map for(int i: featuresChooseList){ listeFeatures.remove(i); } // Remove the features left in the map from the pcm for(int key: listeFeatures.keySet()){ pcm.removeFeature((AbstractFeature) listeFeatures.get(key)); } } } /** * Reduces the product list of the PCM based on the chosen values * of certain features * * @param choix Map of the features and their required values */ public void reduceProduct(Map<Feature, String> choix){ List<Product> productsListe = pcm.getProducts(); for(Product produit: productsListe){ Boolean test = true; for(Feature feature: choix.keySet()){ Cell cellule = produit.findCell(feature); if(choix.get(feature).equals(cellule.getContent())){ test = test && true; } else{ test = test && false; } } if(!test){ pcm.removeProduct(produit); } } // System.out.println(""); // System.out.println(pcm); } /** * Exports the PCM to JSON format * @throws IOException */ public void jsonExport() throws IOException{ // System.out.println(""); // System.out.println(pcm); // Filter FiltreVisitor filter = new FiltreVisitor(); HashMap<String, HashMap<String, List<Cell>>> dataFiltered = filter.filtreReduit(pcm); // Export File resumeFile = new File("src/main/java/IHM/public_html/json/summarizer.js"); // target file JsonExport exporter = new JsonExport(); exporter.export(dataFiltered, resumeFile); } /** * Selects the products whose cell values for certain features are equal to, or fall within, predefined values. 
The criteria are applied to a single feature, chosen according to the following order of data * types: * booleans -> numbers -> stringValues -> multiples -> partials -> units * -> versions -> notApplicables -> dimensions -> conditionals -> notAvailables */ public void randomChoose(){ Map<Feature, String> productsChoice = new HashMap<>(); if(!chooseFeatureByType("booleans").isEmpty()){ Feature firstFeature = chooseFeatureByType("booleans").get(0); // Add the first feature with the value "Yes" to the criteria of the products to select productsChoice.put(firstFeature, "Yes"); } else{ if(!chooseFeatureByType("numbers").isEmpty()){ Feature firstFeature = chooseFeatureByType("numbers").get(0); FiltreVisitor filter = new FiltreVisitor(); List<Cell> cellData = filter.filtreReduit(pcm).get(firstFeature.getName()).get("numbers"); // Cast the values to float List<Float> newListe = new ArrayList<>(); for(Cell cell: cellData){ newListe.add(Float.parseFloat(cell.getContent())); } // Compute the mean float moyenne = (new MotifImpl()).moyenne(newListe); // Add the first feature with the mean value to the criteria of the products to select productsChoice.put(firstFeature, Float.toString(moyenne)); } else{ if(!chooseFeatureByType("stringValues").isEmpty()){ Feature firstFeature = chooseFeatureByType("stringValues").get(0); Motif motif = new MotifImpl(); FiltreVisitor filter = new FiltreVisitor(); List<Cell> cellData = filter.filtreReduit(pcm).get(firstFeature.getName()).get("stringValues"); // Compute the occurrence percentage of each cell value Map<String, Float> pourcentages = motif.pourcentage(cellData); // Find the maximum percentage Float maxPourc = motif.max(pourcentages.values()); // Retrieve one of the most represented values String mustRepresented = ""; for(String key: pourcentages.keySet()){ if(pourcentages.get(key) == maxPourc){ mustRepresented = key; } } // Add the first feature with one of the most represented values to the criteria of the products to select productsChoice.put(firstFeature, mustRepresented); } } } reduceProduct(productsChoice); } /** * Returns the features containing cells of a given type * * @param typeName Name of the desired interpretation/type * @return List of the matching features */ private List<Feature> chooseFeatureByType(String typeName){ FiltreVisitor filter = new FiltreVisitor(); HashMap<String, HashMap<String, List<Cell>>> dataFiltered = filter.filtreReduit(pcm); List<Feature> listOfChoosedFeature = new ArrayList<>(); for (Feature feature : pcm.getConcreteFeatures()) { HashMap<String, List<Cell>> listeType = dataFiltered.get(feature.getName()); for(String key: listeType.keySet()){ if(key.equals(typeName)){ listOfChoosedFeature.add(feature); } } } return listOfChoosedFeature; } }
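// --- Hypothetical driver for the Receiver above ---
// Loads a PCM file, keeps only the features the user selected (1-based indices,
// matching the counter used in reduceFeature()), narrows the products with
// randomChoose(), and writes the filtered JSON. The input file path is a
// placeholder; jsonExport() writes to the fixed target file shown in the class.
import java.io.File;
import java.io.IOException;
import java.util.Arrays;

import summarizer.receiver.Receiver;

class ReceiverSketch {
    public static void main(String[] args) throws IOException {
        Receiver receiver = new Receiver();
        receiver.setPCM(new File("path/to/matrix.pcm"));   // placeholder path

        receiver.reduceFeature(Arrays.asList(1, 3, 4));    // keep features #1, #3 and #4
        receiver.randomChoose();                           // boolean -> number -> string selection rule
        receiver.jsonExport();                             // writes src/main/java/IHM/public_html/json/summarizer.js
    }
}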
package de.alpharogroup.wicket.components.examples.application; import java.io.IOException; import org.apache.log4j.Logger; import org.apache.wicket.Application; import org.apache.wicket.markup.head.IHeaderResponse; import org.apache.wicket.markup.head.filter.JavaScriptFilteredIntoFooterHeaderResponse; import org.apache.wicket.markup.html.IHeaderResponseDecorator; import org.apache.wicket.markup.html.WebPage; import org.apache.wicket.protocol.https.HttpsConfig; import org.apache.wicket.protocol.https.HttpsMapper; import org.jaulp.wicket.PackageResourceReferences; import de.agilecoders.wicket.core.settings.ThemeProvider; import de.agilecoders.wicket.themes.markup.html.bootstrap3.Bootstrap3Theme; import de.agilecoders.wicket.themes.markup.html.google.GoogleTheme; import de.agilecoders.wicket.themes.markup.html.metro.MetroTheme; import de.agilecoders.wicket.themes.markup.html.wicket.WicketTheme; import de.agilecoders.wicket.themes.settings.BootswatchThemeProvider; import de.alpharogroup.wicket.bootstrap2.application.WicketBootstrapApplication; import de.alpharogroup.wicket.bootstrap2.themes.CustomTheme; import de.alpharogroup.wicket.components.examples.home.HomePage; /** * Application object for your web application. If you want to run this application without deploying, run the Start class. * * @see de.alpharogroup.wicket.components.examples.StartComponentExamples#main(String[]) */ public class WicketApplication extends WicketBootstrapApplication { /** The Constant logger. */ private static final Logger LOGGER = Logger .getLogger(WicketApplication.class.getName()); public static final String FOOTER_FILTER_NAME = "footer-container"; /** * @see org.apache.wicket.Application#getHomePage() */ @Override public Class<? extends WebPage> getHomePage() { return HomePage.class; } /** * Factory method for set the default theme of the application. This method * is invoked in the {@code WicketBootstrapApplication.configureBootstrap()} * method and can be overridden from the derived classes so users can * provide their own version of the default theme of the application. * * @return the default theme as string. */ protected String newDefaultTheme() { return "wicket"; } /** * @see org.apache.wicket.Application#init() */ @Override public void init() { super.init(); // add your configuration here initializeAllHeaderContributors(); // set footer scripts... setHeaderResponseDecorator(new IHeaderResponseDecorator() { public IHeaderResponse decorate(IHeaderResponse response) { return new JavaScriptFilteredIntoFooterHeaderResponse(response, FOOTER_FILTER_NAME); } }); // set up ports for http and https... setRootRequestMapper(new HttpsMapper(getRootRequestMapper(), new HttpsConfig(8080, 8443))); } protected void configureBootstrap() { configureBootstrap(new CustomTheme()); } /** * Initialize all header contributors. */ private void initializeAllHeaderContributors() { try { initializeResources(); } catch (final ClassNotFoundException e) { LOGGER.error( "ClassNotFoundException in the initializeResources-Method from the WicketApplication.", e); } catch (final IOException e) { LOGGER.error( "IOException in the initializeResources-Method from the WicketApplication.", e); } } public String getDomainName() { return "jaulp-wicket-components.com"; } /** * Initialize resources. * * @throws ClassNotFoundException * the class not found exception * @throws IOException * Signals that an I/O exception has occurred. 
*/ public void initializeResources() throws ClassNotFoundException, IOException { PackageResourceReferences prr = PackageResourceReferences.getInstance(); prr.initializeResources(getPackageToScan()); } /** * Gets the WicketApplication. * * @return the WicketApplication object. */ public static WicketApplication get() { return ((WicketApplication) Application.get()); } public String getPackageToScan() { return "de.alpharogroup.wicket.components.examples"; } }
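// --- Hedged illustration of the HttpsMapper configured in init() above ---
// With the root request mapper wrapped in an HttpsMapper(HttpsConfig(8080, 8443)),
// pages annotated with Wicket's @RequireHttps are served over the HTTPS port while
// all other pages stay on plain HTTP. The page class below is a hypothetical
// example (its HTML markup file is assumed to exist).
import org.apache.wicket.markup.html.WebPage;
import org.apache.wicket.protocol.https.RequireHttps;

@RequireHttps
class SecureCheckoutPage extends WebPage {
    private static final long serialVersionUID = 1L;
    // Requests reaching this page over plain HTTP are redirected by the
    // HttpsMapper to https://host:8443/... before the page is rendered.
}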
package tachyon.examples; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.RandomAccessFile; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.nio.IntBuffer; import java.nio.channels.FileChannel.MapMode; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.log4j.Logger; import org.apache.thrift.TException; import tachyon.CommonUtils; import tachyon.Constants; import tachyon.Version; import tachyon.client.OutStream; import tachyon.client.TachyonByteBuffer; import tachyon.client.WriteType; import tachyon.client.TachyonFS; import tachyon.client.TachyonFile; import tachyon.conf.UserConf; import tachyon.thrift.FileAlreadyExistException; import tachyon.thrift.InvalidPathException; import tachyon.thrift.SuspectedFileSizeException; public class Performance { private static Logger LOG = Logger.getLogger(Constants.LOGGER_TYPE); private static final int RESULT_ARRAY_SIZE = 64; private static final String FOLDER = "/mnt/ramdisk/"; private static TachyonFS MTC = null; private static String MASTER_ADDRESS = null; private static String FILE_NAME = null; private static int BLOCK_SIZE_BYTES = -1; private static long BLOCKS_PER_FILE = -1; private static int THREADS = -1; private static int FILES = -1; private static boolean DEBUG_MODE = false; private static long FILE_BYTES = -1; private static long FILES_BYTES = -1; private static String RESULT_PREFIX = null; private static long[] Results = new long[RESULT_ARRAY_SIZE]; private static int BASE_FILE_NUMBER = 0; public static void createFiles() throws IOException { long startTimeMs = CommonUtils.getCurrentMs(); for (int k = 0; k < THREADS; k ++) { int fileId = MTC.createFile(FILE_NAME + (k + BASE_FILE_NUMBER)); CommonUtils.printTimeTakenMs(startTimeMs, LOG, "user_createFiles with fileId " + fileId); } } public static void logPerIteration(long startTimeMs, int times, String msg, int workerId) { long takenTimeMs = System.currentTimeMillis() - startTimeMs; double result = 1000L * FILE_BYTES / takenTimeMs / 1024 / 1024; LOG.info(times + msg + workerId + " : " + result + " Mb/sec. Took " + takenTimeMs + " ms. 
"); } public static abstract class Worker extends Thread { protected int mWorkerId; protected int mLeft; protected int mRight; protected ByteBuffer mBuf; public Worker(int id, int left, int right, ByteBuffer buf) { mWorkerId = id; mLeft = left; mRight = right; mBuf = buf; } } public static class GeneralWorker extends Worker { private boolean mOneToMany; private boolean mMemoryOnly; private String mMsg; public GeneralWorker(int id, int left, int right, ByteBuffer buf, boolean oneToMany, boolean memoryOnly, String msg) { super(id, left, right, buf); mOneToMany = oneToMany; mMemoryOnly = memoryOnly; mMsg = msg; } public void memoryCopyParition() throws IOException { if (DEBUG_MODE) { mBuf.flip(); CommonUtils.printByteBuffer(LOG, mBuf); } mBuf.flip(); long sum = 0; String str = "th " + mMsg + " @ Worker "; if (mOneToMany) { ByteBuffer dst = null; RandomAccessFile file = null; if (mMemoryOnly) { dst = ByteBuffer.allocateDirect((int) FILE_BYTES); } for (int times = mLeft; times < mRight; times ++) { long startTimeMs = System.currentTimeMillis(); if (!mMemoryOnly) { file = new RandomAccessFile(FOLDER + (mWorkerId + BASE_FILE_NUMBER), "rw"); dst = file.getChannel().map(MapMode.READ_WRITE, 0, FILE_BYTES); } dst.order(ByteOrder.nativeOrder()); for (int k = 0; k < BLOCKS_PER_FILE; k ++) { mBuf.array()[0] = (byte) (k + mWorkerId); dst.put(mBuf.array()); } dst.clear(); sum += dst.get(times); dst.clear(); if (!mMemoryOnly) { file.close(); } logPerIteration(startTimeMs, times, str, mWorkerId); } } else { ByteBuffer dst = null; RandomAccessFile file = null; if (mMemoryOnly) { dst = ByteBuffer.allocateDirect((int) FILE_BYTES); } for (int times = mLeft; times < mRight; times ++) { long startTimeMs = System.currentTimeMillis(); if (!mMemoryOnly) { file = new RandomAccessFile(FOLDER + (mWorkerId + BASE_FILE_NUMBER), "rw"); dst = file.getChannel().map(MapMode.READ_WRITE, 0, FILE_BYTES); } dst.order(ByteOrder.nativeOrder()); for (int k = 0; k < BLOCKS_PER_FILE; k ++) { dst.get(mBuf.array()); } sum += mBuf.get(times % 16); dst.clear(); if (!mMemoryOnly) { file.close(); } logPerIteration(startTimeMs, times, str, mWorkerId); } } Results[mWorkerId] = sum; } @Override public void run() { try { memoryCopyParition(); } catch (IOException e) { CommonUtils.runtimeException(e); } LOG.info(mMsg + mWorkerId + " just finished."); } } public static class TachyonWriterWorker extends Worker { private TachyonFS mTC; public TachyonWriterWorker(int id, int left, int right, ByteBuffer buf) { super(id, left, right, buf); mTC = TachyonFS.get(MASTER_ADDRESS); } public void writeParition() throws IOException, SuspectedFileSizeException, InvalidPathException, TException { if (DEBUG_MODE) { mBuf.flip(); CommonUtils.printByteBuffer(LOG, mBuf); } mBuf.flip(); for (int pId = mLeft; pId < mRight; pId ++) { long startTimeMs = System.currentTimeMillis(); TachyonFile file = mTC.getFile(FILE_NAME + (mWorkerId + BASE_FILE_NUMBER)); OutStream os = file.getOutStream(WriteType.MUST_CACHE); for (int k = 0; k < BLOCKS_PER_FILE; k ++) { mBuf.array()[0] = (byte) (k + mWorkerId); os.write(mBuf.array()); } os.close(); logPerIteration(startTimeMs, pId, "th WriteTachyonFile @ Worker ", pId); } } @Override public void run() { try { writeParition(); } catch (Exception e) { CommonUtils.runtimeException(e); } LOG.info("WriteWorker " + mWorkerId + " just finished."); } } public static class TachyonReadWorker extends Worker { private TachyonFS mTC; public TachyonReadWorker(int id, int left, int right, ByteBuffer buf) { super(id, left, right, buf); mTC = 
TachyonFS.get(MASTER_ADDRESS); } public void readPartition() throws IOException, SuspectedFileSizeException, InvalidPathException, TException { TachyonByteBuffer buf; if (DEBUG_MODE) { LOG.info("Verifying the reading data..."); for (int pId = mLeft; pId < mRight; pId ++) { TachyonFile file = mTC.getFile(FILE_NAME + mWorkerId); buf = file.readByteBuffer(); IntBuffer intBuf; intBuf = buf.DATA.asIntBuffer(); int tmp; for (int i = 0; i < BLOCKS_PER_FILE; i ++) { for (int k = 0; k < BLOCK_SIZE_BYTES / 4; k ++) { tmp = intBuf.get(); if ((k == 0 && tmp == (i + mWorkerId)) || (k != 0 && tmp == k)) { } else { CommonUtils.runtimeException("WHAT? " + tmp + " " + k); } } } buf.close(); } } long sum = 0; for (int pId = mLeft; pId < mRight; pId ++) { long startTimeMs = System.currentTimeMillis(); TachyonFile file = mTC.getFile(FILE_NAME + (mWorkerId + BASE_FILE_NUMBER)); buf = file.readByteBuffer(); for (int i = 0; i < BLOCKS_PER_FILE; i ++) { buf.DATA.get(mBuf.array()); } sum += mBuf.get(pId % 16); if (DEBUG_MODE) { buf.DATA.flip(); CommonUtils.printByteBuffer(LOG, buf.DATA); } buf.DATA.clear(); logPerIteration(startTimeMs, pId, "th ReadTachyonFile @ Worker ", pId); buf.close(); } Results[mWorkerId] = sum; } @Override public void run() { try { readPartition(); } catch (Exception e) { CommonUtils.runtimeException(e); } LOG.info("ReadWorker " + mWorkerId + " just finished."); } } public static class HdfsWorker extends Worker { private boolean mWrite; private String mMsg; public HdfsWorker(int id, int left, int right, ByteBuffer buf, boolean write, String msg) { super(id, left, right, buf); mWrite = write; mMsg = msg; } public void io() throws IOException { if (DEBUG_MODE) { mBuf.flip(); CommonUtils.printByteBuffer(LOG, mBuf); } mBuf.flip(); long sum = 0; String str = "th " + mMsg + " @ Worker "; Configuration tConf = new Configuration(); // tConf.set("fs.default.name", FILE_NAME.); // tConf.set("fs.default.name", "hdfs://localhost:54310"); // tConf.addResource("/home/haoyuan/Tachyon/hadoop-1.0.4/conf/core-site.xml"); FileSystem fs = FileSystem.get(tConf); if (mWrite) { for (int times = mLeft; times < mRight; times ++) { long startTimeMs = System.currentTimeMillis(); String filePath = FILE_NAME + (mWorkerId + BASE_FILE_NUMBER); OutputStream os = fs.create(new Path(filePath)); for (int k = 0; k < BLOCKS_PER_FILE; k ++) { mBuf.array()[0] = (byte) (k + mWorkerId); os.write(mBuf.array()); } os.close(); logPerIteration(startTimeMs, times, str, mWorkerId); } } else { for (int times = mLeft; times < mRight; times ++) { long startTimeMs = System.currentTimeMillis(); String filePath = FILE_NAME + (mWorkerId + BASE_FILE_NUMBER); InputStream is = fs.open(new Path(filePath)); long len = BLOCKS_PER_FILE * BLOCK_SIZE_BYTES; while (len > 0) { int r = is.read(mBuf.array()); len -= r; if (r == -1) { CommonUtils.runtimeException("R == -1"); } } is.close(); logPerIteration(startTimeMs, times, str, mWorkerId); } } Results[mWorkerId] = sum; } @Override public void run() { try { io(); } catch (IOException e) { CommonUtils.runtimeException(e); } LOG.info(mMsg + mWorkerId + " just finished."); } } private static void memoryCopyTest(boolean write, boolean memoryOnly) { ByteBuffer[] bufs = new ByteBuffer[THREADS]; for (int thread = 0; thread < THREADS; thread ++) { ByteBuffer sRawData = ByteBuffer.allocate(BLOCK_SIZE_BYTES); sRawData.order(ByteOrder.nativeOrder()); for (int k = 0; k < BLOCK_SIZE_BYTES / 4; k ++) { sRawData.putInt(k); } bufs[thread] = sRawData; } String msg = (write ? "Write" : "Read") + (memoryOnly ? 
"_Memory " : "_RamFile "); GeneralWorker[] WWs = new GeneralWorker[THREADS]; int t = FILES / THREADS; for (int thread = 0; thread < THREADS; thread ++) { WWs[thread] = new GeneralWorker( thread, t * thread, t * (thread + 1), bufs[thread], write, memoryOnly, msg); } long startTimeMs = System.currentTimeMillis(); for (int thread = 0; thread < THREADS; thread ++) { WWs[thread].start(); } for (int thread = 0; thread < THREADS; thread ++) { try { WWs[thread].join(); } catch (InterruptedException e) { CommonUtils.runtimeException(e); } } long takenTimeMs = System.currentTimeMillis() - startTimeMs; double result = 1000L * FILES_BYTES / takenTimeMs / 1024 / 1024; LOG.info(result + " Mb/sec. " + RESULT_PREFIX + "Entire " + msg + " Test : " + " Took " + takenTimeMs + " ms. Current System Time: " + System.currentTimeMillis()); } private static void TachyonTest(boolean write) { ByteBuffer[] bufs = new ByteBuffer[THREADS]; for (int thread = 0; thread < THREADS; thread ++) { ByteBuffer sRawData = ByteBuffer.allocate(BLOCK_SIZE_BYTES); sRawData.order(ByteOrder.nativeOrder()); for (int k = 0; k < BLOCK_SIZE_BYTES / 4; k ++) { sRawData.putInt(k); } bufs[thread] = sRawData; } Worker[] WWs = new Worker[THREADS]; int t = FILES / THREADS; for (int thread = 0; thread < THREADS; thread ++) { if (write) { WWs[thread] = new TachyonWriterWorker(thread, t * thread, t * (thread + 1), bufs[thread]); } else { WWs[thread] = new TachyonReadWorker(thread, t * thread, t * (thread + 1), bufs[thread]); } } long startTimeMs = System.currentTimeMillis(); for (int thread = 0; thread < THREADS; thread ++) { WWs[thread].start(); } for (int thread = 0; thread < THREADS; thread ++) { try { WWs[thread].join(); } catch (InterruptedException e) { CommonUtils.runtimeException(e); } } long takenTimeMs = System.currentTimeMillis() - startTimeMs; double result = FILES_BYTES * 1000L / takenTimeMs / 1024 / 1024; LOG.info(result + " Mb/sec. " + RESULT_PREFIX + "Entire " + (write ? "Write ": "Read ") + " Took " + takenTimeMs + " ms. Current System Time: " + System.currentTimeMillis()); } private static void HdfsTest(boolean write) { ByteBuffer[] bufs = new ByteBuffer[THREADS]; for (int thread = 0; thread < THREADS; thread ++) { ByteBuffer sRawData = ByteBuffer.allocate(BLOCK_SIZE_BYTES); sRawData.order(ByteOrder.nativeOrder()); for (int k = 0; k < BLOCK_SIZE_BYTES / 4; k ++) { sRawData.putInt(k); } bufs[thread] = sRawData; } Worker[] WWs = new Worker[THREADS]; int t = FILES / THREADS; String msg = (write ? "Write " : "Read "); for (int thread = 0; thread < THREADS; thread ++) { WWs[thread] = new HdfsWorker(thread, t * thread, t * (thread + 1), bufs[thread], write, msg); } long startTimeMs = System.currentTimeMillis(); for (int thread = 0; thread < THREADS; thread ++) { WWs[thread].start(); } for (int thread = 0; thread < THREADS; thread ++) { try { WWs[thread].join(); } catch (InterruptedException e) { CommonUtils.runtimeException(e); } } long takenTimeMs = System.currentTimeMillis() - startTimeMs; double result = FILES_BYTES * 1000L / takenTimeMs / 1024 / 1024; LOG.info(result + " Mb/sec. " + RESULT_PREFIX + "Entire " + (write ? "Write ": "Read ") + " Took " + takenTimeMs + " ms. 
Current System Time: " + System.currentTimeMillis()); } public static void main(String[] args) throws IOException, InvalidPathException, FileAlreadyExistException { if (args.length != 9) { System.out.println("java -cp target/tachyon-" + Version.VERSION + "-jar-with-dependencies.jar tachyon.examples.Performance " + "<MasterIp> <FileName> <WriteBlockSizeInBytes> <BlocksPerFile> " + "<DebugMode:true/false> <Threads> <FilesPerThread> <TestCaseNumber> <BaseFileNumber>\n" + "1: Files Write Test\n" + "2: Files Read Test\n" + "3: RamFile Write Test \n" + "4: RamFile Read Test \n" + "5: ByteBuffer Write Test \n" + "6: ByteBuffer Read Test \n"); System.exit(-1); } MASTER_ADDRESS = args[0]; FILE_NAME = args[1]; BLOCK_SIZE_BYTES = Integer.parseInt(args[2]); BLOCKS_PER_FILE = Long.parseLong(args[3]); DEBUG_MODE = ("true".equals(args[4])); THREADS = Integer.parseInt(args[5]); FILES = Integer.parseInt(args[6]) * THREADS; int testCase = Integer.parseInt(args[7]); BASE_FILE_NUMBER = Integer.parseInt(args[8]); FILE_BYTES = BLOCKS_PER_FILE * BLOCK_SIZE_BYTES; FILES_BYTES = 1L * FILE_BYTES * FILES; RESULT_PREFIX = String.format("Threads %d FilesPerThread %d TotalFiles %d " + "BLOCK_SIZE_KB %d BLOCKS_PER_FILE %d FILE_SIZE_MB %d " + "Tachyon_WRITE_BUFFER_SIZE_KB %d BaseFileNumber %d : ", THREADS, FILES / THREADS, FILES, BLOCK_SIZE_BYTES / 1024, BLOCKS_PER_FILE, CommonUtils.getMB(FILE_BYTES), UserConf.get().FILE_BUFFER_BYTES / 1024, BASE_FILE_NUMBER); for (int k = 0; k < 10000000; k ++) { // Warmup } if (testCase == 1) { RESULT_PREFIX = "TachyonFilesWriteTest " + RESULT_PREFIX; LOG.info(RESULT_PREFIX); MTC = TachyonFS.get(MASTER_ADDRESS); createFiles(); TachyonTest(true); } else if (testCase == 2) { RESULT_PREFIX = "TachyonFilesReadTest " + RESULT_PREFIX; LOG.info(RESULT_PREFIX); MTC = TachyonFS.get(MASTER_ADDRESS); TachyonTest(false); } else if (testCase == 3) { RESULT_PREFIX = "RamFile Write " + RESULT_PREFIX; LOG.info(RESULT_PREFIX); memoryCopyTest(true, false); } else if (testCase == 4) { RESULT_PREFIX = "RamFile Read " + RESULT_PREFIX; LOG.info(RESULT_PREFIX); memoryCopyTest(false, false); } else if (testCase == 5) { RESULT_PREFIX = "ByteBuffer Write Test " + RESULT_PREFIX; LOG.info(RESULT_PREFIX); memoryCopyTest(true, true); } else if (testCase == 6) { RESULT_PREFIX = "ByteBuffer Read Test " + RESULT_PREFIX; LOG.info(RESULT_PREFIX); memoryCopyTest(false, true); } else if (testCase == 7) { RESULT_PREFIX = "HdfsFilesWriteTest " + RESULT_PREFIX; LOG.info(RESULT_PREFIX); HdfsTest(true); } else if (testCase == 8) { RESULT_PREFIX = "HdfsFilesReadTest " + RESULT_PREFIX; LOG.info(RESULT_PREFIX); HdfsTest(false); } else { CommonUtils.runtimeException("No Test Case " + testCase); } for (int k = 0; k < RESULT_ARRAY_SIZE; k ++) { System.out.print(Results[k] + " "); } System.out.println(); System.exit(0); } }
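/*
 * Usage sketch (not part of the original sources): drives the Performance benchmark above
 * programmatically. Every argument value below is an illustrative assumption -- in particular
 * the master address, the file name prefix and the sizes -- chosen only to show the argument
 * order expected by main(). Running it requires a reachable Tachyon master.
 */
class PerformanceExample {
    public static void main(String[] args) throws Exception {
        Performance.main(new String[] {
                "localhost:19998", // <MasterIp> (placeholder)
                "/perf/file",      // <FileName> prefix; one file per thread is created
                "1048576",         // <WriteBlockSizeInBytes>: 1 MB blocks
                "64",              // <BlocksPerFile>: 64 MB per file
                "false",           // <DebugMode>
                "4",               // <Threads>
                "16",              // <FilesPerThread>
                "1",               // <TestCaseNumber>: 1 = Tachyon files write test
                "0"                // <BaseFileNumber>
        });
    }
}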
package eu.liveandgov.wp1.sensor_miner.persistence; import android.content.Context; import android.content.SharedPreferences; import android.os.Handler; import android.os.Looper; import android.util.Log; import java.io.BufferedWriter; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStreamWriter; import java.nio.channels.FileChannel; import java.util.Date; import java.util.zip.GZIPOutputStream; import eu.liveandgov.wp1.sensor_miner.GlobalContext; public class ZipFilePersistor implements Persistor { public static final String LOG_TAG = "ZFP"; public static final String FILENAME = "sensor.log.gz"; private static final String SHARED_PREFS_NAME = "ZipFilePersistorPrefs"; private static final String PREF_VALID_LENGTH = "validLength"; private static final long MINIMUM_MANIFEST_DELAY = 4000; private File logFile; private Handler handler; private BufferedWriter fileWriter; private long sampleCount = 0L; private long lastManifestation = 0L; public ZipFilePersistor(File logFile) { this.logFile = logFile; if(openLogFileAppend()) { // Set manifestation point lastManifestation = System.currentTimeMillis(); } } @Override public synchronized void push(String s) { if (fileWriter == null) { Log.v(LOG_TAG, "Blocked write event"); return; } try { fileWriter.write(s + "\n"); sampleCount ++; } catch (IOException e) { Log.e(LOG_TAG,"Cannot write file."); e.printStackTrace(); } // Manifest GZIP data, if specified amount of time has passed long currentTime = System.currentTimeMillis(); if(lastManifestation + MINIMUM_MANIFEST_DELAY < currentTime) { if(manifestData()) { Log.i(LOG_TAG, "Manifested the data at " + new Date(currentTime)); lastManifestation = currentTime; } } } @Override public boolean exportSamples(File stageFile) { boolean suc = true; Log.i(LOG_TAG, "Exporting samples."); if (stageFile.exists()) { Log.e(LOG_TAG, "Stage file exists."); return false; } suc = closeLogFile(); if (!suc) { Log.e(LOG_TAG, "Closing LogFile failed."); return false; } // Renamed, the valid length is now zero suc = logFile.renameTo(stageFile); putValidLength(0); if (!suc) { Log.e(LOG_TAG, "Renaming failed."); return false; } suc = openLogFileOverwrite(); if (!suc) { Log.e(LOG_TAG, "Opening new Log File failed."); return false; } // Set manifestation point lastManifestation = System.currentTimeMillis(); sampleCount = 0; return true; } @Override public boolean hasSamples() { return logFile.length() > 0; } @Override public void deleteSamples() { closeLogFile(); // Deleted, the valid length is now zero logFile.delete(); putValidLength(0); } @Override public String getStatus() { return "File size: " + logFile.length()/1024 + "kb. 
Samples written: " + sampleCount; } private boolean openLogFileAppend() { try { // Compare actual length to valid length final long validLength = getValidLength(); final long actualLength = logFile.length(); Log.d(LOG_TAG, "Valid zipfile length: " + validLength + ", actual length " + actualLength); if(actualLength > validLength) { Log.w(LOG_TAG, "Erroneous file size, truncating"); // Truncate if mismatching final FileChannel channel = new FileOutputStream(logFile, true).getChannel(); channel.truncate(validLength); channel.close(); } fileWriter = new BufferedWriter(new OutputStreamWriter(new GZIPOutputStream(new FileOutputStream(logFile,true)), "UTF8")); } catch (IOException e) { e.printStackTrace(); return false; } return true; } // Gets the valid length private long getValidLength() { SharedPreferences prefs = GlobalContext.context.getSharedPreferences(SHARED_PREFS_NAME, Context.MODE_PRIVATE); return prefs.getLong(PREF_VALID_LENGTH, 0L); } // Store last value to keep access to the shared prefs to a minimum private long lastPutValidLength = -1; // Puts the next valid length if it differs from the last put value private void putValidLength(long value) { if(lastPutValidLength == value) { return; } lastPutValidLength = value; Log.d(LOG_TAG, "New valid length: " + value); SharedPreferences prefs = GlobalContext.context.getSharedPreferences(SHARED_PREFS_NAME, Context.MODE_PRIVATE); SharedPreferences.Editor editor = prefs.edit(); editor.putLong(PREF_VALID_LENGTH, value); editor.commit(); } private boolean openLogFileOverwrite() { try { fileWriter = new BufferedWriter(new OutputStreamWriter(new GZIPOutputStream(new FileOutputStream(logFile,false)), "UTF8")); } catch (IOException e) { e.printStackTrace(); return false; } return true; } private boolean closeLogFile() { try { fileWriter.flush(); fileWriter.close(); putValidLength(logFile.length()); } catch (IOException e) { e.printStackTrace(); return false; } fileWriter = null; return true; } private boolean manifestData() { // Filewriter is closed, so no manifest action required if(fileWriter == null) { return true; } // Else we close and reopen for appending if(closeLogFile()){ if(openLogFileAppend()) { return true; } } return false; } }
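/*
 * Usage sketch (assumption, not part of the original sources): shows how the ZipFilePersistor
 * above is typically driven from Android code. The file locations and the sample string are
 * illustrative placeholders, and GlobalContext.context must already be initialized because the
 * persistor stores its "valid length" checkpoint in SharedPreferences obtained from it.
 */
class ZipFilePersistorExample {
    void demo(Context context) {
        File logFile = new File(context.getFilesDir(), ZipFilePersistor.FILENAME);
        Persistor persistor = new ZipFilePersistor(logFile);

        // Append newline-terminated samples; they are gzipped on the fly and a
        // "valid length" checkpoint is recorded at most every few seconds.
        persistor.push("ACC,1398779273000,0.1,9.8,0.0"); // hypothetical sample line

        // Hand the accumulated samples off for upload: the log file is renamed
        // to the stage file and a fresh, empty log is opened.
        File stage = new File(context.getCacheDir(), "sensor.stage.gz");
        if (persistor.hasSamples() && persistor.exportSamples(stage)) {
            // upload 'stage' here, then delete it
        }
    }
}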
package team.unstudio.jblockly; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import javafx.beans.property.BooleanProperty; import javafx.beans.property.BooleanPropertyBase; import javafx.beans.property.DoubleProperty; import javafx.beans.property.DoublePropertyBase; import javafx.beans.property.ObjectProperty; import javafx.collections.ObservableList; import javafx.geometry.HPos; import javafx.geometry.Insets; import javafx.geometry.Orientation; import javafx.geometry.Point2D; import javafx.geometry.VPos; import javafx.scene.Node; import javafx.scene.Parent; import javafx.scene.layout.Region; import javafx.scene.paint.Color; import javafx.scene.paint.Paint; import javafx.scene.shape.SVGPath; import team.unstudio.jblockly.BlockSlot.SlotType; public final class Block extends Region { public static final double INSERT_WIDTH = 5; public static final double INSERT_OFFSET_Y = 10; public static final double INSERT_HEIGHT = 10; public static final double NEXT_WIDTH = 10; public static final double NEXT_HEIGHT = 5; public static final double NEXT_OFFSET_X = 10; private static final String MARGIN_CONSTRAINT = "block-margin"; private static final String NAME_CONSTRAINT = "block-name"; private static void setConstraint(Node node, Object key, Object value) { if (value == null) { node.getProperties().remove(key); } else { node.getProperties().put(key, value); } if (node.getParent() != null) { node.getParent().requestLayout(); } } private static Object getConstraint(Node node, Object key) { if (node.hasProperties()) { Object value = node.getProperties().get(key); if (value != null) { return value; } } return null; } public static void setMargin(Node child, Insets value) { setConstraint(child, MARGIN_CONSTRAINT, value); } public static Insets getMargin(Node child) { return (Insets) getConstraint(child, MARGIN_CONSTRAINT); } public static void setNodeName(Node child, String value) { setConstraint(child, NAME_CONSTRAINT, value); } public static String getNodeName(Node child) { return (String) getConstraint(child, NAME_CONSTRAINT); } private BooleanProperty movable; public final BooleanProperty movableProperty() { if (movable == null) { movable = new BooleanPropertyBase(true) { @Override public String getName() { return "movable"; } @Override public Object getBean() { return Block.this; } }; } return movable; } public boolean isMovable() {return movableProperty().get();} public void setMovable(boolean movable) {movableProperty().set(movable);} private DoubleProperty verticalSpacing; public final DoubleProperty verticalSpacingProperty(){ if(verticalSpacing == null){ verticalSpacing = new DoublePropertyBase(0) { @Override protected void invalidated() { requestLayout(); } @Override public String getName() { return "verticalSpacing"; } @Override public Object getBean() { return Block.this; } }; } return verticalSpacing; } public final double getVerticalSpacing(){return verticalSpacingProperty().get();} public final void setVerticalSpacing(double value){verticalSpacingProperty().set(value);} private DoubleProperty horizontalSpacing; public final DoubleProperty horizontalSpacingProperty(){ if(horizontalSpacing == null){ horizontalSpacing = new DoublePropertyBase(0) { @Override protected void invalidated() { requestLayout(); } @Override public String getName() { return "horizontalSpacing"; } @Override public Object getBean() { return Block.this; } }; } return horizontalSpacing; } public final double 
getHorizontalSpacing(){return horizontalSpacingProperty().get();} public final void setHorizontalSpacing(double value){horizontalSpacingProperty().set(value);} private boolean moving; public final boolean isMoving(){ return moving; } private final SVGPath svgPath; private double tempOldX, tempOldY; private boolean performingLayout; private double[][] _tempArray; private List<BlockSlot> _tempList; private Map<String, Node> _nameToNode; public enum ConnectionType { LEFT, TOP, BUTTOM, TOPANDBUTTOM, NONE } private ConnectionType connectionType = ConnectionType.NONE; public ConnectionType getConnectionType() { return connectionType; } public void setConnectionType(ConnectionType connectionType) { this.connectionType = connectionType; } public Block() { svgPath = new SVGPath(); getChildren().add(svgPath); setOnMousePressed(event -> { if (!isMovable()) return; addToWorkspace(); tempOldX = event.getSceneX() - getLayoutX(); tempOldY = event.getSceneY() - getLayoutY(); moving = true; }); setOnMouseDragged(event -> { if (!moving) return; setLayoutX(event.getSceneX() - tempOldX); setLayoutY(event.getSceneY() - tempOldY); }); setOnMouseReleased(event -> { moving = false; getWorkspace().tryLinkBlock(this, event.getSceneX(), event.getSceneY()); }); setPickOnBounds(false); // ,contains setFill(Color.GRAY); setStroke(Color.BLACK); setVerticalSpacing(5);//TODO: Fix vertical spacing layout problem setHorizontalSpacing(5); } public final SVGPath getSVGPath(){ return svgPath; } public final ObjectProperty<Paint> fillProperty() { return svgPath.fillProperty(); } public final Paint getFill(){ return svgPath.getFill(); } public final void setFill(Paint value){ svgPath.setFill(value); } public final ObjectProperty<Paint> strokeProperty() { return svgPath.strokeProperty(); } public final Paint getStroke(){ return svgPath.getStroke(); } public final void setStroke(Paint value){ svgPath.setStroke(value); } public BlockWorkspace getWorkspace() { Parent parent = getParent(); if (parent instanceof BlockSlot) return ((BlockSlot) parent).getWorkspace(); else if (parent instanceof BlockWorkspace) return (BlockWorkspace) parent; else return null; } public void addToWorkspace(){ Parent oldParent = getParent(); if (oldParent == null) return; if (oldParent instanceof BlockWorkspace) return; Parent parent = getParent(); double x = getLayoutX(), y = getLayoutY(); while (!(parent instanceof BlockWorkspace)) { x += parent.getLayoutX(); y += parent.getLayoutY(); parent = parent.getParent(); } ((BlockWorkspace) parent).getChildren().add(this); setLayoutX(x); setLayoutY(y); ((BlockSlot)oldParent).validateBlock(); } public boolean tryLinkBlock(Block block,double x,double y){ for(BlockSlot slot:_tempList) if(slot.tryLinkBlock(block, x-slot.getLayoutX(), y-slot.getLayoutY())) return true; return false; } public Set<String> getNodeNames() { return getNameToNode().keySet(); } public Map<String,Node> getNameToNode() { if(_nameToNode==null) _nameToNode = new HashMap<>(); _nameToNode.clear(); for(Node node:getChildren()){ String name = getNodeName(node); if(name!=null) _nameToNode.put(name, node); } return Collections.unmodifiableMap(_nameToNode); } public Node getNode(String name) { return getNameToNode().get(name); } public void addNode(String name, Node node) { if (getNameToNode().containsKey(name)) { return; } setNodeName(node, name); getChildren().add(node); } public void removeNode(String name) { if (!getNameToNode().containsKey(name)) { return; } getChildren().remove(getNameToNode().get(name)); } public boolean containNodeName(String 
name) { return getNameToNode().containsKey(name); } @Override public ObservableList<Node> getChildren() { return super.getChildren(); } @Override protected double computePrefWidth(double height) { return super.computePrefWidth(height); } @Override protected double computePrefHeight(double width) { List<Node> managed = new ArrayList<>(getManagedChildren()); managed.remove(svgPath); double vSpace = getVerticalSpacing(); double hSpace = getHorizontalSpacing(); double[][] actualAreaBounds = getTempArray(managed.size());; List<BlockSlot> slots = getLineBounds(managed, vSpace, hSpace, false, actualAreaBounds); if(slots.isEmpty()) return 0; double y = 0; for(BlockSlot slot:slots) y+=slot.getLineHeight() + vSpace; return y; } @Override public boolean contains(double localX, double localY) { return svgPath.contains(localX, localY); } @Override public boolean contains(Point2D localPoint) { return svgPath.contains(localPoint); } @Override public void requestLayout() { if (performingLayout) { return; } super.requestLayout(); } @Override protected void layoutChildren() { if (performingLayout) return; performingLayout = true; List<Node> managed = new ArrayList<>(getManagedChildren()); managed.remove(svgPath); double vSpace = getVerticalSpacing(); double hSpace = getHorizontalSpacing(); HPos hpos = HPos.LEFT; VPos vpos = VPos.TOP; double[][] actualAreaBounds = getTempArray(managed.size());; List<BlockSlot> slots = getLineBounds(managed, vSpace, hSpace, false, actualAreaBounds); if (slots.isEmpty()){ svgPath.setContent(""); }else{ ConnectionType connectionType = getConnectionType(); StringBuilder builder = new StringBuilder(); builder.append(getTopPath(connectionType, slots.get(0).getLineWidth())); double x = getConnectionType()==ConnectionType.LEFT?INSERT_WIDTH:0; double y = 0; for (int i = 0, size = slots.size(); i < size; i++) { BlockSlot slot = slots.get(i); layoutLine(slot, managed, actualAreaBounds, x, y, vSpace, hSpace, hpos, vpos); switch (slot.getSlotType()) { case BRANCH: builder.append(getBranchPath(connectionType,y, slot.getLineWidth(), slot.getLineHeight(), i + 1 == size ? 
slot.getLineWidth() : slots.get(i + 1).getLineWidth())); break; case INSERT: builder.append(getInsertPath(connectionType,y, slot.getLineWidth())); break; default: break; } SlotType slotType = slot.getSlotType(); if (slotType != SlotType.NEXT){ y += slot.getLineHeight() + vSpace; } } builder.append(getBottomPath(connectionType, y)); svgPath.setContent(builder.toString()); } layoutInArea(svgPath, 0, 0, svgPath.prefWidth(-1), svgPath.prefHeight(-1), 0, HPos.CENTER, VPos.CENTER); performingLayout = false; } private void layoutLine(BlockSlot slot, List<Node> managed, double[][] actualAreaBounds, double left, double top, double vSpace,double hSpace, HPos hpos, VPos vpos) { SlotType slotType = slot.getSlotType(); double x = left+hSpace; double y = top + vSpace; for (int i = slot.getFirstNode(), end = slot.getLastNode() - 1; i <= end; i++) { Node child = managed.get(i); layoutInArea(child, x, y, actualAreaBounds[0][i], slot.getLineHeight(), 0, getMargin(child), hpos, vpos); x += actualAreaBounds[0][i] + hSpace; } if(slotType==SlotType.BRANCH&&x<BlockSlot.BRANCH_MIN_WIDTH) x = BlockSlot.BRANCH_MIN_WIDTH; layoutInArea(slot, x, top, slot.getWidth(), slot.getHeight(), 0, null, hpos, vpos); } private List<BlockSlot> getLineBounds(List<Node> managed, double vSpace, double hSpace, boolean minimum, double[][] actualAreaBounds) { List<BlockSlot> temp = getTempList(); double tempWidth = hSpace, tempHeight = 0, tempMaxWidth = 0; int lastBranchOrNextBlock = -1, firstNode = 0; for (int i = 0, size = managed.size(); i < size; i++) { Node child = managed.get(i); //Node Insets margin = getMargin(child); if (minimum) { actualAreaBounds[0][i] = computeChildMinAreaWidth(child, -1, margin, -1, false); actualAreaBounds[1][i] = computeChildMinAreaHeight(child, -1, margin, -1); } else { actualAreaBounds[0][i] = computeChildPrefAreaWidth(child, -1, margin, -1, false); actualAreaBounds[1][i] = computeChildPrefAreaHeight(child, -1, margin, -1); } if (tempHeight < actualAreaBounds[1][i]) tempHeight = actualAreaBounds[1][i]; if (child instanceof BlockSlot) { BlockSlot slot = (BlockSlot) child; temp.add(slot); slot.setLineWidth(tempWidth); slot.setLineHeight(tempHeight); slot.setFirstNode(firstNode); slot.setLastNode(i); firstNode = i + 1; int tsize = temp.size(); if (slot.getSlotType() == SlotType.BRANCH || slot.getSlotType() == SlotType.NEXT) { if (tsize - lastBranchOrNextBlock != 1) replaceAllLineWidth(temp, lastBranchOrNextBlock + 1, tsize - 2, tempMaxWidth); lastBranchOrNextBlock = tsize - 1; tempMaxWidth = 0; } else { if (tempMaxWidth < tempWidth) tempMaxWidth = tempWidth; if (size - i == 1 && tsize - lastBranchOrNextBlock != 1) replaceAllLineWidth(temp, lastBranchOrNextBlock + 1, tsize - 1, tempMaxWidth); } tempWidth = hSpace; tempHeight = 0; } else { tempWidth += actualAreaBounds[0][i] + hSpace; } } return temp; } private void replaceAllLineWidth(List<BlockSlot> managed, int start, int end, double width) { for (int i = start; i <= end; i++) managed.get(i).setLineWidth(width); } private List<BlockSlot> getTempList(){ if(_tempList == null) _tempList = new ArrayList<>(); _tempList.clear(); return _tempList; } private double[][] getTempArray(int size) { if (_tempArray == null) { _tempArray = new double[2][size]; } else if (_tempArray[0].length < size) { _tempArray = new double[2][Math.max(_tempArray.length * 3, size)]; } return _tempArray; } private double computeChildMinAreaHeight(Node child, double minBaselineComplement, Insets margin, double width) { final boolean snap = isSnapToPixel(); double top = margin != null ? 
snapSpace(margin.getTop(), snap) : 0; double bottom = margin != null ? snapSpace(margin.getBottom(), snap) : 0; double alt = -1; if (child.isResizable() && child.getContentBias() == Orientation.HORIZONTAL) { // height // depends // width double left = margin != null ? snapSpace(margin.getLeft(), snap) : 0; double right = margin != null ? snapSpace(margin.getRight(), snap) : 0; alt = snapSize(width != -1 ? boundedSize(child.minWidth(-1), width - left - right, child.maxWidth(-1)) : child.maxWidth(-1)); } // For explanation, see computeChildPrefAreaHeight if (minBaselineComplement != -1) { double baseline = child.getBaselineOffset(); if (child.isResizable() && baseline == BASELINE_OFFSET_SAME_AS_HEIGHT) { return top + snapSize(child.minHeight(alt)) + bottom + minBaselineComplement; } else { return baseline + minBaselineComplement; } } else { return top + snapSize(child.minHeight(alt)) + bottom; } } private double computeChildPrefAreaHeight(Node child, double prefBaselineComplement, Insets margin, double width) { final boolean snap = isSnapToPixel(); double top = margin != null ? snapSpace(margin.getTop(), snap) : 0; double bottom = margin != null ? snapSpace(margin.getBottom(), snap) : 0; double alt = -1; if (child.isResizable() && child.getContentBias() == Orientation.HORIZONTAL) { // height // depends // width double left = margin != null ? snapSpace(margin.getLeft(), snap) : 0; double right = margin != null ? snapSpace(margin.getRight(), snap) : 0; alt = snapSize(boundedSize(child.minWidth(-1), width != -1 ? width - left - right : child.prefWidth(-1), child.maxWidth(-1))); } if (prefBaselineComplement != -1) { double baseline = child.getBaselineOffset(); if (child.isResizable() && baseline == BASELINE_OFFSET_SAME_AS_HEIGHT) { // When baseline is same as height, the preferred height of the // node will be above the baseline, so we need to add // the preferred complement to it return top + snapSize(boundedSize(child.minHeight(alt), child.prefHeight(alt), child.maxHeight(alt))) + bottom + prefBaselineComplement; } else { // For all other Nodes, it's just their baseline and the // complement. // Note that the complement already contain the Node's preferred // (or fixed) height return top + baseline + prefBaselineComplement + bottom; } } else { return top + snapSize(boundedSize(child.minHeight(alt), child.prefHeight(alt), child.maxHeight(alt))) + bottom; } } private double computeChildMinAreaWidth(Node child, double baselineComplement, Insets margin, double height, boolean fillHeight) { final boolean snap = isSnapToPixel(); double left = margin != null ? snapSpace(margin.getLeft(), snap) : 0; double right = margin != null ? snapSpace(margin.getRight(), snap) : 0; double alt = -1; if (height != -1 && child.isResizable() && child.getContentBias() == Orientation.VERTICAL) { // width // depends // height double top = margin != null ? snapSpace(margin.getTop(), snap) : 0; double bottom = (margin != null ? snapSpace(margin.getBottom(), snap) : 0); double bo = child.getBaselineOffset(); final double contentHeight = bo == BASELINE_OFFSET_SAME_AS_HEIGHT && baselineComplement != -1 ? 
height - top - bottom - baselineComplement : height - top - bottom; if (fillHeight) { alt = snapSize(boundedSize(child.minHeight(-1), contentHeight, child.maxHeight(-1))); } else { alt = snapSize(boundedSize(child.minHeight(-1), child.prefHeight(-1), Math.min(child.maxHeight(-1), contentHeight))); } } return left + snapSize(child.minWidth(alt)) + right; } private double computeChildPrefAreaWidth(Node child, double baselineComplement, Insets margin, double height, boolean fillHeight) { final boolean snap = isSnapToPixel(); double left = margin != null ? snapSpace(margin.getLeft(), snap) : 0; double right = margin != null ? snapSpace(margin.getRight(), snap) : 0; double alt = -1; if (height != -1 && child.isResizable() && child.getContentBias() == Orientation.VERTICAL) { // width // depends // height double top = margin != null ? snapSpace(margin.getTop(), snap) : 0; double bottom = margin != null ? snapSpace(margin.getBottom(), snap) : 0; double bo = child.getBaselineOffset(); final double contentHeight = bo == BASELINE_OFFSET_SAME_AS_HEIGHT && baselineComplement != -1 ? height - top - bottom - baselineComplement : height - top - bottom; if (fillHeight) { alt = snapSize(boundedSize(child.minHeight(-1), contentHeight, child.maxHeight(-1))); } else { alt = snapSize(boundedSize(child.minHeight(-1), child.prefHeight(-1), Math.min(child.maxHeight(-1), contentHeight))); } } return left + snapSize(boundedSize(child.minWidth(alt), child.prefWidth(alt), child.maxWidth(alt))) + right; } private static double boundedSize(double min, double pref, double max) { double a = pref >= min ? pref : min; double b = min >= max ? min : max; return a <= b ? a : b; } private static double snapSpace(double value, boolean snapToPixel) { return snapToPixel ? Math.round(value) : value; } private String getTopPath(ConnectionType connectionType, double width) { switch (connectionType) { case TOP: case TOPANDBUTTOM: return new StringBuilder("M 0 0 H ").append(NEXT_OFFSET_X).append(" V ").append(NEXT_HEIGHT).append(" H ") .append(NEXT_OFFSET_X + NEXT_WIDTH).append(" V 0 H ").append(width).toString(); case LEFT: return new StringBuilder("M ").append(INSERT_WIDTH).append(" ").append(INSERT_HEIGHT + INSERT_OFFSET_Y) .append(" H 0 V ").append(INSERT_HEIGHT).append(" H ").append(INSERT_WIDTH).append(" V 0 H ") .append(INSERT_WIDTH+width).toString(); default: return new StringBuilder("M 0 0 H ").append(width).toString(); } } private String getBottomPath(ConnectionType connectionType, double y) { switch (connectionType) { case BUTTOM: case TOPANDBUTTOM: return new StringBuilder(" V ").append(y).append(" H 20 V ").append(y + 5).append(" H 10 V ").append(y) .append(" H 0 Z").toString(); case LEFT: return new StringBuilder(" V ").append(y).append(" H ").append(INSERT_WIDTH).append(" Z").toString(); default: return new StringBuilder(" V ").append(y).append(" H 0 Z").toString(); } } private String getBranchPath(ConnectionType connectionType, double y, double width, double height, double nextWidth) { switch (connectionType) { case LEFT: return new StringBuilder(" V ").append(y).append(" H ").append(INSERT_WIDTH+width + NEXT_OFFSET_X + NEXT_WIDTH).append(" V ") .append(y + NEXT_HEIGHT).append(" H ").append(INSERT_WIDTH+width + NEXT_OFFSET_X).append(" V ").append(y) .append(" H ").append(INSERT_WIDTH+width).append(" V ").append(y + height).append(" H ").append(INSERT_WIDTH+nextWidth) .toString(); default: return new StringBuilder(" V ").append(y).append(" H ").append(width + NEXT_OFFSET_X + NEXT_WIDTH).append(" V ") .append(y + 
NEXT_HEIGHT).append(" H ").append(width + NEXT_OFFSET_X).append(" V ").append(y) .append(" H ").append(width).append(" V ").append(y + height).append(" H ").append(nextWidth) .toString(); } } private String getInsertPath(ConnectionType connectionType, double y, double width) { switch (connectionType) { case LEFT: return new StringBuilder(" V ").append(y + INSERT_OFFSET_Y).append(" H ").append(INSERT_WIDTH+width - INSERT_WIDTH) .append(" V ").append(y + INSERT_OFFSET_Y + INSERT_HEIGHT).append(" H ").append(INSERT_WIDTH+width).toString(); default: return new StringBuilder(" V ").append(y + INSERT_OFFSET_Y).append(" H ").append(width - INSERT_WIDTH) .append(" V ").append(y + INSERT_OFFSET_Y + INSERT_HEIGHT).append(" H ").append(width).toString(); } } }
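/*
 * Usage sketch (assumption, not part of the original sources): builds a simple block with a
 * label node and drops it into a workspace. BlockSlot and BlockWorkspace are defined elsewhere
 * in this package; this assumes BlockWorkspace exposes getChildren() publicly, as Block itself
 * does. The label text, color and coordinates are illustrative.
 */
class BlockExample {
    Block createPrintBlock(BlockWorkspace workspace) {
        Block block = new Block();
        block.setConnectionType(Block.ConnectionType.TOPANDBUTTOM);
        block.setFill(Color.CORNFLOWERBLUE);
        block.addNode("label", new javafx.scene.control.Label("print"));
        workspace.getChildren().add(block);
        block.setLayoutX(40);
        block.setLayoutY(40);
        return block;
    }
}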
package fr.tcpmfa.display;

import java.awt.Image;

import fr.tcpmfa.util.Coordinate;

public interface GraphicalElement {

    Image getImage();

    void setImage(Image image);

    Coordinate getCoordinates();

    void setCoordinates(Coordinate coordinates);
}
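/*
 * Minimal implementation sketch of the GraphicalElement interface above (assumption, not part
 * of the original sources). Coordinate comes from fr.tcpmfa.util and its constructor is not
 * shown here, so the value is simply stored and returned unchanged.
 */
class Sprite implements GraphicalElement {
    private Image image;
    private Coordinate coordinates;

    @Override public Image getImage() { return image; }
    @Override public void setImage(Image image) { this.image = image; }
    @Override public Coordinate getCoordinates() { return coordinates; }
    @Override public void setCoordinates(Coordinate coordinates) { this.coordinates = coordinates; }
}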
package com.elastisys.scale.cloudpool.commons.basepool; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; import static java.lang.Math.max; import static java.lang.String.format; import java.net.InetAddress; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import jersey.repackaged.com.google.common.util.concurrent.ThreadFactoryBuilder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.elastisys.scale.cloudpool.api.CloudPool; import com.elastisys.scale.cloudpool.api.CloudPoolException; import com.elastisys.scale.cloudpool.api.NotFoundException; import com.elastisys.scale.cloudpool.api.types.CloudPoolMetadata; import com.elastisys.scale.cloudpool.api.types.Machine; import com.elastisys.scale.cloudpool.api.types.MachinePool; import com.elastisys.scale.cloudpool.api.types.MachineState; import com.elastisys.scale.cloudpool.api.types.MembershipStatus; import com.elastisys.scale.cloudpool.api.types.PoolSizeSummary; import com.elastisys.scale.cloudpool.api.types.ServiceState; import com.elastisys.scale.cloudpool.commons.basepool.alerts.AlertTopics; import com.elastisys.scale.cloudpool.commons.basepool.config.AlertsConfig; import com.elastisys.scale.cloudpool.commons.basepool.config.BaseCloudPoolConfig; import com.elastisys.scale.cloudpool.commons.basepool.config.ScaleInConfig; import com.elastisys.scale.cloudpool.commons.basepool.config.ScaleOutConfig; import com.elastisys.scale.cloudpool.commons.basepool.driver.CloudPoolDriver; import com.elastisys.scale.cloudpool.commons.basepool.driver.StartMachinesException; import com.elastisys.scale.cloudpool.commons.resizeplanner.ResizePlan; import com.elastisys.scale.cloudpool.commons.resizeplanner.ResizePlanner; import com.elastisys.scale.cloudpool.commons.termqueue.ScheduledTermination; import com.elastisys.scale.cloudpool.commons.termqueue.TerminationQueue; import com.elastisys.scale.commons.json.JsonUtils; import com.elastisys.scale.commons.net.alerter.Alert; import com.elastisys.scale.commons.net.alerter.AlertSeverity; import com.elastisys.scale.commons.net.alerter.Alerter; import com.elastisys.scale.commons.net.alerter.http.HttpAlerter; import com.elastisys.scale.commons.net.alerter.http.HttpAlerterConfig; import com.elastisys.scale.commons.net.alerter.smtp.SmtpAlerter; import com.elastisys.scale.commons.net.alerter.smtp.SmtpAlerterConfig; import com.elastisys.scale.commons.net.host.HostUtils; import com.elastisys.scale.commons.util.time.UtcTime; import com.google.common.base.Joiner; import com.google.common.base.Optional; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.eventbus.EventBus; import com.google.common.util.concurrent.Atomics; import com.google.gson.JsonElement; import com.google.gson.JsonObject; public class BaseCloudPool implements CloudPool { /** {@link Logger} instance. */ static final Logger LOG = LoggerFactory.getLogger(BaseCloudPool.class); /** Maximum concurrency in the {@link #executorService}. 
*/ private static final int MAX_CONCURRENCY = 20; /** A cloud-specific management driver for the cloud pool. */ private CloudPoolDriver cloudDriver = null; /** The currently set configuration. */ private final AtomicReference<BaseCloudPoolConfig> config; /** {@link ExecutorService} handling execution of "background jobs". */ private final ScheduledExecutorService executorService; /** <code>true</code> if pool has been started. */ private final AtomicBoolean started; /** The desired size of the machine pool. */ private final AtomicReference<Integer> desiredSize; /** * {@link EventBus} used to post {@link Alert} events that are to be * forwarded by configured {@link Alerter}s (if any). */ private final EventBus eventBus; /** * Holds the list of configured {@link Alerter}s (if any). Each * {@link Alerter} is registered with the {@link EventBus} to forward posted * {@link Alert}s. */ private final AtomicReference<List<Alerter>> alerters; /** * Pool update task that periodically runs the {@link #updateMachinePool()} * method to (1) effectuate pending instance terminations in the / * termination queue and (2) replace terminated instances. */ private ScheduledFuture<?> poolUpdateTask; /** Lock to protect the machine pool from concurrent modifications. */ private final Object updateLock = new Object(); /** * The queue of already termination-marked instances (these will be used to * filter out instances already scheduled for termination from the candidate * set). */ private final TerminationQueue terminationQueue; /** * Constructs a new {@link BaseCloudPool} managing a given * {@link CloudPoolDriver}. * * @param cloudDriver * A cloud-specific management driver for the cloud pool. */ public BaseCloudPool(CloudPoolDriver cloudDriver) { this(cloudDriver, new EventBus()); } /** * Constructs a new {@link BaseCloudPool} managing a given * {@link CloudPoolDriver} and with an {@link EventBus} provided by the * caller. * * @param cloudDriver * A cloud-specific management driver for the cloud pool. * @param eventBus * The {@link EventBus} used to send {@link Alert}s and event * messages between components of the cloud pool. 
*/ public BaseCloudPool(CloudPoolDriver cloudDriver, EventBus eventBus) { checkArgument(cloudDriver != null, "cloudDriver is null"); checkArgument(eventBus != null, "eventBus is null"); this.cloudDriver = cloudDriver; this.eventBus = eventBus; ThreadFactory threadFactory = new ThreadFactoryBuilder() .setDaemon(true).setNameFormat("cloudpool-%d").build(); this.executorService = Executors.newScheduledThreadPool( MAX_CONCURRENCY, threadFactory); this.config = Atomics.newReference(); this.started = new AtomicBoolean(false); this.alerters = Atomics.newReference(); this.terminationQueue = new TerminationQueue(); this.desiredSize = Atomics.newReference(); } @Override public void configure(JsonObject jsonConfig) throws IllegalArgumentException, CloudPoolException { BaseCloudPoolConfig configuration = validate(jsonConfig); synchronized (this.updateLock) { this.config.set(configuration); if (isStarted()) { stop(); } start(); } } private BaseCloudPoolConfig validate(JsonObject jsonConfig) throws IllegalArgumentException { try { BaseCloudPoolConfig configuration = JsonUtils.toObject(jsonConfig, BaseCloudPoolConfig.class); configuration.validate(); return configuration; } catch (Exception e) { Throwables.propagateIfInstanceOf(e, IllegalArgumentException.class); throw new IllegalArgumentException( "failed to validate cloud pool configuration: " + e.getMessage(), e); } } @Override public Optional<JsonObject> getConfiguration() { BaseCloudPoolConfig currentConfig = this.config.get(); if (currentConfig == null) { return Optional.absent(); } return Optional.of(JsonUtils.toJson(currentConfig).getAsJsonObject()); } private void start() throws CloudPoolException { checkState(getConfiguration().isPresent(), "attempt to start cloud pool before being configured"); if (isStarted()) { return; } LOG.info("starting {} driving a {}", getClass().getSimpleName(), this.cloudDriver.getClass().getSimpleName()); // re-configure driver LOG.info("configuring cloud pool '{}'", config().getCloudPool() .getName()); this.cloudDriver.configure(config()); determineDesiredSizeIfUnset(); // start pool update task that periodically runs updateMachinepool() int poolUpdatePeriod = config().getPoolUpdatePeriod(); this.poolUpdateTask = this.executorService.scheduleWithFixedDelay( new PoolUpdateTask(), poolUpdatePeriod, poolUpdatePeriod, TimeUnit.SECONDS); setUpAlerters(config()); this.started.set(true); LOG.info(getClass().getSimpleName() + " started."); } /** * In case no {@link #desiredSize} has been set yet, this method determines * the (initial) desired size for the {@link CloudPoolDriver} as the current * size of the {@link CloudPoolDriver}. On failure to determine the pool * size (for example, due to a temporary cloud provider API outage), an * alert is sent out (if alerting has been set up). */ private void determineDesiredSizeIfUnset() { if (this.desiredSize.get() != null) { return; } try { LOG.debug("determining initial desired pool size"); setDesiredSizeIfUnset(getMachinePool()); } catch (CloudPoolException e) { String message = format( "failed to determine initial size of pool: %s\n%s", e.getMessage(), Throwables.getStackTraceAsString(e)); this.eventBus.post(new Alert(AlertTopics.POOL_FETCH.name(), AlertSeverity.ERROR, UtcTime.now(), message)); LOG.error(message); } } /** * Initializes the {@link #desiredSize} (if one hasn't already been set) * from a given {@link MachinePool} . * <p/> * If {@link #desiredSize} is already set, this method returns immediately. 
* * @param pool */ private void setDesiredSizeIfUnset(MachinePool pool) { if (this.desiredSize.get() != null) { return; } // exclude inactive instances since they aren't actually part // of the desiredSize (they are to be replaced) int effectiveSize = pool.getActiveMachines().size(); int allocated = pool.getAllocatedMachines().size(); this.desiredSize.set(effectiveSize); LOG.info("initial desiredSize is {} (allocated: {}, effective: {})", this.desiredSize, allocated, effectiveSize); } private void stop() { if (isStarted()) { LOG.debug("stopping {} ...", getClass().getSimpleName()); // cancel tasks (allow any running tasks to finish) this.poolUpdateTask.cancel(false); this.poolUpdateTask = null; takeDownAlerters(); this.started.set(false); } LOG.info(getClass().getSimpleName() + " stopped."); } boolean isStarted() { return this.started.get(); } @Override public MachinePool getMachinePool() throws CloudPoolException { checkState(getConfiguration().isPresent(), "cloud pool needs to be configured before use"); List<Machine> machines = listMachines(); MachinePool pool = new MachinePool(machines, UtcTime.now()); // if we haven't yet determined the desired size, we do so now setDesiredSizeIfUnset(pool); return pool; } Integer desiredSize() { return this.desiredSize.get(); } @Override public PoolSizeSummary getPoolSize() throws CloudPoolException { checkState(getConfiguration().isPresent(), "cloud pool needs to be configured before use"); MachinePool pool = getMachinePool(); return new PoolSizeSummary(this.desiredSize.get(), pool .getAllocatedMachines().size(), pool.getActiveMachines().size()); } /** * Lists the {@link Machine}s in the {@link CloudPoolDriver}. Raises a * {@link CloudPoolException} on failure and sends alert (if configured). * * @return * * @throws CloudPoolException */ private List<Machine> listMachines() { return this.cloudDriver.listMachines(); } @Override public void setDesiredSize(int desiredSize) throws IllegalArgumentException, CloudPoolException { checkState(getConfiguration().isPresent(), "cloud pool needs to be configured before use"); checkArgument(desiredSize >= 0, "negative desired pool size"); // prevent concurrent pool modifications synchronized (this.updateLock) { LOG.info("set desiredSize to {}", desiredSize); this.desiredSize.set(desiredSize); } } @Override public void terminateMachine(String machineId, boolean decrementDesiredSize) throws IllegalArgumentException, CloudPoolException { checkState(getConfiguration().isPresent(), "cloud pool needs to be configured before use"); // prevent concurrent pool modifications synchronized (this.updateLock) { LOG.debug("terminating {}", machineId); this.cloudDriver.terminateMachine(machineId); if (decrementDesiredSize) { // note: decrement unless desiredSize has been set to 0 (without // having been effectuated yet) int newSize = max(this.desiredSize.get() - 1, 0); LOG.debug("decrementing desiredSize to {}", newSize); setDesiredSize(newSize); } } terminationAlert(machineId); } @Override public void attachMachine(String machineId) throws IllegalArgumentException, CloudPoolException { checkState(getConfiguration().isPresent(), "cloud pool needs to be configured before use"); // prevent concurrent pool modifications synchronized (this.updateLock) { LOG.debug("attaching instance {} to pool", machineId); this.cloudDriver.attachMachine(machineId); // implicitly increases pool size setDesiredSize(this.desiredSize.get() + 1); } attachAlert(machineId); } @Override public void detachMachine(String machineId, boolean 
decrementDesiredSize) throws IllegalArgumentException, CloudPoolException { checkState(getConfiguration().isPresent(), "cloud pool needs to be configured before use"); // prevent concurrent pool modifications synchronized (this.updateLock) { LOG.debug("detaching {} from pool", machineId); this.cloudDriver.detachMachine(machineId); if (decrementDesiredSize) { // note: decrement unless desiredSize has been set to 0 (without // having been effectuated yet) int newSize = max(this.desiredSize.get() - 1, 0); LOG.debug("decrementing desiredSize to {}", newSize); setDesiredSize(newSize); } } detachAlert(machineId); } @Override public void setServiceState(String machineId, ServiceState serviceState) throws IllegalArgumentException { checkState(getConfiguration().isPresent(), "cloud pool needs to be configured before use"); LOG.debug("service state {} assigned to {}", serviceState.name(), machineId); this.cloudDriver.setServiceState(machineId, serviceState); serviceStateAlert(machineId, serviceState); } @Override public void setMembershipStatus(String machineId, MembershipStatus membershipStatus) throws NotFoundException, CloudPoolException { checkState(getConfiguration().isPresent(), "cloud pool needs to be configured before use"); LOG.debug("membership status {} assigned to {}", membershipStatus, machineId); this.cloudDriver.setMembershipStatus(machineId, membershipStatus); membershipStatusAlert(machineId, membershipStatus); } @Override public CloudPoolMetadata getMetadata() { return this.cloudDriver.getMetadata(); } /** * Sets up {@link Alerter}s, in case the configuration contains an * {@link AlertsConfig}. * * @param configuration */ private void setUpAlerters(BaseCloudPoolConfig configuration) { AlertsConfig alertsConfig = configuration.getAlerts(); if (alertsConfig == null) { LOG.debug("no alerts configuration, no alerters set up"); return; } List<Alerter> newAlerters = Lists.newArrayList(); Map<String, JsonElement> standardAlertMetadataTags = standardAlertMetadata(); // add SMTP alerters List<SmtpAlerterConfig> smtpAlerters = alertsConfig.getSmtpAlerters(); LOG.debug("adding {} SMTP alerter(s)", smtpAlerters.size()); for (SmtpAlerterConfig smtpAlerterConfig : smtpAlerters) { newAlerters.add(new SmtpAlerter(smtpAlerterConfig, standardAlertMetadataTags)); } // add HTTP alerters List<HttpAlerterConfig> httpAlerters = alertsConfig.getHttpAlerters(); LOG.debug("adding {} HTTP alerter(s)", httpAlerters.size()); for (HttpAlerterConfig httpAlerterConfig : httpAlerters) { newAlerters.add(new HttpAlerter(httpAlerterConfig, standardAlertMetadataTags)); } // register every alerter with event bus for (Alerter alerter : newAlerters) { this.eventBus.register(alerter); } this.alerters.set(newAlerters); } /** * Standard {@link Alert} tags to include in all {@link Alert} mails sent by * the configured {@link Alerter}s. * * @return */ private Map<String, JsonElement> standardAlertMetadata() { Map<String, JsonElement> standardTags = Maps.newHashMap(); List<String> ipv4Addresses = Lists.newArrayList(); for (InetAddress inetAddr : HostUtils.hostIpv4Addresses()) { ipv4Addresses.add(inetAddr.getHostAddress()); } standardTags.put("cloudPoolEndpointIps", JsonUtils.toJson(ipv4Addresses)); standardTags.put("cloudPoolName", JsonUtils.toJson(config().getCloudPool().getName())); return standardTags; } /** * Unregisters all configured {@link Alerter}s from the {@link EventBus}. 
*/ private void takeDownAlerters() { if (this.alerters.get() != null) { List<Alerter> alerterList = this.alerters.get(); for (Alerter alerter : alerterList) { this.eventBus.unregister(alerter); } } } BaseCloudPoolConfig config() { return this.config.get(); } private String poolName() { return config().getCloudPool().getName(); } private ScaleOutConfig scaleOutConfig() { return config().getScaleOutConfig(); } private ScaleInConfig scaleInConfig() { return config().getScaleInConfig(); } /** * Updates the size of the machine pool to match the currently set desired * size. This may involve terminating termination-due machines and placing * new server requests to replace terminated servers. * <p/> * Waits for the {@link #updateLock} to avoid concurrent pool updates. * * @throws CloudPoolException */ void updateMachinePool() throws CloudPoolException { // check if we need to determine desired size (it may not have been // possible on startup, e.g., due to cloud API being ureachable) determineDesiredSizeIfUnset(); if (this.desiredSize.get() == null) { LOG.warn("cannot update pool: haven't been able to " + "determine initial desired size"); return; } // prevent multiple threads from concurrently updating pool synchronized (this.updateLock) { int targetSize = this.desiredSize.get(); try { doPoolUpdate(targetSize); } catch (Throwable e) { String message = format("failed to adjust pool " + "\"%s\" to desired size %d: %s\n%s", poolName(), targetSize, e.getMessage(), Throwables.getStackTraceAsString(e)); this.eventBus.post(new Alert(AlertTopics.RESIZE.name(), AlertSeverity.ERROR, UtcTime.now(), message)); throw new CloudPoolException(message, e); } } } private void doPoolUpdate(int newSize) throws CloudPoolException { LOG.info("updating pool size to desired size {}", newSize); MachinePool pool = getMachinePool(); LOG.debug("current pool members: {}", Lists.transform(pool.getMachines(), Machine.toShortString())); this.terminationQueue.filter(pool.getActiveMachines()); ResizePlanner resizePlanner = new ResizePlanner(pool, this.terminationQueue, scaleInConfig() .getVictimSelectionPolicy(), scaleInConfig() .getInstanceHourMargin()); int netSize = resizePlanner.getNetSize(); ResizePlan resizePlan = resizePlanner.calculateResizePlan(newSize); if (resizePlan.hasScaleOutActions()) { scaleOut(resizePlan); } if (resizePlan.hasScaleInActions()) { List<ScheduledTermination> terminations = resizePlan .getToTerminate(); LOG.info("scheduling {} machine(s) for termination", terminations.size()); for (ScheduledTermination termination : terminations) { this.terminationQueue.add(termination); LOG.debug("scheduling machine {} for termination at {}", termination.getInstance().getId(), termination.getTerminationTime()); } LOG.debug("termination queue: {}", this.terminationQueue); } if (resizePlan.noChanges()) { LOG.info("pool is already properly sized ({})", netSize); } // effectuate scheduled terminations that are (over)due terminateOverdueMachines(); } private List<Machine> scaleOut(ResizePlan resizePlan) throws StartMachinesException { LOG.info("sparing {} machine(s) from termination, " + "placing {} new request(s)", resizePlan.getToSpare(), resizePlan.getToRequest()); this.terminationQueue.spare(resizePlan.getToSpare()); try { List<Machine> startedMachines = this.cloudDriver.startMachines( resizePlan.getToRequest(), scaleOutConfig()); startAlert(startedMachines); return startedMachines; } catch (StartMachinesException e) { // may have failed part-way through. 
notify of machines that were // started before error occurred. startAlert(e.getStartedMachines()); throw e; } } private List<Machine> terminateOverdueMachines() { LOG.debug("checking termination queue for overdue machines: {}", this.terminationQueue); List<ScheduledTermination> overdueInstances = this.terminationQueue .popOverdueInstances(); if (overdueInstances.isEmpty()) { return Collections.emptyList(); } List<Machine> terminated = Lists.newArrayList(); LOG.info("Terminating {} overdue machine(s): {}", overdueInstances.size(), overdueInstances); for (ScheduledTermination overdueInstance : overdueInstances) { String victimId = overdueInstance.getInstance().getId(); try { this.cloudDriver.terminateMachine(victimId); terminated.add(overdueInstance.getInstance()); } catch (Exception e) { // only warn, since a failure to terminate an instance is not // necessarily an error condition, as the machine, e.g., may // have been terminated by external means since we last checked // the pool members String message = format( "failed to terminate instance '%s': %s\n%s", victimId, e.getMessage(), Throwables.getStackTraceAsString(e)); LOG.warn(message); this.eventBus.post(new Alert(AlertTopics.RESIZE.name(), AlertSeverity.WARN, UtcTime.now(), message)); } } if (!terminated.isEmpty()) { terminationAlert(terminated); } return terminated; } /** * Post an {@link Alert} that new machines have been started in the pool. * * @param startedMachines * The new machine instances that have been started. */ void startAlert(List<Machine> startedMachines) { if (startedMachines.isEmpty()) { return; } String message = String.format( "%d machine(s) were requested from cloud pool %s", startedMachines.size(), poolName()); LOG.info(message); Map<String, JsonElement> tags = Maps.newHashMap(); List<String> startedMachineIds = Lists.transform(startedMachines, Machine.toId()); tags.put("requestedMachines", JsonUtils.toJson(Joiner.on(", ").join(startedMachineIds))); tags.put("poolMembers", poolMembersTag()); this.eventBus.post(new Alert(AlertTopics.RESIZE.name(), AlertSeverity.INFO, UtcTime.now(), message, tags)); } /** * Post an {@link Alert} that the members have been terminated from the * pool. * * @param terminatedMachines * The machine instances that were terminated. */ void terminationAlert(List<Machine> terminatedMachines) { String message = String.format( "%d machine(s) were terminated in cloud pool %s", terminatedMachines.size(), poolName()); LOG.info(message); Map<String, JsonElement> tags = Maps.newHashMap(); List<String> terminatedMachineIds = Lists.transform(terminatedMachines, Machine.toId()); tags.put("terminatedMachines", JsonUtils.toJson(Joiner.on(", ").join(terminatedMachineIds))); tags.put("poolMembers", poolMembersTag()); this.eventBus.post(new Alert(AlertTopics.RESIZE.name(), AlertSeverity.INFO, UtcTime.now(), message, tags)); } /** * Post an {@link Alert} that a machine was terminated from the pool. * * @param machineId */ void terminationAlert(String machineId) { Map<String, JsonElement> tags = Maps.newHashMap(); tags.put("terminatedMachines", JsonUtils.toJson(machineId)); tags.put("poolMembers", poolMembersTag()); String message = String.format("Terminated machine %s.", machineId); this.eventBus.post(new Alert(AlertTopics.RESIZE.name(), AlertSeverity.INFO, UtcTime.now(), message, tags)); } /** * Post an {@link Alert} that a machine was attached to the pool. 
* * @param machineId */ void attachAlert(String machineId) { Map<String, JsonElement> tags = ImmutableMap.of("attachedMachines", JsonUtils.toJson(machineId)); String message = String.format("Attached machine %s to pool.", machineId); this.eventBus.post(new Alert(AlertTopics.RESIZE.name(), AlertSeverity.INFO, UtcTime.now(), message, tags)); } /** * Post an {@link Alert} that a machine was detached from the pool. * * @param machineId */ void detachAlert(String machineId) { Map<String, JsonElement> tags = ImmutableMap.of("detachedMachines", JsonUtils.toJson(machineId)); String message = String.format("Detached machine %s from pool.", machineId); this.eventBus.post(new Alert(AlertTopics.RESIZE.name(), AlertSeverity.INFO, UtcTime.now(), message, tags)); } /** * Post an {@link Alert} that a pool member had its {@link ServiceState} * set. * * @param machineId * @param state */ void serviceStateAlert(String machineId, ServiceState state) { Map<String, JsonElement> tags = ImmutableMap.of(); String message = String.format( "Service state set to %s for machine %s.", state.name(), machineId); this.eventBus.post(new Alert(AlertTopics.SERVICE_STATE.name(), AlertSeverity.DEBUG, UtcTime.now(), message, tags)); } /** * Post an {@link Alert} that a pool member had its {@link MembershipStatus} * set. * * @param machineId * @param membershipStatus */ void membershipStatusAlert(String machineId, MembershipStatus membershipStatus) { Map<String, JsonElement> tags = ImmutableMap.of(); String message = String.format( "Membership status set to %s for machine %s.", membershipStatus, machineId); this.eventBus.post(new Alert(AlertTopics.MEMBERSHIP_STATUS.name(), AlertSeverity.DEBUG, UtcTime.now(), message, tags)); } private JsonElement poolMembersTag() { try { List<Machine> poolMembers = listMachines(); // exclude metadata field (noisy) List<Machine> shortFormatMembers = Lists.transform(poolMembers, Machine.toShortFormat()); return JsonUtils.toJson(shortFormatMembers); } catch (Exception e) { LOG.warn("failed to retrieve pool members: {}", e.getMessage()); return JsonUtils.toJson(String.format("N/A (call failed: %s)", e.getMessage())); } } /** * Task that, when executed, runs {@link BaseCloudPool#updateMachinePool()}. */ public class PoolUpdateTask implements Runnable { @Override public void run() { try { updateMachinePool(); } catch (CloudPoolException e) { LOG.error( format("machine pool update task failed: %s", e.getMessage()), e); } } } }
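/*
 * Illustrative sketch (not from the original project sources): PoolUpdateTask above is a plain
 * Runnable, so it is presumably driven by a scheduler elsewhere in BaseCloudPool. The wiring
 * below -- the executor, the 10-second interval and the placeholder Runnable -- is assumed for
 * illustration only and uses nothing beyond the JDK.
 */
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

class PoolUpdateSchedulingSketch {
    public static void main(String[] args) {
        ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor();
        // In the real pool this would be a PoolUpdateTask; a stand-in Runnable keeps the
        // sketch self-contained and compilable.
        Runnable task = () -> System.out.println("updateMachinePool() would run here");
        // Run the update loop at a fixed rate; the 10-second period is an assumption.
        executor.scheduleAtFixedRate(task, 0, 10, TimeUnit.SECONDS);
    }
}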
package org.opennms.netmgt.provision.persist; import java.io.File; import java.io.IOException; import java.net.URL; import java.util.ArrayList; import java.util.List; import org.apache.commons.io.FileUtils; import org.opennms.core.utils.LogUtils; import org.opennms.core.xml.JaxbUtils; import org.opennms.netmgt.provision.persist.foreignsource.ForeignSource; import org.opennms.netmgt.provision.persist.requisition.Requisition; import org.springframework.core.io.Resource; public class RequisitionFileUtils { static void createPath(final File fsPath) throws ForeignSourceRepositoryException { if (!fsPath.exists()) { if (!fsPath.mkdirs()) { throw new ForeignSourceRepositoryException("unable to create directory " + fsPath.getPath()); } } } static File encodeFileName(final String path, final String foreignSourceName) { return new File(path, foreignSourceName + ".xml"); } static ForeignSource getForeignSourceFromFile(final File inputFile) throws ForeignSourceRepositoryException { return JaxbUtils.unmarshal(ForeignSource.class, inputFile); } static Requisition getRequisitionFromFile(final File inputFile) throws ForeignSourceRepositoryException { try { return JaxbUtils.unmarshal(Requisition.class, inputFile); } catch (final Throwable e) { throw new ForeignSourceRepositoryException("unable to unmarshal " + inputFile.getPath(), e); } } static File getOutputFileForForeignSource(final String path, final ForeignSource foreignSource) { final File fsPath = new File(path); createPath(fsPath); return encodeFileName(path, foreignSource.getName()); } static File getOutputFileForRequisition(final String path, final Requisition requisition) { final File reqPath = new File(path); createPath(reqPath); return encodeFileName(path, requisition.getForeignSource()); } public static File createSnapshot(final ForeignSourceRepository repository, final String foreignSource) { final URL url = repository.getRequisitionURL(foreignSource); if (url == null) { LogUtils.warnf(RequisitionFileUtils.class, "Unable to get requisition URL for foreign source %s", foreignSource); return null; } final String sourceFileName = url.getFile(); if (sourceFileName == null) { LogUtils.warnf(RequisitionFileUtils.class, "Trying to create snapshot for %s, but getFile() doesn't return a value", url); return null; } final File sourceFile = new File(sourceFileName); if (!sourceFile.exists()) { LogUtils.warnf(RequisitionFileUtils.class, "Trying to create snapshot for %s, but %s does not exist.", url, sourceFileName); return null; } final String targetFileName = sourceFileName + '.' 
+ System.currentTimeMillis(); final File targetFile = new File(targetFileName); try { FileUtils.copyFile(sourceFile, targetFile, true); return targetFile; } catch (final IOException e) { LogUtils.warnf(RequisitionFileUtils.class, e, "Failed to copy %s to %s", sourceFileName, targetFileName); } return null; } public static List<File> findSnapshots(final ForeignSourceRepository repository, final String foreignSource) { final List<File> files = new ArrayList<File>(); URL url = null; try { url = repository.getRequisitionURL(foreignSource); } catch (final ForeignSourceRepositoryException e) { LogUtils.debugf(RequisitionFileUtils.class, e, "Can't find snapshots for %s, an exception occurred getting the requisition URL!", foreignSource); } if (url != null) { final String sourceFileName = url.getFile(); if (sourceFileName != null) { final File sourceFile = new File(sourceFileName); final File sourceDirectory = sourceFile.getParentFile(); for (final File entry : sourceDirectory.listFiles()) { if (isSnapshot(foreignSource, entry)) { files.add(entry); } } } } return files; } private static boolean isSnapshot(final String foreignSource, final File entry) { return !entry.isDirectory() && entry.getName().matches(foreignSource + ".xml.\\d+"); } public static void deleteResourceIfSnapshot(final Requisition requisition) { final Resource resource = requisition.getResource(); if (resource == null) return; try { final File resourceFile = resource.getFile(); if (isSnapshot(requisition.getForeignSource(), resourceFile)) { if (!resourceFile.delete()) { LogUtils.debugf(RequisitionFileUtils.class, "Failed to delete %s", resourceFile); } } } catch (final IOException e) { LogUtils.debugf(RequisitionFileUtils.class, e, "Resource %s can't be turned into a file, skipping snapshot delete detection.", resource); return; } } public static void deleteAllSnapshots(final ForeignSourceRepository repository) { for (final String foreignSource : repository.getActiveForeignSourceNames()) { final List<File> snapshots = findSnapshots(repository, foreignSource); for (final File snapshot : snapshots) { snapshot.delete(); } } } }
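/*
 * Illustrative sketch (not from the original project sources): RequisitionFileUtils names a
 * snapshot by appending the current time in milliseconds to the requisition file name
 * ("<foreign-source>.xml.<millis>") and later recognises snapshots with the regex
 * "<foreign-source>.xml.\d+". The sketch below exercises only that naming and matching logic
 * with the JDK; the directory and foreign-source name are made-up examples.
 */
import java.io.File;

class SnapshotNamingSketch {
    public static void main(String[] args) {
        String foreignSource = "example-source";                       // assumed name
        File requisition = new File("/tmp/imports", foreignSource + ".xml");
        File snapshot = new File(requisition.getParentFile(),
                requisition.getName() + "." + System.currentTimeMillis());
        // Same test RequisitionFileUtils.isSnapshot() applies to directory entries.
        boolean looksLikeSnapshot = !snapshot.isDirectory()
                && snapshot.getName().matches(foreignSource + ".xml.\\d+");
        System.out.println(snapshot.getName() + " -> snapshot? " + looksLikeSnapshot);
    }
}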
package org.myrobotlab.opencv; import static org.bytedeco.opencv.global.opencv_core.CV_32F; import static org.bytedeco.opencv.global.opencv_dnn.blobFromImage; import static org.bytedeco.opencv.global.opencv_dnn.readNetFromCaffe; import static org.bytedeco.opencv.global.opencv_imgproc.resize; import java.awt.Graphics2D; import java.awt.image.BufferedImage; import java.io.File; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.TreeMap; import org.bytedeco.javacpp.indexer.FloatIndexer; import org.bytedeco.javacv.OpenCVFrameConverter; import org.bytedeco.opencv.opencv_core.IplImage; import org.bytedeco.opencv.opencv_core.Mat; import org.bytedeco.opencv.opencv_core.Scalar; import org.bytedeco.opencv.opencv_core.Size; import org.bytedeco.opencv.opencv_dnn.Net; import org.myrobotlab.document.Classification; import org.myrobotlab.logging.LoggerFactory; import org.myrobotlab.math.geometry.Rectangle; import org.slf4j.Logger; public class OpenCVFilterFaceDetectDNN extends OpenCVFilter { private static final long serialVersionUID = 1L; public transient final static Logger log = LoggerFactory.getLogger(OpenCVFilterFaceDetectDNN.class.getCanonicalName()); // int x0, y0, x1, y1; private String FACE_LABEL = "face"; transient private Net net; /** * bounding boxes of faces */ final List<Rectangle> bb = new ArrayList<>(); final Map<String, List<Classification>> classifications = new TreeMap<>(); // if deps were checked in it would be like this /* public String model = FileIO.gluePaths(Service.getResourceDir(OpenCV.class),"models/facedetectdnn/res10_300x300_ssd_iter_140000.caffemodel"); public String protoTxt = FileIO.gluePaths(Service.getResourceDir(OpenCV.class),"models/facedetectdnn/deploy.prototxt.txt"); */ // but public String model = "resource/OpenCV/models/facedetectdnn/res10_300x300_ssd_iter_140000.caffemodel"; public String protoTxt = "resource/OpenCV/models/facedetectdnn/deploy.prototxt.txt"; double threshold = .2; transient private final OpenCVFrameConverter.ToIplImage grabberConverter = new OpenCVFrameConverter.ToIplImage(); transient private OpenCVFrameConverter.ToIplImage converterToIpl = new OpenCVFrameConverter.ToIplImage(); public OpenCVFilterFaceDetectDNN() { this(null); } public OpenCVFilterFaceDetectDNN(String name) { super(name); loadModel(); } public void loadModel() { // log.info("loading DNN caffee model for face recogntion.."); if (!new File(protoTxt).exists()) { log.warn("Caffe DNN Face Detector ProtoTxt not found {}", protoTxt); return; } if (!new File(model).exists()) { log.warn("Caffe DNN Face Detector model not found {}", model); return; } net = readNetFromCaffe(protoTxt, model); log.info("Caffe DNN Face Detector model loaded."); } @Override public void imageChanged(IplImage image) { // TODO: noOp? 
} @Override public IplImage process(IplImage image) { int h = image.height(); int w = image.width(); // TODO: cv2.resize(image, (300, 300)) Mat srcMat = grabberConverter.convertToMat(grabberConverter.convert(image)); Mat inputMat = new Mat(); resize(srcMat, inputMat, new Size(300, 300));// resize the image to match // the input size of the model // create a 4-dimensional blob from image with NCHW (Number of images in the // batch -for training only-, Channel, Height, Width) // dimensions order, // for more details read the official docs at // https://docs.opencv.org/trunk/d6/d0f/group__dnn.html#gabd0e76da3c6ad15c08b01ef21ad55dd8 Mat blob = blobFromImage(inputMat, 1.0, new Size(300, 300), new Scalar(104.0, 177.0, 123.0, 0), false, false, CV_32F); // log.info("Input Blob : {}", blob); // set the input to network model if (blob == null) { return image; } net.setInput(blob); // feed forward the input to the network to get the output matrix Mat output = net.forward(); Mat ne = new Mat(new Size(output.size(3), output.size(2)), CV_32F, output.ptr(0, 0));// extract // matrix // for // output // matrix // with // form // (number // detections FloatIndexer srcIndexer = ne.createIndexer(); // create indexer to access // elements of the matrix // log.info("Output Size: {}", output.size(3)); bb.clear(); classifications.clear(); for (int i = 0; i < output.size(3); i++) {// iterate to extract elements float confidence = srcIndexer.get(i, 2); // log.info("Getting element {} confidence {}", i, confidence); float f1 = srcIndexer.get(i, 3); float f2 = srcIndexer.get(i, 4); float f3 = srcIndexer.get(i, 5); float f4 = srcIndexer.get(i, 6); if (confidence > threshold) { // log.info("Passes the threshold test."); float tx = f1 * w;// top left point's x float ty = f2 * h;// top left point's y float bx = f3 * w;// bottom right point's x float by = f4 * h;// bottom right point's y Rectangle rect = new Rectangle(tx, ty, bx - tx, by - ty); List<Classification> cl = null; Classification classification = new Classification(FACE_LABEL, confidence, rect); if (classifications.containsKey(FACE_LABEL)) { classifications.get(FACE_LABEL).add(classification); } else { cl = new ArrayList<>(); cl.add(classification); classifications.put(FACE_LABEL, cl); } bb.add(rect); } } publishClassification(classifications); IplImage result = grabberConverter.convert(converterToIpl.convert(srcMat)); ne.close(); return result; } @Override public BufferedImage processDisplay(Graphics2D graphics, BufferedImage image) { for (String label : classifications.keySet()) { List<Classification> cl = classifications.get(label); for (Classification c : cl) { Rectangle rect = c.getBoundingBox(); graphics.drawString(String.format("%s %.3f", c.getLabel(), c.getConfidence()), (int) rect.x, (int) rect.y); graphics.drawRect((int) rect.x, (int) rect.y, (int) rect.width, (int) rect.height); } } return image; } }
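/*
 * Illustrative sketch (not from the original project sources): the detector above returns
 * corner coordinates as fractions of the input width and height, which process() scales back
 * to pixels before building a Rectangle. The standalone method below repeats just that
 * arithmetic; the image size and detection values are invented examples.
 */
class DetectionScalingSketch {
    /** Scales normalized [0..1] corner coordinates to a pixel box {x, y, width, height}. */
    static float[] toPixelBox(float x1, float y1, float x2, float y2, int imageWidth, int imageHeight) {
        float tx = x1 * imageWidth;   // top-left x
        float ty = y1 * imageHeight;  // top-left y
        float bx = x2 * imageWidth;   // bottom-right x
        float by = y2 * imageHeight;  // bottom-right y
        return new float[] { tx, ty, bx - tx, by - ty };
    }

    public static void main(String[] args) {
        float[] box = toPixelBox(0.25f, 0.30f, 0.55f, 0.80f, 640, 480); // assumed values
        System.out.printf("x=%.1f y=%.1f w=%.1f h=%.1f%n", box[0], box[1], box[2], box[3]);
    }
}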
package org.neo4j.kernel.impl.transaction.xaframework; import java.io.File; import java.io.IOException; import java.io.RandomAccessFile; import java.nio.ByteBuffer; import java.nio.channels.FileChannel; import java.nio.channels.ReadableByteChannel; import java.util.HashMap; import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; import javax.transaction.xa.XAException; import javax.transaction.xa.Xid; import org.neo4j.kernel.impl.transaction.TransactionFailureException; import org.neo4j.kernel.impl.transaction.XidImpl; import org.neo4j.kernel.impl.util.ArrayMap; import org.neo4j.kernel.impl.util.FileUtils; /** * <CODE>XaLogicalLog</CODE> is a transaction and logical log combined. In * this log information about the transaction (such as started, prepared and * committed) will be written. All commands participating in the transaction * will also be written to the log. * <p> * Normally you don't have to do anything with this log except open it after it * has been instanciated (see {@link XaContainer}). The only method that may be * of use when implementing a XA compatible resource is the * {@link #getCurrentTxIdentifier}. Leave everything else be unless you know * what you're doing. * <p> * When the log is opened it will be scaned for uncompleted transactions and * those transactions will be re-created. When scan of log is complete all * transactions that hasn't entered prepared state will be marked as done * (implies rolledback) and dropped. All transactions that have been prepared * will be held in memory until the transaction manager tells them to commit. * Transaction that already started commit but didn't get flagged as done will * be re-committed. */ public class XaLogicalLog { private Logger log; // empty record due to memory mapped file private static final byte EMPTY = (byte) 0; // tx has started private static final byte TX_START = (byte) 1; // tx has been prepared private static final byte TX_PREPARE = (byte) 2; // a XaCommand in a transaction private static final byte COMMAND = (byte) 3; // done, either a read only tx or rolledback/forget private static final byte DONE = (byte) 4; // tx one-phase commit private static final byte TX_1P_COMMIT = (byte) 5; // tx two-phase commit private static final byte TX_2P_COMMIT = (byte) 6; private static final char CLEAN = 'C'; private static final char LOG1 = '1'; private static final char LOG2 = '2'; private FileChannel fileChannel = null; private final ByteBuffer buffer; private LogBuffer writeBuffer = null; private long logVersion = 0; private ArrayMap<Integer,StartEntry> xidIdentMap = new ArrayMap<Integer,StartEntry>( 4, false, true ); private Map<Integer,XaTransaction> recoveredTxMap = new HashMap<Integer,XaTransaction>(); private int nextIdentifier = 1; private boolean scanIsComplete = false; private String fileName = null; private final XaResourceManager xaRm; private final XaCommandFactory cf; private final XaTransactionFactory xaTf; private char currentLog = CLEAN; private boolean keepLogs = false; private boolean autoRotate = true; private long rotateAtSize = 10*1024*1024; // 10MB private boolean backupSlave = false; private boolean useMemoryMapped = true; XaLogicalLog( String fileName, XaResourceManager xaRm, XaCommandFactory cf, XaTransactionFactory xaTf, Map<Object,Object> config ) { this.fileName = fileName; this.xaRm = xaRm; this.cf = cf; this.xaTf = xaTf; this.useMemoryMapped = getMemoryMapped( config ); log = Logger.getLogger( this.getClass().getName() + "/" + fileName ); buffer = 
ByteBuffer.allocateDirect( 9 + Xid.MAXGTRIDSIZE + Xid.MAXBQUALSIZE * 10 ); } private boolean getMemoryMapped( Map<Object,Object> config ) { if ( config != null ) { String value = (String) config.get( "use_memory_mapped_buffers" ); if ( value != null && value.toLowerCase().equals( "false" ) ) { return false; } } return true; } synchronized void open() throws IOException { String activeFileName = fileName + ".active"; if ( !new File( activeFileName ).exists() ) { if ( new File( fileName ).exists() ) { // old < b8 xaframework with no log rotation and we need to // do recovery on it open( fileName ); } else { open( fileName + ".1" ); setActiveLog( LOG1 ); } } else { FileChannel fc = new RandomAccessFile( activeFileName , "rw" ).getChannel(); byte bytes[] = new byte[256]; ByteBuffer buf = ByteBuffer.wrap( bytes ); int read = fc.read( buf ); fc.close(); if ( read != 4 ) { throw new IllegalStateException( "Read " + read + " bytes from " + activeFileName + " but expected 4" ); } buf.flip(); char c = buf.asCharBuffer().get(); File copy = new File( fileName + ".copy" ); if ( copy.exists() ) { if ( !copy.delete() ) { log.warning( "Unable to delete " + copy.getName() ); } } if ( c == CLEAN ) { // clean String newLog = fileName + ".1"; if ( new File( newLog ).exists() ) { throw new IllegalStateException( "Active marked as clean but log " + newLog + " exist" ); } open( newLog ); setActiveLog( LOG1 ); } else if ( c == LOG1 ) { String newLog = fileName + ".1"; if ( !new File( newLog ).exists() ) { throw new IllegalStateException( "Active marked as 1 but no " + newLog + " exist" ); } currentLog = LOG1; File otherLog = new File( fileName + ".2" ); if ( otherLog.exists() ) { if ( !otherLog.delete() ) { log.warning( "Unable to delete " + copy.getName() ); } } open( newLog ); } else if ( c == LOG2 ) { String newLog = fileName + ".2"; if ( !new File( newLog ).exists() ) { throw new IllegalStateException( "Active marked as 2 but no " + newLog + " exist" ); } File otherLog = new File( fileName + ".1" ); if ( otherLog.exists() ) { if ( !otherLog.delete() ) { log.warning( "Unable to delete " + copy.getName() ); } } currentLog = LOG2; open( newLog ); } else { throw new IllegalStateException( "Unknown active log: " + c ); } } if ( !useMemoryMapped ) { writeBuffer = new DirectMappedLogBuffer( fileChannel ); } else { writeBuffer = new MemoryMappedLogBuffer( fileChannel ); } } private void open( String fileToOpen ) throws IOException { fileChannel = new RandomAccessFile( fileToOpen, "rw" ).getChannel(); if ( fileChannel.size() != 0 ) { doInternalRecovery( fileToOpen ); } else { logVersion = xaTf.getCurrentVersion(); buffer.clear(); buffer.putLong( logVersion ); buffer.flip(); fileChannel.write( buffer ); scanIsComplete = true; } } public boolean scanIsComplete() { return scanIsComplete; } private int getNextIdentifier() { nextIdentifier++; if ( nextIdentifier < 0 ) { nextIdentifier = 1; } return nextIdentifier; } // returns identifier for transaction // [TX_START][xid[gid.length,bid.lengh,gid,bid]][identifier][format id] public synchronized int start( Xid xid ) throws XAException { if ( backupSlave ) { throw new XAException( "Resource is configured as backup slave, " + "no new transactions can be started for " + fileName + "." 
+ currentLog ); } int xidIdent = getNextIdentifier(); try { byte globalId[] = xid.getGlobalTransactionId(); byte branchId[] = xid.getBranchQualifier(); int formatId = xid.getFormatId(); long position = writeBuffer.getFileChannelPosition(); writeBuffer.put( TX_START ).put( (byte) globalId.length ).put( (byte) branchId.length ).put( globalId ).put( branchId ) .putInt( xidIdent ).putInt( formatId ); xidIdentMap.put( xidIdent, new StartEntry( xid, position ) ); } catch ( IOException e ) { throw new XAException( "Logical log couldn't start transaction: " + e ); } return xidIdent; } private boolean readTxStartEntry() throws IOException { // get the global id long position = fileChannel.position(); buffer.clear(); buffer.limit( 1 ); if ( fileChannel.read( buffer ) != buffer.limit() ) { return false; } buffer.flip(); byte globalIdLength = buffer.get(); // get the branchId id buffer.clear(); buffer.limit( 1 ); if ( fileChannel.read( buffer ) != buffer.limit() ) { return false; } buffer.flip(); byte branchIdLength = buffer.get(); byte globalId[] = new byte[globalIdLength]; ByteBuffer tmpBuffer = ByteBuffer.wrap( globalId ); if ( fileChannel.read( tmpBuffer ) != globalId.length ) { return false; } byte branchId[] = new byte[branchIdLength]; tmpBuffer = ByteBuffer.wrap( branchId ); if ( fileChannel.read( tmpBuffer ) != branchId.length ) { return false; } // get the tx identifier buffer.clear(); buffer.limit( 4 ); if ( fileChannel.read( buffer ) != buffer.limit() ) { return false; } buffer.flip(); int identifier = buffer.getInt(); if ( identifier >= nextIdentifier ) { nextIdentifier = (identifier + 1); } // get the format id buffer.clear(); buffer.limit( 4 ); if ( fileChannel.read( buffer ) != buffer.limit() ) { return false; } buffer.flip(); int formatId = buffer.getInt(); // re-create the transaction Xid xid = new XidImpl( globalId, branchId, formatId ); xidIdentMap.put( identifier, new StartEntry( xid, position ) ); XaTransaction xaTx = xaTf.create( identifier ); xaTx.setRecovered(); recoveredTxMap.put( identifier, xaTx ); xaRm.injectStart( xid, xaTx ); return true; } // [TX_PREPARE][identifier] public synchronized void prepare( int identifier ) throws XAException { assert xidIdentMap.get( identifier ) != null; try { writeBuffer.put( TX_PREPARE ).putInt( identifier ); writeBuffer.force(); } catch ( IOException e ) { throw new XAException( "Logical log unable to mark prepare [" + identifier + "] " + e ); } } private boolean readTxPrepareEntry() throws IOException { // get the tx identifier buffer.clear(); buffer.limit( 4 ); if ( fileChannel.read( buffer ) != buffer.limit() ) { return false; } buffer.flip(); int identifier = buffer.getInt(); StartEntry entry = xidIdentMap.get( identifier ); if ( entry == null ) { return false; } Xid xid = entry.getXid(); if ( xaRm.injectPrepare( xid ) ) { // read only we can remove xidIdentMap.remove( identifier ); recoveredTxMap.remove( identifier ); } return true; } // [TX_1P_COMMIT][identifier] public synchronized void commitOnePhase( int identifier ) throws XAException { assert xidIdentMap.get( identifier ) != null; try { writeBuffer.put( TX_1P_COMMIT ).putInt( identifier ); writeBuffer.force(); } catch ( IOException e ) { throw new XAException( "Logical log unable to mark 1P-commit [" + identifier + "] " + e ); } } private boolean readTxOnePhaseCommit() throws IOException { // get the tx identifier buffer.clear(); buffer.limit( 4 ); if ( fileChannel.read( buffer ) != buffer.limit() ) { return false; } buffer.flip(); int identifier = buffer.getInt(); StartEntry 
entry = xidIdentMap.get( identifier ); if ( entry == null ) { return false; } Xid xid = entry.getXid(); try { xaRm.injectOnePhaseCommit( xid ); } catch ( XAException e ) { e.printStackTrace(); throw new IOException( e.getMessage() ); } return true; } // [DONE][identifier] public synchronized void done( int identifier ) throws XAException { if ( backupSlave ) { return; } assert xidIdentMap.get( identifier ) != null; try { writeBuffer.put( DONE ).putInt( identifier ); xidIdentMap.remove( identifier ); } catch ( IOException e ) { throw new XAException( "Logical log unable to mark as done [" + identifier + "] " + e ); } } // [DONE][identifier] called from XaResourceManager during internal recovery synchronized void doneInternal( int identifier ) throws IOException { buffer.clear(); buffer.put( DONE ).putInt( identifier ); buffer.flip(); fileChannel.write( buffer ); xidIdentMap.remove( identifier ); } private boolean readDoneEntry() throws IOException { // get the tx identifier buffer.clear(); buffer.limit( 4 ); if ( fileChannel.read( buffer ) != buffer.limit() ) { return false; } buffer.flip(); int identifier = buffer.getInt(); StartEntry entry = xidIdentMap.get( identifier ); if ( entry == null ) { return false; } Xid xid = entry.getXid(); xaRm.pruneXid( xid ); xidIdentMap.remove( identifier ); recoveredTxMap.remove( identifier ); return true; } // [TX_2P_COMMIT][identifier] public synchronized void commitTwoPhase( int identifier ) throws XAException { assert xidIdentMap.get( identifier ) != null; try { writeBuffer.put( TX_2P_COMMIT ).putInt( identifier ); writeBuffer.force(); } catch ( IOException e ) { throw new XAException( "Logical log unable to mark 2PC [" + identifier + "] " + e ); } } private boolean readTxTwoPhaseCommit() throws IOException { // get the tx identifier buffer.clear(); buffer.limit( 4 ); if ( fileChannel.read( buffer ) != buffer.limit() ) { return false; } buffer.flip(); int identifier = buffer.getInt(); StartEntry entry = xidIdentMap.get( identifier ); if ( entry == null ) { return false; } Xid xid = entry.getXid(); if ( xid == null ) { return false; } try { xaRm.injectTwoPhaseCommit( xid ); } catch ( XAException e ) { e.printStackTrace(); throw new IOException( e.getMessage() ); } return true; } // [COMMAND][identifier][COMMAND_DATA] public synchronized void writeCommand( XaCommand command, int identifier ) throws IOException { checkLogRotation(); assert xidIdentMap.get( identifier ) != null; writeBuffer.put( COMMAND ).putInt( identifier ); command.writeToFile( writeBuffer ); // fileChannel, buffer ); } private boolean readCommandEntry() throws IOException { buffer.clear(); buffer.limit( 4 ); if ( fileChannel.read( buffer ) != buffer.limit() ) { return false; } buffer.flip(); int identifier = buffer.getInt(); XaCommand command = cf.readCommand( fileChannel, buffer ); if ( command == null ) { // readCommand returns null if full command couldn't be loaded return false; } command.setRecovered(); XaTransaction xaTx = recoveredTxMap.get( identifier ); xaTx.injectCommand( command ); return true; } private void checkLogRotation() throws IOException { if ( autoRotate && writeBuffer.getFileChannelPosition() >= rotateAtSize ) { long currentPos = writeBuffer.getFileChannelPosition(); long firstStartEntry = getFirstStartEntry( currentPos ); // only rotate if no huge tx is running if ( ( currentPos - firstStartEntry ) < rotateAtSize / 2 ) { rotate(); } } } private void renameCurrentLogFileAndIncrementVersion( String logFileName, long endPosition ) throws IOException { File file = 
new File( logFileName ); if ( !file.exists() ) { throw new IOException( "Logical log[" + logFileName + "] not found" ); } String newName = fileName + ".v" + xaTf.getAndSetNewVersion(); File newFile = new File( newName ); boolean renamed = FileUtils.renameFile( file, newFile ); if ( !renamed ) { throw new IOException( "Failed to rename log to: " + newName ); } else { try { FileChannel channel = new RandomAccessFile( newName, "rw" ).getChannel(); FileUtils.truncateFile( channel, endPosition ); } catch ( IOException e ) { log.log( Level.WARNING, "Failed to truncate log at correct size", e ); } } } private void deleteCurrentLogFile( String logFileName ) throws IOException { File file = new File( logFileName ); if ( !file.exists() ) { throw new IOException( "Logical log[" + logFileName + "] not found" ); } boolean deleted = FileUtils.deleteFile( file ); if ( !deleted ) { log.warning( "Unable to delete clean logical log[" + logFileName + "]" ); } } private void releaseCurrentLogFile() throws IOException { if ( writeBuffer != null ) { writeBuffer.force(); writeBuffer = null; } fileChannel.close(); fileChannel = null; } public synchronized void close() throws IOException { if ( fileChannel == null || !fileChannel.isOpen() ) { log.fine( "Logical log: " + fileName + " already closed" ); return; } long endPosition = writeBuffer.getFileChannelPosition(); if ( xidIdentMap.size() > 0 ) { log.info( "Close invoked with " + xidIdentMap.size() + " running transaction(s). " ); writeBuffer.force(); writeBuffer = null; fileChannel.close(); log.info( "Dirty log: " + fileName + "." + currentLog + " now closed. Recovery will be started automatically next " + "time it is opened." ); return; } releaseCurrentLogFile(); char logWas = currentLog; if ( currentLog != CLEAN ) // again special case, see above { setActiveLog( CLEAN ); } if ( !keepLogs || backupSlave ) { if ( logWas == CLEAN ) { // special case going from old xa version with no log rotation // and we started with a recovery deleteCurrentLogFile( fileName ); } else { deleteCurrentLogFile( fileName + "." + logWas ); } } else { renameCurrentLogFileAndIncrementVersion( fileName + "." + logWas, endPosition ); } } private void doInternalRecovery( String logFileName ) throws IOException { log.info( "Non clean shutdown detected on log [" + logFileName + "]. Recovery started ..." ); // get log creation time buffer.clear(); buffer.limit( 8 ); if ( fileChannel.read( buffer ) != 8 ) { log.info( "Unable to read timestamp information, " + "no records in logical log." ); fileChannel.close(); boolean success = FileUtils.renameFile( new File( logFileName ), new File( logFileName + "_unknown_timestamp_" + System.currentTimeMillis() + ".log" ) ); assert success; fileChannel = new RandomAccessFile( logFileName, "rw" ).getChannel(); return; } buffer.flip(); logVersion = buffer.getLong(); log.fine( "Logical log version: " + logVersion ); long logEntriesFound = 0; long lastEntryPos = fileChannel.position(); while ( readEntry() ) { logEntriesFound++; lastEntryPos = fileChannel.position(); } // make sure we overwrite any broken records fileChannel.position( lastEntryPos ); scanIsComplete = true; log.fine( "Internal recovery completed, scanned " + logEntriesFound + " log entries." ); xaRm.checkXids(); if ( xidIdentMap.size() == 0 ) { log.fine( "Recovery completed." ); } else { log.fine( "[" + logFileName + "] Found " + xidIdentMap.size() + " prepared 2PC transactions." 
); for ( StartEntry entry : xidIdentMap.values() ) { log.fine( "[" + logFileName + "] 2PC xid[" + entry.getXid() + "]" ); } } recoveredTxMap.clear(); } // for testing, do not use! void reset() { xidIdentMap.clear(); recoveredTxMap.clear(); } private boolean readEntry() throws IOException { buffer.clear(); buffer.limit( 1 ); if ( fileChannel.read( buffer ) != buffer.limit() ) { // ok no more entries we're done return false; } buffer.flip(); byte entry = buffer.get(); switch ( entry ) { case TX_START: return readTxStartEntry(); case TX_PREPARE: return readTxPrepareEntry(); case TX_1P_COMMIT: return readTxOnePhaseCommit(); case TX_2P_COMMIT: return readTxTwoPhaseCommit(); case COMMAND: return readCommandEntry(); case DONE: return readDoneEntry(); case EMPTY: fileChannel.position( fileChannel.position() - 1 ); return false; default: throw new IOException( "Internal recovery failed, " + "unknown log entry[" + entry + "]" ); } } private ArrayMap<Thread,Integer> txIdentMap = new ArrayMap<Thread,Integer>( 5, true, true ); void registerTxIdentifier( int identifier ) { txIdentMap.put( Thread.currentThread(), identifier ); } void unregisterTxIdentifier() { txIdentMap.remove( Thread.currentThread() ); } /** * If the current thread is committing a transaction the identifier of that * {@link XaTransaction} can be obtained invoking this method. * * @return the identifier of the transaction committing or <CODE>-1</CODE> * if current thread isn't committing any transaction */ public int getCurrentTxIdentifier() { Integer intValue = txIdentMap.get( Thread.currentThread() ); if ( intValue != null ) { return intValue; } return -1; } public ReadableByteChannel getLogicalLog( long version ) throws IOException { String name = fileName + ".v" + version; if ( !new File( name ).exists() ) { throw new IOException( "No such log version:" + version ); } return new RandomAccessFile( name, "r" ).getChannel(); } public long getLogicalLogLength( long version ) { String name = fileName + ".v" + version; File file = new File( name ); if ( !file.exists() ) { return -1; } return file.length(); } public boolean hasLogicalLog( long version ) { String name = fileName + ".v" + version; return new File( name ).exists(); } public boolean deleteLogicalLog( long version ) { String name = fileName + ".v" + version; File file = new File(name ); if ( file.exists() ) { return FileUtils.deleteFile( file ); } return false; } public void makeBackupSlave() { if ( xidIdentMap.size() > 0 ) { throw new IllegalStateException( "There are active transactions" ); } backupSlave = true; } private static class LogApplier { private final ReadableByteChannel byteChannel; private final ByteBuffer buffer; private final XaTransactionFactory xaTf; private final XaResourceManager xaRm; private final XaCommandFactory xaCf; private final ArrayMap<Integer,StartEntry> xidIdentMap; private final Map<Integer,XaTransaction> recoveredTxMap; LogApplier( ReadableByteChannel byteChannel, ByteBuffer buffer, XaTransactionFactory xaTf, XaResourceManager xaRm, XaCommandFactory xaCf, ArrayMap<Integer,StartEntry> xidIdentMap, Map<Integer,XaTransaction> recoveredTxMap ) { this.byteChannel = byteChannel; this.buffer = buffer; this.xaTf = xaTf; this.xaRm = xaRm; this.xaCf = xaCf; this.xidIdentMap = xidIdentMap; this.recoveredTxMap = recoveredTxMap; } boolean readAndApplyEntry() throws IOException { buffer.clear(); buffer.limit( 1 ); if ( byteChannel.read( buffer ) != buffer.limit() ) { // ok no more entries we're done return false; } buffer.flip(); byte entry = buffer.get(); 
switch ( entry ) { case TX_START: readTxStartEntry(); return true; case TX_PREPARE: readTxPrepareEntry(); return true; case TX_1P_COMMIT: readAndApplyTxOnePhaseCommit(); return true; case TX_2P_COMMIT: readAndApplyTxTwoPhaseCommit(); return true; case COMMAND: readCommandEntry(); return true; case DONE: readDoneEntry(); return true; case EMPTY: return false; default: throw new IOException( "Internal recovery failed, " + "unknown log entry[" + entry + "]" ); } } private void readTxStartEntry() throws IOException { // get the global id buffer.clear(); buffer.limit( 1 ); if ( byteChannel.read( buffer ) != buffer.limit() ) { throw new IOException( "Unable to read tx start entry" ); } buffer.flip(); byte globalIdLength = buffer.get(); // get the branchId id buffer.clear(); buffer.limit( 1 ); if ( byteChannel.read( buffer ) != buffer.limit() ) { throw new IOException( "Unable to read tx start entry" ); } buffer.flip(); byte branchIdLength = buffer.get(); byte globalId[] = new byte[globalIdLength]; ByteBuffer tmpBuffer = ByteBuffer.wrap( globalId ); if ( byteChannel.read( tmpBuffer ) != globalId.length ) { throw new IOException( "Unable to read tx start entry" ); } byte branchId[] = new byte[branchIdLength]; tmpBuffer = ByteBuffer.wrap( branchId ); if ( byteChannel.read( tmpBuffer ) != branchId.length ) { throw new IOException( "Unable to read tx start entry" ); } // get the tx identifier buffer.clear(); buffer.limit( 4 ); if ( byteChannel.read( buffer ) != buffer.limit() ) { throw new IOException( "Unable to read tx start entry" ); } buffer.flip(); int identifier = buffer.getInt(); // get the format id buffer.clear(); buffer.limit( 4 ); if ( byteChannel.read( buffer ) != buffer.limit() ) { throw new IOException( "Unable to read tx start entry" ); } buffer.flip(); int formatId = buffer.getInt(); // re-create the transaction Xid xid = new XidImpl( globalId, branchId, formatId ); xidIdentMap.put( identifier, new StartEntry( xid, -1 ) ); XaTransaction xaTx = xaTf.create( identifier ); xaTx.setRecovered(); recoveredTxMap.put( identifier, xaTx ); xaRm.injectStart( xid, xaTx ); } private void readTxPrepareEntry() throws IOException { // get the tx identifier buffer.clear(); buffer.limit( 4 ); if ( byteChannel.read( buffer ) != buffer.limit() ) { throw new IOException( "Unable to read tx prepare entry" ); } buffer.flip(); int identifier = buffer.getInt(); StartEntry entry = xidIdentMap.get( identifier ); if ( entry == null ) { throw new IOException( "Unable to read tx prepeare entry" ); } Xid xid = entry.getXid(); if ( xaRm.injectPrepare( xid ) ) { // read only, we can remove xidIdentMap.remove( identifier ); recoveredTxMap.remove( identifier ); } } private void readAndApplyTxOnePhaseCommit() throws IOException { // get the tx identifier buffer.clear(); buffer.limit( 4 ); if ( byteChannel.read( buffer ) != buffer.limit() ) { throw new IOException( "Unable to read tx 1PC entry" ); } buffer.flip(); int identifier = buffer.getInt(); StartEntry entry = xidIdentMap.get( identifier ); if ( entry == null ) { throw new IOException( "Unable to read tx prepeare entry" ); } Xid xid = entry.getXid(); try { xaRm.commit( xid, true ); } catch ( XAException e ) { e.printStackTrace(); throw new IOException( e.getMessage() ); } } private void readAndApplyTxTwoPhaseCommit() throws IOException { // get the tx identifier buffer.clear(); buffer.limit( 4 ); if ( byteChannel.read( buffer ) != buffer.limit() ) { throw new IOException( "Unable to read tx 2PC entry" ); } buffer.flip(); int identifier = buffer.getInt(); StartEntry 
entry = xidIdentMap.get( identifier ); if ( entry == null ) { throw new IOException( "Unable to read tx prepeare entry" ); } Xid xid = entry.getXid(); try { xaRm.commit( xid, true ); } catch ( XAException e ) { e.printStackTrace(); throw new IOException( e.getMessage() ); } } private void readCommandEntry() throws IOException { buffer.clear(); buffer.limit( 4 ); if ( byteChannel.read( buffer ) != buffer.limit() ) { throw new IOException( "Unable to read tx command entry" ); } buffer.flip(); int identifier = buffer.getInt(); XaCommand command = xaCf.readCommand( byteChannel, buffer ); if ( command == null ) { throw new IOException( "Unable to read command entry" ); } command.setRecovered(); XaTransaction xaTx = recoveredTxMap.get( identifier ); xaTx.injectCommand( command ); } private boolean readDoneEntry() throws IOException { // get the tx identifier buffer.clear(); buffer.limit( 4 ); if ( byteChannel.read( buffer ) != buffer.limit() ) { return false; } buffer.flip(); int identifier = buffer.getInt(); StartEntry entry = xidIdentMap.get( identifier ); if ( entry == null ) { throw new IOException( "Unable to read tx done entry" ); } Xid xid = entry.getXid(); xaRm.pruneXidIfExist( xid ); xidIdentMap.remove( identifier ); recoveredTxMap.remove( identifier ); return true; } } public synchronized void applyLog( ReadableByteChannel byteChannel ) throws IOException { if ( !backupSlave ) { throw new IllegalStateException( "This is not a backup slave" ); } if ( xidIdentMap.size() > 0 ) { throw new IllegalStateException( "There are active transactions" ); } buffer.clear(); buffer.limit( 8 ); if ( byteChannel.read( buffer ) != 8 ) { throw new IOException( "Unable to read log version" ); } buffer.flip(); logVersion = buffer.getLong(); if ( logVersion != xaTf.getCurrentVersion() ) { throw new IllegalStateException( "Tried to apply version " + logVersion + " but expected version " + xaTf.getCurrentVersion() ); } log.fine( "Logical log version: " + logVersion ); long logEntriesFound = 0; LogApplier logApplier = new LogApplier( byteChannel, buffer, xaTf, xaRm, cf, xidIdentMap, recoveredTxMap ); while ( logApplier.readAndApplyEntry() ) { logEntriesFound++; } byteChannel.close(); xaTf.flushAll(); xaTf.getAndSetNewVersion(); xaRm.reset(); log.info( "Log[" + fileName + "] version " + logVersion + " applied successfully." 
); } public synchronized void rotate() throws IOException { xaTf.flushAll(); String newLogFile = fileName + ".2"; String currentLogFile = fileName + ".1"; char newActiveLog = LOG2; long currentVersion = xaTf.getCurrentVersion(); String oldCopy = fileName + ".v" + currentVersion; if ( currentLog == CLEAN || currentLog == LOG2 ) { newActiveLog = LOG1; newLogFile = fileName + ".1"; currentLogFile = fileName + ".2"; } else { assert currentLog == LOG1; } if ( new File( newLogFile ).exists() ) { throw new IOException( "New log file: " + newLogFile + " already exist" ); } if ( new File( oldCopy ).exists() ) { throw new IOException( "Copy log file: " + oldCopy + " already exist" ); } long endPosition = writeBuffer.getFileChannelPosition(); writeBuffer.force(); FileChannel newLog = new RandomAccessFile( newLogFile, "rw" ).getChannel(); buffer.clear(); buffer.putLong( currentVersion + 1 ).flip(); if ( newLog.write( buffer ) != 8 ) { throw new IOException( "Unable to write log version to new" ); } fileChannel.position( 0 ); buffer.clear(); buffer.limit( 8 ); if( fileChannel.read( buffer ) != 8 ) { throw new IOException( "Verification of log version failed" ); } buffer.flip(); long verification = buffer.getLong(); if ( verification != currentVersion ) { throw new IOException( "Verification of log version failed, " + " expected " + currentVersion + " got " + verification ); } if ( xidIdentMap.size() > 0 ) { fileChannel.position( getFirstStartEntry( endPosition ) ); } buffer.clear(); buffer.limit( 1 ); boolean emptyHit = false; while ( fileChannel.read( buffer ) == 1 && !emptyHit ) { buffer.flip(); byte entry = buffer.get(); switch ( entry ) { case TX_START: readAndWriteTxStartEntry( newLog ); break; case TX_PREPARE: readAndWriteTxPrepareEntry( newLog ); break; case TX_1P_COMMIT: readAndWriteTxOnePhaseCommit( newLog ); break; case TX_2P_COMMIT: readAndWriteTxTwoPhaseCommit( newLog ); break; case COMMAND: readAndWriteCommandEntry( newLog ); break; case DONE: readAndVerifyDoneEntry(); break; case EMPTY: emptyHit = true; break; default: throw new IOException( "Log rotation failed, " + "unknown log entry[" + entry + "]" ); } buffer.clear(); buffer.limit( 1 ); } newLog.force( false ); releaseCurrentLogFile(); setActiveLog( newActiveLog ); if ( keepLogs ) { renameCurrentLogFileAndIncrementVersion( currentLogFile, endPosition ); } else { deleteCurrentLogFile( currentLogFile ); xaTf.getAndSetNewVersion(); } if ( xaTf.getCurrentVersion() != ( currentVersion + 1 ) ) { throw new IOException( "version change failed" ); } fileChannel = newLog; if ( !useMemoryMapped ) { writeBuffer = new DirectMappedLogBuffer( fileChannel ); } else { writeBuffer = new MemoryMappedLogBuffer( fileChannel ); } } private long getFirstStartEntry( long endPosition ) { long firstEntryPosition = endPosition; for ( StartEntry entry : xidIdentMap.values() ) { if ( entry.getStartPosition() < firstEntryPosition ) { assert entry.getStartPosition() > 0; firstEntryPosition = entry.getStartPosition(); } } return firstEntryPosition; } private void setActiveLog( char c ) throws IOException { if ( c != CLEAN && c != LOG1 && c != LOG2 ) { throw new IllegalArgumentException( "Log must be either clean, " + "1 or 2" ); } if ( c == currentLog ) { throw new IllegalStateException( "Log should not be equal to " + "current " + currentLog ); } ByteBuffer bb = ByteBuffer.wrap( new byte[4] ); bb.asCharBuffer().put( c ).flip(); FileChannel fc = new RandomAccessFile( fileName + ".active" , "rw" ).getChannel(); int wrote = fc.write( bb ); if ( wrote != 4 ) { throw 
new IllegalStateException( "Expected to write 4 -> " + wrote ); } fc.force( false ); fc.close(); currentLog = c; } // [COMMAND][identifier][COMMAND_DATA] private void readAndWriteCommandEntry( FileChannel newLog ) throws IOException { buffer.clear(); buffer.put( COMMAND ); buffer.limit( 1 + 4 ); if ( fileChannel.read( buffer ) != 4 ) { throw new IllegalStateException( "Unable to read command header" ); } buffer.flip(); buffer.position( 1 ); int identifier = buffer.getInt(); FileChannel writeToLog = null; if ( xidIdentMap.get( identifier ) != null ) { writeToLog = newLog; } if ( writeToLog != null ) { buffer.position( 0 ); if ( writeToLog.write( buffer ) != 5 ) { throw new TransactionFailureException( "Unable to write command header" ); } } XaCommand command = cf.readCommand( fileChannel, buffer ); if ( writeToLog != null ) { command.writeToFile( new DirectLogBuffer( writeToLog, buffer ) ); } } private void readAndVerifyDoneEntry() throws IOException { buffer.clear(); buffer.limit( 4 ); if ( fileChannel.read( buffer ) != 4 ) { throw new IllegalStateException( "Unable to read done entry" ); } buffer.flip(); int identifier = buffer.getInt(); if ( xidIdentMap.get( identifier ) != null ) { throw new IllegalStateException( identifier + " done entry found but still active" ); } } // [TX_1P_COMMIT][identifier] private void readAndWriteTxOnePhaseCommit( FileChannel newLog ) throws IOException { buffer.clear(); buffer.limit( 1 + 4 ); buffer.put( TX_1P_COMMIT ); if ( fileChannel.read( buffer ) != 4 ) { throw new IllegalStateException( "Unable to read 1P commit entry" ); } buffer.flip(); buffer.position( 1 ); int identifier = buffer.getInt(); FileChannel writeToLog = null; if ( xidIdentMap.get( identifier ) != null ) { writeToLog = newLog; } buffer.position( 0 ); if ( writeToLog != null && writeToLog.write( buffer ) != 5 ) { throw new TransactionFailureException( "Unable to write 1P commit entry" ); } } private void readAndWriteTxTwoPhaseCommit( FileChannel newLog ) throws IOException { buffer.clear(); buffer.limit( 1 + 4 ); buffer.put( TX_2P_COMMIT ); if ( fileChannel.read( buffer ) != 4 ) { throw new IllegalStateException( "Unable to read 2P commit entry" ); } buffer.flip(); buffer.position( 1 ); int identifier = buffer.getInt(); FileChannel writeToLog = null; if ( xidIdentMap.get( identifier ) != null ) { // " 2PC found but still active" ); writeToLog = newLog; } buffer.position( 0 ); if ( writeToLog != null && writeToLog.write( buffer ) != 5 ) { throw new TransactionFailureException( "Unable to write 2P commit entry" ); } } private void readAndWriteTxPrepareEntry( FileChannel newLog ) throws IOException { // get the tx identifier buffer.clear(); buffer.limit( 1 + 4 ); buffer.put( TX_PREPARE ); if ( fileChannel.read( buffer ) != 4 ) { throw new IllegalStateException( "Unable to read prepare entry" ); } buffer.flip(); buffer.position( 1 ); int identifier = buffer.getInt(); FileChannel writeToLog = null; if ( xidIdentMap.get( identifier ) != null ) { writeToLog = newLog; } buffer.position( 0 ); if ( writeToLog != null && writeToLog.write( buffer ) != 5 ) { throw new TransactionFailureException( "Unable to write prepare entry" ); } } // [TX_START][xid[gid.length,bid.lengh,gid,bid]][identifier][format id] private void readAndWriteTxStartEntry( FileChannel newLog ) throws IOException { // get the global id buffer.clear(); buffer.put( TX_START ); buffer.limit( 3 ); if ( fileChannel.read( buffer ) != 2 ) { throw new IllegalStateException( "Unable to read tx start entry xid id lengths" ); } buffer.flip(); 
buffer.position( 1 ); byte globalIdLength = buffer.get(); byte branchIdLength = buffer.get(); int xidLength = globalIdLength + branchIdLength; buffer.limit( 3 + xidLength + 8 ); buffer.position( 3 ); if ( fileChannel.read( buffer ) != 8 + xidLength ) { throw new IllegalStateException( "Unable to read xid" ); } buffer.flip(); buffer.position( 3 + xidLength ); int identifier = buffer.getInt(); FileChannel writeToLog = null; StartEntry entry = xidIdentMap.get( identifier ); if ( entry != null ) { writeToLog = newLog; entry.setStartPosition( newLog.position() ); } buffer.position( 0 ); if ( writeToLog != null && writeToLog.write( buffer ) != 3 + 8 + xidLength ) { throw new TransactionFailureException( "Unable to write tx start xid" ); } } public void setKeepLogs( boolean keep ) { this.keepLogs = keep; } public boolean isLogsKept() { return this.keepLogs; } public void setAutoRotateLogs( boolean autoRotate ) { this.autoRotate = autoRotate; } public boolean isLogsAutoRotated() { return this.autoRotate; } public void setLogicalLogTargetSize( long size ) { this.rotateAtSize = size; } public long getLogicalLogTargetSize() { return this.rotateAtSize; } private static class StartEntry { private final Xid xid; private long startEntryPosition; StartEntry( Xid xid, long startPosition ) { this.xid = xid; this.startEntryPosition = startPosition; } Xid getXid() { return xid; } long getStartPosition() { return startEntryPosition; } void setStartPosition( long newPosition ) { startEntryPosition = newPosition; } } public String getFileName( long version ) { return fileName + ".v" + version; } }
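/*
 * Illustrative sketch (not from the original project sources): XaLogicalLog tracks which
 * physical log file is active through a small "<name>.active" marker holding a single char
 * ('C' = clean, '1', '2') encoded via ByteBuffer.asCharBuffer() and padded to 4 bytes, as in
 * open() and setActiveLog(). The JDK-only sketch below writes and re-reads such a marker; the
 * temp-file name is an assumption.
 */
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;

class ActiveLogMarkerSketch {
    public static void main(String[] args) throws IOException {
        // Stand-in for "<logical-log-name>.active"; a temp file keeps the sketch self-contained.
        File marker = File.createTempFile("nioneo_logical.log", ".active");

        // Write the marker the way setActiveLog() does: one char in a 4-byte buffer.
        ByteBuffer out = ByteBuffer.wrap(new byte[4]);
        out.asCharBuffer().put('1');
        try (FileChannel fc = new RandomAccessFile(marker, "rw").getChannel()) {
            fc.write(out);
            fc.force(false);
        }

        // Read it back the way open() does and decode the active-log character.
        ByteBuffer in = ByteBuffer.wrap(new byte[4]);
        try (FileChannel fc = new RandomAccessFile(marker, "r").getChannel()) {
            fc.read(in);
        }
        in.flip();
        System.out.println("active log marker: " + in.asCharBuffer().get());
    }
}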
package org.tigris.subversion.subclipse.ui.subscriber; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import org.eclipse.compare.structuremergeviewer.IDiffElement; import org.eclipse.core.resources.IResource; import org.eclipse.jface.viewers.IStructuredSelection; import org.eclipse.team.core.synchronize.FastSyncInfoFilter; import org.eclipse.team.core.synchronize.SyncInfo; import org.eclipse.team.ui.synchronize.ISynchronizeModelElement; import org.eclipse.team.ui.synchronize.ISynchronizePageConfiguration; import org.eclipse.team.ui.synchronize.SynchronizeModelAction; import org.eclipse.team.ui.synchronize.SynchronizeModelOperation; import org.tigris.subversion.subclipse.core.ISVNLocalResource; import org.tigris.subversion.subclipse.core.SVNException; import org.tigris.subversion.subclipse.core.resources.SVNWorkspaceRoot; import org.tigris.subversion.subclipse.ui.ISVNUIConstants; import org.tigris.subversion.subclipse.ui.SVNUIPlugin; public class OverrideAndUpdateSynchronizeAction extends SynchronizeModelAction { public OverrideAndUpdateSynchronizeAction(String text, ISynchronizePageConfiguration configuration) { super(text, configuration); } protected FastSyncInfoFilter getSyncInfoFilter() { return new FastSyncInfoFilter() { public boolean select(SyncInfo info) { SyncInfoDirectionFilter filter = new SyncInfoDirectionFilter(new int[] {SyncInfo.OUTGOING,SyncInfo.CONFLICTING}); if (!filter.select(info)) return false; IStructuredSelection selection = getStructuredSelection(); boolean removeUnAdded = SVNUIPlugin.getPlugin().getPreferenceStore().getBoolean(ISVNUIConstants.PREF_REMOVE_UNADDED_RESOURCES_ON_REPLACE); Iterator iter = selection.iterator(); while (iter.hasNext()) { ISynchronizeModelElement element = (ISynchronizeModelElement)iter.next(); IResource resource = element.getResource(); if (resource == null) { return false; } ISVNLocalResource svnResource = SVNWorkspaceRoot.getSVNResourceFor(resource); if (svnResource == null) { return false; } try { if (!resource.exists() && !svnResource.getStatusFromCache().isDeleted()) { return false; } if (svnResource.isAdded()) return false; if (!removeUnAdded && !svnResource.isManaged()) return false; } catch (SVNException e) { return false; } } return true; } }; } protected SynchronizeModelOperation getSubscriberOperation(ISynchronizePageConfiguration configuration, IDiffElement[] elements) { List selectedResources = new ArrayList(elements.length); for (int i=0; i<elements.length; i++) { if (elements[i] instanceof ISynchronizeModelElement) { selectedResources.add(((ISynchronizeModelElement)elements[i]).getResource()); } } IResource[] resources = new IResource[selectedResources.size()]; selectedResources.toArray(resources); return new OverrideAndUpdateSynchronizeOperation(configuration, elements, resources, resources); } }
package org.ndexbio.model.object.network; import java.util.ArrayList; import java.util.List; import org.ndexbio.model.object.NdexExternalObject; import org.ndexbio.model.object.NdexPropertyValuePair; import org.ndexbio.model.object.PropertiedObject; import org.ndexbio.model.object.SimplePropertyValuePair; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; @JsonIgnoreProperties(ignoreUnknown = true) public class NetworkSummary extends NdexExternalObject implements PropertiedObject { private String _description; private int _edgeCount; private boolean _isComplete; private boolean _isLocked; private VisibilityType _visibility; private boolean _isReadOnly; private String _name; private int _nodeCount; private String _owner; private String _URI; // private long _highestElementId; private String _version; private List<NdexPropertyValuePair> _properties; private List<SimplePropertyValuePair> _presentationProperties; public NetworkSummary () { super(); _type = this.getClass().getSimpleName(); _isComplete = false; _isLocked = false; _isReadOnly = false; // setVisibility(VisibilityType.PRIVATE); _edgeCount = 0; _nodeCount = 0; _properties = new ArrayList<NdexPropertyValuePair> (10); _presentationProperties = new ArrayList<SimplePropertyValuePair> (10); } public String getDescription() { return _description; } public void setDescription(String description) { _description = description; } public int getEdgeCount() { return _edgeCount; } public void setEdgeCount(int edgeCount) { _edgeCount = edgeCount; } public String getVersion() { return _version; } public void setVersion(String version) { this._version = version; } public VisibilityType getVisibility() { return _visibility; } public void setVisibility(VisibilityType visibility) { this._visibility = visibility; } public boolean getIsComplete() { return _isComplete; } public void setIsComplete(boolean isComplete) { _isComplete = isComplete; } public boolean getIsLocked() { return _isLocked; } public void setIsLocked(boolean isLocked) { _isLocked = isLocked; } public String getName() { return _name; } public void setName(String name) { _name = name; } public int getNodeCount() { return _nodeCount; } public void setNodeCount(int nodeCount) { _nodeCount = nodeCount; } public List<NdexPropertyValuePair> getProperties() { return _properties; } public List<SimplePropertyValuePair> getPresentationProperties() { return _presentationProperties; } public void setProperties(List<NdexPropertyValuePair> properties) { _properties = properties; } public void setPresentationProperties(List<SimplePropertyValuePair> properties) { _presentationProperties = properties; } public String getURI() { return _URI; } public void setURI(String URI) { this._URI = URI; } public boolean getIsReadOnly() { return _isReadOnly; } public void setIsReadOnly(boolean isReadOnly) { this._isReadOnly = isReadOnly; } public String getOwner() { return _owner; } public void setOwner(String owner) { this._owner = owner; } }
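/*
 * Illustrative sketch (not from the original project sources): the @JsonIgnoreProperties
 * annotation on NetworkSummary suggests the bean is meant to round-trip through JSON.
 * Assuming jackson-databind is on the classpath, a round trip might look like this; the field
 * values are arbitrary examples.
 */
import com.fasterxml.jackson.databind.ObjectMapper;
import org.ndexbio.model.object.network.NetworkSummary;

class NetworkSummaryJsonSketch {
    public static void main(String[] args) throws Exception {
        NetworkSummary summary = new NetworkSummary();
        summary.setName("example-network");   // assumed value
        summary.setNodeCount(42);
        summary.setEdgeCount(99);

        ObjectMapper mapper = new ObjectMapper();
        String json = mapper.writeValueAsString(summary);
        // ignoreUnknown = true means extra JSON fields are skipped on the way back in.
        NetworkSummary parsed = mapper.readValue(json, NetworkSummary.class);
        System.out.println(parsed.getName() + ": " + parsed.getNodeCount() + " nodes");
    }
}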
package com.opengamma.financial.analytics.model.forex.forward; import java.util.Map; import java.util.Set; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.opengamma.OpenGammaRuntimeException; import com.opengamma.engine.ComputationTarget; import com.opengamma.engine.function.AbstractFunction; import com.opengamma.engine.function.FunctionCompilationContext; import com.opengamma.engine.function.FunctionExecutionContext; import com.opengamma.engine.function.FunctionInputs; import com.opengamma.engine.target.ComputationTargetType; import com.opengamma.engine.value.ComputedValue; import com.opengamma.engine.value.ValueProperties; import com.opengamma.engine.value.ValuePropertyNames; import com.opengamma.engine.value.ValueRequirement; import com.opengamma.engine.value.ValueRequirementNames; import com.opengamma.engine.value.ValueSpecification; import com.opengamma.financial.analytics.CurrencyLabelledMatrix1D; import com.opengamma.financial.analytics.model.CalculationPropertyNamesAndValues; import com.opengamma.financial.analytics.model.forex.ForexVisitors; import com.opengamma.financial.currency.CurrencyMatrixSpotSourcingFunction; import com.opengamma.financial.security.FinancialSecurity; import com.opengamma.financial.security.FinancialSecurityTypes; import com.opengamma.util.async.AsynchronousExecution; import com.opengamma.util.money.Currency; /** * Calculates Present Value on FX Forward instruments. */ public class FXForwardPresentValueFunction extends AbstractFunction.NonCompiledInvoker { @Override public ComputationTargetType getTargetType() { return FinancialSecurityTypes.FX_FORWARD_SECURITY.or(FinancialSecurityTypes.NON_DELIVERABLE_FX_FORWARD_SECURITY); } @Override public Set<ValueSpecification> getResults(final FunctionCompilationContext context, final ComputationTarget target) { return ImmutableSet.of(new ValueSpecification(ValueRequirementNames.PRESENT_VALUE, target.toSpecification(), ValueProperties.all())); } @Override public Set<ValueRequirement> getRequirements(final FunctionCompilationContext context, final ComputationTarget target, final ValueRequirement desiredValue) { final ValueProperties constraints = desiredValue.getConstraints(); final Set<String> calculationMethod = constraints.getValues(ValuePropertyNames.CALCULATION_METHOD); if (calculationMethod != null && calculationMethod.size() == 1) { if (!CalculationPropertyNamesAndValues.DISCOUNTING.equals(Iterables.getOnlyElement(calculationMethod))) { return null; } } final ValueRequirement fxPvRequirement = new ValueRequirement(ValueRequirementNames.FX_PRESENT_VALUE, target.toSpecification(), constraints); final FinancialSecurity security = (FinancialSecurity) target.getSecurity(); final Currency payCurrency = getPayCurrency(security); final Currency receiveCurrency = getReceiveCurrency(security); final ValueRequirement spotRateRequirement = CurrencyMatrixSpotSourcingFunction.getConversionRequirement(payCurrency, receiveCurrency); return ImmutableSet.of(fxPvRequirement, spotRateRequirement); } @Override public Set<ValueSpecification> getResults(final FunctionCompilationContext context, final ComputationTarget target, final Map<ValueSpecification, ValueRequirement> inputs) { ValueProperties properties = null; for (final Map.Entry<ValueSpecification, ValueRequirement> entry : inputs.entrySet()) { if (entry.getKey().getValueName().equals(ValueRequirementNames.FX_PRESENT_VALUE)) { properties = entry.getKey().getProperties(); break; } } if (properties == null) { return null; } 
final Currency currency = getPayCurrency((FinancialSecurity) target.getSecurity()); return ImmutableSet.of(new ValueSpecification(ValueRequirementNames.PRESENT_VALUE, target.toSpecification(), getResultProperties(currency, properties.copy()))); } @Override public Set<ComputedValue> execute(final FunctionExecutionContext executionContext, final FunctionInputs inputs, final ComputationTarget target, final Set<ValueRequirement> desiredValues) throws AsynchronousExecution { final FinancialSecurity security = (FinancialSecurity) target.getSecurity(); final Currency payCurrency = getPayCurrency(security); final Currency receiveCurrency = getReceiveCurrency(security); final ComputedValue input = inputs.getComputedValue(ValueRequirementNames.FX_PRESENT_VALUE); final ValueSpecification inputSpec = input.getSpecification(); final CurrencyLabelledMatrix1D fxPresentValue = (CurrencyLabelledMatrix1D) input.getValue(); if (fxPresentValue.size() != 2) { throw new OpenGammaRuntimeException("Expected " + ValueRequirementNames.FX_PRESENT_VALUE + " input to contain 2 currency values, but found " + fxPresentValue.size()); } int payIndex = -1; int receiveIndex = -1; for (int i = 0; i < 2; i++) { final Currency currency = fxPresentValue.getKeys()[i]; if (payCurrency.equals(currency)) { payIndex = i; } else if (receiveCurrency.equals(currency)) { receiveIndex = i; } else { throw new OpenGammaRuntimeException(ValueRequirementNames.FX_PRESENT_VALUE + " contains unexpected currency " + currency + ". Expected " + payCurrency + " or " + receiveCurrency + "."); } } final double payValue = fxPresentValue.getValues()[payIndex]; final double receiveValue = fxPresentValue.getValues()[receiveIndex]; final double spot = (Double) inputs.getValue(ValueRequirementNames.SPOT_RATE); final double pv = payValue + spot * receiveValue; return ImmutableSet.of(new ComputedValue(getResultSpec(target, inputSpec.getProperties().copy()), pv)); } protected ValueSpecification getResultSpec(final ComputationTarget target, final ValueProperties.Builder fxPresentValueProperties) { final Currency currency = getPayCurrency((FinancialSecurity) target.getSecurity()); return new ValueSpecification(ValueRequirementNames.PRESENT_VALUE, target.toSpecification(), getResultProperties(currency, fxPresentValueProperties)); } protected ValueProperties getResultProperties(final Currency currency, final ValueProperties.Builder fxPresentValueProperties) { return fxPresentValueProperties.withoutAny(ValuePropertyNames.FUNCTION) .with(ValuePropertyNames.FUNCTION, getUniqueId()) .with(ValuePropertyNames.CURRENCY, currency.getCode()) .get(); } protected Currency getPayCurrency(final FinancialSecurity security) { return security.accept(ForexVisitors.getPayCurrencyVisitor()); } protected Currency getReceiveCurrency(final FinancialSecurity security) { return security.accept(ForexVisitors.getReceiveCurrencyVisitor()); } }
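The core of execute() above is the aggregation pv = payValue + spot * receiveValue: the receive-currency leg is converted into the pay currency with the spot rate obtained from the SPOT_RATE requirement and added to the pay leg. A minimal, dependency-free sketch of that arithmetic; the class name and numeric values are made up for illustration.

// Illustrative only; mirrors the aggregation step in FXForwardPresentValueFunction.execute().
public final class FxForwardPvExample {

    /** Combines the two legs of an FX forward into a single PV quoted in the pay currency. */
    static double presentValueInPayCurrency(double payLegPv, double receiveLegPv, double spotPayPerReceive) {
        // spotPayPerReceive converts one unit of the receive currency into the pay currency,
        // i.e. the rate requested via CurrencyMatrixSpotSourcingFunction.getConversionRequirement(pay, receive).
        return payLegPv + spotPayPerReceive * receiveLegPv;
    }

    public static void main(String[] args) {
        double payLegPv = -1_000_000.0;   // e.g. the USD leg, already discounted
        double receiveLegPv = 760_000.0;  // e.g. the EUR leg, already discounted
        double spot = 1.30;               // USD per EUR
        System.out.println("PV (pay ccy) = " + presentValueInPayCurrency(payLegPv, receiveLegPv, spot));
    }
}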
package edu.northwestern.bioinformatics.studycalendar.dataproviders.coppa.direct;

import edu.northwestern.bioinformatics.studycalendar.dataproviders.api.SiteProvider;
import edu.northwestern.bioinformatics.studycalendar.dataproviders.api.StudyProvider;
import edu.northwestern.bioinformatics.studycalendar.dataproviders.api.StudySiteProvider;
import org.osgi.framework.BundleActivator;
import org.osgi.framework.BundleContext;

/**
 * @author Rhett Sutphin
 */
public class Activator implements BundleActivator {
    public void start(BundleContext bundleContext) throws Exception {
        bundleContext.registerService(SiteProvider.class.getName(), new CoppaSiteProvider(), null);
        bundleContext.registerService(StudyProvider.class.getName(), new CoppaStudyProvider(), null);
        bundleContext.registerService(StudySiteProvider.class.getName(), new CoppaStudySiteProvider(), null);
    }

    public void stop(BundleContext bundleContext) throws Exception {
    }
}
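A hedged sketch of how another bundle might consume one of the services registered above. The consumer class and package are hypothetical, and the SiteProvider method hinted at in the comment is not shown in this excerpt; only the standard OSGi lookup calls are assumed.

// Hypothetical consumer bundle; not part of the original sources.
package edu.northwestern.bioinformatics.studycalendar.dataproviders.coppa.direct.example;

import edu.northwestern.bioinformatics.studycalendar.dataproviders.api.SiteProvider;
import org.osgi.framework.BundleActivator;
import org.osgi.framework.BundleContext;
import org.osgi.framework.ServiceReference;

public class ExampleSiteProviderClient implements BundleActivator {
    public void start(BundleContext bundleContext) throws Exception {
        // Look the provider up by the same interface name it was registered under.
        ServiceReference ref = bundleContext.getServiceReference(SiteProvider.class.getName());
        if (ref != null) {
            SiteProvider provider = (SiteProvider) bundleContext.getService(ref);
            // ... use the provider here (its methods are defined in the API bundle), then release it ...
            bundleContext.ungetService(ref);
        }
    }

    public void stop(BundleContext bundleContext) throws Exception {
    }
}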
package org.realityforge.gwt.webpoller.client; import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; import javax.annotation.Nonnull; import javax.annotation.Nullable; public abstract class WebPoller { private static final Logger LOG = Logger.getLogger( WebPoller.class.getName() ); /** * The duration between polls when not using long polling. */ public static final int DEFAULT_INTER_REQUEST_DURATION = 2000; /** * The duration between attempts when in error. */ public static final int DEFAULT_INTER_ERROR_DURATION = 2000; /** * The number of error before the poller is marked as failed. */ public static final int DEFAULT_ERROR_COUNT_THRESHOLD = 5; public interface Factory { @Nonnull WebPoller newWebPoller(); } private static Factory g_factory; private final RequestContext _requestContext = new WebPollerRequestContext(); private WebPollerListener _listener = NullWebPollerListener.INSTANCE; private RequestFactory _requestFactory; private boolean _active; private boolean _paused; private int _errorCount; private int _interRequestDuration = DEFAULT_INTER_REQUEST_DURATION; private int _interErrorDuration = DEFAULT_INTER_ERROR_DURATION; /** * The number of errors before the poller is marked as failed. */ private int _errorCountThreshold = DEFAULT_ERROR_COUNT_THRESHOLD; private Request _request; private Level _logLevel = Level.FINEST; public static WebPoller newWebPoller() { return ( null != g_factory ) ? g_factory.newWebPoller() : null; } public static void register( @Nonnull final Factory factory ) { g_factory = factory; } public static boolean deregister( @Nonnull final Factory factory ) { if ( g_factory != factory ) { return false; } else { g_factory = null; return true; } } public Level getLogLevel() { return _logLevel; } public void setLogLevel( final Level logLevel ) { _logLevel = logLevel; } /** * @return true if the poller is active. */ public boolean isActive() { return _active; } /** * @return true if active and the last poll resulted in error, false otherwise. */ public boolean inError() { return isActive() && _errorCount > 0; } public final void start() throws IllegalStateException { if ( isActive() ) { throw new IllegalStateException( "Start invoked on active poller" ); } if ( null == _requestFactory ) { throw new IllegalStateException( "Start invoked but no RequestFactory specified" ); } doStart(); } public final void stop() throws IllegalStateException { if ( !isActive() ) { throw new IllegalStateException( "Stop invoked on inactive poller" ); } doStop(); } public void setRequestFactory( @Nullable final RequestFactory requestFactory ) { if ( isActive() ) { throw new IllegalStateException( "Attempt to invoke setRequestFactory when poller active" ); } _requestFactory = requestFactory; } public final RequestFactory getRequestFactory() { return _requestFactory; } /** * @return the number of errors before poller is stopped. */ public final int getErrorCountThreshold() { return _errorCountThreshold; } /** * Set the number of errors before poller is stopped. 
*/ public void setErrorCountThreshold( final int errorCountThreshold ) { if ( isActive() ) { throw new IllegalStateException( "Attempt to invoke setErrorCountThreshold when poller active" ); } _errorCountThreshold = errorCountThreshold; } public final int getInterRequestDuration() { return _interRequestDuration; } public final void setInterRequestDuration( final int interRequestDuration ) { if ( isActive() ) { throw new IllegalStateException( "Attempt to invoke setInterRequestDuration when poller active" ); } _interRequestDuration = interRequestDuration; } public int getInterErrorDuration() { return _interErrorDuration; } public void setInterErrorDuration( final int interErrorDuration ) { if ( isActive() ) { throw new IllegalStateException( "Attempt to invoke setInterErrorDuration when poller active" ); } _interErrorDuration = interErrorDuration; } public void setListener( @Nullable final WebPollerListener listener ) { _listener = null == listener ? NullWebPollerListener.INSTANCE : listener; } public void pause() { if ( !isActive() ) { throw new IllegalStateException( "Attempt to invoke pause when poller is inactive" ); } if ( isPaused() ) { throw new IllegalStateException( "Attempt to invoke pause when poller is already paused" ); } _paused = true; } public void resume() { if ( !isActive() ) { throw new IllegalStateException( "Attempt to invoke resume when poller is inactive" ); } if ( !isPaused() ) { throw new IllegalStateException( "Attempt to invoke resume when poller is not paused" ); } _paused = false; poll(); } /** * @return true if poller is paused. */ public boolean isPaused() { return _paused; } /** * Sub-classes should override this method to provide functionality. */ protected void doStop() { if ( isTimerActive() ) { stopTimer(); } cancelRequest(); _paused = false; _active = false; _errorCount = 0; onStop(); } protected void cancelRequest() { if ( null != _request ) { _request.cancel(); _request = null; } } /** * Stop the timer that is triggering the polling if any exists. */ protected abstract void stopTimer(); /** * Return true if the timer or the error timer is active. */ protected abstract boolean isTimerActive(); /** * Sub-classes should override this method to provide functionality. */ protected final void doStart() { _active = true; onStart(); initialPoll(); } private void initialPoll() { if ( 0 >= _interRequestDuration ) { poll(); } else { log( "Starting WebPoller timer" ); startTimer(); } } protected abstract void startErrorTimer(); protected abstract void stopErrorTimer(); /** * Start the timer that triggers the polling. */ protected abstract void startTimer(); /** * Invoked after a successful poll returning no data. */ protected final void onEmptyPollResult() { resetErrorState(); } /** * Invoked after a successful poll, regardless of whether data was received or not. */ private void resetErrorState() { if ( 0 != _errorCount ) { log( "Resetting WebPoller error state. Stopping error timer." ); _errorCount = 0; stopErrorTimer(); initialPoll(); } } /** * Fire a Start event. */ protected final void onStart() { log( "WebPoller start message." ); _listener.onStart( this ); } /** * Fire a Stop event. */ protected final void onStop() { log( "WebPoller stop message." ); _listener.onStop( this ); } /** * Fire a Message event. */ protected final void onMessage( @Nonnull final Map<String, String> context, @Nonnull final String data ) { log( "WebPoller message received: " + data ); _listener.onMessage( this, context, data ); resetErrorState(); } /** * Fire an Error event. 
* If the number of successive errors reaches a threshold then shut-down the poller. */ protected final void onError( @Nonnull final Throwable exception ) { _listener.onError( this, exception ); incErrorCount(); } protected void incErrorCount() { _errorCount++; log( "WebPoller error " + _errorCount + "/" + _errorCountThreshold ); if ( _errorCount > _errorCountThreshold ) { log( "WebPoller exceeded error threshold " + _errorCountThreshold + ". Stopping WebPoller" ); stopErrorTimer(); doStop(); } else if ( 1 == _errorCount ) { log( "WebPoller starting error timer." ); startErrorTimer(); } } /** * @return true if a poll request is outstanding. */ protected boolean isInPoll() { return null != _request; } /** * This should be invoked when the poll has completed. * It may be overriden by sub-classes to perform other cleanup. */ protected void pollReturned() { _request = null; /* We immediately poll if we are active and the inter-request duration is 0. However we will not immediately re-issue a poll if in error and the inter-error duration is not 0. */ if ( isActive() && 0 >= _interRequestDuration && ( !inError() || 0 >= _interErrorDuration ) ) { poll(); } } /** * Orchestrate the polling. */ protected final void poll() { if ( !isInPoll() && !isPaused() ) { doPoll(); } } protected final RequestContext getRequestContext() { return _requestContext; } /** * Perform actual polling. */ protected void doPoll() { log( "Performing Poll" ); try { _request = getRequestFactory().newRequest( getRequestContext() ); log( "Poll Scheduled" ); } catch ( final Exception e ) { log( "Poll Scheduled Error. Starting Error timer." ); getRequestContext().onError( e ); } } private void log( final String message ) { if ( LOG.isLoggable( getLogLevel() ) ) { LOG.log( getLogLevel(), message + " @ " + System.currentTimeMillis() ); } } private class WebPollerRequestContext implements RequestContext { @Override public void onEmptyMessage() { WebPoller.this.onEmptyPollResult(); WebPoller.this.pollReturned(); } @Override public void onMessage( @Nonnull final Map<String, String> context, @Nonnull final String data ) { WebPoller.this.onMessage( context, data ); WebPoller.this.pollReturned(); } @Override public void onError( @Nonnull final Throwable exception ) { WebPoller.this.onError( exception ); WebPoller.this.pollReturned(); } } }
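WebPoller leaves the timer plumbing abstract (startTimer, stopTimer, isTimerActive, startErrorTimer, stopErrorTimer). Below is a hedged sketch of what a plain java.util.Timer-based subclass could look like; the real library presumably ships a GWT-timer implementation instead, and the class name here is made up. Only WebPoller's own protected/public API is relied on.

// Illustrative subclass; not the library's actual implementation.
package org.realityforge.gwt.webpoller.client;

import java.util.Timer;
import java.util.TimerTask;

public class JavaUtilTimerWebPoller extends WebPoller {
    private Timer _timer;
    private Timer _errorTimer;

    @Override
    protected void startTimer() {
        // Base class only calls this when the inter-request duration is positive.
        _timer = new Timer(true);
        _timer.scheduleAtFixedRate(new TimerTask() {
            @Override
            public void run() {
                poll();
            }
        }, 0, getInterRequestDuration());
    }

    @Override
    protected void stopTimer() {
        // Cancel both timers so stop() never leaves a stray error timer running.
        if (null != _timer) {
            _timer.cancel();
            _timer = null;
        }
        stopErrorTimer();
    }

    @Override
    protected boolean isTimerActive() {
        return null != _timer || null != _errorTimer;
    }

    @Override
    protected void startErrorTimer() {
        // java.util.Timer requires a positive period; the default inter-error duration is 2000ms.
        final long period = Math.max(1, getInterErrorDuration());
        _errorTimer = new Timer(true);
        _errorTimer.scheduleAtFixedRate(new TimerTask() {
            @Override
            public void run() {
                poll();
            }
        }, period, period);
    }

    @Override
    protected void stopErrorTimer() {
        if (null != _errorTimer) {
            _errorTimer.cancel();
            _errorTimer = null;
        }
    }
}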
package io.github.opencubicchunks.cubicchunks.core.asm.mixin.fixes.common.vanillaclient; import io.github.opencubicchunks.cubicchunks.api.world.ICubicWorld; import io.github.opencubicchunks.cubicchunks.core.CubicChunksConfig; import io.github.opencubicchunks.cubicchunks.core.asm.mixin.ICubicWorldInternal; import io.github.opencubicchunks.cubicchunks.core.asm.mixin.core.common.vanillaclient.ICPacketPlayer; import io.github.opencubicchunks.cubicchunks.core.asm.mixin.core.common.vanillaclient.ICPacketPlayerDigging; import io.github.opencubicchunks.cubicchunks.core.asm.mixin.core.common.vanillaclient.ICPacketPlayerTryUseItemOnBlock; import io.github.opencubicchunks.cubicchunks.core.asm.mixin.core.common.vanillaclient.ICPacketTabComplete; import io.github.opencubicchunks.cubicchunks.core.asm.mixin.core.common.vanillaclient.ICPacketUpdateSign; import io.github.opencubicchunks.cubicchunks.core.asm.mixin.core.common.vanillaclient.ICPacketVehicleMove; import io.github.opencubicchunks.cubicchunks.core.server.VanillaNetworkHandler; import io.github.opencubicchunks.cubicchunks.core.server.vanillaproxy.IPositionPacket; import net.minecraft.entity.player.EntityPlayerMP; import net.minecraft.network.NetHandlerPlayServer; import net.minecraft.network.Packet; import net.minecraft.network.play.client.CPacketConfirmTeleport; import net.minecraft.network.play.client.CPacketPlayer; import net.minecraft.network.play.client.CPacketPlayerDigging; import net.minecraft.network.play.client.CPacketPlayerTryUseItemOnBlock; import net.minecraft.network.play.client.CPacketTabComplete; import net.minecraft.network.play.client.CPacketUpdateSign; import net.minecraft.network.play.client.CPacketVehicleMove; import net.minecraft.util.math.BlockPos; import net.minecraft.world.World; import net.minecraft.world.WorldServer; import org.spongepowered.asm.mixin.Mixin; import org.spongepowered.asm.mixin.Shadow; import org.spongepowered.asm.mixin.injection.At; import org.spongepowered.asm.mixin.injection.Inject; import org.spongepowered.asm.mixin.injection.ModifyVariable; import org.spongepowered.asm.mixin.injection.callback.CallbackInfo; @Mixin(NetHandlerPlayServer.class) public class MixinNetHandlerPlayServer { @Shadow public EntityPlayerMP player; @Inject(method = "processPlayerDigging", at = @At(value = "INVOKE", shift = At.Shift.AFTER, target = "Lnet/minecraft/network/PacketThreadUtil;checkThreadAndEnqueue(Lnet/minecraft/network/Packet;" + "Lnet/minecraft/network/INetHandler;Lnet/minecraft/util/IThreadListener;)V")) public void preprocessPacket(CPacketPlayerDigging packetIn, CallbackInfo ci) { WorldServer world = (WorldServer) player.world; if (!((ICubicWorld) world).isCubicWorld()) { return; } VanillaNetworkHandler vanillaHandler = ((ICubicWorldInternal.Server) world).getVanillaNetworkHandler(); boolean hasCC = vanillaHandler.hasCubicChunks(player); if (!hasCC) { ((ICPacketPlayerDigging) packetIn).setPosition(vanillaHandler.modifyPositionC2S(packetIn.getPosition(), player)); } } @Inject(method = "processPlayer", at = @At(value = "INVOKE", shift = At.Shift.AFTER, target = "Lnet/minecraft/network/PacketThreadUtil;checkThreadAndEnqueue(Lnet/minecraft/network/Packet;" + "Lnet/minecraft/network/INetHandler;Lnet/minecraft/util/IThreadListener;)V")) public void preprocessPacket(CPacketPlayer packet, CallbackInfo ci) { WorldServer world = (WorldServer) player.world; if (!((ICubicWorld) world).isCubicWorld()) { return; } VanillaNetworkHandler vanillaHandler = ((ICubicWorldInternal.Server) world).getVanillaNetworkHandler(); boolean 
hasCC = vanillaHandler.hasCubicChunks(player); if (!hasCC) { ((ICPacketPlayer) packet).setY(vanillaHandler.modifyPositionC2S(((ICPacketPlayer) packet).getY(), player)); } } @Inject(method = "processTryUseItemOnBlock", at = @At(value = "INVOKE", shift = At.Shift.AFTER, target = "Lnet/minecraft/network/PacketThreadUtil;checkThreadAndEnqueue(Lnet/minecraft/network/Packet;" + "Lnet/minecraft/network/INetHandler;Lnet/minecraft/util/IThreadListener;)V")) private void preprocessPacket(CPacketPlayerTryUseItemOnBlock packetIn, CallbackInfo ci) { WorldServer world = (WorldServer) player.world; if (!((ICubicWorld) world).isCubicWorld()) { return; } VanillaNetworkHandler vanillaHandler = ((ICubicWorldInternal.Server) world).getVanillaNetworkHandler(); boolean hasCC = vanillaHandler.hasCubicChunks(player); if (!hasCC) { ((ICPacketPlayerTryUseItemOnBlock) packetIn).setPosition(vanillaHandler.modifyPositionC2S(packetIn.getPos(), player)); } } @Inject(method = "processTabComplete", at = @At(value = "INVOKE", shift = At.Shift.AFTER, target = "Lnet/minecraft/network/PacketThreadUtil;checkThreadAndEnqueue(Lnet/minecraft/network/Packet;" + "Lnet/minecraft/network/INetHandler;Lnet/minecraft/util/IThreadListener;)V")) private void preprocessPacket(CPacketTabComplete packetIn, CallbackInfo ci) { WorldServer world = (WorldServer) player.world; if (!((ICubicWorld) world).isCubicWorld()) { return; } VanillaNetworkHandler vanillaHandler = ((ICubicWorldInternal.Server) world).getVanillaNetworkHandler(); boolean hasCC = vanillaHandler.hasCubicChunks(player); if (!hasCC) { BlockPos targetBlock = packetIn.getTargetBlock(); if (targetBlock != null) { ((ICPacketTabComplete) packetIn).setTargetBlock(vanillaHandler.modifyPositionC2S(targetBlock, player)); } } } @Inject(method = "processUpdateSign", at = @At(value = "INVOKE", shift = At.Shift.AFTER, target = "Lnet/minecraft/network/PacketThreadUtil;checkThreadAndEnqueue(Lnet/minecraft/network/Packet;" + "Lnet/minecraft/network/INetHandler;Lnet/minecraft/util/IThreadListener;)V")) private void preprocessPacket(CPacketUpdateSign packetIn, CallbackInfo ci) { WorldServer world = (WorldServer) player.world; if (!((ICubicWorld) world).isCubicWorld()) { return; } VanillaNetworkHandler vanillaHandler = ((ICubicWorldInternal.Server) world).getVanillaNetworkHandler(); boolean hasCC = vanillaHandler.hasCubicChunks(player); if (!hasCC) { ((ICPacketUpdateSign) packetIn).setPos(vanillaHandler.modifyPositionC2S(packetIn.getPosition(), player)); } } @Inject(method = "processVehicleMove", at = @At(value = "INVOKE", shift = At.Shift.AFTER, target = "Lnet/minecraft/network/PacketThreadUtil;checkThreadAndEnqueue(Lnet/minecraft/network/Packet;" + "Lnet/minecraft/network/INetHandler;Lnet/minecraft/util/IThreadListener;)V")) private void preprocessPacket(CPacketVehicleMove packetIn, CallbackInfo ci) { WorldServer world = (WorldServer) player.world; if (!((ICubicWorld) world).isCubicWorld()) { return; } VanillaNetworkHandler vanillaHandler = ((ICubicWorldInternal.Server) world).getVanillaNetworkHandler(); boolean hasCC = vanillaHandler.hasCubicChunks(player); if (!hasCC) { ((ICPacketVehicleMove) packetIn).setY(vanillaHandler.modifyPositionC2S(packetIn.getY(), player)); } } @Inject(method = "processConfirmTeleport", at = @At(value = "INVOKE", shift = At.Shift.AFTER, target = "Lnet/minecraft/network/PacketThreadUtil;checkThreadAndEnqueue(Lnet/minecraft/network/Packet;" + "Lnet/minecraft/network/INetHandler;Lnet/minecraft/util/IThreadListener;)V"), cancellable = true) public void 
preprocessTeleportConfirm(CPacketConfirmTeleport packetIn, CallbackInfo ci) { if (!CubicChunksConfig.allowVanillaClients) { return; } WorldServer world = (WorldServer) player.world; if (!((ICubicWorld) world).isCubicWorld()) { return; } VanillaNetworkHandler vanillaHandler = ((ICubicWorldInternal.Server) world).getVanillaNetworkHandler(); boolean hasCC = vanillaHandler.hasCubicChunks(player); if (!hasCC) { if (vanillaHandler.receiveOffsetUpdateConfirm(player, packetIn.getTeleportId())) { ci.cancel(); } } } @ModifyVariable(method = "sendPacket", at = @At("HEAD"), argsOnly = true) private Packet<?> onSendPacket(Packet<?> packetIn) { World world = this.player.world; if (!((ICubicWorld) world).isCubicWorld()) { return packetIn; } VanillaNetworkHandler vanillaHandler = ((ICubicWorldInternal.Server) world).getVanillaNetworkHandler(); if (packetIn instanceof IPositionPacket) { if (!vanillaHandler.hasCubicChunks(player)) { int targetOffset = vanillaHandler.getS2COffset(player); // we have to sometimes copy the packet because MC may attempt to send the same packet object // to multiple players if (((IPositionPacket) packetIn).hasYOffset()) { packetIn = copyPacket(packetIn); } ((IPositionPacket) packetIn).setYOffset(targetOffset); return packetIn; } else if (((IPositionPacket) packetIn).hasYOffset()) { return copyPacket(packetIn); } } return packetIn; } private Packet<?> copyPacket(Packet<?> packetIn) { // TODO: make this faster return VanillaNetworkHandler.copyPacket(packetIn); } }
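Each injector above repeats the same guard: skip non-cubic worlds, fetch the VanillaNetworkHandler, and translate positions only for clients without Cubic Chunks. A hedged refactoring sketch of that boilerplate, meant to live inside MixinNetHandlerPlayServer; the helper name is made up and only types already imported by the mixin are used.

// Illustrative refactoring fragment, not part of the original mixin.
/**
 * Returns the handler to use for C2S position translation, or null when no translation
 * is needed (non-cubic world, or a client that already has Cubic Chunks installed).
 */
private static VanillaNetworkHandler translationHandlerOrNull(EntityPlayerMP player) {
    WorldServer world = (WorldServer) player.world;
    if (!((ICubicWorld) world).isCubicWorld()) {
        return null;
    }
    VanillaNetworkHandler vanillaHandler = ((ICubicWorldInternal.Server) world).getVanillaNetworkHandler();
    return vanillaHandler.hasCubicChunks(player) ? null : vanillaHandler;
}

// Example use inside one of the injectors:
// VanillaNetworkHandler handler = translationHandlerOrNull(player);
// if (handler != null) {
//     ((ICPacketPlayerDigging) packetIn).setPosition(handler.modifyPositionC2S(packetIn.getPosition(), player));
// }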
package org.whitesource.archiveReaders;

import net.lingala.zip4j.core.ZipFile;
import net.lingala.zip4j.exception.ZipException;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.tools.ant.DirectoryScanner;
import org.codehaus.plexus.archiver.tar.TarGZipUnArchiver;
import org.codehaus.plexus.archiver.tar.TarUnArchiver;
import org.codehaus.plexus.logging.console.ConsoleLogger;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.IOException;
import java.util.*;

/**
 * Supports recursive decompression of archive files (Java, Python and Ruby archive types).
 *
 * @author anna.rozin
 */
public class ArchiveExtractor {

    private static final Logger logger = LoggerFactory.getLogger(ArchiveExtractor.class);

    private static final String TEMP_FOLDER = System.getProperty("java.io.tmpdir") + "WhiteSource-ArchiveExtractor";

    public static final List<String> ZIP_EXTENSIONS = Arrays.asList("jar", "war", "ear", "egg", "zip", "whl", "sca", "sda");
    public static final List<String> GEM_EXTENSIONS = Arrays.asList("gem");
    public static final List<String> TAR_EXTENSIONS = Arrays.asList("tar.gz", "tar");

    public static final String ZIP_EXTENSION_PATTERN;
    public static final String GEM_EXTENSION_PATTERN;
    public static final String TAR_EXTENSION_PATTERN;

    public static final String RUBY_DATA_FILE = "data.tar.gz";
    public static final String TAR_SUFFIX = ".tar";
    public static final String TAR_GZ_SUFFIX = TAR_SUFFIX + ".gz";
    public static final String UN_ARCHIVER_LOGGER = "unArchiverLogger";
    public static final String GLOB_PATTERN_PREFIX = "**/*.";
    public static final String DOT = ".";
    public static final String BLANK = "";
    public static final String PATTERN_PREFIX = ".*\\.";
    public static final String OR = "|";

    static {
        ZIP_EXTENSION_PATTERN = initializePattern(ZIP_EXTENSIONS);
        GEM_EXTENSION_PATTERN = initializePattern(GEM_EXTENSIONS);
        TAR_EXTENSION_PATTERN = initializePattern(TAR_EXTENSIONS);
    }

    private static String initializePattern(List<String> archiveExtensions) {
        StringBuilder sb = new StringBuilder();
        for (String archiveExtension : archiveExtensions) {
            sb.append(PATTERN_PREFIX);
            sb.append(archiveExtension);
            sb.append(OR);
        }
        return sb.toString().substring(0, sb.toString().lastIndexOf(OR));
    }

    private String[] archiveIncludesPattern;
    private String[] archiveExcludesPattern;

    public ArchiveExtractor(String[] archiveIncludes, String[] archiveExcludes) {
        if (archiveIncludes.length > 0 && StringUtils.isNotBlank(archiveIncludes[0])) {
            this.archiveIncludesPattern = archiveIncludes;
        } else {
            // fall back to the built-in archive extensions only when archiveIncludes is empty
            this.archiveIncludesPattern = createArchivesArray();
        }
        this.archiveExcludesPattern = archiveExcludes;
    }

    /**
     * Extracts all archive files up to the archiveExtractionDepth defined by the user in the configuration file.
     *
     * The archiveExtractionDepth default value is 0 (no archive scanning); the maximum value is 3.
     * By default the method scans jar/war/ear files; if archiveIncludes/archiveExcludes parameters
     * are defined, the method filters accordingly.
     *
     * @param scannerBaseDir         directory to scan.
     * @param archiveExtractionDepth drill-down hierarchy level in archive files.
     * @return the temp directory for the extracted files.
     */
    public String extractArchives(String scannerBaseDir, int archiveExtractionDepth) {
        String destDirectory = TEMP_FOLDER + scannerBaseDir.substring(scannerBaseDir.lastIndexOf(File.separator), scannerBaseDir.length());
        extractArchive(scannerBaseDir, destDirectory, archiveExtractionDepth, 0);
        return destDirectory;
    }

    public void deleteArchiveDirectory() {
        File directory = new File(TEMP_FOLDER);
        try {
            FileUtils.deleteDirectory(directory);
        } catch (IOException e) {
            logger.warn("Error deleting archive directory", e);
        }
    }

    private String[] createArchivesArray() {
        Collection<String> archiveExtensions = new ArrayList<String>();
        archiveExtensions.addAll(ZIP_EXTENSIONS);
        archiveExtensions.addAll(GEM_EXTENSIONS);
        archiveExtensions.addAll(TAR_EXTENSIONS);

        String[] archiveIncludesPattern = new String[archiveExtensions.size()];
        int i = 0;
        for (String extension : archiveExtensions) {
            archiveIncludesPattern[i++] = GLOB_PATTERN_PREFIX + extension;
        }
        return archiveIncludesPattern;
    }

    private void extractArchive(String scannerBaseDir, String destDirectory, int archiveExtractionDepth, int curLevel) {
        File file = new File(scannerBaseDir);
        if (file.exists()) {
            if (file.isDirectory()) {
                // scan the directory for archive files
                DirectoryScanner scanner = new DirectoryScanner();
                scanner.setBasedir(scannerBaseDir);
                scanner.setIncludes(archiveIncludesPattern);
                scanner.setExcludes(archiveExcludesPattern);
                scanner.setCaseSensitive(false);
                scanner.scan();

                String[] fileNames = scanner.getIncludedFiles();
                for (String fileName : fileNames) {
                    String innerDir = destDirectory + File.separator + fileName.substring(0, fileName.lastIndexOf(DOT));
                    String archiveFile = scannerBaseDir + File.separator + fileName;
                    String lowerCaseFileName = fileName.toLowerCase();
                    if (lowerCaseFileName.matches(ZIP_EXTENSION_PATTERN)) {
                        unZip(fileName, innerDir, archiveFile);
                    } else if (lowerCaseFileName.matches(GEM_EXTENSION_PATTERN)) {
                        unTar(fileName, innerDir, archiveFile);
                        innerDir = innerDir + File.separator + RUBY_DATA_FILE;
                        unTar(RUBY_DATA_FILE, innerDir.substring(0, innerDir.lastIndexOf(DOT)), innerDir);
                        innerDir = innerDir.replaceAll(TAR_GZ_SUFFIX, BLANK);
                    } else if (lowerCaseFileName.matches(TAR_EXTENSION_PATTERN)) {
                        unTar(fileName, innerDir, archiveFile);
                        innerDir = innerDir.replaceAll(TAR_SUFFIX, BLANK);
                    } else {
                        logger.warn("Error: {} is an unsupported archive type", fileName.substring(fileName.lastIndexOf(DOT)));
                        // note: this aborts the scan of the remaining files in this directory
                        return;
                    }
                    // extract again if needed, according to the archiveExtractionDepth parameter
                    if (curLevel < archiveExtractionDepth) {
                        extractArchive(innerDir, innerDir, archiveExtractionDepth, curLevel + 1);
                    }
                }
            }
        }
    }

    // open and extract data from zip-type archives
    private void unZip(String fileName, String innerDir, String archiveFile) {
        try {
            ZipFile zipFile = new ZipFile(archiveFile);
            zipFile.extractAll(innerDir);
        } catch (ZipException e) {
            logger.warn("Error extracting file {}: {}", fileName, e.getMessage());
        }
    }

    // open and extract data from tar-type archives (gems are plain tar files)
    private void unTar(String fileName, String innerDir, String archiveFile) {
        TarUnArchiver unArchiver = new TarUnArchiver();
        try {
            if (fileName.endsWith(TAR_GZ_SUFFIX)) {
                innerDir = innerDir.substring(0, innerDir.lastIndexOf(DOT));
                unArchiver = new TarGZipUnArchiver();
            } else if (fileName.endsWith(TAR_SUFFIX) || fileName.endsWith(DOT + GEM_EXTENSIONS.get(0))) {
                // the original compared fileName against GEM_EXTENSION_PATTERN (a regex string),
                // which a plain endsWith can never match; checking the ".gem" extension is the intent
                unArchiver = new TarUnArchiver();
            }
            unArchiver.enableLogging(new ConsoleLogger(ConsoleLogger.LEVEL_DISABLED, UN_ARCHIVER_LOGGER));
            unArchiver.setSourceFile(new File(archiveFile));
            File destDir = new File(innerDir);
            if (!destDir.exists()) {
                destDir.mkdirs();
            }
            unArchiver.setDestDirectory(destDir);
            unArchiver.extract();
        } catch (Exception e) {
            logger.warn("Error extracting file {}: {}", fileName, e.getMessage());
        }
    }
}
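A hedged usage sketch for the extractor above; the directory path and include/exclude patterns are made up, and the surrounding class exists only for illustration.

// Illustrative only.
public class ArchiveExtractorExample {
    public static void main(String[] args) {
        String[] includes = {"**/*.jar", "**/*.tar.gz"};
        String[] excludes = {"**/test-data/**"};
        ArchiveExtractor extractor = new ArchiveExtractor(includes, excludes);
        try {
            // drill at most two archive levels deep (e.g. a jar packaged inside a war)
            String extractedDir = extractor.extractArchives("/path/to/project", 2);
            System.out.println("Extracted to: " + extractedDir);
            // ... hand extractedDir to the file scanner here ...
        } finally {
            extractor.deleteArchiveDirectory();
        }
    }
}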
package com.payneteasy.superfly.security;

import java.util.Collection;

import org.springframework.security.access.AccessDecisionManager;
import org.springframework.security.access.AccessDeniedException;
import org.springframework.security.access.ConfigAttribute;
import org.springframework.security.authentication.InsufficientAuthenticationException;
import org.springframework.security.core.Authentication;

/**
 * {@link AccessDecisionManager} which first checks whether the current
 * {@link Authentication} is sufficient. If not, it throws an
 * {@link InsufficientAuthenticationException}; otherwise it just delegates to
 * the underlying {@link AccessDecisionManager}.
 *
 * @author Roman Puchkovskiy
 */
public class InsufficientAuthenticationAccessDecisionManager extends DelegatingDecisionManager {

    private Class<?>[] insufficientAuthenticationClasses;

    public void setInsufficientAuthenticationClasses(Class<?>[] classes) {
        insufficientAuthenticationClasses = classes;
    }

    public InsufficientAuthenticationAccessDecisionManager() {
    }

    public InsufficientAuthenticationAccessDecisionManager(AccessDecisionManager delegate) {
        super(delegate);
    }

    @Override
    public void decide(Authentication authentication, Object object,
            Collection<ConfigAttribute> configAttributes)
            throws AccessDeniedException, InsufficientAuthenticationException {
        boolean insufficient = false;
        for (Class<?> clazz : insufficientAuthenticationClasses) {
            if (clazz.isAssignableFrom(authentication.getClass())) {
                insufficient = true;
                break;
            }
        }
        if (insufficient) {
            throw new InsufficientAuthenticationException(authentication.getClass().getName());
        }
        // sufficient authentication, just proceed
        getDelegate().decide(authentication, object, configAttributes);
    }
}
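A hedged wiring sketch for the decision manager above. AnonymousAuthenticationToken is used only as a plausible example of an "insufficient" authentication type, and the example class itself is hypothetical; the delegate is whatever concrete AccessDecisionManager the application already uses.

// Illustrative wiring; not part of the original module.
package com.payneteasy.superfly.security;

import org.springframework.security.access.AccessDecisionManager;
import org.springframework.security.authentication.AnonymousAuthenticationToken;

public class DecisionManagerWiringExample {
    public static AccessDecisionManager wrap(AccessDecisionManager delegate) {
        InsufficientAuthenticationAccessDecisionManager manager =
                new InsufficientAuthenticationAccessDecisionManager(delegate);
        // Anonymous tokens count as "not authenticated enough": requests carrying them get an
        // InsufficientAuthenticationException (typically leading to a login prompt) rather than
        // a plain AccessDeniedException.
        manager.setInsufficientAuthenticationClasses(new Class<?>[]{AnonymousAuthenticationToken.class});
        return manager;
    }
}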
package pitt.search.semanticvectors.tables; import java.io.BufferedReader; import java.io.FileReader; import java.io.IOException; import java.util.ArrayList; import pitt.search.semanticvectors.FlagConfig; import pitt.search.semanticvectors.SearchResult; import pitt.search.semanticvectors.VectorStoreWriter; import pitt.search.semanticvectors.utils.StringUtils; import pitt.search.semanticvectors.utils.VerbatimLogger; import pitt.search.semanticvectors.vectors.Vector; /** * Class that reads input data from a stream and organizes it into records and columns. * * @author dwiddows */ public class TableIndexer { public static final String usageMessage = "Usage: java pitt.search.semanticvectors.tables.TableIndexer [--args] $TABLE_CSV_FILENAME"; /** Experiment in querying for a particular inauguration date. */ private static void queryForSpecialValues(Table table) { System.out.println("\nQuerying for time took office 1800"); Vector queryVector = table.makeCellVector(2, "1800"); for (SearchResult result : table.searchRowVectors(queryVector)) { System.out.println(result.toTexTableString(20)); } System.out.println("\nQuerying for year of birth 1800"); queryVector = table.makeCellVector(5, "1800"); for (SearchResult result : table.searchRowVectors(queryVector)) { System.out.println(result.toTexTableString(20)); } System.out.println("\nSorting in order of age upon taking office"); queryVector = table.getColumnAlphaVector(8).copy(); queryVector.bind(table.getColumnVector(8)); for (SearchResult result : table.searchRowVectors(queryVector)) { System.out.println(result.toTexTableString(20)); } System.out.println("\nQuerying for proximity between year of birth and time took office"); Vector elementalYOB = table.getColumnVector(5); Vector elementalTTO = table.getColumnVector(2); Vector demarcatorAlpha = table.getColumnAlphaVector(2); //should be identical across columns on account of "standard_demarcator" seed Vector demarcatorOmega = table.getColumnOmegaVector(2); //as above //System.out.println(demarcatorAlpha.measureOverlap(table.getColumnAlphaVector(5))); //System.out.println(table.getDemarcatorVector(2, 1950).measureOverlap( table.getDemarcatorVector(5, 1950))); for (SearchResult result : table.searchProxRowVectors(elementalYOB,elementalTTO, demarcatorAlpha, demarcatorOmega)) { System.out.println(result.getScore()+"\t"+result.getObjectVector().getObject().toString()); } } /** Experiment in querying for a particular president's name. 
*/ private static void queryForName(Table table, String name) { System.out.println("Querying for name: '" + name + "'"); Vector queryVector = table.getRowVectorStore().getVector(name); for (SearchResult result : table.searchRowVectors(queryVector)) { System.out.println(result.toTexTableString(20)); } } public static void main(String[] args) throws IOException { FlagConfig flagConfig = null; try { flagConfig = FlagConfig.getFlagConfig(args); args = flagConfig.remainingArgs; } catch (IllegalArgumentException e) { System.err.println(usageMessage); throw e; } if (flagConfig.remainingArgs.length != 1) { throw new IllegalArgumentException("Wrong number of arguments after parsing command line flags.\n" + usageMessage); } VerbatimLogger.info("Building vector index of table in file: " + args[0] + "\n"); BufferedReader fileReader = new BufferedReader(new FileReader(args[0])); String[] columnHeaders = fileReader.readLine().split(","); ArrayList<String[]> dataRows = new ArrayList<>(); String dataLine; while((dataLine = fileReader.readLine()) != null) { String[] dataEntries = dataLine.split(","); if (dataEntries.length != columnHeaders.length) { throw new IllegalArgumentException(String.format( "Column headers have length %d and this row has length %d. This indicates a data error or a csv parsing error." + "\nColumn headers:%s\nData row: %s\n", columnHeaders.length, dataEntries.length, StringUtils.join(columnHeaders), StringUtils.join(dataEntries))); } dataRows.add(dataEntries); } fileReader.close(); Table table = new Table(flagConfig, columnHeaders, dataRows); VectorStoreWriter.writeVectors(flagConfig.termvectorsfile(), flagConfig, table.getRowVectorStore()); queryForSpecialValues(table); //queryForName(table, "J. Adams"); // queryForName(table, "T. Roosevelt"); } }
package org.apereo.cas.authentication; import org.apereo.cas.authentication.credential.UsernamePasswordCredential; import org.apereo.cas.authentication.principal.Service; import org.apereo.cas.util.junit.EnabledIfListeningOnPort; import org.jooq.lambda.Unchecked; import org.jooq.lambda.UncheckedException; import org.junit.jupiter.api.Nested; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; import org.springframework.test.context.TestPropertySource; import javax.security.auth.login.AccountNotFoundException; import javax.security.auth.login.LoginException; import static org.apereo.cas.util.junit.Assertions.*; import static org.junit.jupiter.api.Assertions.*; import static org.mockito.Mockito.*; /** * Unit test for {@link LdapAuthenticationHandler}. * * @author Marvin S. Addison * @author Misagh Moayyed * @since 4.0.0 */ @TestPropertySource(properties = { "cas.authn.ldap[0].type=AUTHENTICATED", "cas.authn.ldap[0].ldap-url=ldap://localhost:10389", "cas.authn.ldap[0].base-dn=dc=something,dc=example,dc=org|dc=example,dc=org", "cas.authn.ldap[0].search-filter=cn={user}", "cas.authn.ldap[0].bind-dn=cn=Directory Manager", "cas.authn.ldap[0].bind-credential=password", "cas.authn.ldap[0].collect-dn-attribute=true", "cas.authn.ldap[0].principal-attribute-list=description,cn" }) @EnabledIfListeningOnPort(port = 10389) @Tag("Ldap") public class AuthenticatedLdapAuthenticationHandlerTests { @Nested @SuppressWarnings("ClassCanBeStatic") public class WithoutCustomPrincipalId extends BaseLdapAuthenticationHandlerTests { @Test public void verifyAuthenticateNotFound() { assertThrowsWithRootCause(UncheckedException.class, AccountNotFoundException.class, () -> ldapAuthenticationHandlers.toList() .forEach(Unchecked.consumer(h -> h.authenticate( new UsernamePasswordCredential("notfound", "badpassword"), mock(Service.class))))); } @Test public void verifyAuthenticateFailureNotFound() { assertNotEquals(0, ldapAuthenticationHandlers.size()); assertThrowsWithRootCause(UncheckedException.class, AccountNotFoundException.class, () -> ldapAuthenticationHandlers.toList().forEach( Unchecked.consumer(h -> h.authenticate(new UsernamePasswordCredential("bad", "bad"), mock(Service.class))))); } } @TestPropertySource(properties = "cas.authn.ldap[0].principal-attribute-id=unknown") @Nested @SuppressWarnings("ClassCanBeStatic") public class WithUnknownCustomPrincipalId extends BaseLdapAuthenticationHandlerTests { } @TestPropertySource(properties = { "cas.authn.ldap[0].principal-attribute-id=unknown", "cas.authn.ldap[0].allow-missing-principal-attribute-value=false" }) @Nested @SuppressWarnings("ClassCanBeStatic") public class WithUnknownCustomPrincipalIdFailing extends BaseLdapAuthenticationHandlerTests { @Override public void verifyAuthenticateSuccess() { assertThrows(LoginException.class, super::verifyAuthenticateSuccess); } } }
package restapi.weatherstation;

import entities.sub_entity.WeatherStation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import repository.WeatherStationRepository;

import java.util.ArrayList;

@RestController
public class WeatherStationController {

    @Autowired
    private WeatherStationRepository repository;

    @RequestMapping(value = "/weather-station/", method = RequestMethod.POST, produces = "application/json")
    public void createWeatherStation(@RequestBody WeatherStation weatherStation) {
        repository.save(weatherStation);
    }

    @RequestMapping(value = "/weather-station/", method = RequestMethod.GET, produces = "application/json")
    public ArrayList<WeatherStation> getAllWeatherStations() {
        ArrayList<WeatherStation> stations = new ArrayList<>();
        repository.findAll().forEach(stations::add);
        return stations;
    }

    @RequestMapping(value = "/weather-station/{id}", method = RequestMethod.GET, produces = "application/json")
    public WeatherStation getWeatherStationById(@PathVariable long id) {
        // renamed from getAllWeatherStations(long): this endpoint returns a single station by id
        return repository.findOne(id);
    }
}
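A hedged client-side sketch of calling the endpoints above with Spring's RestTemplate. The base URL and port are made up, and it is assumed that WeatherStation has a usable default constructor and can be (de)serialized as JSON.

// Illustrative client; not part of the controller's module.
import entities.sub_entity.WeatherStation;
import org.springframework.web.client.RestTemplate;

public class WeatherStationClientExample {
    public static void main(String[] args) {
        RestTemplate rest = new RestTemplate();
        String baseUrl = "http://localhost:8080";

        // POST /weather-station/ creates a station from the JSON request body.
        WeatherStation station = new WeatherStation();
        rest.postForObject(baseUrl + "/weather-station/", station, Void.class);

        // GET /weather-station/{id} returns a single station as JSON.
        WeatherStation fetched = rest.getForObject(baseUrl + "/weather-station/{id}", WeatherStation.class, 1L);
        System.out.println(fetched);
    }
}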
package org.apereo.cas.support.saml.mdui.web.flow; import org.apache.commons.lang3.StringUtils; import org.apereo.cas.authentication.principal.Service; import org.apereo.cas.authentication.principal.ServiceFactory; import org.apereo.cas.authentication.principal.WebApplicationService; import org.apereo.cas.services.RegisteredService; import org.apereo.cas.services.ServicesManager; import org.apereo.cas.services.UnauthorizedServiceException; import org.apereo.cas.support.saml.mdui.MetadataResolverAdapter; import org.apereo.cas.support.saml.mdui.MetadataUIUtils; import org.apereo.cas.support.saml.mdui.SamlMetadataUIInfo; import org.apereo.cas.web.support.WebUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.webflow.action.AbstractAction; import org.springframework.webflow.execution.Event; import org.springframework.webflow.execution.RequestContext; import javax.servlet.http.HttpServletRequest; /** * This is {@link SamlMetadataUIParserAction} that attempts to parse * the MDUI extension block for a SAML SP from the provided metadata locations. * The result is put into the flow request context. The entity id parameter is * specified by default at {@link org.apereo.cas.support.saml.SamlProtocolConstants#PARAMETER_ENTITY_ID}. * <p>This action is best suited to be invoked when the CAS login page * is about to render so that the page, once the MDUI info is obtained, * has a chance to populate the UI with relevant info about the SP.</p> * * @author Misagh Moayyed * @since 4.1.0 */ public class SamlMetadataUIParserAction extends AbstractAction { private static final Logger LOGGER = LoggerFactory.getLogger(SamlMetadataUIParserAction.class); private final String entityIdParameterName; private final MetadataResolverAdapter metadataAdapter; private ServicesManager servicesManager; private ServiceFactory<WebApplicationService> serviceFactory; /** * Instantiates a new SAML MDUI parser action. * * @param entityIdParameterName the entity id parameter name * @param metadataAdapter the metadata adapter * @param serviceFactory the service factory * @param servicesManager the service manager */ public SamlMetadataUIParserAction(final String entityIdParameterName, final MetadataResolverAdapter metadataAdapter, final ServiceFactory<WebApplicationService> serviceFactory, final ServicesManager servicesManager) { this.entityIdParameterName = entityIdParameterName; this.metadataAdapter = metadataAdapter; this.serviceFactory = serviceFactory; this.servicesManager = servicesManager; } @Override public Event doExecute(final RequestContext requestContext) throws Exception { final String entityId = getEntityIdFromRequest(requestContext); if (StringUtils.isBlank(entityId)) { LOGGER.debug("No entity id found for parameter [{}]", this.entityIdParameterName); return success(); } final RegisteredService registeredService = getRegisteredServiceFromRequest(requestContext, entityId); verifyRegisteredService(requestContext, registeredService); loadSamlMetadataIntoRequestContext(requestContext, entityId, registeredService); return success(); } /** * Load saml metadata into request context. 
 *
 * @param requestContext    the request context
 * @param entityId          the entity id
 * @param registeredService the registered service
 */
protected void loadSamlMetadataIntoRequestContext(final RequestContext requestContext, final String entityId, final RegisteredService registeredService) {
    final SamlMetadataUIInfo mdui = MetadataUIUtils.locateMetadataUserInterfaceForEntityId(this.metadataAdapter, entityId, registeredService);
    WebUtils.putServiceUserInterfaceMetadata(requestContext, mdui);
}

/**
 * Verify registered service.
 *
 * @param requestContext    the request context
 * @param registeredService the registered service
 */
protected void verifyRegisteredService(final RequestContext requestContext, final RegisteredService registeredService) {
    if (registeredService == null || !registeredService.getAccessStrategy().isServiceAccessAllowed()) {
        // the original message passed registeredService without declaring a placeholder for it
        LOGGER.debug("Service [{}] is not recognized/allowed by the CAS service registry", registeredService);
        if (registeredService != null) {
            WebUtils.putUnauthorizedRedirectUrlIntoFlowScope(requestContext, registeredService.getAccessStrategy().getUnauthorizedRedirectUrl());
        }
        throw new UnauthorizedServiceException(UnauthorizedServiceException.CODE_UNAUTHZ_SERVICE, StringUtils.EMPTY);
    }
}

/**
 * Gets registered service from request.
 *
 * @param requestContext the request context
 * @param entityId       the entity id
 * @return the registered service from request
 */
protected RegisteredService getRegisteredServiceFromRequest(final RequestContext requestContext, final String entityId) {
    final Service currentService = WebUtils.getService(requestContext);
    final WebApplicationService service = this.serviceFactory.createService(entityId);
    RegisteredService registeredService = this.servicesManager.findServiceBy(service);
    if (registeredService == null) {
        // the original message passed two arguments but declared only one placeholder
        LOGGER.debug("Entity id [{}] not found in the registry. Falling back onto [{}]", entityId, currentService);
        registeredService = this.servicesManager.findServiceBy(currentService);
    }
    LOGGER.debug("Located service definition [{}]", registeredService);
    return registeredService;
}

/**
 * Gets entity id from request.
 *
 * @param requestContext the request context
 * @return the entity id from request
 */
protected String getEntityIdFromRequest(final RequestContext requestContext) {
    final HttpServletRequest request = WebUtils.getHttpServletRequest(requestContext);
    return request.getParameter(this.entityIdParameterName);
}
}
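A hedged construction sketch for the action above; in CAS this action is normally defined as a Spring bean, and the factory-method name here is made up. The entity-id parameter is taken from SamlProtocolConstants (referenced in the class javadoc), and the collaborators would come from the surrounding CAS configuration.

// Illustrative fragment; requires an import of org.apereo.cas.support.saml.SamlProtocolConstants.
public static SamlMetadataUIParserAction newSamlMduiParserAction(
        final MetadataResolverAdapter metadataAdapter,
        final ServiceFactory<WebApplicationService> serviceFactory,
        final ServicesManager servicesManager) {
    // uses the default entity-id request parameter mentioned in the class javadoc
    return new SamlMetadataUIParserAction(SamlProtocolConstants.PARAMETER_ENTITY_ID,
            metadataAdapter, serviceFactory, servicesManager);
}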
package ru.VirtaMarketAnalyzer.data; import com.google.gson.annotations.SerializedName; import java.util.List; final public class ServiceGuideProduct { @SerializedName("pi") final private String productId; @SerializedName("qps") final private double quantityPerSell; @SerializedName("q") final private double quality; @SerializedName("bp") final private double buyPrice; @SerializedName("sp") final private double sellPrice; @SerializedName("v") final private long volume; @SerializedName("sui") final private List<String> suppliersUnitIds; public ServiceGuideProduct( final String productId, final double quantityPerSell, final double quality, final double buyPrice, final double sellPrice, final long volume, final List<String> suppliersUnitIds ) { this.productId = productId; this.quantityPerSell = quantityPerSell; this.quality = quality; this.buyPrice = buyPrice; this.sellPrice = sellPrice; this.volume = volume; this.suppliersUnitIds = suppliersUnitIds; } public String getProductId() { return productId; } public double getQuality() { return quality; } public double getBuyPrice() { return buyPrice; } public double getSellPrice() { return sellPrice; } public long getVolume() { return volume; } public List<String> getSuppliersUnitIds() { return suppliersUnitIds; } public double getQuantityPerSell() { return quantityPerSell; } }
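The @SerializedName annotations above define the compact JSON keys this class maps to. A hedged round-trip sketch with Gson; the field values and the example class are made up.

// Illustrative only.
import com.google.gson.Gson;
import java.util.Arrays;
import ru.VirtaMarketAnalyzer.data.ServiceGuideProduct;

public class ServiceGuideProductJsonExample {
    public static void main(String[] args) {
        ServiceGuideProduct product = new ServiceGuideProduct(
                "422015", 1.0, 12.5, 30.0, 95.0, 1500L, Arrays.asList("9114041"));
        Gson gson = new Gson();

        // Serializes to the short keys, e.g. {"pi":"422015","qps":1.0,"q":12.5,"bp":30.0,"sp":95.0,"v":1500,"sui":["9114041"]}
        String json = gson.toJson(product);
        System.out.println(json);

        // Gson populates the final fields via reflection, so no no-arg constructor is needed.
        ServiceGuideProduct parsed = gson.fromJson(json, ServiceGuideProduct.class);
        System.out.println(parsed.getProductId() + " sells for " + parsed.getSellPrice());
    }
}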
package ru.VirtaMarketAnalyzer.parser; import org.apache.log4j.BasicConfigurator; import org.apache.log4j.ConsoleAppender; import org.apache.log4j.PatternLayout; import org.jsoup.nodes.Document; import org.jsoup.nodes.Element; import org.jsoup.select.Elements; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import ru.VirtaMarketAnalyzer.data.Product; import ru.VirtaMarketAnalyzer.data.ProductCategory; import ru.VirtaMarketAnalyzer.main.Utils; import ru.VirtaMarketAnalyzer.main.Wizard; import ru.VirtaMarketAnalyzer.scrapper.Downloader; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; public final class ProductInitParser { private static final Logger logger = LoggerFactory.getLogger(ProductInitParser.class); final static Pattern productPattern = Pattern.compile("\\{id\\s+:\\s+'([^']+)',\\s+catid\\s+:\\s+'([^']+)',\\s+symbol\\s+:\\s+'([^']+)',\\s+name\\s+:\\s+\"([^\"]+)\"\\},"); public static void main(final String[] args) throws IOException { BasicConfigurator.configure(new ConsoleAppender(new PatternLayout("%d{ISO8601} [%t] %p %C{1} %x - %m%n"))); System.out.println(Utils.getPrettyGson(getTradingProducts(Wizard.host, "olga"))); // System.out.println(Utils.getPrettyGson(getTradeProductCategories(Wizard.host, "olga"))); // System.out.println(Utils.getPrettyGson(getTradeProductCategories(Wizard.host_en, "olga"))); // System.out.println(Utils.getPrettyGson(getManufactureProductCategories(Wizard.host, "olga"))); // System.out.println(Utils.getPrettyGson(getManufactureProductCategories(Wizard.host_en, "olga"))); } public static List<Product> getTradingProducts(final String host, final String realm) throws IOException { final List<ProductCategory> productCategories = getTradeProductCategories(host, realm); return get(host, realm, "/main/globalreport/marketing/by_trade_at_cities", productCategories); } public static List<Product> getManufactureProducts(final String host, final String realm) throws IOException { final List<ProductCategory> productCategories = getManufactureProductCategories(host, realm); return get(host, realm, "/main/globalreport/manufacture", productCategories); } public static Product getTradingProduct(final String host, final String realm, final String id) throws IOException { return getTradingProducts(host, realm).stream().filter(product -> product.getId().equals(id)).findFirst().get(); } public static Product getManufactureProduct(final String host, final String realm, final String id) throws IOException { return getManufactureProducts(host, realm).stream().filter(product -> product.getId().equals(id)).findFirst().get(); } public static List<Product> get(final String host, final String realm, final String path, final List<ProductCategory> productCategories) throws IOException { List<Product> products = getInternal(host, realm, path, productCategories); if(products.isEmpty()){ Downloader.invalidateCache(host + realm + path); products = getInternal(host, realm, path, productCategories); } if(products.isEmpty()){ throw new IOException("Не удалось получить список товаров"); } return products; } private static List<Product> getInternal(final String host, final String realm, final String path, final List<ProductCategory> productCategories) throws IOException { final Document doc = Downloader.getDoc(host + realm + path); final List<Product> products = new ArrayList<>(); final Elements scripts = doc.select("script"); for (final Element script : scripts) { if 
(!script.html().isEmpty()) { final Matcher m = productPattern.matcher(script.html()); while (m.find()) { final String id = m.group(1); final String productCategoryID = m.group(2); final String productCategory = productCategories.stream().filter(pc -> pc.getId().equals(productCategoryID)).findFirst().get().getCaption(); final String symbol = m.group(3); final String caption = m.group(4); products.add(new Product(productCategory, id, caption, productCategoryID, symbol)); } } } return products; } public static List<ProductCategory> getTradeProductCategories(final String host, final String realm) throws IOException { return getProductCategories(host, realm, "/main/globalreport/marketing/by_trade_at_cities"); } public static List<ProductCategory> getManufactureProductCategories(final String host, final String realm) throws IOException { return getProductCategories(host, realm, "/main/globalreport/manufacture"); } public static List<ProductCategory> getProductCategories(final String host, final String realm, final String path) throws IOException { List<ProductCategory> productCategories = getProductCategoriesInternal(host, realm, path); if(productCategories.isEmpty()){ Downloader.invalidateCache(host + realm + path); productCategories = getProductCategoriesInternal(host, realm, path); } if(productCategories.isEmpty()){ throw new IOException("Не удалось получить список категорий товаров"); } return productCategories; } private static List<ProductCategory> getProductCategoriesInternal(final String host, final String realm, final String path) throws IOException { final Document doc = Downloader.getDoc(host + realm + path); final List<ProductCategory> list = new ArrayList<>(); final Elements ops = doc.select("#__product_category_list > option"); for (final Element op : ops) { final String id = op.val(); final String caption = op.text(); list.add(new ProductCategory(id, caption)); } return list; } }
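The productPattern above pulls id/catid/symbol/name out of inline JavaScript on the report page. A small sketch of what a matching script fragment looks like and which capture group holds what; the sample literal is made up to fit the pattern, not copied from the site.

// Illustrative only; demonstrates the capture groups of ProductInitParser.productPattern.
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class ProductPatternExample {
    public static void main(String[] args) {
        Pattern productPattern = Pattern.compile(
                "\\{id\\s+:\\s+'([^']+)',\\s+catid\\s+:\\s+'([^']+)',\\s+symbol\\s+:\\s+'([^']+)',\\s+name\\s+:\\s+\"([^\"]+)\"\\},");
        String script = "products : [{id : '422015', catid : '422', symbol : 'bread', name : \"Bread\"},]";

        Matcher m = productPattern.matcher(script);
        while (m.find()) {
            System.out.println("id=" + m.group(1) + " catid=" + m.group(2)
                    + " symbol=" + m.group(3) + " name=" + m.group(4));
        }
    }
}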
package seedu.address.logic.commands; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; import edu.emory.mathcs.backport.java.util.Collections; import seedu.address.commons.core.Messages; import seedu.address.logic.commands.exceptions.CommandException; import seedu.address.logic.undo.UndoManager; import seedu.address.model.booking.Booking; import seedu.address.model.booking.UniqueBookingList; import seedu.address.model.booking.UniqueBookingList.DuplicateBookingException; import seedu.address.model.task.ReadOnlyTask; import seedu.address.model.task.Task; import seedu.address.model.task.UniqueTaskList.DuplicateTaskException; //@@author A0162877N /** * Add, change or remove booking time slot in the Task Manager */ public class EditBookingCommand extends Command { public static final String COMMAND_WORD = "editbooking"; public static final String MESSAGE_DUPLICATE_BOOKING = "This booking already exists in the task manager"; public static final String MESSAGE_USAGE = COMMAND_WORD + ": Edits the booking of the task identified " + "by the index number used in the last task listing. " + "Parameters: INDEX (index of the task in the current task list and must be a positive integer)\n" + "Parameters: [add DATE, MORE DATES] | [remove BOOKING_INDEX] | [change BOOKING_INDEX DATE]\n" + "Example: " + COMMAND_WORD + " 1 add tuesday 2pm to 5pm\n" + "Example: " + COMMAND_WORD + " 1 remove 1\n" + "Example: " + COMMAND_WORD + " 1 change 1 tuesday 1pm to 4pm\n"; public static final String MESSAGE_NO_SUCH_BOOKING = "Index provided is invalid.\n" + MESSAGE_USAGE; public static final String MESSAGE_TASK_NO_BOOKING = "This task does not have bookings to update."; public static final String MESSAGE_EDIT_TASK_SUCCESS = "Edited Task: %1$s"; public static final int BOOKING_INDEX = 0; public static final int VALID_INDEX = 0; private final int filteredTaskListIndex; private final int bookingSlotIndex; private final Set<Booking> bookingSet; public EditBookingCommand(int filteredTaskListIndex, int bookingSlotIndex) { assert filteredTaskListIndex > VALID_INDEX; assert bookingSlotIndex > VALID_INDEX; this.filteredTaskListIndex = filteredTaskListIndex - 1; this.bookingSlotIndex = bookingSlotIndex - 1; this.bookingSet = new HashSet<>(); } public EditBookingCommand(int filteredTaskListIndex, String... 
dates) throws CommandException { assert filteredTaskListIndex > VALID_INDEX; this.filteredTaskListIndex = filteredTaskListIndex - 1; bookingSlotIndex = -1; bookingSet = new HashSet<>(); for (String bookingSlot : dates) { bookingSet.add(new Booking(bookingSlot)); } } public EditBookingCommand(int filteredTaskListIndex, int bookingSlotIndex, String date) throws CommandException { assert filteredTaskListIndex > VALID_INDEX; assert bookingSlotIndex > VALID_INDEX; this.filteredTaskListIndex = filteredTaskListIndex - 1; this.bookingSlotIndex = bookingSlotIndex - 1; bookingSet = new HashSet<>(); bookingSet.add(new Booking(date)); } @Override public CommandResult execute() throws CommandException { assert model != null; List<ReadOnlyTask> lastShownList = model.getFilteredTaskList(); try { boolean bookingSuccess = false; if (bookingSet.isEmpty() && bookingSlotIndex >= VALID_INDEX) { bookingSuccess = removeBookingsInTasks(lastShownList); } else if (!bookingSet.isEmpty() && bookingSlotIndex >= VALID_INDEX) { bookingSuccess = changeBookingsInTasks(lastShownList); } else if (!bookingSet.isEmpty()) { bookingSuccess = addBookingsInTasks(lastShownList); } else { throw new CommandException(MESSAGE_TASK_NO_BOOKING); } if (!bookingSuccess) { throw new CommandException(MESSAGE_NO_SUCH_BOOKING); } return new CommandResult(String.format(MESSAGE_EDIT_TASK_SUCCESS, filteredTaskListIndex + 1)); } catch (DuplicateBookingException e) { throw new CommandException(e.getMessage()); } } /** * Removes a specific booking time slot in the booking * * @param lastShownList * @return true if the specified booking exists * @throws DuplicateBookingException */ private boolean removeBookingsInTasks(List<ReadOnlyTask> lastShownList) throws CommandException, DuplicateBookingException { boolean bookingExist = false; if (filteredTaskListIndex >= lastShownList.size()) { throw new CommandException(Messages.MESSAGE_INVALID_TASKS_DISPLAYED_INDEX); } else { Task task = new Task(lastShownList.get(filteredTaskListIndex)); UniqueBookingList bookings = task.getBookings(); if (!bookings.isEmpty()) { List<Booking> bookingList = bookings.toList(); if (bookingSlotIndex >= bookingList.size()) { throw new CommandException(MESSAGE_NO_SUCH_BOOKING); } bookingList.remove(bookingSlotIndex); Collections.sort(bookingList); task.setBookings(new UniqueBookingList(bookingList)); bookingExist = true; try { saveCurrentState(); model.updateTask(filteredTaskListIndex, task); } catch (DuplicateTaskException dpe) { throw new CommandException(MESSAGE_DUPLICATE_BOOKING); } } else { throw new CommandException(MESSAGE_NO_SUCH_BOOKING); } } return bookingExist; } /** * Adds time slots in booking * * @param lastShownList * @return true if the specified booking exists * @throws DuplicateBookingException */ private boolean addBookingsInTasks(List<ReadOnlyTask> lastShownList) throws CommandException, DuplicateBookingException { boolean bookingSuccess = false; if (filteredTaskListIndex >= lastShownList.size()) { throw new CommandException(Messages.MESSAGE_INVALID_TASKS_DISPLAYED_INDEX); } else { Task task = new Task(lastShownList.get(filteredTaskListIndex)); UniqueBookingList bookings = task.getBookings(); if (!bookings.isEmpty()) { Set<Booking> taskBookingSet = bookings.toSet(); List<Booking> bookingList = new ArrayList<Booking>(bookingSet); for (Booking booking : bookingList) { taskBookingSet.add(booking); } task.setBookings(new UniqueBookingList(taskBookingSet)); bookingSuccess = true; try { saveCurrentState(); model.updateTask(filteredTaskListIndex, task); } catch 
(DuplicateTaskException dpe) { throw new CommandException(MESSAGE_DUPLICATE_BOOKING); } } else { throw new CommandException(MESSAGE_NO_SUCH_BOOKING); } } return bookingSuccess; } /** * Replaces a specific booking time slot in bookings * * @param allTaskList * @return true if the specified booking exists * @throws DuplicateBookingException */ private boolean changeBookingsInTasks(List<ReadOnlyTask> lastShownList) throws CommandException, DuplicateBookingException { boolean bookingExist = false; if (filteredTaskListIndex >= lastShownList.size()) { throw new CommandException(Messages.MESSAGE_INVALID_TASKS_DISPLAYED_INDEX); } else { Task task = new Task(lastShownList.get(filteredTaskListIndex)); UniqueBookingList bookings = task.getBookings(); if (!bookings.isEmpty()) { List<Booking> bookingList = bookings.toList(); if (bookingSlotIndex >= bookingList.size() || bookingSlotIndex < VALID_INDEX) { throw new CommandException(MESSAGE_NO_SUCH_BOOKING); } if (!bookings.contains(new ArrayList<Booking>(bookingSet).get(BOOKING_INDEX))) { bookingList.remove(bookingSlotIndex); bookingList.add(new ArrayList<Booking>(bookingSet).get(BOOKING_INDEX)); } Collections.sort(bookingList); task.setBookings(new UniqueBookingList(bookingList)); bookingExist = true; try { saveCurrentState(); model.updateTask(filteredTaskListIndex, task); } catch (DuplicateTaskException dpe) { throw new CommandException(MESSAGE_DUPLICATE_BOOKING); } } else { throw new CommandException(MESSAGE_NO_SUCH_BOOKING); } } return bookingExist; } /** * Save the data in task manager if command is mutating the data */ public void saveCurrentState() { if (isMutating()) { try { UndoManager.getInstance().addStorageHistory(model.getTaskManager().getImmutableTaskList(), model.getTaskManager().getImmutableLabelList()); } catch (CloneNotSupportedException e) { e.printStackTrace(); } } } @Override public boolean isMutating() { return true; } }
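A hedged sketch mapping the three usage forms in MESSAGE_USAGE to the three constructors above. The date strings are examples only; parsing them is delegated to the Booking class, which is not shown in this excerpt, and the wrapper class is hypothetical.

// Illustrative only: which constructor a command parser would use for each usage form.
public class EditBookingCommandExamples {
    public static void examples() throws CommandException {
        // "editbooking 1 add tuesday 2pm to 5pm, wednesday 2pm to 5pm"
        EditBookingCommand addCmd = new EditBookingCommand(1, "tuesday 2pm to 5pm", "wednesday 2pm to 5pm");

        // "editbooking 1 remove 2"
        EditBookingCommand removeCmd = new EditBookingCommand(1, 2);

        // "editbooking 1 change 2 tuesday 1pm to 4pm"
        EditBookingCommand changeCmd = new EditBookingCommand(1, 2, "tuesday 1pm to 4pm");
    }
}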