answer
stringlengths
17
10.2M
package edu.wheaton.simulator.statistics; import java.awt.Color; import java.io.BufferedReader; import java.io.File; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.IOException; import java.util.HashSet; import java.util.Set; import edu.wheaton.simulator.datastructure.ElementAlreadyContainedException; import edu.wheaton.simulator.datastructure.Grid; import edu.wheaton.simulator.entity.Agent; import edu.wheaton.simulator.entity.Prototype; public class Loader { private Grid grid; /** * Map of all PrototypeSnapshots for the simulation * Since PrototypeSnapshots are immutable, this collection is the same for each step */ private Set<Prototype> prototypes; public Loader(){ this.prototypes = new HashSet<Prototype>(); } public void load(String fileName){ File file = new File(fileName); BufferedReader reader = null; try { reader = new BufferedReader(new FileReader(file)); String readLine = reader.readLine(); while (readLine != null) { //TODO: Also need to save/load ending conditions //TODO: Also need to save/load triggers if(readLine.equals("AgentSnapshot")){ //Find the appropriate prototype String prototypeName = reader.readLine(); //This is an actual AgentSnapshot if(!prototypeName.equals("GRID")){ Prototype parent = getPrototype(prototypeName); //Create the Agent Agent agent = new Agent(grid, parent); //Get the Agent's position on the Grid int xpos = Integer.parseInt(reader.readLine()); int ypos = Integer.parseInt(reader.readLine()); //Add the agent's default fields readLine = reader.readLine(); while(readLine.substring(0, 13).equals("FieldSnapshot")){ String[] tokens = readLine.split(" "); try { agent.addField(tokens[1], tokens[2]); } catch (ElementAlreadyContainedException e) { System.out.println("Field already exists"); e.printStackTrace(); } } grid.addAgent(agent, xpos, ypos); } //This is a snapshot storing grid global variables else{ //prototypeName.equals("GRID") //Skip the x and y position - doesn't matter reader.readLine(); 
reader.readLine(); //Add the Grid's global variables readLine = reader.readLine(); while(readLine.substring(0, 13).equals("FieldSnapshot")){ String[] tokens = readLine.split(" "); try { grid.addField(tokens[1], tokens[2]); } catch (ElementAlreadyContainedException e) { System.out.println("Field already exists"); e.printStackTrace(); } } } } else if(readLine.equals("PrototypeSnapshot")){ //Parse the required prototype data String name = reader.readLine(); Color color = new Color(Integer.parseInt(reader.readLine())); byte[] design = createByteArray(reader.readLine()); //Create the prototype Prototype proto = new Prototype(grid, color, design, name); //Add the prototype's default fields readLine = reader.readLine(); while(readLine.substring(0, 13).equals("FieldSnapshot")){ String[] tokens = readLine.split(" "); try { proto.addField(tokens[1], tokens[2]); } catch (ElementAlreadyContainedException e) { System.out.println("Field already exists"); e.printStackTrace(); } } prototypes.add(proto); } else if(readLine.equals("Globals")){ } else{ reader.readLine(); } } } catch (FileNotFoundException e) { throw new RuntimeException("Could not find file: " + file.getAbsolutePath(), e); } catch (IOException e) { throw new RuntimeException("Could not read file: " + file.getAbsolutePath(), e); } finally { try { assert(reader!=null); reader.close(); } catch (IOException e) { throw new RuntimeException("Could not close stream", e); } } } /** * Create a byte array from a string * @param s String representing a byte array in the form "010111000" * @return The create byte array */ private static byte[] createByteArray(String s){ byte[] ret = new byte[s.length()]; for(int i = 0; i < s.length(); i++) ret[i] = (byte) s.charAt(i); return ret; } /** * Get the Prototype in this class's internal list with the supplied name * @param name The name of the prototype to retrieve * @return The prototype with the supplied name */ private Prototype getPrototype(String name){ Prototype ret = null; 
for(Prototype p : prototypes) if(p.getName().equals(name)) ret = p; if(ret == null) System.out.println("Parent Not Found"); return ret; } }
package info.nightscout.androidaps.plugins.Overview; import android.annotation.SuppressLint; import android.app.Activity; import android.app.NotificationManager; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.graphics.Color; import android.graphics.Paint; import android.os.Bundle; import android.os.Handler; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentManager; import android.support.v4.content.ContextCompat; import android.support.v7.app.AlertDialog; import android.support.v7.widget.CardView; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.RecyclerView; import android.util.DisplayMetrics; import android.util.TypedValue; import android.view.ContextMenu; import android.view.HapticFeedbackConstants; import android.view.LayoutInflater; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import android.widget.Button; import android.widget.CheckBox; import android.widget.CompoundButton; import android.widget.LinearLayout; import android.widget.TextView; import com.crashlytics.android.Crashlytics; import com.crashlytics.android.answers.Answers; import com.crashlytics.android.answers.CustomEvent; import com.jjoe64.graphview.GraphView; import com.squareup.otto.Subscribe; import org.json.JSONException; import org.json.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.text.DecimalFormat; import java.util.Calendar; import java.util.Date; import java.util.List; import java.util.Objects; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import info.nightscout.androidaps.BuildConfig; import info.nightscout.androidaps.Config; import info.nightscout.androidaps.Constants; import info.nightscout.androidaps.MainApp; import info.nightscout.androidaps.R; 
import info.nightscout.androidaps.data.DetailedBolusInfo; import info.nightscout.androidaps.data.GlucoseStatus; import info.nightscout.androidaps.data.IobTotal; import info.nightscout.androidaps.data.Profile; import info.nightscout.androidaps.db.BgReading; import info.nightscout.androidaps.db.CareportalEvent; import info.nightscout.androidaps.db.DatabaseHelper; import info.nightscout.androidaps.db.ExtendedBolus; import info.nightscout.androidaps.db.Source; import info.nightscout.androidaps.db.TempTarget; import info.nightscout.androidaps.db.TemporaryBasal; import info.nightscout.androidaps.events.EventCareportalEventChange; import info.nightscout.androidaps.events.EventExtendedBolusChange; import info.nightscout.androidaps.events.EventInitializationChanged; import info.nightscout.androidaps.events.EventPreferenceChange; import info.nightscout.androidaps.events.EventPumpStatusChanged; import info.nightscout.androidaps.events.EventRefreshOverview; import info.nightscout.androidaps.events.EventTempBasalChange; import info.nightscout.androidaps.events.EventTempTargetChange; import info.nightscout.androidaps.events.EventTreatmentChange; import info.nightscout.androidaps.interfaces.PluginBase; import info.nightscout.androidaps.interfaces.PumpInterface; import info.nightscout.androidaps.plugins.Careportal.CareportalFragment; import info.nightscout.androidaps.plugins.Careportal.Dialogs.NewNSTreatmentDialog; import info.nightscout.androidaps.plugins.Careportal.OptionsToShow; import info.nightscout.androidaps.plugins.ConfigBuilder.ConfigBuilderPlugin; import info.nightscout.androidaps.plugins.ConstraintsObjectives.ObjectivesPlugin; import info.nightscout.androidaps.plugins.IobCobCalculator.AutosensData; import info.nightscout.androidaps.plugins.IobCobCalculator.IobCobCalculatorPlugin; import info.nightscout.androidaps.plugins.IobCobCalculator.events.EventAutosensCalculationFinished; import info.nightscout.androidaps.plugins.Loop.LoopPlugin; import 
info.nightscout.androidaps.plugins.Loop.events.EventNewOpenLoopNotification; import info.nightscout.androidaps.plugins.NSClientInternal.broadcasts.BroadcastAckAlarm; import info.nightscout.androidaps.plugins.NSClientInternal.data.NSDeviceStatus; import info.nightscout.androidaps.plugins.OpenAPSAMA.DetermineBasalResultAMA; import info.nightscout.androidaps.plugins.Overview.Dialogs.CalibrationDialog; import info.nightscout.androidaps.plugins.Overview.Dialogs.ErrorHelperActivity; import info.nightscout.androidaps.plugins.Overview.Dialogs.NewTreatmentDialog; import info.nightscout.androidaps.plugins.Overview.Dialogs.WizardDialog; import info.nightscout.androidaps.plugins.Overview.events.EventDismissNotification; import info.nightscout.androidaps.plugins.Overview.events.EventSetWakeLock; import info.nightscout.androidaps.plugins.Overview.graphData.GraphData; import info.nightscout.androidaps.plugins.Overview.notifications.Notification; import info.nightscout.androidaps.plugins.Overview.notifications.NotificationStore; import info.nightscout.androidaps.plugins.SourceXdrip.SourceXdripPlugin; import info.nightscout.androidaps.plugins.Treatments.fragments.ProfileViewerDialog; import info.nightscout.androidaps.queue.Callback; import info.nightscout.utils.BolusWizard; import info.nightscout.utils.DateUtil; import info.nightscout.utils.DecimalFormatter; import info.nightscout.utils.NSUpload; import info.nightscout.utils.OKDialog; import info.nightscout.utils.Profiler; import info.nightscout.utils.SP; import info.nightscout.utils.ToastUtils; public class OverviewFragment extends Fragment implements View.OnClickListener, CompoundButton.OnCheckedChangeListener { private static Logger log = LoggerFactory.getLogger(OverviewFragment.class); TextView timeView; TextView bgView; TextView arrowView; TextView timeAgoView; TextView deltaView; TextView avgdeltaView; TextView baseBasalView; TextView extendedBolusView; TextView activeProfileView; TextView iobView; TextView cobView; TextView 
apsModeView; TextView tempTargetView; TextView pumpStatusView; TextView pumpDeviceStatusView; TextView openapsDeviceStatusView; TextView uploaderDeviceStatusView; LinearLayout loopStatusLayout; LinearLayout pumpStatusLayout; GraphView bgGraph; GraphView iobGraph; TextView iage; TextView cage; TextView sage; TextView pbage; CheckBox showPredictionView; CheckBox showBasalsView; CheckBox showIobView; CheckBox showCobView; CheckBox showDeviationsView; CheckBox showRatiosView; RecyclerView notificationsView; LinearLayoutManager llm; LinearLayout acceptTempLayout; Button treatmentButton; Button wizardButton; Button calibrationButton; Button acceptTempButton; Button quickWizardButton; CheckBox lockScreen; boolean smallWidth; boolean smallHeight; public static boolean shorttextmode = false; private boolean accepted; private int rangeToDisplay = 6; // for graph Handler sLoopHandler = new Handler(); Runnable sRefreshLoop = null; private static final ScheduledExecutorService worker = Executors.newSingleThreadScheduledExecutor(); private static ScheduledFuture<?> scheduledUpdate = null; public OverviewFragment() { super(); } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { try { //check screen width final DisplayMetrics dm = new DisplayMetrics(); getActivity().getWindowManager().getDefaultDisplay().getMetrics(dm); int screen_width = dm.widthPixels; int screen_height = dm.heightPixels; smallWidth = screen_width <= Constants.SMALL_WIDTH; smallHeight = screen_height <= Constants.SMALL_HEIGHT; boolean landscape = screen_height < screen_width; View view; if (MainApp.sResources.getBoolean(R.bool.isTablet) && (Config.NSCLIENT || Config.G5UPLOADER)) { view = inflater.inflate(R.layout.overview_fragment_nsclient_tablet, container, false); } else if (Config.NSCLIENT || Config.G5UPLOADER) { view = inflater.inflate(R.layout.overview_fragment_nsclient, container, false); shorttextmode = true; } else if (smallHeight || landscape) { 
view = inflater.inflate(R.layout.overview_fragment_smallheight, container, false); } else { view = inflater.inflate(R.layout.overview_fragment, container, false); } timeView = (TextView) view.findViewById(R.id.overview_time); bgView = (TextView) view.findViewById(R.id.overview_bg); arrowView = (TextView) view.findViewById(R.id.overview_arrow); if (smallWidth) { arrowView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 35); } timeAgoView = (TextView) view.findViewById(R.id.overview_timeago); deltaView = (TextView) view.findViewById(R.id.overview_delta); avgdeltaView = (TextView) view.findViewById(R.id.overview_avgdelta); baseBasalView = (TextView) view.findViewById(R.id.overview_basebasal); extendedBolusView = (TextView) view.findViewById(R.id.overview_extendedbolus); activeProfileView = (TextView) view.findViewById(R.id.overview_activeprofile); pumpStatusView = (TextView) view.findViewById(R.id.overview_pumpstatus); pumpDeviceStatusView = (TextView) view.findViewById(R.id.overview_pump); openapsDeviceStatusView = (TextView) view.findViewById(R.id.overview_openaps); uploaderDeviceStatusView = (TextView) view.findViewById(R.id.overview_uploader); loopStatusLayout = (LinearLayout) view.findViewById(R.id.overview_looplayout); pumpStatusLayout = (LinearLayout) view.findViewById(R.id.overview_pumpstatuslayout); pumpStatusView.setBackgroundColor(MainApp.sResources.getColor(R.color.colorInitializingBorder)); iobView = (TextView) view.findViewById(R.id.overview_iob); cobView = (TextView) view.findViewById(R.id.overview_cob); apsModeView = (TextView) view.findViewById(R.id.overview_apsmode); tempTargetView = (TextView) view.findViewById(R.id.overview_temptarget); iage = (TextView) view.findViewById(R.id.careportal_insulinage); cage = (TextView) view.findViewById(R.id.careportal_canulaage); sage = (TextView) view.findViewById(R.id.careportal_sensorage); pbage = (TextView) view.findViewById(R.id.careportal_pbage); bgGraph = (GraphView) view.findViewById(R.id.overview_bggraph); 
iobGraph = (GraphView) view.findViewById(R.id.overview_iobgraph); treatmentButton = (Button) view.findViewById(R.id.overview_treatmentbutton); treatmentButton.setOnClickListener(this); wizardButton = (Button) view.findViewById(R.id.overview_wizardbutton); wizardButton.setOnClickListener(this); acceptTempButton = (Button) view.findViewById(R.id.overview_accepttempbutton); if (acceptTempButton != null) acceptTempButton.setOnClickListener(this); quickWizardButton = (Button) view.findViewById(R.id.overview_quickwizardbutton); quickWizardButton.setOnClickListener(this); calibrationButton = (Button) view.findViewById(R.id.overview_calibrationbutton); if (calibrationButton != null) calibrationButton.setOnClickListener(this); acceptTempLayout = (LinearLayout) view.findViewById(R.id.overview_accepttemplayout); showPredictionView = (CheckBox) view.findViewById(R.id.overview_showprediction); showBasalsView = (CheckBox) view.findViewById(R.id.overview_showbasals); showIobView = (CheckBox) view.findViewById(R.id.overview_showiob); showCobView = (CheckBox) view.findViewById(R.id.overview_showcob); showDeviationsView = (CheckBox) view.findViewById(R.id.overview_showdeviations); showRatiosView = (CheckBox) view.findViewById(R.id.overview_showratios); showPredictionView.setChecked(SP.getBoolean("showprediction", false)); showBasalsView.setChecked(SP.getBoolean("showbasals", true)); showIobView.setChecked(SP.getBoolean("showiob", false)); showCobView.setChecked(SP.getBoolean("showcob", false)); showDeviationsView.setChecked(SP.getBoolean("showdeviations", false)); showRatiosView.setChecked(SP.getBoolean("showratios", false)); showPredictionView.setOnCheckedChangeListener(this); showBasalsView.setOnCheckedChangeListener(this); showIobView.setOnCheckedChangeListener(this); showCobView.setOnCheckedChangeListener(this); showDeviationsView.setOnCheckedChangeListener(this); showRatiosView.setOnCheckedChangeListener(this); notificationsView = (RecyclerView) 
view.findViewById(R.id.overview_notifications); notificationsView.setHasFixedSize(true); llm = new LinearLayoutManager(view.getContext()); notificationsView.setLayoutManager(llm); bgGraph.getGridLabelRenderer().setGridColor(MainApp.sResources.getColor(R.color.graphgrid)); bgGraph.getGridLabelRenderer().reloadStyles(); iobGraph.getGridLabelRenderer().setGridColor(MainApp.sResources.getColor(R.color.graphgrid)); iobGraph.getGridLabelRenderer().reloadStyles(); iobGraph.getGridLabelRenderer().setHorizontalLabelsVisible(false); bgGraph.getGridLabelRenderer().setLabelVerticalWidth(50); iobGraph.getGridLabelRenderer().setLabelVerticalWidth(50); iobGraph.getGridLabelRenderer().setNumVerticalLabels(5); rangeToDisplay = SP.getInt(R.string.key_rangetodisplay, 6); bgGraph.setOnLongClickListener(new View.OnLongClickListener() { @Override public boolean onLongClick(View v) { rangeToDisplay += 6; rangeToDisplay = rangeToDisplay > 24 ? 6 : rangeToDisplay; SP.putInt(R.string.key_rangetodisplay, rangeToDisplay); updateGUI("rangeChange"); return false; } }); lockScreen = (CheckBox) view.findViewById(R.id.overview_lockscreen); if (lockScreen != null) { lockScreen.setChecked(SP.getBoolean("lockscreen", false)); lockScreen.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() { @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { SP.putBoolean("lockscreen", isChecked); MainApp.bus().post(new EventSetWakeLock(isChecked)); } }); } return view; } catch (Exception e) { Crashlytics.logException(e); } return null; } @Override public void onCreateContextMenu(ContextMenu menu, View v, ContextMenu.ContextMenuInfo menuInfo) { super.onCreateContextMenu(menu, v, menuInfo); if (v == apsModeView) { final LoopPlugin activeloop = ConfigBuilderPlugin.getActiveLoop(); if (activeloop == null) return; menu.setHeaderTitle(MainApp.sResources.getString(R.string.loop)); if (activeloop.isEnabled(PluginBase.LOOP)) { 
menu.add(MainApp.sResources.getString(R.string.disableloop)); if (!activeloop.isSuspended()) { menu.add(MainApp.sResources.getString(R.string.suspendloopfor1h)); menu.add(MainApp.sResources.getString(R.string.suspendloopfor2h)); menu.add(MainApp.sResources.getString(R.string.suspendloopfor3h)); menu.add(MainApp.sResources.getString(R.string.suspendloopfor10h)); menu.add(MainApp.sResources.getString(R.string.disconnectpumpfor30m)); menu.add(MainApp.sResources.getString(R.string.disconnectpumpfor1h)); menu.add(MainApp.sResources.getString(R.string.disconnectpumpfor2h)); menu.add(MainApp.sResources.getString(R.string.disconnectpumpfor3h)); } else { menu.add(MainApp.sResources.getString(R.string.resume)); } } if (!activeloop.isEnabled(PluginBase.LOOP)) menu.add(MainApp.sResources.getString(R.string.enableloop)); } else if (v == activeProfileView) { menu.setHeaderTitle(MainApp.sResources.getString(R.string.profile)); menu.add(MainApp.sResources.getString(R.string.danar_viewprofile)); menu.add(MainApp.sResources.getString(R.string.careportal_profileswitch)); } } @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { switch (buttonView.getId()) { case R.id.overview_showprediction: case R.id.overview_showbasals: case R.id.overview_showiob: break; case R.id.overview_showcob: showDeviationsView.setOnCheckedChangeListener(null); showDeviationsView.setChecked(false); showDeviationsView.setOnCheckedChangeListener(this); break; case R.id.overview_showdeviations: showCobView.setOnCheckedChangeListener(null); showCobView.setChecked(false); showCobView.setOnCheckedChangeListener(this); break; case R.id.overview_showratios: break; } SP.putBoolean("showiob", showIobView.isChecked()); SP.putBoolean("showprediction", showPredictionView.isChecked()); SP.putBoolean("showbasals", showBasalsView.isChecked()); SP.putBoolean("showcob", showCobView.isChecked()); SP.putBoolean("showdeviations", showDeviationsView.isChecked()); SP.putBoolean("showratios", 
showRatiosView.isChecked()); scheduleUpdateGUI("onGraphCheckboxesCheckedChanged"); } @Override public boolean onContextItemSelected(MenuItem item) { final LoopPlugin activeloop = ConfigBuilderPlugin.getActiveLoop(); if (item.getTitle().equals(MainApp.sResources.getString(R.string.disableloop))) { activeloop.setFragmentEnabled(PluginBase.LOOP, false); activeloop.setFragmentVisible(PluginBase.LOOP, false); MainApp.getConfigBuilder().storeSettings(); updateGUI("suspendmenu"); ConfigBuilderPlugin.getCommandQueue().cancelTempBasal(true, new Callback() { @Override public void run() { if (!result.success) { ToastUtils.showToastInUiThread(MainApp.instance().getApplicationContext(), MainApp.sResources.getString(R.string.tempbasaldeliveryerror)); } } }); NSUpload.uploadOpenAPSOffline(60); // upload 60 min, we don;t know real duration return true; } else if (item.getTitle().equals(MainApp.sResources.getString(R.string.enableloop))) { activeloop.setFragmentEnabled(PluginBase.LOOP, true); activeloop.setFragmentVisible(PluginBase.LOOP, true); MainApp.getConfigBuilder().storeSettings(); updateGUI("suspendmenu"); NSUpload.uploadOpenAPSOffline(0); return true; } else if (item.getTitle().equals(MainApp.sResources.getString(R.string.resume))) { activeloop.suspendTo(0L); updateGUI("suspendmenu"); ConfigBuilderPlugin.getCommandQueue().cancelTempBasal(true, new Callback() { @Override public void run() { if (!result.success) { ToastUtils.showToastInUiThread(MainApp.instance().getApplicationContext(), MainApp.sResources.getString(R.string.tempbasaldeliveryerror)); } } }); NSUpload.uploadOpenAPSOffline(0); return true; } else if (item.getTitle().equals(MainApp.sResources.getString(R.string.suspendloopfor1h))) { activeloop.suspendTo(System.currentTimeMillis() + 60L * 60 * 1000); updateGUI("suspendmenu"); ConfigBuilderPlugin.getCommandQueue().cancelTempBasal(true, new Callback() { @Override public void run() { if (!result.success) { 
ToastUtils.showToastInUiThread(MainApp.instance().getApplicationContext(), MainApp.sResources.getString(R.string.tempbasaldeliveryerror)); } } }); NSUpload.uploadOpenAPSOffline(60); return true; } else if (item.getTitle().equals(MainApp.sResources.getString(R.string.suspendloopfor2h))) { activeloop.suspendTo(System.currentTimeMillis() + 2 * 60L * 60 * 1000); updateGUI("suspendmenu"); ConfigBuilderPlugin.getCommandQueue().cancelTempBasal(true, new Callback() { @Override public void run() { if (!result.success) { ToastUtils.showToastInUiThread(MainApp.instance().getApplicationContext(), MainApp.sResources.getString(R.string.tempbasaldeliveryerror)); } } }); NSUpload.uploadOpenAPSOffline(120); return true; } else if (item.getTitle().equals(MainApp.sResources.getString(R.string.suspendloopfor3h))) { activeloop.suspendTo(System.currentTimeMillis() + 3 * 60L * 60 * 1000); updateGUI("suspendmenu"); ConfigBuilderPlugin.getCommandQueue().cancelTempBasal(true, new Callback() { @Override public void run() { if (!result.success) { ToastUtils.showToastInUiThread(MainApp.instance().getApplicationContext(), MainApp.sResources.getString(R.string.tempbasaldeliveryerror)); } } }); NSUpload.uploadOpenAPSOffline(180); return true; } else if (item.getTitle().equals(MainApp.sResources.getString(R.string.suspendloopfor10h))) { activeloop.suspendTo(System.currentTimeMillis() + 10 * 60L * 60 * 1000); updateGUI("suspendmenu"); ConfigBuilderPlugin.getCommandQueue().cancelTempBasal(true, new Callback() { @Override public void run() { if (!result.success) { ToastUtils.showToastInUiThread(MainApp.instance().getApplicationContext(), MainApp.sResources.getString(R.string.tempbasaldeliveryerror)); } } }); NSUpload.uploadOpenAPSOffline(600); return true; } else if (item.getTitle().equals(MainApp.sResources.getString(R.string.disconnectpumpfor30m))) { activeloop.disconnectTo(System.currentTimeMillis() + 30L * 60 * 1000); updateGUI("suspendmenu"); 
ConfigBuilderPlugin.getCommandQueue().tempBasalAbsolute(0d, 30, true, new Callback() { @Override public void run() { if (!result.success) { ToastUtils.showToastInUiThread(MainApp.instance().getApplicationContext(), MainApp.sResources.getString(R.string.tempbasaldeliveryerror)); } } }); NSUpload.uploadOpenAPSOffline(30); return true; } else if (item.getTitle().equals(MainApp.sResources.getString(R.string.disconnectpumpfor1h))) { activeloop.disconnectTo(System.currentTimeMillis() + 1 * 60L * 60 * 1000); updateGUI("suspendmenu"); ConfigBuilderPlugin.getCommandQueue().tempBasalAbsolute(0d, 60, true, new Callback() { @Override public void run() { if (!result.success) { ToastUtils.showToastInUiThread(MainApp.instance().getApplicationContext(), MainApp.sResources.getString(R.string.tempbasaldeliveryerror)); } } }); NSUpload.uploadOpenAPSOffline(60); return true; } else if (item.getTitle().equals(MainApp.sResources.getString(R.string.disconnectpumpfor2h))) { activeloop.disconnectTo(System.currentTimeMillis() + 2 * 60L * 60 * 1000); updateGUI("suspendmenu"); ConfigBuilderPlugin.getCommandQueue().tempBasalAbsolute(0d, 2 * 60, true, new Callback() { @Override public void run() { if (!result.success) { ToastUtils.showToastInUiThread(MainApp.instance().getApplicationContext(), MainApp.sResources.getString(R.string.tempbasaldeliveryerror)); } } }); NSUpload.uploadOpenAPSOffline(120); return true; } else if (item.getTitle().equals(MainApp.sResources.getString(R.string.disconnectpumpfor3h))) { activeloop.disconnectTo(System.currentTimeMillis() + 3 * 60L * 60 * 1000); updateGUI("suspendmenu"); ConfigBuilderPlugin.getCommandQueue().tempBasalAbsolute(0d, 3 * 60, true, new Callback() { @Override public void run() { if (!result.success) { ToastUtils.showToastInUiThread(MainApp.instance().getApplicationContext(), MainApp.sResources.getString(R.string.tempbasaldeliveryerror)); } } }); NSUpload.uploadOpenAPSOffline(180); return true; } else if 
(item.getTitle().equals(MainApp.sResources.getString(R.string.careportal_profileswitch))) { NewNSTreatmentDialog newDialog = new NewNSTreatmentDialog(); final OptionsToShow profileswitch = CareportalFragment.PROFILESWITCHDIRECT; profileswitch.executeProfileSwitch = true; newDialog.setOptions(profileswitch, R.string.careportal_profileswitch); newDialog.show(getFragmentManager(), "NewNSTreatmentDialog"); } else if (item.getTitle().equals(MainApp.sResources.getString(R.string.danar_viewprofile))) { ProfileViewerDialog pvd = ProfileViewerDialog.newInstance(System.currentTimeMillis()); FragmentManager manager = getFragmentManager(); pvd.show(manager, "ProfileViewDialog"); } return super.onContextItemSelected(item); } @Override public void onClick(View v) { FragmentManager manager = getFragmentManager(); switch (v.getId()) { case R.id.overview_accepttempbutton: onClickAcceptTemp(); break; case R.id.overview_quickwizardbutton: onClickQuickwizard(); break; case R.id.overview_wizardbutton: WizardDialog wizardDialog = new WizardDialog(); wizardDialog.show(manager, "WizardDialog"); break; case R.id.overview_calibrationbutton: CalibrationDialog calibrationDialog = new CalibrationDialog(); calibrationDialog.show(manager, "CalibrationDialog"); break; case R.id.overview_treatmentbutton: NewTreatmentDialog treatmentDialogFragment = new NewTreatmentDialog(); treatmentDialogFragment.show(manager, "TreatmentDialog"); break; case R.id.overview_pumpstatus: if (ConfigBuilderPlugin.getActivePump().isSuspended() || !ConfigBuilderPlugin.getActivePump().isInitialized()) ConfigBuilderPlugin.getCommandQueue().readStatus("RefreshClicked", null); break; } } private void onClickAcceptTemp() { if (ConfigBuilderPlugin.getActiveLoop() != null) { ConfigBuilderPlugin.getActiveLoop().invoke("Accept temp button", false); final LoopPlugin.LastRun finalLastRun = LoopPlugin.lastRun; if (finalLastRun != null && finalLastRun.lastAPSRun != null && finalLastRun.constraintsProcessed.changeRequested) { 
AlertDialog.Builder builder = new AlertDialog.Builder(getContext()); builder.setTitle(getContext().getString(R.string.confirmation)); builder.setMessage(getContext().getString(R.string.setbasalquestion) + "\n" + finalLastRun.constraintsProcessed); builder.setPositiveButton(getContext().getString(R.string.ok), new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int id) { hideTempRecommendation(); clearNotification(); MainApp.getConfigBuilder().applyAPSRequest(finalLastRun.constraintsProcessed, new Callback() { @Override public void run() { if (result.enacted) { finalLastRun.setByPump = result; finalLastRun.lastEnact = new Date(); finalLastRun.lastOpenModeAccept = new Date(); NSUpload.uploadDeviceStatus(); ObjectivesPlugin objectivesPlugin = MainApp.getSpecificPlugin(ObjectivesPlugin.class); if (objectivesPlugin != null) { ObjectivesPlugin.manualEnacts++; ObjectivesPlugin.saveProgress(); } } scheduleUpdateGUI("onClickAcceptTemp"); } }); Answers.getInstance().logCustom(new CustomEvent("AcceptTemp")); } }); builder.setNegativeButton(getContext().getString(R.string.cancel), null); builder.show(); } } } void onClickQuickwizard() { final BgReading actualBg = DatabaseHelper.actualBg(); final Profile profile = MainApp.getConfigBuilder().getProfile(); final TempTarget tempTarget = MainApp.getConfigBuilder().getTempTargetFromHistory(); QuickWizard.QuickWizardEntry quickWizardEntry = OverviewPlugin.getPlugin().quickWizard.getActive(); if (quickWizardEntry != null && actualBg != null) { quickWizardButton.setVisibility(View.VISIBLE); BolusWizard wizard = new BolusWizard(); wizard.doCalc(profile, tempTarget, quickWizardEntry.carbs(), 0d, actualBg.valueToUnits(profile.getUnits()), 0d, true, true, false, false); final JSONObject boluscalcJSON = new JSONObject(); try { boluscalcJSON.put("eventTime", DateUtil.toISOString(new Date())); boluscalcJSON.put("targetBGLow", wizard.targetBGLow); boluscalcJSON.put("targetBGHigh", wizard.targetBGHigh); 
// NOTE(review): continuation of the QuickWizard click handler. The method header,
// the opening of the try { ... } block and the locals (wizard, boluscalcJSON,
// actualBg, profile, quickWizardEntry) are declared above this chunk — confirm there.
// Populate the Nightscout "boluscalc" record describing how the bolus was computed.
boluscalcJSON.put("isf", wizard.sens);
boluscalcJSON.put("ic", wizard.ic);
// IOB is reported negated: insulin already on board counts against the new bolus
boluscalcJSON.put("iob", -(wizard.insulingFromBolusIOB + wizard.insulingFromBasalsIOB));
boluscalcJSON.put("bolusiobused", true);
boluscalcJSON.put("basaliobused", true);
boluscalcJSON.put("bg", actualBg.valueToUnits(profile.getUnits()));
boluscalcJSON.put("insulinbg", wizard.insulinFromBG);
boluscalcJSON.put("insulinbgused", true);
boluscalcJSON.put("bgdiff", wizard.bgDiff);
boluscalcJSON.put("insulincarbs", wizard.insulinFromCarbs);
boluscalcJSON.put("carbs", quickWizardEntry.carbs());
boluscalcJSON.put("othercorrection", 0d);
boluscalcJSON.put("insulintrend", wizard.insulinFromTrend);
boluscalcJSON.put("insulin", wizard.calculatedTotalInsulin);
} catch (JSONException e) {
    log.error("Unhandled exception", e);
}
// Only offer the treatment when the calculation yields something to deliver
if (wizard.calculatedTotalInsulin > 0d && quickWizardEntry.carbs() > 0d) {
    DecimalFormat formatNumber2decimalplaces = new DecimalFormat("0.00");
    String confirmMessage = getString(R.string.entertreatmentquestion);
    // Clamp the requested insulin/carbs to the configured safety constraints
    Double insulinAfterConstraints = MainApp.getConfigBuilder().applyBolusConstraints(wizard.calculatedTotalInsulin);
    Integer carbsAfterConstraints = MainApp.getConfigBuilder().applyCarbsConstraints(quickWizardEntry.carbs());
    confirmMessage += "\n" + getString(R.string.bolus) + ": " + formatNumber2decimalplaces.format(insulinAfterConstraints) + "U";
    confirmMessage += "\n" + getString(R.string.carbs) + ": " + carbsAfterConstraints + "g";
    // If constraints reduced the request, refuse delivery and ask the user to re-enter
    if (!insulinAfterConstraints.equals(wizard.calculatedTotalInsulin) || !carbsAfterConstraints.equals(quickWizardEntry.carbs())) {
        AlertDialog.Builder builder = new AlertDialog.Builder(getContext());
        builder.setTitle(MainApp.sResources.getString(R.string.treatmentdeliveryerror));
        builder.setMessage(getString(R.string.constraints_violation) + "\n" + getString(R.string.changeyourinput));
        builder.setPositiveButton(MainApp.sResources.getString(R.string.ok), null);
        builder.show();
        return;
    }
    final Double finalInsulinAfterConstraints = insulinAfterConstraints;
    final Integer finalCarbsAfterConstraints = carbsAfterConstraints;
    final Context context = getContext();
    final AlertDialog.Builder builder = new AlertDialog.Builder(context);
    // 'accepted' guards against double delivery from repeated positive-button clicks
    accepted = false;
    builder.setTitle(MainApp.sResources.getString(R.string.confirmation));
    builder.setMessage(confirmMessage);
    builder.setPositiveButton(getString(R.string.ok), new DialogInterface.OnClickListener() {
        public void onClick(DialogInterface dialog, int id) {
            synchronized (builder) {
                if (accepted) {
                    log.debug("guarding: already accepted");
                    return;
                }
                accepted = true;
                if (finalInsulinAfterConstraints > 0 || finalCarbsAfterConstraints > 0) {
                    DetailedBolusInfo detailedBolusInfo = new DetailedBolusInfo();
                    detailedBolusInfo.eventType = CareportalEvent.BOLUSWIZARD;
                    detailedBolusInfo.insulin = finalInsulinAfterConstraints;
                    detailedBolusInfo.carbs = finalCarbsAfterConstraints;
                    detailedBolusInfo.context = context;
                    detailedBolusInfo.boluscalc = boluscalcJSON;
                    detailedBolusInfo.source = Source.USER;
                    ConfigBuilderPlugin.getCommandQueue().bolus(detailedBolusInfo, new Callback() {
                        @Override
                        public void run() {
                            // Surface a failed pump delivery via the error activity with an alarm sound
                            if (!result.success) {
                                Intent i = new Intent(MainApp.instance(), ErrorHelperActivity.class);
                                i.putExtra("soundid", R.raw.boluserror);
                                i.putExtra("status", result.comment);
                                i.putExtra("title", MainApp.sResources.getString(R.string.treatmentdeliveryerror));
                                i.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
                                MainApp.instance().startActivity(i);
                            }
                        }
                    });
                    Answers.getInstance().logCustom(new CustomEvent("QuickWizard"));
                }
            }
        }
    });
    builder.setNegativeButton(getString(R.string.cancel), null);
    builder.show();
}
}

/** Unregister from the event bus, stop the periodic refresh, drop context menus. */
@Override
public void onPause() {
    super.onPause();
    MainApp.bus().unregister(this);
    sLoopHandler.removeCallbacksAndMessages(null);
    unregisterForContextMenu(apsModeView);
    unregisterForContextMenu(activeProfileView);
}

/** Re-register for events and restart the once-per-minute GUI refresh loop. */
@Override
public void onResume() {
    super.onResume();
    MainApp.bus().register(this);
    sRefreshLoop = new Runnable() {
        @Override
        public void run() {
            scheduleUpdateGUI("refreshLoop");
            // re-arm: refresh every 60 s while the fragment is resumed
            sLoopHandler.postDelayed(sRefreshLoop, 60 * 1000L);
        }
    };
    sLoopHandler.postDelayed(sRefreshLoop, 60 * 1000L);
    registerForContextMenu(apsModeView);
    registerForContextMenu(activeProfileView);
    updateGUI("onResume");
}

// Event subscribers: every event funnels into scheduleUpdateGUI so bursts coalesce
// into a single debounced GUI refresh.
@Subscribe
public void onStatusEvent(final EventInitializationChanged ev) {
    scheduleUpdateGUI("EventInitializationChanged");
}

@Subscribe
public void onStatusEvent(final EventPreferenceChange ev) {
    scheduleUpdateGUI("EventPreferenceChange");
}

@Subscribe
public void onStatusEvent(final EventRefreshOverview ev) {
    scheduleUpdateGUI(ev.from);
}

@Subscribe
public void onStatusEvent(final EventAutosensCalculationFinished ev) {
    scheduleUpdateGUI("EventAutosensCalculationFinished");
}

@Subscribe
public void onStatusEvent(final EventTreatmentChange ev) {
    scheduleUpdateGUI("EventTreatmentChange");
}

@Subscribe
public void onStatusEvent(final EventCareportalEventChange ev) {
    scheduleUpdateGUI("EventCareportalEventChange");
}

@Subscribe
public void onStatusEvent(final EventTempBasalChange ev) {
    scheduleUpdateGUI("EventTempBasalChange");
}

@Subscribe
public void onStatusEvent(final EventExtendedBolusChange ev) {
    scheduleUpdateGUI("EventExtendedBolusChange");
}

// Handled by EventAutosensCalculationFinished
// @Subscribe
// public void onStatusEvent(final EventNewBG ev) {
//     scheduleUpdateGUI("EventNewBG");
@Subscribe
public void onStatusEvent(final EventNewOpenLoopNotification ev) {
    scheduleUpdateGUI("EventNewOpenLoopNotification");
}

// Handled by EventAutosensCalculationFinished
// @Subscribe
// public void onStatusEvent(final EventNewBasalProfile ev) {
//     scheduleUpdateGUI("EventNewBasalProfile");
@Subscribe
public void onStatusEvent(final EventTempTargetChange ev) {
    scheduleUpdateGUI("EventTempTargetChange");
}

// Pump status changes update the status line immediately (not debounced),
// always marshalled onto the UI thread.
@Subscribe
public void onStatusEvent(final EventPumpStatusChanged s) {
    Activity activity = getActivity();
    if (activity != null)
        activity.runOnUiThread(new Runnable() {
            @Override
            public void run() {
updatePumpStatus(s.textStatus()); } }); } private void hideTempRecommendation() { Activity activity = getActivity(); if (activity != null) activity.runOnUiThread(new Runnable() { @Override public void run() { if (acceptTempLayout != null) acceptTempLayout.setVisibility(View.GONE); } }); } private void clearNotification() { NotificationManager notificationManager = (NotificationManager) MainApp.instance().getSystemService(Context.NOTIFICATION_SERVICE); notificationManager.cancel(Constants.notificationID); } private void updatePumpStatus(String status) { if (!status.equals("")) { pumpStatusView.setText(status); pumpStatusLayout.setVisibility(View.VISIBLE); loopStatusLayout.setVisibility(View.GONE); } else { pumpStatusLayout.setVisibility(View.GONE); loopStatusLayout.setVisibility(View.VISIBLE); } } public void scheduleUpdateGUI(final String from) { class UpdateRunnable implements Runnable { public void run() { Activity activity = getActivity(); if (activity != null) activity.runOnUiThread(new Runnable() { @Override public void run() { updateGUI(from); scheduledUpdate = null; } }); } } // prepare task for execution in 400 msec // cancel waiting task to prevent multiple updates if (scheduledUpdate != null) scheduledUpdate.cancel(false); Runnable task = new UpdateRunnable(); final int msec = 500; scheduledUpdate = worker.schedule(task, msec, TimeUnit.MILLISECONDS); } @SuppressLint("SetTextI18n") public void updateGUI(String from) { log.debug("updateGUI entered from: " + from); Date updateGUIStart = new Date(); if (getActivity() == null) return; if (timeView != null) { //must not exists timeView.setText(DateUtil.timeString(new Date())); } if (MainApp.getConfigBuilder().getProfile() == null) {// app not initialized yet pumpStatusView.setText(R.string.noprofileset); pumpStatusLayout.setVisibility(View.VISIBLE); loopStatusLayout.setVisibility(View.GONE); return; } pumpStatusLayout.setVisibility(View.GONE); loopStatusLayout.setVisibility(View.VISIBLE); updateNotifications(); 
    // --- updateGUI continues: age indicators, BG, loop mode, temp target ---
    CareportalFragment.updateAge(getActivity(), sage, iage, cage, pbage);
    BgReading actualBG = DatabaseHelper.actualBg();
    BgReading lastBG = DatabaseHelper.lastBg();
    PumpInterface pump = ConfigBuilderPlugin.getActivePump();
    Profile profile = MainApp.getConfigBuilder().getProfile();
    String units = profile.getUnits();
    if (units == null) {
        pumpStatusView.setText(R.string.noprofileset);
        pumpStatusLayout.setVisibility(View.VISIBLE);
        loopStatusLayout.setVisibility(View.GONE);
        return;
    }
    double lowLine = SP.getDouble("low_mark", 0d);
    double highLine = SP.getDouble("high_mark", 0d);
    //Start with updating the BG as it is unaffected by loop.
    if (lastBG != null) {
        // colour by range: low / in-range / high
        int color = MainApp.sResources.getColor(R.color.inrange);
        if (lastBG.valueToUnits(units) < lowLine)
            color = MainApp.sResources.getColor(R.color.low);
        else if (lastBG.valueToUnits(units) > highLine)
            color = MainApp.sResources.getColor(R.color.high);
        bgView.setText(lastBG.valueToUnitsToString(units));
        arrowView.setText(lastBG.directionToSymbol());
        bgView.setTextColor(color);
        arrowView.setTextColor(color);
        GlucoseStatus glucoseStatus = GlucoseStatus.getGlucoseStatusData();
        if (glucoseStatus != null) {
            deltaView.setText("Δ " + Profile.toUnitsString(glucoseStatus.delta, glucoseStatus.delta * Constants.MGDL_TO_MMOLL, units) + " " + units);
            if (avgdeltaView != null)
                avgdeltaView.setText("øΔ15m: " + Profile.toUnitsString(glucoseStatus.short_avgdelta, glucoseStatus.short_avgdelta * Constants.MGDL_TO_MMOLL, units) +
                        " øΔ40m: " + Profile.toUnitsString(glucoseStatus.long_avgdelta, glucoseStatus.long_avgdelta * Constants.MGDL_TO_MMOLL, units));
        } else {
            deltaView.setText("Δ " + MainApp.sResources.getString(R.string.notavailable));
            if (avgdeltaView != null)
                avgdeltaView.setText("");
        }
    }
    // open loop mode
    final LoopPlugin.LastRun finalLastRun = LoopPlugin.lastRun;
    if (Config.APS && pump.getPumpDescription().isTempBasalCapable) {
        apsModeView.setVisibility(View.VISIBLE);
        apsModeView.setBackgroundColor(MainApp.sResources.getColor(R.color.loopenabled));
        apsModeView.setTextColor(Color.BLACK);
        final LoopPlugin activeloop = ConfigBuilderPlugin.getActiveLoop();
        // precedence: superbolus suspend > loop suspend > pump suspend > open/closed > disabled
        if (activeloop != null && activeloop.isEnabled(activeloop.getType()) && activeloop.isSuperBolus()) {
            apsModeView.setBackgroundColor(MainApp.sResources.getColor(R.color.looppumpsuspended));
            apsModeView.setText(String.format(MainApp.sResources.getString(R.string.loopsuperbolusfor), activeloop.minutesToEndOfSuspend()));
            apsModeView.setTextColor(Color.WHITE);
        } else if (activeloop != null && activeloop.isEnabled(activeloop.getType()) && activeloop.isSuspended()) {
            apsModeView.setBackgroundColor(MainApp.sResources.getColor(R.color.looppumpsuspended));
            apsModeView.setText(String.format(MainApp.sResources.getString(R.string.loopsuspendedfor), activeloop.minutesToEndOfSuspend()));
            apsModeView.setTextColor(Color.WHITE);
        } else if (pump.isSuspended()) {
            apsModeView.setBackgroundColor(MainApp.sResources.getColor(R.color.looppumpsuspended));
            apsModeView.setText(MainApp.sResources.getString(R.string.pumpsuspended));
            apsModeView.setTextColor(Color.WHITE);
        } else if (activeloop != null && activeloop.isEnabled(activeloop.getType())) {
            if (MainApp.getConfigBuilder().isClosedModeEnabled()) {
                apsModeView.setText(MainApp.sResources.getString(R.string.closedloop));
            } else {
                apsModeView.setText(MainApp.sResources.getString(R.string.openloop));
            }
        } else {
            apsModeView.setBackgroundColor(MainApp.sResources.getColor(R.color.loopdisabled));
            apsModeView.setText(MainApp.sResources.getString(R.string.disabledloop));
            apsModeView.setTextColor(Color.WHITE);
        }
    } else {
        apsModeView.setVisibility(View.GONE);
    }
    // temp target
    TempTarget tempTarget = MainApp.getConfigBuilder().getTempTargetFromHistory();
    if (tempTarget != null) {
        // active temp target: highlighted background, shows range and remaining time
        tempTargetView.setTextColor(Color.BLACK);
        tempTargetView.setBackgroundColor(MainApp.sResources.getColor(R.color.tempTargetBackground));
        tempTargetView.setVisibility(View.VISIBLE);
        tempTargetView.setText(Profile.toTargetRangeString(tempTarget.low, tempTarget.high, Constants.MGDL, units) + " " + DateUtil.untilString(tempTarget.end()));
    } else {
        // no temp target: show the profile's default target range
        tempTargetView.setTextColor(Color.WHITE);
        tempTargetView.setBackgroundColor(MainApp.sResources.getColor(R.color.tempTargetDisabledBackground));
        tempTargetView.setText(Profile.toTargetRangeString(profile.getTargetLow(), profile.getTargetHigh(), units, units));
        tempTargetView.setVisibility(View.VISIBLE);
    }
    if ((Config.NSCLIENT || Config.G5UPLOADER) && tempTarget == null) {
        tempTargetView.setVisibility(View.GONE);
    }
    if (acceptTempLayout != null) {
        // Show the "accept suggestion" button only in open loop when a fresh,
        // not-yet-accepted APS run actually requests a change.
        boolean showAcceptButton = !MainApp.getConfigBuilder().isClosedModeEnabled(); // Open mode needed
        showAcceptButton = showAcceptButton && finalLastRun != null && finalLastRun.lastAPSRun != null; // aps result must exist
        showAcceptButton = showAcceptButton && (finalLastRun.lastOpenModeAccept == null || finalLastRun.lastOpenModeAccept.getTime() < finalLastRun.lastAPSRun.getTime()); // never accepted or before last result
        showAcceptButton = showAcceptButton && finalLastRun.constraintsProcessed.changeRequested; // change is requested
        if (showAcceptButton && pump.isInitialized() && !pump.isSuspended() && ConfigBuilderPlugin.getActiveLoop() != null) {
            acceptTempLayout.setVisibility(View.VISIBLE);
            acceptTempButton.setText(getContext().getString(R.string.setbasalquestion) + "\n" + finalLastRun.constraintsProcessed);
        } else {
            acceptTempLayout.setVisibility(View.GONE);
        }
    }
    if (calibrationButton != null) {
        // Calibration only makes sense with the xDrip BG source enabled and a current BG
        if (MainApp.getSpecificPlugin(SourceXdripPlugin.class) != null && MainApp.getSpecificPlugin(SourceXdripPlugin.class).isEnabled(PluginBase.BGSOURCE) && profile != null && DatabaseHelper.actualBg() != null) {
            calibrationButton.setVisibility(View.VISIBLE);
        } else {
            calibrationButton.setVisibility(View.GONE);
        }
    }
    final TemporaryBasal activeTemp = MainApp.getConfigBuilder().getTempBasalFromHistory(System.currentTimeMillis());
    String basalText = "";
    if (shorttextmode) {
        if
        (activeTemp != null) {
            // compact display: temp basal only, tap for full details
            basalText = "T: " + activeTemp.toStringVeryShort();
        } else {
            basalText = DecimalFormatter.to2Decimal(MainApp.getConfigBuilder().getProfile().getBasal()) + "U/h";
        }
        baseBasalView.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                String fullText = MainApp.sResources.getString(R.string.virtualpump_basebasalrate_label) + ": " + DecimalFormatter.to2Decimal(MainApp.getConfigBuilder().getProfile().getBasal()) + "U/h\n";
                if (activeTemp != null) {
                    fullText += MainApp.sResources.getString(R.string.virtualpump_tempbasal_label) + ": " + activeTemp.toStringFull();
                }
                OKDialog.show(getActivity(), MainApp.sResources.getString(R.string.basal), fullText, null);
            }
        });
    } else {
        // full display: temp basal plus the base/profile rate in parentheses
        if (activeTemp != null) {
            basalText = activeTemp.toStringFull() + " ";
        }
        if (Config.NSCLIENT || Config.G5UPLOADER)
            basalText += "(" + DecimalFormatter.to2Decimal(MainApp.getConfigBuilder().getProfile().getBasal()) + " U/h)";
        else if (pump.getPumpDescription().isTempBasalCapable) {
            basalText += "(" + DecimalFormatter.to2Decimal(pump.getBaseBasalRate()) + "U/h)";
        }
    }
    if (activeTemp != null) {
        baseBasalView.setTextColor(MainApp.sResources.getColor(R.color.basal));
    } else {
        baseBasalView.setTextColor(Color.WHITE);
    }
    baseBasalView.setText(basalText);
    final ExtendedBolus extendedBolus = MainApp.getConfigBuilder().getExtendedBolusFromHistory(System.currentTimeMillis());
    String extendedBolusText = "";
    if (extendedBolusView != null) { // must not exists in all layouts
        if (shorttextmode) {
            if (extendedBolus != null && !pump.isFakingTempsByExtendedBoluses()) {
                extendedBolusText = DecimalFormatter.to2Decimal(extendedBolus.absoluteRate()) + "U/h";
            }
            extendedBolusView.setText(extendedBolusText);
            extendedBolusView.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    OKDialog.show(getActivity(), MainApp.sResources.getString(R.string.extendedbolus), extendedBolus.toString(), null);
                }
            });
        } else {
            if (extendedBolus != null && !pump.isFakingTempsByExtendedBoluses()) {
                extendedBolusText = extendedBolus.toString();
            }
            extendedBolusView.setText(extendedBolusText);
        }
        if (extendedBolusText.equals(""))
            extendedBolusView.setVisibility(View.GONE);
        else
            extendedBolusView.setVisibility(View.VISIBLE);
    }
    activeProfileView.setText(MainApp.getConfigBuilder().getProfileName());
    activeProfileView.setBackgroundColor(Color.GRAY);
    // long-press on the temp target opens the careportal temp-target dialog
    tempTargetView.setOnLongClickListener(new View.OnLongClickListener() {
        @Override
        public boolean onLongClick(View view) {
            view.performHapticFeedback(HapticFeedbackConstants.LONG_PRESS);
            NewNSTreatmentDialog newTTDialog = new NewNSTreatmentDialog();
            final OptionsToShow temptarget = CareportalFragment.TEMPTARGET;
            temptarget.executeTempTarget = true;
            newTTDialog.setOptions(temptarget, R.string.careportal_temporarytarget);
            newTTDialog.show(getFragmentManager(), "NewNSTreatmentDialog");
            return true;
        }
    });
    tempTargetView.setLongClickable(true);
    // QuickWizard button
    QuickWizard.QuickWizardEntry quickWizardEntry = OverviewPlugin.getPlugin().quickWizard.getActive();
    if (quickWizardEntry != null && lastBG != null && pump.isInitialized() && !pump.isSuspended()) {
        quickWizardButton.setVisibility(View.VISIBLE);
        String text = quickWizardEntry.buttonText() + "\n" + DecimalFormatter.to0Decimal(quickWizardEntry.carbs()) + "g";
        // pre-compute the suggested bolus so the button label shows it
        BolusWizard wizard = new BolusWizard();
        wizard.doCalc(profile, tempTarget, quickWizardEntry.carbs(), 0d, lastBG.valueToUnits(units), 0d, true, true, false, false);
        text += " " + DecimalFormatter.to2Decimal(wizard.calculatedTotalInsulin) + "U";
        quickWizardButton.setText(text);
        if (wizard.calculatedTotalInsulin <= 0)
            quickWizardButton.setVisibility(View.GONE);
    } else
        quickWizardButton.setVisibility(View.GONE);
    // Bolus and calc button
    if (pump.isInitialized() && !pump.isSuspended()) {
        wizardButton.setVisibility(View.VISIBLE);
        treatmentButton.setVisibility(View.VISIBLE);
    } else {
        wizardButton.setVisibility(View.GONE);
        treatmentButton.setVisibility(View.GONE);
    }
    if (lowLine < 1)
    {
        // fall back to the plugin defaults when no low/high marks are configured
        lowLine = Profile.fromMgdlToUnits(OverviewPlugin.bgTargetLow, units);
    }
    if (highLine < 1) {
        highLine = Profile.fromMgdlToUnits(OverviewPlugin.bgTargetHigh, units);
    }
    if (lastBG == null) { //left this here as it seems you want to exit at this point if it is null...
        return;
    }
    // strike through the BG value when the latest reading is stale
    Integer flag = bgView.getPaintFlags();
    if (actualBG == null) {
        flag |= Paint.STRIKE_THRU_TEXT_FLAG;
    } else
        flag &= ~Paint.STRIKE_THRU_TEXT_FLAG;
    bgView.setPaintFlags(flag);
    Long agoMsec = System.currentTimeMillis() - lastBG.date;
    int agoMin = (int) (agoMsec / 60d / 1000d);
    timeAgoView.setText(String.format(MainApp.sResources.getString(R.string.minago), agoMin));
    // iob
    MainApp.getConfigBuilder().updateTotalIOBTreatments();
    MainApp.getConfigBuilder().updateTotalIOBTempBasals();
    final IobTotal bolusIob = MainApp.getConfigBuilder().getLastCalculationTreatments().round();
    final IobTotal basalIob = MainApp.getConfigBuilder().getLastCalculationTempBasals().round();
    if (shorttextmode) {
        // compact: total only; tap shows the bolus/basal breakdown
        String iobtext = DecimalFormatter.to2Decimal(bolusIob.iob + basalIob.basaliob) + "U";
        iobView.setText(iobtext);
        iobView.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                String iobtext = DecimalFormatter.to2Decimal(bolusIob.iob + basalIob.basaliob) + "U\n"
                        + getString(R.string.bolus) + ": " + DecimalFormatter.to2Decimal(bolusIob.iob) + "U\n"
                        + getString(R.string.basal) + ": " + DecimalFormatter.to2Decimal(basalIob.basaliob) + "U\n";
                OKDialog.show(getActivity(), MainApp.sResources.getString(R.string.iob), iobtext, null);
            }
        });
    } else if (MainApp.sResources.getBoolean(R.bool.isTablet)) {
        String iobtext = DecimalFormatter.to2Decimal(bolusIob.iob + basalIob.basaliob) + "U ("
                + getString(R.string.bolus) + ": " + DecimalFormatter.to2Decimal(bolusIob.iob) + "U "
                + getString(R.string.basal) + ": " + DecimalFormatter.to2Decimal(basalIob.basaliob) + "U)";
        iobView.setText(iobtext);
    } else {
        String iobtext = DecimalFormatter.to2Decimal(bolusIob.iob + basalIob.basaliob) + "U ("
                + DecimalFormatter.to2Decimal(bolusIob.iob) + "/"
                + DecimalFormatter.to2Decimal(basalIob.basaliob) + ")";
        iobView.setText(iobtext);
    }
    // cob
    if (cobView != null) { // view must not exists
        String cobText = "";
        AutosensData autosensData = IobCobCalculatorPlugin.getAutosensData(System.currentTimeMillis());
        if (autosensData != null)
            cobText = (int) autosensData.cob + " g";
        cobView.setText(cobText);
    }
    // show the prediction toggle only when the last APS run produced predictions
    boolean predictionsAvailable = finalLastRun != null && finalLastRun.request.hasPredictions;
    if (predictionsAvailable) {
        showPredictionView.setVisibility(View.VISIBLE);
        getActivity().findViewById(R.id.overview_showprediction_label).setVisibility(View.VISIBLE);
    } else {
        showPredictionView.setVisibility(View.GONE);
        getActivity().findViewById(R.id.overview_showprediction_label).setVisibility(View.GONE);
    }
    // pump status from ns
    if (pumpDeviceStatusView != null) {
        pumpDeviceStatusView.setText(NSDeviceStatus.getInstance().getPumpStatus());
        pumpDeviceStatusView.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                OKDialog.show(getActivity(), MainApp.sResources.getString(R.string.pump), NSDeviceStatus.getInstance().getExtendedPumpStatus(), null);
            }
        });
    }
    // OpenAPS status from ns
    if (openapsDeviceStatusView != null) {
        openapsDeviceStatusView.setText(NSDeviceStatus.getInstance().getOpenApsStatus());
        openapsDeviceStatusView.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                OKDialog.show(getActivity(), MainApp.sResources.getString(R.string.openaps), NSDeviceStatus.getInstance().getExtendedOpenApsStatus(), null);
            }
        });
    }
    // Uploader status from ns
    if (uploaderDeviceStatusView != null) {
        uploaderDeviceStatusView.setText(NSDeviceStatus.getInstance().getUploaderStatus());
        uploaderDeviceStatusView.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                OKDialog.show(getActivity(), MainApp.sResources.getString(R.string.uploader), NSDeviceStatus.getInstance().getExtendedUploaderStatus(),
                        null);
            }
        });
    }
    // align to hours
    Calendar calendar = Calendar.getInstance();
    calendar.setTimeInMillis(System.currentTimeMillis());
    calendar.set(Calendar.MILLISECOND, 0);
    calendar.set(Calendar.SECOND, 0);
    calendar.set(Calendar.MINUTE, 0);
    calendar.add(Calendar.HOUR, 1);
    int hoursToFetch;
    long toTime;
    long fromTime;
    long endTime;
    if (predictionsAvailable && showPredictionView.isChecked()) {
        // reserve up to 2 h of the window for predictions, shrinking the history part
        // NOTE(review): Math.ceil is applied to the raw ms difference *before* the
        // division, so it has no rounding effect here — verify intent upstream.
        int predHours = (int) (Math.ceil(finalLastRun.constraintsProcessed.getLatestPredictionsTime() - System.currentTimeMillis()) / (60 * 60 * 1000));
        predHours = Math.min(2, predHours);
        predHours = Math.max(0, predHours);
        hoursToFetch = rangeToDisplay - predHours;
        toTime = calendar.getTimeInMillis() + 100000; // little bit more to avoid wrong rounding - Graphview specific
        fromTime = toTime - hoursToFetch * 60 * 60 * 1000L;
        endTime = toTime + predHours * 60 * 60 * 1000L;
    } else {
        hoursToFetch = rangeToDisplay;
        toTime = calendar.getTimeInMillis() + 100000; // little bit more to avoid wrong rounding - Graphview specific
        fromTime = toTime - hoursToFetch * 60 * 60 * 1000L;
        endTime = toTime;
    }
    long now = System.currentTimeMillis();
    // 2nd graph
    // remove old data
    iobGraph.getSeries().clear();
    GraphData secondGraphData = new GraphData();
    // exactly one checked series drives the y-axis scale, chosen by priority
    boolean useIobForScale = false;
    boolean useCobForScale = false;
    boolean useDevForScale = false;
    boolean useRatioForScale = false;
    boolean useDSForScale = false;
    if (showIobView.isChecked()) {
        useIobForScale = true;
    } else if (showCobView.isChecked()) {
        useCobForScale = true;
    } else if (showDeviationsView.isChecked()) {
        useDevForScale = true;
    } else if (showRatiosView.isChecked()) {
        useRatioForScale = true;
    } else if (Config.displayDeviationSlope) {
        useDSForScale = true;
    }
    if (showIobView.isChecked())
        secondGraphData.addIob(iobGraph, fromTime, now, useIobForScale, 1d);
    if (showCobView.isChecked())
        secondGraphData.addCob(iobGraph, fromTime, now, useCobForScale, useCobForScale ? 1d : 0.5d);
    if (showDeviationsView.isChecked())
        secondGraphData.addDeviations(iobGraph, fromTime, now, useDevForScale, 1d);
    if (showRatiosView.isChecked())
        secondGraphData.addRatio(iobGraph, fromTime, now, useRatioForScale, 1d);
    if (Config.displayDeviationSlope)
        secondGraphData.addDeviationSlope(iobGraph, fromTime, now, useDSForScale, 1d);
    if (showIobView.isChecked() || showCobView.isChecked() || showDeviationsView.isChecked() || showRatiosView.isChecked() || Config.displayDeviationSlope) {
        iobGraph.setVisibility(View.VISIBLE);
    } else {
        iobGraph.setVisibility(View.GONE);
    }
    // remove old data from graph
    bgGraph.getSeries().clear();
    GraphData graphData = new GraphData();
    graphData.addInRangeArea(bgGraph, fromTime, endTime, lowLine, highLine);
    if (predictionsAvailable && showPredictionView.isChecked())
        graphData.addBgReadings(bgGraph, fromTime, toTime, lowLine, highLine, finalLastRun.constraintsProcessed);
    else
        graphData.addBgReadings(bgGraph, fromTime, toTime, lowLine, highLine, null);
    // set manual x bounds to have nice steps
    graphData.formatAxis(bgGraph, fromTime, endTime);
    secondGraphData.formatAxis(iobGraph, fromTime, endTime);
    // Treatments
    graphData.addTreatments(bgGraph, fromTime, endTime);
    // add basal data
    if (pump.getPumpDescription().isTempBasalCapable && showBasalsView.isChecked()) {
        graphData.addBasals(bgGraph, fromTime, now, lowLine / graphData.maxY / 1.2d);
    }
    graphData.addNowLine(bgGraph, now);
    secondGraphData.addNowLine(iobGraph, now);
    // finaly enforce drawing of graphs
    bgGraph.onDataChanged(false, false);
    iobGraph.onDataChanged(false, false);
    Profiler.log(log, from, updateGUIStart);
}

//Notifications
/** RecyclerView adapter rendering the active notification list. */
static class RecyclerViewAdapter extends RecyclerView.Adapter<RecyclerViewAdapter.NotificationsViewHolder> {
    List<Notification> notificationsList;

    RecyclerViewAdapter(List<Notification> notificationsList) {
        this.notificationsList = notificationsList;
    }

    @Override
    public NotificationsViewHolder onCreateViewHolder(ViewGroup viewGroup, int viewType) {
        View
        v = LayoutInflater.from(viewGroup.getContext()).inflate(R.layout.overview_notification_item, viewGroup, false);
        return new NotificationsViewHolder(v);
    }

    @Override
    public void onBindViewHolder(NotificationsViewHolder holder, int position) {
        Notification notification = notificationsList.get(position);
        // the dismiss button carries the Notification it acts on as its tag
        holder.dismiss.setTag(notification);
        // stale-data alarms get a "snooze" label instead of plain dismiss
        if (Objects.equals(notification.text, MainApp.sResources.getString(R.string.nsalarm_staledata)))
            holder.dismiss.setText("snooze");
        holder.text.setText(notification.text);
        holder.time.setText(DateUtil.timeString(notification.date));
        // card colour encodes severity
        if (notification.level == Notification.URGENT)
            holder.cv.setBackgroundColor(ContextCompat.getColor(MainApp.instance(), R.color.notificationUrgent));
        else if (notification.level == Notification.NORMAL)
            holder.cv.setBackgroundColor(ContextCompat.getColor(MainApp.instance(), R.color.notificationNormal));
        else if (notification.level == Notification.LOW)
            holder.cv.setBackgroundColor(ContextCompat.getColor(MainApp.instance(), R.color.notificationLow));
        else if (notification.level == Notification.INFO)
            holder.cv.setBackgroundColor(ContextCompat.getColor(MainApp.instance(), R.color.notificationInfo));
        else if (notification.level == Notification.ANNOUNCEMENT)
            holder.cv.setBackgroundColor(ContextCompat.getColor(MainApp.instance(), R.color.notificationAnnouncement));
    }

    @Override
    public int getItemCount() {
        return notificationsList.size();
    }

    @Override
    public void onAttachedToRecyclerView(RecyclerView recyclerView) {
        super.onAttachedToRecyclerView(recyclerView);
    }

    /** Holds one notification card; the dismiss button routes back through onClick. */
    static class NotificationsViewHolder extends RecyclerView.ViewHolder implements View.OnClickListener {
        CardView cv;
        TextView time;
        TextView text;
        Button dismiss;

        NotificationsViewHolder(View itemView) {
            super(itemView);
            cv = (CardView) itemView.findViewById(R.id.notification_cardview);
            time = (TextView) itemView.findViewById(R.id.notification_time);
            text = (TextView) itemView.findViewById(R.id.notification_text);
            dismiss = (Button) itemView.findViewById(R.id.notification_dismiss);
            dismiss.setOnClickListener(this);
        }

        @Override
        public void onClick(View v) {
            Notification notification = (Notification) v.getTag();
            switch (v.getId()) {
                case R.id.notification_dismiss:
                    MainApp.bus().post(new EventDismissNotification(notification.id));
                    if (notification.nsAlarm != null) {
                        // acknowledge the matching Nightscout alarm for an hour
                        BroadcastAckAlarm.handleClearAlarm(notification.nsAlarm, MainApp.instance().getApplicationContext(), 60 * 60 * 1000L);
                    }
                    // Adding current time to snooze if we got staleData
                    log.debug("Notification text is: " + notification.text);
                    if (notification.text.equals(MainApp.sResources.getString(R.string.nsalarm_staledata))) {
                        NotificationStore nstore = OverviewPlugin.getPlugin().notificationStore;
                        long msToSnooze = SP.getInt("nsalarm_staledatavalue", 15) * 60 * 1000L;
                        log.debug("snooze nsalarm_staledatavalue in minutes is " + SP.getInt("nsalarm_staledatavalue", 15) + "\n in ms is: " + msToSnooze + " currentTimeMillis is: " + System.currentTimeMillis());
                        nstore.snoozeTo(System.currentTimeMillis() + (SP.getInt("nsalarm_staledatavalue", 15) * 60 * 1000L));
                    }
                    break;
            }
        }
    }
}

/** Refresh the notification list on the UI thread, pruning expired/snoozed entries first. */
void updateNotifications() {
    Activity activity = getActivity();
    if (activity != null)
        activity.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                NotificationStore nstore = OverviewPlugin.getPlugin().notificationStore;
                nstore.removeExpired();
                nstore.unSnooze();
                if (nstore.store.size() > 0) {
                    RecyclerViewAdapter adapter = new RecyclerViewAdapter(nstore.store);
                    notificationsView.setAdapter(adapter);
                    notificationsView.setVisibility(View.VISIBLE);
                } else {
                    notificationsView.setVisibility(View.GONE);
                }
            }
        });
}
}
package org.commcare.models.database;

import org.commcare.CommCareApplication;
import org.commcare.CommCareTestApplication;
import org.commcare.android.CommCareTestRunner;
import org.commcare.android.util.TestAppInstaller;
import org.commcare.data.xml.DataModelPullParser;
import org.commcare.xml.AndroidTransactionParserFactory;
import org.javarosa.core.model.instance.FormInstance;
import org.javarosa.core.services.storage.EntityFilter;
import org.javarosa.core.services.storage.IStorageIterator;
import org.javarosa.core.services.storage.IStorageUtilityIndexed;
import org.javarosa.xml.util.InvalidStructureException;
import org.javarosa.xml.util.UnfullfilledRequirementsException;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.annotation.Config;
import org.xmlpull.v1.XmlPullParserException;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Vector;

/**
 * Test file-backed sql storage currently used to store fixtures, which can get
 * large. File-backed storage can store encrypted or unencrypted files.
 *
 * @author Phillip Mates (pmates@dimagi.com).
 */
@Config(application = CommCareTestApplication.class)
@RunWith(CommCareTestRunner.class)
public class StoreFixturesOnFilesystemTests {
    private AndroidSandbox sandbox;

    @Before
    public void setup() {
        // Force both storage flavors to spill every record to the filesystem so
        // the file-backed code paths are always exercised by these tests.
        UnencryptedHybridFileBackedSqlStorageMock.alwaysPutInFilesystem();
        HybridFileBackedSqlStorageMock.alwaysPutInFilesystem();
        sandbox = installAppWithFixtureData(this.getClass(), "odk_level_ipm_restore.xml");
    }

    /**
     * Install the test app, log in, and restore the given classpath fixture
     * resource into a fresh sandbox.
     *
     * @throws RuntimeException if the fixture restore fails (fail fast instead
     *         of letting tests run against a half-restored sandbox)
     */
    public static AndroidSandbox installAppWithFixtureData(Class testClass, String fixtureResource) {
        TestAppInstaller.installAppAndLogin(
                "jr://resource/commcare-apps/archive_form_tests/profile.ccpr",
                "test", "123");
        AndroidSandbox sandbox = new AndroidSandbox(CommCareApplication.instance());
        try {
            parseIntoSandbox(testClass.getClassLoader().getResourceAsStream(fixtureResource), false);
        } catch (Exception e) {
            // Previously this only printed the stack trace, which let tests
            // continue with missing data and fail later with confusing errors.
            throw new RuntimeException("Failed to restore fixture " + fixtureResource, e);
        }
        return sandbox;
    }

    /** Parse a restore payload into the current application's sandbox. */
    public static void parseIntoSandbox(InputStream stream, boolean failfast)
            throws InvalidStructureException, IOException,
            UnfullfilledRequirementsException, XmlPullParserException {
        AndroidTransactionParserFactory factory =
                new AndroidTransactionParserFactory(CommCareApplication.instance().getApplicationContext(), null);
        DataModelPullParser parser = new DataModelPullParser(stream, factory, failfast, true);
        parser.parse();
    }

    /**
     * User level fixtures are encrypted. To do so, they are stored in
     * encrypted files and the key is stored in the encrypted databse. This
     * test ensures that the file is actually encrypted by trying to
     * deserialize the contents of a fixture file w/o decrypting the file
     * first.
     */
    @Test
    public void testStoredEncrypted() {
        IStorageUtilityIndexed<FormInstance> userFixtureStorage = sandbox.getUserFixtureStorage();
        File dbDir = ((HybridFileBackedSqlStorage<FormInstance>)userFixtureStorage).getDbDirForTesting();
        File[] serializedFixtureFiles = dbDir.listFiles();
        Assert.assertTrue(serializedFixtureFiles.length > 0);
        try {
            ((HybridFileBackedSqlStorage<FormInstance>)userFixtureStorage)
                    .newObject(new FileInputStream(serializedFixtureFiles[0]), -1);
            // BUG FIX: previously a *successful* deserialization fell through and
            // the test passed vacuously. Deserializing the raw (undecrypted) file
            // must blow up; if it doesn't, the file wasn't encrypted.
            // (Assert.fail throws AssertionError, which none of the catches below eat.)
            Assert.fail("Should have failed with a runtime exception when trying to deserialize an encrypted object");
        } catch (FileNotFoundException e) {
            Assert.fail("Unable to find db storage file that should exist");
        } catch (RuntimeException e) {
            // we expect to fail here because the stream wasn't decrypted
        } catch (Exception e) {
            Assert.fail("Should have failed with a runtime exception when trying to deserialize an encrypted object");
        }
    }

    /**
     * App level fixtures are stored un-encrypted. To do so, they are stored in
     * plain-text files. This test ensures that by trying to deserialize the
     * contents of one of those files.
     */
    @Test
    public void testStoredUnencrypted() {
        IStorageUtilityIndexed<FormInstance> appFixtureStorage = sandbox.getAppFixtureStorage();
        File dbDir = ((HybridFileBackedSqlStorage<FormInstance>)appFixtureStorage).getDbDirForTesting();
        File[] serializedFixtureFiles = dbDir.listFiles();
        Assert.assertTrue(serializedFixtureFiles.length > 0);
        try {
            ((UnencryptedHybridFileBackedSqlStorage<FormInstance>)appFixtureStorage)
                    .newObject(new FileInputStream(serializedFixtureFiles[0]), -1);
        } catch (Exception e) {
            Assert.fail("Should be able to deserialize an unencrypted object");
        }
    }

    /** removeAll() must delete both the backing files and the storage directory. */
    @Test
    public void testRemoveAllDeletesFiles() {
        IStorageUtilityIndexed<FormInstance> userFixtureStorage = sandbox.getUserFixtureStorage();
        File dbDir = ((HybridFileBackedSqlStorage<FormInstance>)userFixtureStorage).getDbDirForTesting();
        ArrayList<File> removedFiles = new ArrayList<>();
        // collect the backing file of every record before wiping the storage
        for (IStorageIterator i = userFixtureStorage.iterate(); i.hasMore(); ) {
            File fixtureFile = new File(((HybridFileBackedSqlStorage<FormInstance>)userFixtureStorage)
                    .getEntryFilenameForTesting(i.nextID()));
            removedFiles.add(fixtureFile);
        }
        userFixtureStorage.removeAll();
        for (File fixtureFile : removedFiles) {
            Assert.assertFalse(fixtureFile.exists());
        }
        Assert.assertFalse(dbDir.exists());
    }

    /** Filtered removeAll() must delete exactly the matched records' files. */
    @Test
    public void testRemoveEntityFilterDeleteFiles() {
        IStorageUtilityIndexed<FormInstance> userFixtureStorage = sandbox.getUserFixtureStorage();
        File dbDir = ((HybridFileBackedSqlStorage<FormInstance>)userFixtureStorage).getDbDirForTesting();
        int fileCountBefore = dbDir.listFiles().length;
        userFixtureStorage.removeAll(new EntityFilter<FormInstance>() {
            @Override
            public boolean matches(FormInstance fixture) {
                return "commtrack:products".equals(fixture.getRoot().getInstanceName());
            }
        });
        int fileCountAfter = dbDir.listFiles().length;
        Assert.assertTrue(fileCountBefore - fileCountAfter == 1);
        // make sure we can read all the existing records just fine
        for (IStorageIterator i = userFixtureStorage.iterate(); i.hasMore(); ) {
            i.nextRecord();
        }
    }

    /** Single and batched remove() must delete the records' backing files. */
    @Test
    public void testRemoveDeletesFiles() {
        IStorageUtilityIndexed<FormInstance> userFixtureStorage = sandbox.getUserFixtureStorage();
        File dbDir = ((HybridFileBackedSqlStorage<FormInstance>)userFixtureStorage).getDbDirForTesting();
        File[] serializedFixtureFiles = dbDir.listFiles();
        Assert.assertTrue(serializedFixtureFiles.length > 0);
        int count = 0;
        int idOne = -1;
        for (IStorageIterator i = userFixtureStorage.iterate(); i.hasMore(); ) {
            if (count == 0) {
                removeOneEntry(i.nextID(), userFixtureStorage);
            } else if (count == 1) {
                idOne = i.nextID();
            } else if (count == 2) {
                removeTwoEntries(idOne, i.nextID(), userFixtureStorage);
            } else {
                // seems to be required; otherwise iterator loops forever. Not
                // sure if it is a robolectric bug or a bug in our iterator
                // that comes up when we iterate and delete at the same time
                break;
            }
            count++;
        }
    }

    /** Remove a single record and check its backing file disappears. */
    private void removeOneEntry(int id, IStorageUtilityIndexed<FormInstance> userFixtureStorage) {
        String fixtureFilename = ((HybridFileBackedSqlStorage<FormInstance>)userFixtureStorage).getEntryFilenameForTesting(id);
        File fixtureFile = new File(fixtureFilename);
        Assert.assertTrue(fixtureFile.exists());
        userFixtureStorage.remove(id);
        Assert.assertFalse(fixtureFile.exists());
    }

    /** Remove two records via the batch API and check both backing files disappear. */
    private void removeTwoEntries(int idOne, int idTwo, IStorageUtilityIndexed<FormInstance> userFixtureStorage) {
        ArrayList<Integer> toRemoveList = new ArrayList<>();
        toRemoveList.add(idOne);
        toRemoveList.add(idTwo);
        String fixtureOneFilename = ((HybridFileBackedSqlStorage<FormInstance>)userFixtureStorage).getEntryFilenameForTesting(idOne);
        String fixtureTwoFilename = ((HybridFileBackedSqlStorage<FormInstance>)userFixtureStorage).getEntryFilenameForTesting(idTwo);
        File fixtureFileOne = new File(fixtureOneFilename);
        File fixtureFileTwo = new File(fixtureTwoFilename);
        Assert.assertTrue(fixtureFileOne.exists());
        Assert.assertTrue(fixtureFileTwo.exists());
        ((HybridFileBackedSqlStorage<FormInstance>)userFixtureStorage).remove(toRemoveList);
        Assert.assertFalse(fixtureFileOne.exists());
        Assert.assertFalse(fixtureFileTwo.exists());
    }

    /** Updates must round-trip through both the encrypted and unencrypted stores. */
    @Test
    public void testUpdate() {
        // test encrypted update
        HybridFileBackedSqlStorage<FormInstance> userFixtureStorage =
                CommCareApplication.instance().getFileBackedUserStorage("fixture", FormInstance.class);
        FormInstance form = userFixtureStorage.getRecordForValues(new String[]{FormInstance.META_ID}, new String[]{"commtrack:programs"});
        String newName = "new_name";
        form.setName(newName);
        userFixtureStorage.update(form.getID(), form);
        form = userFixtureStorage.getRecordForValues(new String[]{FormInstance.META_ID}, new String[]{"commtrack:programs"});
        Assert.assertEquals(newName, form.getName());

        // test unencrypted update
        UnencryptedHybridFileBackedSqlStorage<FormInstance> appFixtureStorage =
                CommCareApplication.instance().getCurrentApp().getFileBackedStorage("fixture", FormInstance.class);
        form = appFixtureStorage.getRecordForValues(new String[]{FormInstance.META_ID}, new String[]{"user-groups"});
        form.setName(newName);
        appFixtureStorage.update(form.getID(), form);
        form = appFixtureStorage.getRecordForValues(new String[]{FormInstance.META_ID}, new String[]{"user-groups"});
        Assert.assertEquals(newName, form.getName());
    }

    /** The single- and multi-record lookup APIs must agree, in both stores. */
    @Test
    public void testRecordLookup() {
        // test encrypted record lookup
        HybridFileBackedSqlStorage<FormInstance> userFixtureStorage =
                CommCareApplication.instance().getFileBackedUserStorage("fixture", FormInstance.class);
        Vector<FormInstance> forms = userFixtureStorage.getRecordsForValues(new String[]{FormInstance.META_ID}, new String[]{"commtrack:programs"});
        Assert.assertTrue(forms.size() == 1);
        FormInstance form = userFixtureStorage.getRecordForValues(new String[]{FormInstance.META_ID}, new String[]{"commtrack:programs"});
        Assert.assertEquals(forms.firstElement().getRoot(), form.getRoot());
        form = userFixtureStorage.getRecordForValue(FormInstance.META_ID, "commtrack:programs");
        Assert.assertEquals(forms.firstElement().getRoot(), form.getRoot());

        // test unencrypted record lookup
        UnencryptedHybridFileBackedSqlStorage<FormInstance> appFixtureStorage =
                CommCareApplication.instance().getCurrentApp().getFileBackedStorage("fixture", FormInstance.class);
        forms = appFixtureStorage.getRecordsForValues(new String[]{FormInstance.META_ID}, new String[]{"user-groups"});
        Assert.assertTrue(forms.size() == 1);
        form = appFixtureStorage.getRecordForValues(new String[]{FormInstance.META_ID}, new String[]{"user-groups"});
        Assert.assertEquals(forms.firstElement().getRoot(), form.getRoot());
        form = appFixtureStorage.getRecordForValue(FormInstance.META_ID, "user-groups");
        Assert.assertEquals(forms.firstElement().getRoot(), form.getRoot());
    }
}
package org.ovirt.engine.core.compat; import java.io.Serializable; import java.util.UUID; public class Guid implements Serializable, Comparable<Guid> { /** * Needed for the serialization/deserialization mechanism. */ private static final long serialVersionUID = 27305745737022810L; private static final byte[] CHANGE_BYTE_ORDER_INDICES = { 3, 2, 1, 0, 5, 4, 7, 6, 8, 9, 10, 11, 12, 13, 14, 15 }; private static final byte[] KEEP_BYTE_ORDER_INDICES = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 }; public static final Guid SYSTEM = new Guid("AAA00000-0000-0000-0000-123456789AAA"); public static final Guid EVERYONE = new Guid("EEE00000-0000-0000-0000-123456789EEE"); public static final Guid Empty = new Guid("00000000-0000-0000-0000-000000000000"); private UUID uuid; /** * This constructor should never be used directly - use {@link #Empty} instead. * It is left here only because GWT requires it. */ @Deprecated private Guid() { this(Empty.getUuid()); } public Guid(UUID uuid) { this.uuid = uuid; } public Guid(byte[] guid, boolean keepByteOrder) { String guidAsStr = getStrRepresentationOfGuid(guid, keepByteOrder); uuid = UUID.fromString(guidAsStr); } public Guid(String candidate) { if (candidate == null) { throw new NullPointerException( "candidate can not be null please use static method createGuidFromString"); } if (candidate.isEmpty()) { uuid = Empty.getUuid(); } else { uuid = UUID.fromString(candidate); } } public static Guid newGuid() { return new Guid(UUID.randomUUID()); } public static Guid createGuidFromString(String candidate) { return createGuidFromStringWithDefault(candidate, null); } public static Guid createGuidFromStringDefaultEmpty(String candidate) { return createGuidFromStringWithDefault(candidate, Guid.Empty); } private static Guid createGuidFromStringWithDefault(String candidate, Guid defaultValue) { if (candidate == null) { return defaultValue; } return new Guid(candidate); } public static boolean isNullOrEmpty(Guid id) { return id == null || 
id.equals(Empty); } public UUID getUuid() { return uuid; } @Override public boolean equals(Object other) { if (!(other instanceof Guid)) { return false; } Guid otherGuid = (Guid) other; return uuid.equals(otherGuid.getUuid()); } @Override public int hashCode() { return uuid.hashCode(); } @Override public int compareTo(Guid other) { return this.getUuid().compareTo(other.getUuid()); } @Override public String toString() { return uuid.toString(); } /** * Gets a string representation of GUID * * @param inguid * byte array containing the GUID data. * @param keepByteOrder * determines if to keep the byte order in the string representation or not. For some systems as MSSQL * the bytes order should be swapped before converting to String, and for other systems (such as * ActiveDirectory) it should be kept. * @return String representation of GUID */ private static String getStrRepresentationOfGuid(byte[] inguid, boolean keepByteOrder) { StringBuilder strGUID = new StringBuilder(); byte[] byteOrderIndices = null; if (keepByteOrder) { byteOrderIndices = KEEP_BYTE_ORDER_INDICES; } else { byteOrderIndices = CHANGE_BYTE_ORDER_INDICES; } int length = inguid.length; // A GUID format looks like xxxx-xx-xx-xx-xxxxxx where each "x" // represents a byte in hexadecimal format strGUID.append(addLeadingZero(inguid[byteOrderIndices[0 % length]] & 0xFF)); strGUID.append(addLeadingZero(inguid[byteOrderIndices[1 % length]] & 0xFF)); strGUID.append(addLeadingZero(inguid[byteOrderIndices[2 % length]] & 0xFF)); strGUID.append(addLeadingZero(inguid[byteOrderIndices[3 % length]] & 0xFF)); strGUID.append("-"); strGUID.append(addLeadingZero(inguid[byteOrderIndices[4 % length]] & 0xFF)); strGUID.append(addLeadingZero(inguid[byteOrderIndices[5 % length]] & 0xFF)); strGUID.append("-"); strGUID.append(addLeadingZero(inguid[byteOrderIndices[6 % length]] & 0xFF)); strGUID.append(addLeadingZero(inguid[byteOrderIndices[7 % length]] & 0xFF)); strGUID.append("-"); 
strGUID.append(addLeadingZero(inguid[byteOrderIndices[8 % length]] & 0xFF)); strGUID.append(addLeadingZero(inguid[byteOrderIndices[9 % length]] & 0xFF)); strGUID.append("-"); strGUID.append(addLeadingZero(inguid[byteOrderIndices[10 % length]] & 0xFF)); strGUID.append(addLeadingZero(inguid[byteOrderIndices[11 % length]] & 0xFF)); strGUID.append(addLeadingZero(inguid[byteOrderIndices[12 % length]] & 0xFF)); strGUID.append(addLeadingZero(inguid[byteOrderIndices[13 % length]] & 0xFF)); strGUID.append(addLeadingZero(inguid[byteOrderIndices[14 % length]] & 0xFF)); strGUID.append(addLeadingZero(inguid[byteOrderIndices[15 % length]] & 0xFF)); return strGUID.toString(); } private static String addLeadingZero(int k) { return (k <= 0xF) ? "0" + Integer.toHexString(k) : Integer .toHexString(k); } }
package org.ovirt.engine.core.dao;

import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.List;

import javax.inject.Named;
import javax.inject.Singleton;

import org.ovirt.engine.core.common.businessentities.Tags;
import org.ovirt.engine.core.common.businessentities.TagsTemplateMap;
import org.ovirt.engine.core.common.businessentities.TagsType;
import org.ovirt.engine.core.common.businessentities.TagsUserGroupMap;
import org.ovirt.engine.core.common.businessentities.TagsUserMap;
import org.ovirt.engine.core.common.businessentities.TagsVdsMap;
import org.ovirt.engine.core.common.businessentities.TagsVmMap;
import org.ovirt.engine.core.common.businessentities.TagsVmPoolMap;
import org.ovirt.engine.core.compat.Guid;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.jdbc.core.namedparam.MapSqlParameterSource;

/**
 * <code>TagDaoImpl</code> provides an implementation of {@link TagDao} that uses code refactored from the
 * {@link org.ovirt.engine.core.dal.dbbroker.DbFacade} class.
 *
 * Every operation delegates to a named database stored procedure via the calls handler;
 * the procedure and parameter names in this class must match the DB definitions exactly.
 */
@Named
@Singleton
public class TagDaoImpl extends BaseDao implements TagDao {

    /** Maps a result-set row of the {@code tags} table onto a {@link Tags} entity. */
    private static class TagRowMapper implements RowMapper<Tags> {
        // Stateless, so a single shared instance is used for every query.
        public static final TagRowMapper instance = new TagRowMapper();

        @Override
        public Tags mapRow(ResultSet rs, int rowNum) throws SQLException {
            Tags entity = new Tags();
            entity.setdescription(getValueOrNull(rs, "description", ""));
            entity.settag_id(getGuidDefaultNewGuid(rs, "tag_id"));
            entity.settag_name(getValueOrNull(rs, "tag_name", ""));
            entity.setparent_id(getGuidDefaultNewGuid(rs, "parent_id"));
            entity.setIsReadonly(rs.getBoolean("readonly"));
            entity.settype(TagsType.forValue(Integer.parseInt(getValueOrNull(rs, "type", "0"))));
            return entity;
        }

        /**
         * Reads a string column, substituting {@code defval} when the column is SQL NULL
         * or missing from the result set entirely (the SQLException is deliberately
         * treated as "column absent" rather than propagated).
         */
        String getValueOrNull(ResultSet rs, String name, String defval) {
            String result = null;
            try {
                result = rs.getString(name);
            } catch (SQLException e) {
                // consume exception, fall back to default value
            }
            return result != null ? result : defval;
        }
    }

    /** Fetches a single tag by its id, or {@code null} when not found. */
    @Override
    public Tags get(Guid id) {
        MapSqlParameterSource parameterSource = getCustomMapSqlParameterSource()
                .addValue("tag_id", id);

        return getCallsHandler()
                .executeRead("GettagsBytag_id", TagRowMapper.instance, parameterSource);
    }

    /** Fetches a single tag by its (unique) name. */
    @Override
    public Tags getByName(String name) {
        MapSqlParameterSource parameterSource = getCustomMapSqlParameterSource()
                .addValue("tag_name", name);

        return getCallsHandler()
                .executeRead("GettagsBytag_name", TagRowMapper.instance, parameterSource);
    }

    /** Returns every tag in the system. */
    @Override
    public List<Tags> getAll() {
        MapSqlParameterSource parameterSource = getCustomMapSqlParameterSource();

        return getCallsHandler()
                .executeReadList("GetAllFromtags", TagRowMapper.instance, parameterSource);
    }

    /** Returns the direct children of the given parent tag. */
    @Override
    public List<Tags> getAllForParent(Guid id) {
        MapSqlParameterSource parameterSource = getCustomMapSqlParameterSource()
                .addValue("parent_id", id);

        return getCallsHandler()
                .executeReadList("GettagsByparent_id", TagRowMapper.instance, parameterSource);
    }

    // NOTE: the following lookups take a pre-joined, comma-separated id list as a single
    // String parameter; the stored procedures parse it server-side.

    /** Returns the tags attached to any of the given user groups. */
    @Override
    public List<Tags> getAllForUserGroups(String ids) {
        MapSqlParameterSource parameterSource = getCustomMapSqlParameterSource()
                .addValue("group_ids", ids);

        return getCallsHandler()
                .executeReadList("GetTagsByUserGroupId", TagRowMapper.instance, parameterSource);
    }

    /** Returns the user-group tags whose ids are in the given list. */
    @Override
    public List<Tags> getAllUserGroupTagsWithIds(String ids) {
        MapSqlParameterSource parameterSource = getCustomMapSqlParameterSource()
                .addValue("tag_ids", ids);

        return getCallsHandler()
                .executeReadList("GetUserGroupTagsByTagIds", TagRowMapper.instance, parameterSource);
    }

    /** Returns the tags attached to any of the given users. */
    @Override
    public List<Tags> getAllForUsers(String ids) {
        MapSqlParameterSource parameterSource = getCustomMapSqlParameterSource()
                .addValue("user_ids", ids);

        return getCallsHandler()
                .executeReadList("GetTagsByUserId", TagRowMapper.instance, parameterSource);
    }

    /** Returns the user tags whose ids are in the given list. */
    @Override
    public List<Tags> getAllForUsersWithIds(String ids) {
        MapSqlParameterSource parameterSource = getCustomMapSqlParameterSource()
                .addValue("tag_ids", ids);

        return getCallsHandler()
                .executeReadList("GetUserTagsByTagIds", TagRowMapper.instance, parameterSource);
    }

    /** Returns the tags attached to any of the given hosts (VDS). */
    @Override
    public List<Tags> getAllForVds(String ids) {
        MapSqlParameterSource parameterSource = getCustomMapSqlParameterSource()
                .addValue("vds_ids", ids);

        return getCallsHandler()
                .executeReadList("GetTagsByVdsId", TagRowMapper.instance, parameterSource);
    }

    /** Returns the host (VDS) tags whose ids are in the given list. */
    @Override
    public List<Tags> getAllForVdsWithIds(String ids) {
        MapSqlParameterSource parameterSource = getCustomMapSqlParameterSource()
                .addValue("tag_ids", ids);

        return getCallsHandler()
                .executeReadList("GetVdsTagsByTagIds", TagRowMapper.instance, parameterSource);
    }

    /** Returns the tags attached to any of the given VMs. */
    @Override
    public List<Tags> getAllForVm(String ids) {
        MapSqlParameterSource parameterSource = getCustomMapSqlParameterSource()
                .addValue("vm_ids", ids);

        return getCallsHandler()
                .executeReadList("GetTagsByVmId", TagRowMapper.instance, parameterSource);
    }

    /**
     * In the database both TemplateTags and VmTags share the same tables and
     * functions
     * @param ids
     *            the Template ids (comma-separated)
     * @return the tags attached to any of the given templates
     */
    @Override
    public List<Tags> getAllForTemplate(String ids) {
        MapSqlParameterSource parameterSource = getCustomMapSqlParameterSource()
                .addValue("vm_ids", ids);

        return getCallsHandler()
                .executeReadList("GetTagsByVmId", TagRowMapper.instance, parameterSource);
    }

    /** Returns the VM tags whose ids are in the given list. */
    @Override
    public List<Tags> getAllVmTagsWithIds(String ids) {
        MapSqlParameterSource parameterSource = getCustomMapSqlParameterSource()
                .addValue("tag_ids", ids);

        return getCallsHandler()
                .executeReadList("GetVmTagsByTagId", TagRowMapper.instance, parameterSource);
    }

    /** Returns the tags attached to any of the given VM pools. */
    @Override
    public List<Tags> getAllForVmPools(String ids) {
        MapSqlParameterSource parameterSource = getCustomMapSqlParameterSource()
                .addValue("vm_pool_ids", ids);

        return getCallsHandler()
                .executeReadList("GetTagsByVmpoolId", TagRowMapper.instance, parameterSource);
    }

    /**
     * Persists a new tag. When the tag has no id (null/empty) a fresh Guid is
     * generated and written back onto the entity before the insert.
     */
    @Override
    public void save(Tags tag) {
        Guid id = tag.gettag_id();
        if (Guid.isNullOrEmpty(id)) {
            id = Guid.newGuid();
            tag.settag_id(id);
        }
        MapSqlParameterSource parameterSource = getCustomMapSqlParameterSource()
                .addValue("description", tag.getdescription())
                .addValue("tag_id", tag.gettag_id())
                .addValue("tag_name", tag.gettag_name())
                .addValue("parent_id", tag.getparent_id())
                .addValue("readonly", tag.getIsReadonly())
                .addValue("type", tag.gettype());

        getCallsHandler().executeModification("Inserttags", parameterSource);
    }

    /** Deletes the tag with the given id. */
    @Override
    public void remove(Guid id) {
        MapSqlParameterSource parameterSource = getCustomMapSqlParameterSource()
                .addValue("tag_id", id);

        getCallsHandler()
                .executeModification("Deletetags", parameterSource);
    }

    /** Updates all mutable fields of an existing tag, keyed by tag_id. */
    @Override
    public void update(Tags tag) {
        MapSqlParameterSource parameterSource = getCustomMapSqlParameterSource()
                .addValue("description", tag.getdescription())
                .addValue("tag_id", tag.gettag_id())
                .addValue("tag_name", tag.gettag_name())
                .addValue("parent_id", tag.getparent_id())
                .addValue("readonly", tag.getIsReadonly())
                .addValue("type", tag.gettype());

        getCallsHandler()
                .executeModification("Updatetags", parameterSource);
    }

    /** Looks up a single tag/user-group attachment row, or {@code null} if not attached. */
    @Override
    public TagsUserGroupMap getTagUserGroupByGroupIdAndByTagId(Guid tag, Guid group) {
        MapSqlParameterSource parameterSource =
                getCustomMapSqlParameterSource().addValue("group_id", group)
                        .addValue("tag_id", tag);

        RowMapper<TagsUserGroupMap> mapper = new RowMapper<TagsUserGroupMap>() {
            @Override
            public TagsUserGroupMap mapRow(ResultSet rs, int rowNum)
                    throws SQLException {
                TagsUserGroupMap entity = new TagsUserGroupMap();
                entity.setgroup_id(getGuidDefaultEmpty(rs, "group_id"));
                entity.settag_id(getGuidDefaultEmpty(rs, "tag_id"));
                return entity;
            }
        };

        return getCallsHandler()
                .executeRead("GetTagUserGroupByGroupIdAndByTagId", mapper, parameterSource);
    }

    /** Attaches a user group to a tag by inserting a mapping row. */
    @Override
    public void attachUserGroupToTag(TagsUserGroupMap tagUserGroupMap) {
        MapSqlParameterSource parameterSource =
                getCustomMapSqlParameterSource().addValue("group_id",
                        tagUserGroupMap.getgroup_id()).addValue("tag_id",
                        tagUserGroupMap.gettag_id());

        getCallsHandler()
                .executeModification("Inserttags_user_group_map", parameterSource);
    }

    /** Detaches a user group from a tag by deleting the mapping row. */
    @Override
    public void detachUserGroupFromTag(Guid tagId, Guid groupId) {
        MapSqlParameterSource parameterSource =
                getCustomMapSqlParameterSource().addValue("group_id", groupId)
                        .addValue("tag_id", tagId);

        getCallsHandler()
                .executeModification("Deletetags_user_group_map", parameterSource);
    }

    /** Looks up a single tag/user attachment row, or {@code null} if not attached. */
    @Override
    public TagsUserMap getTagUserByTagIdAndByuserId(Guid tagId, Guid userId) {
        MapSqlParameterSource parameterSource =
                getCustomMapSqlParameterSource().addValue("tag_id", tagId).addValue(
                        "user_id", userId);

        RowMapper<TagsUserMap> mapper = new RowMapper<TagsUserMap>() {
            @Override
            public TagsUserMap mapRow(ResultSet rs, int rowNum)
                    throws SQLException {
                TagsUserMap entity = new TagsUserMap();
                entity.settag_id(getGuidDefaultEmpty(rs, "tag_id"));
                entity.setuser_id(getGuidDefaultEmpty(rs, "user_id"));
                return entity;
            }
        };

        return getCallsHandler()
                .executeRead("GetTagUserByTagIdAndByuserId", mapper, parameterSource);
    }

    /** Attaches a user to a tag by inserting a mapping row. */
    @Override
    public void attachUserToTag(TagsUserMap tagUserMap) {
        MapSqlParameterSource parameterSource =
                getCustomMapSqlParameterSource().addValue("tag_id",
                        tagUserMap.gettag_id()).addValue("user_id",
                        tagUserMap.getuser_id());

        getCallsHandler()
                .executeModification("Inserttags_user_map", parameterSource);
    }

    /** Detaches a user from a tag by deleting the mapping row. */
    @Override
    public void detachUserFromTag(Guid tagId, Guid userId) {
        MapSqlParameterSource parameterSource =
                getCustomMapSqlParameterSource().addValue("tag_id", tagId).addValue(
                        "user_id", userId);

        getCallsHandler()
                .executeModification("Deletetags_user_map", parameterSource);
    }

    /** Looks up a single tag/host (VDS) attachment row, or {@code null} if not attached. */
    @Override
    public TagsVdsMap getTagVdsByTagIdAndByVdsId(Guid tagId, Guid vdsId) {
        MapSqlParameterSource parameterSource =
                getCustomMapSqlParameterSource().addValue("tag_id", tagId).addValue(
                        "vds_id", vdsId);

        RowMapper<TagsVdsMap> mapper = new RowMapper<TagsVdsMap>() {
            @Override
            public TagsVdsMap mapRow(ResultSet rs, int rowNum)
                    throws SQLException {
                TagsVdsMap entity = new TagsVdsMap();
                entity.settag_id(getGuidDefaultEmpty(rs, "tag_id"));
                entity.setvds_id(getGuidDefaultEmpty(rs, "vds_id"));
                return entity;
            }
        };

        return getCallsHandler()
                .executeRead("GetTagVdsBytagIdAndByVdsId", mapper, parameterSource);
    }

    /** Attaches a host (VDS) to a tag by inserting a mapping row. */
    @Override
    public void attachVdsToTag(TagsVdsMap tagVdsMap) {
        MapSqlParameterSource parameterSource =
                getCustomMapSqlParameterSource().addValue("tag_id",
                        tagVdsMap.gettag_id()).addValue("vds_id",
                        tagVdsMap.getvds_id());

        getCallsHandler()
                .executeModification("Inserttags_vds_map", parameterSource);
    }

    /** Detaches a host (VDS) from a tag by deleting the mapping row. */
    @Override
    public void detachVdsFromTag(Guid tagId, Guid vdsId) {
        MapSqlParameterSource parameterSource =
                getCustomMapSqlParameterSource().addValue("tag_id", tagId).addValue(
                        "vds_id", vdsId);

        getCallsHandler()
                .executeModification("Deletetags_vds_map", parameterSource);
    }

    /** Looks up a single tag/VM attachment row, or {@code null} if not attached. */
    @Override
    public TagsVmMap getTagVmByTagIdAndByVmId(Guid tagId, Guid vmId) {
        MapSqlParameterSource parameterSource =
                getCustomMapSqlParameterSource().addValue("tag_id", tagId).addValue(
                        "vm_id", vmId);

        RowMapper<TagsVmMap> mapper = new RowMapper<TagsVmMap>() {
            @Override
            public TagsVmMap mapRow(ResultSet rs, int rowNum)
                    throws SQLException {
                TagsVmMap entity = new TagsVmMap();
                entity.settag_id(getGuidDefaultEmpty(rs, "tag_id"));
                entity.setvm_id(getGuidDefaultEmpty(rs, "vm_id"));
                // getObject keeps SQL NULL as a null Integer instead of boxing 0.
                entity.setDefaultDisplayType((Integer) rs.getObject("DefaultDisplayType"));
                return entity;
            }
        };

        return getCallsHandler()
                .executeRead("GetTagVmByTagIdAndByvmId", mapper, parameterSource);
    }

    /** Attaches a VM to a tag (with its default display type) by inserting a mapping row. */
    @Override
    public void attachVmToTag(TagsVmMap tagVmMap) {
        MapSqlParameterSource parameterSource =
                getCustomMapSqlParameterSource().addValue("tag_id",
                        tagVmMap.gettag_id()).addValue("vm_id",
                        tagVmMap.getvm_id()).addValue("DefaultDisplayType",
                        tagVmMap.getDefaultDisplayType());

        getCallsHandler()
                .executeModification("Inserttags_vm_map", parameterSource);
    }

    /** Updates only the default display type on an existing tag/VM mapping. */
    @Override
    public void updateDefaultDisplayForVmTag(TagsVmMap tagsVmMap) {
        MapSqlParameterSource parameterSource =
                getCustomMapSqlParameterSource().addValue("tag_id",
                        tagsVmMap.gettag_id()).addValue("vm_id",
                        tagsVmMap.getvm_id()).addValue("DefaultDisplayType",
                        tagsVmMap.getDefaultDisplayType());

        getCallsHandler()
                .executeModification("UpdateVmTagsDefaultDisplayType", parameterSource);
    }

    /** Detaches a VM from a tag by deleting the mapping row. */
    @Override
    public void detachVmFromTag(Guid tagId, Guid vmId) {
        MapSqlParameterSource parameterSource =
                getCustomMapSqlParameterSource().addValue("tag_id", tagId).addValue(
                        "vm_id", vmId);

        getCallsHandler()
                .executeModification("Deletetags_vm_map", parameterSource);
    }

    /** Returns the tag/VM mappings for the given VM that carry a default display type. */
    @Override
    public List<TagsVmMap> getTagVmMapByVmIdAndDefaultTag(Guid vmid) {
        MapSqlParameterSource parameterSource =
                getCustomMapSqlParameterSource().addValue("vm_id", vmid);

        RowMapper<TagsVmMap> mapper = new RowMapper<TagsVmMap>() {
            @Override
            public TagsVmMap mapRow(ResultSet rs, int rowNum)
                    throws SQLException {
                TagsVmMap entity = new TagsVmMap();
                entity.settag_id(getGuidDefaultEmpty(rs, "tag_id"));
                entity.setvm_id(getGuidDefaultEmpty(rs, "vm_id"));
                entity.setDefaultDisplayType((Integer) rs.getObject("DefaultDisplayType"));
                return entity;
            }
        };

        // NOTE(review): the procedure name "GetnVmTagsByVmIdAndDefaultTag" looks like a
        // typo but presumably matches the DB definition - verify before renaming.
        return getCallsHandler()
                .executeReadList(
                        "GetnVmTagsByVmIdAndDefaultTag", mapper, parameterSource);
    }

    /**
     * In the database both Template and Vm Tags share the same tables and functions
     * @param tagId the tag id
     * @param vmId the template id (stored in the shared "vm_id" column)
     * @return the tag/template attachment row, or {@code null} if not attached
     */
    @Override
    public TagsTemplateMap getTagTemplateByTagIdAndByTemplateId(Guid tagId, Guid vmId) {
        MapSqlParameterSource parameterSource =
                getCustomMapSqlParameterSource().addValue("tag_id", tagId).addValue(
                        "vm_id", vmId);

        RowMapper<TagsTemplateMap> mapper = new RowMapper<TagsTemplateMap>() {
            @Override
            public TagsTemplateMap mapRow(ResultSet rs, int rowNum)
                    throws SQLException {
                TagsTemplateMap entity = new TagsTemplateMap();
                entity.settag_id(getGuidDefaultEmpty(rs, "tag_id"));
                entity.settemplate_id(getGuidDefaultEmpty(rs, "vm_id"));
                entity.setDefaultDisplayType((Integer) rs.getObject("DefaultDisplayType"));
                return entity;
            }
        };

        return getCallsHandler()
                .executeRead("GetTagVmByTagIdAndByvmId", mapper, parameterSource);
    }

    /** Attaches a template to a tag (templates reuse the tags_vm_map table). */
    @Override
    public void attachTemplateToTag(TagsTemplateMap tagTemplateMap) {
        MapSqlParameterSource parameterSource =
                getCustomMapSqlParameterSource().addValue("tag_id",
                        tagTemplateMap.gettag_id()).addValue("vm_id",
                        tagTemplateMap.gettemplate_id()).addValue("DefaultDisplayType",
                        tagTemplateMap.getDefaultDisplayType());

        getCallsHandler()
                .executeModification("Inserttags_vm_map", parameterSource);
    }

    /** Detaches a template from a tag (templates reuse the tags_vm_map table). */
    @Override
    public void detachTemplateFromTag(Guid tagId, Guid vmId) {
        MapSqlParameterSource parameterSource =
                getCustomMapSqlParameterSource().addValue("tag_id", tagId).addValue(
                        "vm_id", vmId);

        getCallsHandler()
                .executeModification("Deletetags_vm_map", parameterSource);
    }

    /** Returns the VM-pool tag mappings for the given pool and AD element (user/group). */
    @Override
    public List<TagsVmPoolMap> getVmPoolTagsByVmPoolIdAndAdElementId(Guid vmPoolId, Guid adElementId) {
        MapSqlParameterSource parameterSource =
                getCustomMapSqlParameterSource().addValue("ad_id", adElementId)
                        .addValue("vm_pool_id", vmPoolId);

        RowMapper<TagsVmPoolMap> mapper = new RowMapper<TagsVmPoolMap>() {
            @Override
            public TagsVmPoolMap mapRow(ResultSet rs, int rowNum)
                    throws SQLException {
                TagsVmPoolMap entity = new TagsVmPoolMap();
                entity.settag_id(getGuidDefaultEmpty(rs, "tag_id"));
                entity.setvm_pool_id(getGuidDefaultEmpty(rs, "vm_pool_id"));
                return entity;
            }
        };

        return getCallsHandler()
                .executeReadList(
                        "GetVmPoolTagsByVmPoolIdAndAdElementId", mapper, parameterSource);
    }
}
package org.intermine.bio.dataconversion; import java.io.BufferedReader; import java.io.File; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.IOException; import java.util.HashSet; import java.util.Map; import java.util.Properties; import java.util.Set; import org.apache.commons.lang.StringUtils; import org.apache.log4j.Logger; import org.intermine.util.PropertiesUtil; /** * ID resolver for Entrez genes. * * @author Richard Smith */ public class EntrezGeneIdResolverFactory extends IdResolverFactory { protected static final Logger LOG = Logger.getLogger(EntrezGeneIdResolverFactory.class); private final String clsName = "gene"; private final String propName = "resolver.entrez.file"; // set in .intermine/MINE.properties private final String taxonId = "9606"; /** * Build an IdResolver from Entrez Gene gene_info file * @return an IdResolver for Entrez Gene */ @Override protected IdResolver createIdResolver() { Properties props = PropertiesUtil.getProperties(); String fileName = props.getProperty(propName); if (StringUtils.isBlank(fileName)) { String message = "Entrez gene resolver has no file name specified, set " + propName + " to the location of the gene_info file."; throw new IllegalArgumentException(message); } IdResolver resolver; BufferedReader reader; try { FileReader fr = new FileReader(new File(fileName)); reader = new BufferedReader(fr); resolver = createFromFile(reader); } catch (FileNotFoundException e) { throw new IllegalArgumentException("Failed to open gene_info file: " + fileName, e); } catch (IOException e) { throw new IllegalArgumentException("Error reading from gene_info file: " + fileName, e); } return resolver; } private IdResolver createFromFile(BufferedReader reader) throws IOException { IdResolver resolver = new IdResolver(clsName); NcbiGeneInfoParser parser = new NcbiGeneInfoParser(reader); Map<String, Set<GeneInfoRecord>> records = parser.getGeneInfoRecords(); if (records == null) { throw new 
IllegalArgumentException("Failed to read any records from gene_info file."); } if (!records.containsKey(taxonId)) { throw new IllegalArgumentException("No records in gene_info file for taxon: " + taxonId); } for (GeneInfoRecord record : records.get(taxonId)) { resolver.addMainIds(taxonId, record.entrez, lowerCase(record.getMainIds())); resolver.addSynonyms(taxonId, record.entrez, lowerCase(record.ensemblIds)); resolver.addSynonyms(taxonId, record.entrez, lowerCase(record.synonyms)); } return resolver; } private Set<String> lowerCase(Set<String> input) { Set<String> lower = new HashSet<String>(); for (String s : input) { lower.add(s.toLowerCase()); } return lower; } }
package bitronix.tm.journal.nio; import bitronix.tm.journal.nio.util.SequencedBlockingQueue; import bitronix.tm.journal.nio.util.SequencedQueueEntry; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import java.util.*; import java.util.concurrent.*; import static org.junit.Assert.*; /** * Tests the functionality of NioForceSynchronizer. * * @author juergen kellerer, 2011-05-29 */ public class NioForceSynchronizerTest { static ExecutorService service; @BeforeClass public static void initService() { service = Executors.newFixedThreadPool(128); } @AfterClass public static void shutdownService() { service.shutdown(); } volatile List<Object> elements; final SequencedBlockingQueue<Object> queue = new SequencedBlockingQueue<Object>(); final NioForceSynchronizer forceSynchronizer = new NioForceSynchronizer(queue); @Before public void setUp() throws Exception { elements = new ArrayList<Object>(); for (int i = 0; i < 6; i++) elements.add(new Object()); } @Test public void testCanEnlistAndUnWrapElements() throws Exception { for (Object element : elements) queue.putElement(element); List<Object> enlistedElements = new ArrayList<Object>(); queue.drainElementsTo(enlistedElements); assertArrayEquals(elements.toArray(), enlistedElements.toArray()); } @Test public void testWaitOnEnlisted() throws Exception { List<Future<Boolean>> futures = doTestWaitOnEnlistedWithSuccess(); for (Future<Boolean> future : futures) assertTrue(future.get()); } @Test public void testWaitOnEnlistedReceivesFailures() throws Exception { List<Future<Boolean>> futures = doTestWaitOnEnlistedWithFailure(); for (Future<Boolean> future : futures) assertFalse(future.get()); } @Test public void testWaitOnEnlistedFailuresIntersectSuccess() throws Exception { final Random random = new Random(); Map<Future<Boolean>, Boolean> expectedResults = new HashMap<Future<Boolean>, Boolean>(); int successCount = 1000, errorCount = 1000; while (successCount > 0 || 
errorCount > 0) { setUp(); final boolean success = random.nextBoolean(); if (success) successCount else errorCount final List<Future<Boolean>> futures = success ? doTestWaitOnEnlistedWithSuccess() : doTestWaitOnEnlistedWithFailure(); for (Future<Boolean> future : futures) expectedResults.put(future, success); } for (Map.Entry<Future<Boolean>, Boolean> entry : expectedResults.entrySet()) assertEquals(entry.getValue(), entry.getKey().get()); } private List<Future<Boolean>> doTestWaitOnEnlistedWithSuccess() throws Exception { return doTestWaitOnEnlisted(new Callable<Object>() { public Object call() throws Exception { return null; } }); } private List<Future<Boolean>> doTestWaitOnEnlistedWithFailure() throws Exception { return doTestWaitOnEnlisted(new Callable<Object>() { public Object call() throws Exception { throw new Exception(); } }); } private List<Future<Boolean>> doTestWaitOnEnlisted(Callable<Object> callable) throws Exception { final List<Object> objects = elements; final CountDownLatch enlistCountDown = new CountDownLatch(objects.size()); List<Future<Boolean>> futures = new ArrayList<Future<Boolean>>(); for (final Object element : objects) { futures.add(service.submit(new Callable<Boolean>() { public Boolean call() throws Exception { queue.putElement(element); enlistCountDown.countDown(); return forceSynchronizer.waitOnEnlisted(); } })); } enlistCountDown.await(); for (Future<?> future : futures) assertFalse(future.isDone()); try { ArrayList<SequencedQueueEntry<Object>> entries = new ArrayList<SequencedQueueEntry<Object>>(); queue.takeAndDrainElementsTo(entries, new ArrayList<Object>()); if (!forceSynchronizer.processEnlistedIfRequired(callable, entries)) forceSynchronizer.processEnlisted(callable, entries); } catch (Exception e) { // ignore. } return futures; } }
package org.sirix.service.xml.serialize; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static org.sirix.service.xml.serialize.XMLSerializerProperties.S_ID; import static org.sirix.service.xml.serialize.XMLSerializerProperties.S_INDENT; import static org.sirix.service.xml.serialize.XMLSerializerProperties.S_INDENT_SPACES; import static org.sirix.service.xml.serialize.XMLSerializerProperties.S_REST; import static org.sirix.service.xml.serialize.XMLSerializerProperties.S_XMLDECL; import java.io.BufferedOutputStream; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; import java.io.UnsupportedEncodingException; import java.util.concurrent.ConcurrentMap; import javax.annotation.Nonnegative; import javax.annotation.Nonnull; import org.sirix.access.Database; import org.sirix.access.conf.DatabaseConfiguration; import org.sirix.access.conf.ResourceConfiguration; import org.sirix.access.conf.SessionConfiguration; import org.sirix.api.IDatabase; import org.sirix.api.INodeReadTrx; import org.sirix.api.ISession; import org.sirix.settings.ECharsForSerializing; import org.sirix.settings.IConstants; import org.sirix.utils.Files; import org.sirix.utils.LogWrapper; import org.sirix.utils.XMLToken; import org.slf4j.LoggerFactory; /** * <h1>XMLSerializer</h1> * * <p> * Most efficient way to serialize a subtree into an OutputStream. The encoding * always is UTF-8. Note that the OutputStream internally is wrapped by a * BufferedOutputStream. There is no need to buffer it again outside of this * class. * </p> */ public final class XMLSerializer extends AbsSerializer { /** {@link LogWrapper} reference. */ private static final LogWrapper LOGWRAPPER = new LogWrapper( LoggerFactory.getLogger(XMLSerializer.class)); /** Offset that must be added to digit to make it ASCII. 
*/ private static final int ASCII_OFFSET = 48; /** Precalculated powers of each available long digit. */ private static final long[] LONG_POWERS = { 1L, 10L, 100L, 1000L, 10000L, 100000L, 1000000L, 10000000L, 100000000L, 1000000000L, 10000000000L, 100000000000L, 1000000000000L, 10000000000000L, 100000000000000L, 1000000000000000L, 10000000000000000L, 100000000000000000L, 1000000000000000000L }; /** OutputStream to write to. */ private final OutputStream mOut; /** Indent output. */ private final boolean mIndent; /** Serialize XML declaration. */ private final boolean mSerializeXMLDeclaration; /** Serialize rest header and closer and rest:id. */ private final boolean mSerializeRest; /** Serialize id. */ private final boolean mSerializeId; /** Number of spaces to indent. */ private final int mIndentSpaces; /** * Initialize XMLStreamReader implementation with transaction. The cursor * points to the node the XMLStreamReader starts to read. * * @param pSession * session for read XML * @param pNodeKey * start node key * @param pBuilder * builder of XML Serializer * @param pRevision * revision to serialize * @param pRevisions * further revisions to serialize */ private XMLSerializer(@Nonnull final ISession pSession, @Nonnegative final long pNodeKey, @Nonnull final XMLSerializerBuilder pBuilder, @Nonnegative final int pRevision, @Nonnull final int... pRevisions) { super(pSession, pNodeKey, pRevision, pRevisions); mOut = new BufferedOutputStream(pBuilder.mStream, 4096); mIndent = pBuilder.mIndent; mSerializeXMLDeclaration = pBuilder.mDeclaration; mSerializeRest = pBuilder.mREST; mSerializeId = pBuilder.mID; mIndentSpaces = pBuilder.mIndentSpaces; } /** * Emit node (start element or characters). */ @Override protected void emitStartElement(final @Nonnull INodeReadTrx pRtx) { try { switch (pRtx.getKind()) { case DOCUMENT_ROOT: if (mIndent) { mOut.write(ECharsForSerializing.NEWLINE.getBytes()); } break; case ELEMENT: // Emit start element. 
// NOTE(review): this chunk starts mid-way through emitStartElement(); the
// method header and the opening of its try/switch are outside the visible
// range, so the statements below are the tail of that method (ELEMENT case).
      indent();
      mOut.write(ECharsForSerializing.OPEN.getBytes());
      mOut.write(pRtx.rawNameForKey(pRtx.getNameKey()));
      final long key = pRtx.getNodeKey();

      // Emit namespace declarations; move back to the element after each one.
      for (int index = 0, length = pRtx.getNamespaceCount(); index < length; index++) {
        pRtx.moveToNamespace(index);
        if (pRtx.nameForKey(pRtx.getNameKey()).isEmpty()) {
          // Default namespace declaration: xmlns="uri".
          mOut.write(ECharsForSerializing.XMLNS.getBytes());
          write(pRtx.nameForKey(pRtx.getURIKey()));
          mOut.write(ECharsForSerializing.QUOTE.getBytes());
        } else {
          // Prefixed namespace declaration: xmlns:prefix="uri".
          mOut.write(ECharsForSerializing.XMLNS_COLON.getBytes());
          write(pRtx.nameForKey(pRtx.getNameKey()));
          mOut.write(ECharsForSerializing.EQUAL_QUOTE.getBytes());
          write(pRtx.nameForKey(pRtx.getURIKey()));
          mOut.write(ECharsForSerializing.QUOTE.getBytes());
        }
        pRtx.moveTo(key);
      }

      // Emit attributes.
      // Add virtual rest:id attribute.
      if (mSerializeId) {
        if (mSerializeRest) {
          mOut.write(ECharsForSerializing.REST_PREFIX.getBytes());
        } else {
          mOut.write(ECharsForSerializing.SPACE.getBytes());
        }
        mOut.write(ECharsForSerializing.ID.getBytes());
        mOut.write(ECharsForSerializing.EQUAL_QUOTE.getBytes());
        write(pRtx.getNodeKey());
        mOut.write(ECharsForSerializing.QUOTE.getBytes());
      }

      // Iterate over all persistent attributes.
      for (int index = 0; index < pRtx.getAttributeCount(); index++) {
        pRtx.moveToAttribute(index);
        mOut.write(ECharsForSerializing.SPACE.getBytes());
        mOut.write(pRtx.rawNameForKey(pRtx.getNameKey()));
        mOut.write(ECharsForSerializing.EQUAL_QUOTE.getBytes());
        mOut.write(XMLToken.escapeAttribute(pRtx.getValue()).getBytes(
            IConstants.DEFAULT_ENCODING));
        mOut.write(ECharsForSerializing.QUOTE.getBytes());
        pRtx.moveTo(key);
      }
      // Childless elements are serialized as empty-element tags ("/>").
      if (pRtx.hasFirstChild()) {
        mOut.write(ECharsForSerializing.CLOSE.getBytes());
      } else {
        mOut.write(ECharsForSerializing.SLASH_CLOSE.getBytes());
      }
      if (mIndent) {
        mOut.write(ECharsForSerializing.NEWLINE.getBytes());
      }
      break;
    case TEXT:
      indent();
      mOut.write(XMLToken.escapeContent(pRtx.getValue()).getBytes(
          IConstants.DEFAULT_ENCODING));
      if (mIndent) {
        mOut.write(ECharsForSerializing.NEWLINE.getBytes());
      }
      break;
    }
  } catch (final IOException e) {
    LOGWRAPPER.error(e.getMessage(), e);
  }
}

/**
 * Emit end element.
 *
 * @param pRtx
 *          Read Transaction
 */
@Override
protected void emitEndElement(final @Nonnull INodeReadTrx pRtx) {
  try {
    indent();
    mOut.write(ECharsForSerializing.OPEN_SLASH.getBytes());
    mOut.write(pRtx.rawNameForKey(pRtx.getNameKey()));
    mOut.write(ECharsForSerializing.CLOSE.getBytes());
    if (mIndent) {
      mOut.write(ECharsForSerializing.NEWLINE.getBytes());
    }
  } catch (final IOException e) {
    LOGWRAPPER.error(e.getMessage(), e);
  }
}

/** Emit the XML declaration and, if requested, the surrounding REST wrapper. */
@Override
protected void emitStartDocument() {
  try {
    if (mSerializeXMLDeclaration) {
      write("<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?>");
    }
    if (mSerializeRest) {
      write("<rest:sequence xmlns:rest=\"REST\"><rest:item>");
    }
  } catch (final IOException e) {
    LOGWRAPPER.error(e.getMessage(), e);
  }
}

/** Close the REST wrapper (if any) and flush the underlying stream. */
@Override
protected void emitEndDocument() {
  try {
    if (mSerializeRest) {
      write("</rest:item></rest:sequence>");
    }
    mOut.flush();
  } catch (final IOException e) {
    LOGWRAPPER.error(e.getMessage(), e);
  }
}

/** Open a synthetic element carrying the serialized revision number. */
@Override
protected void emitStartManualElement(final long pVersion) {
  try {
    write("<tt revision=\"");
    write(Long.toString(pVersion));
    write("\">");
  } catch (final IOException e) {
    LOGWRAPPER.error(e.getMessage(), e);
  }
}

/** Close the synthetic revision element opened by emitStartManualElement. */
@Override
protected void emitEndManualElement(final long pVersion) {
  try {
    write("</tt>");
  } catch (final IOException e) {
    LOGWRAPPER.error(e.getMessage(), e);
  }
}

/**
 * Indentation of output.
 *
 * @throws IOException
 *           if can't indent output
 */
private void indent() throws IOException {
  if (mIndent) {
    for (int i = 0; i < mStack.size() * mIndentSpaces; i++) {
      mOut.write(" ".getBytes());
    }
  }
}

/**
 * Write characters of string.
 *
 * @param pString
 *          String to write
 * @throws IOException
 *           if can't write to string
 * @throws UnsupportedEncodingException
 *           if unsupported encoding
 */
protected void write(@Nonnull final String pString)
    throws UnsupportedEncodingException, IOException {
  mOut.write(pString.getBytes(IConstants.DEFAULT_ENCODING));
}

/**
 * Write non-negative non-zero long as UTF-8 bytes.
 *
 * @param pValue
 *          value to write
 * @throws IOException
 *           if can't write to string
 */
private void write(final long pValue) throws IOException {
  final int length = (int) Math.log10(pValue);
  int digit = 0;
  long remainder = pValue;
  // FIX: the loop header was garbled ("i digit = ..."); restored the
  // decrementing loop that emits digits from most- to least-significant.
  for (int i = length; i >= 0; i--) {
    digit = (byte) (remainder / LONG_POWERS[i]);
    mOut.write((byte) (digit + ASCII_OFFSET));
    remainder -= digit * LONG_POWERS[i];
  }
}

/**
 * Main method.
 *
 * @param args
 *          args[0] specifies the input-TT file/folder; args[1] specifies the
 *          output XML file.
 * @throws Exception
 *           any exception
 */
public static void main(final String... args) throws Exception {
  if (args.length < 2 || args.length > 3) {
    throw new IllegalArgumentException(
        "Usage: XMLSerializer input-TT output.xml");
  }
  LOGWRAPPER.info("Serializing '" + args[0] + "' to '" + args[1] + "' ... ");
  final long time = System.nanoTime();
  final File target = new File(args[1]);
  Files.recursiveRemove(target.toPath());
  target.getParentFile().mkdirs();
  target.createNewFile();
  try (final FileOutputStream outputStream = new FileOutputStream(target)) {
    final DatabaseConfiguration config = new DatabaseConfiguration(new File(
        args[0]));
    Database.createDatabase(config);
    try (final IDatabase db = Database.openDatabase(new File(args[0]))) {
      db.createResource(new ResourceConfiguration.Builder("shredded", config)
          .build());
      final ISession session = db
          .getSession(new SessionConfiguration.Builder("shredded").build());
      final XMLSerializer serializer = new XMLSerializerBuilder(session,
          outputStream).build();
      serializer.call();
    }
  }
  LOGWRAPPER
      .info(" done [" + (System.nanoTime() - time) / 1_000_000 + "ms].");
}

/**
 * XMLSerializerBuilder to setup the XMLSerializer.
 */
public static final class XMLSerializerBuilder {

  /** Intermediate boolean for indentation, not necessary. */
  private boolean mIndent;

  /** Intermediate boolean for rest serialization, not necessary. */
  private boolean mREST;

  /** Intermediate boolean for XML-Decl serialization, not necessary. */
  private boolean mDeclaration = true;

  /** Intermediate boolean for ids, not necessary. */
  private boolean mID;

  /** Intermediate number of spaces to indent, not necessary. */
  private int mIndentSpaces = 2;

  /** Stream to pipe to. */
  private final OutputStream mStream;

  /** Session to use. */
  private final ISession mSession;

  /** Further revisions to serialize. */
  private int[] mRevisions;

  /** Revision to serialize. */
  private int mRevision;

  /** Node key of subtree to shredder. */
  private final long mNodeKey;

  /**
   * Constructor, setting the necessary stuff.
   *
   * @param pSession
   *          Sirix {@link ISession}
   * @param pStream
   *          {@link OutputStream} to write to
   * @param pRevisions
   *          revisions to serialize
   */
  public XMLSerializerBuilder(@Nonnull final ISession pSession,
      @Nonnull final OutputStream pStream, final int... pRevisions) {
    mNodeKey = 0;
    mSession = checkNotNull(pSession);
    mStream = checkNotNull(pStream);
    if (pRevisions == null || pRevisions.length == 0) {
      // Default to the most recent revision.
      mRevision = mSession.getLastRevisionNumber();
    } else {
      // First revision is the primary one; the rest are "further" revisions.
      mRevision = pRevisions[0];
      mRevisions = new int[pRevisions.length - 1];
      for (int i = 0; i < pRevisions.length - 1; i++) {
        mRevisions[i] = pRevisions[i + 1];
      }
    }
  }

  /**
   * Constructor.
   *
   * @param pSession
   *          Sirix {@link ISession}
   * @param pNodeKey
   *          root node key of subtree to shredder
   * @param pStream
   *          {@link OutputStream} to write to
   * @param pProperties
   *          {@link XMLSerializerProperties} to use
   * @param pRevisions
   *          version(s) to serialize
   */
  public XMLSerializerBuilder(@Nonnull final ISession pSession,
      @Nonnegative final long pNodeKey, @Nonnull final OutputStream pStream,
      @Nonnull final XMLSerializerProperties pProperties,
      final int... pRevisions) {
    checkArgument(pNodeKey >= 0, "pNodeKey must be >= 0!");
    mSession = checkNotNull(pSession);
    mNodeKey = pNodeKey;
    mStream = checkNotNull(pStream);
    if (pRevisions == null || pRevisions.length == 0) {
      mRevision = mSession.getLastRevisionNumber();
    } else {
      mRevision = pRevisions[0];
      mRevisions = new int[pRevisions.length - 1];
      for (int i = 0; i < pRevisions.length - 1; i++) {
        mRevisions[i] = pRevisions[i + 1];
      }
    }
    // Pull the serialization flags out of the supplied properties.
    final ConcurrentMap<?, ?> map = checkNotNull(pProperties.getProps());
    mIndent = checkNotNull((Boolean) map.get(S_INDENT[0]));
    mREST = checkNotNull((Boolean) map.get(S_REST[0]));
    mID = checkNotNull((Boolean) map.get(S_ID[0]));
    mIndentSpaces = checkNotNull((Integer) map.get(S_INDENT_SPACES[0]));
    mDeclaration = checkNotNull((Boolean) map.get(S_XMLDECL[0]));
  }

  /**
   * Setting the indentation.
   * (Method name keeps its historical typo for source compatibility.)
   *
   * @param pIndent
   *          determines if it should be indented
   * @return XMLSerializerBuilder reference
   */
  public XMLSerializerBuilder setIndend(final boolean pIndent) {
    mIndent = pIndent;
    return this;
  }

  /**
   * Setting the RESTful output.
   *
   * @param pREST
   *          set RESTful
   * @return XMLSerializerBuilder reference
   */
  public XMLSerializerBuilder setREST(final boolean pREST) {
    mREST = pREST;
    return this;
  }

  /**
   * Setting the declaration.
   *
   * @param pDeclaration
   *          determines if the XML declaration should be emitted
   * @return {@link XMLSerializerBuilder} reference
   */
  public XMLSerializerBuilder setDeclaration(final boolean pDeclaration) {
    mDeclaration = pDeclaration;
    return this;
  }

  /**
   * Setting the IDs on nodes.
   *
   * @param pID
   *          determines if IDs should be set for each node
   * @return XMLSerializerBuilder reference
   */
  public XMLSerializerBuilder setID(final boolean pID) {
    mID = pID;
    return this;
  }

  /**
   * Setting the revisions to serialize.
   *
   * @param pRevisions
   *          revisions to serialize
   * @return XMLSerializerBuilder reference
   */
  public XMLSerializerBuilder setVersions(final int[] pRevisions) {
    mRevisions = checkNotNull(pRevisions);
    return this;
  }

  /**
   * Building new {@link XMLSerializer} instance.
   *
   * @return a new {@link XMLSerializer} instance
   */
  public XMLSerializer build() {
    return new XMLSerializer(mSession, mNodeKey, this, mRevision, mRevisions);
  }
}
}
package org.infinispan.demo;

import org.infinispan.Cache;
import org.infinispan.manager.DefaultCacheManager;

/**
 * Demo for the Ceph cache store: writes an entry, restarts the cache so the
 * in-memory copy is dropped, and reads the entry back through the persistent
 * store configured in {@code ispn-ceph.xml}.
 */
public class CephCacheStore {

    public static void main(String[] args) throws Exception {
        /* Programmatic configuration alternative, kept for reference:
        ConfigurationBuilder cfg = new ConfigurationBuilder();
        cfg.persistence().addStore(CephStoreConfigurationBuilder.class)
            .userName("admin")
            .key("AQCY2sdXyDIcJxAAK1edRJ8xOJ2NkkiXzAuq5A==")
            .monitorHost("192.168.122.145:6789")
            .poolNamePrefix("ispn_store");
        DefaultCacheManager cacheManager = new DefaultCacheManager(cfg.build());
        */
        // FIX: use try-with-resources so the manager is stopped even when an
        // operation throws (DefaultCacheManager is AutoCloseable; close()
        // delegates to stop()). The original leaked the manager on failure.
        try (DefaultCacheManager cacheManager = new DefaultCacheManager("ispn-ceph.xml")) {
            Cache<String, String> cache = cacheManager.getCache("test");
            cache.put("key", "value");
            System.out.printf("key = %s\n", cache.get("key"));
            // Restart the cache so the second read must come from the store.
            cache.stop();
            cache.start();
            System.out.printf("key = %s\n", cache.get("key"));
        }
    }
}
package com.mkl.eu.client.common.util;

import java.util.Collection;
import java.util.Map;
import java.util.Optional;
import java.util.function.Predicate;
import java.util.stream.Stream;

/**
 * Various utility method.
 *
 * @author MKL.
 */
public final class CommonUtil {

    /**
     * No instance.
     */
    private CommonUtil() {
    }

    /**
     * Find the first element of the collection that matches the predicate, or <code>null</code> if none matches.
     *
     * @param list      collection to parse.
     * @param predicate to use for matching purpose.
     * @param <T>       Type of the Collection.
     * @return the first element of the collection matching the predicate.
     */
    public static <T> T findFirst(Collection<T> list, Predicate<T> predicate) {
        return findFirst(list.stream(), predicate);
    }

    /**
     * Find the first element of the stream that matches the predicate, or <code>null</code> if none matches.
     *
     * @param stream    stream to parse.
     * @param predicate to use for matching purpose.
     * @param <T>       Type of the Collection.
     * @return the first element of the collection matching the predicate.
     */
    public static <T> T findFirst(Stream<T> stream, Predicate<T> predicate) {
        // Optional#orElse replaces the manual isPresent()/get() dance.
        return stream.filter(predicate).findFirst().orElse(null);
    }

    /**
     * Add several Integer that can be <code>null</code>.
     *
     * @param numbers to add.
     * @return the sum of the numbers, or <code>null</code> when no number was given.
     */
    public static Integer add(Integer... numbers) {
        Integer sum = null;
        for (Integer number : numbers) {
            if (sum == null) {
                sum = number;
            } else if (number != null) {
                sum = sum + number;
            }
        }
        return sum;
    }

    /**
     * Increment a Map of K->Integer for a given key.
     * A missing key starts counting at 1.
     *
     * @param map the map.
     * @param key the key.
     * @param <K> the class of the key.
     */
    public static <K> void addOne(Map<K, Integer> map, K key) {
        if (map != null) {
            // merge() covers both the "absent" and "present" cases in one call.
            map.merge(key, 1, Integer::sum);
        }
    }

    /**
     * Decrement a Map of K->Integer for a given key.
     * If the value reaches 0, removes the key.
     * If the key doesn't exist, does nothing.
     *
     * @param map the map.
     * @param key the key.
     * @param <K> the class of the key.
     */
    public static <K> void subtractOne(Map<K, Integer> map, K key) {
        if (map != null) {
            // Single lookup instead of three; same net effect on the map.
            Integer current = map.get(key);
            if (current != null) {
                int next = current - 1;
                if (next == 0) {
                    map.remove(key);
                } else {
                    map.put(key, next);
                }
            }
        }
    }
}
package de.dfki.asr.compass.math;

import static de.dfki.asr.compass.test.matcher.Quat4fSimilarity.similarTo;
import javax.vecmath.Matrix3d;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.testng.Assert.*;
import org.testng.annotations.Test;

/**
 * Checks that {@code Orientation}'s yaw/pitch/roll setters produce the
 * expected quaternion, one test per 90-degree combination, plus two
 * matrix-based cross-checks for combinations outside that set.
 */
@SuppressWarnings("PMD.ExcessivePublicCount")
public class OrientationQuaternionTest {

	// Tolerance for component-wise quaternion/matrix comparison.
	private static final float EQUALS_DELTA = 0.0001f;
	private static final double TO_RADIANS = Math.PI / 180;

	// Example Quaternions taken from:  (source link lost)
	// Basically, all cases of SO(3), i.e. of a Cube unto itself.
	@Test
	public void yaw0Pitch0Roll0() {
		sameQuaternion(new Quat4f(0, 0, 0, 1), yawPitchRoll(0, 0, 0));
	}

	@Test
	public void yaw90Pitch0Roll0() {
		sameQuaternion(new Quat4f(0, 0.7071f, 0, 0.7071f), yawPitchRoll(90, 0, 0));
	}

	@Test
	public void yaw180Pitch0Roll0() {
		sameQuaternion(new Quat4f(0, 1, 0, 0), yawPitchRoll(180, 0, 0));
	}

	@Test
	public void yawMinus90Pitch0Roll0() {
		sameQuaternion(new Quat4f(0, -0.7071f, 0, 0.7071f), yawPitchRoll(-90, 0, 0));
	}

	@Test
	public void yaw0Pitch90Roll0() {
		sameQuaternion(new Quat4f(0, 0, 0.7071f, 0.7071f), yawPitchRoll(0, 90, 0));
	}

	@Test
	public void yaw90Pitch90Roll0() {
		sameQuaternion(new Quat4f(0.5f, 0.5f, 0.5f, 0.5f), yawPitchRoll(90, 90, 0));
	}

	@Test
	public void yaw180Pitch90Roll0() {
		sameQuaternion(new Quat4f(0.7071f, 0.7071f, 0, 0), yawPitchRoll(180, 90, 0));
	}

	@Test
	public void yawMinus90Pitch90Roll0() {
		sameQuaternion(new Quat4f(-0.5f, -0.5f, 0.5f, 0.5f), yawPitchRoll(-90, 90, 0));
	}

	@Test
	public void yaw0PitchMinus90Roll0() {
		sameQuaternion(new Quat4f(0, 0, -0.7071f, 0.7071f), yawPitchRoll(0, -90, 0));
	}

	@Test
	public void yaw90PitchMinus90Roll0() {
		sameQuaternion(new Quat4f(-0.5f, 0.5f, -0.5f, 0.5f), yawPitchRoll(90, -90, 0));
	}

	@Test
	public void yaw180PitchMinus90Roll0() {
		sameQuaternion(new Quat4f(-0.7071f, 0.7071f, 0, 0), yawPitchRoll(180, -90, 0));
	}

	@Test
	public void yawMinus90PitchMinus90Roll0() {
		sameQuaternion(new Quat4f(0.5f, -0.5f, -0.5f, 0.5f), yawPitchRoll(-90, -90, 0));
	}

	@Test
	public void yaw0Pitch0Roll90() {
		sameQuaternion(new Quat4f(0.7071f, 0, 0, 0.7071f), yawPitchRoll(0, 0, 90));
	}

	@Test
	public void yaw90Pitch0Roll90() {
		sameQuaternion(new Quat4f(0.5f, 0.5f, -0.5f, 0.5f), yawPitchRoll(90, 0, 90));
	}

	@Test
	public void yaw180Pitch0Roll90() {
		sameQuaternion(new Quat4f(0, 0.7071f, -0.7071f, 0), yawPitchRoll(180, 0, 90));
	}

	@Test
	public void yawMinus90Pitch0Roll90() {
		sameQuaternion(new Quat4f(0.5f, -0.5f, 0.5f, 0.5f), yawPitchRoll(-90, 0, 90));
	}

	@Test
	public void yaw0Pitch0Roll180() {
		sameQuaternion(new Quat4f(1, 0, 0, 0), yawPitchRoll(0, 0, 180));
	}

	@Test
	public void yaw90Pitch0Roll180() {
		sameQuaternion(new Quat4f(0.7071f, 0, -0.7071f, 0), yawPitchRoll(90, 0, 180));
	}

	@Test
	public void yaw180Pitch0Roll180() {
		// And this example contains a typo on the source page.
		sameQuaternion(new Quat4f(0, 0, -1, 0), yawPitchRoll(180, 0, 180));
	}

	@Test
	public void yawMinus90Pitch0Roll180() {
		sameQuaternion(new Quat4f(0.7071f, 0, 0.7071f, 0), yawPitchRoll(-90, 0, 180));
	}

	@Test
	public void yaw0Pitch0RollMinus90() {
		sameQuaternion(new Quat4f(-0.7071f, 0, 0, 0.7071f), yawPitchRoll(0, 0, -90));
	}

	@Test
	public void yaw90Pitch0RollMinus90() {
		sameQuaternion(new Quat4f(-0.5f, 0.5f, 0.5f, 0.5f), yawPitchRoll(90, 0, -90));
	}

	@Test
	public void yaw180Pitch0RollMinus90() {
		sameQuaternion(new Quat4f(0, 0.7071f, 0.7071f, 0), yawPitchRoll(180, 0, -90));
	}

	@Test
	public void yawMinus90Pitch0RollMinus90() {
		sameQuaternion(new Quat4f(-0.5f, -0.5f, -0.5f, 0.5f), yawPitchRoll(-90, 0, -90));
	}

	// not part of SO(3)
	@Test
	public void yaw0Pitch90Roll90() {
		// by wolfram alpha
		Orientation o = yawPitchRoll(0, 90, 90);
		Matrix3d rotFromAngles = createRotationMatrix(0, 90, 90);
		Matrix3d rotFromQuat = new Matrix3d();
		rotFromQuat.set(o.getLocalRotation());
		assertTrue(rotFromQuat.epsilonEquals(rotFromAngles, EQUALS_DELTA));
	}

	// NOTE(review): disabled, and it uses exact Matrix3d equality rather than
	// epsilonEquals like the test above — presumably why it fails; confirm.
	@Test(enabled = false)
	public void yaw0Pitch180Roll0() {
		// pitch 180 = half-turn around the X-axis
		// by sheer brain power (and a little bit of help from alpha)
		Orientation o = yawPitchRoll(0, 180, 0);
		Matrix3d rotFromAngles = createRotationMatrix(0, 180, 0);
		Matrix3d rotFromQuat = new Matrix3d();
		rotFromQuat.set(o.getLocalRotation());
		assertEquals(rotFromQuat, rotFromAngles);
	}

	// Builds an Orientation via its public yaw/pitch/roll setters (degrees).
	private Orientation yawPitchRoll(final double yaw, final double pitch, final double roll) {
		Orientation o = new Orientation();
		o.setLocalYaw(yaw);
		o.setLocalPitch(pitch);
		o.setLocalRoll(roll);
		return o;
	}

	// Asserts component-wise similarity within EQUALS_DELTA.
	private void sameQuaternion(final Quat4f quaternion, final Orientation orientation) {
		assertThat("Quaternion", orientation.getLocalRotation(), similarTo(quaternion, EQUALS_DELTA));
	}

	// Combined matrix roll * pitch * yaw (yaw applied first).
	private Matrix3d createRotationMatrix(final double yaw, final double pitch, final double roll) {
		Matrix3d rotation = createRollMatrix(roll);
		rotation.mul(createPitchMatrix(pitch));
		rotation.mul(createYawMatrix(yaw));
		return rotation;
	}

	// Rotation by t degrees about the Z axis (only m00/m01/m10/m11 populated).
	private Matrix3d createYawMatrix(final double t) {
		double theta = t * TO_RADIANS;
		Matrix3d rotation = new Matrix3d();
		rotation.m00 = Math.cos(theta);
		rotation.m01 = - Math.sin(theta);
		rotation.m10 = Math.sin(theta);
		rotation.m11 = Math.cos(theta);
		rotation.m22 = 1;
		return rotation;
	}

	// Rotation by t degrees about the Y axis.
	private Matrix3d createPitchMatrix(final double t) {
		double theta = t * TO_RADIANS;
		Matrix3d rotation = new Matrix3d();
		rotation.m00 = Math.cos(theta);
		rotation.m02 = Math.sin(theta);
		rotation.m20 = - Math.sin(theta);
		rotation.m22 = Math.cos(theta);
		rotation.m11 = 1;
		return rotation;
	}

	// Rotation by t degrees about the X axis.
	private Matrix3d createRollMatrix(final double t) {
		double theta = t * TO_RADIANS;
		Matrix3d rotation = new Matrix3d();
		rotation.m11 = Math.cos(theta);
		rotation.m12 = - Math.sin(theta);
		rotation.m21 = Math.sin(theta);
		rotation.m22 = Math.cos(theta);
		rotation.m00 = 1;
		return rotation;
	}
}
package com.yahoo.vespa.config.server; import ai.vespa.http.DomainName; import ai.vespa.http.HttpURL; import ai.vespa.http.HttpURL.Query; import com.yahoo.component.annotation.Inject; import com.yahoo.cloud.config.ConfigserverConfig; import com.yahoo.component.Version; import com.yahoo.config.FileReference; import com.yahoo.config.application.api.ApplicationFile; import com.yahoo.config.application.api.ApplicationMetaData; import com.yahoo.config.application.api.DeployLogger; import com.yahoo.config.model.api.HostInfo; import com.yahoo.config.model.api.ServiceInfo; import com.yahoo.config.provision.ActivationContext; import com.yahoo.config.provision.ApplicationId; import com.yahoo.config.provision.ApplicationTransaction; import com.yahoo.config.provision.Capacity; import com.yahoo.config.provision.Environment; import com.yahoo.config.provision.HostFilter; import com.yahoo.config.provision.InfraDeployer; import com.yahoo.config.provision.Provisioner; import com.yahoo.config.provision.RegionName; import com.yahoo.config.provision.SystemName; import com.yahoo.config.provision.TenantName; import com.yahoo.config.provision.Zone; import com.yahoo.config.provision.exception.ActivationConflictException; import com.yahoo.container.jdisc.HttpResponse; import com.yahoo.container.jdisc.SecretStoreProvider; import com.yahoo.container.jdisc.secretstore.SecretStore; import com.yahoo.docproc.jdisc.metric.NullMetric; import com.yahoo.io.IOUtils; import com.yahoo.jdisc.Metric; import com.yahoo.path.Path; import com.yahoo.slime.Slime; import com.yahoo.transaction.NestedTransaction; import com.yahoo.transaction.Transaction; import com.yahoo.vespa.applicationmodel.InfrastructureApplication; import com.yahoo.vespa.config.server.application.Application; import com.yahoo.vespa.config.server.application.ApplicationCuratorDatabase; import com.yahoo.vespa.config.server.application.ApplicationReindexing; import com.yahoo.vespa.config.server.application.ApplicationSet; import 
com.yahoo.vespa.config.server.application.ClusterReindexing; import com.yahoo.vespa.config.server.application.ClusterReindexingStatusClient; import com.yahoo.vespa.config.server.application.CompressedApplicationInputStream; import com.yahoo.vespa.config.server.application.ConfigConvergenceChecker; import com.yahoo.vespa.config.server.application.DefaultClusterReindexingStatusClient; import com.yahoo.vespa.config.server.application.FileDistributionStatus; import com.yahoo.vespa.config.server.application.HttpProxy; import com.yahoo.vespa.config.server.application.TenantApplications; import com.yahoo.vespa.config.server.configchange.ConfigChangeActions; import com.yahoo.vespa.config.server.configchange.RefeedActions; import com.yahoo.vespa.config.server.configchange.ReindexActions; import com.yahoo.vespa.config.server.configchange.RestartActions; import com.yahoo.vespa.config.server.deploy.DeployHandlerLogger; import com.yahoo.vespa.config.server.deploy.Deployment; import com.yahoo.vespa.config.server.deploy.InfraDeployerProvider; import com.yahoo.vespa.config.server.http.InternalServerException; import com.yahoo.vespa.config.server.http.LogRetriever; import com.yahoo.vespa.config.server.http.SecretStoreValidator; import com.yahoo.vespa.config.server.http.SimpleHttpFetcher; import com.yahoo.vespa.config.server.http.TesterClient; import com.yahoo.vespa.config.server.http.v2.PrepareResult; import com.yahoo.vespa.config.server.http.v2.response.DeploymentMetricsResponse; import com.yahoo.vespa.config.server.http.v2.response.ProtonMetricsResponse; import com.yahoo.vespa.config.server.metrics.DeploymentMetricsRetriever; import com.yahoo.vespa.config.server.metrics.ProtonMetricsRetriever; import com.yahoo.vespa.config.server.provision.HostProvisionerProvider; import com.yahoo.vespa.config.server.session.LocalSession; import com.yahoo.vespa.config.server.session.PrepareParams; import com.yahoo.vespa.config.server.session.RemoteSession; import 
com.yahoo.vespa.config.server.session.Session; import com.yahoo.vespa.config.server.session.SessionRepository; import com.yahoo.vespa.config.server.session.SilentDeployLogger; import com.yahoo.vespa.config.server.tenant.ApplicationRolesStore; import com.yahoo.vespa.config.server.tenant.ContainerEndpointsCache; import com.yahoo.vespa.config.server.tenant.EndpointCertificateMetadataStore; import com.yahoo.vespa.config.server.tenant.Tenant; import com.yahoo.vespa.config.server.tenant.TenantMetaData; import com.yahoo.vespa.config.server.tenant.TenantRepository; import com.yahoo.vespa.curator.Curator; import com.yahoo.vespa.curator.stats.LockStats; import com.yahoo.vespa.curator.stats.ThreadLockStats; import com.yahoo.vespa.defaults.Defaults; import com.yahoo.vespa.flags.FlagSource; import com.yahoo.vespa.flags.InMemoryFlagSource; import com.yahoo.vespa.orchestrator.Orchestrator; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.UncheckedIOException; import java.nio.file.Files; import java.nio.file.attribute.BasicFileAttributes; import java.time.Clock; import java.time.Duration; import java.time.Instant; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.OptionalLong; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.UnaryOperator; import java.util.logging.Level; import java.util.logging.Logger; import java.util.stream.Collectors; import static com.yahoo.config.model.api.container.ContainerServiceType.CONTAINER; import static com.yahoo.config.model.api.container.ContainerServiceType.LOGSERVER_CONTAINER; import static com.yahoo.vespa.config.server.application.ConfigConvergenceChecker.ServiceListResponse; import static com.yahoo.vespa.config.server.application.ConfigConvergenceChecker.ServiceResponse; import static 
com.yahoo.vespa.config.server.filedistribution.FileDistributionUtil.fileReferenceExistsOnDisk; import static com.yahoo.vespa.config.server.filedistribution.FileDistributionUtil.getFileReferencesOnDisk; import static com.yahoo.vespa.config.server.tenant.TenantRepository.HOSTED_VESPA_TENANT; import static com.yahoo.vespa.curator.Curator.CompletionWaiter; import static com.yahoo.yolean.Exceptions.uncheck; import static java.nio.file.Files.readAttributes; /** * The API for managing applications. * * @author bratseth */ // TODO: Move logic for dealing with applications here from the HTTP layer and make this the persistent component // owning the rest of the state public class ApplicationRepository implements com.yahoo.config.provision.Deployer { private static final Logger log = Logger.getLogger(ApplicationRepository.class.getName()); private final AtomicBoolean bootstrapping = new AtomicBoolean(true); private final TenantRepository tenantRepository; private final Optional<Provisioner> hostProvisioner; private final Optional<InfraDeployer> infraDeployer; private final ConfigConvergenceChecker convergeChecker; private final HttpProxy httpProxy; private final Clock clock; private final ConfigserverConfig configserverConfig; private final FileDistributionStatus fileDistributionStatus = new FileDistributionStatus(); private final Orchestrator orchestrator; private final LogRetriever logRetriever; private final TesterClient testerClient; private final Metric metric; private final SecretStoreValidator secretStoreValidator; private final ClusterReindexingStatusClient clusterReindexingStatusClient; private final FlagSource flagSource; @Inject public ApplicationRepository(TenantRepository tenantRepository, HostProvisionerProvider hostProvisionerProvider, InfraDeployerProvider infraDeployerProvider, ConfigConvergenceChecker configConvergenceChecker, HttpProxy httpProxy, ConfigserverConfig configserverConfig, Orchestrator orchestrator, TesterClient testerClient, Metric metric, 
SecretStore secretStore, FlagSource flagSource) { this(tenantRepository, hostProvisionerProvider.getHostProvisioner(), infraDeployerProvider.getInfraDeployer(), configConvergenceChecker, httpProxy, configserverConfig, orchestrator, new LogRetriever(), Clock.systemUTC(), testerClient, metric, new SecretStoreValidator(secretStore), new DefaultClusterReindexingStatusClient(), flagSource); } private ApplicationRepository(TenantRepository tenantRepository, Optional<Provisioner> hostProvisioner, Optional<InfraDeployer> infraDeployer, ConfigConvergenceChecker configConvergenceChecker, HttpProxy httpProxy, ConfigserverConfig configserverConfig, Orchestrator orchestrator, LogRetriever logRetriever, Clock clock, TesterClient testerClient, Metric metric, SecretStoreValidator secretStoreValidator, ClusterReindexingStatusClient clusterReindexingStatusClient, FlagSource flagSource) { this.tenantRepository = Objects.requireNonNull(tenantRepository); this.hostProvisioner = Objects.requireNonNull(hostProvisioner); this.infraDeployer = Objects.requireNonNull(infraDeployer); this.convergeChecker = Objects.requireNonNull(configConvergenceChecker); this.httpProxy = Objects.requireNonNull(httpProxy); this.configserverConfig = Objects.requireNonNull(configserverConfig); this.orchestrator = Objects.requireNonNull(orchestrator); this.logRetriever = Objects.requireNonNull(logRetriever); this.clock = Objects.requireNonNull(clock); this.testerClient = Objects.requireNonNull(testerClient); this.metric = Objects.requireNonNull(metric); this.secretStoreValidator = Objects.requireNonNull(secretStoreValidator); this.clusterReindexingStatusClient = clusterReindexingStatusClient; this.flagSource = flagSource; } // Should be used by tests only (first constructor in this class makes sure we use injectable components where possible) public static class Builder { private TenantRepository tenantRepository; private Optional<Provisioner> hostProvisioner; private HttpProxy httpProxy = new HttpProxy(new 
SimpleHttpFetcher()); private Clock clock = Clock.systemUTC(); private ConfigserverConfig configserverConfig = new ConfigserverConfig.Builder().build(); private Orchestrator orchestrator; private LogRetriever logRetriever = new LogRetriever(); private TesterClient testerClient = new TesterClient(); private Metric metric = new NullMetric(); private SecretStoreValidator secretStoreValidator = new SecretStoreValidator(new SecretStoreProvider().get()); private FlagSource flagSource = new InMemoryFlagSource(); private ConfigConvergenceChecker configConvergenceChecker = new ConfigConvergenceChecker(); public Builder withTenantRepository(TenantRepository tenantRepository) { this.tenantRepository = tenantRepository; return this; } public Builder withClock(Clock clock) { this.clock = clock; return this; } public Builder withProvisioner(Provisioner provisioner) { if (this.hostProvisioner != null) throw new IllegalArgumentException("provisioner already set in builder"); this.hostProvisioner = Optional.ofNullable(provisioner); return this; } public Builder withHostProvisionerProvider(HostProvisionerProvider hostProvisionerProvider) { if (this.hostProvisioner != null) throw new IllegalArgumentException("provisioner already set in builder"); this.hostProvisioner = hostProvisionerProvider.getHostProvisioner(); return this; } public Builder withHttpProxy(HttpProxy httpProxy) { this.httpProxy = httpProxy; return this; } public Builder withConfigserverConfig(ConfigserverConfig configserverConfig) { this.configserverConfig = configserverConfig; return this; } public Builder withOrchestrator(Orchestrator orchestrator) { this.orchestrator = orchestrator; return this; } public Builder withLogRetriever(LogRetriever logRetriever) { this.logRetriever = logRetriever; return this; } public Builder withTesterClient(TesterClient testerClient) { this.testerClient = testerClient; return this; } public Builder withFlagSource(FlagSource flagSource) { this.flagSource = flagSource; return this; } 
public Builder withMetric(Metric metric) { this.metric = metric; return this; }
public Builder withSecretStoreValidator(SecretStoreValidator secretStoreValidator) { this.secretStoreValidator = secretStoreValidator; return this; }
public Builder withConfigConvergenceChecker(ConfigConvergenceChecker configConvergenceChecker) { this.configConvergenceChecker = configConvergenceChecker; return this; }
// NOTE(review): build() always wires an empty InfraDeployer and the dummy reindexing client — consistent with "tests only".
public ApplicationRepository build() { return new ApplicationRepository(tenantRepository, hostProvisioner, InfraDeployerProvider.empty().getInfraDeployer(), configConvergenceChecker, httpProxy, configserverConfig, orchestrator, logRetriever, clock, testerClient, metric, secretStoreValidator, ClusterReindexingStatusClient.DUMMY_INSTANCE, flagSource); } }
public Metric metric() { return metric; }
@Override public boolean bootstrapping() { return bootstrapping.get(); }
public void bootstrappingDone() { bootstrapping.set(false); }
// Prepares an already-created session and wraps the result; activation is a separate step.
public PrepareResult prepare(long sessionId, PrepareParams prepareParams) { DeployHandlerLogger logger = DeployHandlerLogger.forPrepareParams(prepareParams); Deployment deployment = prepare(sessionId, prepareParams, logger); return new PrepareResult(sessionId, deployment.configChangeActions(), logger); }
private Deployment prepare(long sessionId, PrepareParams prepareParams, DeployHandlerLogger logger) { Tenant tenant = getTenant(prepareParams.getApplicationId()); Session session = validateThatLocalSessionIsNotActive(tenant, sessionId); Deployment deployment = Deployment.unprepared(session, this, hostProvisioner, tenant, prepareParams, logger, clock); deployment.prepare(); logConfigChangeActions(deployment.configChangeActions(), logger); log.log(Level.INFO, TenantRepository.logPre(prepareParams.getApplicationId()) + "Session " + sessionId + " prepared successfully. "); return deployment; }
// Decompresses the uploaded package into a temp dir, deploys, and always cleans the temp dir up.
public PrepareResult deploy(CompressedApplicationInputStream in, PrepareParams prepareParams) { DeployHandlerLogger logger = DeployHandlerLogger.forPrepareParams(prepareParams); File tempDir = uncheck(() -> Files.createTempDirectory("deploy")).toFile(); ThreadLockStats threadLockStats = LockStats.getForCurrentThread(); PrepareResult prepareResult; try { threadLockStats.startRecording("deploy of " + prepareParams.getApplicationId().serializedForm()); prepareResult = deploy(decompressApplication(in, tempDir), prepareParams, logger); } finally { threadLockStats.stopRecording(); cleanupTempDirectory(tempDir, logger); } return prepareResult; }
public PrepareResult deploy(File applicationPackage, PrepareParams prepareParams) { return deploy(applicationPackage, prepareParams, DeployHandlerLogger.forPrepareParams(prepareParams)); }
// Create session -> prepare -> activate (activation skipped for dry runs).
private PrepareResult deploy(File applicationDir, PrepareParams prepareParams, DeployHandlerLogger logger) { ApplicationId applicationId = prepareParams.getApplicationId(); long sessionId = createSession(applicationId, prepareParams.getTimeoutBudget(), applicationDir, logger); Deployment deployment = prepare(sessionId, prepareParams, logger); if ( ! prepareParams.isDryRun()) deployment.activate(); return new PrepareResult(sessionId, deployment.configChangeActions(), logger); }
/**
 * Creates a new deployment from the active application, if available.
 * This is used for system internal redeployments, not on application package changes.
 *
 * @param application the active application to be redeployed
 * @return a new deployment from the local active, or empty if a local active application
 *         was not present for this id (meaning it either is not active or active on another
 *         node in the config server cluster)
 */
@Override public Optional<com.yahoo.config.provision.Deployment> deployFromLocalActive(ApplicationId application) { return deployFromLocalActive(application, false); }
/**
 * Creates a new deployment from the active application, if available.
 * This is used for system internal redeployments, not on application package changes.
 *
 * @param application the active application to be redeployed
 * @param bootstrap the deployment is done when bootstrapping
 * @return a new deployment from the local active, or empty if a local active application
 *         was not present for this id (meaning it either is not active or active on another
 *         node in the config server cluster)
 */
@Override public Optional<com.yahoo.config.provision.Deployment> deployFromLocalActive(ApplicationId application, boolean bootstrap) { return deployFromLocalActive(application, Duration.ofSeconds(configserverConfig.zookeeper().barrierTimeout()).plus(Duration.ofSeconds(5)), bootstrap); }
/**
 * Creates a new deployment from the active application, if available.
 * This is used for system internal redeployments, not on application package changes.
 *
 * @param application the active application to be redeployed
 * @param timeout the timeout to use for each individual deployment operation
 * @param bootstrap the deployment is done when bootstrapping
 * @return a new deployment from the local active, or empty if a local active application
 *         was not present for this id (meaning it either is not active or active on another
 *         node in the config server cluster)
 */
@Override public Optional<com.yahoo.config.provision.Deployment> deployFromLocalActive(ApplicationId application, Duration timeout, boolean bootstrap) { Optional<com.yahoo.config.provision.Deployment> infraDeployment = infraDeployer.flatMap(d -> d.getDeployment(application)); if (infraDeployment.isPresent()) return infraDeployment; Tenant tenant = tenantRepository.getTenant(application.tenant()); if (tenant == null) return Optional.empty(); Optional<LocalSession> activeSession = getActiveLocalSession(tenant, application); if (activeSession.isEmpty()) return Optional.empty(); TimeoutBudget timeoutBudget = new TimeoutBudget(clock, timeout); SessionRepository sessionRepository = tenant.getSessionRepository(); DeployLogger logger = new SilentDeployLogger(); Session newSession = sessionRepository.createSessionFromExisting(activeSession.get(), true, timeoutBudget, logger); return Optional.of(Deployment.unprepared(newSession, this, hostProvisioner, tenant, logger, timeout, clock, false /* don't validate as this is already deployed */, bootstrap)); }
@Override public Optional<Instant> lastDeployTime(ApplicationId application) { Tenant tenant = tenantRepository.getTenant(application.tenant()); if (tenant == null) return Optional.empty(); return getActiveSession(tenant, application).map(Session::getCreateTime); }
// Activates an already-prepared session; continues on the next chunk line.
public ApplicationId activate(Tenant tenant, long sessionId, TimeoutBudget timeoutBudget, boolean force) { DeployLogger logger = new SilentDeployLogger(); Session session = getLocalSession(tenant, sessionId); Deployment deployment =
Deployment.prepared(session, this, hostProvisioner, tenant, logger, timeoutBudget.timeout(), clock, false, force); deployment.activate(); return session.getApplicationId(); }
// Builds one transaction that deactivates the currently active session (if any) and activates the prepared one.
public Transaction deactivateCurrentActivateNew(Optional<Session> active, Session prepared, boolean force) { Tenant tenant = tenantRepository.getTenant(prepared.getTenantName()); Transaction transaction = tenant.getSessionRepository().createActivateTransaction(prepared); if (active.isPresent()) { checkIfActiveHasChanged(prepared, active.get(), force); checkIfActiveIsNewerThanSessionToBeActivated(prepared.getSessionId(), active.get().getSessionId()); transaction.add(active.get().createDeactivateTransaction().operations()); } transaction.add(updateMetaDataWithDeployTimestamp(tenant, clock.instant())); return transaction; }
private List<Transaction.Operation> updateMetaDataWithDeployTimestamp(Tenant tenant, Instant deployTimestamp) { TenantMetaData tenantMetaData = getTenantMetaData(tenant).withLastDeployTimestamp(deployTimestamp); return tenantRepository.createWriteTenantMetaDataTransaction(tenantMetaData).operations(); }
TenantMetaData getTenantMetaData(Tenant tenant) { return tenantRepository.getTenantMetaData(tenant); }
// Detects a lost activation race: another session was activated after this one was created. "force" downgrades to a warning.
static void checkIfActiveHasChanged(Session session, Session activeSession, boolean ignoreStaleSessionFailure) { long activeSessionAtCreate = session.getActiveSessionAtCreate(); log.log(Level.FINE, () -> activeSession.logPre() + "active session id at create time=" + activeSessionAtCreate); if (activeSessionAtCreate == 0) return; // No active session at create time
long sessionId = session.getSessionId(); long activeSessionSessionId = activeSession.getSessionId(); log.log(Level.FINE, () -> activeSession.logPre() + "sessionId=" + sessionId + ", current active session=" + activeSessionSessionId); if (activeSession.isNewerThan(activeSessionAtCreate) && activeSessionSessionId != sessionId) { String errMsg = activeSession.logPre() + "Cannot activate session " + sessionId + " because the currently active session (" + activeSessionSessionId + ") has changed since session " + sessionId + " was created (was " + activeSessionAtCreate + " at creation time)"; if (ignoreStaleSessionFailure) { log.warning(errMsg + " (Continuing because of force.)"); } else { throw new ActivationConflictException(errMsg); } } }
// Config generation is equal to session id, and config generation must be a monotonically increasing number
static void checkIfActiveIsNewerThanSessionToBeActivated(long sessionId, long currentActiveSessionId) { if (sessionId < currentActiveSessionId) { throw new ActivationConflictException("Cannot activate session " + sessionId + ", because it is older than current active session (" + currentActiveSessionId + ")"); } }
/**
 * Deletes an application and associated resources
 *
 * @return true if the application was found and deleted, false if it was not present
 * @throws RuntimeException if deleting the application fails. This method is exception safe.
 */
public boolean delete(ApplicationId applicationId) { Tenant tenant = getTenant(applicationId); if (tenant == null) return false; TenantApplications tenantApplications = tenant.getApplicationRepo(); NestedTransaction transaction = new NestedTransaction(); Optional<ApplicationTransaction> applicationTransaction = hostProvisioner.map(provisioner -> provisioner.lock(applicationId)) .map(lock -> new ApplicationTransaction(lock, transaction)); try (var applicationLock = tenantApplications.lock(applicationId)) { Optional<Long> activeSession = tenantApplications.activeSessionOf(applicationId); CompletionWaiter waiter; if (activeSession.isPresent()) { try { Session session = getRemoteSession(tenant, activeSession.get()); transaction.add(tenant.getSessionRepository().createSetStatusTransaction(session, Session.Status.DELETE)); } catch (NotFoundException e) { log.log(Level.INFO, TenantRepository.logPre(applicationId) + "Active session exists, but has not been deleted properly. Trying to cleanup"); } waiter = tenantApplications.createRemoveApplicationWaiter(applicationId); } else { // If there's no active session, we still want to clean up any resources created in a failing prepare
waiter = new NoopCompletionWaiter(); } Curator curator = tenantRepository.getCurator(); transaction.add(new ContainerEndpointsCache(tenant.getPath(), curator).delete(applicationId)); // TODO: Not unit tested
// Delete any application roles
transaction.add(new ApplicationRolesStore(curator, tenant.getPath()).delete(applicationId));
// Delete endpoint certificates
transaction.add(new EndpointCertificateMetadataStore(curator, tenant.getPath()).delete(applicationId));
// This call will remove application in zookeeper. Watches in TenantApplications will remove the application
// and allocated hosts in model and handlers in RPC server
transaction.add(tenantApplications.createDeleteTransaction(applicationId)); transaction.onCommitted(() -> log.log(Level.INFO, "Deleted " + applicationId)); if (applicationTransaction.isPresent()) { hostProvisioner.get().remove(applicationTransaction.get()); applicationTransaction.get().nested().commit(); } else { transaction.commit(); }
// Wait for app being removed on other servers
waiter.awaitCompletion(Duration.ofSeconds(30)); return activeSession.isPresent(); } finally { applicationTransaction.ifPresent(ApplicationTransaction::close); } }
public HttpResponse proxyServiceHostnameRequest(ApplicationId applicationId, String hostName, String serviceName, HttpURL.Path path, Query query, HttpURL forwardedUrl) { return httpProxy.get(getApplication(applicationId), hostName, serviceName, path, query, forwardedUrl); }
public Map<String, ClusterReindexing> getClusterReindexingStatus(ApplicationId applicationId) { return uncheck(() -> clusterReindexingStatusClient.getReindexingStatus(getApplication(applicationId))); }
public Long getApplicationGeneration(ApplicationId applicationId) { return
/* tail of getApplicationGeneration(...) */ getApplication(applicationId).getApplicationGeneration(); }
public void restart(ApplicationId applicationId, HostFilter hostFilter) { hostProvisioner.ifPresent(provisioner -> provisioner.restart(applicationId, hostFilter)); }
public boolean isSuspended(ApplicationId application) { return orchestrator.getAllSuspendedApplications().contains(application); }
public HttpResponse fileDistributionStatus(ApplicationId applicationId, Duration timeout) { return fileDistributionStatus.status(getApplication(applicationId), timeout); }
// Deletes file references on disk that no active application uses, keeping recently-accessed ones and the newest N.
public List<String> deleteUnusedFileDistributionReferences(File fileReferencesPath, Duration keepFileReferencesDuration, int numberToAlwaysKeep) { log.log(Level.FINE, () -> "Keep unused file references for " + keepFileReferencesDuration); if (!fileReferencesPath.isDirectory()) throw new RuntimeException(fileReferencesPath + " is not a directory"); Set<String> fileReferencesInUse = getFileReferencesInUse(); log.log(Level.FINE, () -> "File references in use : " + fileReferencesInUse); List<String> candidates = sortedUnusedFileReferences(fileReferencesPath, fileReferencesInUse, keepFileReferencesDuration); // Do not delete the newest ones
List<String> fileReferencesToDelete = candidates.subList(0, Math.max(0, candidates.size() - numberToAlwaysKeep)); if (fileReferencesToDelete.size() > 0) { log.log(Level.FINE, () -> "Will delete file references not in use: " + fileReferencesToDelete); fileReferencesToDelete.forEach(fileReference -> { File file = new File(fileReferencesPath, fileReference); if ( !
/* tail of deleteUnusedFileDistributionReferences(...) */ IOUtils.recursiveDeleteDir(file)) log.log(Level.WARNING, "Could not delete " + file.getAbsolutePath()); }); } return fileReferencesToDelete; }
// Union of the file references of every active application's model.
private Set<String> getFileReferencesInUse() { Set<String> fileReferencesInUse = new HashSet<>(); for (var applicationId : listApplications()) { Application app = getApplication(applicationId); fileReferencesInUse.addAll(app.getModel().fileReferences().stream() .map(FileReference::value) .collect(Collectors.toSet())); } return fileReferencesInUse; }
// Unused references not accessed within the keep window, oldest (by last access time) first.
private List<String> sortedUnusedFileReferences(File fileReferencesPath, Set<String> fileReferencesInUse, Duration keepFileReferences) { Set<String> fileReferencesOnDisk = getFileReferencesOnDisk(fileReferencesPath); log.log(Level.FINE, () -> "File references on disk (in " + fileReferencesPath + "): " + fileReferencesOnDisk); Instant instant = clock.instant().minus(keepFileReferences); return fileReferencesOnDisk .stream() .filter(fileReference -> ! fileReferencesInUse.contains(fileReference)) .filter(fileReference -> isLastFileAccessBefore(new File(fileReferencesPath, fileReference), instant)) .sorted((a, b) -> { if (a.equals(b)) return 0; else if (lastAccessed(new File(fileReferencesPath, a)) .isBefore(lastAccessed(new File(fileReferencesPath, b)))) return -1; else return 1; }) .collect(Collectors.toList()); }
public Set<FileReference> getFileReferences(ApplicationId applicationId) { return getOptionalApplication(applicationId).map(app -> app.getModel().fileReferences()).orElse(Set.of()); }
public ApplicationFile getApplicationFileFromSession(TenantName tenantName, long sessionId, HttpURL.Path path, Session.Mode mode) { Tenant tenant = tenantRepository.getTenant(tenantName); return getLocalSession(tenant, sessionId).getApplicationFile(Path.from(path.segments()), mode); }
public Tenant getTenant(ApplicationId applicationId) { return tenantRepository.getTenant(applicationId.tenant()); }
Application getApplication(ApplicationId applicationId) { return getApplication(applicationId,
/* tail of getApplication(ApplicationId) */ Optional.empty()); }
// Resolves the active application (optionally for a specific Vespa version); logs and rethrows on failure.
private Application getApplication(ApplicationId applicationId, Optional<Version> version) { try { Tenant tenant = getTenant(applicationId); if (tenant == null) throw new NotFoundException("Tenant '" + applicationId.tenant() + "' not found"); Optional<ApplicationSet> activeApplicationSet = tenant.getSessionRepository().getActiveApplicationSet(applicationId); if (activeApplicationSet.isEmpty()) throw new NotFoundException("Unknown application id '" + applicationId + "'"); return activeApplicationSet.get().getForVersionOrLatest(version, clock.instant()); } catch (NotFoundException e) { log.log(Level.WARNING, "Failed getting application for '" + applicationId + "': " + e.getMessage()); throw e; } catch (Exception e) { log.log(Level.WARNING, "Failed getting application for '" + applicationId + "'", e); throw e; } }
// Will return Optional.empty() if getting application fails (instead of throwing an exception)
private Optional<Application> getOptionalApplication(ApplicationId applicationId) { try { return Optional.of(getApplication(applicationId)); } catch (Exception e) { return Optional.empty(); } }
public List<ApplicationId> listApplications() { return tenantRepository.getAllTenants().stream() .flatMap(tenant -> tenant.getApplicationRepo().activeApplications().stream()) .collect(Collectors.toList()); }
private boolean isLastFileAccessBefore(File fileReference, Instant instant) { return lastAccessed(fileReference).isBefore(instant); }
// Reads the filesystem's last-access time; wraps IOException as unchecked for use in stream filters.
private Instant lastAccessed(File fileReference) { BasicFileAttributes fileAttributes; try { fileAttributes = readAttributes(fileReference.toPath(), BasicFileAttributes.class); return fileAttributes.lastAccessTime().toInstant(); } catch (IOException e) { throw new UncheckedIOException(e); } }
public Optional<String> getApplicationPackageReference(ApplicationId applicationId) { Optional<String> applicationPackage = Optional.empty(); Optional<Session> session = getActiveSession(applicationId); if (session.isPresent()) {
/* body of getApplicationPackageReference: only returns the reference if it is NOT yet on disk locally. */ FileReference applicationPackageReference = session.get().getApplicationPackageReference(); File downloadDirectory = new File(Defaults.getDefaults().underVespaHome(configserverConfig().fileReferencesDir())); if (applicationPackageReference != null && ! fileReferenceExistsOnDisk(downloadDirectory, applicationPackageReference)) applicationPackage = Optional.of(applicationPackageReference.value()); } return applicationPackage; }
public List<Version> getAllVersions(ApplicationId applicationId) { Optional<ApplicationSet> applicationSet = getActiveApplicationSet(applicationId); return applicationSet.isEmpty() ? List.of() : applicationSet.get().getAllVersions(applicationId); }
public HttpResponse validateSecretStore(ApplicationId applicationId, SystemName systemName, Slime slime) { Application application = getApplication(applicationId); return secretStoreValidator.validateSecretStore(application, systemName, slime); }
// Config convergence: per-service and all-services checks delegate to the injected checker.
public ServiceResponse checkServiceForConfigConvergence(ApplicationId applicationId, String hostAndPort, Duration timeout, Optional<Version> vespaVersion) { return convergeChecker.getServiceConfigGeneration(getApplication(applicationId, vespaVersion), hostAndPort, timeout); }
public ServiceListResponse servicesToCheckForConfigConvergence(ApplicationId applicationId, Duration timeoutPerService, Optional<Version> vespaVersion) { return convergeChecker.checkConvergenceForAllServices(getApplication(applicationId, vespaVersion), timeoutPerService); }
public ConfigConvergenceChecker configConvergenceChecker() { return convergeChecker; }
public HttpResponse getLogs(ApplicationId applicationId, Optional<DomainName> hostname, String apiParams) { String logServerURI = getLogServerURI(applicationId, hostname) + apiParams; return logRetriever.getLogs(logServerURI); }
public HttpResponse getTesterStatus(ApplicationId applicationId) { return testerClient.getStatus(getTesterHostname(applicationId), getTesterPort(applicationId)); }
public HttpResponse
/* tester endpoints: all resolve the tester container's host/port and delegate to testerClient. */ getTesterLog(ApplicationId applicationId, Long after) { return testerClient.getLog(getTesterHostname(applicationId), getTesterPort(applicationId), after); }
public HttpResponse startTests(ApplicationId applicationId, String suite, byte[] config) { return testerClient.startTests(getTesterHostname(applicationId), getTesterPort(applicationId), suite, config); }
public HttpResponse isTesterReady(ApplicationId applicationId) { return testerClient.isTesterReady(getTesterHostname(applicationId), getTesterPort(applicationId)); }
public HttpResponse getTestReport(ApplicationId applicationId) { return testerClient.getReport(getTesterHostname(applicationId), getTesterPort(applicationId)); }
private String getTesterHostname(ApplicationId applicationId) { return getTesterServiceInfo(applicationId).getHostName(); }
// NOTE(review): unchecked Optional.get() — assumes a tester container always exposes an "http"-tagged port.
private int getTesterPort(ApplicationId applicationId) { ServiceInfo serviceInfo = getTesterServiceInfo(applicationId); return serviceInfo.getPorts().stream().filter(portInfo -> portInfo.getTags().contains("http")).findFirst().get().getPort(); }
// Picks the first host of the tester app's model, then its first CONTAINER service.
private ServiceInfo getTesterServiceInfo(ApplicationId applicationId) { Application application = getApplication(applicationId); return application.getModel().getHosts().stream() .findFirst().orElseThrow(() -> new InternalServerException("Could not find any host for tester app " + applicationId.toFullString())) .getServices().stream() .filter(service -> CONTAINER.serviceName.equals(service.getServiceType())) .findFirst() .orElseThrow(() -> new InternalServerException("Could not find any tester container for tester app " + applicationId.toFullString())); }
// Activates a prepared session under the tenant's application lock; continues on the next chunk line.
public Activation activate(Session session, ApplicationId applicationId, Tenant tenant, boolean force) { NestedTransaction transaction = new NestedTransaction(); Optional<ApplicationTransaction> applicationTransaction = hostProvisioner.map(provisioner -> provisioner.lock(applicationId)) .map(lock -> new ApplicationTransaction(lock, transaction)); try (var sessionLock =
/* tail of activate(Session, ...) */ tenant.getApplicationRepo().lock(applicationId)) { Optional<Session> activeSession = getActiveSession(applicationId); CompletionWaiter waiter = session.getSessionZooKeeperClient().createActiveWaiter(); transaction.add(deactivateCurrentActivateNew(activeSession, session, force)); if (applicationTransaction.isPresent()) { hostProvisioner.get().activate(session.getAllocatedHosts().getHosts(), new ActivationContext(session.getSessionId()), applicationTransaction.get()); applicationTransaction.get().nested().commit(); } else { transaction.commit(); } return new Activation(waiter, activeSession); } finally { applicationTransaction.ifPresent(ApplicationTransaction::close); } }
/**
 * Gets the active Session for the given application id.
 *
 * @return the active session, or null if there is no active session for the given application id.
 */
public Optional<Session> getActiveSession(ApplicationId applicationId) { return getActiveRemoteSession(applicationId); }
/**
 * Gets the active Session for the given application id.
 *
 * @return the active session, or null if there is no active session for the given application id.
 */
public Optional<Session> getActiveRemoteSession(ApplicationId applicationId) { Tenant tenant = getTenant(applicationId); if (tenant == null) throw new IllegalArgumentException("Could not find any tenant for '" + applicationId + "'"); return getActiveSession(tenant, applicationId); }
public long getSessionIdForApplication(ApplicationId applicationId) { Tenant tenant = getTenant(applicationId); if (tenant == null) throw new NotFoundException("Tenant '" + applicationId.tenant() + "' not found"); return getSessionIdForApplication(tenant, applicationId); }
private long getSessionIdForApplication(Tenant tenant, ApplicationId applicationId) { TenantApplications applicationRepo = tenant.getApplicationRepo(); if (!
/* tail of getSessionIdForApplication(Tenant, ApplicationId) */ applicationRepo.exists(applicationId)) throw new NotFoundException("Unknown application id '" + applicationId + "'"); return applicationRepo.requireActiveSessionOf(applicationId); }
public void validateThatSessionIsNotActive(Tenant tenant, long sessionId) { Session session = getRemoteSession(tenant, sessionId); if (Session.Status.ACTIVATE == session.getStatus()) throw new IllegalArgumentException("Session is active: " + sessionId); }
public void validateThatSessionIsPrepared(Tenant tenant, long sessionId) { Session session = getRemoteSession(tenant, sessionId); if ( Session.Status.PREPARE != session.getStatus()) throw new IllegalArgumentException("Session not prepared: " + sessionId); }
// Clones the currently active session into a new session (used for internal redeploys).
public long createSessionFromExisting(ApplicationId applicationId, boolean internalRedeploy, TimeoutBudget timeoutBudget, DeployLogger deployLogger) { Tenant tenant = getTenant(applicationId); SessionRepository sessionRepository = tenant.getSessionRepository(); Session fromSession = getExistingSession(tenant, applicationId); return sessionRepository.createSessionFromExisting(fromSession, internalRedeploy, timeoutBudget, deployLogger).getSessionId(); }
// Stream variant: decompresses into a temp dir first, and always cleans the temp dir up.
public long createSession(ApplicationId applicationId, TimeoutBudget timeoutBudget, InputStream in, String contentType, DeployLogger logger) { File tempDir = uncheck(() -> Files.createTempDirectory("deploy")).toFile(); long sessionId; try { sessionId = createSession(applicationId, timeoutBudget, decompressApplication(in, contentType, tempDir), logger); } finally { cleanupTempDirectory(tempDir, logger); } return sessionId; }
public long createSession(ApplicationId applicationId, TimeoutBudget timeoutBudget, File applicationDirectory, DeployLogger deployLogger) { SessionRepository sessionRepository = getTenant(applicationId).getSessionRepository(); Session session = sessionRepository.createSessionFromApplicationPackage(applicationDirectory, applicationId, timeoutBudget, deployLogger); return session.getSessionId(); }
public void
/* Gathers every tenant's local sessions, maps each application to its active session id, then lets each tenant's repository delete the expired rest. */ deleteExpiredLocalSessions() { Map<Tenant, Collection<LocalSession>> sessionsPerTenant = new HashMap<>(); tenantRepository.getAllTenants() .forEach(tenant -> sessionsPerTenant.put(tenant, tenant.getSessionRepository().getLocalSessions())); Set<ApplicationId> applicationIds = new HashSet<>(); sessionsPerTenant.values() .forEach(sessionList -> sessionList.stream() .map(Session::getOptionalApplicationId) .filter(Optional::isPresent) .forEach(appId -> applicationIds.add(appId.get()))); Map<ApplicationId, Long> activeSessions = new HashMap<>(); applicationIds.forEach(applicationId -> getActiveSession(applicationId).ifPresent(session -> activeSessions.put(applicationId, session.getSessionId()))); sessionsPerTenant.keySet().forEach(tenant -> tenant.getSessionRepository().deleteExpiredSessions(activeSessions)); }
public int deleteExpiredRemoteSessions(Duration expiryTime) { return deleteExpiredRemoteSessions(clock, expiryTime); }
public int deleteExpiredRemoteSessions(Clock clock, Duration expiryTime) { return tenantRepository.getAllTenants() .stream() .map(tenant -> tenant.getSessionRepository().deleteExpiredRemoteSessions(clock, expiryTime)) .mapToInt(i -> i) .sum(); }
public TenantRepository tenantRepository() { return tenantRepository; }
// Removes tenants with no active applications that have not deployed within the TTL; built-in tenants are protected.
public Set<TenantName> deleteUnusedTenants(Duration ttlForUnusedTenant, Instant now) { return tenantRepository.getAllTenantNames().stream() .filter(tenantName -> activeApplications(tenantName).isEmpty()) .filter(tenantName -> !tenantName.equals(TenantName.defaultName())) // Not allowed to remove 'default' tenant
.filter(tenantName -> !tenantName.equals(HOSTED_VESPA_TENANT)) // Not allowed to remove 'hosted-vespa' tenant
.filter(tenantName -> getTenantMetaData(tenantRepository.getTenant(tenantName)).lastDeployTimestamp().isBefore(now.minus(ttlForUnusedTenant))) .peek(tenantRepository::deleteTenant) .collect(Collectors.toSet()); }
public void deleteTenant(TenantName tenantName) { List<ApplicationId> activeApplications =
/* tail of deleteTenant: refuses to delete a tenant that still has active applications. */ activeApplications(tenantName); if (activeApplications.isEmpty()) tenantRepository.deleteTenant(tenantName); else throw new IllegalArgumentException("Cannot delete tenant '" + tenantName + "', it has active applications: " + activeApplications); }
private List<ApplicationId> activeApplications(TenantName tenantName) { return tenantRepository.getTenant(tenantName).getApplicationRepo().activeApplications(); }
public ProtonMetricsResponse getProtonMetrics(ApplicationId applicationId) { Application application = getApplication(applicationId); ProtonMetricsRetriever protonMetricsRetriever = new ProtonMetricsRetriever(); return protonMetricsRetriever.getMetrics(application); }
public DeploymentMetricsResponse getDeploymentMetrics(ApplicationId applicationId) { Application application = getApplication(applicationId); DeploymentMetricsRetriever deploymentMetricsRetriever = new DeploymentMetricsRetriever(); return deploymentMetricsRetriever.getMetrics(application); }
public ApplicationMetaData getMetadataFromLocalSession(Tenant tenant, long sessionId) { return getLocalSession(tenant, sessionId).getMetaData(); }
private ApplicationCuratorDatabase requireDatabase(ApplicationId id) { Tenant tenant = getTenant(id); if (tenant == null) throw new NotFoundException("Tenant '" + id.tenant().value() + "' not found"); return tenant.getApplicationRepo().database(); }
public ApplicationReindexing getReindexing(ApplicationId id) { return requireDatabase(id).readReindexingStatus(id) .orElseThrow(() -> new NotFoundException("Reindexing status not found for " + id)); }
public void modifyReindexing(ApplicationId id, UnaryOperator<ApplicationReindexing> modifications) { Tenant tenant = getTenant(id); if (tenant == null) throw new NotFoundException("Tenant '" + id.tenant().value() + "' not found"); tenant.getApplicationRepo().database().modifyReindexing(id, ApplicationReindexing.empty(), modifications); }
public ConfigserverConfig configserverConfig() { return configserverConfig; }
public
/* Scans every tenant for an application owning the hostname; returns null if none found. */ ApplicationId getApplicationIdForHostname(String hostname) { Optional<ApplicationId> applicationId = tenantRepository.getAllTenantNames().stream() .map(tenantName -> tenantRepository.getTenant(tenantName).getApplicationRepo().getApplicationIdForHostName(hostname)) .filter(Objects::nonNull) .findFirst(); return applicationId.orElse(null); }
public FlagSource flagSource() { return flagSource; }
private Session validateThatLocalSessionIsNotActive(Tenant tenant, long sessionId) { Session session = getLocalSession(tenant, sessionId); if (Session.Status.ACTIVATE.equals(session.getStatus())) { throw new IllegalArgumentException("Session is active: " + sessionId); } return session; }
private Session getLocalSession(Tenant tenant, long sessionId) { Session session = tenant.getSessionRepository().getLocalSession(sessionId); if (session == null) throw new NotFoundException("Session " + sessionId + " was not found"); return session; }
private RemoteSession getRemoteSession(Tenant tenant, long sessionId) { RemoteSession session = tenant.getSessionRepository().getRemoteSession(sessionId); if (session == null) throw new NotFoundException("Session " + sessionId + " was not found"); return session; }
public Optional<ApplicationSet> getActiveApplicationSet(ApplicationId appId) { return getTenant(appId).getSessionRepository().getActiveApplicationSet(appId); }
public Application getActiveApplication(ApplicationId applicationId) { return getActiveApplicationSet(applicationId) .map(a -> a.getForVersionOrLatest(Optional.empty(), clock.instant())) .orElseThrow(() -> new RuntimeException("Found no active application for " + applicationId)); }
// Wraps the raw stream in a size-limited compressed-application stream before decompressing.
private File decompressApplication(InputStream in, String contentType, File tempDir) { try (CompressedApplicationInputStream application = CompressedApplicationInputStream.createFromCompressedStream(in, contentType, configserverConfig.maxApplicationPackageSize())) { return decompressApplication(application, tempDir); } catch (IOException e) { throw
/* tail of decompressApplication(InputStream, ...) */ new IllegalArgumentException("Unable to decompress data in body", e); } }
private File decompressApplication(CompressedApplicationInputStream in, File tempDir) { try { return in.decompress(tempDir); } catch (IOException e) { throw new IllegalArgumentException("Unable to decompress stream", e); } }
private void cleanupTempDirectory(File tempDir, DeployLogger logger) { if (!IOUtils.recursiveDeleteDir(tempDir)) { logger.log(Level.WARNING, "Not able to delete tmp dir '" + tempDir + "'"); } }
// TODO: Merge this and getActiveSession(), they are almost identical
private Session getExistingSession(Tenant tenant, ApplicationId applicationId) { TenantApplications applicationRepo = tenant.getApplicationRepo(); return getRemoteSession(tenant, applicationRepo.requireActiveSessionOf(applicationId)); }
public Optional<Session> getActiveSession(Tenant tenant, ApplicationId applicationId) { TenantApplications applicationRepo = tenant.getApplicationRepo(); return applicationRepo.activeSessionOf(applicationId).map(aLong -> tenant.getSessionRepository().getRemoteSession(aLong)); }
public Optional<LocalSession> getActiveLocalSession(Tenant tenant, ApplicationId applicationId) { TenantApplications applicationRepo = tenant.getApplicationRepo(); return applicationRepo.activeSessionOf(applicationId).map(aLong -> tenant.getSessionRepository().getLocalSession(aLong)); }
// Sums nodes * per-node cost over all provisioned clusters of the application.
public double getQuotaUsageRate(ApplicationId applicationId) { var application = getApplication(applicationId); return application.getModel().provisioned().all().values().stream() .map(Capacity::maxResources)// TODO: This may be unspecified -> 0
.mapToDouble(resources -> resources.nodes() * resources.nodeResources().cost()) .sum(); }
@Override public Duration serverDeployTimeout() { return Duration.ofSeconds(configserverConfig.zookeeper().barrierTimeout()); }
// Logs restart/refeed/reindex consequences of a prepare; continues on the next chunk line.
private void logConfigChangeActions(ConfigChangeActions actions, DeployLogger logger) { RestartActions restartActions = actions.getRestartActions(); if ( !
restartActions.isEmpty()) { if (configserverConfig().hostedVespa()) logger.log(Level.INFO, "Orchestrated service restart triggered due to change(s) from active to new application:\n" + restartActions.format()); else logger.log(Level.WARNING, "Change(s) between active and new application that require restart:\n" + restartActions.format()); } RefeedActions refeedActions = actions.getRefeedActions(); if ( ! refeedActions.isEmpty()) { logger.logApplicationPackage(Level.WARNING, "Change(s) between active and new application that may require re-feed:\n" + refeedActions.format()); } ReindexActions reindexActions = actions.getReindexActions(); if ( ! reindexActions.isEmpty()) { if (configserverConfig().hostedVespa()) logger.log(Level.INFO, "Re-indexing triggered due to change(s) from active to new application:\n" + reindexActions.format()); else logger.log(Level.WARNING, "Change(s) between active and new application that may require re-index:\n" + reindexActions.format()); } } private String getLogServerURI(ApplicationId applicationId, Optional<DomainName> hostname) { // Allow to get logs from a given hostname if the application is under the hosted-vespa tenant. // We make no validation that the hostname is actually allocated to the given application since // most applications under hosted-vespa are not known to the model and it's OK for a user to get // logs for any host if they are authorized for the hosted-vespa tenant. if (hostname.isPresent() && HOSTED_VESPA_TENANT.equals(applicationId.tenant())) { int port = List.of(InfrastructureApplication.CONFIG_SERVER.id(), InfrastructureApplication.CONTROLLER.id()).contains(applicationId) ? 
19071 : 8080; return "http://" + hostname.get().value() + ":" + port + "/logs"; } Application application = getApplication(applicationId); Collection<HostInfo> hostInfos = application.getModel().getHosts(); HostInfo logServerHostInfo = hostInfos.stream() .filter(host -> host.getServices().stream() .anyMatch(serviceInfo -> serviceInfo.getServiceType().equalsIgnoreCase("logserver"))) .findFirst().orElseThrow(() -> new IllegalArgumentException("Could not find host info for logserver")); ServiceInfo logService = logServerHostInfo.getServices().stream() .filter(service -> LOGSERVER_CONTAINER.serviceName.equals(service.getServiceType())) .findFirst() .or(() -> logServerHostInfo.getServices().stream() .filter(service -> CONTAINER.serviceName.equals(service.getServiceType())) .findFirst()) .orElseThrow(() -> new IllegalArgumentException("No container running on logserver host")); int port = servicePort(logService); return "http://" + logServerHostInfo.getHostname() + ":" + port + "/logs"; } private int servicePort(ServiceInfo serviceInfo) { return serviceInfo.getPorts().stream() .filter(portInfo -> portInfo.getTags().stream().anyMatch(tag -> tag.equalsIgnoreCase("http"))) .findFirst().orElseThrow(() -> new IllegalArgumentException("Could not find HTTP port")) .getPort(); } public Zone zone() { return new Zone(SystemName.from(configserverConfig.system()), Environment.from(configserverConfig.environment()), RegionName.from(configserverConfig.region())); } public Clock clock() { return clock; } /** Emits as a metric the time in millis spent while holding this timer, with deployment ID as dimensions. 
*/ public ActionTimer timerFor(ApplicationId id, String metricName) { return new ActionTimer(metric, clock, id, configserverConfig.environment(), configserverConfig.region(), metricName); } public static class ActionTimer implements AutoCloseable { private final Metric metric; private final Clock clock; private final ApplicationId id; private final String environment; private final String region; private final String name; private final Instant start; private ActionTimer(Metric metric, Clock clock, ApplicationId id, String environment, String region, String name) { this.metric = metric; this.clock = clock; this.id = id; this.environment = environment; this.region = region; this.name = name; this.start = clock.instant(); } @Override public void close() { metric.set(name, Duration.between(start, clock.instant()).toMillis(), metric.createContext(Map.of("applicationId", id.toFullString(), "tenantName", id.tenant().value(), "app", id.application().value() + "." + id.instance().value(), "zone", environment + "." + region))); } } public static class Activation { private final CompletionWaiter waiter; private final OptionalLong sourceSessionId; public Activation(CompletionWaiter waiter, Optional<Session> sourceSession) { this.waiter = waiter; this.sourceSessionId = sourceSession.map(s -> OptionalLong.of(s.getSessionId())).orElse(OptionalLong.empty()); } public void awaitCompletion(Duration timeout) { waiter.awaitCompletion(timeout); } /** The session ID this activation was based on, if any */ public OptionalLong sourceSessionId() { return sourceSessionId; } } private static class NoopCompletionWaiter implements CompletionWaiter { @Override public void awaitCompletion(Duration timeout) {} @Override public void notifyCompletion() {} } }
package com.yahoo.vespa.config.server.application;

import ai.vespa.http.DomainName;
import ai.vespa.http.HttpURL;
import ai.vespa.http.HttpURL.Path;
import ai.vespa.http.HttpURL.Query;
import ai.vespa.http.HttpURL.Scheme;
import com.google.inject.Inject;
import com.yahoo.config.model.api.HostInfo;
import com.yahoo.config.model.api.PortInfo;
import com.yahoo.config.model.api.ServiceInfo;
import com.yahoo.container.jdisc.HttpResponse;
import com.yahoo.vespa.config.server.http.HttpFetcher;
import com.yahoo.vespa.config.server.http.HttpFetcher.Params;
import com.yahoo.vespa.config.server.http.NotFoundException;
import com.yahoo.vespa.config.server.http.SimpleHttpFetcher;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.Charset;
import java.util.List;
import java.util.logging.Logger;

import static java.nio.charset.StandardCharsets.UTF_8;

/**
 * Proxies HTTP GET requests to the HTTP state port of a service inside an application's model.
 */
public class HttpProxy {

    private static final Logger logger = Logger.getLogger(HttpProxy.class.getName());

    private final HttpFetcher fetcher;

    @Inject
    public HttpProxy() { this(new SimpleHttpFetcher()); }

    public HttpProxy(HttpFetcher fetcher) {
        this.fetcher = fetcher;
    }

    /** Fetches path?query from the given service on the given host, without URL rewriting. */
    public HttpResponse get(Application application, String hostName, String serviceType, Path path, Query query) {
        return get(application, hostName, serviceType, path, query, null);
    }

    /**
     * Fetches path?query from the given service on the given host. When forwardedUrl is non-null,
     * occurrences of the internal request URL in the response body are rewritten to forwardedUrl.
     *
     * @throws NotFoundException if the host, the service, or its HTTP state port cannot be found
     */
    public HttpResponse get(Application application, String hostName, String serviceType, Path path, Query query,
                            HttpURL forwardedUrl) {
        HostInfo host = application.getModel().getHosts().stream()
                .filter(hostInfo -> hostInfo.getHostname().equals(hostName))
                .findFirst()
                .orElseThrow(() -> new NotFoundException("Failed to find host " + hostName));

        ServiceInfo service = host.getServices().stream()
                .filter(serviceInfo -> serviceType.equals(serviceInfo.getServiceType()))
                .findFirst()
                .orElseThrow(() -> new NotFoundException("Failed to find any service of type " + serviceType
                                                         + " on host " + hostName));

        // "http" and "state" seems to uniquely identify an interesting HTTP port on each service
        PortInfo port = service.getPorts().stream()
                .filter(portInfo -> portInfo.getTags().containsAll(List.of("http", "state")))
                .findFirst()
                .orElseThrow(() -> new NotFoundException("Failed to find HTTP state port"));

        HttpURL url = HttpURL.create(Scheme.http, DomainName.of(host.getHostname()), port.getPort(), path, query);
        HttpResponse response = fetcher.get(new Params(29_000), // 29 sec (30 sec on controller)
                                            url.asURI());
        return forwardedUrl == null ? response : new UrlRewritingProxyResponse(response, url, forwardedUrl);
    }

    /** Wraps a response, replacing occurrences of the internal request URL in its body with the forwarded URL. */
    static class UrlRewritingProxyResponse extends HttpResponse {

        final HttpResponse wrapped;
        final String pattern;     // fixed: was misspelt "patten"
        final String replacement;

        public UrlRewritingProxyResponse(HttpResponse wrapped, HttpURL requestUrl, HttpURL forwardedUrl) {
            super(wrapped.getStatus());
            this.wrapped = wrapped;
            // Strip query and trailing slash so the literal replace matches URL prefixes in the body
            this.pattern = requestUrl.withPath(requestUrl.path().withoutTrailingSlash()).withQuery(Query.empty()).asURI().toString();
            this.replacement = forwardedUrl.withPath(forwardedUrl.path().withoutTrailingSlash()).withQuery(Query.empty()).asURI().toString();
        }

        @Override
        public void render(OutputStream outputStream) throws IOException {
            // Buffer the whole body first, so the textual replace always sees complete URLs
            ByteArrayOutputStream buffer = new ByteArrayOutputStream();
            wrapped.render(buffer);
            outputStream.write(buffer.toString(Charset.forName(wrapped.getCharacterEncoding()))
                                     .replace(pattern, replacement)
                                     .getBytes(UTF_8));
        }

        @Override
        public String getContentType() {
            return wrapped.getContentType();
        }

    }

}
package org.jboss.as.controller.util;

import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.ACCESS_TYPE;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.ALTERNATIVES;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.ATTRIBUTES;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.CHILDREN;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.DEFAULT;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.DEPRECATED;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.EXPRESSIONS_ALLOWED;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.HOST;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.MANAGEMENT_MAJOR_VERSION;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.MANAGEMENT_MICRO_VERSION;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.MANAGEMENT_MINOR_VERSION;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.MODEL_DESCRIPTION;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.NILLABLE;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.OPERATIONS;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.REPLY_PROPERTIES;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.REQUEST_PROPERTIES;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.STORAGE;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.SUBSYSTEM;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.TYPE;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.VALUE_TYPE;

import java.io.File;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;

import org.jboss.as.controller.ModelVersion;
import org.jboss.as.controller.PathAddress;
import org.jboss.as.controller.PathElement;
import org.jboss.as.controller.descriptions.ModelDescriptionConstants;
import org.jboss.dmr.ModelNode;
import org.jboss.dmr.Property;

/**
 * Compares the current running server with the passed in version. The passed in version
 * must have model and resource definition dumps in {@code src/test/resources/legacy-models} as
 * outlined in {@link DumpResourceDefinitionUtil} and {@link GrabModelVersionsUtil}.
 * <p>
 * To run this a big heap size is needed, e.g. -Xmx1024m
 *
 * @author <a href="kabir.khan@jboss.com">Kabir Khan</a>
 */
public class CompareModelVersionsUtil {

    // When true, resources are compared even if their management versions differ
    private final boolean compareDifferentVersions;
    private final String targetVersion;
    private final ModelNode legacyModelVersions;
    private final ModelNode legacyResourceDefinitions;
    private final ModelNode currentModelVersions;
    private final ModelNode currentResourceDefinitions;

    private CompareModelVersionsUtil(boolean compareDifferentVersions,
                                     String targetVersion,
                                     ModelNode legacyModelVersions,
                                     ModelNode legacyResourceDefinitions,
                                     ModelNode currentModelVersions,
                                     ModelNode currentResourceDefinitions) throws Exception {
        this.compareDifferentVersions = compareDifferentVersions;
        this.targetVersion = targetVersion;
        this.legacyModelVersions = legacyModelVersions;
        this.legacyResourceDefinitions = legacyResourceDefinitions;
        this.currentModelVersions = currentModelVersions;
        this.currentResourceDefinitions = currentResourceDefinitions;
    }

    /**
     * Entry point. Reads the legacy version, resource type and diff policy from system properties
     * or interactively from stdin, loads the legacy and current model dumps, and compares them.
     */
    public static void main(String[] args) throws Exception {
        String version = System.getProperty("jboss.as.compare.version", null);
        String diff = System.getProperty("jboss.as.compare.different.versions", null);
        String type = System.getProperty("jboss.as.compare.type", null);

        if (version == null) {
            System.out.print("Enter legacy AS version: ");
            version = readInput(null);
        }
        System.out.println("Using target model: " + version);

        if (type == null) {
            System.out.print("Enter type [S](standalone)/H(host)/D(domain):");
            type = readInput("S");
        }
        final ResourceType resourceType;
        if (ResourceType.STANDALONE.toString().startsWith(type.toUpperCase())) {
            resourceType = ResourceType.STANDALONE;
        } else if (ResourceType.HOST.toString().startsWith(type.toUpperCase())) {
            resourceType = ResourceType.HOST;
        } else if (ResourceType.DOMAIN.toString().startsWith(type.toUpperCase())) {
            resourceType = ResourceType.DOMAIN;
        } else {
            throw new IllegalArgumentException("Could not determine type for: '" + type + "'");
        }

        if (diff == null) {
            System.out.print("Report on differences in the model when the management versions are different? y/[n]: ");
            diff = readInput("n").toLowerCase();
        }
        boolean compareDifferentVersions;
        // Fixed: the two confirmation messages were swapped relative to the flag being set
        if (diff.equals("n")) {
            System.out.println("Not reporting on differences in the model when the management versions are different");
            compareDifferentVersions = false;
        } else if (diff.equals("y")) {
            System.out.println("Reporting on differences in the model when the management versions are different");
            compareDifferentVersions = true;
        } else {
            throw new IllegalArgumentException("Please enter 'y' or 'n'");
        }

        System.out.println("Loading legacy model versions for " + version + "....");
        ModelNode legacyModelVersions = Tools.loadModelNodeFromFile(new File("target/test-classes/legacy-models/standalone-model-versions-" + version + ".dmr"));
        System.out.println("Loaded legacy model versions");

        System.out.println("Loading legacy resource descriptions for " + version + "....");
        ModelNode legacyResourceDefinitions = Tools.loadModelNodeFromFile(
                new File("target/test-classes/legacy-models/" + resourceType.toString().toLowerCase() + "-resource-definition-" + version + ".dmr"));
        System.out.println("Loaded legacy resource descriptions");

        System.out.println("Loading model versions for currently running server...");
        ModelNode currentModelVersions = Tools.getCurrentModelVersions();
        System.out.println("Loaded current model versions");

        System.out.println("Loading resource descriptions for currently running server...");
        final ModelNode currentResourceDefinitions;
        if (resourceType == ResourceType.STANDALONE) {
            currentResourceDefinitions = Tools.getCurrentRunningResourceDefinition(PathAddress.EMPTY_ADDRESS);
        } else if (resourceType == ResourceType.DOMAIN) {
            currentResourceDefinitions = Tools.getCurrentRunningDomainResourceDefinition();
        } else {
            currentResourceDefinitions = Tools.getCurrentRunningResourceDefinition(PathAddress.pathAddress(PathElement.pathElement(HOST, "master")));
        }
        System.out.println("Loaded current resource descriptions");

        CompareModelVersionsUtil compareModelVersionsUtil = new CompareModelVersionsUtil(compareDifferentVersions, version,
                legacyModelVersions, legacyResourceDefinitions, currentModelVersions, currentResourceDefinitions);

        System.out.println("Starting comparison of the current....\n");
        compareModelVersionsUtil.compareModels();
        System.out.println("\nDone comparison!");
    }

    /**
     * Reads one trimmed line from stdin. Empty input yields defaultAnswer when given,
     * otherwise an IllegalArgumentException.
     */
    private static String readInput(String defaultAnswer) throws IOException {
        StringBuilder sb = new StringBuilder();
        char c = (char) System.in.read();
        while (c != '\n') {
            sb.append(c);
            c = (char) System.in.read();
        }
        String s = sb.toString().trim();
        if (s.equals("")) {
            if (defaultAnswer != null) {
                return defaultAnswer;
            }
            throw new IllegalArgumentException("Please enter a valid answer");
        }
        return s;
    }

    private void compareModels() {
        compareCoreModels();
        compareSubsystemModels();
    }

    /** Compares the core (non-subsystem) part of the resource trees. */
    private void compareCoreModels() {
        System.out.println("====== Comparing core models ======");
        ResourceDefinition currentDefinition = new ResourceDefinition(trimSubsystem(currentResourceDefinitions), currentModelVersions);
        ResourceDefinition legacyDefinition = new ResourceDefinition(trimSubsystem(legacyResourceDefinitions), legacyModelVersions);
        CompareContext context = new CompareContext(PathAddress.EMPTY_ADDRESS, PathAddress.EMPTY_ADDRESS, true, currentDefinition, legacyDefinition);
        if (!context.continueWithCheck()) {
            return;
        }
        compareModel(context);
    }

    /** Compares each subsystem present in both trees, each rooted at its own subsystem address. */
    private void compareSubsystemModels() {
        System.out.println("====== Comparing subsystem models ======");
        ResourceDefinition rootCurrentDefinition = new ResourceDefinition(trimNonSubsystem(currentResourceDefinitions), currentModelVersions);
        ResourceDefinition rootLegacyDefinition = new ResourceDefinition(trimNonSubsystem(legacyResourceDefinitions), legacyModelVersions);

        Map<String, ModelNode> currentSubsystems = rootCurrentDefinition.getChildren(SUBSYSTEM);
        Map<String, ModelNode> legacySubsystems = rootLegacyDefinition.getChildren(SUBSYSTEM);

        CompareContext context = new CompareContext(PathAddress.EMPTY_ADDRESS, PathAddress.EMPTY_ADDRESS, true, rootCurrentDefinition, rootLegacyDefinition);
        compareKeySetsAndRemoveMissing(context, "subsystems", currentSubsystems, legacySubsystems);

        for (Map.Entry<String, ModelNode> legacyEntry : legacySubsystems.entrySet()) {
            PathAddress subsystemAddress = PathAddress.pathAddress(PathElement.pathElement(SUBSYSTEM, legacyEntry.getKey()));
            ResourceDefinition currentDefinition = new ResourceDefinition(currentSubsystems.get(legacyEntry.getKey()), currentModelVersions);
            ResourceDefinition legacyDefinition = new ResourceDefinition(legacyEntry.getValue(), legacyModelVersions);
            context = new CompareContext(subsystemAddress, subsystemAddress, false, currentDefinition, legacyDefinition);
            if (!context.continueWithCheck()) {
                continue;
            }
            compareModel(context);
        }
    }

    /** Returns a copy of the definition with the subsystem children removed. */
    private ModelNode trimSubsystem(ModelNode definition) {
        ModelNode def = definition.clone();
        def.get(CHILDREN).remove(SUBSYSTEM);
        return def;
    }

    /** Returns a copy of the definition with all non-subsystem children removed. */
    private ModelNode trimNonSubsystem(ModelNode definition) {
        ModelNode def = definition.clone();
        // Iterate over a key snapshot so removal does not disturb the live key view.
        // Fixed: the original removed from the root node instead of from CHILDREN, leaving
        // the non-subsystem child types in place (compare trimSubsystem above).
        for (String key : new HashSet<String>(def.get(CHILDREN).keys())) {
            if (!key.equals(SUBSYSTEM)) {
                def.get(CHILDREN).remove(key);
            }
        }
        return def;
    }

    private void compareModel(CompareContext context) {
        compareAttributes(context);
        compareOperations(context);
        compareChildren(context);
    }

    private void compareAttributes(CompareContext context) {
        Map<String, ModelNode> legacyAttributes = context.getLegacyDefinition().getAttributes();
        Map<String, ModelNode> currentAttributes = context.getCurrentDefinition().getAttributes();

        compareKeySetsAndRemoveMissing(context, "attributes", currentAttributes, legacyAttributes);
        //TODO compare types, expressions etc.

        for (Map.Entry<String, ModelNode> legacyEntry : legacyAttributes.entrySet()) {
            ModelNode legacyAttribute = legacyEntry.getValue();
            ModelNode currentAttribute = currentAttributes.get(legacyEntry.getKey());

            String id = "attribute '" + legacyEntry.getKey() + "'";
            compareAttributeOrOperationParameter(context, id, currentAttribute, legacyAttribute);
            compareAccessType(context, id, currentAttribute, legacyAttribute);
            compareStorage(context, id, currentAttribute, legacyAttribute);
            compareDefault(context, id, currentAttribute, legacyAttribute);
        }
    }

    private void compareOperations(CompareContext context) {
        Map<String, ModelNode> legacyOperations = context.getLegacyDefinition().getOperations();
        Map<String, ModelNode> currentOperations = context.getCurrentDefinition().getOperations();

        compareKeySetsAndRemoveMissing(context, "operations", currentOperations, legacyOperations);

        for (Map.Entry<String, ModelNode> legacyOpEntry : legacyOperations.entrySet()) {
            String operationName = legacyOpEntry.getKey();
            ModelNode legacyOperation = legacyOpEntry.getValue();
            ModelNode currentOperation = currentOperations.get(operationName);

            Map<String, ModelNode> legacyParameters = context.getLegacyDefinition().getOperationParameters(operationName);
            Map<String, ModelNode> currentParameters = context.getCurrentDefinition().getOperationParameters(operationName);

            compareKeySetsAndRemoveMissing(context, "parameters for operation '" + operationName + "'", currentParameters, legacyParameters);
            for (Map.Entry<String, ModelNode> legacyParamEntry : legacyParameters.entrySet()) {
                ModelNode legacyParameter = legacyParamEntry.getValue();
                ModelNode currentParameter = currentParameters.get(legacyParamEntry.getKey());

                String id = "parameter '" + legacyParamEntry.getKey() + "' of operation '" + operationName + "'";
                compareAttributeOrOperationParameter(context, id, currentParameter, legacyParameter);
            }

            ModelNode legacyReply = legacyOperation.get(REPLY_PROPERTIES);
            ModelNode currentReply = currentOperation.get(REPLY_PROPERTIES);
            compareAttributeOrOperationParameter(context, "'reply-properties' for operation '" + operationName + "'", currentReply, legacyReply);
        }
    }

    private void compareAttributeOrOperationParameter(CompareContext context, String id, ModelNode current, ModelNode legacy) {
        compareType(context, id, current, legacy);
        compareValueType(context, id, current, legacy);
        compareNillable(context, id, current, legacy);
        compareExpressionsAllowed(context, id, current, legacy);
        compareAlternatives(context, id, current, legacy);
        compareDeprecated(context, id, current, legacy);
        //TODO compare anything else?
    }

    private void compareType(CompareContext context, String id, ModelNode current, ModelNode legacy) {
        if (!current.get(TYPE).equals(legacy.get(TYPE))) {
            context.println("Different 'type' for " + id + ". Current: " + current.get(TYPE) + "; legacy: " + legacy.get(TYPE));
        }
    }

    private void compareValueType(CompareContext context, String id, ModelNode current, ModelNode legacy) {
        ModelNode currentValueType = current.get(VALUE_TYPE);
        ModelNode legacyValueType = legacy.get(VALUE_TYPE);
        if (!currentValueType.isDefined() && !legacyValueType.isDefined()) {
            return;
        }
        if (isType(legacyValueType) || isType(currentValueType)) {
            // Simple value-type (a plain type token): compare directly
            if (!currentValueType.equals(legacyValueType)) {
                context.println("Different 'value-type' for " + id + ". Current: " + current.get(VALUE_TYPE) + "; legacy: " + legacy.get(VALUE_TYPE));
            }
        } else {
            // Complex value-type: recurse into each keyed entry
            Map<String, ModelNode> legacyValueTypes = createMapIndexedByKey(legacyValueType);
            Map<String, ModelNode> currentValueTypes = createMapIndexedByKey(currentValueType);

            compareKeySetsAndRemoveMissing(context, "value-type for " + id, currentValueTypes, legacyValueTypes);
            for (Map.Entry<String, ModelNode> entry : currentValueTypes.entrySet()) {
                ModelNode currentEntry = entry.getValue();
                ModelNode legacyEntry = legacyValueTypes.get(entry.getKey());
                compareAttributeOrOperationParameter(context, "value-type key '" + entry.getKey() + "' for " + id, currentEntry, legacyEntry);
            }
        }
    }

    private void compareNillable(CompareContext context, String id, ModelNode current, ModelNode legacy) {
        boolean currentNillable = current.get(NILLABLE).asBoolean(false);
        boolean legacyNillable = legacy.get(NILLABLE).asBoolean(false);
        if (currentNillable != legacyNillable) {
            context.println("Different 'nillable' for " + id + ". Current: " + currentNillable + "; legacy: " + legacyNillable);
        }
    }

    private void compareExpressionsAllowed(CompareContext context, String id, ModelNode current, ModelNode legacy) {
        // Renamed locals (were copy-pasted as "...Nillable")
        boolean currentExpressionsAllowed = current.get(EXPRESSIONS_ALLOWED).asBoolean(false);
        boolean legacyExpressionsAllowed = legacy.get(EXPRESSIONS_ALLOWED).asBoolean(false);
        if (currentExpressionsAllowed != legacyExpressionsAllowed) {
            context.println("Different 'expressions-allowed' for " + id + ". Current: " + currentExpressionsAllowed + "; legacy: " + legacyExpressionsAllowed);
        }
    }

    private void compareAccessType(CompareContext context, String id, ModelNode current, ModelNode legacy) {
        if (!current.get(ACCESS_TYPE).equals(legacy.get(ACCESS_TYPE))) {
            context.println("Different 'access-type' for " + id + ". Current: " + current.get(ACCESS_TYPE) + "; legacy: " + legacy.get(ACCESS_TYPE));
        }
    }

    private void compareStorage(CompareContext context, String id, ModelNode current, ModelNode legacy) {
        if (!current.get(STORAGE).equals(legacy.get(STORAGE))) {
            context.println("Different 'storage' for " + id + ". Current: " + current.get(STORAGE) + "; legacy: " + legacy.get(STORAGE));
        }
    }

    private void compareDefault(CompareContext context, String id, ModelNode current, ModelNode legacy) {
        if (!current.get(DEFAULT).equals(legacy.get(DEFAULT))) {
            context.println("Different 'default' for " + id + ". Current: " + current.get(DEFAULT) + "; legacy: " + legacy.get(DEFAULT));
        }
    }

    private void compareDeprecated(CompareContext context, String id, ModelNode current, ModelNode legacy) {
        if (!current.get(DEPRECATED).equals(legacy.get(DEPRECATED))) {
            context.println("Different 'deprecated' for " + id + ". Current: " + current.get(DEPRECATED) + "; legacy: " + legacy.get(DEPRECATED));
        }
    }

    /** Returns true if the node holds a simple DMR type token. */
    private boolean isType(ModelNode node) {
        if (!node.isDefined()) {
            return false;
        }
        try {
            node.asType();
            return true;
        } catch (Exception e) {
            return false;
        }
    }

    private Map<String, ModelNode> createMapIndexedByKey(ModelNode node) {
        Map<String, ModelNode> map = new HashMap<String, ModelNode>();
        if (!node.isDefined()) {
            return map;
        }
        for (Property prop : node.asPropertyList()) {
            map.put(prop.getName(), prop.getValue());
        }
        return map;
    }

    private void compareAlternatives(CompareContext context, String id, ModelNode current, ModelNode legacy) {
        if (!current.get(ALTERNATIVES).equals(legacy.get(ALTERNATIVES))) {
            context.println("Different 'alternatives' for " + id + ". Current: " + current.get(ALTERNATIVES) + "; legacy: " + legacy.get(ALTERNATIVES));
        }
    }

    /** Recursively compares all child resources present in both definitions. */
    private void compareChildren(CompareContext context) {
        Set<String> legacyChildTypes = context.getLegacyDefinition().getChildTypes();
        Set<String> currentChildTypes = context.getCurrentDefinition().getChildTypes();

        compareSetsAndRemoveMissing(context, "child types", currentChildTypes, legacyChildTypes);

        for (String type : legacyChildTypes) {
            Map<String, ModelNode> legacyChildren = context.getLegacyDefinition().getChildren(type);
            Map<String, ModelNode> currentChildren = context.getCurrentDefinition().getChildren(type);

            compareKeySetsAndRemoveMissing(context, "child names for type=" + type, currentChildren, legacyChildren);

            for (Map.Entry<String, ModelNode> legacyChildEntry : legacyChildren.entrySet()) {
                String name = legacyChildEntry.getKey();
                ModelNode legacyChildDescription = legacyChildEntry.getValue();
                ModelNode currentChildDescription = currentChildren.get(name);
                CompareContext childContext;
                try {
                    childContext = new CompareContext(
                            context.getRootAddress(),
                            context.getPathAddress().append(PathElement.pathElement(type, name)),
                            context.isCore(),
                            new ResourceDefinition(currentChildDescription, currentModelVersions),
                            new ResourceDefinition(legacyChildDescription, legacyModelVersions));
                } catch (RuntimeException e) {
                    System.out.println(context.getPathAddress() + " + " + type + "=" + name);
                    throw e;
                }
                compareModel(childContext);
            }
        }
    }

    private void compareKeySetsAndRemoveMissing(CompareContext context, String type, Map<String, ModelNode> currentMap, Map<String, ModelNode> legacyMap) {
        compareSetsAndRemoveMissing(context, type, currentMap.keySet(), legacyMap.keySet());
    }

    /** Reports names present on only one side and removes them from both sets (and backing maps). */
    private void compareSetsAndRemoveMissing(CompareContext context, String type, Set<String> currentSet, Set<String> legacySet) {
        Set<String> extraInLegacy = getMissingNames(context, legacySet, currentSet);
        Set<String> extraInCurrent = getMissingNames(context, currentSet, legacySet);

        if (extraInLegacy.size() > 0 || extraInCurrent.size() > 0) {
            context.println("Missing " + type + " in current: " + extraInLegacy + "; missing in legacy " + extraInCurrent);
            if (extraInCurrent.size() > 0) {
                currentSet.removeAll(extraInCurrent);
            }
            if (extraInLegacy.size() > 0) {
                legacySet.removeAll(extraInLegacy);
            }
        }
    }

    private Set<String> getMissingNames(CompareContext context, Set<String> possiblyMissing, Set<String> names) {
        Set<String> missing = new HashSet<String>(possiblyMissing);
        for (String name : names) {
            missing.remove(name);
        }
        //7.1.2 did not have MANAGEMENT_MICRO_VERSION don't bother reporting that
        if (context.isVersionLevel() && missing.contains(MANAGEMENT_MICRO_VERSION)
                && names.contains(MANAGEMENT_MAJOR_VERSION) && names.contains(MANAGEMENT_MINOR_VERSION)) {
            missing.remove(MANAGEMENT_MICRO_VERSION);
        }
        return missing;
    }

    /** Carries the addresses and both definitions for one resource comparison, and lazily prints its header. */
    private class CompareContext {
        final PathAddress rootAddress;
        final PathAddress pathAddress;
        final boolean core;
        final ResourceDefinition legacyDefinition;
        final ResourceDefinition currentDefinition;
        boolean outputPath;

        CompareContext(PathAddress rootAddress, PathAddress pathAddress, boolean core,
                       ResourceDefinition currentDefinition, ResourceDefinition legacyDefinition) {
            this.rootAddress = rootAddress;
            this.pathAddress = pathAddress;
            this.core = core;
            this.currentDefinition = currentDefinition;
            this.legacyDefinition = legacyDefinition;
        }

        PathAddress getRootAddress() {
            return rootAddress;
        }

        PathAddress getPathAddress() {
            return pathAddress;
        }

        boolean isVersionLevel() {
            return rootAddress.equals(pathAddress);
        }

        boolean isCore() {
            return core;
        }

        ResourceDefinition getLegacyDefinition() {
            return legacyDefinition;
        }

        ResourceDefinition getCurrentDefinition() {
            return currentDefinition;
        }

        /**
         * Prints the version header and decides whether to proceed: always when versions match,
         * otherwise only when compareDifferentVersions is set. (Renamed from the misspelt
         * "continuteWithCheck"; all callers are within this class.)
         */
        boolean continueWithCheck() {
            if (!isVersionLevel()) {
                return true;
            }
            ModelVersion currentVersion = getModelVersion(currentDefinition);
            ModelVersion legacyVersion = getModelVersion(legacyDefinition);
            System.out.println("====== Resource root address: " + formatAddressOneLine(pathAddress)
                    + " - Current version: " + currentVersion + "; legacy version: " + legacyVersion + " =======");
            if (!legacyVersion.equals(currentVersion) && compareDifferentVersions) {
                return true;
            } else if (legacyVersion.equals(currentVersion)) {
                return true;
            } else {
                System.out.println("Skipping check of resource and children");
                return false;
            }
        }

        private ModelVersion getModelVersion(ResourceDefinition definition) {
            if (core) {
                return definition.getCoreModelVersion();
            } else {
                return definition.getSubsystemVersion(pathAddress);
            }
        }

        private String formatAddressOneLine(PathAddress addr) {
            StringBuilder sb = new StringBuilder("[");
            boolean first = true;
            for (PathElement element : addr) {
                if (first) {
                    first = false;
                } else {
                    sb.append(",");
                }
                sb.append(element);
            }
            sb.append("]");
            return sb.toString();
        }

        /** Prints msg, preceded (once) by the relative address of the offending resource. */
        void println(String msg) {
            if (!outputPath) {
                outputPath = true;
                PathAddress relative = pathAddress.subAddress(rootAddress.size());
                System.out.println("--- Problems for relative address to root " + formatAddressOneLine(relative) + ":");
            }
            System.out.println(msg);
        }
    }

    /** Read-only view over one resource description node plus the version dump it belongs to. */
    private static class ResourceDefinition {
        final ModelNode description;
        final ModelNode versions;

        ResourceDefinition(ModelNode description, ModelNode versions) {
            this.description = description;
            this.versions = versions;
        }

        Map<String, ModelNode> getAttributes() {
            return getSortedEntryMap(description, ATTRIBUTES);
        }

        Map<String, ModelNode> getOperations() {
            return getSortedEntryMap(description, OPERATIONS);
        }

        Set<String> getChildTypes() {
            return getSortedEntryMap(description, CHILDREN).keySet();
        }

        Map<String, ModelNode> getChildren(String type) {
            return getSortedEntryMap(description.get(CHILDREN, type), MODEL_DESCRIPTION);
        }

        Map<String, ModelNode> getOperationParameters(String opName) {
            return getSortedEntryMap(description.get(OPERATIONS, opName), REQUEST_PROPERTIES);
        }

        private Map<String, ModelNode> getSortedEntryMap(ModelNode parent, String name) {
            if (!parent.hasDefined(name)) {
                return Collections.emptyMap();
            }
            Map<String, ModelNode> sorted = new TreeMap<String, ModelNode>();
            for (Property prop : parent.get(name).asPropertyList()) {
                sorted.put(prop.getName(), prop.getValue());
            }
            return sorted;
        }

        private ModelVersion getCoreModelVersion() {
            return Tools.createModelVersion(versions.get(Tools.CORE, Tools.STANDALONE));
        }

        private ModelVersion getSubsystemVersion(PathAddress address) {
            for (PathElement element : address) {
                if (element.getKey().equals(SUBSYSTEM)) {
                    return Tools.createModelVersion(versions.get(SUBSYSTEM, element.getValue()));
                }
            }
            throw new IllegalArgumentException("Could not find subsystem version for " + address);
        }
    }
}
package nu.yona.server.subscriptions.service; import java.time.Duration; import java.time.LocalDateTime; import java.util.UUID; import javax.transaction.Transactional; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import nu.yona.server.exceptions.PinResetRequestConfirmationException; import nu.yona.server.properties.YonaProperties; import nu.yona.server.sms.SmsService; import nu.yona.server.subscriptions.entities.ConfirmationCode; import nu.yona.server.subscriptions.entities.User; import nu.yona.server.util.TimeUtil; @Service public class PinResetRequestService { private static final Logger logger = LoggerFactory.getLogger(PinResetRequestService.class); private enum Moment { IMMEDIATELY, DELAYED } @Autowired private UserService userService; @Autowired private YonaProperties yonaProperties; @Transactional public void requestPinReset(UUID userId) { User userEntity = userService.getUserEntityById(userId); logger.info("User with mobile number '{}' and ID '{}' requested a pin reset confirmation code", userEntity.getMobileNumber(), userId); ConfirmationCode confirmationCode = createConfirmationCode(Moment.DELAYED); setConfirmationCode(userEntity, confirmationCode); if (confirmationCode.getConfirmationCode() != null) { sendConfirmationCodeTextMessage(userEntity, confirmationCode); } } @Transactional public void verifyPinResetConfirmationCode(UUID userId, String userProvidedConfirmationCode) { User userEntity = userService.getUserEntityById(userId); logger.info("User with mobile number '{}' and ID '{}' requested to verify the pin reset confirmation code", userEntity.getMobileNumber(), userId); ConfirmationCode confirmationCode = userEntity.getPinResetConfirmationCode(); if ((confirmationCode == null) || isExpired(confirmationCode)) { throw PinResetRequestConfirmationException.confirmationCodeNotSet(userEntity.getMobileNumber()); } int remainingAttempts = 
yonaProperties.getSecurity().getConfirmationCodeMaxAttempts() - confirmationCode.getAttempts(); if (remainingAttempts <= 0) { throw PinResetRequestConfirmationException.tooManyAttempts(userEntity.getMobileNumber()); } if (!userProvidedConfirmationCode.equals(confirmationCode.getConfirmationCode())) { userService.registerFailedAttempt(userEntity, confirmationCode); throw PinResetRequestConfirmationException.confirmationCodeMismatch(userEntity.getMobileNumber(), userProvidedConfirmationCode, remainingAttempts - 1); } } @Transactional public void resendPinResetConfirmationCode(UUID userId) { User userEntity = userService.getUserEntityById(userId); logger.info("User with mobile number '{}' and ID '{}' requested to resend the pin reset confirmation code", userEntity.getMobileNumber(), userEntity.getId()); ConfirmationCode confirmationCode = createConfirmationCode(Moment.IMMEDIATELY); setConfirmationCode(userEntity, confirmationCode); sendConfirmationCodeTextMessage(userEntity, confirmationCode); } @Transactional public void clearPinResetRequest(UUID userId) { User userEntity = userService.getUserEntityById(userId); logger.info("User with mobile number '{}' and ID '{}' requested to clear the pin reset confirmation code", userEntity.getMobileNumber(), userEntity.getId()); setConfirmationCode(userEntity, null); } public void sendConfirmationCodeTextMessage(User userEntity, ConfirmationCode confirmationCode) { userService.sendConfirmationCodeTextMessage(userEntity.getMobileNumber(), confirmationCode, SmsService.TemplateName_PinResetRequestConfirmation); } private ConfirmationCode createConfirmationCode(Moment moment) { String confirmationCode = (moment == Moment.IMMEDIATELY) || yonaProperties.getSecurity().getPinResetRequestConfirmationCodeDelay().equals(Duration.ZERO) ? 
userService.generateConfirmationCode() : null; return ConfirmationCode.createInstance(confirmationCode); } private void setConfirmationCode(User userEntity, ConfirmationCode confirmationCode) { userEntity.setPinResetConfirmationCode(confirmationCode); User.getRepository().save(userEntity); } public boolean isExpired(ConfirmationCode confirmationCode) { LocalDateTime creationTime = confirmationCode.getCreationTime(); return creationTime.plus(yonaProperties.getSecurity().getPinResetRequestExpirationTime()).isBefore(TimeUtil.utcNow()); } }
package org.hisp.dhis.android.core.program; import org.hisp.dhis.android.core.common.ModelBuilder; public class ProgramStageModelBuilder extends ModelBuilder<ProgramStage, ProgramStageModel> { private final ProgramStageModel.Builder builder; ProgramStageModelBuilder(Program program) { this.builder = ProgramStageModel.builder() .program(program.uid()); } @Override public ProgramStageModel buildModel(ProgramStage programStage) { return builder .uid(programStage.uid()) .code(programStage.code()) .name(programStage.name()) .displayName(programStage.displayName()) .created(programStage.created()) .lastUpdated(programStage.lastUpdated()) .executionDateLabel(programStage.executionDateLabel()) .allowGenerateNextVisit(programStage.allowGenerateNextVisit()) .validCompleteOnly(programStage.validCompleteOnly()) .reportDateToUse(programStage.reportDateToUse()) .openAfterEnrollment(programStage.openAfterEnrollment()) .repeatable(programStage.repeatable()) .captureCoordinates(programStage.captureCoordinates()) .formType(programStage.formType()) .displayGenerateEventBox(programStage.displayGenerateEventBox()) .generatedByEnrollmentDate(programStage.generatedByEnrollmentDate()) .autoGenerateEvent(programStage.autoGenerateEvent()) .sortOrder(programStage.sortOrder()) .hideDueDate(programStage.hideDueDate()) .blockEntryForm(programStage.blockEntryForm()) .minDaysFromStart(programStage.minDaysFromStart()) .standardInterval(programStage.standardInterval()) .build(); } }
package org.eclipse.birt.data.engine.impl; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; import org.eclipse.birt.core.data.DataType; import org.eclipse.birt.core.data.ExpressionUtil; import org.eclipse.birt.data.engine.api.IBaseExpression; import org.eclipse.birt.data.engine.api.IBaseQueryDefinition; import org.eclipse.birt.data.engine.api.IComputedColumn; import org.eclipse.birt.data.engine.api.IConditionalExpression; import org.eclipse.birt.data.engine.api.IFilterDefinition; import org.eclipse.birt.data.engine.api.IGroupDefinition; import org.eclipse.birt.data.engine.api.IQueryDefinition; import org.eclipse.birt.data.engine.api.IQueryResults; import org.eclipse.birt.data.engine.api.IResultMetaData; import org.eclipse.birt.data.engine.api.ISortDefinition; import org.eclipse.birt.data.engine.api.querydefn.ComputedColumn; import org.eclipse.birt.data.engine.api.querydefn.ConditionalExpression; import org.eclipse.birt.data.engine.api.querydefn.FilterDefinition; import org.eclipse.birt.data.engine.api.querydefn.ScriptExpression; import org.eclipse.birt.data.engine.api.script.IDataSourceInstanceHandle; import org.eclipse.birt.data.engine.core.DataException; import org.eclipse.birt.data.engine.executor.BaseQuery; import org.eclipse.birt.data.engine.executor.DataSetCacheManager; import org.eclipse.birt.data.engine.executor.JointDataSetQuery; import org.eclipse.birt.data.engine.expression.ExpressionCompilerUtil; import org.eclipse.birt.data.engine.expression.ExpressionProcessor; import org.eclipse.birt.data.engine.i18n.ResourceConstants; import org.eclipse.birt.data.engine.impl.aggregation.AggregateTable; import org.eclipse.birt.data.engine.impl.group.GroupCalculatorFactory; import org.eclipse.birt.data.engine.odi.ICandidateQuery; import org.eclipse.birt.data.engine.odi.IDataSource; import 
org.eclipse.birt.data.engine.odi.IEventHandler; import org.eclipse.birt.data.engine.odi.IPreparedDSQuery; import org.eclipse.birt.data.engine.odi.IQuery; import org.eclipse.birt.data.engine.odi.IResultIterator; import org.eclipse.birt.data.engine.odi.IResultObjectEvent; import org.eclipse.birt.data.engine.script.OnFetchScriptHelper; import org.mozilla.javascript.Context; import org.mozilla.javascript.Scriptable; public abstract class QueryExecutor implements IQueryExecutor { private IBaseQueryDefinition baseQueryDefn; private AggregateTable aggrTable; // from PreparedQuery->PreparedDataSourceQuery->DataEngineImpl private Scriptable sharedScope; /** Externally provided query scope; can be null */ // from PreparedQuery->PreparedDataSourceQuery private Scriptable parentScope; // for query execution private Scriptable queryScope; private boolean isPrepared = false; private boolean isExecuted = false; private Map queryAppContext; /** Query nesting level, 1 - outermost query */ private int nestedLevel = 1; /** Runtime data source and data set used by this instance of executor */ protected DataSourceRuntime dataSource; protected DataSetRuntime dataSet; protected IDataSource odiDataSource; protected IQuery odiQuery; /** Outer query's results; null if this query is not nested */ protected IQueryService outerResults; private IResultIterator odiResult; private IExecutorHelper parentHelper; private List temporaryComputedColumns = new ArrayList( ); private static Logger logger = Logger.getLogger( DataEngineImpl.class.getName( ) ); /** * @param sharedScope * @param baseQueryDefn * @param aggrTable */ QueryExecutor( Scriptable sharedScope, IBaseQueryDefinition baseQueryDefn, AggregateTable aggrTable ) { this.sharedScope = sharedScope; this.baseQueryDefn = baseQueryDefn; this.aggrTable = aggrTable; } /** * Provide the actual DataSourceRuntime used for the query. 
* * @return */ abstract protected DataSourceRuntime findDataSource( ) throws DataException; /** * Create a new instance of data set runtime * * @return */ abstract protected DataSetRuntime newDataSetRuntime( ) throws DataException; /** * Create a new unopened odiDataSource given the data source runtime * definition * * @return */ abstract protected IDataSource createOdiDataSource( ) throws DataException; /** * Create an empty instance of odi query * * @return */ abstract protected IQuery createOdiQuery( ) throws DataException; /** * Prepares the ODI query */ protected void prepareOdiQuery( ) throws DataException { } /** * Executes the ODI query to reproduce a ODI result set * @param eventHandler * * @return */ abstract protected IResultIterator executeOdiQuery( IEventHandler eventHandler ) throws DataException; /** * @param context */ void setAppContext( Map context ) { queryAppContext = context; } /** * Prepare Executor so that it is ready to execute the query * * @param outerRts * @param targetScope * @throws DataException */ void prepareExecution( IQueryResults outerRts, Scriptable targetScope ) throws DataException { if ( isPrepared ) return; this.parentScope = targetScope; dataSource = findDataSource( ); if ( outerRts != null ) { outerResults = ( (IQueryService) outerRts ); if ( outerResults.isClosed( ) ) { // Outer result is closed; invalid throw new DataException( ResourceConstants.RESULT_CLOSED ); } this.nestedLevel = outerResults.getNestedLevel( ); // TODO: check helper is null IExecutorHelper helper = outerResults.getExecutorHelper( ); this.setParentExecutorHelper( helper ); } // Create the data set runtime // Since data set runtime contains the execution result, a new data set // runtime is needed for each execute dataSet = newDataSetRuntime( ); assert dataSet != null; openDataSource( ); // Run beforeOpen script now so the script can modify the DataSetRuntime properties dataSet.beforeOpen( ); // Let subclass create a new and empty intance of the 
appropriate // odi IQuery odiQuery = createOdiQuery( ); odiQuery.setDistinctValueFlag( dataSet.needDistinctValue( ) ); odiQuery.setExprProcessor( new ExpressionProcessor( dataSet ) ); populateOdiQuery( ); prepareOdiQuery( ); isPrepared = true; } /** * Open the required DataSource. This method should be called after * "dataSource" is initialized by findDataSource() method. * * @throws DataException */ protected void openDataSource( ) throws DataException { assert odiDataSource == null; // Open the underlying data source // dataSource = findDataSource( ); if ( dataSource != null ) { // TODO: potential bug if ( !dataSource.isOpen( ) || DataSetCacheManager.getInstance( ).doesLikeToCache( ) == true ) { // Data source is not open; create an Odi Data Source and open it // We should run the beforeOpen script now to give it a chance to modify // runtime data source properties dataSource.beforeOpen( ); // Let subclass create a new unopened odi data source odiDataSource = createOdiDataSource( ); // Passes thru the prepared query executor's // context to the new odi data source odiDataSource.setAppContext( queryAppContext ); // Open the odi data source dataSource.openOdiDataSource( odiDataSource ); dataSource.afterOpen( ); } else { // Use existing odiDataSource created for the data source runtime odiDataSource = dataSource.getOdiDataSource( ); // Passes thru the prepared query executor's // current context to existing data source odiDataSource.setAppContext( queryAppContext ); } } } /** * Populates odiQuery with this query's definitions * * @throws DataException */ protected void populateOdiQuery( ) throws DataException { assert odiQuery != null; assert this.baseQueryDefn != null; Context cx = Context.enter( ); try { // Set grouping populateGrouping( cx ); // Set sorting populateSorting( ); // set fetch event populateFetchEvent( cx ); // specify max rows the query should fetch odiQuery.setMaxRows( this.baseQueryDefn.getMaxRows( ) ); prepareCacheQuery( ); } finally { 
Context.exit( ); } } /** * TODO: enhance me, this is only a temp logic * Set temporary computed columns to DataSourceQuery where cache is used */ private void prepareCacheQuery( ) { if ( odiQuery instanceof org.eclipse.birt.data.engine.executor.dscache.DataSourceQuery && temporaryComputedColumns != null && temporaryComputedColumns.size( ) > 0 ) { ( (org.eclipse.birt.data.engine.executor.dscache.DataSourceQuery) odiQuery ).setTempComputedColumn( this.temporaryComputedColumns ); } } /** * Populate grouping to the query. * * @param cx * @throws DataException */ private void populateGrouping( Context cx ) throws DataException { List groups = this.baseQueryDefn.getGroups( ); if ( groups != null && !groups.isEmpty( ) ) { IQuery.GroupSpec[] groupSpecs = new IQuery.GroupSpec[groups.size( )]; Iterator it = groups.iterator( ); for ( int i = 0; it.hasNext( ); i++ ) { IGroupDefinition src = (IGroupDefinition) it.next( ); validateGroupExpression( src ); String expr = getGroupKeyExpression( src ); String groupName = populateGroupName( i, expr ); IQuery.GroupSpec dest = QueryExecutorUtil.groupDefnToSpec( cx, src, expr, groupName, -1 ); groupSpecs[i] = dest; this.temporaryComputedColumns.add( getComputedColumnInstance( cx, groupSpecs[i].getInterval( ), src, expr, groupName, dest ) ); } odiQuery.setGrouping( Arrays.asList( groupSpecs ) ); } } /** * Validating the group expression. * * @param src * @throws DataException */ private void validateGroupExpression( IGroupDefinition src ) throws DataException { if ( ( src.getKeyColumn( ) == null || src.getKeyColumn( ) .trim( ) .length( ) == 0 ) && ( src.getKeyExpression( ) == null || src.getKeyExpression( ) .trim( ) .length( ) == 0 ) ) throw new DataException( ResourceConstants.BAD_GROUP_EXPRESSION ); } /** * Populate the group name according to the given expression. 
* * @param i * @param expr * @return */ private String populateGroupName( int i, String expr ) { String groupName; if ( expr.trim( ).equalsIgnoreCase( "row[0]" ) || expr.trim( ).equalsIgnoreCase( "row._rowPosition" ) || expr.trim( ).equalsIgnoreCase( "dataSetRow[0]" ) || expr.trim( ) .equalsIgnoreCase( "dataSetRow._rowPosition" ) ) { groupName = "_{$TEMP_GROUP_" + i + "ROWID$}_"; } else { groupName = "_{$TEMP_GROUP_" + i + "$}_"; } return groupName; } /** * Get the computed column instance according to the group type.If group has * interval, return GroupComputedColumn, otherwise return normal computed * column. * * @param cx * @param groupSpecs * @param i * @param src * @param expr * @param groupName * @param dest * @return * @throws DataException */ private IComputedColumn getComputedColumnInstance( Context cx, int interval, IGroupDefinition src, String expr, String groupName, IQuery.GroupSpec dest ) throws DataException { if ( ( dest.getInterval( ) != IGroupDefinition.NO_INTERVAL ) && ( dest.getIntervalRange( ) != 0 ) ) { return new GroupComputedColumn( groupName, expr, QueryExecutorUtil.getTempComputedColumnType( interval ), GroupCalculatorFactory.getGroupCalculator( src.getInterval( ), getColumnDataType( cx, expr ), src.getIntervalStart( ), src.getIntervalRange( ) ) ); } else { return new ComputedColumn( groupName, expr, QueryExecutorUtil.getTempComputedColumnType( interval ) ); } } /** * Populate the sortings in a query. 
* * @throws DataException */ private void populateSorting( ) throws DataException { List sorts = this.baseQueryDefn.getSorts( ); if ( sorts != null && !sorts.isEmpty( ) ) { IQuery.SortSpec[] sortSpecs = new IQuery.SortSpec[sorts.size( )]; Iterator it = sorts.iterator( ); for ( int i = 0; it.hasNext( ); i++ ) { ISortDefinition src = (ISortDefinition) it.next( ); int sortIndex = -1; String sortKey = src.getColumn( ); if ( sortKey == null ) sortKey = src.getExpression( ).getText( ); else { sortKey = getColumnRefExpression( sortKey ); } temporaryComputedColumns.add( new ComputedColumn( "_{$TEMP_SORT_" + i + "$}_", sortKey, DataType.ANY_TYPE ) ); sortIndex = -1; sortKey = String.valueOf( "_{$TEMP_SORT_" + i + "$}_" ); IQuery.SortSpec dest = new IQuery.SortSpec( sortIndex, sortKey, src.getSortDirection( ) == ISortDefinition.SORT_ASC ); sortSpecs[i] = dest; } odiQuery.setOrdering( Arrays.asList( sortSpecs ) ); } } /** * * @param cx * @throws DataException */ private void populateFetchEvent( Context cx ) throws DataException { List dataSetFilters = new ArrayList( ); List queryFilters = new ArrayList( ); if ( dataSet.getFilters( ) != null ) { dataSetFilters = dataSet.getFilters( ); } if ( this.baseQueryDefn.getFilters( ) != null ) { for ( int i = 0; i < this.baseQueryDefn.getFilters( ).size( ); i++ ) { queryFilters.add( this.baseQueryDefn.getFilters( ).get( i ) ); } } //When prepare filters, the temporaryComputedColumns would also be effect. 
List multipassFilters = prepareFilters( cx, dataSetFilters, queryFilters, temporaryComputedColumns ); / List computedColumns = null; // set computed column event computedColumns = this.dataSet.getComputedColumns( ); if ( computedColumns == null ) computedColumns = new ArrayList( ); if ( computedColumns.size( ) > 0 || temporaryComputedColumns.size( ) > 0 ) { IResultObjectEvent objectEvent = new ComputedColumnHelper( this.dataSet, computedColumns, temporaryComputedColumns ); odiQuery.addOnFetchEvent( objectEvent ); this.dataSet.getComputedColumns( ) .addAll( temporaryComputedColumns ); } if ( dataSet.getEventHandler( ) != null ) { OnFetchScriptHelper event = new OnFetchScriptHelper( dataSet ); odiQuery.addOnFetchEvent( event ); } if ( dataSetFilters.size( ) + queryFilters.size( ) + multipassFilters.size( ) > 0 ) { IResultObjectEvent objectEvent = new FilterByRow( dataSetFilters, queryFilters, multipassFilters, dataSet ); odiQuery.addOnFetchEvent( objectEvent ); } } /** * get the data type of a expression * @param cx * @param expr * @return */ private int getColumnDataType( Context cx, String expr ) { String columnName = QueryExecutorUtil.getColInfoFromJSExpr( cx, expr ) .getColumnName( ); if ( columnName == null ) { return DataType.UNKNOWN_TYPE; } if ( columnName.equals( "__rownum" ) ) { return DataType.INTEGER_TYPE; } Object baseExpr = ( this.baseQueryDefn.getResultSetExpressions( ).get( columnName ) ); if ( baseExpr == null ) { return DataType.UNKNOWN_TYPE; } return ( (IBaseExpression) baseExpr ).getDataType( ); } /** * @param src * @return */ private String getGroupKeyExpression( IGroupDefinition src ) { String expr = src.getKeyColumn( ); if ( expr == null ) { expr = src.getKeyExpression( ); } else { expr = getColumnRefExpression( expr ); } return expr; } /** * * @param expr * @return */ private String getColumnRefExpression( String expr ) { return ExpressionUtil.createJSRowExpression( expr ); } void setParentExecutorHelper( IExecutorHelper helper ) { 
this.parentHelper = helper; } /** * * @param cx * @param dataSetFilters * @param queryFilters * @param temporaryComputedColumns * @return * @throws DataException */ private List prepareFilters( Context cx, List dataSetFilters, List queryFilters, List temporaryComputedColumns ) throws DataException { List result = new ArrayList( ); prepareFilter( cx, dataSetFilters, temporaryComputedColumns, result ); prepareFilter( cx, queryFilters, temporaryComputedColumns, result ); return result; } /** * * @param cx * @param dataSetFilters * @param temporaryComputedColumns * @param result * @throws DataException */ private void prepareFilter( Context cx, List dataSetFilters, List temporaryComputedColumns, List result ) throws DataException { if ( dataSetFilters != null && !dataSetFilters.isEmpty( ) ) { Iterator it = dataSetFilters.iterator( ); for ( int i = 0; it.hasNext( ); i++ ) { IFilterDefinition src = (IFilterDefinition) it.next( ); IBaseExpression expr = src.getExpression( ); if ( isGroupFilter( src ) ) { ConditionalExpression ce = ( (ConditionalExpression) expr ); String exprText = ce.getExpression( ).getText( ); ColumnInfo columnInfo = QueryExecutorUtil.getColInfoFromJSExpr( cx, exprText ); int index = columnInfo.getColumnIndex( ); String name = columnInfo.getColumnName( ); if ( name == null && index < 0 ) { // If failed to treate filter key as a column reference // expression // then treat it as a computed column expression temporaryComputedColumns.add( new ComputedColumn( "_{$TEMP_FILTER_" + i + "$}_", exprText, DataType.ANY_TYPE ) ); it.remove( ); result.add( new FilterDefinition( new ConditionalExpression( new ScriptExpression( String.valueOf( "dataSetRow[\"_{$TEMP_FILTER_" + i + "$}_\"]" ) ), ce.getOperator( ), ce.getOperand1( ), ce.getOperand2( ) ) ) ); } } } } } /** * * @param filter * @return * @throws DataException */ private boolean isGroupFilter( IFilterDefinition filter ) throws DataException { IBaseExpression expr = filter.getExpression( ); if ( expr 
instanceof IConditionalExpression ) { if ( !ExpressionCompilerUtil.isValidExpressionInQueryFilter( expr ) ) throw new DataException( ResourceConstants.INVALID_DEFINITION_IN_FILTER, new Object[]{ ( (IConditionalExpression) expr ).getExpression( ).getText( ) } ); try { if ( odiQuery instanceof BaseQuery ) { return ( (BaseQuery) odiQuery ).getExprProcessor( ) .hasAggregation( expr ); } } catch ( DataException e ) { return true; } } return false; } /* * @see org.eclipse.birt.data.engine.impl.IQueryExecutor#getResultMetaData() */ public IResultMetaData getResultMetaData( ) throws DataException { assert odiQuery instanceof IPreparedDSQuery || odiQuery instanceof ICandidateQuery || odiQuery instanceof JointDataSetQuery; if ( odiQuery instanceof IPreparedDSQuery ) { if ( ( (IPreparedDSQuery) odiQuery ).getResultClass( ) != null ) return new ResultMetaData( ( (IPreparedDSQuery) odiQuery ).getResultClass( ) ); else return null; } else if ( odiQuery instanceof JointDataSetQuery ) { return new ResultMetaData( ( (JointDataSetQuery) odiQuery ).getResultClass( ) ); } else { return new ResultMetaData( ( (ICandidateQuery) odiQuery ).getResultClass( ) ); } } /* * @see org.eclipse.birt.data.engine.impl.IQueryExecutor#execute() */ public void execute( IEventHandler eventHandler ) throws DataException { logger.logp( Level.FINER, QueryExecutor.class.getName( ), "execute", "Start to execute" ); if ( this.isExecuted ) return; ExecutorHelper helper = new ExecutorHelper( this.parentHelper ); eventHandler.setExecutorHelper( helper ); // Execute the query odiResult = executeOdiQuery( eventHandler ); helper.setJSRowObject( this.dataSet.getJSResultRowObject( ) ); resetComputedColumns( ); // Bind the row object to the odi result set this.dataSet.setResultSet( odiResult, false ); // Calculate aggregate values //this.aggrTable.calculate( odiResult, getQueryScope( ) ); this.isExecuted = true; logger.logp( Level.FINER, QueryExecutor.class.getName( ), "execute", "Finish executing" ); } /** * reset 
computed columns */ private void resetComputedColumns( ) { List l = this.getDataSet( ).getComputedColumns( ); if ( l != null ) l.removeAll( this.temporaryComputedColumns ); } /* * Closes the executor; release all odi resources * * @see org.eclipse.birt.data.engine.impl.IQueryExecutor#close() */ public void close( ) { if ( odiQuery == null ) { // already closed logger.logp( Level.FINER, QueryExecutor.class.getName( ), "close", "executor closed " ); return; } // Close the data set and associated odi query try { dataSet.beforeClose( ); } catch ( DataException e ) { logger.logp( Level.FINE, QueryExecutor.class.getName( ), "close", e.getMessage( ), e ); } if ( odiResult != null ) odiResult.close( ); odiQuery.close( ); try { dataSet.close( ); } catch ( DataException e ) { logger.logp( Level.FINE, QueryExecutor.class.getName( ), "close", e.getMessage( ), e ); } odiQuery = null; odiDataSource = null; odiResult = null; queryScope = null; isPrepared = false; isExecuted = false; // Note: reset dataSet and dataSource only after afterClose() is executed, since // the script may access these two objects try { dataSet.afterClose( ); } catch ( DataException e ) { logger.logp( Level.FINE, QueryExecutor.class.getName( ), "close", e.getMessage( ), e ); } dataSet = null; dataSource = null; logger.logp( Level.FINER, QueryExecutor.class.getName( ), "close", "executor closed " ); } /* * @see org.eclipse.birt.data.engine.impl.IQueryExecutor#getDataSet() */ public DataSetRuntime getDataSet( ) { return dataSet; } /* * @see org.eclipse.birt.data.engine.impl.IQueryExecutor#getSharedScope() */ public Scriptable getSharedScope( ) { return this.sharedScope; } /** * Gets the Javascript scope for evaluating expressions for this query * * @return */ public Scriptable getQueryScope( ) { if ( queryScope == null ) { // Set up a query scope. All expressions are evaluated against the // Data set JS object as the prototype (so that it has access to all // data set properties). 
It uses a subscope of the externally provided // parent scope, or the global shared scope queryScope = newSubScope( parentScope ); queryScope.setPrototype( dataSet.getJSDataSetObject( ) ); } return queryScope; } /** * Creates a subscope within parent scope * @param parentAndProtoScope parent scope. If null, the shared top-level scope is used as parent */ private Scriptable newSubScope( Scriptable parentAndProtoScope ) { if ( parentAndProtoScope == null ) parentAndProtoScope = sharedScope; Context cx = Context.enter( ); try { Scriptable scope = cx.newObject( parentAndProtoScope ); scope.setParentScope( parentAndProtoScope ); scope.setPrototype( parentAndProtoScope ); return scope; } finally { Context.exit( ); } } /* * @see org.eclipse.birt.data.engine.impl.IQueryExecutor#getNestedLevel() */ public int getNestedLevel( ) { return this.nestedLevel; } /* * @see org.eclipse.birt.data.engine.impl.IQueryExecutor#getDataSourceInstanceHandle() */ public IDataSourceInstanceHandle getDataSourceInstanceHandle( ) { return this.dataSource; } /* * @see org.eclipse.birt.data.engine.impl.IQueryExecutor#getJSAggrValueObject() */ public Scriptable getJSAggrValueObject( ) { return this.aggrTable.getJSAggrValueObject( ); } /* * @see org.eclipse.birt.data.engine.impl.IQueryExecutor#getNestedDataSets(int) */ public DataSetRuntime[] getNestedDataSets( int nestedCount ) { return outerResults.getDataSetRuntime( nestedCount ); } /* * @see org.eclipse.birt.data.engine.impl.IQueryExecutor#getOdiResultSet() */ public IResultIterator getOdiResultSet( ) { return this.odiResult; } /** * @param evaluateValue * @return * @throws DataException */ protected Collection resolveDataSetParameters( boolean evaluateValue ) throws DataException { return new ParameterUtil( this.outerResults, this.getDataSet( ), (IQueryDefinition) this.baseQueryDefn, this.getQueryScope( ) ).resolveDataSetParameters( evaluateValue ); } }
package it.unibz.inf.ontop.dbschema.impl; import com.google.common.collect.ImmutableList; import com.google.inject.assistedinject.Assisted; import com.google.inject.assistedinject.AssistedInject; import it.unibz.inf.ontop.dbschema.NamedRelationDefinition; import it.unibz.inf.ontop.dbschema.QuotedID; import it.unibz.inf.ontop.dbschema.RelationID; import it.unibz.inf.ontop.exception.MetadataExtractionException; import it.unibz.inf.ontop.exception.RelationNotFoundInMetadataException; import it.unibz.inf.ontop.injection.CoreSingletons; import it.unibz.inf.ontop.model.type.TypeFactory; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.Arrays; import java.util.stream.Collectors; import static it.unibz.inf.ontop.dbschema.RelationID.TABLE_INDEX; public class DremioDBMetadataProvider extends AbstractDBMetadataProvider { @AssistedInject DremioDBMetadataProvider(@Assisted Connection connection, CoreSingletons coreSingletons) throws MetadataExtractionException { super(connection, metadata -> new DremioQuotedIDFactory(), coreSingletons); try { System.out.println("DREMIO CATALOG/SCHEMA: " + connection.getCatalog() + " / " + connection.getSchema()); try (Statement stmt = connection.createStatement(); ResultSet rs = stmt.executeQuery("SELECT CURRENT_SCHEMA()")) { rs.next(); System.out.println("DREMIO SCHEMA: " + rs.getString("TABLE_SCHEM")); } } catch (SQLException e) { // NO-OP } } @Override protected RelationID getCanonicalRelationId(RelationID id) { return id; } @Override protected ImmutableList<RelationID> getAllIDs(RelationID id) { return ImmutableList.of(id); } @Override public NamedRelationDefinition getRelation(RelationID id0) throws MetadataExtractionException { try { return super.getRelation(id0); } catch (RelationNotFoundInMetadataException e) { try (Statement st = connection.createStatement()) { st.execute("SELECT * FROM " + id0.getSQLRendering() + " WHERE 1 = 0"); } catch (SQLException ex) 
{ throw new MetadataExtractionException(ex); } return super.getRelation(id0); } } @Override protected RelationID getRelationID(ResultSet rs, String catalogNameColumn, String schemaNameColumn, String tableNameColumn) throws SQLException { String[] schemaComponents = rs.getString(schemaNameColumn).split("\\."); String[] components = Arrays.copyOf(schemaComponents, schemaComponents.length + 1); components[schemaComponents.length] = rs.getString(tableNameColumn); return rawIdFactory.createRelationID(components); } @Override protected String getRelationCatalog(RelationID id) { return null; } @Override protected String getRelationSchema(RelationID id) { return id.getComponents().subList(1, id.getComponents().size()).reverse().stream() .map(QuotedID::getName) // IMPORTANT: no quotation marks! .collect(Collectors.joining(".")); } @Override protected String getRelationName(RelationID id) { return id.getComponents().get(TABLE_INDEX).getName(); } }
package org.eclipse.persistence.internal.xr;

// Javase imports
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import org.w3c.dom.Document;

// Java extension libraries
import javax.xml.namespace.QName;

// EclipseLink imports
import org.eclipse.persistence.internal.databaseaccess.DatabasePlatform;
import org.eclipse.persistence.platform.xml.XMLPlatform;
import org.eclipse.persistence.platform.xml.XMLPlatformFactory;
import static org.eclipse.persistence.internal.helper.ClassConstants.APBYTE;
import static org.eclipse.persistence.internal.helper.ClassConstants.BIGDECIMAL;
import static org.eclipse.persistence.internal.helper.ClassConstants.BIGINTEGER;
import static org.eclipse.persistence.internal.helper.ClassConstants.BOOLEAN;
import static org.eclipse.persistence.internal.helper.ClassConstants.BYTE;
import static org.eclipse.persistence.internal.helper.ClassConstants.CALENDAR;
import static org.eclipse.persistence.internal.helper.ClassConstants.DOUBLE;
import static org.eclipse.persistence.internal.helper.ClassConstants.FLOAT;
import static org.eclipse.persistence.internal.helper.ClassConstants.INTEGER;
import static org.eclipse.persistence.internal.helper.ClassConstants.LONG;
import static org.eclipse.persistence.internal.helper.ClassConstants.Object_Class;
import static org.eclipse.persistence.internal.helper.ClassConstants.SHORT;
import static org.eclipse.persistence.internal.helper.ClassConstants.STRING;
import static org.eclipse.persistence.internal.xr.sxf.SimpleXMLFormat.SIMPLE_XML_FORMAT_TYPE;
import static org.eclipse.persistence.oxm.XMLConstants.ANY_SIMPLE_TYPE_QNAME;
import static org.eclipse.persistence.oxm.XMLConstants.BASE_64_BINARY_QNAME;
import static org.eclipse.persistence.oxm.XMLConstants.BOOLEAN_QNAME;
import static org.eclipse.persistence.oxm.XMLConstants.BYTE_QNAME;
import static org.eclipse.persistence.oxm.XMLConstants.DATE_QNAME;
import static org.eclipse.persistence.oxm.XMLConstants.DATE_TIME_QNAME;
import static org.eclipse.persistence.oxm.XMLConstants.DECIMAL_QNAME;
import static org.eclipse.persistence.oxm.XMLConstants.DOUBLE_QNAME;
import static org.eclipse.persistence.oxm.XMLConstants.DURATION_QNAME;
import static org.eclipse.persistence.oxm.XMLConstants.FLOAT_QNAME;
import static org.eclipse.persistence.oxm.XMLConstants.G_DAY_QNAME;
import static org.eclipse.persistence.oxm.XMLConstants.G_MONTH_DAY_QNAME;
import static org.eclipse.persistence.oxm.XMLConstants.G_MONTH_QNAME;
import static org.eclipse.persistence.oxm.XMLConstants.G_YEAR_MONTH_QNAME;
import static org.eclipse.persistence.oxm.XMLConstants.G_YEAR_QNAME;
import static org.eclipse.persistence.oxm.XMLConstants.HEX_BINARY_QNAME;
import static org.eclipse.persistence.oxm.XMLConstants.INTEGER_QNAME;
import static org.eclipse.persistence.oxm.XMLConstants.INT_QNAME;
import static org.eclipse.persistence.oxm.XMLConstants.LONG_QNAME;
import static org.eclipse.persistence.oxm.XMLConstants.QNAME_QNAME;
import static org.eclipse.persistence.oxm.XMLConstants.SHORT_QNAME;
import static org.eclipse.persistence.oxm.XMLConstants.STRING_QNAME;
import static org.eclipse.persistence.oxm.XMLConstants.TIME_QNAME;
import static org.eclipse.persistence.oxm.XMLConstants.UNSIGNED_BYTE_QNAME;
import static org.eclipse.persistence.oxm.XMLConstants.UNSIGNED_INT_QNAME;
import static org.eclipse.persistence.oxm.XMLConstants.UNSIGNED_SHORT_QNAME;

/**
 * <p><b>INTERNAL</b>: provides useful constants, SQL Column &lt;-&gt; XML name mapping and
 * a few other misc. features.
 *
 * <p>NOTE(review): the {@code @SuppressWarnings("serial")} previously on this class was
 * only needed for the double-brace-initialized anonymous {@code HashMap} subclass used
 * for {@link #SCHEMA_2_CLASS}; that map is now populated in a plain static block, so
 * the annotation has been dropped.
 *
 * @author Mike Norman - michael.norman@oracle.com
 * @since EclipseLink 1.x
 */
public class Util {

    /**
     * Convert a SQL name to a valid XML name. Because not all characters that
     * are valid in a SQL name are valid in an XML name, they need to be escaped
     * using a special format. See the Oracle paper, "SQL/XML candidate base
     * document", for more detail.
     *
     * @param name the SQL name
     * @return the escaped valid XML name
     */
    public static String sqlToXmlName(String name) {
        int length = name.length();
        if (length == 0) {
            return name;
        }
        StringBuilder xmlName = new StringBuilder(length + 8);
        int beginAt = 1;
        char firstChar = name.charAt(0);
        if (firstChar == ':') {
            // escape : to _x003A_
            xmlName.append("_x003A_");
        }
        else if ((length >= 2) && name.substring(0, 2).equals("_x")) {
            // escape _ of a literal "_x" prefix to _x005F_ so it cannot be
            // mistaken for an escape sequence ('x' onward handled by the loop below)
            xmlName.append("_x005F_");
        }
        else {
            // check to see if it is a valid first character
            if ((firstChar >= 0xd800) && (firstChar < 0xdc00)) { // high surrogate
                if (length > 1) {
                    // escape the surrogate pair as a single 8-digit sequence
                    xmlName.append(hexEscape((firstChar << 16) | (name.charAt(1) & 0xffff)));
                    beginAt = 2;
                } else {
                    // unpaired surrogate: escape it on its own
                    xmlName.append(hexEscape(firstChar));
                }
            } else if (isFirstNameChar(firstChar)) {
                xmlName.append(firstChar);
            } else {
                xmlName.append(hexEscape(firstChar));
            }
        }
        // check each following character to see if it is a valid NameChar
        for (int x = beginAt; x < length; x++) {
            char c = name.charAt(x);
            if ((c >= 0xd800) && (c < 0xdc00)) { // high surrogate
                if ((x + 1) < length) {
                    xmlName.append(hexEscape((c << 16) | (name.charAt(x + 1) & 0xffff)));
                    x++;
                } else {
                    xmlName.append(hexEscape(c));
                }
            } else if (!isNameChar(c)) {
                xmlName.append(hexEscape(c)); // escape
            } else {
                xmlName.append(c);
            }
        }
        return xmlName.toString();
    }

    /**
     * Convert an escaped XML name back to the original SQL name.
     *
     * <p>Escape sequences have the form {@code _xHHHH_} (or {@code _xHHHHHHHH_} for a
     * surrogate pair). A sequence of {@code _xFFFF_} decodes to nothing. A malformed
     * sequence is copied through verbatim. A trailing, unterminated sequence is
     * silently dropped (pre-existing behavior, preserved).
     *
     * @param name the escaped XML name
     * @return the original SQL name
     */
    public static String xmlToSqlName(String name) {
        int length = name.length();
        StringBuilder sqlName = new StringBuilder(length);
        boolean unescapeMode = false;
        StringBuilder hexString = null;
        // step through each char
        for (int x = 0; x < length; x++) {
            char c = name.charAt(x);
            if (unescapeMode) {
                if (((c >= 'A') && (c <= 'F')) || ((c >= '0') && (c <= '9'))) {
                    // gather the hex string from the escape sequence
                    hexString.append(c);
                } else if (c == '_') {
                    // done with escape mode
                    unescapeMode = false;
                    int len = hexString.length();
                    if (len > 4) {
                        // 8-digit sequence: decode as two UTF-16 code units
                        sqlName.append((char) Integer.parseInt(hexString.substring(0, len - 4), 16));
                        sqlName.append((char) Integer.parseInt(hexString.substring(len - 4), 16));
                    } else if (len == 0) {
                        // "_x_" with no digits: malformed, copy through verbatim
                        // (previously threw NumberFormatException)
                        sqlName.append("_x_");
                    } else {
                        int i = Integer.parseInt(hexString.toString(), 16);
                        if (i != 0xffff) { // 0xffff decodes to nothing
                            sqlName.append((char) i);
                        }
                    }
                } else {
                    // invalid char in escape sequence! write everything into
                    // sqlName as is (could throw an exception here in the future)
                    sqlName.append("_x").append(hexString).append(c);
                    unescapeMode = false;
                }
            } else {
                if ((c == '_') && ((x + 1) < length) && (name.charAt(x + 1) == 'x')) {
                    // found escape beginning "_x": go into unescape mode
                    unescapeMode = true;
                    hexString = new StringBuilder(8);
                    x++;
                } else {
                    // just copy src char to destination
                    sqlName.append(c);
                }
            }
        }
        return sqlName.toString();
    }

    /**
     * Escape a single UTF-16 code unit as {@code _xHHHH_} (zero-padded, uppercase hex).
     */
    public static String hexEscape(char c) {
        return hexEscapeImpl(Integer.toHexString(c));
    }

    /**
     * Escape an int code value as {@code _xHHHH_} or {@code _xHHHHHHHH_} — the 8-digit
     * form is used for packed surrogate pairs (see {@link #sqlToXmlName(String)}).
     */
    public static String hexEscape(int c) {
        return hexEscapeImpl(Integer.toHexString(c));
    }

    // Shared implementation of both hexEscape overloads: wraps the hex digits in
    // "_x..._", zero-padding on the left to a multiple of four digits (this reproduces
    // the original switch tables for lengths 1-4 and 1-8 exactly).
    private static String hexEscapeImpl(String hex) {
        StringBuilder sb = new StringBuilder(hex.length() + 6);
        sb.append("_x");
        for (int pad = (4 - (hex.length() % 4)) % 4; pad > 0; pad--) {
            sb.append('0');
        }
        return sb.append(hex.toUpperCase(Locale.US)).append('_').toString();
    }

    /**
     * Return true if the character can be part of a name.
     *
     * <p>In most cases the character is less than 256, so that check is made fast via
     * the {@link #chartype} table. For the rest of the characters the check is based on
     * the conformance tests. XML 1.1 has changed the list of chars allowed, and the
     * check is quite simple; the complete check based on XML 1.0 is not performed.
     *
     * @param c char to be checked
     * @return true/false
     */
    public static boolean isNameChar(char c) {
        if (c < 256) {
            return (chartype[c] & (FLETTER | FDIGIT | FMISCNAME)) != 0;
        }
        // whitelisted ranges:
        // [#x2180-#x2182] | [#x3041-#x3094] | [#x30A1-#x30FA] |
        // [#x3105-#x312C] | [#xAC00-#xD7A3] | [#x0E47-#x0E4E]
        if (((c >= 0x2180) && (c <= 0x2182)) || ((c >= 0x3041) && (c <= 0x3094))
                || ((c >= 0x30A1) && (c <= 0x30FA)) || ((c >= 0x3105) && (c <= 0x312C))
                || ((c >= 0xAC00) && (c <= 0xD7A3)) || ((c >= 0x0E47) && (c <= 0x0E4E))) {
            return true;
        }
        // individual code points known not to be NameChars
        if ((c == 0x02FF) || (c == 0x0346) || (c == 0x0362) || (c == 0x0487)
                || (c == 0x05A2) || (c == 0x05BA) || (c == 0x05BE) || (c == 0x05C0)
                || (c == 0x05C3) || (c == 0x0653) || (c == 0x06B8) || (c == 0x06B9)
                || (c == 0x06E9) || (c == 0x06EE) || (c == 0x0904) || (c == 0x093B)
                || (c == 0x094E) || (c == 0x0955) || (c == 0x0964) || (c == 0x0984)
                || (c == 0x09C5) || (c == 0x09C9) || (c == 0x09CE) || (c == 0x09D8)
                || (c == 0x09E4) || (c == 0x0A03) || (c == 0x0A3D) || (c == 0x0A46)
                || (c == 0x0A49) || (c == 0x0A4E) || (c == 0x0A80) || (c == 0x0A84)
                || (c == 0x0ABB) || (c == 0x0AC6) || (c == 0x0ACA) || (c == 0x0ACE)
                || (c == 0x0B04) || (c == 0x0B3B) || (c == 0x0B44) || (c == 0x0B4A)
                || (c == 0x0B4E) || (c == 0x0B58) || (c == 0x0B84) || (c == 0x0BC3)
                || (c == 0x0BC9) || (c == 0x0BD6) || (c == 0x0C0D) || (c == 0x0C45)
                || (c == 0x0C49) || (c == 0x0C54) || (c == 0x0C81) || (c == 0x0C84)
                || (c == 0x0CC5) || (c == 0x0CC9) || (c == 0x0CD4) || (c == 0x0CD7)
                || (c == 0x0D04) || (c == 0x0D45) || (c == 0x0D49) || (c == 0x0D4E)
                || (c == 0x0D58) || (c == 0x0E3F) || (c == 0x0E3B) || (c == 0x0E4F)
                || (c == 0x0EBA) || (c == 0x0EBE) || (c == 0x0ECE) || (c == 0x0F1A)
                || (c == 0x0F36) || (c == 0x0F38) || (c == 0x0F3B) || (c == 0x0F3A)
                || (c == 0x0F70) || (c == 0x0F85) || (c == 0x0F8C) || (c == 0x0F96)
                || (c == 0x0F98) || (c == 0x0FB0) || (c == 0x0FB8) || (c == 0x0FBA)
                || (c == 0x20DD) || (c == 0x20E2) || (c == 0x3030) || (c == 0x309B)
                || (c == 0x066A) || (c == 0x06FA) || (c == 0x0970) || (c == 0x09F2)
                || (c == 0x0AF0) || (c == 0x0B70) || (c == 0x0C65) || (c == 0x0CE5)
                || (c == 0x0CF0) || (c == 0x0D70) || (c == 0x0E5A) || (c == 0x0EDA)
                || (c == 0x0F2A) || (c == 0x02D2) || (c == 0x03FE) || (c == 0x065F)
                || (c == 0x0E5C) || (c == 0x0C04)) {
            return false;
        }
        // NOTE(review): the original computed a letter/digit/'-'/'_'/'.' check here and
        // then unconditionally overwrote the result with `true` (dead store). The dead
        // store has been removed; the effective lenient XML-1.1-style behavior — accept
        // any other char >= 256 — is preserved exactly.
        return true;
    }

    /**
     * Return true if the character can be the FIRST character of a name.
     *
     * <p>Same strategy as {@link #isNameChar(char)}: fast table lookup below 256, then
     * whitelisted ranges, then a blacklist of individual code points, then a lenient
     * letter/underscore fallback.
     *
     * @param c char to be checked
     * @return true/false
     */
    public static boolean isFirstNameChar(char c) {
        if (c < 256) {
            return (chartype[c] & (FLETTER | FSTARTNAME)) != 0;
        }
        // [#x2180-#x2182] | #x3007 | [#x3021-#x3029] | [#x3041-#x3094] |
        // [#x30A1-#x30FA] | [#x3105-#x312C] | [#xAC00-#xD7A3]
        if (((c >= 0x2180) && (c <= 0x2182)) || (c == 0x3007)
                || ((c >= 0x3021) && (c <= 0x3029)) || ((c >= 0x3041) && (c <= 0x3094))
                || ((c >= 0x30A1) && (c <= 0x30FA)) || ((c >= 0x3105) && (c <= 0x312C))
                || ((c >= 0xAC00) && (c <= 0xD7A3))) {
            return true;
        }
        // individual code points known not to be valid name-start chars
        if ((c == 0x1101) || (c == 0x1104) || (c == 0x1108) || (c == 0x110A)
                || (c == 0x110D) || (c == 0x113B) || (c == 0x1141) || (c == 0x114D)
                || (c == 0x114F) || (c == 0x1151) || (c == 0x1156) || (c == 0x1162)
                || (c == 0x1164) || (c == 0x1166) || (c == 0x116B) || (c == 0x116F)
                || (c == 0x1174) || (c == 0x119F) || (c == 0x11AC) || (c == 0x11B6)
                || (c == 0x11B9) || (c == 0x11BB) || (c == 0x11C3) || (c == 0x11F1)
                || (c == 0x0132) || (c == 0x0133) || (c == 0x013F) || (c == 0x0140)
                || (c == 0x0149) || (c == 0x017F) || (c == 0x01C4) || (c == 0x01CC)
                || (c == 0x01F1) || (c == 0x01F3) || (c == 0x0E46) || (c == 0x113F)
                || (c == 0x01F6) || (c == 0x01F9) || (c == 0x0230) || (c == 0x03D7)
                || (c == 0x03DD) || (c == 0x03E1) || (c == 0x040D) || (c == 0x0450)
                || (c == 0x045D) || (c == 0x04EC) || (c == 0x04ED) || (c == 0x06B8)
                || (c == 0x06BF) || (c == 0x06CF) || (c == 0x0E2F) || (c == 0x0EAF)
                || (c == 0x0F6A) || (c == 0x4CFF) || (c == 0x212F) || (c == 0x0587)) {
            return false;
        }
        return Character.isLetter(c) || c == '_';
    }

    /**
     * Char type table: one flag word per Latin-1 code point, precomputed below.
     */
    static final int chartype[] = new int[256];
    static final int FWHITESPACE = 1;
    static final int FDIGIT = 2;
    static final int FLETTER = 4;
    static final int FMISCNAME = 8;
    static final int FSTARTNAME = 16;

    static {
        for (int i = 0; i < 256; i++) {
            char c = (char) i;
            chartype[i] = 0;
            if ((c == 32) || (c == 9) || (c == 13) || (c == 10))
                chartype[i] = FWHITESPACE;
            if (Character.isLetter(c))
                chartype[i] |= FLETTER;
            if (Character.isDigit(c))
                chartype[i] |= FDIGIT;
        }
        chartype['.'] |= FMISCNAME;
        chartype['-'] |= FMISCNAME;
        chartype['_'] |= FMISCNAME | FSTARTNAME;
        chartype[0xb7] |= FMISCNAME; // Extender
    }

    /**
     * Map a JDBC type name to a Java class via the platform's class-type table,
     * defaulting to {@code Object} for unknown type names.
     */
    public static Class<?> getClassFromJDBCType(String typeName, DatabasePlatform databasePlatform) {
        Class<?> clz = (Class<?>) databasePlatform.getClassTypes().get(typeName);
        if (clz == null) {
            return Object_Class;
        }
        return clz;
    }

    public static final QName SXF_QNAME = new QName("", SIMPLE_XML_FORMAT_TYPE);

    /**
     * XML schema QName -> Java class mapping. Date/time types deliberately map to
     * {@code CALENDAR} rather than the JDBC date/time classes.
     *
     * <p>NOTE(review): populated in a plain static block instead of the previous
     * double-brace initializer (which created an anonymous serializable HashMap
     * subclass and forced a class-level {@code @SuppressWarnings("serial")}).
     */
    public static final Map<QName, Class<?>> SCHEMA_2_CLASS;
    static {
        Map<QName, Class<?>> schema2Class = new HashMap<QName, Class<?>>();
        schema2Class.put(ANY_SIMPLE_TYPE_QNAME, Object_Class);
        schema2Class.put(BASE_64_BINARY_QNAME, APBYTE);
        schema2Class.put(BOOLEAN_QNAME, BOOLEAN);
        schema2Class.put(BYTE_QNAME, BYTE);
        schema2Class.put(DATE_QNAME, CALENDAR);
        schema2Class.put(DATE_TIME_QNAME, CALENDAR);
        schema2Class.put(DECIMAL_QNAME, BIGDECIMAL);
        schema2Class.put(DOUBLE_QNAME, DOUBLE);
        schema2Class.put(DURATION_QNAME, STRING);
        schema2Class.put(FLOAT_QNAME, FLOAT);
        schema2Class.put(G_YEAR_MONTH_QNAME, STRING);
        schema2Class.put(G_YEAR_QNAME, STRING);
        schema2Class.put(G_MONTH_QNAME, STRING);
        schema2Class.put(G_MONTH_DAY_QNAME, STRING);
        schema2Class.put(G_DAY_QNAME, STRING);
        schema2Class.put(HEX_BINARY_QNAME, APBYTE);
        schema2Class.put(INT_QNAME, INTEGER);
        schema2Class.put(INTEGER_QNAME, BIGINTEGER);
        schema2Class.put(LONG_QNAME, LONG);
        schema2Class.put(QNAME_QNAME, QName.class);
        schema2Class.put(SHORT_QNAME, SHORT);
        schema2Class.put(STRING_QNAME, STRING);
        schema2Class.put(TIME_QNAME, CALENDAR);
        schema2Class.put(UNSIGNED_BYTE_QNAME, SHORT);
        schema2Class.put(UNSIGNED_INT_QNAME, LONG);
        schema2Class.put(UNSIGNED_SHORT_QNAME, INTEGER);
        SCHEMA_2_CLASS = schema2Class;
    }

    public static XMLPlatform XML_PLATFORM = XMLPlatformFactory.getInstance().getXMLPlatform();
    public static Document TEMP_DOC = XML_PLATFORM.createDocument();

    public static final String DEFAULT_ATTACHMENT_MIMETYPE = "application/octet-stream";
    public static final String WEB_INF_DIR = "WEB-INF/";
    public static final String WSDL_DIR = "wsdl/";
    public static final String[] META_INF_PATHS = {"META-INF/", "/META-INF/"};
    public static final String DBWS_SERVICE_XML = "eclipselink-dbws.xml";
    public static final String DBWS_OR_LABEL = "dbws-or";
    public static final String DBWS_OX_LABEL = "dbws-ox";
    public static final String DBWS_OR_XML = "eclipselink-" + DBWS_OR_LABEL + ".xml";
    public static final String DBWS_OX_XML = "eclipselink-" + DBWS_OX_LABEL + ".xml";
    public static final String DBWS_SCHEMA_XML = "eclipselink-dbws-schema.xsd";
    public static final String DBWS_WSDL = "eclipselink-dbws.wsdl";
    public static final String DBWS_SESSIONS_XML = "eclipselink-dbws-sessions.xml";
    public static final String DBWS_OR_SESSION_NAME_SUFFIX = DBWS_OR_LABEL + "-session";
    public static final String DBWS_OX_SESSION_NAME_SUFFIX = DBWS_OX_LABEL + "-session";
    public static final String TARGET_NAMESPACE_PREFIX = "ns1";
    public static final String SERVICE_NAMESPACE_PREFIX = "srvc";
    public static final String SERVICE_SUFFIX = "Service";
}
package figglewatts.slagd.graphics.tile;

import java.util.ArrayList;
import java.util.List;

import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.files.FileHandle;
import com.badlogic.gdx.graphics.OrthographicCamera;
import com.badlogic.gdx.graphics.Texture;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
import com.badlogic.gdx.graphics.g2d.TextureRegion;
import com.badlogic.gdx.math.Vector2;
import com.badlogic.gdx.math.Vector3;
import com.badlogic.gdx.utils.Array;
import com.badlogic.gdx.utils.XmlReader;
import com.badlogic.gdx.utils.XmlReader.*;

import figglewatts.slagd.Settings;

/**
 * A class to represent a map of tiles. Contains methods to manipulate tiles.
 * @author Figglewatts
 */
public class TileMap {
	/**
	 * A 2-dimensional array containing information about each map cell,
	 * indexed as Cells[y][x].
	 */
	public MapCell[][] Cells;

	/** Objects loaded from the TMX "objectgroup" layers. */
	public List<TiledObject> Objects = new ArrayList<TiledObject>();

	private int mapWidth;
	private int mapHeight;

	public int getMapWidth() {
		return mapWidth;
	}
	public int getMapHeight() {
		return mapHeight;
	}

	/**
	 * Fills the map with a tile of given specifications
	 * @param tile BaseTile instance
	 */
	public void fillWithTile(BaseTile tile) {
		for (int y = 0; y < mapHeight; y++) {
			for (int x = 0; x < mapWidth; x++) {
				this.Cells[y][x] = new MapCell(x, y, tile);
			}
		}
	}

	/**
	 * Fills the map with a tile of given specifications
	 * @param tileID id of the tile within its sheet
	 * @param sheetID id of the tile sheet
	 */
	public void fillWithTile(int tileID, int sheetID) {
		for (int y = 0; y < mapHeight; y++) {
			for (int x = 0; x < mapWidth; x++) {
				this.Cells[y][x] = new MapCell(x, y, tileID, sheetID);
			}
		}
	}

	public MapCell getCell(int x, int y) {
		return this.Cells[y][x];
	}

	/** Returns the tile on layer 0 at (x, y). */
	public BaseTile getTile(int x, int y) {
		return this.Cells[y][x].getTile(0);
	}

	public BaseTile getTile(int x, int y, int layer) {
		return this.Cells[y][x].getTile(layer);
	}

	/**
	 * Sets the tile at (x, y)
	 * @param x
	 * @param y
	 * @param tileID
	 * @param sheetID
	 */
	public void setTile(int x, int y, int tileID, int sheetID) {
		this.Cells[y][x].setTile(tileID, sheetID);
	}

	/**
	 * Sets the tile on given layer at (x, y)
	 * @param x
	 * @param y
	 * @param tileID
	 * @param sheetID
	 * @param layer
	 */
	public void setTile(int x, int y, int tileID, int sheetID, int layer) {
		this.Cells[y][x].setTile(tileID, sheetID, layer);
	}

	/**
	 * Sets the tile at (x, y)
	 * @param x
	 * @param y
	 * @param tile BaseTile instance
	 */
	public void setTile(int x, int y, BaseTile tile) {
		this.Cells[y][x].setTile(tile);
	}

	/**
	 * Sets the tile on given layer at (x, y)
	 * @param x
	 * @param y
	 * @param tile BaseTile instance
	 * @param layer
	 */
	public void setTile(int x, int y, BaseTile tile, int layer) {
		this.Cells[y][x].setTile(tile, layer);
	}

	/**
	 * Converts a world position (in pixels) to a tile position (in tiles)
	 * @param worldPos
	 * @return A vector2 containing the given point's position in tiles
	 */
	public Vector2 worldPosToTilePos(Vector2 worldPos) {
		return new Vector2((float)Math.floor(worldPos.x / Tile.TILE_WIDTH),
				(float)Math.floor(worldPos.y / Tile.TILE_HEIGHT));
	}

	/**
	 * Converts a world position (in pixels) to a tile position (in tiles)
	 * @param x
	 * @param y
	 * @return A vector2 containing the given point's position in tiles
	 */
	public Vector2 worldPosToTilePos(int x, int y) {
		// NOTE(review): divide as floats BEFORE flooring so negative coordinates
		// round toward negative infinity, consistent with the Vector2 overload
		// (the previous integer division truncated toward zero first).
		return new Vector2((float)Math.floor((float)x / Tile.TILE_WIDTH),
				(float)Math.floor((float)y / Tile.TILE_HEIGHT));
	}

	/**
	 * Renders the visible portion of the map, layer by layer, starting from the
	 * tile under the bottom-left corner of the camera's viewport.
	 */
	public void render(SpriteBatch batch, OrthographicCamera cam, int squaresDown, int squaresAcross) {
		// bottom-left corner of the camera viewport in world space
		Vector3 cameraPosition = new Vector3(
				cam.position.x - ((Settings.VIRTUAL_VIEWPORT_WIDTH * cam.zoom) / 2),
				cam.position.y - ((Settings.VIRTUAL_VIEWPORT_HEIGHT * cam.zoom) / 2), 0);
		Vector2 firstSquare = new Vector2(cameraPosition.x / Tile.TILE_WIDTH,
				cameraPosition.y / Tile.TILE_HEIGHT);
		int firstX = (int)firstSquare.x;
		int firstY = (int)firstSquare.y;
		for (int y = 0; y < squaresDown; y++) {
			for (int x = 0; x < squaresAcross; x++) {
				Vector3 screenCoords = cam.project(
						new Vector3((x + firstX) * Tile.TILE_WIDTH, (y + firstY) * Tile.TILE_HEIGHT, 0),
						cameraPosition.x, cameraPosition.y, cam.viewportWidth, cam.viewportHeight);
				MapCell thisCell = this.getCell(x + firstX, y + firstY);
				// draw every layer of the cell, bottom-up
				for (int t = 0; t < thisCell.BaseTiles.size; t++) {
					BaseTile tile = thisCell.BaseTiles.get(t);
					TextureRegion srcRect = Tile.getSourceRectangle(tile.getTileID(), tile.getSheetID());
					batch.draw(Tile.TILESET_TEXTURES.get(tile.getSheetID()),
							screenCoords.x, screenCoords.y,
							(float)Tile.TILE_WIDTH, (float)Tile.TILE_HEIGHT,
							srcRect.getRegionX(), srcRect.getRegionY(),
							srcRect.getRegionWidth(), srcRect.getRegionHeight(),
							false, false);
				}
			}
		}
	}

	/**
	 * Create a tile map with given width and height, and fill it with tile 0 from sprite sheet 0
	 * @param width
	 * @param height
	 */
	public TileMap(int width, int height) {
		this.Cells = new MapCell[height][width];
		this.mapWidth = width;
		this.mapHeight = height;
		fillWithTile(0, 0);
	}

	/**
	 * Create a tile map from a .tmx file from the program Tiled
	 * @param xmlInput The XML string to deserialize (.tmx file string)
	 * @param pathToTilesheets The path to the folder in /assets where the tilesheets are kept
	 */
	public TileMap(String xmlInput, String pathToTilesheets) {
		// NOTE(review): was `pathToTilesheets != ""` — a reference comparison that is
		// almost never true, so the trailing slash was appended even for "".
		if (pathToTilesheets != null && !pathToTilesheets.isEmpty()) {
			pathToTilesheets += "/";
		}
		XmlReader reader = new XmlReader();
		Element mapData = reader.parse(xmlInput);
		this.mapWidth = Integer.parseInt(mapData.getAttribute("width"));
		this.mapHeight = Integer.parseInt(mapData.getAttribute("height"));
		this.Cells = new MapCell[this.mapHeight][this.mapWidth];
		Tile.TILE_WIDTH = Integer.parseInt(mapData.getAttribute("tilewidth"));
		Tile.TILE_HEIGHT = Integer.parseInt(mapData.getAttribute("tileheight"));
		fillWithTile(0, 0);

		// load every tilesheet and remember each one's first global tile id
		Array<Element> tileSets = mapData.getChildrenByName("tileset");
		Array<Integer> firstGid = new Array<Integer>();
		for (Element tileSet : tileSets) {
			String source = tileSet.getChildByName("image").getAttribute("source");
			Tile.addTilesheet(new Texture(Gdx.files.internal(pathToTilesheets + source)));
			firstGid.add(Integer.parseInt(tileSet.getAttribute("firstgid")));
		}

		Array<Element> layers = mapData.getChildrenByName("layer");
		int layerIndex = 0;
		for (Element layer : layers) {
			Array<Element> tiles = layer.getChildByName("data").getChildrenByName("tile");
			int i = 0;
			int tileSheetIndex = 0;
			// Tiled stores rows top-to-bottom; map row 0 to the top (y = mapHeight-1)
			for (int y = this.mapHeight - 1; y >= 0; y--) {
				for (int x = 0; x < this.mapWidth; x++) {
					// get global tile id
					int gid = Integer.parseInt(tiles.get(i).getAttribute("gid"));
					if (gid == 0) {
						i++;
						continue; // tile is nonexistent
					}
					// calculate which tilesheet it's on and normalize gid
					for (int k = 0; k < firstGid.size; k++) {
						tileSheetIndex = k;
						if (k != firstGid.size - 1) {
							// between the start ID of this and the next tilesheet?
							if (gid >= firstGid.get(k) && gid < firstGid.get(k + 1)) {
								gid -= firstGid.get(k); // calculate correct ID
								break;
							}
						} else {
							// last tilesheet: must belong here
							gid -= firstGid.get(k);
							break;
						}
					}
					this.Cells[y][x].setTile(new BaseTile(gid, tileSheetIndex), layerIndex);
					i++;
				}
			}
			layerIndex++;
		}

		// load object layers
		Array<Element> objectLayers = mapData.getChildrenByName("objectgroup");
		for (Element oLayer : objectLayers) {
			Array<Element> objects = oLayer.getChildrenByName("object");
			for (Element object : objects) {
				TiledObject objInstance = new TiledObject();
				objInstance.setName(object.getAttribute("name"));
				for (String attr : object.getAttributes().keys()) {
					switch (attr) {
					case "x":
						objInstance.setxPos(Integer.parseInt(object.getAttribute("x")));
						break;
					case "y":
						objInstance.setyPos(Integer.parseInt(object.getAttribute("y")));
						break;
					case "width":
						objInstance.setWidth(Integer.parseInt(object.getAttribute("width")));
						break;
					case "height":
						objInstance.setHeight(Integer.parseInt(object.getAttribute("height")));
						break;
					}
				}
				// NOTE(review): guard against objects with no <properties> child,
				// which previously caused a NullPointerException.
				Element propertiesElement = object.getChildByName("properties");
				if (propertiesElement != null) {
					Array<Element> properties = propertiesElement.getChildrenByName("property");
					for (Element property : properties) {
						objInstance.addProperty(property.getAttribute("name"),
								property.getAttribute("value"));
					}
				}
				Objects.add(objInstance);
			}
		}
	}

	/**
	 * Create a tile map from a .tmx file from the program Tiled
	 * @param tmx The FileHandle of the .tmx file
	 * @param pathToTilesheets The path to the folder in /assets where the tilesheets are kept
	 */
	public TileMap(FileHandle tmx, String pathToTilesheets) {
		this(tmx.readString(), pathToTilesheets);
	}
}
package gamedata.events;

import java.util.ArrayList;
import java.util.List;

import authoring_environment.GUIGrid;
import gamedata.gamecomponents.Game;
import gamedata.gamecomponents.Piece;

/**
 * Deletes all pieces with a given ID off the current level's grid.
 * @author Rica, Mike Zhu
 */
public class DeletePieceGlobalAction extends GlobalAction {

	public static final String DESCRIPTION = "Delete ";

	private Game myGame;
	private String myID;

	/**
	 * Construct this referring to the ID of the piece(s) you want to delete; the grid
	 * is searched for matching pieces when {@link #doBehavior()} is called.
	 *
	 * @param name name of this global action
	 * @param game the game whose current level's grid is searched
	 * @param ID the piece ID to delete
	 */
	public DeletePieceGlobalAction(String name, Game game, String ID) {
		super(name);
		myGame = game;
		myID = ID;
	}

	/**
	 * Removes every piece whose ID matches {@code myID} from the current grid.
	 */
	@Override
	public void doBehavior() {
		GUIGrid grid = myGame.getCurrentLevel().getGrid();
		// Collect matches first, then remove: removing from the collection while
		// iterating it risks a ConcurrentModificationException.
		List<Piece> toDelete = new ArrayList<Piece>();
		for (Piece p : grid.getPieces().getData()) {
			if (p.getID().equals(myID)) {
				toDelete.add(p);
			}
		}
		for (Piece p : toDelete) {
			grid.removePiece(p);
		}
	}
}
package gov.nih.nci.cananolab.ui.core; import gov.nih.nci.cananolab.dto.common.GridNodeBean; import gov.nih.nci.cananolab.dto.particle.composition.CompositionBean; import gov.nih.nci.cananolab.exception.BaseException; import gov.nih.nci.cananolab.exception.GridDownException; import gov.nih.nci.cananolab.service.common.GridService; import gov.nih.nci.cananolab.service.common.LookupService; import gov.nih.nci.cananolab.util.ClassUtils; import gov.nih.nci.cananolab.util.Comparators; import gov.nih.nci.cananolab.util.Constants; import gov.nih.nci.cananolab.util.DateUtils; import gov.nih.nci.cananolab.util.StringUtils; import java.net.URL; import java.util.ArrayList; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.SortedSet; import java.util.TreeSet; import javax.servlet.ServletContext; import javax.servlet.http.HttpServletRequest; import org.apache.struts.upload.FormFile; import org.apache.struts.util.LabelValueBean; /** * This class sets up information required for all forms. * * @author pansu, cais * */ public class InitSetup { private InitSetup() { } public static InitSetup getInstance() { return new InitSetup(); } /** * Queries and common_lookup table and creates a map in application context * * @param appContext * @return * @throws BaseException */ public Map<String, Map<String, SortedSet<String>>> getDefaultLookupTable( ServletContext appContext) throws BaseException { Map<String, Map<String, SortedSet<String>>> defaultLookupTable = null; if (appContext.getAttribute("defaultLookupTable") == null) { defaultLookupTable = LookupService.findAllLookups(); appContext.setAttribute("defaultLookupTable", defaultLookupTable); } else { defaultLookupTable = new HashMap<String, Map<String, SortedSet<String>>>( (Map<? 
extends String, Map<String, SortedSet<String>>>) appContext .getAttribute("defaultLookupTable")); } return defaultLookupTable; } /** * Retrieve lookup attribute for lookup name from the database and store in * the application context * * @param appContext * @param contextAttribute * @param lookupName * @param lookupAttribute * @return * @throws BaseException */ public SortedSet<String> getServletContextDefaultLookupTypes( ServletContext appContext, String contextAttribute, String lookupName, String lookupAttribute) throws BaseException { Map<String, Map<String, SortedSet<String>>> defaultLookupTable = getDefaultLookupTable(appContext); SortedSet<String> types = defaultLookupTable.get(lookupName).get( lookupAttribute); appContext.setAttribute(contextAttribute, types); return types; } /** * Retrieve lookup attribute and other attribute for lookup name from the * database and store in the session * * @param request * @param sessionAttribute * @param lookupName * @param lookupAttribute * @param otherTypeAttribute * @aparam updateSession * @return * @throws BaseException */ public SortedSet<String> getDefaultAndOtherLookupTypes( HttpServletRequest request, String sessionAttribute, String lookupName, String lookupAttribute, String otherTypeAttribute, boolean updateSession) throws BaseException { SortedSet<String> types = null; if (updateSession) { types = LookupService.getDefaultAndOtherLookupTypes(lookupName, lookupAttribute, otherTypeAttribute); request.getSession().setAttribute(sessionAttribute, types); } else { types = new TreeSet<String>((SortedSet<? 
extends String>) (request .getSession().getAttribute(sessionAttribute))); } return types; } public SortedSet<String> getReflectionDefaultAndOtherLookupTypes( HttpServletRequest request, String contextAttributeForDefaults, String sessionAttribute, String fullParentClassName, String otherFullParentClassName, boolean updateSession) throws Exception { ServletContext appContext = request.getSession().getServletContext(); SortedSet<String> defaultTypes = getServletContextDefaultTypesByReflection( appContext, contextAttributeForDefaults, fullParentClassName); SortedSet<String> types = null; if (updateSession) { types = new TreeSet<String>(defaultTypes); SortedSet<String> otherTypes = LookupService .getAllOtherObjectTypes(otherFullParentClassName); if (otherTypes != null) types.addAll(otherTypes); request.getSession().setAttribute(sessionAttribute, types); } else { types = new TreeSet<String>((SortedSet<? extends String>) (request .getSession().getAttribute(sessionAttribute))); } return types; } /** * Retrieve lookup attribute and other attribute for lookup name based on * reflection and store in the application context * * @param appContext * @param contextAttribute * @param lookupName * @param fullParentClassName * @return * @throws Exception */ public SortedSet<String> getServletContextDefaultTypesByReflection( ServletContext appContext, String contextAttribute, String fullParentClassName) throws Exception { if (appContext.getAttribute(contextAttribute) == null) { SortedSet<String> types = new TreeSet<String>(); List<String> classNames = ClassUtils .getChildClassNames(fullParentClassName); for (String name : classNames) { if (!name.contains("Other")) { String shortClassName = ClassUtils.getShortClassName(name); String displayName = ClassUtils .getDisplayName(shortClassName); types.add(displayName); } } appContext.setAttribute(contextAttribute, types); return types; } else { return new TreeSet<String>((SortedSet<? 
extends String>) appContext .getAttribute(contextAttribute)); } } public String getFileUriFromFormFile(FormFile file, String folderType, String sampleName, String submitType) { if (file != null && !StringUtils.isEmpty(file.getFileName())) { String prefix = folderType; if (!StringUtils.isEmpty(sampleName) && !StringUtils.isEmpty(submitType) && folderType.equals(Constants.FOLDER_PARTICLE)) { prefix += "/" + sampleName + "/"; prefix += StringUtils .getOneWordLowerCaseFirstLetter(submitType); } String timestamp = DateUtils.convertDateToString(new Date(), "yyyyMMdd_HH-mm-ss-SSS"); return prefix + "/" + timestamp + "_" + file.getFileName(); } else { return null; } } // check whether the value is already stored in context private Boolean isLookupInContext(HttpServletRequest request, String lookupName, String attribute, String otherAttribute, String value) throws BaseException { Map<String, Map<String, SortedSet<String>>> defaultLookupTable = getDefaultLookupTable(request .getSession().getServletContext()); SortedSet<String> defaultValues = null; if (defaultLookupTable.get(lookupName) != null) { defaultValues = defaultLookupTable.get(lookupName).get(attribute); } if (defaultValues != null && defaultValues.contains(value)) { return true; } else { SortedSet<String> otherValues = null; if (defaultLookupTable.get(lookupName) != null) { otherValues = defaultLookupTable.get(lookupName).get( otherAttribute); } if (otherValues != null && otherValues.contains(value)) { return true; } } return false; } public void persistLookup(HttpServletRequest request, String lookupName, String attribute, String otherAttribute, String value) throws BaseException { if (value == null || value.length() == 0) { return; } if (isLookupInContext(request, lookupName, attribute, otherAttribute, value)) { return; } else { LookupService.saveOtherType(lookupName, otherAttribute, value); } } public String getGridServiceUrl(HttpServletRequest request, String gridHostName) throws Exception { List<GridNodeBean> 
remoteNodes = getGridNodesInContext(request); GridNodeBean theNode = GridService.getGridNodeByHostName(remoteNodes, gridHostName); if (theNode == null) { throw new GridDownException("Grid node " + gridHostName + " is not available at this time."); } return theNode.getAddress(); } public List<GridNodeBean> getGridNodesInContext(HttpServletRequest request) throws Exception { URL localURL = new URL(request.getRequestURL().toString()); int port = (localURL.getPort() == -1) ? 80 : localURL.getPort(); String localGridURL = localURL.getProtocol() + ": + localURL.getHost() + ":" + port + "/" + Constants.GRID_SERVICE_PATH; GridDiscoveryServiceJob gridDiscoveryJob = new GridDiscoveryServiceJob(); List<GridNodeBean> gridNodes = gridDiscoveryJob.getAllGridNodes(); GridNodeBean localGrid = GridService.getGridNodeByURL(gridNodes, localGridURL); // don't remove from original list List<GridNodeBean> remoteNodes = new ArrayList<GridNodeBean>(); remoteNodes.addAll(gridNodes); if (localGrid != null) { remoteNodes.remove(localGrid); } Collections.sort(remoteNodes, new Comparators.GridNodeHostNameComparator()); request.getSession().getServletContext().setAttribute("allGridNodes", remoteNodes); return gridNodes; } public List<LabelValueBean> getLookupValuesAsOptions(String lookupName, String lookupAttribute, String otherTypeAttribute) throws Exception { List<LabelValueBean> lvBeans = new ArrayList<LabelValueBean>(); SortedSet<String> defaultValues = LookupService.findLookupValues( lookupName, lookupAttribute); // annotate the label of the default ones with *s. 
for (String name : defaultValues) { LabelValueBean lv = new LabelValueBean(name, name); lvBeans.add(lv); } SortedSet<String> otherValues = LookupService.findLookupValues( lookupName, otherTypeAttribute); for (String name : otherValues) { LabelValueBean lv = new LabelValueBean("[" + name + "]", name); lvBeans.add(lv); } return lvBeans; } public List<LabelValueBean> getReflectionDefaultAndOtherLookupTypesAsOptions( ServletContext appContext, String contextAttributeForDefaults, String fullParentClassName, String otherFullParentClassName) throws Exception { List<LabelValueBean> lvBeans = new ArrayList<LabelValueBean>(); SortedSet<String> defaultTypes = getServletContextDefaultTypesByReflection( appContext, contextAttributeForDefaults, fullParentClassName); for (String type : defaultTypes) { LabelValueBean lv = new LabelValueBean(type, type); lvBeans.add(lv); } SortedSet<String> otherTypes = LookupService .getAllOtherObjectTypes(otherFullParentClassName); if (otherTypes != null) { for (String type : otherTypes) { LabelValueBean lv = new LabelValueBean("[" + type + "]", type); lvBeans.add(lv); } } return lvBeans; } public void setStaticOptions(ServletContext appContext) { LabelValueBean[] booleanOptions = new LabelValueBean[] { new LabelValueBean("true", "1"), new LabelValueBean("false", "0") }; appContext.setAttribute("booleanOptions", booleanOptions); LabelValueBean[] stringOperands = new LabelValueBean[] { new LabelValueBean("equals", "equals"), new LabelValueBean("contains", "contains") }; appContext.setAttribute("stringOperands", stringOperands); LabelValueBean[] booleanOperands = new LabelValueBean[] { new LabelValueBean( "equals", "is") }; appContext.setAttribute("booleanOperands", booleanOperands); LabelValueBean[] numberOperands = new LabelValueBean[] { new LabelValueBean("=", "="), new LabelValueBean(">", ">"), new LabelValueBean(">=", ">="), new LabelValueBean("<", "<"), new LabelValueBean("<=", "<=") }; appContext.setAttribute("numberOperands", 
numberOperands); appContext.setAttribute("allCompositionSections", CompositionBean.ALL_COMPOSITION_SECTIONS); } }
package gov.nih.nci.cananolab.ui.core;

import gov.nih.nci.cananolab.dto.common.GridNodeBean;
import gov.nih.nci.cananolab.exception.CaNanoLabException;
import gov.nih.nci.cananolab.exception.GridAutoDiscoveryException;
import gov.nih.nci.cananolab.exception.GridDownException;
import gov.nih.nci.cananolab.service.common.GridService;
import gov.nih.nci.cananolab.service.common.LookupService;
import gov.nih.nci.cananolab.util.CaNanoLabConstants;
import gov.nih.nci.cananolab.util.ClassUtils;
import gov.nih.nci.cananolab.util.StringUtils;

import java.net.URL;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.SortedSet;
import java.util.TreeSet;

import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;

import org.apache.struts.upload.FormFile;

/**
 * This class sets up information required for all forms. It lazily loads
 * lookup tables into the servlet application context and exposes helpers for
 * display-name translation, file URI construction, and grid node discovery.
 *
 * @author pansu, cais
 */
public class InitSetup {

	private InitSetup() {
	}

	public static InitSetup getInstance() {
		return new InitSetup();
	}

	/**
	 * Returns the full lookup table (lookup name -> attribute -> values),
	 * loading it from the database on first access and caching it in the
	 * application context under "defaultLookupTable".
	 *
	 * @param appContext the servlet application context used as a cache
	 * @return the lookup table (a defensive copy when served from the cache)
	 * @throws CaNanoLabException if the lookup service fails
	 */
	@SuppressWarnings("unchecked")
	public Map<String, Map<String, SortedSet<String>>> getDefaultLookupTable(
			ServletContext appContext) throws CaNanoLabException {
		Map<String, Map<String, SortedSet<String>>> defaultLookupTable = null;
		if (appContext.getAttribute("defaultLookupTable") == null) {
			defaultLookupTable = LookupService.findAllLookups();
			appContext.setAttribute("defaultLookupTable", defaultLookupTable);
		} else {
			defaultLookupTable = new HashMap<String, Map<String, SortedSet<String>>>(
					(Map<? extends String, Map<String, SortedSet<String>>>) appContext
							.getAttribute("defaultLookupTable"));
		}
		return defaultLookupTable;
	}

	/**
	 * Returns a map between an object name and its display name, cached in the
	 * application context under "displayNameLookup".
	 *
	 * @param appContext the servlet application context used as a cache
	 * @return class name -> display name map
	 * @throws CaNanoLabException if the lookup service fails
	 */
	@SuppressWarnings("unchecked")
	public Map<String, String> getClassNameToDisplayNameLookup(
			ServletContext appContext) throws CaNanoLabException {
		Map<String, String> lookup = null;
		if (appContext.getAttribute("displayNameLookup") == null) {
			lookup = LookupService.findSingleAttributeLookupMap("displayName");
			appContext.setAttribute("displayNameLookup", lookup);
		} else {
			lookup = new HashMap<String, String>(
					(Map<? extends String, String>) (appContext
							.getAttribute("displayNameLookup")));
		}
		return lookup;
	}

	/**
	 * Returns a map between a display name and its corresponding (short)
	 * object name, cached in the application context under
	 * "displayNameReverseLookup".
	 *
	 * @param appContext the servlet application context used as a cache
	 * @return display name -> short class name map
	 * @throws Exception if the lookup service fails
	 */
	@SuppressWarnings("unchecked")
	public Map<String, String> getDisplayNameToClassNameLookup(
			ServletContext appContext) throws Exception {
		Map<String, String> lookup = null;
		if (appContext.getAttribute("displayNameReverseLookup") == null) {
			Map<String, String> displayLookup = LookupService
					.findSingleAttributeLookupMap("displayName");
			lookup = new HashMap<String, String>();
			// Invert the class name -> display name map.
			for (Map.Entry<String, String> entry : displayLookup.entrySet()) {
				lookup.put(entry.getValue(), entry.getKey());
			}
			appContext.setAttribute("displayNameReverseLookup", lookup);
		} else {
			lookup = new HashMap<String, String>(
					(Map<? extends String, String>) (appContext
							.getAttribute("displayNameReverseLookup")));
		}
		return lookup;
	}

	/**
	 * Returns a map between a display name and its corresponding full class
	 * name.
	 *
	 * Fix: this method previously cached its result under the same context
	 * attribute ("displayNameReverseLookup") as
	 * {@link #getDisplayNameToClassNameLookup(ServletContext)}, which maps to
	 * SHORT class names. Whichever method ran second returned the wrong cached
	 * map. A distinct attribute key is now used.
	 *
	 * @param appContext the servlet application context used as a cache
	 * @return display name -> fully qualified class name map
	 * @throws Exception if the lookup service or class resolution fails
	 */
	@SuppressWarnings("unchecked")
	public Map<String, String> getDisplayNameToFullClassNameLookup(
			ServletContext appContext) throws Exception {
		Map<String, String> lookup = null;
		if (appContext.getAttribute("displayNameFullClassReverseLookup") == null) {
			Map<String, String> displayLookup = LookupService
					.findSingleAttributeLookupMap("displayName");
			lookup = new HashMap<String, String>();
			for (Map.Entry<String, String> entry : displayLookup.entrySet()) {
				String className = entry.getKey();
				String fullClassName = ClassUtils.getFullClass(className)
						.getCanonicalName();
				lookup.put(entry.getValue(), fullClassName);
			}
			appContext.setAttribute("displayNameFullClassReverseLookup", lookup);
		} else {
			lookup = new HashMap<String, String>(
					(Map<? extends String, String>) (appContext
							.getAttribute("displayNameFullClassReverseLookup")));
		}
		return lookup;
	}

	/**
	 * Returns the display name for the given object name, or an empty string
	 * when no mapping exists.
	 */
	public String getDisplayName(String objectName, ServletContext appContext)
			throws CaNanoLabException {
		Map<String, String> lookup = getClassNameToDisplayNameLookup(appContext);
		if (lookup.get(objectName) != null) {
			return lookup.get(objectName);
		} else {
			return "";
		}
	}

	/**
	 * Returns the (short) object name for the given display name, or an empty
	 * string when no mapping exists.
	 */
	public String getObjectName(String displayName, ServletContext appContext)
			throws Exception {
		Map<String, String> lookup = getDisplayNameToClassNameLookup(appContext);
		if (lookup.get(displayName) != null) {
			return lookup.get(displayName);
		} else {
			return "";
		}
	}

	/**
	 * Retrieve lookup attribute values for a lookup name from the database and
	 * store them in the application context under the given attribute name.
	 *
	 * @param appContext the servlet application context
	 * @param contextAttribute the attribute name under which to cache the set
	 * @param lookupName the lookup table name
	 * @param lookupAttribute the attribute within the lookup table
	 * @return the sorted set of values
	 * @throws CaNanoLabException if the lookup service fails
	 */
	public SortedSet<String> getServletContextDefaultLookupTypes(
			ServletContext appContext, String contextAttribute,
			String lookupName, String lookupAttribute)
			throws CaNanoLabException {
		Map<String, Map<String, SortedSet<String>>> defaultLookupTable = getDefaultLookupTable(appContext);
		SortedSet<String> types = defaultLookupTable.get(lookupName).get(
				lookupAttribute);
		appContext.setAttribute(contextAttribute, types);
		return types;
	}

	/**
	 * Retrieve lookup attribute and other attribute values for a lookup name
	 * from the database and store them in the session.
	 *
	 * @param request the current request (its session holds the cache)
	 * @param sessionAttribute the session attribute name to cache under
	 * @param lookupName the lookup table name
	 * @param lookupAttribute the default attribute
	 * @param otherTypeAttribute the user-entered "other" attribute
	 * @param updateSession when true, reload from the database and refresh
	 *        the session; when false, serve a copy from the session
	 * @return the sorted set of values
	 * @throws CaNanoLabException if the lookup service fails
	 */
	@SuppressWarnings("unchecked")
	public SortedSet<String> getDefaultAndOtherLookupTypes(
			HttpServletRequest request, String sessionAttribute,
			String lookupName, String lookupAttribute,
			String otherTypeAttribute, boolean updateSession)
			throws CaNanoLabException {
		SortedSet<String> types = null;
		if (updateSession) {
			types = LookupService.getDefaultAndOtherLookupTypes(lookupName,
					lookupAttribute, otherTypeAttribute);
			request.getSession().setAttribute(sessionAttribute, types);
		} else {
			types = new TreeSet<String>((SortedSet<? extends String>) (request
					.getSession().getAttribute(sessionAttribute)));
		}
		return types;
	}

	/**
	 * Returns the display names of all non-"Other" Function subclasses, cached
	 * in the application context under "defaultFunctionTypes".
	 */
	@SuppressWarnings("unchecked")
	public List<String> getDefaultFunctionTypes(ServletContext appContext)
			throws Exception {
		if (appContext.getAttribute("defaultFunctionTypes") == null) {
			List<String> functionTypes = new ArrayList<String>();
			List<String> functionClassNames = ClassUtils
					.getChildClassNames("gov.nih.nci.cananolab.domain.particle.samplecomposition.Function");
			for (String name : functionClassNames) {
				// "Other" subclasses represent free-text entries, not defaults.
				if (!name.contains("Other")) {
					String displayName = InitSetup.getInstance()
							.getDisplayName(ClassUtils.getShortClassName(name),
									appContext);
					functionTypes.add(displayName);
				}
			}
			appContext.setAttribute("defaultFunctionTypes", functionTypes);
			return functionTypes;
		} else {
			return new ArrayList<String>((List<? extends String>) appContext
					.getAttribute("defaultFunctionTypes"));
		}
	}

	/**
	 * Retrieve the display names of all non-"Other" subclasses of the given
	 * parent class (found via reflection) and cache them in the application
	 * context.
	 *
	 * @param appContext the servlet application context
	 * @param contextAttribute the attribute name under which to cache the set
	 * @param fullParentClassName fully qualified parent class name
	 * @return the sorted set of display names
	 * @throws Exception if reflection or the lookup service fails
	 */
	@SuppressWarnings("unchecked")
	public SortedSet<String> getServletContextDefaultTypesByReflection(
			ServletContext appContext, String contextAttribute,
			String fullParentClassName) throws Exception {
		if (appContext.getAttribute(contextAttribute) == null) {
			SortedSet<String> types = new TreeSet<String>();
			List<String> classNames = ClassUtils
					.getChildClassNames(fullParentClassName);
			for (String name : classNames) {
				if (!name.contains("Other")) {
					String displayName = InitSetup.getInstance()
							.getDisplayName(ClassUtils.getShortClassName(name),
									appContext);
					types.add(displayName);
				}
			}
			appContext.setAttribute(contextAttribute, types);
			return types;
		} else {
			return new TreeSet<String>((SortedSet<? extends String>) appContext
					.getAttribute(contextAttribute));
		}
	}

	/**
	 * Builds a timestamped file URI of the form
	 * folderType[/particleName/submitType]/timestamp_fileName for an uploaded
	 * form file, or null when no file was uploaded.
	 */
	public String getFileUriFromFormFile(FormFile file, String folderType,
			String particleName, String submitType) {
		// Guard getFileName() against null as well as empty (NPE hardening).
		if (file != null && file.getFileName() != null
				&& file.getFileName().length() > 0) {
			String prefix = folderType;
			if (particleName != null && submitType != null
					&& folderType.equals(CaNanoLabConstants.FOLDER_PARTICLE)) {
				prefix += "/" + particleName + "/";
				prefix += StringUtils
						.getOneWordLowerCaseFirstLetter(submitType);
			}
			String timestamp = StringUtils.convertDateToString(new Date(),
					"yyyyMMdd_HH-mm-ss-SSS");
			return prefix + "/" + timestamp + "_" + file.getFileName();
		} else {
			return null;
		}
	}

	/**
	 * Checks whether the value is already stored in the cached lookup table,
	 * under either the default attribute or the "other" attribute.
	 */
	private Boolean isLookupInContext(HttpServletRequest request,
			String lookupName, String attribute, String otherAttribute,
			String value) throws CaNanoLabException {
		Map<String, Map<String, SortedSet<String>>> defaultLookupTable = getDefaultLookupTable(request
				.getSession().getServletContext());
		SortedSet<String> defaultValues = null;
		if (defaultLookupTable.get(lookupName) != null) {
			defaultValues = defaultLookupTable.get(lookupName).get(attribute);
		}
		if (defaultValues != null && defaultValues.contains(value)) {
			return true;
		} else {
			SortedSet<String> otherValues = null;
			if (defaultLookupTable.get(lookupName) != null) {
				otherValues = defaultLookupTable.get(lookupName).get(
						otherAttribute);
			}
			if (otherValues != null && otherValues.contains(value)) {
				return true;
			}
		}
		return false;
	}

	/**
	 * Persists a user-entered "other" lookup value unless it is blank or
	 * already known in the cached lookup table.
	 */
	public void persistLookup(HttpServletRequest request, String lookupName,
			String attribute, String otherAttribute, String value)
			throws CaNanoLabException {
		if (value == null || value.length() == 0) {
			return;
		}
		if (isLookupInContext(request, lookupName, attribute, otherAttribute,
				value)) {
			return;
		} else {
			LookupService.saveOtherType(lookupName, otherAttribute, value);
		}
	}

	/**
	 * Returns the service address of the grid node with the given host name.
	 *
	 * @throws GridDownException if the node is not currently discoverable
	 */
	public String getGridServiceUrl(HttpServletRequest request,
			String gridHostName) throws Exception {
		List<GridNodeBean> remoteNodes = getGridNodesInContext(request);
		GridNodeBean theNode = GridService.getGridNodeByHostName(remoteNodes,
				gridHostName);
		if (theNode == null) {
			throw new GridDownException("Grid node " + gridHostName
					+ " is not available at this time.");
		}
		return theNode.getAddress();
	}

	/**
	 * Discovers all grid nodes, removes the local node from the list, caches
	 * the list in the application context under "allGridNodes", and returns
	 * it.
	 *
	 * @throws GridAutoDiscoveryException if no remote grid nodes are found
	 */
	public List<GridNodeBean> getGridNodesInContext(HttpServletRequest request)
			throws Exception {
		URL localURL = new URL(request.getRequestURL().toString());
		// URL.getPort() returns -1 when the URL has no explicit port; default
		// to 80 so the local grid URL matches the discovered node addresses.
		int port = (localURL.getPort() == -1) ? 80 : localURL.getPort();
		// NOTE: the "://" literal was corrupted in this copy of the file and
		// has been reconstructed (protocol://host:port/path).
		String localGridURL = localURL.getProtocol() + "://"
				+ localURL.getHost() + ":" + port + "/"
				+ CaNanoLabConstants.GRID_SERVICE_PATH;
		GridDiscoveryServiceJob gridDiscoveryJob = new GridDiscoveryServiceJob();
		List<GridNodeBean> gridNodes = gridDiscoveryJob.getAllGridNodes();
		if (gridNodes.isEmpty()) {
			throw new GridAutoDiscoveryException("No remote grid nodes found.");
		}
		// Remove the local grid from the list.
		GridNodeBean localGrid = GridService.getGridNodeByURL(gridNodes,
				localGridURL);
		if (localGrid != null) {
			gridNodes.remove(localGrid);
		}
		request.getSession().getServletContext().setAttribute("allGridNodes",
				gridNodes);
		return gridNodes;
	}
}
package org.commcare.suite.model;

import org.javarosa.core.services.storage.Persistable;
import org.javarosa.core.util.externalizable.DeserializationException;
import org.javarosa.core.util.externalizable.ExtUtil;
import org.javarosa.core.util.externalizable.ExtWrapList;
import org.javarosa.core.util.externalizable.ExtWrapMap;
import org.javarosa.core.util.externalizable.ExtWrapMapPoly;
import org.javarosa.core.util.externalizable.PrototypeFactory;

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Hashtable;
import java.util.Vector;

/**
 * Suites are containers for a set of actions,
 * detail definitions, and UI information. A suite
 * generally contains a set of form entry actions
 * related to the same case ID, sometimes including
 * referrals.
 *
 * @author ctsims
 */
public class Suite implements Persistable {

    /** Storage namespace key under which Suite records are persisted. */
    public static final String STORAGE_KEY = "SUITE";

    private int version;

    // -1 marks a record that has not yet been assigned a storage ID.
    private int recordId = -1;

    /**
     * Detail id -> Detail Object
     */
    private Hashtable<String, Detail> details;

    /**
     * Entry id (also the same for menus) -> Entry Object
     */
    private Hashtable<String, Entry> entries;

    private Vector<Menu> menus;

    // Required no-arg constructor for deserialization via PrototypeFactory.
    @SuppressWarnings("unused")
    public Suite() {
    }

    /**
     * Creates a fully populated suite.
     *
     * @param version suite version number
     * @param details detail id -> detail definition
     * @param entries entry id -> entry action
     * @param menus   menus defining how the entries are reached
     */
    public Suite(int version, Hashtable<String, Detail> details,
            Hashtable<String, Entry> entries,
            Vector<Menu> menus) {
        this.version = version;
        this.details = details;
        this.entries = entries;
        this.menus = menus;
    }

    @Override
    public int getID() {
        return recordId;
    }

    @Override
    public void setID(int ID) {
        recordId = ID;
    }

    /**
     * @return The menus which define how to access the actions
     * which are available in this suite.
     */
    public Vector<Menu> getMenus() {
        return menus;
    }

    /**
     * WOAH! UNSAFE! Copy, maybe? But this is _wicked_ dangerous.
     *
     * NOTE(review): this hands out the internal mutable table directly;
     * callers can corrupt the suite's state through it.
     *
     * @return The set of entry actions which are defined by this
     * suite, indexed by their id (which is present in the menu
     * definitions).
     */
    public Hashtable<String, Entry> getEntries() {
        return entries;
    }

    /** @return the entry action registered under the given id, or null. */
    public Entry getEntry(String id) {
        return entries.get(id);
    }

    /**
     * @param id The String ID of a detail definition
     * @return A Detail definition associated with the provided
     * id.
     */
    public Detail getDetail(String id) {
        return details.get(id);
    }

    // Deserialization: field order here must exactly mirror writeExternal.
    @Override
    public void readExternal(DataInputStream in, PrototypeFactory pf)
            throws IOException, DeserializationException {
        this.recordId = ExtUtil.readInt(in);
        this.version = ExtUtil.readInt(in);
        this.details = (Hashtable<String, Detail>)ExtUtil.read(in,
                new ExtWrapMap(String.class, Detail.class), pf);
        // ExtWrapMapPoly reads a map with polymorphic values, so only a raw
        // Hashtable cast is possible here.
        this.entries = (Hashtable)ExtUtil.read(in,
                new ExtWrapMapPoly(String.class, true), pf);
        this.menus = (Vector<Menu>)ExtUtil.read(in,
                new ExtWrapList(Menu.class), pf);
    }

    // Serialization: keep in lock-step with readExternal above.
    @Override
    public void writeExternal(DataOutputStream out) throws IOException {
        ExtUtil.writeNumeric(out, recordId);
        ExtUtil.writeNumeric(out, version);
        ExtUtil.write(out, new ExtWrapMap(details));
        ExtUtil.write(out, new ExtWrapMapPoly(entries));
        ExtUtil.write(out, new ExtWrapList(menus));
    }
}
package io.spine.annotation; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; /** * Annotates a program element (class, method, package etc.) as an element of * Service Provider Interface (SPI). * * <p>SPI is used to enable framework extension and replaceable components * (implement a new storage, etc). * * <p>See "Effective Java 2nd Edition", chapter 2, item 1 for more info about * service provider framework pattern. */ @SPI @Retention(RetentionPolicy.SOURCE) @Target({ ElementType.ANNOTATION_TYPE, ElementType.CONSTRUCTOR, ElementType.FIELD, ElementType.METHOD, ElementType.PACKAGE, ElementType.TYPE}) @Documented public @interface SPI { }
package info.guardianproject.util;

import android.text.Spannable;
import android.text.SpannableString;
import android.text.style.CharacterStyle;
import android.text.style.ClickableSpan;
import android.text.style.URLSpan;
import android.text.util.Linkify;
import android.text.util.Linkify.TransformFilter;
import android.widget.TextView;

import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Helpers for turning URI-like substrings of a {@link TextView} into
 * clickable links, covering schemes Android's built-in {@link Linkify} does
 * not handle (geo:, market:, xmpp:, openpgp4fpr:, bridge:, bitcoin:), plus
 * Twitter handles and hashtags.
 */
public class LinkifyHelper {

    // Patterns are compiled once and reused; they are immutable, hence final.
    private static final Pattern bitcoin = Pattern
            .compile("bitcoin:[1-9a-km-zA-HJ-NP-Z]{27,34}(\\?[a-zA-Z0-9$\\-_.+!*'(),%:;@&=]*)?");
    private static final Pattern geo = Pattern
            .compile("geo:[-0-9.]+,[-0-9.]+[^ \t\n\"\':]*");
    // NOTE(review): this literal was truncated in this copy of the file and has
    // been reconstructed to match the sibling xmpp/bridge patterns - confirm
    // against upstream.
    private static final Pattern market = Pattern
            .compile("market:[^ \t\n\"\':,<>]+");
    private static final Pattern openpgp4fpr = Pattern
            .compile("openpgp4fpr:[A-Za-z0-9]{8,40}");
    private static final Pattern xmpp = Pattern
            .compile("xmpp:[^ \t\n\"\':,<>]+");
    private static final Pattern twitterHandle = Pattern
            .compile("@([A-Za-z0-9_-]+)");
    private static final Pattern hashtag = Pattern
            .compile("#([A-Za-z0-9_-]+)");
    private static final Pattern bridge = Pattern
            .compile("bridge:[^ \t\n\"\':,<>]+");

    // Replaces the whole match with its first capture group, e.g. "@name"
    // becomes "name" before the URL prefix is prepended.
    static TransformFilter returnMatchFilter = new TransformFilter() {
        @Override
        public final String transformUrl(final Matcher match, String url) {
            return match.group(1);
        }
    };

    /*
     * NOTE(review): the original comment here was cut off mid-sentence
     * ("Right now, if there is no app to handle ..."); presumably it warned
     * that tapping a link with no installed handler fails - confirm intent.
     */
    public static void addLinks(TextView text,
            SpanConverter<URLSpan, ClickableSpan> converter) {
        Linkify.addLinks(text, Linkify.ALL);
        Linkify.addLinks(text, geo, null);
        Linkify.addLinks(text, market, null);
        Linkify.addLinks(text, openpgp4fpr, null);
        Linkify.addLinks(text, xmpp, null);
        Linkify.addLinks(text, twitterHandle, "https://twitter.com/", null,
                returnMatchFilter);
        Linkify.addLinks(text, hashtag, "https://twitter.com/hashtag/", null,
                returnMatchFilter);
        // Rewrite every URLSpan through the caller-supplied converter so the
        // app controls how link clicks are handled.
        text.setText(replaceAll(text.getText(), URLSpan.class, converter));
    }

    /**
     * These are clickable links that will always be safe to click on, whether
     * or not ChatSecure is using Tor or not.
     *
     * @param text the view whose text should get bridge: links
     */
    public static void addTorSafeLinks(TextView text) {
        Linkify.addLinks(text, bridge, null);
    }

    /**
     * Do not create this static utility class.
     */
    private LinkifyHelper() {
    }

    /**
     * Returns a copy of {@code original} in which every span of
     * {@code sourceType} has been replaced (at the same range and with the
     * same flags) by the span produced by {@code converter}.
     */
    public static <A extends CharacterStyle, B extends CharacterStyle> Spannable replaceAll(
            CharSequence original, Class<A> sourceType,
            SpanConverter<A, B> converter) {
        SpannableString result = new SpannableString(original);
        A[] spans = result.getSpans(0, result.length(), sourceType);
        for (A span : spans) {
            int start = result.getSpanStart(span);
            int end = result.getSpanEnd(span);
            int flags = result.getSpanFlags(span);
            result.removeSpan(span);
            result.setSpan(converter.convert(span), start, end, flags);
        }
        return (result);
    }

    /** Converts one span type into another; see {@link #replaceAll}. */
    public interface SpanConverter<A extends CharacterStyle, B extends CharacterStyle> {
        B convert(A span);
    }
}
package info.meoblast001.thugaim;

import android.content.Context;
import android.content.res.Resources;
import android.content.res.XmlResourceParser;
import android.graphics.BitmapFactory;
import android.graphics.Bitmap;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PointF;

import info.meoblast001.thugaim.engine.*;
import info.meoblast001.thugaim.npc.*;

import java.io.IOException;
import java.util.Vector;

import org.xmlpull.v1.XmlPullParserException;

/**
Top level of game code. Manages all gameplay elements either directly or
indirectly.
*/
public class ThugaimRuntime implements IGameRuntime
{
  /**
  Exception thrown if levels cannot be loaded.
  */
  public static class LoadLevelsException extends Exception
  {
    public LoadLevelsException()
    {
    }

    // Cause-preserving constructor so the original parse/IO failure is not
    // lost (previously the cause was discarded).
    public LoadLevelsException(Throwable cause)
    {
      super(cause);
    }
  }

  // How long the "level complete" screen is shown before the game ends.
  private static final long SHOW_LEVEL_COMPLETE_BEFORE_END_MILIS = 3000;
  // Every CHECKPOINT_INTERVAL-th level is a checkpoint.
  public static final int CHECKPOINT_INTERVAL = 2;

  private Engine engine;
  private Context context;
  private World world;
  private StationGraph station_graph;
  private Player player;
  private HealthBar health_bar;
  private boolean player_won = false, player_lost = false;
  // Long.MAX_VALUE means the "level complete" timer has not started yet.
  private long started_level_complete_millis = Long.MAX_VALUE;

  //Level information. Static: level progress survives runtime re-creation.
  private static int current_level = 0;
  private static Vector<LevelDescriptor> levels = null;

  //Checkpoint information.
  private static int checkpoint_level = 0;

  //Music enabled status.
  private static boolean music_enabled = true;

  /**
  Constructs game runtime. Failure is fatal.
  @throws LoadLevelsException If levels cannot be loaded.
  */
  public ThugaimRuntime(Resources resources) throws LoadLevelsException
  {
    if (levels == null)
      loadLevels(resources);
  }

  /**
  Builds the world, player, NPCs, HUD, and music for the current level.
  @param engine Game engine driving this runtime.
  */
  public void init(Engine engine)
  {
    this.engine = engine;
    context = engine.getGraphics().getContext();

    //Get the level descriptor for this level.
    LevelDescriptor level = getCurrentLevelDescriptor();

    //Only show what's in the play area.
    int half_play_size = level.getPlaySize() / 2;
    engine.getGraphics().enableClip(-half_play_size, half_play_size,
                                    half_play_size, -half_play_size);

    world = new World(engine, level.getPlaySize());
    station_graph = new StationGraph(engine, world, level.getStations(),
                                     level.getPlaySize());

    player = new Player(engine, station_graph);
    world.insertActor(player);
    world.focusOnActor("player");

    HydrogenFighter.generateAll(engine, world, level.getPlaySize(),
                                station_graph, level.getHydrogenFighters());
    HeliumFighter.generateAll(engine, world, level.getPlaySize(),
                              station_graph, level.getHeliumFighters());

    health_bar = new HealthBar(engine.getGraphics(), player);

    PlayAreaShield.generateAll(engine, world, level.getPlaySize());

    //Start music if it exists and the user selected music to be played, else
    //stop any currently playing music.
    if (level.getMusic() != null && music_enabled)
    {
      int res_id = context.getResources().getIdentifier(level.getMusic(),
        "raw", context.getPackageName());
      engine.getAudio().startMusic(res_id);
    }
    else
      engine.getAudio().stopMusic();
  }

  /**
  Advances the simulation one frame and updates win/lose state.
  @param millisecond_delta Milliseconds since the previous frame.
  @param rotation Player rotation input.
  @param tapped Whether the screen was tapped this frame.
  */
  public void update(long millisecond_delta, float rotation, boolean tapped)
  {
    station_graph.update();
    world.update(millisecond_delta, rotation, tapped);
    health_bar.update();
    displayLevelNumber();

    //Player won if there are no stations remaining and the player didn't
    //already lose.
    if (station_graph.getStations().length == 0 && !player_lost)
      player_won = true;
    //Player lost if it's no longer in the world and has not already won.
    if (player.getWorld() == null && !player_won)
      player_lost = true;

    if (player_won)
      displayLevelComplete();
  }

  /**
  @return False once the player has lost, or has won and the "level complete"
  screen has been shown long enough; true otherwise.
  */
  public boolean isRunning()
  {
    return !((player_won && System.currentTimeMillis() -
              started_level_complete_millis >
              SHOW_LEVEL_COMPLETE_BEFORE_END_MILIS) || player_lost);
  }

  public boolean didPlayerWin()
  {
    return player_won;
  }

  /**
  Does a level proceed the current level?
  @return True if yes, false if no.
  */
  public boolean hasNextLevel()
  {
    return current_level + 1 < levels.size();
  }

  /**
  Select level to use.
  @param current_level 0-based level number.
  @return True if level exists, else level does not exist.
  */
  public boolean setLevel(int current_level)
  {
    if (current_level < levels.size())
    {
      //Qualify with the class name: the parameter shadows the static field
      //(previously written as "this.current_level", an instance-qualified
      //static access).
      ThugaimRuntime.current_level = current_level;
      //If the level is higher than the last checkpoint and is a checkpoint
      //itself, set this checkpoint as reached.
      if (current_level > checkpoint_level &&
          (current_level + 1) % CHECKPOINT_INTERVAL == 0)
        checkpoint_level = current_level;
      return true;
    }
    else
      return false;
  }

  /**
  Return the current level descriptor.
  @return Level descriptor
  */
  public LevelDescriptor getCurrentLevelDescriptor()
  {
    return levels.get(current_level);
  }

  /**
  Return the current checkpoint. This is the level to which the game returns
  after game over.
  @return The checkpoint.
  */
  public static int getCheckpointLevel()
  {
    return checkpoint_level;
  }

  /**
  Load all of the level descriptions from the XML file.
  @param resources Activity's resources.
  @throws LoadLevelsException If levels file cannot be loaded.
  */
  private static boolean loadLevels(Resources resources)
    throws LoadLevelsException
  {
    try
    {
      levels = new Vector<LevelDescriptor>();

      XmlResourceParser xml_parser = resources.getXml(R.xml.levels);
      int event_type = xml_parser.getEventType();
      while (event_type != XmlResourceParser.END_DOCUMENT)
      {
        if (event_type == XmlResourceParser.START_TAG &&
            xml_parser.getName().equals("level"))
        {
          LevelDescriptor level = new LevelDescriptor();
          for (int i = 0; i < xml_parser.getAttributeCount(); ++i)
          {
            String attr_name = xml_parser.getAttributeName(i);
            String attr_value = xml_parser.getAttributeValue(i);
            if (attr_name.equals("music"))
              level.setMusic(attr_value);
            else if (attr_name.equals("stations"))
              level.setStations(Integer.parseInt(attr_value));
            else if (attr_name.equals("hydrogen_fighters"))
              level.setHydrogenFighters(Integer.parseInt(attr_value));
            else if (attr_name.equals("helium_fighters"))
              level.setHeliumFighters(Integer.parseInt(attr_value));
            else if (attr_name.equals("play_size"))
              level.setPlaySize(Integer.parseInt(attr_value));
            //Else ignore this attribute.
          }
          levels.add(level);
        }
        event_type = xml_parser.next();
      }

      return true;
    }
    catch (XmlPullParserException | IOException | NumberFormatException e)
    {
      //Chain the cause so the original failure is diagnosable.
      throw new LoadLevelsException(e);
    }
  }

  /**
  Set whether music will be played in future initialised runtimes.
  @param enabled True if music should be played, else false.
  */
  public static void setMusicEnabled(boolean enabled)
  {
    music_enabled = enabled;
  }

  /**
  Displays the current level at the bottom-left of the screen.
  */
  private void displayLevelNumber()
  {
    Paint fill = new Paint();
    fill.setColor(Color.BLACK);
    Paint stroke = new Paint();
    stroke.setColor(Color.WHITE);

    Graphics graphics = engine.getGraphics();
    graphics.drawTextHud(context.getString(R.string.level_number_indicator,
                                           current_level + 1),
                         10, graphics.getHeight() - 10, 30.0f,
                         Paint.Align.LEFT, fill, stroke);
  }

  /**
  Displays the level complete screen. If called first time, sets the timer to
  end the game.
  */
  private void displayLevelComplete()
  {
    if (started_level_complete_millis == Long.MAX_VALUE)
      started_level_complete_millis = System.currentTimeMillis();

    Paint fill = new Paint();
    fill.setARGB(255, 0, 100, 0);
    Paint stroke = new Paint();
    stroke.setColor(Color.WHITE);

    Graphics graphics = engine.getGraphics();
    graphics.drawTextHud(context.getString(R.string.level_complete),
                         graphics.getWidth() / 2, graphics.getHeight() / 2,
                         30.0f, Paint.Align.CENTER, fill, stroke);
  }
}
package io.flutter.samples;

import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import com.intellij.execution.OutputListener;
import com.intellij.execution.process.ProcessEvent;
import com.intellij.ide.impl.ProjectUtil;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.projectImport.ProjectOpenProcessor;
import io.flutter.FlutterBundle;
import io.flutter.FlutterUtils;
import io.flutter.module.FlutterModuleBuilder;
import io.flutter.module.FlutterProjectType;
import io.flutter.pub.PubRoot;
import io.flutter.sdk.FlutterCreateAdditionalSettings;
import io.flutter.sdk.FlutterSdk;
import org.jetbrains.annotations.NotNull;

import java.io.BufferedInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Locale;

/**
 * Loads the Flutter API sample index and creates sample projects from it.
 */
public class FlutterSampleManager {

  private static final boolean DISABLE_SAMPLES = true;

  private static final String SNIPPETS_REMOTE_INDEX_URL = "https://docs.flutter.io/snippets/index.json";

  private static final Logger LOG = Logger.getInstance(FlutterSampleManager.class);

  private static List<FlutterSample> SAMPLES;

  /**
   * Returns the sample list (empty while samples are disabled). The list is
   * loaded once and cached for the lifetime of the IDE process.
   */
  public static List<FlutterSample> getSamples() {
    if (DISABLE_SAMPLES) {
      return Collections.emptyList();
    }
    if (SAMPLES == null) {
      // TODO: if the remote index can change while the IDE runs, consider a
      // fresh read on each access instead of caching.
      SAMPLES = loadSamples();
    }
    return SAMPLES;
  }

  /**
   * Reads and parses the JSON sample index at the given URL.
   *
   * Fixes: the input stream is now closed (previously leaked), and the bytes
   * are decoded in one pass as UTF-8 (previously each 1024-byte chunk was
   * decoded separately with the platform charset, corrupting any multi-byte
   * character split across a chunk boundary).
   *
   * @throws IOException if the index cannot be read
   */
  private static JsonArray readSampleIndex(final URL sampleUrl) throws IOException {
    final ByteArrayOutputStream contents = new ByteArrayOutputStream();
    try (BufferedInputStream in = new BufferedInputStream(sampleUrl.openStream())) {
      final byte[] bytes = new byte[1024];
      int bytesRead;
      while ((bytesRead = in.read(bytes)) != -1) {
        contents.write(bytes, 0, bytesRead);
      }
    }
    final String json = new String(contents.toByteArray(), StandardCharsets.UTF_8);
    return new JsonParser().parse(json).getAsJsonArray();
  }

  /**
   * Tries the remote snippets index first, then the bundled local copy;
   * returns an empty array when neither is available.
   */
  private static JsonArray readSampleIndex() {
    // Try fetching snippets index remotely, and fall back to local cache.
    try {
      return readSampleIndex(new URL(SNIPPETS_REMOTE_INDEX_URL));
    }
    catch (IOException ignored) {
      // Guard against a missing bundled resource: getResource() returns null,
      // which previously caused an uncaught NPE inside readSampleIndex(URL).
      final URL localIndex = FlutterSampleManager.class.getResource("index.json");
      if (localIndex != null) {
        try {
          return readSampleIndex(localIndex);
        }
        catch (IOException e) {
          FlutterUtils.warn(LOG, e);
        }
      }
    }
    return new JsonArray();
  }

  /** Parses the index JSON into {@link FlutterSample}s sorted by display label. */
  private static List<FlutterSample> loadSamples() {
    final List<FlutterSample> samples = new ArrayList<>();
    final JsonArray jsonArray = readSampleIndex();
    for (JsonElement element : jsonArray) {
      final JsonObject sample = element.getAsJsonObject();
      samples.add(new FlutterSample(sample.getAsJsonPrimitive("element").getAsString(),
                                    sample.getAsJsonPrimitive("library").getAsString(),
                                    sample.getAsJsonPrimitive("id").getAsString(),
                                    sample.getAsJsonPrimitive("file").getAsString(),
                                    sample.getAsJsonPrimitive("sourcePath").getAsString(),
                                    sample.getAsJsonPrimitive("description").getAsString()));
    }

    // Sort by display label.
    samples.sort(Comparator.comparing(FlutterSample::getDisplayLabel));
    return samples;
  }

  /**
   * Creates and opens a new project for the given sample.
   *
   * @return null on success, or a human-readable error message on failure
   *         (the existing contract callers depend on)
   */
  public static String createSampleProject(@NotNull FlutterSample sample, @NotNull Project project) {
    final FlutterSdk sdk = FlutterSdk.getFlutterSdk(project);
    if (sdk == null) {
      return "unable to find Flutter SDK";
    }

    final File projectRoot = new File(ProjectUtil.getBaseDir());
    final String projectNamePrefix = deriveValidPackageName(sample.getElement());
    final String projectDir = FileUtil.createSequentFileName(projectRoot, projectNamePrefix + "_sample", "");
    final File dir = new File(projectRoot, projectDir);
    if (!dir.mkdir()) {
      return "unable to create project root: " + dir.getPath();
    }

    final VirtualFile baseDir = LocalFileSystem.getInstance().refreshAndFindFileByIoFile(dir);
    if (baseDir == null) {
      return "unable to find project root (" + dir.getPath() + ") on refresh";
    }

    final OutputListener outputListener = new OutputListener() {
      @Override
      public void onTextAvailable(@NotNull ProcessEvent event, @NotNull Key outputType) {
        // TODO(pq): consider showing progress in the status line.
      }

      @Override
      public void processTerminated(@NotNull ProcessEvent event) {
        // TODO(pq): handle event.getExitCode().
      }
    };

    // Return value (the PubRoot) is not needed here; the call is made for its
    // side effect of generating the project on disk.
    FlutterModuleBuilder.runFlutterCreateWithProgress(baseDir, sdk, project, outputListener, getCreateSettings(sample));

    final ProjectOpenProcessor openProcessor = ProjectOpenProcessor.getImportProvider(baseDir);
    if (openProcessor == null) {
      return "unable to find a processor to finish opening the project: " + baseDir.getPath();
    }
    openProcessor.doOpenProject(baseDir, null, true);

    return null;
  }

  /**
   * Derives a valid Dart package name from a sample element name by taking the
   * leading identifier and lower-casing it (locale-independent, so e.g. a
   * Turkish default locale cannot produce an invalid name).
   */
  private static String deriveValidPackageName(String name) {
    return name.split("\\.")[0].toLowerCase(Locale.ROOT);
  }

  /** Builds the `flutter create` settings used for sample projects. */
  private static FlutterCreateAdditionalSettings getCreateSettings(@NotNull FlutterSample sample) {
    return new FlutterCreateAdditionalSettings.Builder()
      .setDescription(sample.getElement() + " Sample Project")
      .setType(FlutterProjectType.APP)
      .setKotlin(false)
      .setOrg(FlutterBundle.message("flutter.module.create.settings.org.default_text"))
      .setSwift(false)
      .setSampleContent(sample)
      .setOffline(false)
      .build();
  }
}
package it.unimi.dsi.sux4j.bits; import static it.unimi.dsi.bits.LongBigArrayBitVector.bits; import static it.unimi.dsi.bits.LongBigArrayBitVector.word; import static it.unimi.dsi.fastutil.BigArrays.get; import java.io.IOException; import java.io.ObjectInputStream; import it.unimi.dsi.bits.Fast; import it.unimi.dsi.bits.LongArrayBitVector; import it.unimi.dsi.bits.LongBigArrayBitVector; import it.unimi.dsi.fastutil.Arrays; import it.unimi.dsi.fastutil.longs.LongArrays; import it.unimi.dsi.fastutil.longs.LongBigArrays; import it.unimi.dsi.fastutil.longs.LongBigList; /** * A big version of {@link SimpleSelect} that can only be used with a {@link LongBigArrayBitVector} * (or {@linkplain LongBigArrays long big arrays}). */ public class SimpleBigSelect implements Select { private static final long serialVersionUID = 1L; private static final int MAX_ONES_PER_INVENTORY = 8192; private static final int MAX_LOG2_LONGWORDS_PER_SUBINVENTORY = 3; /** The maximum size of span to qualify for a subinventory made of 16-bit offsets. */ private static final int MAX_SPAN = (1 << 16); /** The underlying big bit vector. */ private final LongBigArrayBitVector bitVector; /** The number of ones in {@link #bitVector}. */ private final long numOnes; /** The number of words in {@link #bitVector}. */ private final long numWords; /** The cached result of {@link LongBigArrayBitVector#bigBits() bitVector.bigBits()}. */ private transient long[][] bits; /** The first-level inventory containing information about one bit each {@link #onesPerInventory}. * If the entry is nonnegative, it is the rank of the bit and subsequent information is recorded in {@link #subinventory16} * as offsets of one bit each {@link #onesPerSub16} (then, sequential search is necessary). Otherwise, a negative value * means that offsets are too large and they have been recorded as 64-bit values. If {@link #onesPerSub64} is 1, then offsets are directly stored * into {@link #subinventory}. 
Otherwise, the first {@link #subinventory} entry is actually a pointer to {@link #exactSpill}, * where the offsets can be found. */ private final long[] inventory; /** The logarithm of the number of ones per {@link #inventory} entry. */ private final int log2OnesPerInventory; /** The number of ones per {@link #inventory} entry. */ private final long onesPerInventory; /** The mask associated to the number of ones per {@link #inventory} entry. */ private final long onesPerInventoryMask; /** The second-level inventory (records the offset of each bit w.r.t. the first-level inventory). */ private final long[] subinventory; /** Exposes {@link #subinventory} as a list of 16-bits positive integers. */ private transient LongBigList subinventory16; /** The logarithm of the number of longwords used in the part of the subinventory associated to an inventory entry. */ private final int log2LongwordsPerSubinventory; /** The logarithm of the number of ones for each {@link #subinventory} longword. */ private final int log2OnesPerSub64; /** The number of ones for each {@link #subinventory} longword. */ private final int onesPerSub64; /** The logarithm of the number of ones for each {@link #subinventory} short. */ private final int log2OnesPerSub16; /** The number of ones for each {@link #subinventory} short. */ private final int onesPerSub16; /** The mask associated to number of ones for each {@link #subinventory} short. */ private final int onesPerSub16Mask; /** The list of exact spills. */ private final long[] exactSpill; /** Creates a new selection structure using a bit vector specified by a big array of longs and a number of bits. * * @param bits a big array of longs representing a bit array. * @param length the number of bits to use from <code>bits</code>. */ public SimpleBigSelect(final long[][] bits, final long length) { this(LongBigArrayBitVector.wrap(bits, length)); } /** * Creates a new selection structure using the specified instance of {@link LongArrayBitVector}. 
* * @param bitVector an instance of {@link LongArrayBitVector}. */ public SimpleBigSelect(final LongBigArrayBitVector bitVector) { this.bitVector = bitVector; this.bits = bitVector.bigBits(); final long length = bitVector.length(); numWords = word(length + Long.SIZE - 1); // We compute quickly the number of ones (possibly counting spurious bits in the last word). long d = 0; for (final long[] s : bits) for (final long t : s) d += Long.bitCount(t); onesPerInventory = 1L << (log2OnesPerInventory = Fast.mostSignificantBit(length == 0 ? 1 : ((d * MAX_ONES_PER_INVENTORY + length - 1) / length))); onesPerInventoryMask = onesPerInventory - 1; assert ((d + onesPerInventory - 1) / onesPerInventory) >= Integer.MAX_VALUE - Arrays.MAX_ARRAY_SIZE : "Inventory too large: " + ((d + onesPerInventory - 1) / onesPerInventory); final int inventorySize = (int)((d + onesPerInventory - 1) / onesPerInventory); inventory = new long[inventorySize + 1]; final long numWords = this.numWords; final long[] inventory = this.inventory; final int log2OnesPerInventory = this.log2OnesPerInventory; final long onesPerInventoryMask = this.onesPerInventoryMask; // First phase: we build an inventory for each one out of onesPerInventory. 
d = 0; for (long i = 0; i < numWords; i++) for (int j = 0; j < Long.SIZE; j++) { if (bits(i) + j >= length) break; if ((get(bits, i) & 1L << j) != 0) { if ((d & onesPerInventoryMask) == 0) inventory[(int)(d >>> log2OnesPerInventory)] = bits(i) + j; d++; } } numOnes = d; inventory[inventorySize] = length; log2LongwordsPerSubinventory = Math.min(MAX_LOG2_LONGWORDS_PER_SUBINVENTORY, Math.max(0, log2OnesPerInventory - 2)); log2OnesPerSub64 = Math.max(0, log2OnesPerInventory - log2LongwordsPerSubinventory); log2OnesPerSub16 = Math.max(0, log2OnesPerSub64 - 2); onesPerSub64 = (1 << log2OnesPerSub64); onesPerSub16 = (1 << log2OnesPerSub16); onesPerSub16Mask = onesPerSub16 - 1; final long numOnes = this.numOnes; final long onesPerInventory = this.onesPerInventory; final int onesPerSub16 = this.onesPerSub16; final int log2OnesPerSub16 = this.log2OnesPerSub16; final int onesPerSub64 = this.onesPerSub64; if (onesPerInventory > 1) { d = 0; long ones; long diff16 = 0, start = 0, span = 0; int spilled = 0, inventoryIndex = 0; for (long i = 0; i < numWords; i++) // We estimate the subinventory and exact spill size for (int j = 0; j < Long.SIZE; j++) { if (bits(i) + j >= length) break; if ((get(bits, i) & 1L << j) != 0) { if ((d & onesPerInventoryMask) == 0) { inventoryIndex = (int)(d >>> log2OnesPerInventory); start = inventory[inventoryIndex]; span = inventory[inventoryIndex + 1] - start; ones = (int)Math.min(numOnes - d, onesPerInventory); // We must always count (possibly unused) diff16's. And we cannot store less then 4 diff16. 
diff16 += Math.max(4, (ones + onesPerSub16 - 1) >>> log2OnesPerSub16); if (span >= MAX_SPAN && onesPerSub64 > 1) spilled += ones; } d++; } } final int subinventorySize = (int)((diff16 + 3) >> 2); final int exactSpillSize = spilled; subinventory = new long[subinventorySize]; exactSpill = new long[exactSpillSize]; subinventory16 = LongArrayBitVector.wrap(subinventory).asLongBigList(Short.SIZE); long offset = 0; spilled = 0; d = 0; final int onesPerSub16Mask = this.onesPerSub16Mask; final int log2LongwordsPerSubinventory = this.log2LongwordsPerSubinventory; final long[] subinventory = this.subinventory; final LongBigList subinventory16 = this.subinventory16; for (long i = 0; i < numWords; i++) for (int j = 0; j < Long.SIZE; j++) { if (bits(i) + j >= length) break; if ((get(bits, i) & 1L << j) != 0) { if ((d & onesPerInventoryMask) == 0) { inventoryIndex = (int)(d >>> log2OnesPerInventory); start = inventory[inventoryIndex]; span = inventory[inventoryIndex + 1] - start; offset = 0; } if (span < MAX_SPAN) { assert bits(i) + j - start <= MAX_SPAN; if ((d & onesPerSub16Mask) == 0) { subinventory16.set(((long)inventoryIndex << log2LongwordsPerSubinventory + 2) + offset++, bits(i) + j - start); } } else { assert onesPerSub64 > 1; if ((d & onesPerInventoryMask) == 0) { inventory[inventoryIndex] |= 1L << 63; subinventory[inventoryIndex << log2LongwordsPerSubinventory] = spilled; } exactSpill[spilled++] = bits(i) + j; } d++; } } } else { subinventory = exactSpill = LongArrays.EMPTY_ARRAY; subinventory16 = null; } } @Override public long select(final long rank) { assert rank >= 0; assert rank < numOnes; final int inventoryIndex = (int)(rank >>> log2OnesPerInventory); final long inventoryRank = inventory[inventoryIndex]; final int subrank = (int)(rank & onesPerInventoryMask); if (subrank == 0) return inventoryRank & ~(1L << 63); long start; int residual; if (inventoryRank >= 0) { final long index16 = ((long)inventoryIndex << log2LongwordsPerSubinventory + 2) + (subrank >>> 
log2OnesPerSub16); start = inventoryRank + ((subinventory[(int)(index16 >>> 2)] >>> ((index16 & 3) << 4) & 0xFFFF)); residual = subrank & onesPerSub16Mask; } else { assert onesPerSub64 > 1; return exactSpill[(int)(subinventory[inventoryIndex << log2LongwordsPerSubinventory] + subrank)]; } if (residual == 0) return start; final long[][] bits = this.bits; long wordIndex = word(start); long word = get(bits, wordIndex) & -1L << start; for(;;) { final int bitCount = Long.bitCount(word); if (residual < bitCount) break; word = get(bits, ++wordIndex); residual -= bitCount; } return bits(wordIndex) + Fast.select(word, residual); } @Override public long[] select(final long rank, final long[] dest, int offset, final int length) { assert rank >= 0; assert rank < numOnes; assert offset >= 0; assert dest != null; assert offset < dest.length; assert length >= 0; assert offset + length <= dest.length; final long s = select(rank); dest[offset] = s; long curr = word(s); final long[][] bits = this.bits; long window = get(bits, curr) & -1L << s; window &= window - 1; for(int i = 1; i < length; i++) { while (window == 0) window = get(bits, ++curr); dest[++offset] = bits(curr) + Long.numberOfTrailingZeros(window); window &= window - 1; } return dest; } @Override public long[] select(final long rank, final long[] dest) { assert rank >= 0; assert rank < numOnes; assert dest != null; assert dest.length > 0; return select(rank, dest, 0, dest.length); } private void readObject(final ObjectInputStream s) throws IOException, ClassNotFoundException { s.defaultReadObject(); subinventory16 = LongArrayBitVector.wrap(subinventory).asLongBigList(Short.SIZE); bits = bitVector.bigBits(); } @Override public long numBits() { return bits(inventory.length) + bits(subinventory.length) + bits(exactSpill.length); } @Override public LongBigArrayBitVector bitVector() { return bitVector; } }
package com.sun.facelets.compiler; import java.io.IOException; import java.net.URL; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; import javax.el.ELException; import javax.el.ExpressionFactory; import javax.faces.FacesException; import javax.faces.context.FacesContext; import com.sun.facelets.FaceletException; import com.sun.facelets.FaceletHandler; import com.sun.facelets.tag.CompositeTagDecorator; import com.sun.facelets.tag.CompositeTagLibrary; import com.sun.facelets.tag.TagDecorator; import com.sun.facelets.tag.TagLibrary; import com.sun.facelets.util.ParameterCheck; import com.sun.facelets.util.FacesAPI; /** * A Compiler instance may handle compiling multiple sources * * @author Jacob Hookom * @version $Id: Compiler.java,v 1.10 2005/08/02 05:02:04 jhook Exp $ */ public abstract class Compiler { protected final static Logger log = Logger.getLogger("facelets.compiler"); public final static String EXPRESSION_FACTORY = "compiler.ExpressionFactory"; private static final TagLibrary EMPTY_LIBRARY = new CompositeTagLibrary( new TagLibrary[0]); private static final TagDecorator EMPTY_DECORATOR = new CompositeTagDecorator( new TagDecorator[0]); private boolean validating = true; private boolean trimmingWhitespace = false; private boolean trimmingComments = true; private final List libraries = new ArrayList(); private final List decorators = new ArrayList(); private final Map features = new HashMap(); private boolean initialized = false; public Compiler() { this.features.put(EXPRESSION_FACTORY, "com.sun.el.ExpressionFactoryImpl"); } private synchronized void initialize() { if (this.initialized) return; log.fine("Initializing"); try { TagLibraryConfig cfg = new TagLibraryConfig(); cfg.loadImplicit(this); } catch (IOException e) { log.log(Level.SEVERE, "Compiler Initialization Error", e); } finally { this.initialized = true; } log.fine("Initialization 
Successful"); } public final FaceletHandler compile(URL src, String alias) throws IOException, FaceletException, ELException, FacesException { if (!this.initialized) this.initialize(); return this.doCompile(src, alias); } protected abstract FaceletHandler doCompile(URL src, String alias) throws IOException, FaceletException, ELException, FacesException; public final TagDecorator createTagDecorator() { if (this.decorators.size() > 0) { return new CompositeTagDecorator((TagDecorator[]) this.decorators .toArray(new TagDecorator[this.decorators.size()])); } return EMPTY_DECORATOR; } public final void addTagDecorator(TagDecorator decorator) { ParameterCheck.notNull("decorator", decorator); if (!this.decorators.contains(decorator)) { this.decorators.add(decorator); } } public final ExpressionFactory createExpressionFactory() { ExpressionFactory el = null; if (FacesAPI.getVersion() >= 12) { try { el = FacesContext.getCurrentInstance().getApplication() .getExpressionFactory(); if (el == null) { log.warning("No default ExpressionFactory from Faces Implementation, attempting to load from Feature[" + EXPRESSION_FACTORY + "]"); } } catch (Exception e) { // do nothing } } if (el == null) { el = (ExpressionFactory) this.featureInstance(EXPRESSION_FACTORY); } return el; } private final Object featureInstance(String name) { String type = (String) this.features.get(name); if (type != null) { try { return Class.forName(type).newInstance(); } catch (Throwable t) { throw new FaceletException("Could not instantiate feature[" + name + "]: " + type); } } return null; } public final TagLibrary createTagLibrary() { if (this.libraries.size() > 0) { return new CompositeTagLibrary((TagLibrary[]) this.libraries .toArray(new TagLibrary[this.libraries.size()])); } return EMPTY_LIBRARY; } public final void addTagLibrary(TagLibrary library) { ParameterCheck.notNull("library", library); if (!this.libraries.contains(library)) { this.libraries.add(library); } } public final void setFeature(String 
name, String value) { this.features.put(name, value); } public final String getFeature(String name) { return (String) this.features.get(name); } public final boolean isTrimmingComments() { return this.trimmingComments; } public final void setTrimmingComments(boolean trimmingComments) { this.trimmingComments = trimmingComments; } public final boolean isTrimmingWhitespace() { return this.trimmingWhitespace; } public final void setTrimmingWhitespace(boolean trimmingWhitespace) { this.trimmingWhitespace = trimmingWhitespace; } public final boolean isValidating() { return this.validating; } public final void setValidating(boolean validating) { this.validating = validating; } }
package io.compgen.cgpipe.runner; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.PrintStream; import java.nio.charset.Charset; import java.nio.file.Paths; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import io.compgen.cgpipe.exceptions.ASTExecException; import io.compgen.cgpipe.exceptions.ASTParseException; import io.compgen.cgpipe.exceptions.RunnerException; import io.compgen.cgpipe.exceptions.VarTypeException; import io.compgen.cgpipe.loader.NumberedLine; import io.compgen.cgpipe.parser.context.RootContext; import io.compgen.cgpipe.parser.target.BuildTarget; import io.compgen.cgpipe.parser.variable.VarList; import io.compgen.cgpipe.parser.variable.VarString; import io.compgen.cgpipe.parser.variable.VarValue; import io.compgen.common.MapBuilder; import io.compgen.common.StringUtils; public abstract class JobRunner { abstract public boolean submit(JobDef jobdef) throws RunnerException; abstract public boolean isJobIdValid(String jobId) throws RunnerException; abstract public void runnerDone() throws RunnerException; abstract protected void setConfig(String k, VarValue varValue); public static String defaultShell = null; static { for (String path: new String[] {"/bin/bash", "/usr/bin/bash", "/usr/local/bin/bash", "/bin/sh"}) { if (new File(path).exists()) { defaultShell=path; break; } } } static protected Log log = LogFactory.getLog(JobRunner.class); protected boolean dryrun = false; protected boolean done = false; protected PrintStream joblog = null; protected Map<String, JobDependency> submittedJobs = new HashMap<String, JobDependency>(); // key = output-file, value = job-id protected List<JobDependency> submittedJobDefs = new ArrayList<JobDependency>(); 
protected RootContext rootContext = null; protected JobDef setupJob = null; protected List<NumberedLine> prelines=null; protected List<NumberedLine> postlines=null; // private List<NumberedLine> postSubmitLines=null; protected List<String> outputFilesSubmitted = new ArrayList<String>(); protected List<String> tempOutputFilesSubmitted = new ArrayList<String>(); public static JobRunner load(RootContext cxt, boolean dryrun) throws RunnerException { String runner = cxt.getString("cgpipe.runner"); if (runner == null) { runner = "shell"; } if (cxt.contains("cgpipe.shell")) { defaultShell = cxt.getString("cgpipe.shell"); } JobRunner.log.info("job-runner: " +runner); JobRunner obj = null; switch (runner) { case "shell": obj = new ShellScriptRunner(); break; case "sge": obj = new SGETemplateRunner(); break; case "slurm": obj = new SLURMTemplateRunner(); break; case "pbs": obj = new PBSTemplateRunner(); break; case "sbs": obj = new SBSTemplateRunner(); break; case "graphviz": obj = new GraphvizRunner(); break; default: throw new RunnerException("Can't load job runner: "+runner +" (valid options: shell, sge, slurm, pbs, sjq, graphviz)"); } obj.rootContext = cxt; String prefix = "cgpipe.runner."+runner; Map<String, VarValue> cxtvals = cxt.cloneValues(prefix); for (String k: cxtvals.keySet()) { obj.setConfig(k, cxtvals.get(k)); } obj.dryrun = dryrun; // Attempt to load a list of existing jobs // TODO: add a lock mechanism for job-log? // TODO: add start/stop/retcode to job log? 
String joblog = cxt.getString("cgpipe.joblog"); JobRunner.log.info("job-log: " +joblog); if (joblog != null) { try { File jobfile = new File(joblog); if (jobfile.exists()) { BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(joblog))); String line; while ((line = reader.readLine()) != null) { String[] cols = line.split("\t"); if (cols[1].equals("OUTPUT")) { String absOutput = Paths.get(cols[2]).toAbsolutePath().toString(); obj.submittedJobs.put(absOutput, new ExistingJob(cols[0])); cxt.getRoot().addPendingJobOutput(absOutput, cols[0], obj); log.trace("Existing/pending output: "+ absOutput); } } reader.close(); } else if (jobfile.getParentFile() != null && !jobfile.getParentFile().exists()) { jobfile.getParentFile().mkdirs(); } obj.joblog = new PrintStream(new FileOutputStream(joblog, true)); } catch (IOException e) { throw new RunnerException(e); } } return obj; } public void abort() { } protected void shexec(JobDef jobdef) throws RunnerException { try { Process proc = Runtime.getRuntime().exec(new String[] { defaultShell }); proc.getOutputStream().write(jobdef.getBody().getBytes(Charset.forName("UTF8"))); proc.getOutputStream().close(); InputStream is = proc.getInputStream(); InputStream es = proc.getErrorStream(); int retcode = proc.waitFor(); // Note: This will block for large strings // TODO: Make this run in a thread to consume the stream String out = StringUtils.readInputStream(is); String err = StringUtils.readInputStream(es); log.trace("retcode: "+retcode); log.trace("stdout: " + out); log.trace("stderr: " + err); is.close(); es.close(); if (retcode != 0) { throw new RunnerException("Error running job via shexec: "+jobdef.getName()); } } catch (IOException | InterruptedException e) { throw new RunnerException(e); } } protected void shexec(String src) throws RunnerException { try { Process proc = Runtime.getRuntime().exec(new String[] { defaultShell }); proc.getOutputStream().write(src.getBytes(Charset.forName("UTF8"))); 
proc.getOutputStream().close(); InputStream is = proc.getInputStream(); InputStream es = proc.getErrorStream(); int retcode = proc.waitFor(); // Note: This will block for large strings // TODO: Make this run in a thread to consume the stream String out = StringUtils.readInputStream(is); String err = StringUtils.readInputStream(es); System.out.print(out); System.err.print(err); is.close(); es.close(); if (retcode != 0) { throw new RunnerException("Error running script!"); } } catch (IOException | InterruptedException e) { throw new RunnerException(e); } } private List<NumberedLine> getLinesForTarget(String name, RootContext context, boolean allowMissing) { BuildTarget tgt = context.build(name, allowMissing); List<NumberedLine> lines = null; if (tgt != null) { lines = tgt.getLines(); } return lines; } private void setup(RootContext context) throws RunnerException { if (setupJob == null) { BuildTarget setupTgt = context.build("__setup__", true); if (setupTgt != null) { try { setupJob = setupTgt.eval(null, null, context); if (setupJob.getSettingBool("job.shexec", false)) { if (!dryrun) { shexec(setupJob); } } else { submit(setupJob); } } catch (ASTParseException | ASTExecException e) { throw new RunnerException(e); } } prelines = getLinesForTarget("__pre__", context, true); postlines = getLinesForTarget("__post__", context, true); } } public void submitAll(BuildTarget initialTarget, RootContext context) throws RunnerException { if (initialTarget.getOutputs() != null && initialTarget.getOutputs().size() > 0) { setup(context); markSkippable(initialTarget, context, initialTarget.getOutputs().get(0)); submitTargets(initialTarget, context, initialTarget.getOutputs().get(0), true); } } private long markSkippable(BuildTarget target, RootContext context, String outputName) throws RunnerException { long lastModified = 0; String lastModifiedDep = ""; for (String dep: target.getDepends().keySet()) { long depLastMod = markSkippable(target.getDepends().get(dep), context, dep); if 
(depLastMod == -1) { lastModified = -1; } else if (depLastMod > lastModified) { lastModified = depLastMod; lastModifiedDep = dep; } } long retval = 0; if (lastModified > -1) { // Check to see if the outputName file exists on disk. // Note: this could also be used to look for remote resources (S3, etc), but not implemented for (String allout: target.getOutputs()) { File outputFile = new File(allout); if (outputFile.exists()) { if (outputFile.lastModified() >= lastModified) { log.debug("Marking output-target as skippable: "+allout); target.setSkippable(allout); if (retval != -1 && outputFile.lastModified() > retval) { retval = outputFile.lastModified(); } } else { log.debug("Marking output-target as not skippable: " + allout + " is older than " + lastModifiedDep + " (" + outputFile.lastModified() + " vs " + lastModified + ")"); retval = -1; } } else { log.debug("Marking output-target as not skippable: " + allout + " doesn't exist! (" + outputFile.getAbsolutePath()+")"); retval = -1; } } } else { log.debug("Marking output-target as not skippable: "+outputName + " a dependency will be built"); retval = -1; } return retval; } private JobDependency submitTargets(BuildTarget target, RootContext context, String outputName, boolean isRoot) throws RunnerException { log.trace("Submitting target: "+outputName); // Can we skip this target (file exists) if (target.isSkippable()) { log.trace("Skipping target: "+outputName); return null; } // Has it already been submitted in another part of the tree? if (target.getJobDep() != null) { log.trace("Skipping target (already submitted): "+outputName); return target.getJobDep(); } // Have we already submitted this job in a prior run? JobDependency depJob = findJobProviding(outputName); if (depJob != null) { log.trace("Skipping target (job queued): "+outputName); return depJob; } // Okay... we are submitting this job, start with submitting it's dependencies... 
List<JobDependency> deps = new ArrayList<JobDependency>(); try { JobDef job = target.eval(prelines, postlines, context); if (job != null) { boolean blankRoot = false; if (isRoot) { String tmp = job.getBody().replaceAll("[ \\t\\r\\n]", ""); if (tmp.equals("")) { blankRoot = true; } } for (String out: target.getDepends().keySet()) { log.info("Submitting dependency: "+out); JobDependency dep = submitTargets(target.getDepends().get(out), context, out, blankRoot); if (dep != null) { deps.add(dep); } else { log.debug("Dependency not found?: "+out); } } job.addDependencies(deps); if (setupJob != null && setupJob.getJobId() != null) { job.addDependency(setupJob); } if (!blankRoot) { submit(job); if (job.getJobId() == null) { abort(); log.error("Error submitting job: "+ target); throw new RunnerException("Error submitting job: "+job); } postSubmit(job, context); } else { log.debug("Skipping empty target: "+target); job.setJobId(""); } target.setSubmittedJobDep(job); for (String out: target.getOutputs()) { submittedJobs.put(Paths.get(out).toAbsolutePath().toString(), job); } this.submittedJobDefs.add(job); this.outputFilesSubmitted.addAll(target.getOutputs()); this.tempOutputFilesSubmitted.addAll(target.getTempOutputs()); } else { log.debug("Empty job for target: "+target); } return job; } catch (ASTParseException | ASTExecException e) { abort(); throw new RunnerException(e); } } private JobDependency findJobProviding(String input) throws RunnerException { log.trace("Looking for output: "+ input); String absInput = Paths.get(input).toAbsolutePath().toString(); if (submittedJobs.containsKey(absInput)) { log.debug("Found existing job providing: "+ absInput + " ("+submittedJobs.get(absInput).getJobId()+")"); JobDependency job = submittedJobs.get(absInput); if (isJobIdValid(job.getJobId())) { return job; } log.debug("Existing job: "+ job.getJobId()+" is no longer valid... 
resubmitting"); submittedJobs.remove(absInput); } return null; } public void done() throws RunnerException { // look for a __teardown__ target and execute if found. JobDef teardown = null; // TODO: Move this lower? And add all of the job defs to the context? // (Like -- show the final outputs and temp. files...) BuildTarget tdTgt = rootContext.build("__teardown__", true); if (tdTgt!=null) { try { //System.err.println("ALL OUTPUTS : "+StringUtils.join(",",outputFilesSubmitted)); //System.err.println("TEMP-OUTPUTS: "+StringUtils.join(",",tempOutputFilesSubmitted)); MapBuilder<String, VarValue> mb = new MapBuilder<String, VarValue>(); if (tempOutputFilesSubmitted.size() > 0) { VarString[] tmpFileVar = new VarString[tempOutputFilesSubmitted.size()]; for (int i=0; i<tempOutputFilesSubmitted.size(); i++) { tmpFileVar[i] = new VarString(tempOutputFilesSubmitted.get(i)); } try { mb.put("cgpipe.tmpfiles", new VarList(tmpFileVar)); } catch (VarTypeException e) { throw new RunnerException(e); } } if (outputFilesSubmitted.size() > 0) { VarString[] tmpFileVar = new VarString[outputFilesSubmitted.size()]; for (int i=0; i<outputFilesSubmitted.size(); i++) { tmpFileVar[i] = new VarString(outputFilesSubmitted.get(i)); } try { mb.put("cgpipe.outputfiles", new VarList(tmpFileVar)); } catch (VarTypeException e) { throw new RunnerException(e); } } teardown = tdTgt.eval(null, null, rootContext, mb.build()); } catch (ASTParseException | ASTExecException e) { throw new RunnerException(e); } if (teardown.getSettingBool("job.shexec", false)) { if (!dryrun) { shexec(teardown); } } else { teardown.addDependencies(submittedJobDefs); if (setupJob != null && setupJob.getJobId() != null) { teardown.addDependency(setupJob); } submit(teardown); } } runnerDone(); if (joblog!=null) { joblog.close(); } } protected void logJob(JobDef job) { log.info("Submitted job: "+job.getJobId() +" "+ job.getName()); for (String k:job.getSettings()) { if (k.startsWith("job.")) { log.debug("setting: "+k+" => 
"+job.getSetting(k)); } } for (String out:job.getOutputs()) { log.debug("output: "+out); } for (String inp:job.getInputs()) { log.debug("input: "+inp); } for (String s: job.getBody().split("\n")) { log.debug("src: "+StringUtils.strip(s)); } if (!dryrun && joblog != null && job.getJobId() != null && !job.getJobId().equals("")) { joblog.println(job.getJobId()+"\t"+"NAME\t"+job.getName()); joblog.println(job.getJobId()+"\t"+"SUBMIT\t"+System.currentTimeMillis()); joblog.println(job.getJobId()+"\t"+"USER\t"+System.getProperty("user.name")); for (JobDependency dep:job.getDependencies()) { if (job.getJobId()!=null && !job.getJobId().equals("")) { joblog.println(job.getJobId()+"\t"+"DEP\t"+dep.getJobId()); } } for (String out:job.getOutputs()) { joblog.println(job.getJobId()+"\t"+"OUTPUT\t"+out); } for (String inp:job.getInputs()) { joblog.println(job.getJobId()+"\t"+"INPUT\t"+inp); } for (String s: job.getBody().split("\n")) { joblog.println(job.getJobId()+"\t"+"SRC\t"+s); } for (String k:job.getSettings()) { if (k.startsWith("job.")) { if (k.equals("job.custom")) { for (String s: job.getSettings("job.custom")) { joblog.println(job.getJobId()+"\t"+"SETTING\t"+k+"\t"+s); } } else { joblog.println(job.getJobId()+"\t"+"SETTING\t"+k+"\t"+job.getSetting(k)); } } } } } public void postSubmit(JobDef jobdef, RootContext context) throws RunnerException { BuildTarget postSubmitTgt = context.build("__postsubmit__", true); if (postSubmitTgt != null) { try { RootContext jobRoot = new RootContext(); for (String setting: jobdef.getSettings()) { if (setting.startsWith("job.")) { jobRoot.set(setting, jobdef.getSettingsMap().get(setting)); } } jobRoot.set("job.id", new VarString(jobdef.getJobId())); String deps = ""; for (JobDependency dep: jobdef.getDependencies()) { if (!deps.equals("")) { deps += ":"; } deps += dep.getJobId(); } jobRoot.set("job.depids", new VarString(deps)); JobDef postSubmit = postSubmitTgt.eval(null, null, null, jobRoot.cloneValues()); 
System.err.println(postSubmit.getBody()); shexec(postSubmit); } catch (ASTParseException | ASTExecException e) { throw new RunnerException(e); } } } }
package org.jsmpp.session;

import java.io.DataInputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.SocketException;
import java.net.SocketTimeoutException;
import java.util.Hashtable;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.log4j.Logger;
import org.jsmpp.BindType;
import org.jsmpp.DefaultPDUReader;
import org.jsmpp.DefaultPDUSender;
import org.jsmpp.InterfaceVersion;
import org.jsmpp.InvalidCommandLengthException;
import org.jsmpp.InvalidResponseException;
import org.jsmpp.NumberingPlanIndicator;
import org.jsmpp.PDUReader;
import org.jsmpp.PDUSender;
import org.jsmpp.PDUStringException;
import org.jsmpp.SMPPConstant;
import org.jsmpp.SynchronizedPDUReader;
import org.jsmpp.SynchronizedPDUSender;
import org.jsmpp.TypeOfNumber;
import org.jsmpp.bean.BindResp;
import org.jsmpp.bean.Command;
import org.jsmpp.bean.DataCoding;
import org.jsmpp.bean.DeliverSm;
import org.jsmpp.bean.ESMClass;
import org.jsmpp.bean.EnquireLinkResp;
import org.jsmpp.bean.QuerySmResp;
import org.jsmpp.bean.RegisteredDelivery;
import org.jsmpp.bean.SubmitSmResp;
import org.jsmpp.bean.UnbindResp;
import org.jsmpp.extra.NegativeResponseException;
import org.jsmpp.extra.PendingResponse;
import org.jsmpp.extra.ProcessMessageException;
import org.jsmpp.extra.ResponseTimeoutException;
import org.jsmpp.extra.SessionState;
import org.jsmpp.session.state.SMPPSessionState;
import org.jsmpp.util.DefaultComposer;
import org.jsmpp.util.Sequence;

/**
 * An ESME-side SMPP session: connects to an SMSC, binds, and exchanges PDUs.
 *
 * <p>Request/response correlation works through {@link #pendingResponse}: each
 * outgoing request registers a {@link PendingResponse} keyed by its sequence
 * number; a background {@link PDUReaderWorker} reads incoming PDUs and the
 * state processor completes the matching entry.
 *
 * <p>NOTE(review): {@code enquireLinkToSend} and {@code lastActivityTimestamp}
 * are touched from several threads without synchronization — presumably
 * tolerated for this keep-alive counter, but worth confirming.
 *
 * @author uudashr
 * @version 2.0
 */
public class SMPPSession {
    private static final Logger logger = Logger.getLogger(SMPPSession.class);

    /* PDU codec helpers are stateless apart from their own locking, so they
     * are shared by all sessions. */
    private static final PDUSender pduSender =
            new SynchronizedPDUSender(new DefaultPDUSender(new DefaultComposer()));
    private static final PDUReader pduReader =
            new SynchronizedPDUReader(new DefaultPDUReader());
    private static final AtomicInteger sessionIdSequence = new AtomicInteger();

    private Socket socket = new Socket();
    private DataInputStream in;
    private OutputStream out;

    // Session state machine; stateProcessor mirrors sessionState (see changeState).
    private SessionState sessionState = SessionState.CLOSED;
    private SMPPSessionState stateProcessor = SMPPSessionState.CLOSED;

    private final Sequence sequence = new Sequence();
    private final SMPPSessionHandler sessionHandler = new SMPPSessionHandlerImpl();

    // Outstanding requests keyed by PDU sequence number.
    private final Hashtable<Integer, PendingResponse<? extends Command>> pendingResponse =
            new Hashtable<Integer, PendingResponse<? extends Command>>();

    private int sessionTimer = 5000;          // socket SO_TIMEOUT / idle enquire period, millis
    private long transactionTimer = 2000;     // per-request response wait, millis
    private int enquireLinkToSend;            // count of pending enquire_link requests
    private long lastActivityTimestamp;       // last time a PDU was received

    private MessageReceiverListener messageReceiverListener;
    private SessionStateListener sessionStateListener;
    private IdleActivityChecker idleActivityChecker;
    private EnquireLinkSender enquireLinkSender;
    private int sessionId = sessionIdSequence.incrementAndGet();

    public SMPPSession() {
    }

    /** @return the per-JVM unique id of this session. */
    public int getSessionId() {
        return sessionId;
    }

    /**
     * Connects the underlying socket and performs the SMPP bind handshake.
     * On success, background threads are started to read PDUs, send periodic
     * enquire_link requests, and watch for idle activity.
     *
     * @throws IOException if the session was already used, the connection
     *         fails, or the bind receives a negative/invalid/late response.
     */
    public void connectAndBind(String host, int port, BindType bindType,
            String systemId, String password, String systemType,
            TypeOfNumber addrTon, NumberingPlanIndicator addrNpi,
            String addressRange) throws IOException {
        // A non-zero sequence means this session instance was already bound once;
        // it cannot be reused.
        if (sequence.currentValue() != 0)
            throw new IOException("Failed connecting");

        socket.connect(new InetSocketAddress(host, port));
        if (socket.getInputStream() == null) {
            logger.fatal("InputStream is null");
        } else if (socket.isInputShutdown()) {
            logger.fatal("Input shutdown");
        }
        logger.info("Connected");

        changeState(SessionState.OPEN);
        try {
            in = new DataInputStream(socket.getInputStream());
            // FIX: the original started three identical reader workers on the
            // same stream; a single reader thread is sufficient (and intended).
            new PDUReaderWorker().start();
            out = socket.getOutputStream();

            sendBind(bindType, systemId, password, systemType,
                    InterfaceVersion.IF_34, addrTon, addrNpi, addressRange);
            changeToBoundState(bindType);
            socket.setSoTimeout(sessionTimer);

            enquireLinkSender = new EnquireLinkSender();
            enquireLinkSender.start();
            idleActivityChecker = new IdleActivityChecker();
            idleActivityChecker.start();
        } catch (PDUStringException e) {
            // NOTE(review): swallowed by the original code as well — the
            // session is left OPEN but unbound if the bind parameters are bad.
            logger.error("Failed sending bind command", e);
        } catch (NegativeResponseException e) {
            String message = "Receive negative bind response";
            logger.error(message, e);
            closeSocket();
            throw new IOException(message + ": " + e.getMessage());
        } catch (InvalidResponseException e) {
            String message = "Receive invalid response of bind";
            logger.error(message, e);
            closeSocket();
            throw new IOException(message + ": " + e.getMessage());
        } catch (ResponseTimeoutException e) {
            String message = "Waiting bind response take time to long";
            logger.error(message, e);
            closeSocket();
            throw new IOException(message + ": " + e.getMessage());
        } catch (IOException e) {
            logger.error("IO Error occure", e);
            closeSocket();
            throw e;
        }
    }

    /**
     * Sends the bind PDU and waits for the SMSC response.
     *
     * @return SMSC system id.
     * @throws PDUStringException if we enter invalid bind parameter(s).
     * @throws ResponseTimeoutException if there is no valid response after defined millis.
     * @throws InvalidResponseException if there is invalid response found.
     * @throws NegativeResponseException if we receive negative response.
     * @throws IOException if an I/O error occurs while sending.
     */
    private String sendBind(BindType bindType, String systemId, String password,
            String systemType, InterfaceVersion interfaceVersion,
            TypeOfNumber addrTon, NumberingPlanIndicator addrNpi,
            String addressRange) throws PDUStringException,
            ResponseTimeoutException, InvalidResponseException,
            NegativeResponseException, IOException {
        int seqNum = sequence.nextValue();
        PendingResponse<BindResp> pendingResp =
                new PendingResponse<BindResp>(transactionTimer);
        pendingResponse.put(seqNum, pendingResp);
        try {
            pduSender.sendBind(out, bindType, seqNum, systemId, password,
                    systemType, interfaceVersion, addrTon, addrNpi, addressRange);
        } catch (IOException e) {
            logger.error("Failed sending bind command", e);
            pendingResponse.remove(seqNum);
            throw e;
        }
        try {
            pendingResp.waitDone();
            logger.info("Bind response received");
        } catch (ResponseTimeoutException e) {
            pendingResponse.remove(seqNum);
            throw e;
        } catch (InvalidResponseException e) {
            pendingResponse.remove(seqNum);
            throw e;
        }
        if (pendingResp.getResponse().getCommandStatus() != SMPPConstant.STAT_ESME_ROK)
            throw new NegativeResponseException(pendingResp.getResponse().getCommandStatus());
        return pendingResp.getResponse().getSystemId();
    }

    /**
     * Ensure we have proper link.
     * @throws ResponseTimeoutException if there is no valid response after defined millis.
     * @throws InvalidResponseException if there is invalid response found.
     * @throws IOException if an I/O error occurs while sending.
     */
    private void enquireLink() throws ResponseTimeoutException,
            InvalidResponseException, IOException {
        int seqNum = sequence.nextValue();
        PendingResponse<EnquireLinkResp> pendingResp =
                new PendingResponse<EnquireLinkResp>(transactionTimer);
        pendingResponse.put(seqNum, pendingResp);
        try {
            pduSender.sendEnquireLink(out, seqNum);
        } catch (IOException e) {
            logger.error("Failed sending enquire link", e);
            pendingResponse.remove(seqNum);
            throw e;
        }
        try {
            pendingResp.waitDone();
            logger.info("Enquire link response received");
        } catch (ResponseTimeoutException e) {
            pendingResponse.remove(seqNum);
            throw e;
        } catch (InvalidResponseException e) {
            pendingResponse.remove(seqNum);
            throw e;
        }
        if (pendingResp.getResponse().getCommandStatus() != SMPPConstant.STAT_ESME_ROK) {
            // this is ok
            logger.warn("Receive NON-OK response of enquire link");
        }
    }

    /** Unbinds gracefully (best effort) and always closes the socket. */
    public void unbindAndClose() {
        try {
            unbind();
        } catch (ResponseTimeoutException e) {
            logger.error("Timeout waiting unbind response", e);
        } catch (InvalidResponseException e) {
            logger.error("Receive invalid unbind response", e);
        } catch (IOException e) {
            logger.error("IO error found ", e);
        }
        closeSocket();
    }

    private void unbind() throws ResponseTimeoutException,
            InvalidResponseException, IOException {
        int seqNum = sequence.nextValue();
        PendingResponse<UnbindResp> pendingResp =
                new PendingResponse<UnbindResp>(transactionTimer);
        pendingResponse.put(seqNum, pendingResp);
        try {
            pduSender.sendUnbind(out, seqNum);
        } catch (IOException e) {
            logger.error("Failed sending unbind", e);
            pendingResponse.remove(seqNum);
            throw e;
        }
        try {
            pendingResp.waitDone();
            logger.info("Unbind response received");
            changeState(SessionState.UNBOUND);
        } catch (ResponseTimeoutException e) {
            pendingResponse.remove(seqNum);
            throw e;
        } catch (InvalidResponseException e) {
            pendingResponse.remove(seqNum);
            throw e;
        }
        if (pendingResp.getResponse().getCommandStatus() != SMPPConstant.STAT_ESME_ROK)
            logger.warn("Receive NON-OK response of unbind");
    }

    /**
     * Submits a short message (submit_sm) and waits for the response.
     *
     * @return message id.
     * @throws PDUStringException if we enter invalid bind parameter(s).
     * @throws ResponseTimeoutException if there is no valid response after defined millis.
     * @throws InvalidResponseException if there is invalid response found.
     * @throws NegativeResponseException if we receive negative response.
     * @throws IOException if sending fails (the socket is then closed).
     */
    public String submitShortMessage(String serviceType,
            TypeOfNumber sourceAddrTon, NumberingPlanIndicator sourceAddrNpi,
            String sourceAddr, TypeOfNumber destAddrTon,
            NumberingPlanIndicator destAddrNpi, String destinationAddr,
            ESMClass esmClass, byte protocoId, byte priorityFlag,
            String scheduleDeliveryTime, String validityPeriod,
            RegisteredDelivery registeredDelivery, byte replaceIfPresent,
            DataCoding dataCoding, byte smDefaultMsgId, byte[] shortMessage)
            throws PDUStringException, ResponseTimeoutException,
            InvalidResponseException, NegativeResponseException, IOException {
        int seqNum = sequence.nextValue();
        PendingResponse<SubmitSmResp> pendingResp =
                new PendingResponse<SubmitSmResp>(transactionTimer);
        pendingResponse.put(seqNum, pendingResp);
        try {
            pduSender.sendSubmitSm(out, seqNum, serviceType, sourceAddrTon,
                    sourceAddrNpi, sourceAddr, destAddrTon, destAddrNpi,
                    destinationAddr, esmClass, protocoId, priorityFlag,
                    scheduleDeliveryTime, validityPeriod, registeredDelivery,
                    replaceIfPresent, dataCoding, smDefaultMsgId, shortMessage);
        } catch (IOException e) {
            logger.error("Failed submit short message", e);
            pendingResponse.remove(seqNum);
            closeSocket();
            throw e;
        }
        try {
            pendingResp.waitDone();
            logger.debug("Submit sm response received");
        } catch (ResponseTimeoutException e) {
            pendingResponse.remove(seqNum);
            logger.debug("Response timeout for submit_sm with sessionIdSequence number " + seqNum);
            throw e;
        } catch (InvalidResponseException e) {
            pendingResponse.remove(seqNum);
            throw e;
        }
        if (pendingResp.getResponse().getCommandStatus() != SMPPConstant.STAT_ESME_ROK)
            throw new NegativeResponseException(pendingResp.getResponse().getCommandStatus());
        return pendingResp.getResponse().getMessageId();
    }

    /**
     * Queries the state of a previously submitted message (query_sm).
     *
     * @throws InvalidResponseException also when the SMSC echoes back a
     *         different message_id than the one requested.
     */
    public QuerySmResult queryShortMessage(String messageId,
            TypeOfNumber sourceAddrTon, NumberingPlanIndicator sourceAddrNpi,
            String sourceAddr) throws PDUStringException,
            ResponseTimeoutException, InvalidResponseException,
            NegativeResponseException, IOException {
        int seqNum = sequence.nextValue();
        PendingResponse<QuerySmResp> pendingResp =
                new PendingResponse<QuerySmResp>(transactionTimer);
        pendingResponse.put(seqNum, pendingResp);
        try {
            pduSender.sendQuerySm(out, seqNum, messageId, sourceAddrTon,
                    sourceAddrNpi, sourceAddr);
        } catch (IOException e) {
            logger.error("Failed submit short message", e);
            pendingResponse.remove(seqNum);
            closeSocket();
            throw e;
        }
        try {
            pendingResp.waitDone();
            logger.info("Query sm response received");
        } catch (ResponseTimeoutException e) {
            pendingResponse.remove(seqNum);
            throw e;
        } catch (InvalidResponseException e) {
            pendingResponse.remove(seqNum);
            throw e;
        }
        QuerySmResp resp = pendingResp.getResponse();
        if (resp.getCommandStatus() != SMPPConstant.STAT_ESME_ROK)
            throw new NegativeResponseException(resp.getCommandStatus());
        if (resp.getMessageId().equals(messageId)) {
            return new QuerySmResult(resp.getFinalDate(),
                    resp.getMessageState(), resp.getErrorCode());
        } else {
            // message id requested not same as the returned
            throw new InvalidResponseException("Requested message_id doesn't match with the result");
        }
    }

    /** Maps the bind type to the matching BOUND_* state and (re)arms SO_TIMEOUT. */
    private void changeToBoundState(BindType bindType) {
        if (bindType.equals(BindType.BIND_TX)) {
            changeState(SessionState.BOUND_TX);
        } else if (bindType.equals(BindType.BIND_RX)) {
            changeState(SessionState.BOUND_RX);
        } else if (bindType.equals(BindType.BIND_TRX)) {
            changeState(SessionState.BOUND_TRX);
        } else {
            throw new IllegalArgumentException("Bind type " + bindType + " not supported");
        }
        try {
            socket.setSoTimeout(sessionTimer);
        } catch (SocketException e) {
            logger.error("Failed setting so_timeout for session timer", e);
        }
    }

    /** Transitions the session state and swaps in the matching state processor. */
    private synchronized void changeState(SessionState newState) {
        if (sessionState != newState) {
            final SessionState oldState = sessionState;
            sessionState = newState;
            // change the session state processor
            if (sessionState == SessionState.OPEN) {
                stateProcessor = SMPPSessionState.OPEN;
            } else if (sessionState == SessionState.BOUND_RX) {
                stateProcessor = SMPPSessionState.BOUND_RX;
            } else if (sessionState == SessionState.BOUND_TX) {
                stateProcessor = SMPPSessionState.BOUND_TX;
            } else if (sessionState == SessionState.BOUND_TRX) {
                stateProcessor = SMPPSessionState.BOUND_TRX;
            } else if (sessionState == SessionState.UNBOUND) {
                stateProcessor = SMPPSessionState.UNBOUND;
            } else if (sessionState == SessionState.CLOSED) {
                stateProcessor = SMPPSessionState.CLOSED;
            }
            fireChangeState(newState, oldState);
        }
    }

    private void updateActivityTimestamp() {
        lastActivityTimestamp = System.currentTimeMillis();
    }

    // Queues one enquire_link request for the EnquireLinkSender thread.
    private void addEnquireLinkJob() {
        enquireLinkToSend++;
    }

    public int getSessionTimer() {
        return sessionTimer;
    }

    /** Sets the session timer; applied to the live socket when already bound. */
    public void setSessionTimer(int sessionTimer) {
        this.sessionTimer = sessionTimer;
        if (sessionState.isBound()) {
            try {
                socket.setSoTimeout(sessionTimer);
            } catch (SocketException e) {
                logger.error("Failed setting so_timeout for session timer", e);
            }
        }
    }

    public long getTransactionTimer() {
        return transactionTimer;
    }

    public void setTransactionTimer(long transactionTimer) {
        this.transactionTimer = transactionTimer;
    }

    public synchronized SessionState getSessionState() {
        return sessionState;
    }

    public void setSessionStateListener(SessionStateListener sessionStateListener) {
        this.sessionStateListener = sessionStateListener;
    }

    public void setMessageReceiverListener(MessageReceiverListener messageReceiverListener) {
        this.messageReceiverListener = messageReceiverListener;
    }

    /** Marks the session CLOSED and closes the socket if still open. */
    private void closeSocket() {
        changeState(SessionState.CLOSED);
        if (!socket.isClosed()) {
            try {
                socket.close();
            } catch (IOException e) {
                logger.warn("Failed closing socket", e);
            }
        }
    }

    // The worker threads keep running while the session is OPEN or bound.
    private synchronized boolean isReadPdu() {
        return sessionState.isBound() || sessionState.equals(SessionState.OPEN);
    }

    /**
     * Reads one PDU off the wire and dispatches it to the current state
     * processor. A read timeout schedules an enquire_link instead of failing.
     */
    private void readPDU() {
        try {
            Command pduHeader = null;
            byte[] pdu = null;
            synchronized (in) {
                pduHeader = pduReader.readPDUHeader(in);
                pdu = pduReader.readPDU(in, pduHeader);
            }
            switch (pduHeader.getCommandId()) {
            case SMPPConstant.CID_BIND_RECEIVER_RESP:
            case SMPPConstant.CID_BIND_TRANSMITTER_RESP:
            case SMPPConstant.CID_BIND_TRANSCEIVER_RESP:
                updateActivityTimestamp();
                stateProcessor.processBindResp(pduHeader, pdu, sessionHandler);
                break;
            case SMPPConstant.CID_GENERIC_NACK:
                updateActivityTimestamp();
                stateProcessor.processGenericNack(pduHeader, pdu, sessionHandler);
                break;
            case SMPPConstant.CID_ENQUIRE_LINK:
                updateActivityTimestamp();
                stateProcessor.processEnquireLink(pduHeader, pdu, sessionHandler);
                break;
            case SMPPConstant.CID_ENQUIRE_LINK_RESP:
                updateActivityTimestamp();
                stateProcessor.processEnquireLinkResp(pduHeader, pdu, sessionHandler);
                break;
            case SMPPConstant.CID_SUBMIT_SM_RESP:
                updateActivityTimestamp();
                stateProcessor.processSubmitSmResp(pduHeader, pdu, sessionHandler);
                break;
            case SMPPConstant.CID_QUERY_SM_RESP:
                updateActivityTimestamp();
                stateProcessor.processQuerySmResp(pduHeader, pdu, sessionHandler);
                break;
            case SMPPConstant.CID_DELIVER_SM:
                updateActivityTimestamp();
                stateProcessor.processDeliverSm(pduHeader, pdu, sessionHandler);
                break;
            case SMPPConstant.CID_UNBIND:
                updateActivityTimestamp();
                stateProcessor.processUnbind(pduHeader, pdu, sessionHandler);
                changeState(SessionState.UNBOUND);
                break;
            case SMPPConstant.CID_UNBIND_RESP:
                updateActivityTimestamp();
                stateProcessor.processUnbindResp(pduHeader, pdu, sessionHandler);
                break;
            default:
                stateProcessor.processUnknownCid(pduHeader, pdu, sessionHandler);
            }
        } catch (InvalidCommandLengthException e) {
            logger.warn("Receive invalid command length", e);
            // FIXME uud: response to this error, generick nack or close socket
        } catch (SocketTimeoutException e) {
            // No traffic within SO_TIMEOUT: queue a keep-alive instead of failing.
            addEnquireLinkJob();
            try {
                Thread.sleep(1);
            } catch (InterruptedException ee) {
            }
        } catch (IOException e) {
            closeSocket();
        }
    }

    @Override
    protected void finalize() throws Throwable {
        closeSocket();
    }

    private void fireAcceptDeliverSm(DeliverSm deliverSm) throws ProcessMessageException {
        if (messageReceiverListener != null) {
            messageReceiverListener.onAcceptDeliverSm(deliverSm);
        } else {
            logger.warn("Receive deliver_sm but MessageReceiverListener is null. Short message = " + new String(deliverSm.getShortMessage()));
        }
    }

    private void fireChangeState(SessionState newState, SessionState oldState) {
        if (sessionStateListener != null) {
            sessionStateListener.onStateChange(newState, oldState, this);
        } else {
            logger.warn("SessionStateListener is null");
        }
    }

    /** Callback surface handed to the state processors. */
    private class SMPPSessionHandlerImpl implements SMPPSessionHandler {
        public void processDeliverSm(DeliverSm deliverSm) throws ProcessMessageException {
            fireAcceptDeliverSm(deliverSm);
        }

        @SuppressWarnings("unchecked")
        public PendingResponse<Command> removeSentItem(int sequenceNumber) {
            return (PendingResponse<Command>) pendingResponse.remove(sequenceNumber);
        }

        public void sendDeliverSmResp(int sequenceNumber) throws IOException {
            try {
                pduSender.sendDeliverSmResp(out, sequenceNumber);
                // FIXME uud: delete this log
                logger.debug("deliver_sm_resp with seq_number " + sequenceNumber + " has been sent");
            } catch (PDUStringException e) {
                logger.fatal("Failed sending deliver_sm_resp", e);
            }
        }

        public void sendEnquireLinkResp(int sequenceNumber) throws IOException {
            pduSender.sendEnquireLinkResp(out, sequenceNumber);
        }

        public void sendGenerickNack(int commandStatus, int sequenceNumber) throws IOException {
            pduSender.sendGenericNack(out, commandStatus, sequenceNumber);
        }

        public void sendNegativeResponse(int originalCommandId, int commandStatus, int sequenceNumber) throws IOException {
            pduSender.sendHeader(out, originalCommandId | SMPPConstant.MASK_CID_RESP, commandStatus, sequenceNumber);
        }

        public void sendUnbindResp(int sequenceNumber) throws IOException {
            pduSender.sendUnbindResp(out, SMPPConstant.STAT_ESME_ROK, sequenceNumber);
        }
    }

    /** Reads incoming PDUs as long as the session is OPEN or bound. */
    private class PDUReaderWorker extends Thread {
        @Override
        public void run() {
            logger.info("Starting PDUReaderWorker");
            while (isReadPdu()) {
                readPDU();
            }
            logger.info("PDUReaderWorker stop");
        }
    }

    /** Drains the enquire_link job counter, sending one keep-alive per job. */
    private class EnquireLinkSender extends Thread {
        @Override
        public void run() {
            logger.info("Starting EnquireLinkSender");
            while (isReadPdu()) {
                long sleepTime = 1000;
                if (enquireLinkToSend > 0) {
                    // FIX: the original was missing this decrement statement
                    // (a bare "enquireLinkToSend" before the try — a syntax
                    // error); consume one queued enquire_link job.
                    enquireLinkToSend--;
                    try {
                        enquireLink();
                    } catch (ResponseTimeoutException e) {
                        closeSocket();
                    } catch (InvalidResponseException e) {
                        // lets unbind gracefully
                        unbindAndClose();
                    } catch (IOException e) {
                        closeSocket();
                    }
                }
                try {
                    Thread.sleep(sleepTime);
                } catch (InterruptedException e) {
                }
            }
            logger.info("EnquireLinkSender stop");
        }
    }

    /** Sends an enquire_link whenever the link has been idle for sessionTimer. */
    private class IdleActivityChecker extends Thread {
        @Override
        public void run() {
            logger.info("Starting IdleActivityChecker");
            while (isReadPdu()) {
                long timeLeftToEnquire = lastActivityTimestamp + sessionTimer - System.currentTimeMillis();
                if (timeLeftToEnquire <= 0) {
                    try {
                        enquireLink();
                    } catch (ResponseTimeoutException e) {
                        closeSocket();
                    } catch (InvalidResponseException e) {
                        // lets unbind gracefully
                        unbindAndClose();
                    } catch (IOException e) {
                        closeSocket();
                    }
                } else {
                    try {
                        Thread.sleep(timeLeftToEnquire);
                    } catch (InterruptedException e) {
                    }
                }
            }
            logger.info("IdleActivityChecker stop");
        }
    }
}
package org.apache.velocity.test.misc;

import java.io.*;
import java.util.*;

import org.apache.velocity.Context;
import org.apache.velocity.Template;
import org.apache.velocity.io.*;
import org.apache.velocity.runtime.Runtime;
import org.apache.velocity.test.provider.TestProvider;

/**
 * This class the testbed for Velocity. It is used to
 * test all the directives support by Velocity.
 *
 * @author <a href="mailto:jvanzyl@periapt.com">Jason van Zyl</a>
 * @version $Id: Test.java,v 1.6 2000/11/27 18:16:51 jvanzyl Exp $
 */
public class Test
{
    /**
     * Initializes the Velocity runtime, populates a context with the
     * TestProvider fixtures, merges the given template and writes the
     * result to stdout. A null templateFile falls back to the bundled
     * example template.
     *
     * @param templateFile path of the template to merge, or null for the default
     */
    public Test(String templateFile)
    {
        Writer writer = null;
        TestProvider provider = new TestProvider();
        ArrayList al = provider.getCustomers();
        Hashtable h = new Hashtable();
        h.put("Bar", "this is from a hashtable!");

        try
        {
            // Full stack traces in the log for every level while testing.
            Runtime.setDefaultProperties();
            Runtime.setProperty(Runtime.RUNTIME_LOG_ERROR_STACKTRACE, "true");
            Runtime.setProperty(Runtime.RUNTIME_LOG_WARN_STACKTRACE, "true");
            Runtime.setProperty(Runtime.RUNTIME_LOG_INFO_STACKTRACE, "true");
            Runtime.init();

            if (templateFile == null)
                templateFile = "examples/example.vm";

            Template template = Runtime.getTemplate(templateFile);

            Context context = new Context();
            context.put("provider", provider);
            context.put("name", "jason");
            context.put("providers", provider.getCustomers2());
            context.put("list", al);
            context.put("hashtable", h);
            context.put("search", provider.getSearch());
            context.put("relatedSearches", provider.getRelSearches());
            context.put("searchResults", provider.getRelSearches());
            context.put("menu", provider.getMenu());
            context.put("stringarray", provider.getArray());

            writer = new BufferedWriter(new OutputStreamWriter(System.out));
            template.merge(context, writer);
            writer.flush();
            writer.close();
        }
        catch( Exception e )
        {
            Runtime.error(e);
        }
    }

    /**
     * Command-line entry point.
     *
     * @param args optional: args[0] is the template file to merge
     */
    public static void main(String[] args)
    {
        // FIX: the original dereferenced args[0] unconditionally and threw
        // ArrayIndexOutOfBoundsException when run with no arguments, even
        // though the constructor explicitly supports a null default.
        new Test(args.length > 0 ? args[0] : null);
    }
}
public class Chicken{ private String name; private String color; private int age; public void layAnEgg(){ System.out.println("The chicken "+name+" laid an egg."); } public void poop(){ System.out.println("The chicken "+name+" did pooped."); } public void drink(){ System.out.println("The chicken "+name+" drank water."); } public void print(){ System.out.println("The name of the chicken is:" + name + ", Color:" + color + "Age:"+ age+" "); } public void initializeData(String color, int age){ this.color=color; this.age=age; } public void addName(String name){ this.name = name; } //constructor public Chicken(String name,String color,int age){ this.name=name; this.color=color; this.age=age; } //setters public void setName(String name){ this.name=name; } public void setColor(String color){ this.color=color; } public void setAge(int age){ this.age=age; } //getters public String getName(){ return this.name; } public String getColor(){ return this.color; } public int getAge(){ return this.age; } }
package com.mikosik.jsolid.d1; import static com.mikosik.jsolid.d1.Anchor1.CENTER; import static com.mikosik.jsolid.d1.Anchor1.MAX; import static com.mikosik.jsolid.d1.Anchor1.MIN; import static org.testory.Testory.given; import static org.testory.Testory.thenReturned; import static org.testory.Testory.thenThrown; import static org.testory.Testory.when; import org.junit.Test; public class Anchor1Test { private Range range; @Test public void move_to_min() throws Exception { given(range = new Range(3, 5)); when(MIN.moveTo(range, 7)); thenReturned(new Range(7, 9)); } @Test public void move_to_center() throws Exception { given(range = new Range(3, 5)); when(CENTER.moveTo(range, 7)); thenReturned(new Range(6, 8)); } @Test public void move_to_max() throws Exception { given(range = new Range(3, 5)); when(MAX.moveTo(range, 7)); thenReturned(new Range(5, 7)); } @Test public void resize_to_min_fails_for_negative_size() throws Exception { given(range = new Range(3, 5)); when(() -> MIN.resizeTo(range, -1)); thenThrown(IllegalArgumentException.class); } @Test public void resize_to_min() throws Exception { given(range = new Range(3, 5)); when(MIN.resizeTo(range, 6)); thenReturned(new Range(3, 9)); } @Test public void resize_to_max_fails_for_negative_size() throws Exception { given(range = new Range(3, 5)); when(() -> MAX.resizeTo(range, -1)); thenThrown(IllegalArgumentException.class); } @Test public void resize_to_max() throws Exception { given(range = new Range(3, 5)); when(MAX.resizeTo(range, 6)); thenReturned(new Range(-1, 5)); } @Test public void resize_to_center_fails_for_negative_size() throws Exception { given(range = new Range(3, 5)); when(() -> CENTER.resizeTo(range, -1)); thenThrown(IllegalArgumentException.class); } @Test public void resize_to_center() throws Exception { given(range = new Range(3, 5)); when(CENTER.resizeTo(range, 6)); thenReturned(new Range(1, 7)); } }
package main.app.controllers; import javafx.fxml.FXML; import javafx.scene.control.Tab; public class DetektorController { @FXML private Tab detektorTab; }
package com.blockscore.models; import com.fasterxml.jackson.annotation.JsonProperty; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; public class Address { @NotNull private String street1; @Nullable private String street2; @NotNull private String city; @NotNull private String subdivision; @NotNull private String postalCode; @NotNull private String countryCode; public Address() { // do nothing } public Address(@NotNull final String street1, @Nullable final String street2, @NotNull final String city , @NotNull final String state, @NotNull final String postalCode, @NotNull final String countryCode) { this.street1 = street1; this.street2 = street2; this.city = city; mState = state; mPostalCode = postalCode; mCountryCode = countryCode; } /** * The primary street address of the customer. This is automatically normalized. * @param street1 Street (Line 1) * @return this */ @NotNull public Address setStreet1(@NotNull final String street1) { this.street1 = street1; return this; } /** * The second address line typically used for apartment or suite numbers. This is automatically normalized. * @param street2 Street (Line 2) * @return this */ @Nullable public Address setStreet2(@NotNull final String street2) { this.street2 = street2; return this; } /** * The city name of the customer. This is automatically normalized. * @param city City * @return this */ @NotNull public Address setCity(@NotNull final String city) { this.city = city; return this; } /** * The subdivision of the entered country. For instance in the United States, this would be a 'state'. * Ex: The subdivision for California would be CA. * @param subdivision Subdivision (FIPS code format) * @return this */ @NotNull public Address setSubdivision(@NotNull final String subdivision) { this.subdivision = subdivision; return this; } /** * The postal code, also known as the ZIP code of the address. 
* @param postalCode ZIP code * @return this */ @NotNull public Address setPostalCode(@NotNull final String postalCode) { this.postalCode = postalCode; return this; } /** * The country of the customer. Should be of the ISO code form. * @param countryCode Country code of the individual. * @return this */ @NotNull public Address setCountryCode(@NotNull final String countryCode) { this.countryCode = countryCode; return this; } /** * Gets the primary street address of the customer. This is automatically normalized. * @return Line 1 of the address. */ @NotNull public String getStreet1() { return street1; } /** * The second address line typically used for apartment or suite numbers. This is automatically normalized. * @return Line 2 of the address. */ @Nullable public String getStreet2() { return street2; } /** * The city name of the customer. This is automatically normalized. * @return City of the address. */ @NotNull public String getCity() { return city; } /** * The subdivision of the entered country. For instance in the United States, this would be a 'state'. * @return The state of the customer. */ @NotNull public String getSubdivision() { return subdivision; } /** * The postal code, also known as the ZIP code of the address. * @return Postal code for the customer. */ @NotNull public String getPostalCode() { return postalCode; } /** * The country of the customer. Should be of the ISO code form. * @return Country code of the customer */ @NotNull public String getCountryCode() { return countryCode; } }
package com.blockscore.models;

import com.blockscore.models.base.BasicResponse;
import com.blockscore.net.BlockscoreApiClient;
import com.blockscore.net.BlockscoreRestAdapter;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.Map;

/**
 * Company model. Instances are created either by Jackson when deserializing an API
 * response, or through {@link Company.Builder}, which POSTs the collected parameters
 * to the Blockscore API.
 */
public class Company extends BasicResponse {
    // Request fields
    @NotNull
    @JsonProperty("entity_name")
    private String entityName;

    @NotNull
    @JsonProperty("tax_id")
    private String taxId;

    @Nullable
    @JsonProperty("incorporation_state")
    private String incorporationState;

    @NotNull
    @JsonProperty("incorporation_country_code")
    private String incorporationCountryCode;

    @NotNull
    @JsonProperty("incorporation_type")
    private String incorporationType;

    // Incorporation date is transported as three separate fields; month is 1-based
    // (see Builder.setIncorporationDate which writes Calendar.MONTH + 1).
    @Nullable
    @JsonProperty("incorporation_day")
    private Integer incorporationDay;

    @Nullable
    @JsonProperty("incorporation_month")
    private Integer incorporationMonth;

    @Nullable
    @JsonProperty("incorporation_year")
    private Integer incorporationYear;

    @Nullable
    @JsonProperty("dbas")
    private String dbas;

    @Nullable
    @JsonProperty("registration_number")
    private String registrationNumber;

    @Nullable
    @JsonProperty("email")
    private String email;

    @Nullable
    @JsonProperty("url")
    private String url;

    @Nullable
    @JsonProperty("phone_number")
    private String phoneNumber;

    @Nullable
    @JsonProperty("ip_address")
    private String ipAddress;

    @Nullable
    @JsonProperty("note")
    private String note;

    @NotNull
    @JsonProperty("address_street1")
    private String addressStreet1;

    @Nullable
    @JsonProperty("address_street2")
    private String addressStreet2;

    @NotNull
    @JsonProperty("address_city")
    private String addressCity;

    @NotNull
    @JsonProperty("address_subdivision")
    private String addressSubdivision;

    @NotNull
    @JsonProperty("address_postal_code")
    private String addressPostalCode;

    @NotNull
    @JsonProperty("address_country_code")
    private String addressCountryCode;

    // Response fields
    @NotNull
    @JsonProperty("details")
    private CompanyDetails details;

    @NotNull
    @JsonProperty("status")
    private String status;

    private Company() {
        // Restricts access to end user so they must use a Company.Builder to create a Person
    }

    /**
     * Gets the name of the entity.
     *
     * @return Name of the entity.
     */
    @NotNull
    public String getEntityName() {
        return entityName;
    }

    /**
     * Gets the Tax ID associated with this entity.
     *
     * @return Tax ID
     */
    @NotNull
    public String getTaxId() {
        return taxId;
    }

    /**
     * Gets the incorporation state. Can be either of ISO code form or the full length name of the state.
     *
     * @return the incorporation state
     */
    @Nullable
    public String getIncorporationState() {
        return incorporationState;
    }

    /**
     * Gets the incorporation country code. Should be of the ISO alpha-2 code form.
     *
     * @return the incorporation country code
     */
    @NotNull
    public String getIncorporationCountryCode() {
        return incorporationCountryCode;
    }

    /**
     * Gets the incorporation type, converted to its enum form.
     *
     * @return the incorporation type
     */
    @NotNull
    public CorporationType getIncorporationType() {
        return CorporationType.toEnum(incorporationType);
    }

    /**
     * Gets the date of incorporation, or {@code null} when any of the three
     * day/month/year components is missing.
     *
     * @return the incorporation date
     */
    @Nullable
    public Date getIncorporationDate() {
        if (incorporationDay == null || incorporationMonth == null || incorporationYear == null) {
            return null;
        }
        // FIX: incorporationMonth is stored 1-based (Builder.setIncorporationDate writes
        // Calendar.MONTH + 1), but GregorianCalendar's constructor takes a 0-based month.
        // Without the "- 1" the returned date was shifted forward by one month.
        GregorianCalendar calendarDay =
                new GregorianCalendar(incorporationYear, incorporationMonth - 1, incorporationDay);
        return calendarDay.getTime();
    }

    /**
     * Gets a list of "doing business as" names, which are other names this business may be known by.
     *
     * @return the "doing business as" names
     */
    @Nullable
    public String getDbas() {
        return dbas;
    }

    /**
     * Gets the registration number. This is the number typically provided by the state of
     * incorporation which is assigned to a business. Should only include the digits of the
     * registration number with no extraneous characters like dashes.
     *
     * @return the registration number
     */
    @Nullable
    public String getRegistrationNumber() {
        return registrationNumber;
    }

    /**
     * Gets the email for this entity.
     *
     * @return the email address
     */
    @Nullable
    public String getEmail() {
        return email;
    }

    /**
     * Gets the URL for the entity.
     *
     * @return the URL
     */
    @Nullable
    public String getUrl() {
        return url;
    }

    /**
     * Gets the company's phone number.
     *
     * @return the phone number
     */
    @Nullable
    public String getPhoneNumber() {
        return phoneNumber;
    }

    /**
     * Gets a company's IP address.
     *
     * @return the IP address
     */
    @Nullable
    public String getIpAddress() {
        return ipAddress;
    }

    /**
     * Gets any note data you have associated with the Company.
     *
     * @return the note data
     */
    @Nullable
    public String getNote() {
        return note;
    }

    /**
     * Gets the address for this company, reassembled from the flat address_* fields.
     *
     * @return Address
     */
    @NotNull
    public Address getAddress() {
        return new Address(addressStreet1, addressStreet2, addressCity, addressSubdivision,
                           addressPostalCode, addressCountryCode);
    }

    /**
     * Gets either valid or invalid and is the culmination of whether or not the passed
     * in information is valid against various databases and signals.
     *
     * @return true if valid.
     */
    public boolean isValid() {
        return status.equalsIgnoreCase("valid");
    }

    /**
     * Contains a breakdown of how the status (validity) was determined. It will let you diagnose
     * problems like address inconsistencies.
     *
     * @return the details
     */
    @Nullable
    public CompanyDetails getDetails() {
        return details;
    }

    /**
     * Collects the request parameters for a Company and creates it via the API.
     */
    public static class Builder {
        private BlockscoreRestAdapter restAdapter;
        private Map<String, String> queryOptions;

        public Builder(BlockscoreApiClient client) {
            this.restAdapter = client.getAdapter();
            queryOptions = new HashMap<String, String>();
        }

        /**
         * Sets the name of the entity.
         *
         * @param entityName the entity name
         * @return this
         */
        @NotNull
        public Builder setEntityName(@NotNull final String entityName) {
            queryOptions.put("entity_name", entityName);
            return this;
        }

        /**
         * Sets the Tax ID for this entity. The tax ID should only include the digits of the ID with
         * no extraneous characters like dashes.
         *
         * @param taxId the tax ID for the company
         * @return this
         */
        @NotNull
        public Builder setTaxId(@NotNull final String taxId) {
            queryOptions.put("tax_id", taxId);
            return this;
        }

        /**
         * Sets the incorporation state. Can be either of ISO code form or the full length name of the state.
         *
         * @param incorporationState the incorporation state
         * @return this
         */
        @NotNull
        public Builder setIncorporationState(@Nullable final String incorporationState) {
            queryOptions.put("incorporation_state", incorporationState);
            return this;
        }

        /**
         * Sets the incorporation country code. Should be of the ISO alpha-2 code form.
         *
         * @param incorporationCountryCode the country code
         * @return this
         */
        @NotNull
        public Builder setIncorporationCountryCode(@NotNull final String incorporationCountryCode) {
            queryOptions.put("incorporation_country_code", incorporationCountryCode);
            return this;
        }

        /**
         * Sets the incorporation type.
         *
         * @param incorporationType the corporation type
         * @return this
         */
        public Builder setIncorporationType(@NotNull final CorporationType incorporationType) {
            queryOptions.put("incorporation_type", String.valueOf(incorporationType));
            return this;
        }

        /**
         * Sets the incorporation date, split into the three day/month/year request fields.
         * A null date is silently ignored.
         *
         * @param incorporationDate the incorporation date
         * @return this
         */
        @NotNull
        public Builder setIncorporationDate(@Nullable final Date incorporationDate) {
            if (incorporationDate == null) {
                return this;
            }
            Calendar calendar = new GregorianCalendar();
            calendar.setTime(incorporationDate);
            queryOptions.put("incorporation_day", String.valueOf(calendar.get(Calendar.DAY_OF_MONTH)));
            // Calendar.MONTH is 0-based; the API expects a 1-based month.
            queryOptions.put("incorporation_month", String.valueOf(calendar.get(Calendar.MONTH) + 1));
            queryOptions.put("incorporation_year", String.valueOf(calendar.get(Calendar.YEAR)));
            return this;
        }

        /**
         * Sets the "doing business as" names for the company.
         *
         * @param dbas the doing business as names
         * @return this.
         */
        public Builder setDbas(@Nullable final String dbas) {
            queryOptions.put("dbas", dbas);
            return this;
        }

        /**
         * Sets the registration number for this entity. Should only include the digits of the
         * registration number with no extraneous characters like dashes.
         *
         * @param registrationNumber the registration number
         * @return this
         */
        public Builder setRegistrationNumber(@Nullable final String registrationNumber) {
            queryOptions.put("registration_number", registrationNumber);
            return this;
        }

        /**
         * Sets the email for this entity. Any form of valid email is accepted.
         *
         * @param email the email address
         * @return this
         */
        public Builder setEmail(@Nullable final String email) {
            queryOptions.put("email", email);
            return this;
        }

        /**
         * Sets the URL for the entity.
         *
         * @param url the URL
         * @return this
         */
        public Builder setUrl(@Nullable final String url) {
            queryOptions.put("url", url);
            return this;
        }

        /**
         * Sets a company's phone number. Extra characters like parenthesis and dashes are
         * accepted - can either contain the country code or not.
         *
         * @param phoneNumber the phone number
         * @return this
         */
        @NotNull
        public Builder setPhoneNumber(@Nullable final String phoneNumber) {
            queryOptions.put("phone_number", phoneNumber);
            return this;
        }

        /**
         * Sets a company's IP address. Both IPv4 and IPv6 style IP addresses are acceptable.
         *
         * @param ipAddress the IP address
         * @return this
         */
        @NotNull
        public Builder setIpAddress(@Nullable final String ipAddress) {
            queryOptions.put("ip_address", ipAddress);
            return this;
        }

        /**
         * Store additional information about the candidate here such as your internal system's identifier
         * for this individual. This will allow you to keep track of them.
         *
         * @param note the additional information
         * @return this
         */
        @NotNull
        public Builder setNote(@Nullable final String note) {
            queryOptions.put("note", note);
            return this;
        }

        /**
         * Sets the company's address, flattened into the address_* request fields.
         * NOTE(review): street2 may be null here and is still inserted into the map —
         * confirm the REST adapter tolerates null query values.
         *
         * @param address the address
         * @return this
         */
        public Builder setAddress(@NotNull final Address address) {
            queryOptions.put("address_street1", address.getStreet1());
            queryOptions.put("address_street2", address.getStreet2());
            queryOptions.put("address_city", address.getCity());
            queryOptions.put("address_subdivision", address.getSubdivision());
            queryOptions.put("address_postal_code", address.getPostalCode());
            queryOptions.put("address_country_code", address.getCountryCode());
            return this;
        }

        /**
         * Creates a new {@code Company}.
         *
         * @return the new company
         */
        public Company create() {
            Company company = restAdapter.createCompany(queryOptions);
            return company;
        }
    }
}
// File: Guide.java (20-Oct-2011)
// Tim Niblett (the Author) and may not be used,
// sold, licenced, transferred, copied or reproduced in whole or in
// part in any manner or form or in or on any media to any person
// other than in accordance with the terms of The Author's agreement
// or otherwise without the prior written consent of The Author. All
// information contained in this source file is confidential information
// belonging to The Author and as such may not be disclosed other
// than in accordance with the terms of The Author's agreement, or
// otherwise, without the prior written consent of The Author. As
// confidential information this source file must be kept fully and
// effectively secure at all times.

package com.cilogi.ds.guide;

import com.cilogi.ds.guide.diagrams.Diagrams;
import com.cilogi.ds.guide.mapper.GuideMapper;
import com.cilogi.ds.guide.galleries.Gallery;
import com.cilogi.ds.guide.listings.Listing;
import com.cilogi.ds.guide.mapper.Location;
import com.cilogi.ds.guide.media.GuideAudio;
import com.cilogi.ds.guide.media.GuideImage;
import com.cilogi.ds.guide.pages.Page;
import com.cilogi.ds.guide.pages.PageImage;
import com.cilogi.ds.guide.shop.Shop;
import com.cilogi.ds.guide.tours.PageRef;
import com.cilogi.ds.guide.tours.Tour;
import com.cilogi.ds.guide.tours.TourStop;
import com.cilogi.util.path.PathUtil;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.google.common.collect.Sets;
import lombok.*;

import java.io.IOException;
import java.io.Serializable;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.logging.Logger;

/**
 * JSON-serializable representation of a guide: its pages, tours, galleries,
 * listings, media (images/audio) and configuration. Getters/setters are
 * generated by Lombok's {@code @Data}; Jackson handles (de)serialization.
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
@EqualsAndHashCode
@ToString
@Data
public class GuideJson implements Serializable, IGuide {
    @SuppressWarnings("unused")
    static final Logger LOG = Logger.getLogger(GuideJson.class.getName());

    private static final long serialVersionUID = -9153256781053121634L;

    private static final String CONFIG_NAME = "config.json";
    private static final String DEFAULT_VERSION = "1";
    private static final String DEFAULT_GUIDE_SPEC_VERSION = "3";

    // Unique guide name; null until set (the no-arg constructor does not initialize it).
    private java.lang.String name;
    // Version of the guide spec this instance conforms to; fixed at construction.
    private final String guideSpecVersion;
    private Config config;
    private String title;
    private String description;
    private List<Page> pages;
    // Maps page id (as a decimal string) to the digest of that page's content.
    private Map<String,String> pageDigests;
    private Diagrams diagrams;
    private Set<GuideImage> images;
    private Set<GuideAudio> audioClips;
    private List<Tour> tours;
    private List<Gallery> galleries;
    private List<Listing> listings;
    private Map<String,byte[]> etags;
    private Shop shop;

    /**
     * Deserialize a guide from a JSON string; unknown properties are ignored
     * so newer JSON can still be read by older code.
     *
     * @param data JSON text
     * @return the parsed guide
     * @throws IOException if the JSON cannot be parsed
     */
    public static GuideJson fromJSON(String data) throws IOException {
        GuideMapper mapper = new GuideMapper();
        mapper.disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES);
        return mapper.readValue(data, GuideJson.class);
    }

    /** @return the canonical name of the guide's config file ("config.json"). */
    public String getConfigName() {
        return CONFIG_NAME;
    }

    /**
     * Creates an empty guide with concurrent collections for media/digests.
     * Note: {@code name} is left null here; callers must set it (or use the
     * name constructor) before {@link #isValidState()} is consulted.
     */
    public GuideJson() {
        title = "";
        description = "";
        guideSpecVersion = DEFAULT_GUIDE_SPEC_VERSION;
        config = new Config();
        pages = new ArrayList<>();
        pageDigests = new ConcurrentHashMap<>();
        diagrams = new Diagrams();
        images = Sets.newConcurrentHashSet();
        audioClips = Sets.newConcurrentHashSet();
        tours = new ArrayList<>();
        galleries = new ArrayList<>();
        listings = new ArrayList<>();
        etags = new java.util.HashMap<>();
    }

    /**
     * Creates an empty guide with the given name; the title defaults to the name.
     * @param name the guide name (must not be null)
     */
    public GuideJson(@NonNull String name) {
        this(); this.name = name;
        setTitle(name);
    }

    /**
     * Copy constructor
     * @param guide The guide to copy
     */
    public GuideJson(IGuide guide) {
        this.guideSpecVersion = guide.getGuideSpecVersion();
        this.name = guide.getName();
        this.title = guide.getTitle();
        this.description = guide.getDescription();
        // Collections are copied so this instance does not share state with the source guide.
        this.pages = new ArrayList<>(guide.getPages());
        this.pageDigests = new ConcurrentHashMap<>(guide.getPageDigests());
        this.diagrams = new Diagrams(guide.getDiagrams());
        this.images = Sets.newConcurrentHashSet(guide.getImages());
        this.audioClips = Sets.newConcurrentHashSet(guide.getAudioClips());
        this.tours = new ArrayList<>(guide.getTours());
        this.galleries = new ArrayList<>(guide.getGalleries());
        this.listings = new ArrayList<>(guide.getListings());
        this.etags = new java.util.HashMap<>(guide.getEtags());
        this.config = (guide.getConfig() == null) ? null : new Config(guide.getConfig());
        if (this.config != null && this.title != null) {
            // Keep the config's title in sync with the guide's title.
            this.config.setTitle(this.title);
        }
        this.shop = (guide.getShop() == null) ? null : new Shop(guide.getShop());
    }

    /**
     * Can this guide be saved properly?
     * NOTE(review): throws NullPointerException if {@code name} was never set
     * (e.g. after the no-arg constructor) — confirm callers guarantee a name.
     * @return true iff its OK to save
     */
    @JsonIgnore
    public boolean isValidState() {
        return name.length() > 0;
    }

    /** @return true when a config exists and it is flagged as shared. */
    @JsonIgnore
    public boolean isShared() {
        return config != null && config.isShared();
    }

    /** @return the title, falling back to the guide name when no title is set. */
    public String getTitle() {
        return (title == null) ? name : title;
    }

    /** Sets the title and mirrors it into the config. */
    public void setTitle(@NonNull String name) {
        title = name;
        getConfig().setTitle(name);
    }

    /** @return the set of ids of all tours in this guide. */
    public Set<String> tourNames() {
        List<Tour> tours = getTours();
        Set<String> names = new HashSet<>();
        for (Tour tour : tours) {
            names.add(tour.getId());
        }
        return names;
    }

    /**
     * Export named tour
     * @param name The name of the tour
     * @return null if there is no tour of that name, else the tour with stops prefixed with the guide
     * name, so that the tour can be imported without change into other guides.
     */
    public Tour exportTour(@NonNull String name) {
        Tour tour = findTour(name);
        if (tour == null) {
            return null;
        } else {
            // Work on a copy so the guide's own tour is not mutated.
            Tour out = new Tour(tour);
            for (TourStop stop : out.getStops()) {
                PageRef pageRef = stop.getPageRef();
                if (!pageRef.isExternal()) {
                    // Qualify local stops with this guide's name so they resolve elsewhere.
                    stop.setPageRef(new PageRef(getName(), pageRef.getPageIndex()));
                }
            }
            return out;
        }
    }

    /**
     * Import tour into guide
     * @param tourToImport The tour
     * @return The imported tour, where stops that are in this guide get converted to be local
     */
    public Tour importTour(@NonNull Tour tourToImport) {
        Tour tour = new Tour(tourToImport);
        // Replace any existing tour with the same id.
        Tour current = findTour(tour.getId());
        if (current != null) {
            getTours().remove(current);
        }
        String guideName = getName();
        for (TourStop stop : tour.getStops()) {
            PageRef ref = stop.getPageRef();
            if (ref.isExternal() && guideName.equals(ref.getGuideName())) {
                // A stop referencing this guide by name becomes a local ("" guide) reference.
                stop.setPageRef(new PageRef("", ref.getPageIndex()));
            }
        }
        getTours().add(tour);
        return tour;
    }

    /** Linear search for a tour by id; null when absent. */
    public Tour findTour(@NonNull String name) {
        for (Tour tour : tours) {
            if (name.equals(tour.getId())) {
                return tour;
            }
        }
        return null;
    }

    /** Linear search for a page by numeric id; null when absent. */
    public Page findPage(int pageId) {
        for (Page page: pages) {
            if (page.getId() == pageId) {
                return page;
            }
        }
        return null;
    }

    /** Linear search for an image by id; null when absent. */
    public GuideImage findImage(@NonNull String name) {
        Set<GuideImage> images = getImages();
        for (GuideImage image : images) {
            if (name.equals(image.getId())) {
                return image;
            }
        }
        return null;
    }

    /** Replaces any page with the same id, then adds the given page. */
    public void updatePage(@NonNull Page page) {
        int id = page.getId();
        Page already = findPage(id);
        if (already != null) {
            pages.remove(already);
        }
        pages.add(page);
    }

    /** Records the content digest for a page, keyed by the page id as a string. */
    public void setPageDigest(int pageId, @NonNull String digest) {
        getPageDigests().put(Integer.toString(pageId), digest);
    }

    /**
     * Finds the GuideImage whose id equals the final path segment of imageName;
     * null when absent.
     */
    public synchronized GuideImage guideImageFor(String imageName) {
        String imageId = PathUtil.name(imageName);
        Set<GuideImage> images = getImages();
        for (GuideImage image : images) {
            if (imageId.equals(image.getId())) {
                return image;
            }
        }
        return null;
    }

    /**
     * Finds the GuideAudio whose id equals the final path segment of audioName;
     * null when absent.
     */
    public synchronized GuideAudio guideAudioFor(String audioName) {
        String audioId = PathUtil.name(audioName);
        Set<GuideAudio> audioClips = getAudioClips();
        for (GuideAudio audioClip : audioClips) {
            if (audioId.equals(audioClip.getId())) {
                return audioClip;
            }
        }
        return null;
    }

    /** Stores digest and dimensions on an already-located GuideImage. */
    public synchronized void setGuideImageDigest(GuideImage guideImage, int width, int height, String digest) {
        guideImage.setDigest(digest)
                .setWidth(width)
                .setHeight(height);
    }

    /**
     * Sets digest and URL on the audio clip matching audioName's final path segment.
     * @return true when a matching clip was found and updated
     */
    public synchronized boolean setAudioDigest(String audioName, String digest) {
        String audioId = PathUtil.name(audioName);
        Set<GuideAudio> audios = getAudioClips();
        for (GuideAudio audio : audios) {
            if (audioId.equals(audio.getId())) {
                audio.setDigest(digest)
                        .setUrl(audioName);
                return true;
            }
        }
        return false;
    }

    /** Appends a page without checking for duplicate ids (contrast updatePage). */
    public synchronized void appendPage(@NonNull Page page) {
        pages.add(page);
    }

    /**
     * If a page image has some alt text and the GuideImage does not have
     * a title set then set the title from the alt text.
     */
    public synchronized void synchronizeGuideImagesWithPageImages() {
        for (Page page : getPages()) {
            List<PageImage> images = page.getImages();
            for (PageImage image : images) {
                String src = PathUtil.name(image.getSrc());
                String alt = image.getAlt();
                GuideImage guideImage = findImage(src);
                if (guideImage == null) {
                    LOG.warning("Can't find image " + src);
                    continue;
                }
                String existingTitle = guideImage.getTitle();
                if (alt != null) {
                    if (existingTitle == null || existingTitle.trim().equals("")) {
                        guideImage.setTitle(alt);
                    }
                } else {
                    assert alt == null;
                    // No alt text: copy the GuideImage title (possibly null) back to the page image.
                    image.setAlt(existingTitle);
                }
            }
        }
    }

    /**
     * Fills in a location for each tour that lacks one: first from the first
     * stop whose page has a lat/lng location, then from the config's latlng.
     * NOTE(review): the config fallback runs unconditionally after the stop
     * loop, so a location found from a stop is overwritten whenever the config
     * has a latlng — confirm whether that override is intended.
     */
    public synchronized void setTourLocations() {
        for (Tour tour : getTours()) {
            if (tour.getLocation() == null) {
                for (TourStop stop : tour.getStops()) {
                    PageRef ref = stop.getPageRef();
                    if (ref.isCompatibleGuide(getName())) {
                        Page page = findPage(ref.getPageIndex());
                        if (page != null && page.getLocation() != null && page.getLocation().isLatLng()) {
                            tour.setLocation(page.getLocation());
                            break;
                        }
                    }
                }
                if (getConfig().getLatlng() != null) {
                    tour.setLocation(new Location(getConfig().getLatlng()));
                }
            }
        }
    }

    /** Marks every tour public under this guide's name. */
    public void makeToursPublic() {
        String guideName = getName();
        for (Tour tour : getTours()) {
            tour.makePublic(guideName);
        }
    }

    /**
     * Serializes this guide to JSON; on failure logs and returns "{}" rather
     * than propagating the exception.
     */
    public String toJSONString() {
        try {
            GuideMapper mapper = new GuideMapper();
            return mapper.writeValueAsString(this);
        } catch (JsonProcessingException e) {
            LOG.severe("Can't convert Guide " + this + " to JSON string");
            return "{}";
        }
    }
}
package com.fishercoder.solutions;

import java.util.Arrays;

/**
 * 16. 3Sum Closest
 *
 * Given an array of integers and a target, find three elements whose sum is
 * closest to the target and return that sum.
 */
public class _16 {
    public static class Solution1 {
        /**
         * Sort, then for each anchor element run a two-pointer scan over the
         * remainder, tracking the sum with the smallest distance to target.
         * O(n^2) time, O(1) extra space.
         *
         * @param nums   input array with at least three elements
         * @param target the target sum
         * @return the sum of three elements closest to target
         */
        public int threeSumClosest(int[] nums, int target) {
            Arrays.sort(nums);
            int sum = nums[0] + nums[1] + nums[2];
            for (int i = 0; i < nums.length - 2; i++) {
                int left = i + 1;
                int right = nums.length - 1;
                while (left < right) {
                    int thisSum = nums[i] + nums[left] + nums[right];
                    if (Math.abs(thisSum - target) < Math.abs(sum - target)) {
                        sum = thisSum;
                        if (sum == target) {
                            // Cannot get closer than an exact match.
                            return sum;
                        }
                    } else if (target > thisSum) {
                        // Sum too small: advance the left pointer to grow it.
                        left++;
                    } else {
                        // FIX: original read "right" with no decrement, which did not
                        // compile and would never shrink the window. Sum too large:
                        // pull the right pointer in to reduce it.
                        right--;
                    }
                }
            }
            return sum;
        }
    }
}
package com.fishercoder.solutions;

import java.util.ArrayList;
import java.util.List;

/**
 * 89. Gray Code
 *
 * The gray code is a binary numeral system where two successive values differ in only one bit.
 * Given a non-negative integer n representing the total number of bits in the code,
 * print the sequence of gray code.
 * A gray code sequence must begin with 0. For example, given n = 2, return [0,1,3,2]. Its gray code sequence is:
 00 - 0
 01 - 1
 11 - 3
 10 - 2
 Note:
 For a given n, a gray code sequence is not uniquely defined.
 For example, [0,2,3,1] is also a valid gray code sequence according to the above definition.
 For now, the judge is able to judge based on one instance of gray code sequence. Sorry about that.
 */
public class _89 {
    public static class Solution1 {
        /**
         * The i-th gray code is i ^ (i >> 1); enumerate i from 0 to 2^n - 1
         * using a bit shift for the bound.
         */
        public List<Integer> grayCode(int n) {
            List<Integer> result = new ArrayList<>();
            for (int i = 0; i < (1 << n); i++) {
                result.add(i ^ (i >> 1));
            }
            return result;
        }
    }

    public static class Solution2 {
        /** Same formula as Solution1 but with Math.pow(2, n) as the loop bound. */
        public List<Integer> grayCode(int n) {
            List<Integer> result = new ArrayList<>();
            for (int i = 0; i < Math.pow(2, n); i++) {
                result.add(i ^ (i >> 1));
            }
            return result;
        }
    }

    public static void main(String... args) {
        // FIX: the original main contained two unterminated string literals
        // (nested, unclosed println calls) that broke compilation; the intended
        // explanatory text is reconstructed below.
        System.out.println("How to understand 1 << n? It means two to the power of n, see below. "
                + "So we have an equivalent solution, which is solution2.");
        System.out.println("1 << 2: " + (1 << 2));
        System.out.println("1 << 3: " + (1 << 3));
        System.out.println("1 << 4: " + (1 << 4));
        System.out.println("1 << 5: " + (1 << 5));
        System.out.println("1 << 6: " + (1 << 6));
        System.out.println("How to understand i >> 1? It means to shift the number i to the right by 1 bit, see below");
        System.out.println("0 >> 1: " + (0 >> 1));
        System.out.println("1 >> 1: " + (1 >> 1));
        System.out.println("2 >> 1: " + (2 >> 1));
        System.out.println("3 >> 1: " + (3 >> 1));
        System.out.println("4 >> 1: " + (4 >> 1));
        System.out.println("5 >> 1: " + (5 >> 1));
        System.out.println("6 >> 1: " + (6 >> 1));
    }
}
package com.jaamsim.BasicObjects;

import com.jaamsim.Graphics.DisplayEntity;
import com.jaamsim.Samples.SampleConstant;
import com.jaamsim.Samples.SampleExpInput;
import com.jaamsim.basicsim.Entity;
import com.jaamsim.input.EntityInput;
import com.jaamsim.input.InputErrorException;
import com.jaamsim.input.Keyword;
import com.jaamsim.input.Output;
import com.jaamsim.math.Vec3d;
import com.jaamsim.units.DimensionlessUnit;
import com.jaamsim.units.TimeUnit;

/**
 * Simulation component that packs a configured number of entities from its
 * wait queue into copies of a prototype EntityContainer, then sends each full
 * container downstream. The pack loop alternates between startAction()
 * (schedule one insertion) and endAction() (perform it).
 */
public class Pack extends LinkedService {

    @Keyword(description = "The prototype for EntityContainers to be generated.\n" +
            "The generated EntityContainers will be copies of this entity.",
            example = "Pack1 PrototypeEntityContainer { ProtoContainer }")
    protected final EntityInput<EntityContainer> prototypeEntityContainer;

    @Keyword(description = "The number of entities to pack into the container.",
            example = "Pack1 NumberOfEntities { 2 }")
    private final SampleExpInput numberOfEntities;

    @Keyword(description = "The service time required to pack each entity in the container.",
            example = "Pack1 ServiceTime { 3.0 h }")
    private final SampleExpInput serviceTime;

    protected EntityContainer container;	// the generated EntityContainer
    private int numberGenerated;  // Number of EntityContainers generated so far
    private int numberInserted;   // Number of entities inserted to the EntityContainer
    private int numberToInsert;   // Number of entities to insert in the present EntityContainer
    private boolean startedPacking;  // True if the packing process has already started
    private DisplayEntity packedEntity;  // the entity being packed

    // Instance initializer: register the three keyword inputs with their units
    // and valid ranges (at least 1 entity per container; non-negative service time).
    {
        prototypeEntityContainer = new EntityInput<>(EntityContainer.class, "PrototypeEntityContainer",
                "Key Inputs", null);
        this.addInput(prototypeEntityContainer);

        numberOfEntities = new SampleExpInput("NumberOfEntities", "Key Inputs", new SampleConstant(1.0));
        numberOfEntities.setUnitType(DimensionlessUnit.class);
        numberOfEntities.setEntity(this);
        numberOfEntities.setValidRange(1, Double.POSITIVE_INFINITY);
        this.addInput(numberOfEntities);

        serviceTime = new SampleExpInput("ServiceTime", "Key Inputs", new SampleConstant(TimeUnit.class, 0.0));
        serviceTime.setUnitType(TimeUnit.class);
        serviceTime.setEntity(this);
        serviceTime.setValidRange(0, Double.POSITIVE_INFINITY);
        this.addInput(serviceTime);
    }

    /**
     * Validates the model inputs: a prototype container is mandatory unless the
     * input is hidden, and the sample expressions must be self-consistent.
     */
    @Override
    public void validate() {
        super.validate();

        // Confirm that prototype entity has been specified
        if (!prototypeEntityContainer.getHidden() && prototypeEntityContainer.getValue() == null) {
            throw new InputErrorException("The keyword PrototypeEntityContainer must be set.");
        }

        numberOfEntities.validate();
        serviceTime.validate();
    }

    /**
     * Resets the packing state at the start of a run.
     * NOTE(review): numberToInsert is not reset here — it is recomputed in
     * startAction() before first use, but confirm that is intentional.
     */
    @Override
    public void earlyInit() {
        super.earlyInit();
        container = null;
        numberGenerated = 0;
        numberInserted = 0;
        startedPacking = false;
        packedEntity = null;
    }

    /**
     * Creates the next container as a fast copy of the prototype, named
     * "<prototype>_Copy<N>" with a monotonically increasing N.
     */
    protected EntityContainer getNextContainer() {
        numberGenerated++;
        EntityContainer proto = prototypeEntityContainer.getValue();
        StringBuilder sb = new StringBuilder();
        sb.append(proto.getName()).append("_Copy").append(numberGenerated);
        EntityContainer ret = Entity.fastCopy(proto, sb.toString());
        ret.earlyInit();
        return ret;
    }

    /**
     * One step of the pack loop: stop if a threshold is closed, lazily create
     * and position a container, wait until the queue holds enough entities,
     * then take the next entity and schedule endAction() after the service time.
     */
    @Override
    public void startAction() {

        // Do any of the thresholds stop the generator?
        if (this.isClosed()) {
            this.setBusy(false);
            this.setPresentState();
            return;
        }

        // If necessary, get a new container
        if (container == null) {
            container = this.getNextContainer();
            numberInserted = 0;

            // Position the container over the pack object
            // (small z offset so it renders above the Pack graphic)
            Vec3d tmp = this.getGlobalPositionForAlignment(new Vec3d());
            tmp.add3(new Vec3d(0,0,0.01));
            container.setGlobalPosition(tmp);
        }

        // Are there sufficient entities in the queue to start packing?
        if (!startedPacking) {
            // Sample the batch size once per container, then require the whole
            // batch to be queued before starting.
            numberToInsert = (int) numberOfEntities.getValue().getNextSample(this.getSimTime());
            if (waitQueue.getValue().getCount() < numberToInsert) {
                this.setBusy(false);
                this.setPresentState();
                return;
            }
            startedPacking = true;
        }

        // Schedule the insertion of the next entity
        packedEntity = this.getNextEntity();
        double dt = serviceTime.getValue().getNextSample(getSimTime());
        this.scheduleProcess(dt, 5, endActionTarget);
    }

    /**
     * Completes one insertion: moves the in-flight entity into the container;
     * when the batch is complete, ships the container downstream and resets for
     * the next one. Always loops back into startAction().
     */
    @Override
    public void endAction() {

        // Remove the next entity from the queue and pack the container
        container.addEntity(packedEntity);
        packedEntity = null;
        numberInserted++;

        // If the container is full, send it to the next component
        if (numberInserted == numberToInsert) {
            this.sendToNextComponent(container);
            container = null;
            numberInserted = 0;
            startedPacking = false;
        }

        // Insert the next entity
        this.startAction();
    }

    // Output for the UI/expressions: the container currently being filled
    // (null between containers).
    @Output(name = "Container",
            description = "The EntityContainer that is being filled.")
    public DisplayEntity getContainer(double simTime) {
        return container;
    }
}
package com.pump.window;

import java.awt.Component;
import java.awt.Point;
import java.awt.Window;
import java.awt.event.MouseEvent;

import javax.swing.SwingUtilities;
import javax.swing.event.MouseInputAdapter;

import com.pump.util.JVM;

/**
 * Mouse listener that lets the user move a window by pressing and dragging on
 * any component it is registered with. Dragging only moves the window while
 * {@link #isActive()} is true, but the drag origin is tracked either way so
 * activation can be toggled mid-drag.
 */
public class WindowDragger extends MouseInputAdapter {

	Point mouseLoc;
	boolean dragging;
	boolean active;

	public WindowDragger() {
	}

	public WindowDragger(Component c) {
		this(new Component[] { c });
	}

	/** Registers this dragger as mouse + motion listener on every component. */
	public WindowDragger(Component[] c) {
		for (int i = 0; i < c.length; i++) {
			c[i].addMouseListener(this);
			c[i].addMouseMotionListener(this);
		}
	}

	@Override
	public void mousePressed(MouseEvent e) {
		// Record the drag origin in screen coordinates and arm the drag.
		mouseLoc = e.getLocationOnScreen();
		dragging = true;
	}

	@Override
	public void mouseReleased(MouseEvent e) {
		// Disarm and drop the origin so stray drag events are ignored.
		dragging = false;
		mouseLoc = null;
	}

	@Override
	public void mouseDragged(MouseEvent e) {
		if (mouseLoc == null || !dragging) {
			return;
		}
		synchronized (mouseLoc) {
			Point current = e.getLocationOnScreen();
			if (JVM.isMac) {
				// On Mac, never report a point above the menu bar.
				current.y = Math.max(0, current.y);
			}
			if (active) {
				Window window = SwingUtilities
						.getWindowAncestor((Component) e.getSource());
				WindowDragger.translateWindow(current.x - mouseLoc.x,
						current.y - mouseLoc.y, window);
			}
			// Track the pointer even when inactive, so enabling mid-drag
			// continues from the current position without a jump.
			mouseLoc.setLocation(current);
		}
	}

	/**
	 * Translates a window, after possibly adjusting dx and dy for OS-based
	 * restraints.
	 */
	protected static void translateWindow(int dx, int dy, Window window) {
		Point location = window.getLocation();
		location.translate(dx, dy);
		if (JVM.isMac) {
			// Keep the title bar on screen under the Mac menu bar.
			location.y = Math.max(0, location.y);
		}
		window.setLocation(location);
	}

	/** Enables or disables actual window movement during drags. */
	public void setActive(boolean b) {
		active = b;
	}

	/** @return true when drags move the ancestor window. */
	public boolean isActive() {
		return active;
	}
}
package com.untamedears.humbug;

import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;

import org.bukkit.Material;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.configuration.file.FileConfiguration;
import org.bukkit.plugin.Plugin;

import com.google.common.base.Splitter;
import com.untamedears.humbug.Humbug;
import com.untamedears.humbug.annotations.BahHumbug;
import com.untamedears.humbug.annotations.BahHumbugs;
import com.untamedears.humbug.annotations.ConfigOption;

/**
 * Singleton facade over the Humbug plugin's configuration file.
 *
 * Dynamic options are discovered by scanning {@code Humbug}'s methods for
 * {@code @BahHumbug}/{@code @BahHumbugs} annotations; a handful of
 * "special" options (debug flag, books, loot multipliers, item-drop
 * removal, etc.) have dedicated typed accessors.
 */
public class Config {
  /** Lazily created singleton; see {@link #initialize(Humbug)}. */
  private static Config global_instance_ = null;

  // Configuration defaults
  private static final boolean debug_log_ = false;
  private static final int quartz_gravel_percentage_ = 0;
  private static final int cobble_from_lava_scan_radius_ = 0;
  private static final String find_end_portals_ = null;
  private static final int projectile_slow_ticks_ = 100;
  private static final int loot_multiplier_ = -1;
  private static final String book_name_ = "Welcome to Devoted";
  private static final String book_author_ = "Bonkill";
  // In the book text, "{|" is a placeholder for the Minecraft formatting
  // character (U+00A7) and "}|" is the page separator; both are expanded by
  // the compiled_*_ Splitter fields below.
  private static final String book_text_ =
      " {|oWhat is Devoted?\n" +
      "\"A new type of Minecraft server built towards providing a competitive, high stakes, immersive experience controlled by the players.\"{|r\n" +
      "-ShadedJon, server owner\n}|" +
      " {|oWhat is Devoted?{|r\n" +
      "{|lNOT just survival{|r Admins are hands-off and only handle glitches and hackers\n" +
      "{|lNOT just chaos{|r Mods allow players and towns to enforce their own rules\n" +
      "{|lNOT just roleplay{|r Nobody pretends -- conflict is genuine and heated\n}|" +
      " {|oBasic Mechanics{|r\n" +
      "-The world is a series of islands\n" +
      "-Each island is about 1000 blocks across\n" +
      "-Limited chat range\n" +
      "-Respawn in random area unless you sleep\n" +
      "-Food grows slowly\n" +
      "-Mobs spawn sparsely\n" +
      "-No Nether portals\n" +
      "-Nether biomes instead\n" +
      " {|oDevoted Mods{|r\n" +
      "{|lCitadel{|r: Reinforces things so it takes numerous breaks to destroy them. Locks some things too.\n" +
      "{|lMore Info:{|r {|oVisual Guide:{|r imgur.com/BnlL2 {|oWiki Page:{|r tinyurl.com/citadelmod\n" +
      "{|oType \"/help citadel\" in chat to get commands\n}|" +
      " {|lPrison Pearl{|r: Ender Pearls trap players in the end. Others can steal back your pearl and free you -- you always know where it is so they cannot hide it\n" +
      "{|lMore Info:{|r {|oVisual Guide:{|r imgur.com/XbhkK {|oWiki Page:{|r tinyurl.com/prisperl\n}|" +
      " {|lJuke Alert{|r: Creates 'Juke' blocks that record player activity in radius. If you steal, grief, or trespass -- people will know about it & put a bounty for you to be pearled.\n" +
      "{|lMore Info:{|r {|oWiki Page:{|r tinyurl.com/snitchblock\n}|" +
      " {|lMusterCull{|r: Kills some of your farm animals when there's too many to decrease lag. Mob spawners stop spawning if there are too many mobs around -- grinders must be cleared a lot.\n" +
      "{|lMore Info:{|r {|oWiki Page:{|r tinyurl.com/mustercull\n}|" +
      " {|lItem Exchange{|r: It's a minecraft shop mod that enables different chests (and other inventory blocks) to perform an exchange of items with a player.\n" +
      "{|lMore Info:{|r {|oWiki Page:{|r tinyurl.com/itemxchng\n}|" +
      " {|lHumbug:{|r Disables some features of minecraft -- see wiki for short list. Please read it to ensure you don't waste resources on a useless block.\n" +
      "{|lMore Info:{|r {|oWiki Page:{|r tinyurl.com/humbugwiki\n}|" +
      " {|lFactory Mod{|r: Factories are hard to create but can mass produce goods for cheaper. Gives groups gear advantages over lone wolves. Trading may be cheaper than crafting due to this.\n" +
      "{|lMore Info:{|r {|oWiki Page:{|r tinyurl.com/realbiome\n}|" +
      " {|oFurther Info{|r\n" +
      "Visit our subreddit at: {|oreddit.com/r/Devoted{|r\n" +
      "We extend our appreciation to the devs, admins, and players at: {|oreddit.com/r/Civcraft{|r\n";
  /** Book pages: "{|" expanded to the formatting char, split on "}|". */
  private static final Iterable<String> compiled_book_text_ =
      Splitter.on("}|").split(book_text_.replaceAll("\\{\\|", "\u00A7"));
  private static final String holiday_book_name_ = "Happy Holidays";
  private static final String holiday_book_author_ = "the CivCraft Admins";
  private static final String holiday_book_text_ =
      " {|2H{|4a{|2p{|4p{|2y {|4H{|2o{|4l{|2i{|4d{|2a{|4y{|2s{|4!{|0\n" +
      "Thank each and every one of you for making the server what it is. " +
      "Our best wishes go out to you and yours. May your New Year " +
      "be full of griefing, drama, and mayhem.\n\n" +
      "-Santa Ttk2 and the Admin Elves";
  private static final Iterable<String> compiled_holiday_book_text_ =
      Splitter.on("}|").split(holiday_book_text_.replaceAll("\\{\\|", "\u00A7"));

  /** Backing Bukkit configuration; set once by {@link #initialize(Humbug)}. */
  private static FileConfiguration config_ = null;

  /**
   * Creates (on first call) and returns the singleton, reloading the
   * plugin's config and copying in defaults.
   */
  public static Config initialize(Humbug plugin) {
    if (global_instance_ == null) {
      plugin.reloadConfig();
      config_ = plugin.getConfig();
      config_.options().copyDefaults(true);
      global_instance_ = new Config(plugin);
      global_instance_.load();
    }
    return global_instance_;
  }

  /** Raw access to the underlying configuration section. */
  public static ConfigurationSection getStorage() {
    return config_;
  }

  private Humbug plugin_ = null;
  /** Item ids whose mob drops should be removed; populated by load(). */
  private Set<Integer> remove_item_drops_ = null;

  public Config(Humbug plugin) {
    plugin_ = plugin;
    scanAnnotations();
  }

  /** Dynamic options keyed by option name, discovered via annotations. */
  private Map<String, ConfigOption> dynamicOptions_ =
      new TreeMap<String, ConfigOption>();

  /** Registers one annotation-declared option, warning on duplicates. */
  private void addToConfig(BahHumbug bug) {
    if (dynamicOptions_.containsKey(bug.opt())) {
      Humbug.info("Duplicate configuration option detected: " + bug.opt());
      return;
    }
    dynamicOptions_.put(bug.opt(), new ConfigOption(bug));
  }

  /** Scans Humbug's methods for @BahHumbug/@BahHumbugs annotations. */
  private void scanAnnotations() {
    try {
      for (Method method : Humbug.class.getMethods()) {
        BahHumbug bug = method.getAnnotation(BahHumbug.class);
        if (bug != null) {
          addToConfig(bug);
          continue;
        }
        BahHumbugs bugs = method.getAnnotation(BahHumbugs.class);
        if (bugs != null) {
          for (BahHumbug drone : bugs.value()) {
            addToConfig(drone);
          }
          continue;
        }
      }
    } catch (Exception ex) {
      Humbug.info(ex.toString());
    }
  }

  /** Setting-specific initialization run once after construction. */
  public void load() {
    loadRemoveItemDrops();
  }

  public void reload() {
    plugin_.reloadConfig();
  }

  public void save() {
    plugin_.saveConfig();
  }

  /** Returns the dynamic option, or null if no such option exists. */
  public ConfigOption get(String optionName) {
    return dynamicOptions_.get(optionName);
  }

  /** Sets a dynamic option from its string form; false if unknown. */
  public boolean set(String optionName, String value) {
    ConfigOption opt = dynamicOptions_.get(optionName);
    if (opt != null) {
      opt.setString(value);
      return true;
    }
    return false;
  }

  public boolean getDebug() {
    return config_.getBoolean("debug", debug_log_);
  }

  public void setDebug(boolean value) {
    config_.set("debug", value);
  }

  public String getTitle() {
    return config_.getString("noobbook.name", book_name_);
  }

  public String getAuthor() {
    return config_.getString("noobbook.author", book_author_);
  }

  /** Returns the newbie book's pages, one string per page. */
  public List<String> getPages() {
    List<String> book_pages = new LinkedList<String>();
    for (final String text : compiled_book_text_) {
      book_pages.add(text);
    }
    return book_pages;
  }

  public String getHolidayTitle() {
    return holiday_book_name_;
  }

  public String getHolidayAuthor() {
    return holiday_book_author_;
  }

  /** Returns the holiday book's pages, one string per page. */
  public List<String> getHolidayPages() {
    List<String> book_pages = new LinkedList<String>();
    for (final String text : compiled_holiday_book_text_) {
      book_pages.add(text);
    }
    return book_pages;
  }

  /** Per-entity-type loot multiplier; -1 (default) means unset. */
  public int getLootMultiplier(String entity_type) {
    return config_.getInt(
        "loot_multiplier." + entity_type.toLowerCase(), loot_multiplier_);
  }

  public void setLootMultiplier(String entity_type, int value) {
    config_.set("loot_multiplier." + entity_type.toLowerCase(), value);
  }

  public int getQuartzGravelPercentage() {
    return config_.getInt(
        "quartz_gravel_percentage", quartz_gravel_percentage_);
  }

  /** Stores the percentage, clamped to [0, 100] with a warning. */
  public void setQuartzGravelPercentage(int value) {
    if (value < 0) {
      value = 0;
      Humbug.warning("quartz_gravel_percentage adjusted to 0");
    } else if (value > 100) {
      value = 100;
      Humbug.warning("quartz_gravel_percentage adjusted to 100");
    }
    config_.set("quartz_gravel_percentage", value);
  }

  public int getCobbleFromLavaScanRadius() {
    return config_.getInt(
        "cobble_from_lava_scan_radius", cobble_from_lava_scan_radius_);
  }

  /** Stores the radius, clamped to [0, 20] with a warning. */
  public void setCobbleFromLavaScanRadius(int value) {
    if (value < 0) {
      value = 0;
      Humbug.warning("cobble_from_lava_scan_radius adjusted to 0");
    } else if (value > 20) {  // 8000 blocks to scan at 20
      value = 20;
      Humbug.warning("cobble_from_lava_scan_radius adjusted to 20");
    }
    config_.set("cobble_from_lava_scan_radius", value);
  }

  /** Returns the slow-tick count, forced into (0, 600] (else 100). */
  public int getProjectileSlowTicks() {
    int ticks = config_.getInt("projectile_slow_ticks", projectile_slow_ticks_);
    if (ticks <= 0 || ticks > 600) {
      ticks = 100;
    }
    return ticks;
  }

  /**
   * Loads "remove_mob_drops" into remove_item_drops_, converting a legacy
   * scalar value into a one-element list in the config file.
   */
  private void loadRemoveItemDrops() {
    if (!config_.isSet("remove_mob_drops")) {
      remove_item_drops_ = new HashSet<Integer>(4);
      return;
    }
    remove_item_drops_ = new HashSet<Integer>();
    if (!config_.isList("remove_mob_drops")) {
      Integer val = config_.getInt("remove_mob_drops");
      if (val == null) {
        // NOTE(review): getInt() returns a primitive int, so this branch is
        // unreachable; kept as a defensive guard.
        config_.set("remove_mob_drops", new LinkedList<Integer>());
        Humbug.info("remove_mob_drops was invalid, reset");
        return;
      }
      remove_item_drops_.add(val);
      List<Integer> list = new LinkedList<Integer>();
      list.add(val);
      // BUGFIX: previously wrote the scalar "val" back, so the value was
      // never actually converted to a list despite the log message below.
      config_.set("remove_mob_drops", list);
      Humbug.info("remove_mob_drops was not an Integer list, converted");
      return;
    }
    remove_item_drops_.addAll(config_.getIntegerList("remove_mob_drops"));
  }

  public boolean doRemoveItemDrops() {
    return !remove_item_drops_.isEmpty();
  }

  /** Read-only view of the removed-drop item ids. */
  public Set<Integer> getRemoveItemDrops() {
    return Collections.unmodifiableSet(remove_item_drops_);
  }

  /** Adds an item id to the removal set and persists it; ignores negatives. */
  public void addRemoveItemDrop(int item_id) {
    if (item_id < 0) {
      return;
    }
    remove_item_drops_.add(item_id);
    List<Integer> list;
    if (!config_.isSet("remove_mob_drops")) {
      list = new LinkedList<Integer>();
    } else {
      list = config_.getIntegerList("remove_mob_drops");
    }
    list.add(item_id);
    config_.set("remove_mob_drops", list);
  }

  /** Removes an item id from the removal set and persists the change. */
  public void removeRemoveItemDrop(int item_id) {
    if (item_id < 0) {
      return;
    }
    if (!remove_item_drops_.remove(item_id)) {
      return;
    }
    List<Integer> list = config_.getIntegerList("remove_mob_drops");
    // Cast so the Object overload (remove-by-value) is used, not the index one.
    list.remove((Object) item_id);
    config_.set("remove_mob_drops", list);
  }

  /** Replaces the removal set wholesale and persists it. */
  public void setRemoveItemDrops(Set<Integer> item_ids) {
    remove_item_drops_ = new HashSet<Integer>();
    remove_item_drops_.addAll(item_ids);
    List<Integer> list = new LinkedList<Integer>();
    list.addAll(item_ids);
    config_.set("remove_mob_drops", list);
  }

  /** Renders the removal set as "NAME,NAME,..." (raw id if unknown). */
  public String toDisplayRemoveItemDrops() {
    StringBuilder sb = new StringBuilder();
    for (Integer item_id : remove_item_drops_) {
      Material mat = Material.getMaterial(item_id);
      if (mat == null) {
        sb.append(item_id);
      } else {
        sb.append(mat.toString());
      }
      sb.append(",");
    }
    return sb.toString();
  }

  public void tag_on_join(boolean value) {
    config_.set("tag_on_join", value);
  }
}
package com.xiaoleilu.hutool;

import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.net.URI;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;

import com.xiaoleilu.hutool.exceptions.UtilException;

/**
 * File utilities: path resolution (filesystem and classpath/jar), file
 * creation/deletion/copy/move, and convenience readers/writers.
 *
 * @author xiaoleilu
 */
public class FileUtil {

    /** The Unix separator character. */
    private static final char UNIX_SEPARATOR = '/';
    /** The Windows separator character. */
    private static final char WINDOWS_SEPARATOR = '\\';

    /** Class file extension. */
    public static final String CLASS_EXT = ".class";
    /** Jar file extension. */
    public static final String JAR_FILE_EXT = ".jar";
    /** In-jar path marker, e.g. "xxx.jar!/com/foo". */
    public static final String JAR_PATH_EXT = ".jar!";
    /** URL prefix for file resources. */
    public static final String PATH_FILE_PRE = "file:";

    /**
     * Lists the entries of a directory.
     *
     * @param path directory path (resolved via {@link #getAbsolutePath(String)})
     * @return the directory's files; null if {@code path} is null
     * @throws UtilException if the path is not a directory
     */
    public static File[] ls(String path) {
        if (path == null) {
            return null;
        }
        path = getAbsolutePath(path);

        File file = file(path);
        if (file.isDirectory()) {
            return file.listFiles();
        }
        throw new UtilException(StrUtil.format("Path [{}] is not directory!", path));
    }

    /**
     * Lists the plain-file names directly under a classpath directory,
     * supporting paths inside jars ("xxx.jar!/com/foo").
     *
     * @param path classpath-relative directory
     * @return file names (no subdirectory entries); null if {@code path} is null
     */
    public static List<String> listFileNames(String path) {
        if (path == null) {
            return null;
        }
        path = getAbsolutePath(path);
        if (!path.endsWith(String.valueOf(UNIX_SEPARATOR))) {
            path = path + UNIX_SEPARATOR;
        }

        List<String> paths = new ArrayList<String>();
        int index = path.lastIndexOf(FileUtil.JAR_PATH_EXT);
        try {
            if (index == -1) {
                // Plain directory on disk.
                File[] files = ls(path);
                for (File file : files) {
                    if (file.isFile()) {
                        paths.add(file.getName());
                    }
                }
            } else {
                // Path inside a jar: split into "xxx.jar" and the entry prefix
                // (skip the 2-char "!/" after the jar name).
                index = index + FileUtil.JAR_FILE_EXT.length();
                final String jarPath = path.substring(0, index);
                final String subPath = path.substring(index + 2);
                JarFile jarFile = null;
                try {
                    jarFile = new JarFile(jarPath);
                    for (JarEntry entry : Collections.list(jarFile.entries())) {
                        final String name = entry.getName();
                        if (name.startsWith(subPath)) {
                            String nameSuffix = StrUtil.removePrefix(name, subPath);
                            if (!nameSuffix.contains(String.valueOf(UNIX_SEPARATOR))) {
                                paths.add(nameSuffix);
                            }
                        }
                    }
                } finally {
                    // BUGFIX: the JarFile was previously never closed (leak).
                    if (jarFile != null) {
                        jarFile.close();
                    }
                }
            }
        } catch (Exception e) {
            throw new UtilException(StrUtil.format("Can not read file path of [{}]", path), e);
        }
        return paths;
    }

    /**
     * Creates a File for the given path.
     *
     * @throws NullPointerException if the path is blank
     */
    public static File file(String path) {
        if (StrUtil.isBlank(path)) {
            throw new NullPointerException("File path is blank!");
        }
        return new File(path);
    }

    /** Creates a File under a parent path. */
    public static File file(String parent, String path) {
        if (StrUtil.isBlank(path)) {
            throw new NullPointerException("File path is blank!");
        }
        return new File(parent, path);
    }

    /** Creates a File under a parent File. */
    public static File file(File parent, String path) {
        if (StrUtil.isBlank(path)) {
            throw new NullPointerException("File path is blank!");
        }
        return new File(parent, path);
    }

    /** Creates a File from a URI. */
    public static File file(URI uri) {
        if (uri == null) {
            throw new NullPointerException("File uri is null!");
        }
        return new File(uri);
    }

    /**
     * Ensures the file exists, creating it (and parent dirs) if needed.
     *
     * @return the file; null if {@code fullFilePath} is null
     */
    public static File touch(String fullFilePath) throws IOException {
        if (fullFilePath == null) {
            return null;
        }
        return touch(file(fullFilePath));
    }

    /**
     * Ensures the file exists, creating it (and parent dirs) if needed.
     *
     * @return the file; null if {@code file} is null
     */
    public static File touch(File file) throws IOException {
        if (null == file) {
            return null;
        }
        file.getParentFile().mkdirs();
        if (!file.exists()) {
            file.createNewFile();
        }
        return file;
    }

    /** Deletes a file or recursively deletes a directory. */
    public static boolean del(String fullFileOrDirPath) throws IOException {
        return del(file(fullFileOrDirPath));
    }

    /**
     * Deletes a file or recursively deletes a directory.
     *
     * @return true if everything was deleted (or nothing existed)
     */
    public static boolean del(File file) throws IOException {
        if (file == null || !file.exists()) {
            return true;
        }
        if (file.isDirectory()) {
            File[] files = file.listFiles();
            for (File childFile : files) {
                boolean isOk = del(childFile);
                if (!isOk) {
                    // Stop on the first failure so the caller sees it.
                    return false;
                }
            }
        }
        return file.delete();
    }

    /** Creates the directory (and parents) if it does not exist. */
    public static File mkdir(String dirPath) {
        if (dirPath == null) {
            return null;
        }
        File dir = file(dirPath);
        if (!dir.exists()) {
            dir.mkdirs();
        }
        return dir;
    }

    /**
     * Creates a temp file named prefix[Random].suffix in the given directory,
     * retrying on transient Windows "access denied" errors.
     * From com.jodd.io.FileUtil.
     *
     * @param isReCreat if true, delete and re-create the file once allocated
     */
    public static File createTempFile(String prefix, String suffix, File dir, boolean isReCreat) throws IOException {
        int exceptionsCount = 0;
        while (true) {
            try {
                File file = File.createTempFile(prefix, suffix, dir).getCanonicalFile();
                if (isReCreat) {
                    file.delete();
                    file.createNewFile();
                }
                return file;
            } catch (IOException ioex) {
                // fixes java.io.WinNTFileSystem.createFileExclusively access denied
                if (++exceptionsCount >= 50) {
                    throw ioex;
                }
            }
        }
    }

    /**
     * Copies a file, verifying source and destination afterwards have the
     * same length.
     *
     * @param isOverride whether an existing destination may be replaced
     */
    public static void copy(File src, File dest, boolean isOverride) throws IOException {
        // check
        if (!src.exists()) {
            throw new FileNotFoundException("File not exist: " + src);
        }
        if (!src.isFile()) {
            throw new IOException("Not a file:" + src);
        }
        if (equals(src, dest)) {
            throw new IOException("Files '" + src + "' and '" + dest + "' are equal");
        }
        if (dest.exists()) {
            if (dest.isDirectory()) {
                dest = new File(dest, src.getName());
            }
            if (dest.exists() && !isOverride) {
                throw new IOException("File already exist: " + dest);
            }
        }

        // do copy file
        FileInputStream input = new FileInputStream(src);
        FileOutputStream output = new FileOutputStream(dest);
        try {
            IoUtil.copy(input, output);
        } finally {
            close(output);
            close(input);
        }

        if (src.length() != dest.length()) {
            throw new IOException("Copy file failed of '" + src + "' to '" + dest + "' due to different sizes");
        }
    }

    /**
     * Moves a file or directory, falling back to copy+delete when
     * {@link File#renameTo(File)} fails (e.g. across filesystems).
     */
    public static void move(File src, File dest, boolean isOverride) throws IOException {
        // check
        if (!src.exists()) {
            // BUGFIX: message previously said "File already exist".
            throw new FileNotFoundException("File not exist: " + src);
        }
        if (dest.exists()) {
            if (!isOverride) {
                throw new IOException("File already exist: " + dest);
            }
            dest.delete();
        }

        if (src.isDirectory() && dest.isFile()) {
            throw new IOException(StrUtil.format("Can not move directory [{}] to file [{}]", src, dest));
        }
        if (src.isFile() && dest.isDirectory()) {
            dest = new File(dest, src.getName());
        }

        if (!src.renameTo(dest)) {
            // renameTo failed; fall back to copy then delete.
            try {
                copy(src, dest, isOverride);
                src.delete();
            } catch (Exception e) {
                throw new IOException(StrUtil.format("Move [{}] to [{}] failed!", src, dest), e);
            }
        }
    }

    /**
     * Resolves a path relative to a class's package on the classpath.
     *
     * @param path relative path (null treated as "")
     * @param baseClass class whose location anchors the path; when null,
     *        falls back to {@link #getAbsolutePath(String)}
     */
    public static String getAbsolutePath(String path, Class<?> baseClass) {
        if (path == null) {
            path = StrUtil.EMPTY;
        }
        if (baseClass == null) {
            return getAbsolutePath(path);
        }
        // BUGFIX: arguments were swapped (removePrefix(prefix, str)); every
        // other call site in this file uses removePrefix(str, prefix).
        return StrUtil.removePrefix(baseClass.getResource(path).getPath(), PATH_FILE_PRE);
    }

    /**
     * Resolves a path against the classpath root, returning already-absolute
     * paths (Unix "/..." or Windows "X:/...") unchanged.
     */
    public static String getAbsolutePath(String path) {
        if (path == null) {
            path = StrUtil.EMPTY;
        } else {
            path = normalize(path);
            if (path.startsWith("/") || path.matches("^[a-zA-Z]:/.*")) {
                // Already an absolute path.
                return path;
            }
        }

        ClassLoader classLoader = ClassUtil.getClassLoader();
        URL url = classLoader.getResource(path);
        String resultPath = url != null
                ? url.getPath()
                : classLoader.getResource(StrUtil.EMPTY).getPath() + path;
        return StrUtil.removePrefix(resultPath, PATH_FILE_PRE);
    }

    /** Whether the given path exists. */
    public static boolean isExist(String path) {
        return file(path).exists();
    }

    /** Quietly closes a Closeable, ignoring null and IOException. */
    public static void close(Closeable closeable) {
        if (closeable == null) {
            return;
        }
        try {
            closeable.close();
        } catch (IOException e) {
            // Intentionally ignored: best-effort cleanup.
        }
    }

    /**
     * Whether two File objects denote the same canonical file.
     * Returns false if canonicalization fails.
     */
    public static boolean equals(File file1, File file2) {
        try {
            file1 = file1.getCanonicalFile();
            file2 = file2.getCanonicalFile();
        } catch (IOException ignore) {
            return false;
        }
        return file1.equals(file2);
    }

    /** Buffered input stream for a file. */
    public static BufferedInputStream getInputStream(File file) throws FileNotFoundException {
        return new BufferedInputStream(new FileInputStream(file));
    }

    /** Buffered input stream for a path. */
    public static BufferedInputStream getInputStream(String path) throws FileNotFoundException {
        return getInputStream(file(path));
    }

    /**
     * Buffered writer for a path with the given charset; creates the file
     * if needed.
     *
     * @param isAppend whether to append rather than truncate
     */
    public static BufferedWriter getBufferedWriter(String path, String charset, boolean isAppend) throws IOException {
        return new BufferedWriter(
                new OutputStreamWriter(
                        new FileOutputStream(touch(path), isAppend), charset));
    }

    /** PrintWriter over {@link #getBufferedWriter(String, String, boolean)}. */
    public static PrintWriter getPrintWriter(String path, String charset, boolean isAppend) throws IOException {
        return new PrintWriter(getBufferedWriter(path, charset, isAppend));
    }

    /** Buffered output stream for a file. */
    public static BufferedOutputStream getOutputStream(File file) throws IOException {
        return new BufferedOutputStream(new FileOutputStream(file));
    }

    /** Buffered output stream for a path; creates the file if needed. */
    public static BufferedOutputStream getOutputStream(String path) throws IOException {
        return getOutputStream(touch(path));
    }

    /** Buffered reader for a file with the given charset. */
    public static BufferedReader getReader(File file, String charset) throws IOException {
        return IoUtil.getReader(getInputStream(file), charset);
    }

    /** Buffered reader for a path with the given charset. */
    public static BufferedReader getReader(String path, String charset) throws IOException {
        return getReader(file(path), charset);
    }

    /**
     * Reads all lines of a file into the given collection.
     *
     * @return the same collection, with the lines appended
     */
    public static <T extends Collection<String>> T readLines(String path, String charset, T collection) throws IOException {
        BufferedReader reader = null;
        try {
            reader = getReader(path, charset);
            String line;
            while ((line = reader.readLine()) != null) {
                collection.add(line);
            }
            return collection;
        } finally {
            close(reader);
        }
    }

    /** Reads all lines of a URL's content into the given collection. */
    public static <T extends Collection<String>> T readLines(URL url, String charset, T collection) throws IOException {
        InputStream in = null;
        try {
            in = url.openStream();
            return IoUtil.getLines(in, charset, collection);
        } finally {
            close(in);
        }
    }

    /** Reads all lines of a URL's content into a new list. */
    public static List<String> readLines(URL url, String charset) throws IOException {
        return readLines(url, charset, new ArrayList<String>());
    }

    /** Reads all lines of a file into a new list. */
    public static List<String> readLines(String path, String charset) throws IOException {
        return readLines(path, charset, new ArrayList<String>());
    }

    /**
     * Opens a reader on the file and hands it to {@code readerHandler},
     * closing the reader afterwards.
     *
     * @return whatever the handler returns
     */
    public static <T> T load(ReaderHandler<T> readerHandler, String path, String charset) throws IOException {
        BufferedReader reader = null;
        try {
            reader = getReader(path, charset);
            return readerHandler.handle(reader);
        } finally {
            close(reader);
        }
    }

    /**
     * Returns the extension of a file name (without the dot), or "" when
     * there is none or the dot belongs to a directory component.
     */
    public static String getExtension(String fileName) {
        if (fileName == null) {
            return null;
        }
        int index = fileName.lastIndexOf(StrUtil.DOT);
        if (index == -1) {
            return StrUtil.EMPTY;
        }
        String ext = fileName.substring(index + 1);
        // A separator after the last dot means the dot was in a directory name.
        boolean dotInDirectory = ext.contains(String.valueOf(UNIX_SEPARATOR))
                || ext.contains(String.valueOf(WINDOWS_SEPARATOR));
        return dotInDirectory ? StrUtil.EMPTY : ext;
    }

    /** Index of the last path separator (Unix or Windows), or -1. */
    public static int indexOfLastSeparator(String filePath) {
        if (filePath == null) {
            return -1;
        }
        int lastUnixPos = filePath.lastIndexOf(UNIX_SEPARATOR);
        int lastWindowsPos = filePath.lastIndexOf(WINDOWS_SEPARATOR);
        return (lastUnixPos >= lastWindowsPos) ? lastUnixPos : lastWindowsPos;
    }

    /** Writes a string to a file, truncating any existing content. */
    public static void writeString(String content, String path, String charset) throws IOException {
        PrintWriter writer = null;
        try {
            writer = getPrintWriter(path, charset, false);
            writer.print(content);
        } finally {
            close(writer);
        }
    }

    /** Appends a string to a file. */
    public static void appendString(String content, String path, String charset) throws IOException {
        PrintWriter writer = null;
        try {
            writer = getPrintWriter(path, charset, true);
            writer.print(content);
        } finally {
            close(writer);
        }
    }

    /** Reads a whole file into a string with the given charset. */
    public static String readString(String path, String charset) throws IOException {
        return new String(readBytes(file(path)), charset);
    }

    /** Reads a URL's whole content into a string with the given charset. */
    public static String readString(URL url, String charset) throws IOException {
        if (url == null) {
            throw new RuntimeException("Empty url provided!");
        }
        InputStream in = null;
        try {
            in = url.openStream();
            return IoUtil.getString(in, charset);
        } finally {
            close(in);
        }
    }

    /** Writes each element on its own line, truncating the file. */
    public static <T> void writeLines(Collection<T> list, String path, String charset) throws IOException {
        writeLines(list, path, charset, false);
    }

    /** Appends each element on its own line. */
    public static <T> void appendLines(Collection<T> list, String path, String charset) throws IOException {
        writeLines(list, path, charset, true);
    }

    /**
     * Writes each non-null element on its own line.
     *
     * @param isAppend whether to append rather than truncate
     */
    public static <T> void writeLines(Collection<T> list, String path, String charset, boolean isAppend) throws IOException {
        PrintWriter writer = null;
        try {
            writer = getPrintWriter(path, charset, isAppend);
            for (T t : list) {
                if (t != null) {
                    writer.println(t.toString());
                }
            }
        } finally {
            close(writer);
        }
    }

    /** Writes bytes to a path, creating the file if needed. */
    public static void writeBytes(byte[] data, String path) throws IOException {
        writeBytes(touch(path), data);
    }

    /** Writes all bytes to a file, truncating any existing content. */
    public static void writeBytes(File dest, byte[] data) throws IOException {
        writeBytes(dest, data, 0, data.length, false);
    }

    /**
     * Writes a slice of bytes to a file.
     *
     * @param append whether to append rather than truncate
     * @throws IOException if {@code dest} exists but is not a regular file
     */
    public static void writeBytes(File dest, byte[] data, int off, int len, boolean append) throws IOException {
        if (dest.exists() && !dest.isFile()) {
            throw new IOException("Not a file: " + dest);
        }
        FileOutputStream out = null;
        try {
            out = new FileOutputStream(dest, append);
            out.write(data, off, len);
        } finally {
            close(out);
        }
    }

    /**
     * Reads a whole file (up to Integer.MAX_VALUE bytes) into a byte array.
     */
    public static byte[] readBytes(File file) throws IOException {
        // check
        if (!file.exists()) {
            throw new FileNotFoundException("File not exist: " + file);
        }
        if (!file.isFile()) {
            throw new IOException("Not a file:" + file);
        }
        long len = file.length();
        if (len >= Integer.MAX_VALUE) {
            throw new IOException("File is larger then max array size");
        }

        byte[] bytes = new byte[(int) len];
        FileInputStream in = null;
        try {
            in = new FileInputStream(file);
            // BUGFIX: a single read() may return fewer bytes than requested;
            // loop until the whole file is buffered.
            int total = 0;
            while (total < bytes.length) {
                int count = in.read(bytes, total, bytes.length - total);
                if (count < 0) {
                    throw new IOException("File truncated while reading: " + file);
                }
                total += count;
            }
        } finally {
            close(in);
        }

        return bytes;
    }

    /** Copies an input stream into a file. */
    public static void writeStream(File dest, InputStream in) throws IOException {
        FileOutputStream out = null;
        try {
            out = new FileOutputStream(dest);
            IoUtil.copy(in, out);
        } finally {
            close(out);
        }
    }

    /** Copies an input stream into a path, creating the file if needed. */
    public static void writeStream(String fullFilePath, InputStream in) throws IOException {
        writeStream(touch(fullFilePath), in);
    }

    /**
     * Whether the file's last-modified time differs from the given one.
     * A null or missing file counts as modified.
     */
    public static boolean isModifed(File file, long lastModifyTime) {
        if (null == file || !file.exists()) {
            return true;
        }
        return file.lastModified() != lastModifyTime;
    }

    /** Collapses runs of '/' and '\' separators into single '/'. */
    public static String normalize(String path) {
        return path.replaceAll("[/\\\\]{1,}", "/");
    }

    /** Path of {@code filePath} relative to {@code rootDir}. */
    public static String subPath(String rootDir, String filePath) {
        return subPath(rootDir, file(filePath));
    }

    /**
     * Path of {@code file} relative to {@code rootDir} (case-insensitive
     * prefix match); the canonical path when it is not under the root.
     */
    public static String subPath(String rootDir, File file) {
        String subPath = null;
        try {
            subPath = file.getCanonicalPath();
        } catch (IOException e) {
            throw new UtilException(e);
        }

        if (StrUtil.isNotEmpty(rootDir) && StrUtil.isNotEmpty(subPath)) {
            rootDir = normalize(rootDir);
            subPath = normalize(subPath);

            // BUGFIX: previously compared subPath against itself
            // (startsWith(subPath.toLowerCase())), which is always true.
            // Also guard the +1 so subPath == rootDir cannot underflow.
            if (subPath.length() > rootDir.length()
                    && subPath.toLowerCase().startsWith(rootDir.toLowerCase())) {
                subPath = subPath.substring(rootDir.length() + 1);
            }
        }
        return subPath;
    }

    /**
     * Callback given an open reader by {@link #load(ReaderHandler, String, String)}.
     *
     * @author Luxiaolei
     */
    public interface ReaderHandler<T> {
        public T handle(BufferedReader reader) throws IOException;
    }
}
package com.xiaoleilu.hutool;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLDecoder;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;

import javax.servlet.http.HttpServletRequest;

import com.xiaoleilu.hutool.exceptions.UtilException;

/**
 * Http utility: URL encoding/decoding, simple GET/POST requests with cookie
 * memory per host, query-string parsing, and client IP resolution.
 *
 * @author xiaoleilu
 */
public class HttpUtil {
	/** Sentinel header value meaning "address unknown" (set by some proxies). */
	public final static String UNKNOW = "unknown";
	/** Extracts a charset declared inside HTML content, e.g. charset=utf-8". */
	public final static Pattern CHARSET_PATTERN = Pattern.compile("charset=(.*?)\"");

	// Per-host cookie memory shared by all GET requests.
	// NOTE(review): plain HashMap mutated from get() — not thread-safe; confirm
	// callers are single-threaded or switch to ConcurrentHashMap.
	private static Map<String, String> cookies = new HashMap<String, String>();

	/**
	 * URL-encodes content as application/x-www-form-urlencoded.
	 *
	 * @param content text to encode; returned unchanged when blank
	 * @param charset charset name used for encoding
	 * @return encoded text
	 * @throws UtilException if the charset is unsupported
	 */
	public static String encode(String content, String charset) {
		if (StrUtil.isBlank(content)) return content;

		String encodeContent = null;
		try {
			encodeContent = URLEncoder.encode(content, charset);
		} catch (UnsupportedEncodingException e) {
			throw new UtilException(StrUtil.format("Unsupported encoding: [{}]", charset), e);
		}
		return encodeContent;
	}

	/**
	 * URL-decodes application/x-www-form-urlencoded content.
	 *
	 * @param content text to decode; returned unchanged when blank
	 * @param charset charset name used for decoding
	 * @return decoded text
	 * @throws UtilException if the charset is unsupported
	 */
	public static String decode(String content, String charset) {
		if (StrUtil.isBlank(content)) return content;

		String encodeContnt = null;
		try {
			encodeContnt = URLDecoder.decode(content, charset);
		} catch (UnsupportedEncodingException e) {
			throw new UtilException(StrUtil.format("Unsupported encoding: [{}]", charset), e);
		}
		return encodeContnt;
	}

	/**
	 * Resolves the client IP, preferring proxy-forwarding headers over the
	 * socket address, and taking the first entry of a comma-separated list.
	 *
	 * @param request current servlet request
	 * @return best-effort client IP
	 */
	public static String getClientIP(HttpServletRequest request) {
		String ip = request.getHeader("X-Forwarded-For");
		if (isUnknow(ip)) {
			ip = request.getHeader("Proxy-Client-IP");
		}
		if (isUnknow(ip)) {
			ip = request.getHeader("WL-Proxy-Client-IP");
		}
		if (isUnknow(ip)) {
			ip = request.getHeader("X-Real-IP");
		}
		if (isUnknow(ip)) {
			ip = request.getRemoteAddr();
		}
		// X-Forwarded-For may hold "client, proxy1, proxy2" — keep the first.
		if (ip != null && ip.indexOf(",") > 0) {
			ip = ip.trim().split(",")[0];
		}
		return ip;
	}

	/**
	 * Performs a GET request, replaying and remembering cookies per host.
	 *
	 * @param urlString request URL
	 * @param customCharset fallback charset when the response declares none
	 * @param isPassCodeError when true, non-200 responses are tolerated
	 * @return response body
	 * @throws IOException on network failure or (unless tolerated) non-200 status
	 */
	public static String get(String urlString, String customCharset, boolean isPassCodeError) throws IOException {
		final URL url = new URL(urlString);
		final String host = url.getHost();

		HttpURLConnection conn = (HttpURLConnection) url.openConnection();
		conn.addRequestProperty("User-Agent", "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.83 Safari/537.1");
		// Replay the cookie previously captured for this host, if any.
		final String cookie = cookies.get(host);
		if (cookie != null) conn.addRequestProperty("Cookie", cookie);
		conn.setRequestMethod("GET");
		conn.setDoInput(true);

		if (conn.getResponseCode() != 200) {
			if (!isPassCodeError) {
				throw new IOException("Status code not 200!");
			}
		}

		// Remember the newest Set-Cookie for this host.
		final String setCookie = conn.getHeaderField("Set-Cookie");
		if (StrUtil.isBlank(setCookie) == false) {
			Log.debug("Set cookie: [{}]", setCookie);
			cookies.put(host, setCookie);
		}

		// Prefer the charset from the response headers; otherwise fall back to
		// the caller's charset and try to sniff one out of the content.
		String charset = getCharsetFromConn(conn);
		boolean isGetCharsetFromContent = false;
		if (StrUtil.isBlank(charset)) {
			charset = customCharset;
			isGetCharsetFromContent = true;
		}

		String content = getString(conn.getInputStream(), charset, isGetCharsetFromContent);
		conn.disconnect();
		return content;
	}

	/**
	 * Performs a POST request with form parameters given as a map.
	 *
	 * @param urlString request URL
	 * @param paramMap form parameters
	 * @param customCharset charset for the request body and response fallback
	 * @param isPassCodeError when true, non-200 responses are tolerated
	 * @return response body
	 * @throws IOException on network failure or (unless tolerated) non-200 status
	 */
	public static String post(String urlString, Map<String, Object> paramMap, String customCharset, boolean isPassCodeError) throws IOException {
		return post(urlString, toParams(paramMap), customCharset, isPassCodeError);
	}

	/**
	 * Performs a POST request with a pre-encoded form body.
	 *
	 * @param urlString request URL
	 * @param params url-encoded body (k=v&k2=v2)
	 * @param customCharset charset for the request body and response fallback
	 * @param isPassCodeError when true, non-200 responses are tolerated
	 * @return response body
	 * @throws IOException on network failure or (unless tolerated) non-200 status
	 */
	public static String post(String urlString, String params, String customCharset, boolean isPassCodeError) throws IOException {
		URL url = new URL(urlString);
		HttpURLConnection conn = (HttpURLConnection) url.openConnection();
		conn.setRequestMethod("POST");
		conn.setDoOutput(true);
		conn.setDoInput(true);
		conn.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");

		IoUtil.write(conn.getOutputStream(), customCharset, true, params);

		if (conn.getResponseCode() != 200) {
			if (!isPassCodeError) {
				throw new IOException("Status code not 200!");
			}
		}

		String charset = getCharsetFromConn(conn);
		String content = IoUtil.getString(conn.getInputStream(), StrUtil.isBlank(charset) ? customCharset : charset);
		conn.disconnect();
		return content;
	}

	/**
	 * Downloads a URL's content as a String.
	 *
	 * @param url url to fetch
	 * @param customCharset charset to decode the content with
	 * @return content as text
	 * @throws IOException on network failure
	 */
	public static String downloadString(String url, String customCharset) throws IOException {
		InputStream inputStream = new URL(url).openStream();
		return IoUtil.getString(inputStream, customCharset);
	}

	/**
	 * Joins a parameter map into a form-url string (k=v&k2=v2).
	 *
	 * @param paramMap parameters
	 * @return url-style parameter string
	 */
	public static String toParams(Map<String, Object> paramMap) {
		return CollectionUtil.join(paramMap.entrySet(), "&");
	}

	/**
	 * Parses a query string (optionally preceded by a path and '?') into a
	 * name -&gt; values multimap. Separators are '&amp;' and ';'; a name without
	 * '=' or a trailing name maps to the empty string.
	 *
	 * @param paramsStr raw query string or full URL tail
	 * @param charset charset used to URL-decode the string first
	 * @return ordered map of parameter names to their value lists
	 */
	public static Map<String, List<String>> decodeParams(String paramsStr, String charset) {
		if (StrUtil.isBlank(paramsStr)) {
			return Collections.emptyMap();
		}

		// Strip everything up to and including '?' (the path part).
		int pathEndPos = paramsStr.indexOf('?');
		if (pathEndPos > 0) {
			paramsStr = StrUtil.subSuf(paramsStr, pathEndPos + 1);
		}
		paramsStr = decode(paramsStr, charset);

		final Map<String, List<String>> params = new LinkedHashMap<String, List<String>>();
		String name = null; // name of the pair currently being scanned, null while scanning a name
		int pos = 0; // start of the current token
		int i;
		char c;
		for (i = 0; i < paramsStr.length(); i++) {
			c = paramsStr.charAt(i);
			if (c == '=' && name == null) { // end of a name; only the first '=' of a pair counts
				if (pos != i) {
					name = paramsStr.substring(pos, i);
				}
				pos = i + 1;
			} else if (c == '&' || c == ';') { // end of a pair
				if (name == null && pos != i) {
					// A bare token like "a" in "a&b=1" becomes a -> "".
					addParam(params, paramsStr.substring(pos, i), StrUtil.EMPTY);
				} else if (name != null) {
					addParam(params, name, paramsStr.substring(pos, i));
					name = null;
				}
				pos = i + 1;
			}
		}

		// Flush whatever is left after the final separator.
		if (pos != i) {
			if (name == null) {
				addParam(params, paramsStr.substring(pos, i), StrUtil.EMPTY);
			} else {
				addParam(params, name, paramsStr.substring(pos, i));
			}
		} else if (name != null) {
			addParam(params, name, StrUtil.EMPTY);
		}

		return params;
	}

	/**
	 * Extracts the response charset from an HTTP connection: first the
	 * Content-Encoding header, then a charset= token in Content-Type.
	 *
	 * @param conn open HTTP connection
	 * @return charset name, or null when undeclared
	 */
	private static String getCharsetFromConn(HttpURLConnection conn) {
		String charset = conn.getContentEncoding();
		if (charset == null || "".equals(charset.trim())) {
			String contentType = conn.getContentType();
			charset = ReUtil.get("charset=(.*)", contentType, 1);
		}
		return charset;
	}

	/**
	 * True when a header value is blank or the literal "unknown" (what some
	 * proxies put in forwarding headers).
	 *
	 * @param checkString header value to test
	 * @return true if unusable
	 */
	private static boolean isUnknow(String checkString) {
		return StrUtil.isBlank(checkString) || UNKNOW.equalsIgnoreCase(checkString);
	}

	/**
	 * Reads a stream into a String, optionally sniffing a charset declaration
	 * out of the content itself and re-wrapping the reader with it.
	 *
	 * @param in stream to read
	 * @param charset initial charset
	 * @param isGetCharsetFromContent when true, look for charset= in each line
	 * @return accumulated content, '\n'-joined
	 * @throws IOException on read failure
	 */
	private static String getString(InputStream in, String charset, boolean isGetCharsetFromContent) throws IOException {
		StringBuilder content = new StringBuilder();

		BufferedReader reader = new BufferedReader(new InputStreamReader(in, charset));
		String line = null;
		while ((line = reader.readLine()) != null) {
			content.append(line).append('\n');
			if (isGetCharsetFromContent) {
				String charsetInContent = ReUtil.get(CHARSET_PATTERN, line, 1);
				if (StrUtil.isBlank(charsetInContent) == false) {
					charset = charsetInContent;
					// NOTE(review): re-wrapping the same InputStream discards
					// whatever the previous BufferedReader had buffered, and
					// isGetCharsetFromContent stays true so sniffing repeats on
					// every later match — both look unintended; confirm.
					reader = new BufferedReader(new InputStreamReader(in, charset));
					isGetCharsetFromContent = true;
				}
			}
		}

		return content.toString();
	}

	/**
	 * Appends a value to the list kept for a parameter name, creating the list
	 * on first use.
	 *
	 * @param params target multimap
	 * @param name parameter name
	 * @param value parameter value
	 * @return always true
	 */
	private static boolean addParam(Map<String, List<String>> params, String name, String value) {
		List<String> values = params.get(name);
		if (values == null) {
			values = new ArrayList<String>(1);
			params.put(name, values);
		}
		values.add(value);
		return true;
	}
}
package edu.rit.csh; import edu.rit.csh.auth.LDAPUser; import edu.rit.csh.auth.UserWebSession; import edu.rit.csh.pages.HomePage; import org.apache.directory.api.ldap.model.cursor.CursorException; import org.apache.directory.api.ldap.model.exception.LdapException; import org.apache.wicket.Session; import org.apache.wicket.markup.html.WebPage; import org.apache.wicket.protocol.http.WebApplication; import org.apache.wicket.request.Request; import org.apache.wicket.request.Response; import org.apache.wicket.request.http.WebRequest; /** * Application object for your web application. If you want to run this application without deploying, run the Start class. * * @see edu.rit.csh.Start#main(String[]) */ public class WicketApplication extends WebApplication { /** * @see org.apache.wicket.Application#getHomePage() */ @Override public Class<? extends WebPage> getHomePage() { return HomePage.class; } @Override public final Session newSession(Request request, Response response){ UserWebSession sess = new UserWebSession(request); LDAPUser user = null;; String uidnum; //Get the uidnum if (usesDevelopmentConfig()){ uidnum = "10412"; }else{ WebRequest wRequest = (WebRequest)request; uidnum = wRequest.getHeader("X-WEBAUTH-LDAP-UIDN"); } //LDAP library seems to throw an exception related to //its connection timing out after inactivity, //which is normal. If an exception occurs or the user isn't returned, //try again. int ldapTries = 0, ldapTriesMax = 3; while (ldapTries++ < ldapTriesMax){ try { user = Resources.ldapProxy.getUser(uidnum); } catch(LdapException | CursorException e){ e.printStackTrace(); } if (user != null){ sess.setUser(user); break; } } return sess; } }
package function.variant.base;

import utils.FormatManager;

import java.sql.ResultSet;

/**
 * One variant (SNV or indel) materialized from a database row, positioned on
 * a region of the genome.
 *
 * @author nick
 */
public class Variant extends Region {

    public int variantId;
    public String allele;
    public String refAllele;
    public String rsNumber;
    public float cscorePhred;

    // Indel attributes
    public String indelType;
    private boolean isIndel;

    /**
     * Populates the variant from the current row of the result set and
     * initializes the underlying single-base region.
     */
    public Variant(int v_id, boolean isIndel, ResultSet rset) throws Exception {
        variantId = v_id;

        // Column reads kept in the original order.
        allele = rset.getString("allele");
        refAllele = rset.getString("ref_allele");
        rsNumber = FormatManager.getString(rset.getString("rs_number"));
        cscorePhred = FormatManager.getFloat(rset.getString("cscore_phred"));

        if (isIndel) {
            // First three letters of the db value, upper-cased (e.g. "INS", "DEL").
            indelType = rset.getString("indel_type").substring(0, 3).toUpperCase();
        }
        this.isIndel = isIndel;

        int pos = rset.getInt("seq_region_pos");
        int regionId = rset.getInt("seq_region_id");
        initRegion(RegionManager.getChrById(regionId), pos, pos);
    }

    public int getVariantId() {
        return variantId;
    }

    /** @return "indel" or "snv" */
    public String getType() {
        return isIndel ? "indel" : "snv";
    }

    public String getAllele() {
        return allele;
    }

    public String getRefAllele() {
        return refAllele;
    }

    public String getRsNumber() {
        return rsNumber;
    }

    public float getCscore() {
        return cscorePhred;
    }

    public boolean isSnv() {
        return !isIndel;
    }

    public boolean isIndel() {
        return isIndel;
    }

    /** Deletions have an alternate allele shorter than the reference allele. */
    public boolean isDel() {
        return allele.length() < refAllele.length();
    }

    /** @return chromosome-position site identifier, e.g. "X-12345" */
    public String getSiteId() {
        return getChrStr() + "-" + getStartPosition();
    }

    /**
     * Builds the full variant id string chr-pos-ref-alt; positions inside the
     * X pseudoautosomal regions report chromosome "XY".
     */
    public String getVariantIdStr() {
        String chrStr = isInsideXPseudoautosomalRegions() ? "XY" : getChrStr();
        return chrStr + "-" + getStartPosition() + "-" + refAllele + "-" + allele;
    }
}
package gvs.ui.logic.app;

import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Observable;
import java.util.Observer;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.gluonhq.ignite.guice.GuiceContext;
import com.google.inject.Inject;

import gvs.GuiceBaseModule;
import gvs.interfaces.IPersistor;
import gvs.interfaces.ISessionController;
import gvs.ui.application.controller.ApplicationController;
import gvs.ui.application.model.ApplicationModel;
import gvs.ui.logic.session.SessionViewModel;
import gvs.ui.view.session.SessionView;
import javafx.application.Platform;
import javafx.beans.property.SimpleStringProperty;
import javafx.beans.property.StringProperty;
import javafx.collections.FXCollections;
import javafx.collections.ListChangeListener;
import javafx.collections.ObservableList;
import javafx.fxml.FXMLLoader;
import javafx.scene.Node;
import javafx.scene.layout.AnchorPane;
import javafx.scene.layout.BorderPane;

/**
 * The ViewModel class for the GVS Application. Corresponds to the classical
 * ViewModel of the MVVM Pattern. It observes the ApplicationModel and handles
 * the GUI logic.
 *
 * @author muriele
 */
public class AppViewModel implements Observer {

  private ApplicationModel appModel;
  private ApplicationController appController;
  private IPersistor persistor;

  // Lazily-built container holding the session view; cached across sessions.
  private AnchorPane sessionContentPane;
  private BorderPane rootLayout;
  private boolean sessionIsInitialized = false;

  private GuiceContext context = new GuiceContext(this,
      () -> Arrays.asList(new GuiceBaseModule()));

  private final StringProperty currentSessionName = new SimpleStringProperty();
  private final ObservableList<String> sessionNames = FXCollections
      .observableArrayList();
  private final Map<String, ISessionController> controllerMap = new HashMap<>();

  // Fixed typo: PROMT_MESSAGE -> PROMPT_MESSAGE (private constant; the
  // displayed text "no active session" is unchanged).
  private static final String PROMPT_MESSAGE = "no active session";
  private static final Logger logger = LoggerFactory
      .getLogger(AppViewModel.class);

  @Inject
  private FXMLLoader fxmlLoader;

  // TODO: do we still need the persistor here?
  /**
   * Wires the view model to the model/controller and starts observing the
   * session-name list so the session pane shows/hides itself.
   */
  public AppViewModel(ApplicationModel appModel,
      ApplicationController appController, IPersistor persistor,
      BorderPane rootLayout) {
    this.appModel = appModel;
    this.appModel.addObserver(this);
    this.appController = appController;
    this.persistor = persistor;
    this.currentSessionName.set(PROMPT_MESSAGE);
    this.rootLayout = rootLayout;

    sessionNames.addListener(this::changeSessionVisibility);

    context.init();
  }

  /**
   * Shows the session pane when the first session appears and hides it when
   * the last one is removed; builds the layout lazily on first use.
   */
  private void changeSessionVisibility(
      ListChangeListener.Change<? extends String> c) {
    if (!sessionIsInitialized) {
      initSessionLayout();
    }
    if (sessionNames.size() == 1) {
      displaySession();
    } else if (sessionNames.isEmpty()) {
      hideSession();
    }
  }

  /** Loads the session FXML once and connects it to a fresh SessionViewModel. */
  private void initSessionLayout() {
    logger.info("Initializing session layout.");
    try {
      fxmlLoader.setLocation(
          getClass().getResource("/gvs/ui/view/session/SessionView.fxml"));
      BorderPane sessionLayout = (BorderPane) fxmlLoader.load();
      sessionContentPane = new AnchorPane();
      sessionContentPane.getChildren().add(sessionLayout);
      final int anchorMargin = 0;
      setAnchors(sessionLayout, anchorMargin, anchorMargin, anchorMargin,
          anchorMargin);
      ((SessionView) fxmlLoader.getController())
          .setViewModel(new SessionViewModel(appModel));
      sessionIsInitialized = true;
    } catch (IOException e) {
      logger.error("Could not load session layout", e);
    }
  }

  /** Removes the session pane from the root layout. */
  private void hideSession() {
    logger.info("Hiding session layout.");
    rootLayout.setCenter(null);
  }

  /** Puts the cached session pane into the root layout. */
  private void displaySession() {
    logger.info("Displaying session layout.");
    rootLayout.setCenter(sessionContentPane);
  }

  public ObservableList<String> getSessionNames() {
    return sessionNames;
  }

  public StringProperty getCurrentSessionName() {
    return currentSessionName;
  }

  /**
   * This method is invoked whenever a new current session is set in the
   * ApplicationModel.
   */
  @Override
  public void update(Observable o, Object arg) {
    ISessionController c = ((ApplicationModel) o).getSession();
    String name = c.getSessionName();
    if (name == null) {
      currentSessionName.set(PROMPT_MESSAGE);
    } else {
      currentSessionName.set(name);
      controllerMap.put(name, c);
      if (!sessionNames.contains(name)) {
        sessionNames.add(name);
      }
    }
  }

  /** Removes the model's current session from the list, map, and controller. */
  public void removeCurrentSession() {
    logger.info("Removing current session...");
    ISessionController currentSession = appModel.getSession();
    String sessionName = currentSession.getSessionName();
    sessionNames.remove(sessionName);
    controllerMap.remove(sessionName);
    appController.deleteSession(currentSession);
  }

  /**
   * Loads a session from the given file.
   *
   * @param file session file to load
   */
  public void loadSession(File file) {
    logger.info("Loading session from file...");
    appController.setRequestedFile(file.getPath(), persistor);
  }

  /** Persists the currently active session. */
  public void saveSession() {
    logger.info("Saving session to file...");
    appModel.getSession().saveSession();
  }

  /**
   * Switches to the named session when it differs from the current one.
   *
   * @param name session name picked in the combobox
   */
  public void changeSession(String name) {
    logger.info("Detecting change in combobox.");
    if (isInvalidSessionName(name)) {
      return;
    }

    ISessionController c = controllerMap.get(name);
    // Bug fix: the original compared the names with "!=" (reference
    // identity), which misfires for equal-but-distinct String instances;
    // use equals(). name is non-null here thanks to the guard above.
    if (!name.equals(appModel.getSession().getSessionName())) {
      appController.changeCurrentSession(c);
      logger.info(String.format("Changing current session to '%s'...", name));
    }
  }

  /** A name is invalid when null, empty, or the placeholder prompt. */
  private boolean isInvalidSessionName(String name) {
    return name == null || name.isEmpty() || PROMPT_MESSAGE.equals(name);
  }

  /** Shuts down the JavaFX platform and the JVM. */
  public void terminateApplication() {
    logger.info("Quitting GVS...");
    Platform.exit();
    System.exit(0);
  }

  /**
   * Helper function. Set anchors for a child of an AnchorPane.
   *
   * @param top
   * @param bottom
   * @param left
   * @param right
   */
  private void setAnchors(Node anchorChild, int top, int bottom, int left,
      int right) {
    AnchorPane.setTopAnchor(anchorChild, (double) top);
    AnchorPane.setBottomAnchor(anchorChild, (double) bottom);
    AnchorPane.setLeftAnchor(anchorChild, (double) left);
    AnchorPane.setRightAnchor(anchorChild, (double) right);
  }
}
package hex.deeplearning; import hex.*; import water.*; import water.util.*; import static water.util.MRUtils.sampleFrame; import static water.util.MRUtils.sampleFrameStratified; import hex.FrameTask.DataInfo; import water.api.*; import water.fvec.Frame; import water.fvec.RebalanceDataSet; import water.fvec.Vec; import java.lang.reflect.Field; import java.util.Arrays; import java.util.Random; /** * Deep Learning Neural Net implementation based on MRTask2 */ public class DeepLearning extends Job.ValidatedJob { static final int API_WEAVER = 1; // This file has auto-gen'd doc & json fields public static DocGen.FieldDoc[] DOC_FIELDS; public static final String DOC_GET = "Deep Learning"; /** * A model key associated with a previously trained Deep Learning * model. This option allows users to build a new model as a * continuation of a previously generated model (e.g., by a grid search). */ @API(help = "Model checkpoint to resume training with", filter= Default.class, json = true) public Key checkpoint; /** * If given, store the best model so far under this key. * Model performance is measured by MSE for regression and overall * error rate for classification (at F1-optimal threshold for binary classification). */ @API(help = "Key to store the always-best model under", filter= Default.class, json = true) public Key best_model_key = null; /** * Unlock expert mode parameters than can affect model building speed, * predictive accuracy and scoring. Leaving expert mode parameters at default * values is fine for many problems, but best results on complex datasets are often * only attainable via expert mode options. */ @API(help = "Enable expert mode (to access all options from GUI)", filter = Default.class, json = true) public boolean expert_mode = false; @API(help = "Auto-Encoder (Experimental)", filter= Default.class, json = true) public boolean autoencoder = false; @API(help="Use all factor levels of categorical variables. 
Otherwise, the first factor level is omitted (without loss of accuracy). Useful for variable importances and auto-enabled for autoencoder.",filter=Default.class, json=true, importance = ParamImportance.SECONDARY) public boolean use_all_factor_levels = false; /*Neural Net Topology*/ /** * The activation function (non-linearity) to be used the neurons in the hidden layers. * Tanh: Hyperbolic tangent function (same as scaled and shifted sigmoid). * Rectifier: Chooses the maximum of (0, x) where x is the input value. * Maxout: Choose the maximum coordinate of the input vector. * With Dropout: Zero out a random user-given fraction of the * incoming weights to each hidden layer during training, for each * training row. This effectively trains exponentially many models at * once, and can improve generalization. */ @API(help = "Activation function", filter = Default.class, json = true, importance = ParamImportance.CRITICAL) public Activation activation = Activation.Tanh; /** * The number and size of each hidden layer in the model. * For example, if a user specifies "100,200,100" a model with 3 hidden * layers will be produced, and the middle hidden layer will have 200 * neurons.To specify a grid search, add parentheses around each * model's specification: "(100,100), (50,50,50), (20,20,20,20)". */ @API(help = "Hidden layer sizes (e.g. 100,100). Grid search: (10,10), (20,20,20)", filter = Default.class, json = true, importance = ParamImportance.CRITICAL) public int[] hidden = new int[] { 200, 200 }; /** * The number of passes over the training dataset to be carried out. * It is recommended to start with lower values for initial grid searches. * This value can be modified during checkpoint restarts and allows continuation * of selected models. 
*/ @API(help = "How many times the dataset should be iterated (streamed), can be fractional", filter = Default.class, dmin = 1e-3, json = true, importance = ParamImportance.CRITICAL) public double epochs = 10; /** * The number of training data rows to be processed per iteration. Note that * independent of this parameter, each row is used immediately to update the model * with (online) stochastic gradient descent. This parameter controls the * synchronization period between nodes in a distributed environment and the * frequency at which scoring and model cancellation can happen. For example, if * it is set to 10,000 on H2O running on 4 nodes, then each node will * process 2,500 rows per iteration, sampling randomly from their local data. * Then, model averaging between the nodes takes place, and scoring can happen * (dependent on scoring interval and duty factor). Special values are 0 for * one epoch per iteration and -1 for processing the maximum amount of data * per iteration. If **replicate training data** is enabled, N epochs * will be trained per iteration on N nodes, otherwise one epoch. */ @API(help = "Number of training samples (globally) per MapReduce iteration. Special values are 0: one epoch, -1: all available data (e.g., replicated training data)", filter = Default.class, lmin = -1, json = true, importance = ParamImportance.SECONDARY) public long train_samples_per_iteration = -1; public long actual_train_samples_per_iteration; /** * The random seed controls sampling and initialization. Reproducible * results are only expected with single-threaded operation (i.e., * when running on one node, turning off load balancing and providing * a small dataset that fits in one chunk). In general, the * multi-threaded asynchronous updates to the model parameters will * result in (intentional) race conditions and non-reproducible * results. Note that deterministic sampling and initialization might * still lead to some weak sense of determinism in the model. 
*/ @API(help = "Seed for random numbers (affects sampling) - Note: only reproducible when running single threaded", filter = Default.class, json = true) public long seed = new Random().nextLong(); /*Adaptive Learning Rate*/ /** * The implemented adaptive learning rate algorithm (ADADELTA) automatically * combines the benefits of learning rate annealing and momentum * training to avoid slow convergence. Specification of only two * parameters (rho and epsilon) simplifies hyper parameter search. * In some cases, manually controlled (non-adaptive) learning rate and * momentum specifications can lead to better results, but require the * specification (and hyper parameter search) of up to 7 parameters. * If the model is built on a topology with many local minima or * long plateaus, it is possible for a constant learning rate to produce * sub-optimal results. Learning rate annealing allows digging deeper into * local minima, while rate decay allows specification of different * learning rates per layer. When the gradient is being estimated in * a long valley in the optimization landscape, a large learning rate * can cause the gradient to oscillate and move in the wrong * direction. When the gradient is computed on a relatively flat * surface with small learning rates, the model can converge far * slower than necessary. */ @API(help = "Adaptive learning rate (ADADELTA)", filter = Default.class, json = true, importance = ParamImportance.SECONDARY) public boolean adaptive_rate = true; /** * The first of two hyper parameters for adaptive learning rate (ADADELTA). * It is similar to momentum and relates to the memory to prior weight updates. * Typical values are between 0.9 and 0.999. * This parameter is only active if adaptive learning rate is enabled. 
*/ @API(help = "Adaptive learning rate time decay factor (similarity to prior updates)", filter = Default.class, dmin = 0.01, dmax = 1, json = true, importance = ParamImportance.SECONDARY) public double rho = 0.95; /** * The second of two hyper parameters for adaptive learning rate (ADADELTA). * It is similar to learning rate annealing during initial training * and momentum at later stages where it allows forward progress. * Typical values are between 1e-10 and 1e-4. * This parameter is only active if adaptive learning rate is enabled. */ @API(help = "Adaptive learning rate smoothing factor (to avoid divisions by zero and allow progress)", filter = Default.class, dmin = 1e-15, dmax = 1, json = true, importance = ParamImportance.SECONDARY) public double epsilon = 1e-6; /*Learning Rate*/ /** * When adaptive learning rate is disabled, the magnitude of the weight * updates are determined by the user specified learning rate * (potentially annealed), and are a function of the difference * between the predicted value and the target value. That difference, * generally called delta, is only available at the output layer. To * correct the output at each hidden layer, back propagation is * used. Momentum modifies back propagation by allowing prior * iterations to influence the current update. Using the momentum * parameter can aid in avoiding local minima and the associated * instability. Too much momentum can lead to instabilities, that's * why the momentum is best ramped up slowly. * This parameter is only active if adaptive learning rate is disabled. */ @API(help = "Learning rate (higher => less stable, lower => slower convergence)", filter = Default.class, dmin = 1e-10, dmax = 1, json = true, importance = ParamImportance.SECONDARY) public double rate = .005; /** * Learning rate annealing reduces the learning rate to "freeze" into * local minima in the optimization landscape. 
The annealing rate is the * inverse of the number of training samples it takes to cut the learning rate in half * (e.g., 1e-6 means that it takes 1e6 training samples to halve the learning rate). * This parameter is only active if adaptive learning rate is disabled. */ @API(help = "Learning rate annealing: rate / (1 + rate_annealing * samples)", filter = Default.class, dmin = 0, dmax = 1, json = true, importance = ParamImportance.SECONDARY) public double rate_annealing = 1e-6; /** * The learning rate decay parameter controls the change of learning rate across layers. * For example, assume the rate parameter is set to 0.01, and the rate_decay parameter is set to 0.5. * Then the learning rate for the weights connecting the input and first hidden layer will be 0.01, * the learning rate for the weights connecting the first and the second hidden layer will be 0.005, * and the learning rate for the weights connecting the second and third hidden layer will be 0.0025, etc. * This parameter is only active if adaptive learning rate is disabled. */ @API(help = "Learning rate decay factor between layers (N-th layer: rate*alpha^(N-1))", filter = Default.class, dmin = 0, json = true, importance = ParamImportance.EXPERT) public double rate_decay = 1.0; /*Momentum*/ /** * The momentum_start parameter controls the amount of momentum at the beginning of training. * This parameter is only active if adaptive learning rate is disabled. */ @API(help = "Initial momentum at the beginning of training (try 0.5)", filter = Default.class, dmin = 0, dmax = 0.9999999999, json = true, importance = ParamImportance.SECONDARY) public double momentum_start = 0; /** * The momentum_ramp parameter controls the amount of learning for which momentum increases * (assuming momentum_stable is larger than momentum_start). The ramp is measured in the number * of training samples. * This parameter is only active if adaptive learning rate is disabled. 
*/ @API(help = "Number of training samples for which momentum increases", filter = Default.class, dmin = 1, json = true, importance = ParamImportance.SECONDARY) public double momentum_ramp = 1e6; /** * The momentum_stable parameter controls the final momentum value reached after momentum_ramp training samples. * The momentum used for training will remain the same for training beyond reaching that point. * This parameter is only active if adaptive learning rate is disabled. */ @API(help = "Final momentum after the ramp is over (try 0.99)", filter = Default.class, dmin = 0, dmax = 0.9999999999, json = true, importance = ParamImportance.SECONDARY) public double momentum_stable = 0; /** * The Nesterov accelerated gradient descent method is a modification to * traditional gradient descent for convex functions. The method relies on * gradient information at various points to build a polynomial approximation that * minimizes the residuals in fewer iterations of the descent. * This parameter is only active if adaptive learning rate is disabled. */ @API(help = "Use Nesterov accelerated gradient (recommended)", filter = Default.class, json = true, importance = ParamImportance.SECONDARY) public boolean nesterov_accelerated_gradient = true; /*Regularization*/ /** * A fraction of the features for each training row to be omitted from training in order * to improve generalization (dimension sampling). */ @API(help = "Input layer dropout ratio (can improve generalization, try 0.1 or 0.2)", filter = Default.class, dmin = 0, dmax = 1, json = true, importance = ParamImportance.SECONDARY) public double input_dropout_ratio = 0.0; /** * A fraction of the inputs for each hidden layer to be omitted from training in order * to improve generalization. Defaults to 0.5 for each hidden layer if omitted. 
*/ @API(help = "Hidden layer dropout ratios (can improve generalization), specify one value per hidden layer, defaults to 0.5", filter = Default.class, dmin = 0, dmax = 1, json = true, importance = ParamImportance.SECONDARY) public double[] hidden_dropout_ratios; /** * A regularization method that constrains the absolute value of the weights and * has the net effect of dropping some weights (setting them to zero) from a model * to reduce complexity and avoid overfitting. */ @API(help = "L1 regularization (can add stability and improve generalization, causes many weights to become 0)", filter = Default.class, dmin = 0, dmax = 1, json = true, importance = ParamImportance.SECONDARY) public double l1 = 0.0; /** * A regularization method that constrdains the sum of the squared * weights. This method introduces bias into parameter estimates, but * frequently produces substantial gains in modeling as estimate variance is * reduced. */ @API(help = "L2 regularization (can add stability and improve generalization, causes many weights to be small", filter = Default.class, dmin = 0, dmax = 1, json = true, importance = ParamImportance.SECONDARY) public double l2 = 0.0; /** * A maximum on the sum of the squared incoming weights into * any one neuron. This tuning parameter is especially useful for unbound * activation functions such as Maxout or Rectifier. */ @API(help = "Constraint for squared sum of incoming weights per unit (e.g. for Rectifier)", filter = Default.class, dmin = 1e-10, json = true, importance = ParamImportance.EXPERT) public float max_w2 = Float.POSITIVE_INFINITY; /*Initialization*/ /** * The distribution from which initial weights are to be drawn. The default * option is an optimized initialization that considers the size of the network. * The "uniform" option uses a uniform distribution with a mean of 0 and a given * interval. The "normal" option draws weights from the standard normal * distribution with a mean of 0 and given standard deviation. 
*/ @API(help = "Initial Weight Distribution", filter = Default.class, json = true, importance = ParamImportance.EXPERT) public InitialWeightDistribution initial_weight_distribution = InitialWeightDistribution.UniformAdaptive; /** * The scale of the distribution function for Uniform or Normal distributions. * For Uniform, the values are drawn uniformly from -initial_weight_scale...initial_weight_scale. * For Normal, the values are drawn from a Normal distribution with a standard deviation of initial_weight_scale. */ @API(help = "Uniform: -value...value, Normal: stddev)", filter = Default.class, dmin = 0, json = true, importance = ParamImportance.EXPERT) public double initial_weight_scale = 1.0; /** * The loss (error) function to be minimized by the model. * Cross Entropy loss is used when the model output consists of independent * hypotheses, and the outputs can be interpreted as the probability that each * hypothesis is true. Cross entropy is the recommended loss function when the * target values are class labels, and especially for imbalanced data. * It strongly penalizes error in the prediction of the actual class label. * Mean Square loss is used when the model output are continuous real values, but can * be used for classification as well (where it emphasizes the error on all * output classes, not just for the actual class). */ @API(help = "Loss function", filter = Default.class, json = true, importance = ParamImportance.EXPERT) public Loss loss = Loss.Automatic; /*Scoring*/ /** * The minimum time (in seconds) to elapse between model scoring. The actual * interval is determined by the number of training samples per iteration and the scoring duty cycle. */ @API(help = "Shortest time interval (in secs) between model scoring", filter = Default.class, dmin = 0, json = true, importance = ParamImportance.SECONDARY) public double score_interval = 5; /** * The number of training dataset points to be used for scoring. Will be * randomly sampled. 
Use 0 for selecting the entire training dataset. */ @API(help = "Number of training set samples for scoring (0 for all)", filter = Default.class, lmin = 0, json = true, importance = ParamImportance.EXPERT) public long score_training_samples = 10000l; /** * The number of validation dataset points to be used for scoring. Can be * randomly sampled or stratified (if "balance classes" is set and "score * validation sampling" is set to stratify). Use 0 for selecting the entire * training dataset. */ @API(help = "Number of validation set samples for scoring (0 for all)", filter = Default.class, lmin = 0, json = true, importance = ParamImportance.EXPERT) public long score_validation_samples = 0l; /** * Maximum fraction of wall clock time spent on model scoring on training and validation samples, * and on diagnostics such as computation of feature importances (i.e., not on training). */ @API(help = "Maximum duty cycle fraction for scoring (lower: more training, higher: more scoring).", filter = Default.class, dmin = 0, dmax = 1, json = true, importance = ParamImportance.EXPERT) public double score_duty_cycle = 0.1; /** * The stopping criteria in terms of classification error (1-accuracy) on the * training data scoring dataset. When the error is at or below this threshold, * training stops. */ @API(help = "Stopping criterion for classification error fraction on training data (-1 to disable)", filter = Default.class, dmin=-1, dmax=1, json = true, importance = ParamImportance.EXPERT) public double classification_stop = 0; /** * The stopping criteria in terms of regression error (MSE) on the training * data scoring dataset. When the error is at or below this threshold, training * stops. */ @API(help = "Stopping criterion for regression error (MSE) on training data (-1 to disable)", filter = Default.class, dmin=-1, json = true, importance = ParamImportance.EXPERT) public double regression_stop = 1e-6; /** * Enable quiet mode for less output to standard output. 
*/ @API(help = "Enable quiet mode for less output to standard output", filter = Default.class, json = true) public boolean quiet_mode = false; /** * For classification models, the maximum size (in terms of classes) of the * confusion matrix for it to be printed. This option is meant to avoid printing * extremely large confusion matrices. */ @API(help = "Max. size (number of classes) for confusion matrices to be shown", filter = Default.class, json = true) public int max_confusion_matrix_size = 20; /** * The maximum number (top K) of predictions to use for hit ratio computation (for multi-class only, 0 to disable) */ @API(help = "Max. number (top K) of predictions to use for hit ratio computation (for multi-class only, 0 to disable)", filter = Default.class, lmin=0, json = true, importance = ParamImportance.EXPERT) public int max_hit_ratio_k = 10; /*Imbalanced Classes*/ /** * For imbalanced data, balance training data class counts via * over/under-sampling. This can result in improved predictive accuracy. */ @API(help = "Balance training data class counts via over/under-sampling (for imbalanced data)", filter = Default.class, json = true, importance = ParamImportance.EXPERT) public boolean balance_classes = false; /** * When classes are balanced, limit the resulting dataset size to the * specified multiple of the original dataset size. */ @API(help = "Maximum relative size of the training data after balancing class counts (can be less than 1.0)", filter = Default.class, json = true, dmin=1e-3, importance = ParamImportance.EXPERT) public float max_after_balance_size = 5.0f; /** * Method used to sample the validation dataset for scoring, see Score Validation Samples above. 
*/ @API(help = "Method used to sample validation dataset for scoring", filter = Default.class, json = true, importance = ParamImportance.EXPERT) public ClassSamplingMethod score_validation_sampling = ClassSamplingMethod.Uniform; /*Misc*/ /** * Gather diagnostics for hidden layers, such as mean and RMS values of learning * rate, momentum, weights and biases. */ @API(help = "Enable diagnostics for hidden layers", filter = Default.class, json = true) public boolean diagnostics = true; /** * Whether to compute variable importances for input features. * The implemented method (by Gedeon) considers the weights connecting the * input features to the first two hidden layers. */ @API(help = "Compute variable importances for input features (Gedeon method) - can be slow for large networks", filter = Default.class, json = true) public boolean variable_importances = false; /** * Enable fast mode (minor approximation in back-propagation), should not affect results significantly. */ @API(help = "Enable fast mode (minor approximation in back-propagation)", filter = Default.class, json = true, importance = ParamImportance.EXPERT) public boolean fast_mode = true; /** * Ignore constant training columns (no information can be gained anyway). */ @API(help = "Ignore constant training columns (no information can be gained anyway)", filter = Default.class, json = true, importance = ParamImportance.EXPERT) public boolean ignore_const_cols = true; /** * Increase training speed on small datasets by splitting it into many chunks * to allow utilization of all cores. */ @API(help = "Force extra load balancing to increase training speed for small datasets (to keep all cores busy)", filter = Default.class, json = true) public boolean force_load_balance = true; /** * Replicate the entire training dataset onto every node for faster training on small datasets. 
*/ @API(help = "Replicate the entire training dataset onto every node for faster training on small datasets", filter = Default.class, json = true, importance = ParamImportance.EXPERT) public boolean replicate_training_data = true; /** * Run on a single node for fine-tuning of model parameters. Can be useful for * checkpoint resumes after training on multiple nodes for fast initial * convergence. */ @API(help = "Run on a single node for fine-tuning of model parameters", filter = Default.class, json = true) public boolean single_node_mode = false; /** * Enable shuffling of training data (on each node). This option is * recommended if training data is replicated on N nodes, and the number of training samples per iteration * is close to N times the dataset size, where all nodes train will (almost) all * the data. It is automatically enabled if the number of training samples per iteration is set to -1 (or to N * times the dataset size or larger). */ @API(help = "Enable shuffling of training data (recommended if training data is replicated and train_samples_per_iteration is close to #nodes x #rows)", filter = Default.class, json = true, importance = ParamImportance.EXPERT) public boolean shuffle_training_data = false; @API(help = "Sparse data handling (Experimental).", filter = Default.class, json = true, importance = ParamImportance.EXPERT) public boolean sparse = false; @API(help = "Use a column major weight matrix for input layer. 
Can speed up forward propagation, but might slow down backpropagation (Experimental).", filter = Default.class, json = true, importance = ParamImportance.EXPERT) public boolean col_major = false; @API(help = "Sparsity (Experimental)", filter= Default.class, json = true) public double average_activation = -0.9; @API(help = "Sparsity regularization (Experimental)", filter= Default.class, json = true) public double sparsity_beta = 0; public enum ClassSamplingMethod { Uniform, Stratified } public enum InitialWeightDistribution { UniformAdaptive, Uniform, Normal } /** * Activation functions */ public enum Activation { Tanh, TanhWithDropout, Rectifier, RectifierWithDropout, Maxout, MaxoutWithDropout } /** * Loss functions * CrossEntropy is recommended */ public enum Loss { Automatic, MeanSquare, CrossEntropy } // the following parameters can only be specified in expert mode transient final String [] expert_options = new String[] { "use_all_factor_levels", "loss", "max_w2", "score_training_samples", "score_validation_samples", "initial_weight_distribution", "initial_weight_scale", "diagnostics", "rate_decay", "score_duty_cycle", "variable_importances", "fast_mode", "score_validation_sampling", "balance_classes", "max_after_balance_size", "max_after_balance_size", "ignore_const_cols", "force_load_balance", "replicate_training_data", "shuffle_training_data", "nesterov_accelerated_gradient", "classification_stop", "regression_stop", "quiet_mode", "max_confusion_matrix_size", "max_hit_ratio_k", "hidden_dropout_ratios", "single_node_mode", "sparse", "col_major", "autoencoder", "average_activation", "sparsity_beta", }; // the following parameters can be modified when restarting from a checkpoint transient final String [] cp_modifiable = new String[] { "best_model_key", "expert_mode", "seed", "epochs", "score_interval", "train_samples_per_iteration", "score_duty_cycle", "classification_stop", "regression_stop", "quiet_mode", "max_confusion_matrix_size", "max_hit_ratio_k", 
"diagnostics", "variable_importances", "force_load_balance", "replicate_training_data", "shuffle_training_data", "single_node_mode", "sparse", "col_major", }; /** * Helper to specify which arguments trigger a refresh on change * @param ver */ @Override protected void registered(RequestServer.API_VERSION ver) { super.registered(ver); for (Argument arg : _arguments) { if ( arg._name.equals("activation") || arg._name.equals("initial_weight_distribution") || arg._name.equals("expert_mode") || arg._name.equals("adaptive_rate") || arg._name.equals("replicate_training_data") || arg._name.equals("balance_classes") || arg._name.equals("checkpoint")) { arg.setRefreshOnChange(); } } } /** * Helper to handle arguments based on existing input values * @param arg * @param inputArgs */ @Override protected void queryArgumentValueSet(Argument arg, java.util.Properties inputArgs) { super.queryArgumentValueSet(arg, inputArgs); if (!arg._name.equals("checkpoint") && !Utils.contains(cp_modifiable, arg._name)) { if (checkpoint != null) { arg.disable("Taken from model checkpoint."); final DeepLearningModel cp_model = UKV.get(checkpoint); if (cp_model == null) { throw new IllegalArgumentException("Checkpointed model was not found."); } if (cp_model.model_info().unstable()) { throw new IllegalArgumentException("Checkpointed model was unstable. 
Not restarting."); } return; } } if(arg._name.equals("initial_weight_scale") && (initial_weight_distribution == InitialWeightDistribution.UniformAdaptive) ) { arg.disable("Using sqrt(6 / (# units + # units of previous layer)) for Uniform distribution.", inputArgs); } if (classification) { if(arg._name.equals("regression_stop")) { arg.disable("Only for regression.", inputArgs); } if(arg._name.equals("max_after_balance_size") && !balance_classes) { arg.disable("Requires balance_classes.", inputArgs); } } else { if(arg._name.equals("classification_stop") || arg._name.equals("max_confusion_matrix_size") || arg._name.equals("max_hit_ratio_k") || arg._name.equals("max_after_balance_size") || arg._name.equals("balance_classes")) { arg.disable("Only for classification.", inputArgs); } if (validation != null && arg._name.equals("score_validation_sampling")) { score_validation_sampling = ClassSamplingMethod.Uniform; arg.disable("Using uniform sampling for validation scoring dataset.", inputArgs); } } if ((arg._name.equals("score_validation_samples") || arg._name.equals("score_validation_sampling")) && validation == null) { arg.disable("Requires a validation data set.", inputArgs); } if (Utils.contains(expert_options, arg._name) && !expert_mode) { arg.disable("Only in expert mode.", inputArgs); } if (!adaptive_rate) { if (arg._name.equals("rho") || arg._name.equals("epsilon")) { arg.disable("Only for adaptive learning rate.", inputArgs); rho = 0; epsilon = 0; } } else { if (arg._name.equals("rate") || arg._name.equals("rate_annealing") || arg._name.equals("rate_decay") || arg._name.equals("nesterov_accelerated_gradient") || arg._name.equals("momentum_start") || arg._name.equals("momentum_ramp") || arg._name.equals("momentum_stable") ) { arg.disable("Only for non-adaptive learning rate.", inputArgs); momentum_start = 0; momentum_stable = 0; } } if (arg._name.equals("hidden_dropout_ratios")) { if (activation != Activation.TanhWithDropout && activation != 
Activation.MaxoutWithDropout && activation != Activation.RectifierWithDropout) { arg.disable("Only for activation functions with dropout.", inputArgs); } } if (arg._name.equals("replicate_training_data") && (H2O.CLOUD.size() == 1)) { arg.disable("Only for multi-node operation."); replicate_training_data = false; } if (arg._name.equals("single_node_mode") && (H2O.CLOUD.size() == 1 || !replicate_training_data)) { arg.disable("Only for multi-node operation with replication."); single_node_mode = false; } if (arg._name.equals("use_all_factor_levels") && autoencoder ) { arg.disable("Automatically enabled for auto-encoders."); use_all_factor_levels = true; } } /** Print model parameters as JSON */ @Override public boolean toHTML(StringBuilder sb) { return makeJsonBox(sb); } /** * Return a query link to this page * @param k Model Key * @param content Link text * @return HTML Link */ public static String link(Key k, String content) { return link(k, content, null, null, null); } /** * Return a query link to this page * @param k Model Key * @param content Link text * @param cp Key to checkpoint to continue training with (optional) * @param response Response * @param val Validation data set key * @return HTML Link */ public static String link(Key k, String content, Key cp, String response, Key val) { DeepLearning req = new DeepLearning(); RString rs = new RString("<a href='" + req.href() + ".query?source=%$key" + (cp == null ? "" : "&checkpoint=%$cp") + (response == null ? "" : "&response=%$resp") + (val == null ? 
"" : "&validation=%$valkey") + "'>%content</a>"); rs.replace("key", k.toString()); rs.replace("content", content); if (cp != null) rs.replace("cp", cp.toString()); if (response != null) rs.replace("resp", response); if (val != null) rs.replace("valkey", val); return rs.toString(); } /** * Report the relative progress of building a Deep Learning model (measured by how many epochs are done) * @return floating point number between 0 and 1 */ @Override public float progress(){ if(UKV.get(dest()) == null)return 0; DeepLearningModel m = UKV.get(dest()); if (m != null && m.model_info()!=null ) { final float p = (float) Math.min(1, (m.epoch_counter / m.model_info().get_params().epochs)); return cv_progress(p); } return 0; } @Override protected final void execImpl() { buildModel(); if (n_folds > 0) CrossValUtils.crossValidate(this); delete(); } /** * Train a Deep Learning model, assumes that all members are populated * If checkpoint == null, then start training a new model, otherwise continue from a checkpoint */ private void buildModel() { DeepLearningModel cp = null; if (checkpoint == null) { cp = initModel(); cp.start_training(null); } else { final DeepLearningModel previous = UKV.get(checkpoint); if (previous == null) throw new IllegalArgumentException("Checkpoint not found."); Log.info("Resuming from checkpoint."); if (n_folds != 0) { throw new UnsupportedOperationException("n_folds must be 0: Cross-validation is not supproted during checkpoint restarts."); } else { ((ValidatedJob)previous.job()).xval_models = null; //remove existing cross-validation keys after checkpoint restart } if (source == null || !Arrays.equals(source._key._kb, previous.model_info().get_params().source._key._kb)) { throw new IllegalArgumentException("source must be the same as for the checkpointed model."); } autoencoder = previous.model_info().get_params().autoencoder; if (!autoencoder && (response == null || !Arrays.equals(response._key._kb, 
previous.model_info().get_params().response._key._kb))) { throw new IllegalArgumentException("response must be the same as for the checkpointed model."); } if (Utils.difference(ignored_cols, previous.model_info().get_params().ignored_cols).length != 0 || Utils.difference(previous.model_info().get_params().ignored_cols, ignored_cols).length != 0) { ignored_cols = previous.model_info().get_params().ignored_cols; Log.warn("Automatically re-using ignored_cols from the checkpointed model."); } if ((validation == null) == (previous.model_info().get_params().validation != null) || (validation != null && validation._key != null && previous.model_info().get_params().validation._key != null && !Arrays.equals(validation._key._kb, previous.model_info().get_params().validation._key._kb))) { throw new IllegalArgumentException("validation must be the same as for the checkpointed model."); } if (classification != previous.model_info().get_params().classification) { Log.warn("Automatically switching to " + ((classification=!classification) ? 
"classification" : "regression") + " (same as the checkpointed model)."); } epochs += previous.epoch_counter; //add new epochs to existing model Log.info("Adding " + String.format("%.3f", previous.epoch_counter) + " epochs from the checkpointed model."); try { final DataInfo dataInfo = prepareDataInfo(); cp = new DeepLearningModel(previous, destination_key, job_key, dataInfo); cp.write_lock(self()); cp.start_training(previous); assert(state==JobState.RUNNING); final DeepLearning A = cp.model_info().get_params(); Object B = this; for (Field fA : A.getClass().getDeclaredFields()) { if (Utils.contains(cp_modifiable, fA.getName())) { if (!expert_mode && Utils.contains(expert_options, fA.getName())) continue; for (Field fB : B.getClass().getDeclaredFields()) { if (fA.equals(fB)) { try { if (fB.get(B) == null || fA.get(A) == null || !fA.get(A).toString().equals(fB.get(B).toString())) { // if either of the two parameters is null, skip the toString() if (fA.get(A) == null && fB.get(B) == null) continue; //if both parameters are null, we don't need to do anything Log.info("Applying user-requested modification of '" + fA.getName() + "': " + fA.get(A) + " -> " + fB.get(B)); fA.set(A, fB.get(B)); } } catch (IllegalAccessException e) { e.printStackTrace(); } } } } } if (A.n_folds != 0) { Log.warn("Disabling cross-validation: Not supported when resuming training from a checkpoint."); A.n_folds = 0; } cp.update(self()); } finally { if (cp != null) cp.unlock(self()); } } trainModel(cp); cp.stop_training(); } /** * Redirect to the model page for that model that is trained by this job * @return Response */ @Override protected Response redirect() { return DeepLearningProgressPage.redirect(this, self(), dest()); } private boolean _fakejob; //Sanity check for Deep Learning job parameters private void checkParams() { if (source.numCols() <= 1) throw new IllegalArgumentException("Training data must have at least 2 features (incl. 
response)."); if (hidden == null) throw new IllegalArgumentException("There must be at least one hidden layer."); for (int i=0;i<hidden.length;++i) { if (hidden[i]==0) throw new IllegalArgumentException("Hidden layer size must be >0."); } //Auto-fill defaults if (hidden_dropout_ratios == null) { if (activation == Activation.TanhWithDropout || activation == Activation.MaxoutWithDropout || activation == Activation.RectifierWithDropout) { hidden_dropout_ratios = new double[hidden.length]; if (!quiet_mode) Log.info("Automatically setting all hidden dropout ratios to 0.5."); Arrays.fill(hidden_dropout_ratios, 0.5); } } else if (hidden_dropout_ratios.length != hidden.length) throw new IllegalArgumentException("Must have " + hidden.length + " hidden layer dropout ratios."); else if (activation != Activation.TanhWithDropout && activation != Activation.MaxoutWithDropout && activation != Activation.RectifierWithDropout) { if (!quiet_mode) Log.info("Ignoring hidden_dropout_ratios because a non-Dropout activation function was specified."); } if (!quiet_mode) { if (adaptive_rate) { Log.info("Using automatic learning rate. Ignoring the following input parameters:"); Log.info(" rate, rate_decay, rate_annealing, momentum_start, momentum_ramp, momentum_stable, nesterov_accelerated_gradient."); } else { Log.info("Using manual learning rate. 
Ignoring the following input parameters:"); Log.info(" rho, epsilon."); } if (initial_weight_distribution == InitialWeightDistribution.UniformAdaptive) { Log.info("Ignoring initial_weight_scale for UniformAdaptive weight distribution."); } } if(loss == Loss.Automatic) { if (!classification) { if (!quiet_mode) Log.info("Automatically setting loss to MeanSquare for regression."); loss = Loss.MeanSquare; } else if (autoencoder) { if (!quiet_mode) Log.info("Automatically setting loss to MeanSquare for auto-encoder."); loss = Loss.MeanSquare; } else { if (!quiet_mode) Log.info("Automatically setting loss to Cross-Entropy for classification."); loss = Loss.CrossEntropy; } } if (!classification && loss == Loss.CrossEntropy) throw new IllegalArgumentException("Cannot use CrossEntropy loss function for regression."); if (autoencoder && loss != Loss.MeanSquare) throw new IllegalArgumentException("Must use MeanSquare loss function for auto-encoder."); if (autoencoder && classification) { classification = false; Log.info("Using regression mode for auto-encoder.");} // reason for the error message below is that validation might not have the same horizontalized features as the training data (or different order) if (autoencoder && validation != null) throw new UnsupportedOperationException("Cannot specify a validation dataset for auto-encoder."); if (autoencoder && activation == Activation.Maxout) throw new UnsupportedOperationException("Maxout activation is not supported for auto-encoder."); // make default job_key and destination_key in case they are missing if (dest() == null) { destination_key = Key.make(); } if (self() == null) { job_key = Key.make(); } if (UKV.get(self()) == null) { start_time = System.currentTimeMillis(); state = JobState.RUNNING; UKV.put(self(), this); _fakejob = true; } if (!sparse && col_major) { if (!quiet_mode) throw new IllegalArgumentException("Cannot use column major storage for non-sparse data handling."); } } /** * Helper to create a DataInfo 
object from the source and response
   * @return DataInfo object
   */
  private DataInfo prepareDataInfo() {
    // If classification is requested on a non-enum response, the adapted response Vec is a
    // temporary (presumably created by prepareFrame's adaptation — confirm against FrameTask)
    // and must be trashed below once we are done with it.
    final boolean del_enum_resp = classification && !response.isEnum();
    final Frame train = FrameTask.DataInfo.prepareFrame(source, autoencoder ? null : response, ignored_cols, classification, ignore_const_cols, true /*drop >20% NA cols*/);
    final DataInfo dinfo = new FrameTask.DataInfo(train, autoencoder ? 0 : 1,
            autoencoder || use_all_factor_levels, //use all FactorLevels for auto-encoder
            autoencoder ? DataInfo.TransformType.NORMALIZE : DataInfo.TransformType.STANDARDIZE, //transform predictors
            classification ? DataInfo.TransformType.NONE : DataInfo.TransformType.STANDARDIZE); //transform response
    if (!autoencoder) {
      final Vec resp = dinfo._adaptedFrame.lastVec(); //convention from DataInfo: response is the last Vec
      assert (!classification ^ resp.isEnum()) : "Must have enum response for classification!"; //either regression or enum response
      if (del_enum_resp) ltrash(resp); // defer deletion of the temporary enum response
    }
    return dinfo;
  }

  /**
   * Create an initial Deep Learning model, typically to be trained by trainModel(model)
   * @return Randomly initialized model
   */
  public final DeepLearningModel initModel() {
    try {
      lock_data();
      checkParams();
      final DataInfo dinfo = prepareDataInfo();
      final Vec resp = dinfo._adaptedFrame.lastVec(); //convention from DataInfo: response is the last Vec
      // For classification, compute the relative class distribution of the response (null for regression)
      float[] priorDist = classification ?
new MRUtils.ClassDist(resp).doAll(resp).rel_dist() : null; final DeepLearningModel model = new DeepLearningModel(dest(), self(), source._key, dinfo, (DeepLearning)this.clone(), priorDist); model.model_info().initializeMembers(); return model; } finally { unlock_data(); } } /** * Helper to update a Frame and adding it to the local trash at the same time * @param target Frame referece, to be overwritten * @param src Newly made frame, to be deleted via local trash * @return src */ Frame updateFrame(Frame target, Frame src) { if (src != target) ltrash(src); return src; } /** * Train a Deep Learning neural net model * @param model Input model (e.g., from initModel(), or from a previous training run) * @return Trained model */ public final DeepLearningModel trainModel(DeepLearningModel model) { Frame validScoreFrame = null; Frame train, trainScoreFrame; try { lock_data(); if (checkpoint == null && !quiet_mode) logStart(); //if checkpoint is given, some Job's params might be uninitialized (but the restarted model's parameters are correct) if (model == null) { model = UKV.get(dest()); } model.write_lock(self()); final DeepLearning mp = model.model_info().get_params(); //use the model's parameters for everything below - NOT the job's parameters (can be different after checkpoint restart) prepareValidationWithModel(model); final long model_size = model.model_info().size(); if (!quiet_mode) Log.info("Number of model parameters (weights/biases): " + String.format("%,d", model_size)); train = model.model_info().data_info()._adaptedFrame; if (mp.force_load_balance) train = updateFrame(train, reBalance(train, mp.replicate_training_data /*rebalance into only 4*cores per node*/)); float[] trainSamplingFactors; if (mp.classification && mp.balance_classes) { trainSamplingFactors = new float[train.lastVec().domain().length]; //leave initialized to 0 -> will be filled up below train = updateFrame(train, sampleFrameStratified( train, train.lastVec(), trainSamplingFactors, 
(long)(mp.max_after_balance_size*train.numRows()), mp.seed, true, false)); model.setModelClassDistribution(new MRUtils.ClassDist(train.lastVec()).doAll(train.lastVec()).rel_dist()); } model.training_rows = train.numRows(); trainScoreFrame = sampleFrame(train, mp.score_training_samples, mp.seed); //training scoring dataset is always sampled uniformly from the training dataset if (train != trainScoreFrame) ltrash(trainScoreFrame); if (!quiet_mode) Log.info("Number of chunks of the training data: " + train.anyVec().nChunks()); if (validation != null) { Frame adaptedValid = getValidation(); if (getValidAdaptor().needsAdaptation2CM()) { adaptedValid.add(getValidAdaptor().adaptedValidationResponse(_responseName), getValidAdaptor().getAdaptedValidationResponse2CM()); } // validation scoring dataset can be sampled in multiple ways from the given validation dataset if (mp.classification && mp.balance_classes && mp.score_validation_sampling == ClassSamplingMethod.Stratified) { validScoreFrame = updateFrame(adaptedValid, sampleFrameStratified(adaptedValid, adaptedValid.lastVec(), null, mp.score_validation_samples > 0 ? 
mp.score_validation_samples : adaptedValid.numRows(), mp.seed+1, false /* no oversampling */, false)); } else { validScoreFrame = updateFrame(adaptedValid, sampleFrame(adaptedValid, mp.score_validation_samples, mp.seed+1)); } if (mp.force_load_balance) validScoreFrame = updateFrame(validScoreFrame, reBalance(validScoreFrame, false /*always split up globally since scoring should be distributed*/)); if (!quiet_mode) Log.info("Number of chunks of the validation data: " + validScoreFrame.anyVec().nChunks()); } // Set train_samples_per_iteration size (cannot be done earlier since this depends on whether stratified sampling is done) mp.actual_train_samples_per_iteration = computeTrainSamplesPerIteration(mp.train_samples_per_iteration, train.numRows(), mp.replicate_training_data, mp.quiet_mode); // Determine whether shuffling is enforced if(mp.replicate_training_data && (mp.actual_train_samples_per_iteration == train.numRows()*H2O.CLOUD.size()) && !mp.shuffle_training_data && H2O.CLOUD.size() > 1) { Log.warn("Enabling training data shuffling, because all nodes train on the full dataset (replicated training data)."); mp.shuffle_training_data = true; } final float rowUsageFraction = computeRowUsageFraction(train.numRows(), mp.actual_train_samples_per_iteration, mp.replicate_training_data); if (!mp.quiet_mode) Log.info("Initial model:\n" + model.model_info()); if (autoencoder) model.doScoring(train, trainScoreFrame, validScoreFrame, self(), getValidAdaptor()); //get the null model reconstruction error Log.info("Starting to train the Deep Learning model."); //main loop do model.set_model_info(H2O.CLOUD.size() > 1 && mp.replicate_training_data ? ( mp.single_node_mode ? 
new DeepLearningTask2(train, model.model_info(), rowUsageFraction).invoke(Key.make()).model_info() : //replicated data + single node mode
              new DeepLearningTask2(train, model.model_info(), rowUsageFraction).invokeOnAllNodes().model_info() ) : //replicated data + multi-node mode
              new DeepLearningTask(model.model_info(), rowUsageFraction).doAll(train).model_info()); //distributed data (always in multi-node mode)
      while (model.doScoring(train, trainScoreFrame, validScoreFrame, self(), getValidAdaptor())); //loop while doScoring(...) returns true (requests more training)
      Log.info(model);
      Log.info("Finished training the Deep Learning model.");
      return model;
    }
    catch(JobCancelledException ex) {
      // Job was cancelled: fetch the latest model state and mark it cancelled for the REST/HTML layers.
      model = UKV.get(dest());
      state = JobState.CANCELLED; //for JSON REST response
      model.get_params().state = state; //for parameter JSON on the HTML page
      Log.info("Deep Learning model building was cancelled.");
      return model;
    }
    finally {
      // Always release the model write-lock and the dataset read-locks.
      if (model != null) model.unlock(self());
      unlock_data();
    }
  }

  /**
   * Lock the input datasets against deletes.
   * Takes a read-lock on the training frame and, if a distinct validation frame is given, on that too.
   */
  private void lock_data() {
    source.read_lock(self());
    if( validation != null && source._key != null && validation._key !=null && !source._key.equals(validation._key) )
      validation.read_lock(self());
  }

  /**
   * Release the lock for the input datasets.
   * Mirrors lock_data(): unlocks the validation frame only if it is distinct from the training frame.
   */
  private void unlock_data() {
    source.unlock(self());
    if( validation != null && source._key != null && validation._key != null && !source._key.equals(validation._key) )
      validation.unlock(self());
  }

  /**
   * Delete job related keys.
   */
  public void delete() {
    cleanup();
    if (_fakejob) UKV.remove(job_key); // remove the placeholder job created by checkParams()
    remove();
    // HACK: update the state of the model's Job/parameter object
    // (since we cloned the Job/parameters several times and we're not sharing a reference)
    Value v = DKV.get(dest());
    if (v != null) {
      DeepLearningModel m = v.get();
      m.get_params().state = state;
      DKV.put(dest(), m);
    }
  }

  /**
   * Rebalance a frame for load balancing
   * @param fr Input frame
   * @param local whether to only create enough chunks to max out all cores on one node only
   * @return Frame that has potentially
 more chunks (or the input frame unchanged, if it already has enough) */
  private Frame reBalance(final Frame fr, boolean local) {
    // Target chunk count: 4 chunks per core, on one node (local) or across the whole cloud,
    // capped at the number of rows.
    final int chunks = (int)Math.min( 4 * H2O.NUMCPUS * (local ? 1 : H2O.CLOUD.size()), fr.numRows());
    if (fr.anyVec().nChunks() > chunks) {
      Log.info("Dataset already contains " + fr.anyVec().nChunks() + " chunks. No need to rebalance.");
      return fr;
    }
    if (!quiet_mode) Log.info("ReBalancing dataset into (at least) " + chunks + " chunks.");
//    return MRUtils.shuffleAndBalance(fr, chunks, seed, local, shuffle_training_data);
    Key newKey = fr._key != null ? Key.make(fr._key.toString() + ".balanced") : Key.make();
    newKey = Key.makeUserHidden(newKey); // keep the rebalanced copy out of the user-visible key store
    RebalanceDataSet rb = new RebalanceDataSet(fr, newKey, chunks);
    H2O.submitTask(rb);
    rb.join(); // block until the rebalanced frame is ready
    return UKV.get(newKey);
  }

  /**
   * Compute the actual train_samples_per_iteration size from the user-given parameter
   * @param train_samples_per_iteration user-given train_samples_per_iteration size
   *        (0: one epoch, -1: all nodes process all rows when data is replicated)
   * @param numRows number of training rows
   * @param replicate_training_data whether or not the training data is replicated on each node
   * @param quiet_mode whether to suppress informational logging
   * @return The total number of training rows to be processed per iteration (summed over on all nodes)
   */
  private static long computeTrainSamplesPerIteration(final long train_samples_per_iteration, final long numRows, final boolean replicate_training_data, final boolean quiet_mode) {
    long tspi = train_samples_per_iteration;
    assert(tspi == 0 || tspi == -1 || tspi >= 1);
    if (tspi == 0 || (!replicate_training_data && tspi == -1) ) {
      // 0 (or -1 without replication) means exactly one epoch: process every training row once
      tspi = numRows;
      if (!quiet_mode) Log.info("Setting train_samples_per_iteration (" + train_samples_per_iteration + ") to one epoch: #rows (" + tspi + ").");
    }
    else if (tspi == -1) {
      // -1 with replicated data: every node processes the full dataset per iteration
      tspi = H2O.CLOUD.size() * numRows;
      if (!quiet_mode) Log.info("Setting train_samples_per_iteration (" + train_samples_per_iteration + ") to #nodes x #rows (" + tspi + ").");
    }
    assert(tspi != 0 && tspi != -1 && tspi >= 1);
    return tspi;
  }

  /**
   * Compute the fraction of rows that need to be used for training during one iteration
   * @param numRows
number of training rows * @param train_samples_per_iteration number of training rows to be processed per iteration * @param replicate_training_data whether of not the training data is replicated on each node * @return fraction of rows to be used for training during one iteration */ private static float computeRowUsageFraction(final long numRows, final long train_samples_per_iteration, final boolean replicate_training_data) { float rowUsageFraction = (float)train_samples_per_iteration / numRows; if (replicate_training_data) rowUsageFraction /= H2O.CLOUD.size(); assert(rowUsageFraction > 0); return rowUsageFraction; } /** * Cross-Validate a DeepLearning model by building new models on N train/test holdout splits * @param splits Frames containing train/test splits * @param cv_preds Array of Frames to store the predictions for each cross-validation run * @param offsets Array to store the offsets of starting row indices for each cross-validation run * @param i Which fold of cross-validation to perform */ @Override public void crossValidate(Frame[] splits, Frame[] cv_preds, long[] offsets, int i) { // Train a clone with slightly modified parameters (to account for cross-validation) DeepLearning cv = (DeepLearning) this.clone(); cv.best_model_key = null; // model-specific stuff cv.genericCrossValidation(splits, offsets, i); cv_preds[i] = ((DeepLearningModel) UKV.get(cv.dest())).score(cv.validation); } }
package i5.las2peer.logging;

import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.io.UnsupportedEncodingException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.logging.ConsoleHandler;
import java.util.logging.FileHandler;
import java.util.logging.Formatter;
import java.util.logging.Handler;
import java.util.logging.Level;
import java.util.logging.LogManager;
import java.util.logging.LogRecord;
import java.util.logging.Logger;

import i5.las2peer.execution.L2pThread;
import i5.las2peer.p2p.Node;
import i5.las2peer.security.Agent;

/**
 * Central las2peer logger: a {@link Logger} that writes to console and a rotating
 * log file, and doubles as a {@code NodeObserver} so node events are logged too.
 * A single global instance (named {@link #GLOBAL_NAME}) is wired up in the static
 * initializer; per-class child loggers are obtained via {@code getInstance(...)}.
 */
public final class L2pLogger extends Logger implements NodeObserver {

    public static final String GLOBAL_NAME = "i5.las2peer"; // this name should be equal to the LAS2peer package name.

    // default parameters
    public static final int DEFAULT_LIMIT_BYTES = 1 * 1000 * 1000; // max 1 MB log file size
    public static final int DEFAULT_LIMIT_FILES = 10; // max 10 log files in rotation
    public static final String DEFAULT_ENCODING = "UTF-8";
    public static final String DEFAULT_LOG_DIRECTORY = "log/";
    public static final String DEFAULT_LOGFILE_PREFIX = "las2peer.log";
    public static final Level DEFAULT_CONSOLE_LEVEL = Level.INFO;
    public static final Level DEFAULT_LOGFILE_LEVEL = Level.FINEST;
    public static final Level DEFAULT_OBSERVER_LEVEL = Level.FINER;
    // NOTE(review): SimpleDateFormat is not thread-safe and this instance is shared
    // as a static constant; verify that formatting is only done under the loggers'
    // synchronized methods.
    public static final SimpleDateFormat DEFAULT_DATE_FORMAT = new SimpleDateFormat("yyyy MMM dd HH:mm:ss");

    // instance parameters
    private int limitBytes = DEFAULT_LIMIT_BYTES;
    private int limitFiles = DEFAULT_LIMIT_FILES;
    private String encoding = DEFAULT_ENCODING;
    private String logDir = DEFAULT_LOG_DIRECTORY;
    private String logfilePrefix; // default null => no file logging, only done by global instance
    private Level consoleLevel = DEFAULT_CONSOLE_LEVEL;
    private Level logfileLevel = DEFAULT_LOGFILE_LEVEL;
    private Level observerLevel = DEFAULT_OBSERVER_LEVEL;
    private SimpleDateFormat dateFormat = DEFAULT_DATE_FORMAT;
    private ConsoleFormatter consoleFormatter = new ConsoleFormatter();
    private LogfileFormatter logfileFormatter = new LogfileFormatter(this);
    private ConsoleHandler handlerConsole;
    private FileHandler handlerLogfile;

    // this is the global static instance, logging to default log file and console
    private static final L2pLogger GLOBAL_INSTANCE = new L2pLogger(GLOBAL_NAME, null);

    // initialize global static instance
    // (order matters: console handler first, then log directory, then file handler)
    static {
        // suppress Java native console logging
        GLOBAL_INSTANCE.setUseParentHandlers(false);
        // global console logging
        GLOBAL_INSTANCE.handlerConsole = new ConsoleHandler();
        GLOBAL_INSTANCE.handlerConsole.setLevel(GLOBAL_INSTANCE.consoleLevel);
        try {
            GLOBAL_INSTANCE.handlerConsole.setEncoding(GLOBAL_INSTANCE.encoding);
        } catch (UnsupportedEncodingException e) {
            System.err.println("Fatal Error! Can't set console log encoding to '" + GLOBAL_INSTANCE.encoding + "'! "
                    + e + " Using default: " + GLOBAL_INSTANCE.handlerConsole.getEncoding());
        }
        GLOBAL_INSTANCE.handlerConsole.setFormatter(GLOBAL_INSTANCE.consoleFormatter);
        GLOBAL_INSTANCE.addHandler(GLOBAL_INSTANCE.handlerConsole);
        // auto create log directory
        try {
            createDir(GLOBAL_INSTANCE.logDir);
        } catch (IOException e) {
            System.err.println("Fatal Error! Can't create log directory '" + GLOBAL_INSTANCE.logDir
                    + "'! File logging is about to fail!");
        }
        // global file logging
        try {
            GLOBAL_INSTANCE.setLogfilePrefix(DEFAULT_LOGFILE_PREFIX);
            GLOBAL_INSTANCE.handlerLogfile.setLevel(GLOBAL_INSTANCE.logfileLevel);
        } catch (IOException e) {
            System.err.println("Fatal Error! Can't use logging prefix '" + GLOBAL_INSTANCE.logfilePrefix
                    + "'! File logging is disabled!");
        }
        // since this is the global instance, drop not logged messages
        GLOBAL_INSTANCE.minimizeLogLevel();
    }

    /**
     * This is the formatter used for the console output.
     */
    protected static class ConsoleFormatter extends Formatter {

        @Override
        public String format(LogRecord record) {
            StringBuilder sb = new StringBuilder();
            // actual message
            sb.append(record.getMessage()).append("\n");
            // stack trace
            printStackTrace(sb, record.getThrown());
            return sb.toString();
        }

    }

    /**
     * This is the formatter used for the log file output.
     * Writes timestamp, level, source class (and optionally method) before the message.
     */
    protected static class LogfileFormatter extends Formatter {

        private final L2pLogger logger;

        public LogfileFormatter(L2pLogger logger) {
            this.logger = logger;
        }

        @Override
        public String format(LogRecord record) {
            StringBuilder sb = new StringBuilder();
            // timestamp
            sb.append(logger.dateFormat.format(new Date(record.getMillis()))).append(" ");
            // level
            sb.append(record.getLevel().getName()).append(" ");
            // class and method name
            sb.append(record.getSourceClassName());
            // NOTE(review): '>=' means the method name is only printed for COARSER
            // (higher-valued) logger levels; for a fine-grained logger level the
            // intuitive comparison would be '<=' - verify the intended direction.
            if (logger.getLevel().intValue() >= Level.FINER.intValue()) {
                // print the method name, too
                sb.append("#").append(record.getSourceMethodName());
            }
            sb.append(": ");
            // actual message
            sb.append(record.getMessage()).append("\n");
            // stack trace
            printStackTrace(sb, record.getThrown());
            return sb.toString();
        }

    }

    /**
     * Prints a stack trace as nicely as {@code e.printStackTrace()}, but uses the logging system as output.
     *
     * @param e A {@code Throwable} thats stack trace should be printed.
     */
    public synchronized void printStackTrace(Throwable e) {
        StringBuilder sb = new StringBuilder();
        printStackTrace(sb, e);
        severe(sb.toString().trim());
    }

    /**
     * Appends the stack trace for the given {@link Throwable} to the given {@link StringBuilder}.
     *
     * @param sb {@code StringBuilder} as output for the stack trace.
     * @param e A {@code Throwable} which stack trace should be appended. If {@code null} given, nothing is appended.
     */
    protected static void printStackTrace(StringBuilder sb, Throwable e) {
        if (e != null) {
            StringWriter sw = new StringWriter();
            PrintWriter pw = new PrintWriter(sw);
            e.printStackTrace(pw);
            sb.append(sw.toString());
        }
    }

    /**
     * Creates the logger and registers it with the JVM-wide {@link LogManager}.
     *
     * @param name logger name (usually a class or package name)
     * @param resourceBundleName may be {@code null}
     * @throws IllegalArgumentException if a logger with that name is already registered
     */
    protected L2pLogger(String name, String resourceBundleName) throws IllegalArgumentException {
        super(name, resourceBundleName);
        // if the logger is not added to the LogManager, the log files may not be closed correctly
        if (!LogManager.getLogManager().addLogger(this)) {
            // a logger with that name is already registered
            // therefore this instance is not added to the logger hierarchy and may only be used with caution
            throw new IllegalArgumentException("A logger with that name is already registered!");
        }
        // by default the logger itself logs everything, level filtering is done by its handlers
        setLevel(Level.ALL);
    }

    /**
     * Same as {@link #setLogDirectory(String)} for the global static instance.
     *
     * @param directory A directory path given as String. {@code null} is equal to "" and the class loader directory.
     * @throws IOException
     */
    public static void setGlobalLogDirectory(String directory) throws IOException {
        GLOBAL_INSTANCE.setLogDirectory(directory);
    }

    /**
     * Sets the directory to store log files.
     *
     * @param directory A directory path given as String. {@code null} is equal to "" and the class loader directory.
     * @throws IOException
     */
    public synchronized void setLogDirectory(String directory) throws IOException {
        if (directory == null || directory.isEmpty()) {
            logDir = "";
        } else {
            logDir = directory;
            // ensure a trailing separator so "logDir + prefix" forms a valid path
            if (!logDir.endsWith(File.separator)) {
                logDir = directory + File.separator;
            }
        }
        updateLogfileHandler();
    }

    /**
     * Same as {@link #setLogfilePrefix(String)} for global static instance.
     *
     * @param prefix If {@code null} is given, file logging will be disabled.
     * @throws IOException
     */
    public static void setGlobalLogfilePrefix(String prefix) throws IOException {
        GLOBAL_INSTANCE.setLogfilePrefix(prefix);
    }

    /**
     * Sets the prefix used to generate log files.
* * @param prefix If {@code null} is given, file logging will be disabled. * @throws IOException */ public synchronized void setLogfilePrefix(String prefix) throws IOException { if (prefix.equals(logfilePrefix)) { // already the same prefix return; } logfilePrefix = prefix; updateLogfileHandler(); } /** * This method must be called each time the log file target is changed. * * @throws IOException */ private synchronized void updateLogfileHandler() throws IOException { Level oldLevel = null; if (handlerLogfile != null) { oldLevel = handlerLogfile.getLevel(); handlerLogfile.close(); this.removeHandler(handlerLogfile); handlerLogfile = null; } if (logfilePrefix == null) { return; } // auto create log dir createDir(logDir); // file logging handlerLogfile = new FileHandler(logDir + logfilePrefix, limitBytes, limitFiles, true); try { handlerLogfile.setEncoding(encoding); } catch (SecurityException | UnsupportedEncodingException e) { System.err.println("Fatal Error! Can't set file log encoding to '" + encoding + "'! " + e + " Using default: " + handlerConsole.getEncoding()); } handlerLogfile.setFormatter(logfileFormatter); // default level: FINEST if (oldLevel != null) { handlerLogfile.setLevel(oldLevel); } this.addHandler(handlerLogfile); } /** * This method ensures that the given directory is actually a directory and exists. * * @param dir A path given as String for the desired directory * @throws IOException return null; */ private static void createDir(String dir) throws IOException { File fDir = new File(dir); if (fDir != null && !dir.isEmpty() && !fDir.isDirectory() && !fDir.mkdirs()) { throw new IOException("Can't create directory! Invalid path '" + fDir.getPath() + "'"); } } /** * Same as {@link #setConsoleLevel(Level)} for the global static instance. * * @param level The log level to set. */ public static void setGlobalConsoleLevel(Level level) { GLOBAL_INSTANCE.setConsoleLevel(level); } /** * Sets the log level for the console output of this logger. 
     *
     * @param level The log level to set.
     */
    public synchronized void setConsoleLevel(Level level) {
        consoleLevel = level;
        handlerConsole.setLevel(consoleLevel);
    }

    /**
     * Same as {@link #setLogfileLevel(Level)} for global static instance.
     *
     * @param level The log level to set.
     */
    public static void setGlobalLogfileLevel(Level level) {
        GLOBAL_INSTANCE.setLogfileLevel(level);
    }

    /**
     * Sets the log level for the log files used in this logger.
     *
     * @param level The log level to set.
     */
    public synchronized void setLogfileLevel(Level level) {
        logfileLevel = level;
        // handler may be null when file logging is disabled; the level is still
        // remembered for the next handler created
        if (handlerLogfile != null) {
            handlerLogfile.setLevel(logfileLevel);
        }
    }

    /**
     * Updates the loggers own log level and sets it to the minimal value of all assigned handlers. This way the
     * performance is slightly improved, because the logger itself drops messages not suitable for assigned handlers.
     * Please pay attention that this will drop messages, that may be interesting for parent loggers or handlers, too.
     * Usually this method should be only used with the global instance.
     */
    private synchronized void minimizeLogLevel() {
        // set minimal level of all handlers and this logger instance
        // (with no handlers attached this degenerates to Level.OFF)
        Level minLevel = Level.OFF;
        for (Handler handler : getHandlers()) {
            Level level = handler.getLevel();
            if (level.intValue() < minLevel.intValue()) {
                minLevel = level;
            }
        }
        setLevel(minLevel);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void log(LogRecord record) {
        super.log(record);
        // Thread t = Thread.currentThread();
        // if (t instanceof L2pThread) {
        // // TODO write output to specific node log file, too
        // Serializable nodeId = ((L2pThread) t).getContext().getLocalNode().getNodeId();
    }

    /**
     * Writes a log message. The given event can be used to differentiate between different log messages.
     *
     * @param event used to differentiate between different log messages
     * @param message
     */
    public static void logEvent(Event event, String message) {
        logEvent(null, event, message, null, null);
    }

    /**
     * Writes a log message.
The given event can be used to differentiate between different log messages.
     *
     * @param event used to differentiate between different log messages
     * @param actingUser can be set to null if unknown / not desired
     * @param message
     */
    public static void logEvent(Event event, Agent actingUser, String message) {
        Thread t = Thread.currentThread();
        if (t instanceof L2pThread) {
            // Logging custom service messages requires a serviceAgent
            Agent serviceAgent = ((L2pThread) t).getServiceAgent();
            logEvent(null, event, message, serviceAgent, actingUser);
        } else {
            throw new IllegalStateException("Not executed in a L2pThread environment!");
        }
    }

    /**
     * Logs a message to the l2p system using the observers.
     *
     * Since this method will/should only be used in an L2pThread, the message will come from a service or a helper, so
     * a SERVICE_MESSAGE is assumed. Then this message will not be monitored by the monitoring observer.
     *
     * @param from the calling class
     * @param event used to differentiate between different log messages
     * @param message
     */
    public static void logEvent(Object from, Event event, String message) {
        // delegates with no agents attached
        logEvent(from, event, message, null, null);
    }

    /**
     * Writes a log message. The given event can be used to differentiate between different log messages. The
     * serviceAgent and actingUser can be set to {@code null} if not known. Then this message will not be monitored by
     * the monitoring observer.
     *
     * This method replaces: {@link Context#logMessage(Object, int, String, Agent, Agent)},
     * {@link Context#logMessage(Object, String)}, {@link Context#logError(Object, int, String, Agent, Agent)},
     * {@link Context#logError(Object, String)}, {@link Service#logError(String)}
     *
     * @param from the calling class
     * @param event used to differentiate between different log messages
     * @param message
     * @param serviceAgent can be set to null if unknown / not desired
     * @param actingUser can be set to null if unknown / not desired
     */
    public static void logEvent(Object from, Event event, String message, Agent serviceAgent, Agent actingUser) {
        Thread t = Thread.currentThread();
        if (t instanceof L2pThread) {
            // resolve the local node from the current execution context
            Node node = ((L2pThread) t).getContext().getLocalNode();
            logEvent(node, from, event, message, serviceAgent, actingUser);
        } else {
            throw new IllegalStateException("Not executed in a L2pThread environment!");
        }
    }

    /**
     * Writes a log message. The given event can be used to differentiate between different log messages. The
     * serviceAgent and actingUser can be set to {@code null} if not known. Then this message will not be monitored by
     * the monitoring observer.
     *
     * @param node must not be null - the event is delivered through this node's observers
     * @param from the calling class
     * @param event used to differentiate between different log messages
     * @param message
     * @param serviceAgent can be set to null if unknown / not desired
     * @param actingUser can be set to null if unknown / not desired
     */
    public static void logEvent(Node node, Object from, Event event, String message, Agent serviceAgent,
            Agent actingUser) {
        String msg = message;
        if (from != null) {
            // prefix the message with the simple name of the calling class
            msg = from.getClass().getSimpleName() + ": " + message;
        }
        node.observerNotice(event, node.getNodeId(), serviceAgent, null, actingUser, msg);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void log(Long timestamp, Event event, String sourceNode, Long sourceAgentId, String destinationNode,
            Long destinationAgentId, String remarks) {
        // one tab-separated line per observed event; null parts are printed as "-"
        StringBuilder logLine = new StringBuilder(DEFAULT_DATE_FORMAT.format(new Date(timestamp)) + "\t");
        logLine.append(event + " (" + event.getCode() + ")\t");
        logLine.append(appendPart(sourceNode));
        logLine.append(appendPart(sourceAgentId));
        logLine.append(appendPart(destinationNode));
        logLine.append(appendPart(destinationAgentId));
        logLine.append(appendPart(remarks));
        // with default levels this hides the output from console and only writes it to logfile
        log(observerLevel, logLine.toString());
    }

    /**
     * Simple method for one log line entry. Null will be printed as "-". All values will be followed by a tab char.
     *
     * @param o
     * @return a string
     */
    private static String appendPart(Object o) {
        if (o == null) {
            return "-\t";
        } else {
            return "" + o + "\t";
        }
    }

    /**
     * This method returns the default {@link Formatter} currently used to format log output for console.
     *
     * @return Returns the console formatter.
     */
    public static Formatter getGlobalConsoleFormatter() {
        return GLOBAL_INSTANCE.consoleFormatter;
    }

    /**
     * This method returns the default {@link Formatter} currently used to format log output for log files.
     *
     * @return Returns the log file formatter.
     */
    public static Formatter getGlobalLogfileFormatter() {
        return GLOBAL_INSTANCE.logfileFormatter;
    }

    /**
     * This method is used to retrieve a L2pLogger instance.
     *
     * @param cls Should be the class this instance is used with.
     * @return Returns a L2pLogger instance for the given class.
     * @throws ClassCastException If someone overloaded the loggers instance by adding some other logger implementation
     *             with the same name. In this case you may use Java native method by calling
     *             {@link Logger#getLogger(String)}.
     */
    public static L2pLogger getInstance(Class<?> cls) throws ClassCastException {
        return getInstance(cls.getCanonicalName());
    }

    /**
     * This method is used to retrieve a L2pLogger instance.
     *
     * @param name A name for the new logger instance. Should be the name of your current class by default. Like
     *            L2pLogger.class.getCanonicalName()
     * @return Returns a L2pLogger instance for the given name.
     * @throws ClassCastException If someone overloaded the loggers instance by adding some other logger implementation
     *             with the same name. In this case you may use Java native method by calling
     *             {@link Logger#getLogger(String)}.
     */
    public static L2pLogger getInstance(String name) throws ClassCastException {
        // the global name itself is reserved for the global instance
        if (name == null || name.isEmpty() || "i5.las2peer".equals(name)) {
            throw new IllegalArgumentException("Invalid logger name '" + name + "' given!");
        }
        L2pLogger result = null;
        try {
            result = new L2pLogger(name, null);
            if (!LogManager.getLogManager().addLogger(result)) {
                // the log manager already has a logger with that name
                // NOTE(review): getLogger may still return null here if the earlier
                // registration was garbage-collected - callers should not assume non-null
                result = (L2pLogger) LogManager.getLogManager().getLogger(name);
            }
        } catch (IllegalArgumentException e) {
            // a logger with that name is already registered
            result = (L2pLogger) LogManager.getLogManager().getLogger(name);
        }
        return result;
    }

}
package info.faceland.loot;

import info.faceland.api.FacePlugin;
import info.faceland.facecore.shade.command.CommandHandler;
import info.faceland.facecore.shade.nun.ivory.config.VersionedIvoryConfiguration;
import info.faceland.facecore.shade.nun.ivory.config.VersionedIvoryYamlConfiguration;
import info.faceland.facecore.shade.nun.ivory.config.settings.IvorySettings;
import info.faceland.loot.api.creatures.CreatureMod;
import info.faceland.loot.api.creatures.CreatureModBuilder;
import info.faceland.loot.api.groups.ItemGroup;
import info.faceland.loot.api.items.CustomItem;
import info.faceland.loot.api.items.CustomItemBuilder;
import info.faceland.loot.api.items.ItemBuilder;
import info.faceland.loot.api.managers.CreatureModManager;
import info.faceland.loot.api.managers.CustomItemManager;
import info.faceland.loot.api.managers.ItemGroupManager;
import info.faceland.loot.api.managers.NameManager;
import info.faceland.loot.api.managers.SocketGemManager;
import info.faceland.loot.api.managers.TierManager;
import info.faceland.loot.api.sockets.SocketGem;
import info.faceland.loot.api.sockets.SocketGemBuilder;
import info.faceland.loot.api.sockets.effects.SocketEffect;
import info.faceland.loot.api.tier.Tier;
import info.faceland.loot.api.tier.TierBuilder;
import info.faceland.loot.commands.LootCommand;
import info.faceland.loot.creatures.LootCreatureModBuilder;
import info.faceland.loot.groups.LootItemGroup;
import info.faceland.loot.io.SmartTextFile;
import info.faceland.loot.items.LootCustomItemBuilder;
import info.faceland.loot.items.LootItemBuilder;
import info.faceland.loot.listeners.spawning.EntityDeathListener;
import info.faceland.loot.managers.LootCreatureModManager;
import info.faceland.loot.managers.LootCustomItemManager;
import info.faceland.loot.managers.LootItemGroupManager;
import info.faceland.loot.managers.LootNameManager;
import info.faceland.loot.managers.LootSocketGemManager;
import info.faceland.loot.managers.LootTierManager;
import info.faceland.loot.sockets.LootSocketGemBuilder;
import info.faceland.loot.sockets.effects.LootSocketPotionEffect;
import info.faceland.loot.tier.LootTierBuilder;
import info.faceland.loot.utils.converters.StringConverter;
import info.faceland.utils.TextUtils;
import net.nunnerycode.java.libraries.cannonball.DebugPrinter;
import org.bukkit.Bukkit;
import org.bukkit.Material;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.entity.EntityType;
import org.bukkit.event.HandlerList;

import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;

/**
 * Main plugin class: loads the loot configuration (items, tiers, names, custom items,
 * socket gems, creature mods) from versioned YAML files and wires up commands/listeners.
 */
public final class LootPlugin extends FacePlugin {

    private DebugPrinter debugPrinter;
    // versioned configs: each can self-update from the bundled resource, backing up the old file
    private VersionedIvoryYamlConfiguration itemsYAML;
    private VersionedIvoryYamlConfiguration tierYAML;
    private VersionedIvoryYamlConfiguration corestatsYAML;
    private VersionedIvoryYamlConfiguration customItemsYAML;
    private VersionedIvoryYamlConfiguration socketGemsYAML;
    private VersionedIvoryYamlConfiguration languageYAML;
    private VersionedIvoryYamlConfiguration configYAML;
    private VersionedIvoryYamlConfiguration creaturesYAML;
    private IvorySettings settings;
    private ItemGroupManager itemGroupManager;
    private TierManager tierManager;
    private NameManager nameManager;
    private CustomItemManager customItemManager;
    private SocketGemManager socketGemManager;
    private CreatureModManager creatureModManager;

    // Creates the debug printer, loads/updates all YAML configs and instantiates the managers.
    @Override
    public void preEnable() {
        debugPrinter = new DebugPrinter(getDataFolder().getPath(), "debug.log");
        itemsYAML = new VersionedIvoryYamlConfiguration(new File(getDataFolder(), "items.yml"),
                getResource("items.yml"), VersionedIvoryConfiguration.VersionUpdateType.BACKUP_AND_UPDATE);
        if (itemsYAML.update()) {
            getLogger().info("Updating items.yml");
            debug("Updating items.yml");
        }
        tierYAML = new VersionedIvoryYamlConfiguration(new File(getDataFolder(), "tier.yml"),
                getResource("tier.yml"), VersionedIvoryConfiguration.VersionUpdateType.BACKUP_AND_UPDATE);
        if (tierYAML.update()) {
            getLogger().info("Updating tier.yml");
            debug("Updating tier.yml");
        }
        corestatsYAML = new VersionedIvoryYamlConfiguration(new File(getDataFolder(), "corestats.yml"),
                getResource("corestats.yml"), VersionedIvoryConfiguration.VersionUpdateType.BACKUP_AND_UPDATE);
        if (corestatsYAML.update()) {
            getLogger().info("Updating corestats.yml");
            debug("Updating corestats.yml");
        }
        customItemsYAML = new VersionedIvoryYamlConfiguration(new File(getDataFolder(), "customItems.yml"),
                getResource("customItems.yml"), VersionedIvoryConfiguration.VersionUpdateType.BACKUP_AND_UPDATE);
        if (customItemsYAML.update()) {
            getLogger().info("Updating customItems.yml");
            debug("Updating customItems.yml");
        }
        socketGemsYAML = new VersionedIvoryYamlConfiguration(new File(getDataFolder(), "socketGems.yml"),
                getResource("socketGems.yml"), VersionedIvoryConfiguration.VersionUpdateType.BACKUP_AND_UPDATE);
        if (socketGemsYAML.update()) {
            getLogger().info("Updating socketGems.yml");
            debug("Updating socketGems.yml");
        }
        languageYAML = new VersionedIvoryYamlConfiguration(new File(getDataFolder(), "language.yml"),
                getResource("language.yml"), VersionedIvoryConfiguration.VersionUpdateType.BACKUP_AND_UPDATE);
        if (languageYAML.update()) {
            getLogger().info("Updating language.yml");
            debug("Updating language.yml");
        }
        configYAML = new VersionedIvoryYamlConfiguration(new File(getDataFolder(), "config.yml"),
                getResource("config.yml"), VersionedIvoryConfiguration.VersionUpdateType.BACKUP_AND_UPDATE);
        if (configYAML.update()) {
            getLogger().info("Updating config.yml");
            debug("Updating config.yml");
        }
        creaturesYAML = new VersionedIvoryYamlConfiguration(new File(getDataFolder(), "creatures.yml"),
                getResource("creatures.yml"), VersionedIvoryConfiguration.VersionUpdateType.BACKUP_AND_UPDATE);
        if (creaturesYAML.update()) {
            getLogger().info("Updating creatures.yml");
            debug("Updating creatures.yml");
        }
        settings = IvorySettings.loadFromFiles(corestatsYAML, languageYAML, configYAML);
        itemGroupManager = new LootItemGroupManager();
        tierManager = new LootTierManager();
        nameManager = new LootNameManager();
        customItemManager = new LootCustomItemManager();
        socketGemManager = new LootSocketGemManager();
        creatureModManager = new LootCreatureModManager();
    }

    // Load order matters: item groups first (tiers and gems reference them),
    // then tiers, names, custom items, socket gems and finally creature mods
    // (which reference custom items, gems and tiers).
    @Override
    public void enable() {
        loadItemGroups();
        loadTiers();
        loadNames();
        loadCustomItems();
        loadSocketGems();
        loadCreatureMods();
    }

    @Override
    public void postEnable() {
        CommandHandler handler = new CommandHandler(this);
        handler.registerCommands(new LootCommand(this));
        Bukkit.getPluginManager().registerEvents(new EntityDeathListener(this), this);
        //Bukkit.getPluginManager().registerEvents(new LoginListener(this), this);
        debug("v" + getDescription().getVersion() + " enabled");
    }

    @Override
    public void preDisable() {
        HandlerList.unregisterAll(this);
    }

    @Override
    public void disable() {

    }

    // Release all references in reverse creation order so the plugin can be reloaded cleanly.
    @Override
    public void postDisable() {
        creatureModManager = null;
        socketGemManager = null;
        customItemManager = null;
        nameManager = null;
        tierManager = null;
        itemGroupManager = null;
        settings = null;
        creaturesYAML = null;
        configYAML = null;
        languageYAML = null;
        customItemsYAML = null;
        corestatsYAML = null;
        tierYAML = null;
        itemsYAML = null;
        debugPrinter = null;
    }

    // Convenience overload: INFO-level debug output.
    public void debug(String... messages) {
        debug(Level.INFO, messages);
    }

    // Writes to the debug log file; silently ignored before preEnable / after postDisable.
    public void debug(Level level, String... messages) {
        if (debugPrinter != null) {
            debugPrinter.debug(level, messages);
        }
    }

    // Clears and reloads per-creature drop multipliers from creatures.yml.
    private void loadCreatureMods() {
        for (CreatureMod cm : getCreatureModManager().getCreatureMods()) {
            getCreatureModManager().removeCreatureMod(cm.getEntityType());
        }
        Set<CreatureMod> mods = new HashSet<>();
        List<String> loadedMods = new ArrayList<>();
        for (String key : creaturesYAML.getKeys(false)) {
            if (!creaturesYAML.isConfigurationSection(key)) {
                continue;
            }
            ConfigurationSection cs = creaturesYAML.getConfigurationSection(key);
            // key is expected to be an EntityType name; valueOf throws on unknown keys
            CreatureModBuilder builder = getNewCreatureModBuilder(EntityType.valueOf(key));
            if (cs.isConfigurationSection("custom-items")) {
                Map<CustomItem, Double> map = new HashMap<>();
                for (String k : cs.getConfigurationSection("custom-items").getKeys(false)) {
                    // NOTE(review): this keeps only entries that are themselves sections,
                    // but getDouble on a section path yields the default 0 - verify whether
                    // this negation is inverted (plain numeric entries would be skipped).
                    if (!cs.isConfigurationSection("custom-items." + k)) {
                        continue;
                    }
                    CustomItem ci = customItemManager.getCustomItem(k);
                    if (ci == null) {
                        continue;
                    }
                    map.put(ci, cs.getDouble("custom-items." + k));
                }
                builder.withCustomItemMults(map);
            }
            if (cs.isConfigurationSection("socket-gems")) {
                Map<SocketGem, Double> map = new HashMap<>();
                for (String k : cs.getConfigurationSection("socket-gems").getKeys(false)) {
                    // NOTE(review): same suspicious section filter as above - verify.
                    if (!cs.isConfigurationSection("socket-gems." + k)) {
                        continue;
                    }
                    SocketGem sg = socketGemManager.getSocketGem(k);
                    if (sg == null) {
                        continue;
                    }
                    map.put(sg, cs.getDouble("socket-gems." + k));
                }
                builder.withSocketGemMults(map);
            }
            if (cs.isConfigurationSection("tiers")) {
                Map<Tier, Double> map = new HashMap<>();
                for (String k : cs.getConfigurationSection("tiers").getKeys(false)) {
                    // NOTE(review): same suspicious section filter as above - verify.
                    if (!cs.isConfigurationSection("tiers." + k)) {
                        continue;
                    }
                    Tier t = tierManager.getTier(k);
                    if (t == null) {
                        continue;
                    }
                    map.put(t, cs.getDouble("tiers." + k));
                }
                builder.withTierMults(map);
            }
            CreatureMod mod = builder.build();
            mods.add(mod);
            loadedMods.add(mod.getEntityType().name());
        }
        for (CreatureMod cm : mods) {
            creatureModManager.addCreatureMod(cm);
        }
        debug("Loaded creature mods: " + loadedMods.toString());
    }

    // Clears and reloads socket gems from socketGems.yml.
    private void loadSocketGems() {
        for (SocketGem sg : getSocketGemManager().getSocketGems()) {
            getSocketGemManager().removeSocketGem(sg.getName());
        }
        Set<SocketGem> gems = new HashSet<>();
        List<String> loadedSocketGems = new ArrayList<>();
        for (String key : socketGemsYAML.getKeys(false)) {
            if (!socketGemsYAML.isConfigurationSection(key)) {
                continue;
            }
            ConfigurationSection cs = socketGemsYAML.getConfigurationSection(key);
            SocketGemBuilder builder = getNewSocketGemBuilder(key);
            builder.withPrefix(cs.getString("prefix"));
            builder.withSuffix(cs.getString("suffix"));
            builder.withLore(cs.getStringList("lore"));
            builder.withWeight(cs.getDouble("weight"));
            List<SocketEffect> effects = new ArrayList<>();
            for (String eff : cs.getStringList("effects")) {
                effects.add(LootSocketPotionEffect.parseString(eff));
            }
            builder.withSocketEffects(effects);
            builder.withItemGroup(itemGroupManager.getItemGroup(cs.getString("item-group")));
            SocketGem gem = builder.build();
            gems.add(gem);
            loadedSocketGems.add(gem.getName());
        }
        for (SocketGem sg : gems) {
            getSocketGemManager().addSocketGem(sg);
        }
        debug("Loaded socket gems: " + loadedSocketGems.toString());
    }

    // Clears and reloads custom items from customItems.yml.
    private void loadCustomItems() {
        for (CustomItem ci : getCustomItemManager().getCustomItems()) {
            getCustomItemManager().removeCustomItem(ci.getName());
        }
        Set<CustomItem> customItems = new HashSet<>();
        List<String> loaded = new ArrayList<>();
        for (String key : customItemsYAML.getKeys(false)) {
            if (!customItemsYAML.isConfigurationSection(key)) {
                continue;
            }
            ConfigurationSection cs = customItemsYAML.getConfigurationSection(key);
            CustomItemBuilder builder = getNewCustomItemBuilder(key);
            builder.withMaterial(StringConverter.toMaterial(cs.getString("material")));
builder.withDisplayName(cs.getString("display-name")); builder.withLore(cs.getStringList("lore")); CustomItem ci = builder.build(); customItems.add(ci); loaded.add(ci.getName()); } for (CustomItem ci : customItems) { getCustomItemManager().addCustomItem(ci); } debug("Loaded custom items: " + loaded.toString()); } private void loadNames() { for (String s : getNameManager().getPrefixes()) { getNameManager().removePrefix(s); } for (String s : getNameManager().getSuffixes()) { getNameManager().removeSuffix(s); } File prefixFile = new File(getDataFolder(), "prefix.txt"); File suffixFile = new File(getDataFolder(), "suffix.txt"); SmartTextFile.writeToFile(getResource("prefix.txt"), prefixFile, true); SmartTextFile.writeToFile(getResource("suffix.txt"), suffixFile, true); SmartTextFile smartPrefixFile = new SmartTextFile(prefixFile); SmartTextFile smartSuffixFile = new SmartTextFile(suffixFile); for (String s : smartPrefixFile.read()) { getNameManager().addPrefix(s); } for (String s : smartSuffixFile.read()) { getNameManager().addSuffix(s); } debug("Loaded prefixes: " + getNameManager().getPrefixes().size(), "Loaded suffixes: " + getNameManager() .getSuffixes().size()); } private void loadItemGroups() { for (ItemGroup ig : getItemGroupManager().getItemGroups()) { getItemGroupManager().removeItemGroup(ig.getName()); } Set<ItemGroup> itemGroups = new HashSet<>(); List<String> loadedItemGroups = new ArrayList<>(); for (String key : itemsYAML.getKeys(false)) { if (!itemsYAML.isList(key)) { continue; } List<String> list = itemsYAML.getStringList(key); ItemGroup ig = new LootItemGroup(key, false); for (String s : list) { Material m = StringConverter.toMaterial(s); if (m == Material.AIR) { continue; } ig.addMaterial(m); } itemGroups.add(ig); loadedItemGroups.add(key); } for (ItemGroup ig : itemGroups) { getItemGroupManager().addItemGroup(ig); } debug("Loaded item groups: " + loadedItemGroups.toString()); } private void loadTiers() { for (Tier t : 
getTierManager().getLoadedTiers()) { getTierManager().removeTier(t.getName()); } Set<Tier> tiers = new HashSet<>(); List<String> loadedTiers = new ArrayList<>(); for (String key : tierYAML.getKeys(false)) { if (!tierYAML.isConfigurationSection(key)) { continue; } ConfigurationSection cs = tierYAML.getConfigurationSection(key); TierBuilder builder = getNewTierBuilder(key); builder.withDisplayName(cs.getString("display-name")); builder.withDisplayColor(TextUtils.convertTag(cs.getString("display-color"))); builder.withIdentificationColor(TextUtils.convertTag(cs.getString("identification-color"))); builder.withSpawnWeight(cs.getDouble("spawn-weight")); builder.withIdentifyWeight(cs.getDouble("identify-weight")); builder.withDistanceWeight(cs.getDouble("distance-weight")); builder.withMinimumSockets(cs.getInt("minimum-sockets")); builder.withMaximumSockets(cs.getInt("maximum-sockets")); builder.withMinimumBonusLore(cs.getInt("minimum-bonus-lore")); builder.withMaximumBonusLore(cs.getInt("maximum-bonus-lore")); builder.withBaseLore(cs.getStringList("base-lore")); builder.withBonusLore(cs.getStringList("bonus-lore")); List<String> sl = cs.getStringList("item-groups"); Set<ItemGroup> itemGroups = new HashSet<>(); for (String s : sl) { ItemGroup ig; if (s.startsWith("-")) { ig = getItemGroupManager().getItemGroup(s.substring(1)); if (ig == null) { continue; } ig = ig.getInverse(); } else { ig = getItemGroupManager().getItemGroup(s); if (ig == null) { continue; } } itemGroups.add(ig.getInverse()); } builder.withItemGroups(itemGroups); builder.withMinimumDurability(cs.getDouble("minimum-durability")); builder.withMaximumDurability(cs.getDouble("maximum-durability")); Tier t = builder.build(); loadedTiers.add(t.getName()); tiers.add(t); } for (Tier t : tiers) { getTierManager().addTier(t); } debug("Loaded tiers: " + loadedTiers.toString()); } public TierBuilder getNewTierBuilder(String name) { return new LootTierBuilder(name); } public ItemBuilder getNewItemBuilder() { return 
new LootItemBuilder(this); } public CustomItemBuilder getNewCustomItemBuilder(String name) { return new LootCustomItemBuilder(name); } public SocketGemBuilder getNewSocketGemBuilder(String name) { return new LootSocketGemBuilder(name); } public CreatureModBuilder getNewCreatureModBuilder(EntityType entityType) { return new LootCreatureModBuilder(entityType); } public TierManager getTierManager() { return tierManager; } public ItemGroupManager getItemGroupManager() { return itemGroupManager; } public NameManager getNameManager() { return nameManager; } public IvorySettings getSettings() { return settings; } public CustomItemManager getCustomItemManager() { return customItemManager; } public SocketGemManager getSocketGemManager() { return socketGemManager; } public CreatureModManager getCreatureModManager() { return creatureModManager; } }
package kalang.compiler;

import kalang.AstNotFoundException;
import kalang.ast.ClassNode;
import kalang.ast.VarObject;
import kalang.ast.MethodNode;
import java.lang.reflect.Constructor;
import java.lang.reflect.Executable;
import java.lang.reflect.Field;
import java.lang.reflect.GenericArrayType;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.Parameter;
import java.lang.reflect.TypeVariable;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import kalang.ast.FieldNode;
import kalang.ast.ParameterNode;
import kalang.core.ArrayType;
import kalang.core.ClassType;
import kalang.core.GenericType;
import kalang.core.ParameterizedType;
import kalang.core.Type;
import kalang.core.Types;
import kalang.exception.Exceptions;
import kalang.util.AstUtil;
import kalang.util.MethodUtil;

/**
 * Loads a kalang AST ({@link ClassNode}) from a compiled java class via
 * reflection, falling back to {@link AstLoader#findAst} first so already
 * compiled kalang sources take precedence.
 *
 * @author Kason Yang
 */
public class JavaAstLoader extends AstLoader {

    static String ROOT_CLASS = "java.lang.Object";

    private ClassLoader javaClassLoader;

    // Cache of classes already converted; registered BEFORE conversion finishes
    // (see buildFromClass) so self-referential types do not recurse forever.
    private Map<String, ClassNode> loadedClasses = new HashMap<>();

    /**
     * Builds a lookup key of the form name(paramType,...) so a method declared
     * directly on the class can shadow an identical interface default method.
     */
    private static String getMethodDeclarationKey(Executable m) {
        Class<?>[] pts = m.getParameterTypes();
        String[] types = new String[pts.length];
        for (int i = 0; i < types.length; i++) {
            types[i] = pts[i].getName();
        }
        return MethodUtil.getDeclarationKey(m.getName(), types);
    }

    /**
     * build ast from java class
     *
     * @param clz the java class
     * @return the ast built from java class
     * @throws AstNotFoundException if a referenced type cannot be resolved
     */
    @Nonnull
    public ClassNode buildFromClass(@Nonnull Class clz) throws AstNotFoundException {
        ClassNode cn = ClassNode.create();
        cn.name = clz.getName();
        cn.isInterface = clz.isInterface();
        // Register early so recursive references to clz resolve to this node.
        loadedClasses.put(clz.getName(), cn);
        // Map each java type variable to the kalang generic type declared for it.
        Map<TypeVariable, GenericType> genericTypes = new HashMap<>();
        TypeVariable[] typeParameters = clz.getTypeParameters();
        if (typeParameters.length > 0) {
            for (TypeVariable pt : typeParameters) {
                GenericType gt = new GenericType(pt.getName());
                genericTypes.put(pt, gt);
                cn.declareGenericType(gt);
            }
        }
        // Superclass: the generic form is preferred, the raw class is the fallback.
        java.lang.reflect.Type superType = clz.getGenericSuperclass();
        Class superClazz = clz.getSuperclass();
        if (superType != null) {
            cn.superType = (ClassType) getType(superType, genericTypes, superClazz);
        }
        java.lang.reflect.Type[] typeInterfaces = clz.getGenericInterfaces();
        Class[] clzInterfaces = clz.getInterfaces();
        if (clzInterfaces != null) {
            for (int i = 0; i < clzInterfaces.length; i++) {
                cn.interfaces.add((ClassType) getType(typeInterfaces[i], genericTypes, clzInterfaces[i]));
            }
        }
        List<Executable> methods = new LinkedList<>();
        methods.addAll(Arrays.asList(clz.getDeclaredMethods()));
        methods.addAll(Arrays.asList(clz.getDeclaredConstructors()));
        Class[] itfs = clz.getInterfaces();
        //TODO should default method of interface becomes a declared method
        // Pull in interface default methods that the class does not override.
        List<String> declaredMethods = new LinkedList<>();
        for (Executable m : methods) {
            declaredMethods.add(getMethodDeclarationKey(m));
        }
        if (itfs != null) {
            for (Class i : itfs) {
                for (Method m : i.getMethods()) {
                    if (m.isDefault() && !declaredMethods.contains(getMethodDeclarationKey(m))) {
                        methods.add(m);
                    }
                }
            }
        }
        for (Executable m : methods) {
            MethodNode methodNode = cn.createMethodNode();
            for (Parameter p : m.getParameters()) {
                ParameterNode param = ParameterNode.create(methodNode);
                param.name = p.getName();
                param.type = getType(p.getParameterizedType(), genericTypes, p.getType());
                methodNode.parameters.add(param);
            }
            if (m instanceof Method) {
                methodNode.type = getType(((Method) m).getGenericReturnType(), genericTypes, ((Method) m).getReturnType());
                methodNode.name = m.getName();
                methodNode.modifier = m.getModifiers();
            } else if (m instanceof Constructor) {
                methodNode.name = "<init>";
                methodNode.type = Types.VOID_TYPE;// getType(clz);
                methodNode.modifier = m.getModifiers();// | Modifier.STATIC;
            }
            methodNode.body = null; // no bodies for reflectively loaded methods
            for (Class e : m.getExceptionTypes()) {
                methodNode.exceptionTypes.add(getType(e, genericTypes, e));
            }
        }
        for (Field f : clz.getFields()) {
            FieldNode fn = cn.createField();
            fn.name = f.getName();
            fn.type = getType(f.getGenericType(), genericTypes, f.getType());
            fn.modifier = f.getModifiers();
        }
        return cn;
    }

    public JavaAstLoader(@Nonnull ClassLoader javaClassLoader) {
        this.javaClassLoader = javaClassLoader;
    }

    public JavaAstLoader() {
        javaClassLoader = this.getClass().getClassLoader();
    }

    /**
     * Resolves a class name to an AST: cached conversions first, then the
     * parent loader, and finally reflection over the java class loader.
     */
    @Override
    protected ClassNode findAst(String className) throws AstNotFoundException {
        if (className == null) {
            // Was "warning:trying to null class" — the verb was missing.
            System.err.println("warning: trying to load null class");
            throw new AstNotFoundException("null");
        }
        ClassNode ast = loadedClasses.get(className);
        if (ast != null) {
            return ast;
        }
        try {
            return super.findAst(className);
        } catch (AstNotFoundException e) {
            try {
                Class clz = javaClassLoader.loadClass(className);
                ast = buildFromClass(clz);
                return ast;
            } catch (ClassNotFoundException ex) {
                // Re-throw the original failure; the class is simply unknown.
                throw e;
            }
        }
    }

    /**
     * Converts an array of java reflection types; returns null if ANY element
     * fails to convert (callers treat the whole signature as unresolvable).
     */
    @Nullable
    private Type[] transType(java.lang.reflect.Type[] ts, Map<TypeVariable, GenericType> genericTypes) throws AstNotFoundException {
        Type[] ret = new Type[ts.length];
        for (int i = 0; i < ret.length; i++) {
            ret[i] = transType(ts[i], genericTypes);
            if (ret[i] == null) {
                return null;
            }
        }
        return ret;
    }

    /**
     * Converts one java reflection type into a kalang {@link Type}, or null
     * when the type (e.g. an undeclared type variable) cannot be mapped.
     */
    @Nullable
    private Type transType(java.lang.reflect.Type t, Map<TypeVariable, GenericType> genericTypes) throws AstNotFoundException {
        if (t instanceof TypeVariable) {
            GenericType vt = genericTypes.get((TypeVariable) t);
            //FIXME why it maybe null?
            if (vt != null) {
                return vt;
            }
            return null;
        } else if (t instanceof java.lang.reflect.ParameterizedType) {
            java.lang.reflect.ParameterizedType pt = (java.lang.reflect.ParameterizedType) t;
            Type rawType = transType(pt.getRawType(), genericTypes);
            if (!(rawType instanceof ClassType)) {
                return null;
            }
            java.lang.reflect.Type[] typeArgs = pt.getActualTypeArguments();
            Type[] gTypes = transType(typeArgs, genericTypes);
            if (gTypes == null) {
                return null;
            }
            return new ParameterizedType((ClassType) rawType, gTypes);
        } else if (t instanceof java.lang.reflect.WildcardType) {
            java.lang.reflect.WildcardType wt = (java.lang.reflect.WildcardType) t;
            Type[] upperBounds = transType(wt.getUpperBounds(), genericTypes);
            if (upperBounds == null) {
                return null;
            }
            Type[] lowerBounds = transType(wt.getLowerBounds(), genericTypes);
            if (lowerBounds == null) {
                return null;
            }
            return Types.getWildcartType(upperBounds, lowerBounds);
        } else if (t instanceof GenericArrayType) {
            GenericArrayType gt = (GenericArrayType) t;
            Type ct = transType(gt.getGenericComponentType(), genericTypes);
            if (ct == null) {
                return null;
            }
            return new ArrayType(ct);
        } else if (t instanceof Class) {
            Class type = (Class) t;
            if (type.isPrimitive()) {
                return Types.getPrimitiveType(type.getTypeName());
            } else if (type.isArray()) {
                return Types.getArrayType(getType(type.getComponentType(), genericTypes, type.getComponentType()));
            } else {
                return Types.getClassType(findAst(type.getName()));
            }
        } else {
            return null;
        }
    }

    /**
     * Converts the generic type, falling back to the erased defaultClass when
     * the generic form cannot be mapped.
     */
    private Type getType(java.lang.reflect.Type t, Map<TypeVariable, GenericType> genericTypes, Class defaultClass) throws AstNotFoundException {
        Type type = this.transType(t, genericTypes);
        return type == null ? transType(defaultClass, genericTypes) : type;
    }

}
package kalang.core; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; import kalang.ast.ClassNode; /** * * @author Kason Yang */ public class ParameterizedType extends ClassType { Type[] parameterTypes; private final ClassType rawType; public ParameterizedType(ClassType rawType,Type... parameterTypes ) { super(rawType.getClassNode()); this.rawType = rawType; this.parameterTypes = parameterTypes; //TODO check parameterTypes.length } public Type[] getParameterTypes() { return parameterTypes; } public Map<GenericType,Type> getParameterTypesMap(){ ClassNode clz = getClassNode(); GenericType[] gts = clz.getGenericTypes(); Map<GenericType,Type> ret = new HashMap(); for(int i=0;i<gts.length;i++){ ret.put(gts[i], parameterTypes[i]); } return ret; } public ClassType getRawType() { return rawType; } @Override public String getDeclarationKey() { return rawType.getDeclarationKey(); } @Override public String getName() { List<String> paramTypes = new ArrayList(parameterTypes.length); for(Type t:parameterTypes){ paramTypes.add(t.getName()); } return rawType.getName() + "<" + String.join(",",paramTypes) + ">"; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final ParameterizedType other = (ParameterizedType) obj; if (!Arrays.deepEquals(this.parameterTypes, other.parameterTypes)) { return false; } if (!Objects.equals(this.rawType, other.rawType)) { return false; } return true; } }
package net.etalia.client.domain; import java.util.Collections; import java.util.Map; public class User { private String id; private String title; private Map<String, Object> extraData; public String getId() { return id; } public void setId(String id) { this.id = id; } public String getTitle() { return title; } public void setTitle(String title) { this.title = title; } public Map<String,Object> getExtraData() { if (this.extraData == null) return Collections.emptyMap(); return Collections.unmodifiableMap(this.extraData); } @SuppressWarnings("unchecked") public <T> T getExtraData(String name) { if (extraData == null) return null; return (T)extraData.get(name); } }
package network.brightspots.rcv; import java.net.UnknownHostException; import java.util.List; import java.util.Map; class Utils { static final boolean IS_OS_WINDOWS = osMatchesName("Windows"); static final boolean IS_OS_MAC = osMatchesName("Mac"); static final boolean IS_OS_LINUX = osMatchesName("Linux"); private static final Map<String, String> envMap = System.getenv(); static boolean isNullOrBlank(String s) { return s == null || s.isBlank(); } static boolean isInt(String s) { boolean isInt = true; try { Integer.parseInt(s); } catch (NumberFormatException e) { isInt = false; } return isInt; } static String listToSentenceWithQuotes(List<String> list) { String sentence; if (list.size() == 1) { sentence = String.format("\"%s\"", list.get(0)); } else if (list.size() == 2) { // if there are only 2 items, don't use a comma sentence = String.format("\"%s\" and \"%s\"", list.get(0), list.get(1)); } else { StringBuilder stringBuilder = new StringBuilder(); for (int i = 0; i < list.size() - 1; i++) { stringBuilder.append("\"").append(list.get(i)).append("\", "); } stringBuilder.append("and \"").append(list.get(list.size() - 1)).append("\""); sentence = stringBuilder.toString(); } return sentence; } static String getComputerName() { String computerName = "[unknown]"; try { java.net.InetAddress localMachine = java.net.InetAddress.getLocalHost(); computerName = localMachine.getHostName(); } catch (UnknownHostException e) { if (envMap.containsKey("COMPUTERNAME")) { computerName = envMap.get("COMPUTERNAME"); } else if (envMap.containsKey("HOSTNAME")) { computerName = envMap.get("HOSTNAME"); } } return computerName; } static String getUserName() { String user = System.getProperty("user.name"); if (user == null) { user = envMap.getOrDefault("USERNAME", "[unknown]"); } return user; } private static boolean osMatchesName(final String osNamePrefix) { return System.getProperty("os.name").toUpperCase().startsWith(osNamePrefix.toUpperCase()); } }
package org.amc.game.chess;

import org.amc.util.DefaultSubject;

/**
 * Represents a Chess Board
 * Responsibility is to know the position of all the pieces
 *
 * @author Adrian Mclaughlin
 *
 */
public class ChessBoard extends DefaultSubject {

    /**
     * Board files A..H mapped to 0-based column indexes.
     * (Enums are inherently Comparable — the explicit
     * "implements Comparable&lt;Coordinate&gt;" was redundant and removed.)
     */
    public enum Coordinate {
        A(0), B(1), C(2), D(3), E(4), F(5), G(6), H(7);

        // 0-based column index; made final — it is set once in the constructor.
        private final int name;

        private Coordinate(int name) {
            this.name = name;
        }

        public int getName() {
            return this.name;
        }
    }

    // board[file][rank-1]; null means the square is empty.
    private final ChessPiece[][] board;

    public ChessBoard() {
        board = new ChessPiece[8][8];
    }

    /**
     * Sets up the board in it's initial state
     */
    public void initialise() {
        putPieceOnBoardAt(new BishopPiece(Colour.WHITE), new Location(Coordinate.C, 1));
        putPieceOnBoardAt(new BishopPiece(Colour.WHITE), new Location(Coordinate.F, 1));
        putPieceOnBoardAt(new KingPiece(Colour.WHITE), new Location(Coordinate.E, 1));
        putPieceOnBoardAt(new KnightPiece(Colour.WHITE), new Location(Coordinate.B, 1));
        putPieceOnBoardAt(new KnightPiece(Colour.WHITE), new Location(Coordinate.G, 1));
        putPieceOnBoardAt(new BishopPiece(Colour.BLACK), new Location(Coordinate.C, 8));
        putPieceOnBoardAt(new BishopPiece(Colour.BLACK), new Location(Coordinate.F, 8));
        putPieceOnBoardAt(new KingPiece(Colour.BLACK), new Location(Coordinate.E, 8));
        putPieceOnBoardAt(new KnightPiece(Colour.BLACK), new Location(Coordinate.B, 8));
        putPieceOnBoardAt(new KnightPiece(Colour.BLACK), new Location(Coordinate.G, 8));
    }

    /**
     * Moves a player's piece, validating ownership and the piece's own move
     * rules, then notifies observers of the changed board.
     *
     * @throws InvalidMoveException when no piece is at the start square, the
     *         piece belongs to the other player, or the move is illegal
     */
    public void move(Player player, Move move) throws InvalidMoveException {
        ChessPiece piece = getPieceFromBoardAt(move.getStart());
        if (piece == null) {
            throw new InvalidMoveException("No piece at " + move.getStart());
        } else if (player.getColour() != piece.getColour()) {
            throw new InvalidMoveException("Player can only move their own pieces");
        } else {
            if (piece.isValidMove(this, move)) {
                removePieceOnBoardAt(piece, move.getStart());
                putPieceOnBoardAt(piece, move.getEnd());
                this.notifyObservers(null);
            } else {
                throw new InvalidMoveException("Not a valid move");
            }
        }
    }

    void removePieceOnBoardAt(ChessPiece piece, Location location) {
        this.board[location.getLetter().getName()][location.getNumber() - 1] = null;
    }

    void putPieceOnBoardAt(ChessPiece piece, Location location) {
        this.board[location.getLetter().getName()][location.getNumber() - 1] = piece;
    }

    // numberCoordinate is the 1-based rank; the -1 converts to the array index.
    ChessPiece getPieceFromBoardAt(int letterCoordinate, int numberCoordinate) {
        return board[letterCoordinate][numberCoordinate - 1];
    }

    ChessPiece getPieceFromBoardAt(Location location) {
        return getPieceFromBoardAt(location.getLetter().getName(), location.getNumber());
    }
}
package org.cactoos.iterator; import java.security.SecureRandom; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Random; import org.cactoos.list.ListOf; import org.cactoos.scalar.Sticky; import org.cactoos.scalar.Unchecked; /** * Shuffled iterator. * * <p>There is no thread-safety guarantee.</p> * * @param <T> Element type * @since 0.20 */ public final class Shuffled<T> implements Iterator<T> { /** * Shuffled scalar. */ private final Unchecked<Iterator<T>> scalar; /** * Ctor. * @param iterator The original iterator */ public Shuffled(final Iterator<? extends T> iterator) { this(new SecureRandom(), iterator); } /** * Ctor. * @param random Randomizer. * @param iterator The original iterator */ public Shuffled(final Random random, final Iterator<? extends T> iterator) { this.scalar = new Unchecked<>( new Sticky<>( () -> { final List<T> items = new ListOf<>(iterator); Collections.shuffle(items, random); return items.iterator(); } ) ); } @Override public boolean hasNext() { return this.scalar.value().hasNext(); } @Override public T next() { return this.scalar.value().next(); } }
package org.kohsuke.github;

import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import java.io.IOException;
import java.net.URL;
import java.util.Date;

/**
 * A GitHub check run. Fields are populated by JSON deserialization
 * (hence the FindBugs suppressions for "unwritten" fields).
 */
@SuppressFBWarnings(value = { "UWF_UNWRITTEN_FIELD", "NP_UNWRITTEN_FIELD", "URF_UNREAD_FIELD" },
        justification = "JSON API")
public class GHCheckRun extends GHObject {

    GHRepository owner;
    GitHub root;

    // Fields below mirror the JSON payload of the Checks API; names are the
    // deserialization contract — do not rename.
    private String status;
    private String conclusion;
    private String name;
    private String headSha;
    private String nodeId;
    private String externalId;
    private String startedAt;
    private String completedAt;
    private URL htmlUrl;
    private URL detailsUrl;
    private Output output;
    private GHApp app;
    private GHPullRequest[] pullRequests;

    // TODO: Add Check Suite object

    // Binds this check run to its repository and propagates the root handle.
    GHCheckRun wrap(GHRepository owner) {
        this.owner = owner;
        this.root = owner.root;
        return this;
    }

    // Binds this check run to a GitHub root; also re-wraps the owner if set.
    GHCheckRun wrap(GitHub root) {
        this.root = root;
        if (owner != null) {
            owner.wrap(root);
        }
        return this;
    }

    // NOTE(review): exposes the internal array reference directly — callers
    // could mutate it. Consider returning a clone.
    GHPullRequest[] wrap() {
        return pullRequests;
    }

    /**
     * Gets status of the check run. It can be one of "queued", "in_progress", or "completed"
     *
     * @return Status of the check run
     */
    public String getStatus() {
        return status;
    }

    /**
     * Gets conclusion of a completed check run. It can be one of "success", "failure", "neutral", "cancelled",
     * "time_out", or "action_required".
     *
     * @return Status of the check run
     */
    public String getConclusion() {
        return conclusion;
    }

    /**
     * Gets the custom name of this check run.
     *
     * @return Name of the check run
     */
    public String getName() {
        return name;
    }

    /**
     * Gets the HEAD SHA.
     *
     * @return sha for the HEAD commit
     */
    public String getHeadSha() {
        return headSha;
    }

    /**
     * Gets the pull requests participated in this check run. Each pull request
     * is refreshed (re-fetched) before being returned.
     *
     * @return Pull requests of this check run
     * @throws IOException if refreshing any pull request fails
     */
    GHPullRequest[] getPullRequests() throws IOException {
        if (pullRequests != null && pullRequests.length != 0) {
            for (GHPullRequest singlePull : pullRequests) {
                singlePull.refresh();
            }
        }
        return pullRequests;
    }

    @Override
    public URL getHtmlUrl() {
        return htmlUrl;
    }

    public String getNodeId() {
        return nodeId;
    }

    /**
     * Gets a reference for the check run on the integrator's system.
     *
     * @return Reference id
     */
    public String getExternalId() {
        return externalId;
    }

    /**
     * Gets the details URL from which to find full details of the check run on the integrator's site.
     *
     * @return Details URL
     */
    public URL getDetailsUrl() {
        return detailsUrl;
    }

    /**
     * Gets the start time of the check run in ISO 8601 format: YYYY-MM-DDTHH:MM:SSZ.
     *
     * @return Timestamp of the start time
     */
    public Date getStartedAt() {
        return GitHubClient.parseDate(startedAt);
    }

    /**
     * Gets the completed time of the check run in ISO 8601 format: YYYY-MM-DDTHH:MM:SSZ.
     *
     * @return Timestamp of the completed time
     */
    public Date getCompletedAt() {
        return GitHubClient.parseDate(completedAt);
    }

    /**
     * Gets the GitHub app this check run belongs to, included in response.
     *
     * @return GitHub App
     */
    public GHApp getApp() {
        return app;
    }

    public Output getOutput() {
        return output;
    }

    /**
     * The output (title, summary, text and annotations) attached to a
     * check run; populated by JSON deserialization.
     */
    public static class Output {
        private String title;
        private String summary;
        private String text;
        private int annotationsCount;
        private URL annotationsUrl;

        public String getTitle() {
            return title;
        }

        public String getSummary() {
            return summary;
        }

        public String getText() {
            return text;
        }

        public int getAnnotationsCount() {
            return annotationsCount;
        }

        public URL getAnnotationsUrl() {
            return annotationsUrl;
        }
    }

}
package org.myrobotlab.service; import java.io.IOException; import org.myrobotlab.framework.Service; import org.myrobotlab.framework.ServiceType; import org.myrobotlab.logging.Level; import org.myrobotlab.logging.LoggerFactory; import org.myrobotlab.logging.Logging; import org.myrobotlab.logging.LoggingFactory; import org.myrobotlab.openni.OpenNiData; import org.myrobotlab.openni.Skeleton; import org.slf4j.Logger; // TODO set pir sensor /** * * Sweety - The sweety robot service. Maintained by \@beetlejuice * */ public class Sweety extends Service { private static final long serialVersionUID = 1L; public final static Logger log = LoggerFactory.getLogger(Sweety.class); transient public Arduino arduino; transient public Adafruit16CServoDriver adaFruit16cRight; transient public Adafruit16CServoDriver adaFruit16cLeft; transient public WebkitSpeechRecognition ear; transient public WebGui webGui; transient public MarySpeech mouth; transient public static Tracking tracker; transient public ProgramAB chatBot; transient public static OpenNi openni; transient public Pid pid; transient public Pir pir; transient public HtmlFilter htmlFilter; transient public OpenCV openCV; // Right arm Servomotors transient public Servo rightShoulderServo; transient public Servo rightArmServo; transient public Servo rightBicepsServo; transient public Servo rightElbowServo; transient public Servo rightWristServo; // Left arm Servomotors transient public Servo leftShoulderServo; transient public Servo leftArmServo; transient public Servo leftBicepsServo; transient public Servo leftElbowServo; transient public Servo leftWristServo; // Right hand Servomotors transient public Servo rightThumbServo; transient public Servo rightIndexServo; transient public Servo rightMiddleServo; transient public Servo rightRingServo; transient public Servo rightPinkyServo; // Left hand Servomotors transient public Servo leftThumbServo; transient public Servo leftIndexServo; transient public Servo leftMiddleServo; 
transient public Servo leftRingServo; transient public Servo leftPinkyServo; // Head Servomotors transient public Servo neckTiltServo; transient public Servo neckPanServo; // Ultrasonic sensors transient public UltrasonicSensor USfront; transient public UltrasonicSensor USfrontRight; transient public UltrasonicSensor USfrontLeft; transient public UltrasonicSensor USback; transient public UltrasonicSensor USbackRight; transient public UltrasonicSensor USbackLeft; boolean copyGesture = false; boolean firstSkeleton = true; boolean saveSkeletonFrame = false; // Adafruit16CServoDriver setup String i2cBus = "0"; String i2cAdressRight = "0x40"; String i2cAdressLeft = "0x41"; // arduino pins variables int rightMotorDirPin = 2; int rightMotorPwmPin = 3; int leftMotorDirPin = 4; int leftMotorPwmPin = 5; int backUltrasonicTrig = 22; int backUltrasonicEcho = 23; int back_leftUltrasonicTrig = 24; int back_leftUltrasonicEcho = 25; int back_rightUltrasonicTrig = 26; int back_rightUltrasonicEcho = 27; int front_leftUltrasonicTrig = 28; int front_leftUltrasonicEcho = 29; int frontUltrasonicTrig = 30; int frontUltrasonicEcho = 31; int front_rightUltrasonicTrig = 32; int front_rightUltrasonicEcho = 33; int SHIFT = 14; int LATCH = 15; int DATA = 16; int pirSensorPin = 17; int pin = 0; int min = 1; int max = 2; int rest = 3; // for arms and hands, the values are pin,min,max,rest // Right arm int rightShoulder[] = { 34, 0, 180, 0 }; int rightArm[] = { 1, 45, 155, 140 }; int rightBiceps[] = { 2, 12, 90, 12 }; int rightElbow[] = { 3, 8, 90, 8 }; int rightWrist[] = { 4, 0, 140, 140 }; // Left arm int leftShoulder[] = { 35, 0, 150, 148 }; int leftArm[] = { 1, 0, 85, 0 }; int leftBiceps[] = { 2, 60, 140, 140 }; int leftElbow[] = { 3, 0, 75, 0 }; int leftWrist[] = { 4, 0, 168, 0 }; // Right hand int rightThumb[] = { 5, 170, 75, 170 }; int rightIndex[] = { 6, 70, 180, 180 }; int rightMiddle[] = { 7, 1, 2, 3 }; int rightRing[] = { 8, 15, 130, 15 }; int rightPinky[] = { 9, 25, 180, 25 }; // Left 
hand int leftThumb[] = { 5, 40, 105, 40 }; int leftIndex[] = { 6, 0, 180, 0 }; int leftMiddle[] = { 7, 0, 180, 0 }; int leftRing[] = { 8, 10, 180, 180 }; int leftPinky[] = { 9, 65, 180, 180 }; // Head int neckTilt[] = { 6, 15, 50, 30 }; int neckPan[] = { 7, 20, 130, 75 }; /** * Replace the values of an array , if a value == -1 the old value is keep * Exemple if rightArm[]={35,1,2,3} and user ask to change by {-1,1,2,3}, this * method will return {35,1,2,3} This method must receive an array of ten * arrays. If one of these arrays is less or more than four numbers length , * it doesn't will be changed. */ int[][] changeArrayValues(int[][] valuesArray) { // valuesArray contain first the news values and after, the old values for (int i = 0; i < (valuesArray.length / 2); i++) { if (valuesArray[i].length == 4) { for (int j = 0; j < 3; j++) { if (valuesArray[i][j] == -1) { valuesArray[i][j] = valuesArray[i + 5][j]; } } } else { valuesArray[i] = valuesArray[i + (valuesArray.length / 2)]; } } return valuesArray; } /** * Set pin, min, max, and rest for each servos. 
-1 in an array mean "no * change" Exemple * setRightArm({39,1,2,3},{40,1,2,3},{41,1,2,3},{-1,1,2,3},{-1,1,2,3}) Python * exemple : * sweety.setRightArm([1,0,180,90],[2,0,180,0],[3,180,90,90],[7,7,4,4],[8,5,8,1]) */ public void setRightArm(int[] shoulder, int[] arm, int[] biceps, int[] elbow, int[] wrist) { int[][] valuesArray = new int[][] { shoulder, arm, biceps, elbow, wrist, rightShoulder, rightArm, rightBiceps, rightElbow, rightWrist }; valuesArray = changeArrayValues(valuesArray); rightShoulder = valuesArray[0]; rightArm = valuesArray[1]; rightBiceps = valuesArray[2]; rightElbow = valuesArray[3]; rightWrist = valuesArray[4]; } /** * Same as setRightArm */ public void setLefttArm(int[] shoulder, int[] arm, int[] biceps, int[] elbow, int[] wrist) { int[][] valuesArray = new int[][] { shoulder, arm, biceps, elbow, wrist, leftShoulder, leftArm, leftBiceps, leftElbow, leftWrist }; valuesArray = changeArrayValues(valuesArray); leftShoulder = valuesArray[0]; leftArm = valuesArray[1]; leftBiceps = valuesArray[2]; leftElbow = valuesArray[3]; leftWrist = valuesArray[4]; } /** * Same as setRightArm */ public void setLeftHand(int[] thumb, int[] index, int[] middle, int[] ring, int[] pinky) { int[][] valuesArray = new int[][] { thumb, index, middle, ring, pinky, leftThumb, leftIndex, leftMiddle, leftRing, leftPinky }; valuesArray = changeArrayValues(valuesArray); leftThumb = valuesArray[0]; leftIndex = valuesArray[1]; leftMiddle = valuesArray[2]; leftRing = valuesArray[3]; leftPinky = valuesArray[4]; } /** * Same as setRightArm */ public void setRightHand(int[] thumb, int[] index, int[] middle, int[] ring, int[] pinky) { int[][] valuesArray = new int[][] { thumb, index, middle, ring, pinky, rightThumb, rightIndex, rightMiddle, rightRing, rightPinky }; valuesArray = changeArrayValues(valuesArray); rightThumb = valuesArray[0]; rightIndex = valuesArray[1]; rightMiddle = valuesArray[2]; rightRing = valuesArray[3]; rightPinky = valuesArray[4]; } /** * Set pin, min, max, and 
rest for head tilt and pan . -1 in an array mean "no * change" Exemple setHead({39,1,2,3},{40,1,2,3}) Python exemple : * sweety.setHead([1,0,180,90],[2,0,180,0]) */ public void setHead(int[] tilt, int[] pan) { int[][] valuesArray = new int[][] { tilt, pan, neckTilt, neckPan }; valuesArray = changeArrayValues(valuesArray); neckTilt = valuesArray[0]; neckPan = valuesArray[1]; } // set Adafruit16CServoDriver setup public void setadafruitServoDriver(String i2cBusValue, String i2cAdressRightValue, String i2cAdressLeftValue) { i2cBus = i2cBusValue; i2cAdressRight = i2cAdressRightValue; i2cAdressLeft = i2cAdressLeftValue; } // variables for speak / mouth sync public int delaytime = 3; public int delaytimestop = 5; public int delaytimeletter = 1; String lang; public static void main(String[] args) { LoggingFactory.init(Level.INFO); try { Runtime.start("sweety", "Sweety"); } catch (Exception e) { Logging.logError(e); } } public Sweety(String n) { super(n); } /** * Attach the servos to arduino and adafruitServoDriver pins * * @throws Exception * e */ public void attach() throws Exception { adaFruit16cLeft.attach("arduino", i2cBus, i2cAdressLeft); adaFruit16cRight.attach("arduino", i2cBus, i2cAdressRight); rightElbowServo.attach(adaFruit16cRight, rightElbow[pin]); rightShoulderServo.attach(adaFruit16cRight, rightShoulder[pin]); rightArmServo.attach(adaFruit16cRight, rightArm[pin]); rightBicepsServo.attach(adaFruit16cRight, rightBiceps[pin]); rightElbowServo.attach(adaFruit16cRight, rightElbow[pin]); rightWristServo.attach(adaFruit16cRight, rightWrist[pin]); leftShoulderServo.attach(adaFruit16cLeft, leftShoulder[pin]); leftArmServo.attach(adaFruit16cLeft, leftArm[pin]); leftBicepsServo.attach(adaFruit16cLeft, leftBiceps[pin]); leftElbowServo.attach(adaFruit16cLeft, leftElbow[pin]); leftWristServo.attach(adaFruit16cLeft, leftWrist[pin]); rightThumbServo.attach(adaFruit16cRight, rightThumb[pin]); rightIndexServo.attach(adaFruit16cRight, rightIndex[pin]); 
rightMiddleServo.attach(adaFruit16cRight, rightMiddle[pin]); rightRingServo.attach(adaFruit16cRight, rightRing[pin]); rightPinkyServo.attach(adaFruit16cRight, rightPinky[pin]); leftThumbServo.attach(adaFruit16cLeft, leftThumb[pin]); leftIndexServo.attach(adaFruit16cLeft, leftIndex[pin]); leftMiddleServo.attach(adaFruit16cLeft, leftMiddle[pin]); leftRingServo.attach(adaFruit16cLeft, leftRing[pin]); leftPinkyServo.attach(adaFruit16cLeft, leftPinky[pin]); neckTiltServo.attach(arduino, neckTilt[pin]); neckPanServo.attach(arduino, neckPan[pin]); // Inverted servos neckTiltServo.setInverted(true); } /** * Connect the arduino to a COM port . Exemple : connect("COM8") * * @param port * port * @throws IOException * e */ public void connect(String port) throws IOException { arduino.connect(port); sleep(2000); arduino.pinMode(SHIFT, Arduino.OUTPUT); arduino.pinMode(LATCH, Arduino.OUTPUT); arduino.pinMode(DATA, Arduino.OUTPUT); arduino.pinMode(pirSensorPin, Arduino.INPUT); } /** * detach the servos from arduino pins */ public void detach() { if (rightElbowServo != null) rightElbowServo.detach(); if (rightShoulderServo != null) rightShoulderServo.detach(); if (rightArmServo != null) rightArmServo.detach(); if (rightBicepsServo != null) rightBicepsServo.detach(); if (rightElbowServo != null) rightElbowServo.detach(); if (rightWristServo != null) rightWristServo.detach(); if (leftShoulderServo != null) leftShoulderServo.detach(); if (leftShoulderServo != null) leftShoulderServo.detach(); if (leftBicepsServo != null) leftBicepsServo.detach(); if (leftElbowServo != null) leftElbowServo.detach(); if (leftWristServo != null) leftWristServo.detach(); if (rightThumbServo != null) rightThumbServo.detach(); if (rightIndexServo != null) rightIndexServo.detach(); if (rightMiddleServo != null) rightMiddleServo.detach(); if (rightRingServo != null) rightRingServo.detach(); if (rightPinkyServo != null) rightPinkyServo.detach(); if (leftThumbServo != null) leftThumbServo.detach(); if 
(leftIndexServo != null) leftIndexServo.detach(); if (leftMiddleServo != null) leftMiddleServo.detach(); if (leftRingServo != null) leftRingServo.detach(); if (leftPinkyServo != null) leftPinkyServo.detach(); if (neckTiltServo != null) neckTiltServo.detach(); if (neckPanServo != null) neckPanServo.detach(); } /** * Move the head . Use : head(neckTiltAngle, neckPanAngle -1 mean "no change" * * @param neckTiltAngle * tilt * @param neckPanAngle * pan */ public void setHeadPosition(double neckTiltAngle, double neckPanAngle) { if (neckTiltAngle == -1) { neckTiltAngle = neckTiltServo.getPos(); } if (neckPanAngle == -1) { neckPanAngle = neckPanServo.getPos(); } neckTiltServo.moveTo(neckTiltAngle); neckPanServo.moveTo(neckPanAngle); } /** * Move the right arm . Use : setRightArm(shoulder angle, arm angle, biceps * angle, Elbow angle, wrist angle) -1 mean "no change" * * @param shoulderAngle * s * @param armAngle * a * @param bicepsAngle * b * @param ElbowAngle * f * @param wristAngle * w */ public void setRightArmPosition(double shoulderAngle, double armAngle, double bicepsAngle, double ElbowAngle, double wristAngle) { // TODO protect against self collision if (shoulderAngle == -1) { shoulderAngle = rightShoulderServo.getPos(); } if (armAngle == -1) { armAngle = rightArmServo.getPos(); } if (bicepsAngle == -1) { armAngle = rightBicepsServo.getPos(); } if (ElbowAngle == -1) { ElbowAngle = rightElbowServo.getPos(); } if (wristAngle == -1) { wristAngle = rightWristServo.getPos(); } rightShoulderServo.moveTo(shoulderAngle); rightArmServo.moveTo(armAngle); rightBicepsServo.moveTo(bicepsAngle); rightElbowServo.moveTo(ElbowAngle); rightWristServo.moveTo(wristAngle); } /* * Move the left arm . 
Use : setLeftArm(shoulder angle, arm angle, biceps * angle, Elbow angle, Elbow angle,wrist angle) -1 mean "no change" * * @param shoulderAngle s * * @param armAngle a * * @param bicepsAngle b * * @param ElbowAngle f * * @param wristAngle w */ public void setLeftArmPosition(double shoulderAngle, double armAngle, double bicepsAngle, double ElbowAngle, double wristAngle) { // TODO protect against self collision with -> servoName.getPos() if (shoulderAngle == -1) { shoulderAngle = leftShoulderServo.getPos(); } if (armAngle == -1) { armAngle = leftArmServo.getPos(); } if (bicepsAngle == -1) { armAngle = leftBicepsServo.getPos(); } if (ElbowAngle == -1) { ElbowAngle = leftElbowServo.getPos(); } if (wristAngle == -1) { wristAngle = leftWristServo.getPos(); } leftShoulderServo.moveTo(shoulderAngle); leftArmServo.moveTo(armAngle); leftBicepsServo.moveTo(bicepsAngle); leftElbowServo.moveTo(ElbowAngle); leftWristServo.moveTo(wristAngle); } /* * Move the left hand . Use : setLeftHand(thumb angle, index angle, middle * angle, ring angle, pinky angle) -1 mean "no change" */ public void setLeftHandPosition(double thumbAngle, double indexAngle, double middleAngle, double ringAngle, double pinkyAngle) { if (thumbAngle == -1) { thumbAngle = leftThumbServo.getPos(); } if (indexAngle == -1) { indexAngle = leftIndexServo.getPos(); } if (middleAngle == -1) { middleAngle = leftMiddleServo.getPos(); } if (ringAngle == -1) { ringAngle = leftRingServo.getPos(); } if (pinkyAngle == -1) { pinkyAngle = leftPinkyServo.getPos(); } leftThumbServo.moveTo(thumbAngle); leftIndexServo.moveTo(indexAngle); leftMiddleServo.moveTo(middleAngle); leftRingServo.moveTo(ringAngle); leftPinkyServo.moveTo(pinkyAngle); } /* * Move the right hand . 
Use : setrightHand(thumb angle, index angle, middle * angle, ring angle, pinky angle) -1 mean "no change" */ public void setRightHandPosition(double thumbAngle, double indexAngle, double middleAngle, double ringAngle, double pinkyAngle) { if (thumbAngle == -1) { thumbAngle = rightThumbServo.getPos(); } if (indexAngle == -1) { indexAngle = rightIndexServo.getPos(); } if (middleAngle == -1) { middleAngle = rightMiddleServo.getPos(); } if (ringAngle == -1) { ringAngle = rightRingServo.getPos(); } if (pinkyAngle == -1) { pinkyAngle = rightPinkyServo.getPos(); } rightThumbServo.moveTo(thumbAngle); rightIndexServo.moveTo(indexAngle); rightMiddleServo.moveTo(middleAngle); rightRingServo.moveTo(ringAngle); rightPinkyServo.moveTo(pinkyAngle); } /* * Set the mouth attitude . choose : smile, notHappy, speechLess, empty. */ public void mouthState(String value) { if (value == "smile") { myShiftOut("11011100"); } else if (value == "notHappy") { myShiftOut("00111110"); } else if (value == "speechLess") { myShiftOut("10111100"); } else if (value == "empty") { myShiftOut("00000000"); } } /* * drive the motors . Speed &gt; 0 go forward . Speed &lt; 0 go backward . * Direction &gt; 0 go right . 
Direction &lt; 0 go left */ public void moveMotors(int speed, int direction) { int speedMin = 50; // min PWM needed for the motors boolean isMoving = false; int rightCurrentSpeed = 0; int leftCurrentSpeed = 0; if (speed < 0) { // Go backward arduino.analogWrite(rightMotorDirPin, 0); arduino.analogWrite(leftMotorDirPin, 0); speed = speed * -1; } else {// Go forward arduino.analogWrite(rightMotorDirPin, 255); arduino.analogWrite(leftMotorDirPin, 255); } if (direction > speedMin && speed > speedMin) {// move and turn to the // right if (isMoving) { arduino.analogWrite(rightMotorPwmPin, direction); arduino.analogWrite(leftMotorPwmPin, speed); } else { rightCurrentSpeed = speedMin; leftCurrentSpeed = speedMin; while (rightCurrentSpeed < speed && leftCurrentSpeed < direction) { if (rightCurrentSpeed < direction) { rightCurrentSpeed++; } if (leftCurrentSpeed < speed) { leftCurrentSpeed++; } arduino.analogWrite(rightMotorPwmPin, rightCurrentSpeed); arduino.analogWrite(leftMotorPwmPin, leftCurrentSpeed); sleep(20); } isMoving = true; } } else if (direction < (speedMin * -1) && speed > speedMin) {// move and // turn // the // left direction *= -1; if (isMoving) { arduino.analogWrite(leftMotorPwmPin, direction); arduino.analogWrite(rightMotorPwmPin, speed); } else { rightCurrentSpeed = speedMin; leftCurrentSpeed = speedMin; while (rightCurrentSpeed < speed && leftCurrentSpeed < direction) { if (rightCurrentSpeed < speed) { rightCurrentSpeed++; } if (leftCurrentSpeed < direction) { leftCurrentSpeed++; } arduino.analogWrite(rightMotorPwmPin, rightCurrentSpeed); arduino.analogWrite(leftMotorPwmPin, leftCurrentSpeed); sleep(20); } isMoving = true; } } else if (speed > speedMin) { // Go strait if (isMoving) { arduino.analogWrite(leftMotorPwmPin, speed); arduino.analogWrite(rightMotorPwmPin, speed); } else { int CurrentSpeed = speedMin; while (CurrentSpeed < speed) { CurrentSpeed++; arduino.analogWrite(rightMotorPwmPin, CurrentSpeed); arduino.analogWrite(leftMotorPwmPin, 
CurrentSpeed); sleep(20); } isMoving = true; } } else if (speed < speedMin && direction < speedMin * -1) {// turn left arduino.analogWrite(rightMotorDirPin, 255); arduino.analogWrite(leftMotorDirPin, 0); arduino.analogWrite(leftMotorPwmPin, speedMin); arduino.analogWrite(rightMotorPwmPin, speedMin); } else if (speed < speedMin && direction > speedMin) {// turn right arduino.analogWrite(rightMotorDirPin, 0); arduino.analogWrite(leftMotorDirPin, 255); arduino.analogWrite(leftMotorPwmPin, speedMin); arduino.analogWrite(rightMotorPwmPin, speedMin); } else {// stop arduino.analogWrite(leftMotorPwmPin, 0); arduino.analogWrite(rightMotorPwmPin, 0); isMoving = false; } } /** * Used to manage a shift register */ private void myShiftOut(String value) { arduino.digitalWrite(LATCH, 0); // Stop the copy for (int i = 0; i < 8; i++) { // Store the data if (value.charAt(i) == '1') { arduino.digitalWrite(DATA, 1); } else { arduino.digitalWrite(DATA, 0); } arduino.digitalWrite(SHIFT, 1); arduino.digitalWrite(SHIFT, 0); } arduino.digitalWrite(LATCH, 1); // copy } /** * Move the servos to show asked posture * * @param pos * pos */ public void posture(String pos) { if (pos == "rest") { setLeftArmPosition(leftShoulder[rest], leftArm[rest], leftBiceps[rest], leftElbow[rest], leftWrist[rest]); setRightArmPosition(rightShoulder[rest], rightArm[rest], rightBiceps[rest], rightElbow[rest], rightWrist[rest]); setLeftHandPosition(leftThumb[rest], leftIndex[rest], leftMiddle[rest], leftRing[rest], leftPinky[rest]); setRightHandPosition(rightThumb[rest], rightIndex[rest], rightMiddle[rest], rightRing[rest], rightPinky[rest]); setHeadPosition(neckTilt[rest], neckPan[rest]); } /* * Template else if (pos == ""){ setLeftArmPosition(, , , 85, 150); * setRightArmPosition(, , , 116, 10); setHeadPosition(75, 127, 75); } */ // TODO correct angles for posture else if (pos == "yes") { setLeftArmPosition(0, 95, 136, 85, 150); setRightArmPosition(155, 55, 5, 116, 10); setLeftHandPosition(-1, -1, -1, -1, -1); 
setRightHandPosition(-1, -1, -1, -1, -1); setHeadPosition(75, 85); } else if (pos == "concenter") { setLeftArmPosition(37, 116, 85, 85, 150); setRightArmPosition(109, 43, 54, 116, 10); setLeftHandPosition(-1, -1, -1, -1, -1); setRightHandPosition(-1, -1, -1, -1, -1); setHeadPosition(75, 85); } else if (pos == "showLeft") { setLeftArmPosition(68, 63, 160, 85, 150); setRightArmPosition(2, 76, 40, 116, 10); setLeftHandPosition(-1, -1, -1, -1, -1); setRightHandPosition(-1, -1, -1, -1, -1); setHeadPosition(75, 85); } else if (pos == "showRight") { setLeftArmPosition(145, 79, 93, 85, 150); setRightArmPosition(80, 110, 5, 116, 10); setLeftHandPosition(-1, -1, -1, -1, -1); setRightHandPosition(-1, -1, -1, -1, -1); setHeadPosition(75, 85); } else if (pos == "handsUp") { setLeftArmPosition(0, 79, 93, 85, 150); setRightArmPosition(155, 76, 40, 116, 10); setLeftHandPosition(-1, -1, -1, -1, -1); setRightHandPosition(-1, -1, -1, -1, -1); setHeadPosition(75, 85); } else if (pos == "carryBags") { setLeftArmPosition(145, 79, 93, 85, 150); setRightArmPosition(2, 76, 40, 116, 10); setLeftHandPosition(-1, -1, -1, -1, -1); setRightHandPosition(-1, -1, -1, -1, -1); setHeadPosition(75, 85); } } @Override public Sweety publishState() { super.publishState(); if (arduino != null) arduino.publishState(); if (rightShoulderServo != null) rightShoulderServo.publishState(); if (rightArmServo != null) rightArmServo.publishState(); if (rightBicepsServo != null) rightBicepsServo.publishState(); if (rightElbowServo != null) rightElbowServo.publishState(); if (rightWristServo != null) rightWristServo.publishState(); if (leftShoulderServo != null) leftShoulderServo.publishState(); if (leftArmServo != null) leftArmServo.publishState(); if (leftElbowServo != null) leftElbowServo.publishState(); if (leftBicepsServo != null) leftBicepsServo.publishState(); if (leftWristServo != null) leftWristServo.publishState(); if (rightThumbServo != null) rightThumbServo.publishState(); if (rightIndexServo != null) 
rightIndexServo.publishState(); if (rightMiddleServo != null) rightMiddleServo.publishState(); if (rightRingServo != null) rightRingServo.publishState(); if (rightPinkyServo != null) rightPinkyServo.publishState(); if (leftThumbServo != null) leftThumbServo.publishState(); if (leftIndexServo != null) leftIndexServo.publishState(); if (leftMiddleServo != null) leftMiddleServo.publishState(); if (leftRingServo != null) leftRingServo.publishState(); if (leftPinkyServo != null) leftPinkyServo.publishState(); if (neckTiltServo != null) neckTiltServo.publishState(); if (neckPanServo != null) neckPanServo.publishState(); return this; } /** * Say text and move mouth leds * * @param text * text being said */ public synchronized void saying(String text) { // Adapt mouth leds to words log.info("Saying :" + text); try { mouth.speak(text); } catch (Exception e) { Logging.logError(e); } } public synchronized void onStartSpeaking(String text) { sleep(15); boolean ison = false; String testword; String[] a = text.split(" "); for (int w = 0; w < a.length; w++) { testword = a[w]; char[] c = testword.toCharArray(); for (int x = 0; x < c.length; x++) { char s = c[x]; if ((s == 'a' || s == 'e' || s == 'i' || s == 'o' || s == 'u' || s == 'y') && !ison) { myShiftOut("00011100"); ison = true; sleep(delaytime); myShiftOut("00000100"); } else if (s == '.') { ison = false; myShiftOut("00000000"); sleep(delaytimestop); } else { ison = false; sleep(delaytimeletter); } } } } public synchronized void onEndSpeaking(String utterance) { myShiftOut("00000000"); } public void setdelays(Integer d1, Integer d2, Integer d3) { delaytime = d1; delaytimestop = d2; delaytimeletter = d3; } public void setLanguage(String lang) { this.lang = lang; } public void setVoice(String voice) { mouth.setVoice(voice); } @Override public void startService() { super.startService(); arduino = (Arduino) Runtime.start("arduino", "Arduino"); adaFruit16cLeft = (Adafruit16CServoDriver) Runtime.start("I2cServoControlLeft", 
"Adafruit16CServoDriver"); // completes the Runtime.start(...) call opened on the previous line
adaFruit16cRight = (Adafruit16CServoDriver) Runtime.start("I2cServoControlRight", "Adafruit16CServoDriver");
chatBot = (ProgramAB) Runtime.start("chatBot", "ProgramAB");
htmlFilter = (HtmlFilter) Runtime.start("htmlFilter", "HtmlFilter");
mouth = (MarySpeech) Runtime.start("mouth", "MarySpeech");
ear = (WebkitSpeechRecognition) Runtime.start("ear", "WebkitSpeechRecognition");
webGui = (WebGui) Runtime.start("webGui", "WebGui");
pir = (Pir) Runtime.start("pir", "Pir");

// configure services
pir.attach(arduino, pirSensorPin);

// Route speech start/stop events to onStartSpeaking/onEndSpeaking (mouth LED sync)
// FIXME - there is likely an "attach" that does this...
subscribe(mouth.getName(), "publishStartSpeaking");
subscribe(mouth.getName(), "publishEndSpeaking");
}

// Starts one Servo service per joint, then configures each servo's
// min/max travel and rest position from the per-joint angle arrays.
public void startServos() {
rightShoulderServo = (Servo) Runtime.start("rightShoulderServo", "Servo");
rightArmServo = (Servo) Runtime.start("rightArmServo", "Servo");
rightBicepsServo = (Servo) Runtime.start("rightBicepsServo", "Servo");
rightElbowServo = (Servo) Runtime.start("rightElbowServo", "Servo");
rightWristServo = (Servo) Runtime.start("rightWristServo", "Servo");
leftShoulderServo = (Servo) Runtime.start("leftShoulderServo", "Servo");
leftArmServo = (Servo) Runtime.start("leftArmServo", "Servo");
leftBicepsServo = (Servo) Runtime.start("leftBicepsServo", "Servo");
leftElbowServo = (Servo) Runtime.start("leftElbowServo", "Servo");
leftWristServo = (Servo) Runtime.start("leftWristServo", "Servo");
rightThumbServo = (Servo) Runtime.start("rightThumbServo", "Servo");
rightIndexServo = (Servo) Runtime.start("rightIndexServo", "Servo");
rightMiddleServo = (Servo) Runtime.start("rightMiddleServo", "Servo");
rightRingServo = (Servo) Runtime.start("rightRingServo", "Servo");
rightPinkyServo = (Servo) Runtime.start("rightPinkyServo", "Servo");
leftThumbServo = (Servo) Runtime.start("leftThumbServo", "Servo");
leftIndexServo = (Servo) Runtime.start("leftIndexServo", "Servo");
leftMiddleServo = (Servo) Runtime.start("leftMiddleServo", "Servo");
leftRingServo = (Servo) Runtime.start("leftRingServo", "Servo");
leftPinkyServo = (Servo) Runtime.start("leftPinkyServo", "Servo");
neckTiltServo = (Servo) Runtime.start("neckTiltServo", "Servo");
neckPanServo = (Servo) Runtime.start("neckPanServo", "Servo");

// Set min and max angle for each servos
rightShoulderServo.setMinMax(rightShoulder[min], rightShoulder[max]);
rightArmServo.setMinMax(rightArm[min], rightArm[max]);
rightBicepsServo.setMinMax(rightBiceps[min], rightBiceps[max]);
rightElbowServo.setMinMax(rightElbow[min], rightElbow[max]);
rightWristServo.setMinMax(rightWrist[min], rightWrist[max]);
leftShoulderServo.setMinMax(leftShoulder[min], leftShoulder[max]);
leftArmServo.setMinMax(leftArm[min], leftArm[max]);
leftBicepsServo.setMinMax(leftBiceps[min], leftBiceps[max]);
leftElbowServo.setMinMax(leftElbow[min], leftElbow[max]);
leftWristServo.setMinMax(leftWrist[min], leftWrist[max]);
rightThumbServo.setMinMax(rightThumb[min], rightThumb[max]);
rightIndexServo.setMinMax(rightIndex[min], rightIndex[max]);
rightMiddleServo.setMinMax(rightMiddle[min], rightMiddle[max]);
rightRingServo.setMinMax(rightRing[min], rightRing[max]);
rightPinkyServo.setMinMax(rightPinky[min], rightPinky[max]);
leftThumbServo.setMinMax(leftThumb[min], leftThumb[max]);
leftIndexServo.setMinMax(leftIndex[min], leftIndex[max]);
leftMiddleServo.setMinMax(leftMiddle[min], leftMiddle[max]);
leftRingServo.setMinMax(leftRing[min], leftRing[max]);
leftPinkyServo.setMinMax(leftPinky[min], leftPinky[max]);
neckTiltServo.setMinMax(neckTilt[min], neckTilt[max]);
neckPanServo.setMinMax(neckPan[min], neckPan[max]);

// Set rest for each servos
rightShoulderServo.setRest(rightShoulder[rest]);
rightArmServo.setRest(rightArm[rest]);
rightBicepsServo.setRest(rightBiceps[rest]);
rightElbowServo.setRest(rightElbow[rest]);
rightWristServo.setRest(rightWrist[rest]);
leftShoulderServo.setRest(leftShoulder[rest]);
leftArmServo.setRest(leftArm[rest]);
leftBicepsServo.setRest(leftBiceps[rest]);
leftElbowServo.setRest(leftElbow[rest]); // continues the setRest sequence begun in startServos()
leftWristServo.setRest(leftWrist[rest]);
rightThumbServo.setRest(rightThumb[rest]);
rightIndexServo.setRest(rightIndex[rest]);
rightMiddleServo.setRest(rightMiddle[rest]);
rightRingServo.setRest(rightRing[rest]);
rightPinkyServo.setRest(rightPinky[rest]);
leftThumbServo.setRest(leftThumb[rest]);
leftIndexServo.setRest(leftIndex[rest]);
leftMiddleServo.setRest(leftMiddle[rest]);
leftRingServo.setRest(leftRing[rest]);
leftPinkyServo.setRest(leftPinky[rest]);
neckTiltServo.setRest(neckTilt[rest]);
neckPanServo.setRest(neckPan[rest]);

setVelocity(75); // common default speed for all servos
}

// Applies the same velocity to every servo of the robot.
void setVelocity(int value) {
rightShoulderServo.setVelocity(value);
rightArmServo.setVelocity(value);
rightBicepsServo.setVelocity(value);
rightElbowServo.setVelocity(value);
rightWristServo.setVelocity(value);
leftShoulderServo.setVelocity(value);
leftArmServo.setVelocity(value);
leftBicepsServo.setVelocity(value);
leftElbowServo.setVelocity(value);
leftWristServo.setVelocity(value);
rightThumbServo.setVelocity(value);
rightIndexServo.setVelocity(value);
rightMiddleServo.setVelocity(value);
rightRingServo.setVelocity(value);
rightPinkyServo.setVelocity(value);
leftThumbServo.setVelocity(value);
leftIndexServo.setVelocity(value);
leftMiddleServo.setVelocity(value);
leftRingServo.setVelocity(value);
leftPinkyServo.setVelocity(value);
neckTiltServo.setVelocity(value);
neckPanServo.setVelocity(value);
}

/**
 * Start the tracking services
 */
public void startTrack() throws Exception {
tracker = (Tracking) Runtime.start("tracker", "Tracking");
openCV = (OpenCV) Runtime.start("openCv", "OpenCV");
sleep(1000); // give OpenCV a moment to come up before attaching the tracker
tracker.connect(openCV, neckPanServo, neckTiltServo);
// tracker.pid.invert("y");
// tracker.clearPreFilters();
}

/**
 * Start the ultrasonic sensors services
 *
 * @param port
 *            port
 * @throws Exception
 *             e
 */
// NOTE(review): the "port" parameter is never used in this body — the sensors
// attach to the already-started arduino; confirm whether it can be dropped.
public void startUltraSonic(String port) throws Exception {
USfront = (UltrasonicSensor) Runtime.start("USfront", "UltrasonicSensor");
USfrontRight = (UltrasonicSensor) Runtime.start("USfrontRight", "UltrasonicSensor");
USfrontLeft = (UltrasonicSensor) Runtime.start("USfrontLeft", "UltrasonicSensor");
USback = (UltrasonicSensor) Runtime.start("USback", "UltrasonicSensor");
USbackRight = (UltrasonicSensor) Runtime.start("USbackRight", "UltrasonicSensor");
USbackLeft = (UltrasonicSensor) Runtime.start("USbackLeft", "UltrasonicSensor");

// Each sensor gets its own trigger/echo pin pair on the arduino
USfront.attach(arduino, frontUltrasonicTrig, frontUltrasonicEcho);
USfrontRight.attach(arduino, front_rightUltrasonicTrig, front_rightUltrasonicEcho);
USfrontLeft.attach(arduino, front_leftUltrasonicTrig, front_leftUltrasonicEcho);
USback.attach(arduino, backUltrasonicTrig, backUltrasonicEcho);
USbackRight.attach(arduino, back_rightUltrasonicTrig, back_rightUltrasonicEcho);
USbackLeft.attach(arduino, back_leftUltrasonicTrig, back_leftUltrasonicEcho);
}

/**
 * Stop the tracking services
 *
 * @throws Exception
 *             e
 */
public void stopTrack() throws Exception {
tracker.opencv.stopCapture();
tracker.releaseService();
}

public OpenNi startOpenNI() throws Exception {
// TODO modify this function to fit new sweety
/*
 * Start the Kinect service
 */
if (openni == null) { // lazily start; subsequent calls reuse the same service
System.out.println("starting kinect");
openni = (OpenNi) Runtime.start("openni", "OpenNi");
pid = (Pid) Runtime.start("pid", "Pid");
pid.setMode("kinect", Pid.MODE_AUTOMATIC);
pid.setOutputRange("kinect", -1, 1);
pid.setPID("kinect", 10.0, 0.0, 1.0);
pid.setControllerDirection("kinect", 0);

// re-mapping of skeleton !
// openni.skeleton.leftElbow.mapXY(0, 180, 180, 0);
openni.skeleton.rightElbow.mapXY(0, 180, 180, 0);
// openni.skeleton.leftShoulder.mapYZ(0, 180, 180, 0);
openni.skeleton.rightShoulder.mapYZ(0, 180, 180, 0);
openni.skeleton.leftShoulder.mapXY(0, 180, 180, 0);
// openni.skeleton.rightShoulder.mapXY(0, 180, 180, 0);

// Skeleton frames arrive via onOpenNIData(OpenNiData)
openni.addListener("publishOpenNIData", this.getName(), "onOpenNIData");
// openni.addOpenNIData(this);
}
return openni;
}

// Toggles skeleton-copy mode: starts user tracking when b is true,
// stops capture (and re-arms the "i see you" message) when false.
public boolean copyGesture(boolean b) throws Exception {
log.info("copyGesture {}", b);
if (b) {
if (openni == null) {
openni = startOpenNI();
}
System.out.println("copying gestures");
openni.startUserTracking();
} else {
System.out.println("stop copying gestures");
if (openni != null) {
openni.stopCapture();
firstSkeleton = true;
}
}
copyGesture = b;
return b;
}

public String captureGesture() {
return captureGesture(null);
}

// Emits a python snippet reproducing the robot's current arm/head pose;
// when gestureName is given, wraps the snippet in a named def.
// NOTE(review): %d requires integral arguments — confirm Servo.getPos()
// returns an int here, otherwise String.format will throw.
public String captureGesture(String gestureName) {
StringBuffer script = new StringBuffer();
String indentSpace = "";
if (gestureName != null) {
indentSpace = "  ";
script.append(String.format("def %s():\n", gestureName));
}
script.append(indentSpace);
script.append(String.format("Sweety.setRightArmPosition(%d,%d,%d,%d,%d)\n", rightShoulderServo.getPos(), rightArmServo.getPos(), rightBicepsServo.getPos(),
rightElbowServo.getPos(), rightWristServo.getPos()));
script.append(indentSpace);
script.append(String.format("Sweety.setLeftArmPosition(%d,%d,%d,%d,%d)\n", leftShoulderServo.getPos(), leftArmServo.getPos(), leftBicepsServo.getPos(),
leftElbowServo.getPos(), leftWristServo.getPos()));
script.append(indentSpace);
script.append(String.format("Sweety.setHeadPosition(%d,%d)\n", neckTiltServo.getPos(), neckPanServo.getPos()));
send("python", "appendScript", script.toString());
return script.toString();
}

// Maps incoming kinect skeleton angles onto the arm servos.
public void onOpenNIData(OpenNiData data) {
Skeleton skeleton = data.skeleton;

if (firstSkeleton) {
System.out.println("i see you");
firstSkeleton = false;
}

// TODO adapt for new design
// Offset each raw skeleton angle into the servo's min/max travel range
int LElbow = Math.round(skeleton.leftElbow.getAngleXY()) - (180 - leftElbow[max]);
int Larm = Math.round(skeleton.leftShoulder.getAngleXY()) - (180 - leftArm[max]);
int Lshoulder = Math.round(skeleton.leftShoulder.getAngleYZ()) + leftShoulder[min];
int RElbow = Math.round(skeleton.rightElbow.getAngleXY()) + rightElbow[min];
int Rarm = Math.round(skeleton.rightShoulder.getAngleXY()) + rightArm[min];
int Rshoulder = Math.round(skeleton.rightShoulder.getAngleYZ()) - (180 - rightShoulder[max]);

// Move the left side
setLeftArmPosition(Lshoulder, Larm, LElbow, -1, -1);
// Move the right side
setRightArmPosition(Rshoulder, Rarm, RElbow, -1, -1);
}

/**
 * This static method returns all the details of the class without it having
 * to be constructed. It has description, categories, and dependencies
 * definitions.
 *
 * @return ServiceType - returns all the data
 *
 */
static public ServiceType getMetaData() {
ServiceType meta = new ServiceType(Sweety.class.getCanonicalName());
meta.addDescription("service for the Sweety robot");
meta.addCategory("robot");
return meta;
}
}
package org.openbmp; import java.sql.Connection; import java.sql.DriverManager; import java.sql.SQLException; import java.sql.Statement; import java.util.HashMap; import java.util.Map; import java.util.concurrent.BlockingQueue; import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; /** * MySQL writer thread class * * Inserts messages in bulk and batch (multi-statement) into MySQL by reading * the FIFO queue. */ public class MySQLWriterThread implements Runnable { private final Integer MAX_BULK_STATEMENTS = 2500; // Maximum number of bulk values/multi-statements to allow private final Integer MAX_BULK_WAIT_MS = 75; // Maximum milliseconds to wait for bulk messages private final Integer MAX_MYSQL_RETRIES = 10; // Maximum MySQL retires private static final Logger logger = LogManager.getFormatterLogger(MySQLWriterThread.class.getName()); private Connection con; // MySQL connection private Boolean dbConnected; // Indicates if DB is connected or not private Config cfg; private BlockingQueue<Map<String, String>> writerQueue; // Reference to the writer FIFO queue private boolean run; private final Object lock = new Object(); // Lock for thread private final Lock globalLock; // Global/sync globalLock between writers /** * Constructor * * @param cfg Configuration - e.g. 
DB credentials * @param queue FIFO queue to read from * @param lock Global lock */ public MySQLWriterThread(Config cfg, BlockingQueue queue, Lock lock) { globalLock = lock; this.cfg = cfg; writerQueue = queue; run = true; con = null; dbConnected = false; /* * Establish connection to MySQL */ try { con = DriverManager.getConnection( "jdbc:mariadb://" + cfg.getDbHost() + "/" + cfg.getDbName() + "?tcpKeepAlive=true&connectTimeout=30000&socketTimeout=15000&useCompression=true&autoReconnect=true&allowMultiQueries=true", cfg.getDbUser(), cfg.getDbPw()); logger.debug("Writer thread connected to mysql"); synchronized (lock) { dbConnected = true; } } catch (SQLException e) { e.printStackTrace(); logger.warn("Writer thread failed to connect to mysql"); } } /** * Shutdown this thread */ public synchronized void shutdown() { run = false; try { con.close(); } catch (SQLException e) { e.printStackTrace(); } } /** * Run MySQL update query * * @param query Query string to run * @param retries Number of times to retry, zero means no retries */ private void mysqlQueryUpdate(String query, int retries) { Boolean success = Boolean.FALSE; // Loop the request if broken pipe, connection timed out, or deadlock for (int i = 0; i <= retries; i++) { try { Statement stmt = con.createStatement(); logger.trace("SQL Query retry = %d: %s", i, query); stmt.executeUpdate(query); i = retries; success = Boolean.TRUE; break; } catch (SQLException e) { if (!e.getSQLState().equals("40001")) { logger.info("SQL exception state " + i + " : " + e.getSQLState()); logger.info("SQL exception: " + e.getMessage()); logger.debug("query: " + query); } //e.printStackTrace(); if (!e.getMessage().contains("Broken pipe") && !e.getMessage().contains("Connection timed out") && !e.getMessage().contains("Deadlock found when trying") ) { i = retries; break; } } } if (!success) { logger.warn("Failed to insert/update after max retires of %d", MAX_MYSQL_RETRIES); logger.warn("query: " + query); } } /** * Run the thread */ 
public void run() { if (!dbConnected) { logger.debug("Will not run writer thread since DB isn't connected"); return; } logger.debug("writer thread started"); long cur_time = 0; long prev_time = System.currentTimeMillis(); int bulk_count = 0; /* * bulk query map has a key of : <prefix|suffix> * Prefix and suffix are from the query FIFO message. Value is the VALUE to be inserted/updated/deleted */ Map<String, String> bulk_query = new HashMap<String, String>(); try { while (run) { cur_time = System.currentTimeMillis(); /* * Do insert/query if max wait/duration has been reached or if max statements have been reached. */ if (cur_time - prev_time > MAX_BULK_WAIT_MS || bulk_count >= MAX_BULK_STATEMENTS) { if (bulk_count > 0) { logger.trace("Max reached, doing insert: wait_ms=%d bulk_count=%d", cur_time - prev_time, bulk_count); // Block if another thread is running a sync query globalLock.lock(); globalLock.unlock(); StringBuilder query = new StringBuilder(); // Loop through queries and add them as multi-statements for (Map.Entry<String, String> entry : bulk_query.entrySet()) { String key = entry.getKey().toString(); String value = entry.getValue(); String[] ins = key.split("[|]"); if (query.length() > 0) query.append(';'); query.append(ins[0]); query.append(' '); query.append(value); query.append(' '); if (ins.length > 1 && ins[1] != null && ins[1].length() > 0) query.append(ins[1]); } if (query.length() > 0) { mysqlQueryUpdate(query.toString(), 10); } prev_time = System.currentTimeMillis(); bulk_count = 0; bulk_query.clear(); } else { prev_time = System.currentTimeMillis(); } } // Get next query from queue Map<String, String> cur_query = writerQueue.poll(MAX_BULK_WAIT_MS, TimeUnit.MILLISECONDS); if (cur_query != null) { if (cur_query.containsKey("prefix")) { String key = cur_query.get("prefix") + "|" + cur_query.get("suffix"); ++bulk_count; // merge the data to existing bulk map if already present if (bulk_query.containsKey(key)) { bulk_query.put(key, 
bulk_query.get(key).concat("," + cur_query.get("value"))); } else { bulk_query.put(key, cur_query.get("value")); } if (cur_query.get("value").length() > 100000) { bulk_count = MAX_BULK_STATEMENTS; logger.debug("value length is: %d", cur_query.get("value").length()); } } else if (cur_query.containsKey("query")) { // Null prefix means run query now, not in bulk logger.debug("Non bulk query"); // Sync query - globalLock other threads so that this completes in order globalLock.lock(); try { mysqlQueryUpdate(cur_query.get("query"), 3); } finally { globalLock.unlock(); } } } } } catch (InterruptedException e) { e.printStackTrace(); } logger.debug("Writer thread finished"); } /** * Indicates if the DB is connected or not. * * @return True if DB is connected, False otherwise */ public boolean isDbConnected() { boolean status; synchronized (lock) { status = dbConnected; } return status; } }
package org.opengis.te.stats; import java.io.BufferedWriter; import java.io.File; import java.io.FileNotFoundException; import java.io.FileWriter; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.attribute.BasicFileAttributes; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.Date; import java.util.NoSuchElementException; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import org.joda.time.DateTime; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.NodeList; import org.xml.sax.SAXParseException; public class TEReport { String username; String session; String date; String test; String result; public void processResult(File userDirPath, File reportFileName) { String[] rootDirs = userDirPath.list(); for (int i = 0; i < rootDirs.length; i++) { // Set username setUsername(rootDirs[i]); // Get the Session List String[] sessionList = new File(userDirPath, rootDirs[i]).list(); if (null != sessionList && sessionList.length > 0) { for (int j = 0; j < sessionList.length; j++) { // Set Session setSession(sessionList[j]); if (new File(new File(userDirPath, rootDirs[i]),sessionList[j]).isDirectory() && new File(new File(new File(userDirPath, rootDirs[i]), sessionList[j]), "session.xml").exists()) { try { File logFile = new File(new File(new File(userDirPath, rootDirs[i]), sessionList[j]),"log.xml"); File sessionFile = new File(new File(new File(userDirPath, rootDirs[i]), sessionList[j]),"session.xml"); DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); DocumentBuilder docBuilder = dbf.newDocumentBuilder(); Document doc = docBuilder.parse(sessionFile); NodeList sessionAttributeList = doc.getElementsByTagName("session"); Element sessionElement = (Element) sessionAttributeList.item(0); setSession(sessionElement.getAttribute("id")); setTest(sessionElement.getAttribute("sourcesId")); if 
(!sessionElement.hasAttribute("date")) { Path file = sessionFile.toPath(); BasicFileAttributes attr = Files.readAttributes(file, BasicFileAttributes.class); Date dates=new DateTime( attr.creationTime().toString() ).toDate(); DateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss"); String fileCreatinDate=dateFormat.format(dates); // session.setAttribute("date", currentdate); setDate(fileCreatinDate); } else { setDate(sessionElement.getAttribute("date")); } /* * Get Final Result from log file. */ getFinalResult(logFile); /* * It will write the current session info into report. */ reportWritter(reportFileName); } catch (SAXParseException pe) { System.out.println("Error: Unable to parse xml >>"); System.out.println(" Public ID: " + pe.getPublicId()); System.out.println(" System ID: " + pe.getSystemId()); System.out.println(" Line number: " + pe.getLineNumber()); System.out.println(" Column number: " + pe.getColumnNumber()); System.out.println(" Message: " + pe.getMessage()); System.exit(1); } catch (NullPointerException npe) { System.out.println("Error:Mandatory values are Null >> " + npe.getMessage()); System.exit(1); } catch (Exception e) { System.out.println("Execption occured: "+ e.toString()); System.exit(1); } } } } } } public void getFinalResult(File logFile){ try { DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); DocumentBuilder docBuilder = dbf.newDocumentBuilder(); Document doc = docBuilder.parse(logFile); NodeList logElementList = doc.getElementsByTagName("log"); Element logElement = (Element) logElementList.item(0); NodeList testResult = logElement.getElementsByTagName("endtest"); if (testResult.getLength() == 0) { throw new NoSuchElementException( "The 'endtest' element not found in log file."); } else { Element resultStatus = (Element) testResult.item(0); if (resultStatus.hasAttribute("result") && !resultStatus.getAttribute("result").equals("")) { setResult(resultStatus.getAttribute("result")); } else { throw new 
RuntimeException( "The 'result' attribute not found or having the NULL value in log file."); } } } catch (SAXParseException pe) { setResult("Error:" + pe.getMessage()); } catch (FileNotFoundException fnfe) { setResult("Error: The log file not exist."); } catch (NullPointerException npe) { setResult("Error:" + npe.getLocalizedMessage()); } catch (Exception e) { setResult("Error: " + e.getMessage()); } } public void reportWritter(File reportFileName){ BufferedWriter outputFile=null; // Write the result into file; try{ FileWriter resultsWritter=new FileWriter(reportFileName, true); outputFile = new BufferedWriter(resultsWritter); // "userName | session | date | testName | overallResult" String result = this.getUsername() + "," + this.getSession() + "," + this.getDate() + "," + this.getTest() + "," + getResultDescription(this.getResult()); outputFile.newLine(); outputFile.write(result); outputFile.close(); }catch(IOException io){ System.out.println("Exception while writting file."); io.printStackTrace(); } } public static void main(String[] args) { String userdir = args[0]; File userDirPath = new File(userdir); // Create TE Report File BufferedWriter outputFile=null; File dir=new File(System.getProperty("user.dir") + File.separator + "result-output" ); if(!dir.exists()){ if(!dir.mkdirs()){ System.out.println("Failed to create directory!"); } } SimpleDateFormat sdfDate = new SimpleDateFormat("yyyyMMdd-HHmmss");//dd/MM/yyyy Date now = new Date(); String currentdate = sdfDate.format(now); String resultFileName="teamengine-statistics-" + currentdate; File finalResult = new File(dir + File.separator + resultFileName + ".txt"); if(finalResult.exists()){ finalResult.delete(); } try{ FileWriter resultsWritter=new FileWriter(finalResult, true); outputFile = new BufferedWriter(resultsWritter); outputFile.write("userName,session,date,testName,overallResult"); outputFile.close(); }catch(IOException io){ System.out.println("Exception while writting file."); io.printStackTrace(); } 
TEReport te = new TEReport(); te.processResult(userDirPath, finalResult); System.out.println("The TE Statistics Report has been successfully generated."); System.out.println("Here -> " + finalResult); } public String getResultDescription(String result) { if (result.equals("-1")) { return "CONTINUE"; } else if (result.equals("0")) { return "BEST PRACTICE"; } else if (result.equals("1")) { return "PASS"; } else if (result.equals("2")) { return "NOT TESTED"; } else if (result.equals("3")) { return "SKIPPED"; } else if (result.equals("4")) { return "WARNING"; } else if (result.equals("5")) { return "INHERITED FAILURE"; } else if (result.equals("6")) { return "FAIL"; } else { return result; } } public String getUsername() { return username; } public void setUsername(String username) { this.username = username; } public String getSession() { return session; } public void setSession(String session) { this.session = session; } public String getDate() { return date; } public void setDate(String date) { this.date = date; } public String getTest() { return test; } public void setTest(String test) { this.test = test; } public String getResult() { return result; } public void setResult(String result) { this.result = result; } }
package org.oscii.api; import com.google.common.primitives.Doubles; import com.google.gson.Gson; import com.medallia.word2vec.Searcher; import com.medallia.word2vec.Searcher.Match; import com.medallia.word2vec.Searcher.UnknownWordException; import com.medallia.word2vec.Word2VecModel; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.oscii.concordance.AlignedCorpus; import org.oscii.concordance.AlignedSentence; import org.oscii.concordance.SentenceExample; import org.oscii.lex.*; import java.util.ArrayList; import java.util.List; import static java.util.stream.Collectors.toList; /** * Transmission protocol for Lexicon API */ public class LexiconProtocol { private final Lexicon lexicon; private final AlignedCorpus corpus; private final Ranker ranker; private final Word2VecModel word2vec; private final static Logger logger = LogManager.getLogger(LexiconProtocol.class); public LexiconProtocol(Lexicon lexicon, AlignedCorpus corpus, Ranker ranker, Word2VecModel word2vec) { this.lexicon = lexicon; this.corpus = corpus; this.ranker = ranker; this.word2vec = word2vec; } /* * Generate a response to a request parsed from requestString. */ public Response respond(Request request) { if (request.query == null || request.source == null || request.target == null) { return Response.error("Invalid request"); } Response response = new Response(); if (request.translate) addTranslations(request, response); if (request.define) addDefinitions(request, response); if (request.example) addExamples(request, response); if (request.extend) addExtensions(request, response); if (request.synonym) addSynonyms(request, response); if (request.wordvec) addWordVector(request, response); if (request.distance) addDistance(request, response); if (request.similar) addMatches(request, response); return response; } /* Aspect processing */ /* * Add translations filtered by frequency. 
*/ private void addTranslations(Request request, Response response) { List<Translation> results = lexicon.translate(request.query, request.source, request.target); results.stream().limit(request.maxCount).forEach(t -> { // TODO(denero) Add formatted source? String pos = t.pos.stream().findFirst().orElse(""); if (t.frequency >= request.minFrequency || response.translations.isEmpty()) { response.translations.add(new ResponseTranslation( request.query, pos, t.translation.text, t.frequency)); } }); } /* * Add distinct definitions. */ private void addDefinitions(Request request, Response response) { List<Definition> results = lexicon.define(request.query, request.source); results.stream() .limit(request.maxCount) .map(d -> { String pos = d.pos.stream().findFirst().orElse(""); return new ResponseDefinition(request.query, pos, d.text); }) .distinct() .forEach(response.definitions::add); } private void addExamples(Request request, Response response) { List<SentenceExample> results = corpus.examples(request.query, request.source, request.target, request.maxCount, request.memory); // TODO Rank examples (e.g., based on request.context) results.forEach(ex -> { AlignedSentence source = ex.sentence; AlignedSentence target = source.aligned; Span sourceSpan = new Span(ex.sourceStart, ex.sourceLength); Span targetSpan = new Span(ex.targetStart, ex.targetLength); ResponseExample example = new ResponseExample(source.tokens, source.delimiters, target.tokens, target.delimiters, source.getAlignment(), sourceSpan, targetSpan); response.examples.add(example); }); } private void addExtensions(Request request, Response response) { List<Expression> results = lexicon.extend(request.query, request.source, request.target, 20 * request.maxCount + 20); // rank results through ranker if (ranker != null) { ranker.rerank(results, request.source, request.target); } results.forEach(ex -> { if (response.extensions.size() >= request.maxCount) return; List<Translation> translations = 
lexicon.translate(ex.text, request.source, request.target); if (translations.isEmpty()) return; Translation first = translations.get(0); logger.debug("ex.text={} first={}", ex.text, first); if (first.frequency < request.minFrequency) return; response.extensions.add(ResponseTranslation.create(ex, first)); }); logger.debug("extensions 1st: {}", response.extensions); if (response.extensions.isEmpty()) { results.forEach(ex -> { if (response.extensions.size() >= request.maxCount) return; List<Translation> translations = lexicon.translate(ex.text, request.source, request.target); if (translations.isEmpty()) return; response.extensions.add(ResponseTranslation.create(ex, translations.get(0))); }); logger.debug("extensions 2nd: {}", response.extensions); } } private void addSynonyms(Request request, Response response) { List<Meaning> results = lexicon.lookup(request.query, request.source); results.stream().forEach(r -> { if (r.synonyms.isEmpty()) return; if (r.pos.isEmpty()) { response.synonyms.add(new ResponseSynonymSet("", listSynonyms(r))); } else { r.pos.stream().distinct().forEach(pos -> response.synonyms.add(new ResponseSynonymSet(pos, listSynonyms(r)))); } }); response.synonyms = response.synonyms.stream().distinct().collect(toList()); } private void addWordVector(Request request, Response response) { if (word2vec == null) { response.error = "no word2vec model available"; return; } String query = request.query; Searcher searcher = word2vec.forSearch(); if (!searcher.contains(query)) { query = Lexicon.degrade(query); } try { response.wordVector = searcher.getRawVector(query).asList(); } catch (UnknownWordException e) { response.error = e.getMessage(); } } private void addDistance(Request request, Response response) { if (word2vec == null) { response.error = "no word2vec model available"; return; } Searcher searcher = word2vec.forSearch(); try { String query1 = request.query; String query2 = request.context; if (query2.length() == 0) { String[] splitQuery = 
request.query.split("\\|\\|\\|"); if (splitQuery.length >= 2) { query1 = splitQuery[0]; query2 = splitQuery[1]; } else { response.error = "malformed query"; return; } } if (!searcher.contains(query1)) { query1 = Lexicon.degrade(query1); } if (!searcher.contains(query2)) { query2 = Lexicon.degrade(query2); } response.distance = searcher.cosineDistance(query1, query2); } catch (UnknownWordException e) { response.error = e.getMessage(); } } private void addMatches(Request request, Response response) { if (word2vec == null) { response.error = "no word2vec model available"; return; } String query = request.query; Searcher searcher = word2vec.forSearch(); if (!searcher.contains(query)) { query = Lexicon.degrade(query); } try { List<Match> matches = searcher.getMatches(query, request.maxCount); matches.stream().forEach( m -> response.matches.add(new ResponseMatch(m.match(), m.distance())) ); } catch (UnknownWordException e) { response.error = e.getMessage(); } } private List<String> listSynonyms(Meaning r) { return r.synonyms.stream().map(e -> e.text).collect(toList()); } /* API classes to define JSON serialization */ private static abstract class Jsonable { @Override public String toString() { return new Gson().toJson(this, this.getClass()); } } public static class Request extends Jsonable { public String query = ""; public String source = ""; public String target = ""; public String context = ""; public boolean translate = false; public boolean define = false; public boolean example = false; public boolean extend = false; public boolean synonym = false; public boolean wordvec = false; public boolean similar = false; public boolean distance = false; public double minFrequency = 1e-4; public int maxCount = 10; public int memory = 0; } public static class Response extends Jsonable { public List<ResponseTranslation> translations = new ArrayList<>(); public List<ResponseDefinition> definitions = new ArrayList<>(); public List<ResponseExample> examples = new ArrayList(); 
public List<ResponseTranslation> extensions = new ArrayList<>(); public List<ResponseSynonymSet> synonyms = new ArrayList<>(); public List<ResponseMatch> matches = new ArrayList<>(); public List<Double> wordVector = new ArrayList<>(); public double distance = 0.0; public String error; public static Response error(String message) { Response response = new Response(); response.error = message; return response; } } public static class ResponseTranslation extends Jsonable { String source; String pos; String target; double frequency; ResponseTranslation(String source, String pos, String target, double frequency) { this.source = source; this.pos = pos; this.target = target; this.frequency = frequency; } public static ResponseTranslation create(Expression ex, Translation first) { String pos = first.pos.stream().findFirst().orElse(""); return new ResponseTranslation(ex.text, pos, first.translation.text, first.frequency); } } private static class ResponseDefinition extends Jsonable { String source; String pos; String text; public ResponseDefinition(String source, String pos, String text) { this.source = source; this.pos = pos; this.text = text; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ResponseDefinition that = (ResponseDefinition) o; if (pos != null ? !pos.equals(that.pos) : that.pos != null) return false; if (source != null ? !source.equals(that.source) : that.source != null) return false; if (text != null ? !text.equals(that.text) : that.text != null) return false; return true; } @Override public int hashCode() { int result = source != null ? source.hashCode() : 0; result = 31 * result + (pos != null ? pos.hashCode() : 0); result = 31 * result + (text != null ? 
text.hashCode() : 0); return result; } } static class ResponseSynonymSet extends Jsonable { String pos; List<String> synonyms; public ResponseSynonymSet(String pos, List<String> synonyms) { this.pos = pos; this.synonyms = synonyms; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ResponseSynonymSet that = (ResponseSynonymSet) o; if (!pos.equals(that.pos)) return false; return synonyms.equals(that.synonyms); } @Override public int hashCode() { int result = pos.hashCode(); result = 31 * result + synonyms.hashCode(); return result; } } /** * A similar match based on Word2Vec. */ static class ResponseMatch extends Jsonable { String match; double distance; public ResponseMatch(String match, double distance) { this.match = match; this.distance = distance; } } /* * A span of a sequence */ private static class Span { int start; int length; public Span(int start, int length) { assert length >= 0; this.start = start; this.length = length; } public String[] Slice(String[] sequence) { String[] slice = new String[length]; for (int i = start; i < start + length; i++) { slice[i - start] = sequence[i]; } return slice; } } private static class ResponseExample { String[] source; String[] sourceDelimiters; String[] target; String[] targetDelimiters; int[][] sourceToTarget; Span sourceSpan; Span targetSpan; public ResponseExample(String[] source, String[] sourceDelimiters, String[] target, String[] targetDelimiters, int[][] sourceToTarget, Span sourceSpan, Span targetSpan) { this.source = source; this.sourceDelimiters = sourceDelimiters; this.target = target; this.targetDelimiters = targetDelimiters; this.sourceToTarget = sourceToTarget; this.sourceSpan = sourceSpan; this.targetSpan = targetSpan; } } }
package org.owasp.esapi.codecs;

import org.owasp.esapi.ESAPI;
import org.owasp.esapi.Logger;

// starting with v2.3. Should we upgrade and then add ESAPI logging or stay at 2.2.2 base?
// I think that really depends on how much OWASP ESAPI plans on tracking changes to this
// version vs. if the plan was just to fork from it and maintain OWASP's own version.
// At this point, I think I prefer split from tracking Harder's original, but I'm easily
// persuaded otherwise. - Kevin Wall
public class Base64
{

    /** No options specified. Value is zero. */
    public final static int NO_OPTIONS = 0;

    /** Specify encoding. */
    public final static int ENCODE = 1;

    /** Specify decoding. */
    public final static int DECODE = 0;

    /** Specify that data should be gzip-compressed. */
    public final static int GZIP = 2;

    /** Don't break lines when encoding (violates strict Base64 specification) */
    public final static int DONT_BREAK_LINES = 8;

    /** Encode/decode using the URL- and Filename-safe Base64 dialect ('-' and '_'). */
    public final static int URL_SAFE = 16;

    /** Encode/decode using the "ordered" Base64 dialect (byte-order-preserving). */
    public final static int ORDERED = 32;

    /** Maximum line length (76) of Base64 output. */
    private final static int MAX_LINE_LENGTH = 76;

    /** The equals sign (=) as a byte. */
    private final static byte EQUALS_SIGN = (byte)'=';

    /** The new line character (\n) as a byte. */
    private final static byte NEW_LINE = (byte)'\n';

    /** Preferred encoding. */
    private final static String PREFERRED_ENCODING = "UTF-8";

    /** End of line character. */
    private final static String EOL = System.getProperty("line.separator", "\n");

    // I think I end up not using the BAD_ENCODING indicator.
    //private final static byte BAD_ENCODING    = -9; // Indicates error in encoding
    private final static byte WHITE_SPACE_ENC = -5; // Indicates white space in encoding
    private final static byte EQUALS_SIGN_ENC = -1; // Indicates equals sign in encoding

    private static final Logger logger = ESAPI.getLogger("Base64");

    /** The 64 valid Base64 values.
     */
    //private final static byte[] ALPHABET;
    /* Host platform me be something funny like EBCDIC, so we hard code these values. */
    private final static byte[] _STANDARD_ALPHABET =
    {
        (byte)'A', (byte)'B', (byte)'C', (byte)'D', (byte)'E', (byte)'F', (byte)'G',
        (byte)'H', (byte)'I', (byte)'J', (byte)'K', (byte)'L', (byte)'M', (byte)'N',
        (byte)'O', (byte)'P', (byte)'Q', (byte)'R', (byte)'S', (byte)'T', (byte)'U',
        (byte)'V', (byte)'W', (byte)'X', (byte)'Y', (byte)'Z',
        (byte)'a', (byte)'b', (byte)'c', (byte)'d', (byte)'e', (byte)'f', (byte)'g',
        (byte)'h', (byte)'i', (byte)'j', (byte)'k', (byte)'l', (byte)'m', (byte)'n',
        (byte)'o', (byte)'p', (byte)'q', (byte)'r', (byte)'s', (byte)'t', (byte)'u',
        (byte)'v', (byte)'w', (byte)'x', (byte)'y', (byte)'z',
        (byte)'0', (byte)'1', (byte)'2', (byte)'3', (byte)'4',
        (byte)'5', (byte)'6', (byte)'7', (byte)'8', (byte)'9', (byte)'+', (byte)'/'
    };

    /**
     * Translates a Base64 value to either its 6-bit reconstruction value
     * or a negative number indicating some other meaning.
     **/
    private final static byte[] _STANDARD_DECODABET =
    {
        -9,-9,-9,-9,-9,-9,-9,-9,-9,                 // Decimal  0 -  8
        -5,-5,                                      // Whitespace: Tab and Linefeed
        -9,-9,                                      // Decimal 11 - 12
        -5,                                         // Whitespace: Carriage Return
        -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,     // Decimal 14 - 26
        -9,-9,-9,-9,-9,                             // Decimal 27 - 31
        -5,                                         // Whitespace: Space
        -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,              // Decimal 33 - 42
        62,                                         // Plus sign at decimal 43
        -9,-9,-9,                                   // Decimal 44 - 46
        63,                                         // Slash at decimal 47
        52,53,54,55,56,57,58,59,60,61,              // Numbers zero through nine
        -9,-9,-9,                                   // Decimal 58 - 60
        -1,                                         // Equals sign at decimal 61
        -9,-9,-9,                                   // Decimal 62 - 64
        0,1,2,3,4,5,6,7,8,9,10,11,12,13,            // Letters 'A' through 'N'
        14,15,16,17,18,19,20,21,22,23,24,25,        // Letters 'O' through 'Z'
        -9,-9,-9,-9,-9,-9,                          // Decimal 91 - 96
        26,27,28,29,30,31,32,33,34,35,36,37,38,     // Letters 'a' through 'm'
        39,40,41,42,43,44,45,46,47,48,49,50,51,     // Letters 'n' through 'z'
        -9,-9,-9,-9                                 // Decimal 123 - 126
        /* Decimal 127 - 255 entries (all -9) are commented out in the original;
           decode() crops input with & 0x7f so they are never indexed. */
    };

    /**
     * The URL- and Filename-safe Base64 alphabet: '+' and '/' are replaced by
     * '-' and '_' so encoded output needs no URL escaping.
     */
    private final static byte[] _URL_SAFE_ALPHABET =
    {
        (byte)'A', (byte)'B', (byte)'C', (byte)'D', (byte)'E', (byte)'F', (byte)'G',
        (byte)'H', (byte)'I', (byte)'J', (byte)'K', (byte)'L', (byte)'M', (byte)'N',
        (byte)'O', (byte)'P', (byte)'Q', (byte)'R', (byte)'S', (byte)'T', (byte)'U',
        (byte)'V', (byte)'W', (byte)'X', (byte)'Y', (byte)'Z',
        (byte)'a', (byte)'b', (byte)'c', (byte)'d', (byte)'e', (byte)'f', (byte)'g',
        (byte)'h', (byte)'i', (byte)'j', (byte)'k', (byte)'l', (byte)'m', (byte)'n',
        (byte)'o', (byte)'p', (byte)'q', (byte)'r', (byte)'s', (byte)'t', (byte)'u',
        (byte)'v', (byte)'w', (byte)'x', (byte)'y', (byte)'z',
        (byte)'0', (byte)'1', (byte)'2', (byte)'3', (byte)'4',
        (byte)'5', (byte)'6', (byte)'7', (byte)'8', (byte)'9', (byte)'-', (byte)'_'
    };

    /**
     * Used in decoding URL- and Filename-safe dialects of Base64.
     */
    private final static byte[] _URL_SAFE_DECODABET =
    {
        -9,-9,-9,-9,-9,-9,-9,-9,-9,                 // Decimal  0 -  8
        -5,-5,                                      // Whitespace: Tab and Linefeed
        -9,-9,                                      // Decimal 11 - 12
        -5,                                         // Whitespace: Carriage Return
        -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,     // Decimal 14 - 26
        -9,-9,-9,-9,-9,                             // Decimal 27 - 31
        -5,                                         // Whitespace: Space
        -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,              // Decimal 33 - 42
        -9,                                         // Plus sign at decimal 43
        -9,                                         // Decimal 44
        62,                                         // Minus sign at decimal 45
        -9,                                         // Decimal 46
        -9,                                         // Slash at decimal 47
        52,53,54,55,56,57,58,59,60,61,              // Numbers zero through nine
        -9,-9,-9,                                   // Decimal 58 - 60
        -1,                                         // Equals sign at decimal 61
        -9,-9,-9,                                   // Decimal 62 - 64
        0,1,2,3,4,5,6,7,8,9,10,11,12,13,            // Letters 'A' through 'N'
        14,15,16,17,18,19,20,21,22,23,24,25,        // Letters 'O' through 'Z'
        -9,-9,-9,-9,                                // Decimal 91 - 94
        63,                                         // Underscore at decimal 95
        -9,                                         // Decimal 96
        26,27,28,29,30,31,32,33,34,35,36,37,38,     // Letters 'a' through 'm'
        39,40,41,42,43,44,45,46,47,48,49,50,51,     // Letters 'n' through 'z'
        -9,-9,-9,-9                                 // Decimal 123 - 126
        /* Decimal 127 - 255 entries (all -9) are commented out in the original;
           decode() crops input with & 0x7f so they are never indexed. */
    };

    /**
     * The "ordered" Base64 alphabet: characters appear in ascending ASCII order,
     * so lexicographic order of encoded strings matches byte order of the data.
     */
    private final static byte[] _ORDERED_ALPHABET =
    {
        (byte)'-',
        (byte)'0', (byte)'1', (byte)'2', (byte)'3', (byte)'4',
        (byte)'5', (byte)'6', (byte)'7', (byte)'8', (byte)'9',
        (byte)'A', (byte)'B', (byte)'C', (byte)'D', (byte)'E', (byte)'F', (byte)'G',
        (byte)'H', (byte)'I', (byte)'J', (byte)'K', (byte)'L', (byte)'M', (byte)'N',
        (byte)'O', (byte)'P', (byte)'Q', (byte)'R', (byte)'S', (byte)'T', (byte)'U',
        (byte)'V', (byte)'W', (byte)'X', (byte)'Y', (byte)'Z',
        (byte)'_',
        (byte)'a', (byte)'b', (byte)'c', (byte)'d', (byte)'e', (byte)'f', (byte)'g',
        (byte)'h', (byte)'i', (byte)'j', (byte)'k', (byte)'l', (byte)'m', (byte)'n',
        (byte)'o', (byte)'p', (byte)'q', (byte)'r', (byte)'s', (byte)'t', (byte)'u',
        (byte)'v', (byte)'w', (byte)'x', (byte)'y', (byte)'z'
    };

    /**
     * Used in decoding the "ordered" dialect of Base64.
     */
    private final static byte[] _ORDERED_DECODABET =
    {
        -9,-9,-9,-9,-9,-9,-9,-9,-9,                 // Decimal  0 -  8
        -5,-5,                                      // Whitespace: Tab and Linefeed
        -9,-9,                                      // Decimal 11 - 12
        -5,                                         // Whitespace: Carriage Return
        -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,-9,     // Decimal 14 - 26
        -9,-9,-9,-9,-9,                             // Decimal 27 - 31
        -5,                                         // Whitespace: Space
        -9,-9,-9,-9,-9,-9,-9,-9,-9,-9,              // Decimal 33 - 42
        -9,                                         // Plus sign at decimal 43
        -9,                                         // Decimal 44
        0,                                          // Minus sign at decimal 45
        -9,                                         // Decimal 46
        -9,                                         // Slash at decimal 47
        1,2,3,4,5,6,7,8,9,10,                       // Numbers zero through nine
        -9,-9,-9,                                   // Decimal 58 - 60
        -1,                                         // Equals sign at decimal 61
        -9,-9,-9,                                   // Decimal 62 - 64
        11,12,13,14,15,16,17,18,19,20,21,22,23,     // Letters 'A' through 'M'
        24,25,26,27,28,29,30,31,32,33,34,35,36,     // Letters 'N' through 'Z'
        -9,-9,-9,-9,                                // Decimal 91 - 94
        37,                                         // Underscore at decimal 95
        -9,                                         // Decimal 96
        38,39,40,41,42,43,44,45,46,47,48,49,50,     // Letters 'a' through 'm'
        51,52,53,54,55,56,57,58,59,60,61,62,63,     // Letters 'n' through 'z'
        -9,-9,-9,-9                                 // Decimal 123 - 126
        /* Decimal 127 - 255 entries (all -9) are commented out in the original;
           decode() crops input with & 0x7f so they are never indexed. */
    };

    /**
     * Returns one of the _SOMETHING_ALPHABET byte arrays depending on
     * the options specified.
     * It's possible, though silly, to specify ORDERED and URLSAFE
     * in which case one of them will be picked, though there is
     * no guarantee as to which one will be picked.
     */
    private final static byte[] getAlphabet( int options )
    {
        if( (options & URL_SAFE) == URL_SAFE ) return _URL_SAFE_ALPHABET;
        else if( (options & ORDERED) == ORDERED ) return _ORDERED_ALPHABET;
        else return _STANDARD_ALPHABET;
    }   // end getAlphabet

    /**
     * Returns one of the _SOMETHING_DECODABET byte arrays depending on
     * the options specified.
     * It's possible, though silly, to specify ORDERED and URL_SAFE
     * in which case one of them will be picked, though there is
     * no guarantee as to which one will be picked.
     */
    private final static byte[] getDecodabet( int options )
    {
        if( (options & URL_SAFE) == URL_SAFE ) return _URL_SAFE_DECODABET;
        else if( (options & ORDERED) == ORDERED ) return _ORDERED_DECODABET;
        else return _STANDARD_DECODABET;
    }   // end getDecodabet (was mislabeled "end getAlphabet")

    /** Defeats instantiation. */
    private Base64(){}

    /**
     * Encodes or decodes two files from the command line;
     * <strong>feel free to delete this method (in fact you probably should)
     * if you're embedding this code into a larger program</strong>.
     * @param args
     */
    public final static void main( String[] args )
    {
        if( args.length < 3 ){
            usage("Not enough arguments.");
        }   // end if: args.length < 3
        else {
            String flag = args[0];
            String infile = args[1];
            String outfile = args[2];
            if( flag.equals( "-e" ) ){
                Base64.encodeFileToFile( infile, outfile );
            }   // end if: encode
            else if( flag.equals( "-d" ) ) {
                Base64.decodeFileToFile( infile, outfile );
            }   // end else if: decode
            else {
                usage( "Unknown flag: " + flag );
            }   // end else
        }   // end else
    }   // end main

    /**
     * Prints command line usage.
     *
     * @param msg A message to include with usage info.
     */
    private final static void usage( String msg )
    {
        System.err.println( msg );
        System.err.println( "Usage: java Base64 -e|-d inputfile outputfile" );
    }   // end usage

    /**
     * Encodes up to the first three bytes of array <var>threeBytes</var>
     * and returns a four-byte array in Base64 notation.
     * The actual number of significant bytes in your array is
     * given by <var>numSigBytes</var>.
     * The array <var>threeBytes</var> needs only be as big as
     * <var>numSigBytes</var>.
     * Code can reuse a byte array by passing a four-byte array as <var>b4</var>.
     *
     * @param b4 A reusable byte array to reduce array instantiation
     * @param threeBytes the array to convert
     * @param numSigBytes the number of significant bytes in your array
     * @return four byte array in Base64 notation.
     * @since 1.5.1
     */
    private static byte[] encode3to4( byte[] b4, byte[] threeBytes, int numSigBytes, int options )
    {
        encode3to4( threeBytes, 0, numSigBytes, b4, 0, options );
        return b4;
    }   // end encode3to4

    /**
     * <p>Encodes up to three bytes of the array <var>source</var>
     * and writes the resulting four Base64 bytes to <var>destination</var>.
     * The source and destination arrays can be manipulated
     * anywhere along their length by specifying
     * <var>srcOffset</var> and <var>destOffset</var>.
     * This method does not check to make sure your arrays
     * are large enough to accomodate <var>srcOffset</var> + 3 for
     * the <var>source</var> array or <var>destOffset</var> + 4 for
     * the <var>destination</var> array.
     * The actual number of significant bytes in your array is
     * given by <var>numSigBytes</var>.</p>
     * <p>This is the lowest level of the encoding methods with
     * all possible parameters.</p>
     *
     * @param source the array to convert
     * @param srcOffset the index where conversion begins
     * @param numSigBytes the number of significant bytes in your array
     * @param destination the array to hold the conversion
     * @param destOffset the index where output will be put
     * @return the <var>destination</var> array
     * @since 1.3
     */
    private static byte[] encode3to4(
     byte[] source, int srcOffset, int numSigBytes,
     byte[] destination, int destOffset, int options )
    {
        byte[] ALPHABET = getAlphabet( options );

        //           1         2         3
        // 01234567890123456789012345678901 Bit position
        //      >>18     >>12     >> 6 >> 0 Right shift necessary
        //      0x3f     0x3f     0x3f      Additional AND

        // Create buffer with zero-padding if there are only one or two
        // significant bytes passed in the array.
        // We have to shift left 24 in order to flush out the 1's that appear
        // when Java treats a value as negative that is cast from a byte to an int.
        int inBuff =   ( numSigBytes > 0 ? ((source[ srcOffset     ] << 24) >>>  8) : 0 )
                     | ( numSigBytes > 1 ? ((source[ srcOffset + 1 ] << 24) >>> 16) : 0 )
                     | ( numSigBytes > 2 ? ((source[ srcOffset + 2 ] << 24) >>> 24) : 0 );

        switch( numSigBytes )
        {
            case 3:
                destination[ destOffset     ] = ALPHABET[ (inBuff >>> 18)        ];
                destination[ destOffset + 1 ] = ALPHABET[ (inBuff >>> 12) & 0x3f ];
                destination[ destOffset + 2 ] = ALPHABET[ (inBuff >>>  6) & 0x3f ];
                destination[ destOffset + 3 ] = ALPHABET[ (inBuff       ) & 0x3f ];
                return destination;

            case 2:
                destination[ destOffset     ] = ALPHABET[ (inBuff >>> 18)        ];
                destination[ destOffset + 1 ] = ALPHABET[ (inBuff >>> 12) & 0x3f ];
                destination[ destOffset + 2 ] = ALPHABET[ (inBuff >>>  6) & 0x3f ];
                destination[ destOffset + 3 ] = EQUALS_SIGN;
                return destination;

            case 1:
                destination[ destOffset     ] = ALPHABET[ (inBuff >>> 18)        ];
                destination[ destOffset + 1 ] = ALPHABET[ (inBuff >>> 12) & 0x3f ];
                destination[ destOffset + 2 ] = EQUALS_SIGN;
                destination[ destOffset + 3 ] = EQUALS_SIGN;
                return destination;

            default:
                return destination;
        }   // end switch
    }   // end encode3to4

    /**
     * Serializes an object and returns the Base64-encoded
     * version of that serialized object. If the object
     * cannot be serialized or there is another error,
     * the method will return <tt>null</tt>.
     * The object is not GZip-compressed before being encoded.
     *
     * @param serializableObject The object to encode
     * @return The Base64-encoded object
     * @since 1.4
     */
    public static String encodeObject( java.io.Serializable serializableObject )
    {
        return encodeObject( serializableObject, NO_OPTIONS );
    }   // end encodeObject

    /**
     * Serializes an object and returns the Base64-encoded
     * version of that serialized object. If the object
     * cannot be serialized or there is another error,
     * the method will return <tt>null</tt>.
     * <p>
     * Valid options:<pre>
     *   GZIP: gzip-compresses object before encoding it.
     *   DONT_BREAK_LINES: don't break lines at 76 characters
     *     <i>Note: Technically, this makes your encoding non-compliant.</i>
     * </pre>
     * <p>
     * Example: <code>encodeObject( myObj, Base64.GZIP )</code> or
     * <p>
     * Example: <code>encodeObject( myObj, Base64.GZIP | Base64.DONT_BREAK_LINES )</code>
     *
     * @param serializableObject The object to encode
     * @param options Specified options
     * @return The Base64-encoded object
     * @see Base64#GZIP
     * @see Base64#DONT_BREAK_LINES
     * @since 2.0
     */
    public static String encodeObject( java.io.Serializable serializableObject, int options )
    {
        // Streams
        java.io.ByteArrayOutputStream  baos  = null;
        java.io.OutputStream           b64os = null;
        java.io.ObjectOutputStream     oos   = null;
        java.util.zip.GZIPOutputStream gzos  = null;

        // Isolate options
        int gzip = (options & GZIP);
        //int dontBreakLines = (options & DONT_BREAK_LINES);

        try
        {
            // ObjectOutputStream -> (GZIP) -> Base64 -> ByteArrayOutputStream
            baos  = new java.io.ByteArrayOutputStream();
            b64os = new Base64.OutputStream( baos, ENCODE | options );

            // GZip?
            if( gzip == GZIP )
            {
                gzos = new java.util.zip.GZIPOutputStream( b64os );
                oos  = new java.io.ObjectOutputStream( gzos );
            }   // end if: gzip
            else
                oos = new java.io.ObjectOutputStream( b64os );

            oos.writeObject( serializableObject );
        }   // end try
        catch( java.io.IOException e )
        {
            logger.error( Logger.SECURITY_FAILURE, "Problem writing object", e );
            return null;
        }   // end catch
        finally
        {
            // Close the whole chain; each close is best-effort (a null stream
            // just throws an ignored NPE here).
            try{ oos.close();   } catch( Exception e ){}
            try{ gzos.close();  } catch( Exception e ){}
            try{ b64os.close(); } catch( Exception e ){}
            try{ baos.close();  } catch( Exception e ){}
        }   // end finally

        // Return value according to relevant encoding.
        try
        {
            return new String( baos.toByteArray(), PREFERRED_ENCODING );
        }   // end try
        catch (java.io.UnsupportedEncodingException uue)
        {
            return new String( baos.toByteArray() );
        }   // end catch

    }   // end encode

    /**
     * Encodes a byte array into Base64 notation.
     * Does not GZip-compress data.
     *
     * @param source The data to convert
     * @return The Base64-encoded resulting string
     * @since 1.4
     */
    public static String encodeBytes( byte[] source )
    {
        return encodeBytes( source, 0, source.length, NO_OPTIONS );
    }   // end encodeBytes

    /**
     * Encodes a byte array into Base64 notation.
     * <p>
     * Valid options:<pre>
     *   GZIP: gzip-compresses object before encoding it.
     *   DONT_BREAK_LINES: don't break lines at 76 characters
     *     <i>Note: Technically, this makes your encoding non-compliant.</i>
     * </pre>
     * <p>
     * Example: <code>encodeBytes( myData, Base64.GZIP )</code> or
     * <p>
     * Example: <code>encodeBytes( myData, Base64.GZIP | Base64.DONT_BREAK_LINES )</code>
     *
     *
     * @param source The data to convert
     * @param options Specified options
     * @return The Base64-encoded resulting string
     * @see Base64#GZIP
     * @see Base64#DONT_BREAK_LINES
     * @since 2.0
     */
    public static String encodeBytes( byte[] source, int options )
    {
        return encodeBytes( source, 0, source.length, options );
    }   // end encodeBytes

    /**
     * Encodes a byte array into Base64 notation.
     * Does not GZip-compress data.
     *
     * @param source The data to convert
     * @param off Offset in array where conversion should begin
     * @param len Length of data to convert
     * @return The Base64-encoded resulting string
     * @since 1.4
     */
    public static String encodeBytes( byte[] source, int off, int len )
    {
        return encodeBytes( source, off, len, NO_OPTIONS );
    }   // end encodeBytes

    /**
     * Encodes a byte array into Base64 notation.
     * <p>
     * Valid options:<pre>
     *   GZIP: gzip-compresses object before encoding it.
     *   DONT_BREAK_LINES: don't break lines at 76 characters
     *     <i>Note: Technically, this makes your encoding non-compliant.</i>
     * </pre>
     * <p>
     * Example: <code>encodeBytes( myData, Base64.GZIP )</code> or
     * <p>
     * Example: <code>encodeBytes( myData, Base64.GZIP | Base64.DONT_BREAK_LINES )</code>
     *
     *
     * @param source The data to convert
     * @param off Offset in array where conversion should begin
     * @param len Length of data to convert
     * @param options alphabet type is pulled from this (standard, url-safe, ordered)
     * @return The Base64-encoded resulting string
     * @see Base64#GZIP
     * @see Base64#DONT_BREAK_LINES
     * @since 2.0
     */
    public static String encodeBytes( byte[] source, int off, int len, int options )
    {
        // Isolate options
        int dontBreakLines = ( options & DONT_BREAK_LINES );
        int gzip           = ( options & GZIP );

        // Compress?
        if( gzip == GZIP )
        {
            java.io.ByteArrayOutputStream  baos  = null;
            java.util.zip.GZIPOutputStream gzos  = null;
            Base64.OutputStream            b64os = null;

            try
            {
                // GZip -> Base64 -> ByteArray
                baos  = new java.io.ByteArrayOutputStream();
                b64os = new Base64.OutputStream( baos, ENCODE | options );
                gzos  = new java.util.zip.GZIPOutputStream( b64os );

                gzos.write( source, off, len );
                gzos.close();
            }   // end try
            catch( java.io.IOException e )
            {
                logger.error( Logger.SECURITY_FAILURE, "Problem writing gzip stream", e );
                return null;
            }   // end catch
            finally
            {
                // Best-effort close of the whole chain.
                try{ gzos.close();  } catch( Exception e ){}
                try{ b64os.close(); } catch( Exception e ){}
                try{ baos.close();  } catch( Exception e ){}
            }   // end finally

            // Return value according to relevant encoding.
            try
            {
                return new String( baos.toByteArray(), PREFERRED_ENCODING );
            }   // end try
            catch (java.io.UnsupportedEncodingException uue)
            {
                return new String( baos.toByteArray() );
            }   // end catch
        }   // end if: compress

        // Else, don't compress. Better not to use streams at all then.
        else
        {
            // Convert option to boolean in way that code likes it.
boolean breakLines = dontBreakLines == 0; int len43 = len * 4 / 3; byte[] outBuff = new byte[ ( len43 ) // Main 4:3 + ( (len % 3) > 0 ? 4 : 0 ) // Account for padding + (breakLines ? ( len43 / MAX_LINE_LENGTH ) : 0) ]; // New lines int d = 0; int e = 0; int len2 = len - 2; int lineLength = 0; for( ; d < len2; d+=3, e+=4 ) { encode3to4( source, d+off, 3, outBuff, e, options ); lineLength += 4; if( breakLines && lineLength == MAX_LINE_LENGTH ) { outBuff[e+4] = NEW_LINE; e++; lineLength = 0; } // end if: end of line } // en dfor: each piece of array if( d < len ) { encode3to4( source, d+off, len - d, outBuff, e, options ); e += 4; } // end if: some padding needed // Return value according to relevant encoding. try { return new String( outBuff, 0, e, PREFERRED_ENCODING ); } // end try catch (java.io.UnsupportedEncodingException uue) { return new String( outBuff, 0, e ); } // end catch } // end else: don't compress } // end encodeBytes /** * Decodes four bytes from array <var>source</var> * and writes the resulting bytes (up to three of them) * to <var>destination</var>. * The source and destination arrays can be manipulated * anywhere along their length by specifying * <var>srcOffset</var> and <var>destOffset</var>. * This method does not check to make sure your arrays * are large enough to accomodate <var>srcOffset</var> + 4 for * the <var>source</var> array or <var>destOffset</var> + 3 for * the <var>destination</var> array. * This method returns the actual number of bytes that * were converted from the Base64 encoding. 
 * <p>This is the lowest level of the decoding methods with
 * all possible parameters.</p>
 *
 *
 * @param source the array to convert
 * @param srcOffset the index where conversion begins
 * @param destination the array to hold the conversion
 * @param destOffset the index where output will be put
 * @param options alphabet type is pulled from this (standard, url-safe, ordered)
 * @return the number of decoded bytes converted
 * @since 1.3
 */
private static int decode4to3( byte[] source, int srcOffset, byte[] destination, int destOffset, int options )
{
    byte[] DECODABET = getDecodabet( options );

    // Example: Dk==  -- two pad chars, so the quartet decodes to one byte.
    if( source[ srcOffset + 2] == EQUALS_SIGN )
    {
        // Two ways to do the same thing. Don't know which way I like best.
        //int outBuff =   ( ( DECODABET[ source[ srcOffset     ] ] << 24 ) >>>  6 )
        //              | ( ( DECODABET[ source[ srcOffset + 1 ] ] << 24 ) >>> 12 );
        int outBuff =   ( ( DECODABET[ source[ srcOffset ] ] & 0xFF ) << 18 )
                      | ( ( DECODABET[ source[ srcOffset + 1] ] & 0xFF ) << 12 );

        destination[ destOffset ] = (byte)( outBuff >>> 16 );
        return 1;
    }
    // Example: DkL=  -- one pad char, so the quartet decodes to two bytes.
    else if( source[ srcOffset + 3 ] == EQUALS_SIGN )
    {
        // Two ways to do the same thing. Don't know which way I like best.
        //int outBuff =   ( ( DECODABET[ source[ srcOffset     ] ] << 24 ) >>>  6 )
        //              | ( ( DECODABET[ source[ srcOffset + 1 ] ] << 24 ) >>> 12 )
        //              | ( ( DECODABET[ source[ srcOffset + 2 ] ] << 24 ) >>> 18 );
        int outBuff =   ( ( DECODABET[ source[ srcOffset ] ] & 0xFF ) << 18 )
                      | ( ( DECODABET[ source[ srcOffset + 1 ] ] & 0xFF ) << 12 )
                      | ( ( DECODABET[ source[ srcOffset + 2 ] ] & 0xFF ) << 6 );

        destination[ destOffset ]     = (byte)( outBuff >>> 16 );
        destination[ destOffset + 1 ] = (byte)( outBuff >>> 8 );
        return 2;
    }
    // Example: DkLE  -- no padding: a full quartet decodes to three bytes.
    else
    {
        try{
            // Two ways to do the same thing. Don't know which way I like best.
            //int outBuff =   ( ( DECODABET[ source[ srcOffset     ] ] << 24 ) >>>  6 )
            //              | ( ( DECODABET[ source[ srcOffset + 1 ] ] << 24 ) >>> 12 )
            //              | ( ( DECODABET[ source[ srcOffset + 2 ] ] << 24 ) >>> 18 )
            //              | ( ( DECODABET[ source[ srcOffset + 3 ] ] << 24 ) >>> 24 );
            int outBuff =   ( ( DECODABET[ source[ srcOffset ] ] & 0xFF ) << 18 )
                          | ( ( DECODABET[ source[ srcOffset + 1 ] ] & 0xFF ) << 12 )
                          | ( ( DECODABET[ source[ srcOffset + 2 ] ] & 0xFF ) << 6)
                          | ( ( DECODABET[ source[ srcOffset + 3 ] ] & 0xFF ) );

            destination[ destOffset ]     = (byte)( outBuff >> 16 );
            destination[ destOffset + 1 ] = (byte)( outBuff >> 8 );
            destination[ destOffset + 2 ] = (byte)( outBuff );

            return 3;
        }catch( Exception e){
            // CHECKME: I replaced the 5 separate logger.error() calls that used to be here with a single
            // logger.error() call so they can't become interleaved with other log entries from other threads.
            // Normally this would have placed log entries on separate lines, so I also added line terminators
            // here as well. Somebody should check the formatting; I'm also OK with removing all the line
            // terminators, in which case the declaration for EOL should be removed as well.
            // - Kevin Wall
            StringBuilder sb = new StringBuilder("Problem writing object:");
            sb.append(EOL);
            sb.append( source[srcOffset] ).append(": ").append( ( DECODABET[ source[ srcOffset ] ] ) ).append(EOL);
            sb.append( source[srcOffset+1] ).append(": ").append( ( DECODABET[ source[ srcOffset + 1 ] ] ) ).append(EOL);
            sb.append( source[srcOffset+2] ).append(": ").append( ( DECODABET[ source[ srcOffset + 2 ] ] ) ).append(EOL);
            sb.append( source[srcOffset+3] ).append(": ").append( ( DECODABET[ source[ srcOffset + 3 ] ] ) ).append(EOL);
            logger.error( Logger.SECURITY_FAILURE, sb.toString(), e );
            // NOTE(review): -1 signals failure to the caller; callers in this file do not appear to
            // distinguish it from a valid count -- verify call sites before relying on it.
            return -1;
        } // end catch
    }
} // end decodeToBytes

/**
 * Very low-level access to decoding ASCII characters in
 * the form of a byte array. Does not support automatically
 * gunzipping or any other "fancy" features.
 *
 * <p>Returns {@code null} (after logging) when a byte that is neither white space,
 * padding, nor a valid alphabet character is encountered.</p>
 *
 * @param source The Base64 encoded data
 * @param off The offset of where to begin decoding
 * @param len The length of characters to decode
 * @param options alphabet type (standard, url-safe, ordered)
 * @return decoded data
 * @since 1.3
 */
public static byte[] decode( byte[] source, int off, int len, int options )
{
    byte[] DECODABET = getDecodabet( options );

    int len34 = len * 3 / 4;
    byte[] outBuff = new byte[ len34 ]; // Upper limit on size of output
    int outBuffPosn = 0;

    byte[] b4 = new byte[4];
    int b4Posn = 0;
    int i = 0;
    byte sbiCrop = 0;
    byte sbiDecode = 0;
    for( i = off; i < off+len; i++ )
    {
        sbiCrop = (byte)(source[i] & 0x7f); // Only the low seven bits
        sbiDecode = DECODABET[ sbiCrop ];

        if( sbiDecode >= WHITE_SPACE_ENC ) // White space, Equals sign or better
        {
            if( sbiDecode >= EQUALS_SIGN_ENC )
            {
                b4[ b4Posn++ ] = sbiCrop;
                if( b4Posn > 3 )
                {
                    // A full quartet is buffered; decode it into the output.
                    outBuffPosn += decode4to3( b4, 0, outBuff, outBuffPosn, options );
                    b4Posn = 0;

                    // If that was the equals sign, break out of 'for' loop
                    if( sbiCrop == EQUALS_SIGN )
                        break;
                } // end if: quartet built
            } // end if: equals sign or better
        } // end if: white space, equals sign or better
        else
        {
            logger.error( Logger.SECURITY_FAILURE, "Bad Base64 input character at " + i + ": " + source[i] + "(decimal)" );
            return null;
        } // end else:
    } // each input character

    byte[] out = new byte[ outBuffPosn ];
    System.arraycopy( outBuff, 0, out, 0, outBuffPosn );
    return out;
} // end decode

/**
 * Decodes data from Base64 notation, automatically
 * detecting gzip-compressed data and decompressing it.
 *
 * @param s the string to decode
 * @return the decoded data
 * @since 1.4
 */
public static byte[] decode( String s )
{
    return decode( s, NO_OPTIONS );
}

/**
 * Decodes data from Base64 notation, automatically
 * detecting gzip-compressed data and decompressing it.
 *
 * @param s the string to decode
 * @param options encode options such as URL_SAFE
 * @return the decoded data
 * @since 1.4
 */
public static byte[] decode( String s, int options )
{
    byte[] bytes;
    try
    {
        bytes = s.getBytes( PREFERRED_ENCODING );
    }
    catch( java.io.UnsupportedEncodingException uee )
    {
        bytes = s.getBytes(); // Uses native encoding
        // CHECKME: Is this correct? I think it should be a warning instead of an error since nothing
        // is re-thrown. I do think that *some* sort of logging is in order here especially since UTF-8
        // should always be available on all platforms. If it's not, then all bets are off on your
        // runtime env.
        // - Kevin Wall
        logger.warning( Logger.SECURITY_FAILURE, "Problem decoding string using " + PREFERRED_ENCODING + "; substituting native platform encoding instead", uee );
    }

    // Decode
    bytes = decode( bytes, 0, bytes.length, options );

    // Check to see if it's gzip-compressed
    // GZIP Magic Two-Byte Number: 0x8b1f (35615)
    if( bytes != null && bytes.length >= 4 )
    {
        int head = ((int)bytes[0] & 0xff) | ((bytes[1] << 8) & 0xff00);
        if( java.util.zip.GZIPInputStream.GZIP_MAGIC == head )
        {
            java.io.ByteArrayInputStream bais = null;
            java.util.zip.GZIPInputStream gzis = null;
            java.io.ByteArrayOutputStream baos = null;
            byte[] buffer = new byte[2048];
            int length = 0;

            try
            {
                baos = new java.io.ByteArrayOutputStream();
                bais = new java.io.ByteArrayInputStream( bytes );
                gzis = new java.util.zip.GZIPInputStream( bais );

                while( ( length = gzis.read( buffer ) ) >= 0 )
                {
                    baos.write(buffer,0,length);
                } // end while: reading input

                // No error? Get new bytes.
                bytes = baos.toByteArray();
            } // end try
            catch( java.io.IOException e )
            {
                // Just return originally-decoded bytes
            } // end catch
            finally
            {
                try{ baos.close(); } catch( Exception e ){}
                try{ gzis.close(); } catch( Exception e ){}
                try{ bais.close(); } catch( Exception e ){}
            } // end finally
        } // end if: gzipped
    } // end if: bytes.length >= 4 (the magic-number check itself only reads bytes[0..1])

    return bytes;
} // end decode

/**
 * Attempts to decode Base64 data and deserialize a Java
 * Object within. Returns <tt>null</tt> if there was an error.
 *
 * <p>NOTE(review): this performs native Java deserialization of the decoded
 * bytes. Deserializing untrusted input is dangerous -- confirm that callers
 * only feed trusted data here.</p>
 *
 * @param encodedObject The Base64 data to decode
 * @return The decoded and deserialized object
 * @since 1.5
 */
public static Object decodeToObject( String encodedObject )
{
    // Decode and gunzip if necessary
    byte[] objBytes = decode( encodedObject );

    java.io.ByteArrayInputStream bais = null;
    java.io.ObjectInputStream ois = null;
    Object obj = null;

    try
    {
        bais = new java.io.ByteArrayInputStream( objBytes );
        ois = new java.io.ObjectInputStream( bais );

        obj = ois.readObject();
    } // end try
    catch( java.io.IOException e )
    {
        logger.error( Logger.SECURITY_FAILURE, "Problem reading object", e );
        obj = null;
    } // end catch
    catch( java.lang.ClassNotFoundException e )
    {
        logger.error( Logger.SECURITY_FAILURE, "Problem reading object", e );
        obj = null;
    } // end catch
    finally
    {
        // NOTE(review): the underlying stream is closed before the wrapping
        // ObjectInputStream; harmless for byte-array streams, but unusual order.
        try{ bais.close(); } catch( Exception e ){}
        try{ ois.close(); } catch( Exception e ){}
    } // end finally

    return obj;
} // end decodeObject

/**
 * Convenience method for encoding data to a file.
 *
 * @param dataToEncode byte array of data to encode in base64 form
 * @param filename Filename for saving encoded data
 * @return <tt>true</tt> if successful, <tt>false</tt> otherwise
 *
 * @since 2.1
 */
public static boolean encodeToFile( byte[] dataToEncode, String filename )
{
    boolean success = false;
    Base64.OutputStream bos = null;
    try
    {
        bos = new Base64.OutputStream( new java.io.FileOutputStream( filename ), Base64.ENCODE );
        bos.write( dataToEncode );
        success = true;
    } // end try
    catch( java.io.IOException e )
    {
        success = false;
    } // end catch: IOException
    finally
    {
        // If construction failed, bos is null and the NPE is swallowed here.
        try{ bos.close(); } catch( Exception e ){}
    } // end finally

    return success;
} // end encodeToFile

/**
 * Convenience method for decoding data to a file.
 *
 * @param dataToDecode Base64-encoded data as a string
 * @param filename Filename for saving decoded data
 * @return <tt>true</tt> if successful, <tt>false</tt> otherwise
 *
 * @since 2.1
 */
public static boolean decodeToFile( String dataToDecode, String filename )
{
    boolean success = false;
    Base64.OutputStream bos = null;
    try
    {
        bos = new Base64.OutputStream( new java.io.FileOutputStream( filename ), Base64.DECODE );
        bos.write( dataToDecode.getBytes( PREFERRED_ENCODING ) );
        success = true;
    } // end try
    catch( java.io.IOException e )
    {
        success = false;
    } // end catch: IOException
    finally
    {
        try{ bos.close(); } catch( Exception e ){}
    } // end finally

    return success;
} // end decodeToFile

/**
 * Convenience method for reading a base64-encoded
 * file and decoding it.
 *
 * @param filename Filename for reading encoded data
 * @return decoded byte array or null if unsuccessful
 *
 * @since 2.1
 */
public static byte[] decodeFromFile( String filename )
{
    byte[] decodedData = null;
    Base64.InputStream bis = null;
    try
    {
        // Set up some useful variables
        java.io.File file = new java.io.File( filename );
        byte[] buffer = null;
        int length = 0;
        int numBytes = 0;

        // Check for size of file
        if( file.length() > Integer.MAX_VALUE )
        {
            logger.error( Logger.SECURITY_FAILURE, "File is too big for this convenience method (" + file.length() + " bytes)." );
            return null;
        } // end if: file too big for int index
        buffer = new byte[ (int)file.length() ];

        // Open a stream
        bis = new Base64.InputStream( new java.io.BufferedInputStream( new java.io.FileInputStream( file ) ), Base64.DECODE );

        // Read until done
        while( ( numBytes = bis.read( buffer, length, 4096 ) ) >= 0 )
            length += numBytes;

        // Save in a variable to return
        decodedData = new byte[ length ];
        System.arraycopy( buffer, 0, decodedData, 0, length );
    } // end try
    catch( java.io.IOException e )
    {
        logger.error( Logger.SECURITY_FAILURE, "Error decoding from file " + filename, e );
    } // end catch: IOException
    finally
    {
        try{ if (bis != null ) bis.close(); } catch( Exception e) {}
    } // end finally

    return decodedData;
} // end decodeFromFile

/**
 * Convenience method for reading a binary file
 * and base64-encoding it.
 *
 * @param filename Filename for reading binary data
 * @return base64-encoded string or null if unsuccessful
 *
 * @since 2.1
 */
public static String encodeFromFile( String filename )
{
    String encodedData = null;
    Base64.InputStream bis = null;
    try
    {
        // Set up some useful variables
        java.io.File file = new java.io.File( filename );
        byte[] buffer = new byte[ Math.max((int)(file.length() * 1.4),40) ]; // Need max() for math on small files (v2.2.1)
        int length = 0;
        int numBytes = 0;

        // Open a stream
        bis = new Base64.InputStream( new java.io.BufferedInputStream( new java.io.FileInputStream( file ) ), Base64.ENCODE );

        // Read until done
        while( ( numBytes = bis.read( buffer, length, 4096 ) ) >= 0 )
            length += numBytes;

        // Save in a variable to return
        encodedData = new String( buffer, 0, length, Base64.PREFERRED_ENCODING );
    } // end try
    catch( java.io.IOException e )
    {
        logger.error( Logger.SECURITY_FAILURE, "Error encoding from file " + filename, e );
    } // end catch: IOException
    finally
    {
        try{ bis.close(); } catch( Exception e) {}
    } // end finally

    return encodedData;
} // end encodeFromFile

/**
 * Reads <tt>infile</tt> and encodes it to <tt>outfile</tt>.
* * @param infile Input file * @param outfile Output file * @return true if the operation is successful * @since 2.2 */ public static boolean encodeFileToFile( String infile, String outfile ) { boolean success = false; java.io.InputStream in = null; java.io.OutputStream out = null; try{ in = new Base64.InputStream( new java.io.BufferedInputStream( new java.io.FileInputStream( infile ) ), Base64.ENCODE ); out = new java.io.BufferedOutputStream( new java.io.FileOutputStream( outfile ) ); byte[] buffer = new byte[65536]; // 64K int read = -1; while( ( read = in.read(buffer) ) >= 0 ){ out.write( buffer,0,read ); } // end while: through file success = true; } catch( java.io.IOException exc ){ logger.error( Logger.SECURITY_FAILURE, "Problem encoding file to file", exc ); } finally{ try{ in.close(); } catch( Exception exc ){} try{ out.close(); } catch( Exception exc ){} } // end finally return success; } // end encodeFileToFile /** * Reads <tt>infile</tt> and decodes it to <tt>outfile</tt>. * * @param infile Input file * @param outfile Output file * @return true if the operation is successful * @since 2.2 */ public static boolean decodeFileToFile( String infile, String outfile ) { boolean success = false; java.io.InputStream in = null; java.io.OutputStream out = null; try{ in = new Base64.InputStream( new java.io.BufferedInputStream( new java.io.FileInputStream( infile ) ), Base64.DECODE ); out = new java.io.BufferedOutputStream( new java.io.FileOutputStream( outfile ) ); byte[] buffer = new byte[65536]; // 64K int read = -1; while( ( read = in.read(buffer) ) >= 0 ){ out.write( buffer,0,read ); } // end while: through file success = true; } catch( java.io.IOException exc ){ logger.error( Logger.SECURITY_FAILURE, "Problem decoding file to file", exc ); } finally{ try{ in.close(); } catch( Exception exc ){} try{ out.close(); } catch( Exception exc ){} } // end finally return success; } // end decodeFileToFile /** * A {@link Base64.InputStream} will read data from another * 
<tt>java.io.InputStream</tt>, given in the constructor,
 * and encode/decode to/from Base64 notation on the fly.
 *
 * @see Base64
 * @since 1.3
 */
public static class InputStream extends java.io.FilterInputStream
{
    private boolean encode;         // Encoding or decoding
    private int     position;       // Current position in the buffer (-1 means "buffer empty, refill needed")
    private byte[]  buffer;         // Small buffer holding converted data
    private int     bufferLength;   // Length of buffer (3 or 4)
    private int     numSigBytes;    // Number of meaningful bytes in the buffer
    private int     lineLength;
    private boolean breakLines;     // Break lines at less than 80 characters
    private int     options;        // Record options used to create the stream.
    private byte[]  decodabet;      // Local copies to avoid extra method calls

    /**
     * Constructs a {@link Base64.InputStream} in DECODE mode.
     *
     * @param in the <tt>java.io.InputStream</tt> from which to read data.
     * @since 1.3
     */
    public InputStream( java.io.InputStream in )
    {
        this( in, DECODE );
    } // end constructor

    /**
     * Constructs a {@link Base64.InputStream} in
     * either ENCODE or DECODE mode.
     * <p>
     * Valid options:<pre>
     *   ENCODE or DECODE: Encode or Decode as data is read.
     *   DONT_BREAK_LINES: don't break lines at 76 characters
     *     (only meaningful when encoding)
     *     <i>Note: Technically, this makes your encoding non-compliant.</i>
     * </pre>
     * <p>
     * Example: <code>new Base64.InputStream( in, Base64.DECODE )</code>
     *
     *
     * @param in the <tt>java.io.InputStream</tt> from which to read data.
     * @param options Specified options
     * @see Base64#ENCODE
     * @see Base64#DECODE
     * @see Base64#DONT_BREAK_LINES
     * @since 2.0
     */
    public InputStream( java.io.InputStream in, int options )
    {
        super( in );
        this.breakLines = (options & DONT_BREAK_LINES) != DONT_BREAK_LINES;
        this.encode = (options & ENCODE) == ENCODE;
        // Encoding buffers 4 output chars; decoding buffers 3 output bytes.
        this.bufferLength = encode ? 4 : 3;
        this.buffer = new byte[ bufferLength ];
        this.position = -1;
        this.lineLength = 0;
        this.options = options; // Record for later, mostly to determine which alphabet to use
        this.decodabet = getDecodabet(options);
    } // end constructor

    /**
     * Reads enough of the input stream to convert
     * to/from Base64 and returns the next byte.
     *
     * @return next byte
     * @throws java.io.IOException
     * @since 1.3
     */
    public int read() throws java.io.IOException
    {
        // Do we need to get data?
        if( position < 0 )
        {
            if( encode )
            {
                byte[] b3 = new byte[3];
                int numBinaryBytes = 0;
                for( int i = 0; i < 3; i++ )
                {
                    try
                    {
                        int b = in.read();

                        // If end of stream, b is -1.
                        if( b >= 0 )
                        {
                            b3[i] = (byte)b;
                            numBinaryBytes++;
                        } // end if: not end of stream
                    } // end try: read
                    catch( java.io.IOException e )
                    {
                        // Only a problem if we got no data at all.
                        if( i == 0 )
                            throw e;
                    } // end catch
                } // end for: each needed input byte

                if( numBinaryBytes > 0 )
                {
                    encode3to4( b3, 0, numBinaryBytes, buffer, 0, options );
                    position = 0;
                    numSigBytes = 4;
                } // end if: got data
                else
                {
                    return -1;
                } // end else
            } // end if: encoding

            // Else decoding
            else
            {
                byte[] b4 = new byte[4];
                int i = 0;
                for( i = 0; i < 4; i++ )
                {
                    // Read four "meaningful" bytes:
                    int b = 0;
                    do{ b = in.read(); }
                    while( b >= 0 && decodabet[ b & 0x7f ] <= WHITE_SPACE_ENC );

                    if( b < 0 )
                        break; // Reads a -1 if end of stream

                    b4[i] = (byte)b;
                } // end for: each needed input byte

                if( i == 4 )
                {
                    numSigBytes = decode4to3( b4, 0, buffer, 0, options );
                    position = 0;
                } // end if: got four characters
                else if( i == 0 ){
                    return -1;
                } // end else if: also padded correctly
                else
                {
                    // Must have broken out from above.
                    throw new java.io.IOException( "Improperly padded Base64 input." );
                } // end
            } // end else: decode
        } // end else: get data

        // Got data?
        if( position >= 0 )
        {
            // End of relevant data?
            if( /*!encode &&*/ position >= numSigBytes )
                return -1;

            if( encode && breakLines && lineLength >= MAX_LINE_LENGTH )
            {
                lineLength = 0;
                return '\n';
            } // end if
            else
            {
                lineLength++;   // This isn't important when decoding
                                // but throwing an extra "if" seems
                                // just as wasteful.

                int b = buffer[ position++ ];

                if( position >= bufferLength )
                    position = -1;

                return b & 0xFF; // This is how you "cast" a byte that's
                                 // intended to be unsigned.
            } // end else
        } // end if: position >= 0

        // Else error
        else
        {
            // When JDK1.4 is more accepted, use an assertion here.
            throw new java.io.IOException( "Error in Base64 code reading stream." );
        } // end else
    } // end read

    /**
     * Calls {@link #read()} repeatedly until the end of stream
     * is reached or <var>len</var> bytes are read.
     * Returns number of bytes read into array or -1 if
     * end of stream is encountered.
     *
     * @param dest array to hold values
     * @param off offset for array
     * @param len max number of bytes to read into array
     * @return bytes read into array or -1 if end of stream is encountered.
     * @throws java.io.IOException
     * @since 1.3
     */
    public int read( byte[] dest, int off, int len ) throws java.io.IOException
    {
        int i;
        int b;
        for( i = 0; i < len; i++ )
        {
            b = read();

            //if( b < 0 && i == 0 )
            //    return -1;

            if( b >= 0 )
                dest[off + i] = (byte)b;
            else if( i == 0 )
                return -1;
            else
                break; // Out of 'for' loop
        } // end for: each byte read
        return i;
    } // end read

} // end inner class InputStream

/**
 * A {@link Base64.OutputStream} will write data to another
 * <tt>java.io.OutputStream</tt>, given in the constructor,
 * and encode/decode to/from Base64 notation on the fly.
 *
 * @see Base64
 * @since 1.3
 */
public static class OutputStream extends java.io.FilterOutputStream
{
    private boolean encode;
    private int     position;
    private byte[]  buffer;
    private int     bufferLength;
    private int     lineLength;
    private boolean breakLines;
    private byte[]  b4;             // Scratch used in a few places
    private boolean suspendEncoding;
    private int     options;        // Record for later
    private byte[]  decodabet;      // Local copies to avoid extra method calls

    /**
     * Constructs a {@link Base64.OutputStream} in ENCODE mode.
     *
     * @param out the <tt>java.io.OutputStream</tt> to which data will be written.
     * @since 1.3
     */
    public OutputStream( java.io.OutputStream out )
    {
        this( out, ENCODE );
    } // end constructor

    /**
     * Constructs a {@link Base64.OutputStream} in
     * either ENCODE or DECODE mode.
     * <p>
     * Valid options:<pre>
     *   ENCODE or DECODE: Encode or Decode as data is read.
     *   DONT_BREAK_LINES: don't break lines at 76 characters
     *     (only meaningful when encoding)
     *     <i>Note: Technically, this makes your encoding non-compliant.</i>
     * </pre>
     * <p>
     * Example: <code>new Base64.OutputStream( out, Base64.ENCODE )</code>
     *
     * @param out the <tt>java.io.OutputStream</tt> to which data will be written.
     * @param options Specified options.
     * @see Base64#ENCODE
     * @see Base64#DECODE
     * @see Base64#DONT_BREAK_LINES
     * @since 1.3
     */
    public OutputStream( java.io.OutputStream out, int options )
    {
        super( out );
        this.breakLines = (options & DONT_BREAK_LINES) != DONT_BREAK_LINES;
        this.encode = (options & ENCODE) == ENCODE;
        // Encoding buffers 3 raw bytes; decoding buffers 4 Base64 chars.
        this.bufferLength = encode ? 3 : 4;
        this.buffer = new byte[ bufferLength ];
        this.position = 0;
        this.lineLength = 0;
        this.suspendEncoding = false;
        this.b4 = new byte[4];
        this.options = options;
        this.decodabet = getDecodabet(options);
    } // end constructor

    /**
     * Writes the byte to the output stream after
     * converting to/from Base64 notation.
     * When encoding, bytes are buffered three
     * at a time before the output stream actually
     * gets a write() call.
     * When decoding, bytes are buffered four
     * at a time.
     *
     * @param theByte the byte to write
     * @throws java.io.IOException
     * @since 1.3
     */
    public void write(int theByte) throws java.io.IOException
    {
        // Encoding suspended?
        if( suspendEncoding )
        {
            super.out.write( theByte );
            return;
        } // end if: suspended

        // Encode?
        if( encode )
        {
            buffer[ position++ ] = (byte)theByte;
            if( position >= bufferLength ) // Enough to encode.
            {
                out.write( encode3to4( b4, buffer, bufferLength, options ) );

                lineLength += 4;
                if( breakLines && lineLength >= MAX_LINE_LENGTH )
                {
                    out.write( NEW_LINE );
                    lineLength = 0;
                } // end if: end of line

                position = 0;
            } // end if: enough to output
        } // end if: encoding

        // Else, Decoding
        else
        {
            // Meaningful Base64 character?
            if( decodabet[ theByte & 0x7f ] > WHITE_SPACE_ENC )
            {
                buffer[ position++ ] = (byte)theByte;
                if( position >= bufferLength ) // Enough to output.
                {
                    int len = Base64.decode4to3( buffer, 0, b4, 0, options );
                    out.write( b4, 0, len );
                    //out.write( Base64.decode4to3( buffer ) );
                    position = 0;
                } // end if: enough to output
            } // end if: meaningful base64 character
            else if( decodabet[ theByte & 0x7f ] != WHITE_SPACE_ENC )
            {
                throw new java.io.IOException( "Invalid character in Base64 data." );
            } // end else: not white space either
        } // end else: decoding
    } // end write

    /**
     * Calls {@link #write(int)} repeatedly until <var>len</var>
     * bytes are written.
     *
     * @param theBytes array from which to read bytes
     * @param off offset for array
     * @param len max number of bytes to read into array
     * @throws java.io.IOException
     * @since 1.3
     */
    public void write( byte[] theBytes, int off, int len ) throws java.io.IOException
    {
        // Encoding suspended?
        if( suspendEncoding )
        {
            super.out.write( theBytes, off, len );
            return;
        } // end if: suspended

        for( int i = 0; i < len; i++ )
        {
            write( theBytes[ off + i ] );
        } // end for: each byte written

    } // end write

    /**
     * Method added by PHIL. [Thanks, PHIL. -Rob]
     * This pads the buffer without closing the stream.
     * @throws java.io.IOException
     */
    public void flushBase64() throws java.io.IOException
    {
        if( position > 0 )
        {
            if( encode )
            {
                out.write( encode3to4( b4, buffer, position, options ) );
                position = 0;
            } // end if: encoding
            else
            {
                throw new java.io.IOException( "Base64 input not properly padded." );
            } // end else: decoding
        } // end if: buffer partially full
    } // end flush

    /**
     * Flushes and closes (I think, in the superclass) the stream.
     *
     * @throws java.io.IOException
     * @since 1.3
     */
    public void close() throws java.io.IOException
    {
        // 1. Ensure that pending characters are written
        flushBase64();

        // 2. Actually close the stream
        // Base class both flushes and closes.
        super.close();

        buffer = null;
        out = null;
    } // end close

    /**
     * Suspends encoding of the stream.
     * May be helpful if you need to embed a piece of
     * base64-encoded data in a stream.
     *
     * @throws java.io.IOException
     * @since 1.5.1
     */
    public void suspendEncoding() throws java.io.IOException
    {
        flushBase64();
        this.suspendEncoding = true;
    } // end suspendEncoding

    /**
     * Resumes encoding of the stream.
     * May be helpful if you need to embed a piece of
     * base64-encoded data in a stream.
     *
     * @since 1.5.1
     */
    public void resumeEncoding()
    {
        this.suspendEncoding = false;
    } // end resumeEncoding

} // end inner class OutputStream

} // end class Base64
package org.voovan.network;

import java.io.FileInputStream;
import java.io.IOException;
import java.security.KeyStore;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.security.UnrecoverableKeyException;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;

import javax.net.ssl.KeyManager;
import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLException;
import javax.net.ssl.TrustManager;
import javax.net.ssl.TrustManagerFactory;
import javax.net.ssl.X509TrustManager;

import org.voovan.tools.TString;

/**
 * Builds an {@link SSLContext}/{@link SSLEngine} from an optional key store and
 * wraps the engine in an {@link SSLParser} for client or server sessions.
 *
 * <p>When no certificate has been loaded via {@link #loadCertificate}, a
 * trust-all manager is installed instead -- see the security note on
 * {@link DefaultTrustManager}.</p>
 */
public class SSLManager {
	private KeyManagerFactory keyManagerFactory;
	private TrustManagerFactory trustManagerFactory;
	private SSLContext context;
	private SSLEngine engine;
	private boolean useClientAuth;
	private String protocol;

	/**
	 * Creates a manager that requires client authentication.
	 *
	 * @param protocol SSL/TLS protocol name passed to {@link SSLContext#getInstance(String)}
	 * @throws NoSuchAlgorithmException declared for backward compatibility; not thrown here
	 */
	public SSLManager(String protocol) throws NoSuchAlgorithmException{
		this.useClientAuth = true;
		this.protocol = protocol;
	}

	/**
	 * Creates a manager with explicit control over client authentication.
	 *
	 * @param protocol SSL/TLS protocol name
	 * @param useClientAuth whether the server side requires client certificates
	 * @throws SSLException declared for backward compatibility; not thrown here
	 */
	public SSLManager(String protocol,boolean useClientAuth) throws SSLException{
		this.useClientAuth = useClientAuth;
		this.protocol = protocol;
	}

	/**
	 * Loads a key store file and initializes the key and trust manager factories
	 * from it.
	 *
	 * @param manageCertFile path of the key store file
	 * @param certPassword password protecting the key store
	 * @param keyPassword password protecting the private key entries
	 * @throws SSLException if the store cannot be read or initialized
	 */
	public void loadCertificate(String manageCertFile, String certPassword, String keyPassword) throws SSLException{
		// try-with-resources: the original leaked this FileInputStream.
		try (FileInputStream certStream = new FileInputStream(manageCertFile)) {
			keyManagerFactory = KeyManagerFactory.getInstance("SunX509");
			trustManagerFactory = TrustManagerFactory.getInstance("SunX509");
			KeyStore manageKeystore = KeyStore.getInstance(KeyStore.getDefaultType());
			manageKeystore.load(certStream, certPassword.toCharArray());
			keyManagerFactory.init(manageKeystore, keyPassword.toCharArray());
			trustManagerFactory.init(manageKeystore);
		} catch (CertificateException | IOException | NoSuchAlgorithmException
				| KeyStoreException | UnrecoverableKeyException e) {
			throw new SSLException("Init SSLContext Error: "+e.getMessage(),e);
		}
	}

	/**
	 * Creates and initializes the {@link SSLContext}.
	 *
	 * @param protocol SSL/TLS protocol name; falls back to "SSL" when null or empty
	 * @throws SSLException if the context cannot be created or initialized
	 */
	private void init(String protocol) throws SSLException {
		// Resolve the fallback BEFORE using the value. The original assigned
		// this.protocol = "SSL" but still passed the raw (null/empty) argument to
		// SSLContext.getInstance(), which then failed. Null is checked first so
		// TString.isNullOrEmpty never receives it.
		if (protocol == null || TString.isNullOrEmpty(protocol)) {
			protocol = "SSL";
		}
		this.protocol = protocol;

		try {
			context = SSLContext.getInstance(protocol);
			if (keyManagerFactory != null && trustManagerFactory != null) {
				context.init(keyManagerFactory.getKeyManagers(), trustManagerFactory.getTrustManagers(), new SecureRandom());
			} else {
				// No key store loaded: fall back to the permissive trust manager.
				context.init(new KeyManager[0], new TrustManager[]{new DefaultTrustManager()}, new SecureRandom());
			}
			// Kept broad so any failure (NoSuchAlgorithmException, KeyManagementException, ...)
			// still surfaces as SSLException, matching the original contract.
		} catch (Exception e) {
			throw new SSLException("Init SSLContext Error: "+e.getMessage(),e);
		}
	}

	/**
	 * Creates a new {@link SSLEngine} from a freshly initialized context.
	 *
	 * @param protocol SSL/TLS protocol name
	 * @throws SSLException if the context cannot be initialized
	 */
	private void createSSLEngine(String protocol) throws SSLException {
		init(protocol);
		engine = context.createSSLEngine();
	}

	/**
	 * Builds an {@link SSLParser} configured for the client side of a handshake.
	 *
	 * @param session the session the parser is bound to
	 * @return a client-mode SSL parser
	 * @throws SSLException if the engine cannot be created
	 */
	public SSLParser createClientSSLParser(IoSession session) throws SSLException {
		createSSLEngine(protocol);
		engine.setUseClientMode(true);
		return new SSLParser(engine, session);
	}

	/**
	 * Builds an {@link SSLParser} configured for the server side of a handshake.
	 *
	 * @param session the session the parser is bound to
	 * @return a server-mode SSL parser
	 * @throws SSLException if the engine cannot be created
	 */
	public SSLParser createServerSSLParser(IoSession session) throws SSLException{
		createSSLEngine(protocol);
		engine.setUseClientMode(false);
		engine.setNeedClientAuth(useClientAuth);
		return new SSLParser(engine, session);
	}

	/**
	 * SECURITY WARNING: this trust manager accepts every certificate without any
	 * validation, which allows man-in-the-middle attacks. It is only used when no
	 * key store has been loaded; do not rely on it outside of testing.
	 */
	private static class DefaultTrustManager implements X509TrustManager {
		@Override
		public void checkClientTrusted(X509Certificate[] paramArrayOfX509Certificate, String paramString)
				throws CertificateException {
			// Intentionally empty: trusts all client certificates (see class warning).
		}

		@Override
		public void checkServerTrusted(X509Certificate[] paramArrayOfX509Certificate, String paramString)
				throws CertificateException {
			// Intentionally empty: trusts all server certificates (see class warning).
		}

		@Override
		public X509Certificate[] getAcceptedIssuers() {
			// The X509TrustManager contract calls for a non-null (possibly empty)
			// array; the original returned null.
			return new X509Certificate[0];
		}
	}
}
package org.zendesk.client.v2; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.JavaType; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.SerializationFeature; import com.ning.http.client.AsyncCompletionHandler; import com.ning.http.client.AsyncHttpClient; import com.ning.http.client.AsyncHttpClient.BoundRequestBuilder; import com.ning.http.client.ListenableFuture; import com.ning.http.client.Realm; import com.ning.http.client.Request; import com.ning.http.client.RequestBuilder; import com.ning.http.client.Response; import com.ning.http.client.multipart.FilePart; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.zendesk.client.v2.model.Attachment; import org.zendesk.client.v2.model.Audit; import org.zendesk.client.v2.model.Automation; import org.zendesk.client.v2.model.Comment; import org.zendesk.client.v2.model.Field; import org.zendesk.client.v2.model.Forum; import org.zendesk.client.v2.model.Group; import org.zendesk.client.v2.model.GroupMembership; import org.zendesk.client.v2.model.Identity; import org.zendesk.client.v2.model.JobStatus; import org.zendesk.client.v2.model.Macro; import org.zendesk.client.v2.model.Metric; import org.zendesk.client.v2.model.Organization; import org.zendesk.client.v2.model.OrganizationField; import org.zendesk.client.v2.model.SearchResultEntity; import org.zendesk.client.v2.model.Status; import org.zendesk.client.v2.model.SuspendedTicket; import org.zendesk.client.v2.model.Ticket; import org.zendesk.client.v2.model.TicketForm; import org.zendesk.client.v2.model.TicketResult; import org.zendesk.client.v2.model.Topic; import org.zendesk.client.v2.model.Trigger; import org.zendesk.client.v2.model.TwitterMonitor; import org.zendesk.client.v2.model.User; import 
org.zendesk.client.v2.model.UserField; import org.zendesk.client.v2.model.hc.Article; import org.zendesk.client.v2.model.hc.ArticleAttachments; import org.zendesk.client.v2.model.hc.Category; import org.zendesk.client.v2.model.hc.Section; import org.zendesk.client.v2.model.hc.Translation; import org.zendesk.client.v2.model.targets.BasecampTarget; import org.zendesk.client.v2.model.targets.CampfireTarget; import org.zendesk.client.v2.model.targets.EmailTarget; import org.zendesk.client.v2.model.targets.PivotalTarget; import org.zendesk.client.v2.model.targets.Target; import org.zendesk.client.v2.model.targets.TwitterTarget; import org.zendesk.client.v2.model.targets.UrlTarget; import java.io.Closeable; import java.io.File; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.net.URLEncoder; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.NoSuchElementException; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.regex.Pattern; /** * @author stephenc * @since 04/04/2013 13:08 */ public class Zendesk implements Closeable { private static final String JSON = "application/json; charset=UTF-8"; private final boolean closeClient; private final AsyncHttpClient client; private final Realm realm; private final String url; private final String oauthToken; private final ObjectMapper mapper; private final Logger logger; private boolean closed = false; private static final Map<String, Class<? extends SearchResultEntity>> searchResultTypes = searchResultTypes(); private static final Map<String, Class<? extends Target>> targetTypes = targetTypes(); private static Map<String, Class<? extends SearchResultEntity>> searchResultTypes() { Map<String, Class<? 
extends SearchResultEntity>> result = new HashMap<String, Class<? extends SearchResultEntity>>(); result.put("ticket", Ticket.class); result.put("user", User.class); result.put("group", Group.class); result.put("organization", Organization.class); result.put("topic", Topic.class); result.put("article", Article.class); return Collections.unmodifiableMap(result); } private static Map<String, Class<? extends Target>> targetTypes() { Map<String, Class<? extends Target>> result = new HashMap<String, Class<? extends Target>>(); result.put("url_target", UrlTarget.class); result.put("email_target",EmailTarget.class); result.put("basecamp_target", BasecampTarget.class); result.put("campfire_target", CampfireTarget.class); result.put("pivotal_target", PivotalTarget.class); result.put("twitter_target", TwitterTarget.class); // TODO: Implement other Target types //result.put("clickatell_target", ClickatellTarget.class); //result.put("flowdock_target", FlowdockTarget.class); //result.put("get_satisfaction_target", GetSatisfactionTarget.class); //result.put("yammer_target", YammerTarget.class); return Collections.unmodifiableMap(result); } private Zendesk(AsyncHttpClient client, String url, String username, String password) { this.logger = LoggerFactory.getLogger(Zendesk.class); this.closeClient = client == null; this.oauthToken = null; this.client = client == null ? new AsyncHttpClient() : client; this.url = url.endsWith("/") ? 
url + "api/v2" : url + "/api/v2"; if (username != null) { this.realm = new Realm.RealmBuilder() .setScheme(Realm.AuthScheme.BASIC) .setPrincipal(username) .setPassword(password) .setUsePreemptiveAuth(true) .build(); } else { if (password != null) { throw new IllegalStateException("Cannot specify token or password without specifying username"); } this.realm = null; } this.mapper = createMapper(); } private Zendesk(AsyncHttpClient client, String url, String oauthToken) { this.logger = LoggerFactory.getLogger(Zendesk.class); this.closeClient = client == null; this.realm = null; this.client = client == null ? new AsyncHttpClient() : client; this.url = url.endsWith("/") ? url + "api/v2" : url + "/api/v2"; if (oauthToken != null) { this.oauthToken = oauthToken; } else { throw new IllegalStateException("Cannot specify token or password without specifying username"); } this.mapper = createMapper(); } // Closeable interface methods public boolean isClosed() { return closed || client.isClosed(); } public void close() { if (closeClient && !client.isClosed()) { client.close(); } closed = true; } // Action methods public <T> JobStatus<T> getJobStatus(JobStatus<T> status) { return complete(getJobStatusAsync(status)); } public <T> ListenableFuture<JobStatus<T>> getJobStatusAsync(JobStatus<T> status) { return submit(req("GET", tmpl("/job_statuses/{id}.json").set("id", status.getId())), handleJobStatus(status.getResultsClass())); } public List<JobStatus<HashMap<String, Object>>> getJobStatuses(List<JobStatus> statuses) { return complete(getJobStatusesAsync(statuses)); } public ListenableFuture<List<JobStatus<HashMap<String, Object>>>> getJobStatusesAsync(List<JobStatus> statuses) { List<String> ids = new ArrayList<String>(statuses.size()); for (JobStatus status : statuses) { ids.add(status.getId()); } Class<JobStatus<HashMap<String, Object>>> clazz = (Class<JobStatus<HashMap<String, Object>>>)(Object)JobStatus.class; return submit(req("GET", 
tmpl("/job_statuses/show_many.json{?ids}").set("ids", ids)), handleList(clazz, "job_statuses")); } public TicketForm getTicketForm(long id) { return complete(submit(req("GET", tmpl("/ticket_forms/{id}.json").set("id", id)), handle(TicketForm.class, "ticket_form"))); } public List<TicketForm> getTicketForms() { return complete(submit(req("GET", cnst("/ticket_forms.json")), handleList(TicketForm.class, "ticket_forms"))); } public Ticket getTicket(long id) { return complete(submit(req("GET", tmpl("/tickets/{id}.json").set("id", id)), handle(Ticket.class, "ticket"))); } public List<Ticket> getTicketIncidents(long id) { return complete(submit(req("GET", tmpl("/tickets/{id}/incidents.json").set("id", id)), handleList(Ticket.class, "tickets"))); } public List<User> getTicketCollaborators(long id) { return complete(submit(req("GET", tmpl("/tickets/{id}/collaborators.json").set("id", id)), handleList(User.class, "users"))); } public void deleteTicket(Ticket ticket) { checkHasId(ticket); deleteTicket(ticket.getId()); } public void deleteTicket(long id) { complete(submit(req("DELETE", tmpl("/tickets/{id}.json").set("id", id)), handleStatus())); } public Ticket createTicket(Ticket ticket) { return complete(submit(req("POST", cnst("/tickets.json"), JSON, json(Collections.singletonMap("ticket", ticket))), handle(Ticket.class, "ticket"))); } public JobStatus<Ticket> createTickets(Ticket... 
tickets) { return createTickets(Arrays.asList(tickets)); } public JobStatus<Ticket> createTickets(List<Ticket> tickets) { return complete(createTicketsAsync(tickets)); } public ListenableFuture<JobStatus<Ticket>> createTicketsAsync(List<Ticket> tickets) { return submit(req("POST", cnst("/tickets/create_many.json"), JSON, json( Collections.singletonMap("tickets", tickets))), handleJobStatus(Ticket.class)); } public Ticket updateTicket(Ticket ticket) { checkHasId(ticket); return complete(submit(req("PUT", tmpl("/tickets/{id}.json").set("id", ticket.getId()), JSON, json(Collections.singletonMap("ticket", ticket))), handle(Ticket.class, "ticket"))); } public void markTicketAsSpam(Ticket ticket) { checkHasId(ticket); markTicketAsSpam(ticket.getId()); } public void markTicketAsSpam(long id) { complete(submit(req("PUT", tmpl("/tickets/{id}/mark_as_spam.json").set("id", id)), handleStatus())); } public void deleteTickets(long id, long... ids) { complete(submit(req("DELETE", tmpl("/tickets/destroy_many.json{?ids}").set("ids", idArray(id, ids))), handleStatus())); } public Iterable<Ticket> getTickets() { return new PagedIterable<Ticket>(cnst("/tickets.json"), handleList(Ticket.class, "tickets")); } public Iterable<Ticket> getTicketsByStatus(Status... 
ticketStatus) { return new PagedIterable<Ticket>(tmpl("/tickets.json{?status}").set("status", statusArray(ticketStatus)), handleList(Ticket.class, "tickets")); } public Iterable<Ticket> getTicketsByExternalId(String externalId, boolean includeArchived) { Iterable<Ticket> results = new PagedIterable<Ticket>(tmpl("/tickets.json{?external_id}").set("external_id", externalId), handleList(Ticket.class, "tickets")); if (!includeArchived || results.iterator().hasNext()) { return results; } return new PagedIterable<Ticket>(tmpl("/search.json{?query}{&type}").set("query", "external_id:" + externalId).set("type", "ticket"), handleList(Ticket.class, "results")); } public Iterable<Ticket> getTicketsByExternalId(String externalId) { return getTicketsByExternalId(externalId, false); } public Iterable<Ticket> getTicketsFromSearch(String searchTerm) { return new PagedIterable<Ticket>(tmpl("/search.json{?query}").set("query", searchTerm + "+type:ticket"), handleList(Ticket.class, "results")); } public Iterable<Article> getArticleFromSearch(String searchTerm) { return new PagedIterable<Article>(tmpl("/help_center/articles/search.json{?query}").set("query", searchTerm), handleList(Article.class, "results")); } public Iterable<Article> getArticleFromSearch(String searchTerm, Long sectionId) { return new PagedIterable<Article>(tmpl("/help_center/articles/search.json{?section,query}") .set("query", searchTerm).set("section", sectionId), handleList(Article.class, "results")); } public List<ArticleAttachments> getAttachmentsFromArticle(Long articleID) { return complete(submit(req("GET", tmpl("/help_center/articles/{id}/attachments.json").set("id", articleID)), handleArticleAttachmentsList("article_attachments"))); } public List<Ticket> getTickets(long id, long... 
ids) { return complete(submit(req("GET", tmpl("/tickets/show_many.json{?ids}").set("ids", idArray(id, ids))), handleList(Ticket.class, "tickets"))); } public Iterable<Ticket> getRecentTickets() { return new PagedIterable<Ticket>(cnst("/tickets/recent.json"), handleList(Ticket.class, "tickets")); } public Iterable<Ticket> getTicketsIncrementally(Date startTime) { return new PagedIterable<Ticket>( tmpl("/incremental/tickets.json{?start_time}").set("start_time", msToSeconds(startTime.getTime())), handleIncrementalList(Ticket.class, "tickets")); } public Iterable<Ticket> getTicketsIncrementally(Date startTime, Date endTime) { return new PagedIterable<Ticket>( tmpl("/incremental/tickets.json{?start_time,end_time}") .set("start_time", msToSeconds(startTime.getTime())) .set("end_time", msToSeconds(endTime.getTime())), handleIncrementalList(Ticket.class, "tickets")); } public Iterable<Ticket> getOrganizationTickets(long organizationId) { return new PagedIterable<Ticket>( tmpl("/organizations/{organizationId}/tickets.json").set("organizationId", organizationId), handleList(Ticket.class, "tickets")); } public Iterable<Ticket> getUserRequestedTickets(long userId) { return new PagedIterable<Ticket>(tmpl("/users/{userId}/tickets/requested.json").set("userId", userId), handleList(Ticket.class, "tickets")); } public Iterable<Ticket> getUserCCDTickets(long userId) { return new PagedIterable<Ticket>(tmpl("/users/{userId}/tickets/ccd.json").set("userId", userId), handleList(Ticket.class, "tickets")); } public Iterable<Metric> getTicketMetrics() { return new PagedIterable<Metric>(cnst("/ticket_metrics.json"), handleList(Metric.class, "ticket_metrics")); } public Metric getTicketMetricByTicket(long id) { return complete(submit(req("GET", tmpl("/tickets/{ticketId}/metrics.json").set("ticketId", id)), handle(Metric.class, "ticket_metric"))); } public Metric getTicketMetric(long id) { return complete(submit(req("GET", tmpl("/ticket_metrics/{ticketMetricId}.json").set("ticketMetricId", 
id)), handle(Metric.class, "ticket_metric"))); } public Iterable<Audit> getTicketAudits(Ticket ticket) { checkHasId(ticket); return getTicketAudits(ticket.getId()); } public Iterable<Audit> getTicketAudits(Long id) { return new PagedIterable<Audit>(tmpl("/tickets/{ticketId}/audits.json").set("ticketId", id), handleList(Audit.class, "audits")); } public Audit getTicketAudit(Ticket ticket, Audit audit) { checkHasId(audit); return getTicketAudit(ticket, audit.getId()); } public Audit getTicketAudit(Ticket ticket, long id) { checkHasId(ticket); return getTicketAudit(ticket.getId(), id); } public Audit getTicketAudit(long ticketId, long auditId) { return complete(submit(req("GET", tmpl("/tickets/{ticketId}/audits/{auditId}.json").set("ticketId", ticketId) .set("auditId", auditId)), handle(Audit.class, "audit"))); } public void trustTicketAudit(Ticket ticket, Audit audit) { checkHasId(audit); trustTicketAudit(ticket, audit.getId()); } public void trustTicketAudit(Ticket ticket, long id) { checkHasId(ticket); trustTicketAudit(ticket.getId(), id); } public void trustTicketAudit(long ticketId, long auditId) { complete(submit(req("PUT", tmpl("/tickets/{ticketId}/audits/{auditId}/trust.json").set("ticketId", ticketId) .set("auditId", auditId)), handleStatus())); } public void makePrivateTicketAudit(Ticket ticket, Audit audit) { checkHasId(audit); makePrivateTicketAudit(ticket, audit.getId()); } public void makePrivateTicketAudit(Ticket ticket, long id) { checkHasId(ticket); makePrivateTicketAudit(ticket.getId(), id); } public void makePrivateTicketAudit(long ticketId, long auditId) { complete(submit(req("PUT", tmpl("/tickets/{ticketId}/audits/{auditId}/make_private.json").set("ticketId", ticketId) .set("auditId", auditId)), handleStatus())); } public List<Field> getTicketFields() { return complete(submit(req("GET", cnst("/ticket_fields.json")), handleList(Field.class, "ticket_fields"))); } public Field getTicketField(long id) { return complete(submit(req("GET", 
tmpl("/ticket_fields/{id}.json").set("id", id)), handle(Field.class, "ticket_field"))); } public Field createTicketField(Field field) { return complete(submit(req("POST", cnst("/ticket_fields.json"), JSON, json( Collections.singletonMap("ticket_field", field))), handle(Field.class, "ticket_field"))); } public Field updateTicketField(Field field) { checkHasId(field); return complete(submit(req("PUT", tmpl("/ticket_fields/{id}.json").set("id", field.getId()), JSON, json(Collections.singletonMap("ticket_field", field))), handle(Field.class, "ticket_field"))); } public void deleteTicketField(Field field) { checkHasId(field); deleteTicket(field.getId()); } public void deleteTicketField(long id) { complete(submit(req("DELETE", tmpl("/ticket_fields/{id}.json").set("id", id)), handleStatus())); } public Iterable<SuspendedTicket> getSuspendedTickets() { return new PagedIterable<SuspendedTicket>(cnst("/suspended_tickets.json"), handleList(SuspendedTicket.class, "suspended_tickets")); } public void deleteSuspendedTicket(SuspendedTicket ticket) { checkHasId(ticket); deleteSuspendedTicket(ticket.getId()); } public void deleteSuspendedTicket(long id) { complete(submit(req("DELETE", tmpl("/suspended_tickets/{id}.json").set("id", id)), handleStatus())); } public Attachment.Upload createUpload(String fileName, byte[] content) { return createUpload(null, fileName, "application/binary", content); } public Attachment.Upload createUpload(String fileName, String contentType, byte[] content) { return createUpload(null, fileName, contentType, content); } public Attachment.Upload createUpload(String token, String fileName, String contentType, byte[] content) { TemplateUri uri = tmpl("/uploads.json{?filename}{?token}").set("filename", fileName); if (token != null) { uri.set("token", token); } return complete( submit(req("POST", uri, contentType, content), handle(Attachment.Upload.class, "upload"))); } public void associateAttachmentsToArticle(String idArticle, List<Attachment> attachments) 
{ TemplateUri uri = tmpl("/help_center/articles/{article_id}/bulk_attachments.json").set("article_id", idArticle); List<Long> attachmentsIds = new ArrayList<Long>(); for(Attachment item : attachments){ attachmentsIds.add(item.getId()); } complete(submit(req("POST", uri, JSON, json(Collections.singletonMap("attachment_ids", attachmentsIds))), handleStatus())); } public ArticleAttachments createUploadArticle(long articleId, File file) throws IOException { BoundRequestBuilder builder = client.preparePost(tmpl("/help_center/articles/{id}/attachments.json").set("id", articleId).toString()); if (realm != null) { builder.setRealm(realm); } else { builder.addHeader("Authorization", "Bearer " + oauthToken); } builder.setHeader("Content-Type", "multipart/form-data"); builder.addBodyPart( new FilePart("file", file, "application/octet-stream", Charset.forName("UTF-8"), file.getName())); final Request req = builder.build(); return complete(submit(req, handle(ArticleAttachments.class, "article_attachment"))); } public void deleteUpload(Attachment.Upload upload) { checkHasToken(upload); deleteUpload(upload.getToken()); } public void deleteUpload(String token) { complete(submit(req("DELETE", tmpl("/uploads/{token}.json").set("token", token)), handleStatus())); } public Attachment getAttachment(Attachment attachment) { checkHasId(attachment); return getAttachment(attachment.getId()); } public Attachment getAttachment(long id) { return complete(submit(req("GET", tmpl("/attachments/{id}.json").set("id", id)), handle(Attachment.class, "attachment"))); } public void deleteAttachment(Attachment attachment) { checkHasId(attachment); deleteAttachment(attachment.getId()); } public void deleteAttachment(long id) { complete(submit(req("DELETE", tmpl("/attachments/{id}.json").set("id", id)), handleStatus())); } public Iterable<Target> getTargets() { return new PagedIterable<Target>(cnst("/targets.json"), handleTargetList("targets")); } public Target getTarget(long id) { return 
complete(submit(req("GET", tmpl("/targets/{id}.json").set("id", id)), handle(Target.class, "target"))); } public Target createTarget(Target target) { return complete(submit(req("POST", cnst("/targets.json"), JSON, json(Collections.singletonMap("target", target))), handle(Target.class, "target"))); } public void deleteTarget(long targetId) { complete(submit(req("DELETE", tmpl("/targets/{id}.json").set("id", targetId)), handleStatus())); } public Iterable<Trigger> getTriggers() { return new PagedIterable<Trigger>(cnst("/triggers.json"), handleList(Trigger.class, "triggers")); } public Trigger getTrigger(long id) { return complete(submit(req("GET", tmpl("/triggers/{id}.json").set("id", id)), handle(Trigger.class, "trigger"))); } public Trigger createTrigger(Trigger trigger) { return complete(submit(req("POST", cnst("/triggers.json"), JSON, json(Collections.singletonMap("trigger", trigger))), handle(Trigger.class, "trigger"))); } public Trigger updateTrigger(Long triggerId, Trigger trigger) { return complete(submit(req("PUT", tmpl("/triggers/{id}.json").set("id", triggerId), JSON, json(Collections.singletonMap("trigger", trigger))), handle(Trigger.class, "trigger"))); } public void deleteTrigger(long triggerId) { complete(submit(req("DELETE", tmpl("/triggers/{id}.json").set("id", triggerId)), handleStatus())); } // Automations public Iterable<Automation> getAutomations() { return new PagedIterable<Automation>(cnst("/automations.json"), handleList(Automation.class, "automations")); } public Automation getAutomation(long id) { return complete(submit(req("GET", tmpl("/automations/{id}.json").set("id", id)), handle(Automation.class, "automation"))); } public Automation createAutomation(Automation automation) { return complete(submit( req("POST", cnst("/automations.json"), JSON, json(Collections.singletonMap("automation", automation))), handle(Automation.class, "automation"))); } public Automation updateAutomation(Long automationId, Automation automation) { return 
complete(submit( req("PUT", tmpl("/automations/{id}.json").set("id", automationId), JSON, json(Collections.singletonMap("automation", automation))), handle(Automation.class, "automation"))); } public void deleteAutomation(long automationId) { complete(submit(req("DELETE", tmpl("/automations/{id}.json").set("id", automationId)), handleStatus())); } public Iterable<TwitterMonitor> getTwitterMonitors() { return new PagedIterable<TwitterMonitor>(cnst("/channels/twitter/monitored_twitter_handles.json"), handleList(TwitterMonitor.class, "monitored_twitter_handles")); } public Iterable<User> getUsers() { return new PagedIterable<User>(cnst("/users.json"), handleList(User.class, "users")); } public Iterable<User> getUsersByRole(String role, String... roles) { // Going to have to build this URI manually, because the RFC6570 template spec doesn't support // variables like ?role[]=...role[]=..., which is what Zendesk requires. // See https://developer.zendesk.com/rest_api/docs/core/users#filters final StringBuilder uriBuilder = new StringBuilder("/users.json"); if (roles.length == 0) { uriBuilder.append("?role=").append(encodeUrl(role)); } else { uriBuilder.append("?role[]=").append(encodeUrl(role)); } for (final String curRole : roles) { uriBuilder.append("&role[]=").append(encodeUrl(curRole)); } return new PagedIterable<User>(cnst(uriBuilder.toString()), handleList(User.class, "users")); } public Iterable<User> getUsersIncrementally(Date startTime) { return new PagedIterable<User>( tmpl("/incremental/users.json{?start_time}").set("start_time", msToSeconds(startTime.getTime())), handleIncrementalList(User.class, "users")); } public Iterable<User> getGroupUsers(long id) { return new PagedIterable<User>(tmpl("/groups/{id}/users.json").set("id", id), handleList(User.class, "users")); } public Iterable<User> getOrganizationUsers(long id) { return new PagedIterable<User>(tmpl("/organizations/{id}/users.json").set("id", id), handleList(User.class, "users")); } public User 
getUser(long id) { return complete(submit(req("GET", tmpl("/users/{id}.json").set("id", id)), handle(User.class, "user"))); } public User getAuthenticatedUser() { return complete(submit(req("GET", cnst("/users/me.json")), handle(User.class, "user"))); } public Iterable<UserField> getUserFields() { return complete(submit(req("GET", cnst("/user_fields.json")), handleList(UserField.class, "user_fields"))); } public User createUser(User user) { return complete(submit(req("POST", cnst("/users.json"), JSON, json( Collections.singletonMap("user", user))), handle(User.class, "user"))); } public JobStatus<User> createUsers(User... users) { return createUsers(Arrays.asList(users)); } public JobStatus<User> createUsers(List<User> users) { return complete(createUsersAsync(users)); } public ListenableFuture<JobStatus<User>> createUsersAsync(List<User> users) { return submit(req("POST", cnst("/users/create_many.json"), JSON, json( Collections.singletonMap("users", users))), handleJobStatus(User.class)); } public User updateUser(User user) { checkHasId(user); return complete(submit(req("PUT", tmpl("/users/{id}.json").set("id", user.getId()), JSON, json( Collections.singletonMap("user", user))), handle(User.class, "user"))); } public void deleteUser(User user) { checkHasId(user); deleteUser(user.getId()); } public void deleteUser(long id) { complete(submit(req("DELETE", tmpl("/users/{id}.json").set("id", id)), handleStatus())); } public Iterable<User> lookupUserByEmail(String email) { return new PagedIterable<User>(tmpl("/users/search.json{?query}").set("query", email), handleList(User.class, "users")); } public Iterable<User> lookupUserByExternalId(String externalId) { return new PagedIterable<User>(tmpl("/users/search.json{?external_id}").set("external_id", externalId), handleList(User.class, "users")); } public User getCurrentUser() { return complete(submit(req("GET", cnst("/users/me.json")), handle(User.class, "user"))); } public void resetUserPassword(User user, String 
password) { checkHasId(user); resetUserPassword(user.getId(), password); } public void resetUserPassword(long id, String password) { complete(submit(req("POST", tmpl("/users/{id}/password.json").set("id", id), JSON, json(Collections.singletonMap("password", password))), handleStatus())); } public void changeUserPassword(User user, String oldPassword, String newPassword) { checkHasId(user); Map<String, String> req = new HashMap<String, String>(); req.put("previous_password", oldPassword); req.put("password", newPassword); complete(submit(req("PUT", tmpl("/users/{id}/password.json").set("id", user.getId()), JSON, json(req)), handleStatus())); } public List<Identity> getUserIdentities(User user) { checkHasId(user); return getUserIdentities(user.getId()); } public List<Identity> getUserIdentities(long userId) { return complete(submit(req("GET", tmpl("/users/{id}/identities.json").set("id", userId)), handleList(Identity.class, "identities"))); } public Identity getUserIdentity(User user, Identity identity) { checkHasId(identity); return getUserIdentity(user, identity.getId()); } public Identity getUserIdentity(User user, long identityId) { checkHasId(user); return getUserIdentity(user.getId(), identityId); } public Identity getUserIdentity(long userId, long identityId) { return complete(submit(req("GET", tmpl("/users/{userId}/identities/{identityId}.json").set("userId", userId) .set("identityId", identityId)), handle( Identity.class, "identity"))); } public List<Identity> setUserPrimaryIdentity(User user, Identity identity) { checkHasId(identity); return setUserPrimaryIdentity(user, identity.getId()); } public List<Identity> setUserPrimaryIdentity(User user, long identityId) { checkHasId(user); return setUserPrimaryIdentity(user.getId(), identityId); } public List<Identity> setUserPrimaryIdentity(long userId, long identityId) { return complete(submit(req("PUT", tmpl("/users/{userId}/identities/{identityId}/make_primary.json").set("userId", userId) .set("identityId", 
identityId), JSON, null), handleList(Identity.class, "identities"))); } public Identity verifyUserIdentity(User user, Identity identity) { checkHasId(identity); return verifyUserIdentity(user, identity.getId()); } public Identity verifyUserIdentity(User user, long identityId) { checkHasId(user); return verifyUserIdentity(user.getId(), identityId); } public Identity verifyUserIdentity(long userId, long identityId) { return complete(submit(req("PUT", tmpl("/users/{userId}/identities/{identityId}/verify.json") .set("userId", userId) .set("identityId", identityId), JSON, null), handle(Identity.class, "identity"))); } public Identity requestVerifyUserIdentity(User user, Identity identity) { checkHasId(identity); return requestVerifyUserIdentity(user, identity.getId()); } public Identity requestVerifyUserIdentity(User user, long identityId) { checkHasId(user); return requestVerifyUserIdentity(user.getId(), identityId); } public Identity requestVerifyUserIdentity(long userId, long identityId) { return complete(submit(req("PUT", tmpl("/users/{userId}/identities/{identityId}/request_verification.json") .set("userId", userId) .set("identityId", identityId), JSON, null), handle(Identity.class, "identity"))); } public void deleteUserIdentity(User user, Identity identity) { checkHasId(identity); deleteUserIdentity(user, identity.getId()); } public void deleteUserIdentity(User user, long identityId) { checkHasId(user); deleteUserIdentity(user.getId(), identityId); } public void deleteUserIdentity(long userId, long identityId) { complete(submit(req("DELETE", tmpl("/users/{userId}/identities/{identityId}.json") .set("userId", userId) .set("identityId", identityId) ), handleStatus())); } public void createUserIdentity(long userId, Identity identity) { complete(submit(req("POST", tmpl("/users/{userId}/identities.json").set("userId", userId), JSON, json( Collections.singletonMap("identity", identity))), handle(Identity.class, "identity"))); } public void createUserIdentity(User user, 
Identity identity) { complete(submit(req("POST", tmpl("/users/{userId}/identities.json").set("userId", user.getId()), JSON, json( Collections.singletonMap("identity", identity))), handle(Identity.class, "identity"))); } public Iterable<org.zendesk.client.v2.model.Request> getRequests() { return new PagedIterable<org.zendesk.client.v2.model.Request>(cnst("/requests.json"), handleList(org.zendesk.client.v2.model.Request.class, "requests")); } public Iterable<org.zendesk.client.v2.model.Request> getOpenRequests() { return new PagedIterable<org.zendesk.client.v2.model.Request>(cnst("/requests/open.json"), handleList(org.zendesk.client.v2.model.Request.class, "requests")); } public Iterable<org.zendesk.client.v2.model.Request> getSolvedRequests() { return new PagedIterable<org.zendesk.client.v2.model.Request>(cnst("/requests/solved.json"), handleList(org.zendesk.client.v2.model.Request.class, "requests")); } public Iterable<org.zendesk.client.v2.model.Request> getCCRequests() { return new PagedIterable<org.zendesk.client.v2.model.Request>(cnst("/requests/ccd.json"), handleList(org.zendesk.client.v2.model.Request.class, "requests")); } public Iterable<org.zendesk.client.v2.model.Request> getUserRequests(User user) { checkHasId(user); return getUserRequests(user.getId()); } public Iterable<org.zendesk.client.v2.model.Request> getUserRequests(long id) { return new PagedIterable<org.zendesk.client.v2.model.Request>(tmpl("/users/{id}/requests.json").set("id", id), handleList(org.zendesk.client.v2.model.Request.class, "requests")); } public org.zendesk.client.v2.model.Request getRequest(long id) { return complete(submit(req("GET", tmpl("/requests/{id}.json").set("id", id)), handle(org.zendesk.client.v2.model.Request.class, "request"))); } public org.zendesk.client.v2.model.Request createRequest(org.zendesk.client.v2.model.Request request) { return complete(submit(req("POST", cnst("/requests.json"), JSON, json(Collections.singletonMap("request", request))), 
handle(org.zendesk.client.v2.model.Request.class, "request"))); } public org.zendesk.client.v2.model.Request updateRequest(org.zendesk.client.v2.model.Request request) { checkHasId(request); return complete(submit(req("PUT", tmpl("/requests/{id}.json").set("id", request.getId()), JSON, json(Collections.singletonMap("request", request))), handle(org.zendesk.client.v2.model.Request.class, "request"))); } public Iterable<Comment> getRequestComments(org.zendesk.client.v2.model.Request request) { checkHasId(request); return getRequestComments(request.getId()); } public Iterable<Comment> getRequestComments(long id) { return new PagedIterable<Comment>(tmpl("/requests/{id}/comments.json").set("id", id), handleList(Comment.class, "comments")); } public Iterable<Comment> getTicketComments(long id) { return new PagedIterable<Comment>(tmpl("/tickets/{id}/comments.json").set("id", id), handleList(Comment.class, "comments")); } public Comment getRequestComment(org.zendesk.client.v2.model.Request request, Comment comment) { checkHasId(comment); return getRequestComment(request, comment.getId()); } public Comment getRequestComment(org.zendesk.client.v2.model.Request request, long commentId) { checkHasId(request); return getRequestComment(request.getId(), commentId); } public Comment getRequestComment(long requestId, long commentId) { return complete(submit(req("GET", tmpl("/requests/{requestId}/comments/{commentId}.json") .set("requestId", requestId) .set("commentId", commentId)), handle(Comment.class, "comment"))); } public Ticket createComment(long ticketId, Comment comment) { Ticket ticket = new Ticket(); ticket.setComment(comment); return complete(submit(req("PUT", tmpl("/tickets/{id}.json").set("id", ticketId), JSON, json(Collections.singletonMap("ticket", ticket))), handle(Ticket.class, "ticket"))); } public Ticket createTicketFromTweet(long tweetId, long monitorId) { Map<String,Object> map = new HashMap<String,Object>(); map.put("twitter_status_message_id", tweetId); 
map.put("monitored_twitter_handle_id", monitorId); return complete(submit(req("POST", cnst("/channels/twitter/tickets.json"), JSON, json(Collections.singletonMap("ticket", map))), handle(Ticket.class, "ticket"))); } public Iterable<Organization> getOrganizations() { return new PagedIterable<Organization>(cnst("/organizations.json"), handleList(Organization.class, "organizations")); } public Iterable<Organization> getOrganizationsIncrementally(Date startTime) { return new PagedIterable<Organization>( tmpl("/incremental/organizations.json{?start_time}").set("start_time", msToSeconds(startTime.getTime())), handleIncrementalList(Organization.class, "organizations")); } public Iterable<OrganizationField> getOrganizationFields() { //The organization_fields api doesn't seem to support paging return complete(submit(req("GET", cnst("/organization_fields.json")), handleList(OrganizationField.class, "organization_fields"))); } public Iterable<Organization> getAutoCompleteOrganizations(String name) { if (name == null || name.length() < 2) { throw new IllegalArgumentException("Name must be at least 2 characters long"); } return new PagedIterable<Organization>(tmpl("/organizations/autocomplete.json{?name}").set("name", name), handleList(Organization.class, "organizations")); } // TODO getOrganizationRelatedInformation public Organization getOrganization(long id) { return complete(submit(req("GET", tmpl("/organizations/{id}.json").set("id", id)), handle(Organization.class, "organization"))); } public Organization createOrganization(Organization organization) { return complete(submit(req("POST", cnst("/organizations.json"), JSON, json( Collections.singletonMap("organization", organization))), handle(Organization.class, "organization"))); } public JobStatus<Organization> createOrganizations(Organization... 
organizations) {
        return createOrganizations(Arrays.asList(organizations));
    }

    /**
     * Creates several organizations with one bulk request and blocks until the job status
     * is returned.
     *
     * @param organizations the organizations to create
     * @return the status of the asynchronous create_many job
     */
    public JobStatus<Organization> createOrganizations(List<Organization> organizations) {
        // Fixed raw type: was "JobStatus" — parameterizing as JobStatus<Organization> matches
        // createOrganizationsAsync below and is backward compatible for raw-type callers.
        return complete(createOrganizationsAsync(organizations));
    }

    /**
     * Submits a bulk organization create and returns the job-status future without blocking.
     *
     * @param organizations the organizations to create
     * @return future resolving to the create_many job status
     */
    public ListenableFuture<JobStatus<Organization>> createOrganizationsAsync(List<Organization> organizations) {
        return submit(req("POST", cnst("/organizations/create_many.json"), JSON, json(
                Collections.singletonMap("organizations", organizations))),
                handleJobStatus(Organization.class));
    }

    /**
     * Updates an existing organization.
     *
     * @param organization organization to update; must have an id
     * @return the updated organization as returned by the API
     * @throws IllegalArgumentException if the organization has no id
     */
    public Organization updateOrganization(Organization organization) {
        checkHasId(organization);
        return complete(submit(req("PUT", tmpl("/organizations/{id}.json").set("id", organization.getId()),
                JSON, json(Collections.singletonMap("organization", organization))),
                handle(Organization.class, "organization")));
    }

    /**
     * Deletes the given organization.
     *
     * @param organization organization to delete; must have an id
     * @throws IllegalArgumentException if the organization has no id
     */
    public void deleteOrganization(Organization organization) {
        checkHasId(organization);
        deleteOrganization(organization.getId());
    }

    /**
     * Deletes the organization with the given id.
     *
     * @param id organization id
     */
    public void deleteOrganization(long id) {
        complete(submit(req("DELETE", tmpl("/organizations/{id}.json").set("id", id)),
                handleStatus()));
    }

    /**
     * Looks up organizations by external id.
     *
     * @param externalId the external id to search for; must be at least 2 characters
     * @return lazily-paged organizations matching the external id
     * @throws IllegalArgumentException if externalId is null or shorter than 2 characters
     */
    public Iterable<Organization> lookupOrganizationsByExternalId(String externalId) {
        if (externalId == null || externalId.length() < 2) {
            // Fixed message: previously said "Name" (copied from getAutoCompleteOrganizations),
            // but the validated argument is the external id.
            throw new IllegalArgumentException("External id must be at least 2 characters long");
        }
        return new PagedIterable<Organization>(
                tmpl("/organizations/search.json{?external_id}").set("external_id", externalId),
                handleList(Organization.class, "organizations"));
    }

    /**
     * Returns all groups, paged lazily.
     */
    public Iterable<Group> getGroups() {
        return new PagedIterable<Group>(cnst("/groups.json"),
                handleList(Group.class, "groups"));
    }

    /**
     * Returns all assignable groups, paged lazily.
     */
    public Iterable<Group> getAssignableGroups() {
        return new PagedIterable<Group>(cnst("/groups/assignable.json"),
                handleList(Group.class, "groups"));
    }

    /**
     * Returns the group with the given id, or null if the API responds 404.
     *
     * @param id group id
     */
    public Group getGroup(long id) {
        return complete(submit(req("GET", tmpl("/groups/{id}.json").set("id", id)),
                handle(Group.class, "group")));
    }

    /**
     * Creates a new group.
     *
     * @param group the group to create
     * @return the created group as returned by the API
     */
    public Group createGroup(Group group) {
        return complete(submit(req("POST", cnst("/groups.json"), JSON,
json( Collections.singletonMap("group", group))), handle(Group.class, "group"))); } public List<Group> createGroups(Group... groups) { return createGroups(Arrays.asList(groups)); } public List<Group> createGroups(List<Group> groups) { return complete(submit(req("POST", cnst("/groups/create_many.json"), JSON, json( Collections.singletonMap("groups", groups))), handleList(Group.class, "results"))); } public Group updateGroup(Group group) { checkHasId(group); return complete(submit(req("PUT", tmpl("/groups/{id}.json").set("id", group.getId()), JSON, json( Collections.singletonMap("group", group))), handle(Group.class, "group"))); } public void deleteGroup(Group group) { checkHasId(group); deleteGroup(group.getId()); } public void deleteGroup(long id) { complete(submit(req("DELETE", tmpl("/groups/{id}.json").set("id", id)), handleStatus())); } public Iterable<Macro> getMacros(){ return new PagedIterable<Macro>(cnst("/macros.json"), handleList(Macro.class, "macros")); } public Macro getMacro(long macroId){ return complete(submit(req("GET", tmpl("/macros/{id}.json").set("id", macroId)), handle(Macro.class, "macro"))); } public Macro createMacro(Macro macro) { return complete(submit( req("POST", cnst("/macros.json"), JSON, json(Collections.singletonMap("macro", macro))), handle(Macro.class, "macro"))); } public Macro updateMacro(Long macroId, Macro macro) { return complete(submit(req("PUT", tmpl("/macros/{id}.json").set("id", macroId), JSON, json(Collections.singletonMap("macro", macro))), handle(Macro.class, "macro"))); } public Ticket macrosShowChangesToTicket(long macroId) { return complete(submit(req("GET", tmpl("/macros/{id}/apply.json").set("id", macroId)), handle(TicketResult.class, "result"))).getTicket(); } public Ticket macrosShowTicketAfterChanges(long ticketId, long macroId) { return complete(submit(req("GET", tmpl("/tickets/{ticket_id}/macros/{id}/apply.json") .set("ticket_id", ticketId) .set("id", macroId)), handle(TicketResult.class, 
"result"))).getTicket(); } public List<String> addTagToTicket(long id, String... tags) { return complete(submit( req("PUT", tmpl("/tickets/{id}/tags.json").set("id", id), JSON, json(Collections.singletonMap("tags", tags))), handle(List.class, "tags"))); } public List<String> addTagToTopics(long id, String... tags) { return complete(submit( req("PUT", tmpl("/topics/{id}/tags.json").set("id", id), JSON, json(Collections.singletonMap("tags", tags))), handle(List.class, "tags"))); } public List<String> addTagToOrganisations(long id, String... tags) { return complete(submit( req("PUT", tmpl("/organizations/{id}/tags.json").set("id", id), JSON, json(Collections.singletonMap("tags", tags))), handle(List.class, "tags"))); } public List<String> setTagOnTicket(long id, String... tags) { return complete(submit( req("POST", tmpl("/tickets/{id}/tags.json").set("id", id), JSON, json(Collections.singletonMap("tags", tags))), handle(List.class, "tags"))); } public List<String> setTagOnTopics(long id, String... tags) { return complete(submit( req("POST", tmpl("/topics/{id}/tags.json").set("id", id), JSON, json(Collections.singletonMap("tags", tags))), handle(List.class, "tags"))); } public List<String> setTagOnOrganisations(long id, String... tags) { return complete(submit( req("POST", tmpl("/organizations/{id}/tags.json").set("id", id), JSON, json(Collections.singletonMap("tags", tags))), handle(List.class, "tags"))); } public List<String> removeTagFromTicket(long id, String... tags) { return complete(submit( req("DELETE", tmpl("/tickets/{id}/tags.json").set("id", id), JSON, json(Collections.singletonMap("tags", tags))), handle(List.class, "tags"))); } public List<String> removeTagFromTopics(long id, String... tags) { return complete(submit( req("DELETE", tmpl("/topics/{id}/tags.json").set("id", id), JSON, json(Collections.singletonMap("tags", tags))), handle(List.class, "tags"))); } public List<String> removeTagFromOrganisations(long id, String... 
tags) { return complete(submit( req("DELETE", tmpl("/organizations/{id}/tags.json").set("id", id), JSON, json(Collections.singletonMap("tags", tags))), handle(List.class, "tags"))); } public Map getIncrementalTicketsResult(long unixEpochTime) { return complete(submit( req("GET", tmpl("/exports/tickets.json?start_time={time}").set( "time", unixEpochTime)), handle(Map.class))); } public Iterable<GroupMembership> getGroupMemberships() { return new PagedIterable<GroupMembership>(cnst("/group_memberships.json"), handleList(GroupMembership.class, "group_memberships")); } public List<GroupMembership> getGroupMembershipByUser(long user_id) { return complete(submit(req("GET", tmpl("/users/{user_id}/group_memberships.json").set("user_id", user_id)), handleList(GroupMembership.class, "group_memberships"))); } public List<GroupMembership> getGroupMemberships(long group_id) { return complete(submit(req("GET", tmpl("/groups/{group_id}/memberships.json").set("group_id", group_id)), handleList(GroupMembership.class, "group_memberships"))); } public Iterable<GroupMembership> getAssignableGroupMemberships() { return new PagedIterable<GroupMembership>(cnst("/group_memberships/assignable.json"), handleList(GroupMembership.class, "group_memberships")); } public List<GroupMembership> getAssignableGroupMemberships(long group_id) { return complete(submit(req("GET", tmpl("/groups/{group_id}/memberships/assignable.json").set("group_id", group_id)), handleList(GroupMembership.class, "group_memberships"))); } public GroupMembership getGroupMembership(long id) { return complete(submit(req("GET", tmpl("/group_memberships/{id}.json").set("id", id)), handle(GroupMembership.class, "group_membership"))); } public GroupMembership getGroupMembership(long user_id, long group_membership_id) { return complete(submit(req("GET", tmpl("/users/{uid}/group_memberships/{gmid}.json").set("uid", user_id) .set("gmid", group_membership_id)), handle(GroupMembership.class, "group_membership"))); } public 
GroupMembership createGroupMembership(GroupMembership groupMembership) {
        return complete(submit(req("POST", cnst("/group_memberships.json"), JSON, json(
                Collections.singletonMap("group_membership", groupMembership))),
                handle(GroupMembership.class, "group_membership")));
    }

    /**
     * Creates a group membership scoped to the given user.
     *
     * @param user_id the user to add to the group
     * @param groupMembership membership describing the target group
     * @return the created group membership
     */
    public GroupMembership createGroupMembership(long user_id, GroupMembership groupMembership) {
        return complete(submit(req("POST", tmpl("/users/{id}/group_memberships.json").set("id", user_id),
                JSON, json(Collections.singletonMap("group_membership", groupMembership))),
                handle(GroupMembership.class, "group_membership")));
    }

    /**
     * Deletes the given group membership.
     *
     * @param groupMembership membership to delete; must have an id
     * @throws IllegalArgumentException if the membership has no id
     */
    public void deleteGroupMembership(GroupMembership groupMembership) {
        checkHasId(groupMembership);
        deleteGroupMembership(groupMembership.getId());
    }

    /**
     * Deletes the group membership with the given id.
     *
     * @param id group membership id
     */
    public void deleteGroupMembership(long id) {
        // Fixed endpoint typo: was "/groups_memberships/{id}.json", which is not a valid
        // Zendesk API path (every other membership call here uses "group_memberships") and
        // would always return 404.
        complete(submit(req("DELETE", tmpl("/group_memberships/{id}.json").set("id", id)),
                handleStatus()));
    }

    /**
     * Deletes the given group membership via the user-scoped endpoint.
     *
     * @param user_id the owning user's id
     * @param groupMembership membership to delete; must have an id
     * @throws IllegalArgumentException if the membership has no id
     */
    public void deleteGroupMembership(long user_id, GroupMembership groupMembership) {
        checkHasId(groupMembership);
        deleteGroupMembership(user_id, groupMembership.getId());
    }

    /**
     * Deletes a group membership via the user-scoped endpoint.
     *
     * @param user_id the owning user's id
     * @param group_membership_id the membership id
     */
    public void deleteGroupMembership(long user_id, long group_membership_id) {
        // Fixed endpoint typo: was "/users/{uid}/groups_memberships/{gmid}.json" — same
        // "groups_memberships" misspelling as above; the API path is "group_memberships".
        complete(submit(req("DELETE", tmpl("/users/{uid}/group_memberships/{gmid}.json").set("uid", user_id)
                .set("gmid", group_membership_id)), handleStatus()));
    }

    /**
     * Marks the given membership as the user's default group membership.
     *
     * @param user_id the owning user's id
     * @param groupMembership membership to promote; must have an id
     * @return the user's updated group memberships
     * @throws IllegalArgumentException if the membership has no id
     */
    public List<GroupMembership> setGroupMembershipAsDefault(long user_id, GroupMembership groupMembership) {
        checkHasId(groupMembership);
        return complete(submit(req("POST", tmpl("/users/{uid}/group_memberships/{gmid}/make_default.json")
                .set("uid", user_id).set("gmid", groupMembership.getId()), JSON, json(
                Collections.singletonMap("group_memberships", groupMembership))),
                handleList(GroupMembership.class, "results")));
    }

    /**
     * Returns all forums, paged lazily.
     */
    public Iterable<Forum> getForums() {
        return new PagedIterable<Forum>(cnst("/forums.json"), handleList(Forum.class, "forums"));
    }

    /**
     * Returns the forums belonging to a category.
     *
     * @param category_id the category id
     */
    public List<Forum> getForums(long category_id) {
        return complete(submit(req("GET", tmpl("/categories/{id}/forums.json").set("id", category_id)),
handleList(Forum.class, "forums"))); } public Forum getForum(long id) { return complete(submit(req("GET", tmpl("/forums/{id}.json").set("id", id)), handle(Forum.class, "forum"))); } public Forum createForum(Forum forum) { return complete(submit(req("POST", cnst("/forums.json"), JSON, json( Collections.singletonMap("forum", forum))), handle(Forum.class, "forum"))); } public Forum updateForum(Forum forum) { checkHasId(forum); return complete(submit(req("PUT", tmpl("/forums/{id}.json").set("id", forum.getId()), JSON, json( Collections.singletonMap("forum", forum))), handle(Forum.class, "forum"))); } public void deleteForum(Forum forum) { checkHasId(forum); complete(submit(req("DELETE", tmpl("/forums/{id}.json").set("id", forum.getId())), handleStatus())); } public Iterable<Topic> getTopics() { return new PagedIterable<Topic>(cnst("/topics.json"), handleList(Topic.class, "topics")); } public List<Topic> getTopics(long forum_id) { return complete(submit(req("GET", tmpl("/forums/{id}/topics.json").set("id", forum_id)), handleList(Topic.class, "topics"))); } public List<Topic> getTopicsByUser(long user_id) { return complete(submit(req("GET", tmpl("/users/{id}/topics.json").set("id", user_id)), handleList(Topic.class, "topics"))); } public Topic getTopic(long id) { return complete(submit(req("GET", tmpl("/topics/{id}.json").set("id", id)), handle(Topic.class, "topic"))); } public Topic createTopic(Topic topic) { checkHasId(topic); return complete(submit(req("POST", cnst("/topics.json"), JSON, json( Collections.singletonMap("topic", topic))), handle(Topic.class, "topic"))); } public Topic importTopic(Topic topic) { checkHasId(topic); return complete(submit(req("POST", cnst("/import/topics.json"), JSON, json( Collections.singletonMap("topic", topic))), handle(Topic.class, "topic"))); } public List<Topic> getTopics(long id, long... 
ids) { return complete(submit(req("POST", tmpl("/topics/show_many.json{?ids}").set("ids", idArray(id, ids))), handleList(Topic.class, "topics"))); } public Topic updateTopic(Topic topic) { checkHasId(topic); return complete(submit(req("PUT", tmpl("/topics/{id}.json").set("id", topic.getId()), JSON, json( Collections.singletonMap("topic", topic))), handle(Topic.class, "topic"))); } public void deleteTopic(Topic topic) { checkHasId(topic); complete(submit(req("DELETE", tmpl("/topics/{id}.json").set("id", topic.getId())), handleStatus())); } public Iterable<SearchResultEntity> getSearchResults(String query) { return new PagedIterable<SearchResultEntity>(tmpl("/search.json{?query}").set("query", query), handleSearchList("results")); } public <T extends SearchResultEntity> Iterable<T> getSearchResults(Class<T> type, String query) { return getSearchResults(type, query, null); } public <T extends SearchResultEntity> Iterable<T> getSearchResults(Class<T> type, String query, String params) { String typeName = null; for (Map.Entry<String, Class<? extends SearchResultEntity>> entry : searchResultTypes.entrySet()) { if (type.equals(entry.getValue())) { typeName = entry.getKey(); break; } } if (typeName == null) { return Collections.emptyList(); } return new PagedIterable<T>(tmpl("/search.json{?query,params}") .set("query", query + "+type:" + typeName) .set("params", params), handleList(type, "results")); } public void notifyApp(String json) { complete(submit(req("POST", cnst("/apps/notify.json"), JSON, json.getBytes()), handleStatus())); } public void updateInstallation(int id, String json) { complete(submit(req("PUT", tmpl("/apps/installations/{id}.json").set("id", id), JSON, json.getBytes()), handleStatus())); } // TODO search with sort order // TODO search with query building API // Action methods for Help Center /** * Get all articles from help center. * * @return List of Articles. 
*/ public Iterable<Article> getArticles() { return new PagedIterable<Article>(cnst("/help_center/articles.json"), handleList(Article.class, "articles")); } public Iterable<Article> getArticlesIncrementally(Date startTime) { return new PagedIterable<Article>( tmpl("/help_center/incremental/articles.json{?start_time}") .set("start_time", msToSeconds(startTime.getTime())), handleIncrementalList(Article.class, "articles")); } public List<Article> getArticlesFromPage(int page) { return complete(submit(req("GET", tmpl("/help_center/articles.json?page={page}").set("page", page)), handleList(Article.class, "articles"))); } public Article getArticle(int id) { return complete(submit(req("GET", tmpl("/help_center/articles/{id}.json").set("id", id)), handle(Article.class, "article"))); } public Iterable<Translation> getArticleTranslations(Long articleId) { return new PagedIterable<Translation>( tmpl("/help_center/articles/{articleId}/translations.json").set("articleId", articleId), handleList(Translation.class, "translations")); } public Article createArticle(Article article) { checkHasSectionId(article); return complete(submit(req("POST", tmpl("/help_center/sections/{id}/articles.json").set("id", article.getSectionId()), JSON, json(Collections.singletonMap("article", article))), handle(Article.class, "article"))); } public Article updateArticle(Article article) { checkHasId(article); return complete(submit(req("PUT", tmpl("/help_center/articles/{id}.json").set("id", article.getId()), JSON, json(Collections.singletonMap("article", article))), handle(Article.class, "article"))); } public void deleteArticle(Article article) { checkHasId(article); complete(submit(req("DELETE", tmpl("/help_center/articles/{id}.json").set("id", article.getId())), handleStatus())); } /** * Delete attachment from article. 
* @param attachment */ public void deleteArticleAttachment(ArticleAttachments attachment) { if (attachment.getId() == 0) { throw new IllegalArgumentException("Attachment requires id"); } deleteArticleAttachment(attachment.getId()); } /** * Delete attachment from article. * @param id attachment identifier. */ public void deleteArticleAttachment(long id) { complete(submit(req("DELETE", tmpl("/help_center/articles/attachments/{id}.json").set("id", id)), handleStatus())); } public List<Category> getCategories() { return complete(submit(req("GET", cnst("/help_center/categories.json")), handleList(Category.class, "categories"))); } public Category getCategory(int id) { return complete(submit(req("GET", tmpl("/help_center/categories/{id}.json").set("id", id)), handle(Category.class, "category"))); } public Iterable<Translation> getCategoryTranslations(Long categoryId) { return new PagedIterable<Translation>( tmpl("/help_center/categories/{categoryId}/translations.json").set("categoryId", categoryId), handleList(Translation.class, "translations")); } public Category createCategory(Category category) { return complete(submit(req("POST", cnst("/help_center/categories.json"), JSON, json(Collections.singletonMap("category", category))), handle(Category.class, "category"))); } public Category updateCategory(Category category) { checkHasId(category); return complete(submit(req("PUT", tmpl("/help_center/categories/{id}.json").set("id", category.getId()), JSON, json(Collections.singletonMap("category", category))), handle(Category.class, "category"))); } public void deleteCategory(Category category) { checkHasId(category); complete(submit(req("DELETE", tmpl("/help_center/categories/{id}.json").set("id", category.getId())), handleStatus())); } public List<Section> getSections() { return complete(submit(req("GET", cnst("/help_center/sections.json")), handleList(Section.class, "sections"))); } public List<Section> getSections(Category category) { checkHasId(category); return 
complete(submit(req("GET", tmpl("/help_center/categories/{id}/sections.json").set("id", category.getId())), handleList(Section.class, "sections"))); } public Section getSection(int id) { return complete(submit(req("GET", tmpl("/help_center/sections/{id}.json").set("id", id)), handle(Section.class, "section"))); } public Iterable<Translation> getSectionTranslations(Long sectionId) { return new PagedIterable<Translation>( tmpl("/help_center/sections/{sectionId}/translations.json").set("sectionId", sectionId), handleList(Translation.class, "translations")); } public Section createSection(Section section) { return complete(submit(req("POST", cnst("/help_center/sections.json"), JSON, json(Collections.singletonMap("section", section))), handle(Section.class, "section"))); } public Section updateSection(Section section) { checkHasId(section); return complete(submit(req("PUT", tmpl("/help_center/sections/{id}.json").set("id", section.getId()), JSON, json(Collections.singletonMap("section", section))), handle(Section.class, "section"))); } public void deleteSection(Section section) { checkHasId(section); complete(submit(req("DELETE", tmpl("/help_center/sections/{id}.json").set("id", section.getId())), handleStatus())); } // Helper methods private byte[] json(Object object) { try { return mapper.writeValueAsBytes(object); } catch (JsonProcessingException e) { throw new ZendeskException(e.getMessage(), e); } } private <T> ListenableFuture<T> submit(Request request, ZendeskAsyncCompletionHandler<T> handler) { if (logger.isDebugEnabled()) { if (request.getStringData() != null) { logger.debug("Request {} {}\n{}", request.getMethod(), request.getUrl(), request.getStringData()); } else if (request.getByteData() != null) { logger.debug("Request {} {} {} {} bytes", request.getMethod(), request.getUrl(), request.getHeaders().getFirstValue("Content-type"), request.getByteData().length); } else { logger.debug("Request {} {}", request.getMethod(), request.getUrl()); } } return 
client.executeRequest(request, handler); } private static abstract class ZendeskAsyncCompletionHandler<T> extends AsyncCompletionHandler<T> { @Override public void onThrowable(Throwable t) { if (t instanceof IOException) { throw new ZendeskException(t); } else { super.onThrowable(t); } } } private Request req(String method, Uri template) { return req(method, template.toString()); } private static final Pattern RESTRICTED_PATTERN = Pattern.compile("%2B", Pattern.LITERAL); private Request req(String method, String url) { RequestBuilder builder = new RequestBuilder(method); if (realm != null) { builder.setRealm(realm); } else { builder.addHeader("Authorization", "Bearer " + oauthToken); } builder.setUrl(RESTRICTED_PATTERN.matcher(url).replaceAll("+")); // replace out %2B with + due to API restriction return builder.build(); } private Request req(String method, Uri template, String contentType, byte[] body) { RequestBuilder builder = new RequestBuilder(method); if (realm != null) { builder.setRealm(realm); } else { builder.addHeader("Authorization", "Bearer " + oauthToken); } builder.setUrl(RESTRICTED_PATTERN.matcher(template.toString()).replaceAll("+")); //replace out %2B with + due to API restriction builder.addHeader("Content-type", contentType); builder.setBody(body); return builder.build(); } protected ZendeskAsyncCompletionHandler<Void> handleStatus() { return new ZendeskAsyncCompletionHandler<Void>() { @Override public Void onCompleted(Response response) throws Exception { logResponse(response); if (isStatus2xx(response)) { return null; } throw new ZendeskResponseException(response); } }; } @SuppressWarnings("unchecked") protected <T> ZendeskAsyncCompletionHandler<T> handle(final Class<T> clazz) { return new ZendeskAsyncCompletionHandler<T>() { @Override public T onCompleted(Response response) throws Exception { logResponse(response); if (isStatus2xx(response)) { return (T) mapper.reader(clazz).readValue(response.getResponseBodyAsStream()); } if 
(response.getStatusCode() == 404) { return null; } throw new ZendeskResponseException(response); } }; } private class BasicAsyncCompletionHandler<T> extends ZendeskAsyncCompletionHandler<T> { private final Class<T> clazz; private final String name; private final Class[] typeParams; public BasicAsyncCompletionHandler(Class clazz, String name, Class... typeParams) { this.clazz = clazz; this.name = name; this.typeParams = typeParams; } @Override public T onCompleted(Response response) throws Exception { logResponse(response); if (isStatus2xx(response)) { if (typeParams.length > 0) { JavaType type = mapper.getTypeFactory().constructParametricType(clazz, typeParams); return mapper.convertValue(mapper.readTree(response.getResponseBodyAsStream()).get(name), type); } return mapper.convertValue(mapper.readTree(response.getResponseBodyAsStream()).get(name), clazz); } if (response.getStatusCode() == 404) { return null; } throw new ZendeskResponseException(response); } } protected <T> ZendeskAsyncCompletionHandler<T> handle(final Class<T> clazz, final String name, final Class... 
typeParams) { return new BasicAsyncCompletionHandler<T>(clazz, name, typeParams); } protected <T> ZendeskAsyncCompletionHandler<JobStatus<T>> handleJobStatus(final Class<T> resultClass) { return new BasicAsyncCompletionHandler<JobStatus<T>>(JobStatus.class, "job_status", resultClass) { @Override public JobStatus<T> onCompleted(Response response) throws Exception { JobStatus<T> result = super.onCompleted(response); result.setResultsClass(resultClass); return result; } }; } private static final String NEXT_PAGE = "next_page"; private static final String END_TIME = "end_time"; private static final String COUNT = "count"; private static final int INCREMENTAL_EXPORT_MAX_COUNT_BY_REQUEST = 1000; private abstract class PagedAsyncCompletionHandler<T> extends ZendeskAsyncCompletionHandler<T> { private String nextPage; public void setPagedProperties(JsonNode responseNode, Class<?> clazz) { JsonNode node = responseNode.get(NEXT_PAGE); if (node == null) { this.nextPage = null; if (logger.isDebugEnabled()) { logger.debug(NEXT_PAGE + " property not found, pagination not supported" + (clazz != null ? 
" for " + clazz.getName() : "")); } } else { this.nextPage = node.asText(); } } public String getNextPage() { return nextPage; } public void setNextPage(String nextPage) { this.nextPage = nextPage; } } private class PagedAsyncListCompletionHandler<T> extends PagedAsyncCompletionHandler<List<T>> { private final Class<T> clazz; private final String name; public PagedAsyncListCompletionHandler(Class<T> clazz, String name) { this.clazz = clazz; this.name = name; } @Override public List<T> onCompleted(Response response) throws Exception { logResponse(response); if (isStatus2xx(response)) { JsonNode responseNode = mapper.readTree(response.getResponseBodyAsBytes()); setPagedProperties(responseNode, clazz); List<T> values = new ArrayList<T>(); for (JsonNode node : responseNode.get(name)) { values.add(mapper.convertValue(node, clazz)); } return values; } throw new ZendeskResponseException(response); } } protected <T> PagedAsyncCompletionHandler<List<T>> handleList(final Class<T> clazz, final String name) { return new PagedAsyncListCompletionHandler<T>(clazz, name); } private static final long FIVE_MINUTES = TimeUnit.MINUTES.toMillis(5); protected <T> PagedAsyncCompletionHandler<List<T>> handleIncrementalList(final Class<T> clazz, final String name) { return new PagedAsyncListCompletionHandler<T>(clazz, name) { @Override public void setPagedProperties(JsonNode responseNode, Class<?> clazz) { JsonNode node = responseNode.get(NEXT_PAGE); if (node == null) { if (logger.isDebugEnabled()) { logger.debug(NEXT_PAGE + " property not found, pagination not supported" + (clazz != null ? " for " + clazz.getName() : "")); } setNextPage(null); return; } JsonNode endTimeNode = responseNode.get(END_TIME); if (endTimeNode == null || endTimeNode.asLong() == 0) { if (logger.isDebugEnabled()) { logger.debug(END_TIME + " property not found, incremental export pagination not supported" + (clazz != null ? 
" for " + clazz.getName() : "")); } setNextPage(null); return; } /** * A request after five minutes ago will result in a 422 responds from Zendesk. * Therefore, we stop pagination. */ if (TimeUnit.SECONDS.toMillis(endTimeNode.asLong()) > System.currentTimeMillis() - FIVE_MINUTES) { setNextPage(null); } else { // Taking into account documentation found at https://developer.zendesk.com/rest_api/docs/core/incremental_export#polling-strategy JsonNode countNode = responseNode.get(COUNT); if (countNode == null) { if (logger.isDebugEnabled()) { logger.debug(COUNT + " property not found, incremental export pagination not supported" + (clazz != null ? " for " + clazz.getName() : "")); } setNextPage(null); return; } if (countNode.asInt() < INCREMENTAL_EXPORT_MAX_COUNT_BY_REQUEST) { setNextPage(null); } else { setNextPage(node.asText()); } } } }; } protected PagedAsyncCompletionHandler<List<SearchResultEntity>> handleSearchList(final String name) { return new PagedAsyncCompletionHandler<List<SearchResultEntity>>() { @Override public List<SearchResultEntity> onCompleted(Response response) throws Exception { logResponse(response); if (isStatus2xx(response)) { JsonNode responseNode = mapper.readTree(response.getResponseBodyAsStream()).get(name); setPagedProperties(responseNode, null); List<SearchResultEntity> values = new ArrayList<SearchResultEntity>(); for (JsonNode node : responseNode) { Class<? 
extends SearchResultEntity> clazz = searchResultTypes.get(node.get("result_type")); if (clazz != null) { values.add(mapper.convertValue(node, clazz)); } } return values; } throw new ZendeskResponseException(response); } }; } protected PagedAsyncCompletionHandler<List<Target>> handleTargetList(final String name) { return new PagedAsyncCompletionHandler<List<Target>>() { @Override public List<Target> onCompleted(Response response) throws Exception { logResponse(response); if (isStatus2xx(response)) { JsonNode responseNode = mapper.readTree(response.getResponseBodyAsBytes()); setPagedProperties(responseNode, null); List<Target> values = new ArrayList<Target>(); for (JsonNode node : responseNode.get(name)) { Class<? extends Target> clazz = targetTypes.get(node.get("type").asText()); if (clazz != null) { values.add(mapper.convertValue(node, clazz)); } } return values; } throw new ZendeskResponseException(response); } }; } protected PagedAsyncCompletionHandler<List<ArticleAttachments>> handleArticleAttachmentsList(final String name) { return new PagedAsyncCompletionHandler<List<ArticleAttachments>>() { @Override public List<ArticleAttachments> onCompleted(Response response) throws Exception { logResponse(response); if (isStatus2xx(response)) { JsonNode responseNode = mapper.readTree(response.getResponseBodyAsBytes()); List<ArticleAttachments> values = new ArrayList<ArticleAttachments>(); for (JsonNode node : responseNode.get(name)) { values.add(mapper.convertValue(node, ArticleAttachments.class)); } return values; } throw new ZendeskResponseException(response); } }; } private TemplateUri tmpl(String template) { return new TemplateUri(url + template); } private Uri cnst(String template) { return new FixedUri(url + template); } private void logResponse(Response response) throws IOException { if (logger.isDebugEnabled()) { logger.debug("Response HTTP/{} {}\n{}", response.getStatusCode(), response.getStatusText(), response.getResponseBody()); } if (logger.isTraceEnabled()) { 
logger.trace("Response headers {}", response.getHeaders()); } } private static final String UTF_8 = "UTF-8"; private static String encodeUrl(String input) { try { return URLEncoder.encode(input, UTF_8); } catch (UnsupportedEncodingException impossible) { return input; } } private static long msToSeconds(long millis) { return TimeUnit.MILLISECONDS.toSeconds(millis); } private boolean isStatus2xx(Response response) { return response.getStatusCode() / 100 == 2; } // Static helper methods private static <T> T complete(ListenableFuture<T> future) { try { return future.get(); } catch (InterruptedException e) { throw new ZendeskException(e.getMessage(), e); } catch (ExecutionException e) { if (e.getCause() instanceof ZendeskException) { throw (ZendeskException) e.getCause(); } throw new ZendeskException(e.getMessage(), e); } } private static void checkHasId(Ticket ticket) { if (ticket.getId() == null) { throw new IllegalArgumentException("Ticket requires id"); } } private static void checkHasId(org.zendesk.client.v2.model.Request request) { if (request.getId() == null) { throw new IllegalArgumentException("Request requires id"); } } private static void checkHasId(Audit audit) { if (audit.getId() == null) { throw new IllegalArgumentException("Audit requires id"); } } private static void checkHasId(Comment comment) { if (comment.getId() == null) { throw new IllegalArgumentException("Comment requires id"); } } private static void checkHasId(Field field) { if (field.getId() == null) { throw new IllegalArgumentException("Field requires id"); } } private static void checkHasId(Attachment attachment) { if (attachment.getId() == null) { throw new IllegalArgumentException("Attachment requires id"); } } private static void checkHasId(User user) { if (user.getId() == null) { throw new IllegalArgumentException("User requires id"); } } private static void checkHasId(Identity identity) { if (identity.getId() == null) { throw new IllegalArgumentException("Identity requires id"); } } 
private static void checkHasId(Organization organization) { if (organization.getId() == null) { throw new IllegalArgumentException("Organization requires id"); } } private static void checkHasId(Group group) { if (group.getId() == null) { throw new IllegalArgumentException("Group requires id"); } } private static void checkHasId(GroupMembership groupMembership) { if (groupMembership.getId() == null) { throw new IllegalArgumentException("GroupMembership requires id"); } } private void checkHasId(Forum forum) { if (forum.getId() == null) { throw new IllegalArgumentException("Forum requires id"); } } private void checkHasId(Topic topic) { if (topic.getId() == null) { throw new IllegalArgumentException("Topic requires id"); } } private static void checkHasId(Article article) { if (article.getId() == null) { throw new IllegalArgumentException("Article requires id"); } } private static void checkHasSectionId(Article article) { if (article.getSectionId() == null) { throw new IllegalArgumentException("Article requires section id"); } } private static void checkHasId(Category category) { if (category.getId() == null) { throw new IllegalArgumentException("Category requires id"); } } private static void checkHasId(Section section) { if (section.getId() == null) { throw new IllegalArgumentException("Section requires id"); } } private static void checkHasId(SuspendedTicket ticket) { if (ticket == null || ticket.getId() == null) { throw new IllegalArgumentException("SuspendedTicket requires id"); } } private static void checkHasToken(Attachment.Upload upload) { if (upload.getToken() == null) { throw new IllegalArgumentException("Upload requires token"); } } private static List<Long> idArray(long id, long... ids) { List<Long> result = new ArrayList<Long>(ids.length + 1); result.add(id); for (long i : ids) { result.add(i); } return result; } private static List<String> statusArray(Status... 
statuses) { List<String> result = new ArrayList<String>(statuses.length); for (Status s : statuses) { result.add(s.toString()); } return result; } public static ObjectMapper createMapper() { ObjectMapper mapper = new ObjectMapper(); mapper.enable(SerializationFeature.WRITE_ENUMS_USING_TO_STRING); mapper.enable(DeserializationFeature.READ_ENUMS_USING_TO_STRING); mapper.disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES); mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); return mapper; } // Helper classes private class PagedIterable<T> implements Iterable<T> { private final Uri url; private final PagedAsyncCompletionHandler<List<T>> handler; private PagedIterable(Uri url, PagedAsyncCompletionHandler<List<T>> handler) { this.handler = handler; this.url = url; } public Iterator<T> iterator() { return new PagedIterator(url); } private class PagedIterator implements Iterator<T> { private Iterator<T> current; private String nextPage; public PagedIterator(Uri url) { this.nextPage = url.toString(); } public boolean hasNext() { if (current == null || !current.hasNext()) { if (nextPage == null || nextPage.equalsIgnoreCase("null")) { return false; } List<T> values = complete(submit(req("GET", nextPage), handler)); nextPage = handler.getNextPage(); current = values.iterator(); } return current.hasNext(); } public T next() { if (!hasNext()) { throw new NoSuchElementException(); } return current.next(); } public void remove() { throw new UnsupportedOperationException(); } } } public static class Builder { private AsyncHttpClient client = null; private final String url; private String username = null; private String password = null; private String token = null; private String oauthToken = null; public Builder(String url) { this.url = url; } public Builder setClient(AsyncHttpClient client) { this.client = client; return this; } public Builder setUsername(String username) { this.username = username; return this; } public Builder setPassword(String password) { 
this.password = password; if (password != null) { this.token = null; this.oauthToken = null; } return this; } public Builder setToken(String token) { this.token = token; if (token != null) { this.password = null; this.oauthToken = null; } return this; } public Builder setOauthToken(String oauthToken) { this.oauthToken = oauthToken; if (oauthToken != null) { this.password = null; this.token = null; } return this; } public Builder setRetry(boolean retry) { return this; } public Zendesk build() { if (token != null) { return new Zendesk(client, url, username + "/token", token); } else if (oauthToken != null) { return new Zendesk(client, url, oauthToken); } return new Zendesk(client, url, username, password); } } }
package seedu.whatnow.model.task; import java.util.Comparator; import java.util.Objects; import seedu.whatnow.commons.util.CollectionUtil; import seedu.whatnow.model.tag.UniqueTagList; /** * Represents a Task in WhatNow. * Guarantees: details are present and not null, field values are validated. */ public class Task implements ReadOnlyTask, Comparable<Task> { private Name name; private String taskDate; private String startDate; private String endDate; private String taskTime; private String startTime; private String endTime; private UniqueTagList tags; private String status; private String taskType; private static final String FLOATING = "floating"; private static final String NOT_FLOATING = "not_floating"; private static final int COMPARE_TO_IS_EQUAL = 0; public Task() { } /** * Every field must be present and not null. */ public Task(Name name, String taskDate, String startDate, String endDate, String taskTime, String startTime, String endTime, UniqueTagList tags, String status, String taskType) { assert !CollectionUtil.isAnyNull(name, tags); this.name = name; this.tags = new UniqueTagList(tags); this.status = status; this.taskType = FLOATING; if (taskDate != null) { this.taskDate = taskDate; this.taskType = NOT_FLOATING; } if (startDate != null) { this.startDate = startDate; this.taskType = NOT_FLOATING; } if (endDate != null) { this.endDate = endDate; this.taskType = NOT_FLOATING; } if (taskTime != null) { this.taskTime = taskTime; this.taskType = NOT_FLOATING; } if (startTime != null) { this.startTime = startTime; this.taskType = NOT_FLOATING; } if (endTime != null) { this.endTime = endTime; this.taskType = NOT_FLOATING; } if (taskType != null) { this.taskType = taskType; } } /** * Copy constructor. 
*/ public Task(ReadOnlyTask source) { this(source.getName(), source.getTaskDate(), source.getStartDate(), source.getEndDate(), source.getTaskTime(), source.getStartTime(), source.getEndTime(), source.getTags(), source.getStatus(), source.getTaskType()); } @Override public Name getName() { return name; } @Override public String getTaskDate() { return taskDate; } @Override public String getStartDate() { return startDate; } @Override public String getEndDate() { return endDate; } @Override public String getTaskTime() { return taskTime; } @Override public String getStartTime() { return startTime; } @Override public String getEndTime() { return endTime; } @Override public UniqueTagList getTags() { return new UniqueTagList(tags); } @Override public String getStatus() { return status; } @Override public String getTaskType() { return taskType; } public void setName(Name name) { this.name = name; } public void setTaskDate(String taskDate) { this.taskDate = taskDate; } public void setStartDate(String startDate) { this.startDate = startDate; } public void setEndDate(String endDate) { this.endDate = endDate; } public void setTaskTime(String taskTime) { this.taskTime = taskTime; } public void setStartTime(String startTime) { this.startTime = startTime; } public void setEndTime(String endTime) { this.endTime = endTime; } /** * Replaces this task's tags with the tags in the argument tag list. 
*/ public void setTags(UniqueTagList replacement) { tags.setTags(replacement); } public void setStatus(String status) { this.status = status; } public void setTaskType(String taskType) { this.taskType = taskType; } public int compareTo(Task task) { if (isBothFloating(task)) { return COMPARE_TO_IS_EQUAL; } else if (isBothDeadline(task)) { if (this.taskDate.equals(task.taskDate)) { return COMPARE_TO_IS_EQUAL; //@zac : check for time later } else { return this.taskDate.compareToIgnoreCase(task.taskDate); //@zac : check for time later } } else if (isBothEvent(task)) { if (this.startDate.equals(task.startDate)) { return COMPARE_TO_IS_EQUAL; //@zac : check for time later } else { return this.startDate.compareToIgnoreCase(task.startDate); //@zac : check for time later } } else { return COMPARE_TO_IS_EQUAL; } } private boolean isBothFloating(Task task) { return this.taskDate == null && task.taskDate == null && this.startDate == null && task.startDate == null; } private boolean isBothDeadline(Task task) { return this.taskDate != null && task.taskDate != null && this.startDate == null && task.startDate == null; } private boolean isBothEvent(Task task) { return this.taskDate == null && task.taskDate == null && this.startDate != null && task.startDate != null; } @Override public boolean equals(Object other) { return other == this // short circuit if same object || (other instanceof ReadOnlyTask // instanceof handles nulls && this.isSameStateAs((ReadOnlyTask) other)); } @Override public int hashCode() { // use this method for custom fields hashing instead of implementing your own return Objects.hash(name, taskDate, tags, status, taskType); } @Override public String toString() { return getAsText(); } }
package twitter4j.api; import twitter4j.Paging; import twitter4j.ResponseList; import twitter4j.Status; import twitter4j.TwitterException; import twitter4j.http.PostParameter; /** * @author Joern Huxhorn - jhuxhorn at googlemail.com */ public interface TimelineMethods { ResponseList<Status> getPublicTimeline() throws TwitterException; ResponseList<Status> getPublicTimeline(long sinceID) throws TwitterException; ResponseList<Status> getHomeTimeline() throws TwitterException; ResponseList<Status> getHomeTimeline(Paging paging) throws TwitterException; ResponseList<Status> getFriendsTimeline() throws TwitterException; ResponseList<Status> getFriendsTimeline(Paging paging) throws TwitterException; ResponseList<Status> getUserTimeline(String screenName, Paging paging) throws TwitterException; ResponseList<Status> getUserTimeline(int userId, Paging paging) throws TwitterException; ResponseList<Status> getUserTimeline(String screenName) throws TwitterException; ResponseList<Status> getUserTimeline(int user_id) throws TwitterException; ResponseList<Status> getUserTimeline() throws TwitterException; ResponseList<Status> getUserTimeline(Paging paging) throws TwitterException; ResponseList<Status> getMentions() throws TwitterException; ResponseList<Status> getMentions(Paging paging) throws TwitterException; ResponseList<Status> getRetweetedByMe() throws TwitterException; ResponseList<Status> getRetweetedByMe(Paging paging) throws TwitterException; ResponseList<Status> getRetweetedToMe() throws TwitterException; ResponseList<Status> getRetweetedToMe(Paging paging) throws TwitterException; ResponseList<Status> getRetweetsOfMe() throws TwitterException; ResponseList<Status> getRetweetsOfMe(Paging paging) throws TwitterException; }
package test; /** * @hidden * @opt operations */ class UMLOptions{} abstract class AbstractNode { public abstract void abstractMethod(); public int concreteMethod() { return 1; } } /** * @composed 1 has * test.AbstractNode */ class InnerNode extends AbstractNode {} class Leaf extends AbstractNode {}
package de.golfgl.gdxgamesvcs; import com.badlogic.gdx.Application; import com.badlogic.gdx.Gdx; import com.badlogic.gdx.backends.lwjgl.LwjglApplication; import com.badlogic.gdx.backends.lwjgl.LwjglApplicationConfiguration; import com.badlogic.gdx.scenes.scene2d.ui.Table; import com.badlogic.gdx.scenes.scene2d.ui.TextField; public class GpgsClientTest extends GameServiceClientTest<GpgsClient> { public static void main(String[] args) { LwjglApplicationConfiguration config = new LwjglApplicationConfiguration(); config.width = 800; config.height = 950; GpgsClient client = new GpgsClient(); client.setLogLevel(Application.LOG_ERROR); new LwjglApplication(new GpgsClientTest(client), config); } private TextField appName; private TextField clientSecretPath; public GpgsClientTest(GpgsClient gsClient) { super(gsClient); } @Override protected void createServiceSpecificInitialization(Table table) { appName = createField(table, "app.name", "Application Name", ""); clientSecretPath = createField(table, "client.secret.path", "Client Secret Path", Gdx.files.absolute(System.getProperty("user.home")).child("client_secret.json").path()); createAction(table, "initialize", new Runnable() { @Override public void run() { gsClient.initialize(appName.getText(), Gdx.files.absolute(clientSecretPath.getText())); } }); } }
package zjava.collection; import java.util.AbstractList; import java.util.Arrays; import java.util.Collection; import java.util.ConcurrentModificationException; import java.util.Iterator; import java.util.List; import java.util.NoSuchElementException; import java.util.RandomAccess; /** * Resizable array implementation of the <tt>List</tt> interface. Implements * all optional list operations, and permits all elements, including * <tt>null</tt>. * * <p>The <tt>size</tt>, <tt>isEmpty</tt>, <tt>get</tt>, <tt>set</tt>, * <tt>iterator</tt>, and <tt>listIterator</tt> operations run in constant * time. The <tt>add</tt> operation runs in <i>amortized constant time</i>, * that is, adding n elements requires O(n) time. Removal and insertion of * elements at arbitrary index runs in <i>O(sqrt(n)) amortized time</i>.<br> * In most cases this implementation is significantly faster than * {@link java.util.ArrayList ArrayList} implementation and require just * O(sqrt(n)) of additional memory. * * @param <E> the type of elements in this list * * @author Ivan Zaitsau * @see Collection * @see List */ public class DynamicList<E> extends AbstractList<E> implements List<E>, HugeCapacityList<E>, RandomAccess, Cloneable, java.io.Serializable { static private final long serialVersionUID = 2015_02_12_1200L; /** Actual initial block size is 2<sup>INITIAL_BLOCK_BITSIZE</sup> */ static private final int INITIAL_BLOCK_BITSIZE = 5; /** Number of blocks on DynamicList initialization. * <br> <b>Note:</b> Must be even number due to some simplifications and assumptions made in the code*/ static private final int INITIAL_BLOCKS_COUNT = 2; /** This coefficient used to check if reduction of block size and amount of blocks is required. * <br> <b>Note:</b> Must be no less than 4. Needs to be no less than 8 for amortized performance estimations to hold */ static private final int REDUCTION_COEFFICIENT = 12; /** * The maximum size of array to allocate.<br> * Some VMs reserve some header words in an array. 
* Attempts to allocate larger arrays may result in * OutOfMemoryError: Requested array size exceeds VM limit */ static private final int MAX_ARRAY_SIZE = Integer.MAX_VALUE - 8; /** * Internal storage block.<br> * Maintains up to <tt>capacity</tt> objects.<br> * If there are more elements than <tt>capacity</tt> after <tt>add</tt> operation, last element is removed from the block and returned from <tt>add</tt> method.<br> * * <p>Implementation may vary, but the following conditions must be met: * <li> Insertions and deletions at the beginning and the end of the block must complete in constant time * <li> Retrieval of element at arbitrary position must complete in constant time * <li> Any other operation must complete in O(n) * <li> Only Blocks of sizes 2, 4, 8, ... 2<sup>30</sup> need to be supported * * @author Ivan Zaitsau */ static final private class Block<E> implements Cloneable, java.io.Serializable { private static final long serialVersionUID = 2015_02_12_1200L; // - merges two blocks of equal capacities into block with doubled capacity static <E> Block<E> merge(Block<E> block1, Block<E> block2) { if ((block1 == null || block1.size() == 0) && (block2 == null || block2.size() == 0)) return null; assert (block1 == null | block2 == null) || (block1.values.length == block2.values.length); Block<E> mergedBlock = new Block<E>(2 * ((block1 == null) ? 
block2.values.length : block1.values.length)); if (block1 != null) mergedBlock.size += block1.copyToArray(mergedBlock.values, 0); if (block2 != null) mergedBlock.size += block2.copyToArray(mergedBlock.values, mergedBlock.size); return mergedBlock; } // - splits block to two smaller blocks of capacity equal to half of given block @SuppressWarnings("unchecked") static <E> Block<E>[] split(Block<E> block) { if (block == null || block.size == 0) return new Block[] {null, null}; assert (block.values.length & 1) == 0; int halfSize = block.values.length / 2; if (block.size <= halfSize) { Block<E> block1 = new Block<E>(halfSize); block.copyToArray(block1.values, 0, 0, block.size); block1.size = block.size; return new Block[] {block1, null}; } else { Block<E> block1 = new Block<E>(halfSize); block.copyToArray(block1.values, 0, 0, halfSize); block1.size = halfSize; Block<E> block2 = new Block<E>(halfSize); block.copyToArray(block2.values, 0, halfSize, block.size - halfSize); block2.size = halfSize; return new Block[] {block1, block2}; } } private int offset; private int size; private E[] values; @SuppressWarnings("unchecked") Block(int capacity) { // - capacity must be even power of 2 assert((capacity & (capacity-1)) == 0 && capacity > 1); this.offset = 0; this.size = 0; this.values = (E[]) new Object[capacity]; } Block(int capacity, E[] values, int pos, int length) { this(capacity); System.arraycopy(values, pos, this.values, 0, length); size = length; } int copyToArray(Object[] array, int trgPos, int srcPos, int count) { if (srcPos >= values.length | count <= 0) return 0; if (srcPos + count > values.length) count = values.length - srcPos; int first = (offset + srcPos < values.length) ? 
offset + srcPos : offset + srcPos - values.length; if (first + count <= values.length) { System.arraycopy(values, first, array, trgPos, count); } else { int halfCount = values.length - first; System.arraycopy(values, first, array, trgPos, halfCount); System.arraycopy(values, 0, array, trgPos + halfCount, count - halfCount); } return count; } int copyToArray(Object[] array, int pos) { return copyToArray(array, pos, 0, size); } int size() { return size; } E addFirst(E value) { offset = (offset - 1) & (values.length-1); E last = values[offset]; values[offset] = value; if (size < values.length) { size++; } return last; } void addLast(E value) { if (size == values.length) return; values[(offset + size) & (values.length-1)] = value; size++; } E add(int index, E value) { // - range check assert(index >= 0 && index <= size); E last = (size == values.length) ? values[(offset - 1) & (values.length-1)] : null; if (2*index < size) { offset = (offset - 1) & (values.length-1); for (int i = 0; i < index; i++) { values[(offset + i) & (values.length-1)] = values[(offset + i + 1) & (values.length-1)]; } } else { for (int i = (size == values.length) ? 
size-1 : size; i > index; i values[(offset + i) & (values.length-1)] = values[(offset + i - 1) & (values.length-1)]; } } values[(offset + index) & (values.length-1)] = value; if (size < values.length) size++; return last; } E set(int index, E value) { // - range check assert(index >= 0 && index < size); int i = (offset + index) & (values.length-1); E replaced = values[i]; values[i] = value; return replaced; } E removeFirst() { // - range check assert(size > 0); E removed = values[offset]; values[offset] = null; offset = (offset + 1) & (values.length-1); size return removed; } E remove(int index) { // - range check assert(index >= 0 && index < size); E removed = values[(offset + index) & (values.length-1)]; if (2*index < size) { for (int i = index; i > 0; i values[(offset + i) & (values.length-1)] = values[(offset + i - 1) & (values.length-1)]; } values[offset] = null; offset = (offset + 1) & (values.length-1); } else { for (int i = index + 1; i < size; i++) { values[(offset + i - 1) & (values.length-1)] = values[(offset + i) & (values.length-1)]; } values[(offset + size - 1) & (values.length-1)] = null; } size return removed; } E get(int index) { // - range check assert(index >= 0 && index < size); return values[(offset + index) & (values.length-1)]; } public Object clone() { try { @SuppressWarnings("unchecked") Block<E> clone = (Block<E>) super.clone(); clone.values = Arrays.copyOf(values, values.length); return clone; } catch (CloneNotSupportedException e) { // - this should never be thrown since we are Cloneable throw new InternalError(); } } } private long size; private int blockBitsize; private Block<E>[] data; private transient FarListAccess<E> farAccess; private class FarAccess implements FarListAccess<E> { public long size() { return size; } public E get(long index) { rangeCheck(index); return fastGet(index); } public E set(long index, E element) { rangeCheck(index); return fastSet(index, element); } public void add(long index, E element) { 
rangeCheckForAdd(index); ensureCapacity(size + 1); fastAdd(index, element); } public E remove(long index) { rangeCheck(index); return fastRemove(index); } }; private class Iter implements Iterator<E> { /** * Cursor position */ private long i = 0; /** * Current (last returned) element index or -1 if element is not defined (or has been removed) */ private long last = -1; /** * Expected version (modifications count) of the backing List */ int expectedModCount = modCount; public boolean hasNext() { return i < size; } public E next() { checkForComodification(); try { rangeCheck(i); E next = fastGet(i); last = i++; return next; } catch (IndexOutOfBoundsException e) { checkForComodification(); throw new NoSuchElementException(); } } public void remove() { if (last < 0) throw new IllegalStateException(); checkForComodification(); try { rangeCheck(last); fastRemove(last); if (last < i) i last = -1; expectedModCount = modCount; } catch (IndexOutOfBoundsException e) { throw new ConcurrentModificationException(); } } void checkForComodification() { if (expectedModCount != modCount) throw new ConcurrentModificationException(); } } public FarListAccess<E> far() { if (farAccess == null) farAccess = new FarAccess(); return farAccess; } /** Null-safe access to data block with initialization.*/ private Block<E> data(int index) { if (data[index] == null) data[index] = new Block<E>(1 << blockBitsize); return data[index]; } private void ensureCapacity(long requiredCapacity) { long capacity = (long) data.length << blockBitsize; while (requiredCapacity > capacity) { // - double number of blocks and their size @SuppressWarnings("unchecked") Block<E>[] newData = new Block[2*data.length]; int newBlockBitsize = blockBitsize+1; for (int i = 1, j = 0; i < data.length; i += 2, j++) { newData[j] = Block.merge(data[i-1], data[i]); if (newData[j] == null) break; } /* Redundant because data.length assumed to be even number * *deleted code* */ assert (data.length & 1) == 0; data = newData; 
            blockBitsize = newBlockBitsize;
            capacity = (long) data.length << blockBitsize;
        }
    }

    // Halves the number of blocks and their size when occupancy drops below
    // 1/REDUCTION_COEFFICIENT of capacity, keeping memory use ~O(sqrt(n)).
    private void compact() {
        if (data.length <= INITIAL_BLOCKS_COUNT)
            return;
        if (size * REDUCTION_COEFFICIENT <= (long) data.length << blockBitsize) {
            // - decrease number of blocks and their size by half
            @SuppressWarnings("unchecked")
            Block<E>[] newData = new Block[(data.length+1)/2];
            int newBlockBitsize = blockBitsize-1;
            // labeled loop stops at the first null/empty half produced by split;
            // termination relies on the occupancy condition above
            main:
            for (int i = 0; ; i++) {
                Block<E>[] splitBlock = Block.split(data[i]);
                for (int j = 0; j <= 1; j++) {
                    if (splitBlock[j] == null || splitBlock[j].size() == 0)
                        break main;
                    newData[i+i+j] = splitBlock[j];
                }
            }
            data = newData;
            blockBitsize = newBlockBitsize;
            modCount++;
        }
    }

    private String outOfBoundsMsg(long index) {
        return "Index: " + index + ", Size: " + size;
    }

    private void rangeCheck(long index) {
        if (index < 0 || index >= size)
            throw new IndexOutOfBoundsException(outOfBoundsMsg(index));
    }

    private void rangeCheckForAdd(long index) {
        if (index < 0 || index > size)
            throw new IndexOutOfBoundsException(outOfBoundsMsg(index));
    }

    private void init() {
        init(0);
    }

    // Sizes the block array so blocksCount * blockSize >= initialCapacity,
    // growing both in lock-step (keeps block count ~ block size).
    @SuppressWarnings("unchecked")
    private void init(long initialCapacity) {
        size = 0;
        blockBitsize = INITIAL_BLOCK_BITSIZE;
        int blocksCount = INITIAL_BLOCKS_COUNT;
        while ((long)blocksCount << blockBitsize < initialCapacity) {
            blocksCount += blocksCount;
            blockBitsize++;
        }
        data = new Block[blocksCount];
    }

    /**
     * Removes from this list all of the elements whose index is between
     * {@code fromIndex}, inclusive, and {@code toIndex}, exclusive.
     * Shifts any succeeding elements to the left (reduces their index).
     *
     * @throws IndexOutOfBoundsException if {@code fromIndex} or
     *         {@code toIndex} is out of range
     */
    protected void removeRange(int fromIndex, int toIndex) {
        // whole blocks fully inside the range are dropped by shifting the
        // block array; the remaining partial range is removed element-wise
        int fromBlock = (int) (((long)fromIndex + (1 << blockBitsize) - 1) >>> blockBitsize);
        int toBlock = (toIndex >>> blockBitsize);
        int d = toBlock - fromBlock;
        if (d > 0) {
            for (int i = toBlock; i < data.length && data[i] != null && data[i].size() > 0; i++) {
                data[i-d] = data[i];
                data[i] = null;
            }
            size -= d << blockBitsize;
            toIndex -= d << blockBitsize;
        }
        for (int i = fromIndex; i < toIndex; i++)
            fastRemove(fromIndex);
    }

    /**
     * Constructs an empty list with an initial capacity of 32 elements.
     */
    public DynamicList() {
        init();
    }

    /**
     * Constructs an empty list with at least specified capacity.
     */
    public DynamicList(long initialCapacity) {
        init(initialCapacity);
    }

    /**
     * Constructs a list containing the elements of the specified
     * collection, in the order they are returned by the collection's
     * iterator.
     *
     * @param c the collection whose elements are to be placed into this list
     * @throws NullPointerException if the specified collection is null
     */
    @SuppressWarnings("unchecked")
    public DynamicList(Collection<? extends E> src) {
        init(src.size());
        // - unsafe, can throw ConcurrentModificationException if source collection is changed
        if (src.size() > MAX_ARRAY_SIZE) {
            synchronized(src) {
                for (E e : src)
                    add(e);
            }
        }
        // - more safe but resource consuming as well.
        // - can't be applied to enormous collections with more than MAX_ARRAY_SIZE elements.
        else {
            for (E e : (E[]) src.toArray())
                add(e);
        }
    }

    /**
     * Returns the number of elements in this list.<br>
     * If this list contains more than <tt>Integer.MAX_VALUE</tt> elements,
     * returns <tt>Integer.MAX_VALUE</tt>.
     *
     * @return the number of elements in this list
     */
    public int size() {
        return (size > Integer.MAX_VALUE) ? Integer.MAX_VALUE : (int) size;
    }

    /**
     * Appends the specified element to the end of this list.
     *
     * @param e element to be appended to this list
     * @return <tt>true</tt> (as specified by {@link Collection#add})
     */
    public boolean add(E element) {
        ensureCapacity(size + 1);
        int blockIndex = (int) (size >>> blockBitsize);
        data(blockIndex).addLast(element);
        size++;
        return true;
    }

    /**
     * Inserts the specified element at the specified position in this
     * list. Shifts the element currently at that position (if any) and
     * any subsequent elements to the right (adds one to their indices).
     *
     * @param index index at which the specified element is to be inserted
     * @param element element to be inserted
     * @throws IndexOutOfBoundsException {@inheritDoc}
     */
    public void add(int index, E element) {
        rangeCheckForAdd(index);
        ensureCapacity(size + 1);
        fastAdd(index, element);
    }

    // Inserts into the target block; if it is full, the displaced last
    // element cascades forward through subsequent full blocks via addFirst.
    private void fastAdd(long index, E element) {
        modCount++;
        int blockIndex = (int) (index >>> blockBitsize);
        int valueIndex = (int) (index & (-1L >>> -blockBitsize));
        int blockSize = 1 << blockBitsize;
        if (data(blockIndex).size() < blockSize) {
            data[blockIndex].add(valueIndex, element);
        }
        else {
            element = data[blockIndex].add(valueIndex, element);
            while (data(++blockIndex).size() == blockSize) {
                element = data[blockIndex].addFirst(element);
            }
            data[blockIndex].addFirst(element);
        }
        size++;
    }

    /**
     * Appends all of the elements in the specified collection to the end of
     * this list, in the order that they are returned by the
     * specified collection's Iterator.
     *
     * @param c collection containing elements to be added to this list
     * @return <tt>true</tt> if this list changed as a result of the call
     * @throws NullPointerException if the specified collection is null
     */
    public boolean addAll(Collection<? extends E> c) {
        @SuppressWarnings("unchecked")
        E[] values = (E[]) c.toArray();
        if (values.length == 0)
            return false;
        for (E value : values)
            add(value);
        return true;
    }

    /**
     * Inserts all of the elements in the specified collection into this
     * list, starting at the specified position.
Shifts the element * currently at that position (if any) and any subsequent elements to * the right (increases their indices). The new elements will appear * in the list in the order that they are returned by the * specified collection's iterator. * * @param index index at which to insert the first element from the * specified collection * @param c collection containing elements to be added to this list * @return <tt>true</tt> if this list changed as a result of the call * @throws IndexOutOfBoundsException {@inheritDoc} * @throws NullPointerException if the specified collection is null */ public boolean addAll(int index, Collection<? extends E> c) { rangeCheckForAdd(index); @SuppressWarnings("unchecked") E[] values = (E[]) c.toArray(); if (values.length == 0) return false; ensureCapacity(size + values.length); int blockSize = 1 << blockBitsize; int mask = (1 << blockBitsize) - 1; int i = 0; while (i < values.length && ((index + i) & mask) > 0) { fastAdd((long)index + i, values[i]); i++; } while (i < values.length - blockSize) { int fromBlock = (int) (((long)index + i) >>> blockBitsize); int toBlock = (int) ((size + mask) >>> blockBitsize); for (int j = toBlock; j > fromBlock; j data[j] = data[j-1]; data[fromBlock] = new Block<>(blockSize, values, i, blockSize); i += blockSize; size += blockSize; } while (i < values.length) { fastAdd((long)index + i, values[i]); i++; } return true; } /** * Returns the element at the specified position in this list. * * @param index index of the element to return * @return the element at the specified position in this list * @throws IndexOutOfBoundsException {@inheritDoc} */ public E get(int index) { rangeCheck(index); return fastGet(index); } private E fastGet(long index) { int blockIndex = (int) (index >>> blockBitsize); int valueIndex = (int) (index & (-1L >>> -blockBitsize)); return data[blockIndex].get(valueIndex); } /** * Replaces the element at the specified position in this list with * the specified element. 
* * @param index index of the element to replace * @param element element to be stored at the specified position * @return the element previously at the specified position * @throws IndexOutOfBoundsException {@inheritDoc} */ public E set(int index, E element) { rangeCheck(index); return fastSet(index, element); } private E fastSet(long index, E element) { int blockIndex = (int) (index >>> blockBitsize); int valueIndex = (int) (index & (-1L >>> -blockBitsize)); return data[blockIndex].set(valueIndex, element); } /** * Removes the element at the specified position in this list. * Shifts any subsequent elements to the left (subtracts one from their * indices). * * @param index the index of the element to be removed * @return the element that was removed from the list * @throws IndexOutOfBoundsException {@inheritDoc} */ public E remove(int index) { rangeCheck(index); return fastRemove(index); } private E fastRemove(long index) { modCount++; int blockIndex = (int) (index >>> blockBitsize); int valueIndex = (int) (index & (-1L >>> -blockBitsize)); E removed = data[blockIndex].remove(valueIndex); while (++blockIndex < data.length && data[blockIndex] != null && data[blockIndex].size() > 0) { data[blockIndex-1].addLast(data[blockIndex].removeFirst()); } size // - free unused blocks for GC and compact list if needed boolean blockFreed = false; while (++blockIndex < data.length && data[blockIndex] != null && data[blockIndex].size() == 0) { data[blockIndex] = null; blockFreed = true; } if (blockFreed) compact(); return removed; } /** * Retains only the elements in this collection that are contained in the * specified collection. In other words, removes from this collection all * of its elements that are not contained in the specified collection. 
* * @param c collection containing elements to be retained in this collection * * @return <tt>true</tt> if this collection changed as a result of the call * @throws NullPointerException if this collection contains one or more * null elements and the specified collection does not permit null * elements */ public boolean retainAll(Collection<?> c) { boolean modified = false; for (long i = size-1; i >= 0; i if (!c.contains(fastGet(i))) { fastRemove(i); modified = true; } } return modified; } /** * Removes all of the elements from this list. The list will be empty after this call returns. */ public void clear() { Arrays.fill(data, null); size = 0; } /** * Returns an iterator over the elements in this list in proper sequence. * * <p>This implementation supports lists of sizes above <tt>Integer.MAX_VALUE</tt> * limit. * * <p>This implementation is made to throw runtime exceptions in the * face of concurrent modification, as described in the specification * for the (protected) {@link AbstractList#modCount modCount} field. * * @return an iterator over the elements in this list in proper sequence */ public Iterator<E> iterator() { return new Iter(); } /** * Returns an array containing all of the elements in this list * in proper sequence (from first to last element). * * <p>The returned array will be "safe" in that no references to it are * maintained by this list. (In other words, this method must allocate * a new array). The caller is thus free to modify the returned array. * * <p>This method acts as bridge between array-based and collection-based * APIs. 
* * @return an array containing all of the elements in this list in * proper sequence */ public Object[] toArray() { long size = this.size; if (size > MAX_ARRAY_SIZE) throw new OutOfMemoryError("Required array size too large"); Object[] result = new Object[(int) size]; int pos = 0; for (int i = 0; i < data.length && data[i] != null && data[i].size() > 0; i++) { pos += data[i].copyToArray(result, pos); } return (pos < size) ? Arrays.copyOf(result, pos) : result; } /** * Returns an array containing all of the elements in this list in proper * sequence (from first to last element); the runtime type of the returned * array is that of the specified array. If the list fits in the * specified array, it is returned therein. Otherwise, a new array is * allocated with the runtime type of the specified array and the size of * this list. * * <p>If the list fits in the specified array with room to spare * (i.e., the array has more elements than the list), the element in * the array immediately following the end of the collection is set to * <tt>null</tt>. (This is useful in determining the length of the * list <i>only</i> if the caller knows that the list does not contain * any null elements.) * * @param a the array into which the elements of the list are to * be stored, if it is big enough; otherwise, a new array of the * same runtime type is allocated for this purpose. * @return an array containing the elements of the list * @throws ArrayStoreException if the runtime type of the specified array * is not a supertype of the runtime type of every element in * this list * @throws NullPointerException if the specified array is null */ public <T> T[] toArray(T[] a) { long size = this.size; if (size > MAX_ARRAY_SIZE) throw new OutOfMemoryError("Required array size too large"); @SuppressWarnings("unchecked") T[] result = (a.length < size) ? 
(T[]) java.lang.reflect.Array.newInstance(a.getClass().getComponentType(), (int) size) : a; for (int i = 0, pos = 0; i < data.length && data[i] != null && data[i].size() > 0; i++) { pos += data[i].copyToArray(result, pos); } if (result.length > size) result[(int) size] = null; return result; } /** * Returns a shallow copy of this <tt>DynamicList</tt> instance. * (The elements themselves are not cloned). * * @return a clone of this <tt>DynamicList</tt> instance */ @SuppressWarnings("unchecked") public Object clone() { try { DynamicList<E> clone = (DynamicList<E>) super.clone(); clone.farAccess = null; clone.data = new Block[data.length]; for (int i = 0; i < data.length && data[i] != null; i++) { clone.data[i] = (Block<E>) data[i].clone(); } return clone; } catch (CloneNotSupportedException e) { // - should never be thrown since we are Cloneable throw new InternalError(); } } /** * Returns a string representation of this list. The string representation * consists of a list of the collection's elements separated by commas * in the order they are returned by its iterator. List enclosed in square * brackets (<tt>"[]"</tt>).<br> * If list is too large, only first elements will be shown, followed by * three-dot (<tt>"..."</tt>). */ public String toString() { if (isEmpty()) return "[]"; StringBuilder sb = new StringBuilder(); sb.append('['); for (int i = 0; i < size; ) { E e = fastGet(i); sb.append(e == this ? "(this List)" : e); if (++i < size) { if (sb.length() > 1000) { sb.append(',').append(" ..."); break; } sb.append(',').append(' '); } } sb.append(']'); return sb.toString(); } }
package com.intellij.diagnostic;

/**
 * @author kir, max
 */

import com.intellij.ExtensionPoints;
import com.intellij.ide.plugins.PluginManager;
import com.intellij.ide.reporter.ScrData;
import com.intellij.ide.util.PropertiesComponent;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ApplicationNamesInfo;
import com.intellij.openapi.diagnostic.ErrorReportSubmitter;
import com.intellij.openapi.diagnostic.IdeaLoggingEvent;
import com.intellij.openapi.diagnostic.SubmittedReportInfo;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.extensions.PluginDescriptor;
import com.intellij.openapi.extensions.PluginId;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.IconLoader;
import com.intellij.util.text.DateFormatUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.KeyEvent;
import java.util.*;
import java.util.List;

/**
 * Dialog that lists the fatal errors collected in a {@link MessagePool},
 * groups identical ones by throwable hashcode, and lets the user browse
 * them one group at a time and submit a report for the selected group.
 */
public class IdeErrorsDialog extends DialogWrapper implements MessagePoolListener {
  private JTextPane myDetailsPane;
  private List<AbstractMessage> myFatalErrors;
  // Each entry groups messages that share the same throwable hashcode;
  // myIndex selects the currently displayed group.
  private List<ArrayList<AbstractMessage>> myModel = new ArrayList<ArrayList<AbstractMessage>>();
  private final MessagePool myMessagePool;
  private JLabel myCountLabel;
  private JLabel myBlameLabel;
  private JLabel myInfoLabel;
  private JCheckBox myImmediatePopupCheckbox;
  private int myIndex = 0;
  @NonNls public static final String IMMEDIATE_POPUP_OPTION = "IMMEDIATE_FATAL_ERROR_POPUP";

  public IdeErrorsDialog(MessagePool messagePool) {
    super(JOptionPane.getRootFrame(), false);
    myMessagePool = messagePool;
    init();
  }

  // MessagePoolListener: pool changes arrive on arbitrary threads, so both
  // callbacks hop to the EDT before touching Swing state.
  public void newEntryAdded() {
    SwingUtilities.invokeLater(new Runnable() {
      public void run() {
        rebuildHeaders();
      }
    });
  }

  public void poolCleared() {
    SwingUtilities.invokeLater(new Runnable() {
      public void run() {
        doOKAction();
      }
    });
  }

  protected Action[] createActions() {
    return new Action[]{new ShutdownAction(), new ClearFatalsAction(), new CloseAction()};
  }

  /** Builds the back/forward toolbar with left/right arrow-key shortcuts. */
  private ActionToolbar createNavigationToolbar() {
    DefaultActionGroup group = new DefaultActionGroup();

    BackAction back = new BackAction();
    back.registerCustomShortcutSet(new CustomShortcutSet(KeyStroke.getKeyStroke(KeyEvent.VK_LEFT, 0)), getRootPane());
    group.add(back);

    ForwardAction forward = new ForwardAction();
    forward.registerCustomShortcutSet(new CustomShortcutSet(KeyStroke.getKeyStroke(KeyEvent.VK_RIGHT, 0)), getRootPane());
    group.add(forward);

    return ActionManager.getInstance().createActionToolbar(ActionPlaces.UNKNOWN, group, true);
  }

  private void goBack() {
    // FIX: the decrement had been lost ("myIndex" with no "--;"), which was
    // a syntax error; restored to mirror goForward()'s "myIndex++".
    myIndex--;
    updateControls();
  }

  private void goForward() {
    myIndex++;
    updateControls();
  }

  private void updateControls() {
    updateCountLabel();
    updateBlameLabel();
    updateInfoLabel();
    updateDetailsPane();
  }

  private void updateInfoLabel() {
    final AbstractMessage message = getMessageAt(myIndex);
    if (message != null) {
      StringBuffer txt = new StringBuffer();
      txt.append(DiagnosticBundle.message("error.list.message.info",
                                          DateFormatUtil.formatDate(new Date(), message.getDate()), myModel.get(myIndex).size()));
      if (message.isSubmitted()) {
        final SubmittedReportInfo info = message.getSubmissionInfo();
        if (info.getStatus() == SubmittedReportInfo.SubmissionStatus.FAILED) {
          txt.append(DiagnosticBundle.message("error.list.message.submission.failed"));
        }
        else {
          if (info.getLinkText() != null) {
            txt.append(DiagnosticBundle.message("error.list.message.submitted.as.link", info.getLinkText()));
            if (info.getStatus() == SubmittedReportInfo.SubmissionStatus.DUPLICATE) {
              txt.append(DiagnosticBundle.message("error.list.message.duplicate"));
            }
          }
          else {
            txt.append(DiagnosticBundle.message("error.list.message.submitted"));
          }
        }
        txt.append(". ");
      }
      else if (!message.isRead()) {
        txt.append(DiagnosticBundle.message("error.list.message.unread"));
      }
      myInfoLabel.setText(txt.toString());
    }
    else {
      myInfoLabel.setText("");
    }
  }

  private void updateBlameLabel() {
    final AbstractMessage message = getMessageAt(myIndex);
    if (message != null && !(message.getThrowable() instanceof MessagePool.TooManyErrorsException)) {
      final PluginId pluginId = findPluginId(message.getThrowable());
      if (pluginId == null) {
        myBlameLabel.setText(DiagnosticBundle.message("error.list.message.blame.core",
                                                      ApplicationNamesInfo.getInstance().getProductName()));
      }
      else {
        final Application app = ApplicationManager.getApplication();
        myBlameLabel.setText(DiagnosticBundle.message("error.list.message.blame.plugin",
                                                      app.getPlugin(pluginId).getName()));
      }
    }
    else {
      myBlameLabel.setText("");
    }
  }

  private void updateDetailsPane() {
    final AbstractMessage message = getMessageAt(myIndex);
    if (message != null) {
      showMessageDetails(message);
    }
    else {
      hideMessageDetails();
    }
  }

  private void updateCountLabel() {
    myCountLabel.setText(DiagnosticBundle.message("error.list.message.index.count",
                                                  Integer.toString(myIndex + 1), myModel.size()));
  }

  private class BackAction extends AnAction {
    public BackAction() {
      super(DiagnosticBundle.message("error.list.back.action"), null, IconLoader.getIcon("/actions/back.png"));
    }

    public void actionPerformed(AnActionEvent e) {
      goBack();
    }

    public void update(AnActionEvent e) {
      Presentation presentation = e.getPresentation();
      presentation.setEnabled(myIndex > 0);
    }
  }

  private class ForwardAction extends AnAction {
    public ForwardAction() {
      super(DiagnosticBundle.message("error.list.forward.action"), null, IconLoader.getIcon("/actions/forward.png"));
    }

    public void actionPerformed(AnActionEvent e) {
      goForward();
    }

    public void update(AnActionEvent e) {
      Presentation presentation = e.getPresentation();
      presentation.setEnabled(myIndex < myModel.size() - 1);
    }
  }

  protected JComponent createCenterPanel() {
    setTitle(DiagnosticBundle.message("error.list.title"));
    JPanel root = new JPanel(new BorderLayout());
    JPanel top = new JPanel(new BorderLayout());
    JPanel toolbar = new JPanel(new FlowLayout());

    myImmediatePopupCheckbox = new JCheckBox(DiagnosticBundle.message("error.list.popup.immediately.checkbox"));
    myImmediatePopupCheckbox.setSelected(PropertiesComponent.getInstance().isTrueValue(IMMEDIATE_POPUP_OPTION));

    myCountLabel = new JLabel();
    myBlameLabel = new JLabel();
    myInfoLabel = new JLabel();

    ActionToolbar navToolbar = createNavigationToolbar();
    toolbar.add(navToolbar.getComponent());
    toolbar.add(myCountLabel);
    top.add(toolbar, BorderLayout.WEST);

    JPanel blamePanel = new JPanel(new FlowLayout());
    blamePanel.add(myBlameLabel);
    final ActionToolbar blameToolbar = createBlameToolbar();
    blamePanel.add(blameToolbar.getComponent());
    top.add(blamePanel, BorderLayout.EAST);

    root.add(top, BorderLayout.NORTH);

    myDetailsPane = new JTextPane();
    myDetailsPane.setEditable(false);

    JPanel infoPanel = new JPanel(new BorderLayout());
    JPanel gapPanel = new JPanel(new FlowLayout(FlowLayout.LEFT, 7, 0));
    gapPanel.add(myInfoLabel);
    infoPanel.add(gapPanel, BorderLayout.NORTH);
    infoPanel.add(new JScrollPane(myDetailsPane), BorderLayout.CENTER);

    root.add(infoPanel, BorderLayout.CENTER);
    root.add(myImmediatePopupCheckbox, BorderLayout.SOUTH);
    root.setPreferredSize(new Dimension(600, 550));
    rebuildHeaders();
    moveSelectionToEarliestMessage();
    updateControls();
    return root;
  }

  private ActionToolbar createBlameToolbar() {
    DefaultActionGroup blameGroup = new DefaultActionGroup();
    final BlameAction blameAction = new BlameAction();
    blameGroup.add(blameAction);
    blameAction.registerCustomShortcutSet(new CustomShortcutSet(KeyStroke.getKeyStroke(KeyEvent.VK_ENTER, 0)), getRootPane());
    return ActionManager.getInstance().createActionToolbar(ActionPlaces.UNKNOWN, blameGroup, true);
  }

  /** Returns the representative (first) message of the group at idx, or null when out of range. */
  private AbstractMessage getMessageAt(int idx) {
    if (idx < 0 || idx >= myModel.size()) return null;
    return myModel.get(idx).get(0);
  }

  /** Selects the first group whose representative message is still unread. */
  private void moveSelectionToEarliestMessage() {
    myIndex = 0;
    for (int i = 0; i < myModel.size(); i++) {
      final AbstractMessage each = getMessageAt(i);
      if (!each.isRead()) {
        myIndex = i;
        break;
      }
    }
    updateControls();
  }

  private void rebuildHeaders() {
    myModel.clear();
    myFatalErrors = myMessagePool.getFatalErrors(true, true);

    Map<String, ArrayList<AbstractMessage>> hash2Messages = buildHashcode2MessageListMap(myFatalErrors);
    for (final ArrayList<AbstractMessage> abstractMessages : hash2Messages.values()) {
      myModel.add(abstractMessages);
    }
    updateControls();
  }

  /** Groups errors by throwable hashcode; newest message first inside each group. */
  private static Map<String, ArrayList<AbstractMessage>> buildHashcode2MessageListMap(List<AbstractMessage> aErrors) {
    Map<String, ArrayList<AbstractMessage>> hash2Messages = new LinkedHashMap<String, ArrayList<AbstractMessage>>();
    for (final AbstractMessage each : aErrors) {
      final String hashcode = ScrData.getThrowableHashCode(each.getThrowable());
      ArrayList<AbstractMessage> list;
      if (hash2Messages.containsKey(hashcode)) {
        list = hash2Messages.get(hashcode);
      }
      else {
        list = new ArrayList<AbstractMessage>();
        hash2Messages.put(hashcode, list);
      }
      list.add(0, each);
    }
    return hash2Messages;
  }

  private void showMessageDetails(AbstractMessage aMessage) {
    if (aMessage.getThrowable() instanceof MessagePool.TooManyErrorsException) {
      myDetailsPane.setText(aMessage.getThrowable().getMessage());
    }
    else {
      myDetailsPane.setText(new StringBuffer().append(aMessage.getMessage()).append("\n").append(aMessage.getThrowableText()).toString());
    }
    if (myDetailsPane.getCaret() != null) {
      // Upon some strange circumstances caret may be missing from the text component making the following line fail with NPE.
      myDetailsPane.setCaretPosition(0);
    }
  }

  private void hideMessageDetails() {
    myDetailsPane.setText("");
  }

  /** Attempts to attribute the throwable to a plugin by scanning its stack trace and message. */
  @Nullable
  public static PluginId findPluginId(Throwable t) {
    StackTraceElement[] elements = t.getStackTrace();
    for (StackTraceElement element : elements) {
      String className = element.getClassName();
      if (PluginManager.isPluginClass(className)) {
        return PluginManager.getPluginByClassName(className);
      }
    }

    if (t instanceof NoSuchMethodException) {
      // check is method called from plugin classes
      if (t.getMessage() != null) {
        String className = "";
        StringTokenizer tok = new StringTokenizer(t.getMessage(), ".");
        while (tok.hasMoreTokens()) {
          String token = tok.nextToken();
          // NOTE(review): tokens are concatenated without the "." separators,
          // so the rebuilt name lacks dots — verify this is intentional.
          if (token.length() > 0 && Character.isJavaIdentifierStart(token.charAt(0))) {
            className += token;
          }
        }
        if (PluginManager.isPluginClass(className)) {
          return PluginManager.getPluginByClassName(className);
        }
      }
    }
    else if (t instanceof ClassNotFoundException) {
      // check is class from plugin classes
      if (t.getMessage() != null) {
        String className = t.getMessage();
        if (PluginManager.isPluginClass(className)) {
          return PluginManager.getPluginByClassName(className);
        }
      }
    }
    else if (t instanceof PluginException) {
      return ((PluginException)t).getPluginId();
    }
    return null;
  }

  private class ShutdownAction extends AbstractAction {
    public ShutdownAction() {
      super(DiagnosticBundle.message("error.list.shutdown.action"));
    }

    public void actionPerformed(ActionEvent e) {
      myMessagePool.setJvmIsShuttingDown();
      ApplicationManager.getApplication().exit();
    }
  }

  private class ClearFatalsAction extends AbstractAction {
    public ClearFatalsAction() {
      super(DiagnosticBundle.message("error.list.clear.action"));
    }

    public void actionPerformed(ActionEvent e) {
      myMessagePool.clearFatals();
      doOKAction();
    }
  }

  private class BlameAction extends AnAction {
    public BlameAction() {
      super(DiagnosticBundle.message("error.list.submit.action"),
            DiagnosticBundle.message("error.list.submit.action.description"),
            IconLoader.getIcon("/actions/startDebugger.png"));
    }

    public void update(AnActionEvent e) {
      final Presentation presentation = e.getPresentation();
      final AbstractMessage logMessage = getMessageAt(myIndex);
      if (logMessage == null) {
        presentation.setEnabled(false);
        return;
      }
      final ErrorReportSubmitter submitter = getSubmitter(logMessage);
      if (logMessage.isSubmitted() || submitter == null) {
        presentation.setEnabled(false);
        return;
      }
      presentation.setEnabled(true);
      presentation.setDescription(submitter.getReportActionText());
    }

    public void actionPerformed(AnActionEvent e) {
      final AbstractMessage logMessage = getMessageAt(myIndex);
      reportMessage(logMessage);
      rebuildHeaders();
      updateControls();
    }

    private void reportMessage(final AbstractMessage logMessage) {
      ErrorReportSubmitter submitter = getSubmitter(logMessage);
      if (submitter != null) {
        logMessage.setSubmitted(submitter.submit(getEvents(logMessage), getContentPane()));
      }
    }

    private IdeaLoggingEvent[] getEvents(final AbstractMessage logMessage) {
      if (logMessage instanceof GroupedLogMessage) {
        final List<AbstractMessage> messages = ((GroupedLogMessage)logMessage).getMessages();
        IdeaLoggingEvent[] res = new IdeaLoggingEvent[messages.size()];
        for (int i = 0; i < res.length; i++) {
          res[i] = getEvent(messages.get(i));
        }
        return res;
      }
      return new IdeaLoggingEvent[]{getEvent(logMessage)};
    }

    private IdeaLoggingEvent getEvent(final AbstractMessage logMessage) {
      return new IdeaLoggingEvent(logMessage.getMessage(), logMessage.getThrowable());
    }
  }

  /** Finds the error-report submitter registered for the plugin blamed for the message. */
  @Nullable
  private static ErrorReportSubmitter getSubmitter(final AbstractMessage logMessage) {
    if (logMessage.getThrowable() instanceof MessagePool.TooManyErrorsException) {
      return null;
    }
    final PluginId pluginId = findPluginId(logMessage.getThrowable());
    final Object[] reporters = Extensions.getRootArea().getExtensionPoint(ExtensionPoints.ERROR_HANDLER).getExtensions();
    ErrorReportSubmitter submitter = null;
    for (Object reporter1 : reporters) {
      ErrorReportSubmitter reporter = (ErrorReportSubmitter)reporter1;
      final PluginDescriptor descriptor = reporter.getPluginDescriptor();
      // NOTE(review): "==" on the PluginId presumably relies on PluginId.getId
      // interning instances — confirm before changing to equals().
      if (pluginId == null && (descriptor == null || PluginId.getId("com.intellij") == descriptor.getPluginId()) ||
          descriptor != null && Comparing.equal(pluginId, descriptor.getPluginId())) {
        submitter = reporter;
      }
    }
    return submitter;
  }

  protected void doOKAction() {
    PropertiesComponent.getInstance().setValue(IMMEDIATE_POPUP_OPTION, String.valueOf(myImmediatePopupCheckbox.isSelected()));
    markAllAsRead();
    super.doOKAction();
  }

  private void markAllAsRead() {
    for (AbstractMessage each : myFatalErrors) {
      each.setRead(true);
    }
  }

  public void doCancelAction() {
    PropertiesComponent.getInstance().setValue(IMMEDIATE_POPUP_OPTION, String.valueOf(myImmediatePopupCheckbox.isSelected()));
    markAllAsRead();
    super.doCancelAction();
  }

  protected class CloseAction extends AbstractAction {
    public CloseAction() {
      putValue(Action.NAME, DiagnosticBundle.message("error.list.close.action"));
    }

    public void actionPerformed(ActionEvent e) {
      doOKAction();
    }
  }

  protected String getDimensionServiceKey() {
    // Kept with the historical misspelling: changing the key would discard
    // users' previously stored dialog dimensions.
    return "IdeErrosDialog";
  }
}
package com.intellij.psi.impl.meta;

import com.intellij.jsp.impl.RelaxedHtmlFromSchemaNSDescriptor;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.UserDataCache;
import com.intellij.psi.PsiElement;
import com.intellij.psi.filters.*;
import com.intellij.psi.filters.position.NamespaceFilter;
import com.intellij.psi.filters.position.TargetNamespaceFilter;
import com.intellij.psi.meta.MetaDataRegistrar;
import com.intellij.psi.meta.PsiMetaData;
import com.intellij.psi.meta.PsiMetaDataBase;
import com.intellij.psi.util.CachedValue;
import com.intellij.psi.util.CachedValueProvider;
import com.intellij.psi.xml.*;
import com.intellij.xml.impl.schema.NamedObjectDescriptor;
import com.intellij.xml.impl.schema.SchemaNSDescriptor;
import com.intellij.xml.impl.schema.XmlAttributeDescriptorImpl;
import com.intellij.xml.impl.schema.XmlElementDescriptorImpl;
import com.intellij.xml.util.XmlUtil;
import org.jetbrains.annotations.Nullable;

import java.util.ArrayList;
import java.util.List;

/**
 * Registry that associates PSI elements with {@link PsiMetaDataBase}
 * descriptors. A binding pairs an {@link ElementFilter} with a descriptor
 * class; the first matching binding wins. Computed descriptors are cached
 * on the element via a {@link CachedValue} keyed by {@link #META_DATA_KEY}.
 */
public class MetaRegistry extends MetaDataRegistrar {
  private static final Logger LOG = Logger.getInstance("#com.intellij.psi.impl.meta.MetaRegistry");

  // Bindings are consulted in list order; addBinding() prepends, so the
  // most recently registered binding takes precedence.
  private static final List<MyBinding> ourBindings = new ArrayList<MyBinding>();

  public static final String[] SCHEMA_URIS = { XmlUtil.XML_SCHEMA_URI, XmlUtil.XML_SCHEMA_URI2, XmlUtil.XML_SCHEMA_URI3 };

  // Built-in bindings. NOTE: because addBinding() prepends, later blocks here
  // are checked BEFORE earlier ones — keep the registration order intact.
  static {
    {
      // XSD schema document / <schema> tag -> schema namespace descriptor.
      addMetadataBinding(
          new AndFilter(
              new NamespaceFilter(SCHEMA_URIS),
              new ClassFilter(XmlDocument.class)
          ),
          SchemaNSDescriptor.class
      );

      addMetadataBinding(
          new AndFilter(
              new ClassFilter(XmlTag.class),
              new NamespaceFilter(SCHEMA_URIS),
              new TextFilter("schema")
          ),
          SchemaNSDescriptor.class
      );
    }
    {
      // DTD: documents containing element decls / conditional sections /
      // entity refs, and markup decls -> DTD namespace descriptor.
      addMetadataBinding(
          new OrFilter(
              new AndFilter(
                  new ContentFilter(
                      new OrFilter(
                          new ClassFilter(XmlElementDecl.class),
                          new ClassFilter(XmlConditionalSection.class),
                          new ClassFilter(XmlEntityRef.class)
                      )
                  ),
                  new ClassFilter(XmlDocument.class)
              ),
              new ClassFilter(XmlMarkupDecl.class)
          ),
          com.intellij.xml.impl.dtd.XmlNSDescriptorImpl.class
      );
    }

    {
      // XSD <element> tag -> element descriptor.
      addMetadataBinding(new AndFilter(
          new ClassFilter(XmlTag.class),
          new NamespaceFilter(SCHEMA_URIS),
          new TextFilter("element")
      ), XmlElementDescriptorImpl.class);
    }

    {
      // XSD <attribute> tag -> attribute descriptor.
      addMetadataBinding(
          new AndFilter(
              new ClassFilter(XmlTag.class),
              new NamespaceFilter(SCHEMA_URIS),
              new TextFilter("attribute")
          ),
          XmlAttributeDescriptorImpl.class
      );
    }

    {
      // DTD element declaration -> DTD element descriptor.
      addMetadataBinding(
          new ClassFilter(XmlElementDecl.class),
          com.intellij.xml.impl.dtd.XmlElementDescriptorImpl.class
      );
    }

    {
      // DTD attribute declaration -> DTD attribute descriptor.
      addMetadataBinding(
          new ClassFilter(XmlAttributeDecl.class),
          com.intellij.xml.impl.dtd.XmlAttributeDescriptorImpl.class
      );
    }

    {
      // XHTML-targeted schema documents get the relaxed HTML descriptor.
      addMetadataBinding(
          new AndFilter(
              new ClassFilter(XmlDocument.class),
              new TargetNamespaceFilter(XmlUtil.XHTML_URI),
              new NamespaceFilter(SCHEMA_URIS)),
          RelaxedHtmlFromSchemaNSDescriptor.class
      );
    }

    {
      // XSD named components (complexType/simpleType/group/attributeGroup).
      addMetadataBinding(new AndFilter(
          new ClassFilter(XmlTag.class),
          new NamespaceFilter(SCHEMA_URIS),
          new TextFilter("complexType","simpleType", "group","attributeGroup")
      ), NamedObjectDescriptor.class);
    }
  }

  private static final Key<CachedValue<PsiMetaDataBase>> META_DATA_KEY = Key.create("META DATA KEY");

  /**
   * Pins pre-built meta data to a specific element, bypassing the binding
   * lookup. The data is (re-)initialized lazily inside the cached value.
   */
  public static void bindDataToElement(final PsiElement element, final PsiMetaDataBase data){
    CachedValue<PsiMetaDataBase> value =
      element.getManager().getCachedValuesManager().createCachedValue(new CachedValueProvider<PsiMetaDataBase>() {
        public CachedValueProvider.Result<PsiMetaDataBase> compute() {
          data.init(element);
          return new Result<PsiMetaDataBase>(data, data.getDependences());
        }
      });
    element.putUserData(META_DATA_KEY, value);
  }

  /** Returns the element's meta data, or null if absent or not a PsiMetaData. */
  public static PsiMetaData getMeta(final PsiElement element) {
    final PsiMetaDataBase base = getMetaBase(element);
    return base instanceof PsiMetaData ? (PsiMetaData)base : null;
  }

  // Lazily computes and caches the descriptor for an element: the first
  // binding whose filter accepts the element wins; its descriptor class is
  // instantiated reflectively and init()-ed with the element.
  private static UserDataCache<CachedValue<PsiMetaDataBase>, PsiElement, Object> ourCachedMetaCache =
    new UserDataCache<CachedValue<PsiMetaDataBase>, PsiElement, Object>() {
    protected CachedValue<PsiMetaDataBase> compute(final PsiElement element, Object p) {
      return element.getManager().getCachedValuesManager()
        .createCachedValue(new CachedValueProvider<PsiMetaDataBase>() {
          public Result<PsiMetaDataBase> compute() {
            try {
              for (final MyBinding binding : ourBindings) {
                if (binding.myFilter.isClassAcceptable(element.getClass()) &&
                    binding.myFilter.isAcceptable(element, element.getParent())) {
                  final PsiMetaDataBase data = binding.myDataClass.newInstance();
                  data.init(element);
                  return new Result<PsiMetaDataBase>(data, data.getDependences());
                }
              }
            }
            catch (IllegalAccessException iae) {
              throw new RuntimeException(iae);
            }
            catch (InstantiationException ie) {
              throw new RuntimeException(ie);
            }
            // No binding matched: cache the null result, invalidated when the
            // element itself changes.
            return new Result<PsiMetaDataBase>(null, element);
          }
        }, false);
    }
  };

  @Nullable
  public static PsiMetaDataBase getMetaBase(final PsiElement element) {
    ProgressManager.getInstance().checkCanceled();
    return ourCachedMetaCache.get(META_DATA_KEY, element, null).getValue();
  }

  /**
   * Registers a binding that is automatically removed when
   * {@code parentDisposable} is disposed.
   */
  public static <T extends PsiMetaDataBase> void addMetadataBinding(ElementFilter filter,
                                                                    Class<T> aMetadataClass,
                                                                    Disposable parentDisposable) {
    final MyBinding binding = new MyBinding(filter, aMetadataClass);
    addBinding(binding);
    Disposer.register(parentDisposable, new Disposable() {
      public void dispose() {
        ourBindings.remove(binding);
      }
    });
  }

  /** Registers a permanent binding (no disposal). */
  public static <T extends PsiMetaDataBase> void addMetadataBinding(ElementFilter filter, Class<T> aMetadataClass) {
    addBinding(new MyBinding(filter, aMetadataClass));
  }

  // Prepend so newer bindings shadow older ones during lookup.
  private static <T extends PsiMetaDataBase> void addBinding(final MyBinding binding) {
    ourBindings.add(0, binding);
  }

  public <T extends PsiMetaDataBase> void registerMetaData(ElementFilter filter, Class<T> metadataDescriptorClass) {
    addMetadataBinding(filter, metadataDescriptorClass);
  }

  /** A filter + descriptor-class pair; the class must have a no-arg constructor. */
  private static class MyBinding {
    ElementFilter myFilter;
    Class<PsiMetaDataBase> myDataClass;

    public <T extends PsiMetaDataBase> MyBinding(ElementFilter filter, Class<T> dataClass) {
      LOG.assertTrue(filter != null);
      LOG.assertTrue(dataClass != null);
      myFilter = filter;
      myDataClass = (Class)dataClass;
    }
  }
}
package hello.web;

import static spark.Spark.after;
import static spark.Spark.get;
import hello.domain.Message;
import hello.domain.World;

import java.util.Date;
import java.util.Random;
import java.util.concurrent.ThreadLocalRandom;

import org.hibernate.Session;

import spark.Filter;
import spark.Request;
import spark.Response;
import spark.Route;

/**
 * Spark route definitions for the benchmark application: a JSON
 * serialization test, a database read test, and a plaintext test,
 * plus filters that close the Hibernate session and stamp a Date header.
 */
public class SparkApplication implements spark.servlet.SparkApplication {

    private static final int DB_ROWS = 10000;
    private static final String MESSAGE = "Hello, World!";
    private static final String CONTENT_TYPE_TEXT = "text/plain";

    @Override
    public void init() {
        // /json: serialize a fresh Message.
        get(new JsonTransformer("/json") {
            @Override
            protected Object handleInternal(final Request request, final Response response) {
                return new Message();
            }
        });

        // /db: load N random World rows; a missing "queries" parameter means
        // a single object (not an array) is returned.
        get(new JsonTransformer("/db") {
            @Override
            protected Object handleInternal(final Request request, final Response response) {
                final int count = getQueries(request);
                final World[] rows = new World[count];
                final Session session = HibernateUtil.getSession();
                final Random rnd = ThreadLocalRandom.current();
                for (int i = 0; i < count; i++) {
                    rows[i] = (World) session.byId(World.class).load(rnd.nextInt(DB_ROWS) + 1);
                }
                return request.queryParams("queries") == null ? rows[0] : rows;
            }

            // Parses the "queries" parameter, clamped to [1, 500];
            // absent or unparsable values fall back to 1.
            private int getQueries(final Request request) {
                final String raw = request.queryParams("queries");
                if (raw == null) {
                    return 1;
                }
                try {
                    final int requested = Integer.parseInt(raw);
                    return Math.min(500, Math.max(1, requested));
                } catch (NumberFormatException ex) {
                    return 1;
                }
            }
        });

        // /plaintext: constant body with an explicit text content type.
        get(new Route("/plaintext") {
            @Override
            public Object handle(final Request request, final Response response) {
                response.type(CONTENT_TYPE_TEXT);
                return MESSAGE;
            }
        });

        // Release the Hibernate session after every /db request.
        after(new Filter("/db") {
            @Override
            public void handle(final Request request, final Response response) {
                HibernateUtil.closeSession();
            }
        });

        // Stamp the current time on every response.
        after(new Filter() {
            @Override
            public void handle(final Request request, final Response response) {
                response.raw().addDateHeader("Date", new Date().getTime());
            }
        });
    }

    public static void main(final String[] args) {
        System.setProperty("jndi", "false");
        new SparkApplication().init();
    }
}
package gov.va.escreening.dto.editors;

import java.util.List;

/**
 * DTO describing a single page of a survey: its identity, ordering,
 * display text, and the questions shown on it. Plain mutable bean with
 * a no-arg constructor for binding/serialization frameworks.
 */
public class SurveyPageInfo {

    // Database identifier; null for a page not yet persisted.
    private Integer id;
    // Free-text description shown with the page.
    private String description;
    // 1-based position of this page within its survey.
    private int pageNumber;
    // Page heading.
    private String title;
    // Questions rendered on this page, in display order.
    private List<QuestionInfo> questions;

    public SurveyPageInfo() {
    }

    public Integer getId() {
        return id;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public int getPageNumber() {
        return pageNumber;
    }

    public void setPageNumber(int pageNumber) {
        this.pageNumber = pageNumber;
    }

    public String getTitle() {
        return title;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    public List<QuestionInfo> getQuestions() {
        return questions;
    }

    public void setQuestions(List<QuestionInfo> questions) {
        this.questions = questions;
    }

    @Override
    public String toString() {
        // Builds exactly the same representation as simple concatenation would.
        StringBuilder text = new StringBuilder("SurveyPageInfo [id=");
        text.append(id)
            .append(", description=").append(description)
            .append(", pageNumber=").append(pageNumber)
            .append(", title=").append(title)
            .append(", questions=").append(questions)
            .append("]");
        return text.toString();
    }
}
package org.jfree.chart.util;

import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.image.BufferedImage;
import java.awt.image.DataBufferInt;
import java.io.Serializable;

import org.jfree.chart.HashUtilities;

/**
 * A {@link ShadowGenerator} that renders a soft drop shadow by blurring the
 * alpha channel of the source image (two-pass sliding-window average, see
 * {@link #applyShadow(BufferedImage)}) and tinting it with a configurable
 * color, opacity, offset distance and offset angle.
 */
public class DefaultShadowGenerator implements ShadowGenerator, Serializable {

    private static final long serialVersionUID = 2732993885591386064L;

    /** The shadow size. */
    private int shadowSize;

    /** The shadow color. */
    private Color shadowColor;

    /** The shadow opacity. */
    private float shadowOpacity;

    /** The shadow offset angle (in radians). */
    private double angle;

    /** The shadow offset distance (in Java2D units). */
    private int distance;

    /**
     * Creates a new instance with default attributes (size 5, black, 50%
     * opacity, distance 5, angle -pi/4).
     */
    public DefaultShadowGenerator() {
        this(5, Color.black, 0.5f, 5, -Math.PI / 4);
    }

    /**
     * Creates a new instance with the specified attributes.
     *
     * @param size  the shadow size (also the blur-window width).
     * @param color  the shadow color (<code>null</code> not permitted).
     * @param opacity  the shadow opacity.
     * @param distance  the shadow offset distance.
     * @param angle  the shadow offset angle (in radians).
     */
    public DefaultShadowGenerator(int size, Color color, float opacity,
            int distance, double angle) {
        ParamChecks.nullNotPermitted(color, "color");
        this.shadowSize = size;
        this.shadowColor = color;
        this.shadowOpacity = opacity;
        this.distance = distance;
        this.angle = angle;
    }

    /**
     * Returns the shadow size.
     *
     * @return The shadow size.
     */
    public int getShadowSize() {
        return this.shadowSize;
    }

    /**
     * Returns the shadow color.
     *
     * @return The shadow color (never <code>null</code>).
     */
    public Color getShadowColor() {
        return this.shadowColor;
    }

    /**
     * Returns the shadow opacity.
     *
     * @return The shadow opacity.
     */
    public float getShadowOpacity() {
        return this.shadowOpacity;
    }

    /**
     * Returns the shadow offset distance.
     *
     * @return The shadow offset distance (in Java2D units).
     */
    public int getDistance() {
        return this.distance;
    }

    /**
     * Returns the shadow offset angle (in radians).
     *
     * @return The angle (in radians).
     */
    public double getAngle() {
        return this.angle;
    }

    /**
     * Calculates the x-offset for drawing the shadow image relative to the
     * source.  The shadowSize subtraction compensates for the padding added
     * in {@link #createDropShadow(BufferedImage)}.
     *
     * @return The x-offset.
     */
    public int calculateOffsetX() {
        return (int) (Math.cos(this.angle) * this.distance) - this.shadowSize;
    }

    /**
     * Calculates the y-offset for drawing the shadow image relative to the
     * source.  Negated because screen y grows downwards.
     *
     * @return The y-offset.
     */
    public int calculateOffsetY() {
        return -(int) (Math.sin(this.angle) * this.distance) - this.shadowSize;
    }

    /**
     * Creates and returns an image containing the drop shadow for the
     * specified source image.  The result is padded by shadowSize on every
     * side so the blur has room to spread.
     *
     * @param source  the source image.
     *
     * @return A new image containing the shadow.
     */
    public BufferedImage createDropShadow(BufferedImage source) {
        BufferedImage subject = new BufferedImage(
                source.getWidth() + this.shadowSize * 2,
                source.getHeight() + this.shadowSize * 2,
                BufferedImage.TYPE_INT_ARGB);
        Graphics2D g2 = subject.createGraphics();
        g2.drawImage(source, null, this.shadowSize, this.shadowSize);
        g2.dispose();
        applyShadow(subject);
        return subject;
    }

    /**
     * Applies a shadow to the image in place: a horizontal then a vertical
     * box-blur pass over the alpha channel, keeping a running sum of the last
     * shadowSize alpha values in a circular buffer (aHistory), and writing
     * each blurred alpha combined with the shadow color's RGB back into the
     * raster's int buffer.
     *
     * @param image  the image (modified in place; assumed TYPE_INT_ARGB so
     *     the raster's data buffer is a DataBufferInt).
     */
    protected void applyShadow(BufferedImage image) {
        int dstWidth = image.getWidth();
        int dstHeight = image.getHeight();

        // Split the blur window around the current pixel.
        int left = (this.shadowSize - 1) >> 1;
        int right = this.shadowSize - left;
        int xStart = left;
        int xStop = dstWidth - right;
        int yStart = left;
        int yStop = dstHeight - right;

        // Shadow color with its own alpha masked off; blurred alpha is OR'd in.
        int shadowRgb = this.shadowColor.getRGB() & 0x00FFFFFF;

        // Circular buffer holding the last shadowSize alpha samples.
        int[] aHistory = new int[this.shadowSize];
        int historyIdx = 0;

        int aSum;

        int[] dataBuffer = ((DataBufferInt) image.getRaster().getDataBuffer()).getData();
        int lastPixelOffset = right * dstWidth;
        // Averages the window and applies the opacity in one multiply.
        float sumDivider = this.shadowOpacity / this.shadowSize;

        // horizontal pass
        for (int y = 0, bufferOffset = 0; y < dstHeight; y++, bufferOffset = y * dstWidth) {
            aSum = 0;
            historyIdx = 0;
            // Prime the window with the first shadowSize pixels of the row.
            for (int x = 0; x < this.shadowSize; x++, bufferOffset++) {
                int a = dataBuffer[bufferOffset] >>> 24;
                aHistory[x] = a;
                aSum += a;
            }
            bufferOffset -= right;
            for (int x = xStart; x < xStop; x++, bufferOffset++) {
                int a = (int) (aSum * sumDivider);
                dataBuffer[bufferOffset] = a << 24 | shadowRgb;
                // subtract the oldest pixel from the sum
                aSum -= aHistory[historyIdx];
                // get the latest pixel
                a = dataBuffer[bufferOffset + right] >>> 24;
                aHistory[historyIdx] = a;
                aSum += a;
                if (++historyIdx >= this.shadowSize) {
                    historyIdx -= this.shadowSize;
                }
            }
        }
        // vertical pass (same sliding window, stepping by one row at a time)
        for (int x = 0, bufferOffset = 0; x < dstWidth; x++, bufferOffset = x) {
            aSum = 0;
            historyIdx = 0;
            for (int y = 0; y < this.shadowSize; y++, bufferOffset += dstWidth) {
                int a = dataBuffer[bufferOffset] >>> 24;
                aHistory[y] = a;
                aSum += a;
            }
            bufferOffset -= lastPixelOffset;
            for (int y = yStart; y < yStop; y++, bufferOffset += dstWidth) {
                int a = (int) (aSum * sumDivider);
                dataBuffer[bufferOffset] = a << 24 | shadowRgb;
                // subtract the oldest pixel from the sum
                aSum -= aHistory[historyIdx];
                // get the latest pixel
                a = dataBuffer[bufferOffset + lastPixelOffset] >>> 24;
                aHistory[historyIdx] = a;
                aSum += a;
                if (++historyIdx >= this.shadowSize) {
                    historyIdx -= this.shadowSize;
                }
            }
        }
    }

    /**
     * Tests this object for equality with an arbitrary object.
     *
     * @param obj  the object (<code>null</code> permitted).
     *
     * @return A boolean.
     */
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        if (!(obj instanceof DefaultShadowGenerator)) {
            return false;
        }
        DefaultShadowGenerator that = (DefaultShadowGenerator) obj;
        if (this.shadowSize != that.shadowSize) {
            return false;
        }
        if (!this.shadowColor.equals(that.shadowColor)) {
            return false;
        }
        if (this.shadowOpacity != that.shadowOpacity) {
            return false;
        }
        if (this.distance != that.distance) {
            return false;
        }
        if (this.angle != that.angle) {
            return false;
        }
        return true;
    }

    /**
     * Returns a hash code for this instance, combining all fields compared
     * by {@link #equals(Object)}.
     *
     * @return The hash code.
     */
    public int hashCode() {
        int hash = HashUtilities.hashCode(17, this.shadowSize);
        hash = HashUtilities.hashCode(hash, this.shadowColor);
        hash = HashUtilities.hashCode(hash, this.shadowOpacity);
        hash = HashUtilities.hashCode(hash, this.distance);
        hash = HashUtilities.hashCode(hash, this.angle);
        return hash;
    }
}
package net.hexid.hexbot.bots; import java.util.ArrayList; import javafx.event.ActionEvent; import javafx.event.EventHandler; import javafx.geometry.Insets; import javafx.scene.Node; import javafx.scene.control.Button; import javafx.scene.control.ButtonBuilder; import javafx.scene.control.Label; import javafx.scene.control.PasswordField; import javafx.scene.control.RadioButton; import javafx.scene.control.TextField; import javafx.scene.control.ToggleGroup; import javafx.scene.layout.HBox; import javafx.scene.layout.Priority; import javafx.scene.layout.VBox; import net.hexid.hexbot.bot.Bots; public class TestTab extends net.hexid.hexbot.bot.gui.BotTab { private Button startProcessButton, stopProcessButton; public TestTab(String botID) { super(botID); } protected Node defaultContent() { return createOutputContent(); } public void processExitCode(int exitCode) { // print the return code and // swap the state of buttons appendOutput("Exit Code: " + exitCode); startProcessButton.setDisable(false); stopProcessButton.setDisable(true); } public ArrayList<String> getBotExecuteData() { return new ArrayList<String>(); } protected Node[] createBottomOutputContent() { startProcessButton = ButtonBuilder.create().text("Start") .onAction(new EventHandler<ActionEvent>() { public void handle(ActionEvent e) { createProcess(); startProcessButton.setDisable(true); stopProcessButton.setDisable(false); } }).maxWidth(Double.MAX_VALUE).build(); HBox.setHgrow(startProcessButton, Priority.ALWAYS); stopProcessButton = ButtonBuilder.create().text("Stop") .onAction(new EventHandler<ActionEvent>() { public void handle(ActionEvent e) { killProcess(); } }).disable(true).maxWidth(Double.MAX_VALUE).build(); HBox.setHgrow(stopProcessButton, Priority.ALWAYS); return new Node[]{startProcessButton, stopProcessButton}; } }
package org.apache.batik.bridge;

import java.awt.Point;
import java.awt.geom.AffineTransform;
import java.awt.geom.Point2D;
import java.io.IOException;
import java.io.StringReader;
import org.apache.batik.gvt.GraphicsNode;
import org.apache.batik.gvt.event.EventDispatcher;
import org.apache.batik.gvt.event.GraphicsNodeKeyEvent;
import org.apache.batik.gvt.event.GraphicsNodeKeyListener;
import org.apache.batik.gvt.event.GraphicsNodeMouseEvent;
import org.apache.batik.gvt.event.GraphicsNodeMouseListener;
import org.apache.batik.script.Interpreter;
import org.apache.batik.script.InterpreterException;
import org.apache.batik.script.InterpreterPool;
import org.apache.batik.util.SVGConstants;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.w3c.dom.events.DocumentEvent;
import org.w3c.dom.events.Event;
import org.w3c.dom.events.EventListener;
import org.w3c.dom.events.EventTarget;
import org.w3c.dom.events.MouseEvent;
import org.w3c.dom.svg.SVGElement;
import org.w3c.dom.svg.SVGSVGElement;

/**
 * A class to attach listeners on the <code>Document</code> to
 * call pieces of script when necessary and to attach a listener
 * on the GVT root to propagate GVT events to the DOM.
 * @author <a href="mailto:cjolif@ilog.fr">Christophe Jolif</a>
 * @author <a href="mailto:stephane@hillion.org">Stephane Hillion</a>
 * @version $Id$
 */
class BridgeEventSupport implements SVGConstants {

    // Event-attribute names handled for graphics elements (plus svg).
    private static final String[] EVENT_ATTRIBUTES_GRAPHICS = {
        // graphics + svg
        "onfocusin", "onfocusout", "onactivate", "onclick", "onmousedown",
        "onmouseup", "onmouseover", "onmouseout", "onmousemove", "onload"
    };

    // Index into EVENT_NAMES where the document-level events begin.
    private static final int FIRST_SVG_EVENT = 10;

    private static final String[] EVENT_ATTRIBUTES_SVG = {
        // document
        "onunload", "onabort", "onerror", "onresize", "onscroll", "onzoom"
    };

    // Index into EVENT_NAMES where the animation events begin.
    private static final int FIRST_ANIMATION_EVENT = 16;

    private static final String[] EVENT_ATTRIBUTES_ANIMATION = {
        // animation
        "onbegin", "onend", "onrepeat"
    };

    // DOM event names, parallel to the attribute arrays above:
    // [0..9] graphics, [10..15] document, [16..18] animation.
    private static final String[] EVENT_NAMES = {
        // all
        "focusin", "focusout", "activate", "click", "mousedown", "mouseup",
        "mouseover", "mouseout", "mousemove", "SVGLoad",
        // document
        "SVGUnload", "SVGAbort", "SVGError", "SVGResize", "SVGScroll",
        "SVGZoom",
        // animation
        "beginEvent", "endEvent", "repeatEvent"
    };

    // Utility class: no instances.
    private BridgeEventSupport() {}

    /**
     * Creates and add a listener on the element to call script
     * when necessary.  The set of event attributes inspected depends on the
     * element: svg root elements get the document events, set/animate*
     * elements get the animation events, everything else gets the graphics
     * events.
     * @param ctx the <code>BridgeContext</code> containing useful
     * information.
     * @param element the DOM SVGElement corresponding to the node. It should
     * also be an instance of <code>EventTarget</code> otherwise no listener
     * will be added.
     */
    public static void addDOMListener(BridgeContext ctx,
                                      SVGElement element) {
        EventTarget target = null;
        try {
            // ability for scripts to be called
            target = (EventTarget)element;
        } catch (ClassCastException e) {
            // will not work on this one!
            return;
        }
        // Find the owning svg element; the scripting language is declared
        // there via contentScriptType.
        SVGSVGElement svgElement = (SVGSVGElement)element.getOwnerSVGElement();
        if (svgElement == null) {
            if (element.getLocalName().equals(SVG_SVG_TAG)) {
                // element is itself the root svg element
                svgElement = (SVGSVGElement)element;
            } else {
                // something goes wrong => disable scripting
                return;
            }
        }
        String language = svgElement.getContentScriptType();
        Interpreter interpret = null;
        String script = null;
        // <!> TODO we need to memo listeners to be able to remove
        // them later when deconnecting the bridge binding...
        if (element.getLocalName().equals(SVG_SVG_TAG)) {
            // root svg element: register the document-level events
            for (int i = 0; i < EVENT_ATTRIBUTES_SVG.length; i++) {
                if (!(script = element.getAttribute(EVENT_ATTRIBUTES_SVG[i])).
                    equals("")) {
                    if (interpret == null) {
                        // try to get the intepreter only if we have
                        // a reason to do it!
                        interpret = ctx.getInterpreterPool().
                            getInterpreter(element.getOwnerDocument(),
                                           language);
                        // the interpreter is not avaible => stop it now!
                        if (interpret == null) {
                            UserAgent ua = ctx.getUserAgent();
                            if (ua != null)
                                ua.displayError("unknow language: "+language);
                            break;
                        }
                    }
                    target.
                        addEventListener(EVENT_NAMES[i+FIRST_SVG_EVENT],
                                         new ScriptCaller(ctx.getUserAgent(),
                                                          script, interpret),
                                         false);
                }
            }
            // continue
        } else
            if (element.getLocalName().equals("set") ||
                element.getLocalName().startsWith("animate")) {
                // animation elements: register beginEvent/endEvent/repeatEvent
                for (int i = 0; i < EVENT_ATTRIBUTES_ANIMATION.length; i++) {
                    if (!(script =
                          element.getAttribute(EVENT_ATTRIBUTES_ANIMATION[i])).
                        equals("")) {
                        if (interpret == null) {
                            // try to get the intepreter only if we have
                            // a reason to do it!
                            interpret = ctx.getInterpreterPool().
                                getInterpreter(element.getOwnerDocument(),
                                               language);
                            // the interpreter is not avaible => stop it now!
                            if (interpret == null) {
                                UserAgent ua = ctx.getUserAgent();
                                if (ua != null)
                                    ua.displayError("unknow language: "+
                                                    language);
                                break;
                            }
                        }
                        target.
                            addEventListener(EVENT_NAMES[i+
                                                         FIRST_ANIMATION_EVENT],
                                             new ScriptCaller(ctx.getUserAgent(),
                                                              script,
                                                              interpret),
                                             false);
                    }
                }
                // not other stuff to do on this kind of events
                return;
            }
        // graphics events apply both to graphics elements and the svg root
        for (int i = 0; i < EVENT_ATTRIBUTES_GRAPHICS.length; i++) {
            if (!(script = element.getAttribute(EVENT_ATTRIBUTES_GRAPHICS[i])).
                equals("")) {
                if (interpret == null) {
                    // try to get the intepreter only if we have
                    // a reason to do it!
                    interpret = ctx.getInterpreterPool().
                        getInterpreter(element.getOwnerDocument(),
                                       language);
                    // the interpreter is not avaible => stop it now!
                    if (interpret == null) {
                        UserAgent ua = ctx.getUserAgent();
                        if (ua != null)
                            ua.displayError("unknow language: "+language);
                        break;
                    }
                }
                target.
                    addEventListener(EVENT_NAMES[i],
                                     new ScriptCaller(ctx.getUserAgent(),
                                                      script, interpret),
                                     false);
            }
        }
    }

    // Intentionally empty (not implemented).
    public static void updateDOMListener(BridgeContext ctx,
                                         SVGElement element) {
    }

    /**
     * Is called only for the root element in order to dispatch GVT
     * events to the DOM.  Also hooks an SVGUnload listener that tears the
     * mouse listener down again.
     */
    public static void addGVTListener(BridgeContext ctx, Element svgRoot) {
        UserAgent ua = ctx.getUserAgent();
        if (ua != null) {
            EventDispatcher dispatcher = ua.getEventDispatcher();
            if (dispatcher != null) {
                final Listener listener = new Listener(ctx, ua);
                dispatcher.addGraphicsNodeMouseListener(listener);
                ((EventTarget)svgRoot).
                    addEventListener("SVGUnload",
                                     new UnloadListener(dispatcher, listener),
                                     false);
            }
        }
    }

    /**
     * Evaluates the content of every &lt;script&gt; element in the document
     * with the interpreter matching its "type" attribute.
     */
    public static void loadScripts(BridgeContext ctx, Document doc) {
        NodeList list = doc.getElementsByTagName("script");
        final UserAgent ua = ctx.getUserAgent();
        String language = null;
        Element selement = null;
        for (int i = 0; i < list.getLength(); i++) {
            language = (selement = (Element)list.item(i)).
                getAttribute("type");
            final Interpreter interpret =
                ctx.getInterpreterPool().getInterpreter(doc, language);
            if (interpret != null) {
                // concatenate all child text nodes to form the script source
                final StringBuffer script = new StringBuffer();
                for (Node n = selement.getFirstChild(); n != null;
                     n = n.getNextSibling()) {
                    script.append(n.getNodeValue());
                }
                try {
                    interpret.evaluate
                        (new StringReader(script.toString()));
                } catch (IOException io) {
                    // will never appeared we don't use a file
                } catch (InterpreterException e) {
                    if (ua != null)
                        ua.displayError("scripting error: " +
                                        e.getMessage());
                }
            } else
                if (ua != null)
                    ua.displayError("unknown language: "+language);
        }
    }

    /**
     * On SVGUnload, removes the GVT mouse listener installed by
     * {@link #addGVTListener} and then removes itself.
     */
    private static class UnloadListener implements EventListener {
        private EventDispatcher dispatcher;
        private Listener listener;

        UnloadListener(EventDispatcher dispatcher, Listener listener) {
            this.dispatcher = dispatcher;
            this.listener = listener;
        }

        public void handleEvent(Event evt) {
            dispatcher.removeGraphicsNodeMouseListener(listener);
            evt.getTarget().removeEventListener("SVGUnload", this, false);
        }
    }

    /**
     * Translates GVT mouse events into DOM mouse events on the element
     * bound to the GVT node that received them.
     */
    private static class Listener implements GraphicsNodeMouseListener {
        private BridgeContext context;
        private UserAgent ua;
        // Node last hovered during a drag; used to synthesize
        // mouseout/mouseover pairs when the drag crosses node boundaries.
        private GraphicsNode lastTarget;

        public Listener(BridgeContext ctx, UserAgent u) {
            context = ctx;
            ua = u;
        }

        public void mouseClicked(GraphicsNodeMouseEvent evt) {
            dispatchMouseEvent("click", evt, true);
        }

        public void mousePressed(GraphicsNodeMouseEvent evt) {
            dispatchMouseEvent("mousedown", evt, true);
        }

        public void mouseReleased(GraphicsNodeMouseEvent evt) {
            dispatchMouseEvent("mouseup", evt, true);
        }

        public void mouseEntered(GraphicsNodeMouseEvent evt) {
            dispatchMouseEvent("mouseover", evt, true);
        }

        public void mouseExited(GraphicsNodeMouseEvent evt) {
            dispatchMouseEvent("mouseout", evt, true);
        }

        public void mouseDragged(GraphicsNodeMouseEvent evt) {
            GraphicsNode node = evt.getRelatedNode();
            GraphicsNodeMouseEvent evt2 = null;
            // If the drag moved onto a different node, fire mouseout on the
            // old node and mouseover on the new one before the mousemove.
            if (lastTarget != node) {
                if (lastTarget != null) {
                    evt2 = new GraphicsNodeMouseEvent(lastTarget,
                                                      evt.MOUSE_EXITED,
                                                      evt.getWhen(),
                                                      evt.getModifiers(),
                                                      evt.getX(),
                                                      evt.getY(),
                                                      evt.getClickCount(),
                                                      lastTarget);
                    dispatchMouseEvent("mouseout",
                                       evt2,
                                       true);
                }
                if (node != null) {
                    evt2 = new GraphicsNodeMouseEvent(node,
                                                      evt.MOUSE_ENTERED,
                                                      evt.getWhen(),
                                                      evt.getModifiers(),
                                                      evt.getX(),
                                                      evt.getY(),
                                                      evt.getClickCount(),
                                                      lastTarget);
                    dispatchMouseEvent("mouseover",
                                       evt2,
                                       true);
                }
            }
            try {
                if (node != null) {
                    evt2 = new GraphicsNodeMouseEvent(node,
                                                      evt.MOUSE_MOVED,
                                                      evt.getWhen(),
                                                      evt.getModifiers(),
                                                      evt.getX(),
                                                      evt.getY(),
                                                      evt.getClickCount(),
                                                      null);
                    dispatchMouseEvent("mousemove",
                                       evt2,
                                       true);
                }
            } finally {
                // remember the node even if dispatch threw
                lastTarget = node;
            }
        }

        public void mouseMoved(GraphicsNodeMouseEvent evt) {
            dispatchMouseEvent("mousemove", evt, false);
        }

        /**
         * Builds a DOM MouseEvent from a GVT mouse event (converting GVT
         * coordinates through the user-agent transform into client and
         * screen coordinates) and dispatches it on the DOM element bound to
         * the event's graphics node.
         */
        private void dispatchMouseEvent(String eventType,
                                        GraphicsNodeMouseEvent evt,
                                        boolean cancelok) {
            Point2D pos = evt.getPoint2D();
            AffineTransform transform = ua.getTransform();
            if (transform != null && !transform.isIdentity())
                transform.transform(pos, pos);
            Point screen = ua.getClientAreaLocationOnScreen();
            screen.translate((int)Math.floor(pos.getX()),
                             (int)Math.floor(pos.getY()));
            // compute screen coordinates
            GraphicsNode node = evt.getGraphicsNode();
            Element elmt = context.getElement(node);
            if (elmt == null) // should not appeared if binding on
                return;
            EventTarget target = (EventTarget)elmt;
            // <!> TODO dispatch it only if pointers-event property ask for
            // Map AWT button masks to DOM button numbers (0/1/2);
            // default is 1 (middle) when neither mask matches.
            short button = 1;
            if ((evt.BUTTON1_MASK & evt.getModifiers()) != 0)
                button = 0;
            else if ((evt.BUTTON3_MASK & evt.getModifiers()) != 0)
                button = 2;
            MouseEvent mevent =
                // DOM Level 2 6.5 cast from Document to DocumentEvent is ok
                (MouseEvent)org.apache.batik.dom.events.EventSupport.
                createEvent(org.apache.batik.dom.events.EventSupport.
                            MOUSE_EVENT_TYPE);
            // deal with the related node/target
            node = evt.getRelatedNode();
            EventTarget relatedTarget =
                (EventTarget)context.getElement(node);
            mevent.initMouseEvent(eventType, true, cancelok, null,
                                  evt.getClickCount(), screen.x, screen.y,
                                  (int)Math.floor(pos.getX()),
                                  (int)Math.floor(pos.getY()),
                                  evt.isControlDown(), evt.isAltDown(),
                                  evt.isShiftDown(), evt.isMetaDown(),
                                  button, relatedTarget);
            try {
                target.dispatchEvent(mevent);
            } catch (RuntimeException e) {
                // runtime exceptions may appear we need to display them...
                ua.displayError("scripting error in event handling: "+
                                e.getMessage());
            }
        }
    }

    /**
     * EventListener that, when triggered, binds the DOM event to the name
     * "evt" in the interpreter and evaluates the stored script.
     */
    public static class ScriptCaller implements EventListener {
        private static String EVENT_NAME = "evt";

        private String script = null;
        private Interpreter interpreter = null;
        private UserAgent ua = null;

        public ScriptCaller(UserAgent agent,
                            String str, Interpreter interpret) {
            script = str;
            interpreter = interpret;
            ua = agent;
        }

        public void handleEvent(Event evt) {
            // expose the event to the script as "evt"
            interpreter.bindObject(EVENT_NAME, evt);
            try {
                interpreter.evaluate(new StringReader(script));
            } catch (IOException io) {
                // will never appeared we don't use a file
            } catch (InterpreterException e) {
                if (ua != null)
                    ua.displayError("scripting error: " +
                                    e.getMessage());
            }
        }
    }
}
package org.gem.engine; import java.io.BufferedOutputStream; import java.io.BufferedWriter; import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStreamWriter; import java.io.Reader; import java.lang.reflect.InvocationTargetException; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Random; import java.util.Set; import org.apache.commons.configuration.AbstractFileConfiguration; import org.apache.commons.configuration.Configuration; import org.apache.commons.configuration.ConfigurationConverter; import org.apache.commons.configuration.ConfigurationException; import org.apache.commons.configuration.PropertiesConfiguration; import org.apache.commons.io.FilenameUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.gem.UnoptimizedDeepCopy; import org.gem.calc.GroundMotionFieldCalculator; import org.gem.calc.StochasticEventSetGenerator; import org.gem.engine.CalculatorConfigHelper.CalculationMode; import org.gem.engine.CalculatorConfigHelper.ConfigItems; import org.gem.engine.hazard.GEM1ERF; import org.gem.engine.hazard.GEMHazardCurveRepository; import org.gem.engine.hazard.GEMHazardCurveRepositoryList; import org.gem.engine.hazard.GemComputeHazard; import org.gem.engine.hazard.memcached.Cache; import org.gem.engine.logictree.LogicTree; import org.gem.engine.logictree.LogicTreeBranch; import org.gem.engine.logictree.LogicTreeRule; import org.gem.engine.logictree.LogicTreeRuleParam; import org.opensha.commons.data.Site; import org.opensha.commons.data.TimeSpan; import org.opensha.commons.data.function.ArbitrarilyDiscretizedFunc; import org.opensha.commons.geo.BorderType; import org.opensha.commons.geo.GriddedRegion; import org.opensha.commons.geo.Location; import 
org.opensha.commons.geo.LocationList; import org.opensha.commons.param.DoubleParameter; import org.opensha.sha.earthquake.EqkRupForecast; import org.opensha.sha.earthquake.EqkRupture; import org.opensha.sha.earthquake.rupForecastImpl.GEM1.SourceData.GEMAreaSourceData; import org.opensha.sha.earthquake.rupForecastImpl.GEM1.SourceData.GEMFaultSourceData; import org.opensha.sha.earthquake.rupForecastImpl.GEM1.SourceData.GEMPointSourceData; import org.opensha.sha.earthquake.rupForecastImpl.GEM1.SourceData.GEMSourceData; import org.opensha.sha.earthquake.rupForecastImpl.GEM1.SourceData.GEMSubductionFaultSourceData; import org.opensha.sha.imr.ScalarIntensityMeasureRelationshipAPI; import org.opensha.sha.imr.param.SiteParams.DepthTo2pt5kmPerSecParam; import org.opensha.sha.imr.param.SiteParams.Vs30_Param; import org.opensha.sha.magdist.GutenbergRichterMagFreqDist; import org.opensha.sha.magdist.IncrementalMagFreqDist; import org.opensha.sha.util.TectonicRegionType; import com.google.gson.Gson; public class CommandLineCalculator { // Apache commons logging, not log4j specifically // Note that for application code, declaring the log member as "static" is // more efficient as one Log object is created per class, and is // recommended. However this is not safe to do for a class which may be // deployed via a "shared" classloader in a servlet or j2ee container or // similar environment. If the class may end up invoked with different // thread-context-classloader values set then the member must not be // declared static. The use of "static" should therefore be avoided in // code within any "library" type project. private static Log logger = LogFactory.getLog(CommandLineCalculator.class); // random is to access through its getter method private static Random random = null; private static Long randomSeed = null; private Configuration config; // for debugging private static Boolean D = false; /** * * @param inStream * e.g. 
the file input stream * @throws ConfigurationException */ public CommandLineCalculator(final InputStream inStream) throws ConfigurationException { // load calculation configuration data config = new PropertiesConfiguration(); ((PropertiesConfiguration) config).load(inStream, null); } // constructor public CommandLineCalculator(Reader reader) throws ConfigurationException { config = new PropertiesConfiguration(); // load calculation configuration data ((PropertiesConfiguration) config).load(reader); } // constructor public CommandLineCalculator(Properties p) { config = ConfigurationConverter.getConfiguration(p); } // constructor public CommandLineCalculator(String calcConfigFile) throws ConfigurationException { config = new PropertiesConfiguration(); ((PropertiesConfiguration) config).load(calcConfigFile); } // constructor public CommandLineCalculator(Cache cache, String key) { Properties properties = new Gson().fromJson((String) cache.get(key), Properties.class); config = ConfigurationConverter.getConfiguration(properties); } public void setConfig(Properties p) { config = ConfigurationConverter.getConfiguration(p); } // setConfig() public void setConfig(Configuration c) { config = c; } /** * If the property with given key already exists, this adds a property, and * does not replace it. So this can result in multi valued properties. * properties. * * @param key * @param value */ public void addConfigItem(String key, String value) { // the member is private and not null config.addProperty(key, value); } /** * If the property with given key already exists, its value will be replaced * by the one passed in. 
* * @param key * @param value */ public void setConfigItem(String key, String value) { // the member is private and not null config.setProperty(key, value); } private String configFilesPath() { return FilenameUtils.getFullPath(((AbstractFileConfiguration) config) .getPath()); } private String getRelativePath(String key) { return configFilesPath() + config.getString(key); } /** * Two calculators are equal when have the same configuration. * * @param obj * the calculator to compare on * @return true if the calculators are equal, false otherwise */ @Override public boolean equals(Object obj) { if (!(obj instanceof CommandLineCalculator)) { return false; } CommandLineCalculator other = (CommandLineCalculator) obj; Properties thisConfig = ConfigurationConverter.getProperties(config); Properties otherConfig = ConfigurationConverter.getProperties(other.config); return thisConfig.equals(otherConfig); } /** * After a call to this method a Random generator is initialised and its * seed can be retrieved by <code>getRandomSeed()</code> * * @return This' class random generator. This also initialises this' class * randomSeed such that getRandomSeed does not return null anymore. */ public static Random getRandom() { if (random == null) { randomSeed = new Date().getTime(); random = new Random(randomSeed); } return random; } /** * * @return The long value that is used to initialise this' class random * generator. This is null if the random generator has not yet been * initialised. To do so, call <code>getRandom()</code> * @see getRandom() */ public static Long getRandomSeed() { return randomSeed; } /** * This is the main method that do the calculations. According to the * specifications in the configuration file the method will do the required * calculations. 
*/ public void doCalculation() throws ConfigurationException { StringBuffer logMsg = new StringBuffer(); // start chronometer long startTimeMs = System.currentTimeMillis(); // get calculation mode String calculationMode = config.getString(ConfigItems.CALCULATION_MODE.name()); if (calculationMode.equalsIgnoreCase(CalculationMode.MONTE_CARLO .value())) { // do calculation by random sampling end-branch models doCalculationThroughMonteCarloApproach(); } else if (calculationMode.equalsIgnoreCase(CalculationMode.FULL .value())) { // do calculation for each end branch model doFullCalculation(); } else { logMsg.append("Calculation mode: " + config.getString(ConfigItems.CALCULATION_MODE.name()) + " not recognized. Check the configuration file!\n" + "Execution stops!"); logger.info(logMsg); throw new ConfigurationException(logMsg.toString()); } // calculate elapsed time long taskTimeMs = System.currentTimeMillis() - startTimeMs; logMsg .append("Wall clock time (including time for saving output files)\n"); // 1h = 60*60*10^3 ms logMsg.append(String.format("hours : %6.3f\n", taskTimeMs / (60 * 60 * Math.pow(10, 3)))); // System.out.printf("hours : %6.3f\n", taskTimeMs / (60 * 60 * // Math.pow(10, 3))); // 1 min = 60*10^3 ms logMsg.append(String.format("minutes: %6.3f\n", taskTimeMs / (60 * Math.pow(10, 3)))); // System.out.printf("minutes: %6.3f\n", taskTimeMs / (60 * Math.pow(10, logger.info(logMsg); } // doCalculation() /** * This method is analogue to {@link doCalculation} </br> But because it * returns a value instead of saving results to a file, this calculation can * not just be triggered by {@link doCalculation}, using a flag such as * "flagProbabilisticEventBase" to distinct the "normal" case and the * probabilistic event based case. 
* * @return a ground motion map */ public Map<Site, Double> doCalculationProbabilisticEventBased() throws ConfigurationException { Map<Site, Double> result = null; StringBuffer logMsg = new StringBuffer(); // start chronometer long startTimeMs = System.currentTimeMillis(); // get calculation mode String calculationMode = config.getString(ConfigItems.CALCULATION_MODE.name()); if (calculationMode.equalsIgnoreCase(CalculationMode.MONTE_CARLO .value())) { // do calculation by random sampling end-branch models result = doProbabilisticEventBasedCalcThroughMonteCarloLogicTreeSampling(); } else if (calculationMode.equalsIgnoreCase(CalculationMode.FULL .value())) { // do calculation for each end branch model result = doProbabilisticEventBasedCalcForAllLogicTreeEndBranches(); } else { logMsg.append("Calculation mode: " + config.getString(ConfigItems.CALCULATION_MODE.name()) + " not recognized. Check the configuration file!\n" + "Execution stops!"); logger.info(logMsg); throw new ConfigurationException(logMsg.toString()); } // calculate elapsed time long taskTimeMs = System.currentTimeMillis() - startTimeMs; logMsg .append("Wall clock time (including time for saving output files)\n"); // 1h = 60*60*10^3 ms logMsg.append(String.format("hours : %6.3f\n", taskTimeMs / (60 * 60 * Math.pow(10, 3)))); // 1 min = 60*10^3 ms logMsg.append(String.format("minutes: %6.3f\n", taskTimeMs / (60 * Math.pow(10, 3)))); logger.info(logMsg); return result; } // doCalculationProbabilisticEventBased() /** * Suggested format for a jsonized GMF {'gmf_id' : { 'eqkrupture_id' : { * 'site_id' : {'lat' : lat_val, 'lon' : lon_val, 'mag' : double_val}}}} * * From identifiers.py, these are what the expected keys look like (this * makes no expectation of the values), the keys are after the colon. 
* * sites: job_id!block_id!!sites gmf: job_id!block_id!!gmf gmf: * job_id!block_id!site!gmf * * @return */ public static String jsonizeGroundMotionFields(String gmfId, String[] eqkRuptureIds, String[] siteIds, Map<EqkRupture, Map<Site, Double>> groundMotionFields) { StringBuilder result = new StringBuilder(); int ruptureCount = eqkRuptureIds.length; int siteCount = siteIds.length; int gmfCount = groundMotionFields.size(); if (!(ruptureCount * siteCount == gmfCount)) { String msg = ruptureCount + " ruptures * " + siteCount + " sites. \n-> There are " + ruptureCount + " * " + siteCount + " = " + ruptureCount * siteCount + " GMFs ecxpected but there are: " + gmfCount; throw new IllegalArgumentException(msg); } Gson gson = new Gson(); // TODO: // The EqkRupture memcache keys must be known here. // For now behave, as if the map object is ordered. Set<EqkRupture> groundMotionFieldsKeys = groundMotionFields.keySet(); int indexEqkRupture = 0; for (EqkRupture eqkRupture : groundMotionFieldsKeys) { result.append(gson.toJson(eqkRuptureIds[indexEqkRupture])); result.append(gson.toJson(":")); ++indexEqkRupture; Map<Site, Double> groundMotionField = groundMotionFields.get(eqkRupture); // TODO: // The Site memcache keys must be known here. // For now behave, as if the map object is ordered. Set<Site> groundMotionFieldKeys = groundMotionField.keySet(); int indexSite = 0; for (Site s : groundMotionFieldKeys) { result.append(gson.toJson(siteIds[indexSite])); result.append(":"); result.append(gson.toJson("{ 'lat': " + gson.toJson(s.getLocation().getLatitude()))); // key = new StringBuilder(); // key.append(indexEqkRupture); // key.append('_'); // key.append(s.getLocation().getLatitude()); // key.append('_'); // key.append(s.getLocation().getLongitude()); // cache.set(key.toString(), groundMotionField.get(s)); // allKeys.add(key.toString()); } } return result.toString(); } /** * Saves a ground motion map to a Cache object. 
 *
 * @param cache
 *            the cache to store the ground motion map
 * @return a List<String> object containing all keys used as key in the
 *         cache's hash map
 */
public static List<String> storeToMemcache(
        Map<EqkRupture, Map<Site, Double>> groundMotionFields, Cache cache) {
    ArrayList<String> allKeys = new ArrayList<String>();
    StringBuilder key = null;
    Set<EqkRupture> groundMotionFieldsKeys = groundMotionFields.keySet();
    // 1-based rupture counter: incremented before first use, so keys start
    // with "1_" (note: jsonizeGroundMotionFields indexes from 0 instead).
    int indexEqkRupture = 0;
    for (EqkRupture eqkRupture : groundMotionFieldsKeys) {
        ++indexEqkRupture;
        Map<Site, Double> groundMotionField =
                groundMotionFields.get(eqkRupture);
        Set<Site> groundMotionFieldKeys = groundMotionField.keySet();
        for (Site s : groundMotionFieldKeys) {
            // cache key format: <ruptureIndex>_<latitude>_<longitude>
            key = new StringBuilder();
            key.append(indexEqkRupture);
            key.append('_');
            key.append(s.getLocation().getLatitude());
            key.append('_');
            key.append(s.getLocation().getLongitude());
            cache.set(key.toString(), groundMotionField.get(s));
            allKeys.add(key.toString());
        }
    }
    return allKeys;
}

/**
 * Classical PSHA calculation by Monte Carlo sampling of the ERF and GMPE
 * logic trees: generates NUMBER_OF_HAZARD_CURVE_CALCULATIONS realizations,
 * one hazard-curve set per random logic-tree sample, then writes the
 * outputs requested by the configuration (mean/individual ground motion
 * maps and hazard curves) to OUTPUT_DIR.
 */
private void doCalculationThroughMonteCarloApproach() {
    logger.info("Performing calculation through Monte Carlo Approach.\n");
    // load ERF logic tree data
    ErfLogicTreeData erfLogicTree = createErfLogicTreeData(config);
    // load GMPE logic tree data
    GmpeLogicTreeData gmpeLogicTree = createGmpeLogicTreeData(config);
    // instantiate the repository for the results
    GEMHazardCurveRepositoryList hcRepList =
            new GEMHazardCurveRepositoryList();
    // sites for calculation
    ArrayList<Site> sites = createSiteList(config);
    // number of hazard curves to be generated for each point
    int numHazCurves =
            config.getInt(ConfigItems.NUMBER_OF_HAZARD_CURVE_CALCULATIONS
                    .name());
    // These values do not change inside the loop, so they are read once
    // up front.
    int numOfThreads = config.getInt(ConfigItems.NUMBER_OF_PROCESSORS.name());
    ArbitrarilyDiscretizedFunc imlList =
            CalculatorConfigHelper.makeImlList(config);
    double maxDistance =
            config.getDouble(ConfigItems.MAXIMUM_DISTANCE.name());
    // loop over the number of hazard curves to be generated
    for (int i = 0; i < numHazCurves; i++) {
        logger.info("Realization number: " + (i + 1) + ", of: "
                + numHazCurves);
        // sampleGemLogicTreeERF() and sampleGemLogicTreeGMPE() must run on
        // every iteration: both ERF and GMPEs are randomly re-sampled.
        GemComputeHazard compHaz =
                new GemComputeHazard(numOfThreads, sites,
                        sampleGemLogicTreeERF(erfLogicTree
                                .getErfLogicTree(), config),
                        sampleGemLogicTreeGMPE(gmpeLogicTree
                                .getGmpeLogicTreeHashMap()), imlList,
                        maxDistance);
        // store results, labelled by realization index
        hcRepList.add(compHaz.getValues(), Integer.toString(i));
    } // for
    // save hazard curves (debug output, gated by the D flag)
    if (D)
        saveHazardCurveRepositoryListToAsciiFile(config
                .getString(ConfigItems.OUTPUT_DIR.name()), hcRepList);
    // create the requested output
    if (config.getBoolean(ConfigItems.MEAN_GROUND_MOTION_MAP.name())) {
        // calculate mean ground motion map for the given prob of exceedance
        // NOTE(review): "getMeanGrounMotionMap" (missing 'd') is the
        // repository API's actual spelling; doFullCalculation() calls
        // "getMeanGroundMotionMap" — verify both exist upstream.
        ArrayList<Double> meanGroundMotionMap =
                hcRepList.getMeanGrounMotionMap(config
                        .getDouble(ConfigItems.PROBABILITY_OF_EXCEEDANCE
                                .name()));
        // output file name built from probability of exceedance (as a
        // percentage) and investigation time
        String outfile =
                config.getString((ConfigItems.OUTPUT_DIR.name()))
                        + "meanGroundMotionMap_"
                        + config
                                .getDouble(ConfigItems.PROBABILITY_OF_EXCEEDANCE
                                        .name()) * 100 + "%"
                        + config.getString(ConfigItems.INVESTIGATION_TIME
                                .name()) + "yr.dat";
        saveGroundMotionMapToAsciiFile(outfile, meanGroundMotionMap,
                hcRepList.getHcRepList().get(0).getGridNode());
    }
    if (config.getBoolean(ConfigItems.INDIVIDUAL_GROUND_MOTION_MAP.name())) {
        // loop over end-branches
        int indexLabel = 0;
        for (GEMHazardCurveRepository hcRep : hcRepList.getHcRepList()) {
            // calculate ground motion map
            ArrayList<Double> groundMotionMap =
                    hcRep.getHazardMap(config
                            .getDouble(ConfigItems.PROBABILITY_OF_EXCEEDANCE
                                    .name()));
            // define file name (one file per end-branch label)
            String outfile =
                    config.getString(ConfigItems.OUTPUT_DIR.name())
                            + "groundMotionMap_"
                            + hcRepList.getEndBranchLabels()
                                    .get(indexLabel)
                            + "_"
                            + config
                                    .getDouble(ConfigItems.PROBABILITY_OF_EXCEEDANCE
                                            .name()) * 100
                            + "%"
                            + config
                                    .getString(ConfigItems.INVESTIGATION_TIME
                                            .name()) + "yr.dat";
            saveGroundMotionMapToAsciiFile(outfile, groundMotionMap,
                    hcRepList.getHcRepList().get(0).getGridNode());
            indexLabel = indexLabel + 1;
        }
    }
    if (config.getBoolean(ConfigItems.MEAN_HAZARD_CURVES.name())) {
        // mean hazard curves over all realizations
        GEMHazardCurveRepository meanHazardCurves =
                hcRepList.getMeanHazardCurves();
        String outfile =
                config.getString(ConfigItems.OUTPUT_DIR.name())
                        + "meanHazardCurves.dat";
        saveHazardCurveRepositoryToAsciiFile(outfile, meanHazardCurves);
    }
    if (config.getBoolean(ConfigItems.INDIVIDUAL_HAZARD_CURVES.name())) {
        String outfile =
                config.getString(ConfigItems.OUTPUT_DIR.name())
                        + "individualHazardCurves.dat";
        saveHazardCurveRepositoryListToAsciiFile(outfile, hcRepList);
    }
} // doCalculationThroughMonteCarloApproach()

/**
 * Classical PSHA calculation over ALL logic-tree end branches: builds every
 * ERF end-branch source model and every GMPE end-branch map, computes
 * hazard for each (ERF, GMPE) pair, and writes the configured outputs.
 */
private void doFullCalculation() {
    logger.info("Performing full calculation. \n");
    // load ERF logic tree data
    ErfLogicTreeData erfLogicTree = createErfLogicTreeData(config);
    // load GMPE logic tree data
    GmpeLogicTreeData gmpeLogicTree = createGmpeLogicTreeData(config);
    // compute ERF logic tree end-branch models
    HashMap<String, ArrayList<GEMSourceData>> endBranchModels =
            computeErfLogicTreeEndBrancheModels(erfLogicTree
                    .getErfLogicTree());
    // log info: ERF end-branch labels
    logger.info("ERF logic tree end branch models (total number: "
            + endBranchModels.keySet().size() + ").\n");
    Iterator<String> erfEndBranchLabelIter =
            endBranchModels.keySet().iterator();
    while (erfEndBranchLabelIter.hasNext()) {
        String erfEndBranchLabel = erfEndBranchLabelIter.next();
        logger.info("End branch label: " + erfEndBranchLabel + "\n");
    } // while
    // compute gmpe logic tree end-branch models
    HashMap<String, HashMap<TectonicRegionType, ScalarIntensityMeasureRelationshipAPI>> gmpeEndBranchModel =
            computeGmpeLogicTreeEndBrancheModels(gmpeLogicTree
                    .getGmpeLogicTreeHashMap());
    // log info: GMPE end-branch labels and the GMPE chosen per tectonic
    // region type
    logger.info("GMPE logic tree end branch models (total number: "
            + gmpeEndBranchModel.keySet().size() + ").\n");
    Iterator<String> gmpeEndBranchLabelIter =
            gmpeEndBranchModel.keySet().iterator();
    while (gmpeEndBranchLabelIter.hasNext()) {
        String gmpeEndBranchLabel = gmpeEndBranchLabelIter.next();
        logger.info("End branch label: " + gmpeEndBranchLabel);
        Iterator<TectonicRegionType> trtIter =
                gmpeEndBranchModel.get(gmpeEndBranchLabel).keySet()
                        .iterator();
        while (trtIter.hasNext()) {
            TectonicRegionType trt = trtIter.next();
            logger.info(" Tectonic region type: " + trt.toString()
                    + " --> GMPE: "
                    + gmpeEndBranchModel.get(gmpeEndBranchLabel).get(trt)
                            .getName());
        } // while
        // TODO: intended as a one-line gap after each block of logging
        // messages — is logging "\n" the right way to do that?
        logger.info("\n");
    } // while gmpeEndBranchLabelIter
    // instantiate the repository for the results
    GEMHazardCurveRepositoryList hcRepList =
            new GEMHazardCurveRepositoryList();
    // sites for calculation
    ArrayList<Site> sites = createSiteList(config);
    // number of threads
    int numThreads = config.getInt(ConfigItems.NUMBER_OF_PROCESSORS.name());
    // IML list
    ArbitrarilyDiscretizedFunc imlList =
            CalculatorConfigHelper.makeImlList(config);
    // maximum integration distance
    double maxDist = config.getDouble(ConfigItems.MAXIMUM_DISTANCE.name());
    // loop over ERF end-branch models
    Iterator<String> endBranchLabels = endBranchModels.keySet().iterator();
    while (endBranchLabels.hasNext()) {
        // current erf end-branch model label
        String erfLabel = endBranchLabels.next();
        logger.info("Processing end-branch model: " + erfLabel);
        // instantiate GEM1ERF with the source model corresponding
        // to the current label
        GEM1ERF erf = new GEM1ERF(endBranchModels.get(erfLabel));
        // set ERF parameters
        setGEM1ERFParams(erf, config);
        // loop over GMPE end-branch models
        Iterator<String> gmpeEndBranchLabels =
                gmpeEndBranchModel.keySet().iterator();
        while (gmpeEndBranchLabels.hasNext()) {
            String gmpeLabel = gmpeEndBranchLabels.next();
            logger.info("Processing gmpe end-branch model: " + gmpeLabel);
            // do calculation for this (ERF, GMPE) pair
            GemComputeHazard compHaz =
                    new GemComputeHazard(numThreads, sites, erf,
                            gmpeEndBranchModel.get(gmpeLabel), imlList,
                            maxDist);
            // store results, labelled "<erfLabel>-<gmpeLabel>"
            hcRepList.add(compHaz.getValues(), erfLabel + "-" + gmpeLabel);
        } // while gmpeEndBranchLabels
        // create the requested output
        // NOTE(review): these output blocks sit INSIDE the ERF end-branch
        // loop, so the same files are rewritten once per ERF branch —
        // confirm whether they were meant to run after the loop.
        if (config.getBoolean(ConfigItems.MEAN_GROUND_MOTION_MAP.name())) {
            // calculate mean hazard map for the given prob of exceedance
            ArrayList<Double> meanGroundMotionMap =
                    hcRepList.getMeanGroundMotionMap(config
                            .getDouble(ConfigItems.PROBABILITY_OF_EXCEEDANCE
                                    .name()), erfLogicTree
                            .getErfLogicTree(), gmpeLogicTree
                            .getGmpeLogicTreeHashMap());
            // save mean ground motion map
            String outfile =
                    config.getString(ConfigItems.OUTPUT_DIR.name())
                            + "meanGroundMotionMap_"
                            + config
                                    .getDouble(ConfigItems.PROBABILITY_OF_EXCEEDANCE
                                            .name()) * 100
                            + "%"
                            + config
                                    .getString(ConfigItems.INVESTIGATION_TIME
                                            .name()) + "yr.dat";
            saveGroundMotionMapToAsciiFile(outfile, meanGroundMotionMap,
                    hcRepList.getHcRepList().get(0).getGridNode());
        }
        if (config.getBoolean(ConfigItems.INDIVIDUAL_GROUND_MOTION_MAP
                .name())) {
            // loop over end-branches
            int indexLabel = 0;
            for (GEMHazardCurveRepository hcRep : hcRepList.getHcRepList()) {
                // calculate ground motion map
                ArrayList<Double> groundMotionMap =
                        hcRep.getHazardMap(config
                                .getDouble(ConfigItems.PROBABILITY_OF_EXCEEDANCE
                                        .name()));
                // define file name
                String outfile =
                        config.getString(ConfigItems.OUTPUT_DIR.name())
                                + "groundMotionMap_"
                                + hcRepList.getEndBranchLabels().get(
                                        indexLabel)
                                + "_"
                                + config
                                        .getDouble(ConfigItems.PROBABILITY_OF_EXCEEDANCE
                                                .name()) * 100
                                + "%"
                                + config
                                        .getString(ConfigItems.INVESTIGATION_TIME
                                                .name()) + "yr.dat";
                saveGroundMotionMapToAsciiFile(outfile, groundMotionMap,
                        hcRepList.getHcRepList().get(0).getGridNode());
                indexLabel = indexLabel + 1;
            } // for GEMHazardCurveRepository
        }
        if (config.getBoolean(ConfigItems.MEAN_HAZARD_CURVES.name())) {
            GEMHazardCurveRepository meanHazardCurves =
                    hcRepList.getMeanHazardCurves(erfLogicTree
                            .getErfLogicTree(), gmpeLogicTree
                            .getGmpeLogicTreeHashMap());
            String outfile =
                    config.getString(ConfigItems.OUTPUT_DIR.name())
                            + "meanHazardCurves.dat";
            saveHazardCurveRepositoryToAsciiFile(outfile, meanHazardCurves);
        }
        if (config.getBoolean(ConfigItems.INDIVIDUAL_HAZARD_CURVES.name())) {
            String outfile =
                    config.getString(ConfigItems.OUTPUT_DIR.name())
                            + "individualHazardCurves.dat";
            saveHazardCurveRepositoryListToAsciiFile(outfile, hcRepList);
        }
    } // while endBranchLabels
} // doFullCalculation()

/**
 * Probabilistic event-based calculation via Monte Carlo logic-tree
 * sampling: for each realization, samples an ERF and a GMPE map, generates
 * stochastic seismicity histories, and computes a ground motion field per
 * rupture. Returns only the LAST computed field (each iteration overwrites
 * the previous one) — presumably a work in progress.
 */
private Map<Site, Double> doProbabilisticEventBasedCalcThroughMonteCarloLogicTreeSampling() {
    // NOTE(review): string concatenation yields "...event basedthrough..."
    // (missing space) — runtime string left untouched here.
    logger.info("Performing calculation probabilistic event based"
            + "through Monte Carlo Approach.\n");
    Map<Site, Double> groundMotionMap = null;
    ArrayList<Site> sites = createSiteList(config);
    // load ERF logic tree data
    ErfLogicTreeData erfLogicTree = createErfLogicTreeData(config);
    // load GMPE logic tree data
    GmpeLogicTreeData gmpeLogicTree = createGmpeLogicTreeData(config);
    int numberOfRealization =
            config.getInt(ConfigItems.NUMBER_OF_HAZARD_CURVE_CALCULATIONS
                    .name());
    int numberOfSeismicityHistories =
            config.getInt(ConfigItems.NUMBER_OF_SEISMICITY_HISTORIES
                    .name());
    for (int i = 0; i < numberOfRealization; ++i) {
        /* TODO: For the moment select the first GMPE */
        HashMap<TectonicRegionType, ScalarIntensityMeasureRelationshipAPI> mapGmpe =
                sampleGemLogicTreeGMPE(gmpeLogicTree
                        .getGmpeLogicTreeHashMap());
        EqkRupForecast eqkRupForecast =
                sampleGemLogicTreeERF(erfLogicTree.getErfLogicTree(),
                        config);
        // generate the stochastic event sets for this ERF sample
        ArrayList<ArrayList<EqkRupture>> seismicityHistories =
                StochasticEventSetGenerator
                        .getMultipleStochasticEventSetsFromPoissonianERF(
                                eqkRupForecast,
                                numberOfSeismicityHistories, getRandom());
        for (int j = 0; j < numberOfSeismicityHistories; ++j) {
            for (int k = 0; k < seismicityHistories.get(j).size(); ++k) {
                EqkRupture eqkRupture = seismicityHistories.get(j).get(k);
                // pick the GMPE matching the rupture's tectonic region
                TectonicRegionType tectonicRegionType =
                        eqkRupture.getTectRegType();
                ScalarIntensityMeasureRelationshipAPI attenRel =
                        mapGmpe.get(tectonicRegionType);
                // overwrites the previous field — only the last survives
                groundMotionMap =
                        GroundMotionFieldCalculator
                                .getStochasticGroundMotionField(attenRel,
                                        eqkRupture, sites, getRandom());
            } // for seismicityHistories
        } // for numberOfSeismicityHistories
numberOfSeismicityHistories } // for numberOfRealization return groundMotionMap; } // doProbabilisticEventBasedCalcThroughMonteCarloLogicTreeSampling () private Map<Site, Double> doProbabilisticEventBasedCalcForAllLogicTreeEndBranches() { logger.info("Performing calculation probabilistic event based" + " for all logic tree branches.\n"); Map<Site, Double> groundMotionMap = null; ArrayList<Site> sites = createSiteList(config); // load ERF logic tree data ErfLogicTreeData erfLogicTree = createErfLogicTreeData(config); // load GMPE logic tree data GmpeLogicTreeData gmpeLogicTree = createGmpeLogicTreeData(config); int numberOfRealization = config.getInt(ConfigItems.NUMBER_OF_HAZARD_CURVE_CALCULATIONS .name()); int numberOfSeismicityHistories = config .getInt(ConfigItems.NUMBER_OF_SEISMICITY_HISTORIES .name()); // compute ERF logic tree end-branch models HashMap<String, ArrayList<GEMSourceData>> endBranchModels = computeErfLogicTreeEndBrancheModels(erfLogicTree .getErfLogicTree()); // compute gmpe logic tree end-branch models HashMap<String, HashMap<TectonicRegionType, ScalarIntensityMeasureRelationshipAPI>> gmpeEndBranchModel = computeGmpeLogicTreeEndBrancheModels(gmpeLogicTree .getGmpeLogicTreeHashMap()); for (int i = 0; i < endBranchModels.size(); ++i) { // loop over ERF end branches ArrayList<GEMSourceData> erfBranch = endBranchModels.get(i); EqkRupForecast eqkRupForecast = sampleGemLogicTreeERF(erfLogicTree.getErfLogicTree(), config); ArrayList<ArrayList<EqkRupture>> seismicityHistories = StochasticEventSetGenerator .getMultipleStochasticEventSetsFromPoissonianERF( eqkRupForecast, numberOfSeismicityHistories, getRandom()); Set<String> keySet = gmpeEndBranchModel.keySet(); for (String gmpeMapName : keySet) { // loop over GMPE end branches Map<TectonicRegionType, ScalarIntensityMeasureRelationshipAPI> mapGmpe = gmpeEndBranchModel.get(gmpeMapName); for (int j = 0; j < numberOfSeismicityHistories; ++j) { // loop over seismicity histories for (int k = 0; k < 
seismicityHistories.get(j).size(); ++k) { // loop over ruptures EqkRupture eqkRupture = seismicityHistories.get(j).get(k); ScalarIntensityMeasureRelationshipAPI attenRel = mapGmpe.get(eqkRupture.getTectRegType()); groundMotionMap = GroundMotionFieldCalculator .getStochasticGroundMotionField( attenRel, eqkRupture, sites, getRandom()); } // for seismicityHistorities } // for numberOfSeismicityHistories } // for key set with gmpe map names } // for endBranchModels return groundMotionMap; } // doProbabilisticEventBasedCalcForAllLogicTreeEndBranches() /** * @param gmpeLogicTreeHashMap * : this is an hash map relating a set of tectonic settings with * a set of logic trees for gmpes. The idea is the user can * define, for each tectonic setting, a different logic tree for * the gmpes. * @return an hash map relating an end branch label with an hash map that * relates different tectonic settings with different gmpes. For * instance if there are two logic tree for gmpes: Stable Region * (branch 1: D&M2008, weight: 0.5; branch 2: M&P2008, weight: 0.5) * Active Region (branch 1: B&A2008, weight: 0.5; branch 2: C&B2008, * weight: 0.5) then the method will result in a hash map containing * four end branch labels: Stable Region_1-ActiveRegion_1 (referring * to an hash map: {(Stable Region: D&M2008),(Active Region: * B&A2008)} Stable Region_1-ActiveRegion_2 (referring to an hash * map: {(Stable Region: D&M2008),(Active Region: C&B2008)} Stable * Region_2-ActiveRegion_1 (referring to an hash map: {(Stable * Region: M&P2008),(Active Region: B&A2008)} Stable * Region_2-ActiveRegion_2 (referring to an hash map: {(Stable * Region: M&P2008),(Active Region: C&B2008)} NOTE: the major * assumption in this method is that the logic tree for the Gmpes * contains only one branching level. 
 */
private HashMap<String, HashMap<TectonicRegionType, ScalarIntensityMeasureRelationshipAPI>> computeGmpeLogicTreeEndBrancheModels(
        HashMap<TectonicRegionType, LogicTree<ScalarIntensityMeasureRelationshipAPI>> gmpeLogicTreeHashMap) {
    // make deep copy — the algorithm below destructively consumes the map
    // (unchecked cast: UnoptimizedDeepCopy.copy returns Object)
    HashMap<TectonicRegionType, LogicTree<ScalarIntensityMeasureRelationshipAPI>> gmpeLogicTreeHashMapCopy =
            (HashMap<TectonicRegionType, LogicTree<ScalarIntensityMeasureRelationshipAPI>>) UnoptimizedDeepCopy
                    .copy(gmpeLogicTreeHashMap);
    // hash map containing gmpe end branch models
    HashMap<String, HashMap<TectonicRegionType, ScalarIntensityMeasureRelationshipAPI>> endBranchModels =
            new HashMap<String, HashMap<TectonicRegionType, ScalarIntensityMeasureRelationshipAPI>>();
    // snapshot the tectonic region types into a list, consumed in order
    Iterator<TectonicRegionType> trtIter =
            gmpeLogicTreeHashMapCopy.keySet().iterator();
    ArrayList<TectonicRegionType> trtList =
            new ArrayList<TectonicRegionType>();
    while (trtIter.hasNext()) {
        trtList.add(trtIter.next());
    }
    // seed the end-branch map with the gmpe models of the FIRST tectonic
    // region type (one entry per branch)
    if (endBranchModels.isEmpty()) {
        // number of branches for the first tectonic region type
        // (assumption: only one branching level per gmpe logic tree)
        int numBranch =
                gmpeLogicTreeHashMapCopy.get(trtList.get(0))
                        .getBranchingLevel(0).getBranchList().size();
        // loop over branches
        for (int i = 0; i < numBranch; i++) {
            // get current branch
            LogicTreeBranch branch =
                    gmpeLogicTreeHashMapCopy.get(trtList.get(0))
                            .getBranchingLevel(0).getBranch(i);
            // define label from branch ID number: "<trt>_<relativeID>"
            String label =
                    trtList.get(0) + "_"
                            + Integer.toString(branch.getRelativeID());
            // get gmpe for this branch
            ScalarIntensityMeasureRelationshipAPI gmpe =
                    gmpeLogicTreeHashMapCopy.get(trtList.get(0)).getEBMap()
                            .get(Integer.toString(branch.getRelativeID()));
            HashMap<TectonicRegionType, ScalarIntensityMeasureRelationshipAPI> newHashMap =
                    new HashMap<TectonicRegionType, ScalarIntensityMeasureRelationshipAPI>();
            newHashMap.put(trtList.get(0), gmpe);
            // save in the hash map
            endBranchModels.put(label, newHashMap);
        }
        // remove processed tectonic setting
        gmpeLogicTreeHashMapCopy.remove(trtList.get(0));
        trtList.remove(0);
    }
    // cartesian expansion: combine each existing end-branch model with
    // every branch of each remaining tectonic setting
    if (!endBranchModels.isEmpty()) {
        // while there are additional tectonic settings
        while (!gmpeLogicTreeHashMapCopy.keySet().isEmpty()) {
            // snapshot current labels (the map is mutated inside the loop)
            Iterator<String> endBranchModelLabels =
                    endBranchModels.keySet().iterator();
            ArrayList<String> labels = new ArrayList<String>();
            while (endBranchModelLabels.hasNext())
                labels.add(endBranchModelLabels.next());
            for (String label : labels) {
                // number of branches in the first branching level of the
                // current tectonic setting
                int numBranch =
                        gmpeLogicTreeHashMapCopy.get(trtList.get(0))
                                .getBranchingLevel(0).getBranchList()
                                .size();
                // loop over branches
                for (int i = 0; i < numBranch; i++) {
                    // get current branch
                    LogicTreeBranch branch =
                            gmpeLogicTreeHashMapCopy.get(trtList.get(0))
                                    .getBranchingLevel(0).getBranch(i);
                    // new label: "<oldLabel>-<trt>_<relativeID>"
                    String newLabel =
                            label + "-" + trtList.get(0) + "_"
                                    + branch.getRelativeID();
                    // get gmpe
                    ScalarIntensityMeasureRelationshipAPI gmpe =
                            gmpeLogicTreeHashMapCopy.get(trtList.get(0))
                                    .getEBMap().get(
                                            Integer.toString(branch
                                                    .getRelativeID()));
                    // build the extended (trt -> gmpe) map
                    HashMap<TectonicRegionType, ScalarIntensityMeasureRelationshipAPI> newHashMap =
                            new HashMap<TectonicRegionType, ScalarIntensityMeasureRelationshipAPI>();
                    // copy previous entries
                    Iterator<TectonicRegionType> iterTrt =
                            endBranchModels.get(label).keySet().iterator();
                    while (iterTrt.hasNext()) {
                        TectonicRegionType trt = iterTrt.next();
                        ScalarIntensityMeasureRelationshipAPI ar =
                                endBranchModels.get(label).get(trt);
                        newHashMap.put(trt, ar);
                    }
                    // add new entry
                    newHashMap.put(trtList.get(0), gmpe);
                    // add new entry in the end branch hash map
                    endBranchModels.put(newLabel, newHashMap);
                }
                // from the hash map remove the entry corresponding
                // to the current (now superseded) model
                endBranchModels.remove(label);
            } // end loop over current end-branch models
            // remove processed tectonic setting
            gmpeLogicTreeHashMapCopy.remove(trtList.get(0));
            trtList.remove(0);
        } // end while !gmpeLogicTreeHashMapCopy.keySet().isEmpty()
    } // end if !endBranchModels.isEmpty()
    return endBranchModels;
} // computeGmpeLogicTreeEndBranchModels()

/**
 * Expands the ERF logic tree into end-branch source models: seeds one model
 * per branch of the first branching level (each read from its input file),
 * then, level by level, applies every branch rule to every current model —
 * a cartesian expansion keyed by labels like "1_2_1".
 */
private HashMap<String, ArrayList<GEMSourceData>> computeErfLogicTreeEndBrancheModels(
        LogicTree<ArrayList<GEMSourceData>> erfLogicTree) {
    // make deep copy — branching levels are consumed destructively below
    LogicTree<ArrayList<GEMSourceData>> erfLogicTreeCopy =
            (LogicTree<ArrayList<GEMSourceData>>) UnoptimizedDeepCopy
                    .copy(erfLogicTree);
    HashMap<String, ArrayList<GEMSourceData>> endBranchModels =
            new HashMap<String, ArrayList<GEMSourceData>>();
    // load source models from first branching level
    if (endBranchModels.isEmpty()) {
        // number of branches in the first branching level
        int numBranch =
                erfLogicTreeCopy.getBranchingLevel(0).getBranchList()
                        .size();
        // loop over branches of the first branching level
        for (int i = 0; i < numBranch; i++) {
            // get current branch
            LogicTreeBranch branch =
                    erfLogicTreeCopy.getBranchingLevel(0).getBranch(i);
            // define label from branch ID number
            String label = Integer.toString(branch.getRelativeID());
            // read the corresponding source model
            String sourceName =
                    configFilesPath() + branch.getNameInputFile();
            ArrayList<GEMSourceData> srcList =
                    new InputModelData(sourceName, config
                            .getDouble(ConfigItems.WIDTH_OF_MFD_BIN.name()))
                            .getSourceList();
            // save in the hash map
            endBranchModels.put(label, srcList);
        }
        // remove processed branching level
        erfLogicTreeCopy.getBranchingLevelsList().remove(0);
    }
    // if the hash map already contains the models from the
    // first branching levels go through the remaining
    // branching levels (if they exist) and create the new models
    if (!endBranchModels.isEmpty()) {
        // while there are additional branching levels
        while (!erfLogicTreeCopy.getBranchingLevelsList().isEmpty()) {
            // snapshot current labels (map is mutated inside the loop)
            Iterator<String> endBranchModelLabels =
                    endBranchModels.keySet().iterator();
            ArrayList<String> labels = new ArrayList<String>();
            while (endBranchModelLabels.hasNext())
                labels.add(endBranchModelLabels.next());
            for (String label : labels) {
                // current end branch model
                ArrayList<GEMSourceData> srcList =
                        endBranchModels.get(label);
                // from the current end branch model create
                // models corresponding to the branches in
                // the first branching level of the current logic tree
                int numBranch =
                        erfLogicTreeCopy.getBranchingLevel(0)
                                .getBranchList().size();
                // loop over branches of the first branching level
                for (int i = 0; i < numBranch; i++) {
                    // get current branch
                    LogicTreeBranch branch =
                            erfLogicTreeCopy.getBranchingLevel(0)
                                    .getBranch(i);
                    // new label
                    String newLabel =
                            label + "_" + branch.getRelativeID();
                    // new source model: branch rule applied to current one
                    ArrayList<GEMSourceData> newSrcList =
                            applyRuleToSourceList(srcList, branch.getRule());
                    // add new entry
                    endBranchModels.put(newLabel, newSrcList);
                }
                // from the hash map remove the entry corresponding
                // to the current (now superseded) model
                endBranchModels.remove(label);
            } // end loop over current end-branch models
            // remove processed branching level
            erfLogicTreeCopy.getBranchingLevelsList().remove(0);
        } // end while !erfLogicTreeCopy.getBranchingLevelsList().isEmpty()
    } // end if !endBranchModels.isEmpty()
    return endBranchModels;
}

/**
 * Writes a ground motion map as ASCII rows "lon lat value", one line per
 * site in siteList (assumes map and siteList are index-aligned).
 */
private void saveGroundMotionMapToAsciiFile(String outfile,
        ArrayList<Double> map, ArrayList<Site> siteList) {
    try {
        FileOutputStream oOutFIS = new FileOutputStream(outfile);
        BufferedOutputStream oOutBIS = new BufferedOutputStream(oOutFIS);
        BufferedWriter oWriter =
                new BufferedWriter(new OutputStreamWriter(oOutBIS));
        // loop over grid points
        for (int i = 0; i < siteList.size(); i++) {
            double lon = siteList.get(i).getLocation().getLongitude();
            double lat = siteList.get(i).getLocation().getLatitude();
            double gmv = map.get(i);
            oWriter.write(String.format("%+8.4f %+7.4f %7.4e \n", lon, lat,
                    gmv));
        }
        oWriter.close();
        oOutBIS.close();
        oOutFIS.close();
    } catch (FileNotFoundException e) {
        // TODO
Auto-generated catch block e.printStackTrace(); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } } // saveGroundMotionMapToGMTAsciiFile() private static void saveHazardCurveRepositoryListToAsciiFile( String outfile, GEMHazardCurveRepositoryList hazardCurves) { try { FileOutputStream oOutFIS = new FileOutputStream(outfile); BufferedOutputStream oOutBIS = new BufferedOutputStream(oOutFIS); BufferedWriter oWriter = new BufferedWriter(new OutputStreamWriter(oOutBIS)); // first line contains ground motion values // loop over ground motion values oWriter.write(String.format("%8s %8s ", " ", " ")); for (int igmv = 0; igmv < hazardCurves.getHcRepList().get(0) .getGmLevels().size(); igmv++) { double gmv = hazardCurves.getHcRepList().get(0).getGmLevels().get( igmv); gmv = Math.exp(gmv); oWriter.write(String.format("%7.4e ", gmv)); } // for oWriter.write("\n"); // loop over grid points for (int igp = 0; igp < hazardCurves.getHcRepList().get(0) .getNodesNumber(); igp++) { // loop over hazard curve realizations for (int ihc = 0; ihc < hazardCurves.getHcRepList().size(); ihc++) { double lat = hazardCurves.getHcRepList().get(0).getGridNode() .get(igp).getLocation().getLatitude(); double lon = hazardCurves.getHcRepList().get(0).getGridNode() .get(igp).getLocation().getLongitude(); oWriter.write(String.format("%+8.4f %+7.4f ", lon, lat)); GEMHazardCurveRepository hcRep = hazardCurves.getHcRepList().get(ihc); // loop over ground motion values for (int igmv = 0; igmv < hcRep.getGmLevels().size(); igmv++) { double probEx = hcRep.getProbExceedanceList(igp)[igmv]; oWriter.write(String.format("%7.4e ", probEx)); } // for oWriter.write("\n"); } // for } // for oWriter.close(); oOutBIS.close(); oOutFIS.close(); } catch (FileNotFoundException e) { // TODO use log4j File tmp = new File(System.getProperties().getProperty("user.home")); e.printStackTrace(); } catch (IOException e) { // TODO use log4j e.printStackTrace(); } } // saveHazardCurves() private 
static void saveHazardCurveRepositoryToAsciiFile(String outfile,
        GEMHazardCurveRepository rep) {
    // Writes one hazard-curve repository as an ASCII table: header row of
    // (exponentiated) ground motion levels, then per grid point a row
    // "lon lat p1 p2 ...".
    try {
        FileOutputStream oOutFIS = new FileOutputStream(outfile);
        BufferedOutputStream oOutBIS = new BufferedOutputStream(oOutFIS);
        BufferedWriter oWriter =
                new BufferedWriter(new OutputStreamWriter(oOutBIS));
        // first line contains ground motion values
        // (stored as ln(gmv) — exp() back before writing)
        oWriter.write(String.format("%8s %8s ", " ", " "));
        for (int igmv = 0; igmv < rep.getGmLevels().size(); igmv++) {
            double gmv = rep.getGmLevels().get(igmv);
            gmv = Math.exp(gmv);
            oWriter.write(String.format("%7.4e ", gmv));
        } // for
        oWriter.write("\n");
        // loop over grid points
        for (int igp = 0; igp < rep.getNodesNumber(); igp++) {
            double lat =
                    rep.getGridNode().get(igp).getLocation().getLatitude();
            double lon =
                    rep.getGridNode().get(igp).getLocation().getLongitude();
            oWriter.write(String.format("%+8.4f %+7.4f ", lon, lat));
            // loop over ground motion values
            for (int igmv = 0; igmv < rep.getGmLevels().size(); igmv++) {
                double probEx = rep.getProbExceedanceList(igp)[igmv];
                oWriter.write(String.format("%7.4e ", probEx));
            } // for
            oWriter.write("\n");
        } // for
        oWriter.close();
        oOutBIS.close();
        oOutFIS.close();
    } catch (FileNotFoundException e) {
        // TODO use log4j
        e.printStackTrace();
    } catch (IOException e) {
        // TODO use log4j
        e.printStackTrace();
    }
} // saveFractiles()

/**
 * Builds the list of calculation sites from the configured region boundary
 * and grid spacing; every site gets the reference Vs30 and depth-to-2.5km/s
 * parameters from the configuration.
 */
private static ArrayList<Site> createSiteList(Configuration calcConfig) {
    // arraylist of sites storing locations where hazard curves must be
    // calculated
    ArrayList<Site> sites = new ArrayList<Site>();
    // create gridded region from borders coordinates and grid spacing
    LocationList locations =
            CalculatorConfigHelper.makeRegionboundary(calcConfig);
    // (older Properties-based gridSpacing lookup removed; Configuration
    // object is used instead)
    double gridSpacing =
            calcConfig.getDouble(ConfigItems.REGION_GRID_SPACING.name());
    GriddedRegion gridReg =
            new GriddedRegion(locations, BorderType.MERCATOR_LINEAR,
                    gridSpacing, null);
    // get list of locations in the region
    LocationList locList = gridReg.getNodeList();
    // store locations as sites
    Iterator<Location> iter = locList.iterator();
    while (iter.hasNext()) {
        Site site = new Site(iter.next());
        site.addParameter(new DoubleParameter(Vs30_Param.NAME, calcConfig
                .getDouble(ConfigItems.REFERENCE_VS30_VALUE.name())));
        site
                .addParameter(new DoubleParameter(
                        DepthTo2pt5kmPerSecParam.NAME,
                        calcConfig
                                .getDouble(ConfigItems.REFERENCE_DEPTH_TO_2PT5KM_PER_SEC_PARAM
                                        .name())));
        sites.add(site);
    }
    // return array list of sites
    return sites;
} // createSiteList()

/**
 * Randomly samples the ERF logic tree: draws a source model from the first
 * branching level, then, per source, samples every remaining branching
 * level and applies the drawn branch's rule. Returns a configured GEM1ERF.
 */
private GEM1ERF sampleGemLogicTreeERF(
        LogicTree<ArrayList<GEMSourceData>> ltERF, Configuration calcConfig) {
    // erf to be returned
    GEM1ERF erf = null;
    // array list of sources that will contain the samples sources
    ArrayList<GEMSourceData> srcList = new ArrayList<GEMSourceData>();
    // number of branching levels in the logic tree
    int numBranchingLevels = ltERF.getBranchingLevelsList().size();
    // sample first branching level to get the starting source model
    int branchNumber = ltERF.sampleBranchingLevel(0);
    // get the corresponding branch (the -1 is needed because branchNumber
    // is the number of the branch (starting from 1) and not the index of
    // the branch
    LogicTreeBranch branch =
            ltERF.getBranchingLevel(0).getBranch(branchNumber - 1);
    if (branch.getNameInputFile() != null) {
        // read input file model (Configuration-based; the older
        // Properties-based variant will be deleted soon)
// InputModelData inputModelData = new // InputModelData(branch.getNameInputFile(), // Double.parseDouble(calcConfig.getProperty(ConfigItems.WIDTH_OF_MFD_BIN.name()))); // new here is the apache Configuration object String sourceName = configFilesPath() + branch.getNameInputFile(); InputModelData inputModelData = new InputModelData(sourceName, calcConfig .getDouble(ConfigItems.WIDTH_OF_MFD_BIN.name())); // load sources srcList = inputModelData.getSourceList(); } else { String msg = "The first branching level of the ERF logic tree does" + " not contain a source model!!\n" + "Please correct your input!\n Execution stopped!"; logger.info(msg); throw new IllegalArgumentException(msg); } // loop over sources // source index int sourceIndex = 0; for (GEMSourceData src : srcList) { // for each source, loop over remaining branching levels and apply // uncertainties for (int i = 1; i < numBranchingLevels; i++) { // sample the current branching level branchNumber = ltERF.sampleBranchingLevel(i); // get the sampled branch branch = ltERF.getBranchingLevel(i).getBranch(branchNumber - 1); if (branch.getRule() != null) { // at the moment we apply rules to all source typologies. In // the future we may want // to apply some filter (i.e. apply rule to this source type // only...) 
// if area source if (src instanceof GEMAreaSourceData) { // replace the old source with the new source // accordingly to the rule srcList.set(sourceIndex, applyRuleToAreaSource( (GEMAreaSourceData) src, branch.getRule())); } // if point source if (src instanceof GEMPointSourceData) { // replace the old source with the new source // accordingly to the rule srcList.set(sourceIndex, applyRuleToPointSource( (GEMPointSourceData) src, branch.getRule())); } // if fault source if (src instanceof GEMFaultSourceData) { // replace the old source with the new source // accordingly to the rule srcList.set(sourceIndex, applyRuleToFaultSource( (GEMFaultSourceData) src, branch.getRule())); } // if subduction source if (src instanceof GEMSubductionFaultSourceData) { // replace the old source with the new source // accordingly to the rule srcList.set(sourceIndex, applyRuleToSubductionFaultSource( (GEMSubductionFaultSourceData) src, branch.getRule())); } } else { // rule is not defined: String msg = "No rule is defined at branching level: " + i + "\n" + "Please correct your input!\n" + "Execution stopped!"; logger.info(msg); throw new IllegalArgumentException(msg); } // end if no rule is defined } // end loop over branching levels sourceIndex = sourceIndex + 1; } // end loop over sources // instantiate ERF erf = new GEM1ERF(srcList); // set ERF parameters setGEM1ERFParams(erf, calcConfig); return erf; } // sampleGemLogicTreeERF() /** * This method applies an "uncertainty" rule to an area source data object * * @param areaSrc * : source data object subject to uncertainty * @param rule * : GEMLogicTreeRule specifing parameter uncertainty * @return: a new GEMAreaSourceData object with the parameter subject to the * uncertainty changed according to the rule. 
In case the rule is * not recognized an error is thrown and execution stops */ private static GEMAreaSourceData applyRuleToAreaSource( GEMAreaSourceData areaSrc, LogicTreeRule rule) { // define new area source GEMAreaSourceData newAreaSrc = areaSrc; // if uncertainties on GR Mmax or GR b value if (rule.getRuleName().toString().equalsIgnoreCase( LogicTreeRuleParam.mMaxGRRelative.toString()) || rule.getRuleName().toString().equalsIgnoreCase( LogicTreeRuleParam.bGRRelative.toString())) { // loop over mfds // mfd index int mfdIndex = 0; for (IncrementalMagFreqDist mfd : areaSrc.getMagfreqDistFocMech() .getMagFreqDistList()) { if (mfd instanceof GutenbergRichterMagFreqDist) { // new mfd GutenbergRichterMagFreqDist newMfdGr = null; if (rule.getRuleName().toString().equalsIgnoreCase( LogicTreeRuleParam.mMaxGRRelative.toString())) { // uncertainties on Mmax newMfdGr = applyMmaxGrRelative( (GutenbergRichterMagFreqDist) mfd, rule .getVal(), areaSrc.getName()); } else if (rule.getRuleName().toString().equalsIgnoreCase( LogicTreeRuleParam.bGRRelative.toString())) { // uncertainties on b value newMfdGr = applybGrRelative( (GutenbergRichterMagFreqDist) mfd, rule .getVal(), areaSrc.getName()); } // substitute old mfd with new mfd newAreaSrc.getMagfreqDistFocMech().getMagFreqDistList()[mfdIndex] = newMfdGr; } // end if mfd is GR mfdIndex = mfdIndex + 1; } // for (loop over mfds) // return new area source return newAreaSrc; } else { // not(rule == mMaxGRRelative || == bGRRelative) String msg = "Rule: " + rule.getRuleName().toString() + " not supported.\n" + "Check your input. 
Execution is stopped."; logger.info(msg); throw new IllegalArgumentException(msg); } } // applyRuleToAreaSource() /** * This method applies an "uncertainty" rule to a point source data object * * @param pntSrc * : source data object subject to uncertainty * @param rule * : GEMLogicTreeRule specifing parameter uncertainty * @return: a new GEMPointSourceData object with the parameter subject to * the uncertainty changed according to the rule. In case the rule * is not recognized an error is thrown and execution stops */ private static GEMPointSourceData applyRuleToPointSource( GEMPointSourceData pntSrc, LogicTreeRule rule) { // new point source GEMPointSourceData newPntSource = pntSrc; // if uncertainties on GR Mmax or GR b value if (rule.getRuleName().toString().equalsIgnoreCase( LogicTreeRuleParam.mMaxGRRelative.toString()) || rule.getRuleName().toString().equalsIgnoreCase( LogicTreeRuleParam.bGRRelative.toString())) { // loop over mfds // mfd index int mfdIndex = 0; for (IncrementalMagFreqDist mfd : pntSrc.getHypoMagFreqDistAtLoc() .getMagFreqDistList()) { if (mfd instanceof GutenbergRichterMagFreqDist) { GutenbergRichterMagFreqDist newMfdGr = null; // create new mfd by applying rule if (rule.getRuleName().toString().equalsIgnoreCase( LogicTreeRuleParam.mMaxGRRelative.toString())) { newMfdGr = applyMmaxGrRelative( (GutenbergRichterMagFreqDist) mfd, rule .getVal(), pntSrc.getName()); } else if (rule.getRuleName().toString().equalsIgnoreCase( LogicTreeRuleParam.bGRRelative.toString())) { newMfdGr = applybGrRelative( (GutenbergRichterMagFreqDist) mfd, rule .getVal(), pntSrc.getName()); } // substitute old mfd with new mfd newPntSource.getHypoMagFreqDistAtLoc().getMagFreqDistList()[mfdIndex] = newMfdGr; } // if mfd is GR mfdIndex = mfdIndex + 1; } // for (loop over mfd) return newPntSource; } else { // not(rule == mMaxGRRelative || == bGRRelative) String msg = "Rule: " + rule.getRuleName().toString() + " not supported.\n" + "Check your input. 
Execution is stopped."; logger.info(msg); throw new IllegalArgumentException(msg); } } // applyRuleToPointSource() /** * This method applies an "uncertainty" rule to a fault source data object * * @param faultSrc * : source data object subject to uncertainty * @param rule * : GEMLogicTreeRule specifing parameter uncertainty * @return: a new GEMFaultSourceData object with the parameter subject to * the uncertainty changed according to the rule. In case the rule * is not recognized an error is thrown and execution stops */ private static GEMFaultSourceData applyRuleToFaultSource( GEMFaultSourceData faultSrc, LogicTreeRule rule) { // if uncertainties on GR Mmax or GR b value if (rule.getRuleName().toString().equalsIgnoreCase( LogicTreeRuleParam.mMaxGRRelative.toString()) || rule.getRuleName().toString().equalsIgnoreCase( LogicTreeRuleParam.bGRRelative.toString())) { // mfd IncrementalMagFreqDist mfd = faultSrc.getMfd(); if (mfd instanceof GutenbergRichterMagFreqDist) { GutenbergRichterMagFreqDist newMfdGr = null; // create new mfd by applying rule if (rule.getRuleName().toString().equalsIgnoreCase( LogicTreeRuleParam.mMaxGRRelative.toString())) { newMfdGr = applyMmaxGrRelative( (GutenbergRichterMagFreqDist) mfd, rule .getVal(), faultSrc.getName()); } else if (rule.getRuleName().toString().equalsIgnoreCase( LogicTreeRuleParam.bGRRelative.toString())) { newMfdGr = applybGrRelative((GutenbergRichterMagFreqDist) mfd, rule.getVal(), faultSrc.getName()); } // return new fault source with new mfd return new GEMFaultSourceData(faultSrc.getID(), faultSrc .getName(), faultSrc.getTectReg(), newMfdGr, faultSrc .getTrace(), faultSrc.getDip(), faultSrc.getDip(), faultSrc.getSeismDepthLow(), faultSrc .getSeismDepthUpp(), faultSrc .getFloatRuptureFlag()); } else { // mfd is not GR // if the uncertainty do not apply return the unchanged object return faultSrc; } } else { // not(rule == mMaxGRRelative || == bGRRelative) String msg = "Rule: " + rule.getRuleName().toString() + " not 
supported.\n" + "Check your input. Execution is stopped."; logger.info(msg); throw new IllegalArgumentException(msg); } } // applyRuleToFaultSource() /** * This method applies an "uncertainty" rule to a subduction source data * object * * @param subFaultSrc * : source data object subject to uncertainty * @param rule * : GEMLogicTreeRule specifing parameter uncertainty * @return: a new GEMSubductionSourceData object with the parameter subject * to uncertainty changed according to the rule. In case the rule * is not recognized an error is thrown and execution stops */ private static GEMSubductionFaultSourceData applyRuleToSubductionFaultSource( GEMSubductionFaultSourceData subFaultSrc, LogicTreeRule rule) { // if uncertainties on GR Mmax or GR b value if (rule.getRuleName().toString().equalsIgnoreCase( LogicTreeRuleParam.mMaxGRRelative.toString()) || rule.getRuleName().toString().equalsIgnoreCase( LogicTreeRuleParam.bGRRelative.toString())) { // mfd IncrementalMagFreqDist mfd = subFaultSrc.getMfd(); if (mfd instanceof GutenbergRichterMagFreqDist) { GutenbergRichterMagFreqDist newMfdGr = null; // create new mfd by applying rule if (rule.getRuleName().toString().equalsIgnoreCase( LogicTreeRuleParam.mMaxGRRelative.toString())) { newMfdGr = applyMmaxGrRelative( (GutenbergRichterMagFreqDist) mfd, rule .getVal(), subFaultSrc.getName()); } else if (rule.getRuleName().toString().equalsIgnoreCase( LogicTreeRuleParam.bGRRelative.toString())) { newMfdGr = applybGrRelative((GutenbergRichterMagFreqDist) mfd, rule.getVal(), subFaultSrc.getName()); } // return new subduction fault source with the new mfd return new GEMSubductionFaultSourceData(subFaultSrc.getID(), subFaultSrc.getName(), subFaultSrc.getTectReg(), subFaultSrc.getTopTrace(), subFaultSrc.getBottomTrace(), subFaultSrc.getRake(), newMfdGr, subFaultSrc.getFloatRuptureFlag()); } // end if mfd is GR // if uncertainty does not apply return unchanged object else { return subFaultSrc; } }// end if rule == mMaxGRRelative || == 
bGRRelative else { String msg = "Rule: " + rule.getRuleName().toString() + " not supported.\n" + "Check your input. Execution is stopped."; logger.info(msg); throw new IllegalArgumentException(msg); } } // applyRuleToSubductionFaultSource() private static ArrayList<GEMSourceData> applyRuleToSourceList( ArrayList<GEMSourceData> srcList, LogicTreeRule rule) { ArrayList<GEMSourceData> newSrcList = new ArrayList<GEMSourceData>(); for (GEMSourceData src : srcList) { if (src instanceof GEMAreaSourceData) { newSrcList.add(applyRuleToAreaSource((GEMAreaSourceData) src, rule)); } else if (src instanceof GEMPointSourceData) { newSrcList.add(applyRuleToPointSource((GEMPointSourceData) src, rule)); } else if (src instanceof GEMFaultSourceData) { newSrcList.add(applyRuleToFaultSource((GEMFaultSourceData) src, rule)); } else if (src instanceof GEMSubductionFaultSourceData) { newSrcList.add(applyRuleToSubductionFaultSource( (GEMSubductionFaultSourceData) src, rule)); } } return newSrcList; } /** * * @param mfdGR * : original magnitude frequency distribution * @param deltaMmax * : uncertainty on maximum magnitude * @param areaSrc * : source * @return */ private static GutenbergRichterMagFreqDist applyMmaxGrRelative( GutenbergRichterMagFreqDist mfdGR, double deltaMmax, String sourceName) { // minimum magnitude double mMin = mfdGR.getMagLower(); // b value double bVal = mfdGR.get_bValue(); // total moment rate double totMoRate = mfdGR.getTotalMomentRate(); // deltaM double deltaM = mfdGR.getDelta(); // calculate new mMax value // old mMax value double mMax = mfdGR.getMagUpper(); // add uncertainty value (deltaM/2 is added because mMax // refers to bin center mMax = mMax + deltaM / 2 + deltaMmax; // round mMax with respect to deltaM mMax = Math.round(mMax / deltaM) * deltaM; // move back to bin center mMax = mMax - deltaM / 2; // System.out.println("New mMax: "+mMax); if (mMax - mMin >= deltaM) { // calculate number of magnitude values int numVal = (int) Math.round((mMax - mMin) / 
deltaM + 1); // create new GR mfd GutenbergRichterMagFreqDist newMfdGr = new GutenbergRichterMagFreqDist(mMin, numVal, deltaM); newMfdGr.setAllButTotCumRate(mMin, mMax, totMoRate, bVal); // return new mfd return newMfdGr; } else { // stop execution and return null logger .info("Uncertaintiy value: " + deltaMmax + " on maximum magnitude for source: " + sourceName + " give maximum magnitude smaller than minimum magnitude!\n" + "Check your input. Execution stopped."); // System.out.println("Uncertaintiy value: " + deltaMmax + // " on maximum magnitude for source: " + sourceName // + " give maximum magnitude smaller than minimum magnitude!"); // System.out.println("Check your input. Execution stopped."); return null; } } private static GutenbergRichterMagFreqDist applybGrRelative( GutenbergRichterMagFreqDist mfdGR, double deltaB, String sourceName) { // minimum magnitude double mMin = mfdGR.getMagLower(); // maximum magnitude double mMax = mfdGR.getMagUpper(); // b value double bVal = mfdGR.get_bValue(); // total moment rate double totMoRate = mfdGR.getTotalMomentRate(); // deltaM double deltaM = mfdGR.getDelta(); // calculate new b value bVal = bVal + deltaB; if (bVal >= 0.0) { // calculate number of magnitude values int numVal = (int) Math.round((mMax - mMin) / deltaM + 1); // create new GR mfd GutenbergRichterMagFreqDist newMfdGr = new GutenbergRichterMagFreqDist(mMin, numVal, deltaM); newMfdGr.setAllButTotCumRate(mMin, mMax, totMoRate, bVal); // return new mfd return newMfdGr; } else { String msg = "Uncertaintiy value: " + deltaB + " on b value for source: " + sourceName + " give b value smaller than 0!\n" + "Check your input. Execution stopped!"; logger.info(msg); // System.out.println("Uncertaintiy value: " + deltaB + // " on b value for source: " + sourceName // + " give b value smaller than 0!"); // System.out.println("Check your input. 
Execution stopped!"); throw new IllegalArgumentException(msg); } } // applybGrRelative() /** * Set the GEM1ERF params given the parameters defined in * * @param erf * : erf for which parameters have to be set * @param calcConfig * : calculator configuration obejct containing parameters for * the ERF */ private void setGEM1ERFParams(GEM1ERF erf, Configuration calcConfig) { // set minimum magnitude /* * xxr: TODO: !!!type safety!!! apache's Configuration interface handles * a similar problem this way: Instead of defining one single method * like public void setParameter(String key, Object value) {...} there * is one method per type defined: setString(), setDouble(), setInt(), * ... */ erf.setParameter(GEM1ERF.MIN_MAG_NAME, calcConfig .getDouble(ConfigItems.MINIMUM_MAGNITUDE.name())); // set time span TimeSpan timeSpan = new TimeSpan(TimeSpan.NONE, TimeSpan.YEARS); timeSpan.setDuration(calcConfig .getDouble(ConfigItems.INVESTIGATION_TIME.name())); erf.setTimeSpan(timeSpan); // params for area source // set inclusion of area sources in the calculation erf.setParameter(GEM1ERF.INCLUDE_AREA_SRC_PARAM_NAME, calcConfig .getBoolean(ConfigItems.INCLUDE_AREA_SOURCES.name())); // set rupture type ("area source rupture model / // area_source_rupture_model / AreaSourceRuptureModel) erf.setParameter(GEM1ERF.AREA_SRC_RUP_TYPE_NAME, calcConfig .getString(ConfigItems.TREAT_AREA_SOURCE_AS.name())); // set area discretization erf.setParameter(GEM1ERF.AREA_SRC_DISCR_PARAM_NAME, calcConfig .getDouble(ConfigItems.AREA_SOURCE_DISCRETIZATION.name())); // set mag-scaling relationship erf .setParameter( GEM1ERF.AREA_SRC_MAG_SCALING_REL_PARAM_NAME, calcConfig .getString(ConfigItems.AREA_SOURCE_MAGNITUDE_SCALING_RELATIONSHIP .name())); // params for grid source // inclusion of grid sources in the calculation erf.setParameter(GEM1ERF.INCLUDE_GRIDDED_SEIS_PARAM_NAME, calcConfig .getBoolean(ConfigItems.INCLUDE_GRID_SOURCES.name())); // rupture model 
erf.setParameter(GEM1ERF.GRIDDED_SEIS_RUP_TYPE_NAME, calcConfig
        .getString(ConfigItems.TREAT_GRID_SOURCE_AS.name()));
// mag-scaling relationship
// NOTE(review): the gridded-seismicity scaling relationship is read from
// AREA_SOURCE_MAGNITUDE_SCALING_RELATIONSHIP — this looks like a copy-paste
// from the area-source section above; confirm whether a dedicated
// grid-source config item was intended.
erf.setParameter(GEM1ERF.GRIDDED_SEIS_MAG_SCALING_REL_PARAM_NAME,
        calcConfig.getString(
                ConfigItems.AREA_SOURCE_MAGNITUDE_SCALING_RELATIONSHIP
                        .name()));

// params for fault source
// inclusion of fault sources in the calculation
erf.setParameter(GEM1ERF.INCLUDE_FAULT_SOURCES_PARAM_NAME, calcConfig
        .getBoolean(ConfigItems.INCLUDE_FAULT_SOURCE.name()));
// rupture offset
erf.setParameter(GEM1ERF.FAULT_RUP_OFFSET_PARAM_NAME, calcConfig
        .getDouble(ConfigItems.FAULT_RUPTURE_OFFSET.name()));
// surface discretization
erf.setParameter(GEM1ERF.FAULT_DISCR_PARAM_NAME, calcConfig
        .getDouble(ConfigItems.FAULT_SURFACE_DISCRETIZATION.name()));
// mag-scaling relationship
erf.setParameter(GEM1ERF.FAULT_MAG_SCALING_REL_PARAM_NAME, calcConfig
        .getString(ConfigItems.FAULT_MAGNITUDE_SCALING_RELATIONSHIP
                .name()));
// mag-scaling sigma
erf.setParameter(GEM1ERF.FAULT_SCALING_SIGMA_PARAM_NAME, calcConfig
        .getDouble(ConfigItems.FAULT_MAGNITUDE_SCALING_SIGMA.name()));
// rupture aspect ratio
erf.setParameter(GEM1ERF.FAULT_RUP_ASPECT_RATIO_PARAM_NAME, calcConfig
        .getDouble(ConfigItems.RUPTURE_ASPECT_RATIO.name()));
// rupture floating type
erf.setParameter(GEM1ERF.FAULT_FLOATER_TYPE_PARAM_NAME, calcConfig
        .getString(ConfigItems.RUPTURE_FLOATING_TYPE.name()));

// params for subduction fault
// inclusion of fault sources in the calculation
erf.setParameter(GEM1ERF.INCLUDE_SUBDUCTION_SOURCES_PARAM_NAME,
        calcConfig.getBoolean(ConfigItems.INCLUDE_SUBDUCTION_FAULT_SOURCE
                .name()));
// rupture offset
erf.setParameter(GEM1ERF.SUB_RUP_OFFSET_PARAM_NAME, calcConfig
        .getDouble(ConfigItems.SUBDUCTION_FAULT_RUPTURE_OFFSET.name()));
// surface discretization
erf.setParameter(GEM1ERF.SUB_DISCR_PARAM_NAME, calcConfig
        .getDouble(ConfigItems.SUBDUCTION_FAULT_SURFACE_DISCRETIZATION
                .name()));
// mag-scaling relationship
erf.setParameter(
        GEM1ERF.SUB_MAG_SCALING_REL_PARAM_NAME,
        calcConfig.getString(
                ConfigItems.SUBDUCTION_FAULT_MAGNITUDE_SCALING_RELATIONSHIP
                        .name()));
// mag-scaling sigma
erf.setParameter(GEM1ERF.SUB_SCALING_SIGMA_PARAM_NAME, calcConfig
        .getDouble(ConfigItems.SUBDUCTION_FAULT_MAGNITUDE_SCALING_SIGMA
                .name()));
// rupture aspect ratio
erf.setParameter(GEM1ERF.SUB_RUP_ASPECT_RATIO_PARAM_NAME, calcConfig
        .getDouble(ConfigItems.SUBDUCTION_RUPTURE_ASPECT_RATIO.name()));
// rupture floating type
erf.setParameter(GEM1ERF.SUB_FLOATER_TYPE_PARAM_NAME, calcConfig
        .getString(ConfigItems.SUBDUCTION_RUPTURE_FLOATING_TYPE
                .name()));

// update
erf.updateForecast();
} // setGEM1ERFParams()

/**
 * Samples, for every tectonic region type, the first branching level of the
 * corresponding GMPE logic tree and returns the selected attenuation
 * relationship per region (looked up in the tree's end-branch map by the
 * sampled branch number).
 */
private static HashMap<TectonicRegionType, ScalarIntensityMeasureRelationshipAPI> sampleGemLogicTreeGMPE(
        HashMap<TectonicRegionType, LogicTree<ScalarIntensityMeasureRelationshipAPI>> listLtGMPE) {
    HashMap<TectonicRegionType, ScalarIntensityMeasureRelationshipAPI> hm =
            new HashMap<TectonicRegionType, ScalarIntensityMeasureRelationshipAPI>();
    // loop over tectonic regions
    Iterator<TectonicRegionType> iter = listLtGMPE.keySet().iterator();
    while (iter.hasNext()) {
        // get tectonic region type
        TectonicRegionType trt = iter.next();
        // get corresponding logic tree
        LogicTree<ScalarIntensityMeasureRelationshipAPI> ltGMPE =
                listLtGMPE.get(trt);
        // sample the first branching level
        int branch = ltGMPE.sampleBranchingLevel(0);
        // select the corresponding gmpe from the end-branch mapping
        ScalarIntensityMeasureRelationshipAPI gmpe =
                ltGMPE.getEBMap().get(Integer.toString(branch));
        hm.put(trt, gmpe);
    }
    return hm;
}

/**
 * Loads the ERF logic tree from the file named by ERF_LOGIC_TREE_FILE.
 * NOTE(review): the `configuration` parameter is unused — the path comes
 * from getRelativePath(); confirm whether it should be read from the
 * passed-in configuration instead.
 */
private ErfLogicTreeData createErfLogicTreeData(Configuration configuration) {
    // load ERF logic tree data
    ErfLogicTreeData erfLogicTree = new ErfLogicTreeData(
            getRelativePath(ConfigItems.ERF_LOGIC_TREE_FILE.name()));
    return erfLogicTree;
} // createErfLogicTreeData()

/**
 * Loads the GMPE logic tree, pulling component, intensity measure, period,
 * damping, truncation and reference-site settings from the given
 * configuration.
 */
private GmpeLogicTreeData createGmpeLogicTreeData(
        Configuration configuration) {
    // load GMPE logic tree data
    String relativePath =
getRelativePath(ConfigItems.GMPE_LOGIC_TREE_FILE.name());
String component =
        configuration.getString(ConfigItems.COMPONENT.name());
String intensityMeasureType =
        configuration.getString(ConfigItems.INTENSITY_MEASURE_TYPE
                .name());
Double period = configuration.getDouble(ConfigItems.PERIOD.name());
Double damping = configuration.getDouble(ConfigItems.DAMPING.name());
String gmpeTruncationType = configuration
        .getString(ConfigItems.GMPE_TRUNCATION_TYPE.name());
Double truncationLevel =
        configuration.getDouble(ConfigItems.TRUNCATION_LEVEL.name());
String standardDeviationType =
        configuration.getString(ConfigItems.STANDARD_DEVIATION_TYPE
                .name());
Double referenceVs30Value = configuration
        .getDouble(ConfigItems.REFERENCE_VS30_VALUE.name());
// instantiate eventually
GmpeLogicTreeData gmpeLogicTree =
        new GmpeLogicTreeData(relativePath, component,
                intensityMeasureType, period, damping,
                gmpeTruncationType, truncationLevel,
                standardDeviationType, referenceVs30Value);
// GmpeLogicTreeData gmpeLogicTree =
// new GmpeLogicTreeData(
// getRelativePath(ConfigItems.GMPE_LOGIC_TREE_FILE.name()),
// config.getString(ConfigItems.COMPONENT.name()), config
// .getString(ConfigItems.INTENSITY_MEASURE_TYPE
// .name()), config
// .getDouble(ConfigItems.PERIOD.name()), config
// .getDouble(ConfigItems.DAMPING.name()), config
// .getString(ConfigItems.GMPE_TRUNCATION_TYPE
// .name()),
// config.getDouble(ConfigItems.TRUNCATION_LEVEL.name()),
// config.getString(ConfigItems.STANDARD_DEVIATION_TYPE
// .name()), config
// .getDouble(ConfigItems.REFERENCE_VS30_VALUE
// .name())));
return gmpeLogicTree;
} // createGmpeLogicTreeData()

// for testing
// Entry point used for manual testing only: runs a full calculation with
// the default "CalculatorConfig.properties" and exits.
public static void main(String[] args) throws IOException,
        SecurityException, IllegalArgumentException,
        ClassNotFoundException, InstantiationException,
        IllegalAccessException, NoSuchMethodException,
        InvocationTargetException, ConfigurationException {
    // Uncomment to test the configuration of the logging appenders:
    // String msg =
    // "User directory to put the file CalculatorConfig.properties -> "
    // + System.getProperty("user.dir");
    // URL url =
    // Thread.currentThread().getContextClassLoader().getResource(".");
    // String workingDirectory = "working directory is : " + url.toString();
    // logger.trace(msg);
    // logger.trace(url);
    // logger.debug(msg);
    // logger.debug(url);
    // logger.info(msg);
    // logger.info(url);
    // logger.warn(msg);
    // logger.warn(url);
    // logger.error(msg);
    // logger.error(url);
    // logger.fatal(msg);
    // logger.fatal(url);
    CommandLineCalculator clc =
            new CommandLineCalculator("CalculatorConfig.properties");
    clc.doCalculation();
    // clc.doCalculationThroughMonteCarloApproach();
    // clc.saveMeanGroundMotionMapToGMTAsciiFile();
    // NOTE(review): explicit exit — presumably to kill non-daemon threads
    // started by the calculation; verify before removing.
    System.exit(0);
} // main()
} // class CommandLineCalculatorWithProperties
package com.izforge.izpack.util; import java.io.File; import java.io.FileOutputStream; import java.io.FileWriter; import java.math.BigInteger; import java.security.InvalidKeyException; import java.security.KeyPair; import java.security.KeyPairGenerator; import java.security.KeyStore; import java.security.NoSuchProviderException; import java.security.SecureRandom; import java.security.Security; import java.security.SignatureException; import java.security.cert.Certificate; import java.security.cert.X509Certificate; import java.util.ArrayList; import java.util.Calendar; import java.util.Date; import java.util.List; import org.bouncycastle.asn1.x500.X500NameBuilder; import org.bouncycastle.asn1.x500.style.BCStyle; import org.bouncycastle.asn1.x509.AuthorityKeyIdentifier; import org.bouncycastle.asn1.x509.BasicConstraints; import org.bouncycastle.asn1.x509.ExtendedKeyUsage; import org.bouncycastle.asn1.x509.Extension; import org.bouncycastle.asn1.x509.GeneralName; import org.bouncycastle.asn1.x509.GeneralNames; import org.bouncycastle.asn1.x509.KeyPurposeId; import org.bouncycastle.asn1.x509.KeyUsage; import org.bouncycastle.asn1.x509.X509Extension; import org.bouncycastle.asn1.x509.X509Extensions; import org.bouncycastle.cert.X509v3CertificateBuilder; import org.bouncycastle.cert.jcajce.JcaX509CertificateConverter; import org.bouncycastle.cert.jcajce.JcaX509v3CertificateBuilder; import org.bouncycastle.openssl.PEMWriter; import org.bouncycastle.operator.ContentSigner; import org.bouncycastle.operator.jcajce.JcaContentSignerBuilder; import org.bouncycastle.x509.X509V3CertificateGenerator; import org.bouncycastle.x509.extension.AuthorityKeyIdentifierStructure; import org.bouncycastle.x509.extension.SubjectKeyIdentifierStructure; import com.izforge.izpack.installer.AutomatedInstallData; import com.izforge.izpack.installer.DataValidator; import com.izforge.izpack.installer.DataValidator.Status; public class CreateCertsValidator implements DataValidator { private String 
strMessage = ""; public static final String strMessageId = "messageid"; public static final String strMessageValue = "message.oldvalue"; // not to be stored public Status validateData(AutomatedInstallData adata) { try { // first create CA KeyPair pairCA = generateRSAKeyPair(4096); String countryCode = adata.getVariable("mongodb.ssl.certificate.countrycode"); String state = adata.getVariable("mongodb.ssl.certificate.state"); String city = adata.getVariable("mongodb.ssl.certificate.city"); String organization = adata.getVariable("mongodb.ssl.certificate.organization"); String organizationalUnit = adata.getVariable("mongodb.ssl.certificate.organisationalunit"); String name = adata.getVariable("mongodb.ssl.certificate.name"); String email = adata.getVariable("mongodb.ssl.certificate.email"); int validity = Integer.parseInt(adata.getVariable("mongodb.ssl.certificate.validity")); X509Certificate cacert = generateCAV3Certificate(pairCA, countryCode, organization, organizationalUnit, state, city, name, email, validity); String strCertPath = adata.getVariable("mongodb.dir.certs"); File dirCerts = new File (strCertPath); if (!dirCerts.exists()) dirCerts.mkdirs(); FileWriter cacertfile = new FileWriter(strCertPath + File.separator + "ca.cacrt"); PEMWriter pem = new PEMWriter(cacertfile); pem.writeObject(cacert); pem.close(); String capassphrase = adata.getVariable("mongodb.ssl.capassphrase"); KeyPairGeneratorDataValidator.writePrivateKey(strCertPath + File.separator + "ca.key", pairCA, capassphrase.toCharArray()); // then create server cert KeyPair pairServer = generateRSAKeyPair(2048); String hostname = adata.getVariable("mongodb.ssl.certificate.hostname"); X509Certificate servercert = generateServerV3Certificate(pairServer, countryCode, organization, organizationalUnit, state, city, hostname, null, validity, cacert , pairCA); FileWriter servercertfile = new FileWriter(strCertPath + File.separator + hostname + ".crt"); pem = new PEMWriter(servercertfile); 
pem.writeObject(servercert); pem.close(); KeyStore keyStore = KeyStore.getInstance("PKCS12", "BC"); keyStore.load(null, null); keyStore.setKeyEntry("trust", pairServer.getPrivate(), null, new Certificate[] { servercert }); String exportPassword = "sage"; keyStore.store(new FileOutputStream( strCertPath + File.separator + hostname + ".p12"), exportPassword.toCharArray()); String serverpassphrase = adata.getVariable("mongodb.ssl.serverpassphrase"); KeyPairGeneratorDataValidator.writePrivateKey(strCertPath + File.separator + hostname + ".key", pairServer, serverpassphrase.toCharArray()); adata.setVariable("mongodb.ssl.usecafile", "true"); File pemKeyFile = new File(strCertPath + File.separator + hostname + ".pem"); File certFile = new File(strCertPath + File.separator + hostname + ".crt"); File privKeyFile = new File(strCertPath + File.separator + hostname + ".key"); KeyPairGeneratorDataValidator.mergeFiles(new File[]{certFile,privKeyFile}, pemKeyFile); // then create a client cert KeyPair pairClient = generateRSAKeyPair(2048); X509Certificate clientcert = generateClientV3Certificate(pairClient, countryCode, organization, organizationalUnit, state, city, name, email, validity, cacert , pairCA); FileWriter clientcertfile = new FileWriter(strCertPath + File.separator + "client.crt"); pem = new PEMWriter(clientcertfile); pem.writeObject(clientcert); pem.close(); //String serverpassphrase = adata.getVariable("mongodb.ssl.serverpassphrase"); KeyPairGeneratorDataValidator.writePrivateKey(strCertPath + File.separator + "client.key", pairClient, null); File pemClientKeyFile = new File(strCertPath + File.separator + "client.pem"); File certClientFile = new File(strCertPath + File.separator + "client.crt"); File privClientKeyFile = new File(strCertPath + File.separator + "client.key"); KeyPairGeneratorDataValidator.mergeFiles(new File[]{certClientFile,privClientKeyFile}, pemClientKeyFile); // we need to says that this step was done at least one time 
adata.setVariable("mongodb.ssl.alreadydone", "true"); return Status.OK; } catch (Exception ex) { strMessage = ex.getMessage(); adata.setVariable(strMessageValue, strMessage); } return Status.ERROR; } public String getErrorMessageId() { return strMessageId; } public String getWarningMessageId() { return strMessageId; } public boolean getDefaultAnswer() { return false; } public static X509Certificate generateCAV3Certificate(KeyPair pair, String country, String organization, String organizationalUnit, String state, String locality, String name, String email, int validity) throws Exception { Security.addProvider(new org.bouncycastle.jce.provider.BouncyCastleProvider()); X500NameBuilder builder=new X500NameBuilder(BCStyle.INSTANCE); if (country !=null && !country.equals("")) builder.addRDN(BCStyle.C,country); if (organization !=null && !organization.equals("")) builder.addRDN(BCStyle.O,organization); if (organizationalUnit !=null && !organizationalUnit.equals("")) builder.addRDN(BCStyle.OU,organizationalUnit); if (state !=null && !state.equals("")) builder.addRDN(BCStyle.ST,state); if (locality !=null && !locality.equals("")) builder.addRDN(BCStyle.L,locality); if (name !=null && !name.equals("")) builder.addRDN(BCStyle.CN,name); if (email !=null && !email.equals("")) builder.addRDN(BCStyle.E,email); Date notBefore=new Date(); Calendar cal = Calendar.getInstance(); cal.setTime(notBefore); cal.add(Calendar.DAY_OF_YEAR, validity); Date notAfter = cal.getTime(); BigInteger serial=BigInteger.valueOf(System.currentTimeMillis()); X509v3CertificateBuilder certGen=new JcaX509v3CertificateBuilder(builder.build(),serial,notBefore,notAfter,builder.build(),pair.getPublic()); certGen.addExtension(Extension.basicConstraints, true, new BasicConstraints(true)); certGen.addExtension(Extension.keyUsage, true, new KeyUsage( KeyUsage.keyCertSign | KeyUsage.cRLSign )); SubjectKeyIdentifierStructure keyid = new SubjectKeyIdentifierStructure(pair.getPublic()); 
certGen.addExtension(Extension.authorityKeyIdentifier, false, new AuthorityKeyIdentifier(keyid.getKeyIdentifier())); certGen.addExtension(Extension.subjectKeyIdentifier, false, keyid); ContentSigner sigGen=new JcaContentSignerBuilder("SHA256WithRSAEncryption").setProvider(new org.bouncycastle.jce.provider.BouncyCastleProvider()).build(pair.getPrivate()); X509Certificate cert=new JcaX509CertificateConverter().setProvider(new org.bouncycastle.jce.provider.BouncyCastleProvider()).getCertificate(certGen.build(sigGen)); cert.checkValidity(new Date()); //cert.verify(cert.getPublicKey()); cert.verify(pair.getPublic()); return cert; } public static X509Certificate generateServerV3Certificate(KeyPair pair, String country, String organization, String organizationalUnit, String state, String locality, String name, String email, int validity, X509Certificate certCA, KeyPair pairCA) throws Exception { Security.addProvider(new org.bouncycastle.jce.provider.BouncyCastleProvider()); X500NameBuilder builder=new X500NameBuilder(BCStyle.INSTANCE); if (country !=null && !country.equals("")) builder.addRDN(BCStyle.C,country); if (organization !=null && !organization.equals("")) builder.addRDN(BCStyle.O,organization); if (organizationalUnit !=null && !organizationalUnit.equals("")) builder.addRDN(BCStyle.OU,organizationalUnit); if (state !=null && !state.equals("")) builder.addRDN(BCStyle.ST,state); if (locality !=null && !locality.equals("")) builder.addRDN(BCStyle.L,locality); if (name !=null && !name.equals("")) builder.addRDN(BCStyle.CN,name); if (email !=null && !email.equals("")) builder.addRDN(BCStyle.E,email); Date notBefore=new Date(); Calendar cal = Calendar.getInstance(); cal.setTime(notBefore); cal.add(Calendar.DAY_OF_YEAR, validity); Date notAfter = cal.getTime(); BigInteger serial=BigInteger.valueOf(System.currentTimeMillis()); X509v3CertificateBuilder certGen=new JcaX509v3CertificateBuilder(certCA,serial,notBefore,notAfter,builder.build(),pair.getPublic()); 
certGen.addExtension(Extension.basicConstraints, false, new BasicConstraints(false)); certGen.addExtension(Extension.keyUsage, true, new KeyUsage( KeyUsage.digitalSignature | KeyUsage.keyEncipherment | KeyUsage.dataEncipherment | KeyUsage.keyAgreement)); SubjectKeyIdentifierStructure keyid = new SubjectKeyIdentifierStructure(pair.getPublic()); certGen.addExtension(Extension.subjectKeyIdentifier, false, keyid); AuthorityKeyIdentifierStructure keyidCA = new AuthorityKeyIdentifierStructure(certCA); certGen.addExtension(Extension.authorityKeyIdentifier, false, keyidCA); List<GeneralName> subjectNames = new ArrayList<>(); subjectNames.add(new GeneralName(GeneralName.dNSName,name)); certGen.addExtension(Extension.subjectAlternativeName, false, new GeneralNames( subjectNames.toArray(new GeneralName[0]))); ContentSigner sigGen=new JcaContentSignerBuilder("SHA256WithRSAEncryption").setProvider(new org.bouncycastle.jce.provider.BouncyCastleProvider()).build(pairCA.getPrivate()); X509Certificate cert=new JcaX509CertificateConverter().setProvider(new org.bouncycastle.jce.provider.BouncyCastleProvider()).getCertificate(certGen.build(sigGen)); cert.checkValidity(new Date()); //cert.verify(cert.getPublicKey()); cert.verify(pairCA.getPublic()); return cert; } public static X509Certificate generateClientV3Certificate(KeyPair pair, String country, String organization, String organizationalUnit, String state, String locality, String name, String email, int validity, X509Certificate certCA, KeyPair pairCA) throws Exception { Security.addProvider(new org.bouncycastle.jce.provider.BouncyCastleProvider()); X500NameBuilder builder=new X500NameBuilder(BCStyle.INSTANCE); if (country !=null && !country.equals("")) builder.addRDN(BCStyle.C,country); if (organization !=null && !organization.equals("")) builder.addRDN(BCStyle.O,organization); if (organizationalUnit !=null && !organizationalUnit.equals("")) builder.addRDN(BCStyle.OU,organizationalUnit); if (state !=null && !state.equals("")) 
builder.addRDN(BCStyle.ST,state); if (locality !=null && !locality.equals("")) builder.addRDN(BCStyle.L,locality); //if (name !=null && !name.equals("")) builder.addRDN(BCStyle.CN,"client"); //if (email !=null && !email.equals("")) // builder.addRDN(BCStyle.E,email); Date notBefore=new Date(); Calendar cal = Calendar.getInstance(); cal.setTime(notBefore); cal.add(Calendar.DAY_OF_YEAR, validity); Date notAfter = cal.getTime(); BigInteger serial=BigInteger.valueOf(System.currentTimeMillis()); X509v3CertificateBuilder certGen=new JcaX509v3CertificateBuilder(certCA,serial,notBefore,notAfter,builder.build(),pair.getPublic()); certGen.addExtension(Extension.basicConstraints, false, new BasicConstraints(false)); certGen.addExtension(Extension.keyUsage, true, new KeyUsage( KeyUsage.digitalSignature | KeyUsage.dataEncipherment | KeyUsage.keyAgreement)); certGen.addExtension(Extension.extendedKeyUsage, true, new ExtendedKeyUsage(KeyPurposeId.id_kp_clientAuth)); SubjectKeyIdentifierStructure keyid = new SubjectKeyIdentifierStructure(pair.getPublic()); certGen.addExtension(Extension.subjectKeyIdentifier, false, keyid); AuthorityKeyIdentifierStructure keyidCA = new AuthorityKeyIdentifierStructure(certCA); certGen.addExtension(Extension.authorityKeyIdentifier, false, keyidCA); ContentSigner sigGen=new JcaContentSignerBuilder("SHA256WithRSAEncryption").setProvider(new org.bouncycastle.jce.provider.BouncyCastleProvider()).build(pairCA.getPrivate()); X509Certificate cert=new JcaX509CertificateConverter().setProvider(new org.bouncycastle.jce.provider.BouncyCastleProvider()).getCertificate(certGen.build(sigGen)); cert.checkValidity(new Date()); //cert.verify(cert.getPublicKey()); cert.verify(pairCA.getPublic()); return cert; } public static KeyPair generateRSAKeyPair(int nSize) throws Exception { KeyPairGenerator kpGen = KeyPairGenerator.getInstance("RSA"); kpGen.initialize(nSize, new SecureRandom()); return kpGen.generateKeyPair(); } }
package com.vladsch.flexmark.util.format;

import com.vladsch.flexmark.util.data.DataHolder;
import com.vladsch.flexmark.util.misc.CharPredicate;
import com.vladsch.flexmark.util.sequence.BasedSequence;
import com.vladsch.flexmark.util.sequence.Range;
import com.vladsch.flexmark.util.sequence.RepeatedSequence;
import com.vladsch.flexmark.util.sequence.SequenceUtils;
import com.vladsch.flexmark.util.sequence.builder.SequenceBuilder;
import com.vladsch.flexmark.util.sequence.builder.tree.BasedOffsetTracker;
import com.vladsch.flexmark.util.sequence.builder.tree.OffsetInfo;
import com.vladsch.flexmark.util.sequence.builder.tree.Segment;
import com.vladsch.flexmark.util.sequence.mappers.SpecialLeadInHandler;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

import static com.vladsch.flexmark.util.misc.CharPredicate.WHITESPACE;

/**
 * Wraps markdown paragraph text to a given width while optionally keeping hard/soft
 * line breaks, applying first-line/continuation indents, escaping or un-escaping
 * special lead-in characters, and re-mapping caret "tracked offsets" from the
 * original text to positions in the wrapped result.
 *
 * FIX(extraction): three countdown loops of the form {@code for (int i = iMax; i-- > 0; )}
 * had lost their "{@code -- > 0; ) {}" text in the garbled source; they are restored here.
 * The descending order is required because the loop bodies call {@code trackedOffsets.get(i)}
 * and adjust "all following offsets" starting at {@code j = i + 1}.
 */
public class MarkdownParagraph {
    final private static char MARKDOWN_START_LINE_CHAR = SequenceUtils.LS;
    public static final List<SpecialLeadInHandler> EMPTY_LEAD_IN_HANDLERS = Collections.emptyList();
    public static final List<TrackedOffset> EMPTY_OFFSET_LIST = Collections.emptyList();

    final @NotNull BasedSequence baseSeq;          // text to wrap
    final @NotNull BasedSequence altBaseSeq;       // alternate base used for offset mapping (may == baseSeq)
    final @NotNull CharWidthProvider charWidthProvider;
    private BasedSequence firstIndent = BasedSequence.NULL;
    private BasedSequence indent = BasedSequence.NULL;
    private int firstWidthOffset = 0;
    int width = 0;                                  // 0 means "no wrapping"
    boolean keepHardLineBreaks = true;
    boolean keepSoftLineBreaks = false;
    boolean unEscapeSpecialLeadInChars = true;
    boolean escapeSpecialLeadInChars = true;
    boolean restoreTrackedSpaces = false;
    @Nullable DataHolder options = null;
    @NotNull List<? extends SpecialLeadInHandler> leadInHandlers = EMPTY_LEAD_IN_HANDLERS;
    private List<TrackedOffset> trackedOffsets = EMPTY_OFFSET_LIST;
    private boolean trackedOffsetsSorted = true;

    public MarkdownParagraph(CharSequence chars) {
        this(BasedSequence.of(chars));
    }

    public MarkdownParagraph(BasedSequence chars) {
        this(chars, chars, CharWidthProvider.NULL);
    }

    public MarkdownParagraph(@NotNull BasedSequence chars, @NotNull CharWidthProvider charWidthProvider) {
        this(chars, chars, charWidthProvider);
    }

    public MarkdownParagraph(@NotNull BasedSequence chars, @NotNull BasedSequence altChars, @NotNull CharWidthProvider charWidthProvider) {
        baseSeq = chars;
        this.altBaseSeq = altChars;
        this.charWidthProvider = charWidthProvider;
    }

    /** Wraps the text without any tracked-offset re-mapping. Returns input unchanged when width is 0. */
    public BasedSequence wrapTextNotTracked() {
        if (getFirstWidth() <= 0) return baseSeq;
        LeftAlignedWrapping wrapping = new LeftAlignedWrapping(baseSeq);
        return wrapping.wrapText();
    }

    /**
     * Computes the splice range of leading continuation-line blanks to remove before
     * wrapping, for an edit at {@code offset}. Non-NULL only for a delete that left
     * the caret after a space at the start of a continuation line.
     */
    @NotNull
    public Range getContinuationStartSplice(int offset, boolean afterSpace, boolean afterDelete) {
        BasedSequence baseSequence = altBaseSeq.getBaseSequence();
        assert offset >= 0 && offset <= baseSequence.length();
        if (afterSpace && afterDelete) {
            BasedOffsetTracker preFormatTracker = BasedOffsetTracker.create(baseSeq);
            int startOfLine = baseSequence.startOfLine(offset);
            if (startOfLine > baseSeq.getStartOffset() && baseSequence.safeCharAt(offset) != ' ') {
                int previousNonBlank = baseSequence.lastIndexOfAnyNot(CharPredicate.SPACE_TAB_EOL, offset - 1);
                if (previousNonBlank < startOfLine) {
                    // delete range between last non-blank and offset index
                    @NotNull OffsetInfo offsetInfo = preFormatTracker.getOffsetInfo(offset, true);
                    int offsetIndex = offsetInfo.endIndex;
                    int previousNonBlankIndex = baseSeq.lastIndexOfAnyNot(CharPredicate.SPACE_TAB_EOL, offsetIndex - 1);
                    return Range.of(previousNonBlankIndex + 1, offsetIndex);
                }
            }
        }
        return Range.NULL;
    }

    /**
     * Wraps the text and re-maps all tracked offsets into the wrapped result.
     * Side effect: each {@link TrackedOffset}'s index is updated.
     */
    public BasedSequence wrapText() {
        if (getFirstWidth() <= 0) return baseSeq;
        if (trackedOffsets.isEmpty()) return wrapTextNotTracked();

        // Adjust input text for wrapping by removing any continuation splice regions
        BasedSequence input = baseSeq;
        Range lastRange = Range.NULL;
        sortedTrackedOffsets();

        int iMax = trackedOffsets.size();
        // iterate offsets from last to first so earlier deletions don't shift later ones
        for (int i = iMax; i-- > 0; ) {
            TrackedOffset trackedOffset = trackedOffsets.get(i);
            if (lastRange.isEmpty() || !lastRange.contains(trackedOffset.getOffset())) {
                lastRange = getContinuationStartSplice(trackedOffset.getOffset(), trackedOffset.isAfterSpaceEdit(), trackedOffset.isAfterDelete());
                if (lastRange.isNotEmpty()) {
                    input = input.delete(lastRange.getStart(), lastRange.getEnd());
                }
            }
        }

        LeftAlignedWrapping wrapping = new LeftAlignedWrapping(input);
        BasedSequence wrapped = wrapping.wrapText();

        // FIX: apply after wrapping fixes
        BasedOffsetTracker tracker = BasedOffsetTracker.create(wrapped);

        if (restoreTrackedSpaces) {
            // NOTE: Restore trailing spaces at end of line if it has tracked offset on it
            int restoredAppendSpaces = 0;
            int baseSeqLastNonBlank = baseSeq.lastIndexOfAnyNot(CharPredicate.WHITESPACE) + 1;
            BasedOffsetTracker baseSeqTracker = BasedOffsetTracker.create(altBaseSeq);
            boolean haveAltBaseSeq = baseSeq != altBaseSeq;

            for (int i = iMax; i-- > 0; ) {
                TrackedOffset trackedOffset = trackedOffsets.get(i);
                boolean isAfterSpaceInsert = haveAltBaseSeq && trackedOffset.isAfterSpaceEdit() && trackedOffset.isAfterInsert() && trackedOffset.getOffset() > 0;
                int startDelta = isAfterSpaceInsert ? 1 : 0;
                OffsetInfo baseInfo = baseSeqTracker.getOffsetInfo(trackedOffset.getOffset() - startDelta, startDelta == 0);
                int baseIndex = baseInfo.endIndex;
                final int indexSpacesBefore = baseSeq.countTrailing(CharPredicate.SPACE_TAB_EOL, baseIndex);
                final int indexSpacesAfter = baseSeq.countLeading(CharPredicate.SPACE, baseIndex);
                int baseIndexSpaces = indexSpacesBefore;
                int baseIndexSpacesAfter = indexSpacesAfter;
                boolean needSpace = baseIndexSpaces > 0;
                int endLine = baseSeq.endOfLine(baseIndex);
                int startLine = baseSeq.startOfLine(baseIndex);
                int firstNonBlank = baseSeq.indexOfAnyNot(CharPredicate.SPACE, startLine, endLine);

                // NOTE: if have alternate base sequence then mapping is done using the computed baseSeq offset
                OffsetInfo info = haveAltBaseSeq ? tracker.getOffsetInfo(baseIndex, startDelta == 0) : tracker.getOffsetInfo(trackedOffset.getOffset(), true);
                int index = info.endIndex;

                if (info.pos >= 0 && info.pos < tracker.size() && isAfterSpaceInsert) {
                    Segment segment = tracker.getSegmentOffsetTree().getSegment(info.pos, tracker.getSequence());
                    if (segment.getStartOffset() == baseIndex) {
                        // at start of segment after space need to move it after prev segment
                        info = tracker.getOffsetInfo(baseIndex - baseIndexSpaces, false);
                        index = info.endIndex;
                    } else if (wrapped.isCharAt(index + 1, CharPredicate.EOL)) {
                        // EOL inserted in between, move it to next char
                        info = tracker.getOffsetInfo(baseIndex + 1, false);
                        index = info.endIndex;
                    }
                }

                int endLineWrapped = wrapped.endOfLine(index);
                int startLineWrapped = wrapped.startOfLine(index);
                int firstNonBlankWrapped = wrapped.indexOfAnyNot(CharPredicate.SPACE, startLineWrapped, endLineWrapped);

                if (trackedOffset.isAfterInsert() || trackedOffset.isAfterDelete()) {
                    // need to keep it at the previous character but not when inserting space or deleting 1 char surrounded by spaces,
                    // except when offset is at the end of line
                    if (!trackedOffset.isAfterSpaceEdit()) {
                        // if deleting non-space surrounded by spaces at the beginning of a paragraph then the preceding space is deleted so need to keep at position and insert spaces before
                        if (index == 0 && baseIndexSpaces > 0 && baseIndexSpacesAfter > 0) {
                            // intentionally keep index at 0 and leave spaces to be re-inserted below
                        } else {
                            index = info.startIndex;
                            endLineWrapped = wrapped.endOfLine(index);
                            startLineWrapped = wrapped.startOfLine(index);
                            firstNonBlankWrapped = wrapped.indexOfAnyNot(CharPredicate.SPACE, startLineWrapped, endLineWrapped);
                            if (trackedOffset.isAfterDelete() && index == endLineWrapped) baseIndexSpacesAfter = 0;
                            baseIndexSpaces = 0;
                        }
                    } else if (index == firstNonBlankWrapped) {
                        baseIndexSpaces = 0;
                    }
                }

                // NOTE: if typing space before or on start of continuation line, then do not move tracked offset to previous line
                if (index <= firstNonBlankWrapped) {
                    int unwrappedOffset = wrapped.getIndexOffset(firstNonBlankWrapped);
                    if (unwrappedOffset >= 0) {
                        int basePrevIndex = baseIndex + indexSpacesAfter;
                        boolean isLineSep = baseSeq.safeCharAt(basePrevIndex) == SequenceUtils.LS;
                        if (!isLineSep && baseIndex <= firstNonBlank && trackedOffset.isAfterSpaceEdit() && !trackedOffset.isAfterDelete()) {
                            baseIndexSpaces = 0;
                            baseIndexSpacesAfter = 0;
                        } else if (isLineSep || trackedOffset.isAfterDelete() || (trackedOffset.isAfterSpaceEdit() && baseIndexSpacesAfter > 0)) {
                            // tracked offset is followed by Line Separator, move the offset to end of previous line
                            index = wrapped.endOfLine(wrapped.startOfLine(index) - 1);
                            baseIndex = basePrevIndex;
                            // NOTE(review): endLine is assigned startOfLine(...) here — looks suspicious,
                            // confirm against upstream before "fixing"; kept verbatim.
                            endLine = baseSeq.startOfLine(baseIndex);
                            startLine = baseSeq.startOfLine(baseIndex);
                            firstNonBlank = baseSeq.indexOfAnyNot(CharPredicate.SPACE, startLine, endLine);
                            baseIndexSpaces = needSpace || isLineSep ? 1 : Math.min(1, baseIndexSpaces);
                            baseIndexSpacesAfter = 0;
                        }
                    }
                }

                int lastNonBlank = baseSeq.lastIndexOfAnyNot(CharPredicate.SPACE, endLine);
                int wrappedOffsetSpaces = wrapped.countTrailing(CharPredicate.SPACE, index);

                if (baseIndex >= lastNonBlank) {
                    // add only what is missing
                    baseIndexSpaces = Math.max(0, baseIndexSpaces - wrappedOffsetSpaces);
                    baseIndexSpacesAfter = 0;
                } else if (baseIndex > firstNonBlank) {
                    // spaces before caret, see if need to add max 1
                    int spacesBefore = wrapped.countTrailing(CharPredicate.SPACE, index);
                    baseIndexSpaces = Math.max(0, Math.min(1, baseIndexSpaces - spacesBefore));
                    int spacesAfter = wrapped.countLeading(CharPredicate.SPACE, index);
                    baseIndexSpacesAfter = Math.max(0, Math.min(1, baseIndexSpacesAfter - spacesAfter));
                } else if (baseIndex < firstNonBlank && trackedOffset.isAfterDelete() && !trackedOffset.isAfterSpaceEdit() && baseIndexSpaces > 0 && baseIndexSpacesAfter > 0) {
                    // spaces before caret, see if need to add max 1
                    info = tracker.getOffsetInfo(baseSeq.getIndexOffset(firstNonBlank), true);
                    index = info.endIndex;
                    baseIndexSpaces = 0;
                    baseIndexSpacesAfter = 1;
                } else {
                    baseIndexSpaces = 0;
                    baseIndexSpacesAfter = 0;
                }

                if (baseIndex < baseSeqLastNonBlank) {
                    // insert in middle
                    if (baseIndexSpaces + baseIndexSpacesAfter > 0) {
                        wrapped = wrapped.insert(index, RepeatedSequence.ofSpaces(baseIndexSpaces + baseIndexSpacesAfter));
                        // need to adjust all following offsets by the amount inserted
                        for (int j = i + 1; j < iMax; j++) {
                            TrackedOffset trackedOffset1 = trackedOffsets.get(j);
                            int indexJ = trackedOffset1.getIndex();
                            trackedOffset1.setIndex(indexJ + baseIndexSpaces + baseIndexSpacesAfter);
                        }
                    }
                } else {
                    restoredAppendSpaces = Math.max(restoredAppendSpaces, baseIndexSpaces);
                }

                trackedOffset.setIndex(index + baseIndexSpaces);
            }

            // append any trailing spaces
            if (restoredAppendSpaces > 0) {
                wrapped = wrapped.appendSpaces(restoredAppendSpaces);
            }
        } else {
            // Now we map the tracked offsets to indexes in the resulting text
            for (int i = iMax; i-- > 0; ) {
                TrackedOffset trackedOffset = trackedOffsets.get(i);
                int offset = trackedOffset.getOffset();
                boolean baseIsWhiteSpaceAtOffset = baseSeq.isBaseCharAt(offset, WHITESPACE);
                if (baseIsWhiteSpaceAtOffset && !(baseSeq.isBaseCharAt(offset - 1, WHITESPACE))) {
                    // we need to use previous non-blank and use that offset
                    OffsetInfo info = tracker.getOffsetInfo(offset - 1, false);
                    trackedOffset.setIndex(info.endIndex);
                } else if (!baseIsWhiteSpaceAtOffset && baseSeq.isBaseCharAt(offset + 1, WHITESPACE)) {
                    // we need to use this non-blank and use that offset
                    OffsetInfo info = tracker.getOffsetInfo(offset, false);
                    trackedOffset.setIndex(info.startIndex);
                } else {
                    OffsetInfo info = tracker.getOffsetInfo(offset, true);
                    trackedOffset.setIndex(info.endIndex);
                }
            }
        }
        return wrapped;
    }

    /** Adds (or replaces, by equal offset) a tracked offset; list is re-sorted lazily. */
    public void addTrackedOffset(@NotNull TrackedOffset trackedOffset) {
        if (trackedOffsets == EMPTY_OFFSET_LIST) trackedOffsets = new ArrayList<>();
        assert trackedOffset.getOffset() >= 0 && trackedOffset.getOffset() <= altBaseSeq.getBaseSequence().length();
        trackedOffsets.removeIf(it -> it.getOffset() == trackedOffset.getOffset());
        trackedOffsets.add(trackedOffset);
        trackedOffsetsSorted = false;
    }

    public List<TrackedOffset> getTrackedOffsets() {
        return sortedTrackedOffsets();
    }

    private List<TrackedOffset> sortedTrackedOffsets() {
        if (!trackedOffsetsSorted) {
            trackedOffsets.sort(Comparator.comparing(TrackedOffset::getOffset));
            trackedOffsetsSorted = true;
        }
        return trackedOffsets;
    }

    /** @return the tracked offset at exactly {@code offset}, or null if none. */
    @Nullable
    public TrackedOffset getTrackedOffset(int offset) {
        sortedTrackedOffsets();
        for (TrackedOffset trackedOffset : trackedOffsets) {
            if (trackedOffset.getOffset() == offset) return trackedOffset;
            if (trackedOffset.getOffset() > offset) break;
        }
        return null;
    }

    @NotNull
    public List<? extends SpecialLeadInHandler> getLeadInHandlers() {
        return leadInHandlers;
    }

    public void setLeadInHandlers(@NotNull List<? extends SpecialLeadInHandler> leadInHandlers) {
        this.leadInHandlers = leadInHandlers;
    }

    @Nullable
    public DataHolder getOptions() {
        return options;
    }

    public void setOptions(@Nullable DataHolder options) {
        this.options = options;
    }

    public boolean isRestoreTrackedSpaces() {
        return restoreTrackedSpaces;
    }

    public void setRestoreTrackedSpaces(boolean restoreTrackedSpaces) {
        this.restoreTrackedSpaces = restoreTrackedSpaces;
    }

    @NotNull
    public BasedSequence getChars() {
        return baseSeq;
    }

    public CharSequence getFirstIndent() {
        return firstIndent;
    }

    public void setFirstIndent(CharSequence firstIndent) {
        this.firstIndent = BasedSequence.of(firstIndent);
    }

    public CharSequence getIndent() {
        return indent;
    }

    /** Sets the continuation indent; also becomes the first-line indent if none was set. */
    public void setIndent(CharSequence indent) {
        this.indent = BasedSequence.of(indent);
        if (this.firstIndent.isNull()) this.firstIndent = this.indent;
    }

    /** Effective first-line width: 0 (no wrap) when width is 0, else width + offset clamped at 0. */
    public int getFirstWidth() {
        return (width == 0) ? 0 : Math.max(0, width + firstWidthOffset);
    }

    public int getFirstWidthOffset() {
        return firstWidthOffset;
    }

    public void setFirstWidthOffset(int firstWidthOffset) {
        this.firstWidthOffset = firstWidthOffset;
    }

    public int getWidth() {
        return width;
    }

    public void setWidth(int width) {
        this.width = Math.max(0, width);
    }

    public boolean getKeepHardBreaks() {
        return keepHardLineBreaks;
    }

    public void setKeepHardBreaks(boolean keepHardBreaks) {
        this.keepHardLineBreaks = keepHardBreaks;
    }

    public boolean getKeepSoftBreaks() {
        return keepSoftLineBreaks;
    }

    public boolean isUnEscapeSpecialLeadIn() {
        return unEscapeSpecialLeadInChars;
    }

    public void setUnEscapeSpecialLeadIn(boolean unEscapeSpecialLeadInChars) {
        this.unEscapeSpecialLeadInChars = unEscapeSpecialLeadInChars;
    }

    public boolean isEscapeSpecialLeadIn() {
        return escapeSpecialLeadInChars;
    }

    public void setEscapeSpecialLeadIn(boolean escapeSpecialLeadInChars) {
        this.escapeSpecialLeadInChars = escapeSpecialLeadInChars;
    }

    public void setKeepSoftBreaks(boolean keepLineBreaks) {
        this.keepSoftLineBreaks = keepLineBreaks;
    }

    @NotNull
    public CharWidthProvider getCharWidthProvider() {
        return charWidthProvider;
    }

    /** Token categories produced by {@link TextTokenizer}. */
    public enum TextType {
        WORD, SPACE, BREAK, MARKDOWN_BREAK, MARKDOWN_START_LINE
    }

    /** Immutable token: a typed range of the source, flagged when it is the first word of a line. */
    public static class Token {
        public final @NotNull TextType type;
        public final @NotNull Range range;
        public final boolean isFirstWord;

        private Token(@NotNull TextType type, @NotNull Range range, boolean isFirstWord) {
            this.type = type;
            this.range = range;
            this.isFirstWord = isFirstWord;
        }

        @Override
        public String toString() {
            return "token: " + type + " " + range + (isFirstWord ? " isFirst" : "");
        }

        public BasedSequence subSequence(BasedSequence charSequence) {
            return range.basedSubSequence(charSequence);
        }

        public CharSequence subSequence(CharSequence charSequence) {
            return range.charSubSequence(charSequence);
        }

        @NotNull
        public static Token of(@NotNull TextType type, @NotNull Range range) {
            return new Token(type, range, false);
        }

        @NotNull
        public static Token of(@NotNull TextType type, int start, int end) {
            return new Token(type, Range.of(start, end), false);
        }

        @NotNull
        public static Token of(@NotNull TextType type, @NotNull Range range, boolean isFirstWord) {
            return new Token(type, range, isFirstWord);
        }

        @NotNull
        public static Token of(@NotNull TextType type, int start, int end, boolean isFirstWord) {
            return new Token(type, Range.of(start, end), isFirstWord);
        }
    }

    /** Single-pass greedy left-aligned line wrapper over a tokenized sequence. */
    class LeftAlignedWrapping {
        final @NotNull BasedSequence baseSeq;
        final SequenceBuilder result;
        final TextTokenizer tokenizer;
        int col = 0;
        int lineCount = 0;
        final int spaceWidth = charWidthProvider.getSpaceWidth();
        CharSequence lineIndent = getFirstIndent();
        final CharSequence nextIndent = getIndent();
        int lineWidth = spaceWidth * getFirstWidth();
        final int nextWidth = width <= 0 ? Integer.MAX_VALUE : spaceWidth * width;
        int wordsOnLine = 0;
        BasedSequence leadingIndent = null;
        BasedSequence lastSpace = null;
        @NotNull List<? extends SpecialLeadInHandler> leadInHandlers = MarkdownParagraph.this.leadInHandlers;
        boolean unEscapeSpecialLeadInChars = MarkdownParagraph.this.unEscapeSpecialLeadInChars;
        boolean escapeSpecialLeadInChars = MarkdownParagraph.this.escapeSpecialLeadInChars;

        LeftAlignedWrapping(@NotNull BasedSequence baseSeq) {
            this.baseSeq = baseSeq;
            result = SequenceBuilder.emptyBuilder(baseSeq);
            tokenizer = new TextTokenizer(baseSeq);
        }

        void advance() {
            tokenizer.next();
        }

        void addToken(Token token) {
            addChars(baseSeq.subSequence(token.range));
        }

        void addChars(CharSequence charSequence) {
            result.append(charSequence);
            col += charWidthProvider.getStringWidth(charSequence);
        }

        void addSpaces(int count) {
            result.append(' ', count);
            col += charWidthProvider.getSpaceWidth() * count;
        }

        /**
         * Emits up to {@code count} chars from {@code sequence} (preserving based ranges),
         * padding with plain spaces if the sequence is shorter; returns the unused remainder.
         */
        BasedSequence addSpaces(BasedSequence sequence, int count) {
            if (count <= 0) return sequence;
            BasedSequence remainder = null;
            // NOTE: can do splitting add from sequence before and after padding spaces to have start/end range if needed
            if (sequence != null) {
                addChars(sequence.subSequence(0, Math.min(sequence.length(), count)));
                if (sequence.length() > count) {
                    remainder = sequence.subSequence(count);
                }
                count = Math.max(0, count - sequence.length());
            }
            // add more spaces if needed
            if (count > 0) {
                addSpaces(count);
            }
            return remainder;
        }

        /** Resets per-line state after an emitted line break; switches to continuation indent/width. */
        void afterLineBreak() {
            col = 0;
            wordsOnLine = 0;
            lineCount++;
            lineIndent = nextIndent;
            lineWidth = nextWidth;
            lastSpace = null;
            leadingIndent = null;
        }

        void processLeadInEscape(List<? extends SpecialLeadInHandler> handlers, BasedSequence sequence) {
            if (sequence.isNotEmpty() && escapeSpecialLeadInChars) {
                for (SpecialLeadInHandler handler : handlers) {
                    if (handler.escape(sequence, options, this::addChars)) return;
                }
            }
            addChars(sequence);
        }

        void processLeadInUnEscape(List<? extends SpecialLeadInHandler> handlers, BasedSequence sequence) {
            if (sequence.isNotEmpty() && unEscapeSpecialLeadInChars) {
                for (SpecialLeadInHandler handler : handlers) {
                    if (handler.unEscape(sequence, options, this::addChars)) return;
                }
            }
            addChars(sequence);
        }

        @NotNull
        BasedSequence wrapText() {
            while (true) {
                final Token token = tokenizer.getToken();
                if (token == null) break;
                switch (token.type) {
                    case SPACE: {
                        if (col == 0) leadingIndent = baseSeq.subSequence(token.range);
                        else lastSpace = baseSeq.subSequence(token.range);
                        advance();
                        break;
                    }

                    case WORD: {
                        if (col == 0 || col + charWidthProvider.getStringWidth(token.subSequence(baseSeq)) + spaceWidth <= lineWidth) {
                            // fits, add it
                            boolean firstNonBlank = col == 0;
                            if (col > 0) {
                                lastSpace = addSpaces(lastSpace, 1);
                            } else {
                                if (!SequenceUtils.isEmpty(lineIndent)) {
                                    addChars(lineIndent);
                                }
                                leadingIndent = null;
                            }

                            // escape words that became line lead-ins, un-escape lead-ins that moved mid-line
                            if (firstNonBlank && !token.isFirstWord) {
                                processLeadInEscape(leadInHandlers, baseSeq.subSequence(token.range));
                            } else if (!firstNonBlank && token.isFirstWord) {
                                processLeadInUnEscape(leadInHandlers, baseSeq.subSequence(token.range));
                            } else {
                                addToken(token);
                            }

                            advance();
                            wordsOnLine++;
                        } else {
                            // need to insert a line break and repeat
                            addChars(SequenceUtils.EOL);
                            afterLineBreak();
                        }
                        break;
                    }

                    case MARKDOWN_START_LINE: {
                        // start a new line if not already new
                        if (col > 0) {
                            addChars(SequenceUtils.EOL);
                            afterLineBreak();
                        }
                        advance();
                        break;
                    }

                    case MARKDOWN_BREAK: {
                        // start a new line if not already new
                        if (keepHardLineBreaks) {
                            if (col > 0) {
                                addToken(token);
                                afterLineBreak();
                            }
                        } else {
                            // treat as a space
                            lastSpace = baseSeq.subSequence(token.range);
                        }
                        advance();
                        break;
                    }

                    case BREAK: {
                        if (col > 0 && keepSoftLineBreaks) {
                            addToken(token);
                            afterLineBreak();
                        }
                        advance();
                        break;
                    }
                }
            }

            return result.toSequence();
        }
    }

    /**
     * Splits text into WORD / SPACE / BREAK / MARKDOWN_BREAK / MARKDOWN_START_LINE tokens.
     * A '\n' preceded by two or more spaces is a markdown hard break (MARKDOWN_BREAK),
     * otherwise a soft BREAK; the LS char marks a forced line start.
     */
    public static class TextTokenizer {
        final private CharSequence chars;
        private final int maxIndex;
        private int index = 0;
        private int lastPos = 0;
        private boolean isInWord = false;
        private boolean isFirstNonBlank = true;
        private int lastConsecutiveSpaces = 0;
        private @Nullable Token token = null;

        TextTokenizer(@NotNull CharSequence chars) {
            this.chars = chars;
            maxIndex = this.chars.length();
            reset();
        }

        public void reset() {
            index = 0;
            lastPos = 0;
            isInWord = false;
            token = null;
            lastConsecutiveSpaces = 0;
            isFirstNonBlank = true;
            next();
        }

        @Nullable
        Token getToken() {
            return token;
        }

        @NotNull
        public List<Token> asList() {
            ArrayList<Token> tokens = new ArrayList<Token>();
            reset();
            while (token != null) {
                tokens.add(token);
                next();
            }
            return tokens;
        }

        /** Advances to the next token; leaves {@code token == null} at end of input. */
        void next() {
            token = null;
            while (index < maxIndex) {
                char c = chars.charAt(index);
                if (isInWord) {
                    if (c == ' ' || c == '\t' || c == '\n' || c == MARKDOWN_START_LINE_CHAR) {
                        isInWord = false;
                        boolean isFirstWord = isFirstNonBlank;
                        isFirstNonBlank = false;
                        if (lastPos < index) {
                            // have a word
                            token = Token.of(TextType.WORD, lastPos, index, isFirstWord);
                            lastPos = index;
                            break;
                        }
                    } else {
                        index++;
                    }
                } else {
                    // in white space
                    if (c != ' ' && c != '\t' && c != '\n' && c != MARKDOWN_START_LINE_CHAR) {
                        if (lastPos < index) {
                            token = Token.of(TextType.SPACE, lastPos, index);
                            lastPos = index;
                            isInWord = true;
                            lastConsecutiveSpaces = 0;
                            break;
                        } else {
                            isInWord = true;
                            lastConsecutiveSpaces = 0;
                        }
                    } else {
                        if (c == '\n') {
                            if (lastConsecutiveSpaces >= 2) {
                                // two+ trailing spaces before '\n' is a markdown hard break
                                token = Token.of(TextType.MARKDOWN_BREAK, index - lastConsecutiveSpaces, index + 1);
                            } else {
                                token = Token.of(TextType.BREAK, index, index + 1);
                            }
                            lastPos = index + 1;
                            lastConsecutiveSpaces = 0;
                            isFirstNonBlank = true;
                            index++;
                            break;
                        } else if (c == MARKDOWN_START_LINE_CHAR) {
                            token = Token.of(TextType.MARKDOWN_START_LINE, index, index + 1);
                            lastPos = index + 1;
                            lastConsecutiveSpaces = 0;
                            index++;
                            break;
                        } else {
                            if (c == ' ') lastConsecutiveSpaces++;
                            else lastConsecutiveSpaces = 0;
                            index++;
                        }
                    }
                }
            }

            // flush the trailing token at end of input
            if (lastPos < index) {
                if (isInWord) {
                    token = Token.of(TextType.WORD, lastPos, index, isFirstNonBlank);
                    isFirstNonBlank = false;
                } else {
                    token = Token.of(TextType.SPACE, lastPos, index);
                }
                lastPos = index;
            }
        }
    }
}
package org.fedorahosted.flies.webtrans.client; import com.google.gwt.event.dom.client.KeyCodes; import com.google.gwt.event.dom.client.KeyUpEvent; import com.google.gwt.event.dom.client.KeyUpHandler; import com.google.gwt.user.client.ui.Button; import com.google.gwt.user.client.ui.FlowPanel; import com.google.gwt.user.client.ui.TextBox; import com.google.gwt.user.client.ui.Widget; public class TransMemoryView extends FlowPanel implements TransMemoryPresenter.Display { private Button searchButton; private TextBox tmTextBox; private final FlowPanel resultsPanel; public TransMemoryView() { tmTextBox = new TextBox(); searchButton = new Button("Search"); tmTextBox.addKeyUpHandler(new KeyUpHandler() { @Override public void onKeyUp(KeyUpEvent event) { if( event.getNativeKeyCode() == KeyCodes.KEY_ENTER ) { searchButton.click(); } } }); add(tmTextBox); add(searchButton); resultsPanel = new FlowPanel(); add(resultsPanel); } @Override public Button getSearchButton() { // TODO Auto-generated method stub return searchButton; } public TextBox getTmTextBox() { return tmTextBox; } @Override public Widget asWidget() { // TODO Auto-generated method stub return this; } @Override public void startProcessing() { // TODO Auto-generated method stub } @Override public void stopProcessing() { // TODO Auto-generated method stub } @Override public void addResult(Widget widget) { resultsPanel.add(widget); } @Override public void clearResults() { resultsPanel.clear(); } }
package ru.job4j.chess; /** * Class Board. * @author Tatyana Fukova (mailto:tatyanafukova@gmail.com) * @since 20.11 */ public class Board { Figure[][] figures = new Figure[8][8]; void initializeBoard(){ for (int i = 0; i < this.figures.length; i++) { figures[i][i]=null; } } void addFigure(Figure figure){ this.figures[figure.position.x][figure.position.y]=figure; } /** * Method to check if the figure can move to the set position. * @param source initial cell * @param dist destination cell * @return result if the figure is able to move to the set position */ boolean move(Cell source, Cell dist){ if(figures[source.x][source.y] == null){ throw new FigureNotFoundException("Figure is not found"); } Cell[] pathFigure = figures[source.x][source.y].way(dist); for (int i = 1; i < pathFigure.length; i++) { if(figures[pathFigure[i].x][pathFigure[i].y]!=null){ throw new OccupiedWayException("Occupied position."); } } figures[dist.x][dist.y] = figures[source.x][source.y]; figures[source.x][source.y] = null; return true; } }
package gameengineV1;

import gameengineinterfaces.GameElementKind;
import gameengineinterfaces.GameState;
import gamelogic.*;
import alieninterfaces.*;
import gameengineinterfaces.*;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.Scanner;

/**
 * Worker thread that drives the game: executes turns while the engine is in
 * {@link GameState#Running}, drains the engine's command queue between turns,
 * and dynamically loads alien classes from jar files.
 *
 * @author gmein
 */
public class GameEngineThread extends Thread {

    final private GameEngineV1 engine;

    public GameEngineThread(GameEngineV1 ge) {
        engine = ge;
        setName("GameEngineThread");
    }

    @Override
    public void run() {
        GameCommand gc;
        boolean endGame = false;
        int totalTurns = 0;

        engine.grid = new SpaceGrid(engine.vis, 500, 500);

        // alieninterfaces.jar must be on the classpath before any alien can be constructed
        try {
            addClassPathFile("alieninterfaces");
        } catch (Exception e) {
            engine.vis.debugErr("GameEngineThread: run() could not load alieninterfaces.jar");
            return;
        }

        engine.vis.debugOut("GameEngineThread: Started");

        do {
            try {
                if (engine.gameState == GameState.Running && !endGame) {
                    // giving the shell a chance to ask for confirmation for another turn here
                    boolean continueGame;
                    do {
                        continueGame = engine.vis.showContinuePrompt();

                        // do we have work requests? Only peek, don't wait
                        while (!engine.queue.isEmpty()) {
                            gc = (GameCommand) engine.queue.remove();
                            // endGame signifies that an "End" request has come through
                            endGame = processCommand(gc);
                        }
                    } while (!continueGame && !endGame);

                    // Execute game turn
                    long startTurnTime = System.nanoTime();
                    try {
                        if (engine.grid.executeGameTurn()) {
                            // return true == game over because at most one species left
                            engine.gameState = GameState.Paused;
                            engine.vis.showGameOver();
                        }
                    } catch (Exception e) {
                        engine.vis.debugErr("GameEngineThread: Unhandled exception during turn: " + e.getMessage());
                        e.printStackTrace();
                    }

                    totalTurns++;
                    engine.vis.showCompletedTurn(totalTurns, engine.grid.aliens.size(), System.nanoTime() - startTurnTime);
                }

                // between turns, block until commands arrive while paused
                // NOTE(review): isEmpty() is checked outside the synchronized block and wait()
                // is not in a condition loop — a command enqueued between the check and wait()
                // (or a spurious wakeup) is handled only by the drain loop below; confirm the
                // producer always notifies under the queue monitor.
                if (engine.queue.isEmpty()) {
                    if (engine.gameState == GameState.Paused) {
                        synchronized (engine.queue) {
                            engine.queue.wait();
                        }
                    }
                }

                // do we have work requests?
                while (!engine.queue.isEmpty()) {
                    gc = (GameCommand) engine.queue.remove();
                    // endGame signifies that an "End" request has come through
                    endGame = processCommand(gc);
                }
            } catch (Exception e) {
                e.printStackTrace();
                engine.vis.debugErr("GameThread: Unknown exception: " + e.getMessage());
                break;
            }
        } while (!endGame);

        engine.vis.debugOut("GameThread: Exit");
    }

    /**
     * Executes a single queued game command.
     *
     * @param gc command with its parameters
     * @return true when the command was an End request (caller should terminate)
     * @throws Exception re-thrown from element loading failures
     */
    private boolean processCommand(GameCommand gc) throws Exception {
        boolean gameOver = false;

        switch (gc.code) {
            case RandSeed:
                long randSeed = (long) gc.parameters[0];
                engine.grid.rand.setSeed(randSeed);
                engine.vis.debugOut("RandSeed: " + randSeed);
                break;

            case SetVariable:
                String s = (String) gc.parameters[0];
                if (s.equalsIgnoreCase("AlienChatter")) {
                    String s2 = (String) gc.parameters[1];
                    AlienContainer.chatter = s2.equalsIgnoreCase("on");
                }
                break;

            case AddElement:
                GameElementSpec element = (GameElementSpec) gc.parameters[0];
                engine.vis.debugOut("GameEngineThread: Processing GameElement " + element.packageName + ":" + element.className + ", " + element.state);
                if (element.kind != GameElementKind.INVALID) {
                    // aliens need their constructor loaded from the alien jar first
                    if (element.kind == GameElementKind.ALIEN) {
                        try {
                            element.cns = Load(element.packageName, element.className);
                        } catch (Exception e) {
                            engine.vis.debugErr("GameEngineThread: Error loading game element");
                            throw (e);
                        }
                    }

                    // If it is a SpaceObject (there could be a cleaner way to do this)
                    if (element.kind == GameElementKind.STAR
                            || element.kind == GameElementKind.PLANET
                            || element.kind == GameElementKind.RESIDENT) {
                        //engine.vis.debugErr(element.toString());
                    }
                }
                engine.grid.addElement(element);
                break;

            case Pause:
                engine.vis.debugOut("GameEngineThread: Pausing");
                engine.gameState = GameState.Paused;
                break;

            case Resume:
                engine.vis.debugOut("GameEngineThread: Resuming");
                // FIX(extraction): the original contained an unterminated
                // debugOut(" ... string fragment here; dropped during reconstruction.
                engine.gameState = GameState.Running;
                break;

            case List:
                engine.grid.listStatus();
                break;

            case End:
                engine.vis.debugOut("GameEngineThread: End request, thread closing");
                gameOver = true;
                break;

            default:
                break;
        }

        return gameOver;
    }

    // Dynamic class loader (.jar files)
    // stolen from StackOverflow, considered dark voodoo magic

    /**
     * Parameters of the method to add an URL to the System classes.
     */
    private static final Class<?>[] parameters = new Class[]{URL.class};

    /**
     * Adds the content pointed by the file to the classpath.
     * Engine-owned packages live under gameJarPath/&lt;pkg&gt;/dist/&lt;pkg&gt;.jar;
     * third-party alien packages are looked up under alienPath.
     */
    private void addClassPathFile(String packageName) throws IOException {
        String fullName;
        if (packageName.equalsIgnoreCase("stockaliens") || packageName.equalsIgnoreCase("alieninterfaces")) {
            fullName = engine.gameJarPath
                    + System.getProperty("file.separator") + packageName
                    + System.getProperty("file.separator") + "dist"
                    + System.getProperty("file.separator") + packageName + ".jar";
        } else {
            fullName = engine.alienPath + packageName + ".jar";
        }

        URL url = new File(fullName).toURI().toURL();
        URLClassLoader sysloader = (URLClassLoader) ClassLoader.getSystemClassLoader();
        Class<?> sysclass = URLClassLoader.class;

        // NOTE(review): reflective URLClassLoader.addURL only works on JDK 8 and earlier,
        // where the system class loader is a URLClassLoader — confirm the target JDK.
        try {
            Method method = sysclass.getDeclaredMethod("addURL", parameters);
            method.setAccessible(true);
            method.invoke(sysloader, new Object[]{url});
        } catch (Exception e) {
            e.printStackTrace();
            throw new IOException("GameElementThread: Error, could not add URL to system classloader");
        }
    }

    /**
     * Loads {@code packageName.className} from its jar and returns its no-arg constructor.
     */
    public Constructor<?> Load(String packageName, String className)
            throws IOException, SecurityException, ClassNotFoundException, IllegalArgumentException,
            InstantiationException, IllegalAccessException, InvocationTargetException, NoSuchMethodException {
        Constructor<?> cs = null;
        try {
            addClassPathFile(packageName);
            cs = ClassLoader.getSystemClassLoader().loadClass(packageName + "." + className).getConstructor();
        } catch (Exception e) {
            e.printStackTrace();
            engine.vis.debugErr("GameElementThread: Error: Could not get constructor");
            throw e;
        }
        return cs;
    }
}
package ru.job4j.map; import java.util.ConcurrentModificationException; import java.util.Iterator; import java.util.NoSuchElementException; import java.util.Objects; /** * Class HashMap. * @author shustovakv (mailto: shustovakv@mail.ru) * @since 27.09.2018 */ public class HashMap<K, V> implements Iterable { private Items[] array; private int defaultSize = 10; private int size = 0; private int modCount = 0; public HashMap(int capacity) { if (capacity < this.defaultSize) { capacity = this.defaultSize; } this.array = new Items[capacity]; } public HashMap() { this.array = new Items[this.defaultSize]; } public boolean insert(K key, V value) { int factor = this.array.length; if (this.size > factor * 2 / 3) { resize(0); factor = this.array.length; } int index = findPosition(key, factor); if (this.array[index] != null) { return false; } this.array[index] = new Items(key, value); this.modCount++; this.size++; return true; } private void resize(int moreSize) { Items[] newArray = new Items[this.array.length * 2 + moreSize]; int factor = newArray.length; for (int i = 0; i < array.length; i++) { if (this.array[i] != null) { int index = findPosition((K) array[i].key, factor); if (newArray[i] == null) { newArray[index] = this.array[i]; } else { restart(factor); break; } } } this.array = newArray; } private void restart(int factor) { resize(factor); } private int findPosition(K key, int factor) { if (key == null) { return 0; } int h = key.hashCode(); h ^= (h >>> 2) ^ (h >>> 3); h %= factor; return h >= 0 ? 
h : h * -1; } public V get(K key) { int index = findPosition(key, this.array.length); if (this.array[index] != null && this.array[index].key.equals(key)) { return (V) this.array[index].value; } return null; } public boolean delete(K key) { this.modCount int index = findPosition(key, this.array.length); if (this.array[index] != null && this.array[index].key.equals(key)) { this.array[index] = null; this.size return true; } return false; } @Override public Iterator<V> iterator() { return new Iterator<V>() { private int itIndex = 0; private int expectedModCount = modCount; @Override public boolean hasNext() { if (expectedModCount != modCount) { throw new ConcurrentModificationException("Only for read"); } while (itIndex < getCapacity()) { if (array[itIndex] != null) { return true; } else { itIndex++; } } return false; } @Override public V next() { if (!hasNext()) { throw new NoSuchElementException("No more elements"); } return (V) array[itIndex++].value; } }; } public void showHashMap() { for (int i = 0; i < array.length; i++) { if (array[i] != null) { System.out.println("Key: " + array[i].key + "Value: " + array[i].value); } } } public int getCapacity() { return this.array.length; } class Items<K, V> { private K key; private V value; public Items(K key, V value) { this.key = key; this.value = value; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } Items thing = (Items) o; return Objects.equals(key, thing.key) && Objects.equals(value, thing.value); } @Override public int hashCode() { return Objects.hash(key, value); } } }
package org.pentaho.di.core.row;

import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.math.BigDecimal;
import java.math.MathContext;
import java.util.Calendar;
import java.util.Date;
import java.util.Locale;
import java.util.zip.CheckedInputStream;
import java.util.zip.Adler32;
import java.util.zip.CRC32;
import java.security.MessageDigest;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.codec.language.Metaphone;
import org.apache.commons.codec.language.DoubleMetaphone;
import org.apache.commons.codec.language.RefinedSoundex;
import org.apache.commons.codec.language.Soundex;
import org.pentaho.di.core.util.Utils;
import org.pentaho.di.core.vfs.KettleVFS;
import org.pentaho.di.core.xml.XMLCheck;
import org.apache.commons.vfs.FileObject;
import org.apache.commons.vfs.provider.local.LocalFile;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.exception.KettleValueException;
import org.pentaho.di.core.fileinput.CharsetToolkit;
import com.wcohen.ss.Jaro;
import com.wcohen.ss.JaroWinkler;
import com.wcohen.ss.NeedlemanWunsch;

/**
 * Static helper operations on (ValueMetaInterface, Object) value pairs:
 * string metrics, phonetic encodings, checksums, arithmetic and date math.
 * The helpers visible here are null-tolerant: a null data argument yields a
 * null (or zero) result rather than an exception.
 */
public class ValueDataUtil {

    /**
     * @deprecated Use {@link Const#ltrim(String)} instead
     */
    public static final String leftTrim(String string) {
        return Const.ltrim(string);
    }

    /**
     * @deprecated Use {@link Const#rtrim(String)} instead
     */
    public static final String rightTrim(String string) {
        return Const.rtrim(string);
    }

    /**
     * Determines whether or not a character is considered a space.
     * A character is considered a space in Kettle if it is a space, a tab, a newline or a cariage return.
     * @param c The character to verify if it is a space.
     * @return true if the character is a space. false otherwise.
     * @deprecated Use {@link Const#isSpace(char)} instead
     */
    public static final boolean isSpace(char c) {
        return Const.isSpace(c);
    }

    /**
     * Trims a string: removes the leading and trailing spaces of a String.
     * @param string The string to trim
     * @return The trimmed string.
     * @deprecated Use {@link Const#trim(String)} instead
     */
    public static final String trim(String string) {
        return Const.trim(string);
    }

    /**Levenshtein distance (LD) is a measure of the similarity between two strings,
     * which we will refer to as the source string (s) and the target string (t).
     * The distance is the number of deletions, insertions, or substitutions required to transform s into t.
     */
    public static Long getLevenshtein_Distance(ValueMetaInterface metaA, Object dataA, ValueMetaInterface metaB, Object dataB) {
        // Null propagates: no distance is defined when either side is missing.
        if(dataA==null || dataB==null) return null;
        return new Long(StringUtils.getLevenshteinDistance(dataA.toString(),dataB.toString()));
    }

    /**DamerauLevenshtein distance is a measure of the similarity between two strings,
     * which we will refer to as the source string (s) and the target string (t).
     * The distance is the number of deletions, insertions, or substitutions required to transform s into t.
     */
    public static Long getDamerauLevenshtein_Distance(ValueMetaInterface metaA, Object dataA, ValueMetaInterface metaB, Object dataB) {
        if(dataA==null || dataB==null) return null;
        return new Long(Utils.getDamerauLevenshteinDistance(dataA.toString(),dataB.toString()));
    }

    /**NeedlemanWunsch distance is a measure of the similarity between two strings,
     * which we will refer to as the source string (s) and the target string (t).
     * The distance is the number of deletions, insertions, or substitutions required to transform s into t.
     */
    public static Long getNeedlemanWunsch_Distance(ValueMetaInterface metaA, Object dataA, ValueMetaInterface metaB, Object dataB) {
        if(dataA==null || dataB==null) return null;
        return new Long((int) new NeedlemanWunsch().score(dataA.toString(),dataB.toString()));
    }

    /**Jaro similitude is a measure of the similarity between two strings,
     * which we will refer to as the source string (s) and the target string (t).
*/
    public static Double getJaro_Similitude(ValueMetaInterface metaA, Object dataA, ValueMetaInterface metaB, Object dataB) {
        if(dataA==null || dataB==null) return null;
        return new Double(new Jaro().score(dataA.toString(),dataB.toString()));
    }

    /**JaroWinkler similitude is a measure of the similarity between two strings,
     * which we will refer to as the source string (s) and the target string (t).
     */
    public static Double getJaroWinkler_Similitude(ValueMetaInterface metaA, Object dataA, ValueMetaInterface metaB, Object dataB) {
        if(dataA==null || dataB==null) return null;
        return new Double(new JaroWinkler().score(dataA.toString(),dataB.toString()));
    }

    /** Metaphone phonetic encoding of the value's string form. */
    public static String get_Metaphone(ValueMetaInterface metaA, Object dataA) {
        if(dataA==null) return null;
        return (new Metaphone()).metaphone(dataA.toString());
    }

    /** Double-Metaphone phonetic encoding of the value's string form. */
    public static String get_Double_Metaphone(ValueMetaInterface metaA, Object dataA) {
        if(dataA==null) return null;
        return (new DoubleMetaphone()).doubleMetaphone(dataA.toString());
    }

    /** Soundex phonetic encoding of the value's string form. */
    public static String get_SoundEx(ValueMetaInterface metaA, Object dataA) {
        if(dataA==null) return null;
        return (new Soundex()).encode(dataA.toString());
    }

    /** Refined-Soundex phonetic encoding of the value's string form. */
    public static String get_RefinedSoundEx(ValueMetaInterface metaA, Object dataA) {
        if(dataA==null) return null;
        return (new RefinedSoundex()).encode(dataA.toString());
    }

    /** Capitalizes words in the value's string form (delegates to Const). */
    public static String initCap(ValueMetaInterface metaA, Object dataA) {
        if(dataA==null) return null;
        return Const.initCap(dataA.toString());
    }

    /** Upper-cases the value's string form. */
    public static String upperCase(ValueMetaInterface metaA, Object dataA) {
        if(dataA==null) return null;
        return dataA.toString().toUpperCase();
    }

    /** Lower-cases the value's string form. */
    public static String lowerCase(ValueMetaInterface metaA, Object dataA) {
        if(dataA==null) return null;
        return dataA.toString().toLowerCase();
    }

    /** XML-escapes the value's string form. */
    public static String escapeXML(ValueMetaInterface metaA, Object dataA) {
        if(dataA==null) return null;
        return Const.escapeXML(dataA.toString());
    }

    /** Reverses XML escaping on the value's string form. */
    public static String unEscapeXML(ValueMetaInterface metaA, Object dataA) {
        if(dataA==null) return null;
        return Const.unEscapeXml(dataA.toString());
    }

    /** HTML-escapes the value's string form. */
    public static String escapeHTML(ValueMetaInterface metaA, Object dataA) {
        if(dataA==null) return null;
        return Const.escapeHtml(dataA.toString());
    }

    /** Reverses HTML escaping on the value's string form. */
    public static String unEscapeHTML(ValueMetaInterface metaA, Object dataA) {
        if(dataA==null) return null;
        return Const.unEscapeHtml(dataA.toString());
    }

    /** SQL-escapes the value's string form. */
    public static String escapeSQL(ValueMetaInterface metaA, Object dataA) {
        if(dataA==null) return null;
        return Const.escapeSQL(dataA.toString());
    }

    /** Wraps the value's string form in an XML CDATA section. */
    public static String useCDATA(ValueMetaInterface metaA, Object dataA) {
        if(dataA==null) return null;
        return "<![CDATA["+dataA.toString()+"]]>";
    }

    /** Removes carriage returns from the value's string form. */
    public static String removeCR(ValueMetaInterface metaA, Object dataA) {
        if(dataA==null) return null;
        return Const.removeCR(dataA.toString());
    }

    /** Removes line feeds from the value's string form. */
    public static String removeLF(ValueMetaInterface metaA, Object dataA) {
        if(dataA==null) return null;
        return Const.removeLF(dataA.toString());
    }

    /** Removes CR/LF pairs from the value's string form. */
    public static String removeCRLF(ValueMetaInterface metaA, Object dataA) {
        if(dataA==null) return null;
        return Const.removeCRLF(dataA.toString());
    }

    /** Removes tab characters from the value's string form. */
    public static String removeTAB(ValueMetaInterface metaA, Object dataA) {
        if(dataA==null) return null;
        return Const.removeTAB(dataA.toString());
    }

    /** Keeps only the digit characters of the value's string form. */
    public static String getDigits(ValueMetaInterface metaA, Object dataA) {
        if(dataA==null) return null;
        return Const.getDigitsOnly(dataA.toString());
    }

    /** Removes all digit characters from the value's string form. */
    public static String removeDigits(ValueMetaInterface metaA, Object dataA) {
        if(dataA==null) return null;
        return Const.removeDigits(dataA.toString());
    }

    /** @return length of the value's string form; 0 for null. */
    public static long stringLen(ValueMetaInterface metaA, Object dataA) {
        if(dataA==null) return 0;
        return dataA.toString().length();
    }

    /**
     * Hex digest (algorithm chosen by {@code type}, e.g. "MD5") of the file
     * whose path is {@code dataA}.
     * NOTE(review): relies on InputStream.available() for the file size, does
     * not check the read() return value, and swallows every exception
     * (returning null on any failure) — confirm this best-effort behaviour
     * is intended before changing it.
     */
    public static String createChecksum(ValueMetaInterface metaA, Object dataA, String type) {
        String md5Hash = null;
        FileInputStream in=null;
        try {
            in = new FileInputStream(dataA.toString());
            int bytes = in.available();
            byte[] buffer = new byte[bytes];
            in.read(buffer);
            StringBuffer md5HashBuff = new StringBuffer(32);
            byte[] b =
            MessageDigest.getInstance(type).digest(buffer);
            int len = b.length;
            for (int x=0; x<len; x++) {
                md5HashBuff.append(String.format("%02x",b[x]));
            }
            md5Hash=md5HashBuff.toString();
        }catch (Exception e){}
        finally{try{if(in!=null) in.close();}catch(Exception e){};}
        return md5Hash;
    }

    /**
     * CRC32 checksum of the file whose path is {@code dataA}; returns 0 on
     * any error (errors are deliberately swallowed).
     */
    public static Long ChecksumCRC32(ValueMetaInterface metaA, Object dataA) {
        long checksum =0;
        FileObject file=null;
        try {
            file=KettleVFS.getFileObject(dataA.toString());
            CheckedInputStream cis = null;
            // Computer CRC32 checksum
            cis = new CheckedInputStream( (FileInputStream)((LocalFile)file).getInputStream(), new CRC32());
            byte[] buf = new byte[128];
            while(cis.read(buf) >= 0) {
            }
            checksum = cis.getChecksum().getValue();
        } catch (Exception e) {
        }finally {
            if(file!=null) try{file.close();file=null;}catch(Exception e){};
        }
        return checksum;
    }

    /**
     * Adler-32 checksum of the file whose path is {@code dataA}; returns 0
     * on any error (errors are deliberately swallowed).
     */
    public static Long ChecksumAdler32(ValueMetaInterface metaA, Object dataA) {
        long checksum =0;
        FileObject file=null;
        try {
            file=KettleVFS.getFileObject(dataA.toString());
            CheckedInputStream cis = null;
            // Computer Adler-32 checksum
            cis = new CheckedInputStream( (FileInputStream)((LocalFile)file).getInputStream(), new Adler32());
            byte[] buf = new byte[128];
            while(cis.read(buf) >= 0) {
            }
            checksum = cis.getChecksum().getValue();
        } catch (Exception e) {
            //throw new Exception(e);
        }finally {
            if(file!=null) try{file.close();file=null;}catch(Exception e){};
        }
        return checksum;
    }

    /**
     * Type-dispatched addition driven by metaA's type: string concatenation,
     * numeric addition, boolean OR or BigDecimal add.
     */
    public static Object plus(ValueMetaInterface metaA, Object dataA, ValueMetaInterface metaB, Object dataB) throws KettleValueException {
        if (dataA==null || dataB==null) return null;
        switch(metaA.getType()) {
        case ValueMetaInterface.TYPE_STRING :
            return metaA.getString(dataA)+metaB.getString(dataB);
        case ValueMetaInterface.TYPE_NUMBER :
            return new Double( metaA.getNumber(dataA).doubleValue()+metaB.getNumber(dataB).doubleValue());
        case ValueMetaInterface.TYPE_INTEGER :
            return new Long( metaA.getInteger(dataA).longValue()+metaB.getInteger(dataB).longValue());
        case ValueMetaInterface.TYPE_BOOLEAN
: return Boolean.valueOf( metaA.getBoolean(dataA).booleanValue() || metaB.getBoolean(dataB).booleanValue()); case ValueMetaInterface.TYPE_BIGNUMBER : return metaA.getBigNumber(dataA).add( metaB.getBigNumber(dataB)); default: throw new KettleValueException("The 'plus' function only works on numeric data and Strings." ); } } public static Object plus3(ValueMetaInterface metaA, Object dataA, ValueMetaInterface metaB, Object dataB, ValueMetaInterface metaC, Object dataC) throws KettleValueException { if (dataA==null || dataB==null || dataC==null) return null; switch(metaA.getType()) { case ValueMetaInterface.TYPE_STRING : return metaA.getString(dataA)+metaB.getString(dataB)+metaC.getString(dataC); case ValueMetaInterface.TYPE_NUMBER : return new Double( metaA.getNumber(dataA).doubleValue()+metaB.getNumber(dataB).doubleValue()+metaC.getNumber(dataC).doubleValue()); case ValueMetaInterface.TYPE_INTEGER : return new Long( metaA.getInteger(dataA).longValue()+metaB.getInteger(dataB).longValue()+metaC.getInteger(dataC).longValue()); case ValueMetaInterface.TYPE_BOOLEAN : return Boolean.valueOf( metaA.getBoolean(dataA).booleanValue() || metaB.getBoolean(dataB).booleanValue() || metaB.getBoolean(dataC).booleanValue()); case ValueMetaInterface.TYPE_BIGNUMBER : return metaA.getBigNumber(dataA).add( metaB.getBigNumber(dataB).add( metaC.getBigNumber(dataC))); default: throw new KettleValueException("The 'plus' function only works on numeric data and Strings." 
); } } public static Object sum(ValueMetaInterface metaA, Object dataA, ValueMetaInterface metaB, Object dataB) throws KettleValueException { if (dataA==null && dataB==null) return null; if (dataA==null && dataB!=null) return metaA.convertData(metaB, dataB); if (dataA!=null && dataB==null) return dataA; return plus(metaA, dataA, metaB, dataB); } public static Object loadFileContentInBinary(ValueMetaInterface metaA, Object dataA) throws KettleValueException { if (dataA==null) return null; FileObject file=null; FileInputStream fis=null; try { file=KettleVFS.getFileObject(dataA.toString()); fis=(FileInputStream)((LocalFile)file).getInputStream(); int fileSize=(int)file.getContent().getSize(); byte[] content=Const.createByteArray(fileSize); fis.read(content, 0,fileSize); return content; } catch (Exception e) { throw new KettleValueException(e); } finally { try{ if(fis!=null) fis.close(); fis=null; if(file!=null) file.close(); file=null; }catch(Exception e){}; } } public static Object minus(ValueMetaInterface metaA, Object dataA, ValueMetaInterface metaB, Object dataB) throws KettleValueException { if (dataA==null || dataB==null) return null; switch(metaA.getType()) { case ValueMetaInterface.TYPE_NUMBER : return new Double( metaA.getNumber(dataA).doubleValue()-metaB.getNumber(dataB).doubleValue()); case ValueMetaInterface.TYPE_INTEGER : return new Long( metaA.getInteger(dataA).longValue()-metaB.getInteger(dataB).longValue()); case ValueMetaInterface.TYPE_BIGNUMBER : return metaA.getBigNumber(dataA).subtract( metaB.getBigNumber(dataB)); case ValueMetaInterface.TYPE_DATE : return new Long( metaA.getInteger(dataA).longValue()-metaB.getInteger(dataB).longValue()); default: throw new KettleValueException("The 'minus' function only works on numeric data." 
);
        }
    }

    /**
     * Multiplication with string support: number*string (or string*number)
     * repeats the string; otherwise numeric multiplication.
     */
    public static Object multiply(ValueMetaInterface metaA, Object dataA, ValueMetaInterface metaB, Object dataB) throws KettleValueException {
        if (dataA==null || dataB==null) return null;
        if ((metaB.isString() && metaA.isNumeric()) || (metaB.isNumeric() && metaA.isString())) {
            return multiplyString(metaA, dataA, metaB, dataB);
        }
        return multiplyNumeric(metaA, dataA, metaB, dataB);
    }

    /** Numeric multiplication dispatched on metaA's type. */
    protected static Object multiplyNumeric(ValueMetaInterface metaA, Object dataA, ValueMetaInterface metaB, Object dataB) throws KettleValueException {
        switch(metaA.getType()) {
        case ValueMetaInterface.TYPE_NUMBER :
            return multiplyDoubles(metaA.getNumber(dataA), metaB.getNumber(dataB));
        case ValueMetaInterface.TYPE_INTEGER :
            return multiplyLongs(metaA.getInteger(dataA), metaB.getInteger(dataB));
        case ValueMetaInterface.TYPE_BIGNUMBER :
            return multiplyBigDecimals(metaA.getBigNumber(dataA), metaB.getBigNumber(dataB), null);
        default:
            throw new KettleValueException("The 'multiply' function only works on numeric data optionally multiplying strings."
            );
        }
    }

    public static Double multiplyDoubles(Double a, Double b) {
        return new Double(a.doubleValue() * b.doubleValue());
    }

    public static Long multiplyLongs(Long a, Long b) {
        return new Long(a.longValue() * b.longValue());
    }

    /** BigDecimal multiply; the math context defaults to DECIMAL64 when null. */
    public static BigDecimal multiplyBigDecimals(BigDecimal a, BigDecimal b, MathContext mc) {
        if (mc == null) mc = MathContext.DECIMAL64;
        return a.multiply(b, mc);
    }

    /**
     * "String times n": repeats the string operand n times, where n comes
     * from the numeric operand. n==0 yields the empty string.
     */
    protected static Object multiplyString(ValueMetaInterface metaA, Object dataA, ValueMetaInterface metaB, Object dataB) throws KettleValueException {
        StringBuffer s;
        String append="";
        int n;
        if (metaB.isString()) {
            s=new StringBuffer(metaB.getString(dataB));
            append=metaB.getString(dataB);
            n=metaA.getInteger(dataA).intValue();
        } else {
            s=new StringBuffer(metaA.getString(dataA));
            append=metaA.getString(dataA);
            n=metaB.getInteger(dataB).intValue();
        }
        // The buffer already holds one copy, so append n-1 more (or clear
        // the buffer entirely when n is zero).
        if (n==0) s.setLength(0);
        else for (int i=1;i<n;i++) s.append(append);
        return s.toString();
    }

    /** Type-dispatched division A / B. */
    public static Object divide(ValueMetaInterface metaA, Object dataA, ValueMetaInterface metaB, Object dataB) throws KettleValueException {
        if (dataA==null || dataB==null) return null;
        switch(metaA.getType()) {
        case ValueMetaInterface.TYPE_NUMBER :
            return divideDoubles(metaA.getNumber(dataA), metaB.getNumber(dataB));
        case ValueMetaInterface.TYPE_INTEGER :
            return divideLongs(metaA.getInteger(dataA), metaB.getInteger(dataB));
        case ValueMetaInterface.TYPE_BIGNUMBER :
            return divideBigDecimals(metaA.getBigNumber(dataA), metaB.getBigNumber(dataB), null);
        default:
            throw new KettleValueException("The 'divide' function only works on numeric data."
            );
        }
    }

    public static Double divideDoubles(Double a, Double b) {
        return new Double(a.doubleValue() / b.doubleValue());
    }

    public static Long divideLongs(Long a, Long b) {
        return new Long(a.longValue() / b.longValue());
    }

    /** BigDecimal divide; the math context defaults to DECIMAL64 when null. */
    public static BigDecimal divideBigDecimals(BigDecimal a, BigDecimal b, MathContext mc) {
        if (mc == null) mc = MathContext.DECIMAL64;
        return a.divide(b, mc);
    }

    /** Square root; the integer branch rounds to the nearest long. */
    public static Object sqrt(ValueMetaInterface metaA, Object dataA) throws KettleValueException {
        if (dataA==null) return null;
        switch(metaA.getType()) {
        case ValueMetaInterface.TYPE_NUMBER :
            return new Double( Math.sqrt( metaA.getNumber(dataA).doubleValue()) );
        case ValueMetaInterface.TYPE_INTEGER :
            return new Long( Math.round( Math.sqrt( metaA.getNumber(dataA).doubleValue()) ) );
        case ValueMetaInterface.TYPE_BIGNUMBER :
            return BigDecimal.valueOf( Math.sqrt( metaA.getNumber(dataA).doubleValue()) );
        default:
            throw new KettleValueException("The 'sqrt' function only works on numeric data." );
        }
    }

    /**
     * 100 * A / B
     *
     * @param metaA
     * @param dataA
     * @param metaB
     * @param dataB
     * @return
     * @throws KettleValueException
     */
    public static Object percent1(ValueMetaInterface metaA, Object dataA, ValueMetaInterface metaB, Object dataB) throws KettleValueException {
        if (dataA==null || dataB==null) return null;
        switch(metaA.getType()) {
        case ValueMetaInterface.TYPE_NUMBER :
            return divideDoubles(multiplyDoubles(100.0D, metaA.getNumber(dataA)), metaB.getNumber(dataB));
        case ValueMetaInterface.TYPE_INTEGER :
            return divideLongs(multiplyLongs(100L, metaA.getInteger(dataA)), metaB.getInteger(dataB));
        case ValueMetaInterface.TYPE_BIGNUMBER :
            return divideBigDecimals(multiplyBigDecimals(metaA.getBigNumber(dataA), new BigDecimal(100), null), metaB.getBigNumber(dataB), null);
        default:
            throw new KettleValueException("The 'A/B in %' function only works on numeric data" );
        }
    }

    /**
     * A - ( A * B / 100 )
     *
     * @param metaA
     * @param dataA
     * @param metaB
     * @param dataB
     * @return
     * @throws KettleValueException
     */
    public static Object percent2(ValueMetaInterface metaA, Object dataA, ValueMetaInterface metaB, Object dataB) throws KettleValueException {
        if (dataA==null || dataB==null) return null;
        switch(metaA.getType()) {
        case ValueMetaInterface.TYPE_NUMBER :
            return new Double( metaA.getNumber(dataA).doubleValue() - divideDoubles(multiplyDoubles(metaA.getNumber(dataA), metaB.getNumber(dataB)), 100.0D));
        case ValueMetaInterface.TYPE_INTEGER :
            return new Long( metaA.getInteger(dataA).longValue() - divideLongs(multiplyLongs(metaA.getInteger(dataA), metaB.getInteger(dataB)), 100L));
        case ValueMetaInterface.TYPE_BIGNUMBER :
            return metaA.getBigNumber(dataA).subtract( divideBigDecimals(metaA.getBigNumber(dataA), multiplyBigDecimals(metaB.getBigNumber(dataB), new BigDecimal(100), null), null));
        default:
            throw new KettleValueException("The 'A-B%' function only works on numeric data" );
        }
    }

    /**
     * A + ( A * B / 100 )
     *
     * @param metaA
     * @param dataA
     * @param metaB
     * @param dataB
     * @return
     * @throws KettleValueException
     */
    public static Object percent3(ValueMetaInterface metaA, Object dataA, ValueMetaInterface metaB, Object dataB) throws KettleValueException {
        if (dataA==null || dataB==null) return null;
        switch(metaA.getType()) {
        case ValueMetaInterface.TYPE_NUMBER :
            return new Double( metaA.getNumber(dataA).doubleValue() + divideDoubles(multiplyDoubles(metaA.getNumber(dataA), metaB.getNumber(dataB)), 100.0D));
        case ValueMetaInterface.TYPE_INTEGER :
            return new Long( metaA.getInteger(dataA).longValue() + divideLongs(multiplyLongs(metaA.getInteger(dataA), metaB.getInteger(dataB)), 100L));
        case ValueMetaInterface.TYPE_BIGNUMBER :
            return metaA.getBigNumber(dataA).add( divideBigDecimals(metaA.getBigNumber(dataA), multiplyBigDecimals(metaB.getBigNumber(dataB), new BigDecimal(100), null), null));
        default:
            throw new KettleValueException("The 'A+B%' function only works on numeric data" );
        }
    }

    /**
     * A + B * C
     *
     * @param metaA
     * @param dataA
     * @param metaB
     * @param dataB
     * @return
     * @throws KettleValueException
*/ public static Object combination1(ValueMetaInterface metaA, Object dataA, ValueMetaInterface metaB, Object dataB, ValueMetaInterface metaC, Object dataC) throws KettleValueException { if (dataA==null || dataB==null || dataC==null) return null; switch(metaA.getType()) { case ValueMetaInterface.TYPE_NUMBER : return new Double( metaA.getNumber(dataA).doubleValue() + ( metaB.getNumber(dataB).doubleValue() * metaC.getNumber(dataC).doubleValue() )); case ValueMetaInterface.TYPE_INTEGER : return new Long( metaA.getInteger(dataA).longValue() + ( metaB.getInteger(dataB).longValue() * metaC.getInteger(dataC).longValue() )); case ValueMetaInterface.TYPE_BIGNUMBER : return metaA.getBigNumber(dataA).add(multiplyBigDecimals(metaB.getBigNumber(dataB), metaC.getBigNumber(dataC), null)); default: throw new KettleValueException("The 'combination1' function only works on numeric data" ); } } /** * SQRT( A*A + B*B ) * * @param metaA * @param dataA * @param metaB * @param dataB * @return * @throws KettleValueException */ public static Object combination2(ValueMetaInterface metaA, Object dataA, ValueMetaInterface metaB, Object dataB) throws KettleValueException { if (dataA==null || dataB==null) return null; switch(metaA.getType()) { case ValueMetaInterface.TYPE_NUMBER : return new Double( Math.sqrt( metaA.getNumber(dataA).doubleValue() * metaA.getNumber(dataA).doubleValue() + metaB.getNumber(dataB).doubleValue() * metaB.getNumber(dataB).doubleValue() )); case ValueMetaInterface.TYPE_INTEGER : return new Long( Math.round( Math.sqrt( metaA.getInteger(dataA).longValue() * metaA.getInteger(dataA).longValue() + metaB.getInteger(dataB).longValue() / metaB.getInteger(dataB).longValue() ))); case ValueMetaInterface.TYPE_BIGNUMBER : return BigDecimal.valueOf( Math.sqrt( metaA.getNumber(dataA).doubleValue() * metaA.getNumber(dataA).doubleValue() + metaB.getNumber(dataB).doubleValue() * metaB.getNumber(dataB).doubleValue() )); default: throw new KettleValueException("The 'combination2' function 
only works on numeric data" ); } } public static Object round(ValueMetaInterface metaA, Object dataA) throws KettleValueException { if (dataA==null) return null; switch(metaA.getType()) { case ValueMetaInterface.TYPE_NUMBER : return new Double( Math.round( metaA.getNumber(dataA).doubleValue()) ); case ValueMetaInterface.TYPE_INTEGER : return metaA.getInteger(dataA); case ValueMetaInterface.TYPE_BIGNUMBER : return new BigDecimal( Math.round( metaA.getNumber(dataA).doubleValue()) ); default: throw new KettleValueException("The 'round' function only works on numeric data" ); } } public static Object abs(ValueMetaInterface metaA, Object dataA) throws KettleValueException { if (dataA==null) return null; switch(metaA.getType()) { case ValueMetaInterface.TYPE_NUMBER : return new Double( Math.abs(metaA.getNumber(dataA).doubleValue()) ); case ValueMetaInterface.TYPE_INTEGER : return metaA.getInteger(Math.abs(metaA.getNumber(dataA).longValue()) ); case ValueMetaInterface.TYPE_BIGNUMBER : return new BigDecimal( Math.abs( metaA.getNumber(dataA).doubleValue()) ); default: throw new KettleValueException("The 'abs' function only works on numeric data" ); } } public static Object round(ValueMetaInterface metaA, Object dataA, ValueMetaInterface metaB, Object dataB) throws KettleValueException { if (dataA==null || dataB==null) return null; switch(metaA.getType()) { case ValueMetaInterface.TYPE_NUMBER : return new Double( Const.round( metaA.getNumber(dataA).doubleValue(), metaB.getInteger(dataB).intValue()) ); case ValueMetaInterface.TYPE_INTEGER : return metaA.getInteger(dataA); case ValueMetaInterface.TYPE_BIGNUMBER : // Round it to the desired number of digits. 
BigDecimal number = metaA.getBigNumber(dataA);
            return number.setScale( metaB.getInteger(dataB).intValue(), BigDecimal.ROUND_HALF_EVEN);
        default:
            throw new KettleValueException("The 'round' function only works on numeric data" );
        }
    }

    /**
     * NVL: returns A unless it is null, in which case B (read through metaB)
     * is returned, converted per metaA's declared type.
     */
    public static Object nvl(ValueMetaInterface metaA, Object dataA, ValueMetaInterface metaB, Object dataB) throws KettleValueException {
        switch(metaA.getType()) {
        case ValueMetaInterface.TYPE_STRING:
            if (dataA==null) return metaB.getString(dataB);
            else return metaA.getString(dataA);
        case ValueMetaInterface.TYPE_NUMBER :
            if (dataA==null) return metaB.getNumber(dataB);
            else return metaA.getNumber(dataA);
        case ValueMetaInterface.TYPE_INTEGER :
            if (dataA==null) return metaB.getInteger(dataB);
            else return metaA.getInteger(dataA);
        case ValueMetaInterface.TYPE_BIGNUMBER :
            if (dataA==null) return metaB.getBigNumber(dataB);
            else return metaA.getBigNumber(dataA);
        case ValueMetaInterface.TYPE_DATE:
            if (dataA==null) return metaB.getDate(dataB);
            else return metaA.getDate(dataA);
        case ValueMetaInterface.TYPE_BOOLEAN:
            if (dataA==null) return metaB.getBoolean(dataB);
            else return metaA.getBoolean(dataA);
        case ValueMetaInterface.TYPE_BINARY:
            if (dataA==null) return metaB.getBinary(dataB);
            else return metaA.getBinary(dataA);
        default:
            throw new KettleValueException("The 'nvl' function doesn't know how to handle data type "+metaA.getType() );
        }
    }

    /** Truncates the time-of-day portion of a date value. */
    public static Object removeTimeFromDate(ValueMetaInterface metaA, Object dataA) throws KettleValueException {
        if (metaA.isDate()) {
            Calendar cal = Calendar.getInstance();
            Date date = metaA.getDate(dataA);
            if (date!=null) {
                cal.setTime(date);
                return Const.removeTimeFromDate(date);
            }else return null;
        }
        throw new KettleValueException("The 'removeTimeFromDate' function only works with a date");
    }

    /** Adds a time-of-day string B (optional format mask C) to date A. */
    public static Object addTimeToDate(ValueMetaInterface metaA, Object dataA, ValueMetaInterface metaB, Object dataB,ValueMetaInterface metaC, Object dataC) throws KettleValueException {
        if(dataA==null) return null;
        if (metaA.isDate()) {
            try{
                if(dataC==null) return Const.addTimeToDate(metaA.getDate(dataA), metaB.getString(dataB), null);
                else return Const.addTimeToDate(metaA.getDate(dataA), metaB.getString(dataB), metaC.getString(dataC));
            }catch(Exception e) {throw new KettleValueException(e);}
        }
        throw new KettleValueException("The 'addTimeToDate' function only works with a date");
    }

    /** Adds B days to date A. */
    public static Object addDays(ValueMetaInterface metaA, Object dataA, ValueMetaInterface metaB, Object dataB) throws KettleValueException {
        if (metaA.isDate() && metaB.isInteger()) {
            Calendar cal = Calendar.getInstance();
            cal.setTime(metaA.getDate(dataA));
            cal.add(Calendar.DAY_OF_YEAR, metaB.getInteger(dataB).intValue());
            return cal.getTime();
        }
        throw new KettleValueException("The 'addDays' function only works with a date and an integer");
    }

    /**
     * Adds B months to date A, clamping the day-of-month to the last day of
     * the target month when necessary.
     */
    public static Object addMonths(ValueMetaInterface metaA, Object dataA, ValueMetaInterface metaB, Object dataB) throws KettleValueException {
        if (metaA.isDate() && metaB.isInteger()) {
            if (dataA!=null && dataB!=null) {
                Calendar cal = Calendar.getInstance();
                cal.setTime(metaA.getDate(dataA));
                int year = cal.get(Calendar.YEAR);
                int month = cal.get(Calendar.MONTH);
                int day = cal.get(Calendar.DAY_OF_MONTH);
                month+=metaB.getInteger(dataB).intValue();
                int newyear = year+(int)Math.floor(month/12);
                int newmonth = month%12;
                cal.set(newyear, newmonth, 1);
                int newday = cal.getActualMaximum(Calendar.DAY_OF_MONTH);
                if (newday<day) cal.set(Calendar.DAY_OF_MONTH, newday);
                else cal.set(Calendar.DAY_OF_MONTH, day);
                return( cal.getTime() );
            }
        }
        throw new KettleValueException("The 'add_months' function only works on a dates");
    }

    /**
     * This method introduces rounding errors based on time of day and timezones. It should not be used
     * except for the case where this rounding error is desired.
     * @deprecated
     */
    public static Object DateDiffLegacy(ValueMetaInterface metaA, Object dataA, ValueMetaInterface metaB, Object dataB) throws KettleValueException {
        if (metaA.isDate() && metaB.isDate()) {
            if (dataA!=null && dataB!=null) {
                // Get msec from each, and subtract.
                long diff = metaA.getDate(dataA).getTime() - metaB.getDate(dataB).getTime();
                return new Long(diff / (1000 * 60 * 60 * 24));
            }else return null;
        }
        throw new KettleValueException("The 'DateDiff' function only works with dates");
    }

    /**
     * Returns the number of days that have elapsed between dataA and dataB.
     *
     * @param metaA
     * @param dataA The "end date"
     * @param metaB
     * @param dataB The "start date"
     * @return Number of days
     * @throws KettleValueException
     */
    public static Object DateDiff(ValueMetaInterface metaA, Object dataA, ValueMetaInterface metaB, Object dataB) throws KettleValueException {
        if (metaA.isDate() && metaB.isDate()) {
            if (dataA!=null && dataB!=null) {
                Date startDate = metaB.getDate(dataB);
                Date endDate = metaA.getDate(dataA);
                Calendar stDateCal = Calendar.getInstance();
                Calendar endDateCal = Calendar.getInstance();
                stDateCal.setTime(startDate);
                endDateCal.setTime(endDate);
                // Shift both instants by the local zone offset before the
                // whole-day division.
                long endL = endDateCal.getTimeInMillis() + endDateCal.getTimeZone().getOffset( endDateCal.getTimeInMillis() );
                long startL = stDateCal.getTimeInMillis() + stDateCal.getTimeZone().getOffset( stDateCal.getTimeInMillis() );
                return new Long(((endL - startL) / 86400000));
            } else {
                return null;
            }
        }
        throw new KettleValueException("The 'DateDiff' function only works with dates");
    }

    /**
     * Counts working (non-Saturday/Sunday) days between A and B; the result
     * is negated when A is after B.
     */
    public static Object DateWorkingDiff(ValueMetaInterface metaA, Object dataA, ValueMetaInterface metaB, Object dataB) throws KettleValueException {
        if (metaA.isDate() && metaB.isDate()) {
            if (dataA!=null && dataB!=null) {
                Date fromDate = metaA.getDate(dataA) ;
                Date toDate=metaB.getDate(dataB);
                boolean singminus=false;
                // Normalize so fromDate <= toDate, remembering the sign.
                if (fromDate.after(toDate)) {
                    singminus=true;
                    Date temp = fromDate;
                    fromDate = toDate;
                    toDate = temp;
                }
                Calendar calFrom =
Calendar.getInstance(); calFrom.setTime(fromDate); Calendar calTo = Calendar.getInstance(); calTo.setTime(toDate); int iNoOfWorkingDays = 0; do { if (calFrom.get(Calendar.DAY_OF_WEEK) != Calendar.SATURDAY && calFrom.get(Calendar.DAY_OF_WEEK) != Calendar.SUNDAY) { iNoOfWorkingDays += 1; } calFrom.add(Calendar.DATE, 1); } while (calFrom.getTimeInMillis() < calTo.getTimeInMillis()); return new Long( singminus?-iNoOfWorkingDays:iNoOfWorkingDays); }else return null; } throw new KettleValueException("The 'DateDiff' function only works with dates"); } public static Object yearOfDate(ValueMetaInterface metaA, Object dataA) throws KettleValueException { if (dataA==null) return null; if (metaA.isDate()) { Calendar calendar = Calendar.getInstance(); calendar.setTime( metaA.getDate(dataA) ); return new Long( calendar.get(Calendar.YEAR) ); } throw new KettleValueException("The 'yearOfDate' function only works with dates"); } public static Object monthOfDate(ValueMetaInterface metaA, Object dataA) throws KettleValueException { if (dataA==null) return null; if (metaA.isDate()) { Calendar calendar = Calendar.getInstance(); calendar.setTime( metaA.getDate(dataA) ); return new Long( calendar.get(Calendar.MONTH) + 1 ); } throw new KettleValueException("The 'monthOfDate' function only works with dates"); } public static Object quarterOfDate(ValueMetaInterface metaA, Object dataA) throws KettleValueException { if (dataA==null) return null; if (metaA.isDate()) { Calendar calendar = Calendar.getInstance(); calendar.setTime( metaA.getDate(dataA) ); return new Long((calendar.get(Calendar.MONTH) + 3) / 3); } throw new KettleValueException("The 'monthOfDate' function only works with dates"); } public static Object dayOfYear(ValueMetaInterface metaA, Object dataA) throws KettleValueException { if (dataA==null) return null; if (metaA.isDate()) { Calendar calendar = Calendar.getInstance(); calendar.setTime( metaA.getDate(dataA) ); return new Long( calendar.get(Calendar.DAY_OF_YEAR) ); } throw 
new KettleValueException("The 'dayOfYear' function only works with dates"); } public static Object dayOfMonth(ValueMetaInterface metaA, Object dataA) throws KettleValueException { if (dataA==null) return null; if (metaA.isDate()) { Calendar calendar = Calendar.getInstance(); calendar.setTime( metaA.getDate(dataA) ); return new Long( calendar.get(Calendar.DAY_OF_MONTH) ); } throw new KettleValueException("The 'dayOfMonth' function only works with dates"); } public static Object dayOfWeek(ValueMetaInterface metaA, Object dataA) throws KettleValueException { if (dataA==null) return null; if (metaA.isDate()) { Calendar calendar = Calendar.getInstance(); calendar.setTime( metaA.getDate(dataA) ); return new Long( calendar.get(Calendar.DAY_OF_WEEK) ); } throw new KettleValueException("The 'dayOfWeek' function only works with dates"); } public static Object weekOfYear(ValueMetaInterface metaA, Object dataA) throws KettleValueException { if (dataA==null) return null; if (metaA.isDate()) { Calendar calendar = Calendar.getInstance(); calendar.setTime( metaA.getDate(dataA) ); return new Long( calendar.get(Calendar.WEEK_OF_YEAR) ); } throw new KettleValueException("The 'weekOfYear' function only works with dates"); } public static Object weekOfYearISO8601(ValueMetaInterface metaA, Object dataA) throws KettleValueException { if (dataA==null) return null; if (metaA.isDate()) { Calendar calendar = Calendar.getInstance(Locale.ENGLISH); calendar.setMinimalDaysInFirstWeek(4); calendar.setFirstDayOfWeek(Calendar.MONDAY); calendar.setTime( metaA.getDate(dataA) ); return new Long( calendar.get(Calendar.WEEK_OF_YEAR) ); } throw new KettleValueException("The 'weekOfYearISO8601' function only works with dates"); } public static Object yearOfDateISO8601(ValueMetaInterface metaA, Object dataA) throws KettleValueException { if (dataA==null) return null; if (metaA.isDate()) { Calendar calendar = Calendar.getInstance(Locale.ENGLISH); calendar.setMinimalDaysInFirstWeek(4); 
calendar.setFirstDayOfWeek(Calendar.MONDAY); calendar.setTime( metaA.getDate(dataA) ); int week = calendar.get(Calendar.WEEK_OF_YEAR); int month = calendar.get(Calendar.MONTH); int year = calendar.get(Calendar.YEAR); // fix up for the year taking into account ISO8601 weeks if ( week >= 52 && month == 0 ) year if ( week <= 2 && month == 11 ) year++; return new Long( year ); } throw new KettleValueException("The 'yearOfDateISO8601' function only works with dates"); } /** * Change a hexadecimal string into normal ASCII representation. E.g. if Value * contains string "61" afterwards it would contain value "a". If the * hexadecimal string is of odd length a leading zero will be used. * * Note that only the low byte of a character will be processed, this * is for binary transformations. * * @return Value itself * @throws KettleValueException */ public static String hexToByteDecode(ValueMetaInterface meta, Object data) throws KettleValueException { if (meta.isNull(data)) { return null; } String hexString = meta.getString(data); int len = hexString.length(); char chArray[] = new char[(len + 1) / 2]; boolean evenByte = true; int nextByte = 0; // we assume a leading 0 if the length is not even. if ((len % 2) == 1) evenByte = false; int nibble; int i, j; for (i = 0, j = 0; i < len; i++) { char c = hexString.charAt(i); if ((c >= '0') && (c <= '9')) nibble = c - '0'; else if ((c >= 'A') && (c <= 'F')) nibble = c - 'A' + 0x0A; else if ((c >= 'a') && (c <= 'f')) nibble = c - 'a' + 0x0A; else throw new KettleValueException("invalid hex digit '" + c + "'."); if (evenByte) { nextByte = (nibble << 4); } else { nextByte += nibble; chArray[j] = (char)nextByte; j++; } evenByte = ! evenByte; } return new String(chArray); } /** * Change a string into its hexadecimal representation. E.g. if Value * contains string "a" afterwards it would contain value "0061". * * Note that transformations happen in groups of 4 hex characters, so * the value of a characters is always in the range 0-65535. 
* * @return * @throws KettleValueException */ public static String byteToHexEncode(ValueMetaInterface metaA, Object dataA) throws KettleValueException { if (dataA==null) return null; final char hexDigits[] = { '0','1','2','3','4','5','6','7','8','9','A','B','C','D','E','F' }; String hex = metaA.getString(dataA); char[] s = hex.toCharArray(); StringBuffer hexString = new StringBuffer(2 * s.length); for (int i = 0; i < s.length; i++) { hexString.append(hexDigits[(s[i] & 0x00F0) >> 4]); // hi nibble hexString.append(hexDigits[s[i] & 0x000F]); // lo nibble } return hexString.toString(); } /** * Change a string into its hexadecimal representation. E.g. if Value * contains string "a" afterwards it would contain value "0061". * * Note that transformations happen in groups of 4 hex characters, so * the value of a characters is always in the range 0-65535. * * @return A string with Hex code * @throws KettleValueException In case of a data conversion problem. */ public static String charToHexEncode(ValueMetaInterface meta, Object data) throws KettleValueException { final char hexDigits[] = { '0','1','2','3','4','5','6','7','8','9','A','B','C','D','E','F' }; if (meta.isNull(data)) { return null; } String hex = meta.getString(data); char[] s = hex.toCharArray(); StringBuffer hexString = new StringBuffer(2 * s.length); for (int i = 0; i < s.length; i++) { hexString.append(hexDigits[(s[i] & 0xF000) >> 12]); // hex 1 hexString.append(hexDigits[(s[i] & 0x0F00) >> 8]); // hex 2 hexString.append(hexDigits[(s[i] & 0x00F0) >> 4]); // hex 3 hexString.append(hexDigits[s[i] & 0x000F]); // hex 4 } return hexString.toString(); } /** * Change a hexadecimal string into normal ASCII representation. E.g. if Value * contains string "61" afterwards it would contain value "a". If the * hexadecimal string is of a wrong length leading zeroes will be used. * * Note that transformations happen in groups of 4 hex characters, so * the value of a characters is always in the range 0-65535. 
* * @return A hex-to-char decoded String * @throws KettleValueException */ public static String hexToCharDecode(ValueMetaInterface meta, Object data) throws KettleValueException { if (meta.isNull(data)) { return null; } String hexString = meta.getString(data); int len = hexString.length(); char chArray[] = new char[(len + 3) / 4]; int charNr; int nextChar = 0; // we assume a leading 0s if the length is not right. charNr = (len % 4); if ( charNr == 0 ) charNr = 4; int nibble; int i, j; for (i = 0, j = 0; i < len; i++) { char c = hexString.charAt(i); if ((c >= '0') && (c <= '9')) nibble = c - '0'; else if ((c >= 'A') && (c <= 'F')) nibble = c - 'A' + 0x0A; else if ((c >= 'a') && (c <= 'f')) nibble = c - 'a' + 0x0A; else throw new KettleValueException("invalid hex digit '" + c + "'."); if (charNr == 4) { nextChar = (nibble << 12); charNr } else if (charNr == 3) { nextChar += (nibble << 8); charNr } else if (charNr == 2) { nextChar += (nibble << 4); charNr } else // charNr == 1 { nextChar += nibble; chArray[j] = (char)nextChar; charNr = 4; j++; } } return new String(chArray); } /** * Right pad a string: adds spaces to a string until a certain length. * If the length is smaller then the limit specified, the String is truncated. * @param ret The string to pad * @param limit The desired length of the padded string. * @return The padded String. */ public static final String rightPad(String ret, int limit) { if (ret == null) return rightPad(new StringBuffer(), limit); else return rightPad(new StringBuffer(ret), limit); } /** * Right pad a StringBuffer: adds spaces to a string until a certain length. * If the length is smaller then the limit specified, the String is truncated. * @param ret The StringBuffer to pad * @param limit The desired length of the padded string. * @return The padded String. 
*/ public static final String rightPad(StringBuffer ret, int limit) { int len = ret.length(); int l; if (len > limit) { ret.setLength(limit); } else { for (l = len; l < limit; l++) ret.append(' '); } return ret.toString(); } /** * Replace value occurances in a String with another value. * @param string The original String. * @param repl The text to replace * @param with The new text bit * @return The resulting string with the text pieces replaced. */ public static final String replace(String string, String repl, String with) { StringBuffer str = new StringBuffer(string); for (int i = str.length() - 1; i >= 0; i { if (str.substring(i).startsWith(repl)) { str.delete(i, i + repl.length()); str.insert(i, with); } } return str.toString(); } /** * Alternate faster version of string replace using a stringbuffer as input. * * @param str The string where we want to replace in * @param code The code to search for * @param repl The replacement string for code */ public static void replaceBuffer(StringBuffer str, String code, String repl) { int clength = code.length(); int i = str.length() - clength; while (i >= 0) { String look = str.substring(i, i + clength); if (look.equalsIgnoreCase(code)) // Look for a match! { str.replace(i, i + clength, repl); } i } } /** * Count the number of spaces to the left of a text. (leading) * @param field The text to examine * @return The number of leading spaces found. */ public static final int nrSpacesBefore(String field) { int nr = 0; int len = field.length(); while (nr < len && field.charAt(nr) == ' ') { nr++; } return nr; } /** * Count the number of spaces to the right of a text. (trailing) * @param field The text to examine * @return The number of trailing spaces found. */ public static final int nrSpacesAfter(String field) { int nr = 0; int len = field.length(); while (nr < len && field.charAt(field.length() - 1 - nr) == ' ') { nr++; } return nr; } /** * Checks whether or not a String consists only of spaces. 
* @param str The string to check * @return true if the string has nothing but spaces. */ public static final boolean onlySpaces(String str) { for (int i = 0; i < str.length(); i++) if (!isSpace(str.charAt(i))) return false; return true; } /** * Checks an xml file is well formed. * @param metaA The ValueMetaInterface * @param dataA The value (filename) * @return true if the file is well formed. */ public static boolean isXMLFileWellFormed(ValueMetaInterface metaA, Object dataA) { if(dataA==null) return false; String filename=dataA.toString(); FileObject file=null; try { file=KettleVFS.getFileObject(filename); return XMLCheck.isXMLFileWellFormed(file); }catch(Exception e) { }finally { if(file!=null) try{file.close();}catch(Exception e){}; } return false; } /** * Checks an xml string is well formed. * @param metaA The ValueMetaInterface * @param dataA The value (filename) * @return true if the file is well formed. */ public static boolean isXMLWellFormed(ValueMetaInterface metaA, Object dataA) { if(dataA==null) return false; try { return XMLCheck.isXMLWellFormed(new ByteArrayInputStream(metaA.getBinary(dataA))); }catch(Exception e) {} return false; } /** * Get file encoding. * @param metaA The ValueMetaInterface * @param dataA The value (filename) * @return file encoding. */ public static String getFileEncoding(ValueMetaInterface metaA, Object dataA) throws KettleValueException { if(dataA==null) return null; try { return CharsetToolkit.guessEncodingName(new File(metaA.getString(dataA))); }catch(Exception e) { throw new KettleValueException(e); } } }
package org.opencms.gwt.client.util;

import org.opencms.gwt.client.ui.css.I_CmsLayoutBundle;
import org.opencms.gwt.client.util.impl.DOMImpl;
import org.opencms.gwt.client.util.impl.DocumentStyleImpl;
import org.opencms.util.CmsStringUtil;

import java.util.ArrayList;
import java.util.List;

import com.google.gwt.core.client.GWT;
import com.google.gwt.dom.client.Document;
import com.google.gwt.dom.client.Element;
import com.google.gwt.dom.client.NativeEvent;
import com.google.gwt.dom.client.NodeList;
import com.google.gwt.dom.client.Style.Unit;
import com.google.gwt.user.client.DOM;
import com.google.gwt.user.client.ui.HasHorizontalAlignment;
import com.google.gwt.user.client.ui.HasHorizontalAlignment.HorizontalAlignmentConstant;

/**
 * Utility class to access the HTML DOM.<p>
 *
 * @author Tobias Herrmann
 *
 * @version $Revision: 1.23 $
 *
 * @since 8.0.0
 */
public final class CmsDomUtil {

    /** HTML tag attributes.<p> */
    public static enum Attribute {

        /** class. */
        clazz {

            /**
             * @see java.lang.Enum#toString()
             */
            @Override
            public String toString() {

                return "class";
            }
        },

        /** title. */
        title;
    }

    /** Helper class to encapsulate an attribute/value pair.<p> */
    public static class AttributeValue {

        /** The attribute. */
        private Attribute m_attr;

        /** The attribute value. */
        private String m_value;

        /**
         * Constructor, creates a pair without a value.<p>
         *
         * @param attr the attribute
         */
        public AttributeValue(Attribute attr) {

            this(attr, null);
        }

        /**
         * Constructor.<p>
         *
         * @param attr the attribute
         * @param value the value
         */
        public AttributeValue(Attribute attr, String value) {

            m_attr = attr;
            m_value = value;
        }

        /**
         * Returns the attribute.<p>
         *
         * @return the attribute
         */
        public Attribute getAttr() {

            return m_attr;
        }

        /**
         * Returns the value.<p>
         *
         * @return the value
         */
        public String getValue() {

            return m_value;
        }

        /**
         * Sets the value.<p>
         *
         * @param value the value to set
         */
        public void setValue(String value) {

            m_value = value;
        }

        /**
         * Renders the pair as <code>attr</code> or <code>attr="value"</code>.<p>
         *
         * @see java.lang.Object#toString()
         */
        @Override
        public String toString() {

            StringBuffer sb = new StringBuffer();
            sb.append(m_attr.toString());
            if (m_value != null) {
                sb.append("=\"").append(m_value).append("\"");
            }
            return sb.toString();
        }
    }

    /** CSS Colors.<p> */
    public static enum Color {

        /** CSS Color. */
        red;
    }

    /** HTML entities.<p> */
    public static enum Entity {

        /** horizontal ellipsis. */
        hellip,

        /** non-breaking space. */
        nbsp;

        /**
         * Returns the HTML code for this entity.<p>
         *
         * @return the HTML code for this entity
         */
        public String html() {

            return "&" + super.name() + ";";
        }
    }

    /** CSS Properties.<p> */
    public static enum Style {

        /** CSS Property. */
        backgroundColor,

        /** CSS Property. */
        backgroundImage,

        /** CSS property. */
        borderLeftWidth,

        /** CSS property. */
        borderRightWidth,

        /** CSS Property. */
        borderStyle,

        /** CSS Property. */
        display,

        /** CSS Property, named floatCss because "float" is a Java keyword. */
        floatCss {

            /**
             * @see java.lang.Enum#toString()
             */
            @Override
            public String toString() {

                return "float";
            }
        },

        /** CSS Property. */
        fontFamily,

        /** CSS Property. */
        fontSize,

        /** CSS Property. */
        fontSizeAdjust,

        /** CSS Property. */
        fontStretch,

        /** CSS Property. */
        fontStyle,

        /** CSS Property. */
        fontVariant,

        /** CSS Property. */
        fontWeight,

        /** CSS Property. */
        height,

        /** CSS Property. */
        left,

        /** CSS Property. */
        letterSpacing,

        /** CSS Property. */
        lineHeight,

        /** CSS Property. */
        marginBottom,

        /** CSS Property. */
        marginTop,

        /** CSS Property. */
        opacity,

        /** CSS Property. */
        padding,

        /** CSS Property. */
        position,

        /** CSS Property. */
        textAlign,

        /** CSS Property. */
        textDecoration,

        /** CSS Property. */
        textIndent,

        /** CSS Property. */
        textShadow,

        /** CSS Property. */
        textTransform,

        /** CSS Property. */
        top,

        /** CSS Property. */
        visibility,

        /** CSS Property. */
        whiteSpace,

        /** CSS Property. */
        width,

        /** CSS Property. */
        wordSpacing,

        /** CSS Property. */
        wordWrap;
    }

    /** CSS Property values.<p> */
    public static enum StyleValue {

        /** CSS Property value. */
        absolute,

        /** CSS Property value. */
        auto,

        /** CSS Property value. */
        hidden,

        /** CSS Property value. */
        none,

        /** CSS Property value. */
        normal,

        /** CSS Property value. */
        nowrap,

        /** CSS Property value. */
        transparent;
    }

    /** HTML Tags.<p> */
    public static enum Tag {

        /** HTML Tag. */
        a,

        /** Wildcard matching all tags. */
        ALL {

            /**
             * @see java.lang.Enum#toString()
             */
            @Override
            public String toString() {

                return "*";
            }
        },

        /** HTML Tag. */
        b,

        /** HTML Tag. */
        body,

        /** HTML Tag. */
        div,

        /** HTML Tag. */
        h1,

        /** HTML Tag. */
        h2,

        /** HTML Tag. */
        h3,

        /** HTML Tag. */
        h4,

        /** HTML Tag. */
        li,

        /** HTML Tag. */
        p,

        /** HTML Tag. */
        script,

        /** HTML Tag. */
        span,

        /** HTML Tag. */
        ul;
    }

    /** Browser dependent DOM implementation. */
    private static DOMImpl domImpl;

    /** Browser dependent style implementation. */
    private static DocumentStyleImpl styleImpl;

    /**
     * Hidden constructor.<p>
     */
    private CmsDomUtil() {

        // doing nothing
    }

    /**
     * Generates a closing tag.<p>
     *
     * @param tag the tag to use
     *
     * @return HTML code
     */
    public static String close(Tag tag) {

        return "</" + tag.name() + ">";
    }

    /**
     * This method will create an {@link com.google.gwt.user.client.Element} for the given HTML.
     * The HTML should have a single root tag, if not, the first tag will be used and all others discarded.
     * Script-tags will be ignored.
     *
     * @param html the HTML to use for the element
     *
     * @return the created element
     *
     * @throws Exception if something goes wrong
     */
    public static com.google.gwt.user.client.Element createElement(String html) throws Exception {

        com.google.gwt.user.client.Element container = DOM.createDiv();
        container.setInnerHTML(html);
        com.google.gwt.user.client.Element root = (com.google.gwt.user.client.Element)container.getFirstChildElement();
        DOM.removeChild(container, root);
        // just in case we have a script tag outside the root HTML-tag
        while ((root != null) && (root.getTagName().toLowerCase().equals(Tag.script.name()))) {
            root = (com.google.gwt.user.client.Element)container.getFirstChildElement();
            DOM.removeChild(container, root);
        }
        if (root == null) {
            CmsDebugLog.getInstance().printLine(
                "Could not create element as the given HTML has no appropriate root element");
            throw new UnsupportedOperationException(
                "Could not create element as the given HTML has no appropriate root element");
        }
        return root;
    }

    /**
     * Convenience method to assemble the HTML to use for a button face.<p>
     *
     * @param text text the up face text to set, set to <code>null</code> to not show any
     * @param imageClass the up face image class to use, set to <code>null</code> to not show any
     * @param align the alignment of the text in reference to the image
     *
     * @return the HTML
     */
    public static String createFaceHtml(String text, String imageClass, HorizontalAlignmentConstant align) {

        StringBuffer html = new StringBuffer();
        boolean hasText = CmsStringUtil.isNotEmptyOrWhitespaceOnly(text);
        if ((align == HasHorizontalAlignment.ALIGN_LEFT) && hasText) {
            html.append(text.trim());
        }
        if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(imageClass)) {
            String clazz = imageClass;
            if (hasText) {
                // add a spacer on the side facing the text
                if (align == HasHorizontalAlignment.ALIGN_LEFT) {
                    clazz += " " + I_CmsLayoutBundle.INSTANCE.buttonCss().spacerLeft();
                } else {
                    clazz += " " + I_CmsLayoutBundle.INSTANCE.buttonCss().spacerRight();
                }
            }
            AttributeValue attr = new AttributeValue(Attribute.clazz, clazz);
            html.append(enclose(Tag.span, "", attr));
        }
        if ((align == HasHorizontalAlignment.ALIGN_RIGHT) && hasText) {
            html.append(text.trim());
        }
        return html.toString();
    }

    /**
     * Creates an iFrame element with the given name attribute.<p>
     *
     * @param name the name attribute value
     *
     * @return the iFrame element
     */
    public static com.google.gwt.dom.client.Element createIFrameElement(String name) {

        return getDOMImpl().createIFrameElement(Document.get(), name);
    }

    /**
     * Encloses the given text with the given tag.<p>
     *
     * @param tag the tag to use
     * @param text the text to enclose
     * @param attrs the optional tag attributes
     *
     * @return HTML code
     */
    public static String enclose(Tag tag, String text, AttributeValue... attrs) {

        return open(tag, attrs) + text + close(tag);
    }

    /**
     * Triggers a mouse-out event for the given element.<p>
     *
     * Useful in case something is capturing all events.<p>
     *
     * @param element the element to use
     */
    public static void ensureMouseOut(Element element) {

        NativeEvent nativeEvent = Document.get().createMouseOutEvent(0, 0, 0, 0, 0, false, false, false, false, 0, null);
        element.dispatchEvent(nativeEvent);
    }

    /**
     * Returns the given element or it's closest ancestor with the given class.<p>
     *
     * Returns <code>null</code> if no appropriate element was found.<p>
     *
     * @param element the element
     * @param className the class name
     *
     * @return the matching element
     */
    public static Element getAncestor(Element element, String className) {

        Element current = element;
        while (true) {
            if (hasClass(className, current)) {
                return current;
            }
            if (current.getTagName().equalsIgnoreCase(Tag.body.name())) {
                // reached the document body without a match
                return null;
            }
            current = current.getParentElement();
        }
    }

    /**
     * Returns the given element or it's closest ancestor with the given tag name.<p>
     *
     * Returns <code>null</code> if no appropriate element was found.<p>
     *
     * @param element the element
     * @param tag the tag name
     *
     * @return the matching element
     */
    public static Element getAncestor(Element element, Tag tag) {

        Element current = element;
        while (true) {
            if (current.getTagName().equalsIgnoreCase(tag.name())) {
                return current;
            }
            if (current.getTagName().equalsIgnoreCase(Tag.body.name())) {
                // reached the document body without a match
                return null;
            }
            current = current.getParentElement();
        }
    }

    /**
     * Returns the given element or it's closest ancestor with the given tag and class.<p>
     *
     * Returns <code>null</code> if no appropriate element was found.<p>
     *
     * @param element the element
     * @param tag the tag name
     * @param className the class name
     *
     * @return the matching element
     */
    public static Element getAncestor(Element element, Tag tag, String className) {

        Element current = element;
        while (true) {
            if (current.getTagName().equalsIgnoreCase(tag.name()) && hasClass(className, current)) {
                return current;
            }
            if (current.getTagName().equalsIgnoreCase(Tag.body.name())) {
                // reached the document body without a match
                return null;
            }
            current = current.getParentElement();
        }
    }

    /**
     * Returns the computed style of the given element.<p>
     *
     * @param element the element
     * @param style the CSS property
     *
     * @return the currently computed style
     */
    public static String getCurrentStyle(Element element, Style style) {

        if (styleImpl == null) {
            // lazily create the browser specific implementation
            styleImpl = GWT.create(DocumentStyleImpl.class);
        }
        return styleImpl.getCurrentStyle(element, style.toString());
    }

    /**
     * Returns the computed style of the given element as floating point number.<p>
     *
     * @param element the element
     * @param style the CSS property
     *
     * @return the currently computed style
     */
    public static double getCurrentStyleFloat(Element element, Style style) {

        String currentStyle = getCurrentStyle(element, style);
        return CmsClientStringUtil.parseFloat(currentStyle);
    }

    /**
     * Returns the computed style of the given element as number.<p>
     *
     * @param element the element
     * @param style the CSS property
     *
     * @return the currently computed style
     */
    public static int getCurrentStyleInt(Element element, Style style) {

        String currentStyle = getCurrentStyle(element, style);
        return CmsClientStringUtil.parseInt(currentStyle);
    }

    /**
     * Returns all elements from the DOM with the given CSS class and tag name.<p>
     *
     * @param className the class name to look for
     * @param tag the tag
     *
     * @return the matching elements
     */
    public static List<Element> getElementByClass(String className, Tag tag) {

        return getElementsByClass(className, tag, Document.get().getBody());
    }

    /**
     * Returns all elements from the DOM with the given CSS class.<p>
     *
     * @param className the class name to look for
     *
     * @return the matching elements
     */
    public static List<Element> getElementsByClass(String className) {

        return getElementsByClass(className, Tag.ALL, Document.get().getBody());
    }

    /**
     * Returns all elements with the given CSS class including the root element.<p>
     *
     * @param className the class name to look for
     * @param rootElement the root element of the search
     *
     * @return the matching elements
     */
    public static List<Element> getElementsByClass(String className, Element rootElement) {

        return getElementsByClass(className, Tag.ALL, rootElement);
    }

    /**
     * Returns all elements with the given CSS class and tag name including the root element.<p>
     *
     * @param className the class name to look for
     * @param tag the tag
     * @param rootElement the root element of the search
     *
     * @return the matching elements, or <code>null</code> for invalid arguments
     */
    public static List<Element> getElementsByClass(String className, Tag tag, Element rootElement) {

        if ((rootElement == null) || (className == null) || (className.trim().length() == 0) || (tag == null)) {
            return null;
        }
        String needle = className.trim();
        List<Element> matches = new ArrayList<Element>();
        if (internalHasClass(needle, rootElement)) {
            matches.add(rootElement);
        }
        NodeList<Element> candidates = rootElement.getElementsByTagName(tag.toString());
        for (int index = 0; index < candidates.getLength(); index++) {
            Element candidate = candidates.getItem(index);
            if (internalHasClass(needle, candidate)) {
                matches.add(candidate);
            }
        }
        return matches;
    }

    /**
     * Utility method to determine if the given element has a set background.<p>
     *
     * @param element the element
     *
     * @return <code>true</code> if the element has a background set
     */
    public static boolean hasBackground(Element element) {

        String backgroundColor = getCurrentStyle(element, Style.backgroundColor);
        String backgroundImage = getCurrentStyle(element, Style.backgroundImage);
        boolean transparentColor = backgroundColor.equals(StyleValue.transparent.toString());
        boolean noImage = (backgroundImage == null)
            || (backgroundImage.trim().length() == 0)
            || backgroundImage.equals(StyleValue.none.toString());
        return !(transparentColor && noImage);
    }

    /**
     * Utility method to determine if the given element has a set border.<p>
     *
     * @param element the element
     *
     * @return <code>true</code> if the element has a border
     */
    public static boolean hasBorder(Element element) {

        String borderStyle = getCurrentStyle(element, Style.borderStyle);
        boolean noBorder = (borderStyle == null)
            || borderStyle.equals(StyleValue.none.toString())
            || (borderStyle.length() == 0);
        return !noBorder;
    }

    /**
     * Indicates if the given element has a CSS class.<p>
     *
     * @param className the class name to look for
     * @param element the element
     *
     * @return <code>true</code> if the element has the given CSS class
     */
    public static boolean hasClass(String className, Element element) {

        return internalHasClass(className.trim(), element);
    }

    /**
     * Generates an opening tag.<p>
     *
     * @param tag the tag to use
     * @param attrs the optional tag attributes
     *
     * @return HTML code
     */
    public static String open(Tag tag, AttributeValue... attrs) {

        StringBuffer sb = new StringBuffer();
        sb.append("<").append(tag.name());
        for (AttributeValue attr : attrs) {
            sb.append(" ").append(attr.toString());
        }
        sb.append(">");
        return sb.toString();
    }

    /**
     * Positions an element in the DOM relative to another element.<p>
     *
     * @param elem the element to position
     * @param referenceElement the element relative to which the first element should be positioned
     * @param dx the x offset relative to the reference element
     * @param dy the y offset relative to the reference element
     */
    public static void positionElement(Element elem, Element referenceElement, double dx, double dy) {

        com.google.gwt.dom.client.Style style = elem.getStyle();
        // reset to a known position first, then measure the remaining offset
        style.setLeft(0, Unit.PX);
        style.setTop(0, Unit.PX);
        double myX = elem.getAbsoluteLeft();
        double myY = elem.getAbsoluteTop();
        double refX = referenceElement.getAbsoluteLeft();
        double refY = referenceElement.getAbsoluteTop();
        style.setLeft((refX - myX) + dx, Unit.PX);
        style.setTop((refY - myY) + dy, Unit.PX);
    }

    /**
     * Returns the DOM implementation.<p>
     *
     * @return the DOM implementation
     */
    private static DOMImpl getDOMImpl() {

        if (domImpl == null) {
            // lazily create the browser specific implementation
            domImpl = GWT.create(DOMImpl.class);
        }
        return domImpl;
    }

    /**
     * Internal method to indicate if the given element has a CSS class.<p>
     *
     * @param className the class name to look for
     * @param element the element
     *
     * @return <code>true</code> if the element has the given CSS class
     */
    private static boolean internalHasClass(String className, Element element) {

        String elementClass = element.getClassName().trim();
        // match when the name is the only class, or when it appears at the
        // start, middle or end of the space separated class list
        boolean result = elementClass.equals(className);
        result |= elementClass.contains(" " + className + " ");
        result |= elementClass.startsWith(className + " ");
        result |= elementClass.endsWith(" " + className);
        return result;
    }
}
package wcanalysis.charting; import java.awt.BasicStroke; import java.awt.Color; import java.awt.geom.Rectangle2D; import java.io.File; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; import java.io.Reader; import java.io.Writer; import java.util.List; import org.apache.commons.csv.CSVFormat; import org.apache.commons.csv.CSVRecord; import org.jfree.chart.ChartFactory; import org.jfree.chart.ChartMouseEvent; import org.jfree.chart.ChartMouseListener; import org.jfree.chart.ChartPanel; import org.jfree.chart.JFreeChart; import org.jfree.chart.axis.NumberAxis; import org.jfree.chart.axis.ValueAxis; import org.jfree.chart.panel.CrosshairOverlay; import org.jfree.chart.plot.Crosshair; import org.jfree.chart.plot.PlotOrientation; import org.jfree.chart.plot.ValueMarker; import org.jfree.chart.plot.XYPlot; import org.jfree.chart.renderer.xy.XYLineAndShapeRenderer; import org.jfree.data.xy.XYDataset; import org.jfree.data.xy.XYSeries; import org.jfree.data.xy.XYSeriesCollection; import org.jfree.ui.ApplicationFrame; import org.jfree.ui.RectangleEdge; import org.jfree.ui.RefineryUtilities; import wcanalysis.fitting.FunctionFitter; /** * @author Kasper Luckow */ public class WorstCaseChart extends ApplicationFrame implements ChartMouseListener { private static final long serialVersionUID = 7777887151534005094L; private Crosshair xCrosshair; private Crosshair yCrosshair; private ChartPanel chartPanel; public static void main(String[] args) throws IOException { if(args.length < 1 || args.length > 2) { System.err.print("Accepts 2 args: path to csv file and optionally \"output\" which will output the data set of the models"); System.exit(-1); } boolean output = false; if(args[args.length - 1].equalsIgnoreCase("output")) { output = true; } if(args[0].equalsIgnoreCase("batch")) { String csvFile = args[1]; processBatchResultsCSV(csvFile, output); } else { String csvFile = args[0]; 
processStandardResultsCSV(csvFile, output); } } private static void processBatchResultsCSV(String csvFile, boolean output) throws IOException { Reader in = new FileReader(csvFile); Iterable<CSVRecord> records = CSVFormat.DEFAULT.parse(in); boolean first = true; for(CSVRecord rec : records) { if(rec.toString().startsWith("#") && first) { //probably the most ridiculous csv library when // you have to do // this? first = false; continue; } DataCollection dataCollection = new DataCollection(); double inputSize = 1; for(String element : rec) { double y = Double.parseDouble(element); dataCollection.addDatapoint(inputSize++, y); } visualizeDataCollection(dataCollection, output, new File(csvFile).getParentFile()); } } private static void visualizeDataCollection(DataCollection dataCollection, boolean output, File baseDir) throws IOException { XYSeriesCollection series = FunctionFitter.computeSeries(dataCollection, 10); if(output) { for(XYSeries ser : (List<XYSeries>)series.getSeries()) { String fileName = ser.getDescription() + ".csv"; File f = new File(baseDir, fileName); Writer w = new FileWriter(f); FunctionFitter.seriesToCSV(ser, w); } } WorstCaseChart wcChart = new WorstCaseChart(series); wcChart.pack(); RefineryUtilities.centerFrameOnScreen(wcChart); wcChart.setVisible(true); } private static void processStandardResultsCSV(String csvFile, boolean output) throws IOException { Reader in = new FileReader(csvFile); DataCollection dataCollection = new DataCollection(); Iterable<CSVRecord> records = CSVFormat.DEFAULT.parse(in); boolean first = true; for(CSVRecord rec : records) { if(first) { //probably the most ridiculous csv library when you have to do this? 
first = false; continue; } int x = Integer.parseInt(rec.get(0)); int y = Integer.parseInt(rec.get(1)); dataCollection.addDatapoint(x, y); } visualizeDataCollection(dataCollection, output, new File(csvFile).getParentFile()); } public WorstCaseChart(XYSeriesCollection dataCollection) { super("Worst case"); JFreeChart chart = createChart(dataCollection); createChartPanel(chart); } public WorstCaseChart(XYSeriesCollection dataCollection, double maxInputReq, double maxResReq) { super("Worst case"); JFreeChart chart = createChart(dataCollection); XYPlot plot = chart.getXYPlot(); ValueMarker vertMarker = new ValueMarker(maxInputReq); vertMarker.setPaint(Color.red); plot.addDomainMarker(vertMarker); // vertical line ValueMarker horizMarker = new ValueMarker(maxResReq); horizMarker.setPaint(Color.red); plot.addRangeMarker(horizMarker); // horizontal line createChartPanel(chart); } private void createChartPanel(JFreeChart chart) { chartPanel = new ChartPanel(chart); chartPanel.addChartMouseListener(this); CrosshairOverlay crosshairOverlay = new CrosshairOverlay(); xCrosshair = new Crosshair(Double.NaN, Color.GRAY, new BasicStroke(0f)); xCrosshair.setLabelVisible(true); yCrosshair = new Crosshair(Double.NaN, Color.GRAY, new BasicStroke(0f)); yCrosshair.setLabelVisible(true); crosshairOverlay.addDomainCrosshair(xCrosshair); crosshairOverlay.addRangeCrosshair(yCrosshair); chartPanel.addOverlay(crosshairOverlay); chartPanel.setPreferredSize(new java.awt.Dimension(800, 600)); setContentPane(chartPanel); } private JFreeChart createChart(XYDataset dataset) { //Create the chart final JFreeChart chart = ChartFactory.createXYLineChart( "Worst Case Prediction Model", "Input Size", "Depth", dataset, PlotOrientation.VERTICAL, true, true, false ); chart.setBackgroundPaint(Color.white); XYPlot plot = chart.getXYPlot(); plot.setBackgroundPaint(Color.lightGray); plot.setDomainGridlinePaint(Color.white); plot.setRangeGridlinePaint(Color.white); XYLineAndShapeRenderer renderer = new 
XYLineAndShapeRenderer(); plot.setRenderer(renderer); // change the auto tick unit selection to integer units only... final NumberAxis rangeAxis = (NumberAxis) plot.getRangeAxis(); rangeAxis.setStandardTickUnits(NumberAxis.createIntegerTickUnits()); return chart; } @Override public void chartMouseClicked(ChartMouseEvent arg0) { //ignore } @Override public void chartMouseMoved(ChartMouseEvent event) { Rectangle2D dataArea = this.chartPanel.getScreenDataArea(); JFreeChart chart = event.getChart(); XYPlot plot = (XYPlot) chart.getPlot(); ValueAxis xAxis = plot.getDomainAxis(); double x = xAxis.java2DToValue(event.getTrigger().getX(), dataArea, RectangleEdge.BOTTOM); ValueAxis yAxis = plot.getRangeAxis(); double y = yAxis.java2DToValue(event.getTrigger().getY(), dataArea, RectangleEdge.LEFT); //Alternatively, obtain y for one of the subplots, which would be very neat. //We should find the "nearest" subplot to the cursor -- this is easy //double y = DatasetUtilities.findYValue(plot.getDataset(), 0, x); this.xCrosshair.setValue(x); this.yCrosshair.setValue(y); } }
package ValkyrienWarfareBase.Proxy; import ValkyrienWarfareBase.EventsClient; import ValkyrienWarfareBase.KeyHandler; import ValkyrienWarfareBase.ValkyrienWarfareMod; import ValkyrienWarfareBase.API.Vector; import ValkyrienWarfareBase.Client.RenderManagerOverride; import ValkyrienWarfareBase.Math.Quaternion; import ValkyrienWarfareBase.PhysicsManagement.PhysicsWrapperEntity; import ValkyrienWarfareBase.Render.PhysObjectRenderFactory; import code.elix_x.excomms.reflection.ReflectionHelper.AClass; import net.minecraft.block.Block; import net.minecraft.client.Minecraft; import net.minecraft.client.renderer.ItemRenderer; import net.minecraft.client.renderer.RenderGlobal; import net.minecraft.client.renderer.block.model.ModelResourceLocation; import net.minecraft.client.renderer.culling.ICamera; import net.minecraft.client.renderer.entity.RenderManager; import net.minecraft.item.Item; import net.minecraftforge.client.model.obj.OBJLoader; import net.minecraftforge.common.MinecraftForge; import net.minecraftforge.fml.client.registry.RenderingRegistry; import net.minecraftforge.fml.common.event.FMLInitializationEvent; import net.minecraftforge.fml.common.event.FMLPostInitializationEvent; import net.minecraftforge.fml.common.event.FMLPreInitializationEvent; public class ClientProxy extends CommonProxy { KeyHandler keyEvents = new KeyHandler(); public static ICamera lastCamera; @Override public void preInit(FMLPreInitializationEvent e) { super.preInit(e); OBJLoader.INSTANCE.addDomain(ValkyrienWarfareMod.MODID.toLowerCase()); RenderingRegistry.registerEntityRenderingHandler(PhysicsWrapperEntity.class, new PhysObjectRenderFactory()); } @Override public void init(FMLInitializationEvent e) { super.init(e); MinecraftForge.EVENT_BUS.register(new EventsClient()); MinecraftForge.EVENT_BUS.register(keyEvents); registerBlockItem(ValkyrienWarfareMod.physicsInfuser); registerBlockItem(ValkyrienWarfareMod.physicsInfuserCreative); } @Override public void 
postInit(FMLPostInitializationEvent e) { super.postInit(e); new AClass<>(Minecraft.class).<RenderManager>getDeclaredField("renderManager").setAccessible(true).set(Minecraft.getMinecraft(), new RenderManagerOverride(Minecraft.getMinecraft().getRenderManager())); new AClass<>(ItemRenderer.class).<RenderManager>getDeclaredField("renderManager").setAccessible(true).setFinal(false).set(Minecraft.getMinecraft().getItemRenderer(), Minecraft.getMinecraft().getRenderManager()); new AClass<>(RenderGlobal.class).<RenderManager>getDeclaredField("renderManager").setFinal(false).set(Minecraft.getMinecraft().renderGlobal, Minecraft.getMinecraft().getRenderManager()); } private void registerBlockItem(Block toRegister) { Item item = Item.getItemFromBlock(toRegister); Minecraft.getMinecraft().getRenderItem().getItemModelMesher().register(item, 0, new ModelResourceLocation(ValkyrienWarfareMod.MODID + ":" + item.getUnlocalizedName().substring(5), "inventory")); } @Override public void updateShipPartialTicks(PhysicsWrapperEntity entity) { double partialTicks = Minecraft.getMinecraft().getRenderPartialTicks(); // entity.wrapping.renderer.updateTranslation(partialTicks); Vector centerOfRotation = entity.wrapping.centerCoord; if (entity.wrapping.renderer == null) { return; } entity.wrapping.renderer.curPartialTick = partialTicks; double moddedX = entity.lastTickPosX + (entity.posX - entity.lastTickPosX) * partialTicks; double moddedY = entity.lastTickPosY + (entity.posY - entity.lastTickPosY) * partialTicks; double moddedZ = entity.lastTickPosZ + (entity.posZ - entity.lastTickPosZ) * partialTicks; double p0 = Minecraft.getMinecraft().thePlayer.lastTickPosX + (Minecraft.getMinecraft().thePlayer.posX - Minecraft.getMinecraft().thePlayer.lastTickPosX) * (double) partialTicks; double p1 = Minecraft.getMinecraft().thePlayer.lastTickPosY + (Minecraft.getMinecraft().thePlayer.posY - Minecraft.getMinecraft().thePlayer.lastTickPosY) * (double) partialTicks; double p2 = 
Minecraft.getMinecraft().thePlayer.lastTickPosZ + (Minecraft.getMinecraft().thePlayer.posZ - Minecraft.getMinecraft().thePlayer.lastTickPosZ) * (double) partialTicks; Quaternion smoothRotation = entity.wrapping.renderer.getSmoothRotationQuat(partialTicks); double[] radians = smoothRotation.toRadians(); double moddedPitch = Math.toDegrees(radians[0]); double moddedYaw = Math.toDegrees(radians[1]); double moddedRoll = Math.toDegrees(radians[2]); entity.wrapping.coordTransform.updateRenderMatrices(moddedX, moddedY, moddedZ, moddedPitch, moddedYaw, moddedRoll); } }
package app.bvk.encounter.dialog; import java.io.IOException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import app.bvk.entity.Creature; import app.bvk.library.CreatureLibrary; import javafx.application.Platform; import javafx.beans.property.ObjectProperty; import javafx.beans.property.SimpleObjectProperty; import javafx.collections.FXCollections; import javafx.collections.ObservableList; import javafx.collections.transformation.FilteredList; import javafx.fxml.FXML; import javafx.fxml.FXMLLoader; import javafx.scene.control.Button; import javafx.scene.control.ButtonBar.ButtonData; import javafx.scene.control.ButtonType; import javafx.scene.control.Dialog; import javafx.scene.control.DialogPane; import javafx.scene.control.ListCell; import javafx.scene.control.ListView; import javafx.scene.control.TextField; public class NpcDialogPane extends DialogPane { private static final Logger LOGGER = LoggerFactory.getLogger(NpcDialogPane.class); private FXMLLoader loader; @FXML private ListView<Creature> creatureListView; @FXML private TextField filterTextField; private ObjectProperty<Creature> selectedCreature = new SimpleObjectProperty<>(); private Dialog<Creature> parentDialog; public NpcDialogPane(final Dialog<Creature> parent) { this.parentDialog = parent; this.loader = new FXMLLoader(this.getClass().getClassLoader().getResource("EncounterNpcWindowGui.fxml")); this.loader.setRoot(this); this.loader.setController(this); try { this.loader.load(); } catch (final IOException e) { LOGGER.error("ERROR while loading encounter fxml", e); } } @FXML private void initialize() { final ButtonType addButtonType = new ButtonType("Add", ButtonData.APPLY); final ButtonType cancelButtonType = new ButtonType("Cancel", ButtonData.CANCEL_CLOSE); this.getButtonTypes().add(addButtonType); this.getButtonTypes().add(cancelButtonType); final Button addButton = (Button) this.lookupButton(addButtonType); addButton.setDefaultButton(true); final Button cancelButton = (Button) 
this.lookupButton(cancelButtonType); cancelButton.setCancelButton(true); Platform.runLater(() -> this.filterTextField.requestFocus()); this.creatureListView.setCellFactory(cellData -> new ListCell<Creature>() { @Override protected void updateItem(final Creature creature, final boolean empty) { super.updateItem(creature, empty); if (empty || creature == null) { this.setText(null); this.setGraphic(null); } else { this.setText(creature.getName().getValue()); } } }); this.creatureListView.setOnMouseClicked(event -> { if (event.getClickCount() == 2) { this.parentDialog.resultProperty().set(this.creatureListView.getSelectionModel().getSelectedItem()); } }); final ObservableList<Creature> creatureObservableList = FXCollections.observableArrayList(CreatureLibrary.getInstance().getCreatures()); this.creatureListView.setItems(creatureObservableList); this.filterTextField.textProperty().addListener((obs, oldValue, newValue) -> { final FilteredList<Creature> filteredList = new FilteredList<>(creatureObservableList, predicate -> true); filteredList.setPredicate(creature -> { final boolean isEmpty = newValue == null || newValue.isEmpty(); final boolean nameMatch = creature.getName().getValue().toLowerCase().contains(newValue.toLowerCase()); return isEmpty || nameMatch; }); this.creatureListView.setItems(filteredList); }); this.selectedCreature.bind(this.creatureListView.getSelectionModel().selectedItemProperty()); } public Creature getSelectedCreature() { return this.selectedCreature.getValue(); } }
package application.controllers; import application.fxobjects.cell.Cell; import application.fxobjects.cell.Edge; import application.fxobjects.cell.LineageColor; import application.fxobjects.cell.layout.CellLayout; import application.fxobjects.cell.layout.TreeLayout; import application.fxobjects.cell.tree.LeafCell; import core.Filter; import core.MetaData; import core.graph.PhylogeneticTree; import javafx.scene.control.ScrollPane; import javafx.scene.input.ScrollEvent; import javafx.scene.layout.AnchorPane; import javafx.scene.layout.Background; import javafx.scene.layout.BackgroundFill; import javafx.scene.paint.Color; import java.net.URL; import java.util.ArrayList; import java.util.List; import java.util.ResourceBundle; import static core.Filter.*; import static application.fxobjects.cell.LineageColor.*; import static application.fxobjects.cell.LineageColor.*; import static core.MetaData.META_DATA; /** * Class responsible for setting up the scroll pane containing the phylogenetic tree. */ public class TreeController extends Controller<ScrollPane> { private PhylogeneticTree pt; private List<Edge> collectedEdges; private List<Cell> selectedStrains; private List<Cell> collectedStrains; private TreeMouseHandling treeMouseHandling; private AnchorPane root; /** * Class constructor. * * @param m MainController. */ public TreeController(MainController m) { super(new ScrollPane()); this.pt = new PhylogeneticTree(); this.selectedStrains = new ArrayList<>(); this.collectedStrains = new ArrayList<>(); this.treeMouseHandling = new TreeMouseHandling(m); this.getRoot().setHbarPolicy(ScrollPane.ScrollBarPolicy.ALWAYS); this.getRoot().setVbarPolicy(ScrollPane.ScrollBarPolicy.ALWAYS); this.getRoot().addEventFilter(ScrollEvent.SCROLL, event -> { if (event.getDeltaY() != 0) { event.consume(); } }); init(); } /** * Get the phylogenetic tree. * * @return The phylogenetic tree. 
*/ public PhylogeneticTree getPT() { return pt; } @Override public void initialize(URL location, ResourceBundle resources) { } /** * Add cells from the model to the gui. */ public void init() { root = new AnchorPane(); CellLayout layout = new TreeLayout(pt.getModel(), 30); layout.execute(); List<Cell> nodeList = pt.getModel().getAddedCells(); List<Edge> edgeList = pt.getModel().getAddedEdges(); nodeList.forEach(treeMouseHandling::setMouseHandling); edgeList.forEach(treeMouseHandling::setMouseHandling); // Add all cells and edges to the anchor pane root.getChildren().addAll(pt.getModel().getAddedCells()); root.getChildren().addAll(pt.getModel().getAddedEdges()); this.getRoot().setContent(root); } /** * Selects strains to keep them highlighted. */ public void selectStrains() { collectedStrains.forEach(e -> { if (selectedStrains.contains(e)) { selectedStrains.remove(e); } else { selectedStrains.add(e); } modifyGraphOptions(); }); } /** * Colors the selected strains after un-hover. */ public void colorSelectedStrains() { selectedStrains.forEach(this::applyCellHighlight); } /** * Applies the highlight in the phylogenetic tree on hovering over a leafNode. * * @param cell the Cell being hovered over. */ @SuppressWarnings("checkstyle:linelength") public void applyCellHighlight(Cell cell) { if (cell instanceof LeafCell) { String name = ((LeafCell) cell).getName(); List<Cell> parentList = new ArrayList<>(); parentList.add(cell); collectedStrains.clear(); collectedStrains.add(cell); if (name.contains("TKK")) { applyColorUpwards(parentList, determineEdgeLinColor(META_DATA.get(name).getLineage()), 4.0); applyColorOnCell(cell, determineSelectedLeafLinColor(META_DATA.get(name).getLineage())); } else if (name.contains("G")) { applyColorUpwards(parentList, LineageColor.LIN4, 4.0); applyColorOnCell(cell, SLIN4); } else { applyColorUpwards(parentList, Color.YELLOW, 4.0); } } } /** * Reverts the highlight in the phylogenetic tree on losing hover over a leafNode. 
* * @param cell the Cell which is no longer being hovered over. */ public void revertCellHighlight(Cell cell) { if (cell instanceof LeafCell) { String name = ((LeafCell) cell).getName(); List<Cell> parentList = new ArrayList<>(); parentList.add(cell); collectedStrains.clear(); collectedStrains.add(cell); if (name.contains("TKK")) { applyColorUpwards(parentList, Color.BLACK, 1.0); applyColorOnCell(cell, determineLeafLinColor(META_DATA.get(name).getLineage())); } else if (name.contains("G")) { applyColorUpwards(parentList, Color.BLACK, 1.0); applyColorOnCell(cell, GLIN4); } else { applyColorUpwards(parentList, Color.BLACK, 1.0); } applyColorUpwards(parentList, Color.BLACK, 1.0); } } /** * Applies the highlight in the phylogenetic tree on hovering over an Edge. * * @param edge the Edge being hovered over. */ public void applyEdgeHighlight(Edge edge) { collectedEdges = new ArrayList<>(); collectedStrains.clear(); collectEdges(edge); applyColorOnCells(); applyColorOnEdges(determineEdgeLinColor(getCommonLineage()), 4.0); } /** * Reverts the highlight in the phylogenetic tree on losing hover over an Edge. * * @param edge the Edge which is no longer being hovered over. */ public void revertEdgeHighlight(Edge edge) { collectedEdges = new ArrayList<>(); collectedStrains.clear(); collectEdges(edge); revertColorOnCells(); applyColorOnEdges(Color.BLACK, 1.0); } /** * Apply a certain color and stroke to all cells being hovered over. */ @SuppressWarnings("checkstyle:linelength") private void applyColorOnCells() { collectedStrains.forEach(s -> { LeafCell c = (LeafCell) s; if (c.getName().contains("TKK")) { c.setBackground( new Background( new BackgroundFill( determineSelectedLeafLinColor( META_DATA.get( (c.getName())).getLineage() ), null, null ) ) ); } else if (c.getName().contains("G")) { c.setBackground( new Background( new BackgroundFill(LineageColor.SLIN4, null, null ) ) ); } } ); } /** * Revert a certain color and stroke to all cells being hovered over. 
*/ @SuppressWarnings("checkstyle:linelength") private void revertColorOnCells() { collectedStrains.forEach(s -> { LeafCell c = (LeafCell) s; if (c.getName().contains("TKK")) { c.setBackground( new Background( new BackgroundFill( determineLeafLinColor( META_DATA.get( (c.getName())).getLineage() ), null, null ) ) ); } else if (c.getName().contains("G")) { c.setBackground( new Background( new BackgroundFill(LineageColor.GLIN4, null, null ) ) ); } }); } /** * Apply a certain color and stroke to the cell being hovered over. * * @param e the given Cell. * @param c the given Color. */ private void applyColorOnCell(Cell e, Color c) { e.setBackground(new Background(new BackgroundFill(c, null, null))); } /** * Apply a certain color and stroke to the edges upwards from the node in the list. * * @param l the given List of Edges. * @param c the given Color. * @param s the given stroke. */ private void applyColorUpwards(List<Cell> l, Color c, double s) { while (!l.isEmpty()) { Cell next = l.remove(0); l.addAll(next.getCellParents()); if (next.getCellId() != 0) { Edge e = pt.getModel().getEdgeFromChild(next); e.getLine().setStroke(c); e.getLine().setStrokeWidth(s); } } } /** * Apply a certain color and stroke to the edges in the list. * * @param c the given Color. * @param s the given stroke. */ private void applyColorOnEdges(Color c, double s) { collectedEdges.forEach(e -> { e.getLine().setStroke(c); e.getLine().setStrokeWidth(s); }); } /** * Collect all edges that will be highlighted for selection */ private void collectEdges(Edge edge) { collectedEdges.add(edge); collectEdgesUpwards(edge.getSource()); collectEdgesDownwards(edge.getTarget()); } /** * Collect all selection covered edges from a Cell. * * @param c a Cell. 
*/ private void collectEdgesUpwards(Cell c) { List<Cell> parentList = new ArrayList<>(); parentList.add(c); while (!parentList.isEmpty()) { Cell next = parentList.remove(0); parentList.addAll(next.getCellParents()); if (next.getCellId() != 0) { collectedEdges.add(pt.getModel().getEdgeFromChild(next)); } } } /** * Collect all selection covered edges from a Cell. * * @param c a Cell. */ private void collectEdgesDownwards(Cell c) { List<Cell> childList = new ArrayList<>(); childList.add(c); while (!childList.isEmpty()) { Cell next = childList.remove(0); childList.addAll(next.getCellChildren()); if (!(next instanceof LeafCell)) { collectedEdges.addAll(pt.getModel().getEdgeFromParent(next)); } else { collectedStrains.add(next); } } } /** * Get the most common lineage of the collected strains. * * @return the common lineage identifier. */ private int getCommonLineage() { int common = 0; int count = 0; for (int i = 0; i < 10; i++) { int tempCount = 0; for (Cell c : collectedStrains) { if (MetaData.META_DATA.containsKey(((LeafCell) c).getName()) && MetaData.META_DATA.get(((LeafCell) c).getName()).getLineage() == i) { tempCount++; } } if (tempCount > count) { count = tempCount; common = i; } } return common; } /** * Getter method for the selected strains. * * @return a list with the selected strains. */ public List<Cell> getSelectedStrains() { return selectedStrains; } /** * Getter method for the selected strains. * * @return a list with the selected strains. */ public List<String> getSelectedGenomes() { List<String> genomes = new ArrayList<>(); selectedStrains.forEach(s -> genomes.add(((LeafCell) s).getName())); selectedStrains.clear(); modifyGraphOptions(); return genomes; } /** * Modifies the option on the MenuBarItem that shows the graph with the selected strain(s). 
*/ private void modifyGraphOptions() { int s = selectedStrains.size(); if (s == 0) { MenuFactory.showOnlyThisStrain.setDisable(true); MenuFactory.showSelectedStrains.setDisable(true); } else if (s == 1) { MenuFactory.showOnlyThisStrain.setDisable(false); MenuFactory.showSelectedStrains.setDisable(false); } else { MenuFactory.showOnlyThisStrain.setDisable(true); MenuFactory.showSelectedStrains.setDisable(false); } } public void getCellByName(String name) { double max = 0; double cLoc = 0; for (Object c : root.getChildren() ) { if (c instanceof LeafCell) { if (((LeafCell) c).getName().contains(name)) { selectedStrains.add((LeafCell) c); System.out.println("Found it"); cLoc = ((LeafCell) c).getLayoutY(); } if (((LeafCell) c).getLayoutX() > max) { max = ((LeafCell) c).getLayoutY(); } } } colorSelectedStrains(); modifyGraphOptions(); getRoot().setVvalue(cLoc/max); } public void clearSelection() { selectedStrains.forEach(this::revertCellHighlight); selectedStrains.clear(); } // public void filterPhyloLineage(Filter f) { // switch (f) { // case LIN1 : META_DATA.values().forEach(s -> { // if(s.getLineage() == 1) { // selectedStrains.add(getCellByName(s.getName())); // break; }
package main.java.author.view.tabs; import java.awt.BorderLayout; import java.awt.GridLayout; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.text.NumberFormat; import javax.swing.BorderFactory; import javax.swing.JButton; import javax.swing.JComboBox; import javax.swing.JComponent; import javax.swing.JFormattedTextField; import javax.swing.JLabel; import javax.swing.JPanel; import javax.swing.JTextField; import main.java.author.controller.MainController; import main.java.author.controller.TabController; public class GameSettingsEditorTab extends EditorTab{ private JPanel settingsPanel = new JPanel(); private JComboBox gameModeList; private JComboBox gameDifficultyList; private JLabel levelsPerGameLabel; private JLabel livesLabel; private JLabel beginningMoneyLabel; private JLabel tilesPerRowLabel; private JLabel tilesPerColumnLabel; private JTextField levelsPerGameField; private JTextField livesField; private JTextField beginningMoneyField; private JTextField tilesPerRowField; private JTextField tilesPerColumnField; private static final String LEVELS_STRING = "Levels Per Game: "; private static final String LIVES_STRING = "Lives: "; private static final String WAVES_STRING = "Waves Per Level: "; private static final String ENEMIES_STRING = "Enemies Per Wave: "; private static final String MONEY_STRING = "Beginning Money: "; private static final String ROW_TILES_STRING = "Number of Rows: "; private static final String COLUMN_TILES_STRING = "Number of Columns: "; String[] GAME_MODE_STRINGS = {"Survival Mode", "Boss Mode"}; String[] GAME_DIFFICULTY_STRINGS = {"Easy", "Medium", "Hard"}; private NumberFormat numberFormat; private JButton submitButton; private JButton musicButton; public GameSettingsEditorTab(TabController gameSettingsController){ super(gameSettingsController); createSettingsPanel(); add(settingsPanel, BorderLayout.CENTER); } private void createSettingsPanel() { settingsPanel.setLayout(new BorderLayout()); 
settingsPanel.add(makeDropDownMenus(), BorderLayout.NORTH); settingsPanel.add(makeAttributesPane(), BorderLayout.SOUTH); settingsPanel.setBorder(BorderFactory.createEmptyBorder(20, 20, 20, 20)); } private JComponent makeDropDownMenus(){ JPanel dropDownMenus = new JPanel(); dropDownMenus.setLayout(new BorderLayout()); gameModeList = new JComboBox(GAME_MODE_STRINGS); gameModeList.setSelectedIndex(1); gameModeList.addActionListener(new ActionListener(){ @Override public void actionPerformed(ActionEvent arg0) { // TODO Auto-generated method stub // Would probably switch between the specific attributes to display or just make unique panels for each as classes. // and then do some more logic outside of this action listener to decide what to display. } }); gameDifficultyList = new JComboBox(GAME_DIFFICULTY_STRINGS); gameDifficultyList.setSelectedIndex(1); gameDifficultyList.addActionListener(new ActionListener(){ @Override public void actionPerformed(ActionEvent e) { // TODO Auto-generated method stub } }); dropDownMenus.add(gameModeList, BorderLayout.NORTH); dropDownMenus.add(gameDifficultyList, BorderLayout.SOUTH); return dropDownMenus; } private JComponent makeAttributesPane(){ JPanel attributes = new JPanel(); attributes.setLayout(new BorderLayout()); attributes.add(makeLabelPane(), BorderLayout.WEST); attributes.add(makeFieldPane(), BorderLayout.EAST); attributes.add(makeButtons(), BorderLayout.SOUTH); return attributes; } private JComponent makeButtons(){ JPanel buttons = new JPanel(); submitButton = new JButton("Submit"); submitButton.addActionListener(new ActionListener(){ @Override public void actionPerformed(ActionEvent e) { // TODO Auto-generated method stub } }); musicButton = new JButton("Choose Music"); musicButton.addActionListener(new ActionListener(){ @Override public void actionPerformed(ActionEvent e) { // TODO Auto-generated method stub } }); buttons.add(musicButton, BorderLayout.NORTH); buttons.add(submitButton, BorderLayout.SOUTH); return buttons; } 
private JComponent makeLabelPane(){ levelsPerGameLabel = new JLabel(LEVELS_STRING); livesLabel = new JLabel(LIVES_STRING); beginningMoneyLabel = new JLabel(MONEY_STRING); tilesPerRowLabel = new JLabel(ROW_TILES_STRING); tilesPerColumnLabel = new JLabel(COLUMN_TILES_STRING); JPanel labels = new JPanel( new GridLayout(0, 1)); labels.add(levelsPerGameLabel); labels.add(livesLabel); labels.add(beginningMoneyLabel); labels.add(tilesPerRowLabel); labels.add(tilesPerColumnLabel); return labels; } private JComponent makeFieldPane(){ levelsPerGameField = new JFormattedTextField(numberFormat); livesField = new JFormattedTextField(numberFormat); levelsPerGameField.setColumns(10); beginningMoneyField = new JFormattedTextField(numberFormat); tilesPerRowField = new JFormattedTextField(numberFormat); tilesPerColumnField = new JFormattedTextField(numberFormat); JPanel fields = new JPanel(new GridLayout(0, 1)); fields.add(levelsPerGameField); fields.add(livesField); fields.add(beginningMoneyField); fields.add(tilesPerRowField); fields.add(tilesPerColumnField); return fields; } }
package be.isach.samaritan.command; import be.isach.samaritan.util.MathUtils; import be.isach.samaritan.util.SamaritanConstants; import be.isach.samaritan.util.TextUtil; import net.dv8tion.jda.entities.MessageChannel; class CommandHelp extends Command { private static final int COMMANDS_PER_PAGE = 10; /** * Command Constructor. * * @param messageChannel The text Channel where command is called. * @param commandData The Command Data, providing the Guild, the executor and the Samaritan instance. * @param args The args provided when command was called. */ CommandHelp(MessageChannel messageChannel, CommandData commandData, String[] args) { super(messageChannel, commandData, args); } /** * Called when the command is executed. * * @param args Arguments provided by user. */ @Override void onExecute(String[] args) { int page = 1; if (args != null && args.length > 0 && !args[0].isEmpty()) { if(MathUtils.isInteger(args[0])) { page = Integer.parseInt(args[0]); } else { onHelpForCommand(args[0]); return; } } page = Math.max(1, page); page = Math.min(getMaxPages(), page); showHelp(page); } private void showHelp(int page) { int totalScale = CommandType.longestStringLength() + 4; int totalScaleDesc = CommandType.longestDescriptionLength() + 4; StringBuilder stringBuilder = new StringBuilder(); stringBuilder.append("```"); stringBuilder.append(" \nAvailable commands (Total: " + CommandType.values().length + "): \n"); stringBuilder.append(" \nPage ").append(page).append("/").append(getMaxPages()).append(" (-help [page]) \n\n"); stringBuilder.append("Alias").append(TextUtil.getSpaces(totalScale - "Alias".length())).append(" "); stringBuilder.append("Description").append(TextUtil.getSpaces(totalScaleDesc - "Description".length())); stringBuilder.append("Required Access Level"); stringBuilder.append("\n\n"); int from = (page - 1) * COMMANDS_PER_PAGE; int to = Math.min(CommandType.values().length - 1, COMMANDS_PER_PAGE * page - 1); for (int i = from; i <= to; i++) { CommandType 
commandType = CommandType.values()[i]; String access = commandType.getRequiredAccessLevel() + ""; String alias = commandType.getAliases().get(0) + TextUtil.getSpaces(totalScale - commandType.getAliases().get(0).length()); String desc = commandType.getDescription() + TextUtil.getSpaces(totalScaleDesc - commandType.getDescription().length()); stringBuilder.append(SamaritanConstants.PREFIX).append(alias); stringBuilder.append(desc); stringBuilder.append(access); stringBuilder.append("\n"); } stringBuilder.append("```"); getMessageChannel().sendMessage(stringBuilder.toString()); } private int getMaxPages() { return (int)Math.ceil(CommandType.values().length / (double)COMMANDS_PER_PAGE); } private void onHelpForCommand(String commandLabel) { if (!CommandType.isValidCommandAlias(commandLabel)) { getMessageChannel().sendMessage("```" + commandLabel + "``` isn't a valid command!"); return; } CommandType commandType = CommandType.fromAlias(commandLabel); String stringBuilder = "\nCommand " + commandLabel + "\n" + "Aliases: `" + TextUtil.formatAliasesList(commandType.getAliases()) + "`\n" + "Description: `" + commandType.getDescription() + "`\n" + "Required Access Level: `" + commandType.getRequiredAccessLevel() + "`\n"; getMessageChannel().sendMessage(stringBuilder); } }
package br.edu.ifrn.helppet.validacao;

import br.edu.ifrn.helppet.dominio.Animal;

import java.util.ArrayList;

/**
 * Validation rules for {@link Animal} records. Each validator returns the
 * literal string "OK" on success, otherwise a Portuguese error message.
 *
 * @author camila
 */
public class AnimalVL {

    // Whitelist of accepted animal record types.
    private final ArrayList<String> tipo = new ArrayList<>();

    public AnimalVL(){
        tipo.add("Adoção");
        tipo.add("Perdido");
        tipo.add("Resgate");
    }

    /**
     * Checks that the animal's type is non-null, non-empty and one of the
     * whitelisted values.
     */
    private String validarTipo(Animal a){
        String valor = a.getTipoAnimal();
        if (valor == null) {
            return "Campo nulo";
        }
        if (valor.isEmpty()) {
            return "Campo vazio";
        }
        return tipo.contains(valor) ? "OK" : "Campo inválido: " + valor;
    }

    /**
     * Validates the animal's name: 3–30 characters, at least 3 of which are
     * letters. A null name is replaced with a generated default ("Pet <id>")
     * and accepted.
     */
    private String validarNome(Animal a) {
        String nome = a.getNomeAnimal();
        if (nome == null) {
            // Missing name: assign a default based on the id and accept.
            a.setNomeAnimal("Pet " + a.getIdAnimal());
            return "OK";
        }
        if (nome.length() < 3) {
            return "O nome do animal deve conter no mínimo 3 caracteres";
        }
        if (nome.length() > 30) {
            return "O nome do animal deve conter no máximo 30 caracteres";
        }
        int letras = 0;
        for (char c : nome.toCharArray()) {
            if (Character.isLetter(c)) {
                letras++;
            }
        }
        return letras >= 3 ? "OK" : "O nome do animal deve ter no mínimo 3 letras";
    }
}
package cat.nyaa.nyaautils.realm;

import cat.nyaa.nyaautils.I18n;
import cat.nyaa.nyaautils.NyaaUtils;
import cat.nyaa.utils.Message;
import cat.nyaa.utils.MessageType;
import org.bukkit.Location;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.EventPriority;
import org.bukkit.event.Listener;
import org.bukkit.event.player.PlayerJoinEvent;
import org.bukkit.event.player.PlayerMoveEvent;

import java.util.HashMap;
import java.util.UUID;

/**
 * Tracks which named realm each online player is currently inside and sends a
 * notification (chat message or title, per config) when a player crosses into
 * a different realm.
 */
public class RealmListener implements Listener {
    public NyaaUtils plugin;
    // Last realm name announced to each player, keyed by player UUID.
    public HashMap<UUID, String> currentRealm = new HashMap<>();

    public RealmListener(NyaaUtils pl) {
        plugin = pl;
        // Self-registers with Bukkit's plugin manager on construction.
        plugin.getServer().getPluginManager().registerEvents(this, pl);
    }

    @EventHandler(priority = EventPriority.MONITOR, ignoreCancelled = true)
    public void onPlayerJoin(PlayerJoinEvent event) {
        // Seed joining players with the default realm so the first move event
        // has a previous value to compare against.
        if (!currentRealm.containsKey(event.getPlayer().getUniqueId())) {
            currentRealm.put(event.getPlayer().getUniqueId(), Realm.__DEFAULT__);
        }
    }

    @EventHandler
    public void onPlayerMove(PlayerMoveEvent event) {
        Player player = event.getPlayer();
        UUID id = player.getUniqueId();
        String currentRealmName = currentRealm.getOrDefault(id, "");
        Realm realm = getRealm(player.getLocation());
        if (realm == null) {
            return;
        }
        // Still inside the realm we already announced: nothing to do.
        if (currentRealmName.equals(realm.getName()) && realm.inArea(player.getLocation())) {
            return;
        }
        // Entered a different, non-default realm: record it and notify.
        if (!currentRealmName.equals(realm.getName()) && !Realm.__DEFAULT__.equals(realm.getName())) {
            currentRealm.put(id, realm.getName());
            if(plugin.cfg.realm_notification_type == MessageType.TITLE){
                // Title notification: pick public/private wording, private
                // realms additionally show the owner in the subtitle.
                String title, subtitle;
                if (realm.getType().equals(RealmType.PUBLIC)) {
                    title = I18n.format("user.realm.notification.public_title", realm.getName());
                    subtitle = I18n.format("user.realm.notification.public_subtitle");
                } else {
                    title = I18n.format("user.realm.notification.private_title", realm.getName());
                    subtitle = I18n.format("user.realm.notification.private_subtitle", realm.getOwner().getName());
                }
                Message.sendTitle(player, new Message(title).inner, new Message(subtitle).inner,
                        plugin.cfg.realm_notification_title_fadein_tick,
                        plugin.cfg.realm_notification_title_stay_tick,
                        plugin.cfg.realm_notification_title_fadeout_tick
                );
            }else{
                // Non-title notification path (chat/action bar per config).
                if (realm.getType().equals(RealmType.PUBLIC)) {
                    new Message(I18n.format("user.realm.notification.public", realm.getName())).
                            send(player, plugin.cfg.realm_notification_type);
                } else {
                    new Message(I18n.format("user.realm.notification.private", realm.getName(),
                            realm.getOwner().getName())).send(player, plugin.cfg.realm_notification_type);
                }
            }
            return;
        } else if (!currentRealm.containsKey(id) || !Realm.__DEFAULT__.equals(currentRealmName)) {
            // Fell back to the default realm (or player was never seeded):
            // record it and announce the configured default-realm name.
            // NOTE(review): this branch never uses a title even when
            // realm_notification_type is TITLE — confirm that is intended.
            currentRealm.put(id, Realm.__DEFAULT__);
            new Message(plugin.cfg.realm_default_name).send(player, plugin.cfg.realm_notification_type);
        }
        return; // redundant trailing return, kept as-is
    }

    /**
     * Resolves the realm covering a location: the highest-priority non-default
     * realm whose area contains it, falling back to the configured default
     * realm (may be null if no default is configured).
     */
    public Realm getRealm(Location loc) {
        Realm realm = plugin.cfg.realmConfig.realmList.get(Realm.__DEFAULT__);
        for (Realm r : plugin.cfg.realmConfig.realmList.values()) {
            if (r.getName().equals(Realm.__DEFAULT__)) {
                continue;
            }
            if (r.inArea(loc) && (realm == null || realm.getPriority() < r.getPriority())) {
                realm = r;
            }
        }
        return realm;
    }
}
package ch.eiafr.cojac.interval;

import static java.lang.Math.PI;

/**
 * Closed interval of doubles [inf; sup] with outward-rounded arithmetic.
 * <p>
 * Note : the mathematical operations do not treat overflow specially; in the
 * future some feature may frame those events.
 * Example : [-MAX_VALUE;MAX_VALUE] + [0.0;0.0] gives [-infinity;infinity]
 * </p>
 *
 * @version 0.1
 */
public class DoubleInterval implements Comparable<DoubleInterval> {
    public double inf;   // lower bound
    public double sup;   // upper bound
    private boolean isNan; // true iff the interval represents NaN

    private static final double PI_2 = PI / 2.0;   // pi/2
    private static final double PI3_2 = PI * 1.5;  // 3*pi/2
    private static final double PI2 = PI * 2.0;    // 2*pi

    /**
     * Constructor.
     *
     * @param inf must be smaller than sup (not checked here)
     * @param sup must be bigger than inf (not checked here)
     */
    public DoubleInterval(double inf, double sup) {
        if (Double.isNaN(inf) || Double.isNaN(sup)) {
            this.inf = Double.NaN;
            this.sup = Double.NaN;
            this.isNan = true;
        } else {
            this.inf = inf;
            this.sup = sup;
            this.isNan = false;
        }
    }

    /**
     * Degenerate-interval constructor, same as new DoubleInterval(a, a).
     * NOTE(review): the tests on this.inf/this.sup below read the fields
     * before they are assigned (they are always 0.0 at this point), so only
     * the Double.isNaN(value) test can ever fire — looks like a copy-paste
     * from the two-argument constructor; confirm.
     *
     * @param value value of the created interval
     */
    public DoubleInterval(double value) {
        if (Double.isNaN(inf) || Double.isNaN(sup) || Double.isNaN(value)) {
            this.inf = Double.NaN;
            this.sup = Double.NaN;
            this.isNan = true;
        } else {
            this.inf = this.sup = value;
            this.isNan = false;
        }
    }

    /**
     * @param o another DoubleInterval to be compared with this
     *
     * @return - 1 if this is entirely greater than o
     *         - 0 if the intervals share some region (NaN compares as below)
     *         - -1 if this is entirely smaller than o
     */
    @Override
    public int compareTo(DoubleInterval o) {
        if (this.isNan && o.isNan) {
            return 0;
        }
        if (this.isNan) {
            return -1;
        }
        if (o.isNan) {
            return 1;
        }
        if (o.sup < this.inf) {
            return 1;
        }
        if (o.inf > this.sup) {
            return -1;
        }
        return 0;
    }

    /**
     * <p>
     * Note : comparisons with infinity behave like with any double;
     * [NEGATIVE_INFINITY;POSITIVE_INFINITY] includes both infinities.
     * </p>
     *
     * @param value double that is compared with this (seen as a set)
     *
     * @return - 1 if value < inf (the value is below the set)
     *         - 0 if value is inside the set
     *         - -1 if value > sup (the value is above the set)
     */
    public int compareTo(double value) {
        if (this.isNan && Double.isNaN(value)) {
            return 0;
        }
        if (this.isNan) {
            return -1;
        }
        if (Double.isNaN(value)) {
            return 1;
        }
        if (value < inf) {
            return 1;
        }
        if (value > sup) {
            return -1;
        }
        return 0;
    }

    /**
     * @param o DoubleInterval to be compared with this
     *
     * @return true only if the intervals are strictly equal (never for NaN)
     */
    public boolean strictCompareTo(DoubleInterval o) {
        if (o.isNan || this.isNan) {
            return false;
        }
        return (this.inf == o.inf && this.sup == o.sup);
    }

    @Override
    public String toString() {
        if (isNan) {
            return "[NaN;NaN]";
        }
        return String.format("[%f;%f]", this.inf, this.sup);
    }

    /**
     * @param a DoubleInterval to use
     *
     * @return the width of the interval (NaN for the NaN interval)
     */
    public static double width(DoubleInterval a) {
        if (a.isNan) {
            return Double.NaN;
        }
        assert (a.sup >= a.inf);
        return a.sup - a.inf;
    }

    /* Interval operation */

    /**
     * Tests whether b is in the interval a.
     *
     * @param a DoubleInterval seen as a set
     * @param b double to test
     *
     * @return true if b is in a, else false; false if the interval isNan
     */
    public static boolean isIn(DoubleInterval a, double b) {
        if (a.isNan) {
            return false;
        }
        return (b >= a.inf && b <= a.sup);
    }

    /**
     * @param a DoubleInterval supposed to be the encompassing one
     * @param b DoubleInterval supposed to be inside the DoubleInterval a
     *
     * @return true if b is completely inside a, false otherwise
     */
    public static boolean isIn(DoubleInterval a, DoubleInterval b) {
        if (a.isNan) {
            return false;
        }
        return (b.inf >= a.inf && b.sup <= a.sup);
    }

    /**
     * @return true if the interval is bounding NaN
     */
    public boolean isNan() {
        return isNan;
    }

    /**
     * Used for some tests... checks the interval is not degenerated.
     * <p>
     * Note : if the interval is NaN, returns true (see isNan()).
     * </p>
     *
     * @return true if this.inf <= this.sup, else false
     */
    public boolean testBounds() {
        if (this.isNan) {
            return true;
        }
        return this.inf <= this.sup;
    }

    /* Mathematical operations */

    /**
     * @param a 1st operand of the addition
     * @param b 2nd operand of the addition
     *
     * @return a new DoubleInterval that is the result of a + b on intervals
     */
    public static DoubleInterval add(DoubleInterval a, DoubleInterval b) {
        if (a.isNan || b.isNan) {
            return new DoubleInterval(Double.NaN);
        }
        double v1 = a.inf + b.inf;
        double v2 = a.sup + b.sup;
        return roundedInterval(v1, v2);
    }

    /**
     * @param a 1st operand of the subtraction
     * @param b 2nd operand of the subtraction
     *
     * @return a new DoubleInterval that is the result of a - b on intervals
     */
    public static DoubleInterval sub(DoubleInterval a, DoubleInterval b) {
        if (a.isNan || b.isNan) {
            return new DoubleInterval(Double.NaN);
        }
        // Subtract crosswise: the smallest difference is inf - sup.
        double v1 = a.inf - b.sup;
        double v2 = a.sup - b.inf;
        return roundedInterval(v1, v2);
    }

    /**
     * @param a 1st operand of the multiplication
     * @param b 2nd operand of the multiplication
     *
     * @return a new DoubleInterval that is the result of a * b on intervals
     */
    public static DoubleInterval mul(DoubleInterval a, DoubleInterval b) {
        if (a.isNan || b.isNan) {
            return new DoubleInterval(Double.NaN);
        }
        // Signs may flip the ordering, so take min/max over all 4 products.
        double v1 = Math.min(Math.min(a.inf * b.inf, a.inf * b.sup), Math.min(a.sup * b.inf, a.sup * b.sup));
        double v2 = Math.max(Math.max(a.inf * b.inf, a.inf * b.sup), Math.max(a.sup * b.inf, a.sup * b.sup));
        return roundedInterval(v1, v2);
    }

    /**
     * @param a 1st operand of the division
     * @param b 2nd operand of the division
     *
     * @return a DoubleInterval that is the result of a / b
     *         - if the b interval contains 0.0, the result interval is NaN
     */
    public static DoubleInterval div(DoubleInterval a, DoubleInterval b) {
        if (a.isNan || b.isNan || isIn(b, 0.0)) {
            return new DoubleInterval(Double.NaN);
        }
        // As with mul, take min/max over all 4 quotients.
        double v1 = Math.min(Math.min(a.inf / b.inf, a.inf / b.sup), Math.min(a.sup / b.inf, a.sup / b.sup));
        double v2 = Math.max(Math.max(a.inf / b.inf, a.inf / b.sup), Math.max(a.sup / b.inf, a.sup / b.sup));
        return roundedInterval(v1, v2);
    }

    /**
     * @param base operand of the power-2 operation
     *
     * @return a new DoubleInterval that is the result of base^2
     */
    public static DoubleInterval pow2(DoubleInterval base) {
        if (base.isNan) {
            return new DoubleInterval(Double.NaN);
        }
        if (base.inf > 0.0) {
            // Entirely positive: squaring is increasing.
            double v1 = Math.pow(base.inf, 2.0);
            double v2 = Math.pow(base.sup, 2.0);
            return roundedInterval(v1, v2);
        } else if (base.sup < 0.0) {
            // Entirely negative: squaring is decreasing, bounds swap.
            double v1 = Math.pow(base.sup, 2.0);
            double v2 = Math.pow(base.inf, 2.0);
            return roundedInterval(v1, v2);
        } else // 0 is in the base interval
        {
            return new DoubleInterval(0.0, Math.max(Math.pow(base.inf, 2.0), Math.pow(base.sup, 2.0)));
        }
    }

    /**
     * @param base     1st operand of the power operation
     *                 PRE : base.inf >= 0.0
     * @param exponent 2nd operand of the operation
     *
     * @return a new DoubleInterval that is the result of base^exponent
     */
    public static DoubleInterval pow(DoubleInterval base, double exponent) {
        if (base.isNan || Double.isNaN(exponent)) {
            return new DoubleInterval(Double.NaN);
        }
        assert (base.inf >= 0.0);
        double v1 = Math.pow(base.inf, exponent);
        double v2 = Math.pow(base.sup, exponent);
        return roundedInterval(v1, v2);
    }

    /**
     * @param base     1st operand of the power operation
     *                 PRE : base.inf >= 0.0
     * @param exponent 2nd operand of the operation
     *
     * @return a new DoubleInterval that is the result of base^exponent;
     *         pow is monotone on each variable, so min/max over the 4 corner
     *         values is enough
     */
    public static DoubleInterval pow(DoubleInterval base, DoubleInterval exponent) {
        if (base.isNan || exponent.isNan) {
            return new DoubleInterval(Double.NaN);
        }
        assert (base.inf >= 0.0);
        double v1 = Math.min(Math.min(Math.pow(base.inf, exponent.inf), Math.pow(base.inf, exponent.sup)),
                Math.min(Math.pow(base.sup, exponent.inf), Math.pow(base.sup, exponent.sup)));
        double v2 = Math.max(Math.max(Math.pow(base.inf, exponent.inf), Math.pow(base.inf, exponent.sup)),
                Math.max(Math.pow(base.sup, exponent.inf), Math.pow(base.sup, exponent.sup)));
        return roundedInterval(v1, v2);
    }

    /**
     * @param a argument for the exponential function
     *
     * @return a new DoubleInterval that is the result of exp(a)
     */
    public static DoubleInterval exp(DoubleInterval a) {
        if (a.isNan) {
            return new DoubleInterval(Double.NaN);
        }
        double v1 = Math.exp(a.inf);
        double v2 = Math.exp(a.sup);
        return roundedInterval(v1, v2);
    }

    /**
     * @param a argument for the natural logarithm
     *          PRE : a must be > 0
     *
     * @return a new DoubleInterval that is the result of ln(a)
     *         NOTE(review): the guard admits a.inf == 0 but the assert below
     *         requires a.inf > 0 — inconsistent for an interval like [0, x]
     *         (Math.log(0) is -Infinity); confirm intended behavior.
     */
    public static DoubleInterval log(DoubleInterval a) {
        if (a.isNan || a.inf < 0) {
            return new DoubleInterval(Double.NaN);
        }
        assert (a.inf > 0.0);
        double v1 = Math.log(a.inf);
        double v2 = Math.log(a.sup);
        return roundedInterval(v1, v2);
    }

    /**
     * @param a argument for the base-10 logarithm
     *          PRE : a must be > 0
     *
     * @return a new DoubleInterval that is the result of log10(a)
     *         NOTE(review): same guard/assert inconsistency as log() above.
     */
    public static DoubleInterval log10(DoubleInterval a) {
        if (a.isNan || a.inf < 0) {
            return new DoubleInterval(Double.NaN);
        }
        assert (a.inf > 0.0);
        double v1 = Math.log10(a.inf);
        double v2 = Math.log10(a.sup);
        return roundedInterval(v1, v2);
    }

    /**
     * @param a operand of the square-root operation
     *          PRE : the interval must be positive (a.inf >= 0.0)
     *
     * @return a new DoubleInterval that is the result of sqrt(a)
     */
    public static DoubleInterval sqrt(DoubleInterval a) {
        if (a.isNan) {
            return new DoubleInterval(Double.NaN);
        }
        assert (a.inf >= 0.0);
        double v1 = Math.sqrt(a.inf);
        double v2 = Math.sqrt(a.sup);
        return roundedInterval(v1, v2);
    }

    /**
     * @param a operand of the absolute operation
     *
     * @return a new DoubleInterval that is the absolute interval of the operand
     */
    public static DoubleInterval abs(DoubleInterval a) {
        if (a.isNan) {
            return new DoubleInterval(Double.NaN);
        }
        if (isIn(a, 0.0)) {
            // Interval straddles zero: result starts at 0.
            double v1 = 0.0;
            double v2 = Math.max(-a.inf, a.sup);
            v2 = v2 + Math.ulp(v2); // round the upper bound outward
            return new DoubleInterval(v1, v2);
        } else if (a.sup < 0) {
            // Entirely negative: mirror and swap bounds.
            return new DoubleInterval(-a.sup, -a.inf);
        } else //(a.inf > 0)
        {
            return new DoubleInterval(a.inf, a.sup); // no rounding needed, the result is already known
        }
    }

    /**
     * @param a operand of the negation
     *
     * @return a new DoubleInterval that is the negative of the operand
     */
    public static DoubleInterval neg(DoubleInterval a) {
        if (a.isNan) {
            return new DoubleInterval(Double.NaN);
        }
        return new DoubleInterval(-a.sup, -a.inf);
    }

    /**
     * Sine over an interval. Extrema of sin occur at -3*pi/2, -pi/2 (min) and
     * pi/2, 3*pi/2 (max); the code first normalizes the interval into
     * [-2*pi; 2*pi], then dispatches on which "section" (a..e, split at the
     * odd multiples of pi/2) each bound falls into.
     *
     * @param a operand of the sine operation on intervals
     *
     * @return a new DoubleInterval that is the result of sin(a)
     */
    public static DoubleInterval sin(DoubleInterval a) {
        if (a.isNan) {
            return new DoubleInterval(Double.NaN);
        }
        if (width(a) >= 2 * PI) {
            // A full period is covered: the image is all of [-1, 1].
            return new DoubleInterval(-1.0, 1.0);
        }
        double inf;
        double sup;
        // convert the interval into [-2*pi ; 2*pi]
        if (a.inf < -PI2) {
            if (a.sup < -PI2) {
                inf = a.inf % PI2;
                sup = a.sup % PI2; // inf and sup are between -2*pi and 0
            } else {
                inf = a.inf + PI2;
                sup = a.sup + PI2;
            }
        } else if (a.sup > PI2) {
            if (a.inf > PI2) {
                inf = a.inf % PI2;
                sup = a.sup % PI2;
            } else {
                inf = a.inf - PI2;
                sup = a.sup - PI2;
            }
        } else {
            inf = a.inf;
            sup = a.sup;
        }
        assert (inf > -PI2 && inf < PI2);
        assert (sup > -PI2 && sup < PI2);
        // The normalization above can invert the bounds; shift one of them by
        // a full period to restore inf <= sup.
        if (inf > sup) {
            if (inf > 0.0) {
                inf -= PI2;
            } else {
                sup += PI2;
            }
        }
        assert (inf <= sup);
        if (inf <= -PI3_2) // inf is in section a
        {
            assert (inf > -PI2 && inf <= -PI3_2);
            if (sup <= -PI3_2) // both are in section a
            {
                assert (sup > -PI2 && sup <= -PI3_2);
                return roundedInterval(Math.sin(inf), Math.sin(sup));
            } else if (sup <= -PI_2) // sup is in section b
            {
                assert (sup <= -PI_2 && sup > -PI3_2);
                // The max at -3*pi/2 is crossed: upper bound is exactly 1.
                double v1 = Math.min(Math.sin(inf), Math.sin(sup));
                return new DoubleInterval(v1 - Math.ulp(v1), 1.0);
            } else // sup is in the c section
            {
                assert (sup > -PI_2 && sup <= PI_2);
                // Both a max and a min are crossed.
                return new DoubleInterval(-1.0, 1.0);
            }
        } else if (inf <= -PI_2) // inf is in b section
        {
            assert (inf <= -PI_2 && inf > -PI3_2);
            if (sup <= -PI_2) // both in b section
            {
                assert (sup <= -PI_2 && sup > -PI3_2);
                // sin is decreasing on section b: bounds swap.
                double v1 = Math.sin(sup);
                double v2 = Math.sin(inf);
                return roundedInterval(v1, v2);
            } else if (sup <= PI_2) // sup is in c section
            {
                assert (sup > -PI_2 && sup <= PI_2);
                // The min at -pi/2 is crossed: lower bound is exactly -1.
                double v1 = -1.0;
                double v2 = Math.max(Math.sin(inf), Math.sin(sup));
                return new DoubleInterval(v1, v2 + Math.ulp(v2));
            } else // sup is in d section
            {
                assert (sup > PI_2 && sup <= PI3_2);
                return new DoubleInterval(-1.0, 1.0);
            }
        } else if (inf <= PI_2) // inf is in the c section
        {
            assert (inf > -PI_2 && inf <= PI_2);
            if (sup <= PI_2) // both in c section
            {
                assert (sup > -PI_2 && sup <= PI_2);
                // sin is increasing on section c.
                return roundedInterval(Math.sin(inf), Math.sin(sup));
            } else if (sup <= PI3_2) // sup in d section
            {
                assert (sup > PI_2 && sup <= PI3_2);
                // The max at pi/2 is crossed.
                double v1 = Math.min(Math.sin(inf), Math.sin(sup));
                double v2 = 1.0;
                return new DoubleInterval(v1 - Math.ulp(v1), v2);
            } else // sup is in e section
            {
                assert (sup <= PI2 && sup > PI3_2);
                return new DoubleInterval(-1.0, 1.0);
            }
        } else if (inf <= PI3_2) // inf is in d section
        {
            assert (inf > PI_2 && inf <= PI3_2);
            if (sup <= PI3_2) // sup is in d section
            {
                assert (sup > PI_2 && sup <= PI3_2);
                // sin is decreasing on section d: bounds swap.
                double v1 = Math.sin(sup);
                double v2 = Math.sin(inf);
                return roundedInterval(v1, v2);
            } else // sup is in e section
            {
                assert (sup <= PI2 && sup > PI3_2);
                // The min at 3*pi/2 is crossed.
                double v1 = -1.0;
                double v2 = Math.max(Math.sin(inf), Math.sin(sup));
                return new DoubleInterval(v1, v2 + Math.ulp(v2));
            }
        } else // both in e section
        {
            assert (inf > PI3_2 && inf <= PI2);
            assert (sup > PI3_2 && sup <= PI2);
            double v1 = Math.sin(inf);
            double v2 = Math.sin(sup);
            return roundedInterval(v1, v2);
        }
    }

    /**
     * Cosine over an interval; same normalization and section-dispatch scheme
     * as sin(), with sections split at the multiples of pi (extrema of cos).
     */
    public static DoubleInterval cos(DoubleInterval a) {
        // using sin(x + 2*pi) = cos(x)
        //return sin(new DoubleInterval(a.inf + PI_2,a.sup + PI_2));
        assert (a.inf <= a.sup);
        if (a.isNan) {
            return new DoubleInterval(Double.NaN);
        }
        if (width(a) >= 2 * PI) {
            return new DoubleInterval(-1.0, 1.0);
        }
        double inf;
        double sup;
        // convert the interval into [-2*pi ; 2*pi]
        if (a.inf < -PI2) {
            if (a.sup < -PI2) {
                inf = a.inf % PI2;
                sup = a.sup % PI2; // inf and sup are between -2*pi and 0
            } else {
                inf = a.inf + PI2;
                sup = a.sup + PI2;
            }
        } else if (a.sup > PI2) {
            if (a.inf > PI2) {
                inf = a.inf % PI2;
                sup = a.sup % PI2;
            } else {
                inf = a.inf - PI2;
                sup = a.sup - PI2;
            }
        } else {
            inf = a.inf;
            sup = a.sup;
        }
        assert (inf > -PI2 && inf < PI2);
        assert (sup > -PI2 && sup < PI2);
        if (inf > sup) {
            if (inf > 0.0) {
                inf -= PI2;
            } else {
                sup += PI2;
            }
        }
        assert (inf <= sup);
        if (inf < -PI) // inf is in the a section
        {
            assert (inf < -PI && inf >= -PI2);
            if (sup < -PI) {
                // cos is decreasing on section a: bounds swap.
                assert (sup >= -PI2 && sup < -PI);
                double v1 = Math.cos(sup);
                double v2 = Math.cos(inf);
                return roundedInterval(v1, v2);
            } else if (sup < 0.0) {
                assert (sup >= -PI && sup < 0.0);
                // The min at -pi is crossed: lower bound is exactly -1.
                double v1 = -1.0;
                double v2 = Math.max(Math.cos(inf), Math.cos(sup));
                return new DoubleInterval(v1, v2 + Math.ulp(v2));
            } else {
                // NOTE(review): non-short-circuit '&' here (likely meant '&&');
                // harmless for these operands, kept as-is.
                assert (sup >= 0.0 & sup <= PI);
                return new DoubleInterval(-1.0, 1.0);
            }
        } else if (inf < 0.0) // inf is in the b section
        {
            assert (inf < 0.0 && inf >= -PI);
            if (sup < 0.0) // sup is in the b section
            {
                // NOTE(review): non-short-circuit '&' again; kept as-is.
                assert (sup < 0.0 & sup >= -PI);
                // cos is increasing on section b.
                double v1 = Math.cos(inf);
                double v2 = Math.cos(sup);
                return roundedInterval(v1, v2);
            } else if (sup < PI) // sup is in the c section
            {
                assert (sup < PI && sup >= 0.0);
                // The max at 0 is crossed: upper bound is exactly 1.
                double v1 = Math.min(Math.cos(inf), Math.cos(sup));
                double v2 = 1.0;
                return new DoubleInterval(v1 - Math.ulp(v1), v2);
            } else // sup is in the d section
            {
                assert (sup > PI && sup <= PI2);
                return new DoubleInterval(-1.0, 1.0);
            }
        } else if (inf < PI) // inf is in the c section
        {
            assert (inf < PI && inf >= 0.0);
            if (sup < PI) // sup is in the c section
            {
                assert (sup < PI && sup >= 0.0);
                // cos is decreasing on section c: bounds swap.
                double v1 = Math.cos(sup);
                double v2 = Math.cos(inf);
                return roundedInterval(v1, v2);
            } else // sup is in the d section
            {
                assert (sup >= PI && sup <= PI2);
                // The min at pi is crossed.
                double v1 = -1.0;
                double v2 = Math.max(Math.cos(inf), Math.cos(sup));
                return new DoubleInterval(v1, v2 + Math.ulp(v2));
            }
        } else // inf is in the d section and sup too
        {
            // NOTE(review): both asserts below test 'inf'; the second one was
            // probably meant to test 'sup'. Kept as-is.
            assert (inf <= PI2 && inf >= -PI2);
            assert (inf <= PI2 && inf >= -PI2);
            // cos is increasing on section d.
            double v1 = Math.cos(inf);
            double v2 = Math.cos(sup);
            return roundedInterval(v1, v2);
        }
    }

    /**
     * Outward rounding: widens the bounds by one ulp on each side so the true
     * mathematical result is guaranteed to be contained.
     *
     * @param inf inferior bound of the interval
     * @param sup superior bound of the interval
     *
     * @return a new DoubleInterval with inf minus one ulp and sup plus one ulp
     */
    private static DoubleInterval roundedInterval(double inf, double sup) {
        if (Double.isNaN(inf) || Double.isNaN(sup)) {
            return new DoubleInterval(Double.NaN);
        }
        return new DoubleInterval(inf - Math.ulp(inf), sup + Math.ulp(sup));
    }
}
package com.sometrik.framework;

import android.graphics.Bitmap.Config;
import android.graphics.Color;
import android.graphics.Typeface;
import android.graphics.drawable.GradientDrawable;
import android.view.Gravity;
import android.view.View;
import android.widget.LinearLayout;
import android.widget.LinearLayout.LayoutParams;
import android.widget.TextView;

/**
 * Android TextView wrapper that maps the framework's generic native commands
 * (setValue, setStyle, visibility, ...) onto TextView calls. Commands that
 * make no sense for a text view are ignored with a console message.
 */
public class FWTextView extends TextView implements NativeCommandHandler {
  private FrameWork frame;

  public FWTextView(FrameWork frame) {
    super(frame);
    this.frame = frame;
    // Color color = new Color();
    // Start with a plain white background (clears any default drawable first).
    this.setBackground(null);
    this.setBackgroundColor(Color.rgb(255, 255, 255));
  }

  // Text views have no children; log and ignore.
  @Override
  public void addChild(View view) {
    System.out.println("FWTextView couldn't handle command");
  }

  // Options are not applicable to a text view; log and ignore.
  @Override
  public void addOption(int optionId, String text) {
    System.out.println("FWTextView couldn't handle command");
  }

  // String value maps directly onto the displayed text.
  @Override
  public void setValue(String v) {
    setText(v);
  }

  // Integer values are not supported; log and ignore.
  @Override
  public void setValue(int v) {
    System.out.println("FWTextView couldn't handle command");
  }

  @Override
  public void setViewEnabled(Boolean enabled) {
    setEnabled(enabled);
  }

  /**
   * Applies one CSS-like style key/value pair to this view.
   * Unrecognized keys are silently ignored.
   * NOTE(review): numeric values are parsed with Integer.parseInt and will
   * throw NumberFormatException on malformed input — confirm callers
   * pre-validate.
   */
  @Override
  public void setStyle(String key, String value) {
    if (key.equals("font-size")) {
      // Named sizes map to fixed point values; anything else is numeric.
      if (value.equals("small")) {
        this.setTextSize(9);
      } else if (value.equals("medium")) {
        this.setTextSize(12);
      } else if (value.equals("large")) {
        this.setTextSize(15);
      } else {
        setTextSize(Integer.parseInt(value));
      }
    } else if (key.equals("padding-top")) {
      setPadding(getPaddingLeft(), Integer.parseInt(value), getPaddingRight(), getPaddingBottom());
    } else if (key.equals("padding-bottom")) {
      setPadding(getPaddingLeft(), getPaddingTop(), getPaddingRight(), Integer.parseInt(value));
    } else if (key.equals("padding-left")) {
      setPadding(Integer.parseInt(value), getPaddingTop(), getPaddingRight(), getPaddingBottom());
    } else if (key.equals("padding-right")) {
      setPadding(getPaddingLeft(), getPaddingTop(), Integer.parseInt(value), getPaddingBottom());
    } else if (key.equals("width")) {
      // Assumes the parent is a LinearLayout (cast below) — numeric widths
      // are treated as dp and converted to pixels.
      LinearLayout.LayoutParams params = (LayoutParams) getLayoutParams();
      if (value.equals("wrap-content")) {
        params.width = LinearLayout.LayoutParams.WRAP_CONTENT;
      } else if (value.equals("match-parent")) {
        params.width = LinearLayout.LayoutParams.MATCH_PARENT;
      } else {
        final float scale = getContext().getResources().getDisplayMetrics().density;
        int pixels = (int) (Integer.parseInt(value) * scale + 0.5f);
        params.width = pixels;
      }
      setLayoutParams(params);
    } else if (key.equals("height")) {
      LinearLayout.LayoutParams params = (LayoutParams) getLayoutParams();
      if (value.equals("wrap-content")) {
        params.height = LinearLayout.LayoutParams.WRAP_CONTENT;
      } else if (value.equals("match-parent")) {
        params.height = LinearLayout.LayoutParams.MATCH_PARENT;
      } else {
        final float scale = getContext().getResources().getDisplayMetrics().density;
        int pixels = (int) (Integer.parseInt(value) * scale + 0.5f);
        params.height = pixels;
      }
      setLayoutParams(params);
    } else if (key.equals("typeface")) {
      if (value.equals("bold")) {
        setTypeface(null, Typeface.BOLD);
      } else if (value.equals("normal")) {
        setTypeface(null, Typeface.NORMAL);
      } else if (value.equals("italic")) {
        setTypeface(null, Typeface.ITALIC);
      } else if (value.equals("bold-italic")) {
        setTypeface(null, Typeface.BOLD_ITALIC);
      }
    } else if (key.equals("borders")) {
      // Uses a stock framed drawable as a border; the gradient variant below
      // is kept for reference.
      this.setBackground(frame.getResources().getDrawable(android.R.drawable.dialog_holo_light_frame));
      // GradientDrawable gdDefault = new GradientDrawable();
      // gdDefault.setColor(Color.parseColor("#c1272d"));
      // float radius = 2.0f;
      // gdDefault.setCornerRadii(new float[] { radius, radius, 0, 0, 0, 0, radius, radius });
      // this.setBackground(gdDefault);
    } else if (key.equals("weight")) {
      LinearLayout.LayoutParams params = (LayoutParams) getLayoutParams();
      params.weight = Integer.parseInt(value);
      setLayoutParams(params);
    } else if (key.equals("align-text")) {
      if (value.equals("left")) {
        setTextAlignment(TEXT_ALIGNMENT_TEXT_START);
      } else if (value.equals("center")) {
        setTextAlignment(TEXT_ALIGNMENT_CENTER);
      }
    } else if (key.equals("single-line")) {
      setSingleLine();
    } else if (key.equals("text-color")) {
      this.setTextColor(Color.parseColor(value));
    } else if (key.equals("gravity")) {
      // NOTE(review): params is fetched and re-set but never modified here;
      // only setGravity() has an effect — the params round-trip looks
      // superfluous. Kept as-is.
      LinearLayout.LayoutParams params = (LayoutParams) getLayoutParams();
      if (value.equals("bottom")) {
        setGravity(Gravity.BOTTOM);
      } else if (value.equals("top")) {
        setGravity(Gravity.TOP);
      } else if (value.equals("left")) {
        setGravity(Gravity.LEFT);
      } else if (value.equals("right")) {
        setGravity(Gravity.RIGHT);
      } else if (value.equals("center")) {
        setGravity(Gravity.CENTER);
      } else if (value.equals("center-vertical")) {
        setGravity(Gravity.CENTER_VERTICAL);
      } else if (value.equals("center-horizontal")) {
        setGravity(Gravity.CENTER_HORIZONTAL);
      }
      setLayoutParams(params);
    }
  }

  @Override
  public void setError(boolean hasError, String errorText) {
    //TODO
    System.out.println("FWTextView couldn't handle command");
  }

  @Override
  public int getElementId() {
    return getId();
  }

  @Override
  public void onScreenOrientationChange(boolean isLandscape) {
    // TODO Auto-generated method stub
  }

  @Override
  public void addData(String text, int row, int column, int sheet) {
    System.out.println("FWTextView couldn't handle command");
  }

  // Maps the framework's boolean visibility onto VISIBLE/INVISIBLE
  // (note: INVISIBLE keeps layout space, unlike GONE).
  @Override
  public void setViewVisibility(boolean visibility) {
    if (visibility) {
      this.setVisibility(VISIBLE);
    } else {
      this.setVisibility(INVISIBLE);
    }
  }

  @Override
  public void clear() {
    System.out.println("couldn't handle command");
  }

  @Override
  public void flush() {
    // TODO Auto-generated method stub
  }

  @Override
  public void addColumn(String text, int columnType) {
    // TODO Auto-generated method stub
  }

  @Override
  public void reshape(int value, int size) {
    // TODO Auto-generated method stub
  }

  @Override
  public void setImage(byte[] bytes, int width, int height, Config config) {
    // TODO Auto-generated method stub
  }

  @Override
  public void reshape(int size) {
    // TODO Auto-generated method stub
  }
}
package ChemHelper;

import java.awt.BorderLayout;
import java.awt.Container;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.LinkedList;
import javax.swing.JButton;
import javax.swing.JFrame;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JMenuItem;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import Equation.Equation;
import Functions.*;

/**
 * Primary GUI class: a JFrame hosting a menu of chemistry "Function" panels
 * (periodic table, stoichiometry, gas laws, ...) plus buttons for saving and
 * reusing equations and numeric results between functions.
 */
public class ChemHelper extends JFrame{
	Container pane;                     // content pane of this frame
	JPanel last, buttons, eqButtons, numButtons; // currently shown panel + button bars
	JMenuBar menu;
	Function[] funcs;                   // all available functions, indexed by createMenu()
	JButton saveEq, useEq, saveNum, useNum;
	Equation equation;                  // last saved equation (null until one is saved)
	Function lastFunc;                  // function whose panel is currently displayed
	LinkedList<Double> savedNumbers;    // most-recently-saved numbers first

	/**
	 * Builds the whole UI: menu bar, default panel (periodic table),
	 * save/use button bars (initially hidden), then packs and shows the frame.
	 * Order matters: createMenu() must run before funcs[0] is used.
	 */
	public ChemHelper(){
		pane = getContentPane();
		pane.setLayout(new BorderLayout());
		createMenu();
		pane.add(menu, BorderLayout.NORTH);
		pane.add(funcs[0].getPanel(), BorderLayout.WEST); //sets periodic table to show by default
		last = funcs[0].getPanel();
		lastFunc = funcs[0];
		saveEq = new JButton("Save equation");
		saveEq.addActionListener(new EquationSaver());
		useEq = new JButton("Use saved");
		useEq.addActionListener(new EquationSaver());
		eqButtons = new JPanel();
		eqButtons.add(saveEq);
		eqButtons.add(useEq);
		eqButtons.setVisible(false);
		saveNum = new JButton("Save numbers");
		saveNum.addActionListener(new NumberSaver());
		useNum = new JButton("Use saved");
		useNum.addActionListener(new NumberSaver());
		numButtons = new JPanel();
		numButtons.add(saveNum);
		numButtons.add(useNum);
		numButtons.setVisible(false);
		savedNumbers = new LinkedList<Double>();
		buttons = new JPanel();
		buttons.add(eqButtons);
		buttons.add(numButtons);
		pane.add(buttons, BorderLayout.SOUTH);
		buttons.setVisible(false);
		equation = null;
		pack();
		this.setPreferredSize(this.getSize());
		setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
		setVisible(true);
	}

	/**
	 * Instantiates every Function and groups them into menus.
	 * menuCutoffs[i] is the index in funcs where menuNames[i]'s entries start;
	 * the last menu runs to the end of funcs.
	 */
	private void createMenu() {
		funcs = new Function[16];
		funcs[0] = new PeriodicTable();
		funcs[1] = new ElectronShell();
		funcs[2] = new CompoundStoichiometry();
		funcs[3] = new Stoichiometry();
		funcs[4] = new LimitingReactant();
		funcs[5] = new PercentYield();
		funcs[6] = new IdealGas();
		funcs[7] = new ContainerChanges();
		funcs[8] = new Effusion();
		funcs[9] = new EquationReader();
		funcs[10] = new RateLaw();
		funcs[11] = new Combustion();
		funcs[12] = new Nuclear();
		funcs[13] = new Empirical();
		funcs[14] = new Density();
		funcs[15] = new About();
		String[] menuNames = {"General Information", "Stoichiometry", "Gas Laws", "Reactions", "Other"}; //Lists the names of the different menus on the menu bar.
		int[] menuCutoffs = {0, 2, 6, 9, 13}; //Specifies the indices where a new menu would start from funcs
		menu = new JMenuBar();
		for(int menuNum = 0; menuNum < menuCutoffs.length; menuNum++) {
			int startIndex = menuCutoffs[menuNum], endIndex;
			if(menuNum + 1 == menuCutoffs.length)
				endIndex = funcs.length - 1;
			else
				endIndex = menuCutoffs[menuNum + 1] - 1;
			JMenu thisMenu = new JMenu(menuNames[menuNum]);
			for(int index = startIndex; index <= endIndex; index++) {
				thisMenu.add(new FunctionMenuItem(funcs[index]));
			}
			menu.add(thisMenu);
		}
	}

	/** Menu item bound to one Function; selecting it swaps in that Function's panel. */
	private class FunctionMenuItem extends JMenuItem {
		private Function function;

		public FunctionMenuItem(Function function) {
			super(function.toString());
			this.function = function;
			addActionListener(new FunctionListener());
		}

		public Function getFunction() {
			return function;
		}

		/** Replaces the visible panel and toggles the save/use button bars. */
		private class FunctionListener implements ActionListener {
			public void actionPerformed(ActionEvent arg0) {
				if(last!=null)
					pane.remove(last);
				lastFunc = ((FunctionMenuItem)arg0.getSource()).getFunction();
				JPanel func = lastFunc.getPanel();
				pane.add(func, BorderLayout.WEST);
				// Equation/number buttons are only shown for functions that support them.
				if(lastFunc.equation())
					eqButtons.setVisible(true);
				else
					eqButtons.setVisible(false);
				if(lastFunc.number())
					numButtons.setVisible(true);
				else
					numButtons.setVisible(false);
				buttons.setVisible(true);
				pane.repaint();
				pack();
				last = func;
				lastFunc.resetFocus();
			}
		}
	}

	/** Handles both the "Save equation" and "Use saved" equation buttons. */
	private class EquationSaver implements ActionListener {
		public void actionPerformed(ActionEvent arg0) {
			if(((JButton)arg0.getSource()).getText().equals("Save equation")) {
				// FIX: call saveEquation() once and reuse the result; the original
				// called it twice, repeating any work/side effects in the function.
				Equation saved = lastFunc.saveEquation();
				if(saved != null)
					equation = saved;
			}
			else {
				if(equation != null)
					lastFunc.useSaved(equation);
			}
		}
	}

	/** Handles both the "Save numbers" and "Use saved" number buttons. */
	private class NumberSaver implements ActionListener {
		public void actionPerformed(ActionEvent arg0) {
			if(((JButton)arg0.getSource()).getText().equals("Save numbers")) {
				double toSave = lastFunc.saveNumber();
				// 0 is the sentinel for "nothing to save"; duplicates are skipped.
				if(toSave != 0 && !savedNumbers.contains(toSave))
					savedNumbers.addFirst(toSave);
			}
			else {
				// Double.valueOf avoids the deprecated Double(double) constructor.
				Object selected = JOptionPane.showInputDialog(pane, "Choose a number to use", "Choose Number",
						JOptionPane.PLAIN_MESSAGE, null, savedNumbers.toArray(), Double.valueOf(0));
				if(selected instanceof Double)
					lastFunc.useSavedNumber((Double)selected);
			}
		}
	}

	public static void main(String[] args){
		new ChemHelper();
	}
}
package com.mapswithme.maps;

import android.app.Application;
import android.content.SharedPreferences;
import android.content.pm.PackageManager.NameNotFoundException;
import android.os.Environment;
import android.os.Handler;
import android.os.Message;
import android.preference.PreferenceManager;
import android.util.Log;

import java.io.File;

import com.google.gson.Gson;
import com.mapswithme.maps.background.AppBackgroundTracker;
import com.mapswithme.maps.background.Notifier;
import com.mapswithme.maps.bookmarks.data.BookmarkManager;
import com.mapswithme.maps.downloader.CountryItem;
import com.mapswithme.maps.downloader.MapManager;
import com.mapswithme.maps.editor.Editor;
import com.mapswithme.maps.location.TrackRecorder;
import com.mapswithme.maps.sound.TtsPlayer;
import com.mapswithme.util.Config;
import com.mapswithme.util.Constants;
import com.mapswithme.util.ThemeSwitcher;
import com.mapswithme.util.UiUtils;
import com.mapswithme.util.Yota;
import com.mapswithme.util.statistics.AlohaHelper;
import com.parse.Parse;
import com.parse.ParseException;
import com.parse.ParseInstallation;
import com.parse.SaveCallback;

/**
 * Application singleton: initializes the native platform layer, storage paths,
 * Parse push SDK, background tracking and (lazily) the native map framework.
 * Accessed statically via {@link #get()}.
 */
public class MwmApplication extends Application
{
  private final static String TAG = "MwmApplication";

  // Parse push-notification preference keys.
  private static final String PREF_PARSE_DEVICE_TOKEN = "ParseDeviceToken";
  private static final String PREF_PARSE_INSTALLATION_ID = "ParseInstallationId";

  private static MwmApplication sSelf;
  private SharedPreferences mPrefs;
  private AppBackgroundTracker mBackgroundTracker;
  private final Gson mGson = new Gson();

  private boolean mAreCountersInitialized;
  private boolean mIsFrameworkInitialized;

  // Handler + token used to marshal native functors onto the UI thread.
  private Handler mMainLoopHandler;
  private final Object mMainQueueToken = new Object();

  // Surfaces download failures as notifications.
  private final MapManager.StorageCallback mStorageCallbacks = new MapManager.StorageCallback()
  {
    @Override
    public void onStatusChanged(String countryId, int newStatus)
    {
      Notifier.cancelDownloadSuggest();
      if (newStatus == CountryItem.STATUS_FAILED)
        Notifier.notifyDownloadFailed(countryId);
    }

    @Override
    public void onProgress(String countryId, long localSize, long remoteSize) {}
  };

  public MwmApplication()
  {
    super();
    sSelf = this;
  }

  public static MwmApplication get()
  {
    return sSelf;
  }

  public static Gson gson()
  {
    return sSelf.mGson;
  }

  public static AppBackgroundTracker backgroundTracker()
  {
    return sSelf.mBackgroundTracker;
  }

  public static SharedPreferences prefs()
  {
    return sSelf.mPrefs;
  }

  /**
   * Creates storage directories and initializes the native platform layer;
   * the heavier native framework is deferred to {@link #initNativeCore()}.
   */
  @Override
  public void onCreate()
  {
    super.onCreate();

    mMainLoopHandler = new Handler(getMainLooper());

    initPaths();
    nativeInitPlatform(getApkPath(), getDataStoragePath(), getTempPath(), getObbGooglePath(),
                       BuildConfig.FLAVOR, BuildConfig.BUILD_TYPE,
                       Yota.isFirstYota(), UiUtils.isTablet());
    initParse();
    mPrefs = getSharedPreferences(getString(R.string.pref_file_name), MODE_PRIVATE);
    mBackgroundTracker = new AppBackgroundTracker();
    TrackRecorder.init();
    Editor.init();
  }

  /**
   * Initializes the native map framework, bookmarks, TTS and theme.
   * Idempotent: subsequent calls are no-ops.
   */
  public void initNativeCore()
  {
    if (mIsFrameworkInitialized)
      return;

    nativeInitFramework();
    MapManager.nativeSubscribe(mStorageCallbacks);
    initNativeStrings();
    BookmarkManager.nativeLoadBookmarks();
    TtsPlayer.INSTANCE.init(this);
    ThemeSwitcher.restart();
    mIsFrameworkInitialized = true;
  }

  @SuppressWarnings("ResultOfMethodCallIgnored")
  private void initPaths()
  {
    new File(getDataStoragePath()).mkdirs();
    new File(getTempPath()).mkdirs();
  }

  /** Pushes localized UI strings down to the native layer. */
  private void initNativeStrings()
  {
    nativeAddLocalization("country_status_added_to_queue", getString(R.string.country_status_added_to_queue));
    nativeAddLocalization("country_status_downloading", getString(R.string.country_status_downloading));
    nativeAddLocalization("country_status_download", getString(R.string.country_status_download));
    nativeAddLocalization("country_status_download_without_routing", getString(R.string.country_status_download_without_routing));
    nativeAddLocalization("country_status_download_failed", getString(R.string.country_status_download_failed));
    nativeAddLocalization("try_again", getString(R.string.try_again));
    nativeAddLocalization("not_enough_free_space_on_sdcard", getString(R.string.not_enough_free_space_on_sdcard));
    nativeAddLocalization("dropped_pin", getString(R.string.dropped_pin));
    nativeAddLocalization("my_places", getString(R.string.my_places));
    nativeAddLocalization("my_position", getString(R.string.my_position));
    nativeAddLocalization("routes", getString(R.string.routes));
    nativeAddLocalization("cancel", getString(R.string.cancel));
    nativeAddLocalization("routing_failed_unknown_my_position", getString(R.string.routing_failed_unknown_my_position));
    nativeAddLocalization("routing_failed_has_no_routing_file", getString(R.string.routing_failed_has_no_routing_file));
    nativeAddLocalization("routing_failed_start_point_not_found", getString(R.string.routing_failed_start_point_not_found));
    nativeAddLocalization("routing_failed_dst_point_not_found", getString(R.string.routing_failed_dst_point_not_found));
    nativeAddLocalization("routing_failed_cross_mwm_building", getString(R.string.routing_failed_cross_mwm_building));
    nativeAddLocalization("routing_failed_route_not_found", getString(R.string.routing_failed_route_not_found));
    nativeAddLocalization("routing_failed_internal_error", getString(R.string.routing_failed_internal_error));
  }

  public boolean isFrameworkInitialized()
  {
    return mIsFrameworkInitialized;
  }

  /** Returns the installed APK path, or "" if the package cannot be resolved. */
  public String getApkPath()
  {
    try
    {
      return getPackageManager().getApplicationInfo(BuildConfig.APPLICATION_ID, 0).sourceDir;
    } catch (final NameNotFoundException e)
    {
      // FIX: log the exception itself so the stack trace isn't silently dropped.
      Log.e(TAG, "Can't get apk path from PackageManager", e);
      return "";
    }
  }

  public static String getDataStoragePath()
  {
    return Environment.getExternalStorageDirectory().getAbsolutePath() + Constants.MWM_DIR_POSTFIX;
  }

  /** Prefers the external cache dir; falls back to a path on external storage. */
  public String getTempPath()
  {
    final File cacheDir = getExternalCacheDir();
    if (cacheDir != null)
      return cacheDir.getAbsolutePath();

    return Environment.getExternalStorageDirectory().getAbsolutePath() +
           String.format(Constants.STORAGE_PATH, BuildConfig.APPLICATION_ID, Constants.CACHE_DIR);
  }

  private static String getObbGooglePath()
  {
    final String storagePath = Environment.getExternalStorageDirectory().getAbsolutePath();
    return storagePath.concat(String.format(Constants.OBB_PATH, BuildConfig.APPLICATION_ID));
  }

  static
  {
    System.loadLibrary("mapswithme");
  }

  /*
   * init Parse SDK
   */
  private void initParse()
  {
    // Do not initialize Parse in default open-source version.
    final String appId = PrivateVariables.parseApplicationId();
    if (appId.isEmpty())
      return;

    Parse.initialize(this, appId, PrivateVariables.parseClientKey());
    ParseInstallation.getCurrentInstallation().saveInBackground(new SaveCallback()
    {
      @Override
      public void done(ParseException e)
      {
        SharedPreferences prefs = prefs();
        String previousId = prefs.getString(PREF_PARSE_INSTALLATION_ID, "");
        String previousToken = prefs.getString(PREF_PARSE_DEVICE_TOKEN, "");

        String newId = ParseInstallation.getCurrentInstallation().getInstallationId();
        String newToken = ParseInstallation.getCurrentInstallation().getString("deviceToken");
        // Only log + persist when the installation id or token actually changed.
        if (!previousId.equals(newId) || !previousToken.equals(newToken))
        {
          org.alohalytics.Statistics.logEvent(AlohaHelper.PARSE_INSTALLATION_ID, newId);
          org.alohalytics.Statistics.logEvent(AlohaHelper.PARSE_DEVICE_TOKEN, newToken);
          prefs.edit()
               .putString(PREF_PARSE_INSTALLATION_ID, newId)
               .putString(PREF_PARSE_DEVICE_TOKEN, newToken).apply();
        }
      }
    });
  }

  /** Bumps the launch counter and applies preference defaults, once per process. */
  public void initCounters()
  {
    if (!mAreCountersInitialized)
    {
      mAreCountersInitialized = true;
      Config.updateLaunchCounter();
      PreferenceManager.setDefaultValues(this, R.xml.prefs_misc, false);
    }
  }

  public static void onUpgrade()
  {
    Config.resetAppSessionCounters();
  }

  /** Called from native code: runs the given native functor on the UI thread. */
  @SuppressWarnings("unused")
  void processFunctor(final long functorPointer)
  {
    Message m = Message.obtain(mMainLoopHandler, new Runnable()
    {
      @Override
      public void run()
      {
        nativeProcessFunctor(functorPointer);
      }
    });
    // Tag with the queue token so pending functors can be removed in bulk.
    m.obj = mMainQueueToken;
    mMainLoopHandler.sendMessage(m);
  }

  void clearFunctorsOnUiThread()
  {
    mMainLoopHandler.removeCallbacksAndMessages(mMainQueueToken);
  }

  /**
   * Initializes native Platform with paths. Should be called before usage of
   * any other native components.
   */
  private native void nativeInitPlatform(String apkPath, String storagePath, String tmpPath,
                                         String obbGooglePath, String flavorName, String buildType,
                                         boolean isYota, boolean isTablet);
  private static native void nativeInitFramework();
  private static native void nativeAddLocalization(String name, String value);
  private static native void nativeProcessFunctor(long functorPointer);
}
package com.akiban.server.store; import static com.akiban.server.store.RowCollector.SCAN_FLAGS_DEEP; import static com.akiban.server.store.RowCollector.SCAN_FLAGS_END_AT_EDGE; import static com.akiban.server.store.RowCollector.SCAN_FLAGS_START_AT_EDGE; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.SortedSet; import java.util.TreeSet; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.akiban.ais.model.Column; import com.akiban.ais.model.HKeyColumn; import com.akiban.ais.model.HKeySegment; import com.akiban.ais.model.UserTable; import com.akiban.message.ErrorCode; import com.akiban.server.AkServerUtil; import com.akiban.server.FieldDef; import com.akiban.server.IndexDef; import com.akiban.server.InvalidOperationException; import com.akiban.server.RowData; import com.akiban.server.RowDef; import com.akiban.server.RowDefCache; import com.akiban.server.RowType; import com.akiban.server.TableStatistics; import com.akiban.server.TableStatus; import com.akiban.server.TableStatusCache; import com.akiban.server.api.dml.ColumnSelector; import com.akiban.server.api.dml.scan.LegacyRowWrapper; import com.akiban.server.api.dml.scan.NewRow; import com.akiban.server.api.dml.scan.NiceRow; import com.akiban.server.message.ScanRowsRequest; import com.akiban.server.service.ServiceManagerImpl; import com.akiban.server.service.session.Session; import com.akiban.server.service.tree.TreeService; import com.akiban.util.Tap; import com.persistit.Exchange; import com.persistit.Key; import com.persistit.KeyFilter; import com.persistit.KeyState; import com.persistit.Management.DisplayFilter; import com.persistit.Persistit; import com.persistit.Transaction; import com.persistit.Tree; import com.persistit.Value; import com.persistit.exception.PersistitException; import com.persistit.exception.RollbackException; import 
com.persistit.exception.TransactionFailedException;

/**
 * Store implementation backed by Persistit B-trees. Rows are stored under
 * hierarchical keys (hkeys); secondary indexes are maintained alongside.
 */
public class PersistitStore implements Store {

    // Session-scoped map of open RowCollectors keyed by table id.
    private static final Session.MapKey<Integer, List<RowCollector>> COLLECTORS = Session.MapKey.mapNamed("collectors");

    final static int INITIAL_BUFFER_SIZE = 1024;

    private static final Logger LOG = LoggerFactory
            .getLogger(PersistitStore.class.getName());

    // Per-thread instrumentation taps around the main read/write paths.
    private static final Tap WRITE_ROW_TAP = Tap.add(new Tap.PerThread(
            "write: write_row"));

    private static final Tap UPDATE_ROW_TAP = Tap.add(new Tap.PerThread(
            "write: update_row"));

    private static final Tap DELETE_ROW_TAP = Tap.add(new Tap.PerThread(
            "write: delete_row"));

    private static final Tap TX_COMMIT_TAP = Tap.add(new Tap.PerThread(
            "write: tx_commit"));

    private static final Tap TX_RETRY_TAP = Tap.add(new Tap.PerThread(
            "write: tx_retry", Tap.Count.class));

    private static final Tap NEW_COLLECTOR_TAP = Tap.add(new Tap.PerThread(
            "read: new_collector"));

    // Optimistic transactions are retried this many times on RollbackException.
    static final int MAX_TRANSACTION_RETRY_COUNT = 10;

    private final static int MEGA = 1024 * 1024;

    private final static int MAX_ROW_SIZE = 5000000;

    // Deferred index keys are flushed once they exceed this many bytes.
    private final static int MAX_INDEX_TRANCHE_SIZE = 10 * MEGA;

    private final static int KEY_STATE_SIZE_OVERHEAD = 50;

    private final static byte[] EMPTY_BYTE_ARRAY = new byte[0];

    private final static String COLLECTORS_SESSION_KEY = "collectors";

    private boolean verbose = false;

    private boolean deferIndexes = false;

    RowDefCache rowDefCache;

    TreeService treeService;

    TableStatusCache tableStatusCache;

    boolean forceToDisk = false; // default to "group commit"

    private DisplayFilter originalDisplayFilter;

    private PersistitStoreIndexManager indexManager;

    // Index keys accumulated per tree while deferIndexes is on.
    private final Map<Tree, SortedSet<KeyState>> deferredIndexKeys = new HashMap<Tree, SortedSet<KeyState>>();

    private int deferredIndexKeyLimit = MAX_INDEX_TRANCHE_SIZE;

    /**
     * Wires this store into the running services and installs a display
     * filter that renders RowData in the Persistit management UI.
     */
    public synchronized void start() throws Exception {
        treeService = ServiceManagerImpl.get().getTreeService();
        tableStatusCache = treeService.getTableStatusCache();
        indexManager = new PersistitStoreIndexManager(this, treeService);
        rowDefCache = new RowDefCache(tableStatusCache);
        originalDisplayFilter = getDb().getManagement().getDisplayFilter();
        getDb().getManagement().setDisplayFilter(
                new RowDataDisplayFilter(this, treeService, originalDisplayFilter));
    }

    /** Restores the original display filter and drops service references. */
    public synchronized void stop() throws Exception {
        getDb().getManagement().setDisplayFilter(originalDisplayFilter);
        treeService = null;
        indexManager = null;
        rowDefCache = null;
    }

    @Override
    public void crash() throws Exception {
        stop();
    }

    @Override
    public Store cast() {
        return this;
    }

    @Override
    public Class<Store> castClass() {
        return Store.class;
    }

    public Persistit getDb() {
        return treeService.getDb();
    }

    /**
     * Returns an Exchange for the given index tree, or — when indexDef is
     * null — for the group tree that holds the table's rows.
     */
    public Exchange getExchange(final Session session, final RowDef rowDef,
            final IndexDef indexDef) throws PersistitException {
        if (indexDef == null) {
            final RowDef groupRowDef = rowDef.isGroupTable() ? rowDef
                    : rowDefCache.getRowDef(rowDef.getGroupRowDefId());
            return treeService.getExchange(session, groupRowDef);
        } else {
            return treeService.getExchange(session, indexDef);
        }
    }

    /** Returns an Exchange obtained from getExchange back to the pool. */
    public void releaseExchange(final Session session, final Exchange exchange) {
        treeService.releaseExchange(session, exchange);
    }

    // Given a RowData for a table, construct an hkey for a row in the table.
    // For a table that does not contain its own hkey, this method uses the
    // parent join columns as needed to find the hkey of the parent table.
/**
 * Builds the hkey for rowData into hEx's key. When a segment's columns come
 * from the parent table, the parent's PK index is consulted; missing parents
 * yield null hkey fields (orphan row). Returns the unique id allocated for a
 * PK-less table when insertingRow is true, otherwise -1.
 */
long constructHKey(Session session, Exchange hEx, RowDef rowDef,
        RowData rowData, boolean insertingRow) throws PersistitException,
        InvalidOperationException {
    // Initialize the hkey being constructed
    long uniqueId = -1;
    Key hKey = hEx.getKey();
    hKey.clear();
    // Metadata for the row's table
    UserTable table = rowDef.userTable();
    FieldDef[] fieldDefs = rowDef.getFieldDefs();
    // Metadata and other state for the parent table
    RowDef parentRowDef = null;
    if (rowDef.getParentRowDefId() != 0) {
        parentRowDef = rowDefCache.getRowDef(rowDef.getParentRowDefId());
    }
    IndexDef.I2H[] indexToHKey = null;
    int i2hPosition = 0;
    Exchange parentPKExchange = null;
    boolean parentExists = false;
    // Nested loop over hkey metadata: All the segments of an hkey, and all
    // the columns of a segment.
    List<HKeySegment> hKeySegments = table.hKey().segments();
    int s = 0;
    while (s < hKeySegments.size()) {
        HKeySegment hKeySegment = hKeySegments.get(s++);
        // Write the ordinal for this segment
        RowDef segmentRowDef = rowDefCache.getRowDef(hKeySegment.table()
                .getTableId());
        hKey.append(segmentRowDef.getOrdinal());
        // Iterate over the segment's columns
        List<HKeyColumn> hKeyColumns = hKeySegment.columns();
        int c = 0;
        while (c < hKeyColumns.size()) {
            HKeyColumn hKeyColumn = hKeyColumns.get(c++);
            UserTable hKeyColumnTable = hKeyColumn.column().getUserTable();
            if (hKeyColumnTable != table) {
                // Hkey column from row of parent table
                if (parentPKExchange == null) {
                    // Initialize parent metadata and state (done lazily,
                    // only when a parent-sourced column is first seen).
                    assert parentRowDef != null : rowDef;
                    IndexDef parentPK = parentRowDef.getPKIndexDef();
                    indexToHKey = parentPK.hkeyFields();
                    parentPKExchange = getExchange(session, rowDef, parentPK);
                    constructParentPKIndexKey(parentPKExchange.getKey(),
                            rowDef, rowData);
                    parentExists = parentPKExchange.hasChildren();
                    if (parentExists) {
                        boolean hasNext = parentPKExchange.next(true);
                        assert hasNext : rowData;
                    }
                    // parent does not necessarily exist. rowData could be
                    // an orphan
                }
                IndexDef.I2H i2h = indexToHKey[i2hPosition++];
                if (i2h.isOrdinalType()) {
                    // Skip over the ordinal slot; the real field follows.
                    assert i2h.ordinal() == segmentRowDef.getOrdinal() : hKeyColumn;
                    i2h = indexToHKey[i2hPosition++];
                }
                if (parentExists) {
                    appendKeyFieldFromKey(parentPKExchange.getKey(), hKey,
                            i2h.indexKeyLoc());
                } else {
                    // orphan row: fill the parent-derived field with null
                    hKey.append(null);
                }
            } else {
                // Hkey column from rowData
                Column column = hKeyColumn.column();
                FieldDef fieldDef = fieldDefs[column.getPosition()];
                if (insertingRow && column.isAkibanPKColumn()) {
                    // Must be a PK-less table. Use unique id from
                    // TableStatus.
                    TableStatus tableStatus = segmentRowDef
                            .getTableStatus();
                    uniqueId = tableStatus.allocateNewUniqueId();
                    hKey.append(uniqueId);
                    // Write rowId into the value part of the row also.
                    rowData.updateNonNullLong(fieldDef, uniqueId);
                } else {
                    appendKeyField(hKey, fieldDef, rowData);
                }
            }
        }
    }
    if (parentPKExchange != null) {
        releaseExchange(session, parentPKExchange);
    }
    return uniqueId;
}

/**
 * Bulk-load variant: builds an hkey from pre-gathered values. ordinals[i]
 * is the ordinal for segment i and nKeyColumns[i] the number of key fields
 * that follow it; hKeyFieldDefs/hKeyValues are consumed in order.
 */
void constructHKey(Exchange hEx, RowDef rowDef, int[] ordinals,
        int[] nKeyColumns, FieldDef[] hKeyFieldDefs, Object[] hKeyValues)
        throws Exception {
    final Key hkey = hEx.getKey();
    hkey.clear();
    int k = 0;
    for (int i = 0; i < ordinals.length; i++) {
        hkey.append(ordinals[i]);
        for (int j = 0; j < nKeyColumns[i]; j++) {
            FieldDef fieldDef = hKeyFieldDefs[k];
            if (fieldDef.isPKLessTableCounter()) {
                // TODO: Maintain a counter elsewhere, maybe in the
                // FieldDef. At the end of the bulk load,
                // TODO: assign the counter to TableStatus.
                TableStatus tableStatus = fieldDef.getRowDef()
                        .getTableStatus();
                hkey.append(tableStatus.allocateNewUniqueId());
            } else {
                appendKeyField(hkey, fieldDef, hKeyValues[k]);
            }
            k++;
        }
    }
}

/**
 * Given a RowData, the hkey where it will be stored, and an IndexDef for a
 * table, construct the index key.
 *
 * @param rowData source row
 * @param indexDef index whose key layout is applied
 */
void constructIndexKey(Key iKey, RowData rowData, IndexDef indexDef,
        Key hKey) throws PersistitException {
    IndexDef.H2I[] fassoc = indexDef.indexKeyFields();
    iKey.clear();
    for (int index = 0; index < fassoc.length; index++) {
        IndexDef.H2I assoc = fassoc[index];
        if (assoc.fieldIndex() >= 0) {
            // Field comes straight from the row data.
            int fieldIndex = assoc.fieldIndex();
            RowDef rowDef = indexDef.getRowDef();
            appendKeyField(iKey, rowDef.getFieldDef(fieldIndex), rowData);
        } else if (assoc.hKeyLoc() >= 0) {
            // Field is copied out of the already-built hkey.
            appendKeyFieldFromKey(hKey, iKey, assoc.hKeyLoc());
        } else {
            throw new IllegalStateException("Invalid FA");
        }
    }
}

/**
 * Given an index key and an indexDef, construct the corresponding hkey for
 * the row identified by the index key.
 *
 * @param hKey output hkey
 * @param indexKey source index key
 * @param indexDef mapping between index-key fields and hkey fields
 */
void constructHKeyFromIndexKey(final Key hKey, final Key indexKey,
        final IndexDef indexDef) {
    final IndexDef.I2H[] fassoc = indexDef.hkeyFields();
    hKey.clear();
    for (int index = 0; index < fassoc.length; index++) {
        final IndexDef.I2H fa = fassoc[index];
        if (fa.isOrdinalType()) {
            hKey.append(fa.ordinal());
        } else {
            final int depth = fassoc[index].indexKeyLoc();
            if (depth < 0 || depth > indexKey.getDepth()) {
                throw new IllegalStateException(
                        "IndexKey too shallow - requires depth=" + depth
                                + ": " + indexKey);
            }
            appendKeyFieldFromKey(indexKey, hKey, fa.indexKeyLoc());
        }
    }
}

/**
 * Given an indexDef, construct the corresponding hkey containing nulls.
 *
 * @param hKey output hkey (ordinals kept, all field slots null)
 * @param indexDef mapping that supplies the ordinals
 */
void constructHKeyFromNullIndexKey(final Key hKey, final IndexDef indexDef) {
    final IndexDef.I2H[] fassoc = indexDef.hkeyFields();
    hKey.clear();
    for (int index = 0; index < fassoc.length; index++) {
        final IndexDef.I2H fa = fassoc[index];
        if (fa.isOrdinalType()) {
            hKey.append(fa.ordinal());
        } else {
            hKey.append(null);
        }
    }
}

/**
 * Given a RowData for a table, construct an Exchange set up with a Key that
 * is the prefix of the parent's primary key index key.
 *
 * @param rowData source row; its parent-join fields populate the key
 */
void constructParentPKIndexKey(final Key iKey, final RowDef rowDef,
        final RowData rowData) {
    iKey.clear();
    appendKeyFields(iKey, rowDef, rowData, rowDef.getParentJoinFields());
}

/** Appends the listed row fields to the key, in order. */
void appendKeyFields(final Key key, final RowDef rowDef,
        final RowData rowData, final int[] fields) {
    for (int fieldIndex = 0; fieldIndex < fields.length; fieldIndex++) {
        final FieldDef fieldDef = rowDef.getFieldDef(fields[fieldIndex]);
        appendKeyField(key, fieldDef, rowData);
    }
}

/** Appends one field from rowData using the field's own key encoding. */
void appendKeyField(final Key key, final FieldDef fieldDef,
        final RowData rowData) {
    fieldDef.getEncoding().toKey(fieldDef, rowData, key);
}

/**
 * Copies the already-encoded key segment at the given depth from fromKey
 * onto the end of toKey (raw byte copy, no re-encoding).
 */
private void appendKeyFieldFromKey(final Key fromKey, final Key toKey,
        final int depth) {
    fromKey.indexTo(depth);
    int from = fromKey.getIndex();
    fromKey.indexTo(depth + 1);
    int to = fromKey.getIndex();
    if (from >= 0 && to >= 0 && to > from) {
        System.arraycopy(fromKey.getEncodedBytes(), from,
                toKey.getEncodedBytes(), toKey.getEncodedSize(), to - from);
        toKey.setEncodedSize(toKey.getEncodedSize() + to - from);
    }
}

/** Appends a single already-materialized value to the key. */
private void appendKeyField(final Key key, final FieldDef fieldDef,
        Object value) {
    fieldDef.getEncoding().toKey(fieldDef, value, key);
}

@Override
public RowDefCache getRowDefCache() {
    return rowDefCache;
}

/** Guards against use before start(): throws NPE with a helpful message. */
private static <T> T errorIfNull(String description, T object) {
    if (object == null) {
        throw new NullPointerException(description
                + " is null; did you call startUp()?");
    }
    return object;
}

public IndexManager getIndexManager() {
    return errorIfNull("index manager", indexManager);
}

@Override
public void setVerbose(final boolean verbose) {
    this.verbose = verbose;
}

@Override
public boolean isVerbose() {
    return verbose;
}

/**
 * Writes a row: builds its hkey, stores the h-row, maintains counters and
 * secondary indexes, and fixes up hkeys of pre-existing orphans.
 *
 * @param rowData
 *            the row data
 * @throws InvalidOperationException
 *             if the given table is unknown or deleted; or if there's a
 *             duplicate key error
 */
@Override
public void writeRow(final Session session, final RowData rowData)
        throws InvalidOperationException, PersistitException {
    final int rowDefId =
rowData.getRowDefId();
    // Oversized rows are only warned about, not rejected.
    if (rowData.getRowSize() > MAX_ROW_SIZE) {
        if (LOG.isWarnEnabled()) {
            LOG.warn("RowData size " + rowData.getRowSize()
                    + " is larger than current limit of " + MAX_ROW_SIZE
                    + " bytes");
        }
    }
    WRITE_ROW_TAP.in();
    final RowDef rowDef = rowDefCache.getRowDef(rowDefId);
    final Transaction transaction = treeService.getTransaction(session);
    Exchange hEx = null;
    try {
        long uniqueId = -1;
        hEx = getExchange(session, rowDef, null);
        // Optimistic-transaction retry loop: on RollbackException the whole
        // body is retried up to MAX_TRANSACTION_RETRY_COUNT times.
        int retries = MAX_TRANSACTION_RETRY_COUNT;
        for (;;) {
            transaction.begin();
            try {
                // Does the heavy lifting of looking up the full hkey in
                // parent's primary index if necessary.
                uniqueId = constructHKey(session, hEx, rowDef, rowData, true);
                if (hEx.isValueDefined()) {
                    complainAboutDuplicateKey("PRIMARY", hEx.getKey());
                }
                packRowData(hEx, rowDef, rowData);
                // Store the h-row
                hEx.store();
                if (rowDef.isAutoIncrement()) {
                    final long location = rowDef.fieldLocation(rowData,
                            rowDef.getAutoIncrementField());
                    if (location != 0) {
                        final long autoIncrementValue = rowData
                                .getIntegerValue((int) location,
                                        (int) (location >>> 32));
                        tableStatusCache.updateAutoIncrementValue(rowDefId,
                                autoIncrementValue);
                    }
                }
                tableStatusCache.incrementRowCount(rowDefId);
                if (uniqueId > 0) {
                    // A fresh unique id was allocated for a PK-less table.
                    tableStatusCache
                            .updateUniqueIdValue(rowDefId, uniqueId);
                }
                for (final IndexDef indexDef : rowDef.getIndexDefs()) {
                    // Insert the index keys (except for the case of a
                    // root table's PK index.)
                    if (!indexDef.isHKeyEquivalent())
                        insertIntoIndex(session, indexDef, rowData,
                                hEx.getKey(), deferIndexes);
                }
                // The row being inserted might be the parent of orphan rows
                // already present. The hkeys of these
                // orphan rows need to be maintained. The hkeys of interest
                // contain the PK from the inserted row,
                // and nulls for other hkey fields nearer the root.
                // TODO: optimizations
                // - If we knew that no descendent table had an orphan (e.g.
                // store this info in TableStatus),
                // then this propagation could be skipped.
                hEx.clear();
                Key hKey = hEx.getKey();
                UserTable table = rowDef.userTable();
                List<Column> pkColumns = table
                        .getPrimaryKeyIncludingInternal().getColumns();
                List<HKeySegment> hKeySegments = table.hKey().segments();
                int s = 0;
                while (s < hKeySegments.size()) {
                    HKeySegment segment = hKeySegments.get(s++);
                    RowDef segmentRowDef = rowDefCache.getRowDef(segment
                            .table().getTableId());
                    hKey.append(segmentRowDef.getOrdinal());
                    List<HKeyColumn> hKeyColumns = segment.columns();
                    int c = 0;
                    while (c < hKeyColumns.size()) {
                        HKeyColumn hKeyColumn = hKeyColumns.get(c++);
                        Column column = hKeyColumn.column();
                        RowDef columnTableRowDef = rowDefCache
                                .getRowDef(column.getTable().getTableId());
                        if (pkColumns.contains(column)) {
                            // PK field of the new row: take from rowData.
                            appendKeyField(hKey,
                                    columnTableRowDef.getFieldDef(column
                                            .getPosition()), rowData);
                        } else {
                            // Non-PK hkey field: orphans have null here.
                            hKey.append(null);
                        }
                    }
                }
                propagateDownGroup(session, hEx);
                transaction.commit(forceToDisk);
                TX_COMMIT_TAP.in();
                break;
            } catch (RollbackException re) {
                TX_RETRY_TAP.out();
                if (--retries < 0) {
                    throw new TransactionFailedException();
                }
            } finally {
                TX_COMMIT_TAP.out();
                transaction.end();
            }
        }
        if (deferredIndexKeyLimit <= 0) {
            // The deferred-key budget is exhausted; flush to the index trees.
            putAllDeferredIndexKeys(session);
        }
        return;
    } finally {
        releaseExchange(session, hEx);
        WRITE_ROW_TAP.out();
    }
}

/** Uniform duplicate-key failure used by the write path. */
private void complainAboutDuplicateKey(String indexName, Key hkey)
        throws InvalidOperationException {
    throw new InvalidOperationException(ErrorCode.DUPLICATE_KEY,
            "Non-unique key for index %s: %s", indexName, hkey);
}

/**
 * Bulk-load write: hkey fields are supplied directly, no parent lookup and
 * (currently) no secondary-index maintenance — see commented code below.
 */
@Override
public void writeRowForBulkLoad(final Session session, Exchange hEx,
        RowDef rowDef, RowData rowData, int[] ordinals, int[] nKeyColumns,
        FieldDef[] hKeyFieldDefs, Object[] hKeyValues) throws Exception {
    /*
     * if (verbose && LOG.isInfoEnabled()) { LOG.info("BulkLoad writeRow: "
     * + rowData.toString(rowDefCache)); }
     */
    constructHKey(hEx, rowDef, ordinals, nKeyColumns, hKeyFieldDefs,
            hKeyValues);
    packRowData(hEx, rowDef, rowData);
    // Store the h-row
    hEx.store();
    /*
     * for (final IndexDef indexDef : rowDef.getIndexDefs()) { //
 Insert the
     * index keys (except for the case of a // root table's PK index.) if
     * (!indexDef.isHKeyEquivalent()) { insertIntoIndex(indexDef, rowData,
     * hEx.getKey(), deferIndexes); } } if (deferredIndexKeyLimit <= 0) {
     * putAllDeferredIndexKeys(); }
     */
    return;
}

// TODO - remove - this is used only by the PersistitStoreAdapter in
// bulk loader.
@Override
public void updateTableStats(final Session session, RowDef rowDef,
        long rowCount) throws Exception {
    // no-op for now
}

/**
 * Deletes a row: removes the h-row, decrements the row count, removes all
 * index entries, and re-maintains hkeys of descendants that become orphans.
 * Retries the transaction on rollback like writeRow does.
 */
@Override
public void deleteRow(final Session session, final RowData rowData)
        throws InvalidOperationException, PersistitException {
    DELETE_ROW_TAP.in();
    final int rowDefId = rowData.getRowDefId();
    final RowDef rowDef = rowDefCache.getRowDef(rowDefId);
    Exchange hEx = null;
    final Transaction transaction = treeService.getTransaction(session);

    try {
        hEx = getExchange(session, rowDef, null);

        int retries = MAX_TRANSACTION_RETRY_COUNT;
        for (;;) {
            transaction.begin();
            try {
                constructHKey(session, hEx, rowDef, rowData, false);
                hEx.fetch();
                //
                // Verify that the row exists
                //
                if (!hEx.getValue().isDefined()) {
                    throw new InvalidOperationException(
                            ErrorCode.NO_SUCH_RECORD,
                            "Missing record at key: %s", hEx.getKey());
                }
                //
                // Verify that the row hasn't changed. Note: at some point
                // we may want to optimize the protocol to send only PK and
                // FK fields in oldRowData, in which case this test will
                // need to change.
                //
                // TODO - review. With covering indexes, that day has come.
                // We can no longer do this comparison when the "old" row
                // has only its PK fields.
                //
                // final int oldStart = rowData.getInnerStart();
                // final int oldSize = rowData.getInnerSize();
                // if (!bytesEqual(rowData.getBytes(), oldStart, oldSize,
                // hEx
                // .getValue().getEncodedBytes(), 0, hEx.getValue()
                // .getEncodedSize())) {
                // throw new StoreException(HA_ERR_RECORD_CHANGED,
                // "Record changed at key " + hEx.getKey());

                //
                // Remove the h-row
                //
                hEx.remove();
                tableStatusCache.decrementRowCount(rowDefId);

                //
                // Remove the indexes, including the PK index
                //
                for (final IndexDef indexDef : rowDef.getIndexDefs()) {
                    if (!indexDef.isHKeyEquivalent()) {
                        deleteIndex(session, indexDef, rowDef, rowData,
                                hEx.getKey());
                    }
                }

                // The row being deleted might be the parent of rows that
                // now become orphans. The hkeys
                // of these rows need to be maintained.
                propagateDownGroup(session, hEx);

                transaction.commit(forceToDisk);
                return;
            } catch (RollbackException re) {
                if (--retries < 0) {
                    throw new TransactionFailedException();
                }
            } finally {
                transaction.end();
            }
        }
    } finally {
        releaseExchange(session, hEx);
        DELETE_ROW_TAP.out();
    }
}

/**
 * Updates a row in place when the PK and parent-join fields are unchanged;
 * otherwise falls back to delete + re-insert (which re-parents the row).
 * columnSelector, when non-null, chooses which columns of newRowData apply;
 * the rest are taken from the stored row.
 */
@Override
public void updateRow(final Session session, final RowData oldRowData,
        final RowData newRowData, final ColumnSelector columnSelector)
        throws InvalidOperationException, PersistitException {
    final int rowDefId = oldRowData.getRowDefId();

    if (newRowData.getRowDefId() != rowDefId) {
        throw new IllegalArgumentException(
                "RowData values have different rowDefId values: ("
                        + rowDefId + "," + newRowData.getRowDefId() + ")");
    }
    UPDATE_ROW_TAP.in();
    final RowDef rowDef = rowDefCache.getRowDef(rowDefId);
    Exchange hEx = null;
    final Transaction transaction = treeService.getTransaction(session);
    try {
        hEx = getExchange(session, rowDef, null);
        int retries = MAX_TRANSACTION_RETRY_COUNT;
        for (;;) {
            transaction.begin();
            try {
                final TableStatus ts = rowDef.getTableStatus();
                constructHKey(session, hEx, rowDef, oldRowData, false);
                hEx.fetch();
                //
                // Verify that the row exists
                //
                if (!hEx.getValue().isDefined()) {
                    throw new InvalidOperationException(
                            ErrorCode.NO_SUCH_RECORD,
                            "Missing record at key: %s", hEx.getKey());
                }

                // Combine current version of row with the version coming in
                // on the update request.
                // This is done by taking only the values of columns listed
                // in the column selector.
                RowData currentRow = new RowData(EMPTY_BYTE_ARRAY);
                expandRowData(hEx, currentRow);
                final RowData mergedRowData = columnSelector == null ? newRowData
                        : mergeRows(rowDef, currentRow, newRowData,
                                columnSelector);
                // Verify that it hasn't changed. Note: at some point we
                // may want to optimize the protocol to send only PK and FK
                // fields in oldRowData, in which case this test will need
                // to change.
                if (!fieldsEqual(rowDef, oldRowData, mergedRowData, rowDef
                        .getPKIndexDef().getFields())
                        || !fieldsEqual(rowDef, oldRowData, mergedRowData,
                                rowDef.getParentJoinFields())) {
                    // PK or parent join changed: the hkey moves, so the row
                    // must be deleted and re-inserted.
                    deleteRow(session, oldRowData);
                    writeRow(session, mergedRowData);
                } else {
                    packRowData(hEx, rowDef, mergedRowData);
                    // Store the h-row
                    hEx.store();

                    // Update the indexes
                    //
                    for (final IndexDef indexDef : rowDef.getIndexDefs()) {
                        if (!indexDef.isHKeyEquivalent()) {
                            updateIndex(session, indexDef, rowDef,
                                    currentRow, mergedRowData, hEx.getKey());
                        }
                    }
                }
                transaction.commit(forceToDisk);
                return;
            } catch (RollbackException re) {
                if (--retries < 0) {
                    throw new TransactionFailedException();
                }
            } finally {
                transaction.end();
            }
        }
    } finally {
        releaseExchange(session, hEx);
        UPDATE_ROW_TAP.out();
    }
}

private void propagateDownGroup(Session session, Exchange exchange)
        throws PersistitException, InvalidOperationException {
    // exchange is positioned at a row R that has just been replaced by R',
    // (because we're processing an update
    // that has to be implemented as delete/insert).  hKey is the hkey of R.
    // The replacement, R', is already present.
    // For each descendent* D of R, this method deletes and reinserts D.
    // Reinsertion of D causes its hkey to be
    // recomputed. This may depend on an ancestor being updated (if part of
    // D's hkey comes from the parent's
    // PK index).
That's OK because updates are processed preorder, (i.e., // ancestors before descendents). // This method will modify the state of exchange. // * D is a descendent of R means that D is below R in the group. I.e., // hkey(R) is a prefix of hkey(D). // TODO: Optimizations // - Don't have to visit children that contain their own hkey // - Don't have to visit children whose hkey contains no changed column Key hKey = exchange.getKey(); KeyFilter filter = new KeyFilter(hKey, hKey.getDepth() + 1, Integer.MAX_VALUE); RowData descendentRowData = new RowData(EMPTY_BYTE_ARRAY); while (exchange.next(filter)) { Value value = exchange.getValue(); int descendentRowDefId = AkServerUtil.getInt( value.getEncodedBytes(), RowData.O_ROW_DEF_ID - RowData.LEFT_ENVELOPE_SIZE); RowDef descendentRowDef = rowDefCache.getRowDef(descendentRowDefId); expandRowData(exchange, descendentRowData); // Delete the current row from the tree exchange.remove(); tableStatusCache.decrementRowCount(descendentRowDefId); for (IndexDef indexDef : descendentRowDef.getIndexDefs()) { if (!indexDef.isHKeyEquivalent()) { deleteIndex(session, indexDef, descendentRowDef, descendentRowData, exchange.getKey()); } } // Reinsert it, recomputing the hkey writeRow(session, descendentRowData); } } /** * Remove data from the <b>entire group</b> that this RowDef ID is contained * in. This includes all table and index data for all user and group tables * in the group. * * @param session * Session to work on. * @param rowDefId * RowDef ID to select group to truncate * @throws PersistitException * for a PersistIt level error (e.g. 
Rollback) */ @Override public void truncateGroup(final Session session, final int rowDefId) throws PersistitException { RowDef groupRowDef = rowDefCache.getRowDef(rowDefId); if (!groupRowDef.isGroupTable()) { groupRowDef = rowDefCache.getRowDef(groupRowDef.getGroupRowDefId()); } final Transaction transaction = treeService.getTransaction(session); int retries = MAX_TRANSACTION_RETRY_COUNT; for (;;) { transaction.begin(); try { // Remove the index trees for (final IndexDef indexDef : groupRowDef.getIndexDefs()) { if (!indexDef.isHKeyEquivalent()) { final Exchange iEx = getExchange(session, groupRowDef, indexDef); iEx.removeAll(); releaseExchange(session, iEx); } indexManager.deleteIndexAnalysis(session, indexDef); } for (final RowDef userRowDef : groupRowDef .getUserTableRowDefs()) { for (final IndexDef indexDef : userRowDef.getIndexDefs()) { indexManager.deleteIndexAnalysis(session, indexDef); } } // remove the htable tree final Exchange hEx = getExchange(session, groupRowDef, null); hEx.removeAll(); releaseExchange(session, hEx); for (int i = 0; i < groupRowDef.getUserTableRowDefs().length; i++) { final int childRowDefId = groupRowDef.getUserTableRowDefs()[i] .getRowDefId(); tableStatusCache.truncate(childRowDefId); } transaction.commit(forceToDisk); return; } catch (RollbackException re) { if (--retries < 0) { throw new TransactionFailedException(); } } finally { transaction.end(); } } } @Override public void truncateTableStatus(final Session session, final int rowDefId) throws PersistitException { final Transaction transaction = treeService.getTransaction(session); transaction.begin(); try { tableStatusCache.truncate(rowDefId); transaction.commit(forceToDisk); return; } finally { transaction.end(); } } @Override public RowCollector getSavedRowCollector(final Session session, final int tableId) throws InvalidOperationException { final List<RowCollector> list = collectorsForTableId(session, tableId); if (list.isEmpty()) { LOG.debug("Nested RowCollector on 
tableId={} depth={}", tableId, (list.size() + 1)); throw new InvalidOperationException(ErrorCode.CURSOR_IS_FINISHED, "No RowCollector for tableId=%d (depth=%d)", tableId, list.size() + 1); } return list.get(list.size() - 1); } @Override public void addSavedRowCollector(final Session session, final RowCollector rc) { final Integer tableId = rc.getTableId(); final List<RowCollector> list = collectorsForTableId(session, tableId); if (!list.isEmpty()) { LOG.debug("Note: Nested RowCollector on tableId={} depth={}", tableId, list.size() + 1); assert list.get(list.size() - 1) != rc : "Redundant call"; // This disallows the patch because we agreed not to fix the // bug. However, these changes fix a memory leak, which is // important for robustness. // throw new StoreException(122, "Bug 255 workaround is disabled"); } list.add(rc); } @Override public void removeSavedRowCollector(final Session session, final RowCollector rc) throws InvalidOperationException { final Integer tableId = rc.getTableId(); final List<RowCollector> list = collectorsForTableId(session, tableId); if (list.isEmpty()) { throw new InvalidOperationException(ErrorCode.INTERNAL_ERROR, "Attempt to remove RowCollector from empty list"); } final RowCollector removed = list.remove(list.size() - 1); if (removed != rc) { throw new InvalidOperationException(ErrorCode.INTERNAL_ERROR, "Attempt to remove the wrong RowCollector"); } } private List<RowCollector> collectorsForTableId(final Session session, final int tableId) { List<RowCollector> list = session.get(COLLECTORS, tableId); if (list == null) { list = new ArrayList<RowCollector>(); session.put(COLLECTORS, tableId, list); } return list; } private final RowDef checkRequest(final Session session, int rowDefId, RowData start, RowData end, int indexId, int scanFlags) throws InvalidOperationException, PersistitException { if (start != null && start.getRowDefId() != rowDefId) { throw new IllegalArgumentException( "Start and end RowData must specify the same 
rowDefId"); } if (end != null && end.getRowDefId() != rowDefId) { throw new IllegalArgumentException( "Start and end RowData must specify the same rowDefId"); } final RowDef rowDef = rowDefCache.getRowDef(rowDefId); if (rowDef == null) { throw new IllegalArgumentException("No RowDef for rowDefId " + rowDefId); } return rowDef; } public RowCollector newRowCollector(Session session, ScanRowsRequest request) throws InvalidOperationException, PersistitException { return newRowCollector(session, request.getTableId(), request.getIndexId(), request.getScanFlags(), request.getStart(), request.getEnd(), request.getColumnBitMap()); } @Override public RowCollector newRowCollector(Session session, int rowDefId, int indexId, int scanFlags, RowData start, RowData end, byte[] columnBitMap) throws InvalidOperationException, PersistitException { NEW_COLLECTOR_TAP.in(); RowDef rowDef = checkRequest(session, rowDefId, start, end, indexId, scanFlags); RowCollector rc = new PersistitStoreRowCollector(session, this, scanFlags, start, end, columnBitMap, rowDef, indexId); NEW_COLLECTOR_TAP.out(); return rc; } public final static long HACKED_ROW_COUNT = 2; @Override public long getRowCount(final Session session, final boolean exact, final RowData start, final RowData end, final byte[] columnBitMap) throws Exception { // TODO: Compute a reasonable value. The value "2" is a hack - // special because it's not 0 or 1, but small enough to induce // MySQL to use an index rather than full table scan. 
return HACKED_ROW_COUNT; // TODO: delete the HACKED_ROW_COUNT field when // this gets fixed // final int tableId = start.getRowDefId(); // final TableStatus status = tableManager.getTableStatus(tableId); // return status.getRowCount(); } @Override public TableStatistics getTableStatistics(final Session session, int tableId) throws Exception { final RowDef rowDef = rowDefCache.getRowDef(tableId); final TableStatistics ts = new TableStatistics(tableId); final TableStatus status = rowDef.getTableStatus(); if (rowDef.getRowType() == RowType.GROUP) { ts.setRowCount(2); ts.setAutoIncrementValue(-1); } else { ts.setAutoIncrementValue(status.getAutoIncrementValue()); ts.setRowCount(status.getRowCount()); } ts.setUpdateTime(Math.max(status.getLastUpdateTime(), status.getLastWriteTime())); ts.setCreationTime(status.getCreationTime()); // TODO - get correct values ts.setMeanRecordLength(100); ts.setBlockSize(8192); indexManager.populateTableStatistics(session, ts); return ts; } @Override public void analyzeTable(final Session session, final int tableId) throws Exception { final RowDef rowDef = rowDefCache.getRowDef(tableId); indexManager.analyzeTable(session, rowDef); } // FOR TESTING ONLY @Override public List<RowData> fetchRows(final Session session, final String schemaName, final String tableName, final String columnName, final Object least, final Object greatest, final String leafTableName) throws Exception { final ByteBuffer payload = ByteBuffer.allocate(65536); return fetchRows(session, schemaName, tableName, columnName, least, greatest, leafTableName, payload); } public List<RowData> fetchRows(final Session session, final String schemaName, final String tableName, final String columnName, final Object least, final Object greatest, final String leafTableName, final ByteBuffer payload) throws Exception { final List<RowData> list = new ArrayList<RowData>(); final String compoundName = schemaName + "." 
+ tableName; final RowDef rowDef = rowDefCache.getRowDef(compoundName); if (rowDef == null) { throw new InvalidOperationException(ErrorCode.NO_SUCH_TABLE, compoundName); } final RowDef groupRowDef = rowDef.isGroupTable() ? rowDef : rowDefCache .getRowDef(rowDef.getGroupRowDefId()); final RowDef[] userRowDefs = groupRowDef.getUserTableRowDefs(); FieldDef fieldDef = null; for (final FieldDef fd : rowDef.getFieldDefs()) { if (fd.getName().equals(columnName)) { fieldDef = fd; break; } } if (fieldDef == null) { throw new InvalidOperationException(ErrorCode.NO_SUCH_COLUMN, columnName + " in " + compoundName); } IndexDef indexDef = null; for (final IndexDef id : rowDef.getIndexDefs()) { if (id.getFields()[0] == fieldDef.getFieldIndex()) { indexDef = id; if (indexDef.getFields().length == 1) { break; } } } if (indexDef == null) { throw new InvalidOperationException(ErrorCode.NO_INDEX, "on column " + columnName + " in " + compoundName); } boolean deepMode = false; RowDef leafRowDef = null; if (tableName.equals(leafTableName)) { leafRowDef = rowDef; } else if (leafTableName == null) { leafRowDef = rowDef; deepMode = true; } else for (int index = 0; index < userRowDefs.length; index++) { if (userRowDefs[index].getTableName().equals(leafTableName)) { leafRowDef = userRowDefs[index]; break; } } if (leafRowDef == null) { throw new InvalidOperationException(ErrorCode.NO_SUCH_TABLE, leafTableName + " in group"); } RowData start = null; RowData end = null; int flags = deepMode ? 
SCAN_FLAGS_DEEP : 0; if (least == null) { flags |= SCAN_FLAGS_START_AT_EDGE; } else { final Object[] startValues = new Object[groupRowDef.getFieldCount()]; startValues[fieldDef.getFieldIndex() + rowDef.getColumnOffset()] = least; start = new RowData(new byte[128]); start.createRow(groupRowDef, startValues); } if (greatest == null) { flags |= SCAN_FLAGS_END_AT_EDGE; } else { final Object[] endValues = new Object[groupRowDef.getFieldCount()]; endValues[fieldDef.getFieldIndex() + rowDef.getColumnOffset()] = greatest; end = new RowData(new byte[128]); end.createRow(groupRowDef, endValues); } final byte[] bitMap = new byte[(7 + groupRowDef.getFieldCount()) / 8]; for (RowDef def = leafRowDef; def != null;) { final int bit = def.getColumnOffset(); final int fc = def.getFieldCount(); for (int i = bit; i < bit + fc; i++) { bitMap[i / 8] |= (1 << (i % 8)); } if (def != rowDef && def.getParentRowDefId() != 0) { def = rowDefCache.getRowDef(def.getParentRowDefId()); } else { break; } } final RowCollector rc = newRowCollector(session, groupRowDef.getRowDefId(), indexDef.getId(), flags, start, end, bitMap); while (rc.hasMore()) { payload.clear(); while (rc.collectNextRow(payload)) ; payload.flip(); for (int p = payload.position(); p < payload.limit();) { final RowData rowData = new RowData(payload.array(), p, payload.limit()); rowData.prepareRow(p); list.add(rowData); p = rowData.getRowEnd(); } } rc.close(); return list; } boolean hasNullIndexSegments(final RowData rowData, final IndexDef indexDef) { assert indexDef.getRowDef().getRowDefId() == rowData.getRowDefId(); for (int i : indexDef.getFields()) { if (rowData.isNull(i)) { return true; } } return false; } void insertIntoIndex(final Session session, final IndexDef indexDef, final RowData rowData, final Key hkey, final boolean deferIndexes) throws InvalidOperationException, PersistitException { final Exchange iEx = getExchange(session, indexDef.getRowDef(), indexDef); constructIndexKey(iEx.getKey(), rowData, indexDef, hkey); 
final Key key = iEx.getKey(); if (indexDef.isUnique() && !hasNullIndexSegments(rowData, indexDef)) { KeyState ks = new KeyState(key); key.setDepth(indexDef.getIndexKeySegmentCount()); if (iEx.hasChildren()) { complainAboutDuplicateKey(indexDef.getName(), key); } ks.copyTo(key); } iEx.getValue().clear(); if (deferIndexes) { synchronized (deferredIndexKeys) { SortedSet<KeyState> keySet = deferredIndexKeys.get(iEx .getTree()); if (keySet == null) { keySet = new TreeSet<KeyState>(); deferredIndexKeys.put(iEx.getTree(), keySet); } final KeyState ks = new KeyState(iEx.getKey()); keySet.add(ks); deferredIndexKeyLimit -= (ks.getBytes().length + KEY_STATE_SIZE_OVERHEAD); } } else { iEx.store(); } releaseExchange(session, iEx); } void putAllDeferredIndexKeys(final Session session) throws PersistitException { synchronized (deferredIndexKeys) { for (final Map.Entry<Tree, SortedSet<KeyState>> entry : deferredIndexKeys .entrySet()) { final Exchange iEx = treeService.getExchange(session, entry.getKey()); buildIndexAddKeys(entry.getValue(), iEx); entry.getValue().clear(); } deferredIndexKeyLimit = MAX_INDEX_TRANCHE_SIZE; } } void updateIndex(final Session session, final IndexDef indexDef, final RowDef rowDef, final RowData oldRowData, final RowData newRowData, final Key hkey) throws PersistitException { if (!fieldsEqual(rowDef, oldRowData, newRowData, indexDef.getFields())) { final Exchange oldExchange = getExchange(session, rowDef, indexDef); constructIndexKey(oldExchange.getKey(), oldRowData, indexDef, hkey); final Exchange newExchange = getExchange(session, rowDef, indexDef); constructIndexKey(newExchange.getKey(), newRowData, indexDef, hkey); oldExchange.getValue().clear(); newExchange.getValue().clear(); oldExchange.remove(); newExchange.store(); releaseExchange(session, newExchange); releaseExchange(session, oldExchange); } } void deleteIndex(final Session session, final IndexDef indexDef, final RowDef rowDef, final RowData rowData, final Key hkey) throws PersistitException 
{ final Exchange iEx = getExchange(session, rowDef, indexDef); constructIndexKey(iEx.getKey(), rowData, indexDef, hkey); boolean removed = iEx.remove(); releaseExchange(session, iEx); } boolean bytesEqual(final byte[] a, final int aoffset, final int asize, final byte[] b, final int boffset, final int bsize) { if (asize != bsize) { return false; } for (int i = 0; i < asize; i++) { if (a[i + aoffset] != b[i + boffset]) { return false; } } return true; } boolean fieldsEqual(final RowDef rowDef, final RowData a, final RowData b, final int[] fieldIndexes) { for (int index = 0; index < fieldIndexes.length; index++) { final int fieldIndex = fieldIndexes[index]; final long aloc = rowDef.fieldLocation(a, fieldIndex); final long bloc = rowDef.fieldLocation(b, fieldIndex); if (!bytesEqual(a.getBytes(), (int) aloc, (int) (aloc >>> 32), b.getBytes(), (int) bloc, (int) (bloc >>> 32))) { return false; } } return true; } public void packRowData(final Exchange hEx, final RowDef rowDef, final RowData rowData) throws PersistitException { final int start = rowData.getInnerStart(); final int size = rowData.getInnerSize(); hEx.getValue().ensureFit(size); System.arraycopy(rowData.getBytes(), start, hEx.getValue() .getEncodedBytes(), 0, size); int storedTableId = treeService.aisToStore(rowDef, rowData.getRowDefId()); AkServerUtil.putInt(hEx.getValue().getEncodedBytes(), RowData.O_ROW_DEF_ID - RowData.LEFT_ENVELOPE_SIZE, storedTableId); hEx.getValue().setEncodedSize(size); } public void expandRowData(final Exchange exchange, final RowData rowData) throws InvalidOperationException, PersistitException { // TODO this needs to be a more specific exception final int size = exchange.getValue().getEncodedSize(); final int rowDataSize = size + RowData.ENVELOPE_SIZE; final byte[] valueBytes = exchange.getValue().getEncodedBytes(); byte[] rowDataBytes = rowData.getBytes(); if (rowDataSize < RowData.MINIMUM_RECORD_LENGTH || rowDataSize > RowData.MAXIMUM_RECORD_LENGTH) { if (LOG.isErrorEnabled()) { 
LOG.error("Value at " + exchange.getKey() + " is not a valid row - skipping"); } throw new InvalidOperationException(ErrorCode.INTERNAL_CORRUPTION, "Corrupt RowData at " + exchange.getKey()); } int rowDefId = AkServerUtil.getInt(valueBytes, RowData.O_ROW_DEF_ID - RowData.LEFT_ENVELOPE_SIZE); rowDefId = treeService.storeToAis(exchange.getVolume(), rowDefId); if (rowDataSize > rowDataBytes.length) { rowDataBytes = new byte[rowDataSize + INITIAL_BUFFER_SIZE]; rowData.reset(rowDataBytes); } // Assemble the Row in a byte array to allow column // elision AkServerUtil.putInt(rowDataBytes, RowData.O_LENGTH_A, rowDataSize); AkServerUtil.putChar(rowDataBytes, RowData.O_SIGNATURE_A, RowData.SIGNATURE_A); System.arraycopy(valueBytes, 0, rowDataBytes, RowData.O_FIELD_COUNT, size); AkServerUtil.putChar(rowDataBytes, RowData.O_SIGNATURE_B + rowDataSize, RowData.SIGNATURE_B); AkServerUtil.putInt(rowDataBytes, RowData.O_LENGTH_B + rowDataSize, rowDataSize); AkServerUtil.putInt(rowDataBytes, RowData.O_ROW_DEF_ID, rowDefId); rowData.prepareRow(0); } public void buildIndexes(final Session session, final String ddl) { flushIndexes(session); final Set<RowDef> userRowDefs = new HashSet<RowDef>(); final Set<RowDef> groupRowDefs = new HashSet<RowDef>(); // Find the groups containing indexes selected for rebuild. 
for (final RowDef rowDef : rowDefCache.getRowDefs()) { if (!rowDef.isGroupTable()) { for (final IndexDef indexDef : rowDef.getIndexDefs()) { if (isIndexSelected(indexDef, ddl)) { userRowDefs.add(rowDef); final RowDef group = rowDefCache.getRowDef(rowDef .getGroupRowDefId()); if (group != null) { groupRowDefs.add(group); } } } } } for (final RowDef rowDef : groupRowDefs) { final RowData rowData = new RowData(new byte[MAX_ROW_SIZE]); rowData.createRow(rowDef, new Object[0]); final byte[] columnBitMap = new byte[(rowDef.getFieldCount() + 7) / 8]; // Project onto all columns of selected user tables for (final RowDef user : rowDef.getUserTableRowDefs()) { if (userRowDefs.contains(user)) { for (int bit = 0; bit < user.getFieldCount(); bit++) { final int c = bit + user.getColumnOffset(); columnBitMap[c / 8] |= (1 << (c % 8)); } } } int indexKeyCount = 0; try { final PersistitStoreRowCollector rc = (PersistitStoreRowCollector) newRowCollector( session, rowDef.getRowDefId(), 0, 0, rowData, rowData, columnBitMap); // final KeyFilter hFilter = rc.getHFilter(); final Exchange hEx = rc.getHExchange(); hEx.getKey().clear(); // while (hEx.traverse(Key.GT, hFilter, Integer.MAX_VALUE)) { while (hEx.next(true)) { expandRowData(hEx, rowData); final int tableId = rowData.getRowDefId(); final RowDef userRowDef = rowDefCache.getRowDef(tableId); if (userRowDefs.contains(userRowDef)) { for (final IndexDef indexDef : userRowDef .getIndexDefs()) { if (isIndexSelected(indexDef, ddl)) { insertIntoIndex(session, indexDef, rowData, hEx.getKey(), true); indexKeyCount++; } } if (deferredIndexKeyLimit <= 0) { putAllDeferredIndexKeys(session); } } } } catch (Exception e) { LOG.error("Exception while trying to index group table " + rowDef.getSchemaName() + "." 
+ rowDef.getTableName(), e); } flushIndexes(session); if (LOG.isInfoEnabled()) { LOG.debug( "Inserted {} index keys into group {}.{}", new Object[] { indexKeyCount, rowDef.getSchemaName(), rowDef.getTableName() } ); } } } public void flushIndexes(final Session session) { try { putAllDeferredIndexKeys(session); } catch (Exception e) { LOG.error("Exception while trying " + " to flush deferred index keys", e); } } public void deleteIndexes(final Session session, final String ddl) { for (final RowDef rowDef : rowDefCache.getRowDefs()) { if (!rowDef.isGroupTable()) { for (final IndexDef indexDef : rowDef.getIndexDefs()) { if (isIndexSelected(indexDef, ddl)) { try { final Exchange iEx = getExchange(session, rowDef, indexDef); iEx.removeAll(); } catch (Exception e) { LOG.error( "Exception while trying to remove index tree " + indexDef.getTreeName(), e); } } } } } } private boolean isIndexSelected(final IndexDef indexDef, final String ddl) { return !indexDef.isHKeyEquivalent() && (!ddl.contains("table=") || ddl.contains("table=(" + indexDef.getRowDef().getTableName() + ")")) && (!ddl.contains("index=") || ddl.contains("index=(" + indexDef.getName() + ")")); } private void buildIndexAddKeys(final SortedSet<KeyState> keys, final Exchange iEx) throws PersistitException { final long start = System.nanoTime(); for (final KeyState keyState : keys) { keyState.copyTo(iEx.getKey()); iEx.store(); } final long elapsed = System.nanoTime() - start; if (LOG.isInfoEnabled()) { LOG.debug("Index builder inserted {} keys into index tree {} in {} seconds", new Object[]{ keys.size(), iEx.getTree().getName(), elapsed / 1000000000 }); } } private RowData mergeRows(RowDef rowDef, RowData currentRow, RowData newRowData, ColumnSelector columnSelector) { NewRow mergedRow = NiceRow.fromRowData(currentRow, rowDef); NewRow newRow = new LegacyRowWrapper(newRowData); int fields = rowDef.getFieldCount(); for (int i = 0; i < fields; i++) { if (columnSelector.includesColumn(i)) { mergedRow.put(i, 
newRow.get(i)); } } return mergedRow.toRowData(); } @Override public boolean isDeferIndexes() { return deferIndexes; } @Override public void setDeferIndexes(final boolean defer) { deferIndexes = defer; } public void traverse(Session session, RowDef rowDef, TreeRecordVisitor visitor) throws PersistitException, InvalidOperationException { assert rowDef.isGroupTable() : rowDef; Exchange exchange = getExchange(session, rowDef, null).append( Key.BEFORE); try { visitor.initialize(this, exchange); while (exchange.next(true)) { visitor.visit(); } } finally { releaseExchange(session, exchange); } } public void traverse(Session session, IndexDef indexDef, IndexRecordVisitor visitor) throws PersistitException, InvalidOperationException { Exchange exchange = getExchange(session, null, indexDef).append( Key.BEFORE); try { visitor.initialize(exchange); while (exchange.next(true)) { visitor.visit(); } } finally { releaseExchange(session, exchange); } } }