package org.mtransit.android.ui.fragment; import java.util.List; import org.mtransit.android.R; import org.mtransit.android.commons.BundleUtils; import org.mtransit.android.commons.ColorUtils; import org.mtransit.android.commons.MTLog; import org.mtransit.android.commons.PreferenceUtils; import org.mtransit.android.commons.data.Route; import org.mtransit.android.commons.ui.fragment.MTFragmentV4; import org.mtransit.android.commons.ui.widget.MTArrayAdapter; import org.mtransit.android.data.AgencyProperties; import org.mtransit.android.data.DataSourceProvider; import org.mtransit.android.data.JPaths; import org.mtransit.android.task.RTSAgencyRoutesLoader; import org.mtransit.android.ui.MainActivity; import org.mtransit.android.ui.fragment.AgencyTypeFragment; import org.mtransit.android.ui.view.MTJPathsView; import android.content.Context; import android.os.Bundle; import android.support.v4.app.LoaderManager; import android.support.v4.content.Loader; import android.text.TextUtils; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import android.view.ViewStub; import android.widget.AbsListView; import android.widget.AdapterView; import android.widget.TextView; public class RTSAgencyRoutesFragment extends MTFragmentV4 implements AgencyTypeFragment.AgencyFragment, LoaderManager.LoaderCallbacks<List<Route>>, AdapterView.OnItemClickListener { private static final String TAG = RTSAgencyRoutesFragment.class.getSimpleName(); private String tag = TAG; @Override public String getLogTag() { return this.tag; } public void setLogTag(String tag) { this.tag = TAG + "-" + tag; } private static final String EXTRA_AGENCY_AUTHORITY = "extra_agency_authority"; private static final String EXTRA_FRAGMENT_POSITION = "extra_fragment_position"; private static final String EXTRA_LAST_VISIBLE_FRAGMENT_POSITION = "extra_last_visible_fragment_position"; public static RTSAgencyRoutesFragment newInstance(int fragmentPosition, int lastVisibleFragmentPosition, AgencyProperties agency) { RTSAgencyRoutesFragment f = new RTSAgencyRoutesFragment(); Bundle args = new Bundle(); args.putString(EXTRA_AGENCY_AUTHORITY, agency.getAuthority()); if (fragmentPosition >= 0) { args.putInt(EXTRA_FRAGMENT_POSITION, fragmentPosition); } if (lastVisibleFragmentPosition >= 0) { args.putInt(EXTRA_LAST_VISIBLE_FRAGMENT_POSITION, lastVisibleFragmentPosition); } f.setArguments(args); return f; } private AgencyProperties agency; private int fragmentPosition = -1; private int lastVisibleFragmentPosition = -1; private boolean fragmentVisible = false; private RTSRouteArrayAdapter adapter; private String emptyText = null; @Override public AgencyProperties getAgency() { return agency; } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { super.onCreateView(inflater, container, savedInstanceState); final View view = inflater.inflate(R.layout.fragment_rts_agency_routes, container, false); setupView(view); return view; } private void restoreInstanceState(Bundle savedInstanceState) { MTLog.v(this, "restoreInstanceState(%s)", savedInstanceState); final String agencyAuthority = BundleUtils.getString(EXTRA_AGENCY_AUTHORITY, savedInstanceState, getArguments()); if (!TextUtils.isEmpty(agencyAuthority)) { this.agency = DataSourceProvider.get().getAgency(getActivity(), agencyAuthority); if (this.agency != null) { setLogTag(this.agency.getShortName()); } } final Integer 
fragmentPosition = BundleUtils.getInt(EXTRA_FRAGMENT_POSITION, savedInstanceState, getArguments()); if (fragmentPosition != null) { if (fragmentPosition.intValue() >= 0) { this.fragmentPosition = fragmentPosition.intValue(); } else { this.fragmentPosition = -1; } } final Integer lastVisibleFragmentPosition = BundleUtils.getInt(EXTRA_LAST_VISIBLE_FRAGMENT_POSITION, savedInstanceState, getArguments()); if (lastVisibleFragmentPosition != null) { if (lastVisibleFragmentPosition.intValue() >= 0) { this.lastVisibleFragmentPosition = lastVisibleFragmentPosition; } else { this.lastVisibleFragmentPosition = -1; } } } @Override public void onActivityCreated(Bundle savedInstanceState) { super.onActivityCreated(savedInstanceState); restoreInstanceState(savedInstanceState); initAdapter(); switchView(getView()); } private void initAdapter() { if (this.adapter != null) { return; } this.adapter = new RTSRouteArrayAdapter(getActivity(), this.agency.getAuthority(), isShowingListInsteadOfGrid()); final View view = getView(); setupView(view); switchView(view); } private void setupView(View view) { if (view == null || this.adapter == null) { return; } final AbsListView absListView = (AbsListView) view.findViewById(isShowingListInsteadOfGrid() ? R.id.list : R.id.grid); absListView.setAdapter(this.adapter); absListView.setOnItemClickListener(this); } private Boolean showingListInsteadOfGrid = null; private boolean isShowingListInsteadOfGrid() { if (this.showingListInsteadOfGrid == null) { final boolean showingListInsteadOfGridLastSet = PreferenceUtils.getPrefDefault(getActivity(), PreferenceUtils.PREFS_RTS_ROUTES_SHOWING_LIST_INSTEAD_OF_GRID_LAST_SET, PreferenceUtils.PREFS_RTS_ROUTES_SHOWING_LIST_INSTEAD_OF_GRID_DEFAULT); this.showingListInsteadOfGrid = this.agency == null ? showingListInsteadOfGridLastSet : PreferenceUtils.getPrefDefault(getActivity(), PreferenceUtils.getPREFS_RTS_ROUTES_SHOWING_LIST_INSTEAD_OF_GRID(this.agency.getAuthority()), showingListInsteadOfGridLastSet); } return this.showingListInsteadOfGrid.booleanValue(); } private void checkIfShowingListInsteadOfGridChanged() { if (this.showingListInsteadOfGrid == null) { return; } final boolean showingListInsteadOfGridLastSet = PreferenceUtils.getPrefDefault(getActivity(), PreferenceUtils.PREFS_RTS_ROUTES_SHOWING_LIST_INSTEAD_OF_GRID_LAST_SET, PreferenceUtils.PREFS_RTS_ROUTES_SHOWING_LIST_INSTEAD_OF_GRID_DEFAULT); boolean newShowingListInsteadOfGrid = this.agency == null ? 
showingListInsteadOfGridLastSet : PreferenceUtils.getPrefDefault(getActivity(), PreferenceUtils.getPREFS_RTS_ROUTES_SHOWING_LIST_INSTEAD_OF_GRID(this.agency.getAuthority()), showingListInsteadOfGridLastSet); if (newShowingListInsteadOfGrid != this.showingListInsteadOfGrid.booleanValue()) { setShowingListInsteadOfGrid(newShowingListInsteadOfGrid); } } private void setShowingListInsteadOfGrid(boolean newShowingListInsteadOfGrid) { if (this.showingListInsteadOfGrid != null && this.showingListInsteadOfGrid.booleanValue() == newShowingListInsteadOfGrid) { return; // nothing changed } this.showingListInsteadOfGrid = newShowingListInsteadOfGrid; // switching to grid PreferenceUtils.savePrefDefault(getActivity(), PreferenceUtils.PREFS_RTS_ROUTES_SHOWING_LIST_INSTEAD_OF_GRID_LAST_SET, this.showingListInsteadOfGrid.booleanValue(), false); if (this.agency != null) { PreferenceUtils.savePrefDefault(getActivity(), PreferenceUtils.getPREFS_RTS_ROUTES_SHOWING_LIST_INSTEAD_OF_GRID(this.agency.getAuthority()), this.showingListInsteadOfGrid.booleanValue(), false); } initAdapter(); this.adapter.seShowingListInsteadOfGrid(this.showingListInsteadOfGrid); setupView(getView()); switchView(getView()); this.adapter.notifyDataSetChanged(); updateListGridToggleMenuItem(); } private void switchListGrid() { setShowingListInsteadOfGrid(!isShowingListInsteadOfGrid()); // switching } @Override public void onItemClick(AdapterView<?> parent, View view, int position, long id) { final Route route = this.adapter.getItem(position); if (route != null) { ((MainActivity) getActivity()).addFragmentToStack(RTSRouteFragment.newInstance(this.agency.getAuthority(), route, null, null)); } } @Override public void setFragmentPosition(int fragmentPosition) { this.fragmentPosition = fragmentPosition; setFragmentVisibleAtPosition(this.lastVisibleFragmentPosition); // force reset visibility } @Override public void setFragmentVisibleAtPosition(int visibleFragmentPosition) { if (this.lastVisibleFragmentPosition == visibleFragmentPosition && ( (this.fragmentPosition == visibleFragmentPosition && this.fragmentVisible) || (this.fragmentPosition != visibleFragmentPosition && !this.fragmentVisible) ) ) { return; } this.lastVisibleFragmentPosition = visibleFragmentPosition; if (this.fragmentPosition < 0) { return; } if (this.fragmentPosition == visibleFragmentPosition) { onFragmentVisible(); } else { onFragmentInvisible(); } } private void onFragmentInvisible() { if (!this.fragmentVisible) { return; // already invisible } this.fragmentVisible = false; } private void onFragmentVisible() { if (this.fragmentVisible) { return; // already visible } this.fragmentVisible = true; if (this.adapter.isEmpty()) { getLoaderManager().restartLoader(ROUTES_LOADER, null, this); } else { switchView(getView()); } checkIfShowingListInsteadOfGridChanged(); updateListGridToggleMenuItem(); } private static final int ROUTES_LOADER = 0; @Override public Loader<List<Route>> onCreateLoader(int id, Bundle args) { switch (id) { case ROUTES_LOADER: final RTSAgencyRoutesLoader rtsAgencyRoutesLoader = new RTSAgencyRoutesLoader(getActivity(), this.agency); return rtsAgencyRoutesLoader; default: MTLog.w(this, "Loader ID '%s' unknown!", id); return null; } } @Override public void onLoaderReset(Loader<List<Route>> loader) { if (this.adapter != null) { this.adapter.clear(); this.adapter = null; } } @Override public void onLoadFinished(Loader<List<Route>> loader, List<Route> data) { this.adapter.setRoutes(data); switchView(getView()); } @Override public void onPause() { super.onPause(); 
onFragmentInvisible(); } @Override public void onResume() { super.onResume(); if (this.fragmentPosition < 0 || this.fragmentPosition == this.lastVisibleFragmentPosition) { onFragmentVisible(); } // ELSE would be call later } @Override public void onDestroy() { super.onDestroy(); if (this.adapter != null) { this.adapter.onDestroy(); } } private void switchView(View view) { if (view == null) { return; } if (this.adapter == null || !this.adapter.isInitialized()) { showLoading(view); } else if (this.adapter.getCount() == 0) { showEmpty(view); } else { showList(view); } } private void showList(View view) { if (view.findViewById(R.id.loading) != null) { // IF inflated/present DO view.findViewById(R.id.loading).setVisibility(View.GONE); // hide } if (view.findViewById(R.id.empty) != null) { // IF inflated/present DO view.findViewById(R.id.empty).setVisibility(View.GONE); // hide } view.findViewById(isShowingListInsteadOfGrid() ? R.id.grid : R.id.list).setVisibility(View.GONE); // show view.findViewById(isShowingListInsteadOfGrid() ? R.id.list : R.id.grid).setVisibility(View.VISIBLE); // show } private void showLoading(View view) { if (view.findViewById(R.id.list) != null) { // IF inflated/present DO view.findViewById(R.id.list).setVisibility(View.GONE); // hide } if (view.findViewById(R.id.grid) != null) { // IF inflated/present DO view.findViewById(R.id.grid).setVisibility(View.GONE); // hide } if (view.findViewById(R.id.empty) != null) { // IF inflated/present DO view.findViewById(R.id.empty).setVisibility(View.GONE); // hide } if (view.findViewById(R.id.loading) == null) { // IF NOT present/inflated DO ((ViewStub) view.findViewById(R.id.loading_stub)).inflate(); // inflate } view.findViewById(R.id.loading).setVisibility(View.VISIBLE); // show } private void showEmpty(View view) { if (view.findViewById(R.id.list) != null) { // IF inflated/present DO view.findViewById(R.id.list).setVisibility(View.GONE); // hide } if (view.findViewById(R.id.grid) != null) { // IF inflated/present DO view.findViewById(R.id.grid).setVisibility(View.GONE); // hide } if (view.findViewById(R.id.loading) != null) { // IF inflated/present DO view.findViewById(R.id.loading).setVisibility(View.GONE); // hide } if (view.findViewById(R.id.empty) == null) { // IF NOT present/inflated DO ((ViewStub) view.findViewById(R.id.empty_stub)).inflate(); // inflate } if (!TextUtils.isEmpty(this.emptyText)) { ((TextView) view.findViewById(R.id.empty_text)).setText(this.emptyText); } view.findViewById(R.id.empty).setVisibility(View.VISIBLE); // show } @Override public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) { super.onCreateOptionsMenu(menu, inflater); if (menu.findItem(R.id.menu_toggle_list_grid) == null) { inflater.inflate(R.menu.rts_agency_routes, menu); ((MainActivity) getActivity()).addMenuItem(R.id.menu_toggle_list_grid, menu.findItem(R.id.menu_toggle_list_grid)); if (!this.fragmentVisible) { menu.findItem(R.id.menu_toggle_list_grid).setVisible(false); } } updateListGridToggleMenuItem(); } private void updateListGridToggleMenuItem() { if (!this.fragmentVisible) { return; } final MenuItem listGridToggleMenuItem = ((MainActivity) getActivity()).getMenuItem(R.id.menu_toggle_list_grid); if (listGridToggleMenuItem == null) { return; } listGridToggleMenuItem.setIcon(isShowingListInsteadOfGrid() ? R.drawable.ic_action_grid_holo_light : R.drawable.ic_action_list_holo_light); listGridToggleMenuItem.setTitle(isShowingListInsteadOfGrid() ? 
R.string.menu_action_grid : R.string.menu_action_list); listGridToggleMenuItem.setVisible(true); } @Override public boolean onOptionsItemSelected(MenuItem item) { if (!this.fragmentVisible) { return false; // not handled } switch (item.getItemId()) { case R.id.menu_toggle_list_grid: switchListGrid(); return true; // handled } return super.onOptionsItemSelected(item); } private static class RTSRouteArrayAdapter extends MTArrayAdapter<Route> implements MTLog.Loggable { private static final String TAG = RTSRouteArrayAdapter.class.getSimpleName(); @Override public String getLogTag() { return TAG; } private List<Route> routes = null; private LayoutInflater layoutInflater; private String authority; private boolean showingListInsteadOfGrid; public RTSRouteArrayAdapter(Context context, String authority, boolean showingListInsteadOfGrid) { super(context, -1); this.layoutInflater = LayoutInflater.from(context); this.authority = authority; this.showingListInsteadOfGrid = showingListInsteadOfGrid; } public void seShowingListInsteadOfGrid(boolean showingListInsteadOfGrid) { this.showingListInsteadOfGrid = showingListInsteadOfGrid; } public void setRoutes(List<Route> routes) { this.routes = routes; } public boolean isInitialized() { return this.routes != null; } @Override public int getCount() { return this.routes == null ? 0 : this.routes.size(); } @Override public int getItemViewType(int position) { return this.showingListInsteadOfGrid ? 0 : 1; } @Override public int getViewTypeCount() { return 2; } @Override public Route getItem(int position) { return this.routes == null ? null : this.routes.get(position); } @Override public View getView(int position, View convertView, ViewGroup parent) { return getRouteView(position, convertView, parent); } private View getRouteView(int position, View convertView, ViewGroup parent) { if (convertView == null) { convertView = this.layoutInflater.inflate(this.showingListInsteadOfGrid ? 
R.layout.layout_rts_route_list_item : R.layout.layout_rts_route_grid_item, parent, false); RouteViewHolder holder = new RouteViewHolder(); holder.routeFL = convertView.findViewById(R.id.route); holder.routeShortNameTv = (TextView) convertView.findViewById(R.id.route_short_name); holder.routeTypeImg = (MTJPathsView) convertView.findViewById(R.id.route_type_img); holder.routeLongNameTv = (TextView) convertView.findViewById(R.id.route_long_name); convertView.setTag(holder); } updateRouteView(position, convertView); return convertView; } private View updateRouteView(int position, View convertView) { Route route = getItem(position); if (convertView == null) { return convertView; } RouteViewHolder holder = (RouteViewHolder) convertView.getTag(); if (route == null) { holder.routeFL.setVisibility(View.GONE); } else { final int routeTextColor = ColorUtils.parseColor(route.textColor); final int routeColor = ColorUtils.parseColor(route.color); if (TextUtils.isEmpty(route.shortName)) { holder.routeShortNameTv.setVisibility(View.INVISIBLE); final JPaths rtsRouteLogo = DataSourceProvider.get().getRTSRouteLogo(getContext(), this.authority); if (rtsRouteLogo != null) { holder.routeTypeImg.setJSON(rtsRouteLogo); holder.routeTypeImg.setColor(routeTextColor); holder.routeTypeImg.setVisibility(View.VISIBLE); } else { holder.routeTypeImg.setVisibility(View.GONE); } } else { holder.routeTypeImg.setVisibility(View.GONE); holder.routeShortNameTv.setText(route.shortName); holder.routeShortNameTv.setTextColor(routeTextColor); holder.routeShortNameTv.setVisibility(View.VISIBLE); } if (holder.routeLongNameTv != null) { holder.routeLongNameTv.setTextColor(routeTextColor); if (TextUtils.isEmpty(route.longName)) { holder.routeLongNameTv.setVisibility(View.GONE); } else { holder.routeLongNameTv.setText(route.longName); holder.routeLongNameTv.setVisibility(View.VISIBLE); } } holder.routeFL.setBackgroundColor(routeColor); holder.routeFL.setVisibility(View.VISIBLE); } return convertView; } public void onDestroy() { if (this.routes != null) { this.routes.clear(); this.routes = null; } } public static class RouteViewHolder { TextView routeShortNameTv; View routeFL; MTJPathsView routeTypeImg; TextView routeLongNameTv; } } }
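/*
 * Usage sketch (not part of the original source): RTSAgencyRoutesFragment above is created through its
 * static newInstance(...) factory and driven by the AgencyTypeFragment.AgencyFragment callbacks
 * (setFragmentPosition / setFragmentVisibleAtPosition). The adapter below is a hypothetical, minimal
 * FragmentPagerAdapter showing how a host could wire one routes page per agency; the class name, the
 * constructor parameters and the source of the agency list are assumptions made for illustration only.
 */
import java.util.List;

import org.mtransit.android.data.AgencyProperties;
import org.mtransit.android.ui.fragment.RTSAgencyRoutesFragment;

import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentPagerAdapter;

public class AgencyRoutesPagerAdapter extends FragmentPagerAdapter {

	private final List<AgencyProperties> agencies; // assumed to be supplied by the hosting fragment/activity
	private final int lastVisibleFragmentPosition;

	public AgencyRoutesPagerAdapter(FragmentManager fm, List<AgencyProperties> agencies, int lastVisibleFragmentPosition) {
		super(fm);
		this.agencies = agencies;
		this.lastVisibleFragmentPosition = lastVisibleFragmentPosition;
	}

	@Override
	public Fragment getItem(int position) {
		// one RTS routes page per agency; the position is forwarded so the fragment can later
		// compare it against the visible position and trigger onFragmentVisible()/onFragmentInvisible()
		return RTSAgencyRoutesFragment.newInstance(position, this.lastVisibleFragmentPosition, this.agencies.get(position));
	}

	@Override
	public int getCount() {
		return this.agencies == null ? 0 : this.agencies.size();
	}
}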
package com.sudicode.tunejar.menu;

import com.sudicode.tunejar.player.Gui;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static com.jayway.awaitility.Awaitility.await;
import static com.jayway.awaitility.Duration.FIVE_SECONDS;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.*;

public class ThemeMenuTest {

    private static final Logger logger = LoggerFactory.getLogger(ThemeMenuTest.class);

    private Gui gui = Gui.getInstance();

    public void testDarkTheme() throws Exception {
        gui.getRobot().clickOn("#themeSelector");
        await().atMost(FIVE_SECONDS).until(() -> gui.getRobot().clickOn("Dark Theme"));
        logger.info("Clicked on Dark Theme");
        await().atMost(FIVE_SECONDS).until(() -> gui.getPlayer().getScene().getStylesheets().get(0), endsWith("Dark%20Theme.css"));
        assertThat(gui.getPlayer().getOptions().getTheme(), is(equalTo("Dark Theme")));
    }

    public void testModena() throws Exception {
        gui.getRobot().clickOn("#themeSelector");
        await().atMost(FIVE_SECONDS).until(() -> gui.getRobot().clickOn("Modena"));
        logger.info("Clicked on Modena");
        await().atMost(FIVE_SECONDS).until(() -> gui.getPlayer().getScene().getStylesheets().get(0), endsWith("Modena.css"));
        assertThat(gui.getPlayer().getOptions().getTheme(), is(equalTo("Modena")));
    }
}
package mocap; import java.io.IOException; import java.net.DatagramPacket; import java.net.DatagramSocket; import java.net.InetAddress; import java.net.MulticastSocket; import java.net.SocketException; import java.net.SocketTimeoutException; import java.net.UnknownHostException; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; /** * Class for connecting to a NatNet compatible MoCap server. * * @author Stefan Marks */ public class NatNetClient implements MoCapClient { public static final String CLIENT_NAME = "Java MoCap Client"; public static final byte CLIENT_VERSION[] = { 1, 1, 0, 0 }; public static final byte NATNET_VERSION[] = { 2, 9, 0, 0 }; /** * Class for storing the server information. */ public class ServerInfo { public String serverName; public byte[] versionServer; public byte[] versionNatNet; } // Portnumbers: Default is 1510/1511, // but that seems to collide with Cortex. // 1503 is taken by Windows messenger, // 1512 is taken by WINS // -> so let's use 1508, 1509 private final static int PORT_COMMAND = 1508; private final static int PORT_DATA = 1509; // timeout values for initial connections and for streaming private final int TIMEOUT_INITIAL = 1000; private final int TIMEOUT_RUNNING = 100; private final static short NAT_PING = 0; private final static short NAT_PINGRESPONSE = 1; private final static short NAT_REQUEST = 2; private final static short NAT_RESPONSE = 3; private final static short NAT_REQUEST_MODELDEF = 4; private final static short NAT_MODELDEF = 5; private final static short NAT_REQUEST_FRAMEOFDATA = 6; private final static short NAT_FRAMEOFDATA = 7; private final static short NAT_MESSAGESTRING = 8; private final static short NAT_UNRECOGNIZED_REQUEST = 100; private final static short DATASET_TYPE_MARKERSET = 0; private final static short DATASET_TYPE_RIGIDBODY = 1; private final static short DATASET_TYPE_SKELETON = 2; private final static short DATASET_TYPE_FORCEPLATE = 3; private final static int MAX_NAMELENGTH = 256; private final static int MAX_PACKETSIZE = 65535; // 10000 is not enough for 4 actors private final Command COMMAND_FRAMEOFDATA = new Command_RequestFrameOfData(); private final Command COMMAND_MODELDEF = new Command_RequestModelDefinition(); private interface Command { void marshal(ByteBuffer buffer); } private interface Response { } /** * Class for a command to ping the server. */ private class Command_Ping implements Command { public Command_Ping(String clientName, byte[] clientVersion) { this.clientName = clientName; this.versionApp = new byte[4]; for ( int i = 0 ; i < this.versionApp.length ; i++ ) { if ( i < clientVersion.length ) { versionApp[i] = clientVersion[i]; } else { versionApp[i] = 0; } } } private final String clientName; private final byte versionApp[]; @Override public void marshal(ByteBuffer buf) { buf.rewind(); buf.putShort((short) NAT_PING); buf.putShort((short) 0); // length to be filled in later buf.put(clientName.getBytes()).putChar('\0'); // client name for ( int i = clientName.length() + 2 ; i < MAX_NAMELENGTH ; i++ ) { // pad with 0 to full name length buf.put((byte) 0); } // version information for (int i = 0; i < 4; i++) { buf.put(versionApp[i]); } for (int i = 0; i < 4; i++) { buf.put(NATNET_VERSION[i]); } } } /** * Class for a ping response from the server. 
*/ private class Response_Ping implements Response { private Response_Ping(ByteBuffer buf, ServerInfo info) { info.serverName = unmarshalString(buf); for ( int i = info.serverName.length() + 1 ; i < MAX_NAMELENGTH ; i++ ) { // skip rest of maximum string length buf.get(); } info.versionServer = new byte[4]; info.versionNatNet = new byte[4]; for ( int i = 0 ; i < 4 ; i++ ) { info.versionServer[i] = buf.get(); } for ( int i = 0 ; i < 4 ; i++ ) { info.versionNatNet[i] = buf.get(); } } } /** * Class for a command to request the model definition. */ private class Command_RequestModelDefinition implements Command { private Command_RequestModelDefinition() { // nothing to do here } @Override public void marshal(ByteBuffer buf) { buf.rewind(); buf.putShort(NAT_REQUEST_MODELDEF); buf.putShort((short) 0); } } /** * Class for a response with a model definition. */ private class Response_ModelDefinition implements Response { private Response_ModelDefinition(ByteBuffer buf, Scene scene) { logBufferData(buf, buf.remaining()); int nDatasets = buf.getInt(); // datasets List<Actor> actors = new LinkedList<>(); List<Device> devices = new LinkedList<>(); for ( int datasetIdx = 0 ; datasetIdx < nDatasets ; datasetIdx++ ) { int datasetType = buf.getInt(); switch ( datasetType ) { case DATASET_TYPE_MARKERSET : { parseMarkerset(buf, actors); break; } case DATASET_TYPE_RIGIDBODY : { parseRigidBody(buf, actors); break; } case DATASET_TYPE_SKELETON : { parseSkeleton(buf, actors); break; } case DATASET_TYPE_FORCEPLATE : { parseForcePlate(buf, devices); break; } default: { LOG.log(Level.WARNING, "Invalid dataset type {0} in model definition respose.", datasetType); break; } } } synchronized(scene) { scene.actors = actors.toArray(new Actor[actors.size()]); scene.devices = devices.toArray(new Device[devices.size()]); } // scene might have changed -> update listeners notifyListeners_Change(); } private void parseMarkerset(ByteBuffer buf, List<Actor> actors) { int id = 0; // no ID for markersets String name = unmarshalString(buf); // markerset name Actor actor = new Actor(scene, name, id); int nMarkers = buf.getInt(); // marker count // TODO: Sanity check on the number before allocating that much space actor.markers = new Marker[nMarkers]; for ( int markerIdx = 0 ; markerIdx < nMarkers ; markerIdx++ ) { name = unmarshalString(buf); Marker marker = new Marker(actor, name); actor.markers[markerIdx] = marker; } actors.add(actor); } private void parseRigidBody(ByteBuffer buf, List<Actor> actors) { String name = unmarshalString(buf); // name int id = buf.getInt(); // rigid body name should be equal to actor name: search Actor actor = null; for ( Actor a : actors ) { if ( a.name.equals(name) ) { actor = a; break; } } if ( actor == null ) { // names don't match > try IDs if ( (id >=0) && (id < actors.size()) ) { actor = actors.get(id); } } if ( actor == null ) { LOG.log(Level.WARNING, "Rigid Body {0} could not be matched to an actor.", name); actor = new Actor(scene, name, id); actors.add(actor); } Bone bone = new Bone(actor, name, id); buf.getInt(); // Parent ID (ignore for rigid body - will be -1) bone.parent = null; // rigid bodies should not have a parent bone.ox = buf.getFloat(); // X offset bone.oy = buf.getFloat(); // Y offset bone.oz = buf.getFloat(); // Z offset actor.bones = new Bone[1]; actor.bones[0] = bone; } private void parseSkeleton(ByteBuffer buf, List<Actor> actors) { final boolean includesBoneNames = // starting at v2.0 (serverInfo.versionNatNet[0] >= 2); String skeletonName = unmarshalString(buf); // name 
int skeletonId = buf.getInt(); // rigid body name should be equal to actor name: search Actor actor = null; for ( Actor a : actors ) { if ( a.name.equals(skeletonName) ) { actor = a; actor.id = skeletonId; // associate actor and skeleton } } if ( actor == null ) { // names don't match > try IDs if ( (skeletonId >=0) && (skeletonId < actors.size()) ) { actor = actors.get(skeletonId); } } if ( actor == null ) { LOG.log(Level.WARNING, "Skeleton {0} could not be matched to an actor.", skeletonName); actor = new Actor(scene, skeletonName, skeletonId); actors.add(actor); } actor.id = skeletonId; int nBones = buf.getInt(); // Skeleton bone count // TODO: Sanity check on the number before allocating that much space actor.bones = new Bone[nBones]; for ( int boneIdx = 0 ; boneIdx < nBones ; boneIdx++ ) { String name = ""; if ( includesBoneNames ) { name = unmarshalString(buf); // Bone name } int id = buf.getInt(); // Bone ID Bone bone = new Bone(actor, name, id); bone.parent = actor.findBone(buf.getInt()); // Skeleton parent ID if ( bone.parent != null ) { // if bone has a parent, update child list of parent bone.parent.children.add(bone); } // build chain from root to this bone bone.buildChain(); bone.ox = buf.getFloat(); // X offset bone.oy = buf.getFloat(); // Y offset bone.oz = buf.getFloat(); // Z offset actor.bones[boneIdx] = bone; } } private void parseForcePlate(ByteBuffer buf, List<Device> devices) { int id = buf.getInt(); // force plate ID String name = unmarshalString(buf); // force plate serial Device device = new Device(scene, name, id); // skip next 652 bytes // (SDK 2.9 sample code does not explain what this is about) buf.position(buf.position() + 652); int nChannels = buf.getInt(); // channel count device.channels = new Channel[nChannels]; for ( int channelIdx = 0 ; channelIdx < nChannels ; channelIdx++ ) { name = unmarshalString(buf); Channel channel = new Channel(device, name); device.channels[channelIdx] = channel; } devices.add(device); } } /** * Class for a command to request the latest frame of data. */ private class Command_RequestFrameOfData implements Command { private Command_RequestFrameOfData() { // nothing to do here } @Override public void marshal(ByteBuffer buf) { buf.rewind(); buf.putShort(NAT_REQUEST_FRAMEOFDATA); buf.putShort((short) 0); } } /** * Class for a response with the latest frame of data. 
*/ private class Response_FrameOfData implements Response { private Response_FrameOfData(ByteBuffer buf, Scene scene) { // determine special datasets depending on NatNet version final boolean includesMarkerIDsAndSizes = // starting at v2.0 (serverInfo.versionNatNet[0] >= 2); final boolean includesSkeletonData = // starting at v2.1 ( (serverInfo.versionNatNet[0] == 2) && (serverInfo.versionNatNet[1] >= 1) ) || (serverInfo.versionNatNet[0] > 2); final boolean includesTrackingState = // starting at v2.6 ( (serverInfo.versionNatNet[0] == 2) && (serverInfo.versionNatNet[1] >= 6) ) || (serverInfo.versionNatNet[0] > 2); final boolean includesLabelledMarkers = // starting at v2.3 ( (serverInfo.versionNatNet[0] == 2) && (serverInfo.versionNatNet[1] >= 3) ) || (serverInfo.versionNatNet[0] > 2); final boolean includesLabelledMarkerFlags = // starting at v2.6 ( (serverInfo.versionNatNet[0] == 2) && (serverInfo.versionNatNet[1] >= 6) ) || (serverInfo.versionNatNet[0] > 2); final boolean includesForcePlateData = // starting at v2.9 ( (serverInfo.versionNatNet[0] == 2) && (serverInfo.versionNatNet[1] >= 9) ) || (serverInfo.versionNatNet[0] > 2); synchronized(scene) { int frameNumber = buf.getInt(); // frame number // check if this is a newer frame // delta < 10: but do consider looping playback // when frame numbers suddenly differ significantly int deltaFrame = frameNumber - scene.frameNumber; if ( (deltaFrame < 0) && (deltaFrame > -10) ) return; // old frame, get out logBufferData(buf, 400); scene.frameNumber = frameNumber; // Read actor data int nActors = buf.getInt(); // actor count for ( int actorIdx = 0 ; actorIdx < nActors ; actorIdx++ ) { String actorName = unmarshalString(buf); // find the corresponding actor Actor actor = scene.findActor(actorName); int nMarkers = buf.getInt(); for ( int markerIdx = 0 ; markerIdx < nMarkers ; markerIdx++ ) { Marker marker = (actor != null) ? 
actor.markers[markerIdx] : DUMMY_MARKER; // read coordinate marker.px = buf.getFloat(); marker.py = buf.getFloat(); marker.pz = buf.getFloat(); // XYZ == 0 indicates lost tracking marker.tracked = (marker.px != 0) || (marker.py != 0) || (marker.pz != 0); } } // skip unidentified marker data int nUnidentifiedMarkers = buf.getInt(); final int unidentifiedMarkerDataSize = 3 * 4; // 3 floats buf.position(buf.position() + unidentifiedMarkerDataSize * nUnidentifiedMarkers); // without skipping: // for ( int idx = 0 ; idx < nUnknownMarkers ; idx++ ) // buf.getFloat(); // x // buf.getFloat(); // y // buf.getFloat(); // z // Read rigid body data int nRigidBodies = buf.getInt(); // bone count for ( int rigidBodyIdx = 0 ; rigidBodyIdx < nRigidBodies ; rigidBodyIdx++ ) { int rigidBodyID = buf.getInt(); // get rigid body ID // find the corresponding actor Bone bone = DUMMY_BONE; if ( checkActorId(rigidBodyID) ) { Actor actor = scene.actors[rigidBodyID]; if ( actor.bones.length == 0 ) { // in case there is no bone, create one actor.bones = new Bone[1]; actor.bones[0] = new Bone(actor, "", 0); } bone = actor.bones[0]; } bone.px = buf.getFloat(); // position bone.py = buf.getFloat(); bone.pz = buf.getFloat(); bone.qx = buf.getFloat(); // rotation bone.qy = buf.getFloat(); bone.qz = buf.getFloat(); bone.qw = buf.getFloat(); int nMarkers = buf.getInt(); for ( int i = 0 ; i < nMarkers ; i++ ) { buf.getFloat(); // Marker X buf.getFloat(); // Marker Y buf.getFloat(); // Marker Z } if ( includesMarkerIDsAndSizes ) { // also, marker IDs and sizes for ( int i = 0 ; i < nMarkers ; i++ ) { buf.getInt(); // Marker ID } // and sizes for ( int i = 0 ; i < nMarkers ; i++ ) { buf.getFloat(); // Marker size } buf.getFloat(); // Mean marker error } // Tracking state if ( includesTrackingState ) { short state = buf.getShort(); // 0x01 : rigid body was successfully tracked in this frame bone.tracked = (state & 0x01) != 0; } else { // tracking state not sent separately, // but position = (0,0,0) used as "not tracked" indicator bone.tracked = (bone.px != 0) || (bone.py != 0) || (bone.pz != 0); } } // Read skeleton data if ( includesSkeletonData ) { int nSkeletons = buf.getInt(); for ( int skeletonIdx = 0 ; skeletonIdx < nSkeletons ; skeletonIdx++ ) { // read skeleton ID and find actor int skeletonId = buf.getInt(); Actor actor = scene.findActor(skeletonId); if ( actor == null ) { System.err.println("could not find actor " + skeletonId); return; } // # of bones in skeleton int nBones = buf.getInt(); // TODO: Number sanity check for ( int nBodyIdx = 0 ; nBodyIdx < nBones ; nBodyIdx++ ) { // read bone ID and find bone int boneId = buf.getInt(); Bone bone = actor.findBone(boneId); if ( bone == null ) bone = DUMMY_BONE; bone.px = buf.getFloat(); // read position bone.py = buf.getFloat(); bone.pz = buf.getFloat(); bone.qx = buf.getFloat(); // read orientation bone.qy = buf.getFloat(); bone.qz = buf.getFloat(); bone.qw = buf.getFloat(); // read/skip rigid marker data int nMarkers = buf.getInt(); for ( int i = 0 ; i < nMarkers ; i++ ) { buf.getFloat(); // X/Y/Z position buf.getFloat(); buf.getFloat(); } for ( int i = 0 ; i < nMarkers ; i++ ) { buf.getInt(); // Marker IDs } for ( int i = 0 ; i < nMarkers ; i++ ) { buf.getFloat(); // Marker size } // Mean marker error // ATTENTION: Used to transmit bone length bone.length = buf.getFloat(); // Tracking state if ( includesTrackingState ) { short state = buf.getShort(); // 0x01 : rigid body was successfully tracked in this frame bone.tracked = (state & 0x01) != 0; } else { // tracking 
state not sent separately, // but position = (0,0,0) used as "not tracked" indicator bone.tracked = (bone.px != 0) || (bone.py != 0) || (bone.pz != 0); } } // next rigid body } // next skeleton } // skip labelled markers if ( includesLabelledMarkers ) { int nLabelledMarkers = buf.getInt(); final int labelledMarkerDataSize = includesLabelledMarkerFlags ? 5 * 4 + 1 * 2 : // 1 int, 4 floats, 1 short 5 * 4; // 1 int, 4 floats buf.position(buf.position() + nLabelledMarkers * labelledMarkerDataSize); // without skipping: // for ( int markerIdx = 0; markerIdx < nLabeledMarkers; markerIdx++ ) // int id = buf.getInt(); // float x = buf.getFloat(); // float y = buf.getFloat(); // float z = buf.getFloat(); // float size = buf.getFloat(); // if ( includesLabelledMarkerFlags ) // short params = buf.getShort(); } // read force plate data if ( includesForcePlateData ) { int nForcePlates = buf.getInt(); for (int forcePlateIdx = 0; forcePlateIdx < nForcePlates; forcePlateIdx++) { // read force plate ID and find corresponding device int forcePlateId = buf.getInt(); Device device = scene.findDevice(forcePlateId); if ( device == null ) device = DUMMY_DEVICE; // channel count int nChannels = buf.getInt(); // channel data for (int i = 0; i < nChannels; i++) { // frame count int nFrames = buf.getInt(); float value = 0; for (int frameIdx = 0; frameIdx < nFrames; frameIdx++) { // frame data value = buf.getFloat(); } if ( i < device.channels.length ) { // effectively only read the last (or only) value device.channels[i].value = value; } } } } // read latency and convert from s to ms scene.latency = (int) (buf.getFloat() * 1000); } notifyListeners_Update(); } } /** * Class for a custom request command. */ private class Command_Request implements Command { private Command_Request(String request) { this.request = request; } @Override public void marshal(ByteBuffer buf) { buf.rewind(); buf.putShort(NAT_REQUEST); buf.putShort((short) 0); buf.put(request.getBytes()).putChar('\0'); } private final String request; } /** * Class for a request response from the server. */ private class Response_Request implements Response { private Response_Request(ByteBuffer buf) { returnValue = unmarshalString(buf); } /** * Gets the string returned by the request. * * @return the request return value */ public String getValue() { return returnValue; } private final String returnValue; } /** * Class for a ping response from the server. */ private class Response_UnrecognizedRequest implements Response { private Response_UnrecognizedRequest() { // nothing to do here } } /** * Creates a NatNet compatible Motion Capture client. */ public NatNetClient() { this(CLIENT_NAME, CLIENT_VERSION); } /** * Creates a Natnet compatible Motion Capture client. * * @param applicationName the name of the application * @param applicationVersion the version number of the application (array of max. 
size 4) */ public NatNetClient(String applicationName, byte[] applicationVersion) { this.appName = applicationName.substring(0, Math.min(128, applicationName.length())); this.appVersion = applicationVersion; this.connected = false; this.frameStreaming = false; this.scene = new Scene(); this.cmdSocket = null; this.packetOut = new DatagramPacket(new byte[MAX_PACKETSIZE], MAX_PACKETSIZE); this.packetIn = new DatagramPacket(new byte[MAX_PACKETSIZE], MAX_PACKETSIZE); this.bufOut = ByteBuffer.allocate(MAX_PACKETSIZE).order(ByteOrder.LITTLE_ENDIAN); this.serverInfo = new ServerInfo(); this.sceneListeners = new HashSet<>(); } @Override public boolean connect(InetAddress host) { if ( connected ) { disconnect(); } try { cmdSocket = new DatagramSocket(); cmdSocket.connect(host, PORT_COMMAND); cmdSocket.setSoTimeout(100); packetOut.setAddress(null); // make packet neutral Response_Ping ping = pingServer(); if ( ping != null ) { connected = true; LOG.log(Level.INFO, "Connected to server ''{0}'' v{1}.{2}.{3}.{4}, NatNet v{5}.{6}.{7}.{8}", new Object[]{ serverInfo.serverName, serverInfo.versionServer[0], serverInfo.versionServer[1], serverInfo.versionServer[2], serverInfo.versionServer[3], serverInfo.versionNatNet[0], serverInfo.versionNatNet[1], serverInfo.versionNatNet[2], serverInfo.versionNatNet[3] }); // trigger sending of scene description and the first frame sendCommandPacket(COMMAND_MODELDEF); receiveResponsePacket(Response_ModelDefinition.class); sendCommandPacket(COMMAND_FRAMEOFDATA); receiveResponsePacket(Response_FrameOfData.class); // get data stream source address InetAddress dataStreamAddr = host; String strDataStreamAddr = sendCommand("getDataStreamAddress"); try { dataStreamAddr = InetAddress.getByName(strDataStreamAddr); } catch (UnknownHostException e) { LOG.log(Level.WARNING, "Could not resolve data stream address ''{0}''", strDataStreamAddr); } LOG.log(Level.INFO, "Server data stream address: {0} {1}", new Object[] { dataStreamAddr, dataStreamAddr.isMulticastAddress() ? "(multicast)" : "" }); // start stream receiver thread frameStreaming = false; receiverThread = new ReceiverThread(dataStreamAddr); receiverThread.start(); } else { cmdSocket.close(); cmdSocket = null; } } catch (IllegalArgumentException | SocketException e) { if ( cmdSocket == null ) { LOG.severe("Could not create socket."); } else { LOG.log(Level.SEVERE, "Could not connect to server ({0}).", e.getMessage()); } cmdSocket = null; } return connected; } @Override public boolean isConnected() { return connected; } @Override public String getServerName() { return serverInfo.serverName + " v" + serverInfo.versionServer[0] + "." + serverInfo.versionServer[1] + "." + serverInfo.versionServer[2] + "." 
+ serverInfo.versionServer[3]; } @Override public void update() { // only poll "manually" when streaming does not work // (for whatever reason) if ( connected && !frameStreaming ) { sendCommandPacket(COMMAND_FRAMEOFDATA); receiveResponsePacket(Response_FrameOfData.class); } } @Override public final Scene getScene() { return scene; } @Override public String sendCommand(String command) { String retVal = null; sendCommandPacket(new Command_Request(command)); Response response = receiveResponsePacket(Response_Request.class); if ( response != null ) { retVal = ((Response_Request) response).getValue(); } return retVal; } @Override public boolean disconnect() { if ( connected ) { if ( receiverThread != null ) { receiverThread.terminate(); try { receiverThread.join(1000); } catch (InterruptedException e) { // ignore } receiverThread = null; } cmdSocket.disconnect(); cmdSocket.close(); cmdSocket = null; connected = false; } return !connected; } @Override public boolean addSceneListener(SceneListener listener) { boolean added = sceneListeners.add(listener); if ( added ) { // immediately notify listener.sceneChanged(scene); } return added; } @Override public boolean removeSceneListener(SceneListener listener) { boolean removed = sceneListeners.remove(listener); return removed; } private boolean checkActorId(int actorId) { boolean valid = (actorId >= 0) && (actorId < scene.actors.length); if ( !valid ) { LOG.log(Level.WARNING, "Invalid actor ID {0}", actorId); } return valid; } private boolean checkBoneId(int actorId, int boneId) { boolean valid = (boneId >= 0) && (boneId < scene.actors[actorId].bones.length); if ( !valid ) { LOG.log(Level.WARNING, "Invalid bone ID {0}", boneId); } return valid; } private Response_Ping pingServer() { Response_Ping result = null; if ( cmdSocket != null ) { if ( sendCommandPacket(new Command_Ping(appName, appVersion)) ) { result = (Response_Ping) receiveResponsePacket(Response_Ping.class); } } return result; } private boolean sendCommandPacket(Command cmd) { boolean success = false; cmd.marshal(bufOut); int len = bufOut.position(); bufOut.putShort(2, (short) (len - 4)); // adapt length of data packet (less id and packet size) // dump(bufOut, len); packetOut.setLength(len); packetOut.setData(bufOut.array(), 0, len); try { cmdSocket.send(packetOut); success = true; } catch (IOException e) { LOG.log(Level.SEVERE, "Could not send command ({0}).", e.getMessage()); } return success; } private Response receiveResponsePacket(Class c) { Response response = null; try { do { cmdSocket.receive(packetIn); response = parsePacket(packetIn); errorCounter = 0; } while ( !c.isInstance(response) ); } catch (IOException e) { if ( errorCounter == 0 ) { LOG.log(Level.SEVERE, "Could not receive command ({0}).", e.getMessage()); } errorCounter++; if ( errorCounter > 30 ) { LOG.log(Level.SEVERE, "Too many errors > disconnecting."); disconnect(); } } return response; } private Response parsePacket(DatagramPacket packet) { Response response = null; int rcvLength = packet.getLength(); if ( rcvLength > 0 ) { final ByteBuffer bufIn = ByteBuffer.wrap(packet.getData(), 0, rcvLength).order(ByteOrder.LITTLE_ENDIAN); logBufferData(bufIn, rcvLength); int packetId = bufIn.getShort(); int packetLen = bufIn.getShort(); int receivedLen = rcvLength; if ( packetLen == receivedLen - 4 ) // don't count the 4 bytes id and length { switch ( packetId ) { case NAT_PINGRESPONSE : { response = new Response_Ping(bufIn, serverInfo); break; } case NAT_RESPONSE : { response = new Response_Request(bufIn); break; } case 
NAT_MODELDEF : { response = new Response_ModelDefinition(bufIn, scene); break; } case NAT_FRAMEOFDATA : { response = new Response_FrameOfData(bufIn, scene); break; } case NAT_UNRECOGNIZED_REQUEST : { response = new Response_UnrecognizedRequest(); LOG.log(Level.WARNING, "Unrecognized request."); break; } default: { LOG.log(Level.WARNING, "Unknown packet ID {0}.", packetId); break; } } } else { LOG.log(Level.WARNING, "Incoming packet length error (ID={0}, Packet Length={1}, Received Length={2}).", new Object[]{packetId, packetLen, receivedLen}); } } return response; } /** * Extracts a null-terminated string from the buffer. * * @param buf the buffer to extract the string from * * @return the extracted string */ private String unmarshalString(ByteBuffer buf) { StringBuilder s = new StringBuilder(); char c; while ( (c = (char) buf.get()) != '\0' ) { s.append(c); } return s.toString(); } /** * Notifies all scene listeners about the update of the scene data. */ private void notifyListeners_Update() { for ( SceneListener listener : sceneListeners ) { listener.sceneUpdated(scene); } } /** * Notifies scene listeners of a scene structure change. */ private void notifyListeners_Change() { for ( SceneListener listener : sceneListeners ) { listener.sceneChanged(scene); } } private void logBufferData(ByteBuffer buf, int len) { if ( LOG.isLoggable(Level.FINE) ) { String address = ""; String hexData = ""; String asciiData = ""; byte[] arr = buf.array(); int idx = 0; final int width = 16; while ( idx < len ) { if ( idx % width == 0 ) { address = String.format("%04x", idx); hexData = ""; asciiData = ""; } byte d = arr[idx]; hexData += String.format("%02x ", d); asciiData += (d >= 32) && (d < 127) ? (char) d : "."; idx++; if ( (idx == len) || (idx % width == 0) ) { for ( int i = hexData.length() ; i < width * 3 ; i++ ) { hexData += " "; } LOG.log(Level.FINE, "{0} : {1} | {2}", new Object[]{address, hexData, asciiData}); } } } } private class ReceiverThread extends Thread { public ReceiverThread(InetAddress dataStreamAddress) { try { if ( dataStreamAddress.isMulticastAddress() ) { MulticastSocket socket = new MulticastSocket(PORT_DATA); socket.joinGroup(dataStreamAddress); dataSocket = socket; } else { dataSocket = new DatagramSocket(PORT_DATA, dataStreamAddress); } dataSocket.setSoTimeout(TIMEOUT_INITIAL); packetIn = new DatagramPacket(new byte[MAX_PACKETSIZE], MAX_PACKETSIZE); } catch (IOException e) { LOG.log(Level.SEVERE, "Could not start receiver thread ({0}).", e.getMessage()); } } @Override public void run() { if ( dataSocket == null ) return; runReceiver = true; LOG.info("Receiver thread started"); boolean firstPacketReceived = false; int timeoutCounter = 0; while ( runReceiver ) { try { dataSocket.receive(packetIn); parsePacket(packetIn); frameStreaming = true; if ( !firstPacketReceived ) { LOG.info("Data stream active"); firstPacketReceived = true; timeoutCounter = 0; // OK, data is coming in > set timeout to less try { dataSocket.setSoTimeout(TIMEOUT_RUNNING); } catch (SocketException e) { // do nothing } } } catch (SocketTimeoutException ex) { if ( (timeoutCounter > 10) && frameStreaming ) { // data was streaming (or is expected to) frameStreaming = false; if ( firstPacketReceived ) { LOG.warning("Data stream stopped unexpectedly"); } else { LOG.warning("No data stream detected"); } try { // try again with longer timeout dataSocket.setSoTimeout(TIMEOUT_INITIAL); firstPacketReceived = false; } catch (SocketException e) { // ignore } } } catch (IOException ex) { // uh oh, panic, disconnect 
runReceiver = false; LOG.log(Level.SEVERE, "Error while receiving data stream ({0})", ex.getMessage()); } } dataSocket.disconnect(); dataSocket.close(); dataSocket = null; LOG.info("Receiver thread stopped"); } public void terminate() { if ( runReceiver ) { LOG.info("Stopping receiver thread"); runReceiver = false; } } private DatagramSocket dataSocket; private DatagramPacket packetIn; private boolean runReceiver; } private final String appName; private final byte[] appVersion; private final Scene scene; private DatagramSocket cmdSocket; private final DatagramPacket packetIn, packetOut; private final ByteBuffer bufOut; private boolean connected; private int errorCounter; private boolean frameStreaming; private final ServerInfo serverInfo; private ReceiverThread receiverThread; private final Set<SceneListener> sceneListeners; private final static Marker DUMMY_MARKER = new Marker(null, "dummy"); private final static Bone DUMMY_BONE = new Bone(null, "dummy", 0); private final static Device DUMMY_DEVICE = new Device(null, "dummy", 0); private final static Logger LOG = Logger.getLogger(NatNetClient.class.getName()); }
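/*
 * Usage sketch (not part of the original source): a minimal, hypothetical client program for the
 * NatNetClient above. It assumes it lives in the same "mocap" package (so the package-level Scene
 * fields written by the client are visible) and that SceneListener declares exactly the
 * sceneChanged(Scene) and sceneUpdated(Scene) callbacks invoked by NatNetClient; the server address
 * and the polling loop are illustrative only.
 */
package mocap;

import java.net.InetAddress;

public class NatNetClientExample {

	public static void main(String[] args) throws Exception {
		MoCapClient client = new NatNetClient("Example App", new byte[] { 1, 0, 0, 0 });
		if ( !client.connect(InetAddress.getByName("127.0.0.1")) ) {
			System.err.println("Could not connect to the MoCap server.");
			return;
		}
		System.out.println("Connected to: " + client.getServerName());

		// listeners get sceneChanged() on model definition updates and sceneUpdated() on every new frame
		client.addSceneListener(new SceneListener() {
			@Override
			public void sceneChanged(Scene scene) {
				System.out.println("Scene changed: " + scene.actors.length + " actor(s)");
			}

			@Override
			public void sceneUpdated(Scene scene) {
				System.out.println("Frame " + scene.frameNumber + ", latency " + scene.latency + " ms");
			}
		});

		// update() only polls for a frame when the streaming receiver thread is not delivering data
		for ( int i = 0 ; i < 100 ; i++ ) {
			client.update();
			Thread.sleep(10);
		}
		client.disconnect();
	}
}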
package org.pentaho.di.job.entries.simpleeval; import java.text.SimpleDateFormat; import java.util.Date; import java.util.List; import java.util.regex.Pattern; import org.pentaho.di.cluster.SlaveServer; import org.pentaho.di.core.Const; import org.pentaho.di.core.Result; import org.pentaho.di.core.RowMetaAndData; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.exception.KettleDatabaseException; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleXMLException; import org.pentaho.di.core.row.ValueMeta; import org.pentaho.di.core.util.StringUtil; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.job.entry.JobEntryBase; import org.pentaho.di.job.entry.JobEntryInterface; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.Repository; import org.w3c.dom.Node; /** * This defines a 'simple evaluation' job entry. * * @author Samatar Hassan * @since 01-01-2009 */ public class JobEntrySimpleEval extends JobEntryBase implements Cloneable, JobEntryInterface { private static Class<?> PKG = JobEntrySimpleEval.class; // for i18n purposes, needed by Translator2!! $NON-NLS-1$ public static final String[] valueTypeDesc = new String[] { BaseMessages.getString(PKG, "JobSimpleEval.EvalPreviousField.Label"), BaseMessages.getString(PKG, "JobSimpleEval.EvalVariable.Label"), }; public static final String[] valueTypeCode = new String[] { "field", "variable" }; public static final int VALUE_TYPE_FIELD=0; public static final int VALUE_TYPE_VARIABLE=1; public int valuetype; public static final String[] successConditionDesc = new String[] { BaseMessages.getString(PKG, "JobSimpleEval.SuccessWhenEqual.Label"), BaseMessages.getString(PKG, "JobSimpleEval.SuccessWhenDifferent.Label"), BaseMessages.getString(PKG, "JobSimpleEval.SuccessWhenContains.Label"), BaseMessages.getString(PKG, "JobSimpleEval.SuccessWhenNotContains.Label"), BaseMessages.getString(PKG, "JobSimpleEval.SuccessWhenStartWith.Label"), BaseMessages.getString(PKG, "JobSimpleEval.SuccessWhenNotStartWith.Label"), BaseMessages.getString(PKG, "JobSimpleEval.SuccessWhenEndWith.Label"), BaseMessages.getString(PKG, "JobSimpleEval.SuccessWhenNotEndWith.Label"), BaseMessages.getString(PKG, "JobSimpleEval.SuccessWhenRegExp.Label") }; public static final String[] successConditionCode = new String[] { "equal", "different", "contains", "notcontains", "startswith", "notstatwith", "endswith", "notendwith", "regexp" }; public static final int SUCCESS_CONDITION_EQUAL=0; public static final int SUCCESS_CONDITION_DIFFERENT=1; public static final int SUCCESS_CONDITION_CONTAINS=2; public static final int SUCCESS_CONDITION_NOT_CONTAINS=3; public static final int SUCCESS_CONDITION_START_WITH=4; public static final int SUCCESS_CONDITION_NOT_START_WITH=5; public static final int SUCCESS_CONDITION_END_WITH=6; public static final int SUCCESS_CONDITION_NOT_END_WITH=7; public static final int SUCCESS_CONDITION_REGEX=8; public int successcondition; public static final String[] fieldTypeDesc = new String[] { BaseMessages.getString(PKG, "JobSimpleEval.FieldTypeString.Label"), BaseMessages.getString(PKG, "JobSimpleEval.FieldTypeNumber.Label"), BaseMessages.getString(PKG, "JobSimpleEval.FieldTypeDateTime.Label"), BaseMessages.getString(PKG, "JobSimpleEval.FieldTypeBoolean.Label"), }; public static final String[] fieldTypeCode = new String[] { "string", "number", "datetime", "boolean" }; public static final int FIELD_TYPE_STRING=0; public 
static final int FIELD_TYPE_NUMBER=1; public static final int FIELD_TYPE_DATE_TIME=2; public static final int FIELD_TYPE_BOOLEAN=3; public int fieldtype; public static final String[] successNumberConditionDesc = new String[] { BaseMessages.getString(PKG, "JobSimpleEval.SuccessWhenEqual.Label"), BaseMessages.getString(PKG, "JobSimpleEval.SuccessWhenDifferent.Label"), BaseMessages.getString(PKG, "JobSimpleEval.SuccessWhenSmallThan.Label"), BaseMessages.getString(PKG, "JobSimpleEval.SuccessWhenSmallOrEqualThan.Label"), BaseMessages.getString(PKG, "JobSimpleEval.SuccessWhenGreaterThan.Label"), BaseMessages.getString(PKG, "JobSimpleEval.SuccessWhenGreaterOrEqualThan.Label"), BaseMessages.getString(PKG, "JobSimpleEval.SuccessBetween.Label"), }; public static final String[] successNumberConditionCode = new String[] { "equal", "different", "smaller", "smallequal", "greater", "greaterequal", "between" }; public static final int SUCCESS_NUMBER_CONDITION_EQUAL=0; public static final int SUCCESS_NUMBER_CONDITION_DIFFERENT=1; public static final int SUCCESS_NUMBER_CONDITION_SMALLER=2; public static final int SUCCESS_NUMBER_CONDITION_SMALLER_EQUAL=3; public static final int SUCCESS_NUMBER_CONDITION_GREATER=4; public static final int SUCCESS_NUMBER_CONDITION_GREATER_EQUAL=5; public static final int SUCCESS_NUMBER_CONDITION_BETWEEN=6; public int successnumbercondition; public static final String[] successBooleanConditionDesc = new String[] { BaseMessages.getString(PKG, "JobSimpleEval.SuccessWhenTrue.Label"), BaseMessages.getString(PKG, "JobSimpleEval.SuccessWhenFalse.Label") }; public static final String[] successBooleanConditionCode = new String[] { "true", "false" }; public static final int SUCCESS_BOOLEAN_CONDITION_TRUE=0; public static final int SUCCESS_BOOLEAN_CONDITION_FALSE=1; public int successbooleancondition; private String fieldname; private String variablename; private String mask; private String comparevalue; private String minvalue; private String maxvalue; private boolean successwhenvarset; public JobEntrySimpleEval(String n) { super(n, ""); valuetype=VALUE_TYPE_FIELD; successcondition=SUCCESS_CONDITION_EQUAL; successnumbercondition=SUCCESS_NUMBER_CONDITION_EQUAL; successbooleancondition=SUCCESS_BOOLEAN_CONDITION_FALSE; minvalue=null; maxvalue=null; comparevalue=null; fieldname=null; variablename=null; fieldtype=FIELD_TYPE_STRING; mask=null; successwhenvarset=false; setID(-1L); } public JobEntrySimpleEval() { this(""); } public Object clone() { JobEntrySimpleEval je = (JobEntrySimpleEval) super.clone(); return je; } private static String getValueTypeCode(int i) { if (i < 0 || i >= valueTypeCode.length) return valueTypeCode[0]; return valueTypeCode[i]; } private static String getFieldTypeCode(int i) { if (i < 0 || i >= fieldTypeCode.length) return fieldTypeCode[0]; return fieldTypeCode[i]; } private static String getSuccessConditionCode(int i) { if (i < 0 || i >= successConditionCode.length) return successConditionCode[0]; return successConditionCode[i]; } public static String getSuccessNumberConditionCode(int i) { if (i < 0 || i >= successNumberConditionCode.length) return successNumberConditionCode[0]; return successNumberConditionCode[i]; } private static String getSuccessBooleanConditionCode(int i) { if (i < 0 || i >= successBooleanConditionCode.length) return successBooleanConditionCode[0]; return successBooleanConditionCode[i]; } public String getXML() { StringBuffer retval = new StringBuffer(300); retval.append(super.getXML()); retval.append(" 
").append(XMLHandler.addTagValue("valuetype",getValueTypeCode(valuetype))); retval.append(" ").append(XMLHandler.addTagValue("fieldname", fieldname)); retval.append(" ").append(XMLHandler.addTagValue("variablename", variablename)); retval.append(" ").append(XMLHandler.addTagValue("fieldtype",getFieldTypeCode(fieldtype))); retval.append(" ").append(XMLHandler.addTagValue("mask", mask)); retval.append(" ").append(XMLHandler.addTagValue("comparevalue", comparevalue)); retval.append(" ").append(XMLHandler.addTagValue("minvalue", minvalue)); retval.append(" ").append(XMLHandler.addTagValue("maxvalue", maxvalue)); retval.append(" ").append(XMLHandler.addTagValue("successcondition",getSuccessConditionCode(successcondition))); retval.append(" ").append(XMLHandler.addTagValue("successnumbercondition",getSuccessNumberConditionCode(successnumbercondition))); retval.append(" ").append(XMLHandler.addTagValue("successbooleancondition",getSuccessBooleanConditionCode(successbooleancondition))); retval.append(" ").append(XMLHandler.addTagValue("successwhenvarset", successwhenvarset)); return retval.toString(); } private static int getValueTypeByCode(String tt) { if (tt == null) return 0; for (int i = 0; i < valueTypeCode.length; i++) { if (valueTypeCode[i].equalsIgnoreCase(tt)) return i; } return 0; } private static int getSuccessNumberByCode(String tt) { if (tt == null) return 0; for (int i = 0; i < successNumberConditionCode.length; i++) { if (successNumberConditionCode[i].equalsIgnoreCase(tt)) return i; } return 0; } private static int getSuccessBooleanByCode(String tt) { if (tt == null) return 0; for (int i = 0; i < successBooleanConditionCode.length; i++) { if (successBooleanConditionCode[i].equalsIgnoreCase(tt)) return i; } return 0; } private static int getFieldTypeByCode(String tt) { if (tt == null) return 0; for (int i = 0; i < fieldTypeCode.length; i++) { if (fieldTypeCode[i].equalsIgnoreCase(tt)) return i; } return 0; } private static int getSuccessConditionByCode(String tt) { if (tt == null) return 0; for (int i = 0; i < successConditionCode.length; i++) { if (successConditionCode[i].equalsIgnoreCase(tt)) return i; } return 0; } public void setSuccessWhenVarSet(boolean successwhenvarset) { this.successwhenvarset=successwhenvarset; } public boolean isSuccessWhenVarSet() { return this.successwhenvarset; } public static int getSuccessNumberConditionByCode(String tt) { if (tt == null) return 0; for (int i = 0; i < successNumberConditionCode.length; i++) { if (successNumberConditionCode[i].equalsIgnoreCase(tt)) return i; } return 0; } private static int getSuccessBooleanConditionByCode(String tt) { if (tt == null) return 0; for (int i = 0; i < successBooleanConditionCode.length; i++) { if (successBooleanConditionCode[i].equalsIgnoreCase(tt)) return i; } return 0; } public void loadXML(Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers, Repository rep) throws KettleXMLException { try { super.loadXML(entrynode, databases, slaveServers); valuetype = getValueTypeByCode(Const.NVL(XMLHandler.getTagValue(entrynode, "valuetype"), "")); fieldname = XMLHandler.getTagValue(entrynode, "fieldname"); fieldtype = getFieldTypeByCode(Const.NVL(XMLHandler.getTagValue(entrynode, "fieldtype"), "")); variablename = XMLHandler.getTagValue(entrynode, "variablename"); mask = XMLHandler.getTagValue(entrynode, "mask"); comparevalue = XMLHandler.getTagValue(entrynode, "comparevalue"); minvalue = XMLHandler.getTagValue(entrynode, "minvalue"); maxvalue = XMLHandler.getTagValue(entrynode, "maxvalue"); 
      successcondition = getSuccessConditionByCode(Const.NVL(XMLHandler.getTagValue(entrynode, "successcondition"), ""));
      successnumbercondition = getSuccessNumberConditionByCode(Const.NVL(XMLHandler.getTagValue(entrynode, "successnumbercondition"), ""));
      successbooleancondition = getSuccessBooleanConditionByCode(Const.NVL(XMLHandler.getTagValue(entrynode, "successbooleancondition"), ""));
      successwhenvarset = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "successwhenvarset"));
    } catch (KettleXMLException xe) {
      throw new KettleXMLException(BaseMessages.getString(PKG, "JobEntrySimple.Error.Exception.UnableLoadXML"), xe);
    }
  }

  public void loadRep(Repository rep, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException {
    try {
      valuetype = getValueTypeByCode(Const.NVL(rep.getJobEntryAttributeString(id_jobentry, "valuetype"), ""));
      fieldname = rep.getJobEntryAttributeString(id_jobentry, "fieldname");
      variablename = rep.getJobEntryAttributeString(id_jobentry, "variablename");
      fieldtype = getFieldTypeByCode(Const.NVL(rep.getJobEntryAttributeString(id_jobentry, "fieldtype"), ""));
      mask = rep.getJobEntryAttributeString(id_jobentry, "mask");
      comparevalue = rep.getJobEntryAttributeString(id_jobentry, "comparevalue");
      minvalue = rep.getJobEntryAttributeString(id_jobentry, "minvalue");
      maxvalue = rep.getJobEntryAttributeString(id_jobentry, "maxvalue");
      successcondition = getSuccessConditionByCode(Const.NVL(rep.getJobEntryAttributeString(id_jobentry, "successcondition"), ""));
      successnumbercondition = getSuccessNumberConditionByCode(Const.NVL(rep.getJobEntryAttributeString(id_jobentry, "successnumbercondition"), ""));
      successbooleancondition = getSuccessBooleanConditionByCode(Const.NVL(rep.getJobEntryAttributeString(id_jobentry, "successbooleancondition"), ""));
      successwhenvarset = rep.getJobEntryAttributeBoolean(id_jobentry, "successwhenvarset");
    } catch (KettleException dbe) {
      throw new KettleException(BaseMessages.getString(PKG, "JobEntrySimple.Error.Exception.UnableLoadRep") + id_jobentry, dbe);
    }
  }

  public void saveRep(Repository rep, ObjectId id_job) throws KettleException {
    try {
      rep.saveJobEntryAttribute(id_job, getObjectId(), "valuetype", getValueTypeCode(valuetype));
      rep.saveJobEntryAttribute(id_job, getObjectId(), "fieldname", fieldname);
      rep.saveJobEntryAttribute(id_job, getObjectId(), "variablename", variablename);
      // Persist the field type once, as its symbolic code (this is what loadRep() reads back via getFieldTypeByCode()).
      rep.saveJobEntryAttribute(id_job, getObjectId(), "fieldtype", getFieldTypeCode(fieldtype));
      rep.saveJobEntryAttribute(id_job, getObjectId(), "mask", mask);
      rep.saveJobEntryAttribute(id_job, getObjectId(), "comparevalue", comparevalue);
      rep.saveJobEntryAttribute(id_job, getObjectId(), "minvalue", minvalue);
      rep.saveJobEntryAttribute(id_job, getObjectId(), "maxvalue", maxvalue);
      rep.saveJobEntryAttribute(id_job, getObjectId(), "successcondition", getSuccessConditionCode(successcondition));
      rep.saveJobEntryAttribute(id_job, getObjectId(), "successnumbercondition", getSuccessNumberConditionCode(successnumbercondition));
      rep.saveJobEntryAttribute(id_job, getObjectId(), "successbooleancondition", getSuccessBooleanConditionCode(successbooleancondition));
      rep.saveJobEntryAttribute(id_job, getObjectId(), "successwhenvarset", successwhenvarset);
    } catch (KettleDatabaseException dbe) {
      throw new KettleException(BaseMessages.getString(PKG, "JobEntrySimple.Error.Exception.UnableSaveRep") + id_job, dbe);
    }
  }

  public Result execute(Result previousResult, int nr) throws KettleException {
Result result = previousResult; result.setNrErrors(1); result.setResult(false); String sourcevalue=null; switch (valuetype) { case VALUE_TYPE_FIELD: List<RowMetaAndData> rows = result.getRows(); RowMetaAndData resultRow = null; if(isDetailed()) logDetailed(BaseMessages.getString(PKG, "JobEntrySimpleEval.Log.ArgFromPrevious.Found",(rows!=null?rows.size():0)+ "")); if(rows.size()==0) { rows=null; logError(BaseMessages.getString(PKG, "JobEntrySimpleEval.Error.NoRows")); return result; } // get first row resultRow = rows.get(0); String realfieldname=environmentSubstitute(fieldname); int indexOfField=-1; indexOfField=resultRow.getRowMeta().indexOfValue(realfieldname); if(indexOfField==-1) { logError(BaseMessages.getString(PKG, "JobEntrySimpleEval.Error.FieldNotExist",realfieldname)); resultRow=null; rows=null; return result; } sourcevalue=resultRow.getString(indexOfField,null); resultRow=null; rows=null; break; case VALUE_TYPE_VARIABLE: if(Const.isEmpty(variablename)) { logError(BaseMessages.getString(PKG, "JobEntrySimpleEval.Error.VariableMissing")); return result; } if(isSuccessWhenVarSet()) { // return variable name String variableName=StringUtil.getVariableName(getVariableWithSpec()); if(System.getProperty(variableName)!=null) { if(isDetailed()) logDetailed(BaseMessages.getString(PKG, "JobEntrySimpleEval.VariableSet",variableName)); result.setResult(true); result.setNrErrors(0); return result; }else { if(isDetailed()) logDetailed(BaseMessages.getString(PKG, "JobEntrySimpleEval.VariableNotSet",variableName)); return result; } } sourcevalue=environmentSubstitute(getVariableWithSpec()); break; default: break; } if(isDetailed()) logDetailed(BaseMessages.getString(PKG, "JobSimpleEval.Log.ValueToevaluate",sourcevalue)); boolean success=false; String realCompareValue=environmentSubstitute(comparevalue); String realMinValue=environmentSubstitute(minvalue); String realMaxValue=environmentSubstitute(maxvalue); switch (fieldtype) { case FIELD_TYPE_STRING: switch (successcondition) { case SUCCESS_CONDITION_EQUAL: // equal if(isDebug()) logDebug( BaseMessages.getString(PKG, "JobSimpleEval.Log.CompareWithValue",sourcevalue,realCompareValue)); success=(sourcevalue.equals(realCompareValue)); if(valuetype== VALUE_TYPE_VARIABLE && !success) { // make the empty value evaluate to true when compared to a not set variable if(Const.isEmpty(realCompareValue)) { String variableName=StringUtil.getVariableName(variablename); if(System.getProperty(variableName)==null) { success=true; } } } break; case SUCCESS_CONDITION_DIFFERENT: // different if(isDebug()) logDebug( BaseMessages.getString(PKG, "JobSimpleEval.Log.CompareWithValue",sourcevalue,realCompareValue)); success=(!sourcevalue.equals(realCompareValue)); break; case SUCCESS_CONDITION_CONTAINS: // contains if(isDebug()) logDebug( BaseMessages.getString(PKG, "JobSimpleEval.Log.CompareWithValue",sourcevalue,realCompareValue)); success=(sourcevalue.contains(realCompareValue)); break; case SUCCESS_CONDITION_NOT_CONTAINS: // not contains if(isDebug()) logDebug( BaseMessages.getString(PKG, "JobSimpleEval.Log.CompareWithValue",sourcevalue,realCompareValue)); success=(!sourcevalue.contains(realCompareValue)); break; case SUCCESS_CONDITION_START_WITH: // starts with if(isDebug()) logDebug( BaseMessages.getString(PKG, "JobSimpleEval.Log.CompareWithValue",sourcevalue,realCompareValue)); success=(sourcevalue.startsWith(realCompareValue)); break; case SUCCESS_CONDITION_NOT_START_WITH: // not start with if(isDebug()) logDebug( BaseMessages.getString(PKG, 
"JobSimpleEval.Log.CompareWithValue",sourcevalue,realCompareValue)); success=(!sourcevalue.startsWith(realCompareValue)); break; case SUCCESS_CONDITION_END_WITH: // ends with if(isDebug()) logDebug( BaseMessages.getString(PKG, "JobSimpleEval.Log.CompareWithValue",sourcevalue,realCompareValue)); success=(sourcevalue.endsWith(realCompareValue)); break; case SUCCESS_CONDITION_NOT_END_WITH: // not ends with if(isDebug()) logDebug( BaseMessages.getString(PKG, "JobSimpleEval.Log.CompareWithValue",sourcevalue,realCompareValue)); success=(!sourcevalue.endsWith(realCompareValue)); break; case SUCCESS_CONDITION_REGEX: // regexp if(isDebug()) logDebug( BaseMessages.getString(PKG, "JobSimpleEval.Log.CompareWithValue",sourcevalue,realCompareValue)); success=(Pattern.compile(realCompareValue).matcher(sourcevalue).matches()); break; default: break; } break; case FIELD_TYPE_NUMBER: double valuenumber; try{valuenumber=Double.parseDouble(sourcevalue); }catch(Exception e) {logError(BaseMessages.getString(PKG, "JobEntrySimpleEval.Error.UnparsableNumber",sourcevalue,e.getMessage()));return result;} double valuecompare; switch (successnumbercondition) { case SUCCESS_NUMBER_CONDITION_EQUAL: // equal if(isDebug()) logDebug( BaseMessages.getString(PKG, "JobSimpleEval.Log.CompareWithValue",sourcevalue,realCompareValue)); try{valuecompare=Double.parseDouble(realCompareValue); }catch(Exception e) {logError(BaseMessages.getString(PKG, "JobEntrySimpleEval.Error.UnparsableNumber",realCompareValue,e.getMessage()));return result;} success=(valuenumber==valuecompare); break; case SUCCESS_NUMBER_CONDITION_DIFFERENT: // different if(isDebug()) logDebug( BaseMessages.getString(PKG, "JobSimpleEval.Log.CompareWithValue",sourcevalue,realCompareValue)); try{valuecompare=Double.parseDouble(realCompareValue); }catch(Exception e) {logError(BaseMessages.getString(PKG, "JobEntrySimpleEval.Error.UnparsableNumber",realCompareValue,e.getMessage()));return result;} success=(valuenumber!=valuecompare); break; case SUCCESS_NUMBER_CONDITION_SMALLER: // smaller if(isDebug()) logDebug( BaseMessages.getString(PKG, "JobSimpleEval.Log.CompareWithValue",sourcevalue,realCompareValue)); try{valuecompare=Double.parseDouble(realCompareValue); }catch(Exception e) {logError(BaseMessages.getString(PKG, "JobEntrySimpleEval.Error.UnparsableNumber",realCompareValue,e.getMessage()));return result;} success=(valuenumber<valuecompare); break; case SUCCESS_NUMBER_CONDITION_SMALLER_EQUAL: // smaller or equal if(isDebug()) logDebug( BaseMessages.getString(PKG, "JobSimpleEval.Log.CompareWithValue",sourcevalue,realCompareValue)); try{valuecompare=Double.parseDouble(realCompareValue); }catch(Exception e) {logError(BaseMessages.getString(PKG, "JobEntrySimpleEval.Error.UnparsableNumber",realCompareValue,e.getMessage()));return result;} success=(valuenumber<=valuecompare); break; case SUCCESS_NUMBER_CONDITION_GREATER: // greater try{valuecompare=Double.parseDouble(realCompareValue); }catch(Exception e) {logError(BaseMessages.getString(PKG, "JobEntrySimpleEval.Error.UnparsableNumber",realCompareValue,e.getMessage()));return result;} success=(valuenumber>valuecompare); break; case SUCCESS_NUMBER_CONDITION_GREATER_EQUAL: // greater or equal if(isDebug()) logDebug( BaseMessages.getString(PKG, "JobSimpleEval.Log.CompareWithValue",sourcevalue,realCompareValue)); try{valuecompare=Double.parseDouble(realCompareValue); }catch(Exception e) {logError(BaseMessages.getString(PKG, "JobEntrySimpleEval.Error.UnparsableNumber",realCompareValue,e.getMessage()));return result;} 
success=(valuenumber>=valuecompare); break; case SUCCESS_NUMBER_CONDITION_BETWEEN: // between min and max if(isDebug()) logDebug( BaseMessages.getString(PKG, "JobSimpleEval.Log.CompareWithValues",realMinValue,realMaxValue)); double valuemin; try{valuemin=Double.parseDouble(realMinValue); }catch(Exception e) {logError(BaseMessages.getString(PKG, "JobEntrySimpleEval.Error.UnparsableNumber",realMinValue,e.getMessage()));return result;} double valuemax; try{valuemax=Double.parseDouble(realMaxValue); }catch(Exception e) {logError(BaseMessages.getString(PKG, "JobEntrySimpleEval.Error.UnparsableNumber",realMaxValue,e.getMessage()));return result;} if(valuemin>=valuemax) { logError(BaseMessages.getString(PKG, "JobEntrySimpleEval.Error.IncorrectNumbers",realMinValue,realMaxValue)); return result; } success=(valuenumber>=valuemin && valuenumber<=valuemax); break; default: break; } break; case FIELD_TYPE_DATE_TIME: String realMask=environmentSubstitute(mask); SimpleDateFormat df = new SimpleDateFormat(); if (!Const.isEmpty(realMask)) df.applyPattern(realMask); Date datevalue=null; try{datevalue=convertToDate(sourcevalue, realMask, df); }catch(Exception e) {logError(e.getMessage());return result;} Date datecompare; switch (successnumbercondition) { case SUCCESS_NUMBER_CONDITION_EQUAL: // equal if(isDebug()) logDebug( BaseMessages.getString(PKG, "JobSimpleEval.Log.CompareWithValue",sourcevalue,realCompareValue)); try{datecompare=convertToDate(realCompareValue, realMask, df); }catch(Exception e) {logError(e.getMessage());return result;} success=(datevalue.equals(datecompare)); break; case SUCCESS_NUMBER_CONDITION_DIFFERENT: // different if(isDebug()) logDebug( BaseMessages.getString(PKG, "JobSimpleEval.Log.CompareWithValue",sourcevalue,realCompareValue)); try{datecompare=convertToDate(realCompareValue, realMask, df); }catch(Exception e) {logError(e.getMessage());return result;} success=(!datevalue.equals(datecompare)); break; case SUCCESS_NUMBER_CONDITION_SMALLER: // smaller if(isDebug()) logDebug( BaseMessages.getString(PKG, "JobSimpleEval.Log.CompareWithValue",sourcevalue,realCompareValue)); try{datecompare=convertToDate(realCompareValue, realMask, df); }catch(Exception e) {logError(e.getMessage());return result;} success=(datevalue.before(datecompare)); break; case SUCCESS_NUMBER_CONDITION_SMALLER_EQUAL: // smaller or equal if(isDebug()) logDebug( BaseMessages.getString(PKG, "JobSimpleEval.Log.CompareWithValue",sourcevalue,realCompareValue)); try{datecompare=convertToDate(realCompareValue, realMask, df); }catch(Exception e) {logError(e.getMessage());return result;} success=(datevalue.before(datecompare) || datevalue.equals(datecompare)); break; case SUCCESS_NUMBER_CONDITION_GREATER: // greater if(isDebug()) logDebug( BaseMessages.getString(PKG, "JobSimpleEval.Log.CompareWithValue",sourcevalue,realCompareValue)); try{datecompare=convertToDate(realCompareValue, realMask, df); }catch(Exception e) {logError(e.getMessage());return result;} success=(datevalue.after(datecompare)); break; case SUCCESS_NUMBER_CONDITION_GREATER_EQUAL: // greater or equal if(isDebug()) logDebug( BaseMessages.getString(PKG, "JobSimpleEval.Log.CompareWithValue",sourcevalue,realCompareValue)); try{datecompare=convertToDate (realCompareValue, realMask, df); }catch(Exception e) {logError(e.getMessage());return result;} success=(datevalue.after(datecompare) || datevalue.equals(datecompare)) ; break; case SUCCESS_NUMBER_CONDITION_BETWEEN: // between min and max if(isDebug()) logDebug( BaseMessages.getString(PKG, 
"JobSimpleEval.Log.CompareWithValues",realMinValue,realMaxValue)); Date datemin; try{datemin=convertToDate(realMinValue, realMask, df); }catch(Exception e) {logError(e.getMessage());return result;} Date datemax; try{datemax=convertToDate(realMaxValue, realMask, df); }catch(Exception e) {logError(e.getMessage());return result;} if(datemin.after(datemax) || datemin.equals(datemax)) { logError(BaseMessages.getString(PKG, "JobEntrySimpleEval.Error.IncorrectDates",realMinValue,realMaxValue)); return result; } success=((datevalue.after(datemin)|| datevalue.equals(datemin)) && (datevalue.before(datemax)|| datevalue.equals(datemax))); break; default: break; } df=null; break; case FIELD_TYPE_BOOLEAN: boolean valuebool; try{ valuebool=ValueMeta.convertStringToBoolean(sourcevalue); }catch(Exception e) {logError(BaseMessages.getString(PKG, "JobEntrySimpleEval.Error.UnparsableBoolean",sourcevalue,e.getMessage()));return result;} switch (successbooleancondition) { case SUCCESS_BOOLEAN_CONDITION_FALSE: // false success=(!valuebool); break; case SUCCESS_BOOLEAN_CONDITION_TRUE: // true success=(valuebool); break; default: break; } break; default: break; } if(success) { result.setResult(true); result.setNrErrors(0); } return result; } /* * Returns variable with specifications */ private String getVariableWithSpec() { String variable=getVariableName(); if((!variable.contains(StringUtil.UNIX_OPEN) && !variable.contains(StringUtil.WINDOWS_OPEN) && !variable.contains(StringUtil.HEX_OPEN)) && ((!variable.contains(StringUtil.UNIX_CLOSE) && !variable.contains(StringUtil.WINDOWS_CLOSE) && !variable.contains(StringUtil.HEX_CLOSE)))) { // Add specifications to variable variable=StringUtil.UNIX_OPEN + variable + StringUtil.UNIX_CLOSE; if(isDetailed()) logDetailed(BaseMessages.getString(PKG, "JobEntrySimpleEval.CheckingVariable", variable)); } return variable; } private Date convertToDate(String valueString,String mask,SimpleDateFormat df) throws KettleException { Date datevalue=null; try{ datevalue=df.parse(valueString); }catch(Exception e) { throw new KettleException(BaseMessages.getString(PKG, "JobEntrySimpleEval.Error.UnparsableDate",valueString)); } return datevalue; } public static String getValueTypeDesc(int i) { if (i < 0 || i >= valueTypeDesc.length) return valueTypeDesc[0]; return valueTypeDesc[i]; } public static String getFieldTypeDesc(int i) { if (i < 0 || i >= fieldTypeDesc.length) return fieldTypeDesc[0]; return fieldTypeDesc[i]; } public static String getSuccessConditionDesc(int i) { if (i < 0 || i >= successConditionDesc.length) return successConditionDesc[0]; return successConditionDesc[i]; } public static String getSuccessNumberConditionDesc(int i) { if (i < 0 || i >= successNumberConditionDesc.length) return successNumberConditionDesc[0]; return successNumberConditionDesc[i]; } public static String getSuccessBooleanConditionDesc(int i) { if (i < 0 || i >= successBooleanConditionDesc.length) return successBooleanConditionDesc[0]; return successBooleanConditionDesc[i]; } public static int getValueTypeByDesc(String tt) { if (tt == null) return 0; for (int i = 0; i < valueTypeDesc.length; i++) { if (valueTypeDesc[i].equalsIgnoreCase(tt)) return i; } // If this fails, try to match using the code. return getValueTypeByCode(tt); } public static int getFieldTypeByDesc(String tt) { if (tt == null) return 0; for (int i = 0; i < fieldTypeDesc.length; i++) { if (fieldTypeDesc[i].equalsIgnoreCase(tt)) return i; } // If this fails, try to match using the code. 
return getFieldTypeByCode(tt); } public static int getSuccessConditionByDesc(String tt) { if (tt == null) return 0; for (int i = 0; i < successConditionDesc.length; i++) { if (successConditionDesc[i].equalsIgnoreCase(tt)) return i; } // If this fails, try to match using the code. return getSuccessConditionByCode(tt); } public static int getSuccessNumberConditionByDesc(String tt) { if (tt == null) return 0; for (int i = 0; i < successNumberConditionDesc.length; i++) { if (successNumberConditionDesc[i].equalsIgnoreCase(tt)) return i; } // If this fails, try to match using the code. return getSuccessNumberByCode(tt); } public static int getSuccessBooleanConditionByDesc(String tt) { if (tt == null) return 0; for (int i = 0; i < successBooleanConditionDesc.length; i++) { if (successBooleanConditionDesc[i].equalsIgnoreCase(tt)) return i; } // If this fails, try to match using the code. return getSuccessBooleanByCode(tt); } public void setMinValue(String minvalue) { this.minvalue=minvalue; } public String getMinValue() { return minvalue; } public void setCompareValue(String comparevalue) { this.comparevalue=comparevalue; } public String getMask() { return mask; } public void setMask(String mask) { this.mask=mask; } public String getFieldName() { return fieldname; } public void setFieldName(String fieldname) { this.fieldname=fieldname; } public String getVariableName() { return variablename; } public void setVariableName(String variablename) { this.variablename=variablename; } public String getCompareValue() { return comparevalue; } public void setMaxValue(String maxvalue) { this.maxvalue=maxvalue; } public String getMaxValue() { return maxvalue; } public boolean evaluates() { return true; } }
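// Usage sketch (not part of the original sources; the entry name and values below are hypothetical).
// It exercises the public configuration API shown above: the constructor defaults valuetype to
// VALUE_TYPE_FIELD, so execute() reads the named field from the first row of the previous result
// and compares it against comparevalue (or minvalue/maxvalue for the "between" conditions) using
// the code <-> index lookups defined in this class.
//
//   JobEntrySimpleEval eval = new JobEntrySimpleEval("check row count");
//   eval.setFieldName("row_count");                  // field read from the first row of the previous result
//   eval.setCompareValue("0");
//   eval.fieldtype = JobEntrySimpleEval.FIELD_TYPE_NUMBER;
//   // unknown codes fall back to index 0, i.e. the "equal" condition
//   eval.successnumbercondition = JobEntrySimpleEval.getSuccessNumberConditionByCode("greater");
//   String xml = eval.getXML();                      // serialises the settings written by getXML()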
package imagej.ops.map.parallel;

import static org.junit.Assert.assertEquals;

import imagej.ops.AbstractFunction;
import imagej.ops.AbstractInplaceFunction;
import imagej.ops.AbstractOpTest;
import imagej.ops.Op;
import imagej.ops.map.FunctionMapII;
import imagej.ops.map.FunctionMapIIRAI;

import net.imglib2.Cursor;
import net.imglib2.img.Img;
import net.imglib2.type.numeric.integer.ByteType;

import org.junit.Before;
import org.junit.Test;

/**
 * Testing multi threaded implementation ({@link FunctionMapIIRAIP} and
 * {@link FunctionMapIIP}) of the mappers. Assumption: Naive Implementation of
 * {@link FunctionMapIIRAI} works fine.
 *
 * @author Christian Dietz
 */
public class ThreadedMapTest extends AbstractOpTest {

  private Img<ByteType> in;
  private Img<ByteType> out;

  @Before
  public void initImg() {
    in = generateByteTestImg(true, 10, 10);
    out = generateByteTestImg(false, 10, 10);
  }

  @Test
  public void testMapII() {
    final Op functional = ops.op(FunctionMapII.class, out, in, new AddOneFunctional());
    functional.run();

    final Cursor<ByteType> cursor1 = in.cursor();
    final Cursor<ByteType> cursor2 = out.cursor();

    while (cursor1.hasNext()) {
      cursor1.fwd();
      cursor2.fwd();
      assertEquals(cursor1.get().get() + 1, cursor2.get().get());
    }
  }

  @Test
  public void testFunctionMapIIRAIP() {
    final Op functional = ops.op(FunctionMapIIRAIP.class, out, in, new AddOneFunctional());
    functional.run();

    final Cursor<ByteType> cursor1 = in.cursor();
    final Cursor<ByteType> cursor2 = out.cursor();

    while (cursor1.hasNext()) {
      cursor1.fwd();
      cursor2.fwd();
      assertEquals(cursor1.get().get() + 1, cursor2.get().get());
    }
  }

  @Test
  public void testFunctionMapIIP() {
    final Op functional = ops.op(FunctionMapIIP.class, out, in, new AddOneFunctional());
    functional.run();

    final Cursor<ByteType> cursor1 = in.cursor();
    final Cursor<ByteType> cursor2 = out.cursor();

    while (cursor1.hasNext()) {
      cursor1.fwd();
      cursor2.fwd();
      assertEquals(cursor1.get().get() + 1, cursor2.get().get());
    }
  }

  @Test
  public void testInplaceMapP() {
    final Cursor<ByteType> cursor1 = in.copy().cursor();
    final Cursor<ByteType> cursor2 = in.cursor();

    final Op functional = ops.op(InplaceMapP.class, in, new AddOneInplace());
    functional.run();

    while (cursor1.hasNext()) {
      cursor1.fwd();
      cursor2.fwd();
      assertEquals(cursor1.get().get() + 1, cursor2.get().get());
    }
  }
}

// Helper classes
class AddOneInplace extends AbstractInplaceFunction<ByteType> {

  @Override
  public ByteType compute(final ByteType arg) {
    arg.inc();
    return arg;
  }
}

class AddOneFunctional extends AbstractFunction<ByteType, ByteType> {

  @Override
  public ByteType compute(final ByteType input, final ByteType output) {
    output.set(input);
    output.inc();
    return output;
  }
}
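// A further sketch in the same style as the helpers above (not part of the original file;
// DoubleFunctional is a hypothetical helper). Any AbstractFunction<ByteType, ByteType> can be
// handed to the threaded mappers in the same way, e.g.
//   ops.op(FunctionMapIIP.class, out, in, new DoubleFunctional()).run();
// Only NumericType.set(T) and add(T) are assumed, so the function doubles each pixel by adding
// the input value to itself.
//
// class DoubleFunctional extends AbstractFunction<ByteType, ByteType> {
//
//   @Override
//   public ByteType compute(final ByteType input, final ByteType output) {
//     output.set(input);
//     output.add(input); // output = 2 * input
//     return output;
//   }
// }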
package org.sipfoundry.sipxconfig.phone.yealink; public class yealinkConstants { public static final String MIME_TYPE_PLAIN = "text/plain"; public static final String MIME_TYPE_XML = "text/xml"; public static final String XML_DIAL_NOW = "dialnow.xml"; public static final String XML_CONTACT_DATA = "directory.xml"; public static final String WEB_ITEMS_LEVEL = "webitemslevel.cfg"; public static final String VENDOR = "Yealink"; // Line specific settings used in /etc/yealinkPhone/line.xml public static final String USER_ID_V6X_SETTING = "account/UserName"; public static final String USER_ID_V7X_SETTING = "basic/user_name"; public static final String AUTH_ID_V6X_SETTING = "account/AuthName"; public static final String AUTH_ID_V7X_SETTING = "basic/auth_name"; public static final String DISPLAY_NAME_V6X_SETTING = "account/DisplayName"; public static final String DISPLAY_NAME_V7X_SETTING = "basic/display_name"; public static final String PASSWORD_V6X_SETTING = "account/password"; public static final String PASSWORD_V7X_SETTING = "basic/password"; public static final String REGISTRATION_SERVER_HOST_V6X_SETTING = "account/SIPServerHost"; public static final String REGISTRATION_SERVER_HOST_V7X_SETTING = "basic/sip_server_host"; public static final String REGISTRATION_SERVER_PORT_V6X_SETTING = "account/SIPServerPort"; public static final String REGISTRATION_SERVER_PORT_V7X_SETTING = "basic/sip_server_port"; public static final String OUTBOUND_HOST_V6X_SETTING = "account/OutboundHost"; public static final String OUTBOUND_HOST_V7X_SETTING = "basic/outbound_host"; public static final String OUTBOUND_PORT_V6X_SETTING = "account/OutboundPort"; public static final String OUTBOUND_PORT_V7X_SETTING = "basic/outbound_port"; public static final String BACKUP_OUTBOUND_HOST_V6X_SETTING = "account/BackOutboundHost"; public static final String BACKUP_OUTBOUND_HOST_V7X_SETTING = "basic/backup_outbound_host"; public static final String BACKUP_OUTBOUND_PORT_V6X_SETTING = "account/BackOutboundPort"; public static final String BACKUP_OUTBOUND_PORT_V7X_SETTING = "basic/backup_outbound_port"; public static final String VOICE_MAIL_NUMBER_V6X_SETTING = "Message/VoiceNumber"; public static final String VOICE_MAIL_NUMBER_V7X_SETTING = "basic/voice_mail.number"; // Phone specific settings used in /etc/yealinkPhone/phone.xml public static final String DNS_SERVER1_V6X_SETTING = "network-wan/DNS/PrimaryDNS"; public static final String DNS_SERVER1_V7X_SETTING = "network/network.primary_dns"; public static final String DNS_SERVER2_V6X_SETTING = "network-wan/DNS/SecondaryDNS"; public static final String DNS_SERVER2_V7X_SETTING = "network/network.secondary_dns"; public static final String LOCAL_TIME_SERVER1_V6X_SETTING = "preference/Time/TimeServer1"; public static final String LOCAL_TIME_SERVER1_V7X_SETTING = "time/local_time.ntp_server1"; public static final String LOCAL_TIME_SERVER2_V6X_SETTING = "preference/Time/TimeServer2"; public static final String LOCAL_TIME_SERVER2_V7X_SETTING = "time/local_time.ntp_server2"; public static final String LOCAL_TIME_ZONE_V6X_SETTING = "preference/Time/TimeZone"; public static final String LOCAL_TIME_ZONE_V7X_SETTING = "time/local_time.time_zone"; public static final String SYSLOG_SERVER_V6X_SETTING = "upgrade/SYSLOG/SyslogdIP"; public static final String SYSLOG_SERVER_V7X_SETTING = "network/syslog.server"; public static final String REMOTE_PHONEBOOK_0_URL_V6X_SETTING = "RemotePhoneBook/0/URL"; public static final String REMOTE_PHONEBOOK_0_URL_V7X_SETTING = 
"remote-phonebook/remote_phonebook.data.1.url"; public static final String REMOTE_PHONEBOOK_0_NAME_V6X_SETTING = "RemotePhoneBook/0/Name"; public static final String REMOTE_PHONEBOOK_0_NAME_V7X_SETTING = "remote-phonebook/remote_phonebook.data.1.name"; public static final String FIRMWARE_SERVER_ADDRESS_SETTING = "upgrade/firmware/server_ip"; public static final String FIRMWARE_URL_V6X_SETTING = "upgrade/firmware/url"; public static final String FIRMWARE_URL_V7X_SETTING = "downloads/firmware.url"; public static final String FIRMWARE_HTTP_URL_SETTING = "upgrade/firmware/http_url"; public static final String FIRMWARE_NAME_SETTING = "upgrade/firmware/firmware_name"; public static final String AUTOPROVISIONING_SERVER_URL_V6X_SETTING = "upgrade/autoprovision/strServerURL"; public static final String AUTOPROVISIONING_SERVER_URL_V7X_SETTING = "auto-provisioning/auto_provision.server.url"; public static final String AUTOPROVISIONING_SERVER_ADDRESS_V6X_SETTING = "upgrade/autoprovision/server_address"; public static final String ADVANCED_MUSIC_SERVER_URI_V6X_SETTING = "account/MusicServerUri"; public static final String ADVANCED_MUSIC_SERVER_URI_V7X_SETTING = "advanced/music_server_uri"; public static final String LANG_FILE_NAME_V6X_SETTING = "upgrade/LangFile/server_address"; public static final String LANG_FILE_NAME_V7X_SETTING = "downloads/gui_lang.url"; // T2X except T20 public static final String LOGO_FILE_NAME_V6X_SETTING = "upgrade/Logo/server_address"; public static final String LOGO_FILE_NAME_V7X_SETTING = "downloads/lcd_logo.url"; // T3X and VP530 only public static final String WALLPAPER_FILE_NAME_V7X_SETTING = "downloads/wallpaper_upload.url"; // T3X public static final String SCREENSAVER_FILE_NAME_V7X_SETTING = "downloads/screen_saver.pic.url"; public static final String DIAL_NOW_URL_V6X_SETTING = "DialNow/server_address"; public static final String DIAL_NOW_URL_V7X_SETTING = "downloads/dialplan_dialnow.url"; }
package mho.wheels.iterables; import mho.wheels.io.Readers; import mho.wheels.math.BinaryFraction; import mho.wheels.numberUtils.FloatingPointUtils; import mho.wheels.random.IsaacPRNG; import org.jetbrains.annotations.NotNull; import org.junit.Before; import org.junit.Test; import java.math.BigDecimal; import java.math.BigInteger; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Objects; import static mho.wheels.iterables.IterableUtils.*; import static mho.wheels.testing.Testing.*; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.fail; // @formatter:off public strictfp class RandomProviderTest { private static RandomProvider P; private static RandomProvider Q; private static RandomProvider R; private static final int DEFAULT_SAMPLE_SIZE = 1000000; private static final int DEFAULT_TOP_COUNT = 10; private static final int TINY_LIMIT = 20; @Before public void initialize() { P = RandomProvider.example(); Q = new RandomProvider(toList(replicate(IsaacPRNG.SIZE, 0))); R = new RandomProvider(toList(IterableUtils.range(1, IsaacPRNG.SIZE))); } @Test public void testConstructor() { RandomProvider provider = new RandomProvider(); aeq(provider.getScale(), 32); aeq(provider.getSecondaryScale(), 8); } @Test public void testConstructor_int() { aeq(Q, "RandomProvider[@-7948823947390831374, 32, 8]"); aeq(R, "RandomProvider[@2449928962525148503, 32, 8]"); aeq( new RandomProvider(toList(IterableUtils.rangeBy(-1, -1, -IsaacPRNG.SIZE))), "RandomProvider[@3417306423260907531, 32, 8]" ); } @Test public void testExample() { aeq(RandomProvider.example(), "RandomProvider[@-8800290164235921060, 32, 8]"); } @Test public void testGetScale() { aeq(P.getScale(), 32); aeq(new RandomProvider().withScale(100).getScale(), 100); aeq(new RandomProvider().withScale(3).getScale(), 3); } @Test public void testGetSecondaryScale() { aeq(P.getSecondaryScale(), 8); aeq(new RandomProvider().withSecondaryScale(100).getSecondaryScale(), 100); aeq(new RandomProvider().withSecondaryScale(3).getSecondaryScale(), 3); } @Test public void testGetSeed() { aeq( P.getSeed(), "[-1740315277, -1661427768, 842676458, -1268128447, -121858045, 1559496322, -581535260, -1819723670," + " -334232530, 244755020, -534964695, 301563516, -1795957210, 1451814771, 1299826235, -666749112," + " -1729602324, -565031294, 1897952431, 1118663606, -299718943, -1499922009, -837624734, 1439650052," + " 312777359, -1140199484, 688524765, 739702138, 1480517762, 1622590976, 835969782, -204259962," + " -606452012, -1671898934, 368548728, -333429570, -1477682221, -638975525, -402896626, 1106834480," + " -1454735450, 1532680389, 1878326075, 1597781004, 619389131, -898266263, 1900039432, 1228960795," + " 1091764975, -1435988581, 1465994011, -241076337, 980038049, -821307198, -25801148, -1278802989," + " -290171171, 1063693093, 1718162965, -297113539, -1723402396, 1063795076, 1779331877, 1606303707," + " 1342330210, -2115595746, -718013617, 889248973, 1553964562, -2000156621, 1009070370, 998677106," + " 309828058, -816607592, 347096084, -565436493, -1836536982, -39909763, -1384351460, 586300570," + " -1545743273, -118730601, -1026888351, -643914920, 159473612, -509882909, 2003784095, -1582123439," + " 1199200850, -980627072, 589064158, 1351400003, 1083549876, -1039880174, 1634495699, -1583272739," + " 1765688283, -316629870, 577895752, -145082312, -645859550, 1496562313, 1970005163, -104842168," + " 285710655, 
970623004, 375952155, -1114509491, 9760898, 272385973, 1160942220, 79933456, 642681904," + " -1291288677, -238849129, 1196057424, -587416967, -2000013062, 953214572, -2003974223, -179005208," + " -1599818904, 1963556499, -1494628627, 293535669, -1033907228, 1690848472, 1958730707, 1679864529," + " -450182832, -1398178560, 2092043951, 892850383, 662556689, -1954880564, -1297875796, -562200510," + " 1753810661, 612072956, -1182875, 294510681, -485063306, 1608426289, 1466734719, 2978810," + " -2134449847, 855495682, -1563923271, -306227772, 147934567, 926758908, 1903257258, 1602676310," + " -1151393146, 303067731, -1371065668, 1908028886, -425534720, 1241120323, -2101606174, 545122109," + " 1781213901, -146337786, -1205949803, -235261172, 1019855899, -193216104, -1286568040, -294909212," + " 1086948319, 1903298288, 2119132684, -581936319, -2070422261, 2086926428, -1303966999, -1365365119," + " -1891227288, 346044744, 488440551, -790513873, -2045294651, -1270631847, -2126290563, -1816128137," + " 1473769929, 784925032, 292983675, -325413283, -2117417065, 1156099828, -1188576148, -1134724577," + " 937972245, -924106996, 1553688888, 324720865, 2001615528, 998833644, 137816765, 1901776632," + " 2000206935, 942793606, -1742718537, 1909590681, -1332632806, -1355397404, 152253803, -193623640," + " 1601921213, -427930872, 1154642563, 1204629137, 581648332, 1921167008, 2054160403, -1709752639," + " -402951456, 1597748885, 351809052, -1039041413, 1958075309, 1071372680, 1249922658, -2077011328," + " -2088560037, 643876593, -691661336, 2124992669, -534970427, 1061266818, -1731083093, 195764083," + " 1773077546, 304479557, 244603812, 834384133, 1684120407, 1493413139, 1731211584, -2062213553," + " -270682579, 44310291, 564559440, 957643125, 1374924466, 962420298, 1319979537, 1206138289," + " -948832823, -909756549, -664108386, -1355112330, -125435854, -1502071736, -790593389]" ); aeq( Q.getSeed(), "[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0," + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0," + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0," + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0," + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0," + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0," + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0," + " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]" ); aeq( R.getSeed(), "[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27," + " 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51," + " 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75," + " 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99," + " 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118," + " 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137," + " 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156," + " 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175," + " 
176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194," + " 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213," + " 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232," + " 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251," + " 252, 253, 254, 255, 256]" ); try { new RandomProvider(Collections.emptyList()); fail(); } catch (IllegalArgumentException ignored) {} try { new RandomProvider(Arrays.asList(1, 2, 3)); fail(); } catch (IllegalArgumentException ignored) {} } @Test public void testWithScale() { aeq(P.withScale(100), "RandomProvider[@-8800290164235921060, 100, 8]"); aeq(Q.withScale(3), "RandomProvider[@-7948823947390831374, 3, 8]"); aeq(R.withScale(0), "RandomProvider[@2449928962525148503, 0, 8]"); } @Test public void testWithSecondaryScale() { aeq(P.withSecondaryScale(100), "RandomProvider[@-8800290164235921060, 32, 100]"); aeq(Q.withSecondaryScale(3), "RandomProvider[@-7948823947390831374, 32, 3]"); aeq(R.withSecondaryScale(0), "RandomProvider[@2449928962525148503, 32, 0]"); } @Test public void testCopy() { RandomProvider copy = P.copy(); assertEquals(P, copy); P.nextInt(); assertEquals(P, copy); } @Test public void testDeepCopy() { RandomProvider copy = P.deepCopy(); assertEquals(P, copy); P.nextInt(); assertNotEquals(P, copy); } @Test public void testReset() { RandomProvider PDependent = P.withScale(10); RandomProvider original = P.deepCopy(); RandomProvider dependent = original.withScale(10); assertEquals(PDependent, dependent); P.nextInt(); assertNotEquals(P, original); assertNotEquals(PDependent, dependent); P.reset(); assertEquals(P, original); assertEquals(PDependent, dependent); } @Test public void testGetId() { aeq(P.getId(), -8800290164235921060L); P.nextInt(); aeq(P.getId(), -6220528511995005615L); aeq(Q.getId(), -7948823947390831374L); aeq(R.getId(), 2449928962525148503L); } private static <T> void simpleProviderHelper( @NotNull Iterable<T> xs, @NotNull String output, @NotNull String sampleCountOutput ) { List<T> sample = toList(take(DEFAULT_SAMPLE_SIZE, xs)); aeqit(take(TINY_LIMIT, sample), output); aeqit(sampleCount(sample).entrySet(), sampleCountOutput); P.reset(); } @Test public void testNextInt() { aeq(P.nextInt(), -1084795351); aeq(Q.nextInt(), 405143795); aeq(R.nextInt(), 87945096); } @Test public void testIntegers() { aeqit( take(TINY_LIMIT, P.integers()), "[-1084795351, 1143001545, -1986160253, -1177145870, -968883275, -1465892161, -470080200," + " -2011352603, -248472835, 1997176995, 293205759, -106693423, -1593537177, -206249451, 565581811," + " -195502731, 102870776, -1612587755, -483804495, -831718234]" ); } @Test public void testNextLong() { aeq(P.nextLong(), -4659160554254839351L); aeq(Q.nextLong(), 1740079350508374669L); aeq(R.nextLong(), 377721315096188309L); } @Test public void testLongs() { aeqit( take(TINY_LIMIT, P.longs()), "[-4659160554254839351, -8530493328132264462, -4161321976937299265, -2018979083213524507," + " -1067182698272227165, 1259309150092131537, -6844190056086445547, 2429155385556917621," + " 441826621316521237, -2077924480219546458, 404281420475794401, -3799772176394282532," + " -3259952746839854786, -1600663848124449857, 7874913887470575742, -6974357164754656982," + " 8454731288392606713, 347198304573602423, -601743751419410562, -2127248600113938899]" ); } @Test public void testNextBoolean() { aeq(P.nextBoolean(), true); aeq(Q.nextBoolean(), true); 
aeq(R.nextBoolean(), false); } @Test public void testBooleans() { simpleProviderHelper( P.booleans(), "[true, true, true, false, true, true, false, true, true, true, true, true, true, true, true, true," + " false, true, true, false]", "[true=499965, false=500035]" ); } private static void nextUniformSample_Iterable_helper(@NotNull String xs, @NotNull String output) { aeq(Objects.toString(P.nextUniformSample(readIntegerListWithNulls(xs))), output); P.reset(); } @Test public void testNextUniformSample_Iterable() { nextUniformSample_Iterable_helper("[3, 1, 4, 1]", "1"); nextUniformSample_Iterable_helper("[3, 1, null, 1]", "1"); P.reset(); try { P.nextUniformSample(Collections.emptyList()); } catch (ArithmeticException ignored) {} } private static void uniformSample_Iterable_helper(@NotNull String xs, @NotNull String output) { aeqitLimit(TINY_LIMIT, P.uniformSample(readIntegerListWithNulls(xs)), output); P.reset(); } @Test public void testUniformSample_Iterable() { uniformSample_Iterable_helper( "[3, 1, 4, 1]", "[1, 1, 1, 4, 1, 1, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 1, 4, ...]" ); uniformSample_Iterable_helper("[]", "[]"); uniformSample_Iterable_helper( "[3, 1, null, 1]", "[1, 1, 1, null, 1, 1, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 1, null, ...]" ); } private static void nextUniformSample_String_helper(@NotNull String s, char output) { aeq(P.nextUniformSample(s), output); P.reset(); } @Test public void testNextUniformSample_String() { nextUniformSample_String_helper("hello", 'e'); try { P.nextUniformSample(""); } catch (ArithmeticException ignored) {} } private static void uniformSample_String_helper(@NotNull String s, @NotNull String output) { P.reset(); aeqcs(P.uniformSample(s), output); } @Test public void testUniformSample_String() { uniformSample_String_helper( "hello", "eellhlelheeooleollleoololohllollllholhhlhhohllllehelollehllllllleolleolelehelllohoohelllllehllllolo" + "llloellhhllloollohohlllohollo" ); uniformSample_String_helper("", ""); } @Test public void testNextOrdering() { aeq(P.nextOrdering(), "LT"); aeq(Q.nextOrdering(), "LT"); aeq(R.nextOrdering(), "EQ"); } @Test public void testOrderings() { simpleProviderHelper( P.orderings(), "[LT, LT, GT, LT, EQ, LT, LT, LT, LT, LT, EQ, LT, LT, GT, LT, GT, EQ, GT, LT, LT]", "[LT=333313, GT=333615, EQ=333072]" ); } @Test public void testNextRoundingMode() { aeq(P.nextRoundingMode(), "UP"); aeq(Q.nextRoundingMode(), "CEILING"); aeq(R.nextRoundingMode(), "UNNECESSARY"); } @Test public void testRoundingModes() { simpleProviderHelper( P.roundingModes(), "[UP, UP, CEILING, DOWN, HALF_UP, HALF_EVEN, UNNECESSARY, HALF_UP, HALF_UP, CEILING, HALF_EVEN, UP," + " HALF_EVEN, HALF_UP, CEILING, HALF_UP, UNNECESSARY, HALF_UP, UP, HALF_DOWN]", "[UP=124820, CEILING=125380, DOWN=124690, HALF_UP=124847, HALF_EVEN=124918, UNNECESSARY=124948," + " HALF_DOWN=125283, FLOOR=125114]" ); } @Test public void testNextPositiveByte() { aeq(P.nextPositiveByte(), 41); aeq(Q.nextPositiveByte(), 115); aeq(R.nextPositiveByte(), 8); } @Test public void testPositiveBytes() { aeqit(take(TINY_LIMIT, P.positiveBytes()), "[41, 73, 3, 114, 53, 63, 56, 101, 125, 35, 127, 81, 103, 21, 115, 117, 120, 21, 49, 38]"); } @Test public void testNextPositiveShort() { aeq(P.nextPositiveShort(), 22057); aeq(Q.nextPositiveShort(), 243); aeq(R.nextPositiveShort(), 28552); } @Test public void testPositiveShorts() { aeqit(take(TINY_LIMIT, P.positiveShorts()), "[22057, 20937, 6531, 11762, 949, 17087, 9528, 12773, 6909, 163, 30463, 31953, 3431, 25109, 6131," + " 23925, 12024, 23829, 15025, 31910]"); 
} @Test public void testNextPositiveInt() { aeq(P.nextPositiveInt(), 1143001545); aeq(Q.nextPositiveInt(), 405143795); aeq(R.nextPositiveInt(), 87945096); } @Test public void testPositiveIntegers() { aeqit(take(TINY_LIMIT, P.positiveIntegers()), "[1143001545, 1997176995, 293205759, 565581811, 102870776, 94129103, 1488978913, 1855658460," + " 1833521269, 595157118, 1108943146, 1968520527, 80838404, 181782398, 960691757, 442512834," + " 474345991, 896325532, 1936225302, 419244611]"); } @Test public void testNextPositiveLong() { aeq(P.nextPositiveLong(), 1259309150092131537L); aeq(Q.nextPositiveLong(), 1740079350508374669L); aeq(R.nextPositiveLong(), 377721315096188309L); } @Test public void testPositiveLongs() { aeqit(take(TINY_LIMIT, P.positiveLongs()), "[1259309150092131537, 2429155385556917621, 441826621316521237, 404281420475794401," + " 7874913887470575742, 8454731288392606713, 347198304573602423, 1900578154019506034," + " 2037300520516627497, 3849688850220341092, 8316024350196968003, 8774587835203863104," + " 7027759477968838149, 4582566483620040494, 104407546425062322, 7601919310667137530," + " 8935450729811208701, 1568186602409462170, 8008008025538113060, 2525682745804362002]"); } @Test public void testNextNegativeByte() { aeq(P.nextNegativeByte(), -42); aeq(Q.nextNegativeByte(), -116); aeq(R.nextNegativeByte(), -9); } @Test public void testNegativeBytes() { aeqit(take(TINY_LIMIT, P.negativeBytes()), "[-42, -74, -4, -115, -54, -64, -57, -102, -126, -36, -128, -82, -104, -22, -116, -118, -121, -22," + " -50, -39]"); } @Test public void testNextNegativeShort() { aeq(P.nextNegativeShort(), -22058); aeq(Q.nextNegativeShort(), -244); aeq(R.nextNegativeShort(), -28553); } @Test public void testNegativeShorts() { aeqit(take(TINY_LIMIT, P.negativeShorts()), "[-22058, -20938, -6532, -11763, -950, -17088, -9529, -12774, -6910, -164, -30464, -31954, -3432," + " -25110, -6132, -23926, -12025, -23830, -15026, -31911]"); } @Test public void testNextNegativeInt() { aeq(P.nextNegativeInt(), -1084795351); aeq(Q.nextNegativeInt(), -1333080799); aeq(R.nextNegativeInt(), -362359403); } @Test public void testNegativeIntegers() { aeqit(take(TINY_LIMIT, P.negativeIntegers()), "[-1084795351, -1986160253, -1177145870, -968883275, -1465892161, -470080200, -2011352603," + " -248472835, -106693423, -1593537177, -206249451, -195502731, -1612587755, -483804495, -831718234," + " -884703402, -759016897, -1408421570, -372683595, -138708033]"); } @Test public void testNextNegativeLong() { aeq(P.nextNegativeLong(), -4659160554254839351L); aeq(Q.nextNegativeLong(), -5476473126251815900L); aeq(R.nextNegativeLong(), -4625575076888178893L); } @Test public void testNegativeLongs() { aeqit(take(TINY_LIMIT, P.negativeLongs()), "[-4659160554254839351, -8530493328132264462, -4161321976937299265, -2018979083213524507," + " -1067182698272227165, -6844190056086445547, -2077924480219546458, -3799772176394282532," + " -3259952746839854786, -1600663848124449857, -6974357164754656982, -601743751419410562," + " -2127248600113938899, -8615999285391660475, -3152269795703421596, -279738421105985993," + " -9128636656372363642, -4787870135943121859, -4018571045884316278, -3622924013254235408]"); } @Test public void testNextNaturalByte() { aeq(P.nextNaturalByte(), 41); aeq(Q.nextNaturalByte(), 115); aeq(R.nextNaturalByte(), 8); } @Test public void testNaturalBytes() { aeqit(take(TINY_LIMIT, P.naturalBytes()), "[41, 73, 3, 114, 53, 63, 56, 101, 125, 35, 127, 81, 103, 21, 115, 117, 120, 21, 49, 38]"); } @Test public void 
testNextNaturalShort() { aeq(P.nextNaturalShort(), 22057); aeq(Q.nextNaturalShort(), 243); aeq(R.nextNaturalShort(), 28552); } @Test public void testNaturalShorts() { aeqit(take(TINY_LIMIT, P.naturalShorts()), "[22057, 20937, 6531, 11762, 949, 17087, 9528, 12773, 6909, 163, 30463, 31953, 3431, 25109, 6131," + " 23925, 12024, 23829, 15025, 31910]"); } @Test public void testNextNaturalInt() { aeq(P.nextNaturalInt(), 1062688297); aeq(Q.nextNaturalInt(), 405143795); aeq(R.nextNaturalInt(), 87945096); } @Test public void testNaturalIntegers() { aeqit(take(TINY_LIMIT, P.naturalIntegers()), "[1062688297, 1143001545, 161323395, 970337778, 1178600373, 681591487, 1677403448, 136131045," + " 1899010813, 1997176995, 293205759, 2040790225, 553946471, 1941234197, 565581811, 1951980917," + " 102870776, 534895893, 1663679153, 1315765414]"); } @Test public void testNextNaturalLong() { aeq(P.nextNaturalLong(), 4564211482599936457L); aeq(Q.nextNaturalLong(), 1740079350508374669L); aeq(R.nextNaturalLong(), 377721315096188309L); } @Test public void testNaturalLongs() { aeqit(take(TINY_LIMIT, P.naturalLongs()), "[4564211482599936457, 692878708722511346, 5062050059917476543, 7204392953641251301," + " 8156189338582548643, 1259309150092131537, 2379181980768330261, 2429155385556917621," + " 441826621316521237, 7145447556635229350, 404281420475794401, 5423599860460493276," + " 5963419290014921022, 7622708188730325951, 7874913887470575742, 2249014872100118826," + " 8454731288392606713, 347198304573602423, 8621628285435365246, 7096123436740836909]"); } @Test public void testNextNonzeroByte() { aeq(P.nextNonzeroByte(), 41); aeq(Q.nextNonzeroByte(), -13); aeq(R.nextNonzeroByte(), -120); } @Test public void testNonzeroBytes() { aeqit(take(TINY_LIMIT, P.nonzeroBytes()), "[41, -55, -125, -14, -75, -65, 56, -27, -3, -93, -1, -47, 103, 21, -13, 117, -8, 21, -79, -90]"); } @Test public void testNextNonzeroShort() { aeq(P.nextNonzeroShort(), 22057); aeq(Q.nextNonzeroShort(), 243); aeq(R.nextNonzeroShort(), -4216); } @Test public void testNonzeroShorts() { aeqit(take(TINY_LIMIT, P.nonzeroShorts()), "[22057, -11831, -26237, 11762, 949, 17087, 9528, 12773, -25859, -32605, -2305, -815, -29337, -7659," + " 6131, -8843, -20744, -8939, -17743, -858]"); } @Test public void testNextNonzeroInt() { aeq(P.nextNonzeroInt(), -1084795351); aeq(Q.nextNonzeroInt(), 405143795); aeq(R.nextNonzeroInt(), 87945096); } @Test public void testNonzeroIntegers() { aeqit(take(TINY_LIMIT, P.nonzeroIntegers()), "[-1084795351, 1143001545, -1986160253, -1177145870, -968883275, -1465892161, -470080200," + " -2011352603, -248472835, 1997176995, 293205759, -106693423, -1593537177, -206249451, 565581811," + " -195502731, 102870776, -1612587755, -483804495, -831718234]"); } @Test public void testNextNonzeroLong() { aeq(P.nextNonzeroLong(), -4659160554254839351L); aeq(Q.nextNonzeroLong(), 1740079350508374669L); aeq(R.nextNonzeroLong(), 377721315096188309L); } @Test public void testNonzeroLongs() { aeqit(take(TINY_LIMIT, P.nonzeroLongs()), "[-4659160554254839351, -8530493328132264462, -4161321976937299265, -2018979083213524507," + " -1067182698272227165, 1259309150092131537, -6844190056086445547, 2429155385556917621," + " 441826621316521237, -2077924480219546458, 404281420475794401, -3799772176394282532," + " -3259952746839854786, -1600663848124449857, 7874913887470575742, -6974357164754656982," + " 8454731288392606713, 347198304573602423, -601743751419410562, -2127248600113938899]"); } @Test public void testNextByte() { aeq(P.nextByte(), 41); aeq(Q.nextByte(), 
-13); aeq(R.nextByte(), -120); } @Test public void testBytes() { aeqit(take(TINY_LIMIT, P.bytes()), "[41, -55, -125, -14, -75, -65, 56, -27, -3, -93, -1, -47, 103, 21, -13, 117, -8, 21, -79, -90]"); } @Test public void testNextShort() { aeq(P.nextShort(), 22057); aeq(Q.nextShort(), 243); aeq(R.nextShort(), -4216); } @Test public void testShorts() { aeqit(take(TINY_LIMIT, P.shorts()), "[22057, -11831, -26237, 11762, 949, 17087, 9528, 12773, -25859, -32605, -2305, -815, -29337, -7659," + " 6131, -8843, -20744, -8939, -17743, -858]"); } @Test public void testNextAsciiChar() { aeq(P.nextAsciiChar(), ')'); aeq(Q.nextAsciiChar(), 's'); aeq(R.nextAsciiChar(), '\b'); } @Test public void testAsciiCharacters() { aeqcs(P.asciiCharacters(), ")I\3r5?8e}#\177Qg\25sux\u00151&OaV\\?>5?u~\34*Oy\4w?~+-Br\7)\34d\26CLERd%@c7\2\5o.\u001c2S\6z=Vz\30" + "}l\nNph\32Xx^$x.\23\22\3oK10)\177u;\u001c2nEZF\17If`5f\23OSS\5\3v\5s\u000b2Y\\oKo;\1|CQ7&"); } @Test public void testNextChar() { aeq(P.nextChar(), ''); aeq(Q.nextChar(), 'ó'); aeq(R.nextChar(), '\uef88'); } @Test public void testCharacters() { aeqcs(P.characters(), "\u2df2ε\u2538\u31e5\uf6ff\ue215\u17f3\udd75\udd15ϡ\u19dcᬜK" + "\ufe2d\uf207\u2a43\uea45\ue352\u2b63\uf637\uee1c\u33b2ᅺᤘ" + "\ue9fd\u2aec\uaaf0\u28de\u2e24\uf878ሮܓ\uff03\ue5cb\ua7b1\uecf5\ue8b2\ue2da" + "\ue78f\u3353\ue2d3\ud805ឃᳶ\u2832\uf36f\ue87cࢦ"); } private static void nextFromRangeUp_byte_helper(int a, int output) { P.reset(); aeq(P.nextFromRangeUp((byte) a), output); } @Test public void testNextFromRangeUp_byte() { nextFromRangeUp_byte_helper(0, 41); nextFromRangeUp_byte_helper(1 << 6, 105); nextFromRangeUp_byte_helper(-1 << 6, -23); nextFromRangeUp_byte_helper(Byte.MAX_VALUE, 127); nextFromRangeUp_byte_helper(Byte.MIN_VALUE, -87); } private static void rangeUp_byte_helper(byte a, @NotNull String output) { aeqit(take(TINY_LIMIT, P.rangeUp(a)), output); P.reset(); } @Test public void testRangeUp_byte() { rangeUp_byte_helper( (byte) 0, "[41, 73, 3, 114, 53, 63, 56, 101, 125, 35, 127, 81, 103, 21, 115, 117, 120, 21, 49, 38]" ); rangeUp_byte_helper( (byte) (1 << 6), "[105, 73, 67, 114, 117, 127, 120, 101, 125, 99, 127, 81, 103, 85, 115, 117, 120, 85, 113, 102]" ); rangeUp_byte_helper( (byte) (-1 << 6), "[-23, 67, 117, 127, -8, 99, 39, -43, 53, -43, 113, 102, 22, -1, -2, 117, 127, 53, 62, -36]" ); rangeUp_byte_helper( Byte.MAX_VALUE, "[127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127]" ); rangeUp_byte_helper( Byte.MIN_VALUE, "[-87, 73, 3, 114, 53, 63, -72, 101, 125, 35, 127, 81, -25, -107, 115, -11, 120, -107, 49, 38]" ); } private static void nextFromRangeUp_short_helper(int a, int output) { P.reset(); aeq(P.nextFromRangeUp((short) a), output); } @Test public void testNextFromRangeUp_short() { nextFromRangeUp_short_helper(0, 22057); nextFromRangeUp_short_helper(1 << 14, 22057); nextFromRangeUp_short_helper(-1 << 14, 5673); nextFromRangeUp_short_helper(Short.MAX_VALUE, 32767); nextFromRangeUp_short_helper(Short.MIN_VALUE, -10711); } private static void rangeUp_short_helper(short a, @NotNull String output) { aeqit(take(TINY_LIMIT, P.rangeUp(a)), output); P.reset(); } @Test public void testRangeUp_short() { rangeUp_short_helper( (short) 0, "[22057, 20937, 6531, 11762, 949, 17087, 9528, 12773, 6909, 163, 30463, 31953, 3431, 25109, 6131," + " 23925, 12024, 23829, 15025, 31910]" ); rangeUp_short_helper( (short) (1 << 14), "[22057, 20937, 22915, 28146, 17333, 17087, 25912, 29157, 23293, 16547, 30463, 31953, 19815, 25109," + " 22515, 23925, 28408, 23829, 
31409, 31910]" ); rangeUp_short_helper( (short) (-1 << 14), "[5673, 22915, -4622, -15435, 703, -6856, -3611, 23293, 16547, 19815, -10253, 28408, 31409, 3023," + " -15391, 16214, -9764, 4671, -3778, 3253]" ); rangeUp_short_helper( Short.MAX_VALUE, "[32767, 32767, 32767, 32767, 32767, 32767, 32767, 32767, 32767, 32767, 32767, 32767, 32767, 32767," + " 32767, 32767, 32767, 32767, 32767, 32767]" ); rangeUp_short_helper( Short.MIN_VALUE, "[-10711, 20937, 6531, -21006, -31819, -15681, -23240, -19995, 6909, 163, 30463, 31953, 3431, 25109," + " -26637, 23925, 12024, 23829, 15025, 31910]" ); } private static void nextFromRangeUp_int_helper(int a, int output) { aeq(P.nextFromRangeUp(a), output); P.reset(); } @Test public void testNextFromRangeUp_int() { nextFromRangeUp_int_helper(0, 1143001545); nextFromRangeUp_int_helper(1 << 30, 1143001545); nextFromRangeUp_int_helper(-1 << 30, 69259721); nextFromRangeUp_int_helper(Integer.MAX_VALUE, 2147483647); nextFromRangeUp_int_helper(Integer.MIN_VALUE, -1004482103); } private static void rangeUp_int_helper(int a, @NotNull String output) { aeqit(take(TINY_LIMIT, P.rangeUp(a)), output); P.reset(); } @Test public void testRangeUp_int() { rangeUp_int_helper( 0, "[1143001545, 970337778, 681591487, 136131045, 1997176995, 2040790225, 1941234197, 1951980917," + " 534895893, 1315765414, 1488978913, 1855658460, 739062078, 2008775615, 595157118, 1108943146," + " 1275438073, 985283191, 181782398, 960691757]" ); rangeUp_int_helper( 1 << 30, "[1143001545, 2044079602, 1755333311, 1209872869, 1997176995, 2040790225, 1941234197, 1951980917," + " 1608637717, 1315765414, 1488978913, 1855658460, 1812803902, 2008775615, 1668898942, 1108943146," + " 1275438073, 2059025015, 1255524222, 2034433581]" ); rangeUp_int_helper( -1 << 30, "[69259721, 2044079602, 1755333311, 1209872869, 923435171, 1608637717, 415237089, 781916636," + " 1812803902, -478584706, 35201322, 2059025015, -891959426, -113050067, 1109175337, -654497213," + " 1765141061, 1055360356, 936002112, 468907575]" ); rangeUp_int_helper( Integer.MAX_VALUE, "[2147483647, 2147483647, 2147483647, 2147483647, 2147483647, 2147483647, 2147483647, 2147483647," + " 2147483647, 2147483647, 2147483647, 2147483647, 2147483647, 2147483647, 2147483647, 2147483647," + " 2147483647, 2147483647, 2147483647, 2147483647]" ); rangeUp_int_helper( Integer.MIN_VALUE, "[-1004482103, 970337778, 681591487, 136131045, -150306653, 2040790225, 1941234197, 1951980917," + " 534895893, 1315765414, -658504735, -291825188, 739062078, 2008775615, -1552326530, " + "-1038540502, 1275438073, 985283191, -1965701250, -1186791891]" ); } private static void nextFromRangeUp_long_helper(long a, long output) { aeq(P.nextFromRangeUp(a), output); P.reset(); } @Test public void testNextFromRangeUp_long() { nextFromRangeUp_long_helper(0L, 2978664684788457540L); nextFromRangeUp_long_helper(1L << 62, 7590350703215845444L); nextFromRangeUp_long_helper(-1L << 62, 1609966265326126211L); nextFromRangeUp_long_helper(Long.MAX_VALUE, 9223372036854775807L); nextFromRangeUp_long_helper(Long.MIN_VALUE, -3001719753101261693L); } private static void rangeUp_long_helper(long a, @NotNull String output) { aeqit(take(TINY_LIMIT, P.rangeUp(a)), output); P.reset(); } @Test public void testRangeUp_long() { rangeUp_long_helper( 0L, "[2978664684788457540, 259411669349684921, 3819968131296829608, 4045916796483607944," + " 9050600215542762103, 9220690404532069369, 7461625247526204659, 8293297493653674228," + " 8695924240519389599, 3583222511262526670, 5713832101313495128, 6232776051665771374," + " 
4562923580722056620, 3840666588017310711, 8453337235194935587, 2025272514667682114," + " 5709813867763402188, 324207515304377018, 4552478380255597834, 3134077502549279289]" ); rangeUp_long_helper( 1L << 62, "[7590350703215845444, 4871097687777072825, 8431654149724217512, 8657602814910995848," + " 9050600215542762103, 9220690404532069369, 7461625247526204659, 8293297493653674228," + " 8695924240519389599, 8194908529689914574, 5713832101313495128, 6232776051665771374," + " 9174609599149444524, 8452352606444698615, 8453337235194935587, 6636958533095070018," + " 5709813867763402188, 4935893533731764922, 9164164398682985738, 7745763520976667193]" ); rangeUp_long_helper( -1L << 62, "[1609966265326126211, -1308654454609754433, -1874654246358644483, 4614632709429989841," + " 5549737756197188595, 8802817253011410639, -4341372912259511332, 1351874002717355189," + " 4304305952112864638, -2650756327368211889, 7135333504334759031, -1322097316696094037," + " 1669389700406211395, 5037133408195934528, -1504487908198687998, 6789092804704878382," + " 3566685953462311704, 5270340593672846712, -1719689906509449096, -3246513607960354030]" ); rangeUp_long_helper( Long.MAX_VALUE, "[9223372036854775807, 9223372036854775807, 9223372036854775807, 9223372036854775807," + " 9223372036854775807, 9223372036854775807, 9223372036854775807, 9223372036854775807," + " 9223372036854775807, 9223372036854775807, 9223372036854775807, 9223372036854775807," + " 9223372036854775807, 9223372036854775807, 9223372036854775807, 9223372036854775807," + " 9223372036854775807, 9223372036854775807, 9223372036854775807, 9223372036854775807]" ); rangeUp_long_helper( Long.MIN_VALUE, "[-3001719753101261693, -5920340473037142337, -6486340264786032387, 2946691002601937," + " 938051737769800691, 6726395392388302357, 4191131234584022735, -8953058930686899236," + " -3259812015710032715, -307380066314523266, -7262442345795599793, 2523647485907371127," + " -5933783335123481941, 9097897703523752562, 8234018459023606428, -2942296318021176509," + " 5939553317435058514, 425447389768546624, -6116173926626075902, 2177406786277490478]" ); } private static void nextFromRangeUp_char_helper(char a, char output) { aeq(P.nextFromRangeUp(a), output); P.reset(); } @Test public void testNextFromRangeUp_char() { nextFromRangeUp_char_helper('\0', ''); nextFromRangeUp_char_helper('a', ''); nextFromRangeUp_char_helper('Ш', ''); nextFromRangeUp_char_helper('\uffff', '\uffff'); } private static void rangeUp_char_helper(char a, @NotNull String output) { aeqcs(P.rangeUp(a), output); P.reset(); } @Test public void testRangeUp_char() { rangeUp_char_helper( '\0', "\u2df2ε\u2538\u31e5\uf6ff\ue215\u17f3\udd75\udd15ϡ\u19dc" + "ᬜK\ufe2d\uf207\u2a43\uea45\ue352\u2b63\uf637\uee1c\u33b2ᅺ" + "ᤘ\ue9fd\u2aec\uaaf0\u28de\u2e24\uf878ሮܓ\uff03\ue5cb\ua7b1\uecf5\ue8b2" + "\ue2da\ue78f\u3353\ue2d3\ud805ឃᳶ\u2832\uf36f\ue87cࢦ" ); rangeUp_char_helper( 'a', "\u2e53Ж\u2599\u3246\uf760\ue276ᡔ\uddd6\udd76тᨽ\u319f\u1b7d\u218b" + "\uf268\ud7fd\u2aa4\ueaa6\ue3b3\u2bc4\uf698\uee7dᇛ\u1979" + "\uea5e\u2b4d\uab51\u293f\u2e85\uf8d9\u128fݴ\uff64\ue62c\ued56\uab1c\ue913" + "\ue33b\ue7f0\u33b4\ue334\ud866\u17e4ᵗ\u2893\ufbba\uf3d0\ue8ddइ" ); rangeUp_char_helper( 'Ш', "\u321aߝ\u2960\ue63dᰛ\ue19d\ue13dࠉḄὄ\u2552\uf62f" + "\udbc4\u2e6b\uee6d\ue77a\u2f8b\uf244ᖢᵀ\uee25\u2f14\u324cᙖ" + "\u0b3b\ue9f3\ufdd8\uf11d\uecda\ue702\uebb7\ue6fb\udc2d\u1bab\u211e" + "\uf797\ueca4\uda6b\u0cce\uf0adᓦၽ" ); rangeUp_char_helper( '\uffff', 
"\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff" + "\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff" + "\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff" + "\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff" + "\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff" + "\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff" + "\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff" + "\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff\uffff" ); } private static void nextFromRangeDown_byte_helper(int a, int output) { aeq(P.nextFromRangeDown((byte) a), output); P.reset(); } @Test public void testNextFromRangeDown_byte() { nextFromRangeDown_byte_helper(0, -87); nextFromRangeDown_byte_helper(1 << 6, -87); nextFromRangeDown_byte_helper(-1 << 6, -87); nextFromRangeDown_byte_helper(Byte.MAX_VALUE, -87); nextFromRangeDown_byte_helper(Byte.MIN_VALUE, -128); } private static void rangeDown_byte_helper(byte a, @NotNull String output) { aeqit(take(TINY_LIMIT, P.rangeDown(a)), output); P.reset(); } @Test public void testRangeDown_byte() { rangeDown_byte_helper( (byte) 0, "[-87, -72, -25, -107, -11, -107, -42, -65, -66, -11, -2, -100, -86, -49, -124, -9, -65, -2, -83, -14]" ); rangeDown_byte_helper( (byte) (1 << 6), "[-87, 3, 53, 63, -72, 35, -25, -107, -11, -107, 49, 38, -42, -65, -66, 53, 63, -11, -2, -100]" ); rangeDown_byte_helper( (byte) (-1 << 6), "[-87, -125, -75, -65, -72, -93, -107, -107, -79, -90, -65, -66, -75, -65, -100, -86, -124, -65," + " -85, -83]" ); rangeDown_byte_helper( Byte.MAX_VALUE, "[-87, 73, 3, 114, 53, 63, -72, 101, 125, 35, 127, 81, -25, -107, 115, -11, 120, -107, 49, 38]" ); rangeDown_byte_helper( Byte.MIN_VALUE, "[-128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128," + " -128, -128, -128, -128]" ); } private static void nextFromRangeDown_short_helper(int a, int output) { aeq(P.nextFromRangeDown((short) a), output); P.reset(); } @Test public void testNextFromRangeDown_short() { nextFromRangeDown_short_helper(0, -10711); nextFromRangeDown_short_helper(1 << 14, -10711); nextFromRangeDown_short_helper(-1 << 14, -26237); nextFromRangeDown_short_helper(Short.MAX_VALUE, -10711); nextFromRangeDown_short_helper(Short.MIN_VALUE, -32768); } private static void rangeDown_short_helper(short a, @NotNull String output) { aeqit(take(TINY_LIMIT, P.rangeDown(a)), output); P.reset(); } @Test public void testRangeDown_short() { rangeDown_short_helper( (short) 0, "[-10711, -21006, -31819, -15681, -23240, -19995, -26637, -13361, -31775, -170, -26148, -11713," + " -20162, -13131, -1089, -12171, -8066, -25828, -24278, -17073]" ); rangeDown_short_helper( (short) (1 << 14), "[-10711, 6531, -21006, -31819, -15681, -23240, -19995, 6909, 163, 3431, -26637, 12024, 15025," + " -13361, -31775, -170, -26148, -11713, -20162, -13131]" ); rangeDown_short_helper( (short) (-1 << 14), "[-26237, -21006, -31819, -23240, -19995, -25859, -32605, -29337, -26637, -20744, -17743, -31775," + " -26148, -20162, -25828, -24278, -17073, -23559, -17801, -21185]" ); rangeDown_short_helper( Short.MAX_VALUE, "[-10711, 20937, 6531, -21006, -31819, -15681, -23240, -19995, 6909, 163, 30463, 31953, 3431, 25109," + " -26637, 23925, 12024, 
23829, 15025, 31910]" ); rangeDown_short_helper( Short.MIN_VALUE, "[-32768, -32768, -32768, -32768, -32768, -32768, -32768, -32768, -32768, -32768, -32768, -32768," + " -32768, -32768, -32768, -32768, -32768, -32768, -32768, -32768]" ); } private static void nextFromRangeDown_int_helper(int a, int output) { aeq(P.nextFromRangeDown(a), output); P.reset(); } @Test public void testNextFromRangeDown_int() { nextFromRangeDown_int_helper(0, -1004482103); nextFromRangeDown_int_helper(1 << 30, -1004482103); nextFromRangeDown_int_helper(-1 << 30, -1177145870); nextFromRangeDown_int_helper(Integer.MAX_VALUE, -1004482103); nextFromRangeDown_int_helper(Integer.MIN_VALUE, -2147483648); } private static void rangeDown_int_helper(int a, @NotNull String output) { aeqit(take(TINY_LIMIT, P.rangeDown(a)), output); P.reset(); } @Test public void testRangeDown_int() { rangeDown_int_helper( 0, "[-1004482103, -150306653, -658504735, -291825188, -1552326530, -1038540502, -1965701250," + " -1186791891, -1728239037, -18381468, -137739712, -604834249, -1131859022, -1686158854," + " -1782600976, -2111534694, -1846406610, -553610990, -96510935, -2032484754]" ); rangeDown_int_helper( 1 << 30, "[-1004482103, 970337778, 681591487, 136131045, -150306653, 534895893, -658504735, -291825188," + " 739062078, -1552326530, -1038540502, 985283191, -1965701250, -1186791891, 35433513, -1728239037," + " 691399237, -18381468, -137739712, -604834249]" ); rangeDown_int_helper( -1 << 30, "[-1177145870, -1465892161, -2011352603, -1612587755, -1408421570, -1552326530, -1162200457," + " -1965701250, -1186791891, -2112050135, -1728239037, -1456084411, -1288200699, -1131859022," + " -1655648634, -2073512899, -1686158854, -1782600976, -2111534694, -1846406610]" ); rangeDown_int_helper( Integer.MAX_VALUE, "[-1004482103, 970337778, 681591487, 136131045, -150306653, 2040790225, 1941234197, 1951980917," + " 534895893, 1315765414, -658504735, -291825188, 739062078, 2008775615, -1552326530, -1038540502," + " 1275438073, 985283191, -1965701250, -1186791891]" ); rangeDown_int_helper( Integer.MIN_VALUE, "[-2147483648, -2147483648, -2147483648, -2147483648, -2147483648, -2147483648, -2147483648," + " -2147483648, -2147483648, -2147483648, -2147483648, -2147483648, -2147483648, -2147483648," + " -2147483648, -2147483648, -2147483648, -2147483648, -2147483648, -2147483648]" ); } private static void nextFromRangeDown_long_helper(long a, long output) { aeq(P.nextFromRangeDown(a), output); P.reset(); } @Test public void testNextFromRangeDown_long() { nextFromRangeDown_long_helper(0L, -3001719753101261693L); nextFromRangeDown_long_helper(1L << 62, -3001719753101261693L); nextFromRangeDown_long_helper(-1L << 62, -6244707352066318268L); nextFromRangeDown_long_helper(Long.MAX_VALUE, -3001719753101261693L); nextFromRangeDown_long_helper(Long.MIN_VALUE, -9223372036854775808L); } private static void rangeDown_long_helper(long a, @NotNull String output) { aeqit(take(TINY_LIMIT, P.rangeDown(a)), output); P.reset(); } @Test public void testRangeDown_long() { rangeDown_long_helper( 0L, "[-3001719753101261693, -5920340473037142337, -6486340264786032387, -8953058930686899236," + " -3259812015710032715, -307380066314523266, -7262442345795599793, -5933783335123481941," + " -2942296318021176509, -6116173926626075902, -1045000064965076200, -6331375924936837000," + " -7858199626387741934, -750497281407653010, -4964572946333319706, -3265594823497196973," + " -7169158286100765709, -3899242950132782503, -354726065181537090, -8326391862079061231]" ); rangeDown_long_helper( 1L 
<< 62, "[-3001719753101261693, -5920340473037142337, -6486340264786032387, 2946691002601937," + " 938051737769800691, 4191131234584022735, -8953058930686899236, -3259812015710032715," + " -307380066314523266, -7262442345795599793, 2523647485907371127, -5933783335123481941," + " -2942296318021176509, 425447389768546624, -6116173926626075902, 2177406786277490478," + " -1045000064965076200, 658654575245458808, -6331375924936837000, -7858199626387741934]" ); rangeDown_long_helper( -1L << 62, "[-6244707352066318268, -8963960367505090887, -5403403905557946200, -5177455240371167864," + " -5640149525592249138, -4660448456132719188, -5382705448837465097, -7198099522187093694," + " -8899164521550398790, -4670893656599177974, -6089294534305496519, -8650775946964755326," + " -7145123307227501859, -7605339026464506600, -6513958261454878089, -9034634951682803789," + " -7138643007725401796, -7486951269179234622, -7852292981010661281, -8935306705831985167]" ); rangeDown_long_helper( Long.MAX_VALUE, "[-3001719753101261693, -5920340473037142337, -6486340264786032387, 2946691002601937," + " 938051737769800691, 6726395392388302357, 4191131234584022735, -8953058930686899236," + " -3259812015710032715, -307380066314523266, -7262442345795599793, 2523647485907371127," + " -5933783335123481941, 9097897703523752562, 8234018459023606428, -2942296318021176509," + " 5939553317435058514, 425447389768546624, -6116173926626075902, 2177406786277490478]" ); rangeDown_long_helper( Long.MIN_VALUE, "[-9223372036854775808, -9223372036854775808, -9223372036854775808, -9223372036854775808," + " -9223372036854775808, -9223372036854775808, -9223372036854775808, -9223372036854775808," + " -9223372036854775808, -9223372036854775808, -9223372036854775808, -9223372036854775808," + " -9223372036854775808, -9223372036854775808, -9223372036854775808, -9223372036854775808," + " -9223372036854775808, -9223372036854775808, -9223372036854775808, -9223372036854775808]" ); } private static void nextFromRangeDown_char_helper(char a, char output) { aeq(P.nextFromRangeDown(a), output); P.reset(); } @Test public void testNextFromRangeDown_char() { nextFromRangeDown_char_helper('\0', '\0'); nextFromRangeDown_char_helper('a', ')'); nextFromRangeDown_char_helper('Ш', 'lj'); nextFromRangeDown_char_helper('\uffff', ''); } private static void rangeDown_char_helper(char a, @NotNull String output) { aeqcs(P.rangeDown(a), output); P.reset(); } @Test public void testRangeDown_char() { rangeDown_char_helper( '\0', "\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0" + "\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0" + "\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0" ); rangeDown_char_helper( 'a', ")I\u00035?8#Q\25\u00151&OaV\\?>5?\34*O\4?+-B\7)\34\26CLER%@7\2\5.\u001c2S\6=V\30\nN\32X^$.\23\22\3K" + "10);\u001c2EZF\17I`5\23OSS\5\3\5\u000b2Y\\K;\1CQ7&W\5>U7\21(Y\2+'\32\24V<T@)B\2?3+\6\u00129CZ\35BW" + "\\FF\13[J" ); rangeDown_char_helper( 'Ш', "ljƃεʿǥ\u02fd£ȕʱϏϡǜȿľοu~\u031cĪϹɷȇЖɃɌɅ\u0352ĥɀ\u0363ϯβœźŖǺĘǽˬǎ\u02f0ŨƚϘÞxȮĒưʻ²\u02da\u03605ȓ\u0353" + "\u02d3\5ʅϳƋ2\u0359\u02ef\u036f\u033b|ɑʷ¦ϲ¾·\21ΨÙɨŔ\u0329ο\u0306\u0092\u0339σŚ\u036bBɗŪŤͽЋɵÊ\u037eʡɪ" + "\35\u0366țdžɐʓΤǔȪĢͽ¬ü\u0300\u009bϖɕdžĖƣ,\u02d6nj\u02f7\3ɌÄʓϨͺɎ" ); rangeDown_char_helper( '\uffff', "\u2df2ε\u2538\u31e5\uf6ff\ue215\u17f3\udd75\udd15ϡ\u19dcᬜK" + "\ufe2d\uf207\u2a43\uea45\ue352\u2b63\uf637\uee1c\u33b2ᅺᤘ\ue9fd" + "\u2aec\uaaf0\u28de\u2e24\uf878ሮܓ\uff03\ue5cb\ua7b1\uecf5\ue8b2\ue2da\ue78f" 
+ "\u3353\ue2d3\ud805ឃᳶ\u2832\uf36f\ue87cࢦ" ); } private static void nextFromRange_byte_byte_helper(int a, int b, int output) { aeq(P.nextFromRange((byte) a, (byte) b), output); P.reset(); } @Test public void testNextFromRange_byte_byte() { nextFromRange_byte_byte_helper(10, 20, 19); nextFromRange_byte_byte_helper(10, 10, 10); nextFromRange_byte_byte_helper(-20, -10, -11); nextFromRange_byte_byte_helper(-20, -20, -20); nextFromRange_byte_byte_helper(0, 0, 0); nextFromRange_byte_byte_helper(0, 10, 9); nextFromRange_byte_byte_helper(-5, 0, -4); nextFromRange_byte_byte_helper(-5, 10, 4); nextFromRange_byte_byte_helper(-10, 5, -1); try { P.nextFromRange((byte) 5, (byte) -10); fail(); } catch (IllegalArgumentException ignored) {} finally { P.reset(); } } private static void range_byte_byte_helper(int a, int b, @NotNull String output) { aeqitLimit(TINY_LIMIT, P.range((byte) a, (byte) b), output); P.reset(); } @Test public void testRange_byte_byte() { range_byte_byte_helper( 10, 20, "[19, 19, 13, 12, 15, 18, 15, 13, 11, 17, 15, 13, 15, 18, 15, 11, 16, 11, 16, 15, ...]" ); range_byte_byte_helper( 10, 10, "[10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, ...]" ); range_byte_byte_helper((byte) 10, (byte) 9, "[]"); range_byte_byte_helper( -20, -10, "[-11, -11, -17, -18, -15, -12, -15, -17, -19, -13, -15, -17, -15, -12, -15, -19, -14, -19, -14," + " -15, ...]" ); range_byte_byte_helper( -20, -20, "[-20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20," + " -20, ...]" ); range_byte_byte_helper((byte) -20, (byte) -21, "[]"); range_byte_byte_helper(0, 0, "[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...]"); range_byte_byte_helper(0, 10, "[9, 9, 3, 2, 5, 8, 5, 3, 1, 7, 5, 3, 5, 8, 5, 1, 6, 1, 6, 5, ...]"); range_byte_byte_helper(-5, 0, "[-4, -4, -2, -3, 0, -5, 0, 0, -2, -4, 0, -2, 0, -5, 0, -4, -4, -1, 0, 0, ...]"); range_byte_byte_helper(-5, 10, "[4, 4, -2, -3, 0, 10, 3, 0, 8, -2, 10, -4, 2, 0, -2, 0, 3, 0, -4, 1, ...]"); range_byte_byte_helper( -10, 5, "[-1, -1, -7, -8, -5, 5, -2, -5, 3, -7, 5, -9, -3, -5, -7, -5, -2, -5, -9, -4, ...]" ); range_byte_byte_helper(5, -10, "[]"); } private static void nextFromRange_short_short_helper(int a, int b, int output) { aeq(P.nextFromRange((short) a, (short) b), output); P.reset(); } @Test public void testNextFromRange_short_short() { nextFromRange_short_short_helper(10, 20, 19); nextFromRange_short_short_helper(10, 10, 10); nextFromRange_short_short_helper(-20, -10, -11); nextFromRange_short_short_helper(-20, -20, -20); nextFromRange_short_short_helper(0, 0, 0); nextFromRange_short_short_helper(0, 10, 9); nextFromRange_short_short_helper(-5, 0, -4); nextFromRange_short_short_helper(-5, 10, 4); nextFromRange_short_short_helper(-10, 5, -1); try { P.nextFromRange((short) 5, (short) -10); fail(); } catch (IllegalArgumentException ignored) {} finally { P.reset(); } } private static void range_short_short_helper(int a, int b, @NotNull String output) { aeqitLimit(TINY_LIMIT, P.range((short) a, (short) b), output); P.reset(); } @Test public void testRange_short_short() { range_short_short_helper( 10, 20, "[19, 19, 13, 12, 15, 18, 15, 13, 11, 17, 15, 13, 15, 18, 15, 11, 16, 11, 16, 15, ...]" ); range_short_short_helper( 10, 10, "[10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, ...]" ); range_short_short_helper((short) 10, (short) 9, "[]"); range_short_short_helper( -20, -10, "[-11, -11, -17, -18, -15, -12, -15, -17, -19, -13, -15, -17, -15, -12, -15, -19, 
-14, -19, -14," + " -15, ...]" ); range_short_short_helper( -20, -20, "[-20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20," + " -20, ...]" ); range_short_short_helper((short) -20, (short) -21, "[]"); range_short_short_helper(0, 0, "[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...]"); range_short_short_helper(0, 10, "[9, 9, 3, 2, 5, 8, 5, 3, 1, 7, 5, 3, 5, 8, 5, 1, 6, 1, 6, 5, ...]"); range_short_short_helper( -5, 0, "[-4, -4, -2, -3, 0, -5, 0, 0, -2, -4, 0, -2, 0, -5, 0, -4, -4, -1, 0, 0, ...]" ); range_short_short_helper(-5, 10, "[4, 4, -2, -3, 0, 10, 3, 0, 8, -2, 10, -4, 2, 0, -2, 0, 3, 0, -4, 1, ...]"); range_short_short_helper( -10, 5, "[-1, -1, -7, -8, -5, 5, -2, -5, 3, -7, 5, -9, -3, -5, -7, -5, -2, -5, -9, -4, ...]" ); range_short_short_helper(5, -10, "[]"); } private static void nextFromRange_int_int_helper(int a, int b, int output) { aeq(P.nextFromRange(a, b), output); P.reset(); } @Test public void testNextFromRange_int_int() { nextFromRange_int_int_helper(10, 20, 19); nextFromRange_int_int_helper(10, 10, 10); nextFromRange_int_int_helper(-20, -10, -11); nextFromRange_int_int_helper(-20, -20, -20); nextFromRange_int_int_helper(0, 0, 0); nextFromRange_int_int_helper(0, 10, 9); nextFromRange_int_int_helper(-5, 0, -4); nextFromRange_int_int_helper(-5, 10, 4); nextFromRange_int_int_helper(-10, 5, -1); try { P.nextFromRange(5, -10); fail(); } catch (IllegalArgumentException ignored) {} finally { P.reset(); } } private static void range_int_int_helper(int a, int b, @NotNull String output) { aeqitLimit(TINY_LIMIT, P.range(a, b), output); P.reset(); } @Test public void testRange_int_int() { range_int_int_helper( 10, 20, "[19, 12, 15, 13, 11, 15, 15, 15, 16, 11, 20, 19, 17, 12, 19, 14, 13, 15, 14, 10, ...]" ); range_int_int_helper( 10, 10, "[10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, ...]" ); range_int_int_helper(10, 9, "[]"); range_int_int_helper( -20, -10, "[-11, -18, -15, -17, -19, -15, -15, -15, -14, -19, -10, -11, -13, -18, -11, -16, -17, -15, -16," + " -20, ...]" ); range_int_int_helper( -20, -20, "[-20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20," + " -20, ...]" ); range_int_int_helper(-20, -21, "[]"); range_int_int_helper(0, 0, "[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...]"); range_int_int_helper(0, 10, "[9, 2, 5, 3, 1, 5, 5, 5, 6, 1, 10, 9, 7, 2, 9, 4, 3, 5, 4, 0, ...]"); range_int_int_helper(-5, 0, "[-4, -3, 0, -2, -4, 0, 0, 0, -4, -1, -3, -4, 0, -3, -4, -1, -2, 0, -1, -5, ...]"); range_int_int_helper(-5, 10, "[4, -3, 10, 0, -2, -4, 0, 0, 0, 1, -4, 7, 9, 10, 9, 5, 4, 2, 9, 8, ...]"); range_int_int_helper(-10, 5, "[-1, -8, 5, -5, -7, -9, -5, -5, -5, -4, -9, 2, 4, 5, 4, 0, -1, -3, 4, 3, ...]"); range_int_int_helper(5, -10, "[]"); } private static void nextFromRange_long_long_helper(long a, long b, long output) { aeq(P.nextFromRange(a, b), output); P.reset(); } @Test public void testNextFromRange_long_long() { nextFromRange_long_long_helper(10L, 20L, 19L); nextFromRange_long_long_helper(10L, 10L, 10L); nextFromRange_long_long_helper(-20L, -10L, -11L); nextFromRange_long_long_helper(-20L, -20L, -20L); nextFromRange_long_long_helper(0L, 0L, 0L); nextFromRange_long_long_helper(0L, 10L, 9L); nextFromRange_long_long_helper(-5L, 0L, -4L); nextFromRange_long_long_helper(-5L, 10L, 4L); nextFromRange_long_long_helper(-10L, 5L, -1L); try { P.nextFromRange(5L, -10L); fail(); } catch (IllegalArgumentException ignored) {} finally { 
P.reset(); } } private static void range_long_long_helper(long a, long b, @NotNull String output) { P.reset(); aeqitLimit(TINY_LIMIT, P.range(a, b), output); } @Test public void testRange_long_long() { range_long_long_helper( 10L, 20L, "[19, 19, 13, 12, 15, 18, 15, 13, 11, 17, 15, 13, 15, 18, 15, 11, 16, 11, 16, 15, ...]" ); range_long_long_helper( 10L, 10L, "[10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, ...]" ); range_long_long_helper(10L, 9L, "[]"); range_long_long_helper( -20L, -10L, "[-11, -11, -17, -18, -15, -12, -15, -17, -19, -13, -15, -17, -15, -12, -15, -19, -14, -19, -14," + " -15, ...]" ); range_long_long_helper( -20L, -20L, "[-20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20," + " -20, ...]" ); range_long_long_helper(-20L, -21L, "[]"); range_long_long_helper(0L, 0L, "[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...]"); range_long_long_helper(0L, 10L, "[9, 9, 3, 2, 5, 8, 5, 3, 1, 7, 5, 3, 5, 8, 5, 1, 6, 1, 6, 5, ...]"); range_long_long_helper( -5L, 0L, "[-4, -4, -2, -3, 0, -5, 0, 0, -2, -4, 0, -2, 0, -5, 0, -4, -4, -1, 0, 0, ...]" ); range_long_long_helper(-5L, 10L, "[4, 4, -2, -3, 0, 10, 3, 0, 8, -2, 10, -4, 2, 0, -2, 0, 3, 0, -4, 1, ...]"); range_long_long_helper( -10L, 5L, "[-1, -1, -7, -8, -5, 5, -2, -5, 3, -7, 5, -9, -3, -5, -7, -5, -2, -5, -9, -4, ...]" ); range_long_long_helper(5L, -10L, "[]"); } private static void nextFromRange_BigInteger_BigInteger_helper(int a, int b, int output) { aeq(P.nextFromRange(BigInteger.valueOf(a), BigInteger.valueOf(b)), output); P.reset(); } @Test public void testNextFromRange_BigInteger_BigInteger() { nextFromRange_BigInteger_BigInteger_helper(10, 20, 19); nextFromRange_BigInteger_BigInteger_helper(10, 10, 10); nextFromRange_BigInteger_BigInteger_helper(-20, -10, -11); nextFromRange_BigInteger_BigInteger_helper(-20, -20, -20); nextFromRange_BigInteger_BigInteger_helper(0, 0, 0); nextFromRange_BigInteger_BigInteger_helper(0, 10, 9); nextFromRange_BigInteger_BigInteger_helper(-5, 0, -4); nextFromRange_BigInteger_BigInteger_helper(-5, 10, 4); nextFromRange_BigInteger_BigInteger_helper(-10, 5, -1); P.reset(); try { P.nextFromRange(BigInteger.valueOf(5), BigInteger.valueOf(-10)); fail(); } catch (IllegalArgumentException ignored) {} finally { P.reset(); } } private static void range_BigInteger_BigInteger_helper(int a, int b, @NotNull String output) { aeqitLimit(TINY_LIMIT, P.range(BigInteger.valueOf(a), BigInteger.valueOf(b)), output); P.reset(); } @Test public void testRange_BigInteger_BigInteger() { range_BigInteger_BigInteger_helper( 10, 20, "[19, 19, 13, 12, 15, 18, 15, 13, 11, 17, 15, 13, 15, 18, 15, 11, 16, 11, 16, 15, ...]" ); range_BigInteger_BigInteger_helper( 10, 10, "[10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, ...]" ); range_BigInteger_BigInteger_helper(10, 9, "[]"); range_BigInteger_BigInteger_helper( -20, -10, "[-11, -11, -17, -18, -15, -12, -15, -17, -19, -13, -15, -17, -15, -12, -15, -19, -14, -19, -14," + " -15, ...]" ); range_BigInteger_BigInteger_helper( -20, -20, "[-20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20, -20," + " -20, ...]" ); range_BigInteger_BigInteger_helper(-20, -21, "[]"); range_BigInteger_BigInteger_helper(0, 0, "[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ...]"); range_BigInteger_BigInteger_helper(0, 10, "[9, 9, 3, 2, 5, 8, 5, 3, 1, 7, 5, 3, 5, 8, 5, 1, 6, 1, 6, 5, ...]"); range_BigInteger_BigInteger_helper( -5, 0, "[-4, 
-4, -2, -3, 0, -5, 0, 0, -2, -4, 0, -2, 0, -5, 0, -4, -4, -1, 0, 0, ...]" ); range_BigInteger_BigInteger_helper( -5, 10, "[4, 4, -2, -3, 0, 10, 3, 0, 8, -2, 10, -4, 2, 0, -2, 0, 3, 0, -4, 1, ...]" ); range_BigInteger_BigInteger_helper( -10, 5, "[-1, -1, -7, -8, -5, 5, -2, -5, 3, -7, 5, -9, -3, -5, -7, -5, -2, -5, -9, -4, ...]" ); range_BigInteger_BigInteger_helper(5, -10, "[]"); } private static void nextFromRange_char_char_helper(char a, char b, char output) { aeq(P.nextFromRange(a, b), output); P.reset(); } @Test public void testNextFromRange_char_char() { nextFromRange_char_char_helper('a', 'z', 'j'); nextFromRange_char_char_helper('a', 'a', 'a'); nextFromRange_char_char_helper('!', '9', '*'); P.reset(); try { P.nextFromRange('a', 'A'); fail(); } catch (IllegalArgumentException ignored) {} finally { P.reset(); } } private static void range_char_char_helper(char a, char b, @NotNull String output) { aeqcs(P.range(a, b), output); P.reset(); } @Test public void testRange_char_char() { range_char_char_helper( 'a', 'z', "jjdsvyfdrhvtvyvrgpbwvvkpzexlncshjewdmfsefadxcfpostgwymkoqiyyeyotsdplrqjvsofgpjgavgtpttfdwftlszplpbd" + "rxgxsfvxrizclhuiwuagojhcctlgs" ); range_char_char_helper( 'a', 'a', "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + "aaaaaaaaaaaaaaaaaaaaaaaaaaaaa" ); range_char_char_helper( '!', '9', "**$369&$2(646962'0\"766+0%8,.#3(*%7$-&3%&!$8#&0/34'79-+/1)99%9/43$0,21*63/&'0*'!6'4044&$7&4,30,0\"$" +
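// The helpers above all follow the same snapshot-testing pattern: draw from the pseudorandom
// provider P, compare against a recorded expected value with aeq/aeqit/aeqcs, then P.reset() so the
// next case sees the same deterministic seed state. The paired expected values also suggest the
// invariant that nextFromRangeUp(a)/nextFromRangeDown(a) return the first element of the
// corresponding rangeUp(a)/rangeDown(a) stream (e.g. 1143001545 for both nextFromRangeUp(0) and
// rangeUp(0) over ints). A minimal property-style sketch of that invariant, assuming the P, aeq and
// head utilities already used in this test class:
//
//     private static void nextFromRangeUp_matchesHead_int(int a) {
//         int next = P.nextFromRangeUp(a); // single draw
//         P.reset();                       // rewind to the same seed state
//         aeq(next, head(P.rangeUp(a)));   // must equal the head of the stream
//         P.reset();
//     }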
package lucee.runtime.engine; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.lang.instrument.Instrumentation; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.net.MalformedURLException; import java.net.URI; import java.net.URISyntaxException; import java.net.URL; import java.security.CodeSource; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Properties; import java.util.Set; import java.util.TimeZone; import java.util.zip.ZipEntry; import java.util.zip.ZipInputStream; import javax.script.ScriptEngineFactory; import javax.servlet.ServletConfig; import javax.servlet.ServletContext; import javax.servlet.ServletException; import javax.servlet.http.Cookie; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.jsp.JspException; import lucee.Info; import lucee.print; import lucee.cli.servlet.HTTPServletImpl; import lucee.commons.collection.MapFactory; import lucee.commons.io.CharsetUtil; import lucee.commons.io.DevNullOutputStream; import lucee.commons.io.FileUtil; import lucee.commons.io.IOUtil; import lucee.commons.io.SystemUtil; import lucee.commons.io.compress.CompressUtil; import lucee.commons.io.log.Log; import lucee.commons.io.res.Resource; import lucee.commons.io.res.ResourceProvider; import lucee.commons.io.res.ResourcesImpl; import lucee.commons.io.res.util.ResourceUtil; import lucee.commons.io.res.util.ResourceUtilImpl; import lucee.commons.io.retirement.RetireOutputStreamFactory; import lucee.commons.lang.ExceptionUtil; import lucee.commons.lang.Md5; import lucee.commons.lang.Pair; import lucee.commons.lang.StringUtil; import lucee.commons.lang.SystemOut; import lucee.commons.lang.types.RefBoolean; import lucee.commons.lang.types.RefBooleanImpl; import lucee.commons.net.HTTPUtil; import lucee.intergral.fusiondebug.server.FDControllerImpl; import lucee.loader.engine.CFMLEngine; import lucee.loader.engine.CFMLEngineFactory; import lucee.loader.engine.CFMLEngineFactorySupport; import lucee.loader.engine.CFMLEngineWrapper; import lucee.loader.osgi.BundleCollection; import lucee.loader.util.Util; import lucee.runtime.CFMLFactory; import lucee.runtime.CFMLFactoryImpl; import lucee.runtime.ComponentPageImpl; import lucee.runtime.PageContext; import lucee.runtime.PageContextImpl; import lucee.runtime.PageSource; import lucee.runtime.cache.CacheUtil; import lucee.runtime.config.Config; import lucee.runtime.config.ConfigImpl; import lucee.runtime.config.ConfigServer; import lucee.runtime.config.ConfigServerImpl; import lucee.runtime.config.ConfigWeb; import lucee.runtime.config.ConfigWebImpl; import lucee.runtime.config.DeployHandler; import lucee.runtime.config.Identification; import lucee.runtime.config.Password; import lucee.runtime.config.XMLConfigAdmin; import lucee.runtime.config.XMLConfigFactory; import lucee.runtime.config.XMLConfigFactory.UpdateInfo; import lucee.runtime.config.XMLConfigServerFactory; import lucee.runtime.config.XMLConfigWebFactory; import lucee.runtime.engine.listener.CFMLServletContextListener; import lucee.runtime.exp.ApplicationException; import lucee.runtime.exp.PageException; import 
lucee.runtime.exp.PageRuntimeException; import lucee.runtime.exp.PageServletException; import lucee.runtime.extension.ExtensionDefintion; import lucee.runtime.extension.RHExtension; import lucee.runtime.functions.other.CreateUniqueId; import lucee.runtime.instrumentation.InstrumentationFactory; import lucee.runtime.jsr223.ScriptEngineFactoryImpl; import lucee.runtime.net.http.HTTPServletRequestWrap; import lucee.runtime.net.http.HttpServletRequestDummy; import lucee.runtime.net.http.HttpServletResponseDummy; import lucee.runtime.net.http.ReqRspUtil; import lucee.runtime.op.CastImpl; import lucee.runtime.op.Caster; import lucee.runtime.op.CreationImpl; import lucee.runtime.op.DecisionImpl; import lucee.runtime.op.ExceptonImpl; import lucee.runtime.op.IOImpl; import lucee.runtime.op.JavaProxyUtilImpl; import lucee.runtime.op.OperationImpl; import lucee.runtime.op.StringsImpl; import lucee.runtime.osgi.OSGiUtil; import lucee.runtime.thread.ThreadUtil; import lucee.runtime.type.Struct; import lucee.runtime.type.StructImpl; import lucee.runtime.util.Cast; import lucee.runtime.util.ClassUtil; import lucee.runtime.util.ClassUtilImpl; import lucee.runtime.util.Creation; import lucee.runtime.util.DBUtil; import lucee.runtime.util.DBUtilImpl; import lucee.runtime.util.Decision; import lucee.runtime.util.Excepton; import lucee.runtime.util.HTMLUtil; import lucee.runtime.util.HTMLUtilImpl; import lucee.runtime.util.HTTPUtilImpl; import lucee.runtime.util.IO; import lucee.runtime.util.ListUtil; import lucee.runtime.util.ListUtilImpl; import lucee.runtime.util.ORMUtil; import lucee.runtime.util.ORMUtilImpl; import lucee.runtime.util.Operation; import lucee.runtime.util.PageContextUtil; import lucee.runtime.util.Strings; import lucee.runtime.util.SystemUtilImpl; import lucee.runtime.util.TemplateUtil; import lucee.runtime.util.TemplateUtilImpl; import lucee.runtime.util.ZipUtil; import lucee.runtime.util.ZipUtilImpl; import lucee.runtime.video.VideoUtil; import lucee.runtime.video.VideoUtilImpl; import org.apache.commons.net.telnet.TerminalTypeOptionHandler; import org.apache.felix.framework.Felix; import org.osgi.framework.BundleContext; import org.osgi.framework.Constants; import org.osgi.framework.Version; //import com.intergral.fusiondebug.server.FDControllerFactory; /** * The CFMl Engine */ public final class CFMLEngineImpl implements CFMLEngine { private static Map<String,CFMLFactory> initContextes=MapFactory.<String,CFMLFactory>getConcurrentMap(); private static Map<String,CFMLFactory> contextes=MapFactory.<String,CFMLFactory>getConcurrentMap(); private ConfigServerImpl configServer=null; private static CFMLEngineImpl engine=null; private CFMLEngineFactory factory; private final ControllerStateImpl controlerState=new ControllerStateImpl(true); private boolean allowRequestTimeout=true; private Monitor monitor; private List<ServletConfig> servletConfigs=new ArrayList<ServletConfig>(); private long uptime; private InfoImpl info; private BundleCollection bundleCollection; private ScriptEngineFactory cfmlScriptEngine; private ScriptEngineFactory cfmlTagEngine; private ScriptEngineFactory luceeScriptEngine; private ScriptEngineFactory luceeTagEngine; private Controler controler; private CFMLServletContextListener scl; //private static CFMLEngineImpl engine=new CFMLEngineImpl(); private CFMLEngineImpl(CFMLEngineFactory factory, BundleCollection bc) { this.factory=factory; this.bundleCollection=bc; // log the startup process String logDir=SystemUtil.getSystemPropOrEnvVar("startlogdirectory", 
null);//"/Users/mic/Tmp/"); if(logDir!=null) { File f = new File(logDir); if(f.isDirectory()) { String logName=SystemUtil.getSystemPropOrEnvVar("logName", "stacktrace"); int timeRange=Caster.toIntValue(SystemUtil.getSystemPropOrEnvVar("timeRange", "stacktrace"),1); LogST._do(f, logName, timeRange); } } // happen when Lucee is loaded directly if(bundleCollection==null) { try{ Properties prop = InfoImpl.getDefaultProperties(null); // read the config from default.properties Map<String,Object> config=new HashMap<String, Object>(); Iterator<Entry<Object, Object>> it = prop.entrySet().iterator(); Entry<Object, Object> e; String k; while(it.hasNext()){ e = it.next(); k=(String) e.getKey(); if(!k.startsWith("org.") && !k.startsWith("felix.")) continue; config.put(k, CFMLEngineFactorySupport.removeQuotes((String)e.getValue(),true)); } config.put( Constants.FRAMEWORK_BOOTDELEGATION, "lucee.*"); Felix felix = factory.getFelix(factory.getResourceRoot(),config); bundleCollection=new BundleCollection(felix, felix, null); //bundleContext=bundleCollection.getBundleContext(); } catch(Throwable t) { ExceptionUtil.rethrowIfNecessary(t); if(t instanceof Error) throw (Error)t; throw new RuntimeException(t); } } this.info=new InfoImpl(bundleCollection==null?null:bundleCollection.core); Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader()); // MUST better location for this UpdateInfo updateInfo; Resource configDir=null; try { configDir = getSeverContextConfigDirectory(factory); updateInfo=XMLConfigFactory.doNew(this,configDir, true); } catch (IOException e) { throw new PageRuntimeException(e); } CFMLEngineFactory.registerInstance((this));// patch, not really good but it works ConfigServerImpl cs = getConfigServerImpl(); controler = new Controler(cs,initContextes,5*1000,controlerState); controler.setDaemon(true); controler.setPriority(Thread.MIN_PRIORITY); boolean disabled=Caster.toBooleanValue(SystemUtil.getSystemPropOrEnvVar(SystemUtil.SETTING_CONTROLLER_DISABLED,null),false); if (!disabled) { // start the controller SystemOut.printDate(SystemUtil.getPrintWriter(SystemUtil.OUT), "Start CFML Controller"); controler.start(); } boolean isRe=configDir==null?false:XMLConfigFactory.isRequiredExtension(this, configDir); boolean installExtensions=Caster.toBooleanValue(SystemUtil.getSystemPropOrEnvVar("lucee.extensions.install",null),true); // copy bundled extension to local extension directory (if never done before) if(installExtensions && updateInfo.updateType!=XMLConfigFactory.NEW_NONE) { deployBundledExtension(cs); SystemOut.printDate(SystemUtil.getPrintWriter(SystemUtil.OUT), "copy bundled extension to local extension directory (if never done before)"); } // required extensions // if we have a "fresh" install Set<ExtensionDefintion> extensions; if(installExtensions && (updateInfo.updateType==XMLConfigFactory.NEW_FRESH || updateInfo.updateType==XMLConfigFactory.NEW_FROM4)) { List<ExtensionDefintion> ext = info.getRequiredExtension(); extensions = toSet(null,ext); SystemOut.printDate(SystemUtil.getPrintWriter(SystemUtil.OUT), "detected Extensions to install (new;"+updateInfo.updateType+"):"+toList(extensions)); } // if we have an update we update the extension that re installed and we have an older version as defined in the manifest else if(installExtensions && (updateInfo.updateType==XMLConfigFactory.NEW_MINOR || !isRe)) { extensions = new HashSet<ExtensionDefintion>(); Iterator<ExtensionDefintion> it = info.getRequiredExtension().iterator(); ExtensionDefintion ed; RHExtension rhe; Version 
edVersion,rheVersion; while(it.hasNext()){ ed = it.next(); edVersion = OSGiUtil.toVersion(ed.getVersion(), null); if(ed.getVersion()==null) { continue; // no version definition no update } try{ rhe = XMLConfigAdmin.hasRHExtensions(cs, new ExtensionDefintion(ed.getId())); if(rhe==null) { rheVersion=null; Version since=ed.getSince(); if(since==null || updateInfo.oldVersion==null || !Util.isNewerThan(since, updateInfo.oldVersion)) continue; // not installed we do not update extensions.add(ed); } else rheVersion=OSGiUtil.toVersion(rhe.getVersion(), null); // if the installed is older than the one defined in the manifest we update (if possible) //print.e(ed.getVersion()+"->"+edVersion); //if(rhe!=null)print.e(rhe.getVersion()+"->"+rheVersion); //print.e(rheVersion!=null && OSGiUtil.isNewerThan(edVersion,rheVersion)); if(rheVersion!=null && OSGiUtil.isNewerThan(edVersion,rheVersion)) { // TODO do none OSGi version number comparsion extensions.add(ed); } } catch(Throwable t){ ExceptionUtil.rethrowIfNecessary(t); // fails we update extensions.add(ed); } } SystemOut.printDate(SystemUtil.getPrintWriter(SystemUtil.OUT), "detected Extensions to install (minor;"+updateInfo.updateType+"):"+toList(extensions)); } else { extensions = new HashSet<ExtensionDefintion>(); SystemOut.printDate(SystemUtil.getPrintWriter(SystemUtil.OUT), "no update"); } // XMLConfigAdmin.hasRHExtensions(ci, ed) // install extension defined String extensionIds=SystemUtil.getSystemPropOrEnvVar("lucee-extensions",null); // old no longer used if(StringUtil.isEmpty(extensionIds,true)) extensionIds=SystemUtil.getSystemPropOrEnvVar("lucee.extensions",null); if(!StringUtil.isEmpty(extensionIds,true)) { SystemOut.printDate(SystemUtil.getPrintWriter(SystemUtil.OUT), "extensions to install defined in env variable or system property:"+extensionIds); List<ExtensionDefintion> _extensions = RHExtension.toExtensionDefinitions(extensionIds); extensions=toSet(extensions,_extensions); } if(extensions.size()>0) { boolean sucess=DeployHandler.deployExtensions( cs, extensions.toArray(new ExtensionDefintion[extensions.size()]), cs.getLog("deploy", true) ); if(sucess && configDir!=null)XMLConfigFactory.updateRequiredExtension(this, configDir); SystemOut.printDate(SystemUtil.getPrintWriter(SystemUtil.OUT), "installed extensions:"+toList(extensions)); } else if(configDir!=null)XMLConfigFactory.updateRequiredExtension(this, configDir); touchMonitor(cs); SystemOut.printDate(SystemUtil.getPrintWriter(SystemUtil.OUT), "touched monitors"); this.uptime=System.currentTimeMillis(); //this.config=config; } public static Set<ExtensionDefintion> toSet(Set<ExtensionDefintion> set, List<ExtensionDefintion> list) { LinkedHashMap<String, ExtensionDefintion> map=new LinkedHashMap<String, ExtensionDefintion>(); ExtensionDefintion ed; // set > map if(set!=null) { Iterator<ExtensionDefintion> it = set.iterator(); while(it.hasNext()){ ed = it.next(); map.put(ed.toString(),ed); } } // list > map if(list!=null) { Iterator<ExtensionDefintion> it = list.iterator(); while(it.hasNext()){ ed = it.next(); map.put(ed.toString(),ed); } } // to Set LinkedHashSet<ExtensionDefintion> rtn = new LinkedHashSet<ExtensionDefintion>(); Iterator<ExtensionDefintion> it = map.values().iterator(); while(it.hasNext()){ ed = it.next(); rtn.add(ed); } return rtn; } public static String toList(Set<ExtensionDefintion> set) { StringBuilder sb=new StringBuilder(); Iterator<ExtensionDefintion> it = set.iterator(); ExtensionDefintion ed; while(it.hasNext()){ ed = it.next(); if(sb.length()>0) sb.append(", 
"); sb.append(ed.toString()); } return sb.toString(); } private void deployBundledExtension(ConfigServerImpl cs) { Resource dir = cs.getLocalExtensionProviderDirectory(); List<ExtensionDefintion> existing = DeployHandler.getLocalExtensions(cs); Map<String,ExtensionDefintion> existingMap=new HashMap<String, ExtensionDefintion>(); { Iterator<ExtensionDefintion> it = existing.iterator(); ExtensionDefintion ed; while(it.hasNext()) { ed=it.next(); try { existingMap.put(ed.getSource().getName(), ed); } catch (ApplicationException e) {} } } Log log = cs.getLog("deploy"); // get the index ClassLoader cl=CFMLEngineFactory.getInstance().getCFMLEngineFactory().getClass().getClassLoader(); InputStream is = cl.getResourceAsStream("extensions/.index"); if(is==null)is = cl.getResourceAsStream("/extensions/.index"); if(is==null)is = SystemUtil.getResourceAsStream(null, "/extensions/.index"); if(is==null) { log.error("extract-extension", "could not found [/extensions/.index] defined in the index in the lucee.jar"); return; } try { String index=IOUtil.toString(is, CharsetUtil.UTF8); log.info("extract-extension", "the following extensions are bundled with the lucee.jar ["+index+"]"); String[] names = lucee.runtime.type.util.ListUtil.listToStringArray(index, ';'); String name; Resource temp=null; RHExtension rhe; ExtensionDefintion exist; Iterator<ExtensionDefintion> it; for(int i=0;i<names.length;i++){ name=names[i]; if(StringUtil.isEmpty(name,true)) continue; name=name.trim(); // does it already exist? if(existingMap.containsKey(name)) { continue; } is = cl.getResourceAsStream("extensions/"+name); if(is==null)is = cl.getResourceAsStream("/extensions/"+name); if(is==null) { log.error("extract-extension", "could not found extension ["+name+"] defined in the index in the lucee.jar"); continue; } try { temp=SystemUtil.getTempDirectory().getRealResource(name); ResourceUtil.touch(temp); Util.copy(is, temp.getOutputStream(),false,true); rhe = new RHExtension(cs, temp, false); ExtensionDefintion alreadyExists=null; it = existing.iterator(); while(it.hasNext()){ exist = it.next(); if(exist.equals(rhe)) { alreadyExists=exist; break; } } String trgName=rhe.getId()+"-"+rhe.getVersion()+".lex"; if(alreadyExists==null) { temp.moveTo(dir.getRealResource(trgName)); log.info("extract-extension", "added ["+name+"] to ["+dir+"]"); } else if(!alreadyExists.getSource().getName().equals(trgName)) { log.info("extract-extension", "rename ["+alreadyExists.getSource()+"] to ["+trgName+"]"); alreadyExists.getSource().moveTo( alreadyExists.getSource().getParentResource().getRealResource(trgName) ); } // now we check all extension name (for extension no longer delivered by lucee) it = existing.iterator(); while(it.hasNext()){ exist = it.next(); trgName=exist.getId()+"-"+exist.getVersion()+".lex"; if(!trgName.equals(exist.getSource().getName())) { exist.getSource().moveTo( exist.getSource().getParentResource().getRealResource(trgName) ); log.info("extract-extension", "rename ["+exist.getSource()+"] to ["+trgName+"]"); } } } finally { if(temp!=null && temp.exists())temp.delete(); } } } catch(Throwable t){ ExceptionUtil.rethrowIfNecessary(t); log.error("extract-extension", t); } return; } private void deployBundledExtensionZip(ConfigServerImpl cs) { Resource dir = cs.getLocalExtensionProviderDirectory(); List<ExtensionDefintion> existing = DeployHandler.getLocalExtensions(cs); String sub="extensions/"; // MUST this does not work on windows! 
we need to add an index ZipEntry entry; ZipInputStream zis = null; try { CodeSource src = CFMLEngineFactory.class.getProtectionDomain().getCodeSource(); if (src == null) return; URL loc = src.getLocation(); zis=new ZipInputStream(loc.openStream()); String path,name; int index; Resource temp; RHExtension rhe; Iterator<ExtensionDefintion> it; ExtensionDefintion exist; while ((entry = zis.getNextEntry())!= null) { path = entry.getName(); if(path.startsWith(sub) && path.endsWith(".lex")) { // ignore non lex files or file from else where index=path.lastIndexOf('/')+1; if(index==sub.length()) { // ignore sub directories name=path.substring(index); temp=null; try { temp = SystemUtil.getTempDirectory().getRealResource(name); ResourceUtil.touch(temp); Util.copy(zis, temp.getOutputStream(),false,true); rhe = new RHExtension(cs, temp, false); boolean alreadyExists=false; it = existing.iterator(); while(it.hasNext()){ exist = it.next(); if(exist.equals(rhe)) { alreadyExists=true; break; } } if(!alreadyExists) { temp.moveTo(dir.getRealResource(name)); } } finally { if(temp!=null && temp.exists())temp.delete(); } } } zis.closeEntry(); } } catch(Throwable t){ ExceptionUtil.rethrowIfNecessary(t);// TODO log this } finally { Util.closeEL(zis); } return; } public void touchMonitor(ConfigServerImpl cs) { if(monitor!=null && monitor.isAlive()) return; monitor = new Monitor(cs,controlerState); monitor.setDaemon(true); monitor.setPriority(Thread.MIN_PRIORITY); monitor.start(); } /** * get singelton instance of the CFML Engine * @param factory * @return CFMLEngine */ public static synchronized CFMLEngine getInstance(CFMLEngineFactory factory,BundleCollection bc) { if(engine==null) { if(SystemUtil.getLoaderVersion()<6.0D) { // windows needs 6.0 because restart is not working with older versions if(SystemUtil.isWindows()) throw new RuntimeException("You need to update a newer lucee.jar to run this version, you can download the latest jar from http://download.lucee.org."); else if(SystemUtil.getLoaderVersion()<5.8D) throw new RuntimeException("You need to update your lucee.jar to run this version, you can download the latest jar from http://download.lucee.org."); else if(SystemUtil.getLoaderVersion()<5.9D) SystemOut.printDate("To use all features Lucee provides, you need to update your lucee.jar, you can download the latest jar from http://download.lucee.org."); } engine=new CFMLEngineImpl(factory,bc); } return engine; } /** * get singelton instance of the CFML Engine, throwsexception when not already init * @param factory * @return CFMLEngine */ public static synchronized CFMLEngine getInstance() throws ServletException { if(engine!=null) return engine; throw new ServletException("CFML Engine is not loaded"); } @Override public void addServletConfig(ServletConfig config) throws ServletException { // FUTURE remove if("LuceeServletContextListener".equals(config.getServletName())) { try { //Method m = config.getClass().getMethod("getServletContextEvent", new Class[0]); //ServletContextEvent sce=(ServletContextEvent) m.invoke(config, new Object[0]); String status=config.getInitParameter("status"); if("release".equalsIgnoreCase(status)) reset(); } catch (Exception e) { e.printStackTrace(); } return; } // add EventListener if(scl==null) { addEventListener(config.getServletContext()); } servletConfigs.add(config); String real=ReqRspUtil.getRootPath(config.getServletContext()); if(!initContextes.containsKey(real)) { CFMLFactory jspFactory = loadJSPFactory(getConfigServerImpl(),config,initContextes.size()); 
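// cache the new factory under the web root's real path: any later ServletConfig (or request, via
// getCFMLFactory() further below) that resolves to the same web root reuses this CFMLFactory instead
// of loading a second one for the same context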
initContextes.put(real,jspFactory); } } private void addEventListener(ServletContext sc) { // TOMCAT if("org.apache.catalina.core.ApplicationContextFacade".equals(sc.getClass().getName())) { Object obj=extractServletContext(sc); obj=extractServletContext(obj); if("org.apache.catalina.core.StandardContext".equals(obj.getClass().getName())) { Method m=null; try { // TODO check if we already have a listener (lucee.loader.servlet.LuceeServletContextListener), if so we do nothing //sc.getApplicationLifecycleListeners(); m=obj.getClass().getMethod("addApplicationLifecycleListener", new Class[]{Object.class}); CFMLServletContextListener tmp; m.invoke(obj, new Object[]{tmp=new CFMLServletContextListener(this)}); scl=tmp; return; } catch (Exception e) {} } } // GENERAL try add Event method directly (does not work with tomcat) try{ CFMLServletContextListener tmp = new CFMLServletContextListener(this); sc.addListener(tmp); scl=tmp; return; } catch(Exception e) {} SystemOut.printDate("Lucee was not able to register an event listener with "+(sc==null?"null":sc.getClass().getName())); } private Object extractServletContext(Object sc) { Class<?> clazz = sc.getClass(); Field f=null; try { f = clazz.getDeclaredField("context"); }catch (Exception e) {} if(f!=null) { f.setAccessible(true); Object obj=null; try { obj = f.get(sc); } catch (Exception e) {} return obj; } return null; } @Override public ConfigServer getConfigServer(Password password) throws PageException { getConfigServerImpl().checkAccess(password); return configServer; } @Override public ConfigServer getConfigServer(String key, long timeNonce) throws PageException { getConfigServerImpl().checkAccess(key,timeNonce); return configServer; } public void setConfigServerImpl(ConfigServerImpl cs) { this.configServer=cs; } private ConfigServerImpl getConfigServerImpl() { if(configServer==null) { try { Resource context = getSeverContextConfigDirectory(factory); //CFMLEngineFactory.registerInstance(this);// patch, not really good but it works configServer=XMLConfigServerFactory.newInstance( this, initContextes, contextes, context); } catch (Exception e) { e.printStackTrace(); } } return configServer; } private Resource getSeverContextConfigDirectory(CFMLEngineFactory factory) throws IOException { ResourceProvider frp = ResourcesImpl.getFileResourceProvider(); return frp.getResource(factory.getResourceRoot().getAbsolutePath()).getRealResource("context"); } private CFMLFactoryImpl loadJSPFactory(ConfigServerImpl configServer, ServletConfig sg, int countExistingContextes) throws ServletException { try { if(XMLConfigWebFactory.LOG) SystemOut.printDate("load Context"); // Load Config RefBoolean isCustomSetting=new RefBooleanImpl(); Resource configDir=getConfigDirectory(sg,configServer,countExistingContextes,isCustomSetting); if(XMLConfigWebFactory.LOG) SystemOut.printDate("got context directory"); CFMLFactoryImpl factory=new CFMLFactoryImpl(this,sg); if(XMLConfigWebFactory.LOG) SystemOut.printDate("init factory"); ConfigWebImpl config=XMLConfigWebFactory.newInstance(this,factory,configServer,configDir,isCustomSetting.toBooleanValue(),sg); if(XMLConfigWebFactory.LOG) SystemOut.printDate("loaded config"); factory.setConfig(config); return factory; } catch (Exception e) { ServletException se= new ServletException(e.getMessage()); se.setStackTrace(e.getStackTrace()); throw se; } } /** * loads Configuration File from System, from init Parameter from web.xml * @param sg * @param configServer * @param countExistingContextes * @return return path to directory */ 
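// Resolution order used below: the servlet init-params "configuration" / "lucee-web-directory",
// then the system property "lucee.web.dir", and finally the default "{web-root-directory}/WEB-INF/lucee/".
// A minimal sketch of forcing the location programmatically before the engine starts (the value is
// illustrative only; with more than one web context it must contain a web-context specific placeholder):
//
//     System.setProperty("lucee.web.dir", "{web-root-directory}/WEB-INF/lucee/");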
private Resource getConfigDirectory(ServletConfig sg, ConfigServerImpl configServer, int countExistingContextes, RefBoolean isCustomSetting) throws PageServletException { isCustomSetting.setValue(true); ServletContext sc=sg.getServletContext(); String strConfig=sg.getInitParameter("configuration"); if(StringUtil.isEmpty(strConfig))strConfig=sg.getInitParameter("lucee-web-directory"); if(StringUtil.isEmpty(strConfig))strConfig=System.getProperty("lucee.web.dir"); if(StringUtil.isEmpty(strConfig)) { isCustomSetting.setValue(false); strConfig="{web-root-directory}/WEB-INF/lucee/"; } // only for backward compatibility else if(strConfig.startsWith("/WEB-INF/lucee/"))strConfig="{web-root-directory}"+strConfig; strConfig=StringUtil.removeQuotes(strConfig,true); // static path is not allowed if(countExistingContextes>1 && strConfig!=null && strConfig.indexOf('{')==-1){ String text="static path ["+strConfig+"] for servlet init param [lucee-web-directory] is not allowed, path must use a web-context specific placeholder."; System.err.println(text); throw new PageServletException(new ApplicationException(text)); } strConfig=SystemUtil.parsePlaceHolder(strConfig,sc,configServer.getLabels()); ResourceProvider frp = ResourcesImpl.getFileResourceProvider(); Resource root = frp.getResource(ReqRspUtil.getRootPath(sc)); Resource res; Resource configDir=ResourceUtil.createResource(res=root.getRealResource(strConfig), FileUtil.LEVEL_PARENT_FILE,FileUtil.TYPE_DIR); if(configDir==null) { configDir=ResourceUtil.createResource(res=frp.getResource(strConfig), FileUtil.LEVEL_GRAND_PARENT_FILE,FileUtil.TYPE_DIR); } if(configDir==null && !isCustomSetting.toBooleanValue()) { try { res.createDirectory(true); configDir=res; } catch (IOException e) { throw new PageServletException(Caster.toPageException(e)); } } if(configDir==null) { throw new PageServletException(new ApplicationException("path ["+strConfig+"] is invalid")); } if(!configDir.exists() || ResourceUtil.isEmptyDirectory(configDir, null)){ Resource railoRoot; // there is a railo directory if(configDir.getName().equals("lucee") && (railoRoot=configDir.getParentResource().getRealResource("railo")).isDirectory()) { try { copyRecursiveAndRename(railoRoot,configDir); } catch (IOException e) { try { configDir.createDirectory(true); } catch (IOException ioe) {} return configDir; } // zip the railo-server di and delete it (optional) try { Resource p=railoRoot.getParentResource(); CompressUtil.compress(CompressUtil.FORMAT_ZIP, railoRoot, p.getRealResource("railo-web-context-old.zip"), false, -1); ResourceUtil.removeEL(railoRoot, true); } catch(Throwable t){ExceptionUtil.rethrowIfNecessary(t);} } else { try { configDir.createDirectory(true); } catch (IOException e) {} } } return configDir; } private File getDirectoryByProp(String name) { String value=System.getProperty(name); if(Util.isEmpty(value,true)) return null; File dir=new File(value); dir.mkdirs(); if (dir.isDirectory()) return dir; return null; } private static void copyRecursiveAndRename(Resource src,Resource trg) throws IOException { if(!src.exists()) return ; if(src.isDirectory()) { if(!trg.exists())trg.mkdirs(); Resource[] files = src.listResources(); for(int i=0;i<files.length;i++) { copyRecursiveAndRename(files[i],trg.getRealResource(files[i].getName())); } } else if(src.isFile()) { if(trg.getName().endsWith(".rc") || trg.getName().startsWith(".")) { return; } if(trg.getName().equals("railo-web.xml.cfm")) { trg=trg.getParentResource().getRealResource("lucee-web.xml.cfm"); // cfLuceeConfiguration 
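// while copying the old Railo descriptor to lucee-web.xml.cfm, the block below rewrites its contents
// to the Lucee naming scheme: XML tag names (<cfRailoConfiguration>, <railo-configuration>), path
// placeholders ({railo-config}, {railo-server}, {railo-web}), quoted class prefixes ("railo.commons."
// and friends) and context paths are all mapped to their Lucee equivalents before the copy is written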
InputStream is = src.getInputStream();
OutputStream os = trg.getOutputStream();
try{
    String str=Util.toString(is);
    str=str.replace("<cfRailoConfiguration", "<!-- copy from Railo context --><cfLuceeConfiguration");
    str=str.replace("</cfRailoConfiguration", "</cfLuceeConfiguration");
    str=str.replace("<railo-configuration", "<lucee-configuration");
    str=str.replace("</railo-configuration", "</lucee-configuration");
    str=str.replace("{railo-config}", "{lucee-config}");
    str=str.replace("{railo-server}", "{lucee-server}");
    str=str.replace("{railo-web}", "{lucee-web}");
    str=str.replace("\"railo.commons.", "\"lucee.commons.");
    str=str.replace("\"railo.runtime.", "\"lucee.runtime.");
    str=str.replace("\"railo.cfx.", "\"lucee.cfx.");
    str=str.replace("/railo-context.ra", "/lucee-context.lar");
    str=str.replace("/railo-context", "/lucee");
    str=str.replace("railo-server-context", "lucee-server");
    // (truncated) str=str.replace("http: str=str.replace("http:
    ByteArrayInputStream bais = new ByteArrayInputStream(str.getBytes());
    try {
        Util.copy(bais, os);
        bais.close();
    }
    finally {
        Util.closeEL(is, os);
    }
}
finally {
    Util.closeEL(is,os);
}
return;
}
InputStream is = src.getInputStream();
OutputStream os = trg.getOutputStream();
try{
    Util.copy(is, os);
}
finally {
    Util.closeEL(is, os);
}
}
}
@Override
public CFMLFactory getCFMLFactory(ServletConfig srvConfig,HttpServletRequest req) throws ServletException {
    ServletContext srvContext = srvConfig.getServletContext();
    String real=ReqRspUtil.getRootPath(srvContext);
    ConfigServerImpl cs = getConfigServerImpl();
    // Load JspFactory
    CFMLFactory factory=contextes.get(real);
    if(factory==null) {
        factory=initContextes.get(real);
        if(factory==null) {
            factory=loadJSPFactory(cs,srvConfig,initContextes.size());
            initContextes.put(real,factory);
        }
        contextes.put(real,factory);
        try {
            String cp = req.getContextPath();
            if(cp==null)cp="";
            ((CFMLFactoryImpl)factory).setURL(new URL(req.getScheme(),req.getServerName(),req.getServerPort(),cp));
        }
        catch (MalformedURLException e) {
            e.printStackTrace();
        }
    }
    return factory;
}
@Override
public void service(HttpServlet servlet, HttpServletRequest req, HttpServletResponse rsp) throws ServletException, IOException {
    _service(servlet, req, rsp, Request.TYPE_LUCEE);
}
@Override
public void serviceCFML(HttpServlet servlet, HttpServletRequest req, HttpServletResponse rsp) throws ServletException, IOException {
    _service(servlet, req, rsp, Request.TYPE_CFML);
}
@Override
public void serviceRest(HttpServlet servlet, HttpServletRequest req, HttpServletResponse rsp) throws ServletException, IOException {
    _service(servlet, new HTTPServletRequestWrap(req), rsp, Request.TYPE_REST);
}
private void _service(HttpServlet servlet, HttpServletRequest req, HttpServletResponse rsp, short type) throws ServletException, IOException {
    CFMLFactoryImpl factory=(CFMLFactoryImpl) getCFMLFactory(servlet.getServletConfig(), req);
    // is Lucee dialect enabled?
    if(type==Request.TYPE_LUCEE) {
        if(!((ConfigImpl)factory.getConfig()).allowLuceeDialect()){
            try {
                PageContextImpl.notSupported();
            }
            catch (ApplicationException e) {
                throw new PageServletException(e);
            }
        }
    }
    PageContextImpl pc = factory.getPageContextImpl(servlet,req,rsp,null,false,-1,false,false,false,-1,true,false);
    try {
        Request r=new Request(pc,type);
        r.start();
        long ended=-1;
        do {
            SystemUtil.wait(Thread.currentThread(),1000);
            // done?
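            // request watchdog: the page runs in its own Request thread while this thread wakes up once a
            // second and (1) stops waiting as soon as the request is done, (2) terminates the page context once
            // the configured request timeout is exceeded but keeps waiting so the request thread can still report
            // its timeout error, and (3) gives up waiting if the thread is still blocked ten seconds after
            // termination; in every case the page context is released in the finally block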
if(r.isDone()) { //print.e("mas-done:"+System.currentTimeMillis()); break; } // reach request timeout else if(ended==-1 && (pc.getStartTime()+pc.getRequestTimeout())<System.currentTimeMillis()) { //print.e("req-time:"+System.currentTimeMillis()); CFMLFactoryImpl.terminate(pc,false); ended=System.currentTimeMillis(); // break; we do not break here, we give the thread itself the chance to end we need the exception output } // the thread itself seem blocked, so we release this thread else if(ended>-1 && ended+10000<=System.currentTimeMillis()) { //print.e("give-up:"+System.currentTimeMillis()); break; } } while(true); //print.e("done: "+System.currentTimeMillis()); } finally { factory.releaseLuceePageContext(pc,false); } } @Override public void serviceFile(HttpServlet servlet, HttpServletRequest req, HttpServletResponse rsp) throws ServletException, IOException { req=new HTTPServletRequestWrap(req); CFMLFactory factory=getCFMLFactory( servlet.getServletConfig(), req); ConfigWeb config = factory.getConfig(); PageSource ps = config.getPageSourceExisting(null, null, req.getServletPath(), false, true, true, false); //Resource res = ((ConfigWebImpl)config).getPhysicalResourceExistingX(null, null, req.getServletPath(), false, true, true); if(ps==null) { rsp.sendError(404); } else { Resource res = ps.getResource(); if(res==null) { rsp.sendError(404); } else { ReqRspUtil.setContentLength(rsp,res.length()); String mt = servlet.getServletContext().getMimeType(req.getServletPath()); if(!StringUtil.isEmpty(mt))ReqRspUtil.setContentType(rsp,mt); IOUtil.copy(res, rsp.getOutputStream(), true); } } } /*private String getContextList() { return List.arrayToList((String[])contextes.keySet().toArray(new String[contextes.size()]),", "); }*/ @Override public String getVersion() { return info.getVersion().toString(); } @Override public Info getInfo() { return info; } @Override public String getUpdateType() { return getConfigServerImpl().getUpdateType(); } @Override public URL getUpdateLocation() { return getConfigServerImpl().getUpdateLocation(); } @Override public Identification getIdentification() { return getConfigServerImpl().getIdentification(); } @Override public boolean can(int type, Password password) { return getConfigServerImpl().passwordEqual(password); } @Override public CFMLEngineFactory getCFMLEngineFactory() { return factory; } @Override public void serviceAMF(HttpServlet servlet, HttpServletRequest req, HttpServletResponse rsp) throws ServletException, IOException { throw new ServletException("AMFServlet is no longer supported, use BrokerServlet instead."); //req=new HTTPServletRequestWrap(req); //getCFMLFactory(servlet.getServletConfig(), req).getConfig().getAMFEngine().service(servlet,new HTTPServletRequestWrap(req),rsp); } @Override public void reset() { reset(null); } @Override public void reset(String configId) { SystemOut.printDate("reset CFML Engine"); getControler().close(); RetireOutputStreamFactory.close(); releaseCache(getConfigServerImpl()); CFMLFactoryImpl cfmlFactory; //ScopeContext scopeContext; try { Iterator<String> it = contextes.keySet().iterator(); while(it.hasNext()) { try { cfmlFactory=(CFMLFactoryImpl) contextes.get(it.next()); if(configId!=null && !configId.equals(cfmlFactory.getConfigWebImpl().getIdentification().getId())) continue; // scopes try{cfmlFactory.getScopeContext().clear();}catch(Throwable t){ExceptionUtil.rethrowIfNecessary(t);} // PageContext try{cfmlFactory.resetPageContext();}catch(Throwable t){ExceptionUtil.rethrowIfNecessary(t);} // Query Cache try{ 
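// clear the cache handler collections (query, function and include caches) of the config bound to
// the current thread's PageContext, if one is available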
PageContext pc = ThreadLocalPageContext.get(); if(pc!=null) { pc.getConfig().getCacheHandlerCollection(Config.CACHE_TYPE_QUERY,null).clear(pc); pc.getConfig().getCacheHandlerCollection(Config.CACHE_TYPE_FUNCTION,null).clear(pc); pc.getConfig().getCacheHandlerCollection(Config.CACHE_TYPE_INCLUDE,null).clear(pc); } //cfmlFactory.getDefaultQueryCache().clear(null); }catch(Throwable t){ExceptionUtil.rethrowIfNecessary(t);} // Gateway try{ cfmlFactory.getConfigWebImpl().getGatewayEngine().reset();}catch(Throwable t){ExceptionUtil.rethrowIfNecessary(t);} // Cache releaseCache(cfmlFactory.getConfigWebImpl()); } catch(Throwable t){ExceptionUtil.rethrowIfNecessary(t);} } } finally { // Controller controlerState.setActive(false); } } public static void releaseCache(Config config) { CacheUtil.releaseAll(config); if(config instanceof ConfigServer)CacheUtil.releaseAllApplication(); } @Override public Cast getCastUtil() { return CastImpl.getInstance(); } @Override public Operation getOperatonUtil() { return OperationImpl.getInstance(); } @Override public Decision getDecisionUtil() { return DecisionImpl.getInstance(); } @Override public Excepton getExceptionUtil() { return ExceptonImpl.getInstance(); } @Override public Object getJavaProxyUtil() { // FUTURE return JavaProxyUtil return new JavaProxyUtilImpl(); } @Override public Creation getCreationUtil() { return CreationImpl.getInstance(this); } @Override public IO getIOUtil() { return IOImpl.getInstance(); } @Override public Strings getStringUtil() { return StringsImpl.getInstance(); } @Override public Object getFDController() { engine.allowRequestTimeout(false); return new FDControllerImpl(engine,engine.getConfigServerImpl().getSerialNumber()); } public Map<String,CFMLFactory> getCFMLFactories() { return initContextes; } @Override public lucee.runtime.util.ResourceUtil getResourceUtil() { return ResourceUtilImpl.getInstance(); } @Override public lucee.runtime.util.HTTPUtil getHTTPUtil() { return HTTPUtilImpl.getInstance(); } @Override public PageContext getThreadPageContext() { return ThreadLocalPageContext.get(); } @Override public Config getThreadConfig() { return ThreadLocalPageContext.getConfig(); } @Override public void registerThreadPageContext(PageContext pc) { ThreadLocalPageContext.register(pc); } @Override public VideoUtil getVideoUtil() { return VideoUtilImpl.getInstance(); } @Override public ZipUtil getZipUtil() { return ZipUtilImpl.getInstance(); } /*public String getState() { return info.getStateAsString(); }*/ public void allowRequestTimeout(boolean allowRequestTimeout) { this.allowRequestTimeout=allowRequestTimeout; } public boolean allowRequestTimeout() { return allowRequestTimeout; } public boolean isRunning() { try{ CFMLEngine other = CFMLEngineFactory.getInstance(); // FUTURE patch, do better impl when changing loader if(other!=this && controlerState.active() && !(other instanceof CFMLEngineWrapper)) { SystemOut.printDate("CFMLEngine is still set to true but no longer valid, "+lucee.runtime.config.Constants.NAME+" disable this CFMLEngine."); controlerState.setActive(false); reset(); return false; } } catch(Throwable t) {ExceptionUtil.rethrowIfNecessary(t);} return controlerState.active(); } public boolean active() { return controlerState.active(); } public ControllerState getControllerState() { return controlerState; } @Override public void cli(Map<String, String> config, ServletConfig servletConfig) throws IOException,JspException,ServletException { ServletContext servletContext = servletConfig.getServletContext(); HTTPServletImpl 
servlet=new HTTPServletImpl(servletConfig, servletContext, servletConfig.getServletName()); // webroot String strWebroot=config.get("webroot"); if(StringUtil.isEmpty(strWebroot,true)) throw new IOException("missing webroot configuration"); Resource root=ResourcesImpl.getFileResourceProvider().getResource(strWebroot); root.mkdirs(); // serverName String serverName=config.get("server-name"); if(StringUtil.isEmpty(serverName,true))serverName="localhost"; // uri String strUri=config.get("uri"); if(StringUtil.isEmpty(strUri,true)) throw new IOException("missing uri configuration"); URI uri; try { uri = lucee.commons.net.HTTPUtil.toURI(strUri); } catch (URISyntaxException e) { throw Caster.toPageException(e); } // cookie Cookie[] cookies; String strCookie=config.get("cookie"); if(StringUtil.isEmpty(strCookie,true)) cookies=new Cookie[0]; else { Map<String,String> mapCookies=HTTPUtil.parseParameterList(strCookie,false,null); int index=0; cookies=new Cookie[mapCookies.size()]; Entry<String, String> entry; Iterator<Entry<String, String>> it = mapCookies.entrySet().iterator(); Cookie c; while(it.hasNext()){ entry = it.next(); c=ReqRspUtil.toCookie(entry.getKey(),entry.getValue(),null); if(c!=null)cookies[index++]=c; else throw new IOException("cookie name ["+entry.getKey()+"] is invalid"); } } // header Pair[] headers=new Pair[0]; // parameters Pair[] parameters=new Pair[0]; // attributes StructImpl attributes = new StructImpl(); ByteArrayOutputStream os=new ByteArrayOutputStream(); HttpServletRequestDummy req=new HttpServletRequestDummy( root,serverName,uri.getPath(),uri.getQuery(),cookies,headers,parameters,attributes,null,null); req.setProtocol("CLI/1.0"); HttpServletResponse rsp=new HttpServletResponseDummy(os); serviceCFML(servlet, req, rsp); String res = os.toString(ReqRspUtil.getCharacterEncoding(null,rsp).name()); System.out.println(res); } @Override public ServletConfig[] getServletConfigs(){ return servletConfigs.toArray(new ServletConfig[servletConfigs.size()]); } @Override public long uptime() { return uptime; } /*public Bundle getCoreBundle() { return bundle; }*/ @Override public BundleCollection getBundleCollection() { return bundleCollection; } @Override public BundleContext getBundleContext() { return bundleCollection.getBundleContext(); } @Override public ClassUtil getClassUtil() { return new ClassUtilImpl(); } @Override public ListUtil getListUtil() { return new ListUtilImpl(); } @Override public DBUtil getDBUtil() { return new DBUtilImpl(); } @Override public ORMUtil getORMUtil() { return new ORMUtilImpl(); } @Override public TemplateUtil getTemplateUtil() { return new TemplateUtilImpl(); } @Override public HTMLUtil getHTMLUtil() { return new HTMLUtilImpl(); } @Override public ScriptEngineFactory getScriptEngineFactory(int dialect) { if(dialect==CFMLEngine.DIALECT_CFML) { if(cfmlScriptEngine==null) cfmlScriptEngine=new ScriptEngineFactoryImpl(this,false,dialect); return cfmlScriptEngine; } if(luceeScriptEngine==null) luceeScriptEngine=new ScriptEngineFactoryImpl(this,false,dialect); return luceeScriptEngine; } @Override public ScriptEngineFactory getTagEngineFactory(int dialect) { if(dialect==CFMLEngine.DIALECT_CFML) { if(cfmlTagEngine==null) cfmlTagEngine=new ScriptEngineFactoryImpl(this,true,dialect); return cfmlTagEngine; } if(luceeTagEngine==null) luceeTagEngine=new ScriptEngineFactoryImpl(this,true,dialect); return luceeTagEngine; } @Override public PageContext createPageContext(File contextRoot, String host, String scriptName, String queryString , Cookie[] 
cookies,Map<String, Object> headers,Map<String, String> parameters, Map<String, Object> attributes, OutputStream os, long timeout, boolean register) throws ServletException { // FUTURE add first 2 arguments to interface return PageContextUtil.getPageContext(null,null,contextRoot,host, scriptName, queryString, cookies, headers, parameters, attributes, os,register,timeout,false); } @Override public ConfigWeb createConfig(File contextRoot,String host, String scriptName) throws ServletException { // TODO do a mored rect approach PageContext pc = null; try{ // FUTURE add first 2 arguments to interface pc = PageContextUtil.getPageContext(null,null,contextRoot,host,scriptName, null, null, null, null, null, null,false,-1,false); return pc.getConfig(); } finally{ pc.getConfig().getFactory().releaseLuceePageContext(pc, false); } } @Override public void releasePageContext(PageContext pc, boolean unregister) { PageContextUtil.releasePageContext(pc,unregister); } @Override public lucee.runtime.util.SystemUtil getSystemUtil() { return new SystemUtilImpl(); } @Override public TimeZone getThreadTimeZone() { return ThreadLocalPageContext.getTimeZone(); } @Override public Instrumentation getInstrumentation() { return InstrumentationFactory.getInstrumentation(ThreadLocalPageContext.getConfig()); } public Controler getControler() { return controler; } public void onStart(ConfigImpl config, boolean reload) { String context=config instanceof ConfigWeb?"Web":"Server"; if(!ThreadLocalPageContext.callOnStart.get()) return; Resource listenerTemplateLucee = config.getConfigDir().getRealResource("context/"+context+"."+lucee.runtime.config.Constants.getLuceeComponentExtension()); Resource listenerTemplateCFML = config.getConfigDir().getRealResource("context/"+context+"."+lucee.runtime.config.Constants.getCFMLComponentExtension()); // dialect int dialect; if(listenerTemplateLucee.isFile()) dialect=CFMLEngine.DIALECT_LUCEE; else if(listenerTemplateCFML.isFile()) dialect=CFMLEngine.DIALECT_CFML; else return; // we do not wait for this new OnStart(config, dialect,context, reload).start(); } private class OnStart extends Thread { private ConfigImpl config; private int dialect; private boolean reload; private String context; public OnStart(ConfigImpl config, int dialect, String context, boolean reload) { this.config=config; this.dialect=dialect; this.context=context; this.reload=reload; } public void run() { boolean isWeb=config instanceof ConfigWeb; String id=CreateUniqueId.invoke(); final String requestURI="/"+(isWeb?"lucee":"lucee-server")+"/"+context+"."+(dialect==CFMLEngine.DIALECT_LUCEE?lucee.runtime.config.Constants.getLuceeComponentExtension():lucee.runtime.config.Constants.getCFMLComponentExtension()); //PageContext oldPC = ThreadLocalPageContext.get(); PageContext pc=null; try { String remotePersisId; try { remotePersisId=Md5.getDigestAsString(requestURI+id); } catch (IOException e) { throw Caster.toPageException(e); } String queryString="method=on"+context+"Start&reload="+reload+"&"+ComponentPageImpl.REMOTE_PERSISTENT_ID+"="+remotePersisId; if(config instanceof ConfigWeb) { Pair[] headers = new Pair[]{new Pair<String,Object>("AMF-Forward","true")}; Struct attrs=new StructImpl(); attrs.setEL("client", "lucee-listener-1-0"); pc = ThreadUtil.createPageContext( (ConfigWeb)config, DevNullOutputStream.DEV_NULL_OUTPUT_STREAM, "localhost", requestURI,queryString, new Cookie[0], headers,null, new Pair[0], attrs,true,Long.MAX_VALUE); } else { Map<String, Object> headers=new HashMap<String, Object>(); 
headers.put("AMF-Forward","true"); Map<String, Object> attrs=new HashMap<String, Object>(); attrs.put("client", "lucee-listener-1-0"); File root = new File(config.getRootDirectory().getAbsolutePath()); CreationImpl cr =(CreationImpl)CreationImpl.getInstance(engine); ServletConfig sc = cr.createServletConfig(root, null, null); pc = PageContextUtil.getPageContext( config,sc,root, "localhost", requestURI, queryString, new Cookie[0], headers, null, attrs, DevNullOutputStream.DEV_NULL_OUTPUT_STREAM, true,Long.MAX_VALUE, Caster.toBooleanValue(SystemUtil.getSystemPropOrEnvVar("lucee.ignore.scopes", null),false)); } if(dialect==CFMLEngine.DIALECT_LUCEE) pc.execute(requestURI, true,false); else pc.executeCFML(requestURI, true,false); } catch(Throwable t) { // we simply ignore exceptions, if the template itself throws an error it will be handled by the error listener ExceptionUtil.rethrowIfNecessary(t); } finally { CFMLFactory f = pc.getConfig().getFactory(); f.releaseLuceePageContext(pc,true); //ThreadLocalPageContext.register(oldPC); } } } }
package lucee.runtime.engine; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.lang.instrument.Instrumentation; import java.net.MalformedURLException; import java.net.URI; import java.net.URISyntaxException; import java.net.URL; import java.security.CodeSource; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Properties; import java.util.Set; import java.util.TimeZone; import java.util.zip.ZipEntry; import java.util.zip.ZipInputStream; import javax.script.ScriptEngineFactory; import javax.servlet.ServletConfig; import javax.servlet.ServletContext; import javax.servlet.ServletException; import javax.servlet.http.Cookie; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.jsp.JspException; import lucee.Info; import lucee.cli.servlet.HTTPServletImpl; import lucee.commons.collection.MapFactory; import lucee.commons.io.CharsetUtil; import lucee.commons.io.FileUtil; import lucee.commons.io.IOUtil; import lucee.commons.io.SystemUtil; import lucee.commons.io.compress.CompressUtil; import lucee.commons.io.log.Log; import lucee.commons.io.res.Resource; import lucee.commons.io.res.ResourceProvider; import lucee.commons.io.res.ResourcesImpl; import lucee.commons.io.res.util.ResourceUtil; import lucee.commons.io.res.util.ResourceUtilImpl; import lucee.commons.io.retirement.RetireOutputStreamFactory; import lucee.commons.lang.Pair; import lucee.commons.lang.StringUtil; import lucee.commons.lang.SystemOut; import lucee.commons.lang.types.RefBoolean; import lucee.commons.lang.types.RefBooleanImpl; import lucee.commons.net.HTTPUtil; import lucee.intergral.fusiondebug.server.FDControllerImpl; import lucee.loader.engine.CFMLEngine; import lucee.loader.engine.CFMLEngineFactory; import lucee.loader.engine.CFMLEngineFactorySupport; import lucee.loader.engine.CFMLEngineWrapper; import lucee.loader.osgi.BundleCollection; import lucee.loader.util.Util; import lucee.runtime.CFMLFactory; import lucee.runtime.CFMLFactoryImpl; import lucee.runtime.PageContext; import lucee.runtime.PageContextImpl; import lucee.runtime.PageSource; import lucee.runtime.config.Config; import lucee.runtime.config.ConfigImpl; import lucee.runtime.config.ConfigServer; import lucee.runtime.config.ConfigServerImpl; import lucee.runtime.config.ConfigWeb; import lucee.runtime.config.ConfigWebImpl; import lucee.runtime.config.DeployHandler; import lucee.runtime.config.Identification; import lucee.runtime.config.Password; import lucee.runtime.config.XMLConfigAdmin; import lucee.runtime.config.XMLConfigFactory; import lucee.runtime.config.XMLConfigServerFactory; import lucee.runtime.config.XMLConfigWebFactory; import lucee.runtime.exp.ApplicationException; import lucee.runtime.exp.PageException; import lucee.runtime.exp.PageRuntimeException; import lucee.runtime.exp.PageServletException; import lucee.runtime.extension.ExtensionDefintion; import lucee.runtime.extension.RHExtension; import lucee.runtime.instrumentation.InstrumentationFactory; import lucee.runtime.jsr223.ScriptEngineFactoryImpl; import lucee.runtime.net.http.HTTPServletRequestWrap; import lucee.runtime.net.http.HttpServletRequestDummy; import lucee.runtime.net.http.HttpServletResponseDummy; import 
lucee.runtime.net.http.ReqRspUtil; import lucee.runtime.op.CastImpl; import lucee.runtime.op.Caster; import lucee.runtime.op.CreationImpl; import lucee.runtime.op.DecisionImpl; import lucee.runtime.op.ExceptonImpl; import lucee.runtime.op.IOImpl; import lucee.runtime.op.OperationImpl; import lucee.runtime.op.StringsImpl; import lucee.runtime.type.StructImpl; import lucee.runtime.util.Cast; import lucee.runtime.util.ClassUtil; import lucee.runtime.util.ClassUtilImpl; import lucee.runtime.util.Creation; import lucee.runtime.util.DBUtil; import lucee.runtime.util.DBUtilImpl; import lucee.runtime.util.Decision; import lucee.runtime.util.Excepton; import lucee.runtime.util.HTMLUtil; import lucee.runtime.util.HTMLUtilImpl; import lucee.runtime.util.HTTPUtilImpl; import lucee.runtime.util.IO; import lucee.runtime.util.ListUtil; import lucee.runtime.util.ListUtilImpl; import lucee.runtime.util.ORMUtil; import lucee.runtime.util.ORMUtilImpl; import lucee.runtime.util.Operation; import lucee.runtime.util.PageContextUtil; import lucee.runtime.util.Strings; import lucee.runtime.util.SystemUtilImpl; import lucee.runtime.util.TemplateUtil; import lucee.runtime.util.TemplateUtilImpl; import lucee.runtime.util.ZipUtil; import lucee.runtime.util.ZipUtilImpl; import lucee.runtime.video.VideoUtil; import lucee.runtime.video.VideoUtilImpl; import org.apache.felix.framework.Felix; import org.osgi.framework.BundleContext; import org.osgi.framework.Constants; //import com.intergral.fusiondebug.server.FDControllerFactory; /** * The CFMl Engine */ public final class CFMLEngineImpl implements CFMLEngine { private static Map<String,CFMLFactory> initContextes=MapFactory.<String,CFMLFactory>getConcurrentMap(); private static Map<String,CFMLFactory> contextes=MapFactory.<String,CFMLFactory>getConcurrentMap(); private ConfigServerImpl configServer=null; private static CFMLEngineImpl engine=null; private CFMLEngineFactory factory; private final RefBoolean controlerState=new RefBooleanImpl(true); private boolean allowRequestTimeout=true; private Monitor monitor; private List<ServletConfig> servletConfigs=new ArrayList<ServletConfig>(); private long uptime; private InfoImpl info; private BundleCollection bundleCollection; private ScriptEngineFactory cfmlScriptEngine; private ScriptEngineFactory cfmlTagEngine; private ScriptEngineFactory luceeScriptEngine; private ScriptEngineFactory luceeTagEngine; private Controler controler; //private static CFMLEngineImpl engine=new CFMLEngineImpl(); private CFMLEngineImpl(CFMLEngineFactory factory, BundleCollection bc) { this.factory=factory; this.bundleCollection=bc; // happen when Lucee is loaded directly if(bundleCollection==null) { try{ Properties prop = InfoImpl.getDefaultProperties(null); // read the config from default.properties Map<String,Object> config=new HashMap<String, Object>(); Iterator<Entry<Object, Object>> it = prop.entrySet().iterator(); Entry<Object, Object> e; String k; while(it.hasNext()){ e = it.next(); k=(String) e.getKey(); if(!k.startsWith("org.") && !k.startsWith("felix.")) continue; config.put(k, CFMLEngineFactorySupport.removeQuotes((String)e.getValue(),true)); } config.put( Constants.FRAMEWORK_BOOTDELEGATION, "lucee.*"); Felix felix = factory.getFelix(factory.getResourceRoot(),config); bundleCollection=new BundleCollection(felix, felix, null); //bundleContext=bundleCollection.getBundleContext(); } catch (Throwable t) { throw new RuntimeException(t); } } this.info=new InfoImpl(bundleCollection==null?null:bundleCollection.core); 
Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader()); // MUST better location for this int doNew; Resource configDir=null; try { configDir = getSeverContextConfigDirectory(factory); doNew=XMLConfigFactory.doNew(this,configDir, true); } catch (IOException e) { throw new PageRuntimeException(e); } CFMLEngineFactory.registerInstance((this));// patch, not really good but it works ConfigServerImpl cs = getConfigServerImpl(); controler = new Controler(cs,initContextes,5*1000,controlerState); controler.setDaemon(true); controler.setPriority(Thread.MIN_PRIORITY); boolean disabled=Caster.toBooleanValue(SystemUtil.getSetting(SystemUtil.SETTING_CONTROLLER_DISABLED,null),false); if (!disabled) { // start the controller SystemOut.printDate(SystemUtil.getPrintWriter(SystemUtil.OUT), "Start CFML Controller"); controler.start(); } // copy bundled extension to local extension directory (if never done before) deployBundledExtension(cs); // required extensions boolean isRe=configDir==null?false:XMLConfigFactory.isRequiredExtension(this, configDir); // if we have a "fresh" install Set<ExtensionDefintion> extensions; if(doNew==XMLConfigFactory.NEW_FRESH || doNew==XMLConfigFactory.NEW_FROM4) { List<ExtensionDefintion> ext = info.getRequiredExtension(); extensions = toSet(null,ext); SystemOut.print(SystemUtil.getPrintWriter(SystemUtil.OUT), "Install Extensions ("+doNew+"):"+toList(extensions)); } // if we have an update we update the extension that re installed and we have an older version as defined in the manifest else if(doNew==XMLConfigFactory.NEW_MINOR || !isRe) { extensions = new HashSet<ExtensionDefintion>(); Iterator<ExtensionDefintion> it = info.getRequiredExtension().iterator(); ExtensionDefintion ed; RHExtension rhe; while(it.hasNext()){ ed = it.next(); if(ed.getVersion()==null) continue; // no version definition no update try{ rhe = XMLConfigAdmin.hasRHExtensions(cs, new ExtensionDefintion(ed.getId())); if(rhe==null) continue; // not installed we do not update // if the installed is older than the one defined in the manifest we update (if possible) if(!rhe.getVersion().equals(ed.getVersion())) extensions.add(ed); } catch(Throwable t){ t.printStackTrace(); // fails we update extensions.add(ed); } } } else { extensions = new HashSet<ExtensionDefintion>(); } // XMLConfigAdmin.hasRHExtensions(ci, ed) // install extension defined String extensionIds=System.getProperty("lucee-extensions"); if(!StringUtil.isEmpty(extensionIds,true)) { List<ExtensionDefintion> _extensions = RHExtension.toExtensionDefinitions(extensionIds); extensions=toSet(extensions,_extensions); } if(extensions.size()>0) { boolean sucess=DeployHandler.deployExtensions( cs, extensions.toArray(new ExtensionDefintion[extensions.size()]), cs.getLog("deploy", true) ); if(sucess && configDir!=null)XMLConfigFactory.updateRequiredExtension(this, configDir); } touchMonitor(cs); this.uptime=System.currentTimeMillis(); //this.config=config; } public static Set<ExtensionDefintion> toSet(Set<ExtensionDefintion> set, List<ExtensionDefintion> list) { HashMap<String, ExtensionDefintion> map=new HashMap<String, ExtensionDefintion>(); ExtensionDefintion ed; // set > map if(set!=null) { Iterator<ExtensionDefintion> it = set.iterator(); while(it.hasNext()){ ed = it.next(); map.put(ed.toString(),ed); } } // list > map if(list!=null) { Iterator<ExtensionDefintion> it = list.iterator(); while(it.hasNext()){ ed = it.next(); map.put(ed.toString(),ed); } } // to Set HashSet<ExtensionDefintion> rtn = new HashSet<ExtensionDefintion>(); 
Iterator<ExtensionDefintion> it = map.values().iterator(); while(it.hasNext()){ ed = it.next(); rtn.add(ed); } return rtn; } public static String toList(Set<ExtensionDefintion> set) { StringBuilder sb=new StringBuilder(); Iterator<ExtensionDefintion> it = set.iterator(); ExtensionDefintion ed; while(it.hasNext()){ ed = it.next(); if(sb.length()>0) sb.append(", "); sb.append(ed.toString()); } return sb.toString(); } private void deployBundledExtension(ConfigServerImpl cs) { Resource dir = cs.getLocalExtensionProviderDirectory(); List<RHExtension> existing = DeployHandler.getLocalExtensions(cs); String sub="extensions/"; // get the index ClassLoader cl=CFMLEngineFactory.getInstance().getCFMLEngineFactory().getClass().getClassLoader(); InputStream is = cl.getResourceAsStream("extensions/.index"); if(is==null)is = cl.getResourceAsStream("/extensions/.index"); if(is==null) return; Log log = cs.getLog("deploy"); try { String index=IOUtil.toString(is, CharsetUtil.UTF8); String[] names = lucee.runtime.type.util.ListUtil.listToStringArray(index, ';'); String name; Resource temp=null; RHExtension rhe,exist; Iterator<RHExtension> it; for(int i=0;i<names.length;i++){ name=names[i]; if(StringUtil.isEmpty(name,true)) continue; name=name.trim(); is = cl.getResourceAsStream("extensions/"+name); if(is==null)is = cl.getResourceAsStream("/extensions/"+name); if(is==null) { log.error("extract-extension", "could not found extension ["+name+"] defined in the index in the lucee.jar"); continue; } try { temp=SystemUtil.getTempFile("lex", true); Util.copy(is, temp.getOutputStream(),false,true); rhe = new RHExtension(cs, temp, false); boolean alreadyExists=false; it = existing.iterator(); while(it.hasNext()){ exist = it.next(); if(exist.equals(rhe)) { alreadyExists=true; break; } } if(!alreadyExists) { temp.moveTo(dir.getRealResource(name)); log.info("extract-extension", "added ["+name+"] to ["+dir+"]"); } } finally { if(temp!=null && temp.exists())temp.delete(); } } } catch(Throwable t){ log.error("extract-extension", t); } return; } private void deployBundledExtensionZip(ConfigServerImpl cs) { Resource dir = cs.getLocalExtensionProviderDirectory(); List<RHExtension> existing = DeployHandler.getLocalExtensions(cs); String sub="extensions/"; // MUST this does not work on windows! 
we need to add an index ZipEntry entry; ZipInputStream zis = null; try { CodeSource src = CFMLEngineFactory.class.getProtectionDomain().getCodeSource(); if (src == null) return; URL loc = src.getLocation(); zis=new ZipInputStream(loc.openStream()); String path,name; int index; Resource temp; RHExtension rhe; Iterator<RHExtension> it; RHExtension exist; while ((entry = zis.getNextEntry())!= null) { path = entry.getName(); if(path.startsWith(sub) && path.endsWith(".lex")) { // ignore non lex files or file from else where index=path.lastIndexOf('/')+1; if(index==sub.length()) { // ignore sub directories name=path.substring(index); temp=null; try { temp=SystemUtil.getTempFile("lex", true); Util.copy(zis, temp.getOutputStream(),false,true); rhe = new RHExtension(cs, temp, false); boolean alreadyExists=false; it = existing.iterator(); while(it.hasNext()){ exist = it.next(); if(exist.equals(rhe)) { alreadyExists=true; break; } } if(!alreadyExists) { temp.moveTo(dir.getRealResource(name)); } } finally { if(temp!=null && temp.exists())temp.delete(); } } } zis.closeEntry(); } } catch(Throwable t){ t.printStackTrace();// TODO log this } finally { Util.closeEL(zis); } return; } public void touchMonitor(ConfigServerImpl cs) { if(monitor!=null && monitor.isAlive()) return; monitor = new Monitor(cs,controlerState); monitor.setDaemon(true); monitor.setPriority(Thread.MIN_PRIORITY); monitor.start(); } /** * get singelton instance of the CFML Engine * @param factory * @return CFMLEngine */ public static synchronized CFMLEngine getInstance(CFMLEngineFactory factory,BundleCollection bc) { if(engine==null) { if(SystemUtil.getLoaderVersion()<5.8D) throw new RuntimeException("You need to update your lucee.jar to run this version, you can download the latest jar from http://download.lucee.org."); engine=new CFMLEngineImpl(factory,bc); } return engine; } /** * get singelton instance of the CFML Engine, throwsexception when not already init * @param factory * @return CFMLEngine */ public static synchronized CFMLEngine getInstance() throws ServletException { if(engine!=null) return engine; throw new ServletException("CFML Engine is not loaded"); } @Override public void addServletConfig(ServletConfig config) throws ServletException { servletConfigs.add(config); String real=ReqRspUtil.getRootPath(config.getServletContext()); if(!initContextes.containsKey(real)) { CFMLFactory jspFactory = loadJSPFactory(getConfigServerImpl(),config,initContextes.size()); initContextes.put(real,jspFactory); } } @Override public ConfigServer getConfigServer(Password password) throws PageException { getConfigServerImpl().checkAccess(password); return configServer; } @Override public ConfigServer getConfigServer(String key, long timeNonce) throws PageException { getConfigServerImpl().checkAccess(key,timeNonce); return configServer; } public void setConfigServerImpl(ConfigServerImpl cs) { this.configServer=cs; } private ConfigServerImpl getConfigServerImpl() { if(configServer==null) { try { Resource context = getSeverContextConfigDirectory(factory); //CFMLEngineFactory.registerInstance(this);// patch, not really good but it works configServer=XMLConfigServerFactory.newInstance( this, initContextes, contextes, context); } catch (Exception e) { e.printStackTrace(); } } return configServer; } private Resource getSeverContextConfigDirectory(CFMLEngineFactory factory) throws IOException { ResourceProvider frp = ResourcesImpl.getFileResourceProvider(); return frp.getResource(factory.getResourceRoot().getAbsolutePath()).getRealResource("context"); 
} private CFMLFactoryImpl loadJSPFactory(ConfigServerImpl configServer, ServletConfig sg, int countExistingContextes) throws ServletException { try { // Load Config RefBoolean isCustomSetting=new RefBooleanImpl(); Resource configDir=getConfigDirectory(sg,configServer,countExistingContextes,isCustomSetting); CFMLFactoryImpl factory=new CFMLFactoryImpl(this,sg); ConfigWebImpl config=XMLConfigWebFactory.newInstance(this,factory,configServer,configDir,isCustomSetting.toBooleanValue(),sg); factory.setConfig(config); return factory; } catch (Exception e) { ServletException se= new ServletException(e.getMessage()); se.setStackTrace(e.getStackTrace()); throw se; } } /** * loads Configuration File from System, from init Parameter from web.xml * @param sg * @param configServer * @param countExistingContextes * @return return path to directory */ private Resource getConfigDirectory(ServletConfig sg, ConfigServerImpl configServer, int countExistingContextes, RefBoolean isCustomSetting) throws PageServletException { isCustomSetting.setValue(true); ServletContext sc=sg.getServletContext(); String strConfig=sg.getInitParameter("configuration"); if(StringUtil.isEmpty(strConfig))strConfig=sg.getInitParameter("lucee-web-directory"); if(StringUtil.isEmpty(strConfig))strConfig=System.getProperty("lucee.web.dir"); if(StringUtil.isEmpty(strConfig)) { isCustomSetting.setValue(false); strConfig="{web-root-directory}/WEB-INF/lucee/"; } // only for backward compatibility else if(strConfig.startsWith("/WEB-INF/lucee/"))strConfig="{web-root-directory}"+strConfig; strConfig=StringUtil.removeQuotes(strConfig,true); // static path is not allowed if(countExistingContextes>1 && strConfig!=null && strConfig.indexOf('{')==-1){ String text="static path ["+strConfig+"] for servlet init param [lucee-web-directory] is not allowed, path must use a web-context specific placeholder."; System.err.println(text); throw new PageServletException(new ApplicationException(text)); } strConfig=SystemUtil.parsePlaceHolder(strConfig,sc,configServer.getLabels()); ResourceProvider frp = ResourcesImpl.getFileResourceProvider(); Resource root = frp.getResource(ReqRspUtil.getRootPath(sc)); Resource res; Resource configDir=ResourceUtil.createResource(res=root.getRealResource(strConfig), FileUtil.LEVEL_PARENT_FILE,FileUtil.TYPE_DIR); if(configDir==null) { configDir=ResourceUtil.createResource(res=frp.getResource(strConfig), FileUtil.LEVEL_GRAND_PARENT_FILE,FileUtil.TYPE_DIR); } if(configDir==null && !isCustomSetting.toBooleanValue()) { try { res.createDirectory(true); configDir=res; } catch (IOException e) { throw new PageServletException(Caster.toPageException(e)); } } if(configDir==null) { throw new PageServletException(new ApplicationException("path ["+strConfig+"] is invalid")); } if(!configDir.exists() || ResourceUtil.isEmptyDirectory(configDir, null)){ Resource railoRoot; // there is a railo directory if(configDir.getName().equals("lucee") && (railoRoot=configDir.getParentResource().getRealResource("railo")).isDirectory()) { try { copyRecursiveAndRename(railoRoot,configDir); } catch (IOException e) { try { configDir.createDirectory(true); } catch (IOException ioe) {} return configDir; } // zip the railo-server di and delete it (optional) try { Resource p=railoRoot.getParentResource(); CompressUtil.compress(CompressUtil.FORMAT_ZIP, railoRoot, p.getRealResource("railo-web-context-old.zip"), false, -1); ResourceUtil.removeEL(railoRoot, true); } catch(Throwable t){t.printStackTrace();} } else { try { configDir.createDirectory(true); } catch 
(IOException e) {} } } return configDir; } private File getDirectoryByProp(String name) { String value=System.getProperty(name); if(Util.isEmpty(value,true)) return null; File dir=new File(value); dir.mkdirs(); if (dir.isDirectory()) return dir; return null; } private static void copyRecursiveAndRename(Resource src,Resource trg) throws IOException { if(!src.exists()) return ; if(src.isDirectory()) { if(!trg.exists())trg.mkdirs(); Resource[] files = src.listResources(); for(int i=0;i<files.length;i++) { copyRecursiveAndRename(files[i],trg.getRealResource(files[i].getName())); } } else if(src.isFile()) { if(trg.getName().endsWith(".rc") || trg.getName().startsWith(".")) { return; } if(trg.getName().equals("railo-web.xml.cfm")) { trg=trg.getParentResource().getRealResource("lucee-web.xml.cfm"); // cfLuceeConfiguration InputStream is = src.getInputStream(); OutputStream os = trg.getOutputStream(); try{ String str=Util.toString(is); str=str.replace("<cfRailoConfiguration", "<!-- copy from Railo context --><cfLuceeConfiguration"); str=str.replace("</cfRailoConfiguration", "</cfLuceeConfiguration"); str=str.replace("<railo-configuration", "<lucee-configuration"); str=str.replace("</railo-configuration", "</lucee-configuration"); str=str.replace("{railo-config}", "{lucee-config}"); str=str.replace("{railo-server}", "{lucee-server}"); str=str.replace("{railo-web}", "{lucee-web}"); str=str.replace("\"railo.commons.", "\"lucee.commons."); str=str.replace("\"railo.runtime.", "\"lucee.runtime."); str=str.replace("\"railo.cfx.", "\"lucee.cfx."); str=str.replace("/railo-context.ra", "/lucee-context.lar"); str=str.replace("/railo-context", "/lucee"); str=str.replace("railo-server-context", "lucee-server"); str=str.replace("http: str=str.replace("http: ByteArrayInputStream bais = new ByteArrayInputStream(str.getBytes()); try { Util.copy(bais, os); bais.close(); } finally { Util.closeEL(is, os); } } finally { Util.closeEL(is,os); } return; } InputStream is = src.getInputStream(); OutputStream os = trg.getOutputStream(); try{ Util.copy(is, os); } finally { Util.closeEL(is, os); } } } @Override public CFMLFactory getCFMLFactory(ServletConfig srvConfig,HttpServletRequest req) throws ServletException { ServletContext srvContext = srvConfig.getServletContext(); String real=ReqRspUtil.getRootPath(srvContext); ConfigServerImpl cs = getConfigServerImpl(); // Load JspFactory CFMLFactory factory=contextes.get(real); if(factory==null) { factory=initContextes.get(real); if(factory==null) { factory=loadJSPFactory(cs,srvConfig,initContextes.size()); initContextes.put(real,factory); } contextes.put(real,factory); try { String cp = req.getContextPath(); if(cp==null)cp=""; ((CFMLFactoryImpl)factory).setURL(new URL(req.getScheme(),req.getServerName(),req.getServerPort(),cp)); } catch (MalformedURLException e) { e.printStackTrace(); } } return factory; } @Override public void service(HttpServlet servlet, HttpServletRequest req, HttpServletResponse rsp) throws ServletException, IOException { CFMLFactory factory=getCFMLFactory(servlet.getServletConfig(), req); // is Lucee dialect enabled? 
if(!((ConfigImpl)factory.getConfig()).allowLuceeDialect()){ try { PageContextImpl.notSupported(); } catch (ApplicationException e) { throw new PageServletException(e); } } PageContext pc = factory.getLuceePageContext(servlet,req,rsp,null,false,-1,false,true,-1,true,false); ThreadQueue queue = factory.getConfig().getThreadQueue(); queue.enter(pc); try { pc.execute(pc.getHttpServletRequest().getServletPath(),false,true); } catch (PageException pe) { throw new PageServletException(pe); } finally { queue.exit(pc); factory.releaseLuceePageContext(pc,true); //FDControllerFactory.notifyPageComplete(); } } @Override public void serviceCFML(HttpServlet servlet, HttpServletRequest req, HttpServletResponse rsp) throws ServletException, IOException { CFMLFactory factory=getCFMLFactory(servlet.getServletConfig(), req); PageContext pc = factory.getLuceePageContext(servlet,req,rsp,null,false,-1,false,true,-1,true,false); ThreadQueue queue = factory.getConfig().getThreadQueue(); queue.enter(pc); try { pc.executeCFML(pc.getHttpServletRequest().getServletPath(),false,true); } catch (PageException pe) { throw new PageServletException(pe); } finally { queue.exit(pc); factory.releaseLuceePageContext(pc,true); //FDControllerFactory.notifyPageComplete(); } } @Override public void serviceFile(HttpServlet servlet, HttpServletRequest req, HttpServletResponse rsp) throws ServletException, IOException { req=new HTTPServletRequestWrap(req); CFMLFactory factory=getCFMLFactory( servlet.getServletConfig(), req); ConfigWeb config = factory.getConfig(); PageSource ps = config.getPageSourceExisting(null, null, req.getServletPath(), false, true, true, false); //Resource res = ((ConfigWebImpl)config).getPhysicalResourceExistingX(null, null, req.getServletPath(), false, true, true); if(ps==null) { rsp.sendError(404); } else { Resource res = ps.getResource(); if(res==null) { rsp.sendError(404); } else { ReqRspUtil.setContentLength(rsp,res.length()); String mt = servlet.getServletContext().getMimeType(req.getServletPath()); if(!StringUtil.isEmpty(mt))ReqRspUtil.setContentType(rsp,mt); IOUtil.copy(res, rsp.getOutputStream(), true); } } } @Override public void serviceRest(HttpServlet servlet, HttpServletRequest req, HttpServletResponse rsp) throws ServletException, IOException { req=new HTTPServletRequestWrap(req); CFMLFactory factory=getCFMLFactory(servlet.getServletConfig(), req); PageContext pc = factory.getLuceePageContext(servlet,req,rsp,null,false,-1,false,true,-1,true,false); ThreadQueue queue = factory.getConfig().getThreadQueue(); queue.enter(pc); try { pc.executeRest(pc.getHttpServletRequest().getServletPath(),false); } catch (PageException pe) { throw new PageServletException(pe); } finally { queue.exit(pc); factory.releaseLuceePageContext(pc,true); //FDControllerFactory.notifyPageComplete(); } } /*private String getContextList() { return List.arrayToList((String[])contextes.keySet().toArray(new String[contextes.size()]),", "); }*/ @Override public String getVersion() { return info.getVersion().toString(); } @Override public Info getInfo() { return info; } @Override public String getUpdateType() { return getConfigServerImpl().getUpdateType(); } @Override public URL getUpdateLocation() { return getConfigServerImpl().getUpdateLocation(); } @Override public Identification getIdentification() { return getConfigServerImpl().getIdentification(); } @Override public boolean can(int type, Password password) { return getConfigServerImpl().passwordEqual(password); } @Override public CFMLEngineFactory getCFMLEngineFactory() { 
return factory; } @Override public void serviceAMF(HttpServlet servlet, HttpServletRequest req, HttpServletResponse rsp) throws ServletException, IOException { req=new HTTPServletRequestWrap(req); getCFMLFactory(servlet.getServletConfig(), req) .getConfig().getAMFEngine().service(servlet,new HTTPServletRequestWrap(req),rsp); } @Override public void reset() { reset(null); } @Override public void reset(String configId) { getControler().close(); RetireOutputStreamFactory.close(); CFMLFactoryImpl cfmlFactory; //ScopeContext scopeContext; try { Iterator<String> it = contextes.keySet().iterator(); while(it.hasNext()) { try { cfmlFactory=(CFMLFactoryImpl) contextes.get(it.next()); if(configId!=null && !configId.equals(cfmlFactory.getConfigWebImpl().getIdentification().getId())) continue; // scopes try{cfmlFactory.getScopeContext().clear();}catch(Throwable t){t.printStackTrace();} // PageContext try{cfmlFactory.resetPageContext();}catch(Throwable t){t.printStackTrace();} // Query Cache try{ PageContext pc = ThreadLocalPageContext.get(); if(pc!=null) { pc.getConfig().getCacheHandlerCollection(Config.CACHE_TYPE_QUERY,null).clear(pc); pc.getConfig().getCacheHandlerCollection(Config.CACHE_TYPE_FUNCTION,null).clear(pc); pc.getConfig().getCacheHandlerCollection(Config.CACHE_TYPE_INCLUDE,null).clear(pc); } //cfmlFactory.getDefaultQueryCache().clear(null); }catch(Throwable t){t.printStackTrace();} // Gateway try{ cfmlFactory.getConfigWebImpl().getGatewayEngine().reset();}catch(Throwable t){t.printStackTrace();} } catch(Throwable t){ t.printStackTrace(); } } } finally { // Controller controlerState.setValue(false); } } @Override public Cast getCastUtil() { return CastImpl.getInstance(); } @Override public Operation getOperatonUtil() { return OperationImpl.getInstance(); } @Override public Decision getDecisionUtil() { return DecisionImpl.getInstance(); } @Override public Excepton getExceptionUtil() { return ExceptonImpl.getInstance(); } @Override public Creation getCreationUtil() { return CreationImpl.getInstance(this); } @Override public IO getIOUtil() { return IOImpl.getInstance(); } @Override public Strings getStringUtil() { return StringsImpl.getInstance(); } @Override public Object getFDController() { engine.allowRequestTimeout(false); return new FDControllerImpl(engine,engine.getConfigServerImpl().getSerialNumber()); } public Map<String,CFMLFactory> getCFMLFactories() { return initContextes; } @Override public lucee.runtime.util.ResourceUtil getResourceUtil() { return ResourceUtilImpl.getInstance(); } @Override public lucee.runtime.util.HTTPUtil getHTTPUtil() { return HTTPUtilImpl.getInstance(); } @Override public PageContext getThreadPageContext() { return ThreadLocalPageContext.get(); } @Override public Config getThreadConfig() { return ThreadLocalPageContext.getConfig(); } @Override public void registerThreadPageContext(PageContext pc) { ThreadLocalPageContext.register(pc); } @Override public VideoUtil getVideoUtil() { return VideoUtilImpl.getInstance(); } @Override public ZipUtil getZipUtil() { return ZipUtilImpl.getInstance(); } /*public String getState() { return info.getStateAsString(); }*/ public void allowRequestTimeout(boolean allowRequestTimeout) { this.allowRequestTimeout=allowRequestTimeout; } public boolean allowRequestTimeout() { return allowRequestTimeout; } public boolean isRunning() { try{ CFMLEngine other = CFMLEngineFactory.getInstance(); // FUTURE patch, do better impl when changing loader if(other!=this && controlerState.toBooleanValue() && !(other instanceof CFMLEngineWrapper)) 
{ SystemOut.printDate("CFMLEngine is still set to true but no longer valid, "+lucee.runtime.config.Constants.NAME+" disable this CFMLEngine."); controlerState.setValue(false); reset(); return false; } } catch(Throwable t){} return controlerState.toBooleanValue(); } @Override public void cli(Map<String, String> config, ServletConfig servletConfig) throws IOException,JspException,ServletException { ServletContext servletContext = servletConfig.getServletContext(); HTTPServletImpl servlet=new HTTPServletImpl(servletConfig, servletContext, servletConfig.getServletName()); // webroot String strWebroot=config.get("webroot"); if(StringUtil.isEmpty(strWebroot,true)) throw new IOException("missing webroot configuration"); Resource root=ResourcesImpl.getFileResourceProvider().getResource(strWebroot); root.mkdirs(); // serverName String serverName=config.get("server-name"); if(StringUtil.isEmpty(serverName,true))serverName="localhost"; // uri String strUri=config.get("uri"); if(StringUtil.isEmpty(strUri,true)) throw new IOException("missing uri configuration"); URI uri; try { uri = lucee.commons.net.HTTPUtil.toURI(strUri); } catch (URISyntaxException e) { throw Caster.toPageException(e); } // cookie Cookie[] cookies; String strCookie=config.get("cookie"); if(StringUtil.isEmpty(strCookie,true)) cookies=new Cookie[0]; else { Map<String,String> mapCookies=HTTPUtil.parseParameterList(strCookie,false,null); int index=0; cookies=new Cookie[mapCookies.size()]; Entry<String, String> entry; Iterator<Entry<String, String>> it = mapCookies.entrySet().iterator(); Cookie c; while(it.hasNext()){ entry = it.next(); c=ReqRspUtil.toCookie(entry.getKey(),entry.getValue(),null); if(c!=null)cookies[index++]=c; else throw new IOException("cookie name ["+entry.getKey()+"] is invalid"); } } // header Pair[] headers=new Pair[0]; // parameters Pair[] parameters=new Pair[0]; // attributes StructImpl attributes = new StructImpl(); ByteArrayOutputStream os=new ByteArrayOutputStream(); HttpServletRequestDummy req=new HttpServletRequestDummy( root,serverName,uri.getPath(),uri.getQuery(),cookies,headers,parameters,attributes,null); req.setProtocol("CLI/1.0"); HttpServletResponse rsp=new HttpServletResponseDummy(os); serviceCFML(servlet, req, rsp); String res = os.toString(ReqRspUtil.getCharacterEncoding(null,rsp).name()); System.out.println(res); } @Override public ServletConfig[] getServletConfigs(){ return servletConfigs.toArray(new ServletConfig[servletConfigs.size()]); } @Override public long uptime() { return uptime; } /*public Bundle getCoreBundle() { return bundle; }*/ @Override public BundleCollection getBundleCollection() { return bundleCollection; } @Override public BundleContext getBundleContext() { return bundleCollection.getBundleContext(); } @Override public ClassUtil getClassUtil() { return new ClassUtilImpl(); } @Override public ListUtil getListUtil() { return new ListUtilImpl(); } @Override public DBUtil getDBUtil() { return new DBUtilImpl(); } @Override public ORMUtil getORMUtil() { return new ORMUtilImpl(); } @Override public TemplateUtil getTemplateUtil() { return new TemplateUtilImpl(); } @Override public HTMLUtil getHTMLUtil() { return new HTMLUtilImpl(); } @Override public ScriptEngineFactory getScriptEngineFactory(int dialect) { if(dialect==CFMLEngine.DIALECT_CFML) { if(cfmlScriptEngine==null) cfmlScriptEngine=new ScriptEngineFactoryImpl(this,false,dialect); return cfmlScriptEngine; } if(luceeScriptEngine==null) luceeScriptEngine=new ScriptEngineFactoryImpl(this,false,dialect); return luceeScriptEngine; } 
@Override public ScriptEngineFactory getTagEngineFactory(int dialect) { if(dialect==CFMLEngine.DIALECT_CFML) { if(cfmlTagEngine==null) cfmlTagEngine=new ScriptEngineFactoryImpl(this,true,dialect); return cfmlTagEngine; } if(luceeTagEngine==null) luceeTagEngine=new ScriptEngineFactoryImpl(this,true,dialect); return luceeTagEngine; } @Override public PageContext createPageContext(File contextRoot, String host, String scriptName, String queryString , Cookie[] cookies,Map<String, Object> headers,Map<String, String> parameters, Map<String, Object> attributes, OutputStream os, long timeout, boolean register) throws ServletException { return PageContextUtil.getPageContext(contextRoot,host, scriptName, queryString, cookies, headers, parameters, attributes, os,register,timeout,false); } @Override public ConfigWeb createConfig(File contextRoot,String host, String scriptName) throws ServletException { // TODO do a mored rect approach PageContext pc = null; try{ pc = PageContextUtil.getPageContext(contextRoot,host,scriptName, null, null, null, null, null, null,false,-1,false); return pc.getConfig(); } finally{ pc.getConfig().getFactory().releaseLuceePageContext(pc, false); } } @Override public void releasePageContext(PageContext pc, boolean unregister) { PageContextUtil.releasePageContext(pc,unregister); } @Override public lucee.runtime.util.SystemUtil getSystemUtil() { return new SystemUtilImpl(); } @Override public TimeZone getThreadTimeZone() { return ThreadLocalPageContext.getTimeZone(); } @Override public Instrumentation getInstrumentation() { return InstrumentationFactory.getInstrumentation(ThreadLocalPageContext.getConfig()); } public Controler getControler() { return controler; } }
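// Illustrative sketch only (not part of the Lucee sources above): the Railo-to-Lucee migration in
// copyRecursiveAndRename(...) (present in both versions of CFMLEngineImpl above) reads the old
// config file as text, applies a fixed list of token replacements, and writes the result under the
// new name (railo-web.xml.cfm -> lucee-web.xml.cfm). The pairs below are a subset of those in the
// original; the class and method names are invented, and java.nio stands in for the Resource API.
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.LinkedHashMap;
import java.util.Map;

public class RailoToLuceeConfigMigrationSketch {

  // LinkedHashMap keeps the original replacement order, e.g. "/railo-context.ra" before "/railo-context"
  private static final Map<String, String> REPLACEMENTS = new LinkedHashMap<String, String>();
  static {
    REPLACEMENTS.put("<railo-configuration", "<lucee-configuration");
    REPLACEMENTS.put("</railo-configuration", "</lucee-configuration");
    REPLACEMENTS.put("{railo-config}", "{lucee-config}");
    REPLACEMENTS.put("{railo-server}", "{lucee-server}");
    REPLACEMENTS.put("{railo-web}", "{lucee-web}");
    REPLACEMENTS.put("\"railo.commons.", "\"lucee.commons.");
    REPLACEMENTS.put("\"railo.runtime.", "\"lucee.runtime.");
    REPLACEMENTS.put("\"railo.cfx.", "\"lucee.cfx.");
    REPLACEMENTS.put("/railo-context.ra", "/lucee-context.lar");
    REPLACEMENTS.put("/railo-context", "/lucee");
    REPLACEMENTS.put("railo-server-context", "lucee-server");
  }

  public static void migrate(Path railoWebXml, Path luceeWebXml) throws IOException {
    String str = new String(Files.readAllBytes(railoWebXml), StandardCharsets.UTF_8);
    for (Map.Entry<String, String> e : REPLACEMENTS.entrySet()) {
      str = str.replace(e.getKey(), e.getValue());
    }
    Files.write(luceeWebXml, str.getBytes(StandardCharsets.UTF_8));
  }

  public static void main(String[] args) throws IOException {
    migrate(Paths.get("railo-web.xml.cfm"), Paths.get("lucee-web.xml.cfm"));
  }
}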
package org.teachingkidsprogramming.section05recursion;

import org.teachingextensions.logo.Tortoise;
import org.teachingextensions.logo.Turtle.Animals;
import org.teachingextensions.logo.utils.ColorUtils.PenColors.Grays;
import org.teachingextensions.logo.utils.ColorUtils.PenColors.Reds;

public class SpiderWeb {
  public static void main(String[] args) {
    Tortoise.show();
    Tortoise.setSpeed(10);
    Tortoise.setPenWidth(1);
    // Change the Tortoise to a Spider --
    Tortoise.setAnimal(Animals.Spider);
    Tortoise.setPenColor(Reds.Red);
    Tortoise.getBackgroundWindow().setBackground(Grays.Black);
    double length = 10.5;
    Double zoom = 1.1;
    for (int i = 0; i < 10; i++) {
      length = weaveOneLayer(length, zoom);
      zoom = zoom * 1.3;
    }
  }

  private static double weaveOneLayer(double length, Double zoom) {
    for (int i = 0; i < 6; i++) {
      drawTriangle(length);
      Tortoise.turn(360 / 6);
      length = length + zoom;
    }
    return length;
  }

  private static void drawTriangle(double length) {
    for (int i = 0; i < 3; i++) {
      Tortoise.move(length);
      Tortoise.turn(360 / 3);
    }
  }
}
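// Illustrative sketch only (not part of the original SpiderWeb lesson): since SpiderWeb lives in a
// "section05recursion" package, the loop in main() that weaves ten layers can also be expressed
// recursively. This assumes the same Tortoise API and the same weaveOneLayer(...) behaviour shown
// above; the class and method names here are invented for the sketch.
import org.teachingextensions.logo.Tortoise;

public class SpiderWebRecursiveSketch {
  public static void main(String[] args) {
    Tortoise.show();
    Tortoise.setSpeed(10);
    weaveLayers(10.5, 1.1, 10); // same starting length, zoom and layer count as SpiderWeb
  }

  private static void weaveLayers(double length, double zoom, int layersLeft) {
    if (layersLeft == 0) {
      return; // base case: nothing left to weave
    }
    double newLength = weaveOneLayer(length, zoom); // draw one ring of six triangles
    weaveLayers(newLength, zoom * 1.3, layersLeft - 1); // recurse with a larger zoom, one fewer layer
  }

  private static double weaveOneLayer(double length, double zoom) {
    for (int i = 0; i < 6; i++) {
      drawTriangle(length);
      Tortoise.turn(360 / 6);
      length = length + zoom;
    }
    return length;
  }

  private static void drawTriangle(double length) {
    for (int i = 0; i < 3; i++) {
      Tortoise.move(length);
      Tortoise.turn(360 / 3);
    }
  }
}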
package org.usfirst.frc.team294.robot.commands;

import org.usfirst.frc.team294.robot.Robot;
//import org.usfirst.frc.team294.robot.subsystems.Pivot;
//import org.usfirst.frc.team294.robot.subsystems.Telescope;
//import org.usfirst.frc.team294.robot.subsystems.Telescope.Setpoint;

import edu.wpi.first.wpilibj.Preferences;
import edu.wpi.first.wpilibj.CANTalon.ControlMode;
import edu.wpi.first.wpilibj.command.Command;

public class TelescopeToHeight extends Command {

  public enum Setpoint {
    kStart, k1Tote, k2Tote, k3Tote, k4Tote, k5Tote, kHumanLoad, kIntake
  }

  private int height;
  private double tolerance = 10;
  private Setpoint m_setpoint;

  public TelescopeToHeight(Setpoint setpoint) {
    requires(Robot.telescope);
    m_setpoint = setpoint;
  }

  // Called just before this Command runs the first time
  protected void initialize() {
    setPrefSetpoint(getSetpointPrefName(m_setpoint));
    System.out.println(m_setpoint);
    System.out.println(Robot.telescope.getPotCanVal());
  }

  // Called repeatedly when this Command is scheduled to run
  protected void execute() {
  }

  // Make this return true when this Command no longer needs to run execute()
  protected boolean isFinished() {
    return onTarget();
  }

  // Called once after isFinished returns true
  protected void end() {
  }

  // Called when another command which requires one or more of the same
  // subsystems is scheduled to run
  protected void interrupted() {
  }

  public boolean onTarget() {
    //logging.debug("cur: %.2f setpoint: %.2f error: %.2f ontarget: %s",
    // self.pidSource.PIDGet(),
    // self.pid.GetSetpoint(),
    // self.pid.GetError(),
    // self.pid.OnTarget())
    double error = Robot.telescope.getMainTelescope().get()
        - Preferences.getInstance().getDouble(getSetpointPrefName(getPrefSetpoint()), Double.POSITIVE_INFINITY);
    return (Math.abs(error) <= tolerance);
  }

  private void setPrefSetpoint(String pref) {
    if (pref == null) return;
    double setp = Preferences.getInstance().getDouble(pref, Double.POSITIVE_INFINITY);
    if (setp == Double.POSITIVE_INFINITY) return;
    Robot.telescope.getMainTelescope().set(setp);//setSetpoint(setp);
    if (!(ControlMode.Position == Robot.telescope.getMainTelescope().getControlMode())) {
      if (!Robot.telescope.getMainTelescope().isControlEnabled())
        Robot.telescope.getMainTelescope().enableControl();
      Robot.telescope.getMainTelescope().changeControlMode(ControlMode.Position);
    }
  }

  private String getSetpointPrefName(Setpoint setpoint) {
    switch (setpoint) {
    case kStart: return "telStartSetpoint";
    case k1Tote: return "tel1ToteSetpoint";
    case k2Tote: return "tel2ToteSetpoint";
    case k3Tote: return "tel3ToteSetpoint";
    case k4Tote: return "tel4ToteSetpoint";
    case k5Tote: return "tel5ToteSetpoint";
    case kHumanLoad: return "telHumanLoadSetpoint";
    case kIntake: return "telIntakeSetpoint";
    default: return null;
    }
  }

  public synchronized Setpoint getPrefSetpoint() {
    return m_setpoint;
  }

  public synchronized boolean is1Tote() {
    return m_setpoint == Setpoint.k1Tote;
  }

  public synchronized boolean is2Tote() {
    return m_setpoint == Setpoint.k2Tote;
  }

  public synchronized boolean is3Tote() {
    return m_setpoint == Setpoint.k3Tote;
  }

  public synchronized boolean is4Tote() {
    return m_setpoint == Setpoint.k4Tote;
  }

  public synchronized boolean is5Tote() {
    return m_setpoint == Setpoint.k5Tote;
  }

  public synchronized boolean kHumanLoad() {
    return m_setpoint == Setpoint.kHumanLoad;
  }

  //Add other tote levels
  public synchronized boolean isIntake() {
    return m_setpoint == Setpoint.kIntake;
  }

  public void tweakSetpoint(double amt) {
    if (Robot.telescope.getMainTelescope().getControlMode() == ControlMode.Position) {
      double oldSetpoint = Preferences.getInstance().getDouble(getSetpointPrefName(getPrefSetpoint()), Double.POSITIVE_INFINITY);
      double newSetpoint = oldSetpoint + amt;
      // Update preferences so the robot remembers it for next time
      String pref;
      synchronized (this) {
        // don't update start setpoint
        if (m_setpoint == Setpoint.kStart) return;
        pref = getSetpointPrefName(m_setpoint);
      }
      if (pref == null) return;
      Preferences.getInstance().putDouble(pref, newSetpoint);
      Preferences.getInstance().save();
      Robot.telescope.getMainTelescope().set(newSetpoint);
    } else {
      Robot.telescope.getMainTelescope().set(Robot.telescope.getMainTelescope().get() + amt);
      //getPIDController().reset();
      //enable();
    }
  }

  public void tweakDown() {
    tweakSetpoint(4);
  }

  public void tweakUp() {
    tweakSetpoint(-4);
  }
}
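// Illustrative sketch only (not part of the original robot code): how TelescopeToHeight is
// typically wired up in 2015-era WPILib, i.e. bound to joystick buttons so a press schedules the
// command for a given Setpoint. The joystick port and button numbers are invented for this sketch;
// the requires(Robot.telescope) call inside the command makes the scheduler interrupt any other
// command currently using the telescope subsystem.
import org.usfirst.frc.team294.robot.commands.TelescopeToHeight;
import org.usfirst.frc.team294.robot.commands.TelescopeToHeight.Setpoint;

import edu.wpi.first.wpilibj.Joystick;
import edu.wpi.first.wpilibj.buttons.JoystickButton;

public class TelescopeBindingsSketch {

  private final Joystick coPilotStick = new Joystick(1); // assumed port

  public TelescopeBindingsSketch() {
    // each press schedules a new command instance; isFinished() returns true once onTarget()
    // reports the telescope is within tolerance of the height stored in Preferences
    new JoystickButton(coPilotStick, 3).whenPressed(new TelescopeToHeight(Setpoint.k3Tote));
    new JoystickButton(coPilotStick, 4).whenPressed(new TelescopeToHeight(Setpoint.kHumanLoad));
    new JoystickButton(coPilotStick, 5).whenPressed(new TelescopeToHeight(Setpoint.kIntake));
  }
}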
package org.usfirst.frc.team619.subsystems.drive; import static java.lang.Math.PI; import static java.lang.Math.atan2; import static java.lang.Math.cos; import static java.lang.Math.sin; import static java.lang.Math.sqrt; import static java.lang.Math.toRadians; import com.kauailabs.nav6.frc.IMUAdvanced; import org.usfirst.frc.team619.subsystems.drive.SwerveCalcValue; import edu.wpi.first.wpilibj.AnalogInput; import edu.wpi.first.wpilibj.CANTalon; import edu.wpi.first.wpilibj.DigitalInput; import edu.wpi.first.wpilibj.PIDController; import edu.wpi.first.wpilibj.PIDSource; import edu.wpi.first.wpilibj.SerialPort; import edu.wpi.first.wpilibj.Talon; import edu.wpi.first.wpilibj.TalonSRX; import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard; public class SwerveDriveBase { //must also use a wheel class Talon driveLFMotor; CANTalon rotateLFMotor; //Front Right Wheel Talon driveRFMotor; CANTalon rotateRFMotor; //Back Left Wheel Talon driveLBMotor; CANTalon rotateLBMotor; //Back Right Wheel Talon driveRBMotor; CANTalon rotateRBMotor; //NavX IMUAdvanced imu; SerialPort serial_port; int encoderUnitsPerRotation = 1660;//was 1665 double orientationOffset; //Used to switch between control modes boolean isRobotCentric = true; boolean isFieldCentric = false; boolean isObjectCentric = false; boolean isHookCentric = false; double radius = 55; SwerveWheel[] wheelArray; SwerveWheel frontLeft; SwerveWheel frontRight; SwerveWheel backLeft; SwerveWheel backRight; double angleToDiagonal = 0; double L; // wheelbase (distance from center of front wheel to center of rear wheel) double W; // trackwidth (distance from left wheel to right wheel) double R; // diagonal size private double[] speeds; private double[] angles; //Used for Autonomous control /*give dimensions between the wheels both width and length, * width is the distance between left wheels and right wheels, * length is the distance between front wheels and back wheels*/ /*give dimensions between the wheels both width and length, * width is the distance between left wheels and right wheels, * @param width Width of the robot, distance(inches) between left and right wheels * int backRightRotateID, double width, double length){ */ /** * Initializes one SwerveControl Object * @param frontLeftDriveChannel PWM Port for Talon that drives the Front Left Swerve Wheel * @param frontLeftRotateID CANBus ID for Front Left Swerve Wheel * @param frontRightDriveChannel PWM Port for Talon that drives the Front Left Right Wheel * @param frontRightRotateID CANBus ID for Front Right Swerve Wheel * @param backLeftDriveChannel PWM Port for Talon that drives the Back Left Swerve Wheel * @param backLeftRotateID CANBus ID for Back Left Swerve Wheel * @param backRightDriveChannel PWM Port for Talon that drives the Back Right Swerve Wheel * @param backRightRotateID CANBus ID for Back Right Swerve Wheel * @param width Width of the robot, distance(inches) between left and right wheels * @param length Length of the robot, distance(inches) between front and back wheels */ public SwerveDriveBase(SwerveWheel frontLeft_, SwerveWheel frontRight_, SwerveWheel backLeft_, SwerveWheel backRight_, double width, double length){ W = width; L = length; R = Math.sqrt( L*L + W*W ); //used to establish rotation angles for all four wheels angleToDiagonal = Math.toDegrees(Math.atan2(length, width)); frontLeft = frontLeft_; frontRight = frontRight_; backLeft = backLeft_; backRight = backRight_; // frontLeft.initalizePosition((270 - angleToDiagonal), 205); // 
frontRight.initalizePosition((angleToDiagonal + 90), 205); // backLeft.initalizePosition((angleToDiagonal + 270), 0); // backRight.initalizePosition((90 - angleToDiagonal), 0); /* FLWheel = new SwerveWheel(frontLeftDriveChannel, frontLeftRotateID, p, i, d, (180 - angleToDiagonal), 0); FRWheel = new SwerveWheel(frontRightDriveChannel, frontRightRotateID, p, i, d, (angleToDiagonal), 0); BLWheel = new SwerveWheel(backLeftDriveChannel, backLeftRotateID, p, i, d, (angleToDiagonal + 180), 0); BRWheel = new SwerveWheel(backRightDriveChannel, backRightRotateID, p, i, d, (0 - angleToDiagonal), 0); */ wheelArray = new SwerveWheel[]{frontLeft, frontRight, backLeft, backRight}; try { serial_port = new SerialPort(57600,SerialPort.Port.kMXP); // You can add a second parameter to modify the // update rate (in hz) from 4 to 100. The default is 100. // If you need to minimize CPU load, you can set it to a // lower value, as shown here, depending upon your needs. // You can also use the IMUAdvanced class for advanced // features. byte update_rate_hz = 100; //imu = new IMU(serial_port,update_rate_hz); imu = new IMUAdvanced(serial_port,update_rate_hz); } catch( Exception ex ) { } initPIDControllers(); } /** * Used in Autonomous Mode only, Rotates robot a certain angle from the current position * @param targetAngle Angle(degrees) from current position of robot, which robot will rotate to */ public void initPIDControllers(){ //rotationPID.enable(); //rotationPID.setInputRange(-180, 180); //sets input range from 0 to 360(degrees) //rotationPID.setOutputRange(-0.5, 0.5); //sets output range from -1 to 1(max rotation values) //rotationPID.setContinuous(); updatePIDControllers(); } public void updatePIDControllers(){ //rotatePIDInput.setValue(imu.getYaw()); //SmartDashboard.putNumber("PIDInput Value: ", rotatePIDInput.pidGet()); } public void relativeRotateRobot(double angle){ SmartDashboard.putNumber("Delta Angle", angle); double currentAngle = imu.getYaw(); SmartDashboard.putNumber("Current Angle:", currentAngle); double targetAngle = currentAngle + angle; if(targetAngle >= 180){ targetAngle -= 360; } else if(targetAngle < -180){ targetAngle += 360; } SmartDashboard.putNumber("Target Angle: ", targetAngle); updatePIDControllers(); while(Math.abs(currentAngle - targetAngle) >= 2){ //waits until we are within range of the angle //rotationPID.setSetpoint(targetAngle); //tells PID loop to go to the targetAngle currentAngle = imu.getYaw(); updatePIDControllers(); //calculateSwerveControl(0,0,0.2); calculateSwerveControl(0, 0, 0 /*rotatePIDOutput.getPIDValue()*/); //sets the wheels to rotate based off PID loop try{ Thread.sleep(1); } catch(Exception e){ //Do nothing } } calculateSwerveControl(0,0,0); //stops robot spinning SmartDashboard.putNumber("Current Angle:", currentAngle); } public void relativeMoveRobot(double angle, double speed, double time){ calculateSwerveControl(Math.sin(Math.toRadians(angle)) * speed, Math.cos(Math.toRadians(angle)) * speed, 0); try{ Thread.sleep((long) (time * 1000)); }catch(Exception e){ //Do nothing } calculateSwerveControl(0, 0, 0); } /** * Used in Autonomous Mode Only, Rotates robot to a certain angle regardless of robots current position * @param targetAngle Angle(degrees) to which the robot will rotate */ public void absoluteRotateRobot(double targetAngle){ double currentAngle = imu.getYaw(); if(targetAngle >= 180){ targetAngle-=360; } else if(targetAngle < -180){ targetAngle +=360; } updatePIDControllers(); while(Math.abs(currentAngle - targetAngle) >= 1){//waits until we are 
within range of our target angle
            //rotationPID.setSetpoint(targetAngle); //tells PID loop to go to the target angle
            currentAngle = imu.getYaw();
            SmartDashboard.putNumber("Absolute Current Angle", currentAngle);
            updatePIDControllers();
            calculateSwerveControl(0, 0, 0 /*rotatePIDOutput.getPIDValue()*/); //sets the wheels to rotate based off PID loop
            try{
                Thread.sleep(10);
            } catch(Exception e){
                //Do nothing
            }
        }
        calculateSwerveControl(0, 0, 0); //stops robot spinning
    }

    public int angleToEncoderUnit(double angle){ //Only pass in deltaTheta
        double deltaEncoder;
        deltaEncoder = angle*(encoderUnitsPerRotation/360.0);
        return (int)deltaEncoder;
    }

    /**
     * Calls the correct movement method based on control mode
     * @param LY Left stick Y Axis
     * @param LX Left stick X Axis
     * @param RX Right stick X Axis
     */
    public void move(double LY, double LX, double RX){
        if(isRobotCentric){
            calculateSwerveControl(LY, LX, RX);
        } else if(isFieldCentric){
            getFieldCentric(LY, LX, RX);
        } else if(isObjectCentric){
            calculateObjectControl(RX);
        } else{
            calculateHookControl(RX);
        }
    }

    /**
     * Called by move command, controls both field centric and robot centric modes
     * @param LY Left stick Y Axis
     * @param LX Left stick X Axis
     * @param RX Left stick Z (twist) Axis
     */
    public void calculateSwerveControl(double LY, double LX, double RX){
        //math for rotation vector, different for every wheel so we calculate for each one separately
        double A = LX - RX*(L/R);
        double B = LX + RX*(L/R);
        double C = LY - RX*(W/R);
        double D = LY + RX*(W/R);

        // order of wheels is:
        // { front_right, front_left, rear_left, rear_right }
        double[] angles = new double[]{ atan2(B,C)*180/PI, atan2(B,D)*180/PI, atan2(A,D)*180/PI, atan2(A,C)*180/PI };
        double[] speeds = new double[]{ sqrt(B*B+C*C), sqrt(B*B+D*D), sqrt(A*A+D*D), sqrt(A*A+C*C) };

        double max = speeds[0];
        if ( speeds[1] > max ) max = speeds[1];
        if ( speeds[2] > max ) max = speeds[2];
        if ( speeds[3] > max ) max = speeds[3];
        if ( max > 1 ) {
            speeds[0] /= max;
            speeds[1] /= max;
            speeds[2] /= max;
            speeds[3] /= max;
        }

        //Set target speed
        frontRight.setSpeed(speeds[0]);
        frontLeft.setSpeed(speeds[1]);
        backLeft.setSpeed(speeds[2]);
        backRight.setSpeed(speeds[3]);

        if(LY == 0 && LX == 0 && RX == 0){ //if our inputs are nothing, don't change the angle (use currentAngle as targetAngle)
            for(SwerveWheel wheel : wheelArray){
                wheel.setTargetAngle(wheel.getCurrentAngle());
            }
        } else {
            //Set target angle
            frontRight.setTargetAngle(angles[0]);
            frontLeft.setTargetAngle(angles[1]);
            backLeft.setTargetAngle(angles[2]);
            backRight.setTargetAngle(angles[3]);
        }

        //Makes the wheels go to calculated target angle
        frontRight.goToAngle();
        frontLeft.goToAngle();
        backRight.goToAngle();
        backLeft.goToAngle();

        //Make the wheels drive at their calculated speed
        frontRight.drive();
        frontLeft.drive();
        backRight.drive();
        backLeft.drive();
    }

    public void getFieldCentric( double LY, double LX, double RX ) {
        // imu.getYaw( ) returns angle between -180 and 180
        double theta = imu.getYaw( );
        theta = toRadians(theta < 0 ?
theta+360 : theta); double temp = LY*cos(theta) + LX*sin(theta); LX = -LY*sin(theta) + LX*cos(theta); LY = temp; calculateSwerveControl(LY, LX, RX); } /* * FOR TESTING PURPOSES * //double FRWheelTarget = FRWheel.rotateMotor.getEncPosition() + angleToEncoderUnit(FRWheel.getDeltaTheta()); .rotateMotor.set(FRWheel.rotateMotor.getEncPosition() + angleToEncoderUnit(FRWheel.getDeltaTheta())); SmartDashboard.putNumber("FR Target Encoder Position", (FRWheel.getTargetAngle())); SmartDashboard.putNumber("FR DeltaTheta: ", angleToEncoderUnit(FRWheel.getDeltaTheta())); SmartDashboard.putNumber("FR Current Encoder", FRWheel.getCurrentAngle()); try{ //Thread.sleep(5000); }catch (Exception ex){ } FLWheel.rotateMotor.set(FLWheel.rotateMotor.getEncPosition() + angleToEncoderUnit(FLWheel.getDeltaTheta())); SmartDashboard.putNumber("FL Target Encoder Position", (FLWheel.rotateMotor.getEncPosition() + angleToEncoderUnit(FLWheel.getDeltaTheta()))); SmartDashboard.putNumber("FL DeltaTheta: ", FLWheel.getDeltaTheta()); BRWheel.rotateMotor.set(BRWheel.rotateMotor.getEncPosition() + angleToEncoderUnit(BRWheel.getDeltaTheta())); SmartDashboard.putNumber("BR Target Encoder Position", (BRWheel.rotateMotor.getEncPosition() + angleToEncoderUnit(BRWheel.getDeltaTheta()))); SmartDashboard.putNumber("BR DeltaTheta: ", BRWheel.getDeltaTheta()); BLWheel.rotateMotor.set(BLWheel.rotateMotor.getEncPosition() + angleToEncoderUnit(BLWheel.getDeltaTheta())); SmartDashboard.putNumber("BL Target Encoder Position", (BLWheel.rotateMotor.getEncPosition() + angleToEncoderUnit(BLWheel.getDeltaTheta()))); SmartDashboard.putNumber("BL DeltaTheta: ", BLWheel.getDeltaTheta()); SmartDashboard.putNumber("Current Angle", BLWheel.getCurrentAngle()); SmartDashboard.putNumber("Delta Theta", BLWheel.getDeltaTheta()); SmartDashboard.putNumber("Target Angle", BLWheel.getTargetAngle()); */ /** * Called by move command, controls object centric mode * @param RX Right stick X Axis */ public void calculateObjectControl(double RX){ double distanceToFront = radius - L/2; double distanceToBack = radius + L/2; frontLeft.setTargetAngle(180 - Math.toDegrees(Math.atan2(W/2, distanceToFront))); frontRight.setTargetAngle(180 + Math.toDegrees(Math.atan2(W/2, distanceToFront))); backLeft.setTargetAngle(180 - Math.toDegrees(Math.atan2(W/2, distanceToBack))); backRight.setTargetAngle(180 + Math.toDegrees(Math.atan2(W/2, distanceToBack))); backLeft.setSpeed(RX); backRight.setSpeed(RX); double speedRatio = Math.sqrt(Math.pow((W/2), 2) + Math.pow(distanceToFront, 2)) / Math.sqrt(Math.pow((W/2), 2) + Math.pow(distanceToBack, 2)); frontLeft.setSpeed(speedRatio * RX); frontRight.setSpeed(speedRatio * RX); frontRight.goToAngle(); frontLeft.goToAngle(); backRight.goToAngle(); backLeft.goToAngle(); frontRight.drive(); frontLeft.drive(); backRight.drive(); backLeft.drive(); } public void calculateHookControl(double RX){ double distanceToFront = 38 - L/2; double distanceToBack = 38 + L/2; frontLeft.setTargetAngle(180 - Math.toDegrees(Math.atan2(W/2, distanceToFront))); frontRight.setTargetAngle(180 + Math.toDegrees(Math.atan2(W/2, distanceToFront))); backLeft.setTargetAngle(180 - Math.toDegrees(Math.atan2(W/2, distanceToBack))); backRight.setTargetAngle(180 + Math.toDegrees(Math.atan2(W/2, distanceToBack))); backLeft.setSpeed(RX); backRight.setSpeed(RX); double speedRatio = Math.sqrt(Math.pow((W/2), 2) + Math.pow(distanceToFront, 2)) / Math.sqrt(Math.pow((W/2), 2) + Math.pow(distanceToBack, 2)); frontLeft.setSpeed(speedRatio * RX); frontRight.setSpeed(speedRatio * RX); 
frontRight.goToAngle(); frontLeft.goToAngle(); backRight.goToAngle(); backLeft.goToAngle(); frontRight.drive(); frontLeft.drive(); backRight.drive(); backLeft.drive(); } public void swerveControl(double LY, double LX, double RX, double radius){ double translationalXComponent = LX; double translationalYComponent = LY; double rAxis = RX; double translationalOffset = 0.0; if(isFieldCentric){ translationalOffset = imu.getYaw(); } else { translationalOffset = 0; } //Same for all wheels so therefore we only do the transitional vector math once double translationalMagnitude = Math.sqrt(Math.pow(translationalYComponent, 2) + Math.pow(translationalXComponent, 2)); double translationalAngle = Math.toDegrees(Math.atan2(translationalYComponent, translationalXComponent)); translationalAngle += translationalOffset; //sets the robot front to be at the angle determined by orientationOffset if(translationalAngle >= 360){ translationalAngle -= 360; } else if(translationalAngle < 0){ translationalAngle += 360; } translationalYComponent = Math.sin(Math.toRadians(translationalAngle)) * translationalMagnitude; //calculates y component of translation vector translationalXComponent = Math.cos(Math.toRadians(translationalAngle)) * translationalMagnitude; //calculates x component of translation vector //Deadband if(Math.abs(LX) < 0.1) translationalXComponent = 0; if(Math.abs(LY) < 0.1) translationalYComponent = 0; if(Math.abs(RX) < 0.1) rAxis = 0; //End Deadband double distanceToFront = radius - L/2; double distanceToBack = radius + L/2; //Calculates wheel's rotational angle based on radius frontLeft.setRAngle(180 - Math.toDegrees(Math.atan2(W/2, distanceToFront))); frontRight.setRAngle(180 + Math.toDegrees(Math.atan2(W/2, distanceToFront))); backLeft.setRAngle(180 - Math.toDegrees(Math.atan2(W/2, distanceToBack))); backRight.setRAngle(180 + Math.toDegrees(Math.atan2(W/2, distanceToBack))); //Calculate each wheel's rotational speed based on the radius //THIS ONLY ALLOWS FOR A POSITVE RADIUS double speedRatio = Math.sqrt(Math.pow((W/2), 2) + Math.pow(distanceToFront, 2)) / Math.sqrt(Math.pow((W/2), 2) + Math.pow(distanceToBack, 2)); backLeft.setSpeed(rAxis); backRight.setSpeed(rAxis); frontLeft.setSpeed(speedRatio * rAxis); frontRight.setSpeed(speedRatio * rAxis); /*This seems wrong, copied code from above BLWheel.setRSpeed(1); BRWheel.setRSpeed(1); FLWheel.setRSpeed(speedRatio); FRWheel.setRSpeed(speedRatio); */ double fastestSpeed = 0.0; for (SwerveWheel wheel : wheelArray){ double rotateXComponent = Math.cos(Math.toRadians(wheel.getRAngle())); //calculates x component of rotation vector double rotateYComponent = Math.sin(Math.toRadians(wheel.getRAngle())); //calculates y component of rotation vector if(rAxis > 0){//Why do we do this? 
rotateXComponent = -rotateXComponent; rotateYComponent = -rotateYComponent; } wheel.setSpeed(Math.sqrt(Math.pow(rotateXComponent + translationalXComponent, 2) + Math.pow((rotateYComponent + translationalYComponent), 2)));//sets the speed based off translational and rotational vectors wheel.setTargetAngle(Math.toDegrees(Math.atan2((rotateYComponent + translationalYComponent), (rotateXComponent + translationalXComponent))));//sets the target angle based off translation and rotational vectors if(LY == 0 && LX == 0 && RX == 0){//if our inputs are nothing, don't change the angle(use currentAngle as targetAngle) wheel.setTargetAngle(wheel.getCurrentAngle()); } if(wheel.getSpeed() > fastestSpeed){//if speed of wheel is greater than the others store the value fastestSpeed = wheel.getSpeed(); } } if(fastestSpeed > 1){ //if the fastest speed is greater than 1(our max input) divide the target speed for each wheel by the fastest speed for(SwerveWheel wheel : wheelArray){ wheel.setSpeed(wheel.getSpeed()/fastestSpeed); } } //Makes the wheels go to calculated target angle frontRight.goToAngle(); frontLeft.goToAngle(); backRight.goToAngle(); backLeft.goToAngle(); //Make the wheels drive at their calculated speed frontRight.drive(); frontLeft.drive(); backRight.drive(); backLeft.drive(); } public void setSpeedMode(double newSpeedModifier){ for (SwerveWheel wheel : wheelArray){ wheel.setSpeedModifier(newSpeedModifier); } } /** * Change the orientation of the robot in robot centric mode(i.e. changes the left side to become the front) * @param north button to make robot front the original front * @param east button to make robot front the original right * @param south button to make robot front the original back * @param west button to make robot front the original left */ public void changeOrientation(boolean north, boolean east, boolean south, boolean west){ //switch out of field centric //set the robot front (N,E,S,W) if(north){ isFieldCentric = false; isObjectCentric = false; orientationOffset = 0; } else if(east){ isFieldCentric = false; isObjectCentric = false; orientationOffset = -90; } else if(south){ isFieldCentric = false; isObjectCentric = false; orientationOffset = 180; } else if(west){ isFieldCentric = false; isObjectCentric = false; orientationOffset = 90; } } /** * Called to switch to field centric mode */ public void switchToFieldCentric(){ isObjectCentric = false; isRobotCentric = false; isFieldCentric = true; isHookCentric = false; } /** * Called to switch to object centric mode */ public void switchToObjectCentric(){ isObjectCentric = true; isFieldCentric = false; isRobotCentric = false; isHookCentric = false; } /** * Called to switch to HookCentric */ public void switchToHookCentric(){ isObjectCentric = false; isFieldCentric = false; isRobotCentric = false; isHookCentric = true; } /** * Called to switch to robot centric mode */ public void switchToRobotCentric(){ isObjectCentric = false; isFieldCentric = false; isRobotCentric = true; isHookCentric = false; } /* public void switchDrivingMode(boolean LStick, boolean RStick){ if(LStick && !isFieldCentric){ isObjectCentric = false; isRobotCentric = false; isFieldCentric = true; } else if(LStick && isFieldCentric){ isObjectCentric = false; isFieldCentric = false; isRobotCentric = true; } else if(RStick && !isObjectCentric){ isObjectCentric = true; isFieldCentric = false; isRobotCentric = false; } else if(RStick && isObjectCentric){ isObjectCentric = false; isFieldCentric = false; isRobotCentric = true; } else{ isObjectCentric = false; 
            isFieldCentric = false;
            isRobotCentric = true;
        }
    }*/

    /**
     * Moves all four swerve wheels to home position (the limit switch)
     */
    private void wheelsToHomePos(){
        /*for (SwerveWheel wheel : wheelArray){
            wheel.goToHome();
        }*/
        //frontRight.goToHome();
        //FRWheel.goToHome();
        //BRWheel.goToHome();
        //BLWheel.goToHome();
    }

    /**
     * Moves all four swerve wheels to zero position (straight sideways, 0 degrees on the unit circle)
     */
    public void wheelsToZero(){
        // <DRS> uncomment these later...
        //frontRight.goToZero();
        //frontLeft.goToZero();
    }

    private void test(){
        //frontRight.test();
        //FLWheel.test();
        //BLWheel.test();
        //BRWheel.test();
    }
}
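The wheel-speed and wheel-angle math in calculateSwerveControl above is easier to sanity-check in isolation. What follows is a minimal, self-contained sketch of the same inverse kinematics (the A/B/C/D terms, the atan2 wheel angles, and the speed normalization), assuming the same wheel ordering of front-right, front-left, back-left, back-right. The class name SwerveKinematicsDemo and its main() harness are illustrative only and are not part of the robot code.

// Standalone sketch of the swerve inverse kinematics used by calculateSwerveControl.
// Names here are hypothetical; only the math mirrors the drive-base code above.
public class SwerveKinematicsDemo {

    /** Returns { speeds, anglesDegrees } in wheel order: front-right, front-left, back-left, back-right. */
    static double[][] compute(double forward, double strafe, double rotate,
                              double wheelbase, double trackwidth) {
        double r = Math.sqrt(wheelbase * wheelbase + trackwidth * trackwidth); // diagonal size R
        double a = strafe - rotate * (wheelbase / r);
        double b = strafe + rotate * (wheelbase / r);
        double c = forward - rotate * (trackwidth / r);
        double d = forward + rotate * (trackwidth / r);

        double[] angles = {
            Math.toDegrees(Math.atan2(b, c)),   // front-right
            Math.toDegrees(Math.atan2(b, d)),   // front-left
            Math.toDegrees(Math.atan2(a, d)),   // back-left
            Math.toDegrees(Math.atan2(a, c))    // back-right
        };
        double[] speeds = {
            Math.hypot(b, c), Math.hypot(b, d), Math.hypot(a, d), Math.hypot(a, c)
        };

        // Normalize so no wheel is commanded faster than full speed (1.0).
        double max = Math.max(Math.max(speeds[0], speeds[1]), Math.max(speeds[2], speeds[3]));
        if (max > 1.0) {
            for (int i = 0; i < speeds.length; i++) {
                speeds[i] /= max;
            }
        }
        return new double[][] { speeds, angles };
    }

    public static void main(String[] args) {
        // Example: full forward plus a half-strength twist on a square 28in x 28in chassis.
        double[][] result = compute(1.0, 0.0, 0.5, 28.0, 28.0);
        for (int i = 0; i < 4; i++) {
            System.out.printf("wheel %d: speed=%.2f angle=%.1f deg%n", i, result[0][i], result[1][i]);
        }
    }
}

Running main() prints the four speed/angle pairs, which can be compared directly against what calculateSwerveControl would command for the same stick inputs and chassis dimensions.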
package org.crazycake.shiro; import org.apache.shiro.session.Session; import org.apache.shiro.session.UnknownSessionException; import org.apache.shiro.session.mgt.SimpleSession; import org.junit.Before; import org.junit.Test; import java.util.*; import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; public class RedisSessionDAOTest { private RedisSingletonManager redisManager; private RedisSessionDAO redisSessionDAO; private StringSerializer keySerializer; private String testKey; private String testPrefix;private Set<byte[]> testSet; private SimpleSession testValue; private Collection<FakeSession> testValues; private FakeSession tomSession; private FakeSession paulSession; private FakeSession billySession; private ObjectSerializer valueSeralizer; @Before public void setUp() throws SerializationException { keySerializer = new StringSerializer(); valueSeralizer = new ObjectSerializer(); testKey = "testKey"; testPrefix = "testPrefix:"; testValue = new SimpleSession(); testValue.setId(3); testSet = new HashSet<byte[]>(); testSet.add(keySerializer.serialize(testPrefix + "tom")); testSet.add(keySerializer.serialize(testPrefix + "paul")); testSet.add(keySerializer.serialize(testPrefix + "billy")); testValues = new ArrayList<FakeSession>(); tomSession = new FakeSession(1, "tom"); testValues.add(tomSession); paulSession = new FakeSession(2, "paul"); testValues.add(paulSession); billySession = new FakeSession(3, "billy"); testValues.add(billySession); redisManager = mock(RedisSingletonManager.class); when(redisManager.dbSize()).thenReturn(2L); when(redisManager.get(keySerializer.serialize(testPrefix + testKey))).thenReturn(valueSeralizer.serialize(testValue)); when(redisManager.keys(keySerializer.serialize(testPrefix + "*"))).thenReturn(testSet); when(redisManager.get(keySerializer.serialize(testPrefix + "tom"))).thenReturn(valueSeralizer.serialize(tomSession)); when(redisManager.get(keySerializer.serialize(testPrefix + "paul"))).thenReturn(valueSeralizer.serialize(paulSession)); when(redisManager.get(keySerializer.serialize(testPrefix + "billy"))).thenReturn(valueSeralizer.serialize(billySession)); redisSessionDAO = new RedisSessionDAO(); redisSessionDAO.setRedisManager(redisManager); redisSessionDAO.setKeyPrefix(testPrefix); } @Test public void testUpdate() { redisSessionDAO.update(testValue); try { redisSessionDAO.update(null); fail(); } catch (UnknownSessionException e) { assertThat(e.getMessage(), is("session or session id is null")); } } @Test public void testDelete() { redisSessionDAO.delete(null); redisSessionDAO.delete(testValue); } @Test public void testDoCreate() { redisSessionDAO.doCreate(testValue); try { redisSessionDAO.doCreate(null); fail(); } catch (UnknownSessionException e) { assertThat(e.getMessage(), is("session is null")); } } @Test public void testDoReadSession() { Session actualSession = redisSessionDAO.doReadSession(testKey); assertThat(actualSession.getId().toString(), is("3")); redisSessionDAO.doReadSession(null); } @Test public void testGetActiveSessions() { Collection<Session> activeSessions = redisSessionDAO.getActiveSessions(); assertThat(activeSessions.size(), is(3)); for (Iterator<Session> iterator = activeSessions.iterator(); iterator.hasNext(); ) { FakeSession next = (FakeSession)iterator.next(); if (next.getId() == 2) { assertThat(next.getName(), is("paul")); } } } }
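RedisSessionDAOTest above builds every Redis key as keyPrefix + sessionId, serializes it to bytes, and stubs the manager so each serialized key maps to a canned session. That pattern is easier to see reduced to a toy interface. The sketch below assumes a hypothetical KeyValueStore interface, key() helper, and KeyPrefixStubbingTest class (none of which exist in shiro-redis) purely to illustrate the Mockito stubbing and key-prefix convention exercised in setUp().

// Illustrative sketch of the stubbing pattern used in setUp() above, reduced to a
// made-up KeyValueStore so it does not depend on shiro-redis internals.
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.nio.charset.StandardCharsets;
import org.junit.Test;

public class KeyPrefixStubbingTest {

    /** Hypothetical stand-in for the Redis manager: byte[]-keyed lookups. */
    interface KeyValueStore {
        byte[] get(byte[] key);
    }

    /** Mirrors the "prefix + id" key convention exercised by RedisSessionDAOTest. */
    private static byte[] key(String prefix, String id) {
        return (prefix + id).getBytes(StandardCharsets.UTF_8);
    }

    @Test
    public void stubbedLookupReturnsCannedValue() {
        KeyValueStore store = mock(KeyValueStore.class);
        byte[] tomKey = key("testPrefix:", "tom");
        // One canned value per prefixed key, exactly as the real test stubs redisManager.get(...).
        when(store.get(tomKey)).thenReturn("tom-session".getBytes(StandardCharsets.UTF_8));

        assertThat(new String(store.get(tomKey), StandardCharsets.UTF_8), is("tom-session"));
    }
}

The real test scales the same idea up: stub one return value per prefixed key (tom, paul, billy), stub keys(prefix + "*") with the full key set, and then assert that the DAO resolves sessions through those keys.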
package org.sbolstandard.core2; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.URI; import java.util.List; import java.util.Set; import javax.xml.namespace.QName; /** * This is a helper class that avoids the need for the user to explicitly create an SBOLDocument instance, * and allows the user to directly create and manipulate top-level instances. * * @author Zhen Zhang * @author Nicholas Roehner * @author Chris Myers * @version 2.1 */ public final class SBOLFactory { private static SBOLDocument document = new SBOLDocument(); /** * This sets the internal SBOLDocument used by the factory. * @param sbolDocument The document used internally by the factory */ public static void setSBOLDocument(SBOLDocument sbolDocument) { document = sbolDocument; } /** * This clears the internal SBOLDocument used by the factory. */ public static void clear() { document = new SBOLDocument(); } /** * Creates a ModuleDefinition instance with this SBOLDocument object's {@code defaultURIprefix}, * the given arguments, and an empty version string, and then * adds it to this SBOLDocument object's list of ModuleDefinition instances. * <p> * This method calls {@link #createModuleDefinition(String, String, String)} to do the following * validity checks and create a ModuleDefinition instance. * <p> * If the {@code defaultURIprefix} does not end with one of the following delimiters: "/", ":", or "#", * then "/" is appended to the end of it. * <p> * This method requires the {@code defaultURIprefix} field to be set, and * the given {@code displayId} is not {@code null} and is valid. * <p> * A ModuleDefinition instance is created with a compliant URI. This URI is composed from * the this SBOLDocument object's {@code defaultURIprefix}, the optional type {@link TopLevel#MODULE_DEFINITION}, * the given {@code displayId}, and an empty version string. * The display ID, persistent identity, and version fields of this instance * are then set accordingly. * * @param displayId an intermediate between name and identity that is machine-readable * @return the created ModuleDefinition instance * @throws SBOLValidationException see {@link SBOLValidationException} */ public static ModuleDefinition createModuleDefinition(String displayId) throws SBOLValidationException { return document.createModuleDefinition(displayId); } /** * Creates a ModuleDefinition instance with this SBOLDocument object's {@code defaultURIprefix} * and the given arguments, and then adds it to this SBOLDocument object's list of ModuleDefinition instances. * <p> * This method calls {@link #createModuleDefinition(String, String, String)} to do the following * validity checks and create a ModuleDefinition instance. * <p> * If the {@code defaultURIprefix} does not end with one of the following delimiters: "/", ":", or "#", then * "/" is appended to the end of it. * <p> * This method requires the {@code defaultURIprefix} field to be set, and * the given {@code displayId} and {@code version} arguments are not {@code null} * and are both valid. * <p> * A ModuleDefinition instance is created with a compliant URI. This URI is composed from * the this SBOLDocument object's {@code defaultURIprefix}, the optional type {@link TopLevel#MODULE_DEFINITION}, * the given {@code displayId}, and {@code version}. * The display ID, persistent identity, and version fields of this instance * are then set accordingly. 
* * @param displayId an intermediate between name and identity that is machine-readable * @param version The given version for this object * @return the created ModuleDefinition instance * @throws SBOLValidationException see {@link SBOLValidationException} */ public static ModuleDefinition createModuleDefinition(String displayId, String version) throws SBOLValidationException { return document.createModuleDefinition(displayId,version); } /** * Creates a ModuleDefinition instance with the given arguments, and then adds it to this SBOLDocument * object's list of ModuleDefinition instances. * <p> * If the given {@code URIprefix} does not end with one of the following delimiters: "/", ":", or "#", then * "/" is appended to the end of it. * <p> * This method requires that the given {@code URIprefix}, {@code displayId}, * and {@code version} are not {@code null} and are valid. * <p> * A ModuleDefinition instance is created with a compliant URI. This URI is composed from * the given {@code URIprefix}, the optional type {@link TopLevel#MODULE_DEFINITION}, * the given {@code displayId}, and {@code version}. * The display ID, persistent identity, and version fields of this instance * are then set accordingly. * * @param URIprefix maps to a domain over which the user has control * @param displayId an intermediate between name and identity that is machine-readable * @param version The given version for this object * @return the created ModuleDefinition instance * @throws SBOLValidationException see {@link SBOLValidationException} */ public static ModuleDefinition createModuleDefinition(String URIprefix,String displayId, String version) throws SBOLValidationException { return document.createModuleDefinition(URIprefix, displayId, version); } /** * Removes the given {@code moduleDefinition} from this SBOLDocument object's list of ModuleDefinition instances. * * @param moduleDefinition the ModuleDefinition to be removed * @return {@code true} if the given {@code moduleDefinition} is successfully removed, {@code false} otherwise. * @throws SBOLValidationException see {@link SBOLValidationException} */ public static boolean removeModuleDefinition(ModuleDefinition moduleDefinition) throws SBOLValidationException { return document.removeModuleDefinition(moduleDefinition); } /** * Returns the ModuleDefinition instance matching the given {@code displayId} * and {@code version} from this SBOLDocument object's list of * ModuleDefinition instances. * <p> * A compliant ModuleDefinition URI is created first using the {@code defaultURIprefix}, * the optional type {@link TopLevel#MODULE_DEFINITION}, the given {@code displayId} * and {@code version}. This URI is used to look up the ModuleDefinition instance * in this SBOLDocument object. * * @param displayId an intermediate between name and identity that is machine-readable * @param version The given version for this object * @return the matching ModuleDefinition instance if present, or {@code null} otherwise. */ public static ModuleDefinition getModuleDefinition(String displayId,String version) { return document.getModuleDefinition(displayId, version); } /** * Returns the ModuleDefinition instance matching the given {@code modelURI} from this * SBOLDocument object's list of ModuleDefinition instances. * * @param moduleURI the given module URI from this document * @return the matching ModuleDefinition instance if present, or {@code null} otherwise. 
*/ public static ModuleDefinition getModuleDefinition(URI moduleURI) { return document.getModuleDefinition(moduleURI); } /** * Returns the set of {@code ModuleDefinition} instances owned by this SBOLDocument object. * * @return the set of {@code ModuleDefinition} instances owned by this SBOLDocument object. */ public static Set<ModuleDefinition> getModuleDefinitions() { return document.getModuleDefinitions(); } /** * Removes all entries in the list of ModuleDefinition instances * owned by this SBOLDocument object. The list will be empty after this call returns. * @throws SBOLValidationException see {@link SBOLValidationException} */ public static void clearModuleDefinitions() throws SBOLValidationException { document.clearModuleDefinitions(); } /** * Creates a Collection instance with this SBOLDocument object's {@code defaultURIprefix}, * the given {@code displayId} argument, and an empty version string, and then * adds it to this SBOLDocument object's list of Collection instances. * <p> * This method calls {@link #createCollection(String, String, String)} to do the following * validity checks and create a Collection instance. * <p> * If the {@code defaultURIprefix} does not end with one of the following delimiters: "/", ":", or "#", then * "/" is appended to the end of it. * <p> * This method requires the {@code defaultURIprefix} field to be set, and * the given {@code displayId} argument is not {@code null} and is valid. * <p> * A Collection instance is created with a compliant URI. This URI is composed from * the this SBOLDocument object's {@code defaultURIprefix}, the optional type {@link TopLevel#COLLECTION}, * the given {@code displayId}, and an empty version string. * The display ID, persistent identity, and version fields of this instance * are then set accordingly. * * @param displayId an intermediate between name and identity that is machine-readable * @return the created Collection instance * @throws SBOLValidationException see {@link SBOLValidationException} */ public static Collection createCollection(String displayId) throws SBOLValidationException { return document.createCollection(displayId); } /** * Creates a Collection instance with this SBOLDocument object's {@code defaultURIprefix} and * the given arguments, and then adds it to this SBOLDocument object's list of Collection instances. * <p> * This method calls {@link #createCollection(String, String, String)} to do the following * validity checks and create a Collection instance. * <p> * If the given {@code URIprefix} does not end with one of the following delimiters: "/", ":", or "#", then * "/" is appended to the end of it. * <p> * This method requires the {@code defaultURIprefix} field to be set, and * the given {@code URIprefix}, {@code displayId}, and {@code version} are not * {@code null} and valid. * <p> * A Collection instance is created with a compliant URI. This URI is composed from * the this SBOLDocument object's {@code defaultURIprefix}, the optional type {@link TopLevel#COLLECTION}, * the given {@code displayId}, and {@code version}. * The display ID, persistent identity, and version fields of this instance * are then set accordingly. 
* * @param displayId an intermediate between name and identity that is machine-readable * @param version The given version for this object * @return the created Collection instance * @throws SBOLValidationException see {@link SBOLValidationException} */ public static Collection createCollection(String displayId, String version) throws SBOLValidationException { return document.createCollection(displayId, version); } /** * Creates a Collection instance with the given arguments, and then adds it to this SBOLDocument * object's list of Collection instances. * <p> * If the given {@code URIprefix} does not end with one of the following delimiters: "/", ":", or "#", then * "/" is appended to the end of it. * <p> * This method requires the {@code defaultURIprefix} field to be set, and * the given {@code URIprefix}, {@code displayId}, and {@code version} are not * {@code null} and valid. * <p> * A Collection instance is created with a compliant URI. This URI is composed from * the given {@code URIprefix}, the optional type {@link TopLevel#COLLECTION}, * the given {@code displayId}, and {@code version}. * The display ID, persistent identity, and version fields of this instance * are then set accordingly. * * @param URIprefix maps to a domain over which the user has control * @param displayId an intermediate between name and identity that is machine-readable * @param version The given version for this object * @return the created Collection instance * @throws SBOLValidationException see {@link SBOLValidationException} */ public static Collection createCollection(String URIprefix, String displayId, String version) throws SBOLValidationException { return document.createCollection(URIprefix, displayId, version); } /** * Removes the given {@code collection} from this SBOLDocument object's list of Collection instances. * * @param collection the collection object to be removed * @return {@code true} if the given {@code collection} is successfully removed, {@code false} otherwise. * @throws SBOLValidationException see {@link SBOLValidationException} */ public static boolean removeCollection(Collection collection) throws SBOLValidationException { return document.removeCollection(collection); } /** * Returns the Collection instance matching the given {@code displayId} * and {@code version} from this SBOLDocument object's list of * Collection instances. * <p> * A compliant Collection URI is created first using the {@code defaultURIprefix}, * the optional type {@link TopLevel#COLLECTION}, the given {@code displayId} * and {@code version}. This URI is used to look up the Collection instance * in this SBOLDocument object. * * @param displayId an intermediate between name and identity that is machine-readable * @param version The given version for this object * @return the matching Collection instance if present, or {@code null} otherwise. */ public static Collection getCollection(String displayId,String version) { return document.getCollection(displayId, version); } /** * Returns the Collection instance matching the given {@code collectionURI} from this * SBOLDocument object's list of Collection instances. * * @param collectionURI the given collectionURI from this document * @return the matching Collection instance if present, or {@code null} otherwise. * */ public static Collection getCollection(URI collectionURI) { return document.getCollection(collectionURI); } /** * Returns the set of {@code Collection} instances owned by this SBOLDocument object. 
* * @return the set of {@code Collection} instances owned by this SBOLDocument object. */ public static Set<Collection> getCollections() { return document.getCollections(); } /** * Removes all entries in the list of Collection instances * owned by this SBOLDocument object. The list will be empty after this call returns. * @throws SBOLValidationException see {@link SBOLValidationException} */ public static void clearCollections() throws SBOLValidationException { document.clearCollections(); } /** * Creates a Model instance with this SBOLDocument object's {@code defaultURIprefix}, * the given arguments, and an empty version string, and then * adds it to this SBOLDocument object's list of Model instances. * <p> * This method calls {@link #createModel(String, String, String, URI, URI, URI)} to do the following * validity checks and create a Model instance. * <p> * If the {@code defaultURIprefix} does not end with one of the following delimiters: "/", ":", or "#", * then "/" is appended to the end of it. * <p> * This method requires the {@code defaultURIprefix} field to be set, and * the given {@code displayId} is not {@code null} and is valid. * <p> * A Model instance is created with a compliant URI. This URI is composed from * the this SBOLDocument object's {@code defaultURIprefix}, the optional type {@link TopLevel#MODEL}, * the given {@code displayId}, and an empty version string. * The display ID, persistent identity, and version fields of this instance * are then set accordingly. * * @param displayId an intermediate between name and identity that is machine-readable * @param source location of the actual content of the model * @param language the language in which the model is implemented * @param framework the framework in which the model is implemented * @return the created Model instance * @throws SBOLValidationException see {@link SBOLValidationException} */ public static Model createModel(String displayId, URI source, URI language, URI framework) throws SBOLValidationException { return document.createModel(displayId,"",source,language,framework); } /** * Creates a Model instance with this SBOLDocument object's {@code defaultURIprefix} * and the given arguments, and then adds it to this SBOLDocument object's list of Model instances. * <p> * This method calls {@link #createModel(String, String, String, URI, URI, URI)} to do the following * validity checks and create a Model instance. * <p> * If the {@code defaultURIprefix} does not end with one of the following delimiters: "/", ":", or "#", then * "/" is appended to the end of it. * <p> * This method requires the {@code defaultURIprefix} field to be set, and * the given {@code displayId} and {@code version} arguments are not {@code null} * and are both valid. * <p> * A Model instance is created with a compliant URI. This URI is composed from * the this SBOLDocument object's {@code defaultURIprefix}, the optional type {@link TopLevel#MODEL}, * the given {@code displayId}, and {@code version}. * The display ID, persistent identity, and version fields of this instance * are then set accordingly. 
* * @param displayId an intermediate between name and identity that is machine-readable * @param version The given version for this object * @param source location of the actual content of the model * @param language the language in which the model is implemented * @param framework the framework in which the model is implemented * @return the created Model instance * @throws SBOLValidationException see {@link SBOLValidationException} */ public static Model createModel(String displayId, String version, URI source, URI language, URI framework) throws SBOLValidationException { return document.createModel(displayId,version,source,language,framework); } /** * Creates a Model instance with the given arguments, and then adds it to this SBOLDocument * object's list of Model instances. * <p> * If the given {@code URIprefix} does not end with one of the following delimiters: "/", ":", or "#", then * "/" is appended to the end of it. * <p> * This method requires that the given {@code URIprefix}, {@code displayId}, * and {@code version} are not {@code null} and are valid. * <p> * A Model instance is created with a compliant URI. This URI is composed from * the given {@code URIprefix}, the optional type {@link TopLevel#MODEL}, * the given {@code displayId}, and {@code version}. * The display ID, persistent identity, and version fields of this instance * are then set accordingly. * * @param URIprefix maps to a domain over which the user has control * @param displayId an intermediate between name and identity that is machine-readable * @param version The given version for this object * @param source location of the actual content of the model * @param language the language in which the model is implemented * @param framework the framework in which the model is implemented * @return the created Model instance * @throws SBOLValidationException see {@link SBOLValidationException} */ public static Model createModel(String URIprefix, String displayId, String version, URI source, URI language, URI framework) throws SBOLValidationException { return document.createModel(URIprefix,displayId,version,source,language,framework); } /** * Removes the given {@code model} from this SBOLDocument object's list of Model instances. * * @param model The model to be removed from the SBOLDocument object * @return {@code true} if the given {@code model} is successfully removed, {@code false} otherwise. * @throws SBOLValidationException see {@link SBOLValidationException} */ public static boolean removeModel(Model model) throws SBOLValidationException { return document.removeModel(model); } /** * Returns the Model instance matching the given {@code displayId} * and {@code version} from this SBOLDocument object's list of * Model instances. * <p> * A compliant Model URI is created first using the {@code defaultURIprefix}, * the optional type {@link TopLevel#MODEL}, the given {@code displayId} * and {@code version}. This URI is used to look up the Model instance * in this SBOLDocument object. * * @param displayId an intermediate between name and identity that is machine-readable * @param version The given version for this object * @return the matching Model instance if present, or {@code null} otherwise. */ public static Model getModel(String displayId,String version) { return document.getModel(displayId,version); } /** * Returns the Model instance matching the given {@code modelURI} from this * SBOLDocument object's list of Model instances. 
* * @param modelURI the modelURI * @return the matching Model instance if present, or {@code null} otherwise. */ public static Model getModel(URI modelURI) { return document.getModel(modelURI); } /** * Returns the set of {@code Model} instances owned by this SBOLDocument object. * * @return the set of {@code Model} instances owned by this SBOLDocument object. */ public static Set<Model> getModels() { return document.getModels(); } /** * Removes all entries in the list of Model instances * owned by this SBOLDocument object. The list will be empty after this call returns. * @throws SBOLValidationException see {@link SBOLValidationException} */ public static void clearModels() throws SBOLValidationException { document.clearModels(); } /** * Creates a ComponentDefinition instance with this SBOLDocument object's {@code defaultURIprefix}, * the given arguments, and empty version string, and then * adds it to this SBOLDocument object's list of ComponentDefinition instances. * <p> * This method calls {@link #createComponentDefinition(String, String, String, Set)} to do the following * validity checks and create a ComponentDefinition instance. * <p> * If the {@code defaultURIprefix} does not end with one of the following delimiters: "/", ":", or "#", * then "/" is appended to the end of it. * <p> * This method requires the {@code defaultURIprefix} field to be set, and * the given {@code displayId} is not {@code null} and is valid. * <p> * A ComponentDefinition instance is created with a compliant URI. This URI is composed from * the this SBOLDocument object's {@code defaultURIprefix}, the optional type {@link TopLevel#COMPONENT_DEFINITION}, * the given {@code displayId}, and an empty version string. * The display ID, persistent identity, and version fields of this instance * are then set accordingly. * * @param displayId an intermediate between name and identity that is machine-readable * @param types specifies the category of biochemical or physical entity using appropriate ontologies * @return the created ComponentDefinition instance * @throws SBOLValidationException see {@link SBOLValidationException} */ public static ComponentDefinition createComponentDefinition(String displayId, Set<URI> types) throws SBOLValidationException { return document.createComponentDefinition(displayId, types); } /** * Creates a ComponentDefinition instance with this SBOLDocument object's {@code defaultURIprefix}, * the given arguments, and empty version string, and then * adds it to this SBOLDocument object's list of ComponentDefinition instances. * <p> * This method calls {@link #createComponentDefinition(String, String, String, URI)} to do the following * validity checks and create a ComponentDefinition instance. * <p> * If the {@code defaultURIprefix} does not end with one of the following delimiters: "/", ":", or "#", * then "/" is appended to the end of it. * <p> * This method requires the {@code defaultURIprefix} field to be set, and * the given {@code displayId} is not {@code null} and is valid. * <p> * A ComponentDefinition instance is created with a compliant URI. This URI is composed from * the this SBOLDocument object's {@code defaultURIprefix}, the optional type {@link TopLevel#COMPONENT_DEFINITION}, * the given {@code displayId}, and an empty version string. * The display ID, persistent identity, and version fields of this instance * are then set accordingly. 
* * @param displayId an intermediate between name and identity that is machine-readable * @param type specifies the category of biochemical or physical entity using appropriate ontologies * @return the created ComponentDefinition instance * @throws SBOLValidationException see {@link SBOLValidationException} */ public static ComponentDefinition createComponentDefinition(String displayId, URI type) throws SBOLValidationException { return document.createComponentDefinition(displayId, type); } /** * Creates a ComponentDefinition instance with this SBOLDocument object's {@code defaultURIprefix} * and the given arguments, and then adds it to this SBOLDocument object's list of ComponentDefinition instances. * <p> * This method calls {@link #createComponentDefinition(String, String, String, Set)} to do the following * validity checks and create a ComponentDefinition instance. * <p> * If the {@code defaultURIprefix} does not end with one of the following delimiters: "/", ":", or "#", then * "/" is appended to the end of it. * <p> * This method requires the {@code defaultURIprefix} field to be set, and * the given {@code displayId} and {@code version} arguments are not {@code null} * and are both valid. * <p> * A ComponentDefinition instance is created with a compliant URI. This URI is composed from * the this SBOLDocument object's {@code defaultURIprefix}, the optional type {@link TopLevel#COMPONENT_DEFINITION}, * the given {@code displayId}, and {@code version}. * The display ID, persistent identity, and version fields of this instance * are then set accordingly. * * @param displayId an intermediate between name and identity that is machine-readable * @param version The given version for this object * @param types specifies the category of biochemical or physical entity using appropriate ontologies * @return the created ComponentDefinition instance * @throws SBOLValidationException see {@link SBOLValidationException} */ public static ComponentDefinition createComponentDefinition(String displayId, String version, Set<URI> types) throws SBOLValidationException { return document.createComponentDefinition(displayId, version, types); } /** * Creates a ComponentDefinition instance with this SBOLDocument object's {@code defaultURIprefix} * and the given arguments, and then adds it to this SBOLDocument object's list of ComponentDefinition instances. * <p> * This method calls {@link #createComponentDefinition(String, String, String, URI)} to do the following * validity checks and create a ComponentDefinition instance. * <p> * If the {@code defaultURIprefix} does not end with one of the following delimiters: "/", ":", or "#", then * "/" is appended to the end of it. * <p> * This method requires the {@code defaultURIprefix} field to be set, and * the given {@code displayId} and {@code version} arguments are not {@code null} * and are both valid. * <p> * A ComponentDefinition instance is created with a compliant URI. This URI is composed from * the this SBOLDocument object's {@code defaultURIprefix}, the optional type {@link TopLevel#COMPONENT_DEFINITION}, * the given {@code displayId}, and {@code version}. * The display ID, persistent identity, and version fields of this instance * are then set accordingly. 
* * @param displayId an intermediate between name and identity that is machine-readable * @param version The given version for this object * @param type specifies the category of biochemical or physical entity using appropriate ontologies * @return the created ComponentDefinition instance * @throws SBOLValidationException see {@link SBOLValidationException} */ public static ComponentDefinition createComponentDefinition(String displayId, String version, URI type) throws SBOLValidationException { return document.createComponentDefinition(displayId, version, type); } /** * Creates a ComponentDefinition instance with the given arguments, and then adds it to this SBOLDocument * object's list of ComponentDefinition instances. * <p> * If the given {@code URIprefix} does not end with one of the following delimiters: "/", ":", or "#", then * "/" is appended to the end of it. * <p> * This method requires that the given {@code URIprefix}, {@code displayId}, * and {@code version} are not {@code null} and are valid. * <p> * A ComponentDefinition instance is created with a compliant URI. This URI is composed from * the given {@code URIprefix}, the optional type {@link TopLevel#COMPONENT_DEFINITION}, * the given {@code displayId}, and {@code version}. * The display ID, persistent identity, and version fields of this instance * are then set accordingly. * * @param URIprefix maps to a domain over which the user has control * @param displayId an intermediate between name and identity that is machine-readable * @param version The given version for this object * @param types specifies the category of biochemical or physical entity using appropriate ontologies * @return the created ComponentDefinition instance * @throws SBOLValidationException see {@link SBOLValidationException} */ public static ComponentDefinition createComponentDefinition(String URIprefix,String displayId, String version, Set<URI> types) throws SBOLValidationException { return document.createComponentDefinition(URIprefix, displayId, version, types); } /** * Creates a ComponentDefinition instance with the given arguments, and then adds it to this SBOLDocument * object's list of ComponentDefinition instances. * <p> * If the given {@code URIprefix} does not end with one of the following delimiters: "/", ":", or "#", then * "/" is appended to the end of it. * <p> * This method requires that the given {@code URIprefix}, {@code displayId}, * and {@code version} are not {@code null} and are valid. * <p> * A ComponentDefinition instance is created with a compliant URI. This URI is composed from * the given {@code URIprefix}, the optional type {@link TopLevel#COMPONENT_DEFINITION}, * the given {@code displayId}, and {@code version}. * The display ID, persistent identity, and version fields of this instance * are then set accordingly. 
* * @param URIprefix maps to a domain over which the user has control * @param displayId an intermediate between name and identity that is machine-readable * @param version The given version for this object * @param type specifies the category of biochemical or physical entity using appropriate ontologies * @return the created ComponentDefinition instance * @throws SBOLValidationException see {@link SBOLValidationException} */ public static ComponentDefinition createComponentDefinition(String URIprefix,String displayId, String version, URI type) throws SBOLValidationException { return document.createComponentDefinition(URIprefix, displayId, version, type); } /** * Removes the given {@code componentDefinition} from this SBOLDocument object's list of ComponentDefinition instances. * * @param componentDefinition the ComponentDefinition to be removed from this SBOLDocument object * @return {@code true} if the given {@code componentDefinition} is successfully removed, {@code false} otherwise. * @throws SBOLValidationException see {@link SBOLValidationException} */ public static boolean removeComponentDefinition(ComponentDefinition componentDefinition) throws SBOLValidationException { return document.removeComponentDefinition(componentDefinition); } /** * Returns the ComponentDefinition instance matching the given {@code displayId} * and {@code version} from this SBOLDocument object's list of * ComponentDefinition instances. * <p> * A compliant ComponentDefinition URI is created first using the {@code defaultURIprefix}, * the optional type {@link TopLevel#COMPONENT_DEFINITION}, the given {@code displayId} * and {@code version}. This URI is used to look up the ComponentDefinition instance * in this SBOLDocument object. * * @param displayId an intermediate between name and identity that is machine-readable * @param version The given version for this object * @return the matching ComponentDefinition instance if present, or {@code null} otherwise. */ public static ComponentDefinition getComponentDefinition(String displayId,String version) { return document.getComponentDefinition(displayId, version); } /** * Returns the ComponentDefinition instance matching the given {@code componentDefinitionURI} from this * SBOLDocument object's list of ComponentDefinition instances. * * @param componentDefinitionURI The ComponentDefinition URI * @return the matching ComponentDefinition instance if present, or {@code null} otherwise. */ public static ComponentDefinition getComponentDefinition(URI componentDefinitionURI) { return document.getComponentDefinition(componentDefinitionURI); } /** * Returns the set of {@code ComponentDefinition} instances owned by this SBOLDocument object. * * @return the set of {@code ComponentDefinition} instances owned by this SBOLDocument object. */ public static Set<ComponentDefinition> getComponentDefinitions() { return document.getComponentDefinitions(); } /** * Returns the set of root ComponentDefinitions. * @return the set of root ComponentDefinitions. */ public static Set<ComponentDefinition> getRootComponentDefinitions() { return document.getRootComponentDefinitions(); } /** * Removes all entries in the list of ComponentDefinition instances * owned by this SBOLDocument object. The list will be empty after this call returns. 
* @throws SBOLValidationException see {@link SBOLValidationException} */ public static void clearComponentDefinitions() throws SBOLValidationException { document.clearComponentDefinitions(); } /** * Creates a Sequence instance with this SBOLDocument object's {@code defaultURIprefix}, * the given arguments, and an empty version string, and then * adds it to this SBOLDocument object's list of Sequence instances. * <p> * This method calls {@link #createSequence(String, String, String, String, URI)} to do the following * validity checks and create a Sequence instance. * <p> * If the {@code defaultURIprefix} does not end with one of the following delimiters: "/", ":", or "#", * then "/" is appended to the end of it. * <p> * This method requires the {@code defaultURIprefix} field to be set, and * the given {@code displayId} is not {@code null} and is valid. * <p> * A Sequence instance is created with a compliant URI. This URI is composed from * the this SBOLDocument object's {@code defaultURIprefix}, the optional type {@link TopLevel#SEQUENCE}, * the given {@code displayId}, and an empty version string. * The display ID, persistent identity, and version fields of this instance * are then set accordingly. * * @param displayId an intermediate between name and identity that is machine-readable * @param elements characters that represents the constituents of a biological or chemical molecule (i.e. nucleotide bases of a molecule of DNA, the amino acid residues of a protein, or the atoms and chemical bonds of a small molecule) * @param encoding Indicate how the elements property of a Sequence must be formed and interpreted * @return the created Sequence instance * @throws SBOLValidationException see {@link SBOLValidationException} */ public static Sequence createSequence(String displayId, String elements, URI encoding) throws SBOLValidationException { return document.createSequence(displayId,elements,encoding); } /** * Creates a Sequence instance with this SBOLDocument object's {@code defaultURIprefix} * and the given arguments, and then adds it to this SBOLDocument object's list of Sequence instances. * <p> * This method calls {@link #createSequence(String, String, String, String, URI)} to do the following * validity checks and create a Sequence instance. * <p> * If the {@code defaultURIprefix} does not end with one of the following delimiters: "/", ":", or "#", then * "/" is appended to the end of it. * <p> * This method requires the {@code defaultURIprefix} field to be set, and * the given {@code displayId} and {@code version} arguments are not {@code null} * and are both valid. * <p> * A Sequence instance is created with a compliant URI. This URI is composed from * the this SBOLDocument object's {@code defaultURIprefix}, the optional type {@link TopLevel#SEQUENCE}, * the given {@code displayId}, and {@code version}. * The display ID, persistent identity, and version fields of this instance * are then set accordingly. * * * @param displayId an intermediate between name and identity that is machine-readable * @param version The given version for this object * @param elements characters that represents the constituents of a biological or chemical molecule (i.e. 
nucleotide bases of a molecule of DNA, the amino acid residues of a protein, or the atoms and chemical bonds of a small molecule) * @param encoding Indicate how the elements property of a Sequence must be formed and interpreted * @return the created Sequence instance * @throws SBOLValidationException see {@link SBOLValidationException} */ public static Sequence createSequence(String displayId, String version, String elements, URI encoding) throws SBOLValidationException { return document.createSequence(displayId,version,elements,encoding); } /** * Creates a Sequence instance with the given arguments, and then adds it to this SBOLDocument * object's list of Sequence instances. * <p> * If the given {@code URIprefix} does not end with one of the following delimiters: "/", ":", or "#", then * "/" is appended to the end of it. * <p> * This method requires that the given {@code URIprefix}, {@code displayId}, * and {@code version} are not {@code null} and are valid. * <p> * A Sequence instance is created with a compliant URI. This URI is composed from * the given {@code URIprefix}, the optional type {@link TopLevel#SEQUENCE}, * the given {@code displayId}, and {@code version}. * The display ID, persistent identity, and version fields of this instance * are then set accordingly. * * @param URIprefix maps to a domain over which the user has control * @param displayId an intermediate between name and identity that is machine-readable * @param version The given version for this object * @param elements characters that represents the constituents of a biological or chemical molecule (i.e. nucleotide bases of a molecule of DNA, the amino acid residues of a protein, or the atoms and chemical bonds of a small molecule) * @param encoding Indicate how the elements property of a Sequence must be formed and interpreted * @return the created Sequence instance * @throws SBOLValidationException see {@link SBOLValidationException} */ public static Sequence createSequence(String URIprefix, String displayId, String version, String elements, URI encoding) throws SBOLValidationException { return document.createSequence(URIprefix, displayId, version, elements, encoding); } // /** // * Create a copy of the given top-level object, i.e.{@link Collection}, {@link ComponentDefinition}, {@link Model}, {@link ModuleDefinition}, // * {@link Sequence}, or {@link TopLevel} with the given version, and add it to its corresponding top-level objects list. // * @param toplevel // * @param newURIprefix // * @return the created {@link TopLevel} object // */ // public TopLevel createCopyWithURIprefix(TopLevel toplevel, String newURIprefix) { // String olddisplayId = extractDisplayId(((Collection) toplevel).getIdentity(), 0); // String oldVersion = extractVersion(toplevel.getIdentity()); // return createCopy(toplevel, newURIprefix, olddisplayId, oldVersion); // /** // * Create a copy of the given top-level object, i.e.{@link Collection}, {@link ComponentDefinition}, {@link Model}, {@link ModuleDefinition}, // * {@link Sequence}, or {@link TopLevel} with the given version, and add it to its corresponding top-level objects list. 
// * @param toplevel // * @param newVersion // * @return {@link TopLevel} object // */ // public TopLevel createCopyWithVersion(TopLevel toplevel, String newVersion) { // String oldURIprefix = extractURIprefix(((Collection) toplevel).getIdentity()); // String olddisplayId = extractDisplayId(((Collection) toplevel).getIdentity(), 0); // return createCopy(toplevel, oldURIprefix, olddisplayId, newVersion); // /** // * Create a copy of the given top-level object, which is i.e.{@link Collection}, {@link ComponentDefinition}, {@link Model}, {@link ModuleDefinition}, // * {@link Sequence}, or {@link GenericTopLevel} with the given display ID, and add it to its corresponding top-level objects list. // * @param toplevel // * @param newDisplayId // * @return {@link TopLevel} object // */ // public TopLevel createCopyWithDisplayId(TopLevel toplevel, String newDisplayId) { // String oldURIprefix = extractURIprefix(toplevel.getIdentity()); // String oldVersion = extractVersion(toplevel.getIdentity()); // return createCopy(toplevel, oldURIprefix, // newDisplayId, oldVersion); // /** // * Create a copy of the given top-level object, which is i.e.{@link Collection}, {@link ComponentDefinition}, {@link Model}, {@link ModuleDefinition}, // * {@link Sequence}, or {@link GenericTopLevel} with the given URIprefix and display ID, and add it to its corresponding top-level objects list. // * @param toplevel // * @param newDisplayId // * @return {@link TopLevel} object // */ // public TopLevel createCopyWithPersistentId(TopLevel toplevel, String newURIprefix, String newDisplayId) { // String oldVersion = extractVersion(toplevel.getIdentity()); // return createCopy(toplevel, newURIprefix, // newDisplayId, oldVersion); // /** // * Create an object of the top-level classes, i.e.{@link Collection}, {@link ComponentDefinition}, {@link Model}, {@link ModuleDefinition}, // * {@link Sequence}, or {@link TopLevel} with a new display ID, and add it to its corresponding top-level objects list. 
// * @param toplevel // * @param newPrefix // * @return {@link TopLevel} object // */ // public TopLevel createCopyWithNewPrefix(TopLevel toplevel, String newPrefix) { // if (toplevel instanceof Collection) { // Collection newCollection = ((Collection) toplevel).copy(newPrefix); // if (addCollection(newCollection)) { // return newCollection; // } // else { // return null; // } // } // else if (toplevel instanceof ComponentDefinition) { // ComponentDefinition newComponentDefinition = ((ComponentDefinition) toplevel).copy(newPrefix); // if (addComponentDefinition(newComponentDefinition)) { // return newComponentDefinition; // } // else { // return null; // } // } // else if (toplevel instanceof Model) { // Model newModel = ((Model) toplevel).copy(newPrefix); // if (addModel(newModel)) { // return newModel; // } // else { // return null; // } // } // else if (toplevel instanceof ModuleDefinition) { // ModuleDefinition newModuleDefinition = ((ModuleDefinition) toplevel).copy(newPrefix); // if (addModuleDefinition(newModuleDefinition)) { // return newModuleDefinition; // } // else { // return null; // } // } // else if (toplevel instanceof Sequence) { // Sequence newSequence = ((Sequence) toplevel).copy(newPrefix); // if (addSequence(newSequence)) { // return newSequence; // } // else { // return null; // } // } // else if (toplevel instanceof GenericTopLevel) { // GenericTopLevel newGenericTopLevel = ((GenericTopLevel) toplevel).copy(newPrefix); // if (addGenericTopLevel(newGenericTopLevel)) { // return newGenericTopLevel; // } // else { // return null; // } // } // else { // return null; // } // } /** * Creates an identical copy of each top-level element of a document, and then adds the created top-level to the corresponding * list of top-levels in this SBOL document. * <p> * This method calls {@link #createCopy(TopLevel)} for each top-level instance. * * @param document the document to be copied from * @throws SBOLValidationException if an SBOL validation rule violation occurred in {@link #createCopy(TopLevel)}. */ public static void createCopy(SBOLDocument document) throws SBOLValidationException { for (TopLevel topLevel : document.getTopLevels()) { createCopy(topLevel); } } /** * Creates an identical copy of the given TopLevel instance. * <p> * This method calls {@link #createCopy(TopLevel, String, String, String)} to do the following * validity checks and create a copy of the top-level instance. * <p> * If the {@code defaultURIprefix} is {@code null}, then it is extracted from the given * {@code topLevel} instance. If it does not end with one of the following delimiters: "/", ":", or "#", then * "/" is appended to the end of it. * <p> * If either the given {@code displayId} or {@code version} is {@code null}, then the corresponding field * is extracted from the given {@code topLevel} instance. Both extracted fields are required * to be valid and not {@code null}. * <p> * A top-level instance with a compliant URI is created using the given arguments, * and then its display ID, persistent identity, and version fields are set. This * instance is then added to the corresponding top-level list owned by this SBOLDocument object.
* * @param topLevel The topLevel object to be copied from this SBOLDocument * @return the created top-level instance * @throws SBOLValidationException see {@link SBOLValidationException} */ public static TopLevel createCopy(TopLevel topLevel) throws SBOLValidationException { return document.createCopy(topLevel); } /** * Creates a copy of the given TopLevel instance with this SBOLDocument object's {@code defaultURIprefix}, * the given arguments, and an empty version string, and then adds it to the * corresponding top-level list owned by this SBOLDocument object. * <p> * This method calls {@link #createCopy(TopLevel, String, String, String)} to do the following * validity checks and create a copy of the top-level instance. * <p> * If the {@code defaultURIprefix} is {@code null}, then it is extracted from the given * {@code topLevel} instance. If it does not end with one of the following delimiters: "/", ":", or "#", then * "/" is appended to the end of it. * <p> * If either the given {@code displayId} or {@code version} is {@code null}, then the corresponding field * is extracted from the given {@code topLevel} instance. Both extracted fields are required * to be valid and not {@code null}. * <p> * A top-level instance with a compliant URI is created using the given arguments, * and then its display ID, persistent identity, and version fields are set. This * instance is then added to the corresponding top-level list owned by this SBOLDocument object. * * @param topLevel The topLevel object to be copied from this SBOLDocument * @param displayId an intermediate between name and identity that is machine-readable * @return the created top-level instance * @throws SBOLValidationException see {@link SBOLValidationException} */ public static TopLevel createCopy(TopLevel topLevel, String displayId) throws SBOLValidationException { return document.createCopy(topLevel,displayId); } /** * Creates a copy of the given TopLevel instance with this SBOLDocument object's {@code defaultURIprefix} * and the given arguments, and then adds it to the * corresponding top-level list owned by this SBOLDocument object. * <p> * This method calls {@link #createCopy(TopLevel, String, String, String)} to do the following * validity checks and create a copy of the top-level instance. * <p> * If the {@code defaultURIprefix} is {@code null}, then it is extracted from the given * {@code topLevel} instance. If it does not end with one of the following delimiters: "/", ":", or "#", then * "/" is appended to the end of it. * <p> * If either the given {@code displayId} or {@code version} is {@code null}, then the corresponding field * is extracted from the given {@code topLevel} instance. Both extracted fields are required * to be valid and not {@code null}. * <p> * A top-level instance with a compliant URI is created using the given arguments, * and then its display ID, persistent identity, and version fields are set. This * instance is then added to the corresponding top-level list owned by this SBOLDocument object.
* * @param topLevel The topLevel object to be copied from this SBOLDocument * @param displayId an intermediate between name and identity that is machine-readable * @param version The given version for this object * @return the created top-level instance * @throws SBOLValidationException see {@link SBOLValidationException} */ public static TopLevel createCopy(TopLevel topLevel, String displayId, String version) throws SBOLValidationException { return document.createCopy(topLevel,displayId,version); } /** * Creates a copy of the given TopLevel instance with the given arguments, and then adds it to * the corresponding top-level list owned by this SBOLDocument object. * <p> * If the given {@code URIprefix} is {@code null}, then it is extracted from the given * {@code topLevel} instance. If it does not end with one of the following delimiters: "/", ":", or "#", then * "/" is appended to the end of it. * <p> * If either the given {@code displayId} or {@code version} is {@code null}, then the corresponding field * is extracted from the given {@code topLevel} instance. Both extracted fields are required * to be valid and not {@code null}. * <p> * A top-level instance with a compliant URI is created using the given arguments, * and then its display ID, persistent identity, and version fields are set. This * instance is then added to the corresponding top-level list owned by this SBOLDocument object. * * @param topLevel The topLevel object to be copied from this SBOLDocument * @param URIprefix maps to a domain over which the user has control * @param displayId an intermediate between name and identity that is machine-readable * @param version The given version for this object * @return the created top-level instance * @throws SBOLValidationException see {@link SBOLValidationException} */ public static TopLevel createCopy(TopLevel topLevel, String URIprefix, String displayId, String version) throws SBOLValidationException { return document.createCopy(topLevel, URIprefix, displayId, version); } /** * Creates an identical copy of the given TopLevel instance and all its dependencies and returns them in * a new SBOLDocument. * * @param topLevel The topLevel object to be recursively copied from this SBOLDocument * @return the created SBOLDocument with this top-level instance and all its dependencies * @throws SBOLValidationException see {@link SBOLValidationException} */ public static SBOLDocument createRecursiveCopy(TopLevel topLevel) throws SBOLValidationException { return document.createRecursiveCopy(topLevel); } /** * Removes the given {@code sequence} from this SBOLDocument object's list of Sequence instances. * * @param sequence The given sequence to be removed * @return {@code true} if the given {@code sequence} is successfully removed, {@code false} otherwise. * @throws SBOLValidationException see {@link SBOLValidationException} */ public static boolean removeSequence(Sequence sequence) throws SBOLValidationException { return document.removeSequence(sequence); } /** * Returns the Sequence instance matching the given {@code displayId} * and {@code version} from this SBOLDocument object's list of * Sequence instances. * <p> * A compliant Sequence URI is created first using the {@code defaultURIprefix}, * the optional type {@link TopLevel#SEQUENCE}, the given {@code displayId} * and {@code version}. This URI is used to look up the Sequence instance * in this SBOLDocument object.
* * @param displayId an intermediate between name and identity that is machine-readable * @param version The given version for this object * @return the matching Sequence instance if present, or {@code null} otherwise. */ public static Sequence getSequence(String displayId,String version) { return document.getSequence(displayId, version); } /** * Returns the Sequence instance matching the given {@code sequenceURI} from this * SBOLDocument object's list of Sequence instances. * * @param sequenceURI takes the given SequenceURI to retrieve the sequence from this SBOLDocument object * @return the matching Sequence instance if present, or {@code null} otherwise. */ public static Sequence getSequence(URI sequenceURI) { return document.getSequence(sequenceURI); } /** * Returns the set of {@code Sequence} instances owned by this SBOLDocument object. * * @return the set of {@code Sequence} instances owned by this SBOLDocument object. */ public static Set<Sequence> getSequences() { return document.getSequences(); } /** * Removes all entries in the list of Sequence instances * owned by this SBOLDocument object. The list will be empty after this call returns. * @throws SBOLValidationException see {@link SBOLValidationException} */ public static void clearSequences() throws SBOLValidationException { document.clearSequences(); } /** * Creates a GenericTopLevel instance with this SBOLDocument object's {@code defaultURIprefix}, * the given arguments, and an empty version string, and then * adds it to this SBOLDocument object's list of GenericTopLevel instances. * <p> * This method calls {@link #createGenericTopLevel(String, String, String, QName)} to do the following * validity checks and create a GenericTopLevel instance. * <p> * If the {@code defaultURIprefix} does not end with one of the following delimiters: "/", ":", or "#", * then "/" is appended to the end of it. * <p> * This method requires the {@code defaultURIprefix} field to be set, and that * the given {@code displayId} is not {@code null} and is valid. * <p> * A GenericTopLevel instance is created with a compliant URI. This URI is composed from * this SBOLDocument object's {@code defaultURIprefix}, the optional type {@link TopLevel#GENERIC_TOP_LEVEL}, * the given {@code displayId}, and an empty version string. * The display ID, persistent identity, and version fields of this instance * are then set accordingly. * * @param displayId an intermediate between name and identity that is machine-readable * @param rdfType a given QName for this annotated GenericTopLevel object * @return the created GenericTopLevel instance * @throws SBOLValidationException see {@link SBOLValidationException} */ public static GenericTopLevel createGenericTopLevel(String displayId, QName rdfType) throws SBOLValidationException { return document.createGenericTopLevel(displayId, rdfType); } /** * Creates a GenericTopLevel instance with this SBOLDocument object's {@code defaultURIprefix} * and the given arguments, and then adds it to this SBOLDocument object's list of GenericTopLevel instances. * <p> * This method calls {@link #createGenericTopLevel(String, String, String, QName)} to do the following * validity checks and create a GenericTopLevel instance. * <p> * If the {@code defaultURIprefix} does not end with one of the following delimiters: "/", ":", or "#", then * "/" is appended to the end of it.
* <p> * This method requires the {@code defaultURIprefix} field to be set, and that * the given {@code displayId} and {@code version} arguments are not {@code null} * and are both valid. * <p> * A GenericTopLevel instance is created with a compliant URI. This URI is composed from * this SBOLDocument object's {@code defaultURIprefix}, the optional type {@link TopLevel#GENERIC_TOP_LEVEL}, * the given {@code displayId}, and {@code version}. * The display ID, persistent identity, and version fields of this instance * are then set accordingly. * * @param displayId an intermediate between name and identity that is machine-readable * @param version The given version for this object * @param rdfType a given QName for this annotated GenericTopLevel object * @return the created GenericTopLevel instance * @throws SBOLValidationException see {@link SBOLValidationException} */ public static GenericTopLevel createGenericTopLevel(String displayId, String version, QName rdfType) throws SBOLValidationException { return document.createGenericTopLevel(displayId,version,rdfType); } /** * Creates a GenericTopLevel instance with the given arguments, and then adds it to this SBOLDocument * object's list of GenericTopLevel instances. * <p> * If the given {@code URIprefix} does not end with one of the following delimiters: "/", ":", or "#", then * "/" is appended to the end of it. * <p> * This method requires that the given {@code URIprefix}, {@code displayId}, * and {@code version} are not {@code null} and are valid. * <p> * A GenericTopLevel instance is created with a compliant URI. This URI is composed from * the given {@code URIprefix}, the optional type {@link TopLevel#GENERIC_TOP_LEVEL}, * the given {@code displayId}, and {@code version}. * The display ID, persistent identity, and version fields of this instance * are then set accordingly. * * @param URIprefix maps to a domain over which the user has control * @param displayId an intermediate between name and identity that is machine-readable * @param version The given version for this object * @param rdfType a given QName for this annotated GenericTopLevel object * @return the created GenericTopLevel instance * @throws SBOLValidationException see {@link SBOLValidationException} */ public static GenericTopLevel createGenericTopLevel(String URIprefix, String displayId, String version, QName rdfType) throws SBOLValidationException { return document.createGenericTopLevel(URIprefix, displayId, version, rdfType); } /** * Removes the given {@code genericTopLevel} from this SBOLDocument object's list of GenericTopLevel instances. * * @param genericTopLevel the GenericTopLevel object to be removed from this document * @return {@code true} if the given {@code genericTopLevel} is successfully removed, {@code false} otherwise. * @throws SBOLValidationException see {@link SBOLValidationException} */ public static boolean removeGenericTopLevel(GenericTopLevel genericTopLevel) throws SBOLValidationException { return document.removeGenericTopLevel(genericTopLevel); } /** * Returns the GenericTopLevel instance matching the given {@code displayId} * and {@code version} from this SBOLDocument object's list of * GenericTopLevel instances. * <p> * A compliant GenericTopLevel URI is created first using the {@code defaultURIprefix}, * the optional type {@link TopLevel#GENERIC_TOP_LEVEL}, the given {@code displayId} * and {@code version}. This URI is used to look up the GenericTopLevel instance * in this SBOLDocument object.
* * @param displayId an intermediate between name and identity that is machine-readable * @param version The given version for this object * @return the matching GenericTopLevel instance if present, or {@code null} otherwise. */ public static GenericTopLevel getGenericTopLevel(String displayId, String version) { return document.getGenericTopLevel(displayId, version); } /** * Returns the GenericTopLevel instance matching the given {@code topLevelURI} from this * SBOLDocument object's list of GenericTopLevel instances. * * @param topLevelURI the URI of the GenericTopLevel object to be retrieved from this SBOLDocument * @return the matching GenericTopLevel instance if present, or {@code null} otherwise. */ public static GenericTopLevel getGenericTopLevel(URI topLevelURI) { return document.getGenericTopLevel(topLevelURI); } /** * Returns the set of {@code GenericTopLevel} instances owned by this SBOLDocument object. * * @return the set of {@code GenericTopLevel} instances owned by this SBOLDocument object. */ public static Set<GenericTopLevel> getGenericTopLevels() { return document.getGenericTopLevels(); } /** * Removes all entries in the list of GenericTopLevel instances * owned by this SBOLDocument object. The list will be empty after this call returns. * @throws SBOLValidationException see {@link SBOLValidationException} */ public static void clearGenericTopLevels() throws SBOLValidationException { document.clearGenericTopLevels(); } /** * Returns the top-level instance matching the given {@code topLevelURI} from this * SBOLDocument object's lists of top-level instances. * * @param topLevelURI the URI of the top-level object to be retrieved from this SBOLDocument * @return the matching top-level instance if present, or {@code null} otherwise. */ public static TopLevel getTopLevel(URI topLevelURI) { return document.getTopLevel(topLevelURI); } /** * Returns a set of all TopLevel objects. * * @return set of all TopLevel objects. */ public static Set<TopLevel> getTopLevels() { return document.getTopLevels(); } /** * Returns the set of TopLevel instances derived from the same object, * as specified by the wasDerivedFrom parameter. * @param wasDerivedFrom refers to another SBOL object or non-SBOL resource from which this object was derived. * @return Set of TopLevels with a matching wasDerivedFrom URI. */ public static Set<TopLevel> getByWasDerivedFrom(URI wasDerivedFrom) { return document.getByWasDerivedFrom(wasDerivedFrom); } /** * Adds a namespace URI and its prefix to an SBOL document * * @param nameSpaceURI The Namespace {@link URI} * @param prefix The prefix {@link String} */ public static void addNamespace(URI nameSpaceURI, String prefix) { document.addNamespace(nameSpaceURI, prefix); } /** * Adds a namespace {@link QName} to an SBOL document * * @param qName Qualified name ({@link QName}) for a namespace */ public static void addNamespace(QName qName) { document.addNamespace(qName); } /** * Removes all non-required namespaces from the SBOL document. */ public static void clearNamespaces() { document.clearNamespaces(); } /** * Returns the {@link QName} instance matching the given {@code namespaceURI} from this * SBOLDocument object's list of namespace QName instances. * * @param namespaceURI The Namespace {@link URI} * @return the matching instance if present, or {@code null} otherwise. */ public static QName getNamespace(URI namespaceURI) { return document.getNamespace(namespaceURI); } /** * Returns the list of namespace bindings owned by this SBOLDocument object.
* * @return the list of namespace bindings owned by this SBOLDocument object. */ public static List<QName> getNamespaces() { return document.getNamespaces(); } /** * Removes the given {@code namespaceURI} from this SBOLDocument object's list of namespace bindings. * * @param namespaceURI The Namespace {@link URI} */ public static void removeNamespace(URI namespaceURI) { document.removeNamespace(namespaceURI); } @Override public int hashCode() { return document.hashCode(); } @Override public boolean equals(Object obj) { return document.equals(obj); } /** * Sets the default URI prefix to the given {@code defaultURIprefix}. * * @param defaultURIprefix the given default URI prefix */ public static void setDefaultURIprefix(String defaultURIprefix) { document.setDefaultURIprefix(defaultURIprefix); } /** * Returns the default URI prefix of this SBOLDocument object * * @return the default URI prefix of this SBOLDocument object */ public static String getDefaultURIprefix() { return document.getDefaultURIprefix(); } /** * Returns {@code true} if the {@code complete} flag for this SBOLDocument object is set. * This flag is set to {@code true} if all objects must be present within this SBOLDocument object, * i.e. all URI references point to actual objects. * * @return {@code true} if the complete flag is set, {@code false} otherwise */ public static boolean isComplete() { return document.isComplete(); } /** * Sets the complete flag which when true indicates this SBOLDocument object is complete * and any URIs that cannot be dereferenced to a valid object cause an exception to be thrown. * * @param complete A flag indicator which when true indicates this SBOLDocument object is complete */ public static void setComplete(boolean complete) { document.setComplete(complete); } /** * Returns {@code true} if all URIs in this SBOLDocument object are compliant. * * @return {@code true} if all URIs in this SBOLDocument object are compliant, * {@code false} otherwise */ public static boolean isCompliant() { return document.isCompliant(); } /** * Returns {@code true} if types are to be inserted into top-level URIs. * * @return {@code true} if types are to be inserted into top-level URIs, {@code false} otherwise */ public static boolean isTypesInURIs() { return document.isTypesInURIs(); } /** * Sets the flag to the given {@code typesInURIs} to determine if types are to be inserted into top-level URIs. * * @param typesInURIs A flag to determine if types are to be inserted into top-level URIs */ public static void setTypesInURIs(boolean typesInURIs) { document.setTypesInURIs(typesInURIs); } /** * Returns {@code true} if default component instances should be created when not present. * * @return {@code true} if default component instances should be created when not present, * {@code false} otherwise */ public static boolean isCreateDefaults() { return document.isCreateDefaults(); } /** * Sets the flag to the given {@code createDefaults} to determine if default component instances * should be created when not present. * * @param createDefaults A flag to determine if default component instances should be created when not present. */ public static void setCreateDefaults(boolean createDefaults) { document.setCreateDefaults(createDefaults); } /** * Takes in a given RDF fileName and adds the data read to this SBOLDocument.
* * @param fileName a given RDF fileName * @throws SBOLValidationException see {@link SBOLValidationException} * @throws SBOLConversionException see {@link SBOLConversionException} * @throws IOException see {@link IOException} */ public static void read(String fileName) throws SBOLValidationException, IOException, SBOLConversionException { document.read(fileName); } /** * Takes in a given RDF File and adds the data read to this SBOLDocument. * * @param file a given RDF file * @throws SBOLValidationException see {@link SBOLValidationException} * @throws SBOLConversionException see {@link SBOLConversionException} * @throws IOException see {@link IOException} */ public static void read(File file) throws SBOLValidationException, IOException, SBOLConversionException { document.read(file); } /** * Takes in a given RDF InputStream and adds the data read to this SBOLDocument. * * @param in a given RDF InputStream * @throws SBOLValidationException see {@link SBOLValidationException} * @throws SBOLConversionException see {@link SBOLConversionException} * @throws IOException see {@link IOException} */ public static void read(InputStream in) throws SBOLValidationException, IOException, SBOLConversionException { document.read(in); } /** * Serializes SBOLDocument and outputs the data from the serialization to the given output * file name in RDF format * @param filename the given output file in RDF format * @throws IOException see {@link IOException} * @throws SBOLConversionException see {@link SBOLConversionException} */ public static void write(String filename) throws IOException, SBOLConversionException { document.write(filename); } /** * Serializes SBOLDocument and outputs the data from the serialization to the given output * file name in fileType format * @param filename the given output file name * @param fileType the file type to be written out to * @throws IOException see {@link IOException} * @throws SBOLConversionException see {@link SBOLConversionException} */ public static void write(String filename,String fileType) throws IOException, SBOLConversionException { document.write(new File(filename), fileType); } /** * Serializes SBOLDocument and outputs the data from the serialization to the given output * file in RDF format * @param file the given output file * @throws IOException see {@link IOException} * @throws SBOLConversionException see {@link SBOLConversionException} */ public static void write(File file) throws IOException, SBOLConversionException { document.write(file); } /** * Serializes SBOLDocument and outputs the data from the serialization to the given output * file in fileType format * @param file the given output file * @param fileType the file type of the given output file * @throws IOException see {@link IOException} * @throws SBOLConversionException see {@link SBOLConversionException} */ public static void write(File file,String fileType) throws IOException, SBOLConversionException { document.write(file,fileType); } /** * Serializes SBOLDocument and outputs the data from the serialization to the given output * stream in RDF format * @param out the given output stream * @throws SBOLConversionException see {@link SBOLConversionException} */ public static void write(OutputStream out) throws SBOLConversionException { document.write(out); } /** * Serializes SBOLDocument and outputs the data from the serialization to the given output * stream in fileType format * @param out the given output stream * @param fileType the file type for the given output stream * @throws 
SBOLConversionException see {@link SBOLConversionException} * @throws IOException see {@link IOException} */ public static void write(OutputStream out,String fileType) throws SBOLConversionException, IOException { document.write(out, fileType); } @Override public String toString() { return document.toString(); } }
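// Hedged usage sketch (editor's addition, not part of the original source). The static facade
// above simply delegates every call to a shared org.sbolstandard.core2.SBOLDocument field named
// "document", so the equivalent calls are shown here against an SBOLDocument instance directly.
// The package name, URI prefix, display IDs, and QName values are illustrative assumptions only.
package org.sbolstandard.core2.examples;

import javax.xml.namespace.QName;

import org.sbolstandard.core2.GenericTopLevel;
import org.sbolstandard.core2.SBOLConversionException;
import org.sbolstandard.core2.SBOLDocument;
import org.sbolstandard.core2.SBOLValidationException;
import org.sbolstandard.core2.Sequence;

public class SBOLDocumentUsageSketch {

    public static void main(String[] args) throws SBOLValidationException, SBOLConversionException {
        SBOLDocument doc = new SBOLDocument();
        doc.setDefaultURIprefix("http://example.org/"); // required before compliant URIs can be built
        doc.setComplete(true);                          // dangling URI references become errors

        // Create two top-levels; display ID and version feed the compliant-URI rules
        // described in the Javadoc above.
        Sequence seq = doc.createSequence("seq1", "1.0", "atcg", Sequence.IUPAC_DNA);
        GenericTopLevel meta = doc.createGenericTopLevel("meta1", "1.0",
                new QName("http://example.org/ns", "Datasheet", "ex"));

        // Serialize the document as RDF/XML to standard output.
        doc.write(System.out);
    }
}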
package org.jscep.message; import static org.junit.Assert.assertEquals; import java.io.IOException; import java.math.BigInteger; import java.security.GeneralSecurityException; import java.security.KeyPair; import java.security.KeyPairGenerator; import java.security.PrivateKey; import java.security.PublicKey; import java.security.cert.X509Certificate; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; import javax.security.auth.x500.X500Principal; import org.bouncycastle.asn1.ASN1OctetString; import org.bouncycastle.asn1.ASN1Set; import org.bouncycastle.asn1.DEROctetString; import org.bouncycastle.asn1.DERPrintableString; import org.bouncycastle.asn1.DERSet; import org.bouncycastle.asn1.cms.ContentInfo; import org.bouncycastle.asn1.cms.IssuerAndSerialNumber; import org.bouncycastle.asn1.cms.SignedData; import org.bouncycastle.asn1.cms.SignerInfo; import org.bouncycastle.asn1.pkcs.PKCSObjectIdentifiers; import org.bouncycastle.asn1.x500.X500Name; import org.bouncycastle.asn1.x509.SubjectPublicKeyInfo; import org.bouncycastle.cert.jcajce.JcaCertStore; import org.bouncycastle.cms.CMSAbsentContent; import org.bouncycastle.cms.CMSException; import org.bouncycastle.cms.CMSSignedData; import org.bouncycastle.cms.CMSSignedDataGenerator; import org.bouncycastle.cms.CMSTypedData; import org.bouncycastle.cms.jcajce.JcaSignerInfoGeneratorBuilder; import org.bouncycastle.operator.ContentSigner; import org.bouncycastle.operator.OperatorCreationException; import org.bouncycastle.operator.jcajce.JcaContentSignerBuilder; import org.bouncycastle.operator.jcajce.JcaDigestCalculatorProviderBuilder; import org.bouncycastle.pkcs.PKCS10CertificationRequest; import org.bouncycastle.pkcs.PKCS10CertificationRequestBuilder; import org.bouncycastle.util.Store; import org.jscep.asn1.IssuerAndSubject; import org.jscep.transaction.FailInfo; import org.jscep.transaction.Nonce; import org.jscep.transaction.TransactionId; import org.jscep.util.X509Certificates; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; import junit.framework.Assert; @RunWith(Parameterized.class) public class PkiMessageEncoderTest { @Parameters public static Collection<Object[]> getParameters() throws Exception { List<Object[]> params = new ArrayList<Object[]>(); KeyPair pair = KeyPairGenerator.getInstance("RSA").generateKeyPair(); TransactionId transId = TransactionId.createTransactionId(); Nonce recipientNonce = Nonce.nextNonce(); Nonce senderNonce = recipientNonce; X500Name issuer = new X500Name("CN=CA"); X500Name subject = new X500Name("CN=Client"); IssuerAndSubject ias = new IssuerAndSubject(issuer, subject); BigInteger serial = BigInteger.ONE; IssuerAndSerialNumber iasn = new IssuerAndSerialNumber(issuer, serial); PKCS10CertificationRequest csr = getCsr(new X500Principal("CN=Client"), pair.getPublic(), pair.getPrivate(), "password".toCharArray()); CMSSignedDataGenerator gen = new CMSSignedDataGenerator(); ContentSigner sha1Signer = new JcaContentSignerBuilder("SHA1withRSA") .build(pair.getPrivate()); X509Certificate cert = X509Certificates.createEphemeral( new X500Principal("CN=client"), pair); Store certs = new JcaCertStore(Collections.singleton(cert)); gen.addSignerInfoGenerator(new JcaSignerInfoGeneratorBuilder( new JcaDigestCalculatorProviderBuilder().build()).build( sha1Signer, cert)); gen.addCertificates(certs); CMSTypedData msg = new CMSAbsentContent(); CMSSignedData sigData = 
gen.generate(msg, false); params.add(new Object[] { new GetCert(transId, senderNonce, iasn) }); params.add(new Object[] { new GetCertInitial(transId, senderNonce, ias) }); params.add(new Object[] { new GetCrl(transId, senderNonce, iasn) }); params.add(new Object[] { new PkcsReq(transId, senderNonce, csr) }); params.add(new Object[] { new CertRep(transId, senderNonce, recipientNonce) }); params.add(new Object[] { new CertRep(transId, senderNonce, recipientNonce, sigData) }); params.add(new Object[] { new CertRep(transId, senderNonce, recipientNonce, FailInfo.badAlg) }); return params; } private final PkiMessage<?> message; public PkiMessageEncoderTest(PkiMessage<?> message) { this.message = message; } @Test public void simpleTest() throws Exception { KeyPair caPair = KeyPairGenerator.getInstance("RSA").generateKeyPair(); X509Certificate ca = X509Certificates.createEphemeral( new X500Principal("CN=CA"), caPair); KeyPair clientPair = KeyPairGenerator.getInstance("RSA") .generateKeyPair(); X509Certificate client = X509Certificates.createEphemeral( new X500Principal("CN=Client"), clientPair); // Everything below this line only available to client PkcsPkiEnvelopeEncoder envEncoder = new PkcsPkiEnvelopeEncoder(ca, "DES"); PkiMessageEncoder encoder = new PkiMessageEncoder( clientPair.getPrivate(), client, envEncoder); PkcsPkiEnvelopeDecoder envDecoder = new PkcsPkiEnvelopeDecoder(ca, caPair.getPrivate()); PkiMessageDecoder decoder = new PkiMessageDecoder(client, envDecoder); PkiMessage<?> actual = decoder.decode(encoder.encode(message)); assertEquals(message, actual); } @Test public void simpleTestTripleDES() throws Exception { KeyPair caPair = KeyPairGenerator.getInstance("RSA").generateKeyPair(); X509Certificate ca = X509Certificates.createEphemeral( new X500Principal("CN=CA"), caPair); KeyPair clientPair = KeyPairGenerator.getInstance("RSA") .generateKeyPair(); X509Certificate client = X509Certificates.createEphemeral( new X500Principal("CN=Client"), clientPair); // Everything below this line only available to client PkcsPkiEnvelopeEncoder envEncoder = new PkcsPkiEnvelopeEncoder(ca, "DESede"); PkiMessageEncoder encoder = new PkiMessageEncoder( clientPair.getPrivate(), client, envEncoder); PkcsPkiEnvelopeDecoder envDecoder = new PkcsPkiEnvelopeDecoder(ca, caPair.getPrivate()); PkiMessageDecoder decoder = new PkiMessageDecoder(client, envDecoder); PkiMessage<?> actual = decoder.decode(encoder.encode(message)); assertEquals(message, actual); } @Test public void invalidSignatureTest() throws Exception { KeyPair caPair = KeyPairGenerator.getInstance("RSA").generateKeyPair(); X509Certificate ca = X509Certificates.createEphemeral( new X500Principal("CN=CA"), caPair); KeyPair clientPair = KeyPairGenerator.getInstance("RSA") .generateKeyPair(); X509Certificate client = X509Certificates.createEphemeral( new X500Principal("CN=Client"), clientPair); // Everything below this line only available to client PkcsPkiEnvelopeEncoder envEncoder = new PkcsPkiEnvelopeEncoder(ca, "DES"); PkiMessageEncoder encoder = new PkiMessageEncoder( clientPair.getPrivate(), client, envEncoder); PkcsPkiEnvelopeDecoder envDecoder = new PkcsPkiEnvelopeDecoder(ca, caPair.getPrivate()); PkiMessageDecoder decoder = new PkiMessageDecoder(client, envDecoder); CMSSignedData encodedMessage = encoder.encode(message); // modify the signature CMSSignedData encodedMessage2 = modifySignature(encodedMessage); try { decoder.decode(encodedMessage2); Assert.fail("decoding exception expected"); } catch (MessageDecodingException e) {
assertEquals("decoding exception", "pkiMessage verification failed.", e.getMessage()); } } private static PKCS10CertificationRequest getCsr(X500Principal subject, PublicKey pubKey, PrivateKey priKey, char[] password) throws GeneralSecurityException, IOException { DERPrintableString cpSet = new DERPrintableString(new String(password)); SubjectPublicKeyInfo pkInfo = SubjectPublicKeyInfo.getInstance(pubKey .getEncoded()); JcaContentSignerBuilder signerBuilder = new JcaContentSignerBuilder( "SHA1withRSA"); ContentSigner signer; try { signer = signerBuilder.build(priKey); } catch (OperatorCreationException e) { IOException ioe = new IOException(); ioe.initCause(e); throw ioe; } PKCS10CertificationRequestBuilder builder = new PKCS10CertificationRequestBuilder( X500Name.getInstance(subject.getEncoded()), pkInfo); builder.addAttribute(PKCSObjectIdentifiers.pkcs_9_at_challengePassword, cpSet); return builder.build(signer); } private static CMSSignedData modifySignature(CMSSignedData sd) throws CMSException { ContentInfo ci = sd.toASN1Structure(); SignedData content = (SignedData) ci.getContent(); SignerInfo si = (SignerInfo) content.getSignerInfos().getObjectAt(0); byte[] signature = si.getEncryptedDigest().getOctets(); int index = signature.length - 10; signature[index] = (byte) (signature[index] + 1); ASN1OctetString signature2 = new DEROctetString(signature); SignerInfo si2 = new SignerInfo( si.getSID(), si.getDigestAlgorithm(), si.getAuthenticatedAttributes(), si.getDigestEncryptionAlgorithm(), signature2, si.getUnauthenticatedAttributes()); ASN1Set signerInfos2 = new DERSet(si2); SignedData content2 = new SignedData( content.getDigestAlgorithms(), content.getEncapContentInfo(), content.getCertificates(), content.getCRLs(), signerInfos2); ContentInfo ci2 = new ContentInfo(ci.getContentType(), content2); return new CMSSignedData(ci2); } }
package org.junit.tests.assertion; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotSame; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.junit.Assert.expectThrows; import static org.junit.Assert.fail; import java.io.IOException; import java.math.BigDecimal; import java.util.Comparator; import org.junit.Assert; import org.junit.Assert.ThrowingRunnable; import org.junit.ComparisonFailure; import org.junit.Test; import org.junit.internal.ArrayComparisonFailure; /** * Tests for {@link org.junit.Assert} */ public class AssertionTest { // If you want to use 1.4 assertions, they will be reported correctly. // However, you need to add the -ea VM argument when running. // @Test (expected=AssertionError.class) public void error() { // assert false; @Test(expected = AssertionError.class) public void fails() { Assert.fail(); } @Test public void failWithNoMessageToString() { try { Assert.fail(); } catch (AssertionError exception) { assertEquals("java.lang.AssertionError", exception.toString()); } } @Test public void failWithMessageToString() { try { Assert.fail("woops!"); } catch (AssertionError exception) { assertEquals("java.lang.AssertionError: woops!", exception.toString()); } } @Test(expected = AssertionError.class) public void arraysNotEqual() { assertArrayEquals((new Object[]{new Object()}), (new Object[]{new Object()})); } @Test(expected = AssertionError.class) public void arraysNotEqualWithMessage() { assertArrayEquals("not equal", (new Object[]{new Object()}), (new Object[]{new Object()})); } @Test public void arraysExpectedNullMessage() { try { assertArrayEquals("not equal", null, (new Object[]{new Object()})); } catch (AssertionError exception) { assertEquals("not equal: expected array was null", exception.getMessage()); } } @Test public void arraysActualNullMessage() { try { assertArrayEquals("not equal", (new Object[]{new Object()}), null); } catch (AssertionError exception) { assertEquals("not equal: actual array was null", exception.getMessage()); } } @Test public void arraysDifferentLengthMessage() { try { assertArrayEquals("not equal", (new Object[0]), (new Object[1])); } catch (AssertionError exception) { assertEquals("not equal: array lengths differed, expected.length=0 actual.length=1", exception.getMessage()); } } @Test(expected = ArrayComparisonFailure.class) public void arraysElementsDiffer() { assertArrayEquals("not equal", (new Object[]{"this is a very long string in the middle of an array"}), (new Object[]{"this is another very long string in the middle of an array"})); } @Test public void arraysDifferAtElement0nullMessage() { try { assertArrayEquals((new Object[]{true}), (new Object[]{false})); } catch (AssertionError exception) { assertEquals("arrays first differed at element [0]; expected:<true> but was:<false>", exception .getMessage()); } } @Test public void arraysDifferAtElement1nullMessage() { try { assertArrayEquals((new Object[]{true, true}), (new Object[]{true, false})); } catch (AssertionError exception) { assertEquals("arrays first differed at element [1]; expected:<true> but was:<false>", exception .getMessage()); } } @Test public void 
arraysDifferAtElement0withMessage() { try { assertArrayEquals("message", (new Object[]{true}), (new Object[]{false})); } catch (AssertionError exception) { assertEquals("message: arrays first differed at element [0]; expected:<true> but was:<false>", exception .getMessage()); } } @Test public void arraysDifferAtElement1withMessage() { try { assertArrayEquals("message", (new Object[]{true, true}), (new Object[]{true, false})); fail(); } catch (AssertionError exception) { assertEquals("message: arrays first differed at element [1]; expected:<true> but was:<false>", exception.getMessage()); } } @Test public void multiDimensionalArraysAreEqual() { assertArrayEquals((new Object[][]{{true, true}, {false, false}}), (new Object[][]{{true, true}, {false, false}})); } @Test public void multiDimensionalIntArraysAreEqual() { int[][] int1 = {{1, 2, 3}, {4, 5, 6}}; int[][] int2 = {{1, 2, 3}, {4, 5, 6}}; assertArrayEquals(int1, int2); } @Test public void oneDimensionalPrimitiveArraysAreEqual() { assertArrayEquals(new boolean[]{true}, new boolean[]{true}); assertArrayEquals(new byte[]{1}, new byte[]{1}); assertArrayEquals(new char[]{1}, new char[]{1}); assertArrayEquals(new short[]{1}, new short[]{1}); assertArrayEquals(new int[]{1}, new int[]{1}); assertArrayEquals(new long[]{1}, new long[]{1}); assertArrayEquals(new double[]{1.0}, new double[]{1.0}, 1.0); assertArrayEquals(new float[]{1.0f}, new float[]{1.0f}, 1.0f); } @Test(expected = AssertionError.class) public void oneDimensionalDoubleArraysAreNotEqual() { assertArrayEquals(new double[]{1.0}, new double[]{2.5}, 1.0); } @Test(expected = AssertionError.class) public void oneDimensionalFloatArraysAreNotEqual() { assertArrayEquals(new float[]{1.0f}, new float[]{2.5f}, 1.0f); } @Test(expected = AssertionError.class) public void oneDimensionalBooleanArraysAreNotEqual() { assertArrayEquals(new boolean[]{true}, new boolean[]{false}); } @Test(expected = AssertionError.class) public void IntegerDoesNotEqualLong() { assertEquals(new Integer(1), new Long(1)); } @Test public void intsEqualLongs() { assertEquals(1, 1L); } @Test public void multiDimensionalArraysDeclaredAsOneDimensionalAreEqual() { assertArrayEquals((new Object[]{new Object[]{true, true}, new Object[]{false, false}}), (new Object[]{new Object[]{true, true}, new Object[]{false, false}})); } @Test public void multiDimensionalArraysAreNotEqual() { try { assertArrayEquals("message", (new Object[][]{{true, true}, {false, false}}), (new Object[][]{{true, true}, {true, false}})); fail(); } catch (AssertionError exception) { assertEquals("message: arrays first differed at element [1][0]; expected:<false> but was:<true>", exception.getMessage()); } } @Test public void multiDimensionalArraysAreNotEqualNoMessage() { try { assertArrayEquals((new Object[][]{{true, true}, {false, false}}), (new Object[][]{{true, true}, {true, false}})); fail(); } catch (AssertionError exception) { assertEquals("arrays first differed at element [1][0]; expected:<false> but was:<true>", exception.getMessage()); } } @Test public void multiDimensionalArraysDifferentLengthMessage() { try { assertArrayEquals("message", new Object[][]{{true, true}, {false, false}}, new Object[][]{{true, true}, {false}}); } catch (AssertionError exception) { assertEquals("message: arrays first differed at element [1]; array lengths differed, expected.length=2 actual.length=1", exception.getMessage()); return; } fail("Expected AssertionError to be thrown"); } @Test public void multiDimensionalArraysDifferentLengthNoMessage() { try { 
assertArrayEquals(new Object[][]{{true, true}, {false, false}}, new Object[][]{{true, true}, {false}}); } catch (AssertionError exception) { assertEquals("arrays first differed at element [1]; array lengths differed, expected.length=2 actual.length=1", exception.getMessage()); return; } fail("Expected AssertionError to be thrown"); } @Test public void arraysWithNullElementEqual() { Object[] objects1 = new Object[]{null}; Object[] objects2 = new Object[]{null}; assertArrayEquals(objects1, objects2); } @Test public void stringsDifferWithUserMessage() { try { assertEquals("not equal", "one", "two"); } catch (Throwable exception) { assertEquals("not equal expected:<[one]> but was:<[two]>", exception.getMessage()); } } @Test public void arraysEqual() { Object element = new Object(); Object[] objects1 = new Object[]{element}; Object[] objects2 = new Object[]{element}; assertArrayEquals(objects1, objects2); } @Test public void arraysEqualWithMessage() { Object element = new Object(); Object[] objects1 = new Object[]{element}; Object[] objects2 = new Object[]{element}; assertArrayEquals("equal", objects1, objects2); } @Test public void equals() { Object o = new Object(); assertEquals(o, o); assertEquals("abc", "abc"); assertEquals(true, true); assertEquals((byte) 1, (byte) 1); assertEquals('a', 'a'); assertEquals((short) 1, (short) 1); assertEquals(1, 1); // int by default, cast is unnecessary assertEquals(1l, 1l); assertEquals(1.0, 1.0, 0.0); assertEquals(1.0d, 1.0d, 0.0d); } @Test(expected = AssertionError.class) public void notEqualsObjectWithNull() { assertEquals(new Object(), null); } @Test(expected = AssertionError.class) public void notEqualsNullWithObject() { assertEquals(null, new Object()); } @Test public void notEqualsObjectWithNullWithMessage() { Object o = new Object(); try { assertEquals("message", null, o); fail(); } catch (AssertionError e) { assertEquals("message expected:<null> but was:<" + o.toString() + ">", e.getMessage()); } } @Test public void notEqualsNullWithObjectWithMessage() { Object o = new Object(); try { assertEquals("message", o, null); fail(); } catch (AssertionError e) { assertEquals("message expected:<" + o.toString() + "> but was:<null>", e.getMessage()); } } @Test(expected = AssertionError.class) public void objectsNotEquals() { assertEquals(new Object(), new Object()); } @Test(expected = ComparisonFailure.class) public void stringsNotEqual() { assertEquals("abc", "def"); } @Test(expected = AssertionError.class) public void booleansNotEqual() { assertEquals(true, false); } @Test(expected = AssertionError.class) public void bytesNotEqual() { assertEquals((byte) 1, (byte) 2); } @Test(expected = AssertionError.class) public void charsNotEqual() { assertEquals('a', 'b'); } @Test(expected = AssertionError.class) public void shortsNotEqual() { assertEquals((short) 1, (short) 2); } @Test(expected = AssertionError.class) public void intsNotEqual() { assertEquals(1, 2); } @Test(expected = AssertionError.class) public void longsNotEqual() { assertEquals(1l, 2l); } @Test(expected = AssertionError.class) public void floatsNotEqual() { assertEquals(1.0, 2.0, 0.9); } @SuppressWarnings("deprecation") @Test(expected = AssertionError.class) public void floatsNotEqualWithoutDelta() { assertEquals(1.0, 1.1); } @Test public void floatsNotDoublesInArrays() { float delta = 4.444f; float[] f1 = new float[]{1.111f}; float[] f2 = new float[]{5.555f}; Assert.assertArrayEquals(f1, f2, delta); } @Test(expected = AssertionError.class) public void bigDecimalsNotEqual() { assertEquals(new 
BigDecimal("123.4"), new BigDecimal("123.0")); } @Test(expected = AssertionError.class) public void doublesNotEqual() { assertEquals(1.0d, 2.0d, 0.9d); } @Test public void naNsAreEqual() { assertEquals(Float.NaN, Float.NaN, Float.POSITIVE_INFINITY); assertEquals(Double.NaN, Double.NaN, Double.POSITIVE_INFINITY); } @SuppressWarnings("unused") @Test public void nullNullmessage() { try { assertNull("junit"); fail(); } catch (AssertionError e) { assertEquals("expected null, but was:<junit>", e.getMessage()); } } @SuppressWarnings("unused") @Test public void nullWithMessage() { try { assertNull("message", "hello"); fail(); } catch (AssertionError exception) { assertEquals("message expected null, but was:<hello>", exception.getMessage()); } } @Test public void same() { Object o1 = new Object(); assertSame(o1, o1); } @Test public void notSame() { Object o1 = new Object(); Object o2 = new Object(); assertNotSame(o1, o2); } @Test(expected = AssertionError.class) public void objectsNotSame() { assertSame(new Object(), new Object()); } @Test(expected = AssertionError.class) public void objectsAreSame() { Object o = new Object(); assertNotSame(o, o); } @Test public void sameWithMessage() { try { assertSame("not same", "hello", "good-bye"); fail(); } catch (AssertionError exception) { assertEquals("not same expected same:<hello> was not:<good-bye>", exception.getMessage()); } } @Test public void sameNullMessage() { try { assertSame("hello", "good-bye"); fail(); } catch (AssertionError exception) { assertEquals("expected same:<hello> was not:<good-bye>", exception.getMessage()); } } @Test public void notSameWithMessage() { Object o = new Object(); try { assertNotSame("message", o, o); fail(); } catch (AssertionError exception) { assertEquals("message expected not same", exception.getMessage()); } } @Test public void notSameNullMessage() { Object o = new Object(); try { assertNotSame(o, o); fail(); } catch (AssertionError exception) { assertEquals("expected not same", exception.getMessage()); } } @Test public void nullMessage() { try { fail(null); } catch (AssertionError exception) { // we used to expect getMessage() to return ""; see failWithNoMessageToString() assertNull(exception.getMessage()); } } @Test public void nullMessageDisappearsWithStringAssertEquals() { try { assertEquals(null, "a", "b"); fail(); } catch (ComparisonFailure e) { assertEquals("expected:<[a]> but was:<[b]>", e.getMessage()); } } @Test public void nullMessageDisappearsWithAssertEquals() { try { assertEquals(null, 1, 2); fail(); } catch (AssertionError e) { assertEquals("expected:<1> but was:<2>", e.getMessage()); } } @Test(expected = AssertionError.class) public void arraysDeclaredAsObjectAreComparedAsObjects() { Object a1 = new Object[]{"abc"}; Object a2 = new Object[]{"abc"}; assertEquals(a1, a2); } @Test public void implicitTypecastEquality() { byte b = 1; short s = 1; int i = 1; long l = 1L; float f = 1.0f; double d = 1.0; assertEquals(b, s); assertEquals(b, i); assertEquals(b, l); assertEquals(s, i); assertEquals(s, l); assertEquals(i, l); assertEquals(f, d, 0); } @Test public void errorMessageDistinguishesDifferentValuesWithSameToString() { try { assertEquals("4", new Integer(4)); } catch (AssertionError e) { assertEquals("expected: java.lang.String<4> but was: java.lang.Integer<4>", e.getMessage()); } } @Test public void assertThatIncludesDescriptionOfTestedValueInErrorMessage() { String expected = "expected"; String actual = "actual"; String expectedMessage = "identifier\nExpected: \"expected\"\n but: was \"actual\""; 
try { assertThat("identifier", actual, equalTo(expected)); } catch (AssertionError e) { assertEquals(expectedMessage, e.getMessage()); } } @Test public void assertThatIncludesAdvancedMismatch() { String expectedMessage = "identifier\nExpected: is an instance of java.lang.Integer\n but: \"actual\" is a java.lang.String"; try { assertThat("identifier", "actual", is(instanceOf(Integer.class))); } catch (AssertionError e) { assertEquals(expectedMessage, e.getMessage()); } } @Test public void assertThatDescriptionCanBeElided() { String expected = "expected"; String actual = "actual"; String expectedMessage = "\nExpected: \"expected\"\n but: was \"actual\""; try { assertThat(actual, equalTo(expected)); } catch (AssertionError e) { assertEquals(expectedMessage, e.getMessage()); } } @Test public void nullAndStringNullPrintCorrectError() { try { assertEquals(null, "null"); } catch (AssertionError e) { assertEquals("expected: null<null> but was: java.lang.String<null>", e.getMessage()); } } @Test(expected = AssertionError.class) public void stringNullAndNullWorksToo() { assertEquals("null", null); } @Test(expected = AssertionError.class) public void compareBigDecimalAndInteger() { final BigDecimal bigDecimal = new BigDecimal("1.2"); final Integer integer = Integer.valueOf("1"); assertEquals(bigDecimal, integer); } @Test(expected = AssertionError.class) public void sameObjectIsNotEqual() { Object o = new Object(); assertNotEquals(o, o); } @Test public void objectsWithDiferentReferencesAreNotEqual() { assertNotEquals(new Object(), new Object()); } @Test public void assertNotEqualsIncludesCorrectMessage() { Integer value1 = new Integer(1); Integer value2 = new Integer(1); String message = "The values should be different"; try { assertNotEquals(message, value1, value2); } catch (AssertionError e) { assertEquals(message + ". 
Actual: " + value1, e.getMessage()); return; } fail("Failed on assertion."); } @Test public void assertNotEqualsIncludesTheValueBeingTested() { Integer value1 = new Integer(1); Integer value2 = new Integer(1); try { assertNotEquals(value1, value2); } catch (AssertionError e) { assertTrue(e.getMessage().contains(value1.toString())); return; } fail("Failed on assertion."); } @Test public void assertNotEqualsWorksWithPrimitiveTypes() { assertNotEquals(1L, 2L); assertNotEquals("The values should be different", 1L, 2L); assertNotEquals(1.0, 2.0, 0); assertNotEquals("The values should be different", 1.0, 2.0, 0); assertNotEquals(1.0f, 2.0f, 0f); assertNotEquals("The values should be different", 1.0f, 2.0f, 0f); } @Test(expected = AssertionError.class) public void assertNotEqualsConsidersDeltaCorrectly() { assertNotEquals(1.0, 0.9, 0.1); } @Test(expected = AssertionError.class) public void assertNotEqualsConsidersFloatDeltaCorrectly() { assertNotEquals(1.0f, 0.75f, 0.25f); } @Test(expected = AssertionError.class) public void assertNotEqualsIgnoresDeltaOnNaN() { assertNotEquals(Double.NaN, Double.NaN, 1); } @Test(expected = AssertionError.class) public void assertNotEqualsIgnoresFloatDeltaOnNaN() { assertNotEquals(Float.NaN, Float.NaN, 1f); } @Test(expected = AssertionError.class) public void expectThrowsRequiresAnExceptionToBeThrown() { expectThrows(Throwable.class, nonThrowingRunnable()); } @Test public void expectThrowsIncludesAnInformativeDefaultMessage() { try { expectThrows(Throwable.class, nonThrowingRunnable()); } catch (AssertionError ex) { assertEquals("expected Throwable to be thrown, but nothing was thrown", ex.getMessage()); return; } fail(); } @Test public void expectThrowsReturnsTheSameObjectThrown() { NullPointerException npe = new NullPointerException(); Throwable throwable = expectThrows(Throwable.class, throwingRunnable(npe)); assertSame(npe, throwable); } @Test(expected = AssertionError.class) public void expectThrowsDetectsTypeMismatchesViaExplicitTypeHint() { NullPointerException npe = new NullPointerException(); expectThrows(IOException.class, throwingRunnable(npe)); } @Test public void expectThrowsWrapsAndPropagatesUnexpectedExceptions() { NullPointerException npe = new NullPointerException("inner-message"); try { expectThrows(IOException.class, throwingRunnable(npe)); } catch (AssertionError ex) { assertSame(npe, ex.getCause()); assertEquals("inner-message", ex.getCause().getMessage()); return; } fail(); } @Test public void expectThrowsSuppliesACoherentErrorMessageUponTypeMismatch() { NullPointerException npe = new NullPointerException(); try { expectThrows(IOException.class, throwingRunnable(npe)); } catch (AssertionError error) { assertEquals("unexpected exception type thrown; expected:<IOException> but was:<NullPointerException>", error.getMessage()); assertSame(npe, error.getCause()); return; } fail(); } private static ThrowingRunnable nonThrowingRunnable() { return new ThrowingRunnable() { public void run() throws Throwable { } }; } private static ThrowingRunnable throwingRunnable(final Throwable t) { return new ThrowingRunnable() { public void run() throws Throwable { throw t; } }; } @Test public void greaterThan() { Comparator<Double> comparator = new Comparator<Double>() { public int compare(Double o1, Double o2) { return o1.compareTo(o2); } }; Assert.assertGreaterThan(1.5, 0.5, comparator); } }
package org.narwhal.core; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import org.narwhal.bean.Person; import java.lang.reflect.InvocationTargetException; import java.sql.SQLException; import java.util.List; /** * @author Miron Aseev */ @RunWith(JUnit4.class) public class DatabaseConnectionTest { private DatabaseConnection connection; public DatabaseConnectionTest() throws SQLException, ClassNotFoundException { String driver = "com.mysql.jdbc.Driver"; String url = "jdbc:mysql://localhost/bank"; String username = "lrngsql"; String password = "lrngsql"; DatabaseInformation information = new DatabaseInformation(driver, url, username, password); connection = new DatabaseConnection(information); } @Test public void transactionMethodsTest() { } @Test public void createTest() { } @Test public void readTest() { } @Test public void updateTest() { } @Test public void deleteTest() { } @Test public void executeUpdateTest() { } @Test public void executeQueryTest() throws SQLException, IllegalAccessException, InstantiationException, NoSuchMethodException, InvocationTargetException { String expectedName = "John"; Person person = connection.executeQuery("SELECT * FROM Person WHERE name = ?", Person.class, expectedName); Assert.assertNotNull(person); Assert.assertEquals(expectedName, person.getName()); } @Test public void executeQueryForCollectionTest() throws SQLException, ClassNotFoundException, NoSuchMethodException, InstantiationException, IllegalAccessException, InvocationTargetException { List<Person> persons = connection.executeQueryForCollection("SELECT * FROM Person", Person.class); final int expectedSize = 2; Assert.assertEquals(expectedSize, persons.size()); Assert.assertEquals("John", persons.get(0).getName()); Assert.assertEquals("Doe", persons.get(1).getName()); } }
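// Hedged usage sketch (editor's addition): the narwhal calls exercised by the test above, outside
// JUnit. Only constructors and methods already visible in the test (DatabaseInformation,
// DatabaseConnection, executeQuery, executeQueryForCollection) are used; the JDBC driver, URL,
// and credentials are placeholders.
package org.narwhal.core.examples;

import java.util.List;

import org.narwhal.bean.Person;
import org.narwhal.core.DatabaseConnection;
import org.narwhal.core.DatabaseInformation;

public class DatabaseConnectionUsageSketch {

    public static void main(String[] args) throws Exception {
        DatabaseInformation info = new DatabaseInformation(
                "com.mysql.jdbc.Driver", "jdbc:mysql://localhost/bank", "user", "secret");
        DatabaseConnection connection = new DatabaseConnection(info);

        // Map a single row to a bean; "John" is bound to the positional parameter.
        Person person = connection.executeQuery(
                "SELECT * FROM Person WHERE name = ?", Person.class, "John");

        // Map every row of the result set to a list of beans.
        List<Person> persons = connection.executeQueryForCollection(
                "SELECT * FROM Person", Person.class);

        System.out.println(person.getName() + ", total rows: " + persons.size());
    }
}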
package com.bddinaction.chapter2.utilities; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.SerializationFeature; import com.fasterxml.jackson.databind.introspect.JacksonAnnotationIntrospector; import com.fasterxml.jackson.datatype.joda.JodaModule; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; public class JsonBuilder { final Logger logger = LoggerFactory.getLogger(JsonBuilder.class); private ObjectMapper mapper; public JsonBuilder() { mapper = new ObjectMapper(); mapper.registerModule(new JodaModule()); mapper.configure(SerializationFeature.WRITE_NULL_MAP_VALUES, true); mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); mapper.setAnnotationIntrospector(new JacksonAnnotationIntrospector()); } public String build(final Object object) { String value = null; try { value = mapper.writeValueAsString(object); logger.info(value); } catch (JsonProcessingException e) { logger.error("Exception thrown... {}", e); } return value; } public <T> T build(final String value, Class<T> clazz) { try { T object = mapper.readValue(value, clazz); logger.info(mapper.writeValueAsString(object)); return object; } catch (IOException e) { logger.error("Exception thrown... {}", e); } return null; } public <T> T build(final String value, final TypeReference reference) { T object = null; try { object = mapper.readValue(value, reference); } catch (IOException e) { logger.error("Exception thrown... {}", e); } return object; } }
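/*
 * Illustrative usage sketch (not part of the original sources): round-tripping a simple value
 * object through the JsonBuilder above. Only the public build(...) overloads defined in
 * JsonBuilder are assumed; the Book class is a hypothetical example type.
 */
import com.bddinaction.chapter2.utilities.JsonBuilder;

public class JsonBuilderUsageSketch {

    // Hypothetical bean; Jackson needs a no-arg constructor and accessors for (de)serialisation.
    public static class Book {
        private String title;
        public Book() {}
        public Book(String title) { this.title = title; }
        public String getTitle() { return title; }
        public void setTitle(String title) { this.title = title; }
    }

    public static void main(String[] args) {
        JsonBuilder jsonBuilder = new JsonBuilder();

        // Object -> JSON string (null fields are skipped because of the NON_NULL inclusion setting).
        String json = jsonBuilder.build(new Book("BDD in Action"));

        // JSON string -> object, using the Class-based overload.
        Book roundTripped = jsonBuilder.build(json, Book.class);
        System.out.println(roundTripped.getTitle());
    }
}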
package org.owasp.esapi.reference; import java.io.BufferedReader; import java.io.ByteArrayInputStream; import java.io.File; import java.io.IOException; import java.io.InputStreamReader; import java.io.UnsupportedEncodingException; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Set; import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestSuite; import org.owasp.esapi.ESAPI; import org.owasp.esapi.Encoder; import org.owasp.esapi.ValidationErrorList; import org.owasp.esapi.ValidationRule; import org.owasp.esapi.Validator; import org.owasp.esapi.errors.ValidationException; import org.owasp.esapi.http.MockHttpServletRequest; import org.owasp.esapi.http.MockHttpServletResponse; import org.owasp.esapi.reference.validation.HTMLValidationRule; import org.owasp.esapi.reference.validation.StringValidationRule; /** * The Class ValidatorTest. * * @author Mike Fauzy (mike.fauzy@aspectsecurity.com) * @author Jeff Williams (jeff.williams@aspectsecurity.com) */ public class ValidatorTest extends TestCase { private static final String PREFERRED_ENCODING = "UTF-8"; public static Test suite() { return new TestSuite(ValidatorTest.class); } /** * Instantiates a new HTTP utilities test. * * @param testName the test name */ public ValidatorTest(String testName) { super(testName); } /** * {@inheritDoc} * * @throws Exception */ protected void setUp() throws Exception { // none } /** * {@inheritDoc} * * @throws Exception */ protected void tearDown() throws Exception { // none } public void testAddRule() { Validator validator = ESAPI.validator(); ValidationRule rule = new StringValidationRule("ridiculous"); validator.addRule(rule); assertEquals(rule, validator.getRule("ridiculous")); } public void testAssertValidFileUpload() { // assertValidFileUpload(String, String, String, byte[], int, boolean, ValidationErrorList) } public void testGetPrintable1() { // getValidPrintable(String, char[], int, boolean, ValidationErrorList) } public void testGetPrintable2() { // getValidPrintable(String, String, int, boolean, ValidationErrorList) } public void testGetRule() { Validator validator = ESAPI.validator(); ValidationRule rule = new StringValidationRule("rule"); validator.addRule(rule); assertEquals(rule, validator.getRule("rule")); assertFalse(rule == validator.getRule("ridiculous")); } public void testGetValidCreditCard() { System.out.println("getValidCreditCard"); Validator instance = ESAPI.validator(); ValidationErrorList errors = new ValidationErrorList(); assertTrue(instance.isValidCreditCard("cctest1", "1234 9876 0000 0008", false)); assertTrue(instance.isValidCreditCard("cctest2", "1234987600000008", false)); assertFalse(instance.isValidCreditCard("cctest3", "12349876000000081", false)); assertFalse(instance.isValidCreditCard("cctest4", "4417 1234 5678 9112", false)); instance.getValidCreditCard("cctest5", "1234 9876 0000 0008", false, errors); assertEquals(0, errors.size()); instance.getValidCreditCard("cctest6", "1234987600000008", false, errors); assertEquals(0, errors.size()); instance.getValidCreditCard("cctest7", "12349876000000081", false, errors); assertEquals(1, errors.size()); instance.getValidCreditCard("cctest8", "4417 1234 5678 9112", false, errors); assertEquals(2, errors.size()); } public void testGetValidDate() throws Exception { System.out.println("getValidDate"); Validator instance = ESAPI.validator(); ValidationErrorList errors = new 
ValidationErrorList(); assertTrue(instance.getValidDate("datetest1", "June 23, 1967", DateFormat.getDateInstance(DateFormat.MEDIUM, Locale.US), false) != null); instance.getValidDate("datetest2", "freakshow", DateFormat.getDateInstance(), false, errors); assertEquals(1, errors.size()); // TODO: This test case fails due to an apparent bug in SimpleDateFormat instance.getValidDate("test", "June 32, 2008", DateFormat.getDateInstance(), false, errors); // assertEquals( 2, errors.size() ); } public void testGetValidDirectoryPath() throws Exception { System.out.println("getValidDirectoryPath"); Validator instance = ESAPI.validator(); ValidationErrorList errors = new ValidationErrorList(); // find a directory that exists File parent = new File("/"); String path = ESAPI.securityConfiguration().getResourceFile("ESAPI.properties").getParentFile().getCanonicalPath(); instance.getValidDirectoryPath("dirtest1", path, parent, true, errors); assertEquals(0, errors.size()); instance.getValidDirectoryPath("dirtest2", null, parent, false, errors); assertEquals(1, errors.size()); instance.getValidDirectoryPath("dirtest3", "ridicul%00ous", parent, false, errors); assertEquals(2, errors.size()); } public void testGetValidDouble() { System.out.println("getValidDouble"); Validator instance = ESAPI.validator(); ValidationErrorList errors = new ValidationErrorList(); instance.getValidDouble("dtest1", "1.0", 0, 20, true, errors); assertEquals(0, errors.size()); instance.getValidDouble("dtest2", null, 0, 20, true, errors); assertEquals(0, errors.size()); instance.getValidDouble("dtest3", null, 0, 20, false, errors); assertEquals(1, errors.size()); instance.getValidDouble("dtest4", "ridiculous", 0, 20, true, errors); assertEquals(2, errors.size()); instance.getValidDouble("dtest5", "" + (Double.MAX_VALUE), 0, 20, true, errors); assertEquals(3, errors.size()); instance.getValidDouble("dtest6", "" + (Double.MAX_VALUE + .00001), 0, 20, true, errors); assertEquals(4, errors.size()); } public void testGetValidFileContent() { System.out.println("getValidFileContent"); Validator instance = ESAPI.validator(); ValidationErrorList errors = new ValidationErrorList(); byte[] bytes = null; try { bytes = "12345".getBytes(PREFERRED_ENCODING); } catch (UnsupportedEncodingException e) { fail(PREFERRED_ENCODING + " not a supported encoding?!?!!"); } instance.getValidFileContent("test", bytes, 5, true, errors); assertEquals(0, errors.size()); instance.getValidFileContent("test", bytes, 4, true, errors); assertEquals(1, errors.size()); } public void testGetValidFileName() throws Exception { System.out.println("getValidFileName"); Validator instance = ESAPI.validator(); ValidationErrorList errors = new ValidationErrorList(); String testName = "aspe%20ct.jar"; assertEquals("Percent encoding is not changed", testName, instance.getValidFileName("test", testName, ESAPI.securityConfiguration().getAllowedFileExtensions(), false, errors)); } public void testGetValidInput() { System.out.println("getValidInput"); Validator instance = ESAPI.validator(); ValidationErrorList errors = new ValidationErrorList(); // instance.getValidInput(String, String, String, int, boolean, ValidationErrorList) } public void testGetValidInteger() { System.out.println("getValidInteger"); Validator instance = ESAPI.validator(); ValidationErrorList errors = new ValidationErrorList(); // instance.getValidInteger(String, String, int, int, boolean, ValidationErrorList) } public void testGetValidListItem() { System.out.println("getValidListItem"); Validator instance = 
ESAPI.validator(); ValidationErrorList errors = new ValidationErrorList(); // instance.getValidListItem(String, String, List, ValidationErrorList) } public void testGetValidNumber() { System.out.println("getValidNumber"); Validator instance = ESAPI.validator(); ValidationErrorList errors = new ValidationErrorList(); // instance.getValidNumber(String, String, long, long, boolean, ValidationErrorList) } public void testGetValidRedirectLocation() { System.out.println("getValidRedirectLocation"); Validator instance = ESAPI.validator(); ValidationErrorList errors = new ValidationErrorList(); // instance.getValidRedirectLocation(String, String, boolean, ValidationErrorList) } public void testGetValidSafeHTML() throws Exception { System.out.println("getValidSafeHTML"); Validator instance = ESAPI.validator(); ValidationErrorList errors = new ValidationErrorList(); // new school test case setup HTMLValidationRule rule = new HTMLValidationRule("test"); ESAPI.validator().addRule(rule); assertEquals("Test.", ESAPI.validator().getRule("test").getValid("test", "Test. <script>alert(document.cookie)</script>")); String test1 = "<b>Jeff</b>"; String result1 = instance.getValidSafeHTML("test", test1, 100, false, errors); assertEquals(test1, result1); String test2 = "<a href=\"http: String result2 = instance.getValidSafeHTML("test", test2, 100, false, errors); assertEquals(test2, result2); String test3 = "Test. <script>alert(document.cookie)</script>"; assertEquals("Test.", rule.getSafe("test", test3)); assertEquals("Test. &lt;<div>load=alert()</div>", rule.getSafe("test", "Test. <<div on<script></script>load=alert()")); assertEquals("Test. <div>b</div>", rule.getSafe("test", "Test. <div style={xss:expression(xss)}>b</div>")); assertEquals("Test.", rule.getSafe("test", "Test. <s%00cript>alert(document.cookie)</script>")); assertEquals("Test. alert(document.cookie)", rule.getSafe("test", "Test. <s\tcript>alert(document.cookie)</script>")); assertEquals("Test. alert(document.cookie)", rule.getSafe("test", "Test. 
<s\tcript>alert(document.cookie)</script>")); // TODO: ENHANCE waiting for a way to validate text headed for an attribute for scripts // This would be nice to catch, but just looks like text to AntiSamy // assertFalse(instance.isValidSafeHTML("test", "\" onload=\"alert(document.cookie)\" ")); // String result4 = instance.getValidSafeHTML("test", test4); // assertEquals("", result4); } public void testIsInvalidFilename() { System.out.println("testIsInvalidFilename"); Validator instance = ESAPI.validator(); char invalidChars[] = "/\\:*?\"<>|".toCharArray(); for (int i = 0; i < invalidChars.length; i++) { assertFalse(invalidChars[i] + " is an invalid character for a filename", instance.isValidFileName("test", "as" + invalidChars[i] + "pect.jar", false)); } assertFalse("Files must have an extension", instance.isValidFileName("test", "", false)); assertFalse("Files must have a valid extension", instance.isValidFileName("test.invalidExtension", "", false)); assertFalse("Filennames cannot be the empty string", instance.isValidFileName("test", "", false)); } public void testIsValidDate() { System.out.println("isValidDate"); Validator instance = ESAPI.validator(); DateFormat format = SimpleDateFormat.getDateInstance(); assertTrue(instance.isValidDate("datetest1", "September 11, 2001", format, true)); assertFalse(instance.isValidDate("datetest2", null, format, false)); assertFalse(instance.isValidDate("datetest3", "", format, false)); } public void testIsValidDirectoryPath() throws IOException { System.out.println("isValidDirectoryPath"); // get an encoder with a special list of codecs and make a validator out of it List list = new ArrayList(); list.add("HTMLEntityCodec"); Encoder encoder = new DefaultEncoder(list); Validator instance = new DefaultValidator(encoder); boolean isWindows = (System.getProperty("os.name").indexOf("Windows") != -1) ? 
true : false; File parent = new File("/"); if (isWindows) { String sysRoot = new File(System.getenv("SystemRoot")).getCanonicalPath(); // Windows paths that don't exist and thus should fail assertFalse(instance.isValidDirectoryPath("test", "c:\\ridiculous", parent, false)); assertFalse(instance.isValidDirectoryPath("test", "c:\\jeff", parent, false)); assertFalse(instance.isValidDirectoryPath("test", "c:\\temp\\..\\etc", parent, false)); // Windows paths assertTrue(instance.isValidDirectoryPath("test", "C:\\", parent, false)); // Windows root directory assertTrue(instance.isValidDirectoryPath("test", sysRoot, parent, false)); // Windows always exist directory assertFalse(instance.isValidDirectoryPath("test", sysRoot + "\\System32\\cmd.exe", parent, false)); // Windows command shell // Unix specific paths should not pass assertFalse(instance.isValidDirectoryPath("test", "/tmp", parent, false)); // Unix Temporary directory assertFalse(instance.isValidDirectoryPath("test", "/bin/sh", parent, false)); // Unix Standard shell assertFalse(instance.isValidDirectoryPath("test", "/etc/config", parent, false)); // Unix specific paths that should not exist or work assertFalse(instance.isValidDirectoryPath("test", "/etc/ridiculous", parent, false)); assertFalse(instance.isValidDirectoryPath("test", "/tmp/../etc", parent, false)); } else { // Windows paths should fail assertFalse(instance.isValidDirectoryPath("test", "c:\\ridiculous", parent, false)); assertFalse(instance.isValidDirectoryPath("test", "c:\\temp\\..\\etc", parent, false)); // Standard Windows locations should fail assertFalse(instance.isValidDirectoryPath("test", "c:\\", parent, false)); // Windows root directory assertFalse(instance.isValidDirectoryPath("test", "c:\\Windows\\temp", parent, false)); // Windows temporary directory assertFalse(instance.isValidDirectoryPath("test", "c:\\Windows\\System32\\cmd.exe", parent, false)); // Windows command shell // Unix specific paths should pass assertTrue(instance.isValidDirectoryPath("test", "/", parent, false)); // Root directory assertTrue(instance.isValidDirectoryPath("test", "/bin", parent, false)); // Always exist directory // Unix specific paths that should not exist or work assertFalse(instance.isValidDirectoryPath("test", "/bin/sh", parent, false)); // Standard shell, not dir assertFalse(instance.isValidDirectoryPath("test", "/etc/ridiculous", parent, false)); assertFalse(instance.isValidDirectoryPath("test", "/tmp/../etc", parent, false)); } } public void TestIsValidDirectoryPath() { // isValidDirectoryPath(String, String, boolean) } public void testIsValidDouble() { // isValidDouble(String, String, double, double, boolean) } public void testIsValidFileContent() { System.out.println("isValidFileContent"); byte[] content = null; try { content = "This is some file content".getBytes(PREFERRED_ENCODING); } catch (UnsupportedEncodingException e) { fail(PREFERRED_ENCODING + " not a supported encoding?!?!!!"); } Validator instance = ESAPI.validator(); assertTrue(instance.isValidFileContent("test", content, 100, false)); } public void testIsValidFileName() { System.out.println("isValidFileName"); Validator instance = ESAPI.validator(); assertTrue("Simple valid filename with a valid extension", instance.isValidFileName("test", "aspect.jar", false)); assertTrue("All valid filename characters are accepted", instance.isValidFileName("test", "!@#$%^&{}[]()_+-=,.~'` abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890.jar", false)); assertTrue("Legal filenames that decode to legal 
filenames are accepted", instance.isValidFileName("test", "aspe%20ct.jar", false)); } public void testIsValidFileUpload() throws IOException { System.out.println("isValidFileUpload"); String filepath = new File(System.getProperty("user.dir")).getCanonicalPath(); String filename = "aspect.jar"; File parent = new File("/").getCanonicalFile(); byte[] content = null; try { content = "This is some file content".getBytes(PREFERRED_ENCODING); } catch (UnsupportedEncodingException e) { fail(PREFERRED_ENCODING + " not a supported encoding?!?!!!"); } Validator instance = ESAPI.validator(); assertTrue(instance.isValidFileUpload("test", filepath, filename, parent, content, 100, false)); filepath = "/ridiculous"; filename = "aspect.jar"; try { content = "This is some file content".getBytes(PREFERRED_ENCODING); } catch (UnsupportedEncodingException e) { fail(PREFERRED_ENCODING + " not a supported encoding?!?!!!"); } assertFalse(instance.isValidFileUpload("test", filepath, filename, parent, content, 100, false)); } public void testIsValidHTTPRequestParameterSet() { // isValidHTTPRequestParameterSet(String, Set, Set) } public void testisValidInput() { System.out.println("isValidInput"); Validator instance = ESAPI.validator(); assertTrue(instance.isValidInput("test", "jeff.williams@aspectsecurity.com", "Email", 100, false)); assertFalse(instance.isValidInput("test", "jeff.williams@@aspectsecurity.com", "Email", 100, false)); assertFalse(instance.isValidInput("test", "jeff.williams@aspectsecurity", "Email", 100, false)); assertTrue(instance.isValidInput("test", "jeff.wil'liams@aspectsecurity.com", "Email", 100, false)); assertTrue(instance.isValidInput("test", "jeff.wil''liams@aspectsecurity.com", "Email", 100, false)); assertTrue(instance.isValidInput("test", "123.168.100.234", "IPAddress", 100, false)); assertTrue(instance.isValidInput("test", "192.168.1.234", "IPAddress", 100, false)); assertFalse(instance.isValidInput("test", "..168.1.234", "IPAddress", 100, false)); assertFalse(instance.isValidInput("test", "10.x.1.234", "IPAddress", 100, false)); assertTrue(instance.isValidInput("test", "http: assertFalse(instance.isValidInput("test", "http: assertFalse(instance.isValidInput("test", "http: assertTrue(instance.isValidInput("test", "078-05-1120", "SSN", 100, false)); assertTrue(instance.isValidInput("test", "078 05 1120", "SSN", 100, false)); assertTrue(instance.isValidInput("test", "078051120", "SSN", 100, false)); assertFalse(instance.isValidInput("test", "987-65-4320", "SSN", 100, false)); assertFalse(instance.isValidInput("test", "000-00-0000", "SSN", 100, false)); assertFalse(instance.isValidInput("test", "(555) 555-5555", "SSN", 100, false)); assertFalse(instance.isValidInput("test", "test", "SSN", 100, false)); assertTrue(instance.isValidInput("test", "jeffWILLIAMS123", "HTTPParameterValue", 100, false)); assertTrue(instance.isValidInput("test", "jeff .-/+=@_ WILLIAMS", "HTTPParameterValue", 100, false)); assertFalse(instance.isValidInput("test", "jeff*WILLIAMS", "HTTPParameterValue", 100, false)); assertFalse(instance.isValidInput("test", "jeff^WILLIAMS", "HTTPParameterValue", 100, false)); assertFalse(instance.isValidInput("test", "jeff\\WILLIAMS", "HTTPParameterValue", 100, false)); assertTrue(instance.isValidInput("test", null, "Email", 100, true)); assertFalse(instance.isValidInput("test", null, "Email", 100, false)); } public void testIsValidInteger() { System.out.println("isValidInteger"); Validator instance = ESAPI.validator(); //testing negative range 
assertFalse(instance.isValidInteger("test", "-4", 1, 10, false)); assertTrue(instance.isValidInteger("test", "-4", -10, 10, false)); //testing null value assertTrue(instance.isValidInteger("test", null, -10, 10, true)); assertFalse(instance.isValidInteger("test", null, -10, 10, false)); //testing empty string assertTrue(instance.isValidInteger("test", "", -10, 10, true)); assertFalse(instance.isValidInteger("test", "", -10, 10, false)); //testing improper range assertFalse(instance.isValidInteger("test", "50", 10, -10, false)); //testing non-integers assertFalse(instance.isValidInteger("test", "4.3214", -10, 10, true)); assertFalse(instance.isValidInteger("test", "-1.65", -10, 10, true)); //other testing assertTrue(instance.isValidInteger("test", "4", 1, 10, false)); assertTrue(instance.isValidInteger("test", "400", 1, 10000, false)); assertTrue(instance.isValidInteger("test", "400000000", 1, 400000000, false)); assertFalse(instance.isValidInteger("test", "4000000000000", 1, 10000, false)); assertFalse(instance.isValidInteger("test", "alsdkf", 10, 10000, false)); assertFalse(instance.isValidInteger("test", "--10", 10, 10000, false)); assertFalse(instance.isValidInteger("test", "14.1414234x", 10, 10000, false)); assertFalse(instance.isValidInteger("test", "Infinity", 10, 10000, false)); assertFalse(instance.isValidInteger("test", "-Infinity", 10, 10000, false)); assertFalse(instance.isValidInteger("test", "NaN", 10, 10000, false)); assertFalse(instance.isValidInteger("test", "-NaN", 10, 10000, false)); assertFalse(instance.isValidInteger("test", "+NaN", 10, 10000, false)); assertFalse(instance.isValidInteger("test", "1e-6", -999999999, 999999999, false)); assertFalse(instance.isValidInteger("test", "-1e-6", -999999999, 999999999, false)); } public void testIsValidListItem() { System.out.println("isValidListItem"); Validator instance = ESAPI.validator(); List list = new ArrayList(); list.add("one"); list.add("two"); assertTrue(instance.isValidListItem("test", "one", list)); assertFalse(instance.isValidListItem("test", "three", list)); } public void testIsValidNumber() { System.out.println("isValidNumber"); Validator instance = ESAPI.validator(); //testing negative range assertFalse(instance.isValidNumber("test", "-4", 1, 10, false)); assertTrue(instance.isValidNumber("test", "-4", -10, 10, false)); //testing null value assertTrue(instance.isValidNumber("test", null, -10, 10, true)); assertFalse(instance.isValidNumber("test", null, -10, 10, false)); //testing empty string assertTrue(instance.isValidNumber("test", "", -10, 10, true)); assertFalse(instance.isValidNumber("test", "", -10, 10, false)); //testing improper range assertFalse(instance.isValidNumber("test", "5", 10, -10, false)); //testing non-integers assertTrue(instance.isValidNumber("test", "4.3214", -10, 10, true)); assertTrue(instance.isValidNumber("test", "-1.65", -10, 10, true)); //other testing assertTrue(instance.isValidNumber("test", "4", 1, 10, false)); assertTrue(instance.isValidNumber("test", "400", 1, 10000, false)); assertTrue(instance.isValidNumber("test", "400000000", 1, 400000000, false)); assertFalse(instance.isValidNumber("test", "4000000000000", 1, 10000, false)); assertFalse(instance.isValidNumber("test", "alsdkf", 10, 10000, false)); assertFalse(instance.isValidNumber("test", "--10", 10, 10000, false)); assertFalse(instance.isValidNumber("test", "14.1414234x", 10, 10000, false)); assertFalse(instance.isValidNumber("test", "Infinity", 10, 10000, false)); assertFalse(instance.isValidNumber("test", "-Infinity", 10, 
10000, false)); assertFalse(instance.isValidNumber("test", "NaN", 10, 10000, false)); assertFalse(instance.isValidNumber("test", "-NaN", 10, 10000, false)); assertFalse(instance.isValidNumber("test", "+NaN", 10, 10000, false)); assertTrue(instance.isValidNumber("test", "1e-6", -999999999, 999999999, false)); assertTrue(instance.isValidNumber("test", "-1e-6", -999999999, 999999999, false)); } public void testIsValidParameterSet() { System.out.println("isValidParameterSet"); Set requiredNames = new HashSet(); requiredNames.add("p1"); requiredNames.add("p2"); requiredNames.add("p3"); Set optionalNames = new HashSet(); optionalNames.add("p4"); optionalNames.add("p5"); optionalNames.add("p6"); MockHttpServletRequest request = new MockHttpServletRequest(); MockHttpServletResponse response = new MockHttpServletResponse(); request.addParameter("p1", "value"); request.addParameter("p2", "value"); request.addParameter("p3", "value"); ESAPI.httpUtilities().setCurrentHTTP(request, response); Validator instance = ESAPI.validator(); assertTrue(instance.isValidHTTPRequestParameterSet("HTTPParameters", request, requiredNames, optionalNames)); request.addParameter("p4", "value"); request.addParameter("p5", "value"); request.addParameter("p6", "value"); assertTrue(instance.isValidHTTPRequestParameterSet("HTTPParameters", request, requiredNames, optionalNames)); request.removeParameter("p1"); assertFalse(instance.isValidHTTPRequestParameterSet("HTTPParameters", request, requiredNames, optionalNames)); } public void testIsValidPrintable() { System.out.println("isValidPrintable"); Validator instance = ESAPI.validator(); assertTrue(instance.isValidPrintable("name", "abcDEF", 100, false)); assertTrue(instance.isValidPrintable("name", "!@#R()*$;><()", 100, false)); char[] chars = {0x60, (char) 0xFF, 0x10, 0x25}; assertFalse(instance.isValidPrintable("name", chars, 100, false)); assertFalse(instance.isValidPrintable("name", "%08", 100, false)); } public void testIsValidRedirectLocation() { // isValidRedirectLocation(String, String, boolean) } public void testIsValidSafeHTML() { System.out.println("isValidSafeHTML"); Validator instance = ESAPI.validator(); assertTrue(instance.isValidSafeHTML("test", "<b>Jeff</b>", 100, false)); assertTrue(instance.isValidSafeHTML("test", "<a href=\"http: assertTrue(instance.isValidSafeHTML("test", "Test. <script>alert(document.cookie)</script>", 100, false)); assertTrue(instance.isValidSafeHTML("test", "Test. <div style={xss:expression(xss)}>", 100, false)); assertTrue(instance.isValidSafeHTML("test", "Test. <s%00cript>alert(document.cookie)</script>", 100, false)); assertTrue(instance.isValidSafeHTML("test", "Test. <s\tcript>alert(document.cookie)</script>", 100, false)); assertTrue(instance.isValidSafeHTML("test", "Test. 
<s\r\n\0cript>alert(document.cookie)</script>", 100, false)); // TODO: waiting for a way to validate text headed for an attribute for scripts // This would be nice to catch, but just looks like text to AntiSamy // assertFalse(instance.isValidSafeHTML("test", "\" onload=\"alert(document.cookie)\" ")); } public void testSafeReadLine() { System.out.println("safeReadLine"); byte[] bytes = null; try { bytes = "testString".getBytes(PREFERRED_ENCODING); } catch (UnsupportedEncodingException e1) { fail(PREFERRED_ENCODING + " not a supported encoding?!?!!!"); } ByteArrayInputStream s = new ByteArrayInputStream(bytes); Validator instance = ESAPI.validator(); try { instance.safeReadLine(s, -1); fail(); } catch (ValidationException e) { // Expected } s.reset(); try { instance.safeReadLine(s, 4); fail(); } catch (ValidationException e) { // Expected } s.reset(); try { String u = instance.safeReadLine(s, 20); assertEquals("testString", u); } catch (ValidationException e) { fail(); } // This sub-test attempts to validate that BufferedReader.readLine() and safeReadLine() are similar in operation // for the nominal case try { s.reset(); InputStreamReader isr = new InputStreamReader(s); BufferedReader br = new BufferedReader(isr); String u = br.readLine(); s.reset(); String v = instance.safeReadLine(s, 20); assertEquals(u, v); } catch (IOException e) { fail(); } catch (ValidationException e) { fail(); } } public void testIssue82_SafeString_Bad_Regex() { Validator instance = ESAPI.validator(); try { instance.getValidInput("address", "55 main st. pasadena ak", "SafeString", 512, false); } catch (ValidationException e) { fail(e.getLogMessage()); } } }
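/*
 * Illustrative sketch (not part of the original suite): the two calling styles the tests above
 * exercise against the ESAPI Validator - boolean isValid* checks, and getValid* calls that
 * collect problems into a ValidationErrorList instead of throwing per input. Only methods already
 * used in ValidatorTest are assumed; the context names and input values are placeholders, and the
 * "Email" type refers to a pattern configured in ESAPI.properties.
 */
import org.owasp.esapi.ESAPI;
import org.owasp.esapi.ValidationErrorList;
import org.owasp.esapi.Validator;

public class ValidatorUsageSketch {

    public static void main(String[] args) throws Exception {
        Validator validator = ESAPI.validator();

        // Style 1: simple boolean checks.
        boolean ok = validator.isValidInput("signup", "jeff.williams@aspectsecurity.com", "Email", 100, false);
        System.out.println("email valid: " + ok);

        // Style 2: accumulate failures in a ValidationErrorList and inspect them afterwards.
        ValidationErrorList errors = new ValidationErrorList();
        validator.getValidCreditCard("payment", "1234 9876 0000 0008", false, errors);
        validator.getValidInteger("quantity", "42", 1, 100, false, errors);
        System.out.println("validation problems: " + errors.size());
    }
}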
package com.celements.xwikiPatches; import static com.celements.common.test.CelementsTestUtils.*; import static org.easymock.EasyMock.*; import static org.junit.Assert.*; import java.util.ArrayList; import java.util.concurrent.Callable; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import org.hibernate.FlushMode; import org.hibernate.SessionFactory; import org.hibernate.Transaction; import org.hibernate.cfg.Configuration; import org.hibernate.classic.Session; import org.junit.Before; import org.junit.Test; import org.xwiki.cache.CacheFactory; import org.xwiki.context.Execution; import org.xwiki.context.ExecutionContext; import org.xwiki.model.reference.DocumentReference; import org.xwiki.query.QueryExecutor; import com.celements.common.test.AbstractComponentTest; import com.xpn.xwiki.XWikiConfig; import com.xpn.xwiki.XWikiContext; import com.xpn.xwiki.XWikiException; import com.xpn.xwiki.doc.XWikiDocument; import com.xpn.xwiki.store.XWikiCacheStore; import com.xpn.xwiki.store.XWikiStoreInterface; import com.xpn.xwiki.store.hibernate.HibernateSessionFactory; import com.xpn.xwiki.web.Utils; public class ConcurrentCacheTest extends AbstractComponentTest { private XWikiCacheStore theCacheStore; private String wikiName = "testWiki"; private String testFullName = "TestSpace.TestDoc"; private XWikiConfig configMock; private DocumentReference testDocRef; private SessionFactory sessionFactoryMock; private Configuration hibConfigMock; private QueryExecutor hibQueryExecMock; @Before public void setUp_ConcurrentCatchTest() throws Exception { getContext().setDatabase(wikiName); sessionFactoryMock = createMockAndAddToDefault(SessionFactory.class); hibConfigMock = createMockAndAddToDefault(Configuration.class); Utils.getComponent(HibernateSessionFactory.class).setSessionFactory(sessionFactoryMock); hibQueryExecMock = registerComponentMock(QueryExecutor.class, "hql"); testDocRef = new DocumentReference(wikiName, "TestSpace", "TestDoc"); configMock = createMockAndAddToDefault(XWikiConfig.class); expect(getWikiMock().getConfig()).andReturn(configMock).anyTimes(); expect(configMock.getProperty(eq("xwiki.store.hibernate.path"), eq( "/WEB-INF/hibernate.cfg.xml"))).andReturn("testhibernate.cfg.xml"); expect(getWikiMock().Param(eq("xwiki.store.cache.capacity"))).andReturn(null).anyTimes(); expect(getWikiMock().Param(eq("xwiki.store.cache.pageexistcapacity"))).andReturn( null).anyTimes(); CacheFactory cacheFactory = Utils.getComponent(CacheFactory.class, "jbosscache"); expect(getWikiMock().getCacheFactory()).andReturn(cacheFactory).anyTimes(); expect(getWikiMock().getPlugin(eq("monitor"), same(getContext()))).andReturn(null).anyTimes(); expect(getWikiMock().hasDynamicCustomMappings()).andReturn(false).anyTimes(); expect(getWikiMock().isVirtualMode()).andReturn(false).anyTimes(); } @Test public void test_singleThreaded() throws Exception { Session sessionMock = createMockAndAddToDefault(Session.class); expect(sessionFactoryMock.openSession()).andReturn(sessionMock).once(); sessionMock.setFlushMode(eq(FlushMode.COMMIT)); expectLastCall().once(); sessionMock.setFlushMode(eq(FlushMode.MANUAL)); expectLastCall().once(); Transaction transactionMock = createMockAndAddToDefault(Transaction.class); expect(sessionMock.beginTransaction()).andReturn(transactionMock).once(); expect(sessionMock.close()).andReturn(null).once(); // expected(sessionMock.load(capture(docCapture), id)) // TODO 
replayDefault(); initStore(); LoadXWikiDocCommand testLoadCommand = new LoadXWikiDocCommand(); Boolean result = testLoadCommand.call(); assertTrue(result); verifyDefault(); } @Test public void test_multiThreaded() throws Exception { replayDefault(); initStore(); int cores = Runtime.getRuntime().availableProcessors(); assertTrue("This tests needs real multi core processors, but found " + cores, cores > 1); ScheduledExecutorService theExecutor = Executors.newScheduledThreadPool(cores); ArrayList<ScheduledFuture<Boolean>> futureList = new ArrayList<>(100); for (int i = 1; i < 100; i++) { ScheduledFuture<Boolean> testFuture = theExecutor.schedule(new LoadXWikiDocCommand(), 90, TimeUnit.MILLISECONDS); futureList.add(testFuture); } theExecutor.scheduleAtFixedRate(new ResetCacheEntryCommand(), 100, 100, TimeUnit.MILLISECONDS); try { theExecutor.awaitTermination(10, TimeUnit.SECONDS); } catch (InterruptedException exp) { exp.printStackTrace(); } for (ScheduledFuture<Boolean> testFuture : futureList) { assertTrue(testFuture.isDone()); assertTrue(testFuture.get()); } theExecutor.shutdown(); verifyDefault(); } void initStore() throws XWikiException { XWikiStoreInterface store = Utils.getComponent(XWikiStoreInterface.class); theCacheStore = new XWikiCacheStore(store, getContext()); } private class ResetCacheEntryCommand implements Runnable { @Override public void run() { String key = theCacheStore.getKey(wikiName, testFullName, ""); if (theCacheStore.getCache() != null) { theCacheStore.getCache().remove(key); } } } private class LoadXWikiDocCommand implements Callable<Boolean> { private XWikiContext getContext() { Execution execution = Utils.getComponent(Execution.class); ExecutionContext execContext = execution.getContext(); // TODO create ExecutionContext if not exists return (XWikiContext) execContext.getProperty("xwikicontext"); } @Override public Boolean call() throws Exception { XWikiDocument myDoc = new XWikiDocument(testDocRef); XWikiDocument loadedXWikiDoc = theCacheStore.loadXWikiDoc(myDoc, getContext()); assertNotNull(loadedXWikiDoc); // TODO check objects return false; } } }
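/*
 * Illustrative sketch (not part of the original test): the scheduling pattern test_multiThreaded
 * relies on, reduced to plain java.util.concurrent - schedule many Callable tasks on a
 * ScheduledExecutorService, keep every ScheduledFuture, then inspect the results. The Callable
 * body is a trivial placeholder. Unlike the test above, this sketch calls shutdown() before
 * awaitTermination(), so the wait actually ends when the scheduled work completes rather than
 * only when the timeout expires.
 */
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;

public class ScheduledCallableSketch {

    public static void main(String[] args) throws Exception {
        int cores = Runtime.getRuntime().availableProcessors();
        ScheduledExecutorService executor = Executors.newScheduledThreadPool(cores);

        // Schedule the same task many times with a small initial delay, keeping every future.
        List<ScheduledFuture<Boolean>> futures = new ArrayList<>();
        for (int i = 0; i < 100; i++) {
            futures.add(executor.schedule(new Callable<Boolean>() {
                @Override
                public Boolean call() {
                    return Boolean.TRUE; // placeholder for the real per-thread work
                }
            }, 90, TimeUnit.MILLISECONDS));
        }

        // Stop accepting new work, wait for completion, then check each result.
        executor.shutdown();
        executor.awaitTermination(10, TimeUnit.SECONDS);
        for (ScheduledFuture<Boolean> future : futures) {
            System.out.println(future.isDone() + " -> " + future.get());
        }
    }
}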
package com.commercetools.sdk.contrib; import io.sphere.sdk.client.*; import org.junit.rules.ExternalResource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.time.Duration; import java.util.Properties; import java.util.concurrent.CompletionStage; import java.util.concurrent.TimeUnit; public final class SphereClientRule extends ExternalResource implements BlockingSphereClient { private static final Logger LOGGER = LoggerFactory.getLogger(SphereClientRule.class); private BlockingSphereClient client; @Override public void close() { LOGGER.warn("it is not recommended to close the client directly in " + getClass().getName()); client.close(); } @Override public <T> CompletionStage<T> execute(final SphereRequest<T> sphereRequest) { return client.execute(sphereRequest); } @Override public <T> T executeBlocking(final SphereRequest<T> sphereRequest) { return client.executeBlocking(sphereRequest); } @Override public <T> T executeBlocking(final SphereRequest<T> sphereRequest, final long l, final TimeUnit timeUnit) { return client.executeBlocking(sphereRequest, l, timeUnit); } @Override public <T> T executeBlocking(final SphereRequest<T> sphereRequest, final Duration duration) { return client.executeBlocking(sphereRequest, duration); } @Override protected void after() { client.close(); } @Override protected void before() throws Throwable { if (System.getenv("JVM_SDK_CONTRIB_IT_PROJECT_KEY") != null) { initializeClientFromEnv(); } else { initializeClientFromProperties(); } } private void initializeClientFromEnv() { final SphereClientConfig config = SphereClientConfig.ofEnvironmentVariables("JVM_SDK_CONTRIB_IT"); initializeClient(config); } private void initializeClientFromProperties() throws IOException { final File file = findFile(new File("integrationtest.properties").getAbsoluteFile(), 5); try (final FileInputStream fileInputStream = new FileInputStream(file)) { final Properties properties = new Properties(); properties.load(fileInputStream); final SphereClientConfig config = SphereClientConfig.ofProperties(properties, ""); initializeClient(config); } } private void initializeClient(final SphereClientConfig config) { final SphereClient underlying = SphereClientFactory.of().createClient(config); client = BlockingSphereClient.of(underlying, 20, TimeUnit.SECONDS); } private static File findFile(final File initial, final int ttl) { if (ttl <= 0 || initial.exists()) { return initial; } else { final String name = initial.getName(); final File parentFile = initial.getParentFile().getParentFile(); final File newInitial = new File(parentFile, name).getAbsoluteFile(); return findFile(newInitial, ttl - 1); } } }
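/*
 * Illustrative sketch (not part of the original sources): how SphereClientRule is meant to be
 * plugged into an integration test as a JUnit rule, so before()/after() create and close the
 * underlying BlockingSphereClient. No concrete SphereRequest implementation appears in the source
 * above, so the request variable below is only a hypothetical placeholder; executeBlocking(...)
 * is the call the rule exposes.
 */
import com.commercetools.sdk.contrib.SphereClientRule;
import io.sphere.sdk.client.SphereRequest;
import org.junit.Rule;
import org.junit.Test;

public class SphereClientRuleUsageSketch {

    // The rule initializes the client before each test (from env vars or properties) and closes it afterwards.
    @Rule
    public final SphereClientRule sphereClient = new SphereClientRule();

    @Test
    public void executesARequestAgainstTheConfiguredProject() {
        SphereRequest<String> request = null; // hypothetical; supply a real SDK request here
        if (request != null) {
            String result = sphereClient.executeBlocking(request);
            System.out.println(result);
        }
    }
}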
package com.pollistics.controllers; import com.pollistics.models.Poll; import com.pollistics.services.PollService; import org.hamcrest.Matchers; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.boot.test.mock.mockito.MockBean; import org.springframework.test.context.junit4.SpringRunner; import org.springframework.test.web.servlet.MockMvc; import java.util.Arrays; import java.util.HashMap; import java.util.List; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.hasItem; import static org.junit.Assert.fail; import static org.mockito.Mockito.when; import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.csrf; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*; @RunWith(SpringRunner.class) @SpringBootTest @AutoConfigureMockMvc public class PollControllerTests { @MockBean private PollService pollService; @Autowired private MockMvc mockMvc; @Test public void getPollByIdTest() throws Exception { try { HashMap<String, Integer> options = new HashMap<>(); options.put("Blauw", 1); options.put("Rood", 12); when(pollService.getPoll("someId123")).thenReturn(new Poll("Mooi kleur", options)); when(pollService.getPoll("NotARealId")).thenReturn(null); this.mockMvc.perform(get("/polls/someId123")).andDo(print()) .andExpect(status().isOk()) .andExpect(model().attribute("poll", Matchers.<Poll>hasProperty("name", equalTo("Mooi kleur")))) .andExpect(model().attribute("poll", Matchers.<Poll>hasProperty("options", Matchers.hasEntry("Blauw", 1)))) .andExpect(model().attribute("poll", Matchers.<Poll>hasProperty("options", Matchers.hasEntry("Rood", 12)))); this.mockMvc.perform(get("/polls/someImpossibleId")) .andExpect(status().isNotFound()); } catch (Exception e) { fail(e.getMessage()); } } @Test public void createPollTest() { try { HashMap<String, Integer> options = new HashMap<>(); String title = "Poll title"; String option1 = "option1"; String option2 = "option2"; String option3 = "option3"; options.put(option1, 0); options.put(option2, 0); options.put(option3, 0); when(pollService.createPoll(title, options)).thenReturn("someId123"); this.mockMvc.perform(post("/polls/create").with(csrf()) .param("title", title) .param("option1",option1) .param("option2", option2) .param("option3", option3)) .andDo(print()) .andExpect(status().is3xxRedirection()) .andExpect(redirectedUrl("/someId123")); } catch (Exception e) { fail(e.getMessage()); } } @Test public void deletePollTest() { try { when(pollService.deletePoll("someId123")).thenReturn(true); when(pollService.deletePoll("someFakeId")).thenReturn(false); this.mockMvc.perform(post("/polls/delete/someId123").with(csrf())) .andExpect(flash().attribute("message", "The poll has deleted successfully!")) .andExpect(redirectedUrl("/")); this.mockMvc.perform(post("/polls/delete/someFakeId").with(csrf())) .andExpect(status().is4xxClientError()); } catch (Exception e) { fail(e.getMessage()); } } @Test public void voteTest() { try { HashMap<String, Integer> options = new 
HashMap<>(); options.put("Blauw", 1); options.put("Rood", 12); Poll p = new Poll("Welk kleur?", options); when(pollService.voteOption(p, "Rood")).thenReturn(true); this.mockMvc.perform(post("/polls/vote/someId123").with(csrf()) .param("option", "Rood")) .andDo(print()) .andExpect(status().is3xxRedirection()) .andExpect(redirectedUrl("/someId123")) .andExpect(cookie().exists("id")); } catch (Exception e) { fail(e.getMessage()); } } @Test public void getAllPollsTest() throws Exception { try { HashMap<String, Integer> options = new HashMap<>(); options.put("Blauw", 1); options.put("Rood", 12); Poll poll1 = new Poll("Mooi kleur", options); Poll poll2 = new Poll("Vies kleur", options); Poll poll3 = new Poll("Raar kleur", options); List<Poll> polls = Arrays.asList(poll1, poll2, poll3); when(pollService.getAllPolls()).thenReturn(polls); this.mockMvc.perform(get("/polls/")).andDo(print()) .andExpect(status().isOk()) .andExpect(model().attribute("polls", hasItem(Matchers.<Poll>hasProperty("name", equalTo("Mooi kleur"))))) .andExpect(model().attribute("polls", hasItem(Matchers.<Poll>hasProperty("name", equalTo("Vies kleur"))))) .andExpect(model().attribute("polls", hasItem(Matchers.<Poll>hasProperty("name", equalTo("Raar kleur"))))) .andExpect(model().attribute("polls", hasItem(Matchers.<Poll>hasProperty("options", Matchers.hasEntry("Blauw", 1))))) .andExpect(model().attribute("polls", hasItem(Matchers.<Poll>hasProperty("options", Matchers.hasEntry("Rood", 12))))); } catch (Exception e) { fail(e.getMessage()); } } }
// $Id: JDBCUtil.java,v 1.10 2004/05/28 01:54:47 eric Exp $ // samskivert library - useful routines for java programs // This library is free software; you can redistribute it and/or modify it // (at your option) any later version. // This library is distributed in the hope that it will be useful, // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU // You should have received a copy of the GNU Lesser General Public // Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA package com.samskivert.jdbc; import java.io.UnsupportedEncodingException; import java.sql.*; import com.samskivert.Log; import com.samskivert.io.PersistenceException; import com.samskivert.util.StringUtil; /** * A repository for JDBC related utility functions. */ public class JDBCUtil { /** * Closes the supplied JDBC statement and gracefully handles being * passed null (by doing nothing). */ public static void close (Statement stmt) throws SQLException { if (stmt != null) { stmt.close(); } } /** * Closes the supplied JDBC connection and gracefully handles being * passed null (by doing nothing). */ public static void close (Connection conn) throws SQLException { if (conn != null) { conn.close(); } } /** * Calls <code>stmt.executeUpdate()</code> on the supplied statement, * checking to see that it returns the expected update count and * throwing a persistence exception if it does not. */ public static void checkedUpdate ( PreparedStatement stmt, int expectedCount) throws SQLException, PersistenceException { int modified = stmt.executeUpdate(); if (modified != expectedCount) { String err = "Statement did not modify expected number of rows " + "[stmt=" + stmt + ", expected=" + expectedCount + ", modified=" + modified + "]"; throw new PersistenceException(err); } } /** * Calls <code>stmt.executeUpdate()</code> on the supplied statement * with the supplied query, checking to see that it returns the * expected update count and throwing a persistence exception if it * does not. */ public static void checkedUpdate ( Statement stmt, String query, int expectedCount) throws SQLException, PersistenceException { int modified = stmt.executeUpdate(query); if (modified != expectedCount) { String err = "Statement did not modify expected number of rows " + "[stmt=" + stmt + ", expected=" + expectedCount + ", modified=" + modified + "]"; throw new PersistenceException(err); } } /** * Calls <code>stmt.executeUpdate()</code> on the supplied statement, * checking to see that it returns the expected update count and * logging a warning if it does not. */ public static void warnedUpdate ( PreparedStatement stmt, int expectedCount) throws SQLException { int modified = stmt.executeUpdate(); if (modified != expectedCount) { Log.warning("Statement did not modify expected number of rows " + "[stmt=" + stmt + ", expected=" + expectedCount + ", modified=" + modified + "]"); } } /** * Calls <code>stmt.executeUpdate()</code> on the supplied statement * with the supplied query, checking to see that it returns the * expected update count and logging a warning if it does not. 
*/ public static void warnedUpdate ( Statement stmt, String query, int expectedCount) throws SQLException { int modified = stmt.executeUpdate(query); if (modified != expectedCount) { Log.warning("Statement did not modify expected number of rows " + "[stmt=" + stmt + ", expected=" + expectedCount + ", modified=" + modified + "]"); } } /** * Escapes any single quotes in the supplied text and wraps it in * single quotes to make it safe for embedding into a database query. */ public static String escape (String text) { return "'" + StringUtil.replace(text, "'", "\\'") + "'"; } /** * Many databases simply fail to handle Unicode text properly and this * routine provides a common workaround which is to represent a UTF-8 * string as an ISO-8859-1 string. If you don't need to use the * database's collation routines, this allows you to do pretty much * exactly what you want at the expense of having to jigger and * dejigger every goddamned string that might contain multibyte * characters every time you access the database. Three cheers for * progress! */ public static String jigger (String text) { if (text == null) { return null; } try { return new String(text.getBytes("UTF8"), "8859_1"); } catch (UnsupportedEncodingException uee) { Log.logStackTrace(uee); return text; } } /** * Reverses {@link #jigger}. */ public static String unjigger (String text) { if (text == null) { return null; } try { return new String(text.getBytes("8859_1"), "UTF8"); } catch (UnsupportedEncodingException uee) { Log.logStackTrace(uee); return text; } } /** * Utility method to jigger the specified string so that it's safe * to use in a regular Statement. */ public static String safeJigger (String text) { return StringUtil.replace(jigger(text), "'", "\\'"); } /** * Returns true if the table with the specified name exists, false if * it does not. <em>Note:</em> the table name is case sensitive. */ public static boolean tableExists (Connection conn, String name) throws SQLException { boolean matched = false; ResultSet rs = conn.getMetaData().getTables("", "", name, null); while (rs.next()) { String tname = rs.getString("TABLE_NAME"); if (name.equals(tname)) { matched = true; } } return matched; } /** * Returns true if the table with the specified name exists and * contains a column with the specified name, false if either * condition does not hold true. <em>Note:</em> the names are case * sensitive. */ public static boolean tableContainsColumn ( Connection conn, String table, String column) throws SQLException { boolean matched = false; ResultSet rs = conn.getMetaData().getColumns("", "", table, column); while (rs.next()) { String tname = rs.getString("TABLE_NAME"); String cname = rs.getString("COLUMN_NAME"); if (tname.equals(table) && cname.equals(column)) { matched = true; } } return matched; } /** * Returns true if the index on the specified column exists for the * specified table, false if it does not. Optionally you can specify * a non null index name, and the table will be checked to see if it * contains that specifically named index. <em>Note:</em> the names * are case sensitive.
*/ public static boolean tableContainsIndex (Connection conn, String table, String column, String index) throws SQLException { boolean matched = false; ResultSet rs = conn.getMetaData().getIndexInfo("", "", table, false, true); while (rs.next()) { String tname = rs.getString("TABLE_NAME"); String cname = rs.getString("COLUMN_NAME"); String iname = rs.getString("INDEX_NAME"); if (index == null) { if (tname.equals(table) && cname.equals(column)) { matched = true; } } else if (index.equals(iname)) { matched = true; } } return matched; } /** * Returns true if the specified table contains a primary key on the * specified column. */ public static boolean tableContainsPrimaryKey (Connection conn, String table, String column) throws SQLException { boolean matched = false; ResultSet rs = conn.getMetaData().getPrimaryKeys("", "", table); while (rs.next()) { String tname = rs.getString("TABLE_NAME"); String cname = rs.getString("COLUMN_NAME"); if (tname.equals(table) && cname.equals(column)) { matched = true; } } return matched; } /** * Returns the name of the index for the specified column in the * specified table. */ public static String getIndexName (Connection conn, String table, String column) throws SQLException { boolean matched = false; ResultSet rs = conn.getMetaData().getIndexInfo("", "", table, false, true); while (rs.next()) { String tname = rs.getString("TABLE_NAME"); String cname = rs.getString("COLUMN_NAME"); String iname = rs.getString("INDEX_NAME"); if (tname.equals(table) && cname.equals(column)) { return iname; } } return null; } /** * Returns the type (as specified in {@link java.sql.Types} for the * specified column in the specified table. */ public static int getColumnType (Connection conn, String table, String column) throws SQLException { boolean matched = false; ResultSet rs = conn.getMetaData().getColumns("", "", table, column); while (rs.next()) { String tname = rs.getString("TABLE_NAME"); String cname = rs.getString("COLUMN_NAME"); int type = rs.getInt("DATA_TYPE"); if (tname.equals(table) && cname.equals(column)) { return type; } } throw new SQLException("Table or Column not defined. [table=" + table + ", col=" + column + "]."); } /** * Adds a column (with name 'cname' and definition 'cdef') to the * specified table. * * @param afterCname (optional) the name of the column after which to * add the new column. */ public static void addColumn (Connection conn, String table, String cname, String cdef, String afterCname) throws SQLException { if (JDBCUtil.tableContainsColumn(conn, table, cname)) { // Log.info("Database table '" + table + "' already has column '" + // cname + "'."); return; } String update = "ALTER TABLE " + table + " ADD COLUMN " + cname + " " + cdef; if (afterCname != null) { update += " AFTER " + afterCname; } PreparedStatement stmt = null; try { stmt = conn.prepareStatement(update); stmt.executeUpdate(); } finally { close(stmt); } Log.info("Database column '" + cname + "' added to table '" + table + "'."); } /** * Changes a column's definition. Takes a full column definition * 'cdef' (including the name of the column) with which to replace the * specified column 'cname'. * * NOTE: A handy thing you can do with this is to rename a column by * providing a column definition that has a different name, but the * same column type. 
*/ public static void changeColumn (Connection conn, String table, String cname, String cdef) throws SQLException { String update = "ALTER TABLE " + table + " CHANGE " + cname + " " + cdef; PreparedStatement stmt = null; try { stmt = conn.prepareStatement(update); stmt.executeUpdate(); } finally { close(stmt); } Log.info("Database column '" + cname + "' of table '" + table + "' modified to have this def '" + cdef + "'."); } /** * Removes a column from the specified table. */ public static void dropColumn (Connection conn, String table, String cname) throws SQLException { String update = "ALTER TABLE " + table + " DROP COLUMN " + cname; PreparedStatement stmt = null; try { stmt = conn.prepareStatement(update); if (stmt.executeUpdate() == 1) { Log.info("Database column '" + cname + "' removed from " + "table '" + table + "'."); } } finally { close(stmt); } } /** * Removes a named index from the specified table. */ public static void dropIndex (Connection conn, String table, String iname) throws SQLException { String update = "ALTER TABLE " + table + " DROP INDEX " + iname; PreparedStatement stmt = null; try { stmt = conn.prepareStatement(update); if (stmt.executeUpdate() == 1) { Log.info("Database index '" + iname + "' removed from " + "table '" + table + "'."); } } finally { close(stmt); } } /** * Removes the primary key from the specified table. */ public static void dropPrimaryKey (Connection conn, String table) throws SQLException { String update = "ALTER TABLE " + table + " DROP PRIMARY KEY"; PreparedStatement stmt = null; try { stmt = conn.prepareStatement(update); if (stmt.executeUpdate() == 1) { Log.info("Database primary key removed from '" + table + "'."); } } finally { close(stmt); } } /** * Adds an index on the specified column (cname) to the specified * table. Optionally supply an index name, otherwise the index is * named after the column. */ public static void addIndexToTable (Connection conn, String table, String cname, String iname) throws SQLException { if (JDBCUtil.tableContainsIndex(conn, table, cname, iname)) { // Log.info("Database table '" + table + "' already has an index " + // "on column '" + cname + "'" + // (iname != null ? " named '" + iname + "'." : ".")); return; } String idx_name = (iname != null ? iname : cname); String update = "CREATE INDEX " + idx_name + " on " + table + "(" + cname + ")"; PreparedStatement stmt = null; try { stmt = conn.prepareStatement(update); stmt.executeUpdate(); } finally { close(stmt); } Log.info("Database index '" + idx_name + "' added to table '" + table + "'"); } }
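/*
 * Illustrative usage sketch (not part of the original library): a caller that owns a JDBC
 * Connection using JDBCUtil's schema helpers for a small, idempotent migration. Only static
 * methods defined in JDBCUtil above are used; the JDBC URL, table, column, and index names are
 * placeholders.
 */
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;

import com.samskivert.jdbc.JDBCUtil;

public class JDBCUtilUsageSketch {

    public static void main(String[] args) throws SQLException {
        Connection conn = DriverManager.getConnection("jdbc:mysql://localhost/sample", "user", "password");
        try {
            // addColumn() and addIndexToTable() return without doing anything when the column or
            // index already exists, so this block can safely be run repeatedly.
            if (JDBCUtil.tableExists(conn, "players")) {
                JDBCUtil.addColumn(conn, "players", "last_login", "DATETIME", "created");
                JDBCUtil.addIndexToTable(conn, "players", "last_login", "players_last_login_idx");
            }
        } finally {
            JDBCUtil.close(conn);
        }
    }
}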
package com.turo.pushy.apns; import com.turo.pushy.apns.auth.ApnsSigningKey; import com.turo.pushy.apns.proxy.ProxyHandlerFactory; import io.netty.channel.EventLoopGroup; import io.netty.handler.codec.http2.Http2FrameLogger; import io.netty.handler.codec.http2.Http2SecurityUtil; import io.netty.handler.ssl.*; import io.netty.util.ReferenceCounted; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.net.ssl.SSLException; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.net.InetSocketAddress; import java.security.KeyStore; import java.security.KeyStoreException; import java.security.PrivateKey; import java.security.cert.Certificate; import java.security.cert.X509Certificate; import java.util.concurrent.TimeUnit; public class ApnsClientBuilder { private InetSocketAddress apnsServerAddress; private X509Certificate clientCertificate; private PrivateKey privateKey; private String privateKeyPassword; private ApnsSigningKey signingKey; private File trustedServerCertificatePemFile; private InputStream trustedServerCertificateInputStream; private X509Certificate[] trustedServerCertificates; private EventLoopGroup eventLoopGroup; private int concurrentConnections = 1; private ApnsClientMetricsListener metricsListener; private ProxyHandlerFactory proxyHandlerFactory; private int connectionTimeoutMillis; private long idlePingIntervalMillis = DEFAULT_PING_IDLE_TIME_MILLIS; private long gracefulShutdownTimeoutMillis; private Http2FrameLogger frameLogger; /** * The default idle time in milliseconds after which the client will send a PING frame to the APNs server. * * @since 0.11 */ public static final int DEFAULT_PING_IDLE_TIME_MILLIS = 60_000; /** * The hostname for the production APNs gateway. * * @since 0.5 */ public static final String PRODUCTION_APNS_HOST = "api.push.apple.com"; /** * The hostname for the development APNs gateway. * * @since 0.5 */ public static final String DEVELOPMENT_APNS_HOST = "api.sandbox.push.apple.com"; /** * The default (HTTPS) port for communication with the APNs gateway. * * @since 0.5 */ public static final int DEFAULT_APNS_PORT = 443; public static final int ALTERNATE_APNS_PORT = 2197; private static final Logger log = LoggerFactory.getLogger(ApnsClientBuilder.class); public ApnsClientBuilder setApnsServer(final String hostname) { return this.setApnsServer(hostname, DEFAULT_APNS_PORT); } public ApnsClientBuilder setApnsServer(final String hostname, final int port) { this.apnsServerAddress = InetSocketAddress.createUnresolved(hostname, port); return this; } /** * <p>Sets the TLS credentials for the client under construction using the contents of the given PKCS#12 file. * Clients constructed with TLS credentials will use TLS-based authentication when sending push notifications. The * PKCS#12 file <em>must</em> contain a certificate/private key pair.</p> * * <p>Clients may not have both TLS credentials and a signing key.</p> * * @param p12File a PKCS#12-formatted file containing the certificate and private key to be used to identify the * client to the APNs server * @param p12Password the password to be used to decrypt the contents of the given PKCS#12 file; passwords may be * blank (i.e. 
{@code ""}), but must not be {@code null} * * @throws SSLException if the given PKCS#12 file could not be loaded or if any other SSL-related problem arises * when constructing the context * @throws IOException if any IO problem occurred while attempting to read the given PKCS#12 file, or the PKCS#12 * file could not be found * * @return a reference to this builder * * @since 0.8 */ public ApnsClientBuilder setClientCredentials(final File p12File, final String p12Password) throws SSLException, IOException { try (final InputStream p12InputStream = new FileInputStream(p12File)) { return this.setClientCredentials(p12InputStream, p12Password); } } /** * <p>Sets the TLS credentials for the client under construction using the data from the given PKCS#12 input stream. * Clients constructed with TLS credentials will use TLS-based authentication when sending push notifications. The * PKCS#12 data <em>must</em> contain a certificate/private key pair.</p> * * <p>Clients may not have both TLS credentials and a signing key.</p> * * @param p12InputStream an input stream to a PKCS#12-formatted file containing the certificate and private key to * be used to identify the client to the APNs server * @param p12Password the password to be used to decrypt the contents of the given PKCS#12 file; passwords may be * blank (i.e. {@code ""}), but must not be {@code null} * * @throws SSLException if the given PKCS#12 file could not be loaded or if any other SSL-related problem arises * when constructing the context * @throws IOException if any IO problem occurred while attempting to read the given PKCS#12 input stream * * @return a reference to this builder * * @since 0.8 */ public ApnsClientBuilder setClientCredentials(final InputStream p12InputStream, final String p12Password) throws SSLException, IOException { final X509Certificate x509Certificate; final PrivateKey privateKey; try { final KeyStore.PrivateKeyEntry privateKeyEntry = P12Util.getFirstPrivateKeyEntryFromP12InputStream(p12InputStream, p12Password); final Certificate certificate = privateKeyEntry.getCertificate(); if (!(certificate instanceof X509Certificate)) { throw new KeyStoreException("Found a certificate in the provided PKCS#12 file, but it was not an X.509 certificate."); } x509Certificate = (X509Certificate) certificate; privateKey = privateKeyEntry.getPrivateKey(); } catch (final KeyStoreException e) { throw new SSLException(e); } return this.setClientCredentials(x509Certificate, privateKey, p12Password); } /** * <p>Sets the TLS credentials for the client under construction. Clients constructed with TLS credentials will use * TLS-based authentication when sending push notifications.</p> * * <p>Clients may not have both TLS credentials and a signing key.</p> * * @param clientCertificate the certificate to be used to identify the client to the APNs server * @param privateKey the private key for the client certificate * @param privateKeyPassword the password to be used to decrypt the private key; may be {@code null} if the private * key does not require a password * * @return a reference to this builder * * @since 0.8 */ public ApnsClientBuilder setClientCredentials(final X509Certificate clientCertificate, final PrivateKey privateKey, final String privateKeyPassword) { this.clientCertificate = clientCertificate; this.privateKey = privateKey; this.privateKeyPassword = privateKeyPassword; return this; } /** * <p>Sets the signing key for the client under construction. 
Clients constructed with a signing key will use * token-based authentication when sending push notifications.</p> * * <p>Clients may not have both a signing key and TLS credentials.</p> * * @param signingKey the signing key to be used by the client under construction * * @return a reference to this builder * * @see ApnsSigningKey#loadFromPkcs8File(File, String, String) * @see ApnsSigningKey#loadFromInputStream(InputStream, String, String) * * @since 0.10 */ public ApnsClientBuilder setSigningKey(final ApnsSigningKey signingKey) { this.signingKey = signingKey; return this; } public ApnsClientBuilder setTrustedServerCertificateChain(final File certificatePemFile) { this.trustedServerCertificatePemFile = certificatePemFile; this.trustedServerCertificateInputStream = null; this.trustedServerCertificates = null; return this; } public ApnsClientBuilder setTrustedServerCertificateChain(final InputStream certificateInputStream) { this.trustedServerCertificatePemFile = null; this.trustedServerCertificateInputStream = certificateInputStream; this.trustedServerCertificates = null; return this; } public ApnsClientBuilder setTrustedServerCertificateChain(final X509Certificate... certificates) { this.trustedServerCertificatePemFile = null; this.trustedServerCertificateInputStream = null; this.trustedServerCertificates = certificates; return this; } /** * <p>Sets the event loop group to be used by the client under construction. If not set (or if {@code null}), the * client will create and manage its own event loop group.</p> * * <p>Generally speaking, callers don't need to set event loop groups for clients, but it may be useful to specify * an event loop group under certain circumstances. In particular, specifying an event loop group that is shared * among multiple {@code ApnsClient} instances can keep thread counts manageable. Regardless of the number of * concurrent {@code ApnsClient} instances, callers may also wish to specify an event loop group to take advantage * of certain platform-specific optimizations (e.g. {@code epoll} or {@code KQueue} event loop groups).</p> * * @param eventLoopGroup the event loop group to use for this client, or {@code null} to let the client manage its * own event loop group * * @return a reference to this builder * * @since 0.8 */ public ApnsClientBuilder setEventLoopGroup(final EventLoopGroup eventLoopGroup) { this.eventLoopGroup = eventLoopGroup; return this; } /** * Sets the maximum number of concurrent connections the client under construction may attempt to maintain to the * APNs server. By default, clients will attempt to maintain a single connection to the APNs server. * * @param concurrentConnections the maximum number of concurrent connections the client under construction may * attempt to maintain * * @return a reference to this builder * * @since 0.11 */ public ApnsClientBuilder setConcurrentConnections(final int concurrentConnections) { this.concurrentConnections = concurrentConnections; return this; } /** * Sets the metrics listener for the client under construction. Metrics listeners gather information that describes * the performance and behavior of a client, and are completely optional. 
* * @param metricsListener the metrics listener for the client under construction, or {@code null} if this client * should not report metrics to a listener * * @return a reference to this builder * * @since 0.8 */ public ApnsClientBuilder setMetricsListener(final ApnsClientMetricsListener metricsListener) { this.metricsListener = metricsListener; return this; } /** * Sets the proxy handler factory to be used to construct proxy handlers when establishing a new connection to the * APNs gateway. A client's proxy handler factory may be {@code null}, in which case the client will connect to the * gateway directly and will not use a proxy. By default, clients will not use a proxy. * * @param proxyHandlerFactory the proxy handler factory to be used to construct proxy handlers, or {@code null} if * this client should not use a proxy * * @return a reference to this builder * * @since 0.8 */ public ApnsClientBuilder setProxyHandlerFactory(final ProxyHandlerFactory proxyHandlerFactory) { this.proxyHandlerFactory = proxyHandlerFactory; return this; } /** * Sets the maximum amount of time, in milliseconds, that the client under construction will wait to establish a * connection with the APNs server before the connection attempt is considered a failure. * * @param connectionTimeout the maximum amount of time to wait for a connection attempt to complete * @param timeoutUnit the time unit for the given timeout * * @return a reference to this builder * * @since 0.8 */ public ApnsClientBuilder setConnectionTimeout(final long connectionTimeout, final TimeUnit timeoutUnit) { this.connectionTimeoutMillis = (int) timeoutUnit.toMillis(connectionTimeout); return this; } /** * Sets the amount of idle time (in milliseconds) after which the client under construction will send a PING frame * to the APNs server. By default, clients will send a PING frame after * {@value com.turo.pushy.apns.ApnsClientBuilder#DEFAULT_PING_IDLE_TIME_MILLIS} milliseconds of inactivity. * * @param pingInterval the amount of idle time after which the client will send a PING frame * @param pingIntervalUnit the time unit for the given idle time * * @return a reference to this builder * * @since 0.10 */ public ApnsClientBuilder setIdlePingInterval(final long pingInterval, final TimeUnit pingIntervalUnit) { this.idlePingIntervalMillis = pingIntervalUnit.toMillis(pingInterval); return this; } /** * Sets the amount of time clients should wait for in-progress requests to complete before closing a connection * during a graceful shutdown. 
* * @param gracefulShutdownTimeout the amount of time to wait for in-progress requests to complete before closing a * connection * @param timeoutUnit the time unit for the given timeout * * @return a reference to this builder * * @see ApnsClient#close() * * @since 0.8 */ public ApnsClientBuilder setGracefulShutdownTimeout(final long gracefulShutdownTimeout, final TimeUnit timeoutUnit) { this.gracefulShutdownTimeoutMillis = timeoutUnit.toMillis(gracefulShutdownTimeout); return this; } public ApnsClientBuilder setFrameLogger(final Http2FrameLogger frameLogger) { this.frameLogger = frameLogger; return this; } public ApnsClient build() throws SSLException { if (this.apnsServerAddress == null) { throw new IllegalStateException("No APNs server address specified."); } if (this.clientCertificate == null && this.privateKey == null && this.signingKey == null) { throw new IllegalStateException("No client credentials specified; either TLS credentials (a " + "certificate/private key) or an APNs signing key must be provided before building a client."); } else if ((this.clientCertificate != null || this.privateKey != null) && this.signingKey != null) { throw new IllegalStateException("Clients may not have both a signing key and TLS credentials."); } final SslContext sslContext; { final SslProvider sslProvider; if (OpenSsl.isAvailable()) { log.info("Native SSL provider is available; will use native provider."); sslProvider = SslProvider.OPENSSL_REFCNT; } else { log.info("Native SSL provider not available; will use JDK SSL provider."); sslProvider = SslProvider.JDK; } final SslContextBuilder sslContextBuilder = SslContextBuilder.forClient() .sslProvider(sslProvider) .ciphers(Http2SecurityUtil.CIPHERS, SupportedCipherSuiteFilter.INSTANCE); if (this.clientCertificate != null && this.privateKey != null) { sslContextBuilder.keyManager(this.privateKey, this.privateKeyPassword, this.clientCertificate); } if (this.trustedServerCertificatePemFile != null) { sslContextBuilder.trustManager(this.trustedServerCertificatePemFile); } else if (this.trustedServerCertificateInputStream != null) { sslContextBuilder.trustManager(this.trustedServerCertificateInputStream); } else if (this.trustedServerCertificates != null) { sslContextBuilder.trustManager(this.trustedServerCertificates); } sslContext = sslContextBuilder.build(); } final ApnsClient client = new ApnsClient(this.apnsServerAddress, sslContext, this.signingKey, this.proxyHandlerFactory, this.connectionTimeoutMillis, this.idlePingIntervalMillis, this.gracefulShutdownTimeoutMillis, this.concurrentConnections, this.metricsListener, this.frameLogger, this.eventLoopGroup); if (sslContext instanceof ReferenceCounted) { ((ReferenceCounted) sslContext).release(); } return client; } }
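// --- Illustrative usage sketch (not part of the Pushy sources above). It shows how the
// builder documented by the preceding Javadoc is typically used with token-based
// authentication. The .p8 path, team ID, key ID and the example package/class names are
// placeholder values; only methods that appear in ApnsClientBuilder itself
// (setApnsServer, setSigningKey, setConcurrentConnections, build) and the
// ApnsSigningKey.loadFromPkcs8File factory referenced in its Javadoc are used. ---
package com.turo.pushy.apns.examples;

import com.turo.pushy.apns.ApnsClient;
import com.turo.pushy.apns.ApnsClientBuilder;
import com.turo.pushy.apns.auth.ApnsSigningKey;

import java.io.File;

public class TokenAuthClientExample {

    public static void main(final String[] args) throws Exception {
        // Token-based authentication: provide a signing key instead of TLS credentials.
        final ApnsSigningKey signingKey = ApnsSigningKey.loadFromPkcs8File(
                new File("/path/to/AuthKey_KEYID.p8"), "TEAMID", "KEYID");

        final ApnsClient client = new ApnsClientBuilder()
                .setApnsServer(ApnsClientBuilder.DEVELOPMENT_APNS_HOST) // sandbox gateway
                .setSigningKey(signingKey)
                .setConcurrentConnections(2) // optional; a single connection is the default
                .build();

        // ... send notifications here; when finished, shut the client down gracefully
        // (see the graceful-shutdown timeout Javadoc above).
        client.close();
    }
}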
package net.xmeter.gui; import java.awt.BorderLayout; import java.awt.GridBagConstraints; import java.awt.GridBagLayout; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.io.File; import javax.swing.BorderFactory; import javax.swing.JButton; import javax.swing.JCheckBox; import javax.swing.JFileChooser; import javax.swing.JPanel; import javax.swing.event.ChangeEvent; import javax.swing.event.ChangeListener; import org.apache.jmeter.gui.util.FileDialoger; import org.apache.jmeter.gui.util.HorizontalPanel; import org.apache.jmeter.gui.util.VerticalPanel; import org.apache.jmeter.util.JMeterUtils; import org.apache.jorphan.gui.JLabeledChoice; import org.apache.jorphan.gui.JLabeledTextField; import net.xmeter.Constants; import net.xmeter.samplers.AbstractMQTTSampler; public class CommonConnUI implements ChangeListener, ActionListener, Constants{ private final JLabeledTextField serverAddr = new JLabeledTextField("Server name or IP:"); private final JLabeledTextField serverPort = new JLabeledTextField("Port number:", 5); private JCheckBox connShared = new JCheckBox("Share conn in thread"); private JLabeledChoice mqttVersion = new JLabeledChoice("MQTT version:", new String[] { MQTT_VERSION_3_1, MQTT_VERSION_3_1_1 }, false, false); private final JLabeledTextField timeout = new JLabeledTextField("Timeout(s):", 5); private final JLabeledTextField userNameAuth = new JLabeledTextField("User name:"); private final JLabeledTextField passwordAuth = new JLabeledTextField("Password:"); private JLabeledChoice protocols; private JCheckBox dualAuth = new JCheckBox("Dual SSL authentication"); private final JLabeledTextField tksFilePath = new JLabeledTextField("Trust Key Store(*.jks): ", 25); private final JLabeledTextField ccFilePath = new JLabeledTextField("Client Certificate(*.p12):", 25); private final JLabeledTextField tksPassword = new JLabeledTextField("Secret:", 10); private final JLabeledTextField ccPassword = new JLabeledTextField("Secret:", 10); private JButton tksBrowseButton; private JButton ccBrowseButton; private static final String TKS_BROWSE = "tks_browse"; private static final String CC_BROWSE = "cc_browse"; public final JLabeledTextField connNamePrefix = new JLabeledTextField("ClientId:", 8); private JCheckBox connNameSuffix = new JCheckBox("Add random suffix for ClientId"); private final JLabeledTextField connKeepAlive = new JLabeledTextField("Keep alive(s):", 4); private final JLabeledTextField connKeeptime = new JLabeledTextField("Connection keep time(s):", 4); private final JLabeledTextField connAttmptMax = new JLabeledTextField("Connect attempt max:", 0); private final JLabeledTextField reconnAttmptMax = new JLabeledTextField("Reconnect attempt max:", 0); public JPanel createConnPanel() { JPanel con = new HorizontalPanel(); JPanel connPanel = new HorizontalPanel(); connPanel.setBorder(BorderFactory.createTitledBorder(BorderFactory.createEtchedBorder(), "MQTT connection")); connPanel.add(serverAddr); connPanel.add(serverPort); connPanel.add(mqttVersion); connPanel.add(connShared); JPanel timeoutPanel = new HorizontalPanel(); timeoutPanel.setBorder(BorderFactory.createTitledBorder(BorderFactory.createEtchedBorder(), "Timeout")); timeoutPanel.add(timeout); con.add(connPanel); con.add(timeoutPanel); return con; } public JPanel createConnOptions() { JPanel optsPanelCon = new VerticalPanel(); optsPanelCon.setBorder(BorderFactory.createTitledBorder(BorderFactory.createEtchedBorder(), "Connection options")); JPanel optsPanel0 = new HorizontalPanel(); 
optsPanel0.add(connNamePrefix); optsPanel0.add(connNameSuffix); connNameSuffix.setSelected(true); optsPanelCon.add(optsPanel0); JPanel optsPanel1 = new HorizontalPanel(); optsPanel1.add(connKeepAlive); optsPanel1.add(connKeeptime); optsPanelCon.add(optsPanel1); optsPanel1.add(connAttmptMax); optsPanel1.add(reconnAttmptMax); optsPanelCon.add(optsPanel1); return optsPanelCon; } public JPanel createAuthentication() { JPanel optsPanelCon = new VerticalPanel(); optsPanelCon.setBorder(BorderFactory.createTitledBorder(BorderFactory.createEtchedBorder(), "User authentication")); JPanel optsPanel = new HorizontalPanel(); optsPanel.add(userNameAuth); optsPanel.add(passwordAuth); optsPanelCon.add(optsPanel); return optsPanelCon; } public JPanel createProtocolPanel() { JPanel protocolPanel = new VerticalPanel(); protocolPanel.setBorder(BorderFactory.createTitledBorder(BorderFactory.createEtchedBorder(), "Protocols")); JPanel pPanel = new HorizontalPanel(); //pPanel.setLayout(new GridLayout(1, 2)); protocols = new JLabeledChoice("Protocols:", new String[] { "TCP", "SSL" }, true, false); //JComboBox<String> component = (JComboBox) protocols.getComponentList().get(1); //component.setSize(new Dimension(40, component.getHeight())); protocols.addChangeListener(this); pPanel.add(protocols, BorderLayout.WEST); dualAuth.setSelected(false); dualAuth.setFont(null); dualAuth.setVisible(false); dualAuth.addChangeListener(this); pPanel.add(dualAuth, BorderLayout.CENTER); JPanel panel = new JPanel(new GridBagLayout()); GridBagConstraints c = new GridBagConstraints(); c.anchor = GridBagConstraints.SOUTHWEST; c.gridx = 0; c.gridy = 0; c.gridwidth = 2; tksFilePath.setVisible(false); panel.add(tksFilePath, c); c.gridx = 2; c.gridy = 0; c.gridwidth = 1; tksBrowseButton = new JButton(JMeterUtils.getResString("browse")); tksBrowseButton.setActionCommand(TKS_BROWSE); tksBrowseButton.addActionListener(this); tksBrowseButton.setVisible(false); panel.add(tksBrowseButton, c); c.gridx = 3; c.gridy = 0; c.gridwidth = 2; tksPassword.setVisible(false); panel.add(tksPassword, c); //c.weightx = 0.0; c.gridx = 0; c.gridy = 1; c.gridwidth = 2; ccFilePath.setVisible(false); panel.add(ccFilePath, c); c.gridx = 2; c.gridy = 1; c.gridwidth = 1; ccBrowseButton = new JButton(JMeterUtils.getResString("browse")); ccBrowseButton.setActionCommand(CC_BROWSE); ccBrowseButton.addActionListener(this); ccBrowseButton.setVisible(false); panel.add(ccBrowseButton, c); c.gridx = 3; c.gridy = 1; c.gridwidth = 2; ccPassword.setVisible(false); panel.add(ccPassword, c); protocolPanel.add(pPanel); protocolPanel.add(panel); return protocolPanel; } @Override public void actionPerformed(ActionEvent e) { String action = e.getActionCommand(); if(TKS_BROWSE.equals(action)) { String path = browseAndGetFilePath(); tksFilePath.setText(path); }else if(CC_BROWSE.equals(action)) { String path = browseAndGetFilePath(); ccFilePath.setText(path); } } private String browseAndGetFilePath() { String path = ""; JFileChooser chooser = FileDialoger.promptToOpenFile(); if (chooser != null) { File file = chooser.getSelectedFile(); if (file != null) { path = file.getPath(); } } return path; } @Override public void stateChanged(ChangeEvent e) { if(e.getSource() == dualAuth) { if(dualAuth.isSelected()) { tksFilePath.setVisible(true); tksBrowseButton.setVisible(true); tksPassword.setVisible(true); ccFilePath.setVisible(true); ccBrowseButton.setVisible(true); ccPassword.setVisible(true); } else { tksFilePath.setVisible(false); tksBrowseButton.setVisible(false); 
tksPassword.setVisible(false); ccFilePath.setVisible(false); ccBrowseButton.setVisible(false); ccPassword.setVisible(false); } } else if(e.getSource() == protocols) { if("TCP".equals(protocols.getText())) { dualAuth.setVisible(false); dualAuth.setSelected(false); } else if("SSL".equals(protocols.getText())) { dualAuth.setVisible(true); dualAuth.setEnabled(true); } } } public void configure(AbstractMQTTSampler sampler) { serverAddr.setText(sampler.getServer()); serverPort.setText(sampler.getPort()); if(sampler.getMqttVersion().equals(MQTT_VERSION_3_1)) { mqttVersion.setSelectedIndex(0); } else if(sampler.getMqttVersion().equals(MQTT_VERSION_3_1_1)) { mqttVersion.setSelectedIndex(1); } connShared.setSelected(sampler.isConnectionShare()); if(!sampler.isConnectionShareShow()) { connShared.setVisible(false); } timeout.setText(sampler.getConnTimeout()); if(sampler.getProtocol().trim().indexOf(JMETER_VARIABLE_PREFIX) == -1){ if(DEFAULT_PROTOCOL.equals(sampler.getProtocol())) { protocols.setSelectedIndex(0); } else { protocols.setSelectedIndex(1); } } else { protocols.setText(sampler.getProtocol()); } if(sampler.isDualSSLAuth()) { dualAuth.setVisible(true); dualAuth.setSelected(sampler.isDualSSLAuth()); } tksFilePath.setText(sampler.getKeyStoreFilePath()); tksPassword.setText(sampler.getKeyStorePassword()); ccFilePath.setText(sampler.getClientCertFilePath()); ccPassword.setText(sampler.getClientCertPassword()); userNameAuth.setText(sampler.getUserNameAuth()); passwordAuth.setText(sampler.getPasswordAuth()); connNamePrefix.setText(sampler.getConnPrefix()); if(sampler.isClientIdSuffix()) { connNameSuffix.setSelected(true); } else { connNameSuffix.setSelected(false); } connKeepAlive.setText(sampler.getConnKeepAlive()); connKeeptime.setText(sampler.getConnKeepTime()); if(!sampler.isKeepTimeShow()) { connKeeptime.setVisible(false); } connAttmptMax.setText(sampler.getConnAttamptMax()); reconnAttmptMax.setText(sampler.getConnReconnAttamptMax()); } public void setupSamplerProperties(AbstractMQTTSampler sampler) { sampler.setServer(serverAddr.getText()); sampler.setPort(serverPort.getText()); sampler.setMqttVersion(mqttVersion.getText()); sampler.setConnectionShare(connShared.isSelected()); sampler.setConnTimeout(timeout.getText()); sampler.setProtocol(protocols.getText()); sampler.setDualSSLAuth(dualAuth.isSelected()); sampler.setKeyStoreFilePath(tksFilePath.getText()); sampler.setKeyStorePassword(tksPassword.getText()); sampler.setClientCertFilePath(ccFilePath.getText()); sampler.setClientCertPassword(ccPassword.getText()); sampler.setUserNameAuth(userNameAuth.getText()); sampler.setPasswordAuth(passwordAuth.getText()); sampler.setConnPrefix(connNamePrefix.getText()); sampler.setClientIdSuffix(connNameSuffix.isSelected()); sampler.setConnKeepAlive(connKeepAlive.getText()); sampler.setConnKeepTime(connKeeptime.getText()); sampler.setConnAttamptMax(connAttmptMax.getText()); sampler.setConnReconnAttamptMax(reconnAttmptMax.getText()); } public static int parseInt(String value) { if(value == null || "".equals(value.trim())) { return 0; } return Integer.parseInt(value); } public void clearUI() { serverAddr.setText(DEFAULT_SERVER); serverPort.setText(DEFAULT_PORT); mqttVersion.setSelectedIndex(0); connShared.setSelected(DEFAULT_CONNECTION_SHARE); timeout.setText(DEFAULT_CONN_TIME_OUT); protocols.setSelectedIndex(0); dualAuth.setSelected(false); tksFilePath.setText(""); tksPassword.setText(""); ccFilePath.setText(""); ccPassword.setText(""); userNameAuth.setText(""); passwordAuth.setText(""); 
connNamePrefix.setText(DEFAULT_CONN_PREFIX_FOR_CONN); connNameSuffix.setSelected(true); connKeepAlive.setText(DEFAULT_CONN_KEEP_ALIVE); connKeeptime.setText(DEFAULT_CONN_KEEP_TIME); connAttmptMax.setText(DEFAULT_CONN_ATTAMPT_MAX); reconnAttmptMax.setText(DEFAULT_CONN_RECONN_ATTAMPT_MAX); } }
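// --- Illustrative sketch (not part of the plugin sources): how the CommonConnUI helper
// above is composed into a Swing container. In the real plugin the panels are embedded
// in a JMeter sampler GUI, which also calls configure(sampler) when an element is loaded
// and setupSamplerProperties(sampler) when it is saved; the standalone frame below only
// previews the layout, and it assumes the plugin's Constants defaults and JMeter
// resources (for the "browse" label) are on the classpath. ---
package net.xmeter.gui.examples;

import javax.swing.BoxLayout;
import javax.swing.JFrame;
import javax.swing.JPanel;

import net.xmeter.gui.CommonConnUI;

public class CommonConnUIPreview {

    public static void main(String[] args) {
        CommonConnUI connUI = new CommonConnUI();

        // Stack the four panels the helper knows how to build.
        JPanel content = new JPanel();
        content.setLayout(new BoxLayout(content, BoxLayout.Y_AXIS));
        content.add(connUI.createConnPanel());
        content.add(connUI.createProtocolPanel());
        content.add(connUI.createAuthentication());
        content.add(connUI.createConnOptions());

        // Reset every field to the plugin defaults before showing the preview.
        connUI.clearUI();

        JFrame frame = new JFrame("MQTT connection options preview");
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        frame.setContentPane(content);
        frame.pack();
        frame.setVisible(true);
    }
}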
package io.myweb; import android.content.Context; import android.content.res.AssetManager; import android.net.LocalSocket; import android.util.Log; import io.myweb.api.MimeTypes; import java.io.*; import java.util.regex.Pattern; public class AssetEndpoint extends Endpoint { public static final String MYWEB_ASSETS_DIR = "myweb"; public AssetEndpoint(Context context) { super(context); } @Override protected String httpMethod() { return "GET"; } @Override protected String originalPath() { return "/"; // doesn't matter actually } @Override protected Pattern getPattern() { return null; // doesn't matter actually } @Override public boolean match(String method, String uri) { Log.d("AssetEndpoint", "trying to match: " + uri); if ("GET".equals(method)) { AssetManager assetManager = getContext().getAssets(); try { assetManager.open(MYWEB_ASSETS_DIR + uri).close(); Log.d("AssetEndpoint", "matched: " + uri); return true; } catch (IOException e) { Log.d("AssetEndpoint", "not matched: " + uri + " (" + e + ")"); return false; } } return false; } @Override public void invoke(String uri, String request, LocalSocket localSocket, String reqId) { AssetManager assetManager = getContext().getAssets(); try { String contentType = MimeTypes.getMimeType(uri); OutputStream os = outputStream(localSocket); writeResponseHeaders(os, reqId); InputStream is = assetManager.open(MYWEB_ASSETS_DIR + uri); ResponseBuilder responseBuilder = new ResponseBuilder(); long length = AssetInfo.getAssetLengths().get(uri); responseBuilder.writeResponse(contentType, length, is, os); } catch (IOException e) { Log.e("AssetEndpoint", "error during invoke", e); } } }
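// --- Illustrative sketch (not part of the library): a minimal custom Endpoint that
// follows the same contract as AssetEndpoint above -- match() decides whether a request
// is handled, invoke() writes the response. It deliberately reuses only members already
// exercised by AssetEndpoint (the Endpoint(Context) constructor, outputStream(),
// writeResponseHeaders() and ResponseBuilder.writeResponse()) and assumes they keep the
// signatures seen above; the "/hello" path and the text payload are invented. ---
package io.myweb;

import android.content.Context;
import android.net.LocalSocket;
import android.util.Log;

import java.io.ByteArrayInputStream;
import java.io.OutputStream;
import java.util.regex.Pattern;

public class HelloEndpoint extends Endpoint {

    public HelloEndpoint(Context context) {
        super(context);
    }

    @Override
    protected String httpMethod() {
        return "GET";
    }

    @Override
    protected String originalPath() {
        return "/hello";
    }

    @Override
    protected Pattern getPattern() {
        return Pattern.compile("^/hello$");
    }

    @Override
    public boolean match(String method, String uri) {
        return "GET".equals(method) && "/hello".equals(uri);
    }

    @Override
    public void invoke(String uri, String request, LocalSocket localSocket, String reqId) {
        byte[] body = "hello from myweb".getBytes();
        try {
            OutputStream os = outputStream(localSocket);
            writeResponseHeaders(os, reqId);
            // Same response-writing call used by AssetEndpoint above.
            new ResponseBuilder().writeResponse("text/plain", body.length, new ByteArrayInputStream(body), os);
        } catch (Exception e) { // broad catch keeps the sketch compiling regardless of the exact throws clauses
            Log.e("HelloEndpoint", "error during invoke", e);
        }
    }
}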
package com.jetbrains.python.packaging; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; import com.google.common.collect.ImmutableMap; import com.google.common.util.concurrent.UncheckedExecutionException; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.google.gson.annotations.SerializedName; import com.intellij.openapi.application.ApplicationInfo; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.ApplicationNamesInfo; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.util.text.StringUtil; import com.intellij.util.CatchingConsumer; import com.intellij.util.SmartList; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.io.HttpRequests; import com.intellij.webcore.packaging.RepoPackage; import com.jetbrains.python.PythonHelpersLocator; import one.util.streamex.EntryStream; import one.util.streamex.StreamEx; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.text.MutableAttributeSet; import javax.swing.text.html.HTML; import javax.swing.text.html.HTMLEditorKit; import javax.swing.text.html.parser.ParserDelegator; import java.io.IOException; import java.io.Reader; import java.nio.file.Files; import java.nio.file.Paths; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.concurrent.ExecutionException; import java.util.stream.Collectors; public class PyPIPackageUtil { private static final Logger LOG = Logger.getInstance(PyPIPackageUtil.class); private static final Gson GSON = new GsonBuilder().create(); private static final String PYPI_HOST = "https://pypi.python.org"; public static final String PYPI_URL = PYPI_HOST + "/pypi"; public static final String PYPI_LIST_URL = PYPI_HOST + "/simple"; /** * Contains mapping "importable top-level package" -> "package names on PyPI". */ public static final ImmutableMap<String, List<String>> PACKAGES_TOPLEVEL = loadPackageAliases(); public static final PyPIPackageUtil INSTANCE = new PyPIPackageUtil(); /** * Contains cached versions of packages from additional repositories. * * @see #getPackageVersionsFromAdditionalRepositories(String) */ private final LoadingCache<String, List<String>> myAdditionalPackagesReleases = CacheBuilder.newBuilder().build( new CacheLoader<String, List<String>>() { @Override public List<String> load(@NotNull String key) throws Exception { LOG.debug("Searching for versions of package '" + key + "' in additional repositories"); final List<String> repositories = PyPackageService.getInstance().additionalRepositories; for (String repository : repositories) { try { final String packageUrl = StringUtil.trimEnd(repository, "/") + "/" + key; final List<String> versions = parsePackageVersionsFromArchives(packageUrl, key); if (!versions.isEmpty()) { LOG.debug("Found versions " + versions + "of " + key + " at " + repository); return Collections.unmodifiableList(versions); } } catch (HttpRequests.HttpStatusException e) { if (e.getStatusCode() != 404) { LOG.debug("Cannot access " + e.getUrl() + ": " + e.getMessage()); } } } return Collections.emptyList(); } }); /** * Contains cached packages taken from additional repositories. 
*/ protected final LoadingCache<String, List<RepoPackage>> myAdditionalPackages = CacheBuilder.newBuilder().build( new CacheLoader<String, List<RepoPackage>>() { @Override public List<RepoPackage> load(@NotNull String key) throws Exception { return getPackagesFromAdditionalRepository(key); } }); /** * Contains cached package information retrieved through PyPI's JSON API. * * @see #refreshAndGetPackageDetailsFromPyPI(String, boolean) */ private final LoadingCache<String, PackageDetails> myPackageToDetails = CacheBuilder.newBuilder().build( new CacheLoader<String, PackageDetails>() { @Override public PackageDetails load(@NotNull String key) throws Exception { LOG.debug("Fetching details for the package '" + key + "' on PyPI"); return HttpRequests.request(PYPI_URL + "/" + key + "/json") .userAgent(getUserAgent()) .connect(request -> GSON.fromJson(request.getReader(), PackageDetails.class)); } }); /** * Prevents simultaneous updates of {@link PyPackageService#PY_PACKAGES} * because the corresponding response contains tons of data and multiple * queries at the same time can cause memory issues. */ private final Object myPyPIPackageCacheUpdateLock = new Object(); /** * Value for "User Agent" HTTP header in form: PyCharm/2016.2 EAP */ @NotNull private static String getUserAgent() { return ApplicationNamesInfo.getInstance().getProductName() + "/" + ApplicationInfo.getInstance().getFullVersion(); } @NotNull private static ImmutableMap<String, List<String>> loadPackageAliases() { final ImmutableMap.Builder<String, List<String>> builder = ImmutableMap.builder(); try { Files .lines(Paths.get(PythonHelpersLocator.getHelperPath("/tools/packages"))) .forEach( line -> { final List<String> split = StringUtil.split(line, " "); builder.put(split.get(0), new SmartList<>(ContainerUtil.subList(split, 1))); } ); } catch (IOException e) { LOG.error("Cannot find \"packages\". 
" + e.getMessage()); } return builder.build(); } public static boolean isPyPIRepository(@Nullable String repository) { return repository != null && repository.startsWith(PYPI_HOST); } @NotNull public List<RepoPackage> getAdditionalPackages(@NotNull List<String> repositories) { return StreamEx.of(myAdditionalPackages.getAllPresent(repositories).values()).flatMap(StreamEx::of).toList(); } public void loadAdditionalPackages(@NotNull List<String> repositories, boolean alwaysRefresh) throws IOException { if (alwaysRefresh) { for (String url : repositories) { myAdditionalPackages.refresh(url); } } else { for (String url : repositories) { getCachedValueOrRethrowIO(myAdditionalPackages, url); } } } @NotNull private static List<RepoPackage> getPackagesFromAdditionalRepository(@NotNull String url) throws IOException { return parsePyPIListFromWeb(url) .stream() .map(s -> new RepoPackage(s, url, null)) .collect(Collectors.toList()); } public void fillPackageDetails(@NotNull String packageName, @NotNull CatchingConsumer<PackageDetails.Info, Exception> callback) { ApplicationManager.getApplication().executeOnPooledThread(() -> { try { final PackageDetails packageDetails = refreshAndGetPackageDetailsFromPyPI(packageName, false); callback.consume(packageDetails.getInfo()); } catch (IOException e) { callback.consume(e); } }); } @NotNull private PackageDetails refreshAndGetPackageDetailsFromPyPI(@NotNull String packageName, boolean alwaysRefresh) throws IOException { if (alwaysRefresh) { myPackageToDetails.invalidate(packageName); } return getCachedValueOrRethrowIO(myPackageToDetails, packageName); } public void usePackageReleases(@NotNull String packageName, @NotNull CatchingConsumer<List<String>, Exception> callback) { ApplicationManager.getApplication().executeOnPooledThread(() -> { try { final List<String> releasesFromSimpleIndex = getPackageVersionsFromAdditionalRepositories(packageName); if (releasesFromSimpleIndex.isEmpty()) { final List<String> releasesFromPyPI = getPackageVersionsFromPyPI(packageName, true); callback.consume(releasesFromPyPI); } else { callback.consume(releasesFromSimpleIndex); } } catch (Exception e) { callback.consume(e); } }); } /** * Fetches available package versions using JSON API of PyPI. */ @NotNull private List<String> getPackageVersionsFromPyPI(@NotNull String packageName, boolean force) throws IOException { final PackageDetails details = refreshAndGetPackageDetailsFromPyPI(packageName, force); final List<String> result = details.getReleases(); result.sort(PyPackageVersionComparator.getSTR_COMPARATOR().reversed()); return Collections.unmodifiableList(result); } @Nullable private String getLatestPackageVersionFromPyPI(@NotNull Project project, @NotNull String packageName) throws IOException { LOG.debug("Requesting the latest PyPI version for the package " + packageName); final List<String> versions = getPackageVersionsFromPyPI(packageName, true); return PyPackagingSettings.getInstance(project).selectLatestVersion(versions); } /** * Fetches available package versions by scrapping the page containing package archives. * It's primarily used for additional repositories since, e.g. devpi doesn't provide another way to get this information. */ @NotNull private List<String> getPackageVersionsFromAdditionalRepositories(@NotNull String packageName) throws IOException { return getCachedValueOrRethrowIO(myAdditionalPackagesReleases, packageName); } @NotNull private static <T> T getCachedValueOrRethrowIO(@NotNull LoadingCache<String, ? 
extends T> cache, @NotNull String key) throws IOException { try { return cache.get(key); } catch (ExecutionException|UncheckedExecutionException e) { final Throwable cause = e.getCause(); throw (cause instanceof IOException ? (IOException)cause : new IOException("Unexpected non-IO error", cause)); } } @Nullable private String getLatestPackageVersionFromAdditionalRepositories(@NotNull Project project, @NotNull String packageName) throws IOException { final List<String> versions = getPackageVersionsFromAdditionalRepositories(packageName); return PyPackagingSettings.getInstance(project).selectLatestVersion(versions); } @Nullable public String fetchLatestPackageVersion(@NotNull Project project, @NotNull String packageName) throws IOException { String version = null; // Package is on PyPI not a, say, some system package on Ubuntu if (PyPIPackageCache.getInstance().containsPackage(packageName)) { version = getLatestPackageVersionFromPyPI(project, packageName); } if (!PyPackageService.getInstance().additionalRepositories.isEmpty()) { final String extraVersion = getLatestPackageVersionFromAdditionalRepositories(project, packageName); if (extraVersion != null) { version = extraVersion; } } return version; } @NotNull private static List<String> parsePackageVersionsFromArchives(@NotNull String archivesUrl, @NotNull String packageName) throws IOException { return HttpRequests.request(archivesUrl).userAgent(getUserAgent()).connect(request -> { final List<String> versions = new ArrayList<>(); final Reader reader = request.getReader(); new ParserDelegator().parse(reader, new HTMLEditorKit.ParserCallback() { HTML.Tag myTag; @Override public void handleStartTag(HTML.Tag tag, MutableAttributeSet set, int i) { myTag = tag; } @Override public void handleText(@NotNull char[] data, int pos) { if (myTag != null && "a".equals(myTag.toString())) { final String artifactName = String.valueOf(data); final String version = extractVersionFromArtifactName(artifactName, packageName); if (version != null) { versions.add(version); } else { LOG.debug("Could not extract version from " + artifactName + " at " + archivesUrl); } } } }, true); versions.sort(PyPackageVersionComparator.getSTR_COMPARATOR().reversed()); return versions; }); } @Nullable private static String extractVersionFromArtifactName(@NotNull String artifactName, @NotNull String packageName) { final String withoutExtension; // Contains more than one dot and thus should be handled separately if (artifactName.endsWith(".tar.gz")) { withoutExtension = StringUtil.trimEnd(artifactName, ".tar.gz"); } else { withoutExtension = FileUtil.getNameWithoutExtension(artifactName); } final String packageNameWithUnderscores = packageName.replace('-', '_'); final String suffix; if (withoutExtension.startsWith(packageName)) { suffix = StringUtil.trimStart(withoutExtension, packageName); } else if (withoutExtension.startsWith(packageNameWithUnderscores)) { suffix = StringUtil.trimStart(withoutExtension, packageNameWithUnderscores); } else { return null; } // StringUtil.split excludes empty parts by default effectively stripping a leading dash final String version = ContainerUtil.getFirstItem(StringUtil.split(suffix, "-")); if (StringUtil.isNotEmpty(version)) { return version; } return null; } public void updatePyPICache() throws IOException { final PyPackageService service = PyPackageService.getInstance(); if (service.PYPI_REMOVED) return; PyPIPackageCache.reload(parsePyPIListFromWeb(PYPI_LIST_URL)); service.LAST_TIME_CHECKED = System.currentTimeMillis(); } @NotNull private static 
List<String> parsePyPIListFromWeb(@NotNull String url) throws IOException { LOG.debug("Fetching index of all packages available on " + url); return HttpRequests.request(url).userAgent(getUserAgent()).connect(request -> { final List<String> packages = new ArrayList<>(); final Reader reader = request.getReader(); new ParserDelegator().parse(reader, new HTMLEditorKit.ParserCallback() { HTML.Tag myTag; @Override public void handleStartTag(@NotNull HTML.Tag tag, @NotNull MutableAttributeSet set, int i) { myTag = tag; } @Override public void handleText(@NotNull char[] data, int pos) { if (myTag != null && "a".equals(myTag.toString())) { packages.add(String.valueOf(data)); } } @Override public void handleEndTag(@NotNull HTML.Tag t, int pos) { myTag = null; } }, true); return packages; }); } public void loadPackages() throws IOException { // This lock is solely to prevent multiple threads from updating // the mammoth cache of PyPI packages simultaneously. synchronized (myPyPIPackageCacheUpdateLock) { final PyPIPackageCache cache = PyPIPackageCache.getInstance(); if (cache.getPackageNames().isEmpty()) { updatePyPICache(); } } } /** * @see PyPIPackageCache#containsPackage(String) */ public boolean isInPyPI(@NotNull String packageName) { return PyPIPackageCache.getInstance().containsPackage(packageName); } @SuppressWarnings("FieldMayBeFinal") public static final class PackageDetails { public static final class Info { // We have to explicitly name each of the fields instead of just using // GsonBuilder#setFieldNamingStrategy(FieldNamingPolicy.LOWER_CASE_WITH_UNDERSCORES), // since otherwise GSON wouldn't be able to deserialize server responses // in the professional edition of PyCharm where the names of private fields // are obfuscated. @SerializedName("version") private String version = ""; @SerializedName("author") private String author = ""; @SerializedName("author_email") private String authorEmail = ""; @SerializedName("home_page") private String homePage = ""; @SerializedName("summary") private String summary = ""; @NotNull public String getVersion() { return StringUtil.notNullize(version); } @NotNull public String getAuthor() { return StringUtil.notNullize(author); } @NotNull public String getAuthorEmail() { return StringUtil.notNullize(authorEmail); } @NotNull public String getHomePage() { return StringUtil.notNullize(homePage); } @NotNull public String getSummary() { return StringUtil.notNullize(summary); } } @SerializedName("info") private Info info = new Info(); @SerializedName("releases") private Map<String, Object> releases = Collections.emptyMap(); @NotNull public Info getInfo() { return info; } @NotNull public List<String> getReleases() { return EntryStream.of(releases).filterValues(PackageDetails::isNotBrokenRelease).keys().toList(); } private static boolean isNotBrokenRelease(Object o) { return !(o instanceof List) || !((List)o).isEmpty(); } } }
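// --- Self-contained sketch (hypothetical example, not PyCharm code) of the caching
// pattern used above: a Guava LoadingCache runs the expensive lookup once per key, and
// the caller unwraps ExecutionException back into the loader's IOException, mirroring
// getCachedValueOrRethrowIO(). fetchVersionsFor() is a stand-in for a real network call
// such as parsePackageVersionsFromArchives(); package and class names are invented. ---
package com.example.cachesketch;

import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;

import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ExecutionException;

public class CachedVersionLookup {

    private final LoadingCache<String, List<String>> versionsCache =
            CacheBuilder.newBuilder().build(new CacheLoader<String, List<String>>() {
                @Override
                public List<String> load(String packageName) throws IOException {
                    // Expensive work happens only on a cache miss.
                    return fetchVersionsFor(packageName);
                }
            });

    public List<String> getVersions(String packageName) throws IOException {
        try {
            return versionsCache.get(packageName);
        } catch (ExecutionException e) {
            // Surface the loader's checked IOException to the caller.
            Throwable cause = e.getCause();
            throw cause instanceof IOException ? (IOException) cause
                    : new IOException("Unexpected non-IO error", cause);
        }
    }

    // Placeholder for a real HTTP request; returns canned data here.
    private List<String> fetchVersionsFor(String packageName) throws IOException {
        return Collections.unmodifiableList(Arrays.asList("1.0.0", "1.1.0"));
    }

    public static void main(String[] args) throws IOException {
        CachedVersionLookup lookup = new CachedVersionLookup();
        System.out.println(lookup.getVersions("requests")); // first call populates the cache
        System.out.println(lookup.getVersions("requests")); // second call is served from memory
    }
}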
package be.fedict.dcat.helpers; import java.io.IOException; import java.io.StringReader; import java.net.URL; import javax.json.Json; import javax.json.JsonObject; import javax.json.JsonReader; import org.apache.http.HttpHeaders; import org.apache.http.HttpHost; import org.apache.http.HttpResponse; import org.apache.http.client.fluent.Request; import org.apache.http.util.EntityUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * * @author Bart Hanssens <bart.hanssens@fedict.be> */ public class Fetcher { private final Logger logger = LoggerFactory.getLogger(Fetcher.class); private HttpHost proxy = null; private int delay = 1000; /** * Sleep (between HTTP requests) */ public void sleep() { try { Thread.sleep(getDelay()); } catch (InterruptedException ex) { } } /** * Set HTTP proxy. * * @param proxy proxy server * @param port proxy port */ public void setProxy(String proxy, int port) { if (proxy == null || proxy.isEmpty()) { this.proxy = null; } else { this.proxy = new HttpHost(proxy, port); } } /** * Get HTTP proxy * * @return proxy or null */ public HttpHost getProxy() { return proxy; } /** * Get delay between HTTP requests * * @return */ public int getDelay() { return delay; } /** * Make HTTP GET request. * * @param url * @return JsonObject containing CKAN info * @throws IOException */ public JsonObject makeJsonRequest(URL url) throws IOException { Request request = Request.Get(url.toString()); if (getProxy() != null) { request = request.viaProxy(getProxy()); } String json = request.execute().returnContent().asString(); JsonReader reader = Json.createReader(new StringReader(json)); return reader.readObject(); } /** * Make HTTP GET request. * * @param url * @return String containing raw page or empty string * @throws IOException */ public String makeRequest(URL url) throws IOException { logger.info("Get request for page {}", url); Request request = Request.Get(url.toString()); // some servers return 503 if no accept header is present request.addHeader(HttpHeaders.ACCEPT, "*/*"); if (getProxy() != null) { request = request.viaProxy(getProxy()); } HttpResponse res = request.execute().returnResponse(); // Return empty if the HTTP returns something faulty int status = res.getStatusLine().getStatusCode(); if (status != 200) { logger.warn("HTTP code {} getting page {}", status, url); return ""; } return EntityUtils.toString(res.getEntity()); } /** * Make HTTP HEAD request * * @param url * @return * @throws IOException */ public int makeHeadRequest(URL url) throws IOException { logger.info("Head request for {}", url); Request request = Request.Head(url.toString()); if (getProxy() != null) { request = request.viaProxy(getProxy()); } return request.execute().returnResponse() .getStatusLine().getStatusCode(); } }
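// --- Illustrative usage sketch for the Fetcher helper above (not part of the
// harvester). The URLs are placeholders and the proxy line is commented out; it simply
// walks through the three public request methods with the polite sleep() between
// calls. ---
package be.fedict.dcat.helpers.examples;

import java.io.IOException;
import java.net.URL;

import javax.json.JsonObject;

import be.fedict.dcat.helpers.Fetcher;

public class FetcherExample {

    public static void main(String[] args) throws IOException {
        Fetcher fetcher = new Fetcher();
        // fetcher.setProxy("proxy.example.org", 8080); // only needed behind a proxy

        // HEAD request: just the HTTP status code.
        int status = fetcher.makeHeadRequest(new URL("https://example.org/"));
        System.out.println("HEAD status: " + status);

        fetcher.sleep(); // be polite between consecutive requests

        // Raw page: returns "" (instead of throwing) on any non-200 status.
        String page = fetcher.makeRequest(new URL("https://example.org/"));
        System.out.println("Fetched " + page.length() + " characters");

        fetcher.sleep();

        // CKAN-style JSON endpoint parsed into a JsonObject.
        JsonObject json = fetcher.makeJsonRequest(new URL("https://demo.ckan.org/api/3/action/package_list"));
        System.out.println("JSON keys: " + json.keySet());
    }
}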
package de.bmoth.backend.translator; import com.microsoft.z3.*; import de.bmoth.TestUsingZ3; import de.bmoth.backend.z3.FormulaToZ3Translator; import de.bmoth.backend.z3.SolutionFinder; import de.bmoth.preferences.BMothPreferences; import org.junit.Before; import org.junit.Test; import java.util.*; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; public class SolutionFinderTest extends TestUsingZ3 { private SolutionFinder finder; @Before @Override public void setup() { super.setup(); finder = new SolutionFinder(z3Solver, z3Context); } @Test public void testSolutionFinder1() { String formula = "a : NATURAL & a < 1"; BoolExpr constraint = FormulaToZ3Translator.translatePredicate(formula, z3Context); Set<Model> solutions = finder.findSolutions(constraint, 20); assertEquals(1, solutions.size()); } @Test public void testSolutionFinderNATUpperFail() { String maxInt = String.valueOf(BMothPreferences.getIntPreference(BMothPreferences.IntPreference.MAX_INT)); String formula = new StringBuilder().append("a : NAT & a > ").append(maxInt).toString(); BoolExpr constraint = FormulaToZ3Translator.translatePredicate(formula, z3Context); Set<Model> solutions = finder.findSolutions(constraint, 20); assertEquals(0, solutions.size()); } @Test public void testSolutionFinderNATUpper() { String oneBelowMaxInt = String.valueOf(BMothPreferences.getIntPreference(BMothPreferences.IntPreference.MAX_INT) - 1); String formula = new StringBuilder().append("a : NAT & a > ").append(oneBelowMaxInt).toString(); BoolExpr constraint = FormulaToZ3Translator.translatePredicate(formula, z3Context); Set<Model> solutions = finder.findSolutions(constraint, 20); assertEquals(1, solutions.size()); } @Test public void testSolutionFinderNAT1UpperFail() { String formula = "a : NATURAL1 & a < 1"; BoolExpr constraint = FormulaToZ3Translator.translatePredicate(formula, z3Context); Set<Model> solutions = finder.findSolutions(constraint, 20); assertEquals(0, solutions.size()); } @Test public void testSolutionFinderNAT1Upper() { String formula = "a : NATURAL1 & a < 2"; BoolExpr constraint = FormulaToZ3Translator.translatePredicate(formula, z3Context); Set<Model> solutions = finder.findSolutions(constraint, 20); assertEquals(1, solutions.size()); } @Test public void testExistsSolutionFinder() { String formula = "#x.(x : {1,2} & a = x)"; BoolExpr constraint = FormulaToZ3Translator.translatePredicate(formula, z3Context); Set<Model> solutions = finder.findSolutions(constraint, 20); assertEquals(2, solutions.size()); } @Test public void testExistsSolutionFinder2() { String formula = "#a,b,c.(c = TRUE & a : {1,2} & b : {1,2} & a /= b & x = a+b)"; BoolExpr constraint = FormulaToZ3Translator.translatePredicate(formula, z3Context); Set<Model> solutions = finder.findSolutions(constraint, 20); //all existentially quantified variables are part of the model //{c!0=true, a!2=2, b!1=1, x=3} //{c!0=true, a!2=1, b!1=2, x=3} assertEquals(2, solutions.size()); } @Test public void testSolutionFinder() { String formula = "0 < a & a < 6 & 0 < b & b < 6 & ( 2 * b < a or 2 * b = a )"; BoolExpr constraint = FormulaToZ3Translator.translatePredicate(formula, z3Context); z3Solver.add(constraint); assertEquals(Status.SATISFIABLE, z3Solver.check()); Set<Model> solutions = finder.findSolutions(constraint, 20); assertEquals(6, solutions.size()); for (Model solution : solutions) { String solutionAsString = z3ModelToString(solution); switch (solutionAsString) { case "{a=2, b=1}": case "{a=3, b=1}": 
case "{a=4, b=1}": case "{a=4, b=2}": case "{a=5, b=1}": case "{a=5, b=2}": case "{a=5, b=3}": break; default: fail(solutionAsString + " is not part of found solutions"); } } } @Test public void testSolutionFinder2() { String formula = "1 < x & x < 5"; BoolExpr constraint = FormulaToZ3Translator.translatePredicate(formula, z3Context); z3Solver.add(constraint); assertEquals(Status.SATISFIABLE, z3Solver.check()); Set<Model> solutions = finder.findSolutions(constraint, 20); assertEquals(3, solutions.size()); for (Model solution : solutions) { String solutionAsString = z3ModelToString(solution); switch (solutionAsString) { case "{x=2}": case "{x=3}": case "{x=4}": break; default: fail(solutionAsString + " is not part of found solutions"); } } } @Test public void testSolutionFinder3() { String formula = "0 < x & x < 5 & 1 < y & y < 6 & y < x"; BoolExpr constraint = FormulaToZ3Translator.translatePredicate(formula, z3Context); z3Solver.add(constraint); assertEquals(Status.SATISFIABLE, z3Solver.check()); Set<Model> solutions = finder.findSolutions(constraint, 20); assertEquals(3, solutions.size()); for (Model solution : solutions) { String solutionAsString = z3ModelToString(solution); switch (solutionAsString) { case "{x=3, y=2}": case "{x=4, y=2}": case "{x=4, y=3}": break; default: fail(solutionAsString + " is not part of found solutions"); } } } @Test public void testAllSolutions() { String formula = "1 < x & x < 5"; // getting the translated z3 representation of the formula BoolExpr constraint = FormulaToZ3Translator.translatePredicate(formula, z3Context); z3Solver.add(constraint); Expr x = z3Context.mkIntConst("x"); // 1st try: brute force over 'all' satisfying solutions // credit goes to: // http://stackoverflow.com/questions/13395391/z3-finding-all-satisfying-models#answer-13398853 List<Number> solutions = new ArrayList<>(); // as long as formula is satisfiable for (int i = 0; z3Solver.check() == Status.SATISFIABLE && i < 10; i++) { // get current evaluation for x IntNum currentX = (IntNum) z3Solver.getModel().eval(x, true); // and exclude it from formula z3Solver.add(z3Context.mkNot(z3Context.mkEq(x, currentX))); // store result solutions.add(currentX.getInt()); } assertEquals("[2, 3, 4]", solutions.toString()); } @Test public void testSolutionFinder4() { String formula = "a > 0"; BoolExpr constraint = FormulaToZ3Translator.translatePredicate(formula, z3Context); Set<Model> solutions = finder.findSolutions(constraint, 20); assertEquals(20, solutions.size()); } @Test public void testAbort() { String formula = "a > 0"; BoolExpr constraint = FormulaToZ3Translator.translatePredicate(formula, z3Context); int maxIterations = 20000; new Thread(() -> { try { Thread.sleep(500); } catch (InterruptedException e) { Thread.interrupted(); } finder.abort(); }).start(); Set<Model> solutions = finder.findSolutions(constraint, maxIterations); assertTrue(solutions.size() < maxIterations); } static String z3ModelToString(Model m) { Map<String, String> values = new HashMap<>(); for (FuncDecl constant : m.getConstDecls()) { String value = m.eval(constant.apply(), true).toString(); values.put(constant.apply().toString(), value); } return values.toString(); } }
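// --- Standalone sketch (not part of the test suite) of the enumeration idea used in
// testAllSolutions() above: keep asking Z3 for a model and add a blocking clause that
// excludes the value just found, until the constraint becomes unsatisfiable or an
// iteration bound is hit. It needs the Z3 Java bindings on the classpath; package and
// class names are invented. ---
package de.bmoth.examples;

import com.microsoft.z3.BoolExpr;
import com.microsoft.z3.Context;
import com.microsoft.z3.IntExpr;
import com.microsoft.z3.IntNum;
import com.microsoft.z3.Solver;
import com.microsoft.z3.Status;

import java.util.ArrayList;
import java.util.List;

public class EnumerateSolutions {

    public static void main(String[] args) {
        Context ctx = new Context();
        Solver solver = ctx.mkSolver();
        IntExpr x = ctx.mkIntConst("x");

        // The same constraint as in testAllSolutions(): 1 < x & x < 5.
        BoolExpr constraint = ctx.mkAnd(ctx.mkLt(ctx.mkInt(1), x), ctx.mkLt(x, ctx.mkInt(5)));
        solver.add(constraint);

        List<Integer> solutions = new ArrayList<>();
        // As long as the formula stays satisfiable (bounded to 10 iterations).
        for (int i = 0; solver.check() == Status.SATISFIABLE && i < 10; i++) {
            IntNum current = (IntNum) solver.getModel().eval(x, true);
            solutions.add(current.getInt());
            // Block the value just found so the next check yields a different model.
            solver.add(ctx.mkNot(ctx.mkEq(x, current)));
        }
        System.out.println(solutions); // expected: [2, 3, 4]
    }
}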
package org.jenkinsci.plugins.gitclient; import com.cloudbees.plugins.credentials.CredentialsScope; import com.cloudbees.plugins.credentials.common.StandardUsernamePasswordCredentials; import com.cloudbees.plugins.credentials.impl.UsernamePasswordCredentialsImpl; import static java.util.Collections.unmodifiableList; import static org.apache.commons.lang.StringUtils.isBlank; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.*; import static org.jenkinsci.plugins.gitclient.StringSharesPrefix.sharesPrefix; import hudson.FilePath; import hudson.Launcher; import hudson.Util; import hudson.ProxyConfiguration; import hudson.model.TaskListener; import hudson.plugins.git.Branch; import hudson.plugins.git.GitException; import hudson.plugins.git.GitLockFailedException; import hudson.plugins.git.GitObject; import hudson.plugins.git.IGitAPI; import hudson.plugins.git.IndexEntry; import hudson.plugins.git.Revision; import hudson.remoting.VirtualChannel; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.StringWriter; import java.lang.reflect.Field; import java.nio.file.Files; import java.nio.file.Paths; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Enumeration; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Random; import java.util.Set; import java.util.TreeSet; import java.util.UUID; import java.util.logging.Level; import java.util.logging.Logger; import java.util.regex.Matcher; import java.util.regex.Pattern; import static java.util.stream.Collectors.toList; import java.util.zip.ZipEntry; import java.util.zip.ZipFile; import junit.framework.TestCase; import org.apache.commons.io.FileUtils; import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.SystemUtils; import org.eclipse.jgit.api.Status; import org.eclipse.jgit.internal.storage.file.FileRepository; import org.eclipse.jgit.lib.Config; import org.eclipse.jgit.lib.ConfigConstants; import org.eclipse.jgit.lib.Constants; import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.lib.Ref; import org.eclipse.jgit.lib.Repository; import org.eclipse.jgit.lib.StoredConfig; import org.eclipse.jgit.transport.RefSpec; import org.eclipse.jgit.transport.RemoteConfig; import org.eclipse.jgit.transport.URIish; import org.jvnet.hudson.test.Issue; import org.jvnet.hudson.test.TemporaryDirectoryAllocator; import org.objenesis.ObjenesisStd; import com.google.common.collect.Collections2; /** * @author <a href="mailto:nicolas.deloof@gmail.com">Nicolas De Loof</a> */ public abstract class GitAPITestCase extends TestCase { private final TemporaryDirectoryAllocator temporaryDirectoryAllocator = new TemporaryDirectoryAllocator(); protected hudson.EnvVars env = new hudson.EnvVars(); protected TaskListener listener; private LogHandler handler = null; private int logCount = 0; private static final String LOGGING_STARTED = "Logging started"; private static final String SRC_DIR = (new File(".")).getAbsolutePath(); private String revParseBranchName = null; private int checkoutTimeout = -1; private int cloneTimeout = -1; private int fetchTimeout = -1; private int submoduleUpdateTimeout = -1; private final Random random = new Random(); private void createRevParseBranch() throws GitException, InterruptedException { revParseBranchName = "rev-parse-branch-" + 
UUID.randomUUID().toString(); w.git.checkout("origin/master", revParseBranchName); } private void assertCheckoutTimeout() { if (checkoutTimeout > 0) { assertSubstringTimeout("git checkout", checkoutTimeout); } } private void assertCloneTimeout() { if (cloneTimeout > 0) { // clone_() uses "git fetch" internally, not "git clone" assertSubstringTimeout("git fetch", cloneTimeout); } } private void assertFetchTimeout() { if (fetchTimeout > 0) { assertSubstringTimeout("git fetch", fetchTimeout); } } private void assertSubmoduleUpdateTimeout() { if (submoduleUpdateTimeout > 0) { assertSubstringTimeout("git submodule update", submoduleUpdateTimeout); } } private void assertSubstringTimeout(final String substring, int expectedTimeout) { if (!(w.git instanceof CliGitAPIImpl)) { // Timeout only implemented in CliGitAPIImpl return; } List<String> messages = handler.getMessages(); List<String> substringMessages = new ArrayList<>(); List<String> substringTimeoutMessages = new ArrayList<>(); final String messageRegEx = ".*\\b" + substring + "\\b.*"; // the expected substring final String timeoutRegEx = messageRegEx + " [#] timeout=" + expectedTimeout + "\\b.*"; // # timeout=<value> for (String message : messages) { if (message.matches(messageRegEx)) { substringMessages.add(message); } if (message.matches(timeoutRegEx)) { substringTimeoutMessages.add(message); } } assertThat(messages, is(not(empty()))); assertThat(substringMessages, is(not(empty()))); assertThat(substringTimeoutMessages, is(not(empty()))); assertEquals(substringMessages, substringTimeoutMessages); } /** * One local workspace of a Git repository on a temporary directory * that gets automatically cleaned up in the end. * * Every test case automatically gets one in {@link #w} but additional ones can be created if multi-repository * interactions need to be tested. 
*/ class WorkingArea { final File repo; final GitClient git; boolean bare = false; WorkingArea() throws Exception { this(temporaryDirectoryAllocator.allocate()); } WorkingArea(File repo) throws Exception { this.repo = repo; git = setupGitAPI(repo); setupProxy(git); } private void setupProxy(GitClient gitClient) throws SecurityException, NoSuchFieldException, IllegalArgumentException, IllegalAccessException { final String proxyHost = getSystemProperty("proxyHost", "http.proxyHost", "https.proxyHost"); final String proxyPort = getSystemProperty("proxyPort", "http.proxyPort", "https.proxyPort"); final String proxyUser = getSystemProperty("proxyUser", "http.proxyUser", "https.proxyUser"); //final String proxyPassword = getSystemProperty("proxyPassword", "http.proxyPassword", "https.proxyPassword"); final String noProxyHosts = getSystemProperty("noProxyHosts", "http.noProxyHosts", "https.noProxyHosts"); if(isBlank(proxyHost) || isBlank(proxyPort)) return; ProxyConfiguration proxyConfig = new ObjenesisStd().newInstance(ProxyConfiguration.class); setField(ProxyConfiguration.class, "name", proxyConfig, proxyHost); setField(ProxyConfiguration.class, "port", proxyConfig, Integer.parseInt(proxyPort)); setField(ProxyConfiguration.class, "userName", proxyConfig, proxyUser); setField(ProxyConfiguration.class, "noProxyHost", proxyConfig, noProxyHosts); //Password does not work since a set password results in a "Secret" call which expects a running Jenkins setField(ProxyConfiguration.class, "password", proxyConfig, null); setField(ProxyConfiguration.class, "secretPassword", proxyConfig, null); gitClient.setProxy(proxyConfig); } private void setField(Class<?> clazz, String fieldName, Object object, Object value) throws SecurityException, NoSuchFieldException, IllegalArgumentException, IllegalAccessException { Field declaredField = clazz.getDeclaredField(fieldName); declaredField.setAccessible(true); declaredField.set(object, value); } private String getSystemProperty(String ... keyVariants) { for(String key : keyVariants) { String value = System.getProperty(key); if(value != null) return value; } return null; } String cmd(String args) throws IOException, InterruptedException { return launchCommand(args.split(" ")); } String cmd(boolean ignoreError, String args) throws IOException, InterruptedException { return launchCommand(ignoreError, args.split(" ")); } String launchCommand(String... args) throws IOException, InterruptedException { return launchCommand(false, args); } String launchCommand(boolean ignoreError, String... args) throws IOException, InterruptedException { ByteArrayOutputStream out = new ByteArrayOutputStream(); int st = new Launcher.LocalLauncher(listener).launch().pwd(repo).cmds(args). envs(env).stdout(out).join(); String s = out.toString(); if (!ignoreError) { if (s == null || s.isEmpty()) { s = StringUtils.join(args, ' '); } assertEquals(s, 0, st); /* Reports full output of failing commands */ } return s; } String repoPath() { return repo.getAbsolutePath(); } WorkingArea init() throws IOException, InterruptedException { git.init(); git.setAuthor("root", "root@mydomain.com"); git.setCommitter("root", "root@domain.com"); return this; } WorkingArea init(boolean bare) throws IOException, InterruptedException { git.init_().workspace(repoPath()).bare(bare).execute(); return this; } void tag(String tag) throws IOException, InterruptedException { tag(tag, false); } void tag(String tag, boolean force) throws IOException, InterruptedException { cmd("git tag" + (force ? 
" --force " : " ") + tag); } void commitEmpty(String msg) throws IOException, InterruptedException { cmd("git commit --allow-empty -m " + msg); } /** * Refers to a file in this workspace */ File file(String path) { return new File(repo, path); } boolean exists(String path) { return file(path).exists(); } /** * Creates a file in the workspace. */ void touch(String path) throws IOException { file(path).createNewFile(); } /** * Creates a file in the workspace. */ File touch(String path, String content) throws IOException { File f = file(path); FileUtils.writeStringToFile(f, content, "UTF-8"); return f; } void rm(String path) { file(path).delete(); } String contentOf(String path) throws IOException { return FileUtils.readFileToString(file(path), "UTF-8"); } /** * Creates a CGit implementation. Sometimes we need this for testing JGit impl. */ CliGitAPIImpl cgit() throws Exception { return (CliGitAPIImpl)Git.with(listener, env).in(repo).using("git").getClient(); } /** * Creates a JGit implementation. Sometimes we need this for testing CliGit impl. */ JGitAPIImpl jgit() throws Exception { return (JGitAPIImpl)Git.with(listener, env).in(repo).using("jgit").getClient(); } /** * Creates a {@link Repository} object out of it. */ FileRepository repo() throws IOException { return bare ? new FileRepository(repo) : new FileRepository(new File(repo, ".git")); } /** * Obtain the current HEAD revision */ ObjectId head() throws IOException, InterruptedException { return git.revParse("HEAD"); } /** * Casts the {@link #git} to {@link IGitAPI} */ IGitAPI igit() { return (IGitAPI)git; } } protected WorkingArea w; protected WorkingArea clone(String src) throws Exception { WorkingArea x = new WorkingArea(); x.launchCommand("git", "clone", src, x.repoPath()); return new WorkingArea(x.repo); } private boolean timeoutVisibleInCurrentTest; /** * Returns true if the current test is expected to have a timeout * value visible written to the listener log. Used to assert * timeout values are passed correctly through the layers without * requiring that the timeout actually expire. * @see #setTimeoutVisibleInCurrentTest(boolean) */ protected boolean getTimeoutVisibleInCurrentTest() { return timeoutVisibleInCurrentTest; } /** * Pass visible = true to cause the current test to assert that a * timeout value should be reported in at least one of the log * entries. 
* @param visible set to false if current test performs no operation which should report a timeout value * @see #getTimeoutVisibleInCurrentTest() */ protected void setTimeoutVisibleInCurrentTest(boolean visible) { timeoutVisibleInCurrentTest = visible; } @Override protected void setUp() throws Exception { revParseBranchName = null; setTimeoutVisibleInCurrentTest(true); checkoutTimeout = -1; cloneTimeout = -1; fetchTimeout = -1; submoduleUpdateTimeout = -1; Logger logger = Logger.getLogger(this.getClass().getPackage().getName() + "-" + logCount++); handler = new LogHandler(); handler.setLevel(Level.ALL); logger.setUseParentHandlers(false); logger.addHandler(handler); logger.setLevel(Level.ALL); listener = new hudson.util.LogTaskListener(logger, Level.ALL); listener.getLogger().println(LOGGING_STARTED); w = new WorkingArea(); } /* HEAD ref of local mirror - all read access should use getMirrorHead */ private static ObjectId mirrorHead = null; private ObjectId getMirrorHead() throws IOException, InterruptedException { if (mirrorHead == null) { final String mirrorPath = new File(localMirror()).getAbsolutePath(); mirrorHead = ObjectId.fromString(w.launchCommand("git", "--git-dir=" + mirrorPath, "rev-parse", "HEAD").substring(0,40)); } return mirrorHead; } private final String remoteMirrorURL = "https://github.com/jenkinsci/git-client-plugin.git"; private final String remoteSshURL = "git@github.com:ndeloof/git-client-plugin.git"; protected String localMirror() throws IOException, InterruptedException { File base = new File(".").getAbsoluteFile(); for (File f=base; f!=null; f=f.getParentFile()) { if (new File(f,"target").exists()) { File clone = new File(f, "target/clone.git"); if (!clone.exists()) { // TODO: perhaps some kind of quick timestamp-based up-to-date check? w.launchCommand("git", "clone", "--mirror", "https://github.com/jenkinsci/git-client-plugin.git", clone.getAbsolutePath()); } return clone.getPath(); } } throw new IllegalStateException(); } /* JENKINS-33258 detected many calls to git rev-parse. This checks * those calls are not being made. The createRevParseBranch call * creates a branch whose name is unknown to the tests. This * checks that the branch name is not mentioned in a call to * git rev-parse. 
*/ private void assertRevParseCalls(String branchName) { if (revParseBranchName == null) { return; } String messages = StringUtils.join(handler.getMessages(), ";"); // Linux uses rev-parse without quotes assertFalse("git rev-parse called: " + messages, handler.containsMessageSubstring("rev-parse " + branchName)); // Windows quotes the rev-parse argument assertFalse("git rev-parse called: " + messages, handler.containsMessageSubstring("rev-parse \"" + branchName)); } protected abstract GitClient setupGitAPI(File ws) throws Exception; @Override protected void tearDown() throws Exception { try { temporaryDirectoryAllocator.dispose(); } catch (IOException e) { e.printStackTrace(System.err); } try { String messages = StringUtils.join(handler.getMessages(), ";"); assertTrue("Logging not started: " + messages, handler.containsMessageSubstring(LOGGING_STARTED)); assertCheckoutTimeout(); assertCloneTimeout(); assertFetchTimeout(); assertSubmoduleUpdateTimeout(); assertRevParseCalls(revParseBranchName); } finally { handler.close(); } } private void check_remote_url(final String repositoryName) throws InterruptedException, IOException { assertEquals("Wrong remote URL", localMirror(), w.git.getRemoteUrl(repositoryName)); String remotes = w.cmd("git remote -v"); assertTrue("remote URL has not been updated", remotes.contains(localMirror())); } private Collection<String> getBranchNames(Collection<Branch> branches) { return branches.stream().map(Branch::getName).collect(toList()); } private void assertBranchesExist(Set<Branch> branches, String ... names) throws InterruptedException { Collection<String> branchNames = getBranchNames(branches); for (String name : names) { assertTrue(name + " branch not found in " + branchNames, branchNames.contains(name)); } } private void assertBranchesNotExist(Set<Branch> branches, String ... 
names) throws InterruptedException { Collection<String> branchNames = getBranchNames(branches); for (String name : names) { assertFalse(name + " branch found in " + branchNames, branchNames.contains(name)); } } @NotImplementedInJGit public void test_clone_default_timeout_logging() throws Exception { w.git.clone_().url(localMirror()).repositoryName("origin").execute(); cloneTimeout = CliGitAPIImpl.TIMEOUT; assertCloneTimeout(); } @NotImplementedInJGit public void test_fetch_default_timeout_logging() throws Exception { w.git.clone_().url(localMirror()).repositoryName("origin").execute(); w.git.fetch_().from(new URIish("origin"), null).execute(); fetchTimeout = CliGitAPIImpl.TIMEOUT; assertFetchTimeout(); } @NotImplementedInJGit public void test_checkout_default_timeout_logging() throws Exception { w.git.clone_().url(localMirror()).repositoryName("origin").execute(); w.git.checkout().ref("origin/master").execute(); checkoutTimeout = CliGitAPIImpl.TIMEOUT; assertCheckoutTimeout(); } @NotImplementedInJGit public void test_submodule_update_default_timeout_logging() throws Exception { w.git.clone_().url(localMirror()).repositoryName("origin").execute(); w.git.checkout().ref("origin/tests/getSubmodules").execute(); w.git.submoduleUpdate().execute(); submoduleUpdateTimeout = CliGitAPIImpl.TIMEOUT; assertSubmoduleUpdateTimeout(); } public void test_setAuthor() throws Exception { final String authorName = "Test Author"; final String authorEmail = "jenkins@example.com"; w.init(); w.touch("file1", "Varying content " + java.util.UUID.randomUUID().toString()); w.git.add("file1"); w.git.setAuthor(authorName, authorEmail); w.git.commit("Author was set explicitly on this commit"); List<String> revision = w.git.showRevision(w.head()); assertTrue("Wrong author in " + revision, revision.get(2).startsWith("author " + authorName + " <" + authorEmail +"> ")); } public void test_setCommitter() throws Exception { final String committerName = "Test Commiter"; final String committerEmail = "jenkins.plugin@example.com"; w.init(); w.touch("file1", "Varying content " + java.util.UUID.randomUUID().toString()); w.git.add("file1"); w.git.setCommitter(committerName, committerEmail); w.git.commit("Committer was set explicitly on this commit"); List<String> revision = w.git.showRevision(w.head()); assertTrue("Wrong committer in " + revision, revision.get(3).startsWith("committer " + committerName + " <" + committerEmail + "> ")); } /** Clone arguments include: * repositoryName(String) - if omitted, CliGit does not set a remote repo name * shallow() - no relevant assertion of success or failure of this argument * shared() - not implemented on CliGit, not verified on JGit * reference() - implemented on JGit, not verified on either JGit or CliGit * * CliGit and JGit both require the w.git.checkout() call * otherwise no branch is checked out. That is different than the * command line git program, but consistent within the git API. 
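*
* A minimal usage sketch (illustrative only; it mirrors the calls made in the tests below):
*   w.git.clone_().url(localMirror()).repositoryName("origin").execute();
*   w.git.checkout("origin/master", "master"); // required, otherwise no branch is checked out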
*/ public void test_clone() throws Exception { cloneTimeout = 1 + random.nextInt(60 * 24); w.git.clone_().timeout(cloneTimeout).url(localMirror()).repositoryName("origin").execute(); createRevParseBranch(); // Verify JENKINS-32258 is fixed w.git.checkout("origin/master", "master"); check_remote_url("origin"); assertBranchesExist(w.git.getBranches(), "master"); final String alternates = ".git" + File.separator + "objects" + File.separator + "info" + File.separator + "alternates"; assertFalse("Alternates file found: " + alternates, w.exists(alternates)); assertFalse("Unexpected shallow clone", w.cgit().isShallowRepository()); } public void test_checkout_exception() throws Exception { w.git.clone_().url(localMirror()).repositoryName("origin").execute(); createRevParseBranch(); w.git.checkout("origin/master", "master"); final String SHA1 = "feedbeefbeaded"; try { w.git.checkout(SHA1, "master"); fail("Expected checkout exception not thrown"); } catch (GitException ge) { assertEquals("Could not checkout master with start point " + SHA1, ge.getMessage()); } } public void test_clone_repositoryName() throws IOException, InterruptedException { w.git.clone_().url(localMirror()).repositoryName("upstream").execute(); w.git.checkout("upstream/master", "master"); check_remote_url("upstream"); assertBranchesExist(w.git.getBranches(), "master"); final String alternates = ".git" + File.separator + "objects" + File.separator + "info" + File.separator + "alternates"; assertFalse("Alternates file found: " + alternates, w.exists(alternates)); } public void test_clone_shallow() throws Exception { w.git.clone_().url(localMirror()).repositoryName("origin").shallow(true).execute(); createRevParseBranch(); // Verify JENKINS-32258 is fixed w.git.checkout("origin/master", "master"); check_remote_url("origin"); assertBranchesExist(w.git.getBranches(), "master"); assertAlternatesFileNotFound(); /* JGit does not support shallow clone */ boolean hasShallowCloneSupport = w.git instanceof CliGitAPIImpl && w.cgit().isAtLeastVersion(1, 5, 0, 0); assertEquals("isShallow?", hasShallowCloneSupport, w.cgit().isShallowRepository()); String shallow = ".git" + File.separator + "shallow"; assertEquals("shallow file existence: " + shallow, hasShallowCloneSupport, w.exists(shallow)); } public void test_clone_shallow_with_depth() throws Exception { w.git.clone_().url(localMirror()).repositoryName("origin").shallow(true).depth(2).execute(); w.git.checkout("origin/master", "master"); check_remote_url("origin"); assertBranchesExist(w.git.getBranches(), "master"); assertAlternatesFileNotFound(); /* JGit does not support shallow clone */ boolean hasShallowCloneSupport = w.git instanceof CliGitAPIImpl && w.cgit().isAtLeastVersion(1, 5, 0, 0); assertEquals("isShallow?", hasShallowCloneSupport, w.cgit().isShallowRepository()); String shallow = ".git" + File.separator + "shallow"; assertEquals("shallow file existence: " + shallow, hasShallowCloneSupport, w.exists(shallow)); } public void test_clone_shared() throws IOException, InterruptedException { w.git.clone_().url(localMirror()).repositoryName("origin").shared().execute(); createRevParseBranch(); // Verify JENKINS-32258 is fixed w.git.checkout("origin/master", "master"); check_remote_url("origin"); assertBranchesExist(w.git.getBranches(), "master"); assertAlternateFilePointsToLocalMirror(); assertNoObjectsInRepository(); } public void test_clone_null_branch() throws IOException, InterruptedException { w.git.clone_().url(localMirror()).repositoryName("origin").shared().execute(); 
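// unlike test_clone above, the checkout below passes a null local branch name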
createRevParseBranch(); w.git.checkout("origin/master", null); check_remote_url("origin"); assertAlternateFilePointsToLocalMirror(); assertNoObjectsInRepository(); } public void test_clone_unshared() throws IOException, InterruptedException { w.git.clone_().url(localMirror()).repositoryName("origin").shared(false).execute(); createRevParseBranch(); // Verify JENKINS-32258 is fixed w.git.checkout("origin/master", "master"); check_remote_url("origin"); assertBranchesExist(w.git.getBranches(), "master"); assertAlternatesFileNotFound(); } public void test_clone_reference() throws IOException, InterruptedException { w.git.clone_().url(localMirror()).repositoryName("origin").reference(localMirror()).execute(); createRevParseBranch(); // Verify JENKINS-32258 is fixed w.git.checkout("origin/master", "master"); check_remote_url("origin"); assertBranchesExist(w.git.getBranches(), "master"); assertAlternateFilePointsToLocalMirror(); assertNoObjectsInRepository(); // Verify JENKINS-46737 expected log message is written String messages = StringUtils.join(handler.getMessages(), ";"); assertTrue("Reference repo not logged in: " + messages, handler.containsMessageSubstring("Using reference repository: ")); } private void assertNoObjectsInRepository() { List<String> objectsDir = new ArrayList<>(Arrays.asList(w.file(".git/objects").list())); objectsDir.remove("info"); objectsDir.remove("pack"); assertTrue("Objects directory must not contain anything but 'info' and 'pack' folders", objectsDir.isEmpty()); File packDir = w.file(".git/objects/pack"); if (packDir.isDirectory()) { assertEquals("Pack dir must not contain anything", 0, packDir.list().length); } } private void assertAlternatesFileNotFound() { final String alternates = ".git" + File.separator + "objects" + File.separator + "info" + File.separator + "alternates"; assertFalse("Alternates file found: " + alternates, w.exists(alternates)); } private void assertAlternateFilePointsToLocalMirror() throws IOException, InterruptedException { final String alternates = ".git" + File.separator + "objects" + File.separator + "info" + File.separator + "alternates"; assertTrue("Alternates file not found: " + alternates, w.exists(alternates)); final String expectedContent = localMirror().replace("\\", "/") + "/objects"; final String actualContent = w.contentOf(alternates); assertEquals("Alternates file wrong content", expectedContent, actualContent); final File alternatesDir = new File(actualContent); assertTrue("Alternates destination " + actualContent + " missing", alternatesDir.isDirectory()); } public void test_clone_reference_working_repo() throws IOException, InterruptedException { assertTrue("SRC_DIR " + SRC_DIR + " has no .git subdir", (new File(SRC_DIR + File.separator + ".git").isDirectory())); final File shallowFile = new File(SRC_DIR + File.separator + ".git" + File.separator + "shallow"); if (shallowFile.exists()) { return; /* Reference repository pointing to a shallow checkout is nonsense */ } w.git.clone_().url(localMirror()).repositoryName("origin").reference(SRC_DIR).execute(); w.git.checkout("origin/master", "master"); check_remote_url("origin"); assertBranchesExist(w.git.getBranches(), "master"); final String alternates = ".git" + File.separator + "objects" + File.separator + "info" + File.separator + "alternates"; assertTrue("Alternates file not found: " + alternates, w.exists(alternates)); final String expectedContent = SRC_DIR.replace("\\", "/") + "/.git/objects"; final String actualContent = w.contentOf(alternates); assertEquals("Alternates
file wrong content", expectedContent, actualContent); final File alternatesDir = new File(actualContent); assertTrue("Alternates destination " + actualContent + " missing", alternatesDir.isDirectory()); } public void test_clone_refspec() throws Exception { w.git.clone_().url(localMirror()).repositoryName("origin").execute(); final WorkingArea w2 = new WorkingArea(); w2.launchCommand("git", "clone", localMirror(), "./"); w2.git.withRepository((final Repository realRepo, VirtualChannel channel) -> w.git.withRepository((final Repository implRepo, VirtualChannel channel1) -> { final String realRefspec = realRepo.getConfig().getString(ConfigConstants.CONFIG_REMOTE_SECTION, Constants.DEFAULT_REMOTE_NAME, "fetch"); final String implRefspec = implRepo.getConfig().getString(ConfigConstants.CONFIG_REMOTE_SECTION, Constants.DEFAULT_REMOTE_NAME, "fetch"); assertEquals("Refspec not as git-clone", realRefspec, implRefspec); return null; })); } public void test_clone_refspecs() throws Exception { List<RefSpec> refspecs = Arrays.asList( new RefSpec("+refs/heads/master:refs/remotes/origin/master"), new RefSpec("+refs/heads/1.4.x:refs/remotes/origin/1.4.x") ); w.git.clone_().url(localMirror()).refspecs(refspecs).repositoryName("origin").execute(); w.git.withRepository((Repository repo, VirtualChannel channel) -> { String[] fetchRefSpecs = repo.getConfig().getStringList(ConfigConstants.CONFIG_REMOTE_SECTION, Constants.DEFAULT_REMOTE_NAME, "fetch"); assertEquals("Expected 2 refspecs", 2, fetchRefSpecs.length); assertEquals("Incorrect refspec 1", "+refs/heads/master:refs/remotes/origin/master", fetchRefSpecs[0]); assertEquals("Incorrect refspec 2", "+refs/heads/1.4.x:refs/remotes/origin/1.4.x", fetchRefSpecs[1]); return null; }); Set<Branch> remoteBranches = w.git.getRemoteBranches(); assertBranchesExist(remoteBranches, "origin/master"); assertBranchesExist(remoteBranches, "origin/1.4.x"); assertEquals(2, remoteBranches.size()); } public void test_detect_commit_in_repo() throws Exception { w.init(); assertFalse(w.git.isCommitInRepo(null)); // NPE safety check w.touch("file1"); w.git.add("file1"); w.git.commit("commit1"); assertTrue("HEAD commit not found", w.git.isCommitInRepo(w.head())); // this MAY fail if commit has this exact sha1, but please admit this would be unlucky assertFalse(w.git.isCommitInRepo(ObjectId.fromString("1111111111111111111111111111111111111111"))); assertFalse(w.git.isCommitInRepo(null)); // NPE safety check } @Deprecated public void test_lsTree_non_recursive() throws IOException, InterruptedException { w.init(); w.touch("file1", "file1 fixed content"); w.git.add("file1"); w.git.commit("commit1"); String expectedBlobSHA1 = "3f5a898e0c8ea62362dbf359cf1a400f3cfd46ae"; List<IndexEntry> tree = w.igit().lsTree("HEAD", false); assertEquals("Wrong blob sha1", expectedBlobSHA1, tree.get(0).getObject()); assertEquals("Wrong number of tree entries", 1, tree.size()); final String remoteUrl = localMirror(); w.igit().setRemoteUrl("origin", remoteUrl, w.repoPath() + File.separator + ".git"); assertEquals("Wrong origin default remote", "origin", w.igit().getDefaultRemote("origin")); assertEquals("Wrong invalid default remote", "origin", w.igit().getDefaultRemote("invalid")); } @Deprecated public void test_lsTree_recursive() throws IOException, InterruptedException { w.init(); assertTrue("mkdir dir1 failed", w.file("dir1").mkdir()); w.touch("dir1/file1", "dir1/file1 fixed content"); w.git.add("dir1/file1"); w.touch("file2", "file2 fixed content"); w.git.add("file2"); 
w.git.commit("commit-dir-and-file"); String expectedBlob1SHA1 = "a3ee484019f0576fcdeb48e682fa1058d0c74435"; String expectedBlob2SHA1 = "aa1b259ac5e8d6cfdfcf4155a9ff6836b048d0ad"; List<IndexEntry> tree = w.igit().lsTree("HEAD", true); assertEquals("Wrong blob 1 sha1", expectedBlob1SHA1, tree.get(0).getObject()); assertEquals("Wrong blob 2 sha1", expectedBlob2SHA1, tree.get(1).getObject()); assertEquals("Wrong number of tree entries", 2, tree.size()); final String remoteUrl = "https://github.com/jenkinsci/git-client-plugin.git"; w.git.setRemoteUrl("origin", remoteUrl); assertEquals("Wrong origin default remote", "origin", w.igit().getDefaultRemote("origin")); assertEquals("Wrong invalid default remote", "origin", w.igit().getDefaultRemote("invalid")); } @Deprecated public void test_getRemoteURL_two_args() throws Exception { w.init(); String originUrl = "https://github.com/bogus/bogus.git"; w.git.setRemoteUrl("origin", originUrl); assertEquals("Wrong remote URL", originUrl, w.git.getRemoteUrl("origin")); assertEquals("Wrong null remote URL", originUrl, w.igit().getRemoteUrl("origin", null)); assertEquals("Wrong blank remote URL", originUrl, w.igit().getRemoteUrl("origin", "")); if (w.igit() instanceof CliGitAPIImpl) { String gitDir = w.repoPath() + File.separator + ".git"; assertEquals("Wrong repoPath/.git remote URL for " + gitDir, originUrl, w.igit().getRemoteUrl("origin", gitDir)); assertEquals("Wrong .git remote URL", originUrl, w.igit().getRemoteUrl("origin", ".git")); } else { assertEquals("Wrong repoPath remote URL", originUrl, w.igit().getRemoteUrl("origin", w.repoPath())); } // Fails on both JGit and CliGit, though with different failure modes in each // assertEquals("Wrong . remote URL", originUrl, w.igit().getRemoteUrl("origin", ".")); } @Deprecated public void test_getDefaultRemote() throws Exception { w.init(); w.cmd("git remote add origin https://github.com/jenkinsci/git-client-plugin.git"); w.cmd("git remote add ndeloof git@github.com:ndeloof/git-client-plugin.git"); assertEquals("Wrong origin default remote", "origin", w.igit().getDefaultRemote("origin")); assertEquals("Wrong ndeloof default remote", "ndeloof", w.igit().getDefaultRemote("ndeloof")); /* CliGitAPIImpl and JGitAPIImpl return different ordered lists for default remote if invalid */ assertEquals("Wrong invalid default remote", w.git instanceof CliGitAPIImpl ? 
"ndeloof" : "origin", w.igit().getDefaultRemote("invalid")); } public void test_getRemoteURL() throws Exception { w.init(); w.cmd("git remote add origin https://github.com/jenkinsci/git-client-plugin.git"); w.cmd("git remote add ndeloof git@github.com:ndeloof/git-client-plugin.git"); String remoteUrl = w.git.getRemoteUrl("origin"); assertEquals("unexepected remote URL " + remoteUrl, "https://github.com/jenkinsci/git-client-plugin.git", remoteUrl); } public void test_getRemoteURL_local_clone() throws Exception { w = clone(localMirror()); assertEquals("Wrong origin URL", localMirror(), w.git.getRemoteUrl("origin")); String remotes = w.cmd("git remote -v"); assertTrue("remote URL has not been updated", remotes.contains(localMirror())); } public void test_setRemoteURL() throws Exception { w.init(); w.cmd("git remote add origin https://github.com/jenkinsci/git-client-plugin.git"); w.git.setRemoteUrl("origin", "git@github.com:ndeloof/git-client-plugin.git"); String remotes = w.cmd("git remote -v"); assertTrue("remote URL has not been updated", remotes.contains("git@github.com:ndeloof/git-client-plugin.git")); } public void test_setRemoteURL_local_clone() throws Exception { w = clone(localMirror()); String originURL = "https://github.com/jenkinsci/git-client-plugin.git"; w.git.setRemoteUrl("origin", originURL); assertEquals("Wrong origin URL", originURL, w.git.getRemoteUrl("origin")); String remotes = w.cmd("git remote -v"); assertTrue("remote URL has not been updated", remotes.contains(originURL)); } public void test_addRemoteUrl_local_clone() throws Exception { w = clone(localMirror()); assertEquals("Wrong origin URL before add", localMirror(), w.git.getRemoteUrl("origin")); String upstreamURL = "https://github.com/jenkinsci/git-client-plugin.git"; w.git.addRemoteUrl("upstream", upstreamURL); assertEquals("Wrong upstream URL", upstreamURL, w.git.getRemoteUrl("upstream")); assertEquals("Wrong origin URL after add", localMirror(), w.git.getRemoteUrl("origin")); } public void test_clean_with_parameter() throws Exception { w.init(); w.commitEmpty("init"); String dirName1 = "dir1"; String fileName1 = dirName1 + File.separator + "fileName1"; String fileName2 = "fileName2"; assertTrue("Did not create dir " + dirName1, w.file(dirName1).mkdir()); w.touch(fileName1); w.touch(fileName2); String dirName3 = "dir-with-submodule"; File submodule = w.file(dirName3); assertTrue("Did not create dir " + dirName3, submodule.mkdir()); WorkingArea workingArea = new WorkingArea(submodule); workingArea.init(); workingArea.commitEmpty("init"); w.git.clean(false); assertFalse(w.exists(dirName1)); assertFalse(w.exists(fileName1)); assertFalse(w.exists(fileName2)); assertTrue(w.exists(dirName3)); w.git.clean(true); assertFalse(w.exists(dirName3)); } @Issue({"JENKINS-20410", "JENKINS-27910", "JENKINS-22434"}) public void test_clean() throws Exception { w.init(); w.commitEmpty("init"); String fileName = "\uD835\uDD65-\u5c4f\u5e55\u622a\u56fe-\u0041\u030a-\u00c5-\u212b-fileName.xml"; w.touch(fileName, "content " + fileName); withSystemLocaleReporting(fileName, () -> { w.git.add(fileName); w.git.commit(fileName); }); /* JENKINS-27910 reported that certain cyrillic file names * failed to delete if the encoding was not UTF-8. 
*/ String fileNameSwim = "\u00d0\u00bf\u00d0\u00bb\u00d0\u00b0\u00d0\u00b2\u00d0\u00b0\u00d0\u00bd\u00d0\u00b8\u00d0\u00b5-swim.png"; w.touch(fileNameSwim, "content " + fileNameSwim); withSystemLocaleReporting(fileNameSwim, () -> { w.git.add(fileNameSwim); w.git.commit(fileNameSwim); }); String fileNameFace = "\u00d0\u00bb\u00d0\u00b8\u00d1\u2020\u00d0\u00be-face.png"; w.touch(fileNameFace, "content " + fileNameFace); withSystemLocaleReporting(fileNameFace, () -> { w.git.add(fileNameFace); w.git.commit(fileNameFace); }); w.touch(".gitignore", ".test"); w.git.add(".gitignore"); w.git.commit("ignore"); String dirName1 = "\u5c4f\u5e55\u622a\u56fe-dir-not-added"; String fileName1 = dirName1 + File.separator + "\u5c4f\u5e55\u622a\u56fe-fileName1-not-added.xml"; String fileName2 = ".test-\u00f8\u00e4\u00fc\u00f6-fileName2-not-added"; assertTrue("Did not create dir " + dirName1, w.file(dirName1).mkdir()); w.touch(fileName1); w.touch(fileName2); w.touch(fileName, "new content"); w.git.clean(); assertFalse(w.exists(dirName1)); assertFalse(w.exists(fileName1)); assertFalse(w.exists(fileName2)); assertEquals("content " + fileName, w.contentOf(fileName)); assertEquals("content " + fileNameFace, w.contentOf(fileNameFace)); assertEquals("content " + fileNameSwim, w.contentOf(fileNameSwim)); String status = w.cmd("git status"); assertTrue("unexpected status " + status, status.contains("working directory clean") || status.contains("working tree clean")); /* A few poorly placed tests of hudson.FilePath - testing JENKINS-22434 */ FilePath fp = new FilePath(w.file(fileName)); assertTrue(fp + " missing", fp.exists()); assertTrue("mkdir " + dirName1 + " failed", w.file(dirName1).mkdir()); assertTrue("dir " + dirName1 + " missing", w.file(dirName1).isDirectory()); FilePath dir1 = new FilePath(w.file(dirName1)); w.touch(fileName1); assertTrue("Did not create file " + fileName1, w.file(fileName1).exists()); assertTrue(dir1 + " missing", dir1.exists()); dir1.deleteRecursive(); /* Fails on Linux JDK 7 with LANG=C, ok with LANG=en_US.UTF-8 */ /* Java reports "Malformed input or input contains unmappable chacraters" */ assertFalse("Did not delete file " + fileName1, w.file(fileName1).exists()); assertFalse(dir1 + " not deleted", dir1.exists()); w.touch(fileName2); FilePath fp2 = new FilePath(w.file(fileName2)); assertTrue(fp2 + " missing", fp2.exists()); fp2.delete(); assertFalse(fp2 + " not deleted", fp2.exists()); String dirContents = Arrays.toString((new File(w.repoPath())).listFiles()); String finalStatus = w.cmd("git status"); assertTrue("unexpected final status " + finalStatus + " dir contents: " + dirContents, finalStatus.contains("working directory clean") || finalStatus.contains("working tree clean")); } private void assertExceptionMessageContains(GitException ge, String expectedSubstring) { String actual = ge.getMessage().toLowerCase(); assertTrue("Expected '" + expectedSubstring + "' exception message, but was: " + actual, actual.contains(expectedSubstring)); } public void test_fetch() throws Exception { /* Create a working repo containing a commit */ w.init(); w.touch("file1", "file1 content " + java.util.UUID.randomUUID().toString()); w.git.add("file1"); w.git.commit("commit1"); ObjectId commit1 = w.head(); /* Clone working repo into a bare repo */ WorkingArea bare = new WorkingArea(); bare.init(true); w.git.setRemoteUrl("origin", bare.repoPath()); Set<Branch> remoteBranchesEmpty = w.git.getRemoteBranches(); assertThat(remoteBranchesEmpty, is(empty())); w.git.push("origin", "master"); ObjectId 
bareCommit1 = bare.git.getHeadRev(bare.repoPath(), "master"); assertEquals("bare != working", commit1, bareCommit1); assertEquals(commit1, bare.git.getHeadRev(bare.repoPath(), "refs/heads/master")); /* Clone new working repo from bare repo */ WorkingArea newArea = clone(bare.repoPath()); ObjectId newAreaHead = newArea.head(); assertEquals("bare != newArea", bareCommit1, newAreaHead); Set<Branch> remoteBranches1 = newArea.git.getRemoteBranches(); assertThat(getBranchNames(remoteBranches1), hasItems("origin/master")); assertEquals(bareCommit1, newArea.git.getHeadRev(newArea.repoPath(), "refs/heads/master")); /* Commit a new change to the original repo */ w.touch("file2", "file2 content " + java.util.UUID.randomUUID().toString()); w.git.add("file2"); w.git.commit("commit2"); ObjectId commit2 = w.head(); assertEquals(commit2, w.git.getHeadRev(w.repoPath(), "refs/heads/master")); /* Push the new change to the bare repo */ w.git.push("origin", "master"); ObjectId bareCommit2 = bare.git.getHeadRev(bare.repoPath(), "master"); assertEquals("bare2 != working2", commit2, bareCommit2); assertEquals(commit2, bare.git.getHeadRev(bare.repoPath(), "refs/heads/master")); /* Fetch new change into newArea repo */ /* Confirm the fetch did not alter working branch */ /* Merge the fetch results into working branch */ /* Commit a new change to the original repo */ /* Push the new change to the bare repo */ /* Fetch new change into newArea repo using different argument forms */ /* Merge the fetch results into working branch */ /* Commit a new change to the original repo */ /* Push the new change to the bare repo */ /* Fetch new change into newArea repo using a different argument form */ /* Merge the fetch results into working branch */ /* Commit a new change to the original repo */ /* Push the new change to the bare repo */ /* Fetch into newArea repo with null RefSpec - should only * pull tags, not commits in git versions prior to git 1.9.0. * In git 1.9.0, fetch -t pulls tags and versions. */ newArea.git.fetch("origin", null, null); assertEquals("null refSpec fetch modified local repo", bareCommit4, newArea.head()); ObjectId expectedHead = bareCommit4; try { /* Assert that change did not arrive in repo if git * command line less than 1.9. Assert that change arrives in * repo if git command line 1.9 or later. */ newArea.git.merge().setRevisionToMerge(bareCommit5).execute(); // JGit 4.9.0 and later copy the revision, JGit 4.8.0 and earlier did not // assertTrue("JGit should not have copied the revision", newArea.git instanceof CliGitAPIImpl); if (newArea.git instanceof CliGitAPIImpl) { assertTrue("Wrong git version", w.cgit().isAtLeastVersion(1, 9, 0, 0)); } expectedHead = bareCommit5; } catch (GitException ge) { assertTrue("Wrong cli git message :" + ge.getMessage(), ge.getMessage().contains("Could not merge") || ge.getMessage().contains("not something we can merge") || ge.getMessage().contains("does not point to a commit")); assertExceptionMessageContains(ge, bareCommit5.name()); } /* Assert that expected change is in repo after merge. With * git 1.7 and 1.8, it should be bareCommit4. With git 1.9 * and later, it should be bareCommit5. 
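* (expectedHead is advanced from bareCommit4 to bareCommit5 above only when the merge succeeds)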
*/ assertEquals("null refSpec fetch modified local repo", expectedHead, newArea.head()); try { /* Fetch into newArea repo with invalid repo name and no RefSpec */ newArea.git.fetch("invalid-remote-name"); fail("Should have thrown an exception"); } catch (GitException ge) { assertExceptionMessageContains(ge, "invalid-remote-name"); } } public void test_push_tags() throws Exception { /* Create a working repo containing a commit */ w.init(); w.touch("file1", "file1 content " + java.util.UUID.randomUUID().toString()); w.git.add("file1"); w.git.commit("commit1"); ObjectId commit1 = w.head(); /* Clone working repo into a bare repo */ WorkingArea bare = new WorkingArea(); bare.init(true); w.git.setRemoteUrl("origin", bare.repoPath()); Set<Branch> remoteBranchesEmpty = w.git.getRemoteBranches(); assertThat(remoteBranchesEmpty, is(empty())); w.git.push("origin", "master"); ObjectId bareCommit1 = bare.git.getHeadRev(bare.repoPath(), "master"); assertEquals("bare != working", commit1, bareCommit1); assertEquals(commit1, bare.git.getHeadRev(bare.repoPath(), "refs/heads/master")); /* Add tag1 to working repo without pushing it to bare repo */ w.tag("tag1"); assertTrue("tag1 wasn't created", w.git.tagExists("tag1")); assertEquals("tag1 points to wrong commit", commit1, w.git.revParse("tag1")); w.git.push().ref("master").to(new URIish(bare.repoPath())).tags(false).execute(); assertFalse("tag1 pushed unexpectedly", bare.cmd("git tag").contains("tag1")); /* Push tag1 to bare repo */ w.git.push().ref("master").to(new URIish(bare.repoPath())).tags(true).execute(); assertTrue("tag1 not pushed", bare.cmd("git tag").contains("tag1")); /* Create a new commit, move tag1 to that commit, attempt push */ w.touch("file1", "file1 content " + java.util.UUID.randomUUID().toString()); w.git.add("file1"); w.git.commit("commit2"); ObjectId commit2 = w.head(); w.tag("tag1", true); /* Tag already exists, move from commit1 to commit2 */ assertTrue("tag1 wasn't created", w.git.tagExists("tag1")); assertEquals("tag1 points to wrong commit", commit2, w.git.revParse("tag1")); try { w.git.push().ref("master").to(new URIish(bare.repoPath())).tags(true).execute(); /* JGit does not throw exception updating existing tag - ugh */ /* CliGit before 1.8 does not throw exception updating existing tag - ugh */ if (w.git instanceof CliGitAPIImpl && w.cgit().isAtLeastVersion(1, 8, 0, 0)) { fail("Modern CLI git should throw exception pushing a change to existing tag"); } } catch (GitException ge) { assertThat(ge.getMessage(), containsString("already exists")); } try { w.git.push().ref("master").to(new URIish(bare.repoPath())).tags(true).force(false).execute(); /* JGit does not throw exception updating existing tag - ugh */ /* CliGit before 1.8 does not throw exception updating existing tag - ugh */ if (w.git instanceof CliGitAPIImpl && w.cgit().isAtLeastVersion(1, 8, 0, 0)) { fail("Modern CLI git should throw exception pushing a change to existing tag"); } } catch (GitException ge) { assertThat(ge.getMessage(), containsString("already exists")); } w.git.push().ref("master").to(new URIish(bare.repoPath())).tags(true).force(true).execute(); /* Add tag to working repo without pushing it to the bare * repo, tests the default behavior when tags() is not added * to PushCommand. 
*/ w.tag("tag3"); assertTrue("tag3 wasn't created", w.git.tagExists("tag3")); w.git.push().ref("master").to(new URIish(bare.repoPath())).execute(); assertFalse("tag3 was pushed", bare.cmd("git tag").contains("tag3")); /* Add another tag to working repo and push tags to the bare repo */ w.touch("file2", "file2 content " + java.util.UUID.randomUUID().toString()); w.git.add("file2"); w.git.commit("commit2"); w.tag("tag2"); assertTrue("tag2 wasn't created", w.git.tagExists("tag2")); w.git.push().ref("master").to(new URIish(bare.repoPath())).tags(true).execute(); assertTrue("tag1 wasn't pushed", bare.cmd("git tag").contains("tag1")); assertTrue("tag2 wasn't pushed", bare.cmd("git tag").contains("tag2")); assertTrue("tag3 wasn't pushed", bare.cmd("git tag").contains("tag3")); } @Issue("JENKINS-19591") public void test_fetch_needs_preceding_prune() throws Exception { /* Create a working repo containing a commit */ w.init(); w.touch("file1", "file1 content " + java.util.UUID.randomUUID().toString()); w.git.add("file1"); w.git.commit("commit1"); ObjectId commit1 = w.head(); assertThat(getBranchNames(w.git.getBranches()), contains("master")); assertThat(w.git.getRemoteBranches(), is(empty())); /* Prune when a remote is not yet defined */ try { w.git.prune(new RemoteConfig(new Config(), "remote-is-not-defined")); fail("Should have thrown an exception"); } catch (GitException ge) { String expected = w.git instanceof CliGitAPIImpl ? "returned status code 1" : "The uri was empty or null"; final String msg = ge.getMessage(); assertTrue("Wrong exception: " + msg, msg.contains(expected)); } /* Clone working repo into a bare repo */ WorkingArea bare = new WorkingArea(); bare.init(true); w.git.setRemoteUrl("origin", bare.repoPath()); w.git.push("origin", "master"); ObjectId bareCommit1 = bare.git.getHeadRev(bare.repoPath(), "master"); assertEquals("bare != working", commit1, bareCommit1); assertThat(getBranchNames(w.git.getBranches()), contains("master")); assertThat(w.git.getRemoteBranches(), is(empty())); /* Create a branch in working repo named "parent" */ w.git.branch("parent"); w.git.checkout("parent"); w.touch("file2", "file2 content " + java.util.UUID.randomUUID().toString()); w.git.add("file2"); w.git.commit("commit2"); ObjectId commit2 = w.head(); assertThat(getBranchNames(w.git.getBranches()), containsInAnyOrder("master", "parent")); assertThat(w.git.getRemoteBranches(), is(empty())); /* Push branch named "parent" to bare repo */ w.git.push("origin", "parent"); ObjectId bareCommit2 = bare.git.getHeadRev(bare.repoPath(), "parent"); assertEquals("working parent != bare parent", commit2, bareCommit2); assertThat(getBranchNames(w.git.getBranches()), containsInAnyOrder("master", "parent")); assertThat(w.git.getRemoteBranches(), is(empty())); /* Clone new working repo from bare repo */ WorkingArea newArea = clone(bare.repoPath()); ObjectId newAreaHead = newArea.head(); assertEquals("bare != newArea", bareCommit1, newAreaHead); Set<Branch> remoteBranches = newArea.git.getRemoteBranches(); assertThat(getBranchNames(remoteBranches), containsInAnyOrder("origin/master", "origin/parent", "origin/HEAD")); /* Checkout parent in new working repo */ newArea.git.checkout("origin/parent", "parent"); ObjectId newAreaParent = newArea.head(); assertEquals("parent1 != newAreaParent", commit2, newAreaParent); /* Delete parent branch from w */ w.git.checkout("master"); w.cmd("git branch -D parent"); assertThat(getBranchNames(w.git.getBranches()), contains("master")); /* Delete parent branch on bare repo*/ bare.cmd("git 
branch -D parent"); // assertEquals("Wrong branch count", 1, bare.git.getBranches().size()); /* Create parent/a branch in working repo */ w.git.branch("parent/a"); w.git.checkout("parent/a"); w.touch("file3", "file3 content " + java.util.UUID.randomUUID().toString()); w.git.add("file3"); w.git.commit("commit3"); ObjectId commit3 = w.head(); /* Push parent/a branch to bare repo */ w.git.push("origin", "parent/a"); ObjectId bareCommit3 = bare.git.getHeadRev(bare.repoPath(), "parent/a"); assertEquals("parent/a != bare", commit3, bareCommit3); remoteBranches = bare.git.getRemoteBranches(); assertThat(remoteBranches, is(empty())); /* Fetch parent/a into newArea repo - fails for * CliGitAPIImpl, succeeds for JGitAPIImpl */ newArea.cmd("git config fetch.prune false"); newArea.git.fetch(new URIish(bare.repo.toString()), refSpecs); assertTrue("CliGit should have thrown an exception", newArea.git instanceof JGitAPIImpl); } catch (GitException ge) { final String msg = ge.getMessage(); assertTrue("Wrong exception: " + msg, msg.contains("some local refs could not be updated") || msg.contains("error: cannot lock ref ")); } /* Use git remote prune origin to remove obsolete branch named "parent" */ newArea.git.prune(new RemoteConfig(new Config(), "origin")); /* Fetch should succeed */ newArea.git.fetch_().from(new URIish(bare.repo.toString()), refSpecs).execute(); } public void test_fetch_timeout() throws Exception { w.init(); w.git.setRemoteUrl("origin", localMirror()); /** * JGit 3.3.0 thru 3.6.0 "prune during fetch" prunes more remote * branches than command line git prunes during fetch. This test * should be used to evaluate future versions of JGit to see if * pruning behavior more closely emulates command line git. * * This has been fixed using a workaround. */ public void test_fetch_with_prune() throws Exception { WorkingArea bare = new WorkingArea(); bare.init(true); /* Create a working repo containing three branches */ /* master -> branch1 */ /* -> branch2 */ w.init(); w.git.setRemoteUrl("origin", bare.repoPath()); w.touch("file-master", "file master content " + java.util.UUID.randomUUID().toString()); w.git.add("file-master"); w.git.commit("master-commit"); assertEquals("Wrong branch count", 1, w.git.getBranches().size()); w.git.push("origin", "master"); /* master branch is now on bare repo */ w.git.checkout("master"); w.git.branch("branch1"); w.touch("file-branch1", "file branch1 content " + java.util.UUID.randomUUID().toString()); w.git.add("file-branch1"); w.git.commit("branch1-commit"); assertThat(getBranchNames(w.git.getBranches()), containsInAnyOrder("master", "branch1")); w.git.push("origin", "branch1"); /* branch1 is now on bare repo */ w.git.checkout("master"); w.git.branch("branch2"); w.touch("file-branch2", "file branch2 content " + java.util.UUID.randomUUID().toString()); w.git.add("file-branch2"); w.git.commit("branch2-commit"); assertThat(getBranchNames(w.git.getBranches()), containsInAnyOrder("master", "branch1", "branch2")); assertThat(w.git.getRemoteBranches(), is(empty())); w.git.push("origin", "branch2"); /* branch2 is now on bare repo */ /* Clone new working repo from bare repo */ WorkingArea newArea = clone(bare.repoPath()); ObjectId newAreaHead = newArea.head(); Set<Branch> remoteBranches = newArea.git.getRemoteBranches(); assertThat(getBranchNames(remoteBranches), containsInAnyOrder("origin/master", "origin/branch1", "origin/branch2", "origin/HEAD")); /* Remove branch1 from bare repo using original repo */ w.cmd("git push " + bare.repoPath() + " :branch1"); /* Fetch without 
prune should leave branch1 in newArea */ /* Fetch with prune should remove branch1 from newArea */ /* Git older than 1.7.9 (like 1.7.1 on Red Hat 6) does not prune branch1, don't fail the test * on that old git version. */ if (newArea.git instanceof CliGitAPIImpl && !w.cgit().isAtLeastVersion(1, 7, 9, 0)) { assertThat(getBranchNames(remoteBranches), containsInAnyOrder("origin/master", "origin/branch1", "origin/branch2", "origin/HEAD")); } else { assertThat(getBranchNames(remoteBranches), containsInAnyOrder("origin/master", "origin/branch2", "origin/HEAD")); } } public void test_fetch_from_url() throws Exception { WorkingArea r = new WorkingArea(); r.init(); r.commitEmpty("init"); String sha1 = r.cmd("git rev-list --no-walk --max-count=1 HEAD"); w.init(); w.cmd("git remote add origin " + r.repoPath()); w.git.fetch(new URIish(r.repo.toString()), Collections.<RefSpec>emptyList()); assertTrue(sha1.equals(r.cmd("git rev-list --no-walk --max-count=1 HEAD"))); } public void test_fetch_with_updated_tag() throws Exception { WorkingArea r = new WorkingArea(); r.init(); r.commitEmpty("init"); r.tag("t"); String sha1 = r.cmd("git rev-list --no-walk --max-count=1 t"); w.init(); w.cmd("git remote add origin " + r.repoPath()); w.git.fetch("origin", new RefSpec[] {null}); assertTrue(sha1.equals(r.cmd("git rev-list --no-walk --max-count=1 t"))); r.touch("file.txt"); r.git.add("file.txt"); r.git.commit("update"); r.tag("-d t"); r.tag("t"); sha1 = r.cmd("git rev-list --no-walk --max-count=1 t"); w.git.fetch("origin", new RefSpec[] {null}); assertTrue(sha1.equals(r.cmd("git rev-list --max-count=1 t"))); } public void test_fetch_shallow() throws Exception { w.init(); w.git.setRemoteUrl("origin", localMirror()); /* JGit does not support shallow fetch */ /* JGit does not support shallow fetch */ w.git.fetch_().from(new URIish("origin"), Collections.singletonList(new RefSpec("refs/heads/*:refs/remotes/origin/*"))).tags(false).execute(); check_remote_url("origin"); assertBranchesExist(w.git.getRemoteBranches(), "origin/master"); Set<String> tags = w.git.getTagNames(""); assertTrue("Tags have been found : " + tags, tags.isEmpty()); } @Issue("JENKINS-37794") public void test_getTagNames_supports_slashes_in_tag_names() throws Exception { w.init(); w.commitEmpty("init-getTagNames-supports-slashes"); w.git.tag("no-slash", "Tag without a /"); Set<String> tags = w.git.getTagNames(null); assertThat(tags, hasItem("no-slash")); assertThat(tags, not(hasItem("slashed/sample"))); assertThat(tags, not(hasItem("slashed/sample-with-short-comment"))); w.git.tag("slashed/sample", "Tag slashed/sample includes a /"); w.git.tag("slashed/sample-with-short-comment", "short comment"); for (String matchPattern : Arrays.asList("n*", "no-*", "*-slash", "*/sl*sa*", "*/sl*/sa*")) { Set<String> latestTags = w.git.getTagNames(matchPattern); assertThat(tags, hasItem("no-slash")); assertThat(latestTags, not(hasItem("slashed/sample"))); assertThat(latestTags, not(hasItem("slashed/sample-with-short-comment"))); } for (String matchPattern : Arrays.asList("s*", "slashed*", "sl*sa*", "slashedsa*", "slashed/sa*")) { Set<String> latestTags = w.git.getTagNames(matchPattern); assertThat(latestTags, hasItem("slashed/sample")); assertThat(latestTags, hasItem("slashed/sample-with-short-comment")); } } public void test_empty_comment() throws Exception { w.init(); w.commitEmpty("init-empty-comment-to-tag-fails-on-windows"); if (isWindows()) { w.git.tag("non-empty-comment", "empty-tag-comment-fails-on-windows"); } else { w.git.tag("empty-comment", ""); } } 
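/*
 * Illustrative sketch, not one of the original tests: the glob matching exercised by
 * test_getTagNames_supports_slashes_in_tag_names above is the same matching a caller would
 * use directly against a GitClient. Only getTagNames(String) is assumed here; the
 * "release/*" pattern and the variable names are hypothetical.
 *
 *   Set<String> releaseTags = w.git.getTagNames("release/*"); // tags under the release/ prefix
 *   Set<String> allTags     = w.git.getTagNames(null);        // a null pattern returns every tag
 */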
public void test_create_branch() throws Exception { w.init(); w.commitEmpty("init"); w.git.branch("test"); String branches = w.cmd("git branch -l"); assertTrue("master branch not listed", branches.contains("master")); assertTrue("test branch not listed", branches.contains("test")); } @Issue("JENKINS-34309") public void test_list_branches() throws Exception { w.init(); Set<Branch> branches = w.git.getBranches(); assertEquals(0, branches.size()); // empty repo should have 0 branches w.commitEmpty("init"); w.git.branch("test"); w.touch("test-branch.txt"); w.git.add("test-branch.txt"); // JGit commit doesn't end commit message with Ctrl-M, even when passed final String testBranchCommitMessage = "test branch commit ends in Ctrl-M"; w.jgit().commit(testBranchCommitMessage + "\r"); w.git.branch("another"); w.touch("another-branch.txt"); w.git.add("another-branch.txt"); // CliGit commit doesn't end commit message with Ctrl-M, even when passed final String anotherBranchCommitMessage = "test branch commit ends in Ctrl-M"; w.cgit().commit(anotherBranchCommitMessage + "\r"); branches = w.git.getBranches(); assertBranchesExist(branches, "master", "test", "another"); assertEquals(3, branches.size()); String output = w.cmd("git branch -v --no-abbrev"); assertTrue("git branch -v --no-abbrev missing test commit msg: '" + output + "'", output.contains(testBranchCommitMessage)); assertTrue("git branch -v --no-abbrev missing another commit msg: '" + output + "'", output.contains(anotherBranchCommitMessage)); if (w.cgit().isAtLeastVersion(2, 13, 0, 0)) { assertTrue("git branch -v --no-abbrev missing Ctrl-M: '" + output + "'", output.contains("\r")); assertTrue("git branch -v --no-abbrev missing test commit msg Ctrl-M: '" + output + "'", output.contains(testBranchCommitMessage + "\r")); assertTrue("git branch -v --no-abbrev missing another commit msg Ctrl-M: '" + output + "'", output.contains(anotherBranchCommitMessage + "\r")); } else { assertFalse("git branch -v --no-abbrev contains Ctrl-M: '" + output + "'", output.contains("\r")); assertFalse("git branch -v --no-abbrev contains test commit msg Ctrl-M: '" + output + "'", output.contains(testBranchCommitMessage + "\r")); assertFalse("git branch -v --no-abbrev contains another commit msg Ctrl-M: '" + output + "'", output.contains(anotherBranchCommitMessage + "\r")); } } public void test_list_remote_branches() throws Exception { WorkingArea r = new WorkingArea(); r.init(); r.commitEmpty("init"); r.git.branch("test"); r.git.branch("another"); w.init(); w.cmd("git remote add origin " + r.repoPath()); w.cmd("git fetch origin"); Set<Branch> branches = w.git.getRemoteBranches(); assertBranchesExist(branches, "origin/master", "origin/test", "origin/another"); assertEquals(3, branches.size()); } public void test_remote_list_tags_with_filter() throws Exception { WorkingArea r = new WorkingArea(); r.init(); r.commitEmpty("init"); r.tag("test"); r.tag("another_test"); r.tag("yet_another"); w.init(); w.cmd("git remote add origin " + r.repoPath()); w.cmd("git fetch origin"); Set<String> local_tags = w.git.getTagNames("*test"); Set<String> tags = w.git.getRemoteTagNames("*test"); assertTrue("expected tag test not listed", tags.contains("test")); assertTrue("expected tag another_test not listed", tags.contains("another_test")); assertFalse("unexpected yet_another tag listed", tags.contains("yet_another")); } public void test_remote_list_tags_without_filter() throws Exception { WorkingArea r = new WorkingArea(); r.init(); r.commitEmpty("init"); r.tag("test"); 
r.tag("another_test"); r.tag("yet_another"); w.init(); w.cmd("git remote add origin " + r.repoPath()); w.cmd("git fetch origin"); Set<String> allTags = w.git.getRemoteTagNames(null); assertTrue("tag 'test' not listed", allTags.contains("test")); assertTrue("tag 'another_test' not listed", allTags.contains("another_test")); assertTrue("tag 'yet_another' not listed", allTags.contains("yet_another")); } public void test_list_branches_containing_ref() throws Exception { w.init(); w.commitEmpty("init"); w.git.branch("test"); w.git.branch("another"); Set<Branch> branches = w.git.getBranches(); assertBranchesExist(branches, "master", "test", "another"); assertEquals(3, branches.size()); } public void test_delete_branch() throws Exception { w.init(); w.commitEmpty("init"); w.git.branch("test"); w.git.deleteBranch("test"); String branches = w.cmd("git branch -l"); assertFalse("deleted test branch still present", branches.contains("test")); try { w.git.deleteBranch("test"); assertTrue("cgit did not throw an exception", w.git instanceof JGitAPIImpl); } catch (GitException ge) { assertEquals("Could not delete branch test", ge.getMessage()); } } @Issue("JENKINS-23299") public void test_create_tag() throws Exception { w.init(); String gitDir = w.repoPath() + File.separator + ".git"; w.commitEmpty("init"); ObjectId commitId = w.git.revParse("HEAD"); w.git.tag("test", "this is an annotated tag"); /* * Spec: "test" (short tag syntax) * CliGit does not support this syntax for remotes. * JGit fully supports this syntax. * * JGit seems to have the better behavior in this case, always * returning the SHA1 of the commit. Most users are using * command line git, so the difference is retained in command * line git for compatibility with any legacy command line git * use cases which depend on returning null rather than the * SHA-1 of the commit to which the annotated tag points. */ String shortTagRef = "test"; ObjectId tagHeadIdByShortRef = w.git.getHeadRev(gitDir, shortTagRef); if (w.git instanceof JGitAPIImpl) { assertEquals("annotated tag does not match commit SHA1", commitId, tagHeadIdByShortRef); } else { assertNull("annotated tag unexpectedly not null", tagHeadIdByShortRef); } assertEquals("annotated tag does not match commit SHA1", commitId, w.git.revParse(shortTagRef)); /* * Spec: "refs/tags/test" (more specific tag syntax) * CliGit and JGit fully support this syntax. 
*/ String longTagRef = "refs/tags/test"; assertEquals("annotated tag does not match commit SHA1", commitId, w.git.getHeadRev(gitDir, longTagRef)); assertEquals("annotated tag does not match commit SHA1", commitId, w.git.revParse(longTagRef)); String tagNames = w.cmd("git tag -l").trim(); assertEquals("tag not created", "test", tagNames); String tagNamesWithMessages = w.cmd("git tag -l -n1"); assertTrue("unexpected tag message : " + tagNamesWithMessages, tagNamesWithMessages.contains("this is an annotated tag")); ObjectId invalidTagId = w.git.getHeadRev(gitDir, "not-a-valid-tag"); assertNull("did not expect reference for invalid tag but got : " + invalidTagId, invalidTagId); } public void test_delete_tag() throws Exception { w.init(); w.commitEmpty("init"); w.tag("test"); w.tag("another"); w.git.deleteTag("test"); String tags = w.cmd("git tag"); assertFalse("deleted test tag still present", tags.contains("test")); assertTrue("expected tag not listed", tags.contains("another")); try { w.git.deleteTag("test"); assertTrue("cgit did not throw an exception", w.git instanceof JGitAPIImpl); } catch (GitException ge) { assertEquals("Could not delete tag test", ge.getMessage()); } } public void test_list_tags_with_filter() throws Exception { w.init(); w.commitEmpty("init"); w.tag("test"); w.tag("another_test"); w.tag("yet_another"); Set<String> tags = w.git.getTagNames("*test"); assertTrue("expected tag test not listed", tags.contains("test")); assertTrue("expected tag another_test not listed", tags.contains("another_test")); assertFalse("unexpected yet_another tag listed", tags.contains("yet_another")); } public void test_list_tags_without_filter() throws Exception { w.init(); w.commitEmpty("init"); w.tag("test"); w.tag("another_test"); w.tag("yet_another"); Set<String> allTags = w.git.getTagNames(null); assertTrue("tag 'test' not listed", allTags.contains("test")); assertTrue("tag 'another_test' not listed", allTags.contains("another_test")); assertTrue("tag 'yet_another' not listed", allTags.contains("yet_another")); } public void test_list_tags_star_filter() throws Exception { w.init(); w.commitEmpty("init"); w.tag("test"); w.tag("another_test"); w.tag("yet_another"); Set<String> allTags = w.git.getTagNames("*"); assertTrue("tag 'test' not listed", allTags.contains("test")); assertTrue("tag 'another_test' not listed", allTags.contains("another_test")); assertTrue("tag 'yet_another' not listed", allTags.contains("yet_another")); } public void test_tag_exists() throws Exception { w.init(); w.commitEmpty("init"); w.tag("test"); assertTrue(w.git.tagExists("test")); assertFalse(w.git.tagExists("unknown")); } public void test_get_tag_message() throws Exception { w.init(); w.commitEmpty("init"); w.tag("test -m this-is-a-test"); assertEquals("this-is-a-test", w.git.getTagMessage("test")); } public void test_get_tag_message_multi_line() throws Exception { w.init(); w.commitEmpty("init"); w.launchCommand("git", "tag", "test", "-m", "test 123!\n* multi-line tag message\n padded "); // Leading four spaces from each line should be stripped, // but not the explicit single space before "padded", // and the final errant space at the end should be trimmed assertEquals("test 123!\n* multi-line tag message\n padded", w.git.getTagMessage("test")); } public void test_create_ref() throws Exception { w.init(); w.commitEmpty("init"); w.git.ref("refs/testing/testref"); assertTrue("test ref not created", w.cmd("git show-ref").contains("refs/testing/testref")); } public void test_delete_ref() throws Exception { w.init(); 
w.commitEmpty("init"); w.git.ref("refs/testing/testref"); w.git.ref("refs/testing/anotherref"); w.git.deleteRef("refs/testing/testref"); String refs = w.cmd("git show-ref"); assertFalse("deleted test tag still present", refs.contains("refs/testing/testref")); assertTrue("expected tag not listed", refs.contains("refs/testing/anotherref")); w.git.deleteRef("refs/testing/testref"); // Double-deletes do nothing. } public void test_list_refs_with_prefix() throws Exception { w.init(); w.commitEmpty("init"); w.git.ref("refs/testing/testref"); w.git.ref("refs/testing/nested/anotherref"); w.git.ref("refs/testing/nested/yetanotherref"); Set<String> refs = w.git.getRefNames("refs/testing/nested/"); assertFalse("ref testref listed", refs.contains("refs/testing/testref")); assertTrue("ref anotherref not listed", refs.contains("refs/testing/nested/anotherref")); assertTrue("ref yetanotherref not listed", refs.contains("refs/testing/nested/yetanotherref")); } public void test_list_refs_without_prefix() throws Exception { w.init(); w.commitEmpty("init"); w.git.ref("refs/testing/testref"); w.git.ref("refs/testing/nested/anotherref"); w.git.ref("refs/testing/nested/yetanotherref"); Set<String> allRefs = w.git.getRefNames(""); assertTrue("ref testref not listed", allRefs.contains("refs/testing/testref")); assertTrue("ref anotherref not listed", allRefs.contains("refs/testing/nested/anotherref")); assertTrue("ref yetanotherref not listed", allRefs.contains("refs/testing/nested/yetanotherref")); } public void test_ref_exists() throws Exception { w.init(); w.commitEmpty("init"); w.git.ref("refs/testing/testref"); assertTrue(w.git.refExists("refs/testing/testref")); assertFalse(w.git.refExists("refs/testing/testref_notfound")); assertFalse(w.git.refExists("refs/testing2/yetanother")); } public void test_revparse_sha1_HEAD_or_tag() throws Exception { w.init(); w.commitEmpty("init"); w.touch("file1"); w.git.add("file1"); w.git.commit("commit1"); w.tag("test"); String sha1 = w.cmd("git rev-parse HEAD").substring(0,40); assertEquals(sha1, w.git.revParse(sha1).name()); assertEquals(sha1, w.git.revParse("HEAD").name()); assertEquals(sha1, w.git.revParse("test").name()); } public void test_revparse_throws_expected_exception() throws Exception { w.init(); w.commitEmpty("init"); try { w.git.revParse("unknown-rev-to-parse"); fail("Did not throw exception"); } catch (GitException ge) { final String msg = ge.getMessage(); assertTrue("Wrong exception: " + msg, msg.contains("unknown-rev-to-parse")); } } public void test_hasGitRepo_without_git_directory() throws Exception { setTimeoutVisibleInCurrentTest(false); assertFalse("Empty directory has a Git repo", w.git.hasGitRepo()); } public void test_hasGitRepo_with_invalid_git_repo() throws Exception { // Create an empty directory named .git - "corrupt" git repo assertTrue("mkdir .git failed", w.file(".git").mkdir()); assertFalse("Invalid Git repo reported as valid", w.git.hasGitRepo()); } public void test_hasGitRepo_with_valid_git_repo() throws Exception { w.init(); assertTrue("Valid Git repo reported as invalid", w.git.hasGitRepo()); } public void test_push() throws Exception { w.init(); w.commitEmpty("init"); w.touch("file1"); w.git.add("file1"); w.git.commit("commit1"); ObjectId sha1 = w.head(); WorkingArea r = new WorkingArea(); r.init(true); w.cmd("git remote add origin " + r.repoPath()); w.git.push("origin", "master"); String remoteSha1 = r.cmd("git rev-parse master").substring(0, 40); assertEquals(sha1.name(), remoteSha1); } @Deprecated public void 
test_push_deprecated_signature() throws Exception { /* Make working repo a remote of the bare repo */ w.init(); w.commitEmpty("init"); ObjectId workHead = w.head(); /* Create a bare repo */ WorkingArea bare = new WorkingArea(); bare.init(true); /* Set working repo origin to point to bare */ w.git.setRemoteUrl("origin", bare.repoPath()); assertEquals("Wrong remote URL", w.git.getRemoteUrl("origin"), bare.repoPath()); /* Push to bare repo */ w.git.push("origin", "master"); /* JGitAPIImpl revParse fails unexpectedly when used here */ ObjectId bareHead = w.git instanceof CliGitAPIImpl ? bare.head() : ObjectId.fromString(bare.cmd("git rev-parse master").substring(0, 40)); assertEquals("Heads don't match", workHead, bareHead); assertEquals("Heads don't match", w.git.getHeadRev(w.repoPath(), "master"), bare.git.getHeadRev(bare.repoPath(), "master")); /* Commit a new file */ w.touch("file1"); w.git.add("file1"); w.git.commit("commit1"); /* Push commit to the bare repo */ Config config = new Config(); config.fromText(w.contentOf(".git/config")); RemoteConfig origin = new RemoteConfig(config, "origin"); w.igit().push(origin, "master"); /* JGitAPIImpl revParse fails unexpectedly when used here */ ObjectId workHead2 = w.git instanceof CliGitAPIImpl ? w.head() : ObjectId.fromString(w.cmd("git rev-parse master").substring(0, 40)); ObjectId bareHead2 = w.git instanceof CliGitAPIImpl ? bare.head() : ObjectId.fromString(bare.cmd("git rev-parse master").substring(0, 40)); assertEquals("Working SHA1 != bare SHA1", workHead2, bareHead2); assertEquals("Working SHA1 != bare SHA1", w.git.getHeadRev(w.repoPath(), "master"), bare.git.getHeadRev(bare.repoPath(), "master")); } @NotImplementedInJGit public void test_push_from_shallow_clone() throws Exception { WorkingArea r = new WorkingArea(); r.init(); r.commitEmpty("init"); r.touch("file1"); r.git.add("file1"); r.git.commit("commit1"); r.cmd("git checkout -b other"); w.init(); w.cmd("git remote add origin " + r.repoPath()); w.cmd("git pull --depth=1 origin master"); w.touch("file2"); w.git.add("file2"); w.git.commit("commit2"); ObjectId sha1 = w.head(); try { w.git.push("origin", "master"); assertTrue("git < 1.9.0 can push from shallow repository", w.cgit().isAtLeastVersion(1, 9, 0, 0)); String remoteSha1 = r.cmd("git rev-parse master").substring(0, 40); assertEquals(sha1.name(), remoteSha1); } catch (GitException e) { // expected for git cli < 1.9.0 assertExceptionMessageContains(e, "push from shallow repository"); assertFalse("git >= 1.9.0 can't push from shallow repository", w.cgit().isAtLeastVersion(1, 9, 0, 0)); } } public void test_notes_add_first_note() throws Exception { w.init(); w.touch("file1"); w.git.add("file1"); w.commitEmpty("init"); w.git.addNote("foo", "commits"); assertEquals("foo\n", w.cmd("git notes show")); w.git.appendNote("alpha\rbravo\r\ncharlie\r\n\r\nbar\n\n\nzot\n\n", "commits"); // cgit normalizes CR+LF aggressively // it appears to be collapsing CR+LF to LF, then truncating duplicate LFs down to 2 // note that CR itself is left as is assertEquals("foo\n\nalpha\rbravo\ncharlie\n\nbar\n\nzot\n", w.cmd("git notes show")); } public void test_notes_append_first_note() throws Exception { w.init(); w.touch("file1"); w.git.add("file1"); w.commitEmpty("init"); w.git.appendNote("foo", "commits"); assertEquals("foo\n", w.cmd("git notes show")); w.git.appendNote("alpha\rbravo\r\ncharlie\r\n\r\nbar\n\n\nzot\n\n", "commits"); // cgit normalizes CR+LF aggressively // it appears to be collapsing CR+LF to LF, then truncating duplicate LFs down to 2 // 
note that CR itself is left as is assertEquals("foo\n\nalpha\rbravo\ncharlie\n\nbar\n\nzot\n", w.cmd("git notes show")); } /** * A rev-parse warning message should not break revision parsing. */ @Issue("JENKINS-11177") public void test_jenkins_11177() throws Exception { w.init(); w.commitEmpty("init"); ObjectId base = w.head(); ObjectId master = w.git.revParse("master"); assertEquals(base, master); /* Make reference to master ambiguous, verify it is reported ambiguous by rev-parse */ w.tag("master"); // ref "master" is now ambiguous String revParse = w.cmd("git rev-parse master"); assertTrue("'" + revParse + "' does not contain 'ambiguous'", revParse.contains("ambiguous")); ObjectId masterTag = w.git.revParse("refs/tags/master"); assertEquals("masterTag != head", w.head(), masterTag); /* Get reference to ambiguous master */ ObjectId ambiguous = w.git.revParse("master"); assertEquals("ambiguous != master", ambiguous.toString(), master.toString()); /* Exploring JENKINS-20991 ambiguous revision breaks checkout */ w.touch("file-master", "content-master"); w.git.add("file-master"); w.git.commit("commit1-master"); final ObjectId masterTip = w.head(); w.cmd("git branch branch1 " + masterTip.name()); w.cmd("git checkout branch1"); w.touch("file1", "content1"); w.git.add("file1"); w.git.commit("commit1-branch1"); final ObjectId branch1 = w.head(); /* JGit checks out the masterTag, while CliGit checks out * master branch. It is risky that there are different * behaviors between the two implementations, but when a * reference is ambiguous, it is safe to assume that * resolution of the ambiguous reference is an implementation * specific detail. */ w.git.checkout("master"); String messageDetails = ", head=" + w.head().name() + ", masterTip=" + masterTip.name() + ", masterTag=" + masterTag.name() + ", branch1=" + branch1.name(); if (w.git instanceof CliGitAPIImpl) { assertEquals("head != master branch" + messageDetails, masterTip, w.head()); } else { assertEquals("head != master tag" + messageDetails, masterTag, w.head()); } } /** * Command line git clean as implemented in CliGitAPIImpl does not remove * untracked submodules or files contained in untracked submodule dirs. * JGit clean as implemented in JGitAPIImpl removes untracked submodules. * This test captures that surprising difference between the implementations. * * Command line git as implemented in CliGitAPIImpl supports renamed submodules. * JGit as implemented in JGitAPIImpl does not support renamed submodules. * This test captures that surprising difference between the implementations. * * This test really should be split into multiple tests.
* Current transitions in the test include: * with submodules -> without submodules, with files/dirs of same name * with submodules -> without submodules, no files/dirs of same name * * See bug reports such as: * JENKINS-22510 - Clean After Checkout Results in Failed to Checkout Revision * JENKINS-8053 - Git submodules are cloned too early and not removed once the revToBuild has been checked out * JENKINS-14083 - Build can't recover from broken submodule path * JENKINS-15399 - Changing remote URL doesn't update submodules * * @throws Exception on test failure */ public void test_submodule_checkout_and_clean_transitions() throws Exception { w = clone(localMirror()); assertSubmoduleDirs(w.repo, false, false); String subBranch = "tests/getSubmodules"; String subRefName = "origin/" + subBranch; String ntpDirName = "modules/ntp"; String contributingFileName = "modules/ntp/CONTRIBUTING.md"; String contributingFileContent = "Puppet Labs modules on the Puppet Forge are open projects"; File modulesDir = new File(w.repo, "modules"); assertDirNotFound(modulesDir); File keeperFile = new File(modulesDir, "keeper"); assertFileNotFound(keeperFile); File ntpDir = new File(modulesDir, "ntp"); File ntpContributingFile = new File(ntpDir, "CONTRIBUTING.md"); assertDirNotFound(ntpDir); assertFileNotFound(ntpContributingFile); File firewallDir = new File(modulesDir, "firewall"); assertDirNotFound(firewallDir); File sshkeysDir = new File(modulesDir, "sshkeys"); File sshkeysModuleFile = new File(sshkeysDir, "Modulefile"); assertDirNotFound(sshkeysDir); assertFileNotFound(sshkeysModuleFile); /* Checkout a branch which includes submodules (in modules directory) */ w.git.checkout().ref(subRefName).branch(subBranch).execute(); assertDirExists(modulesDir); assertFileExists(keeperFile); assertFileContents(keeperFile, ""); assertSubmoduleDirs(w.repo, true, false); /* Call submodule update without recursion */ w.git.submoduleUpdate().recursive(false).execute(); /* Command line git supports renamed submodule dirs, JGit does not */ /* JGit silently fails submodule updates on renamed submodule dirs */ if (w.git instanceof CliGitAPIImpl) { assertSubmoduleDirs(w.repo, true, true); assertSubmoduleContents(w.repo); assertSubmoduleRepository(new File(w.repo, "modules/ntp")); assertSubmoduleRepository(new File(w.repo, "modules/firewall")); assertSubmoduleRepository(new File(w.repo, "modules/sshkeys")); } else { /* JGit does not fully support renamed submodules - creates directories but not content */ assertSubmoduleDirs(w.repo, true, false); } /* Call submodule update with recursion */ w.git.submoduleUpdate().recursive(true).execute(); /* Command line git supports renamed submodule dirs, JGit does not */ /* JGit silently fails submodule updates on renamed submodule dirs */ if (w.git instanceof CliGitAPIImpl) { assertSubmoduleDirs(w.repo, true, true); assertSubmoduleContents(w.repo); assertSubmoduleRepository(new File(w.repo, "modules/ntp")); assertSubmoduleRepository(new File(w.repo, "modules/firewall")); assertSubmoduleRepository(new File(w.repo, "modules/sshkeys")); } else { /* JGit does not fully support renamed submodules - creates directories but not content */ assertSubmoduleDirs(w.repo, true, false); } String notSubBranchName = "tests/notSubmodules"; String notSubRefName = "origin/" + notSubBranchName; String contributingFileContentFromNonsubmoduleBranch = "This is not a useful contribution"; /* Checkout a detached head which does not include submodules, * since checkout of a branch does not currently use the "-f" * option 
(though it probably should). The checkout includes a file * modules/ntp/CONTRIBUTING.md which collides with a file from the * submodule but is provided from the repository rather than from a * submodule. */ // w.git.checkout().ref(notSubRefName).execute(); w.git.checkout().ref(notSubRefName).branch(notSubBranchName).deleteBranchIfExist(true).execute(); assertDirExists(ntpDir); assertFileExists(ntpContributingFile); assertFileContains(ntpContributingFile, contributingFileContentFromNonsubmoduleBranch); if (w.git instanceof CliGitAPIImpl) { /* submodule dirs exist because git.clean() won't remove untracked submodules */ assertDirExists(firewallDir); assertDirExists(sshkeysDir); assertFileExists(sshkeysModuleFile); } else { /* firewallDir and sshKeysDir don't exist because JGit submodule update never created them */ assertDirNotFound(firewallDir); assertDirNotFound(sshkeysDir); } /* CLI git clean does not remove submodule remnants, JGit does */ w.git.clean(); assertDirExists(ntpDir); assertFileExists(ntpContributingFile); /* exists in nonSubmodule branch */ if (w.git instanceof CliGitAPIImpl) { /* untracked - CLI clean doesn't remove submodule dirs or their contents */ assertDirExists(firewallDir); assertDirExists(sshkeysDir); assertFileExists(sshkeysModuleFile); } else { /* JGit clean removes submodule dirs*/ assertDirNotFound(firewallDir); assertDirNotFound(sshkeysDir); } /* Checkout master branch - will leave submodule files untracked */ w.git.checkout().ref("origin/master").execute(); // w.git.checkout().ref("origin/master").branch("master").execute(); if (w.git instanceof CliGitAPIImpl) { /* CLI git clean will not remove untracked submodules */ assertDirExists(ntpDir); assertDirExists(firewallDir); assertDirExists(sshkeysDir); assertFileNotFound(ntpContributingFile); /* cleaned because it is in tests/notSubmodules branch */ assertFileExists(sshkeysModuleFile); } else { /* JGit git clean removes them */ assertDirNotFound(ntpDir); assertDirNotFound(firewallDir); assertDirNotFound(sshkeysDir); } /* git.clean() does not remove submodule remnants in CliGitAPIImpl, does in JGitAPIImpl */ w.git.clean(); if (w.git instanceof CliGitAPIImpl && w.cgit().isAtLeastVersion(1, 7, 9, 0)) { assertDirExists(ntpDir); assertDirExists(firewallDir); assertDirExists(sshkeysDir); } else { assertDirNotFound(ntpDir); assertDirNotFound(firewallDir); assertDirNotFound(sshkeysDir); } /* Really remove submodule remnant, use git command line double force */ if (w.git instanceof CliGitAPIImpl) { if (!isWindows()) { w.cmd("git clean -xffd"); } else { try { w.cmd("git clean -xffd"); } catch (Exception e) { /* Retry once (and only once) in case of Windows busy file behavior */ Thread.sleep(503); /* Wait 0.5 seconds for Windows */ w.cmd("git clean -xffd"); } } } assertSubmoduleDirs(w.repo, false, false); /* Checkout a branch which *includes submodules* after a prior * checkout with a file which has the same name as a file * provided by a submodule checkout. Use a detached head, * since checkout of a branch does not currently use the "-f" * option. 
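* The SHA1 assertion that follows pins the expected tip of the submodule branch before the detached checkout.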
*/ assertEquals(ObjectId.fromString("a6dd186704985fdb0c60e60f5c6ea7ea35e082e5"), w.git.revParse(subRefName)); // w.git.checkout().ref(subRefName).branch(subBranch).execute(); w.git.checkout().ref(subRefName).execute(); assertDirExists(modulesDir); assertSubmoduleDirs(w.repo, true, false); w.git.submoduleClean(true); assertSubmoduleDirs(w.repo, true, false); if (w.git instanceof JGitAPIImpl) { /* submoduleUpdate().recursive(true).execute() throws an exception */ /* Call setupSubmoduleUrls to assure it throws expected exception */ try { Revision nullRevision = null; w.igit().setupSubmoduleUrls(nullRevision, listener); } catch (UnsupportedOperationException uoe) { assertTrue("Unsupported operation not on JGit", w.igit() instanceof JGitAPIImpl); } return; } w.git.submoduleUpdate().recursive(true).execute(); assertSubmoduleDirs(w.repo, true, true); assertSubmoduleContents(w.repo); assertSubmoduleRepository(new File(w.repo, "modules/ntp")); assertSubmoduleRepository(new File(w.repo, "modules/firewall")); if (w.git instanceof CliGitAPIImpl) { // This is a low value section of the test. Does not assert anything // about the result of setupSubmoduleUrls ObjectId headId = w.git.revParse("HEAD"); List<Branch> branches = new ArrayList<>(); branches.add(new Branch("HEAD", headId)); branches.add(new Branch(subRefName, headId)); Revision head = new Revision(headId, branches); w.cgit().setupSubmoduleUrls(head, listener); assertSubmoduleDirs(w.repo, true, true); assertSubmoduleContents(w.repo); } } /* Submodule checkout in JGit does not support renamed submodules. * The test branch intentionally includes a renamed submodule, so this test * is not run with JGit. */ @NotImplementedInJGit public void test_submodule_checkout_simple() throws Exception { w = clone(localMirror()); assertSubmoduleDirs(w.repo, false, false); /* Checkout a branch which includes submodules (in modules directory) */ String subBranch = "tests/getSubmodules"; String subRefName = "origin/" + subBranch; w.git.checkout().ref(subRefName).branch(subBranch).execute(); assertSubmoduleDirs(w.repo, true, false); w.git.submoduleUpdate().recursive(true).execute(); assertSubmoduleDirs(w.repo, true, true); assertSubmoduleContents(w.repo); assertSubmoduleRepository(new File(w.repo, "modules/ntp")); assertSubmoduleRepository(new File(w.repo, "modules/firewall")); assertSubmoduleRepository(new File(w.repo, "modules/sshkeys")); } /* Opening a git repository in a directory with a symbolic git file instead * of a git directory should function properly. */ public void test_with_repository_works_with_submodule() throws Exception { w = clone(localMirror()); assertSubmoduleDirs(w.repo, false, false); /* Checkout a branch which includes submodules (in modules directory) */ String subBranch = w.git instanceof CliGitAPIImpl ? 
"tests/getSubmodules" : "tests/getSubmodules-jgit"; String subRefName = "origin/" + subBranch; w.git.checkout().ref(subRefName).branch(subBranch).execute(); w.git.submoduleInit(); w.git.submoduleUpdate().recursive(true).execute(); assertSubmoduleRepository(new File(w.repo, "modules/ntp")); assertSubmoduleRepository(new File(w.repo, "modules/firewall")); } private void assertSubmoduleRepository(File submoduleDir) throws Exception { /* Get a client directly on the submoduleDir */ GitClient submoduleClient = setupGitAPI(submoduleDir); /* Assert that when we invoke the repository callback it gets a * functioning repository object */ submoduleClient.withRepository((final Repository repo, VirtualChannel channel) -> { assertTrue(repo.getDirectory() + " is not a valid repository", repo.getObjectDatabase().exists()); return null; }); } private String listDir(File dir) { if (dir == null || !dir.exists()) { return ""; } File[] files = dir.listFiles(); if (files == null) { return ""; } StringBuilder fileList = new StringBuilder(); for (File file : files) { fileList.append(file.getName()); fileList.append(','); } if (fileList.length() > 0) { fileList.deleteCharAt(fileList.length() - 1); } return fileList.toString(); } private void assertFileExists(File file) { assertTrue(file + " not found, peer files: " + listDir(file.getParentFile()), file.exists()); } private void assertFileNotFound(File file) { assertFalse(file + " found, peer files: " + listDir(file.getParentFile()), file.exists()); } private void assertDirExists(File dir) { assertFileExists(dir); assertTrue(dir + " is not a directory", dir.isDirectory()); } private void assertDirNotFound(File dir) { assertFileNotFound(dir); } private void assertFileContains(File file, String expectedContent) throws IOException { assertFileExists(file); final String fileContent = FileUtils.readFileToString(file, "UTF-8"); final String message = file + " does not contain '" + expectedContent + "', contains '" + fileContent + "'"; assertTrue(message, fileContent.contains(expectedContent)); } private void assertFileContents(File file, String expectedContent) throws IOException { assertFileExists(file); final String fileContent = FileUtils.readFileToString(file, "UTF-8"); assertEquals(file + " wrong content", expectedContent, fileContent); } private void assertSubmoduleDirs(File repo, boolean dirsShouldExist, boolean filesShouldExist) throws IOException { final File modulesDir = new File(w.repo, "modules"); final File ntpDir = new File(modulesDir, "ntp"); final File firewallDir = new File(modulesDir, "firewall"); final File keeperFile = new File(modulesDir, "keeper"); final File ntpContributingFile = new File(ntpDir, "CONTRIBUTING.md"); final File sshkeysDir = new File(modulesDir, "sshkeys"); final File sshkeysModuleFile = new File(sshkeysDir, "Modulefile"); if (dirsShouldExist) { assertDirExists(modulesDir); assertDirExists(ntpDir); assertDirExists(firewallDir); assertDirExists(sshkeysDir); /* keeperFile is in the submodules branch, but is a plain file */ assertFileExists(keeperFile); } else { assertDirNotFound(modulesDir); assertDirNotFound(ntpDir); assertDirNotFound(firewallDir); assertDirNotFound(sshkeysDir); /* keeperFile is in the submodules branch, but is a plain file */ assertFileNotFound(keeperFile); } if (filesShouldExist) { assertFileExists(ntpContributingFile); assertFileExists(sshkeysModuleFile); } else { assertFileNotFound(ntpContributingFile); assertFileNotFound(sshkeysModuleFile); } } private void assertSubmoduleContents(File repo) throws IOException { 
final File modulesDir = new File(w.repo, "modules"); final File sshkeysDir = new File(modulesDir, "sshkeys"); final File sshkeysModuleFile = new File(sshkeysDir, "Modulefile"); assertFileExists(sshkeysModuleFile); final File keeperFile = new File(modulesDir, "keeper"); final String keeperContent = ""; assertFileExists(keeperFile); assertFileContents(keeperFile, keeperContent); final File ntpDir = new File(modulesDir, "ntp"); final File ntpContributingFile = new File(ntpDir, "CONTRIBUTING.md"); final String ntpContributingContent = "Puppet Labs modules on the Puppet Forge are open projects"; assertFileExists(ntpContributingFile); assertFileContains(ntpContributingFile, ntpContributingContent); /* Check substring in file */ } public void test_no_submodules() throws IOException, InterruptedException { w.init(); w.touch("committed-file", "committed-file content " + java.util.UUID.randomUUID().toString()); w.git.add("committed-file"); w.git.commit("commit1"); w.igit().submoduleClean(false); w.igit().submoduleClean(true); w.igit().submoduleUpdate(false); w.igit().submoduleUpdate(true); w.igit().submoduleSync(); assertTrue("committed-file missing at commit1", w.file("committed-file").exists()); } public void assertFixSubmoduleUrlsThrows() throws InterruptedException { try { w.igit().fixSubmoduleUrls("origin", listener); fail("Expected exception not thrown"); } catch (UnsupportedOperationException uoe) { assertTrue("Unsupported operation not on JGit", w.igit() instanceof JGitAPIImpl); } catch (GitException ge) { assertTrue("GitException not on CliGit", w.igit() instanceof CliGitAPIImpl); assertTrue("Wrong message in " + ge.getMessage(), ge.getMessage().startsWith("Could not determine remote")); assertExceptionMessageContains(ge, "origin"); } } public void test_addSubmodule() throws Exception { String sub1 = "sub1-" + java.util.UUID.randomUUID().toString(); String readme1 = sub1 + File.separator + "README.md"; w.init(); assertFalse("submodule1 dir found too soon", w.file(sub1).exists()); assertFalse("submodule1 file found too soon", w.file(readme1).exists()); w.git.addSubmodule(localMirror(), sub1); assertTrue("submodule1 dir not found after add", w.file(sub1).exists()); assertTrue("submodule1 file not found after add", w.file(readme1).exists()); w.igit().submoduleUpdate(false); assertTrue("submodule1 dir not found after update", w.file(sub1).exists()); assertTrue("submodule1 file not found after update", w.file(readme1).exists()); w.igit().submoduleUpdate(true); assertTrue("submodule1 dir not found after recursive update", w.file(sub1).exists()); assertTrue("submodule1 file not found after recursive update", w.file(readme1).exists()); w.igit().submoduleSync(); assertFixSubmoduleUrlsThrows(); } @NotImplementedInJGit public void test_trackingSubmodule() throws Exception { if (! ((CliGitAPIImpl)w.git).isAtLeastVersion(1,8,2,0)) { System.err.println("git must be at least 1.8.2 to do tracking submodules."); return; } w.init(); // empty repository // create a new GIT repo. // master -- <file1>C <file2>C WorkingArea r = new WorkingArea(); r.init(); r.touch("file1", "content1"); r.git.add("file1"); r.git.commit("submod-commit1"); // Add new GIT repo to w String subModDir = "submod1-" + java.util.UUID.randomUUID().toString(); w.git.addSubmodule(r.repoPath(), subModDir); w.git.submoduleInit(); // Add a new file to the separate GIT repo.
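// The submodule in w still points at submod-commit1, so the new file only appears after a remote-tracking submodule update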
r.touch("file2", "content2"); r.git.add("file2"); r.git.commit("submod-branch1-commit1"); // Make sure that the new file doesn't exist in the repo with remoteTracking String subFile = subModDir + File.separator + "file2"; w.git.submoduleUpdate(true, false); assertFalse("file2 exists and should not because we didn't update to the tip of the branch (master).", w.exists(subFile)); // Run submodule update with remote tracking w.git.submoduleUpdate(true, true); assertTrue("file2 does not exist and should because we updated to the top of the branch (master).", w.exists(subFile)); assertFixSubmoduleUrlsThrows(); } /* Check JENKINS-23424 - inconsistent handling of modified tracked * files when performing a checkout in an existing directory. * CliGitAPIImpl reverts tracked files, while JGitAPIImpl does * not. */ private void base_checkout_replaces_tracked_changes(boolean defineBranch) throws Exception { w.git.clone_().url(localMirror()).repositoryName("JENKINS-23424").execute(); w.git.checkout("JENKINS-23424/master", "master"); if (defineBranch) { w.git.checkout().branch("master").ref("JENKINS-23424/master").deleteBranchIfExist(true).execute(); } else { w.git.checkout().ref("JENKINS-23424/master").deleteBranchIfExist(true).execute(); } /* Confirm first checkout */ String pomContent = w.contentOf("pom.xml"); assertTrue("Missing inceptionYear ref in master pom : " + pomContent, pomContent.contains("inceptionYear")); assertFalse("Found untracked file", w.file("untracked-file").exists()); /* Modify the pom file by adding a comment */ String comment = " <!-- JENKINS-23424 comment -->"; /* JGit implementation prior to 3.4.1 did not reset modified tracked files */ w.touch("pom.xml", pomContent + comment); assertTrue(w.contentOf("pom.xml").contains(comment)); /* Create an untracked file. Both implementations retain * untracked files across checkout. */ w.touch("untracked-file", comment); assertTrue("Missing untracked file", w.file("untracked-file").exists()); /* Checkout should erase local modification */ CheckoutCommand cmd = w.git.checkout().ref("JENKINS-23424/1.4.x").deleteBranchIfExist(true); if (defineBranch) { cmd.branch("1.4.x"); } cmd.execute(); /* Tracked file should not contain added comment, nor the inceptionYear reference */ pomContent = w.contentOf("pom.xml"); assertFalse("Found inceptionYear ref in 1.4.x pom : " + pomContent, pomContent.contains("inceptionYear")); assertFalse("Found comment in 1.4.x pom", pomContent.contains(comment)); assertTrue("Missing untracked file", w.file("untracked-file").exists()); } @Issue("JENKINS-23424") public void test_checkout_replaces_tracked_changes() throws Exception { base_checkout_replaces_tracked_changes(false); } @Issue("JENKINS-23424") public void test_checkout_replaces_tracked_changes_with_branch() throws Exception { base_checkout_replaces_tracked_changes(true); } /** * Confirm that JENKINS-8122 is fixed in the current * implementation. That bug reported that the tags from a * submodule were being included in the set of tags associated * with the parent repository. This test clones a repository with * submodules, updates those submodules, and compares the tags * available in the repository before the submodule branch * checkout, after the submodule branch checkout, and within one * of the submodules. 
*/ @Issue("JENKINS-8122") public void test_submodule_tags_not_fetched_into_parent() throws Exception { w.git.clone_().url(localMirror()).repositoryName("origin").execute(); checkoutTimeout = 1 + random.nextInt(60 * 24); w.git.checkout().ref("origin/master").branch("master").timeout(checkoutTimeout).execute(); String tagsBefore = w.cmd("git tag"); Set<String> tagNamesBefore = w.git.getTagNames(null); for (String tag : tagNamesBefore) { assertTrue(tag + " not in " + tagsBefore, tagsBefore.contains(tag)); } w.git.checkout().branch("tests/getSubmodules").ref("origin/tests/getSubmodules").timeout(checkoutTimeout).execute(); w.git.submoduleUpdate().recursive(true).execute(); String tagsAfter = w.cmd("git tag"); Set<String> tagNamesAfter = w.git.getTagNames(null); for (String tag : tagNamesAfter) { assertTrue(tag + " not in " + tagsAfter, tagsAfter.contains(tag)); } assertEquals("tags before != after", tagsBefore, tagsAfter); GitClient gitNtp = w.git.subGit("modules/ntp"); Set<String> tagNamesSubmodule = gitNtp.getTagNames(null); for (String tag : tagNamesSubmodule) { assertFalse("Submodule tag " + tag + " in parent " + tagsAfter, tagsAfter.matches("^" + tag + "$")); } try { w.igit().fixSubmoduleUrls("origin", listener); assertTrue("not CliGit", w.igit() instanceof CliGitAPIImpl); } catch (UnsupportedOperationException uoe) { assertTrue("Unsupported operation not on JGit", w.igit() instanceof JGitAPIImpl); } } /* Shows the JGit submodule update is broken now that tests/getSubmodule includes a renamed submodule */ public void test_getSubmodules() throws Exception { w.init(); w.git.clone_().url(localMirror()).repositoryName("sub_origin").execute(); w.git.checkout("sub_origin/tests/getSubmodules", "tests/getSubmodules"); List<IndexEntry> r = w.git.getSubmodules("HEAD"); assertEquals( "[IndexEntry[mode=160000,type=commit,file=modules/firewall,object=978c8b223b33e203a5c766ecf79704a5ea9b35c8], " + "IndexEntry[mode=160000,type=commit,file=modules/ntp,object=b62fabbc2bb37908c44ded233e0f4bf479e45609], " + "IndexEntry[mode=160000,type=commit,file=modules/sshkeys,object=689c45ed57f0829735f9a2b16760c14236fe21d9]]", r.toString() ); w.git.submoduleInit(); w.git.submoduleUpdate().execute(); assertTrue("modules/firewall does not exist", w.exists("modules/firewall")); assertTrue("modules/ntp does not exist", w.exists("modules/ntp")); // JGit submodule implementation doesn't handle renamed submodules if (w.igit() instanceof CliGitAPIImpl) { assertTrue("modules/sshkeys does not exist", w.exists("modules/sshkeys")); } assertFixSubmoduleUrlsThrows(); } /* Shows the submodule update is broken now that tests/getSubmodule includes a renamed submodule */ @NotImplementedInJGit public void test_submodule_update() throws Exception { w.init(); w.git.clone_().url(localMirror()).repositoryName("sub2_origin").execute(); w.git.checkout().branch("tests/getSubmodules").ref("sub2_origin/tests/getSubmodules").deleteBranchIfExist(true).execute(); w.git.submoduleInit(); w.git.submoduleUpdate().execute(); assertTrue("modules/firewall does not exist", w.exists("modules/firewall")); assertTrue("modules/ntp does not exist", w.exists("modules/ntp")); // JGit submodule implementation doesn't handle renamed submodules if (w.igit() instanceof CliGitAPIImpl) { assertTrue("modules/sshkeys does not exist", w.exists("modules/sshkeys")); } assertFixSubmoduleUrlsThrows(); String shallow = Paths.get(".git", "modules", "module", "1", "shallow").toString(); assertFalse("shallow file existence: " + shallow, w.exists(shallow)); } public void 
test_submodule_update_shallow() throws Exception { WorkingArea remote = setupRepositoryWithSubmodule(); w.git.clone_().url("file://" + remote.file("dir-repository").getAbsolutePath()).repositoryName("origin").execute(); w.git.checkout().branch("master").ref("origin/master").execute(); w.git.submoduleInit(); w.git.submoduleUpdate().shallow(true).execute(); boolean hasShallowSubmoduleSupport = w.git instanceof CliGitAPIImpl && w.cgit().isAtLeastVersion(1, 8, 4, 0); String shallow = Paths.get(".git", "modules", "submodule", "shallow").toString(); assertEquals("shallow file existence: " + shallow, hasShallowSubmoduleSupport, w.exists(shallow)); int localSubmoduleCommits = w.cgit().subGit("submodule").revList("master").size(); int remoteSubmoduleCommits = remote.cgit().subGit("dir-submodule").revList("master").size(); assertEquals("submodule commit count didn't match", hasShallowSubmoduleSupport ? 1 : remoteSubmoduleCommits, localSubmoduleCommits); } public void test_submodule_update_shallow_with_depth() throws Exception { WorkingArea remote = setupRepositoryWithSubmodule(); w.git.clone_().url("file://" + remote.file("dir-repository").getAbsolutePath()).repositoryName("origin").execute(); w.git.checkout().branch("master").ref("origin/master").execute(); w.git.submoduleInit(); w.git.submoduleUpdate().shallow(true).depth(2).execute(); boolean hasShallowSubmoduleSupport = w.git instanceof CliGitAPIImpl && w.cgit().isAtLeastVersion(1, 8, 4, 0); String shallow = Paths.get(".git", "modules", "submodule", "shallow").toString(); assertEquals("shallow file existence: " + shallow, hasShallowSubmoduleSupport, w.exists(shallow)); int localSubmoduleCommits = w.cgit().subGit("submodule").revList("master").size(); int remoteSubmoduleCommits = remote.cgit().subGit("dir-submodule").revList("master").size(); assertEquals("submodule commit count didn't match", hasShallowSubmoduleSupport ? 2 : remoteSubmoduleCommits, localSubmoduleCommits); } @NotImplementedInJGit public void test_submodule_update_with_threads() throws Exception { w.init(); w.git.clone_().url(localMirror()).repositoryName("sub2_origin").execute(); w.git.checkout().branch("tests/getSubmodules").ref("sub2_origin/tests/getSubmodules").deleteBranchIfExist(true).execute(); w.git.submoduleInit(); w.git.submoduleUpdate().threads(3).execute(); assertTrue("modules/firewall does not exist", w.exists("modules/firewall")); assertTrue("modules/ntp does not exist", w.exists("modules/ntp")); // JGit submodule implementation doesn't handle renamed submodules if (w.igit() instanceof CliGitAPIImpl) { assertTrue("modules/sshkeys does not exist", w.exists("modules/sshkeys")); } assertFixSubmoduleUrlsThrows(); } @NotImplementedInJGit public void test_trackingSubmoduleBranches() throws Exception { if (! ((CliGitAPIImpl)w.git).isAtLeastVersion(1,8,2,0)) { setTimeoutVisibleInCurrentTest(false); System.err.println("git must be at least 1.8.2 to do tracking submodules."); return; } w.init(); // empty repository // create a new GIT repo. 
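// The submodule source repository below gets one extra file per branch so the test can verify which files appear when tracking each branch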
// master -- <file1>C // branch1 -- <file1>C <file2>C // branch2 -- <file1>C <file3>C WorkingArea r = new WorkingArea(); r.init(); r.touch("file1", "content1"); r.git.add("file1"); r.git.commit("submod-commit1"); r.git.branch("branch1"); r.git.checkout("branch1"); r.touch("file2", "content2"); r.git.add("file2"); r.git.commit("submod-commit2"); r.git.checkout("master"); r.git.branch("branch2"); r.git.checkout("branch2"); r.touch("file3", "content3"); r.git.add("file3"); r.git.commit("submod-commit3"); r.git.checkout("master"); // Setup variables for use in tests String submodDir = "submod1" + java.util.UUID.randomUUID().toString(); String subFile1 = submodDir + File.separator + "file1"; String subFile2 = submodDir + File.separator + "file2"; String subFile3 = submodDir + File.separator + "file3"; // Add new GIT repo to w, at the master branch w.git.addSubmodule(r.repoPath(), submodDir); w.git.submoduleInit(); assertTrue("file1 does not exist and should be we imported the submodule.", w.exists(subFile1)); assertFalse("file2 exists and should not because not on 'branch1'", w.exists(subFile2)); assertFalse("file3 exists and should not because not on 'branch2'", w.exists(subFile3)); // Switch to branch1 submoduleUpdateTimeout = 1 + random.nextInt(60 * 24); w.git.submoduleUpdate().remoteTracking(true).useBranch(submodDir, "branch1").timeout(submoduleUpdateTimeout).execute(); assertTrue("file2 does not exist and should because on branch1", w.exists(subFile2)); assertFalse("file3 exists and should not because not on 'branch2'", w.exists(subFile3)); // Switch to branch2 w.git.submoduleUpdate().remoteTracking(true).useBranch(submodDir, "branch2").timeout(submoduleUpdateTimeout).execute(); assertFalse("file2 exists and should not because not on 'branch1'", w.exists(subFile2)); assertTrue("file3 does not exist and should because on branch2", w.exists(subFile3)); // Switch to master w.git.submoduleUpdate().remoteTracking(true).useBranch(submodDir, "master").timeout(submoduleUpdateTimeout).execute(); assertFalse("file2 exists and should not because not on 'branch1'", w.exists(subFile2)); assertFalse("file3 exists and should not because not on 'branch2'", w.exists(subFile3)); } @NotImplementedInJGit public void test_sparse_checkout() throws Exception { /* Sparse checkout was added in git 1.7.0, but the checkout -f syntax * required by the plugin implementation does not work in git 1.7.1. 
*/ if (!w.cgit().isAtLeastVersion(1, 7, 9, 0)) { return; } // Create a repo for cloning purpose w.init(); w.commitEmpty("init"); assertTrue("mkdir dir1 failed", w.file("dir1").mkdir()); w.touch("dir1/file1"); assertTrue("mkdir dir2 failed", w.file("dir2").mkdir()); w.touch("dir2/file2"); assertTrue("mkdir dir3 failed", w.file("dir3").mkdir()); w.touch("dir3/file3"); w.git.add("dir1/file1"); w.git.add("dir2/file2"); w.git.add("dir3/file3"); w.git.commit("commit"); // Clone it WorkingArea workingArea = new WorkingArea(); workingArea.git.clone_().url(w.repoPath()).execute(); checkoutTimeout = 1 + random.nextInt(60 * 24); workingArea.git.checkout().ref("origin/master").branch("master").deleteBranchIfExist(true).sparseCheckoutPaths(Arrays.asList("dir1")).timeout(checkoutTimeout).execute(); assertTrue(workingArea.exists("dir1")); assertFalse(workingArea.exists("dir2")); assertFalse(workingArea.exists("dir3")); workingArea.git.checkout().ref("origin/master").branch("master").deleteBranchIfExist(true).sparseCheckoutPaths(Arrays.asList("dir2")).timeout(checkoutTimeout).execute(); assertFalse(workingArea.exists("dir1")); assertTrue(workingArea.exists("dir2")); assertFalse(workingArea.exists("dir3")); workingArea.git.checkout().ref("origin/master").branch("master").deleteBranchIfExist(true).sparseCheckoutPaths(Arrays.asList("dir1", "dir2")).timeout(checkoutTimeout).execute(); assertTrue(workingArea.exists("dir1")); assertTrue(workingArea.exists("dir2")); assertFalse(workingArea.exists("dir3")); workingArea.git.checkout().ref("origin/master").branch("master").deleteBranchIfExist(true).sparseCheckoutPaths(Collections.<String>emptyList()).timeout(checkoutTimeout).execute(); assertTrue(workingArea.exists("dir1")); assertTrue(workingArea.exists("dir2")); assertTrue(workingArea.exists("dir3")); workingArea.git.checkout().ref("origin/master").branch("master").deleteBranchIfExist(true).sparseCheckoutPaths(null) .timeout(checkoutTimeout) .execute(); assertTrue(workingArea.exists("dir1")); assertTrue(workingArea.exists("dir2")); assertTrue(workingArea.exists("dir3")); } public void test_clone_no_checkout() throws Exception { // Create a repo for cloning purpose WorkingArea repoToClone = new WorkingArea(); repoToClone.init(); repoToClone.commitEmpty("init"); repoToClone.touch("file1"); repoToClone.git.add("file1"); repoToClone.git.commit("commit"); // Clone it with no checkout w.git.clone_().url(repoToClone.repoPath()).repositoryName("origin").noCheckout().execute(); assertFalse(w.exists("file1")); } public void test_hasSubmodules() throws Exception { w.init(); w.launchCommand("git", "fetch", localMirror(), "tests/getSubmodules:t"); w.git.checkout("t"); assertTrue(w.git.hasGitModules()); w.launchCommand("git", "fetch", localMirror(), "master:t2"); w.git.checkout("t2"); assertFalse(w.git.hasGitModules()); assertFixSubmoduleUrlsThrows(); } /* * core.symlinks is set to false by git for Windows. * It is not set on Linux. * See also JENKINS-22376 and JENKINS-22391 */ @Issue("JENKINS-21168") private void checkSymlinkSetting(WorkingArea area) throws IOException { String expected = SystemUtils.IS_OS_WINDOWS ?
"false" : ""; String symlinkValue = null; try { symlinkValue = w.cmd(true, "git config core.symlinks").trim(); } catch (Exception e) { symlinkValue = e.getMessage(); } assertEquals(expected, symlinkValue); } public void test_init() throws Exception { assertFalse(w.file(".git").exists()); w.git.init(); assertTrue(w.file(".git").exists()); checkSymlinkSetting(w); } public void test_init_() throws Exception { assertFalse(w.file(".git").exists()); w.git.init_().workspace(w.repoPath()).execute(); assertTrue(w.file(".git").exists()); checkSymlinkSetting(w); } public void test_init_bare() throws Exception { assertFalse(w.file(".git").exists()); assertFalse(w.file("refs").exists()); w.git.init_().workspace(w.repoPath()).bare(false).execute(); assertTrue(w.file(".git").exists()); assertFalse(w.file("refs").exists()); checkSymlinkSetting(w); WorkingArea anotherRepo = new WorkingArea(); assertFalse(anotherRepo.file(".git").exists()); assertFalse(anotherRepo.file("refs").exists()); anotherRepo.git.init_().workspace(anotherRepo.repoPath()).bare(true).execute(); assertFalse(anotherRepo.file(".git").exists()); assertTrue(anotherRepo.file("refs").exists()); checkSymlinkSetting(anotherRepo); } @NotImplementedInCliGit // Until submodule rename is fixed public void test_getSubmoduleUrl() throws Exception { w = clone(localMirror()); w.cmd("git checkout tests/getSubmodules"); w.git.submoduleInit(); assertEquals("https://github.com/puppetlabs/puppetlabs-firewall.git", w.igit().getSubmoduleUrl("modules/firewall")); try { w.igit().getSubmoduleUrl("bogus"); fail(); } catch (GitException e) { // expected } } public void test_setSubmoduleUrl() throws Exception { w = clone(localMirror()); w.cmd("git checkout tests/getSubmodules"); w.git.submoduleInit(); String DUMMY = "/dummy"; w.igit().setSubmoduleUrl("modules/firewall", DUMMY); // create a brand new Git object to make sure it's persisted WorkingArea subModuleVerify = new WorkingArea(w.repo); assertEquals(DUMMY, subModuleVerify.igit().getSubmoduleUrl("modules/firewall")); } public void test_prune() throws Exception { // pretend that 'r' is a team repository and ws1 and ws2 are team members WorkingArea r = new WorkingArea(); r.init(true); WorkingArea ws1 = new WorkingArea().init(); WorkingArea ws2 = w.init(); ws1.commitEmpty("c"); ws1.cmd("git remote add origin " + r.repoPath()); ws1.cmd("git push origin master:b1"); ws1.cmd("git push origin master:b2"); ws1.cmd("git push origin master"); ws2.cmd("git remote add origin " + r.repoPath()); ws2.cmd("git fetch origin"); // at this point both ws1&ws2 have several remote tracking branches ws1.cmd("git push origin :b1"); ws1.cmd("git push origin master:b3"); ws2.git.prune(new RemoteConfig(new Config(),"origin")); assertFalse(ws2.exists(".git/refs/remotes/origin/b1")); assertTrue( ws2.exists(".git/refs/remotes/origin/b2")); assertFalse(ws2.exists(".git/refs/remotes/origin/b3")); } public void test_revListAll() throws Exception { w.init(); w.launchCommand("git", "pull", localMirror()); StringBuilder out = new StringBuilder(); for (ObjectId id : w.git.revListAll()) { out.append(id.name()).append('\n'); } String all = w.cmd("git rev-list --all"); assertEquals(all,out.toString()); } public void test_revList_() throws Exception { List<ObjectId> oidList = new ArrayList<>(); w.init(); w.launchCommand("git", "pull", localMirror()); RevListCommand revListCommand = w.git.revList_(); revListCommand.all(); revListCommand.to(oidList); revListCommand.execute(); StringBuilder out = new StringBuilder(); for (ObjectId id : oidList) { 
out.append(id.name()).append('\n'); } String all = w.cmd("git rev-list --all"); assertEquals(all,out.toString()); } public void test_revListFirstParent() throws Exception { w.init(); w.launchCommand("git", "pull", localMirror()); for (Branch b : w.git.getRemoteBranches()) { StringBuilder out = new StringBuilder(); List<ObjectId> oidList = new ArrayList<>(); RevListCommand revListCommand = w.git.revList_(); revListCommand.firstParent(); revListCommand.to(oidList); revListCommand.reference(b.getName()); revListCommand.execute(); for (ObjectId id : oidList) { out.append(id.name()).append('\n'); } String all = w.cmd("git rev-list --first-parent " + b.getName()); assertEquals(all,out.toString()); } } public void test_revList() throws Exception { w.init(); w.launchCommand("git", "pull", localMirror()); for (Branch b : w.git.getRemoteBranches()) { StringBuilder out = new StringBuilder(); for (ObjectId id : w.git.revList(b.getName())) { out.append(id.name()).append('\n'); } String all = w.cmd("git rev-list " + b.getName()); assertEquals(all,out.toString()); } } public void test_merge_strategy() throws Exception { w.init(); w.commitEmpty("init"); w.git.branch("branch1"); w.git.checkout("branch1"); w.touch("file", "content1"); w.git.add("file"); w.git.commit("commit1"); w.git.checkout("master"); w.git.branch("branch2"); w.git.checkout("branch2"); File f = w.touch("file", "content2"); w.git.add("file"); w.git.commit("commit2"); w.git.merge().setStrategy(MergeCommand.Strategy.OURS).setRevisionToMerge(w.git.getHeadRev(w.repoPath(), "branch1")).execute(); assertEquals("merge didn't select OURS content", "content2", FileUtils.readFileToString(f)); } public void test_merge_strategy_correct_fail() throws Exception { w.init(); w.commitEmpty("init"); w.git.branch("branch1"); w.git.checkout("branch1"); w.touch("file", "content1"); w.git.add("file"); w.git.commit("commit1"); w.git.checkout("master"); w.git.branch("branch2"); w.git.checkout("branch2"); w.touch("file", "content2"); w.git.add("file"); w.git.commit("commit2"); try { w.git.merge().setStrategy(MergeCommand.Strategy.RESOLVE).setRevisionToMerge(w.git.getHeadRev(w.repoPath(), "branch1")).execute(); fail(); } catch (GitException e) { // expected } } @Issue("JENKINS-12402") public void test_merge_fast_forward_mode_ff() throws Exception { w.init(); w.commitEmpty("init"); w.git.branch("branch1"); w.git.checkout("branch1"); w.touch("file1", "content1"); w.git.add("file1"); w.git.commit("commit1"); final ObjectId branch1 = w.head(); w.git.checkout("master"); w.git.branch("branch2"); w.git.checkout("branch2"); w.touch("file2", "content2"); w.git.add("file2"); w.git.commit("commit2"); final ObjectId branch2 = w.head(); w.git.checkout("master"); // The first merge is a fast-forward, master moves to branch1 w.git.merge().setGitPluginFastForwardMode(MergeCommand.GitPluginFastForwardMode.FF).setRevisionToMerge(w.git.getHeadRev(w.repoPath(), "branch1")).execute(); assertEquals("Fast-forward merge failed. 
master and branch1 should be the same.",w.head(),branch1); // The second merge calls for fast-forward (FF), but a merge commit will result // This tests that calling for FF gracefully falls back to a commit merge // master moves to a new commit ahead of branch1 and branch2 w.git.merge().setGitPluginFastForwardMode(MergeCommand.GitPluginFastForwardMode.FF).setRevisionToMerge(w.git.getHeadRev(w.repoPath(), "branch2")).execute(); // The merge commit (head) should have branch2 and branch1 as parents List<ObjectId> revList = w.git.revList("HEAD^1"); assertEquals("Merge commit failed. branch1 should be a parent of HEAD but it isn't.",revList.get(0).name(), branch1.name()); revList = w.git.revList("HEAD^2"); assertEquals("Merge commit failed. branch2 should be a parent of HEAD but it isn't.",revList.get(0).name(), branch2.name()); } public void test_merge_fast_forward_mode_ff_only() throws Exception { w.init(); w.commitEmpty("init"); w.git.branch("branch1"); w.git.checkout("branch1"); w.touch("file1", "content1"); w.git.add("file1"); w.git.commit("commit1"); final ObjectId branch1 = w.head(); w.git.checkout("master"); w.git.branch("branch2"); w.git.checkout("branch2"); w.touch("file2", "content2"); w.git.add("file2"); w.git.commit("commit2"); final ObjectId branch2 = w.head(); w.git.checkout("master"); final ObjectId master = w.head(); // The first merge is a fast-forward, master moves to branch1 w.git.merge().setGitPluginFastForwardMode(MergeCommand.GitPluginFastForwardMode.FF_ONLY).setRevisionToMerge(w.git.getHeadRev(w.repoPath(), "branch1")).execute(); assertEquals("Fast-forward merge failed. master and branch1 should be the same but aren't.",w.head(),branch1); // The second merge calls for fast-forward only (FF_ONLY), but a merge commit is required, hence it is expected to fail try { w.git.merge().setGitPluginFastForwardMode(MergeCommand.GitPluginFastForwardMode.FF_ONLY).setRevisionToMerge(w.git.getHeadRev(w.repoPath(), "branch2")).execute(); fail("Exception not thrown: the fast-forward only mode should have failed"); } catch (GitException e) { // expected assertEquals("Fast-forward merge abort failed. master and branch1 should still be the same as the merge was aborted.",w.head(),branch1); } } public void test_merge_fast_forward_mode_no_ff() throws Exception { w.init(); w.commitEmpty("init"); final ObjectId base = w.head(); w.git.branch("branch1"); w.git.checkout("branch1"); w.touch("file1", "content1"); w.git.add("file1"); w.git.commit("commit1"); final ObjectId branch1 = w.head(); w.git.checkout("master"); w.git.branch("branch2"); w.git.checkout("branch2"); w.touch("file2", "content2"); w.git.add("file2"); w.git.commit("commit2"); final ObjectId branch2 = w.head(); w.git.checkout("master"); final ObjectId master = w.head(); // The first merge is normally a fast-forward, but we're calling for a merge commit which is expected to work w.git.merge().setGitPluginFastForwardMode(MergeCommand.GitPluginFastForwardMode.NO_FF).setRevisionToMerge(w.git.getHeadRev(w.repoPath(), "branch1")).execute(); // The first merge will have base and branch1 as parents List<ObjectId> revList = null; revList = w.git.revList("HEAD^1"); assertEquals("Merge commit failed. base should be a parent of HEAD but it isn't.",revList.get(0).name(), base.name()); revList = w.git.revList("HEAD^2"); assertEquals("Merge commit failed. 
branch1 should be a parent of HEAD but it isn't.",revList.get(0).name(), branch1.name()); final ObjectId base2 = w.head(); // Calling for NO_FF when required is expected to work w.git.merge().setGitPluginFastForwardMode(MergeCommand.GitPluginFastForwardMode.NO_FF).setRevisionToMerge(w.git.getHeadRev(w.repoPath(), "branch2")).execute(); // The second merge will have base2 and branch2 as parents revList = w.git.revList("HEAD^1"); assertEquals("Merge commit failed. base2 should be a parent of HEAD but it isn't.",revList.get(0).name(), base2.name()); revList = w.git.revList("HEAD^2"); assertEquals("Merge commit failed. branch2 should be a parent of HEAD but it isn't.",revList.get(0).name(), branch2.name()); } public void test_merge_squash() throws Exception{ w.init(); w.commitEmpty("init"); w.git.branch("branch1"); //First commit to branch1 w.git.checkout("branch1"); w.touch("file1", "content1"); w.git.add("file1"); w.git.commit("commit1"); //Second commit to branch1 w.touch("file2", "content2"); w.git.add("file2"); w.git.commit("commit2"); //Merge branch1 with master, squashing both commits w.git.checkout("master"); w.git.merge().setSquash(true).setRevisionToMerge(w.git.getHeadRev(w.repoPath(), "branch1")).execute(); //Compare commit counts of before and after committing the merge, should be one due to the squashing of commits. final int commitCountBefore = w.git.revList("HEAD").size(); w.git.commit("commitMerge"); final int commitCountAfter = w.git.revList("HEAD").size(); assertEquals("Squash merge failed. Should have merged only one commit.", 1, commitCountAfter - commitCountBefore); } public void test_merge_no_squash() throws Exception{ w.init(); w.commitEmpty("init"); //First commit to branch1 w.git.branch("branch1"); w.git.checkout("branch1"); w.touch("file1", "content1"); w.git.add("file1"); w.git.commit("commit1"); //Second commit to branch1 w.touch("file2", "content2"); w.git.add("file2"); w.git.commit("commit2"); //Merge branch1 with master, without squashing commits. //Compare commit counts of before and after committing the merge, should be two since the commits are not squashed. w.git.checkout("master"); final int commitCountBefore = w.git.revList("HEAD").size(); w.git.merge().setSquash(false).setRevisionToMerge(w.git.getHeadRev(w.repoPath(), "branch1")).execute(); final int commitCountAfter = w.git.revList("HEAD").size(); assertEquals("Squashless merge failed. Should have merged two commits.", 2, commitCountAfter - commitCountBefore); } public void test_merge_no_commit() throws Exception{ w.init(); w.commitEmpty("init"); //Create branch1 and commit a file w.git.branch("branch1"); w.git.checkout("branch1"); w.touch("file1", "content1"); w.git.add("file1"); w.git.commit("commit1"); //Merge branch1 with master, without committing the merge. //Compare commit counts of before and after the merge, should be zero due to the lack of autocommit. w.git.checkout("master"); final int commitCountBefore = w.git.revList("HEAD").size(); w.git.merge().setCommit(false).setGitPluginFastForwardMode(MergeCommand.GitPluginFastForwardMode.NO_FF).setRevisionToMerge(w.git.getHeadRev(w.repoPath(), "branch1")).execute(); final int commitCountAfter = w.git.revList("HEAD").size(); assertEquals("No Commit merge failed. 
Shouldn't have committed any changes.", commitCountBefore, commitCountAfter); } public void test_merge_commit() throws Exception{ w.init(); w.commitEmpty("init"); //Create branch1 and commit a file w.git.branch("branch1"); w.git.checkout("branch1"); w.touch("file1", "content1"); w.git.add("file1"); w.git.commit("commit1"); //Merge branch1 with master, committing the merge. //Compare commit counts of before and after the merge, should be two due to the commit of the file and the commit of the merge. w.git.checkout("master"); final int commitCountBefore = w.git.revList("HEAD").size(); w.git.merge().setCommit(true).setGitPluginFastForwardMode(MergeCommand.GitPluginFastForwardMode.NO_FF).setRevisionToMerge(w.git.getHeadRev(w.repoPath(), "branch1")).execute(); final int commitCountAfter = w.git.revList("HEAD").size(); assertEquals("Commit merge failed. Should have committed the merge.", 2, commitCountAfter - commitCountBefore); } public void test_merge_with_message() throws Exception { w.init(); w.commitEmpty("init"); // First commit to branch1 w.git.branch("branch1"); w.git.checkout("branch1"); w.touch("file1", "content1"); w.git.add("file1"); w.git.commit("commit1"); // Merge branch1 into master w.git.checkout("master"); String mergeMessage = "Merge message to be tested."; w.git.merge().setMessage(mergeMessage).setGitPluginFastForwardMode(MergeCommand.GitPluginFastForwardMode.NO_FF).setRevisionToMerge(w.git.getHeadRev(w.repoPath(), "branch1")).execute(); // Obtain last commit message String resultMessage = w.git.showRevision(w.head()).get(7).trim(); assertEquals("Custom message merge failed. Should have set custom merge message.", mergeMessage, resultMessage); } @Deprecated public void test_merge_refspec() throws Exception { w.init(); w.commitEmpty("init"); w.touch("file-master", "content-master"); w.git.add("file-master"); w.git.commit("commit1-master"); final ObjectId base = w.head(); w.git.branch("branch1"); w.git.checkout("branch1"); w.touch("file1", "content1"); w.git.add("file1"); w.git.commit("commit1-branch1"); final ObjectId branch1 = w.head(); w.cmd("git branch branch2 master"); w.git.checkout("branch2"); File f = w.touch("file2", "content2"); w.git.add("file2"); w.git.commit("commit2-branch2"); final ObjectId branch2 = w.head(); assertTrue("file2 does not exist", f.exists()); assertFalse("file1 exists before merge", w.exists("file1")); assertEquals("Wrong merge-base branch1 branch2", base, w.igit().mergeBase(branch1, branch2)); String badSHA1 = "15c80fb1567f0e88ca855c69e3f17425d515a188"; ObjectId badBase = ObjectId.fromString(badSHA1); try { assertNull("Base unexpected for bad SHA1", w.igit().mergeBase(branch1, badBase)); assertTrue("Exception not thrown by CliGit", w.git instanceof CliGitAPIImpl); } catch (GitException moa) { assertFalse("Exception thrown by CliGit", w.git instanceof CliGitAPIImpl); assertExceptionMessageContains(moa, badSHA1); } try { assertNull("Base unexpected for bad SHA1", w.igit().mergeBase(badBase, branch1)); assertTrue("Exception not thrown by CliGit", w.git instanceof CliGitAPIImpl); } catch (GitException moa) { assertFalse("Exception thrown by CliGit", w.git instanceof CliGitAPIImpl); assertExceptionMessageContains(moa, badSHA1); } w.igit().merge("branch1"); assertTrue("file1 does not exist after merge", w.exists("file1")); /* Git 1.7.1 does not understand the --orphan argument to checkout. 
* Stop the test here on older git versions */ if (!w.cgit().isAtLeastVersion(1, 7, 9, 0)) { return; } w.cmd("git checkout --orphan newroot"); // Create an independent root w.commitEmpty("init-on-newroot"); final ObjectId newRootCommit = w.head(); assertNull("Common root not expected", w.igit().mergeBase(newRootCommit, branch1)); final String remoteUrl = "ssh://mwaite.example.com//var/lib/git/mwaite/jenkins/git-client-plugin.git"; w.git.setRemoteUrl("origin", remoteUrl); assertEquals("Wrong origin default remote", "origin", w.igit().getDefaultRemote("origin")); assertEquals("Wrong invalid default remote", "origin", w.igit().getDefaultRemote("invalid")); } public void test_rebase_passes_without_conflict() throws Exception { w.init(); w.commitEmpty("init"); // First commit to master w.touch("master_file", "master1"); w.git.add("master_file"); w.git.commit("commit-master1"); // Create a feature branch and make a commit w.git.branch("feature1"); w.git.checkout("feature1"); w.touch("feature_file", "feature1"); w.git.add("feature_file"); w.git.commit("commit-feature1"); // Second commit to master w.git.checkout("master"); w.touch("master_file", "master2"); w.git.add("master_file"); w.git.commit("commit-master2"); // Rebase feature commit onto master w.git.checkout("feature1"); w.git.rebase().setUpstream("master").execute(); assertThat("Should've rebased feature1 onto master", w.git.revList("feature1").contains(w.git.revParse("master"))); assertEquals("HEAD should be on the rebased branch", w.git.revParse("HEAD").name(), w.git.revParse("feature1").name()); assertThat("Rebased file should be present in the worktree",w.git.getWorkTree().child("feature_file").exists()); } public void test_rebase_fails_with_conflict() throws Exception { w.init(); w.commitEmpty("init"); // First commit to master w.touch("file", "master1"); w.git.add("file"); w.git.commit("commit-master1"); // Create a feature branch and make a commit w.git.branch("feature1"); w.git.checkout("feature1"); w.touch("file", "feature1"); w.git.add("file"); w.git.commit("commit-feature1"); // Second commit to master w.git.checkout("master"); w.touch("file", "master2"); w.git.add("file"); w.git.commit("commit-master2"); // Rebase feature commit onto master w.git.checkout("feature1"); try { w.git.rebase().setUpstream("master").execute(); fail("Rebase did not throw expected GitException"); } catch (GitException e) { assertEquals("HEAD not reset to the feature branch.", w.git.revParse("HEAD").name(), w.git.revParse("feature1").name()); Status status = new org.eclipse.jgit.api.Git(w.repo()).status().call(); assertTrue("Workspace is not clean", status.isClean()); assertFalse("Workspace has uncommitted changes", status.hasUncommittedChanges()); assertTrue("Workspace has conflicting changes", status.getConflicting().isEmpty()); assertTrue("Workspace has missing changes", status.getMissing().isEmpty()); assertTrue("Workspace has modified files", status.getModified().isEmpty()); assertTrue("Workspace has removed files", status.getRemoved().isEmpty()); assertTrue("Workspace has untracked files", status.getUntracked().isEmpty()); } } /** * Checks that the ChangelogCommand abort() API does not write * output to the destination. Does not check that the abort() API * releases resources. 
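* After the abort, a fresh ChangelogCommand is executed to confirm the commit message and SHA1 are still written.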
*/ public void test_changelog_abort() throws InterruptedException, IOException { final String logMessage = "changelog-abort-test-commit"; w.init(); w.touch("file-changelog-abort", "changelog abort file contents " + java.util.UUID.randomUUID().toString()); w.git.add("file-changelog-abort"); w.git.commit(logMessage); String sha1 = w.git.revParse("HEAD").name(); ChangelogCommand changelogCommand = w.git.changelog(); StringWriter writer = new StringWriter(); changelogCommand.to(writer); /* Abort the changelog, confirm no content was written */ changelogCommand.abort(); assertEquals("aborted changelog wrote data", "", writer.toString()); /* Execute the changelog, confirm expected content was written */ changelogCommand = w.git.changelog(); changelogCommand.to(writer); changelogCommand.execute(); assertTrue("No log message in " + writer.toString(), writer.toString().contains(logMessage)); assertTrue("No SHA1 in " + writer.toString(), writer.toString().contains(sha1)); } @Issue("JENKINS-23299") public void test_getHeadRev() throws Exception { Map<String, ObjectId> heads = w.git.getHeadRev(remoteMirrorURL); ObjectId master = w.git.getHeadRev(remoteMirrorURL, "refs/heads/master"); assertEquals("URL is " + remoteMirrorURL + ", heads is " + heads, master, heads.get("refs/heads/master")); /* Test with a specific tag reference - JENKINS-23299 */ ObjectId knownTag = w.git.getHeadRev(remoteMirrorURL, "refs/tags/git-client-1.10.0"); ObjectId expectedTag = ObjectId.fromString("1fb23708d6b639c22383c8073d6e75051b2a63aa"); // commit SHA1 assertEquals("Wrong SHA1 for git-client-1.10.0 tag", expectedTag, knownTag); } /** * User interface calls getHeadRev without a workspace while * validating user input. This test showed a null pointer * exception in a development version of credential passing to * command line git. The referenced repository is a public * repository, and https access to a public repository is allowed * even if invalid credentials are provided. * * @throws Exception on test failure */ public void test_getHeadRevFromPublicRepoWithInvalidCredential() throws Exception { GitClient remoteGit = Git.with(listener, env).using("git").getClient(); StandardUsernamePasswordCredentials testCredential = new UsernamePasswordCredentialsImpl(CredentialsScope.GLOBAL, "bad-id", "bad-desc", "bad-user", "bad-password"); remoteGit.addDefaultCredentials(testCredential); Map<String, ObjectId> heads = remoteGit.getHeadRev(remoteMirrorURL); ObjectId master = w.git.getHeadRev(remoteMirrorURL, "refs/heads/master"); assertEquals("URL is " + remoteMirrorURL + ", heads is " + heads, master, heads.get("refs/heads/master")); } @Issue("JENKINS-25444") public void test_fetch_delete_cleans() throws Exception { w.init(); w.touch("file1", "old"); w.git.add("file1"); w.git.commit("commit1"); w.touch("file1", "new"); checkoutTimeout = 1 + random.nextInt(60 * 24); w.git.checkout().branch("other").ref(Constants.HEAD).timeout(checkoutTimeout).deleteBranchIfExist(true).execute(); Status status = new org.eclipse.jgit.api.Git(w.repo()).status().call(); assertTrue("Workspace must be clean", status.isClean()); } /** * Test getHeadRev with wildcard matching in the branch name. * Relies on the branches in the git-client-plugin repository * include at least branches named: * master * tests/getSubmodules * * Also relies on a specific return ordering of the values in the * pattern matching performed by getHeadRev, and relies on not * having new branches created which match the patterns and will * occur earlier than the expected value. 
*/ public void test_getHeadRev_wildcards() throws Exception { Map<String, ObjectId> heads = w.git.getHeadRev(localMirror()); ObjectId master = w.git.getHeadRev(localMirror(), "refs/heads/master"); assertEquals("heads is " + heads, heads.get("refs/heads/master"), master); ObjectId wildOrigin = w.git.getHeadRev(localMirror(), "*/master"); assertEquals("heads is " + heads, heads.get("refs/heads/master"), wildOrigin); } /** * Test getHeadRev with namespaces in the branch name * and branch specs containing only the simple branch name. * * TODO: This does not work yet! Fix behaviour and enable test! */ public void test_getHeadRev_namespaces_withSimpleBranchNames() throws Exception { setTimeoutVisibleInCurrentTest(false); File tempRemoteDir = temporaryDirectoryAllocator.allocate(); extract(new ZipFile("src/test/resources/namespaceBranchRepo.zip"), tempRemoteDir); Properties commits = parseLsRemote(new File("src/test/resources/namespaceBranchRepo.ls-remote")); w = clone(tempRemoteDir.getAbsolutePath()); final String remote = tempRemoteDir.getAbsolutePath(); final String[][] checkBranchSpecs = //TODO: Fix and enable test { {"a_tests/b_namespace1/master", commits.getProperty("refs/heads/a_tests/b_namespace1/master")}, // {"a_tests/b_namespace2/master", commits.getProperty("refs/heads/a_tests/b_namespace2/master")}, // {"a_tests/b_namespace3/master", commits.getProperty("refs/heads/a_tests/b_namespace3/master")}, // {"b_namespace3/master", commits.getProperty("refs/heads/b_namespace3/master")}, // {"master", commits.getProperty("refs/heads/master")}, }; for(String[] branch : checkBranchSpecs) { final ObjectId objectId = ObjectId.fromString(branch[1]); final String branchName = branch[0]; check_getHeadRev(remote, branchName, objectId); check_getHeadRev(remote, "remotes/origin/" + branchName, objectId); check_getHeadRev(remote, "refs/heads/" + branchName, objectId); } } /** * Test getHeadRev with namespaces in the branch name * and branch specs starting with "refs/heads/". */ public void test_getHeadRev_namespaces_withRefsHeads() throws Exception { File tempRemoteDir = temporaryDirectoryAllocator.allocate(); extract(new ZipFile("src/test/resources/namespaceBranchRepo.zip"), tempRemoteDir); Properties commits = parseLsRemote(new File("src/test/resources/namespaceBranchRepo.ls-remote")); w = clone(tempRemoteDir.getAbsolutePath()); final String remote = tempRemoteDir.getAbsolutePath(); final String[][] checkBranchSpecs = { {"refs/heads/master", commits.getProperty("refs/heads/master")}, {"refs/heads/a_tests/b_namespace1/master", commits.getProperty("refs/heads/a_tests/b_namespace1/master")}, {"refs/heads/a_tests/b_namespace2/master", commits.getProperty("refs/heads/a_tests/b_namespace2/master")}, {"refs/heads/a_tests/b_namespace3/master", commits.getProperty("refs/heads/a_tests/b_namespace3/master")}, {"refs/heads/b_namespace3/master", commits.getProperty("refs/heads/b_namespace3/master")} }; for(String[] branch : checkBranchSpecs) { final ObjectId objectId = ObjectId.fromString(branch[1]); final String branchName = branch[0]; check_getHeadRev(remote, branchName, objectId); } } /** * Test getHeadRev with branch names which SHOULD BE reserved by Git, but ARE NOT.<br/> * E.g. it is possible to create the following LOCAL (!) branches:<br/> * <ul> * <li> origin/master * <li> remotes/origin/master * <li> refs/heads/master * <li> refs/remotes/origin/master * </ul> * * TODO: This does not work yet! Fix behaviour and enable test!
*/ public void test_getHeadRev_reservedBranchNames() throws Exception { /* REMARK: Local branch names in this test are called exactly like follows! * e.g. origin/master means the branch is called "origin/master", it does NOT mean master branch in remote "origin". * or refs/heads/master means branch called "refs/heads/master" ("refs/heads/refs/heads/master" in the end). */ setTimeoutVisibleInCurrentTest(false); File tempRemoteDir = temporaryDirectoryAllocator.allocate(); extract(new ZipFile("src/test/resources/specialBranchRepo.zip"), tempRemoteDir); Properties commits = parseLsRemote(new File("src/test/resources/specialBranchRepo.ls-remote")); w = clone(tempRemoteDir.getAbsolutePath()); /* * The first entry in the String[2] is the branch name (as specified in the job config). * The second entry is the expected commit. */ final String[][] checkBranchSpecs = {}; //TODO: Fix and enable test // {"master", commits.getProperty("refs/heads/master")}, // {"origin/master", commits.getProperty("refs/heads/master")}, // {"remotes/origin/master", commits.getProperty("refs/heads/master")}, // {"refs/remotes/origin/master", commits.getProperty("refs/heads/refs/remotes/origin/master")}, // {"refs/heads/origin/master", commits.getProperty("refs/heads/origin/master")}, // {"refs/heads/master", commits.getProperty("refs/heads/master")}, // {"refs/heads/refs/heads/master", commits.getProperty("refs/heads/refs/heads/master")}, // {"refs/heads/refs/heads/refs/heads/master", commits.getProperty("refs/heads/refs/heads/refs/heads/master")}, // {"refs/tags/master", commits.getProperty("refs/tags/master^{}")} for(String[] branch : checkBranchSpecs) { check_getHeadRev(tempRemoteDir.getAbsolutePath(), branch[0], ObjectId.fromString(branch[1])); } } /** * Test getRemoteReferences with listing all references */ public void test_getRemoteReferences() throws Exception { Map<String, ObjectId> references = w.git.getRemoteReferences(remoteMirrorURL, null, false, false); assertTrue(references.containsKey("refs/heads/master")); assertTrue(references.containsKey("refs/tags/git-client-1.0.0")); } /** * Test getRemoteReferences with listing references limit to refs/heads or refs/tags */ public void test_getRemoteReferences_withLimitReferences() throws Exception { Map<String, ObjectId> references = w.git.getRemoteReferences(remoteMirrorURL, null, true, false); assertTrue(references.containsKey("refs/heads/master")); assertTrue(!references.containsKey("refs/tags/git-client-1.0.0")); references = w.git.getRemoteReferences(remoteMirrorURL, null, false, true); assertTrue(!references.containsKey("refs/heads/master")); assertTrue(references.containsKey("refs/tags/git-client-1.0.0")); for (String key : references.keySet()) { assertTrue(!key.endsWith("^{}")); } } /** * Test getRemoteReferences with matching pattern */ public void test_getRemoteReferences_withMatchingPattern() throws Exception { Map<String, ObjectId> references = w.git.getRemoteReferences(remoteMirrorURL, "refs/heads/master", true, false); assertTrue(references.containsKey("refs/heads/master")); assertTrue(!references.containsKey("refs/tags/git-client-1.0.0")); references = w.git.getRemoteReferences(remoteMirrorURL, "git-client-*", false, true); assertTrue(!references.containsKey("refs/heads/master")); for (String key : references.keySet()) { assertTrue(key.startsWith("refs/tags/git-client")); } references = new HashMap<>(); try { references = w.git.getRemoteReferences(remoteMirrorURL, "notexists-*", false, false); } catch (GitException ge) { 
assertExceptionMessageContains(ge, "unexpected ls-remote output"); } assertTrue(references.isEmpty()); } /** * Test getRemoteSymbolicReferences with listing all references */ public void test_getRemoteSymbolicReferences() throws Exception { if (!hasWorkingGetRemoteSymbolicReferences()) return; // JUnit 3 replacement for assumeThat Map<String, String> references = w.git.getRemoteSymbolicReferences(remoteMirrorURL, null); assertThat(references, hasEntry(is(Constants.HEAD), is(Constants.R_HEADS + Constants.MASTER))); } protected abstract boolean hasWorkingGetRemoteSymbolicReferences(); /** * Test getRemoteSymbolicReferences with listing all references */ public void test_getRemoteSymbolicReferences_withMatchingPattern() throws Exception { if (!hasWorkingGetRemoteSymbolicReferences()) return; // JUnit 3 replacement for assumeThat Map<String, String> references = w.git.getRemoteSymbolicReferences(remoteMirrorURL, Constants.HEAD); assertThat(references, hasEntry(is(Constants.HEAD), is(Constants.R_HEADS + Constants.MASTER))); assertThat(references.size(), is(1)); } private Properties parseLsRemote(File file) throws IOException { Properties properties = new Properties(); Pattern pattern = Pattern.compile("([a-f0-9]{40})\\s*(.*)"); for(Object lineO : FileUtils.readLines(file)) { String line = ((String)lineO).trim(); Matcher matcher = pattern.matcher(line); if(matcher.matches()) { properties.setProperty(matcher.group(2), matcher.group(1)); } else { System.err.println("ls-remote pattern does not match '" + line + "'"); } } return properties; } private void extract(ZipFile zipFile, File outputDir) throws IOException { Enumeration<? extends ZipEntry> entries = zipFile.entries(); while (entries.hasMoreElements()) { ZipEntry entry = entries.nextElement(); File entryDestination = new File(outputDir, entry.getName()); entryDestination.getParentFile().mkdirs(); if (entry.isDirectory()) entryDestination.mkdirs(); else { try (InputStream in = zipFile.getInputStream(entry); OutputStream out = Files.newOutputStream(entryDestination.toPath());) { org.apache.commons.io.IOUtils.copy(in, out); } } } } private void check_getHeadRev(String remote, String branchSpec, ObjectId expectedObjectId) throws Exception { ObjectId actualObjectId = w.git.getHeadRev(remote, branchSpec); assertNotNull(String.format("Expected ObjectId is null expectedObjectId '%s', remote '%s', branchSpec '%s'.", expectedObjectId, remote, branchSpec), expectedObjectId); assertNotNull(String.format("Actual ObjectId is null. 
expectedObjectId '%s', remote '%s', branchSpec '%s'.", expectedObjectId, remote, branchSpec), actualObjectId); assertEquals(String.format("Actual ObjectId differs from expected one for branchSpec '%s', remote '%s':\n" + "Actual %s,\nExpected %s\n", branchSpec, remote, StringUtils.join(getBranches(actualObjectId), ", "), StringUtils.join(getBranches(expectedObjectId), ", ")), expectedObjectId, actualObjectId); } private List<Branch> getBranches(ObjectId objectId) throws GitException, InterruptedException { List<Branch> matches = new ArrayList<>(); Set<Branch> branches = w.git.getBranches(); for(Branch branch : branches) { if(branch.getSHA1().equals(objectId)) matches.add(branch); } return unmodifiableList(matches); } private void check_headRev(String repoURL, ObjectId expectedId) throws InterruptedException, IOException { final ObjectId originMaster = w.git.getHeadRev(repoURL, "origin/master"); assertEquals("origin/master mismatch", expectedId, originMaster); final ObjectId simpleMaster = w.git.getHeadRev(repoURL, "master"); assertEquals("simple master mismatch", expectedId, simpleMaster); final ObjectId wildcardSCMMaster = w.git.getHeadRev(repoURL, "*/master"); assertEquals("wildcard SCM master mismatch", expectedId, wildcardSCMMaster); /* This assertion may fail if the localMirror has more than * one branch matching the wildcard expression in the call to * getHeadRev. The expression is chosen to be unlikely to * match with typical branch names, while still matching a * known branch name. Should be fine so long as no one creates * branches named like master-master or new-master on the * remote repo */ final ObjectId wildcardEndMaster = w.git.getHeadRev(repoURL, "origin/m*aste?"); assertEquals("wildcard end master mismatch", expectedId, wildcardEndMaster); } public void test_getHeadRev_localMirror() throws Exception { check_headRev(localMirror(), getMirrorHead()); } public void test_getHeadRev_remote() throws Exception { String lsRemote = w.cmd("git ls-remote -h " + remoteMirrorURL + " refs/heads/master"); ObjectId lsRemoteId = ObjectId.fromString(lsRemote.substring(0, 40)); check_headRev(remoteMirrorURL, lsRemoteId); } public void test_getHeadRev_current_directory() throws Exception { w = clone(localMirror()); w.git.checkout("master"); final ObjectId master = w.head(); w.git.branch("branch1"); w.git.checkout("branch1"); w.touch("file1", "branch1 contents " + java.util.UUID.randomUUID().toString()); w.git.add("file1"); w.git.commit("commit1-branch1"); final ObjectId branch1 = w.head(); Map<String, ObjectId> heads = w.git.getHeadRev(w.repoPath()); assertEquals(master, heads.get("refs/heads/master")); assertEquals(branch1, heads.get("refs/heads/branch1")); check_headRev(w.repoPath(), getMirrorHead()); } public void test_getHeadRev_returns_accurate_SHA1_values() throws Exception { /* CliGitAPIImpl had a longstanding bug that it inserted the * same SHA1 in all the values, rather than inserting the SHA1 * which matched the key. 
*/ w = clone(localMirror()); w.git.checkout("master"); final ObjectId master = w.head(); w.git.branch("branch1"); w.git.checkout("branch1"); w.touch("file1", "content1"); w.git.add("file1"); w.git.commit("commit1-branch1"); final ObjectId branch1 = w.head(); w.cmd("git branch branch.2 master"); w.git.checkout("branch.2"); File f = w.touch("file.2", "content2"); w.git.add("file.2"); w.git.commit("commit2-branch.2"); final ObjectId branchDot2 = w.head(); assertTrue("file.2 does not exist", f.exists()); Map<String,ObjectId> heads = w.git.getHeadRev(w.repoPath()); assertEquals("Wrong master in " + heads, master, heads.get("refs/heads/master")); assertEquals("Wrong branch1 in " + heads, branch1, heads.get("refs/heads/branch1")); assertEquals("Wrong branch.2 in " + heads, branchDot2, heads.get("refs/heads/branch.2")); assertEquals("wildcard branch.2 mismatch", branchDot2, w.git.getHeadRev(w.repoPath(), "br*.2")); check_headRev(w.repoPath(), getMirrorHead()); } private void check_changelog_sha1(final String sha1, final String branchName) throws InterruptedException { ChangelogCommand changelogCommand = w.git.changelog(); changelogCommand.max(1); StringWriter writer = new StringWriter(); changelogCommand.to(writer); changelogCommand.execute(); String splitLog[] = writer.toString().split("[\\n\\r]", 3); // Extract first line of changelog assertEquals("Wrong changelog line 1 on branch " + branchName, "commit " + sha1, splitLog[0]); } public void test_changelog() throws Exception { w = clone(localMirror()); String sha1Prev = w.git.revParse("HEAD").name(); w.touch("changelog-file", "changelog-file-content-" + sha1Prev); w.git.add("changelog-file"); w.git.commit("changelog-commit-message"); String sha1 = w.git.revParse("HEAD").name(); check_changelog_sha1(sha1, "master"); } public void test_show_revision_for_merge() throws Exception { w = clone(localMirror()); ObjectId from = ObjectId.fromString("45e76942914664ee19f31d90e6f2edbfe0d13a46"); ObjectId to = ObjectId.fromString("b53374617e85537ec46f86911b5efe3e4e2fa54b"); List<String> revisionDetails = w.git.showRevision(from, to); Collection<String> commits = Collections2.filter(revisionDetails, (String detail) -> detail.startsWith("commit ")); assertEquals(3, commits.size()); assertTrue(commits.contains("commit 4f2964e476776cf59be3e033310f9177bedbf6a8")); // Merge commit is duplicated as have to capture changes that may have been made as part of merge assertTrue(commits.contains("commit b53374617e85537ec46f86911b5efe3e4e2fa54b (from 4f2964e476776cf59be3e033310f9177bedbf6a8)")); assertTrue(commits.contains("commit b53374617e85537ec46f86911b5efe3e4e2fa54b (from 45e76942914664ee19f31d90e6f2edbfe0d13a46)")); Collection<String> diffs = Collections2.filter(revisionDetails, (String detail) -> detail.startsWith(":")); Collection<String> paths = Collections2.transform(diffs, (String diff) -> diff.substring(diff.indexOf('\t')+1).trim() // Windows diff output ^M removed by trim() ); assertTrue(paths.contains(".gitignore")); // Some irrelevant changes will be listed due to merge commit assertTrue(paths.contains("pom.xml")); assertTrue(paths.contains("src/main/java/hudson/plugins/git/GitAPI.java")); assertTrue(paths.contains("src/main/java/org/jenkinsci/plugins/gitclient/CliGitAPIImpl.java")); assertTrue(paths.contains("src/main/java/org/jenkinsci/plugins/gitclient/Git.java")); assertTrue(paths.contains("src/main/java/org/jenkinsci/plugins/gitclient/GitClient.java")); assertTrue(paths.contains("src/main/java/org/jenkinsci/plugins/gitclient/JGitAPIImpl.java")); 
assertTrue(paths.contains("src/test/java/org/jenkinsci/plugins/gitclient/GitAPITestCase.java")); assertTrue(paths.contains("src/test/java/org/jenkinsci/plugins/gitclient/JGitAPIImplTest.java")); // Previous implementation included other commits, and listed irrelevant changes assertFalse(paths.contains("README.md")); } public void test_show_revision_for_merge_exclude_files() throws Exception { w = clone(localMirror()); ObjectId from = ObjectId.fromString("45e76942914664ee19f31d90e6f2edbfe0d13a46"); ObjectId to = ObjectId.fromString("b53374617e85537ec46f86911b5efe3e4e2fa54b"); Boolean useRawOutput = false; List<String> revisionDetails = w.git.showRevision(from, to, useRawOutput); Collection<String> commits = Collections2.filter(revisionDetails, (String detail) -> detail.startsWith("commit ")); assertEquals(2, commits.size()); assertTrue(commits.contains("commit 4f2964e476776cf59be3e033310f9177bedbf6a8")); assertTrue(commits.contains("commit b53374617e85537ec46f86911b5efe3e4e2fa54b")); Collection<String> diffs = Collections2.filter(revisionDetails, (String detail) -> detail.startsWith(":")); assertTrue(diffs.isEmpty()); } private void check_bounded_changelog_sha1(final String sha1Begin, final String sha1End, final String branchName) throws InterruptedException { StringWriter writer = new StringWriter(); w.git.changelog(sha1Begin, sha1End, writer); String splitLog[] = writer.toString().split("[\\n\\r]", 3); // Extract first line of changelog assertEquals("Wrong bounded changelog line 1 on branch " + branchName, "commit " + sha1End, splitLog[0]); assertTrue("Begin sha1 " + sha1Begin + " not in changelog: " + writer.toString(), writer.toString().contains(sha1Begin)); } public void test_changelog_bounded() throws Exception { w = clone(localMirror()); String sha1Prev = w.git.revParse("HEAD").name(); w.touch("changelog-file", "changelog-file-content-" + sha1Prev); w.git.add("changelog-file"); w.git.commit("changelog-commit-message"); String sha1 = w.git.revParse("HEAD").name(); check_bounded_changelog_sha1(sha1Prev, sha1, "master"); } public void test_show_revision_for_single_commit() throws Exception { w = clone(localMirror()); ObjectId to = ObjectId.fromString("51de9eda47ca8dcf03b2af58dfff7355585f0d0c"); List<String> revisionDetails = w.git.showRevision(null, to); Collection<String> commits = Collections2.filter(revisionDetails, (String detail) -> detail.startsWith("commit ")); assertEquals(1, commits.size()); assertTrue(commits.contains("commit 51de9eda47ca8dcf03b2af58dfff7355585f0d0c")); } @Issue("JENKINS-22343") public void test_show_revision_for_first_commit() throws Exception { w.init(); w.touch("a"); w.git.add("a"); w.git.commit("first"); ObjectId first = w.head(); List<String> revisionDetails = w.git.showRevision(first); Collection<String> commits = Collections2.filter(revisionDetails, (String detail) -> detail.startsWith("commit ")); assertTrue("Commits '" + commits + "' missing " + first.getName(), commits.contains("commit " + first.getName())); assertEquals("Commits '" + commits + "' wrong size", 1, commits.size()); } public void test_describe() throws Exception { w.init(); w.commitEmpty("first"); w.tag("-m test t1"); w.touch("a"); w.git.add("a"); w.git.commit("second"); assertThat(w.cmd("git describe").trim(), sharesPrefix(w.git.describe("HEAD"))); w.tag("-m test2 t2"); assertThat(w.cmd("git describe").trim(), sharesPrefix(w.git.describe("HEAD"))); } public void test_getAllLogEntries() throws Exception { /* Use original clone source instead of localMirror. 
The * namespace test modifies the localMirror content by creating * three independent branches very rapidly. Those three * branches may be created within the same second, making it * more difficult for git to provide a time ordered log. The * reference to localMirror will help performance of the C git * implementation, since that will avoid copying content which * is already local. */ String gitUrl = "https://github.com/jenkinsci/git-client-plugin.git"; if (SystemUtils.IS_OS_WINDOWS) { // Does not leak an open file w = clone(gitUrl); } else { // Leaks an open file - unclear why w.git.clone_().url(gitUrl).repositoryName("origin").reference(localMirror()).execute(); } assertEquals( w.cgit().getAllLogEntries("origin/master"), w.igit().getAllLogEntries("origin/master")); } public void test_branchContaining() throws Exception { w.init(); w.commitEmpty("c1"); ObjectId c1 = w.head(); w.cmd("git branch Z "+c1.name()); w.git.checkout("Z"); w.commitEmpty("T"); ObjectId t = w.head(); w.commitEmpty("c2"); ObjectId c2 = w.head(); w.commitEmpty("Z"); w.cmd("git branch X "+c1.name()); w.git.checkout("X"); w.commitEmpty("X"); w.cmd("git branch Y "+c1.name()); w.git.checkout("Y"); w.commitEmpty("c3"); ObjectId c3 = w.head(); w.cmd("git merge --no-ff -m Y "+c2.name()); w.git.deleteBranch("master"); assertEquals(3,w.git.getBranches().size()); // X, Y, and Z assertEquals("X,Y,Z",formatBranches(w.igit().getBranchesContaining(c1.name()))); assertEquals("Y,Z",formatBranches(w.igit().getBranchesContaining(t.name()))); assertEquals("Y",formatBranches(w.igit().getBranchesContaining(c3.name()))); assertEquals("X",formatBranches(w.igit().getBranchesContaining("X"))); } /** * UT for {@link GitClient#getBranchesContaining(String, boolean)}. The main * testing case is retrieving remote branches. * @throws Exception on exceptions occur */ public void test_branchContainingRemote() throws Exception { final WorkingArea r = new WorkingArea(); r.init(); r.commitEmpty("c1"); ObjectId c1 = r.head(); w.git.clone_().url("file://" + r.repoPath()).execute(); final URIish remote = new URIish(Constants.DEFAULT_REMOTE_NAME); final List<RefSpec> refspecs = Collections.singletonList(new RefSpec( @NotImplementedInJGit /* JGit doesn't have timeout */ @NotImplementedInJGit /* JGit lock file management ignored for now */ @NotImplementedInJGit /* JGit lock file management ignored for now */ /** * Test case for auto local branch creation behviour. * This is essentially a stripped down version of {@link #test_branchContainingRemote()} * @throws Exception on exceptions occur */ public void test_checkout_remote_autocreates_local() throws Exception { final WorkingArea r = new WorkingArea(); r.init(); r.commitEmpty("c1"); w.git.clone_().url("file://" + r.repoPath()).execute(); final URIish remote = new URIish(Constants.DEFAULT_REMOTE_NAME); final List<RefSpec> refspecs = Collections.singletonList(new RefSpec( /* The most critical use cases of isBareRepository respond the * same for both the JGit implementation and the CliGit * implementation. Those are asserted first in this section of * assertions. 
*/ @Deprecated public void test_isBareRepository_working_repoPath_dot_git() throws IOException, InterruptedException { w.init(); w.commitEmpty("Not-a-bare-repository-false-repoPath-dot-git"); assertFalse("repoPath/.git is a bare repository", w.igit().isBareRepository(w.repoPath() + File.separator + ".git")); } @Deprecated public void test_isBareRepository_working_null() throws IOException, InterruptedException { w.init(); w.commitEmpty("Not-a-bare-repository-working-null"); try { assertFalse("null is a bare repository", w.igit().isBareRepository(null)); fail("Did not throw expected exception"); } catch (GitException ge) { assertExceptionMessageContains(ge, "not a git repository"); } } @Deprecated public void test_isBareRepository_bare_null() throws IOException, InterruptedException { w.init(true); try { assertTrue("null is not a bare repository", w.igit().isBareRepository(null)); fail("Did not throw expected exception"); } catch (GitException ge) { assertExceptionMessageContains(ge, "not a git repository"); } } @Deprecated public void test_isBareRepository_bare_repoPath() throws IOException, InterruptedException { w.init(true); assertTrue("repoPath is not a bare repository", w.igit().isBareRepository(w.repoPath())); assertTrue("abs(.) is not a bare repository", w.igit().isBareRepository(w.file(".").getAbsolutePath())); } @Deprecated public void test_isBareRepository_working_no_arg() throws IOException, InterruptedException { w.init(); w.commitEmpty("Not-a-bare-repository-no-arg"); assertFalse("no arg is a bare repository", w.igit().isBareRepository()); } @Deprecated public void test_isBareRepository_bare_no_arg() throws IOException, InterruptedException { w.init(true); assertTrue("no arg is not a bare repository", w.igit().isBareRepository()); } @Deprecated public void test_isBareRepository_working_empty_string() throws IOException, InterruptedException { w.init(); w.commitEmpty("Not-a-bare-repository-empty-string"); assertFalse("empty string is a bare repository", w.igit().isBareRepository("")); } @Deprecated public void test_isBareRepository_bare_empty_string() throws IOException, InterruptedException { w.init(true); assertTrue("empty string is not a bare repository", w.igit().isBareRepository("")); } /* The less critical assertions do not respond the same for the * JGit and the CliGit implementation. They are implemented here * so that the current behavior is described in tests and can be * used to assure that changes to current behavior are * detected. */ // Fails on both JGit and CliGit, though with different failure modes // @Deprecated // public void test_isBareRepository_working_repoPath() throws IOException, InterruptedException { // w.init(); // w.commitEmpty("Not-a-bare-repository-working-repoPath-dot-git"); // assertFalse("repoPath is a bare repository", w.igit().isBareRepository(w.repoPath())); // assertFalse("abs(.) is a bare repository", w.igit().isBareRepository(w.file(".").getAbsolutePath())); @Deprecated public void test_isBareRepository_working_dot() throws IOException, InterruptedException { w.init(); w.commitEmpty("Not-a-bare-repository-working-dot"); try { assertFalse(". is a bare repository", w.igit().isBareRepository(".")); if (w.git instanceof CliGitAPIImpl) { /* No exception from JGit */ fail("Did not throw expected exception"); } } catch (GitException ge) { assertExceptionMessageContains(ge, "not a git repository"); } } @Deprecated public void test_isBareRepository_bare_dot() throws IOException, InterruptedException { w.init(true); assertTrue(". 
is not a bare repository", w.igit().isBareRepository(".")); } @Deprecated public void test_isBareRepository_working_dot_git() throws IOException, InterruptedException { w.init(); w.commitEmpty("Not-a-bare-repository-dot-git"); assertFalse(".git is a bare repository", w.igit().isBareRepository(".git")); } @Deprecated public void test_isBareRepository_bare_dot_git() throws IOException, InterruptedException { w.init(true); /* Bare repository does not have a .git directory. This is * another no-such-location test but is included here for * consistency. */ try { /* JGit knows that w.igit() has a workspace, and asks the workspace * if it is bare. That seems more correct than relying on testing * a specific file that the repository is bare. JGit behaves better * than CliGit in this case. */ assertTrue("non-existent .git is in a bare repository", w.igit().isBareRepository(".git")); /* JGit will not throw an exception - it knows the repo is bare */ /* CliGit throws an exception so should not reach the next assertion */ assertFalse("CliGitAPIImpl did not throw expected exception", w.igit() instanceof CliGitAPIImpl); } catch (GitException ge) { /* Only enters this path for CliGit */ assertExceptionMessageContains(ge, "not a git repository"); } } @Deprecated public void test_isBareRepository_working_no_such_location() throws IOException, InterruptedException { w.init(); w.commitEmpty("Not-a-bare-repository-working-no-such-location"); try { assertFalse("non-existent location is in a bare repository", w.igit().isBareRepository("no-such-location")); /* JGit will not throw an exception - it knows the repo is not bare */ /* CliGit throws an exception so should not reach the next assertion */ assertFalse("CliGitAPIImpl did not throw expected exception", w.igit() instanceof CliGitAPIImpl); } catch (GitException ge) { /* Only enters this path for CliGit */ assertExceptionMessageContains(ge, "not a git repository"); } } @Deprecated public void test_isBareRepository_bare_no_such_location() throws IOException, InterruptedException { w.init(true); try { assertTrue("non-existent location is in a bare repository", w.igit().isBareRepository("no-such-location")); /* JGit will not throw an exception - it knows the repo is not bare */ /* CliGit throws an exception so should not reach the next assertion */ assertFalse("CliGitAPIImpl did not throw expected exception", w.igit() instanceof CliGitAPIImpl); } catch (GitException ge) { /* Only enters this path for CliGit */ assertExceptionMessageContains(ge, "not a git repository"); } } public void test_checkoutBranchFailure() throws Exception { w = clone(localMirror()); File lock = new File(w.repo, ".git/index.lock"); try { FileUtils.touch(lock); w.git.checkoutBranch("somebranch", "master"); fail(); } catch (GitLockFailedException e) { // expected } finally { lock.delete(); } } @Deprecated public void test_reset() throws IOException, InterruptedException { w.init(); /* No valid HEAD yet - nothing to reset, should give no error */ w.igit().reset(false); w.igit().reset(true); w.touch("committed-file", "committed-file content " + java.util.UUID.randomUUID().toString()); w.git.add("committed-file"); w.git.commit("commit1"); assertTrue("committed-file missing at commit1", w.file("committed-file").exists()); assertFalse("added-file exists at commit1", w.file("added-file").exists()); assertFalse("touched-file exists at commit1", w.file("added-file").exists()); w.cmd("git rm committed-file"); w.touch("added-file", "File 2 content " + java.util.UUID.randomUUID().toString()); 
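// The reset assertions below rely on: reset(false) (the "soft" reset in this API) un-adds staged files
// but leaves the working tree alone, while reset(true) (hard) restores the removed committed-file,
// drops the staged-but-uncommitted added-file, and leaves the untracked touched-file in place.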
w.git.add("added-file"); w.touch("touched-file", "File 3 content " + java.util.UUID.randomUUID().toString()); assertFalse("committed-file exists", w.file("committed-file").exists()); assertTrue("added-file missing", w.file("added-file").exists()); assertTrue("touched-file missing", w.file("touched-file").exists()); w.igit().reset(false); assertFalse("committed-file exists", w.file("committed-file").exists()); assertTrue("added-file missing", w.file("added-file").exists()); assertTrue("touched-file missing", w.file("touched-file").exists()); w.git.add("added-file"); /* Add the file which soft reset "unadded" */ w.igit().reset(true); assertTrue("committed-file missing", w.file("committed-file").exists()); assertFalse("added-file exists at hard reset", w.file("added-file").exists()); assertTrue("touched-file missing", w.file("touched-file").exists()); final String remoteUrl = "git@github.com:MarkEWaite/git-client-plugin.git"; w.git.setRemoteUrl("origin", remoteUrl); w.git.setRemoteUrl("ndeloof", "git@github.com:ndeloof/git-client-plugin.git"); assertEquals("Wrong origin default remote", "origin", w.igit().getDefaultRemote("origin")); assertEquals("Wrong ndeloof default remote", "ndeloof", w.igit().getDefaultRemote("ndeloof")); /* CliGitAPIImpl and JGitAPIImpl return different ordered lists for default remote if invalid */ assertEquals("Wrong invalid default remote", w.git instanceof CliGitAPIImpl ? "ndeloof" : "origin", w.igit().getDefaultRemote("invalid")); } private static final int MAX_PATH = 256; private void commitFile(String dirName, String fileName, boolean longpathsEnabled) throws Exception { assertTrue("Didn't mkdir " + dirName, w.file(dirName).mkdir()); String fullName = dirName + File.separator + fileName; w.touch(fullName, fullName + " content " + UUID.randomUUID().toString()); boolean shouldThrow = !longpathsEnabled && SystemUtils.IS_OS_WINDOWS && w.git instanceof CliGitAPIImpl && w.cgit().isAtLeastVersion(1, 9, 0, 0) && !w.cgit().isAtLeastVersion(2, 8, 0, 0) && (new File(fullName)).getAbsolutePath().length() > MAX_PATH; try { w.git.add(fullName); w.git.commit("commit-" + fileName); assertFalse("unexpected success " + fullName, shouldThrow); } catch (GitException ge) { assertEquals("Wrong message", "Cannot add " + fullName, ge.getMessage()); } assertTrue("file " + fullName + " missing at commit", w.file(fullName).exists()); } private void commitFile(String dirName, String fileName) throws Exception { commitFile(dirName, fileName, false); } /** * msysgit prior to 1.9 forbids file names longer than MAXPATH. * msysgit 1.9 and later allows longer paths if core.longpaths is * set to true. * * JGit does not have that limitation. 
*/ public void check_longpaths(boolean longpathsEnabled) throws Exception { String shortName = "0123456789abcdef" + "ghijklmnopqrstuv"; String longName = shortName + shortName + shortName + shortName; String dirName1 = longName; commitFile(dirName1, "file1a", longpathsEnabled); String dirName2 = dirName1 + File.separator + longName; commitFile(dirName2, "file2b", longpathsEnabled); String dirName3 = dirName2 + File.separator + longName; commitFile(dirName3, "file3c", longpathsEnabled); String dirName4 = dirName3 + File.separator + longName; commitFile(dirName4, "file4d", longpathsEnabled); String dirName5 = dirName4 + File.separator + longName; commitFile(dirName5, "file5e", longpathsEnabled); } private String getConfigValue(File workingDir, String name) throws IOException, InterruptedException { String[] args = {"git", "config", "--get", name}; ByteArrayOutputStream out = new ByteArrayOutputStream(); int st = new Launcher.LocalLauncher(listener).launch().pwd(workingDir).cmds(args).stdout(out).join(); String result = out.toString(); if (st != 0 && result != null && !result.isEmpty()) { fail("git config --get " + name + " failed with result: " + result); } return out.toString().trim(); } private String getHomeConfigValue(String name) throws IOException, InterruptedException { return getConfigValue(new File(System.getProperty("user.home")), name); } private void assert_longpaths(boolean expectedLongPathSetting) throws IOException, InterruptedException { String value = getHomeConfigValue("core.longpaths"); boolean longPathSetting = Boolean.valueOf(value); assertEquals("Wrong value: '" + value + "'", expectedLongPathSetting, longPathSetting); } private void assert_longpaths(WorkingArea workingArea, boolean expectedLongPathSetting) throws IOException, InterruptedException { String value = getConfigValue(workingArea.repo, "core.longpaths"); boolean longPathSetting = Boolean.valueOf(value); assertEquals("Wrong value: '" + value + "'", expectedLongPathSetting, longPathSetting); } public void test_longpaths_default() throws Exception { assert_longpaths(false); w.init(); assert_longpaths(w, false); check_longpaths(false); assert_longpaths(w, false); } @NotImplementedInJGit /* Not implemented in JGit because it is not needed there */ public void test_longpaths_enabled() throws Exception { assert_longpaths(false); w.init(); assert_longpaths(w, false); w.cmd("git config core.longpaths true"); assert_longpaths(w, true); check_longpaths(true); assert_longpaths(w, true); } @NotImplementedInJGit /* Not implemented in JGit because it is not needed there */ public void test_longpaths_disabled() throws Exception { assert_longpaths(false); w.init(); assert_longpaths(w, false); w.cmd("git config core.longpaths false"); assert_longpaths(w, false); check_longpaths(false); assert_longpaths(w, false); } /** * Returns the prefix for the remote branches while querying them. * @return remote branch prefix, for example, "remotes/" */ protected abstract String getRemoteBranchPrefix(); /** * Test parsing of changelog with unicode characters in commit messages. 
*/ @Issue({"JENKINS-6203", "JENKINS-14798", "JENKINS-23091"}) public void test_unicodeCharsInChangelog() throws Exception { File tempRemoteDir = temporaryDirectoryAllocator.allocate(); extract(new ZipFile("src/test/resources/unicodeCharsInChangelogRepo.zip"), tempRemoteDir); File pathToTempRepo = new File(tempRemoteDir, "unicodeCharsInChangelogRepo"); w = clone(pathToTempRepo.getAbsolutePath()); // w.git.changelog gives us strings // We want to collect all the strings and check that unicode characters are still there. StringWriter sw = new StringWriter(); w.git.changelog("v0", "vLast", sw); String content = sw.toString(); assertTrue(content.contains("hello in English: hello")); assertTrue(content.contains("hello in Russian: \u043F\u0440\u0438\u0432\u0435\u0442 (priv\u00E9t)")); assertTrue(content.contains("hello in Chinese: \u4F60\u597D (n\u01D0 h\u01CEo)")); assertTrue(content.contains("hello in French: \u00C7a va ?")); assertTrue(content.contains("goodbye in German: Tsch\u00FCss")); } /** * Multi-branch pipeline plugin and other AbstractGitSCMSource callers were * initially using JGit as their implementation, and developed an unexpected * dependency on JGit behavior. JGit init() (in JGit 3.7 at least) creates * the directory if it does not exist. Rather than change the multi-branch * pipeline when the git client plugin was adapted to allow either git or * jgit, instead the git.init() method was changed to create the target * directory if it does not exist. * * Low risk from that change of behavior, since a non-existent directory * caused the command line git init() method to consistently throw an * exception. * * @throws java.lang.Exception on error */ public void test_git_init_creates_directory_if_needed() throws Exception { File nonexistentDir = new File(UUID.randomUUID().toString()); assertFalse("Dir unexpectedly exists at start of test", nonexistentDir.exists()); try { GitClient git = setupGitAPI(nonexistentDir); git.init(); } finally { FileUtils.deleteDirectory(nonexistentDir); } } @Issue("JENKINS-40023") public void test_changelog_with_merge_commit_and_max_log_history() throws Exception { w.init(); w.commitEmpty("init"); // First commit to branch-1 w.git.branch("branch-1"); w.git.checkout("branch-1"); w.touch("file-1", "content-1"); w.git.add("file-1"); w.git.commit("commit-1"); String commitSha1 = w.git.revParse("HEAD").name(); // Merge branch-1 into master w.git.checkout("master"); String mergeMessage = "Merge message to be tested."; w.git.merge().setMessage(mergeMessage).setGitPluginFastForwardMode(MergeCommand.GitPluginFastForwardMode.NO_FF).setRevisionToMerge(w.git.getHeadRev(w.repoPath(), "branch-1")).execute(); /* JGit, and git 1.7.1 handle merge commits in changelog * differently than git 1.7.9 and later. See JENKINS-40023. */ int maxlimit; if (w.git instanceof CliGitAPIImpl) { if (!w.cgit().isAtLeastVersion(1, 7, 9, 0)) { return; /* git 1.7.1 is too old, changelog is too different */ } maxlimit = 1; } else { maxlimit = 2; } StringWriter writer = new StringWriter(); w.git.changelog().max(maxlimit).to(writer).execute(); assertThat(writer.toString(),not(isEmptyString())); } /** inline ${@link hudson.Functions#isWindows()} to prevent a transient remote classloader issue */ private boolean isWindows() { return File.pathSeparatorChar==';'; } private void withSystemLocaleReporting(String fileName, TestedCode code) throws Exception { try { code.run(); } catch (GitException ge) { // Exception message should contain the actual file name. // It may just contain ? 
for characters that are not encoded correctly due to the system locale. // If such a mangled file name is seen instead, throw a clear exception to indicate the root cause. assertTrue("System locale does not support filename '" + fileName + "'", ge.getMessage().contains("?")); // Rethrow exception for all other issues. throw ge; } } @FunctionalInterface interface TestedCode { void run() throws Exception; } private WorkingArea setupRepositoryWithSubmodule() throws Exception { WorkingArea workingArea = new WorkingArea(); File repositoryDir = workingArea.file("dir-repository"); File submoduleDir = workingArea.file("dir-submodule"); assertTrue("did not create dir " + repositoryDir.getName(), repositoryDir.mkdir()); assertTrue("did not create dir " + submoduleDir.getName(), submoduleDir.mkdir()); WorkingArea submoduleWorkingArea = new WorkingArea(submoduleDir).init(); for (int commit = 1; commit <= 5; commit++) { submoduleWorkingArea.touch("file", String.format("submodule content-%d", commit)); submoduleWorkingArea.cgit().add("file"); submoduleWorkingArea.cgit().commit(String.format("submodule commit-%d", commit)); } WorkingArea repositoryWorkingArea = new WorkingArea(repositoryDir).init(); repositoryWorkingArea.commitEmpty("init"); repositoryWorkingArea.cgit().add("."); repositoryWorkingArea.cgit().addSubmodule("file://" + submoduleDir.getAbsolutePath(), "submodule"); repositoryWorkingArea.cgit().commit("submodule"); return workingArea; } }
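/* Hedged illustration (not part of the test class above): a minimal sketch of the fluent
 * GitClient API these tests exercise. It assumes the usual git-client-plugin entry point
 * Git.with(listener, env).in(workDir).using("git").getClient(); listener, env and workDir
 * are placeholders supplied by the caller. */
class GitClientUsageSketch {
    static void initCommitAndLog(hudson.model.TaskListener listener, hudson.EnvVars env, java.io.File workDir) throws Exception {
        org.jenkinsci.plugins.gitclient.GitClient git =
                org.jenkinsci.plugins.gitclient.Git.with(listener, env).in(workDir).using("git").getClient();
        git.init(); // creates workDir if it does not exist (see test_git_init_creates_directory_if_needed)
        java.nio.file.Files.write(new java.io.File(workDir, "README.md").toPath(),
                "hello".getBytes(java.nio.charset.StandardCharsets.UTF_8));
        git.add("README.md");
        git.commit("initial commit");
        java.io.StringWriter changes = new java.io.StringWriter();
        git.changelog().max(10).to(changes).execute(); // same ChangelogCommand chain as test_changelog_abort
    }
}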
package org.mockitousage.junitrule; import org.assertj.core.api.Assertions; import org.junit.After; import org.junit.Ignore; import org.junit.Test; import org.junit.runners.model.FrameworkMethod; import org.junit.runners.model.Statement; import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.exceptions.base.MockitoAssertionError; import org.mockito.internal.junit.JUnitRule; import org.mockito.internal.util.MockitoLogger; import org.mockitousage.IMethods; import org.mockitoutil.TestBase; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; import static org.mockito.BDDMockito.given; import static org.mockito.Mockito.*; public class StrictJUnitRuleTest extends TestBase { private MockitoLogger logger = new MockitoLogger() { public void log(Object what) { throw new AssertionError("This implementation of rule should not warn about anything"); } }; private JUnitRule jUnitRule = new JUnitRule(logger, JUnitRule.Strictness.STRICT_STUBS); private FrameworkMethod dummy = mock(FrameworkMethod.class); @After public void after() { //so that the validate framework usage exceptions do not collide with the tests here resetState(); } @Test public void ok_when_no_stubbings() throws Throwable { run(new MockitoStatement() { public void evaluate(IMethods mock, IMethods mock2) throws Throwable { mock.simpleMethod(); Mockito.verify(mock).simpleMethod(); } }); } @Test public void ok_when_all_stubbings_used() throws Throwable { run(new MockitoStatement() { public void evaluate(IMethods mock1, IMethods mock2) throws Throwable { IMethods mock = mock(IMethods.class); given(mock.simpleMethod(10)).willReturn("foo"); mock.simpleMethod(10); } }); } @Test public void ok_when_used_and_mismatched_argument() throws Throwable { run(new MockitoStatement() { public void evaluate(IMethods mock, IMethods mock2) throws Throwable { given(mock.simpleMethod(10)).willReturn("foo"); mock.simpleMethod(10); mock.simpleMethod(15); } }); } @Test public void fails_when_unused_stubbings() throws Throwable { try { //when run(new MockitoStatement() { public void evaluate(IMethods mock, IMethods mock2) throws Throwable { given(mock.simpleMethod(10)).willReturn("foo"); mock2.simpleMethod(15); } }); //then fail(); } catch (MockitoAssertionError e) { Assertions.assertThat(e.getMessage()).startsWith("Unused stubbings"); } } @Test public void test_failure_trumps_unused_stubbings() throws Throwable { try { //when run(new MockitoStatement() { public void evaluate(IMethods mock, IMethods mock2) throws Throwable { given(mock.simpleMethod(10)).willReturn("foo"); mock.otherMethod(); throw new AssertionError("x"); } }); //then fail(); } catch (AssertionError e) { assertEquals("x", e.getMessage()); } } @Test public void fails_fast_when_stubbing_invoked_with_different_argument() throws Throwable { try { //when run(new MockitoStatement() { public void evaluate(IMethods mock, IMethods mock2) throws Throwable { //stubbing in the test code: given(mock.simpleMethod(10)).willReturn("foo"); //invocation in the code under test uses different argument and should fail immediately //this helps with debugging and is essential for Mockito strictness mock.simpleMethod(15); } }); //then fail(); } catch (MockitoAssertionError e) { assertThat(e.getMessage()).startsWith("Argument mismatch"); } } @Test public void verify_no_more_interactions_ignores_stubs() throws Throwable { //when run(new MockitoStatement() { public void evaluate(IMethods mock, IMethods mock2) throws 
Throwable { //in test: given(mock.simpleMethod(10)).willReturn("foo"); //in code: mock.simpleMethod(10); //implicitly verifies the stubbing mock.otherMethod(); //in test: verify(mock).otherMethod(); verifyNoMoreInteractions(mock); } }); } @Test public void unused_stubs_with_multiple_mocks() throws Throwable { try { //when run(new MockitoStatement() { public void evaluate(IMethods mock1, IMethods mock2) throws Throwable { given(mock1.simpleMethod(10)).willReturn("foo"); given(mock2.simpleMethod(20)).willReturn("foo"); mock1.otherMethod(); mock2.booleanObjectReturningMethod(); } }); //then fail(); } catch (MockitoAssertionError e) { assertThat(e.getMessage()).startsWith("Unused stubbings"); } } private void run(MockitoStatement statement) throws Throwable { jUnitRule.apply(statement, dummy, new DummyTestCase()).evaluate(); } public static class DummyTestCase { @Mock private IMethods mock; } abstract class MockitoStatement extends Statement { abstract void evaluate(IMethods mock1, IMethods mock2) throws Throwable; @Override public void evaluate() throws Throwable { evaluate(mock(IMethods.class), mock(IMethods.class)); } } }
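/* Hedged illustration (not part of the rule test above): how the strict-stubs behaviour verified
 * here is typically enabled from user code, assuming a Mockito version that exposes the public
 * MockitoJUnit.rule().strictness(Strictness.STRICT_STUBS) API (the test above drives the internal
 * JUnitRule directly). */
public class StrictStubsUsageSketch {
    @org.junit.Rule
    public org.mockito.junit.MockitoRule mockito =
            org.mockito.junit.MockitoJUnit.rule().strictness(org.mockito.quality.Strictness.STRICT_STUBS);

    @org.mockito.Mock org.mockitousage.IMethods mock;

    @org.junit.Test
    public void every_stubbing_is_used() {
        org.mockito.BDDMockito.given(mock.simpleMethod(10)).willReturn("foo");
        mock.simpleMethod(10); // an unused stubbing or a mismatched argument would fail the test
    }
}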
package seedu.todo.guitests; import static org.junit.Assert.assertEquals; import java.time.LocalDateTime; import org.junit.Before; import org.junit.Test; import seedu.todo.commons.util.DateUtil; import seedu.todo.controllers.CompleteTaskController; import seedu.todo.controllers.UncompleteTaskController; import seedu.todo.models.Event; import seedu.todo.models.Task; /** * @@author A0093907W */ public class CompleteUncompleteTaskTest extends GuiTest { private final LocalDateTime oneDayFromNow = LocalDateTime.now().plusDays(1); private final String oneDayFromNowString = DateUtil.formatDate(oneDayFromNow); private final String oneDayFromNowIsoString = DateUtil.formatIsoDate(oneDayFromNow); private final LocalDateTime twoDaysFromNow = LocalDateTime.now().plusDays(2); private final String twoDaysFromNowString = DateUtil.formatDate(twoDaysFromNow); private final String twoDaysFromNowIsoString = DateUtil.formatIsoDate(twoDaysFromNow); private final LocalDateTime oneDayToNow = LocalDateTime.now().minusDays(1); private final String oneDayToNowString = DateUtil.formatDate(oneDayToNow); private final String oneDayToNowIsoString = DateUtil.formatIsoDate(oneDayToNow); String commandAdd1 = String.format("add task Buy KOI by \"%s 8pm\"", oneDayToNowString); Task task1 = new Task(); String commandAdd2 = String.format("add task Buy Milk by \"%s 9pm\"", oneDayFromNowString); Task task2 = new Task(); String commandAdd3 = String.format("add event Some Event from \"%s 4pm\" to \"%s 5pm\"", twoDaysFromNowString, twoDaysFromNowString); Event event3 = new Event(); public CompleteUncompleteTaskTest() { task1.setName("Buy KOI"); task1.setDueDate(DateUtil.parseDateTime( String.format("%s 20:00:00", oneDayToNowIsoString))); task2.setName("Buy Milk"); task2.setDueDate(DateUtil.parseDateTime( String.format("%s 21:00:00", oneDayFromNowIsoString))); event3.setName("Some Event"); event3.setStartDate(DateUtil.parseDateTime( String.format("%s 16:00:00", twoDaysFromNowIsoString))); event3.setEndDate(DateUtil.parseDateTime( String.format("%s 17:00:00", twoDaysFromNowIsoString))); } @Before public void fixtures() { console.runCommand("clear"); console.runCommand(commandAdd1); console.runCommand(commandAdd2); console.runCommand(commandAdd3); } @Test public void complete_futureTask_show() { assertTaskVisibleAfterCmd("complete 2", task2); } @Test public void complete_overdueTask_hide() { assertTaskNotVisibleAfterCmd("complete 1", task1); } @Test public void uncomplete_futureTask_show() { console.runCommand("complete 2"); assertTaskVisibleAfterCmd("uncomplete 2", task2); } @Test public void uncomplete_overdueTask_show() { console.runCommand("complete 1"); console.runCommand("list completed"); assertTaskVisibleAfterCmd("uncomplete 1", task1); } @Test public void complete_event_error() { console.runCommand("complete 3"); String consoleMessage = CompleteTaskController.MESSAGE_CANNOT_COMPLETE_EVENT; assertEquals(consoleMessage, console.getConsoleTextArea()); } @Test public void uncomplete_event_error() { console.runCommand("uncomplete 3"); String consoleMessage = UncompleteTaskController.MESSAGE_CANNOT_UNCOMPLETE_EVENT; assertEquals(consoleMessage, console.getConsoleTextArea()); } @Test public void complete_completedTask_error() { console.runCommand("complete 2"); console.runCommand("complete 2"); String consoleMessage = CompleteTaskController.MESSAGE_ALREADY_COMPLETED; assertEquals(consoleMessage, console.getConsoleTextArea()); } @Test public void uncomplete_uncompleteTask_error() { console.runCommand("uncomplete 1"); String 
consoleMessage = UncompleteTaskController.MESSAGE_ALREADY_INCOMPLETE; assertEquals(consoleMessage, console.getConsoleTextArea()); } }
package nucleus.presenter; import android.os.Bundle; import android.support.annotation.CallSuper; import android.support.annotation.Nullable; import java.util.ArrayList; import java.util.HashMap; import java.util.Map; import nucleus.presenter.delivery.DeliverFirst; import nucleus.presenter.delivery.DeliverLatestCache; import nucleus.presenter.delivery.DeliverReplay; import nucleus.presenter.delivery.Delivery; import rx.Observable; import rx.Subscription; import rx.functions.Action1; import rx.functions.Action2; import rx.functions.Func0; import rx.subjects.BehaviorSubject; import rx.subscriptions.CompositeSubscription; /** * This is an extension of {@link Presenter} which provides RxJava functionality. * * @param <View> a type of view. */ public class RxPresenter<View> extends Presenter<View> { private static final String REQUESTED_KEY = RxPresenter.class.getName() + "#requested"; private final BehaviorSubject<View> views = BehaviorSubject.create(); private final CompositeSubscription subscriptions = new CompositeSubscription(); private final HashMap<Integer, Func0<Subscription>> restartables = new HashMap<>(); private final HashMap<Integer, Subscription> restartableSubscriptions = new HashMap<>(); private final ArrayList<Integer> requested = new ArrayList<>(); /** * Returns an {@link rx.Observable} that emits the current attached view or null. * See {@link BehaviorSubject} for more information. * * @return an observable that emits the current attached view or null. */ public Observable<View> view() { return views; } /** * Registers a subscription to automatically unsubscribe it during onDestroy. * See {@link CompositeSubscription#add(Subscription) for details.} * * @param subscription a subscription to add. */ public void add(Subscription subscription) { subscriptions.add(subscription); } /** * Removes and unsubscribes a subscription that has been registered with {@link #add} previously. * See {@link CompositeSubscription#remove(Subscription)} for details. * * @param subscription a subscription to remove. */ public void remove(Subscription subscription) { subscriptions.remove(subscription); } /** * A restartable is any RxJava observable that can be started (subscribed) and * should be automatically restarted (re-subscribed) after a process restart if * it was still subscribed at the moment of saving presenter's state. * * Registers a factory. Re-subscribes the restartable after the process restart. * * @param restartableId id of the restartable * @param factory factory of the restartable */ public void restartable(int restartableId, Func0<Subscription> factory) { restartables.put(restartableId, factory); if (requested.contains(restartableId)) start(restartableId); } /** * Starts the given restartable. * * @param restartableId id of the restartable */ public void start(int restartableId) { stop(restartableId); requested.add(restartableId); restartableSubscriptions.put(restartableId, restartables.get(restartableId).call()); } /** * Unsubscribes a restartable * * @param restartableId id of a restartable. */ public void stop(int restartableId) { requested.remove((Integer) restartableId); Subscription subscription = restartableSubscriptions.get(restartableId); if (subscription != null) subscription.unsubscribe(); } /** * Checks if a restartable is unsubscribed. * * @param restartableId id of the restartable. * @return true if the subscription is null or unsubscribed, false otherwise. 
*/ public boolean isUnsubscribed(int restartableId) { Subscription subscription = restartableSubscriptions.get(restartableId); return subscription == null || subscription.isUnsubscribed(); } /** * This is a shortcut that can be used instead of combining together * {@link #restartable(int, Func0)}, * {@link #deliverFirst()}, * {@link #split(Action2, Action2)}. * * @param restartableId an id of the restartable. * @param observableFactory a factory that should return an Observable when the restartable should run. * @param onNext a callback that will be called when received data should be delivered to view. * @param onError a callback that will be called if the source observable emits onError. * @param <T> the type of the observable. */ public <T> void restartableFirst(int restartableId, final Func0<Observable<T>> observableFactory, final Action2<View, T> onNext, @Nullable final Action2<View, Throwable> onError) { restartable(restartableId, new Func0<Subscription>() { @Override public Subscription call() { return observableFactory.call() .compose(RxPresenter.this.<T>deliverFirst()) .subscribe(split(onNext, onError)); } }); } /** * This is a shortcut for calling {@link #restartableFirst(int, Func0, Action2, Action2)} with the last parameter = null. */ public <T> void restartableFirst(int restartableId, final Func0<Observable<T>> observableFactory, final Action2<View, T> onNext) { restartableFirst(restartableId, observableFactory, onNext, null); } /** * This is a shortcut that can be used instead of combining together * {@link #restartable(int, Func0)}, * {@link #deliverLatestCache()}, * {@link #split(Action2, Action2)}. * * @param restartableId an id of the restartable. * @param observableFactory a factory that should return an Observable when the restartable should run. * @param onNext a callback that will be called when received data should be delivered to view. * @param onError a callback that will be called if the source observable emits onError. * @param <T> the type of the observable. */ public <T> void restartableLatestCache(int restartableId, final Func0<Observable<T>> observableFactory, final Action2<View, T> onNext, @Nullable final Action2<View, Throwable> onError) { restartable(restartableId, new Func0<Subscription>() { @Override public Subscription call() { return observableFactory.call() .compose(RxPresenter.this.<T>deliverLatestCache()) .subscribe(split(onNext, onError)); } }); } /** * This is a shortcut for calling {@link #restartableLatestCache(int, Func0, Action2, Action2)} with the last parameter = null. */ public <T> void restartableLatestCache(int restartableId, final Func0<Observable<T>> observableFactory, final Action2<View, T> onNext) { restartableLatestCache(restartableId, observableFactory, onNext, null); } /** * This is a shortcut that can be used instead of combining together * {@link #restartable(int, Func0)}, * {@link #deliverReplay()}, * {@link #split(Action2, Action2)}. * * @param restartableId an id of the restartable. * @param observableFactory a factory that should return an Observable when the restartable should run. * @param onNext a callback that will be called when received data should be delivered to view. * @param onError a callback that will be called if the source observable emits onError. * @param <T> the type of the observable. 
*/ public <T> void restartableReplay(int restartableId, final Func0<Observable<T>> observableFactory, final Action2<View, T> onNext, @Nullable final Action2<View, Throwable> onError) { restartable(restartableId, new Func0<Subscription>() { @Override public Subscription call() { return observableFactory.call() .compose(RxPresenter.this.<T>deliverReplay()) .subscribe(split(onNext, onError)); } }); } /** * This is a shortcut for calling {@link #restartableReplay(int, Func0, Action2, Action2)} with the last parameter = null. */ public <T> void restartableReplay(int restartableId, final Func0<Observable<T>> observableFactory, final Action2<View, T> onNext) { restartableReplay(restartableId, observableFactory, onNext, null); } /** * Returns an {@link rx.Observable.Transformer} that couples views with data that has been emitted by * the source {@link rx.Observable}. * * {@link #deliverLatestCache} keeps the latest onNext value and emits it each time a new view gets attached. * If a new onNext value appears while a view is attached, it will be delivered immediately. * * @param <T> the type of source observable emissions */ public <T> DeliverLatestCache<View, T> deliverLatestCache() { return new DeliverLatestCache<>(views); } /** * Returns an {@link rx.Observable.Transformer} that couples views with data that has been emitted by * the source {@link rx.Observable}. * * {@link #deliverFirst} delivers only the first onNext value that has been emitted by the source observable. * * @param <T> the type of source observable emissions */ public <T> DeliverFirst<View, T> deliverFirst() { return new DeliverFirst<>(views); } /** * Returns an {@link rx.Observable.Transformer} that couples views with data that has been emitted by * the source {@link rx.Observable}. * * {@link #deliverReplay} keeps all onNext values and emits them each time a new view gets attached. * If a new onNext value appears while a view is attached, it will be delivered immediately. * * @param <T> the type of source observable emissions */ public <T> DeliverReplay<View, T> deliverReplay() { return new DeliverReplay<>(views); } /** * Returns a method that can be used for manual restartable chain build. It returns an Action1 that splits * a received {@link Delivery} into two {@link Action2} onNext and onError calls. * * @param onNext a method that will be called if the delivery contains an emitted onNext value. * @param onError a method that will be called if the delivery contains an onError throwable. * @param <T> a type on onNext value. * @return an Action1 that splits a received {@link Delivery} into two {@link Action2} onNext and onError calls. */ public <T> Action1<Delivery<View, T>> split(final Action2<View, T> onNext, @Nullable final Action2<View, Throwable> onError) { return new Action1<Delivery<View, T>>() { @Override public void call(Delivery<View, T> delivery) { delivery.split(onNext, onError); } }; } /** * This is a shortcut for calling {@link #split(Action2, Action2)} when the second parameter is null. 
*/ public <T> Action1<Delivery<View, T>> split(Action2<View, T> onNext) { return split(onNext, null); } /** * {@inheritDoc} */ @CallSuper @Override protected void onCreate(Bundle savedState) { if (savedState != null) requested.addAll(savedState.getIntegerArrayList(REQUESTED_KEY)); } /** * {@inheritDoc} */ @CallSuper @Override protected void onDestroy() { views.onCompleted(); subscriptions.unsubscribe(); for (Map.Entry<Integer, Subscription> entry : restartableSubscriptions.entrySet()) entry.getValue().unsubscribe(); } /** * {@inheritDoc} */ @CallSuper @Override protected void onSave(Bundle state) { for (int i = requested.size() - 1; i >= 0; i--) { int restartableId = requested.get(i); Subscription subscription = restartableSubscriptions.get(restartableId); if (subscription != null && subscription.isUnsubscribed()) requested.remove(i); } state.putIntegerArrayList(REQUESTED_KEY, requested); } /** * {@inheritDoc} */ @CallSuper @Override protected void onTakeView(View view) { views.onNext(view); } /** * {@inheritDoc} */ @CallSuper @Override protected void onDropView() { views.onNext(null); } /** * Please, use restartableXX and deliverXX methods for pushing data from RxPresenter into View. */ @Deprecated @Nullable @Override public View getView() { return super.getView(); } }
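// Usage sketch (not part of the library source; every name below is a placeholder invented
// for illustration): a presenter that loads a list once per restartable request and
// re-delivers the latest result to whichever view is attached, using only the
// restartableLatestCache() shortcut defined above. A real presenter would replace
// Observable.just(...) with an actual data source.
package nucleus.example;

import android.os.Bundle;
import java.util.Arrays;
import java.util.List;
import nucleus.presenter.RxPresenter;
import rx.Observable;
import rx.functions.Action2;
import rx.functions.Func0;

class ItemsPresenter extends RxPresenter<ItemsPresenter.ItemsView> {

    interface ItemsView {
        void showItems(List<String> items);
        void showError(Throwable error);
    }

    private static final int RESTARTABLE_LOAD = 1;

    @Override
    protected void onCreate(Bundle savedState) {
        super.onCreate(savedState);
        restartableLatestCache(RESTARTABLE_LOAD,
                new Func0<Observable<List<String>>>() {
                    @Override
                    public Observable<List<String>> call() {
                        return Observable.just(Arrays.asList("a", "b", "c")); // placeholder data source
                    }
                },
                new Action2<ItemsView, List<String>>() {
                    @Override
                    public void call(ItemsView view, List<String> items) {
                        view.showItems(items);
                    }
                },
                new Action2<ItemsView, Throwable>() {
                    @Override
                    public void call(ItemsView view, Throwable error) {
                        view.showError(error);
                    }
                });
        // First run only: kick the restartable off. After a process restart the
        // superclass re-subscribes it automatically because its id was saved in onSave().
        if (savedState == null) {
            start(RESTARTABLE_LOAD);
        }
    }
}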
package oasis.jongo; import java.io.IOException; import java.util.Set; import javax.inject.Inject; import javax.inject.Provider; import javax.inject.Singleton; import org.joda.time.Instant; import org.jongo.Jongo; import org.jongo.marshall.jackson.configuration.MapperModifier; import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.databind.DeserializationContext; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.JsonDeserializer; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.SerializerProvider; import com.fasterxml.jackson.datatype.guava.GuavaModule; import com.fasterxml.jackson.datatype.joda.JodaModule; import com.google.common.base.Preconditions; import com.mongodb.Mongo; import com.mongodb.MongoClient; import de.undercouch.bson4jackson.BsonGenerator; import de.undercouch.bson4jackson.serializers.BsonSerializer; import oasis.jongo.guice.JongoModule; import oasis.model.i18n.LocalizableModule; @Singleton public class JongoService implements Provider<Jongo> { private Mongo mongoConnection; private Jongo jongoConnection; private final JongoModule.Settings settings; private final Provider<Set<JongoBootstrapper>> bootstrappers; @Inject JongoService(JongoModule.Settings settings, Provider<Set<JongoBootstrapper>> bootstrappers) { this.settings = settings; this.bootstrappers = bootstrappers; } @Override public Jongo get() { Preconditions.checkState(jongoConnection != null, "Thou shalt start tha JongoService"); return jongoConnection; } public void start() throws Exception { mongoConnection = new MongoClient(settings.mongoURI); jongoConnection = new Jongo(mongoConnection.getDB(settings.mongoURI.getDatabase()), new OasisMapper.Builder() .registerModule(new CustomJodaModule()) .registerModule(new GuavaModule()) .registerModule(new LocalizableModule()) .addModifier(new MapperModifier() { @Override public void modify(ObjectMapper mapper) { mapper.setSerializationInclusion(Include.NON_EMPTY); // instead of NON_NULL } }) .enable(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY) .build()); for (JongoBootstrapper bootstrapper : bootstrappers.get()) { bootstrapper.bootstrap(); } } public void stop() { mongoConnection.close(); } static class CustomJodaModule extends JodaModule { CustomJodaModule() { super(); addSerializer(Instant.class, new BsonSerializer<Instant>() { @Override public void serialize(Instant instant, BsonGenerator bsonGenerator, SerializerProvider serializerProvider) throws IOException { bsonGenerator.writeDateTime(instant.toDate()); } }); addDeserializer(Instant.class, new JsonDeserializer<Instant>() { @Override public Instant deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException { return new Instant(jp.getEmbeddedObject()); } }); } } }
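// Usage sketch (assumption, not part of the original module; the class, collection and index
// names are placeholders, and the exact JongoBootstrapper signature is assumed from the call
// in start() above): a bootstrapper that JongoService invokes before the application serves
// requests. Injecting Provider<Jongo> (which JongoService itself implements) defers getting
// the connection until bootstrap() actually runs.
package oasis.jongo;

import javax.inject.Inject;
import javax.inject.Provider;
import org.jongo.Jongo;

class AccountIndexBootstrapper implements JongoBootstrapper {
  private final Provider<Jongo> jongo;

  @Inject
  AccountIndexBootstrapper(Provider<Jongo> jongo) {
    this.jongo = jongo;
  }

  @Override
  public void bootstrap() {
    // Illustrative only: make sure a simple index exists on the "account" collection.
    jongo.get().getCollection("account").ensureIndex("{ id: 1 }");
  }
}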
package cgeo.geocaching.location; import cgeo.geocaching.network.Network; import cgeo.geocaching.network.Parameters; import cgeo.geocaching.utils.Log; import cgeo.geocaching.utils.RxUtils; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ObjectNode; import org.apache.commons.lang3.StringUtils; import org.eclipse.jdt.annotation.NonNull; import rx.Observable; import rx.Observable.OnSubscribe; import rx.Subscriber; import rx.functions.Func0; import android.location.Address; import java.util.Locale; public class MapQuestGeocoder { private static final String MAPQUEST_KEY = "Fmjtd|luurn1u2n9,bs=o5-9wynua"; private MapQuestGeocoder() { // Do not instantiate } /** * Retrieve addresses from a textual location using MapQuest geocoding API. The work happens on the network * scheduler. * * @param address * the location * @return an observable containing zero or more locations * * @see android.location.Geocoder#getFromLocationName(String, int) */ public static Observable<Address> getFromLocationName(@NonNull final String address) { return get("address", new Parameters("location", address, "maxResults", "20", "thumbMaps", "false")); } /** * Retrieve the physical address for coordinates. The work happens on the network scheduler. * * @param coords the coordinates * @return an observable containing one location or an error */ public static Observable<Address> getFromLocation(@NonNull final Geopoint coords) { return get("reverse", new Parameters("location", String.format(Locale.US, "%f,%f", coords.getLatitude(), coords.getLongitude()))).first(); } private static Observable<Address> get(@NonNull final String method, @NonNull final Parameters parameters) { return Observable.defer(new Func0<Observable<Address>>() { @Override public Observable<Address> call() { final ObjectNode response = Network.requestJSON("https://open.mapquestapi.com/geocoding/v1/" + method, parameters.put("key", MAPQUEST_KEY)); if (response == null) { Log.w("MapQuest decoder error: no response"); return Observable.error(new RuntimeException("no answer from MapQuest geocoder")); } final int statusCode = response.path("info").path("statuscode").asInt(-1); if (statusCode != 0) { Log.w("MapQuest decoder error: statuscode is not 0"); return Observable.error(new RuntimeException("no correct answer from MapQuest geocoder")); } return Observable.create(new OnSubscribe<Address>() { @Override public void call(final Subscriber<? 
super Address> subscriber) { try { for (final JsonNode address: response.get("results").get(0).get("locations")) { subscriber.onNext(mapquestToAddress(address)); } subscriber.onCompleted(); } catch (final Exception e) { Log.e("Error decoding MapQuest address", e); subscriber.onError(e); } } }); } }).subscribeOn(RxUtils.networkScheduler); } private static Address mapquestToAddress(final JsonNode mapquestAddress) { final Address address = new Address(Locale.getDefault()); for (int i = 1; i <= 6; i++) { final String adminAreaName = "adminArea" + i; setComponent(address, mapquestAddress, adminAreaName, mapquestAddress.path(adminAreaName + "Type").asText()); } setComponent(address, mapquestAddress, "postalCode", "PostalCode"); int index = 0; for (final String addressComponent: new String[]{ mapquestAddress.path("street").asText(), address.getSubLocality(), address.getLocality(), address.getPostalCode(), address.getSubAdminArea(), address.getAdminArea(), address.getCountryCode() }) { if (StringUtils.isNotBlank(addressComponent)) { address.setAddressLine(index++, addressComponent); } } final JsonNode latLng = mapquestAddress.get("latLng"); address.setLatitude(latLng.get("lat").asDouble()); address.setLongitude(latLng.get("lng").asDouble()); return address; } private static void setComponent(final Address address, final JsonNode mapquestAddress, final String adminArea, final String adminAreaType) { final String content = StringUtils.trimToNull(mapquestAddress.path(adminArea).asText()); switch (adminAreaType) { case "City": address.setLocality(content); break; case "Neighborhood": address.setSubLocality(content); break; case "PostalCode": address.setPostalCode(content); break; case "State": address.setAdminArea(content); break; case "County": address.setSubAdminArea(content); break; case "Country": address.setCountryCode(content); address.setCountryName(new Locale("", content).getDisplayCountry()); break; // Make checkers happy default: break; } } }
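// Usage sketch (assumption; the query string and the callback bodies are placeholders):
// resolving a textual location with getFromLocationName(). The work already runs on the
// network scheduler (see get() above); an observeOn(...) hop back to the UI thread is
// deliberately omitted here.
package cgeo.geocaching.location;

import cgeo.geocaching.utils.Log;
import android.location.Address;
import rx.functions.Action1;

final class GeocoderExample {
    private GeocoderExample() {
        // utility holder for the sketch
    }

    static void geocodeExample() {
        MapQuestGeocoder.getFromLocationName("Reykjavik, Iceland")
                .subscribe(new Action1<Address>() {
                    @Override
                    public void call(final Address address) {
                        // one onNext per returned location; use the Address here
                    }
                }, new Action1<Throwable>() {
                    @Override
                    public void call(final Throwable t) {
                        Log.e("MapQuest geocoding failed", t);
                    }
                });
    }
}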
package com.google.refine.expr.functions; import java.util.Properties; import org.json.JSONException; import org.json.JSONWriter; import com.google.refine.expr.EvalError; import com.google.refine.grel.Function; public class ToNumber implements Function { @Override public Object call(Properties bindings, Object[] args) { if (args.length == 1 && args[0] != null) { if (args[0] instanceof Number) { return args[0]; } else { String s = args[0].toString().trim(); if (s.length() > 0) { try { return Long.parseLong(s); } catch (NumberFormatException e) { } try { return Double.parseDouble(s); } catch (NumberFormatException e) { return new EvalError("Cannot parse to number"); } } } } return null; } @Override public void write(JSONWriter writer, Properties options) throws JSONException { writer.object(); writer.key("description"); writer.value("Returns o converted to a number"); writer.key("params"); writer.value("o"); writer.key("returns"); writer.value("number"); writer.endObject(); } }
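// Usage sketch (assumption, not part of OpenRefine): calling the GREL function directly to
// illustrate the parsing order implemented above — Long first, then Double, and an EvalError
// when neither parse succeeds.
package com.google.refine.expr.functions;

import java.util.Properties;
import com.google.refine.grel.Function;

class ToNumberDemo {
    public static void main(String[] args) {
        Function toNumber = new ToNumber();
        Object asLong = toNumber.call(new Properties(), new Object[] { " 42 " });   // -> Long 42 (input is trimmed)
        Object asDouble = toNumber.call(new Properties(), new Object[] { "3.14" }); // -> Double 3.14
        Object error = toNumber.call(new Properties(), new Object[] { "abc" });     // -> EvalError("Cannot parse to number")
        System.out.println(asLong + " / " + asDouble + " / " + error);
    }
}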
package me.stefvanschie.buildinggame.timers; import me.stefvanschie.buildinggame.managers.arenas.ArenaManager; import me.stefvanschie.buildinggame.utils.arena.Arena; import me.stefvanschie.buildinggame.utils.particle.Particle; import me.stefvanschie.buildinggame.utils.plot.Plot; import org.bukkit.scheduler.BukkitRunnable; public class ParticleRender extends BukkitRunnable { @Override public void run() { for (Arena arena : ArenaManager.getInstance().getArenas()) { for (Plot plot : arena.getPlots()) { for (Particle particle : plot.getParticles()) { particle.render(); } } } } }
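// Scheduling sketch (assumption; "BuildingGamePlugin" is a placeholder plugin class, and the
// 10-tick period is illustrative): the renderer only does work when run() is called, so the
// plugin needs to register it as a repeating Bukkit task, for example from onEnable().
package me.stefvanschie.buildinggame;

import me.stefvanschie.buildinggame.timers.ParticleRender;
import org.bukkit.plugin.java.JavaPlugin;

public class BuildingGamePlugin extends JavaPlugin {
    @Override
    public void onEnable() {
        // initial delay 0 ticks, then repeat every 10 server ticks
        new ParticleRender().runTaskTimer(this, 0L, 10L);
    }
}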
package no.ntnu.osnap.com; import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.IOException; import no.ntnu.osnap.com.ConnectionMetadata.DefaultServices; import android.app.Activity; import android.app.Service; import android.bluetooth.BluetoothAdapter; import android.bluetooth.BluetoothDevice; import android.bluetooth.BluetoothSocket; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.util.Log; /** * A class for any BluetoothConnection on Android. This class offers easy and useful services * giving a simple interface to the developer to establish a Bluetooth connection and * send or receive data without needing to know any low-level details. Simply create new instance * of this class with the remote device address and use connect() to establish the connection. * This class will automatically create connection and communication threads to handle everything. */ public class BluetoothConnection extends Protocol { /** Unique requestResult ID when using startActivityForResult in the parentActivity to enable the Bluetooth Adapter*/ public static int REQUEST_ENABLE_BT = 374370074; /** The Activity that created this instance of BluetoothConnection (others could still be using this instance) */ private Activity parentActivity; /** We notify this listener on any connection state changes */ private ConnectionListener connectionListener; protected BufferedInputStream input; protected BufferedOutputStream output; protected BluetoothDevice device; protected BluetoothSocket socket; protected BluetoothAdapter bluetooth; private ConnectionState connectionState; /** * An enumeration describing the different connection states a BluetoothConnection can be */ public enum ConnectionState { /** Initial state. No connection has been established. */ STATE_DISCONNECTED, /** The device is trying to establish a connection. */ STATE_CONNECTING, /** A valid open connection is established to the remote device. 
*/ STATE_CONNECTED } /** * Same as calling BluetoothConnection(device.getAddress(), parentActivity) * Is useful for connecting to a specific device through discovery mode * @see BluetoothConnection(String address, Activity parentActivity) */ public BluetoothConnection(BluetoothDevice device, Activity parentActivity, ConnectionListener listener) throws UnsupportedHardwareException, IllegalArgumentException { this(device.getAddress(), parentActivity, listener); } public BluetoothConnection(String address, Activity parentActivity, ConnectionListener listener) throws UnsupportedHardwareException, IllegalArgumentException{ //Validate the address if( !BluetoothAdapter.checkBluetoothAddress(address) ){ throw new IllegalArgumentException("The specified bluetooth address is not valid"); } //Make sure there is a valid listener if(listener == null){ throw new IllegalArgumentException("ConnectionListener cannot be null in BluetoothConnection constructor"); } //Make sure this device has bluetooth bluetooth = BluetoothAdapter.getDefaultAdapter(); if( bluetooth == null ){ throw new UnsupportedHardwareException("No bluetooth hardware found"); } this.connectionListener = listener; this.parentActivity = parentActivity; connectionState = ConnectionState.STATE_DISCONNECTED; device = bluetooth.getRemoteDevice(address); //Register broadcast receivers parentActivity.registerReceiver(mReceiver, new IntentFilter(BluetoothAdapter.ACTION_STATE_CHANGED)); parentActivity.registerReceiver(mReceiver, new IntentFilter(BluetoothAdapter.ACTION_DISCOVERY_FINISHED)); } /** * Changes the connection state of this BluetoothConnection. Package visible. * @param setState the new ConnectionState of this BluetoothConnection */ synchronized void setConnectionState(ConnectionState setState) { connectionState = setState; //Tell listener about any connection changes switch(setState) { case STATE_CONNECTED: connectionListener.onConnect(this); break; case STATE_DISCONNECTED: connectionListener.onDisconnect(this); break; case STATE_CONNECTING: connectionListener.onConnecting(this); break; } } /** * Private connection method. This actually creates a new thread that established the connection to * the remote device. This method does not have any safeguards to check if Bluetooth or remote device * is valid. */ private synchronized final void establishConnection() { //Never establish connections when in discovery mode if( bluetooth.isDiscovering() ) return; //Start an asynchronous connection and return immediately so we do not interrupt program flow ConnectionThread thread = new ConnectionThread(this); thread.start(); } /** * Establishes a connection to the remote device. Note that this function is asynchronous and returns * immediately after starting a new connection thread. Use isConnected() or getConnectionState() to * check when the connection has been established. disconnect() can be called to stop trying to get an * active connection (STATE_CONNECTING to STATE_DISCONNECTED) */ public synchronized void connect(ConnectionListener listener) { //Don't try to connect more than once if( connectionState != ConnectionState.STATE_DISCONNECTED ) { Log.w("BluetoothConnection", "Trying to connect to the same device twice!"); return; } //Start connecting setConnectionState(ConnectionState.STATE_CONNECTING); //Make sure bluetooth is enabled if( !bluetooth.isEnabled() ) { //wait until Bluetooth is enabled by the OS Log.v("BluetoothConnection", "BluetoothDevice is DISABLED. 
Asking user to enable Bluetooth"); parentActivity.startActivityForResult(new Intent(BluetoothAdapter.ACTION_REQUEST_ENABLE), REQUEST_ENABLE_BT); return; } //Stop discovery when connecting if( bluetooth.isDiscovering() ){ //wait until discovery has finished before connecting Log.v("BluetoothConnection", "BluetoothDevice is in discovery mode. Waiting for discovery to finish before connecting"); return; } //All is good! establishConnection(); } /** * Get the Bluetooth MAC address of the remote device * @return a String representation of the MAC address. For example: "00:10:06:29:00:48" */ public String getAddress() { return device.getAddress(); } @Override public String toString() { return device.getName(); } /** * Returns the current connection state of this BluetoothConnection to the remote device * @return STATE_CONNECTED, STATE_CONNECTING or STATE_DISCONNECTED */ public synchronized ConnectionState getConnectionState() { return connectionState; } /** * Returns true if there is an active open and valid bluetooth connection to the * remote device. Same as calling getConnectionState() == ConnectionState.STATE_CONNECTED * @return true if there is a connection, false otherwise */ public boolean isConnected() { return getConnectionState() == ConnectionState.STATE_CONNECTED; } /** * Disconnects the remote device. connect() has to be called before any communication to the * remote device can be done again. * @throws IOException if there was a problem closing the connection. */ public synchronized void disconnect() throws IOException { //Close socket only if we are connected or trying to connect if(getConnectionState() != ConnectionState.STATE_DISCONNECTED) { setConnectionState(ConnectionState.STATE_DISCONNECTED); if(socket != null) { socket.close(); input = null; output = null; socket = null; super.running = false; } Log.v("BluetoothConnection", "Bluetooth connection closed: " + device.getAddress()); return; } } // Create a BroadcastReceiver for enabling bluetooth private final BroadcastReceiver mReceiver = new BroadcastReceiver() { public void onReceive(Context context, Intent intent) { String action = intent.getAction(); //Device is turning on or off if( action.equals(BluetoothAdapter.ACTION_STATE_CHANGED) ) { switch(bluetooth.getState()) { //Bluetooth is starting up case BluetoothAdapter.STATE_TURNING_ON: //Don't care break; //Bluetooth is shutting down or disabled case BluetoothAdapter.STATE_TURNING_OFF: case BluetoothAdapter.STATE_OFF: //make sure socket is disconnected when Bluetooth is shutdown try { disconnect(); } catch (IOException e) {} break; //Bluetooth is Enabled and ready case BluetoothAdapter.STATE_ON: //automatically connect if we are waiting for a connection if( getConnectionState() == ConnectionState.STATE_CONNECTING ) { establishConnection(); } break; } } //Discovery mode has finished else if( action.equals(BluetoothAdapter.ACTION_DISCOVERY_FINISHED) ) { //automatically connect if we are waiting for a connection if( getConnectionState() == ConnectionState.STATE_CONNECTING ) { establishConnection(); } } } }; @Override public void finalize() throws Throwable { //Make sure activity is unregistered parentActivity.unregisterReceiver(mReceiver); //make sure that the Bluetooth connection is terminated on object destruction disconnect(); //Allow deconstruction super.finalize(); } @Override protected synchronized void sendBytes(byte[] data) throws IOException { //Make sure we are connected before sending data if( !isConnected() ){ throw new IOException("Trying to send data while 
Bluetooth is not connected!"); } //Send the data output.write(data); output.flush(); } @Override public ConnectionMetadata getConnectionData() { if(super.connectionMetadata == null) super.connectionMetadata = new ConnectionMetadata(device.getName(), device.getAddress(), null, null); //TODO: fix this return super.connectionMetadata; } }
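// Usage sketch (assumption; the ConnectionListener implementation is supplied by the caller,
// and the MAC address is the example one from the getAddress() javadoc above): typical wiring
// from an Activity. connect() returns immediately; the listener's onConnecting()/onConnect()
// callbacks report progress.
package no.ntnu.osnap.com;

import android.app.Activity;
import android.util.Log;

final class BluetoothConnectionExample {
    private BluetoothConnectionExample() {
    }

    static void connectToRemoteDevice(Activity activity, ConnectionListener listener) {
        try {
            BluetoothConnection connection = new BluetoothConnection("00:10:06:29:00:48", activity, listener);
            connection.connect(listener); // asynchronous; poll isConnected() or wait for onConnect()
            // later, when finished: connection.disconnect();
        } catch (UnsupportedHardwareException e) {
            Log.w("BluetoothConnection", "This device has no Bluetooth hardware", e);
        }
    }
}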
package org.nutz.zdoc.impl.html; import java.util.ArrayList; import org.nutz.lang.Lang; import org.nutz.lang.Strings; import org.nutz.zdoc.Rendering; import org.nutz.zdoc.ZDocEle; import org.nutz.zdoc.ZDocEleType; import org.nutz.zdoc.ZDocNode; import org.nutz.zdoc.ZDocNodeType; import org.nutz.zdoc.ZLinkInfo; public class ZDocNode2Html { void joinNode(StringBuilder sb, ZDocNode nd, Rendering ing) { if (ing.isOutOfLimit()) return; if (nd.is(ZDocNodeType.HEADER)) { nodeAsHeader(sb, nd, ing); } else if (nd.is(ZDocNodeType.PARAGRAPH)) { nodeAsParagraph(sb, nd, ing); } else if (nd.is(ZDocNodeType.CODE)) { nodeAsCode(sb, nd, ing); } // UL | OL else if (nd.is(ZDocNodeType.UL) || nd.is(ZDocNodeType.OL)) { nodeAsList(sb, nd, ing); } // TABLE else if (nd.is(ZDocNodeType.TABLE)) { nodeAsTable(sb, nd, ing); } // COMMENT else if (nd.is(ZDocNodeType.COMMENT)) { nodeAsComment(sb, nd, ing); } else if (nd.is(ZDocNodeType.HR)) { nodeAsHr(sb, nd, ing); } // BLOCKQUOTE else if (nd.is(ZDocNodeType.BLOCKQUOTE)) { nodeAsBlockquote(sb, nd, ing); } else { throw Lang.makeThrow("Unrenderable node :\n %s", nd); } } private void nodeAsHeader(StringBuilder sb, ZDocNode nd, Rendering ing) { _join_newline_of_node(sb, nd, ing); String tagName = nd.attrs().getString("tagName", "h" + Math.min(6, nd.depth())); sb.append("<" + tagName + ">"); { String anm = nd.text().replaceAll("[ \t\n]", "_"); sb.append("<a name=\"").append(anm).append("\"></a>"); joinEles(sb, nd, ing); } sb.append("</" + tagName + ">"); { for (ZDocNode sub : nd.children()) { this.joinNode(sb, sub, ing); } } } private void nodeAsParagraph(StringBuilder sb, ZDocNode nd, Rendering ing) { _join_newline_of_node(sb, nd, ing); // IMG DIV boolean onlyOneImg = true; int img = 0; for (ZDocEle ele : nd.eles()) { if (ele.is(ZDocEleType.IMG)) { if (img > 0) { onlyOneImg = false; break; } else { img = 1; } } else if (Strings.isBlank(ele.text()) && !ele.hasAttr("href")) { continue; } else { onlyOneImg = false; break; } } if (onlyOneImg) { sb.append("<div class=\"pa-img\">"); joinEles(sb, nd, ing); sb.append("</div>"); } else { sb.append("<p>"); joinEles(sb, nd, ing); } } private void nodeAsBlockquote(StringBuilder sb, ZDocNode nd, Rendering ing) { _join_newline_of_node(sb, nd, ing); sb.append("<blockquote>"); { this.joinEles(sb, nd, ing); for (ZDocNode sub : nd.children()) { this.joinNode(sb, sub, ing); } } _join_newline_of_node(sb, nd, ing); sb.append("</blockquote>"); } private void nodeAsHr(StringBuilder sb, ZDocNode nd, Rendering ing) { _join_newline_of_node(sb, nd, ing); sb.append("<div class=\"doc-hr\"></div>"); } private void nodeAsComment(StringBuilder sb, ZDocNode nd, Rendering ing) { _join_newline_of_node(sb, nd, ing); sb.append("<!--").append(nd.text()).append("-->"); } private void nodeAsTable(StringBuilder sb, ZDocNode nd, Rendering ing) { _join_newline_of_node(sb, nd, ing); sb.append("<table border=\"1\" cellspacing=\"2\" cellpadding=\"4\">"); for (ZDocNode row : nd.children()) { _join_newline_of_node(sb, row, ing); sb.append(String.format("<%s>", row.type())); for (ZDocNode cell : row.children()) { _join_newline_of_node(sb, cell, ing); sb.append(String.format("<%s>", cell.type())); { joinEles(sb, cell, ing); } _join_newline_of_node(sb, cell, ing); sb.append(String.format("</%s>", cell.type())); } _join_newline_of_node(sb, row, ing); sb.append(String.format("</%s>", row.type())); } _join_newline_of_node(sb, nd, ing); sb.append("</table>"); } private void nodeAsList(StringBuilder sb, ZDocNode nd, Rendering ing) { String tagName = 
nd.type().toString().toLowerCase(); _join_newline_of_node(sb, nd, ing); sb.append(String.format("<%s>", tagName)); for (ZDocNode li : nd.children()) { _join_newline_of_node(sb, li, ing); sb.append("<li>"); joinEles(sb, li, ing); for (ZDocNode child : li.children()) { joinNode(sb, child, ing); } sb.append("</li>"); } _join_newline_of_node(sb, nd, ing); sb.append(String.format("</%s>", tagName)); } private void nodeAsCode(StringBuilder sb, ZDocNode nd, Rendering ing) { _join_newline_of_node(sb, nd, ing); sb.append("<div class='code-block'><pre>\n"); String codeType = nd.attrs().getString("code-type", "txt"); sb.append("<code class='language-" + codeType.toLowerCase() + "'>"); sb.append(nd.text().replace("<", "&lt;").replace(">", "&gt;").replace("\t", " ")); sb.append("</code>\n"); sb.append("</pre></div>"); } private boolean joinEle(StringBuilder sb, ZDocEle ele, Rendering ing) { switch (ele.type()) { case INLINE: case SUP: case SUB: eleAsInline(sb, ele, ing); break; case QUOTE: eleAsQuote(sb, ele); break; case IMG: eleAsImg(sb, ele, ing); break; case BR: eleAsBr(sb, ele); break; default: throw Lang.impossible(); } return ing.isOutOfLimit(); } private void eleAsBr(StringBuilder sb, ZDocEle ele) { sb.append("<br>"); } private void eleAsImg(StringBuilder sb, ZDocEle ele, Rendering ing) { if (ing.hasLimit()) return; if (ele.hasAttr("href")) { sb.append("<a href=\"").append(ele.href()).append("\">"); } ZLinkInfo linfo = ele.linkInfo("src"); String src = null == linfo ? ele.src() : linfo.link(); String apath = ele.attrString("apath"); if (Strings.isBlank(apath) || src.toLowerCase().matches("^[a-z]+://.*$")) { sb.append("<img src=\"").append(src).append('"'); } else { sb.append("<img src=\"") .append(ing.currentBasePath) .append(apath + "/" + src) .append('"') .append(" apath=\"") .append(apath) .append('"'); } int w = ele.width(); if (w > 0) { sb.append(" width=\"").append(w).append('"'); } int h = ele.height(); if (h > 0) { sb.append(" height=\"").append(h).append('"'); } if (null != linfo && !Strings.isBlank(linfo.title())) { sb.append(" title=\"").append(linfo.title()).append("\">"); } else if (!Strings.isBlank(ele.text())) { sb.append(" title=\"").append(ele.text()).append("\">"); } else { sb.append(">"); } if (ele.hasAttr("href")) { sb.append("</a>"); } } private void eleAsQuote(StringBuilder sb, ZDocEle ele) { sb.append("<code>") .append(ele.text().replace("<", "&lt;").replace(">", "&gt;")) .append("</code>"); } private void eleAsInline(StringBuilder sb, ZDocEle ele, Rendering ing) { // text String href = ele.href(); if (Strings.isBlank(href) && Strings.isBlank(ele.text())) { sb.append(' '); return; } ArrayList<String> tagNames = new ArrayList<String>(10); if (!Strings.isBlank(href)) { tagNames.add("a"); } else if (ele.is(ZDocEleType.SUB)) { tagNames.add("sub"); } else if (ele.is(ZDocEleType.SUP)) { tagNames.add("sup"); } else if (ele.hasStyleAs("font-weight", "bold")) { tagNames.add("b"); } else if (ele.hasStyleAs("font-style", "italic")) { tagNames.add("i"); } else if (ele.hasStyleAs("text-decoration", "underline")) { tagNames.add("u"); } // style StringBuilder sbStyle = new StringBuilder(); if (ele.hasStyleAs("text-decoratioin", "line-through")) { sbStyle.append("text-decoratioin:line-through;"); } else if (ele.hasStyle("color")) { sbStyle.append("color:").append(ele.style("color")).append(";"); } // style if (sbStyle.length() > 0 && tagNames.isEmpty()) { tagNames.add("span"); } ZLinkInfo linfo = ele.linkInfo("href"); if (null != linfo) href = linfo.link(); if (!tagNames.isEmpty()) { 
sb.append("<").append(tagNames.get(0)); if (ele.hasAttr("href")) { String apath = ele.attrString("apath"); if (href.startsWith("#") || href.toLowerCase().matches("^[a-z]+://.*$")) { sb.append(" href=\"").append(href).append('"'); } else { sb.append(" href=\"") .append(ing.currentBasePath) .append(apath + "/" + href) .append('"') .append(" apath=\"") .append(apath) .append('"'); } } if (sbStyle.length() > 0) { sb.append(" style=\"").append(sbStyle).append("\""); } if (null != linfo && !Strings.isBlank(linfo.title())) { sb.append(" title=\"").append(linfo.title()).append("\""); } sb.append(">"); for (int i = 1; i < tagNames.size(); i++) { sb.append("<").append(tagNames.get(i)).append('>'); } } if (ing.limit <= 0) { sb.append(Strings.sBlank(ele.text(), href)); } else { int len = ing.limit - ing.charCount; if (len > 0) { String txt = ele.text(); if (null != txt) { if (len > txt.length()) { sb.append(txt); } else { sb.append(txt.substring(0, len)).append(" ... "); } ing.charCount += txt.length(); } } } for (int i = tagNames.size() - 1; i >= 0; i--) { sb.append("</").append(tagNames.get(i)).append('>'); } } private void joinEles(StringBuilder sb, ZDocNode nd, Rendering ing) { for (ZDocEle ele : nd.eles()) { if (joinEle(sb, ele, ing)) break; } } private void _join_newline_of_node(StringBuilder sb, ZDocNode nd, Rendering ing) { sb.append("\n");// .append(Strings.dup(" ", nd.depth())); } }
package com.raizlabs.baseutils; import android.os.Handler; import android.os.Looper; import android.view.View; public class ThreadingUtils { private static Handler uiHandler; /** * @return A {@link Handler} that is bound to the UI thread. */ public static Handler getUIHandler() { if (uiHandler == null) uiHandler = new Handler(Looper.getMainLooper()); return uiHandler; } /** * Returns true if this function was called on the thread the given * {@link Handler} is bound to. * @param handler The {@link Handler} to check the thread of. * @return True if this function was called on the {@link Handler}'s * thread. */ public static boolean isOnHandlerThread(Handler handler) { Looper handlerLooper = handler.getLooper(); if (handlerLooper != null) { return handlerLooper.equals(Looper.myLooper()); } return false; } /** * @return True if this function was called from the UI thread */ public static boolean isOnUIThread() { return Looper.getMainLooper().equals(Looper.myLooper()); } /** * Runs the given {@link Runnable} on the thread the given {@link Handler} * is bound to. This will execute immediately, before this function returns, * if this function was already called on the given {@link Handler}'s thread. * Otherwise, the {@link Runnable} will be posted to the {@link Handler}. * @param handler The {@link Handler} to run the action on. * @param action The {@link Runnable} to execute. * @return True if the action was already executed before this funcion * returned, or false if the action was posted to be handled later. */ public static boolean runOnHandler(Handler handler, Runnable action) { if (isOnHandlerThread(handler)) { action.run(); return true; } else { handler.post(action); return false; } } /** * Runs the given {@link Runnable} on the UI thread. This will execute * immediately, before this function returns, if this function was called * on the UI thread. Otherwise, the {@link Runnable} will be posted to the * UI thread. * @see #runOnUIThread(Runnable, Handler) * @see #runOnUIThread(Runnable, View) * @param action The {@link Runnable} to execute on the UI thread. * @return True if the action was already executed before this function * returned, or false if the action was posted to be handled later. */ public static boolean runOnUIThread(Runnable action) { if (isOnUIThread()) { action.run(); return true; } else { getUIHandler().post(action); return false; } } /** * Runs the given {@link Runnable} on the UI thread. This will execute * immediately, before this function returns, if this function was called * on the UI thread. Otherwise, the {@link Runnable} will be posted using * the given {@link View}. * <br><br> * NOTE: This method will attempt to force the action onto the UI thread. * <br><br> * WARNING: The action may still not be taken if the view's * {@link View#post(Runnable)} method returns true, but doesn't execute. * (This is the case when the view is not attached to a window). * @see #runOnUIThread(Runnable) * @see #runOnUIThread(Runnable, Handler) * @param action The {@link Runnable} to execute. * @param v A {@link View} to use to post the {@link Runnable} if this * wasn't called on the UI thread. * @return True if the action was already executed before this function * returned, or false if the action was posted. */ public static boolean runOnUIThread(View v, Runnable action) { if (isOnUIThread()) { action.run(); return true; } else { if (!v.post(action)) { runOnUIThread(action); } return false; } } /** * Runs the given {@link Runnable} on the UI thread. 
This will execute * immediately, before this function returns, if this function was called * on the UI thread. Otherwise, the {@link Runnable} will be posted using * the given {@link View}. * <br><br> * NOTE: This method will attempt to force the action onto the UI thread. * <br><br> * WARNING: The action may still not be taken if the view's * {@link View#post(Runnable)} method returns true, but doesn't execute. * (This is the case when the view is not attached to a window). * @see #runOnUIThread(Runnable) * @see #runOnUIThread(Runnable, Handler) * @param v A {@link View} to use to post the {@link Runnable} if this * wasn't called on the UI thread. * @param action The {@link Runnable} to execute. * @return True if the action was already executed before this function * returned, or false if the action was posted. */ @Deprecated public static boolean runOnUIThread(Runnable action, View v) { return runOnUIThread(v, action); } /** * Runs the given {@link Runnable} immediately if this function is called * on the UI thread. Otherwise, it is posted to the given {@link Handler} * and executed on its bound thread. Though it is assumed that the given * {@link Handler} is bound to the UI thread, it is not necessary, and it * will execute the action either way. * @param action The {@link Runnable} to execute. * @param handler The {@link Handler} to post the action to if if this * wasn't called on the UI thread. * @return True if the action was already executed before this function * returned, or false if the action was posted to the {@link Handler}. */ @Deprecated public static boolean runOnUIThread(Runnable action, Handler handler) { return runOnUIThread(handler, action); } /** * Runs the given {@link Runnable} immediately if this function is called * on the UI thread. Otherwise, it is posted to the given {@link Handler} * and executed on its bound thread. Though it is assumed that the given * {@link Handler} is bound to the UI thread, it is not necessary, and it * will execute the action either way. * @param handler The {@link Handler} to post the action to if if this * wasn't called on the UI thread. * @param action The {@link Runnable} to execute. * @return True if the action was already executed before this function * returned, or false if the action was posted to the {@link Handler}. */ public static boolean runOnUIThread(Handler handler, Runnable action) { if (isOnUIThread()) { action.run(); return true; } else { if (!handler.post(action)) { runOnUIThread(action); } return false; } } }
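// Usage sketch (assumption; the TextView and message are placeholders): a helper that is safe
// to call from any thread — the Runnable executes inline when already on the UI thread and is
// posted to the UI Handler otherwise.
package com.raizlabs.baseutils;

import android.widget.TextView;

final class UiThreadExample {
    private UiThreadExample() {
    }

    static void showStatus(final TextView statusView, final String message) {
        ThreadingUtils.runOnUIThread(new Runnable() {
            @Override
            public void run() {
                statusView.setText(message);
            }
        });
    }
}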
package io.swagger.api; import io.swagger.model.GenericError; import io.swagger.model.InventoryItem; import javax.ws.rs.*; import javax.ws.rs.core.Response; import io.swagger.annotations.*; import java.util.List; @Path("/inventory") @Api(description = "the inventory API") @javax.annotation.Generated(value = "class io.swagger.codegen.languages.JavaJAXRSSpecServerCodegen", date = "2017-03-23T05:55:00.679Z") public class InventoryApi { @POST @Consumes({ "application/json" }) @ApiOperation(value = "Adds a new item to the inventory", notes = "Duplicate SKUs will be rejected", response = void.class, tags={ "Inventory", }) @ApiResponses(value = { @ApiResponse(code = 201, message = "Item created", response = void.class), @ApiResponse(code = 400, message = "General Error", response = void.class) }) public Response addInventory(InventoryItem inventory) { return Response.ok().entity("magic!").build(); } @DELETE @Path("/{sku}") @ApiOperation(value = "Deletes an item by its SKU", notes = "", response = void.class, tags={ "Inventory", }) @ApiResponses(value = { @ApiResponse(code = 200, message = "Item successfully deleted", response = void.class) }) public Response deleteItemBySKU(@PathParam("sku") String sku) { return Response.ok().entity("magic!").build(); } @GET @Produces({ "application/json" }) @ApiOperation(value = "Returns inventory from the system", notes = "longer description", response = InventoryItem.class, responseContainer = "List", tags={ "Inventory" }) @ApiResponses(value = { @ApiResponse(code = 200, message = "Returns the inventory for the query", response = InventoryItem.class, responseContainer = "List") }) public Response getInventory(@QueryParam("skip") Integer skip) { return Response.ok().entity("magic!").build(); } }
package bat_nav; import java.rmi.*; import java.rmi.server.UnicastRemoteObject; import java.util.Random; interface Serveur extends Remote { public int joueCoup(int x, int y) throws RemoteException; public int recoitCoup(int x, int y) throws RemoteException; } @SuppressWarnings("serial") public class Reseau extends UnicastRemoteObject implements Serveur { Plateau plateau; //Plateau/Socket joueur_distant; int joueur; int premier_joueur; //client = player 1 //server = player 2 //the two sides must agree on this public Reseau(Plateau p) throws RemoteException { plateau = p; //connect to the remote player here //determine the player numbers here + their names Random rand = new Random(); premier_joueur = rand.nextInt(2)+1; } public int joueCoup(int x, int y) { //we receive //0: miss (splash) int resultat = 0; //write to the socket //resultat = joueur_distant.recoitCoup(x, y); return resultat; } public int recoitCoup(int x, int y) { int resultat = plateau.coupJoue(x, y); return resultat; } }
package com.rexsl.core; import com.ymock.util.Logger; import java.io.File; import java.lang.annotation.Annotation; import java.util.ArrayList; import java.util.List; import javax.servlet.ServletContext; import javax.ws.rs.Produces; import javax.ws.rs.core.Context; import javax.ws.rs.core.MediaType; import javax.ws.rs.ext.ContextResolver; import javax.ws.rs.ext.Provider; import javax.xml.XMLConstants; import javax.xml.bind.JAXBContext; import javax.xml.bind.Marshaller; import javax.xml.validation.SchemaFactory; import org.apache.commons.lang.StringEscapeUtils; /** * Provider of JAXB {@link Marshaller} for JAX-RS framework. * * <p>You don't need to use this class directly. It is made public only becuase * JAX-RS implementation should be able to discover it in classpath. * * @author Yegor Bugayenko (yegor@rexsl.com) * @author Krzysztof Krason (Krzysztof.Krason@gmail.com) * @version $Id$ * @since 0.2 */ @Provider @Produces({ MediaType.APPLICATION_XML, MediaType.TEXT_XML }) public final class XslResolver implements ContextResolver<Marshaller> { /** * Folder with XSD files. * @see #setServletContext(ServletContext) */ private File xsdFolder; /** * Classes to process. */ private final List<Class> classes = new ArrayList<Class>(); /** * JAXB context. */ private JAXBContext context; /** * Public ctor. */ public XslResolver() { // intentionally empty } /** * Set servlet context from container, to be called by JAX-RS framework * because of {@link Context} annotation. * @param ctx The context */ @Context public void setServletContext(final ServletContext ctx) { final String name = ctx.getInitParameter("com.rexsl.core.XSD_FOLDER"); if (name != null) { this.xsdFolder = new File(name); Logger.debug( this, "#setServletContext(%s): XSD folder set to '%s'", ctx.getClass().getName(), this.xsdFolder ); } Logger.debug( this, "#setServletContext(%s): context injected by JAX-RS", ctx.getClass().getName() ); } /** * {@inheritDoc} */ @Override public Marshaller getContext(final Class<?> type) { Marshaller mrsh; try { mrsh = this.context(type).createMarshaller(); mrsh.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, Boolean.TRUE); final String header = String.format( "\n<?xml-stylesheet type='text/xml' href='%s'?>", StringEscapeUtils.escapeXml(this.stylesheet(type)) ); mrsh.setProperty("com.sun.xml.bind.xmlHeaders", header); } catch (javax.xml.bind.JAXBException ex) { throw new IllegalStateException(ex); } if (this.xsdFolder != null) { mrsh = this.addXsdValidator(mrsh, type); } else { Logger.debug( this, "#getContext(%s): marshaller created (no XSD validator)", type.getName() ); } return mrsh; } /** * Add new class to context. * @param cls The class we should add */ public void add(final Class cls) { synchronized (this) { if (!this.classes.contains(cls)) { try { this.classes.add(cls); this.context = JAXBContext.newInstance( this.classes.toArray(new Class[] {}) ); Logger.info( this, "#add(%s): added to JAXBContext (%d total)", cls.getName(), this.classes.size() ); } catch (javax.xml.bind.JAXBException ex) { throw new IllegalStateException(ex); } } } } /** * Create and return a context. * @param cls The class we should process * @return The context */ private JAXBContext context(final Class cls) { this.add(cls); return this.context; } /** * Returns the name of XSL stylesheet for this type. 
* @param type The class * @return The name of stylesheet * @see #getContext(Class) */ private String stylesheet(final Class<?> type) { final Annotation antn = type.getAnnotation(Stylesheet.class); String stylesheet; if (antn == null) { stylesheet = String.format( "/xsl/%s.xsl", type.getSimpleName() ); } else { stylesheet = ((Stylesheet) antn).value(); } Logger.debug( this, "#stylesheet(%s): '%s' stylesheet discovered", type.getName(), stylesheet ); return stylesheet; } /** * Configure marshaller and return a new one (or the same). * @param mrsh The marshaller, already created and ready to marshal * @param type The class to be marshalled * @return New marshaller to be used instead * @see #getContext(Class) */ private Marshaller addXsdValidator(final Marshaller mrsh, final Class<?> type) { final String name = this.schema(type); if (name.isEmpty()) { Logger.debug( this, "Schema validation turned off for class '%s'", type.getName() ); } else { final File xsd = new File(this.xsdFolder, name); if (xsd.exists()) { final SchemaFactory factory = SchemaFactory.newInstance( XMLConstants.W3C_XML_SCHEMA_NS_URI ); try { mrsh.setSchema(factory.newSchema(xsd)); } catch (org.xml.sax.SAXException ex) { throw new IllegalStateException( String.format( "Failed to use XSD schema from '%s' for class '%s'", xsd, type.getName() ), ex ); } try { mrsh.setEventHandler(new XsdEventHandler()); } catch (javax.xml.bind.JAXBException ex) { throw new IllegalStateException(ex); } Logger.debug( this, "Class '%s' will be validated with '%s' schema", type.getName(), xsd ); } else { Logger.warn( this, "No XSD schema for class '%s' in '%s' file", type.getName(), xsd ); } } return mrsh; } /** * Returns the name of XSD schema for this type. * @param type The class * @return The name of XSD file */ private String schema(final Class<?> type) { final Annotation antn = type.getAnnotation(XmlSchema.class); String schema; if (antn == null) { schema = String.format("%s.xsd", type.getName()); } else { if (((XmlSchema) antn).ignore()) { schema = ""; } else { schema = ((XmlSchema) antn).value(); } } Logger.debug( this, "#schema(%s): '%s' schema discovered", type.getName(), schema ); return schema; } }
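// Usage sketch (assumption; "HomePage", the package and the XSL path are placeholders, and
// @Stylesheet is assumed to live in com.rexsl.core next to XslResolver): a JAXB page object
// as the resolver would see it. Without @Stylesheet the resolver falls back to
// "/xsl/HomePage.xsl" (see stylesheet() above); the annotation overrides that default.
package com.example.pages;

import com.rexsl.core.Stylesheet;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;

@XmlRootElement(name = "page")
@Stylesheet("/xsl/home.xsl")
public class HomePage {
    @XmlElement
    public String title = "Hello";
}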
package com.netflix.recipes.rss; public class RSSVersion { private final static String RSS_VERSION = "0.3"; public static String getVersion() { return RSSVersion.RSS_VERSION; } }
package ru.stqa.java.sandbox; public class MyFirstProgram { public static void main(String[] args) { hello("world"); hello("Кирилл"); hello("User"); Square s = new Square(5); System.out.println("The area of a square with side " + s.l + " is " + s.area()); Rectangle r = new Rectangle(4, 5); System.out.println("The area of a rectangle with sides " + r.a + " and " + r.b + " is " + r.area()); Point p1 = new Point(); Point p2 = new Point(); p1.x = 3; p1.y = 2; p2.x = 4; p2.y = 6; distance(p1, p2); } public static void hello(String somebody) { System.out.println("Hello, " + somebody); } public static void distance(Point p1, Point p2) { System.out.println(Math.sqrt(Math.pow(p2.x - p1.x, 2) + Math.pow(p2.y - p1.y, 2))); } }
package com.podio.sdk.domain.field; import com.podio.sdk.domain.Profile; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; public class ContactField extends Field<ContactField.Value> { private static class Settings { private final String type = null; private final String[] valid_types = null; } public static class Configuration extends Field.Configuration { public enum Type { space_contacts, space_users, all_users, undefined; public static Type fromString(String string) { try { return Type.valueOf(string); } catch (IllegalArgumentException e) { return Type.undefined; } catch (NullPointerException e) { return Type.undefined; } } } private final Value default_value = null; private final Settings settings = null; public Value getDefaultValue() { return default_value; } public Type getType() { return settings != null ? Type.fromString(settings.type) : Type.undefined; } public List<String> getValidTypes() { return settings != null && settings.valid_types != null ? Arrays.asList(settings.valid_types) : Arrays.asList(new String[0]); } } public static class Value extends Field.Value { private final Profile value; public Value(Profile contact) { this.value = contact; } @Override public Map<String, Object> getCreateData() { HashMap<String, Object> data = null; long profileId = value != null ? value.getId() : 0L; if (profileId > 0L) { data = new HashMap<String, Object>(); data.put("value", profileId); } return data; } public String getExternalId() { return value != null ? value.getExternalId() : null; } public Profile getProfile() { return value; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Value value1 = (Value) o; return !(value != null ? !value.equals(value1.value) : value1.value != null); } @Override public int hashCode() { return value != null ? value.hashCode() : 0; } } // Private fields private final Configuration config = null; private final ArrayList<Value> values; public ContactField(String externalId) { super(externalId); this.values = new ArrayList<Value>(); } @Override public void setValues(List<Value> values) { this.values.clear(); this.values.addAll(values); } @Override public void addValue(Value value) { if (values != null && !values.contains(value)) { values.add(value); } } @Override public Value getValue(int index) { return values != null ? values.get(index) : null; } @Override public List<Value> getValues() { return values; } @Override public void removeValue(Value value) { if (values != null && values.contains(value)) { values.remove(value); } } @Override public void clearValues() { values.clear(); } @Override public int valuesCount() { return values != null ? values.size() : 0; } public Configuration getConfiguration() { return config; } }
public class Level { public static final int CENTRE_HEIGHT = 3; // TODO only makes sense for 5x5 so need to fix this!! public static final int CENTRE_WIDTH = 3; public Level (int height, int width, int level, boolean debug) { _level = level; _height = height; _width = width; _debug = debug; _theWorld = new Tile[_height][_width]; for (int i = 0; i < _height; i++) { for (int j = 0; j < _width; j++) { _theWorld[i][j] = new Tile(TileId.EMPTY_SPACE); } } _theWorld[CENTRE_HEIGHT][CENTRE_WIDTH] = new Tile(TileId.NESTED_GRID); } public Level (Tile[][] theWorld, int level, boolean debug) { _theWorld = theWorld; _height = _theWorld.length; _width = _theWorld[0].length; _level = level; _debug = debug; } public final int getLevel () { return _level; } @Override public String toString () { String str = ""; for (int i = 0; i < _height; i++) { for (int j = 0; j < _width; j++) { if ((i == CENTRE_HEIGHT) && (j == CENTRE_WIDTH)) str += TileId.NESTED_GRID; else str += _theWorld[i][j]; } str += "\n"; } return str; } @Override public boolean equals (Object obj) { if (obj == null) return false; if (this == obj) return true; if (getClass() == obj.getClass()) { Level temp = (Level) obj; if ((temp._height == _height) && (temp._width == _width)) { for (int i = 0; i < _height; i++) { for (int j = 0; j < _width; j++) { if (temp._theWorld[i][j].type() != _theWorld[i][j].type()) return false; } } return true; } } return false; } protected Level (Level theGrid) { _height = theGrid._height; _width = theGrid._width; _debug = theGrid._debug; _theWorld = new Tile[_height][_width]; for (int i = 0; i < _height; i++) { for (int j = 0; j < _width; j++) { _theWorld[i][j] = new Tile(theGrid._theWorld[i][j].type()); } } } private Tile[][] _theWorld; private int _level; private int _height; private int _width; private boolean _debug; }
package com.kylinolap.job; import java.io.File; import java.io.FileOutputStream; import java.io.FileReader; import java.io.IOException; import java.io.InputStream; import org.apache.commons.io.IOUtils; import org.apache.hadoop.hbase.util.Pair; import org.apache.maven.model.Model; import org.apache.maven.model.io.xpp3.MavenXpp3Reader; import org.codehaus.plexus.util.FileUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.kylinolap.common.KylinConfig; import com.kylinolap.common.persistence.ResourceTool; import com.kylinolap.common.util.AbstractKylinTestCase; import com.kylinolap.common.util.CliCommandExecutor; import com.kylinolap.cube.CubeInstance; import com.kylinolap.cube.CubeManager; import com.kylinolap.cube.dataGen.FactTableGenerator; import com.kylinolap.job.engine.JobEngineConfig; import com.kylinolap.job.hadoop.hive.SqlHiveDataTypeMapping; import com.kylinolap.job.tools.LZOSupportnessChecker; import com.kylinolap.metadata.MetadataManager; import com.kylinolap.metadata.model.schema.ColumnDesc; import com.kylinolap.metadata.model.schema.TableDesc; public class DeployUtil { @SuppressWarnings("unused") private static final Logger logger = LoggerFactory.getLogger(DeployUtil.class); public static void initCliWorkDir() throws IOException { execCliCommand("rm -rf " + getHadoopCliWorkingDir()); execCliCommand("mkdir -p " + config().getKylinJobLogDir()); } public static void deployMetadata() throws IOException { // install metadata to hbase ResourceTool.reset(config()); ResourceTool.copy(KylinConfig.createInstanceFromUri(AbstractKylinTestCase.LOCALMETA_TEST_DATA), config()); // update cube desc signature. for (CubeInstance cube : CubeManager.getInstance(config()).listAllCubes()) { cube.getDescriptor().setSignature(cube.getDescriptor().calculateSignature()); CubeManager.getInstance(config()).updateCube(cube); } } public static void overrideJobJarLocations() { Pair<File, File> files = getJobJarFiles(); File jobJar = files.getFirst(); File coprocessorJar = files.getSecond(); config().overrideKylinJobJarPath(jobJar.getAbsolutePath()); config().overrideCoprocessorLocalJar(coprocessorJar.getAbsolutePath()); } public static void deployJobJars() throws IOException { Pair<File, File> files = getJobJarFiles(); File jobJar = files.getFirst(); File coprocessorJar = files.getSecond(); File jobJarRemote = new File(config().getKylinJobJarPath()); File jobJarLocal = new File(jobJar.getParentFile(), jobJarRemote.getName()); if (jobJar.equals(jobJarLocal) == false) { FileUtils.copyFile(jobJar, jobJarLocal); } File coprocessorJarRemote = new File(config().getCoprocessorLocalJar()); File coprocessorJarLocal = new File(coprocessorJar.getParentFile(), coprocessorJarRemote.getName()); if (coprocessorJar.equals(coprocessorJarLocal) == false) { FileUtils.copyFile(coprocessorJar, coprocessorJarLocal); } CliCommandExecutor cmdExec = config().getCliCommandExecutor(); cmdExec.copyFile(jobJarLocal.getAbsolutePath(), jobJarRemote.getParent()); cmdExec.copyFile(coprocessorJar.getAbsolutePath(), coprocessorJarRemote.getParent()); } private static Pair<File, File> getJobJarFiles() { String version; try { MavenXpp3Reader pomReader = new MavenXpp3Reader(); Model model = pomReader.read(new FileReader("../pom.xml")); version = model.getVersion(); } catch (Exception e) { throw new RuntimeException(e.getMessage(), e); } File jobJar = new File("../job/target", "kylin-job-" + version + "-job.jar"); File coprocessorJar = new File("../job/storage", "kylin-storage-" + version + "-coprocessor.jar"); return new 
Pair<File, File>(jobJar, coprocessorJar); } public static void overrideJobConf(String confDir) throws IOException { boolean enableLzo = LZOSupportnessChecker.getSupportness(); overrideJobConf(confDir, enableLzo); } public static void overrideJobConf(String confDir, boolean enableLzo) throws IOException { File src = new File(confDir, JobEngineConfig.HADOOP_JOB_CONF_FILENAME + (enableLzo ? ".lzo_enabled" : ".lzo_disabled") + ".xml"); File dst = new File(confDir, JobEngineConfig.HADOOP_JOB_CONF_FILENAME + ".xml"); FileUtils.copyFile(src, dst); } private static void execCliCommand(String cmd) throws IOException { config().getCliCommandExecutor().execute(cmd); } private static String getHadoopCliWorkingDir() { return config().getCliWorkingDir(); } private static KylinConfig config() { return KylinConfig.getInstanceFromEnv(); } static final String TABLE_CAL_DT = "test_cal_dt"; static final String TABLE_CATEGORY_GROUPINGS = "test_category_groupings"; static final String TABLE_KYLIN_FACT = "test_kylin_fact"; static final String TABLE_SELLER_TYPE_DIM = "test_seller_type_dim"; static final String TABLE_SITES = "test_sites"; static final String[] TABLE_NAMES = new String[] { TABLE_CAL_DT, TABLE_CATEGORY_GROUPINGS, TABLE_KYLIN_FACT, TABLE_SELLER_TYPE_DIM, TABLE_SITES }; public static void prepareTestData(String joinType, String cubeName) throws Exception { // data is generated according to cube descriptor and saved in resource store if (joinType.equalsIgnoreCase("inner")) { FactTableGenerator.generate(cubeName, "10000", "1", null, "inner"); } else if (joinType.equalsIgnoreCase("left")) { FactTableGenerator.generate(cubeName, "10000", "0.6", null, "left"); } else { throw new IllegalArgumentException("Unsupported join type : " + joinType); } deployHiveTables(); } private static void deployHiveTables() throws Exception { MetadataManager metaMgr = MetadataManager.getInstance(config()); // scp data files, use the data from hbase, instead of local files File temp = File.createTempFile("temp", ".csv"); temp.createNewFile(); for (String tablename : TABLE_NAMES) { tablename = tablename.toUpperCase(); File localBufferFile = new File(temp.getParent() + "/" + tablename + ".csv"); localBufferFile.createNewFile(); InputStream hbaseDataStream = metaMgr.getStore().getResource("/data/" + tablename + ".csv"); FileOutputStream localFileStream = new FileOutputStream(localBufferFile); IOUtils.copy(hbaseDataStream, localFileStream); hbaseDataStream.close(); localFileStream.close(); config().getCliCommandExecutor().copyFile(localBufferFile.getPath(), config().getCliWorkingDir()); localBufferFile.delete(); } temp.delete(); // create hive tables execHiveCommand(generateCreateTableHql(metaMgr.getTableDesc(TABLE_CAL_DT.toUpperCase()))); execHiveCommand(generateCreateTableHql(metaMgr.getTableDesc(TABLE_CATEGORY_GROUPINGS.toUpperCase()))); execHiveCommand(generateCreateTableHql(metaMgr.getTableDesc(TABLE_KYLIN_FACT.toUpperCase()))); execHiveCommand(generateCreateTableHql(metaMgr.getTableDesc(TABLE_SELLER_TYPE_DIM.toUpperCase()))); execHiveCommand(generateCreateTableHql(metaMgr.getTableDesc(TABLE_SITES.toUpperCase()))); // load data to hive tables // LOAD DATA LOCAL INPATH 'filepath' [OVERWRITE] INTO TABLE tablename execHiveCommand(generateLoadDataHql(TABLE_CAL_DT)); execHiveCommand(generateLoadDataHql(TABLE_CATEGORY_GROUPINGS)); execHiveCommand(generateLoadDataHql(TABLE_KYLIN_FACT)); execHiveCommand(generateLoadDataHql(TABLE_SELLER_TYPE_DIM)); execHiveCommand(generateLoadDataHql(TABLE_SITES)); } private static void 
execHiveCommand(String hql) throws IOException { String hiveCmd = "hive -e \"" + hql + "\""; config().getCliCommandExecutor().execute(hiveCmd); } private static String generateLoadDataHql(String tableName) { return "LOAD DATA LOCAL INPATH '" + config().getCliWorkingDir() + "/" + tableName.toUpperCase() + ".csv' OVERWRITE INTO TABLE " + tableName.toUpperCase(); } private static String generateCreateTableHql(TableDesc tableDesc) { StringBuilder ddl = new StringBuilder(); ddl.append("DROP TABLE IF EXISTS " + tableDesc.getName() + ";\n"); ddl.append("CREATE TABLE " + tableDesc.getName() + "\n"); ddl.append("(" + "\n"); for (int i = 0; i < tableDesc.getColumns().length; i++) { ColumnDesc col = tableDesc.getColumns()[i]; if (i > 0) { ddl.append(","); } ddl.append(col.getName() + " " + SqlHiveDataTypeMapping.getHiveDataType((col.getDatatype())) + "\n"); } ddl.append(")" + "\n"); ddl.append("ROW FORMAT DELIMITED FIELDS TERMINATED BY ','" + "\n"); ddl.append("STORED AS TEXTFILE;"); return ddl.toString(); } }
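/*
 * Editor's illustration (not part of the Kylin DeployUtil above): a minimal, self-contained sketch of the
 * DDL string that a method like generateCreateTableHql() is expected to emit, assuming a hypothetical
 * two-column table. Table and column names below are made up for the example; only the
 * DROP/CREATE/ROW FORMAT shape mirrors the code above.
 */
class HiveDdlSketch {
    // Builds the same "DROP TABLE / CREATE TABLE ... ROW FORMAT DELIMITED" statement pair as DeployUtil,
    // but from plain name/type pairs instead of Kylin's TableDesc/ColumnDesc.
    static String createTableHql(String table, String[][] columns) {
        StringBuilder ddl = new StringBuilder();
        ddl.append("DROP TABLE IF EXISTS ").append(table).append(";\n");
        ddl.append("CREATE TABLE ").append(table).append("\n(\n");
        for (int i = 0; i < columns.length; i++) {
            if (i > 0) ddl.append(",");
            ddl.append(columns[i][0]).append(" ").append(columns[i][1]).append("\n");
        }
        ddl.append(")\n");
        ddl.append("ROW FORMAT DELIMITED FIELDS TERMINATED BY ','\n");
        ddl.append("STORED AS TEXTFILE;");
        return ddl.toString();
    }

    public static void main(String[] args) {
        // Hypothetical table TEST_SITES(SITE_ID int, SITE_NAME string): prints the generated Hive DDL.
        System.out.println(createTableHql("TEST_SITES",
                new String[][] { { "SITE_ID", "int" }, { "SITE_NAME", "string" } }));
    }
}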
package main; import lejos.nxt.SensorPort; import lejos.nxt.addon.tetrix.TetrixControllerFactory; import lejos.nxt.addon.tetrix.TetrixEncoderMotor; import lejos.nxt.addon.tetrix.TetrixMotorController; /** * @author iasmh2015 * */ public class Drive { private TetrixControllerFactory cf; private TetrixMotorController mc; private TetrixEncoderMotor rMot; private TetrixEncoderMotor lMot; public Drive() { cf = new TetrixControllerFactory(SensorPort.S1); mc = cf.newMotorController(); rMot = mc.getEncoderMotor(TetrixMotorController.MOTOR_2); lMot = mc.getEncoderMotor(TetrixMotorController.MOTOR_1); } public void set(float power, float turn) { stop(); if (power == 0.0) { stop(); } else if (power > 0.0) { setForward(Math.abs(power), (turn + 1) / 2); } else { setBackward(Math.abs(power), (turn + 1) / 2); } } private void setForward(float power, float turn) { float lTurn = turn + 1; float rTurn = turn * -1 + 1; int lSpeed = (int) (power * lTurn * 50); if (lSpeed > 100) { lSpeed = 100; } else if (lSpeed < 0) { lSpeed = 0; } int rSpeed = (int) (power * rTurn * 50); if (rSpeed > 100) { rSpeed = 100; } else if (rSpeed < 0) { rSpeed = 0; } lMot.setPower(lSpeed); rMot.setPower(rSpeed); lMot.backward(); rMot.forward(); } private void setBackward(float power, float turn) { float lTurn = turn + 1; float rTurn = turn * -1 + 1; int lSpeed = (int) (power * lTurn * 50); if (lSpeed > 100) { lSpeed = 100; } else if (lSpeed < 0) { lSpeed = 0; } int rSpeed = (int) (power * rTurn * 50); if (rSpeed > 100) { rSpeed = 100; } else if (rSpeed < 0) { rSpeed = 0; } lMot.setPower(lSpeed); rMot.setPower(rSpeed); lMot.forward(); rMot.backward(); } private void stop() { lMot.stop(); rMot.stop(); } }
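/*
 * Editor's illustration (not part of the Drive class above): the speed arithmetic from
 * setForward()/setBackward(), extracted into a hardware-free sketch so it can be run without the
 * leJOS/TETRIX controllers. set(power, turn) maps turn to (turn + 1) / 2 before calling the helpers;
 * the gain factors and the clamp to [0, 100] below mirror that code.
 */
class DriveMathSketch {
    /** Returns {leftPower, rightPower} for a power magnitude and an already-normalized turn value. */
    static int[] wheelPowers(float power, float normalizedTurn) {
        float lTurn = normalizedTurn + 1;       // left gain, as in setForward()/setBackward()
        float rTurn = normalizedTurn * -1 + 1;  // right gain
        int lSpeed = clamp((int) (Math.abs(power) * lTurn * 50));
        int rSpeed = clamp((int) (Math.abs(power) * rTurn * 50));
        return new int[] { lSpeed, rSpeed };
    }

    static int clamp(int v) { return Math.max(0, Math.min(100, v)); }

    public static void main(String[] args) {
        // turn values -1, 0 and 1 normalize to 0, 0.5 and 1; print the resulting wheel powers at full power.
        for (float turn : new float[] { -1f, 0f, 1f }) {
            int[] p = wheelPowers(1f, (turn + 1) / 2);
            System.out.println("turn=" + turn + " -> left=" + p[0] + " right=" + p[1]);
        }
    }
}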
package org.vast.util; public class SpatialExtent { protected String crs; protected double minX = Double.NaN; protected double maxX = Double.NaN; protected double minY = Double.NaN; protected double maxY = Double.NaN; protected double minZ = Double.NaN; protected double maxZ = Double.NaN; public SpatialExtent() { } /** * Returns an exact copy of this SpatialExtent * @return */ public SpatialExtent copy() { SpatialExtent bbox = new SpatialExtent(); bbox.crs = this.crs; bbox.minX = this.minX; bbox.minY = this.minY; bbox.minZ = this.minZ; bbox.maxX = this.maxX; bbox.maxY = this.maxY; bbox.maxZ = this.maxZ; return bbox; } public double[] getCenter() { double[] center = new double[3]; center[0] = (minX + maxX) / 2; center[1] = (minY + maxY) / 2; center[2] = (minZ + maxZ) / 2; return center; } public double getDiagonalDistance() { double dx = (maxX - minX); double dy = (maxY - minY); double dz = (maxZ - minZ); double dx2 = dx * dx; double dy2 = dy * dy; if (Double.isNaN(dz)) return Math.sqrt(dx2 + dy2); double dz2 = dz * dz; return Math.sqrt(dx2 + dy2 + dz2); } public double getMaxDistance() { double dx = (maxX - minX); double dy = (maxY - minY); double dz = (maxZ - minZ); if (Double.isNaN(dz)) return Math.max(dx, dy); else return Math.max(Math.max(dx, dy), dz); } public double getSizeX() { return maxX - minX; } public double getSizeY() { return maxY - minY; } public double getSizeZ() { return maxZ - minZ; } public boolean isNull() { if (Double.isNaN(minX)) return true; if (Double.isNaN(minY)) return true; //if (Double.isNaN(minZ)) return true; if (Double.isNaN(maxX)) return true; if (Double.isNaN(maxY)) return true; //if (Double.isNaN(maxZ)) return true; return false; } public void nullify() { minX = Double.NaN; maxX = Double.NaN; minY = Double.NaN; maxY = Double.NaN; minZ = Double.NaN; maxZ = Double.NaN; } /** * Resize spatial extent so that it contains the given 3D point * Point x,y,z coordinates must be in same Crs as SpatialExtent * @param x * @param y * @param z */ public void resizeToContain(double x, double y, double z) { if (isNull()) { minX = maxX = x; minY = maxY = y; minZ = maxZ = z; return; } if (x < minX) minX = x; else if (x > maxX) maxX = x; if (y < minY) minY = y; else if (y > maxY) maxY = y; if (z < minZ) minZ = z; else if (z > maxZ) maxZ = z; } /** * Combines given extent with this extent * by computing the smallest rectangular * extent that contains both of them. * @param bbox */ public void add(SpatialExtent bbox) { checkCrs(bbox); if (isNull()) { minX = bbox.minX; minY = bbox.minY; minZ = bbox.minZ; maxX = bbox.maxX; maxY = bbox.maxY; maxZ = bbox.maxZ; return; } if (minX > bbox.minX) minX = bbox.minX; if (minY > bbox.minY) minY = bbox.minY; if (minZ > bbox.minZ) minZ = bbox.minZ; if (maxX < bbox.maxX) maxX = bbox.maxX; if (maxY < bbox.maxY) maxY = bbox.maxY; if (maxZ < bbox.maxZ) maxZ = bbox.maxZ; } /** * Finds out if this bbox intersects the given bbox. * @param bbox * @return */ public boolean intersects(SpatialExtent bbox) { double bboxX1 = bbox.getMinX(); double bboxX2 = bbox.getMaxX(); double bboxY1 = bbox.getMinY(); double bboxY2 = bbox.getMaxY(); if (bboxX1 < minX && bboxX2 < minX) return false; if (bboxX1 > maxX && bboxX2 > maxX) return false; if (bboxY1 < minY && bboxY2 < minY) return false; if (bboxY1 > maxY && bboxY2 > maxY) return false; return true; } /** * Finds out if given extent is included in this one.
* Returns true if extent is completely contained * within this extent * @param bbox * @return */ public boolean contains(SpatialExtent bbox) { double bboxX1 = bbox.getMinX(); double bboxX2 = bbox.getMaxX(); double bboxY1 = bbox.getMinY(); double bboxY2 = bbox.getMaxY(); if (bboxX1 < minX || bboxX1 > maxX) return false; if (bboxX2 < minX || bboxX2 > maxX) return false; if (bboxY1 < minY || bboxY1 > maxY) return false; if (bboxY2 < minY || bboxY2 > maxY) return false; return true; } /** * Finds out if given extent crosses this one * Returns true if so. * @param bbox * @return */ public boolean cross(SpatialExtent bbox) { checkCrs(bbox); // TODO cross method return true; } /** * Checks if extents crs are compatible * @throws exception if not * @param bbox * @return */ protected void checkCrs(SpatialExtent bbox) { if (crs != null && bbox.crs != null) if (!crs.equals(bbox.crs)) throw new IllegalStateException("CRS must match"); } public String getCrs() { return crs; } public void setCrs(String crs) { this.crs = crs; } public double getMaxX() { return maxX; } public void setMaxX(double maxX) { this.maxX = maxX; } public double getMaxY() { return maxY; } public void setMaxY(double maxY) { this.maxY = maxY; } public double getMaxZ() { return maxZ; } public void setMaxZ(double maxZ) { this.maxZ = maxZ; } public double getMinX() { return minX; } public void setMinX(double minX) { this.minX = minX; } public double getMinY() { return minY; } public void setMinY(double minY) { this.minY = minY; } public double getMinZ() { return minZ; } public void setMinZ(double minZ) { this.minZ = minZ; } public String toString() { return minX + "," + minY + " - " + maxX + "," + maxY; } }
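/*
 * Editor's illustration (not part of SpatialExtent above): a short usage sketch, assuming the
 * SpatialExtent class above is on the classpath. Coordinate values are arbitrary.
 */
import org.vast.util.SpatialExtent;

class SpatialExtentUsageSketch {
    public static void main(String[] args) {
        SpatialExtent a = new SpatialExtent();
        a.resizeToContain(0, 0, 0);     // first point initialises min == max
        a.resizeToContain(10, 5, 2);    // further points grow the box as needed

        SpatialExtent b = new SpatialExtent();
        b.resizeToContain(8, 4, 1);
        b.resizeToContain(12, 6, 3);

        System.out.println(a.intersects(b)); // true: the boxes overlap in X and Y
        System.out.println(a.contains(b));   // false: b extends past a.maxX
        a.add(b);                            // a now spans (0,0,0) .. (12,6,3)
        System.out.println(a);               // toString prints "minX,minY - maxX,maxY"
    }
}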
package cmput301f17t26.smores.all_activities; import android.Manifest; import android.app.ProgressDialog; import android.content.pm.PackageManager; import android.location.Location; import android.support.annotation.NonNull; import android.support.design.widget.Snackbar; import android.support.v4.app.FragmentActivity; import android.os.Bundle; import android.text.Editable; import android.text.TextWatcher; import android.view.View; import android.widget.Button; import android.widget.CheckBox; import android.widget.CompoundButton; import android.widget.EditText; import android.widget.Toast; import com.google.android.gms.location.FusedLocationProviderClient; import com.google.android.gms.location.LocationServices; import com.google.android.gms.maps.CameraUpdateFactory; import com.google.android.gms.maps.GoogleMap; import com.google.android.gms.maps.OnMapReadyCallback; import com.google.android.gms.maps.SupportMapFragment; import com.google.android.gms.maps.model.BitmapDescriptorFactory; import com.google.android.gms.maps.model.CameraPosition; import com.google.android.gms.maps.model.LatLng; import com.google.android.gms.maps.model.MarkerOptions; import com.google.android.gms.tasks.OnSuccessListener; import java.util.ArrayList; import java.util.HashMap; import java.util.Map; import java.util.UUID; import cmput301f17t26.smores.R; import cmput301f17t26.smores.all_exceptions.LocationNotSetException; import cmput301f17t26.smores.all_models.HabitEvent; import cmput301f17t26.smores.all_storage_controller.HabitController; import cmput301f17t26.smores.all_storage_controller.HabitEventController; import cmput301f17t26.smores.all_storage_controller.RequestController; import cmput301f17t26.smores.all_storage_controller.UserController; import cmput301f17t26.smores.utils.DateUtils; import static cmput301f17t26.smores.all_activities.HabitEventDetailsActivity.LOCATION_REQUEST_CODE; public class MapsActivity extends FragmentActivity implements OnMapReadyCallback { private GoogleMap mMap; private ArrayList<HabitEvent> userHabitEvents; private ArrayList<HabitEvent> friendHabitEvents; private CheckBox mMyself, mFriendsCheckbox; private EditText mRadiusField; private Button mSearch; private Location currentLocation; private FusedLocationProviderClient mFusedLocationClient; private Thread worker; private HashMap<UUID, String> friendHabitTitles; private HashMap<UUID, String> friendUsernames; private static final int FROM_ONCREATE = 0; private static final int FROM_UPDATE = 1; @Override public void onResume() { super.onResume(); } @Override public void onPause() { super.onPause(); } @Override public void onBackPressed() { finish(); // code here to show dialog super.onBackPressed(); // optional depending on your needs } @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_maps); // Obtain the SupportMapFragment and get notified when the map is ready to be used. 
SupportMapFragment mapFragment = (SupportMapFragment) getSupportFragmentManager() .findFragmentById(R.id.map); mapFragment.getMapAsync(this); userHabitEvents = new ArrayList<>(); userHabitEvents.addAll(HabitEventController.getHabitEventController(this).getFilteredHabitEvents()); mFriendsCheckbox = (CheckBox) findViewById(R.id.friendsCheckbox); mFriendsCheckbox.setChecked(false); mMyself = (CheckBox) findViewById(R.id.meCheckbox); mMyself.setChecked(true); mRadiusField = (EditText) findViewById(R.id.radiusField); mSearch = (Button) findViewById(R.id.searchButton); mSearch.setEnabled(false); mFusedLocationClient = LocationServices.getFusedLocationProviderClient(this); if (checkSelfPermission(Manifest.permission.ACCESS_COARSE_LOCATION) == PackageManager.PERMISSION_GRANTED) { getLocation(); } else { String[] permissionRequested = {Manifest.permission.ACCESS_COARSE_LOCATION}; requestPermissions(permissionRequested, LOCATION_REQUEST_CODE); } } /** * Manipulates the map once available. * This callback is triggered when the map is ready to be used. * This is where we can add markers or lines, add listeners or move the camera. In this case, * we just add a marker near Sydney, Australia. * If Google Play services is not installed on the device, the user will be prompted to install * it inside the SupportMapFragment. This method will only be triggered once the user has * installed Google Play services and returned to the app. */ @Override public void onMapReady(GoogleMap googleMap) { mMap = googleMap; mMap.clear(); loadData(FROM_ONCREATE); mRadiusField.addTextChangedListener(new TextWatcher() { @Override public void beforeTextChanged(CharSequence s, int start, int count, int after) { } @Override public void onTextChanged(CharSequence s, int start, int before, int count) { if (!s.toString().equals("")) { mSearch.setEnabled(true); } else { mSearch.setEnabled(false); loadData(FROM_UPDATE); } } @Override public void afterTextChanged(Editable s) { } }); mSearch.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { mMap.clear(); loadData(FROM_UPDATE); } }); mMyself.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() { @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { mMap.clear(); loadData(FROM_UPDATE); } }); mFriendsCheckbox.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() { @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { mMap.clear(); loadData(FROM_UPDATE); } }); /*LatLng sydney = new LatLng(-34, 151); mMap.addMarker(new MarkerOptions().position(sydney).title("Marker in Sydney")); mMap.moveCamera(CameraUpdateFactory.newLatLng(sydney));*/ } private void loadMyMarkers() { if (mMyself.isChecked()) { for (HabitEvent habitEvent: userHabitEvents) { try { if (mRadiusField.getText().toString().trim().equals("")) { String fullTitle = getMarkerString(habitEvent); mMap.addMarker(new MarkerOptions().position(habitEvent.getLatLng()).title(fullTitle).icon(BitmapDescriptorFactory.fromResource(R.drawable.ic_mode_edit_black_24dp))); } else { if (currentLocation.distanceTo(habitEvent.getLocation()) <= 1000 * Float.valueOf(mRadiusField.getText().toString())){ String fullTitle = getMarkerString(habitEvent); mMap.addMarker(new MarkerOptions().position(habitEvent.getLatLng()).title(fullTitle).icon(BitmapDescriptorFactory.fromResource(R.drawable.ic_mode_edit_black_24dp))); } else { String fullTitle = getMarkerString(habitEvent); mMap.addMarker(new 
MarkerOptions().position(habitEvent.getLatLng()).title(fullTitle).icon(BitmapDescriptorFactory.fromResource(R.drawable.ic_mode_edit_black_24dp)).alpha(0.2f)); } } } catch (LocationNotSetException e) { } } } } private void loadFriendMarkers() { if (mFriendsCheckbox.isChecked()) { for (HabitEvent habitEvent: friendHabitEvents) { try { if (mRadiusField.getText().toString().trim().equals("")) { String fullTitle = getMarkerStringFriend(habitEvent); mMap.addMarker(new MarkerOptions().position(habitEvent.getLatLng()).title(fullTitle).icon(BitmapDescriptorFactory.defaultMarker(BitmapDescriptorFactory.HUE_CYAN)).snippet(getMarkerUserNameFriend(habitEvent))); } else { if (currentLocation.distanceTo(habitEvent.getLocation()) <= 1000 * Float.valueOf(mRadiusField.getText().toString())){ String fullTitle = getMarkerStringFriend(habitEvent); mMap.addMarker(new MarkerOptions().position(habitEvent.getLatLng()).title(fullTitle).icon(BitmapDescriptorFactory.defaultMarker(BitmapDescriptorFactory.HUE_CYAN)).snippet(getMarkerUserNameFriend(habitEvent))); } else { String fullTitle = getMarkerStringFriend(habitEvent); mMap.addMarker(new MarkerOptions().position(habitEvent.getLatLng()).title(fullTitle).icon(BitmapDescriptorFactory.defaultMarker(BitmapDescriptorFactory.HUE_CYAN)).alpha(0.2f).snippet(getMarkerUserNameFriend(habitEvent))); //grey } } } catch (LocationNotSetException e) { } } } } @NonNull private String getMarkerString(HabitEvent habitEvent) { String Habit_title = HabitController.getHabitController(this).getHabitTitleByHabitID(habitEvent.getHabitID()); String Habit_dateCompleted = DateUtils.getStringOfDate(habitEvent.getDate()); return Habit_title + " | " + Habit_dateCompleted; } private String getMarkerStringFriend(HabitEvent habitEvent) { String Habit_title = friendHabitTitles.get(habitEvent.getID()); String Habit_dateCompleted = DateUtils.getStringOfDate(habitEvent.getDate()); return Habit_title + " | " + Habit_dateCompleted; } private String getMarkerUserNameFriend(HabitEvent habitEvent) { String friendUsername = friendUsernames.get(habitEvent.getID()); return friendUsername; } private void loadData(final int called_from) { final ProgressDialog progressDialog = new ProgressDialog(this); progressDialog.setProgressStyle(ProgressDialog.STYLE_SPINNER); progressDialog.setMessage("Now loading maps..."); progressDialog.setIndeterminate(true); progressDialog.setCanceledOnTouchOutside(false); progressDialog.show(); worker = new Thread(new Runnable() { @Override public void run() { if (called_from == FROM_ONCREATE) { UserController.getUserController(MapsActivity.this).updateFollowingList(); friendHabitEvents = UserController.getUserController(MapsActivity.this).mostRecentFriendsHabitEvents(); } friendHabitTitles = new HashMap<>(); friendUsernames = new HashMap<>(); for (HabitEvent habitEvent: friendHabitEvents) { friendHabitTitles.put(habitEvent.getID(), RequestController.getRequestController(MapsActivity.this).getHabitTitleByHabitID(habitEvent.getUserID(), habitEvent.getHabitID())); friendUsernames.put(habitEvent.getID(), UserController.getUserController(MapsActivity.this).getUsernameByID(habitEvent.getUserID())); } MapsActivity.this.runOnUiThread(new Runnable() { @Override public void run() { loadMyMarkers(); loadFriendMarkers(); progressDialog.dismiss(); } }); } }); worker.start(); } public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) { super.onRequestPermissionsResult(requestCode, permissions, grantResults); if (requestCode == LOCATION_REQUEST_CODE) { if 
(grantResults[0] == PackageManager.PERMISSION_GRANTED) { Toast.makeText(this, "Request for location granted", Toast.LENGTH_LONG).show(); getLocation(); } else { Toast.makeText(this, "Unable to request location services", Toast.LENGTH_LONG).show(); } } } private void getLocation() { try { mFusedLocationClient.getLastLocation().addOnSuccessListener(MapsActivity.this, new OnSuccessListener<Location>() { @Override public void onSuccess(Location location) { if (location != null) { currentLocation = location; CameraPosition cameraPosition = new CameraPosition.Builder() .target(new LatLng(currentLocation.getLatitude(), currentLocation.getLongitude())) .zoom(14) .bearing(90) .tilt(30) .build(); mMap.animateCamera(CameraUpdateFactory.newCameraPosition(cameraPosition)); } else { Toast.makeText(getApplicationContext(), "Turn on location services to use maps.", Toast.LENGTH_SHORT).show(); } } }); } catch (SecurityException e) { String[] permissionRequested = {Manifest.permission.ACCESS_COARSE_LOCATION}; requestPermissions(permissionRequested, LOCATION_REQUEST_CODE); } } }
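/*
 * Editor's illustration (not part of MapsActivity above): the radius filter used by
 * loadMyMarkers()/loadFriendMarkers(), reduced to the distance test alone. The radius field is entered
 * in kilometres and Location.distanceTo(...) returns metres, hence the factor of 1000. The method and
 * parameter names here are made up; only the arithmetic mirrors the activity code.
 */
class RadiusFilterSketch {
    // true if an event is within the entered radius of the current location; events outside the radius
    // are still drawn by the activity, but dimmed with alpha 0.2f.
    static boolean withinRadius(float distanceToEventMetres, String radiusFieldText) {
        if (radiusFieldText.trim().isEmpty()) {
            return true; // no radius entered: every marker shown at full opacity
        }
        return distanceToEventMetres <= 1000 * Float.valueOf(radiusFieldText);
    }

    public static void main(String[] args) {
        System.out.println(withinRadius(2500f, "3"));  // true: 2.5 km is inside a 3 km radius
        System.out.println(withinRadius(2500f, "2"));  // false: outside the 2 km radius
        System.out.println(withinRadius(2500f, ""));   // true: empty field disables filtering
    }
}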
package io.spine.server.delivery; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterators; import com.google.common.truth.Truth8; import com.google.protobuf.Timestamp; import com.google.protobuf.util.Durations; import io.spine.base.Time; import io.spine.server.DefaultRepository; import io.spine.server.delivery.given.ConsecutiveNumberProcess; import io.spine.server.delivery.given.ConsecutiveProjection; import io.spine.server.delivery.given.CounterCatchUp; import io.spine.server.delivery.given.CounterView; import io.spine.server.delivery.given.WhatToCatchUp; import io.spine.server.entity.Repository; import io.spine.test.delivery.ConsecutiveNumberView; import io.spine.test.delivery.EmitNextNumber; import io.spine.test.delivery.NumberAdded; import io.spine.testing.SlowTest; import io.spine.testing.server.blackbox.BlackBoxBoundedContext; import io.spine.testing.server.blackbox.SingleTenantBlackBoxContext; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Nested; import org.junit.jupiter.api.Test; import java.time.Instant; import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.Optional; import java.util.Set; import java.util.concurrent.Callable; import java.util.stream.IntStream; import static com.google.common.truth.Truth.assertThat; import static com.google.protobuf.util.Timestamps.subtract; import static io.spine.base.Time.currentTime; import static io.spine.server.delivery.CatchUpStatus.COMPLETED; import static io.spine.server.delivery.TestRoutines.findView; import static io.spine.server.delivery.TestRoutines.post; import static io.spine.server.delivery.given.WhatToCatchUp.catchUpAll; import static io.spine.server.delivery.given.WhatToCatchUp.catchUpOf; import static io.spine.testing.Tests.nullRef; import static java.lang.String.format; import static java.util.stream.Collectors.toList; import static org.junit.jupiter.api.Assertions.fail; /** * Tests for the {@linkplain io.spine.server.projection.ProjectionRepository#catchUp(Timestamp, Set) * projection catch-up} functionality. * * <p>The test routines are designed to check both small and big use-cases, including * the full catch-up. To deal with the different wall-clock providers, some of the tests * configure the {@linkplain Time#currentTime() time provider} to return the values with * millisecond precision. It is required to test the catch-up in the scenarios close to the legacy * applications, as at that time there were no emulation of the nanosecond time resolution. * * <p>As the downstream libraries, such as Spine Google Cloud library would want to run the same * tests under their specific conditions, the big and slow catch-up tests are made {@code public}. * In this way such tests may be overridden and disabled, if needed. */ @SlowTest @DisplayName("Catch-up of projection instances should") @SuppressWarnings("WeakerAccess") // see the class-level documentation. 
public class CatchUpTest extends AbstractDeliveryTest { @Override @AfterEach public void tearDown() { super.tearDown(); Time.resetProvider(); } @Test @DisplayName("given the time is provided with nanosecond resolution, catch up " + "only particular instances by their IDs") public void withNanosByIds() throws InterruptedException { testCatchUpByIds(); } @Test @DisplayName("given the time is provided with nanosecond resolution, " + "catch up all of projection instances" + "and respect the order of the delivered events") public void withNanosAllInOrder() throws InterruptedException { testCatchUpAll(); } @Test @DisplayName("given the time is provided with millisecond resolution, " + "catch up only particular instances by their IDs") public void withMillisByIds() throws InterruptedException { setupMillis(); testCatchUpByIds(); } @Test @DisplayName("given the time is provided with millisecond resolution, catch up all " + "of projection instances and respect the order of the delivered events") public void withMillisAllInOrder() throws InterruptedException { setupMillis(); testCatchUpAll(); } @Nested @DisplayName("allow catch-up") class AllowCatchUp { @Test @DisplayName("if the event store is empty") void onEmptyEventStore() { CounterCatchUp counterCatchUp = catchUpForCounter(); counterCatchUp.catchUp(WhatToCatchUp.catchUpAll(aMinuteAgo())); } @Test @DisplayName("of the same instance, if the previous catch-up is already completed") void ifPreviousCatchUpCompleted() { CounterCatchUp.addOngoingCatchUpRecord(catchUpAll(aMinuteAgo()), COMPLETED); CounterCatchUp counterCatchUp = catchUpForCounter(); counterCatchUp.catchUp(WhatToCatchUp.catchUpAll(aMinuteAgo())); } } @Nested @DisplayName("not allow simultaneous catch-up") class NotAllowSimultaneousCatchUp { private static final String TARGET_ID = "some target"; @Test @DisplayName("if catching up of all repository instances has started previously") void ifCatchUpAllStartedPreviously() { CounterCatchUp.addOngoingCatchUpRecord(catchUpAll(aMinuteAgo())); CounterCatchUp counterCatchUp = catchUpForCounter(); for (String target : counterCatchUp.targets()) { assertCatchUpAlreadyStarted(counterCatchUp, target); } } @Test @DisplayName("of the same repository instances") void ofSameInstances() { CounterCatchUp.addOngoingCatchUpRecord(catchUpOf(TARGET_ID, aMinuteAgo())); CounterCatchUp counterCatchUp = new CounterCatchUp(TARGET_ID); assertCatchUpAlreadyStarted(counterCatchUp, TARGET_ID); } @Test @DisplayName("of all instances if at least one catch-up of an instance is in progress") void ofAllIfOneAlreadyStarted() { CounterCatchUp.addOngoingCatchUpRecord(catchUpOf(TARGET_ID, aMinuteAgo())); CounterCatchUp counterCatchUp = new CounterCatchUp(TARGET_ID); try { counterCatchUp.catchUp(catchUpAll(aMinuteAgo())); fail("It must not be possible to start catching up all the instances," + " while some instance is already catching up."); } catch (CatchUpAlreadyStartedException exception) { assertThat(exception.projectionStateType()).isEqualTo(CounterView.projectionType()); } } private void assertCatchUpAlreadyStarted(CounterCatchUp counterCatchUp, String target) { try { counterCatchUp.catchUp(catchUpOf(target, aMinuteAgo())); fail(format("Simultaneous catch-up was somehow started for ID `%s`.", target)); } catch (CatchUpAlreadyStartedException exception) { assertThat(exception.projectionStateType()).isEqualTo(CounterView.projectionType()); assertThat(exception.requestedIds()).contains(target); } } } private static CounterCatchUp catchUpForCounter() { return new 
CounterCatchUp("first", "second", "third", "fourth"); } private static void testCatchUpByIds() throws InterruptedException { changeShardCountTo(2); CounterCatchUp counterCatchUp = catchUpForCounter(); List<NumberAdded> events = counterCatchUp.generateEvents(200); Timestamp aWhileAgo = subtract(currentTime(), Durations.fromHours(1)); counterCatchUp.addHistory(aWhileAgo, events); // Round 1. Fight! int initialWeight = 1; CounterView.changeWeightTo(initialWeight); counterCatchUp.dispatch(events, 20); String[] targets = counterCatchUp.targets(); int totalTargets = targets.length; List<Integer> initialTotals = counterCatchUp.counterValues(); int sumInRound = events.size() / totalTargets * initialWeight; IntStream sums = IntStream.iterate(sumInRound, i -> i) .limit(totalTargets); assertThat(initialTotals).isEqualTo(sums.boxed() .collect(toList())); // Round 2. Catch up the first and the second and fight! int newWeight = 100; CounterView.changeWeightTo(newWeight); counterCatchUp .dispatchWithCatchUp(events, 20, catchUpOf(targets[0], aWhileAgo), catchUpOf(targets[1], aMinuteAgo())); List<Integer> totalsAfterCatchUp = counterCatchUp.counterValues(); int firstSumExpected = sumInRound * newWeight / initialWeight * 3; int secondSumExpected = sumInRound * newWeight / initialWeight * 2; int untouchedSum = sumInRound + sumInRound * newWeight / initialWeight; List<Integer> expectedTotals = ImmutableList.of(firstSumExpected, secondSumExpected, untouchedSum, untouchedSum); assertThat(totalsAfterCatchUp).isEqualTo(expectedTotals); } @SuppressWarnings("OverlyLongMethod") // Complex environment setup. private static void testCatchUpAll() throws InterruptedException { ConsecutiveProjection.usePositives(); String[] ids = {"erste", "zweite", "dritte", "vierte"}; int totalCommands = 300; List<EmitNextNumber> commands = generateEmissionCommands(totalCommands, ids); changeShardCountTo(3); ConsecutiveProjection.Repo projectionRepo = new ConsecutiveProjection.Repo(); Repository<String, ConsecutiveNumberProcess> pmRepo = DefaultRepository.of(ConsecutiveNumberProcess.class); SingleTenantBlackBoxContext ctx = BlackBoxBoundedContext.singleTenant() .with(projectionRepo) .with(pmRepo); List<Callable<Object>> jobs = asPostCommandJobs(ctx, commands); post(jobs, 1); int positiveExpected = totalCommands / ids.length; List<Integer> positiveValues = ImmutableList.of(positiveExpected, positiveExpected, positiveExpected, positiveExpected); List<Integer> actualLastValues = readLastValues(projectionRepo, ids); assertThat(actualLastValues).isEqualTo(positiveValues); ConsecutiveProjection.useNegatives(); String excludedTarget = ids[0]; projectionRepo.excludeFromRouting(excludedTarget); List<Callable<Object>> sameWithCatchUp = ImmutableList.<Callable<Object>>builder() .addAll(jobs) .add(() -> { projectionRepo.catchUpAll(aMinuteAgo()); return nullRef(); }) .build(); post(sameWithCatchUp, 20); int negativeExpected = -1 * positiveExpected * 2; Truth8.assertThat(projectionRepo.find(excludedTarget)) .isEmpty(); for (int idIndex = 1; idIndex < ids.length; idIndex++) { String identifier = ids[idIndex]; Optional<ConsecutiveProjection> maybeState = projectionRepo.find(identifier); Truth8.assertThat(maybeState) .isPresent(); ConsecutiveNumberView state = maybeState.get() .state(); assertThat(state.getLastValue()).isEqualTo(negativeExpected); } } private static Timestamp aMinuteAgo() { return subtract(currentTime(), Durations.fromMinutes(1)); } private static List<Integer> readLastValues(ConsecutiveProjection.Repo repo, String[] ids) { return 
Arrays.stream(ids) .map((id) -> findView(repo, id).state() .getLastValue()) .collect(toList()); } private static List<EmitNextNumber> generateEmissionCommands(int howMany, String[] ids) { Iterator<String> idIterator = Iterators.cycle(ids); List<EmitNextNumber> commands = new ArrayList<>(howMany); for (int i = 0; i < howMany; i++) { commands.add(EmitNextNumber.newBuilder() .setId(idIterator.next()) .vBuild()); } return commands; } private static List<Callable<Object>> asPostCommandJobs(SingleTenantBlackBoxContext ctx, List<EmitNextNumber> commands) { return commands.stream() .map(cmd -> (Callable<Object>) () -> ctx.receivesCommand(cmd)) .collect(toList()); } private static void setupMillis() { Time.setProvider(new WithMillisOnlyResolution()); } /** * A time provider which provides the current time based upon JDK's wall clock, i.e. without * the emulated nanoseconds. */ private static class WithMillisOnlyResolution implements Time.Provider { @Override public Timestamp currentTime() { Instant now = Instant.now(); Timestamp result = Timestamp.newBuilder() .setSeconds(now.getEpochSecond()) .setNanos(now.getNano()) .build(); return result; } } }
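/*
 * Editor's note (not part of CatchUpTest above): the expected totals in testCatchUpByIds() follow from
 * simple batch counting, assuming (as the expected values suggest) that a catch-up resets a projection
 * and replays every event stored since the requested timestamp at the weight in force during replay.
 * Each of the four targets sees 50 events per batch (200 events round-robin over 4 ids). A minimal
 * arithmetic check of that reading:
 */
class CatchUpTotalsSketch {
    public static void main(String[] args) {
        int sumInRound = 200 / 4 * 1;  // 50: one batch per target at the initial weight of 1
        int newWeight = 100;
        int caughtUpFromAnHourAgo = sumInRound * newWeight * 3;  // history + round 1 + round 2 replayed -> 15000
        int caughtUpFromAMinuteAgo = sumInRound * newWeight * 2; // round 1 + round 2 replayed -> 10000
        int untouched = sumInRound + sumInRound * newWeight;     // 50 kept from round 1 + 5000 from round 2 -> 5050
        System.out.println(caughtUpFromAnHourAgo + " " + caughtUpFromAMinuteAgo + " " + untouched);
    }
}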
package org.transscript.antlr; import java.io.IOException; import java.io.Reader; import java.io.StringReader; import java.util.ArrayDeque; import java.util.HashMap; import java.util.List; import java.util.Optional; import java.util.function.Consumer; import org.antlr.v4.runtime.Parser; import org.antlr.v4.runtime.ParserRuleContext; import org.antlr.v4.runtime.Token; import org.antlr.v4.runtime.tree.ErrorNode; import org.antlr.v4.runtime.tree.ParseTreeListener; import org.antlr.v4.runtime.tree.TerminalNode; import org.transscript.compiler.parser.TransScript.TransScript_xterm_xsort; import org.transscript.runtime.BufferSink; import org.transscript.runtime.ConstructionDescriptor; import org.transscript.runtime.Sink; import org.transscript.runtime.StringTerm; import org.transscript.runtime.Variable; import org.transscript.runtime.utils.Pair; import org.transscript.runtime.utils.Scoping; import org.transscript.runtime.utils.StringUtils; import org.transscript.tool.MetaBufferSink; import org.transscript.tool.MetaSink; import org.transscript.tool.MutableInt; /** * Convert custom ANTLR parse tree events to {@link Sink} events. * * <p>Works with meta parsers annotated with special actions driving * the mapping between grammar and terms. * * <p>This listener supports two kinds of sink: {@link Sink} and {@link MetaSink}. * {@link MetaSink} support is only needed for parsing embedded terms during meta-compilation. * Non-Java runtimes don't need to support {@link MetaSink}. * * @author Lionel Villard */ public class ToSinkListener implements ParseTreeListener { // Static helper public static void fireEnterZOM(List<ParseTreeListener> listeners, ParserRuleContext _ctx) { fire(listeners, _ctx, l -> ((ToSinkListener) l).enterZOM(_ctx)); } public static void fireExitZOM(List<ParseTreeListener> listeners, ParserRuleContext _ctx) { fire(listeners, _ctx, l -> ((ToSinkListener) l).exitZOM(_ctx)); } public static void fireEnterOPT(List<ParseTreeListener> listeners, ParserRuleContext _ctx) { fire(listeners, _ctx, l -> ((ToSinkListener) l).enterOPT(_ctx)); } public static void fireExitOPT(List<ParseTreeListener> listeners, ParserRuleContext _ctx) { fire(listeners, _ctx, l -> ((ToSinkListener) l).exitOPT(_ctx)); } public static void fireEnterAlt(List<ParseTreeListener> listeners, ParserRuleContext _ctx) { fire(listeners, _ctx, l -> ((ToSinkListener) l).enterAlt(_ctx)); } public static void fireEnterAlt(List<ParseTreeListener> listeners, ParserRuleContext _ctx, String name) { fire(listeners, _ctx, l -> ((ToSinkListener) l).enterAlt(_ctx, name)); } public static void fireExitAlt(List<ParseTreeListener> listeners, ParserRuleContext _ctx) { fire(listeners, _ctx, l -> ((ToSinkListener) l).exitAlt(_ctx)); } public static void fireHide(List<ParseTreeListener> listeners, ParserRuleContext _ctx) { fire(listeners, _ctx, l -> ((ToSinkListener) l).hide(_ctx)); } public static void fireTerm(List<ParseTreeListener> listeners, ParserRuleContext _ctx, String type) { fire(listeners, _ctx, l -> ((ToSinkListener) l).term(_ctx, type)); } public static void fireTail(List<ParseTreeListener> listeners, ParserRuleContext _ctx) { fire(listeners, _ctx, l -> ((ToSinkListener) l).tail(_ctx)); } public static void fireEmbed(List<ParseTreeListener> listeners, ParserRuleContext _ctx) { fire(listeners, _ctx, l -> ((ToSinkListener) l).embed(_ctx)); } public static void fireEnterSymbol(List<ParseTreeListener> listeners, ParserRuleContext _ctx) { fire(listeners, _ctx, l -> ((ToSinkListener) l).enterSymbol(_ctx)); } public static void 
fireExitSymbol(List<ParseTreeListener> listeners, ParserRuleContext _ctx) { fire(listeners, _ctx, l -> ((ToSinkListener) l).exitSymbol(_ctx)); } public static void fireEnterBinder(List<ParseTreeListener> listeners, ParserRuleContext _ctx, String name) { fire(listeners, _ctx, l -> ((ToSinkListener) l).enterBinder(_ctx, name)); } public static void fireExitBinder(List<ParseTreeListener> listeners, ParserRuleContext _ctx) { fire(listeners, _ctx, l -> ((ToSinkListener) l).exitBinder(_ctx)); } public static void fireEnterBinds(List<ParseTreeListener> listeners, ParserRuleContext _ctx, String names) { fire(listeners, _ctx, l -> ((ToSinkListener) l).enterBinds(_ctx, names)); } public static void fireExitBinds(List<ParseTreeListener> listeners, ParserRuleContext _ctx) { fire(listeners, _ctx, l -> ((ToSinkListener) l).exitBinds(_ctx)); } private static void fire(List<ParseTreeListener> listeners, ParserRuleContext _ctx, Consumer<ParseTreeListener> apply) { if (listeners != null) listeners.stream().filter(l -> l instanceof ToSinkListener).forEach(apply); } // Variable stack marker final static private Pair<String, Variable> MARKER = new Pair<>(null, null); // Some enums enum State { PARSE, START_EMBED, PARSE_EMBED, NAME, CONCRETE, END_CONCRETE, END_CONCRETE_TERM, SKIP } enum TokenKind { STRING, NUMERIC, METAVAR } // The state. /** TransScript sink */ private Sink sink; /** The List construction descriptors */ final protected ConstructionDescriptor nilDesc; final protected ConstructionDescriptor consDesc; private ArrayDeque<Pair<MutableInt, String>> consCount; private ArrayDeque<ParserRuleContext> ruleContext; /** The ANTLR4 parser */ private Parser parser; /** Parsing transscript? */ final private boolean parsets; /** Constructor name prefix */ private String prefix; /** Language specific meta variable prefix */ private String metachar; /** Whether the next token represent the tail of a list. */ private boolean tail; /** When non-null, indicates received tokens are parts of a name, to associate to this id */ private String binderId; /** Name being constructed. Whitespace are ignored. */ private String binderName; /** Map binder id to binder name */ private HashMap<String, String> binderNames; /** In scope bound variables. */ private Scoping bounds; /** In scope fresh variables. */ private Scoping freshes; /** Current token sort */ private TokenKind kind; /** Meta term type */ private String termType; /** Listener state */ private State state; /** * Create an TS ANTLR listener * @param sink where to send events * @param prefix prefix to apply to constructor names * @param metachar language specific meta variable prefix * @param parser using this listener * @param bounds bound variables. Modifiable. * @param freshes global fresh variables. Modifiable. 
public ToSinkListener(Sink sink, String prefix, String metachar, Parser parser, Scoping bounds, Scoping freshes) { this.sink = sink; this.consCount = new ArrayDeque<>(); this.ruleContext = new ArrayDeque<>(); this.parser = parser; this.prefix = prefix; this.metachar = metachar; this.state = State.PARSE; this.kind = TokenKind.STRING; this.binderNames = new HashMap<>(); this.bounds = bounds; this.freshes = freshes; this.nilDesc = sink.context().lookupDescriptor("Nil"); this.consDesc = sink.context().lookupDescriptor("Cons"); this.parsets = prefix.equals("TransScript_"); } /** * Send location properties */ protected void sendLocation(Token token) { // No location until crsx4 can compile crsx4 // int column = token.getCharPositionInLine(); // int line = token.getLine(); // return Util.wrapWithLocation(sink, c, parser.getInputStream().getSourceName(), line, column); } /** * Receive the notification the next sequence of tokens are list items. * * <p>Constructs nested Cons(..., ...) and Nil terms. * * @param context */ public void enterZOM(ParserRuleContext context) { ParserRuleContext parentCtx = ruleContext.peek(); String ruleName = parser.getRuleNames()[parentCtx.getRuleIndex()]; String type = fixupType(ruleName); consCount.push(new Pair<>(new MutableInt(0), type)); tail = false; } /** * Closing list. * @param context */ public void exitZOM(ParserRuleContext context) { if (!tail) { if (metasink() != null) { ParserRuleContext parentCtx = ruleContext.peek(); String ruleName = parser.getRuleNames()[parentCtx.getRuleIndex()]; String type = fixupType(ruleName); metasink().type(type); } sink.start(nilDesc).end(); } int count = consCount.pop().fst.v; while (count-- > 0) { sink.end(); } tail = false; } /** * Receive the notification the next token is optional. * @param context */ public void enterOPT(ParserRuleContext context) { enterZOM(context); } /** * Closing optional token * @param context */ public void exitOPT(ParserRuleContext context) { exitZOM(context); } /** * Start a rule alternative * * Generate a constructor of the form <prefix><rulename> * * @param context */ public void enterAlt(ParserRuleContext context) { switch (state) { case CONCRETE : break; default : ParserRuleContext parentCtx = ruleContext.peek(); String ruleName = parser.getRuleNames()[parentCtx.getRuleIndex()]; sendLocation(parentCtx.getStart()); if (metasink() != null) metasink().type(fixupType(ruleName)); sink = sink.start(sink.context().lookupDescriptor(prefix + ruleName)); } } /** * Start a rule alternative of a given name * * Generate a constructor of the form <prefix><rulename>_A<name> * * @param context * @param name */ public void enterAlt(ParserRuleContext context, String name) { ParserRuleContext parentCtx = ruleContext.peek(); String ruleName = parser.getRuleNames()[parentCtx.getRuleIndex()]; if (isConcrete(ruleName, name)) state = State.CONCRETE; else { sendLocation(parentCtx.getStart()); if (metasink() != null) metasink().type(fixupType(ruleName)); sink.start(sink.context().lookupDescriptor(prefix + ruleName + "_A" + name)); } } /** * Closing alternative * @param context */ public void exitAlt(ParserRuleContext context) { switch (state) { case END_CONCRETE : state = State.END_CONCRETE_TERM; break; case END_CONCRETE_TERM : state = State.PARSE; break; default : sink.end();// end construction } } /** * Receive the notification the next token is an embedded program.
* @param context */ public void embed(ParserRuleContext context) { state = State.START_EMBED; } /** * Receive the notification the next token is a metavariable of the given type * Only needed when parsing meta-terms. */ public void term(ParserRuleContext _ctx, String type) { termType = fixupType(type); kind = TokenKind.METAVAR; } /** * Receive the notification the next token matches list tail * Only needed when parsing meta-terms. */ public void tail(ParserRuleContext context) { tail = true; } /** * Hide next terminal * @param context */ public void hide(ParserRuleContext context) { state = State.SKIP; } /** * Receive the notification the next tokens are part of a binder name * @param context * @param name to associate to the binder */ public void enterBinder(ParserRuleContext context, String name) { assert!tail : "Cannot declare a binder is a list tail"; assert binderId == null : "Cannot nest binders"; state = State.NAME; binderId = name.trim(); binderName = ""; } /** * Receive the notification the binder name is complete * @param context */ public void exitBinder(ParserRuleContext context) { assert state == State.NAME; assert!tail : "Cannot declare a binder is a list tail"; assert binderId != null : "Missing enterBinder notification"; binderNames.put(binderId, binderName); binderId = null; binderName = null; state = State.PARSE; } /** * Receive the notification the next tokens declare a binder * @param context */ public void enterSymbol(ParserRuleContext context) { assert!tail : "Cannot declare a binder is a list tail"; assert binderId == null : "Cannot nest binders"; binderName = ""; state = State.NAME; } /** * Receive the notification all tokens parts of a binder name have been received * @param context */ public void exitSymbol(ParserRuleContext context) { assert state == State.NAME; assert!tail : "Cannot declare a name in a list tail"; if (kind == TokenKind.METAVAR) { // received a metavariable matching a syntactic variable. String metaname = fixupMetachar(binderName); metasink().startMetaApplication(metaname); if (termType != null) metasink().type(termType); metasink().endMetaApplication(); kind = TokenKind.STRING; } else { // This is a binder occurrence. Resolve and emit assert bounds != null; Optional<Pair<String, Variable>> variable = bounds.stream().filter(pair -> { if (pair == MARKER) return false; return pair.fst.equals(binderName); }).findFirst(); if (!variable.isPresent()) { // Try among fresh variables variable = freshes.stream().filter(pair -> { return pair.fst.equals(binderName); }).findFirst(); } if (!variable.isPresent()) { // Create new fresh variable. // For now all variables are of type String variable = Optional.of(new Pair<>(binderName, StringTerm.varStringTerm(sink.context(), binderName))); freshes.push(variable.get()); } // Can now emit variable sink = sink.use(variable.get().snd); } state = State.PARSE; } /** * Binds the name associated to the given identifier * @param context * @param id space-separated ids. 
*/ public void enterBinds(ParserRuleContext context, String names) { String[] snames = names.trim().split(" "); Variable[] binders = new Variable[snames.length]; bounds.push(MARKER); for (int i = 0; i < snames.length; i++) { String id = snames[i]; String name = binderNames.remove(id); // consume binder name assert name != null : "Invalid grammar: binds used without binder/name"; binders[i] = StringTerm.varStringTerm(sink.context(), name); bounds.push(new Pair<>(name, binders[i])); } for (int i = 0; i < binders.length; i++) sink.bind(binders[i]); } /** * Unbinds last bound group of binders. * @param context */ public void exitBinds(ParserRuleContext context) { assert!bounds.isEmpty() : "Unbalanced use of enterBinds/exitBinds"; while (bounds.pop() != MARKER); } // Overrides @Override public void enterEveryRule(ParserRuleContext context) { // Is that a rule part of a list? if (!consCount.isEmpty() && consCount.peek().fst != MutableInt.MARKER) { if (!tail) { if (metasink() != null) metasink().type(consCount.peek().snd); sink.start(consDesc); consCount.peek().fst.v++; } else { // Following events fill the second Cons argument } } consCount.push(new Pair<>(MutableInt.MARKER, null)); ruleContext.push(context); } @Override public void exitEveryRule(ParserRuleContext context) { consCount.pop(); ruleContext.pop(); } @Override public void visitErrorNode(ErrorNode arg0) {} @Override public void visitTerminal(TerminalNode context) { switch (state) { case SKIP : state = State.PARSE; break; case PARSE : if (context.getSymbol().getType() != -1) { // Is that a terminal part of a list? if (!consCount.isEmpty() && consCount.peek().fst != MutableInt.MARKER) { if (!tail) { if (metasink() != null) metasink().type(consCount.peek().snd); sink.start(consDesc); consCount.peek().fst.v++; } } switch (kind) { case NUMERIC : case STRING : sendLocation(context.getSymbol()); String t = context.getText(); // TODO: SHOULD FIND A BETTER WAY, like another sort case. if (t.startsWith("\"")) t = StringUtils.unquoteJava(t); sink = sink.literal(t); break; case METAVAR : assert metasink() != null; String metaname = fixupMetachar(context.getText()); metasink().startMetaApplication(metaname); metasink().startSubstitutes(); // Add all bound variables. // REVISIT: should be user-specified. for (Pair<String, Variable> bound : bounds) { if (bound != MARKER) sink.use(bound.snd); } metasink().endSubstitutes(); if (termType != null) metasink().type(termType); metasink().endMetaApplication(); break; default : break; } kind = TokenKind.STRING; } break; case START_EMBED : // Just the category/sort name. Ignore state = State.PARSE_EMBED; break; case PARSE_EMBED : { // Recursively parse this token Token token = context.getSymbol(); String text = context.getText(); if (text.length() > 1) { // Last character is closing the embedded section: trim it. text = text.trim(); text = text.substring(0, text.length() - 1); parseTSTerm(text, token.getLine(), token.getCharPositionInLine()); } state = State.PARSE; break; } case NAME : // Receive a symbol or a bound variable binderName += context.getText().trim(); break; case CONCRETE : { if (kind == TokenKind.METAVAR) { // Cancel parsing concrete. Produce skipped start events and resume normal parsing. 
metasink().type("TransScript_aterm_sort"); sink.start(sink.context().lookupDescriptor("TransScript_aterm_A8")); metasink().type("TransScript_concrete_sort"); sink.start(sink.context().lookupDescriptor("TransScript_concrete")); String metaname = fixupMetachar(context.getText()); metasink().startMetaApplication(metaname); if (termType != null) metasink().type(termType); metasink().endMetaApplication(); state = State.PARSE; } else { Token token = context.getSymbol(); parseConcrete(context.getText(), token.getLine(), token.getCharPositionInLine()); state = State.END_CONCRETE; } break; } case END_CONCRETE : case END_CONCRETE_TERM : break; } } // Parse concrete syntax private void parseConcrete(String text, int line, int column) { String category = text.substring(0, text.indexOf("")); String program = text.substring(text.indexOf("") + 1); program = program.substring(0, program.lastIndexOf("")); org.transscript.runtime.Parser parser = sink.context().getParser(category, true); // Get latest boot parser. if (parser == null) throw new RuntimeException("Fatal error: no parser found for category " + category); try (Reader reader = new StringReader(program)) { MetaBufferSink innersink = new MetaBufferSink(sink.context()); parser.parse(innersink, category, reader, null, line, column, bounds, freshes); sink.copy(innersink.metaterm().asTransScript_xterm(sink.context()).getField1()); } catch (RuntimeException e) { System.err.println("Error while parsing: " + program); throw e; } catch (IOException e) {} // can't happen. } // Parse embedded TS term. private void parseTSTerm(String text, int line, int column) { try (Reader reader = new StringReader(text)) { org.transscript.runtime.Parser innerParser = sink.context().getParser("term", true); BufferSink buffer = sink.context().makeBuffer(); innerParser.parser().parse(buffer, "term", reader, null, line, column, bounds, freshes); metasink().copy((TransScript_xterm_xsort) buffer.term()); } catch (IOException e) {} // can't happen. } /** * Convert parser specific metacharacter to Crsx meta character (#). */ protected String fixupMetachar(String metavar) { return "#" + metavar.substring(metachar.length()); } /** * Convert raw type to proper TransScript type. */ private String fixupType(String type) { final boolean islist = type.endsWith("_OOM") || type.endsWith("_ZOM") || type.endsWith("_OPT"); type = islist ? type.substring(0, type.length() - "_ZOM".length()) : type; if (type.endsWith("_TOK")) return (islist ? "List<" : "") + "StringTerm" + (islist ? ">" : ""); return (islist ? "List<" : "") + prefix + type + "_sort" + (islist ? ">" : ""); } /** Cast sink to metasink */ final private MetaSink metasink() { return sink instanceof MetaSink ? (MetaSink) sink : null; } // Tell whether about to parse concrete programs. private boolean isConcrete(String rulename, String altname) { // TODO: this is quite brittle. Should change PG. return parsets && altname.equals("8") && rulename.equals("aterm"); } }
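/*
 * Editor's illustration (not part of ToSinkListener above): what fixupType(...) and fixupMetachar(...)
 * compute, restated as a self-contained sketch so the string surgery can be checked without the
 * TransScript runtime. The prefix "MyLang_" and metacharacter "#$" are made-up example values.
 */
class FixupSketch {
    // Mirrors ToSinkListener.fixupType: list-forming rule suffixes become List<...> and *_TOK rules
    // map to the built-in StringTerm sort.
    static String fixupType(String prefix, String type) {
        boolean islist = type.endsWith("_OOM") || type.endsWith("_ZOM") || type.endsWith("_OPT");
        type = islist ? type.substring(0, type.length() - "_ZOM".length()) : type;
        if (type.endsWith("_TOK"))
            return (islist ? "List<" : "") + "StringTerm" + (islist ? ">" : "");
        return (islist ? "List<" : "") + prefix + type + "_sort" + (islist ? ">" : "");
    }

    // Mirrors ToSinkListener.fixupMetachar: replaces the language-specific metacharacter with "#".
    static String fixupMetachar(String metachar, String metavar) {
        return "#" + metavar.substring(metachar.length());
    }

    public static void main(String[] args) {
        System.out.println(fixupType("MyLang_", "expr"));      // MyLang_expr_sort
        System.out.println(fixupType("MyLang_", "expr_ZOM"));  // List<MyLang_expr_sort>
        System.out.println(fixupType("MyLang_", "ID_TOK"));    // StringTerm
        System.out.println(fixupMetachar("#$", "#$x"));        // #x
    }
}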
package li.util; import java.lang.reflect.Array; import java.security.MessageDigest; import java.sql.Time; import java.sql.Timestamp; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import li.dao.Record; import li.model.Field; /** * * * @author li (limw@w.cn) * @version 0.1.7 (2012-05-08) */ public class Convert { /** * json */ public static String toJson(Object target) { if (target instanceof Collection) { return toJson(((Collection) target).toArray()); } if (target.getClass().isArray()) { String json = "["; for (Object one : (Object[]) target) { json += toJson(one) + ","; } return json.substring(0, json.length() - 1) + "]"; } String json = "{"; if (Record.class.isAssignableFrom(target.getClass())) {// Record Set<Entry> entries = ((Record) target).entrySet(); for (Entry<String, Object> entry : entries) {// Record json += "\"" + entry.getKey() + "\":\"" + entry.getValue() + "\","; } } else {// RecordPOJO List<Field> fields = Field.list(target.getClass(), true); for (Field field : fields) {// POJO json += "\"" + field.name + "\":\"" + Reflect.get(target, field.name) + "\","; } } return json.substring(0, json.length() - 1) + "}"; } /** * jsonList */ public static <T> List<T> fromJson(Class<T> type, String json) { final String JSON_REGEX = "^.*}[]]{0,1},[\\[]{0,1}\\{.*$", JSON_SPLIT = "}[]]{0,1},[\\[]{0,1}\\{"; List<T> list = new ArrayList<T>(); if (Verify.regex(json, JSON_REGEX)) { String[] array = json.split(JSON_SPLIT); for (String one : array) { list.addAll(fromJson(type, one)); } return (List<T>) list; } T one = Reflect.born(type); String[] array = json.split(","); for (String field : array) { String[] strs = field.split(":"); String key = strs[0].substring(strs[0].indexOf('"') + 1, strs[0].lastIndexOf('"')); String value = strs[1].substring(strs[1].indexOf('"') + 1, strs[1].lastIndexOf('"')); Reflect.set(one, key, value);// null } list.add(one); return list; } /** * Map,key,value; items */ public static Map<Object, Object> toMap(Object... items) { Map map = new HashMap(); if (null != items && items.length > 0) { if (items.length % 2 != 0) { throw new RuntimeException("Count of items must be even !!!"); } else { for (int i = 0; i < items.length; i = i + 2) { map.put(items[i], items[i + 1]); } } } return map; } /** * MD5 */ public static String toMD5(Object input) { try { MessageDigest messageDigest = MessageDigest.getInstance("MD5"); messageDigest.update(input.toString().getBytes()); byte[] byteDigest = messageDigest.digest(); StringBuffer stringBuffer = new StringBuffer(""); int i; for (int offset = 0; offset < byteDigest.length; offset++) { i = byteDigest[offset] < 0 ? 
byteDigest[offset] + 256 : byteDigest[offset]; if (i < 16) { stringBuffer.append("0"); } stringBuffer.append(Integer.toHexString(i)); } return stringBuffer.toString(); } catch (Exception e) { throw new RuntimeException("Exception at li.util.Convert.toMD5(Object)", e); } } /** * valuetype */ public static <T> T toType(Class<T> type, Object value) { if (null != type && null != value && value.toString().length() > 0) { if ((type.equals(Integer.TYPE) || type.equals(Integer.class)) && !(value instanceof Integer)) { return (T) Integer.valueOf(value.toString().trim()); } else if ((type.equals(Boolean.TYPE) || type.equals(Boolean.class)) && !(value instanceof Boolean)) { return (T) Boolean.valueOf(value.toString().trim()); } else if ((type.equals(Long.TYPE) || type.equals(Long.class)) && !(value instanceof Long)) { return (T) Long.valueOf(value.toString().trim()); } else if ((type.equals(Float.TYPE) || type.equals(Float.class)) && !(value instanceof Float)) { return (T) Float.valueOf(value.toString().trim()); } else if ((type.equals(Double.TYPE) || type.equals(Double.class)) && !(value instanceof Double)) { return (T) Double.valueOf(value.toString().trim()); } else if ((type.equals(Short.TYPE) || type.equals(Short.class)) && !(value instanceof Short)) { return (T) Short.valueOf(value.toString().trim()); } else if ((type.equals(Byte.TYPE) || type.equals(Byte.class)) && !(value instanceof Byte)) { return (T) Byte.valueOf(value.toString().trim()); } else if ((type.equals(Character.TYPE) || type.equals(Character.class)) && !(value instanceof Character)) { return (T) Character.valueOf(value.toString().trim().charAt(0)); } else if (type.equals(Time.class) && !(value instanceof Time)) { return (T) new Time(toType(java.util.Date.class, value).getTime()); } else if (type.equals(Timestamp.class) && !(value instanceof Timestamp)) { return (T) new Timestamp(toType(java.util.Date.class, value).getTime()); } else if (type.equals(java.sql.Date.class) && !(value instanceof java.sql.Date)) { return (T) new java.sql.Date(toType(java.util.Date.class, value).getTime()); } else if (type.equals(java.util.Date.class) && !(value instanceof java.util.Date)) { String pattern = ""; if (Verify.regex(value.toString().trim(), "^[0-2]{0,1}[0-9]{1}:[0-6]{0,1}[0-9]{1}$")) { pattern = "HH:mm"; } else if (Verify.regex(value.toString().trim(), "^[0-2]{0,1}[0-9]{1}:[0-6]{0,1}[0-9]{1}:[0-6]{0,1}[0-9]{1}$")) { pattern = "HH:mm:ss"; } else if (Verify.regex(value.toString().trim(), "^[0-9]{4}-[0-1]{0,1}[0-9]{1}-[0-3]{0,1}[0-9]{1}$")) { pattern = "yyyy-MM-dd"; } else if (Verify.regex(value.toString().trim(), "^[0-9]{4}/[0-1]{0,1}[0-9]{1}/[0-3]{0,1}[0-9]{1}$")) { pattern = "yyyy/MM/dd"; } else if (Verify.regex(value.toString().trim(), "^[0-9]{4}-[0-1]{0,1}[0-9]{1}-[0-3]{0,1}[0-9]{1} [0-2]{0,1}[0-9]{1}:[0-6]{0,1}[0-9]{1}$")) { pattern = "yyyy-MM-dd HH:mm"; } else if (Verify.regex(value.toString().trim(), "^[0-9]{4}/[0-1]{0,1}[0-9]{1}/[0-3]{0,1}[0-9]{1} [0-2]{0,1}[0-9]{1}:[0-6]{0,1}[0-9]{1}$")) { pattern = "yyyy/MM/dd HH:mm"; } else if (Verify.regex(value.toString().trim(), "^[0-9]{4}-[0-1]{0,1}[0-9]{1}-[0-3]{0,1}[0-9]{1} [0-2]{0,1}[0-9]{1}:[0-6]{0,1}[0-9]{1}:[0-6]{0,1}[0-9]{1}$")) { pattern = "yyyy-MM-dd HH:mm:ss"; } else if (Verify.regex(value.toString().trim(), "^[0-9]{4}/[0-1]{0,1}[0-9]{1}/[0-3]{0,1}[0-9]{1} [0-2]{0,1}[0-9]{1}:[0-6]{0,1}[0-9]{1}:[0-6]{0,1}[0-9]{1}$")) { pattern = "yyyy/MM/dd HH:mm:ss"; } try { return (T) new SimpleDateFormat(pattern).parse(value.toString()); } catch (ParseException e) {} } } return (T) value; } /** * * * 
@param type type Primitive */ public static <T> T[] toType(Class<T> type, Object... values) { T[] dest = null; if (null != type && null != values && values.length > 0) { if (type == Integer.TYPE) { type = (Class<T>) Integer.class; } else if (type == Boolean.TYPE) { type = (Class<T>) Boolean.class; } else if (type == Long.TYPE) { type = (Class<T>) Long.class; } else if (type == Float.TYPE) { type = (Class<T>) Float.class; } else if (type == Double.TYPE) { type = (Class<T>) Double.class; } else if (type == Short.TYPE) { type = (Class<T>) Short.class; } else if (type == Byte.TYPE) { type = (Class<T>) Byte.class; } else if (type == Character.TYPE) { type = (Class<T>) Character.class; } dest = (T[]) Array.newInstance(type, values.length); for (int i = 0; i < values.length; i++) { dest[i] = (T) toType(type, values[i]); } } return dest; } }
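/*
 * Editor's illustration (not part of Convert above): a usage sketch of Convert.toType(...) and
 * Convert.toMD5(...), assuming the li.util.Convert class above is on the classpath. It exercises the
 * trim-then-parse primitive conversions and the automatic date-pattern detection (the regex ladder in
 * toType); the sample strings are arbitrary.
 */
import li.util.Convert;

class ConvertUsageSketch {
    public static void main(String[] args) {
        Integer i = Convert.toType(Integer.class, " 42 ");                            // trims, then parses -> 42
        Boolean b = Convert.toType(Boolean.class, "true");                            // -> true
        java.util.Date d1 = Convert.toType(java.util.Date.class, "2012-05-08");       // detected as yyyy-MM-dd
        java.util.Date d2 = Convert.toType(java.util.Date.class, "2012/05/08 10:30"); // detected as yyyy/MM/dd HH:mm
        String md5 = Convert.toMD5("abc"); // hex digest: 900150983cd24fb0d6963f7d28e17f72
        System.out.println(i + " " + b + " " + d1 + " " + d2 + " " + md5);
    }
}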
package nars.vision; import boofcv.core.image.ConvertBufferedImage; import boofcv.io.webcamcapture.UtilWebcamCapture; import boofcv.alg.misc.ImageMiscOps; import boofcv.struct.image.*; import com.github.sarxos.webcam.Webcam; import georegression.struct.point.Point2D_I32; import nars.gui.NARSwing; import nars.NAR; import nars.model.impl.Default; import org.infinispan.commons.hash.Hash; import javax.swing.*; import java.awt.*; import java.awt.image.BufferedImage; import java.util.HashMap; /** * Class for NARS Vision using a webcam with raster hierarchy representation. * Includes visualization. All relevant parameters can be adjusted in real time * and will update the visualization. * * @author James McLaughlin */ public class RasterHierarchy extends JPanel { // The number of rasters to calculate. int numberRasters; // The dimensions of the input frame. int frameWidth, frameHeight; // The number of blocks to divide the coarsest raster into. int divisions; // The scaling factor for each raster in the hierarchy. int scalingFactor; // The center of the region of focus Point2D_I32 focusPoint = new Point2D_I32(); // Image for visualization BufferedImage workImage; // Window for visualization JFrame window; /** * Configure the Raster Hierarchy * * @param numberRasters The number of rasters to generate * @param frameWidth The desired size of the input stream * @param frameHeight The desired height of the input stream * @param divisions The number of blocks to divide the coarsest grained raster into * @param scalingFactor The scaling factor for each raster in the heirarchy. */ public RasterHierarchy(int numberRasters, int frameWidth, int frameHeight, int divisions, int scalingFactor) { this.numberRasters = numberRasters; this.frameWidth = frameWidth; this.frameHeight = frameHeight; this.divisions = divisions; this.scalingFactor = scalingFactor; // Set the default focus to the center this.setFocus(frameWidth/2, frameHeight/2); window = new JFrame("Hierarchical Raster Vision Representation"); window.setContentPane(this); window.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE); } /** * Set the focus to the given location. All rasters (other than the most coarse-grained) are centered on * this point. * * @param x The x-coordinate of the focal point * @param y The y-coordinate of the focal point */ public void setFocus(int x, int y) { this.focusPoint.set(x, y); } /** * Generate the raster hierarchy for a given image. * C * @param input The image to rasterize * @return The rasterized image. 
*/ int updaterate=60; int cnt=1; static int arrsz=1000; //todo refine HashMap<Integer,Float> lastvalR=new HashMap<>(); HashMap<Integer,Float> lastvalG=new HashMap<>(); HashMap<Integer,Float> lastvalB=new HashMap<>(); HashMap<Integer,Value> voter=new HashMap<>(); public class Value { public int x; public int y; public int r; public double value; public Value(int r, int x, int y, double value) { this.x=x; this.y=y; this.r=r; this.value=value; } } public BufferedImage rasterizeImage(BufferedImage input) { voter = new HashMap<>(); boolean putin=false; /* vladimir cnt: count down one frame; when the counter reaches zero the NAL/move-heuristic branch runs and the counter is reset */ cnt--; if(cnt<=0) { putin = true; cnt=updaterate; } int red, green, blue; int redSum, greenSum, blueSum; int x, y, startX, startY; int newX, newY; int width = input.getWidth(); int height = input.getHeight(); int blockXSize = width/divisions; int blockYSize = height/divisions; MultiSpectral<ImageUInt8> image = ConvertBufferedImage.convertFromMulti(input,null,true,ImageUInt8.class); MultiSpectral<ImageUInt8> output = new MultiSpectral<>(ImageUInt8.class, width, height, 3); BufferedImage rasterizedImage = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB); // Set the initial raster region int regionWidth = width; int regionHeight = height; newX = 0; newY = 0; startX = 0; startY = 0; for (int step = 1; step <= numberRasters; step++) { // For each step we need to reduce the dimensions of the area that is pixelated and // also reduce the block size. if (step > 1) { newX = startX + (regionWidth - regionWidth / scalingFactor) / scalingFactor; newY = startY + (regionHeight - regionHeight / scalingFactor) / scalingFactor; if (newX < 0) {newX = 0;} if (newY < 0) {newY = 0;} regionWidth = regionWidth/ scalingFactor; regionHeight = regionHeight/ scalingFactor; blockXSize = blockXSize/ scalingFactor; blockYSize = blockYSize/ scalingFactor; if (blockXSize < 1) {blockXSize = 1;} if (blockYSize < 1) {blockYSize = 1;} } // Set the starting point for the next step startX = this.focusPoint.getX() - ((regionWidth)/2); startY = this.focusPoint.getY() - ((regionHeight)/2); int pixelCount = blockXSize * blockYSize; // Number of pixels per block int h=0,j=0; for (x = newX; x < ((step == 1 ? 0 : startX) + regionWidth); x += blockXSize) { h++; for (y = newY; y < ((step == 1 ?
0 : startY) + regionHeight); y += blockYSize) { j++; redSum = 0; greenSum = 0; blueSum = 0; for (int pixelX = 0; (pixelX < blockXSize) && (x + pixelX < width); pixelX++) { for (int pixelY = 0; (pixelY < blockYSize) && (y + pixelY < height); pixelY++) { redSum += image.getBand(0).get(x + pixelX, y + pixelY); greenSum += image.getBand(1).get(x + pixelX, y + pixelY); blueSum += image.getBand(2).get(x + pixelX, y + pixelY); } } red = redSum / pixelCount; green = greenSum / pixelCount; blue = blueSum / pixelCount; float fred = ((float) red) / 255.0f; float fgreen = ((float) green) / 255.0f; float fblue = ((float) blue) / 255.0f; //manage move heuristic int brightness = (red+green+blue)/3; //maybe not needed int key=step+10*x+10000*y; if(lastvalR.containsKey(key) && putin) { double area = blockXSize * blockYSize; double diff = Math.abs(fred - (lastvalR.get(key))) + Math.abs(fgreen - (lastvalG.get(key))) + Math.abs(fblue - (lastvalB.get(key))); double vote = diff;// / area; // vote*=step; voter.put(key, new Value(step, x + blockXSize / 2, y + blockYSize / 2, vote)); } lastvalR.put(key, fred); lastvalG.put(key, fgreen); lastvalB.put(key, fblue); if(putin && step==numberRasters) { //input Narsese translation String st="<(*,r"+ String.valueOf(step)+","+String.valueOf(h)+","+String.valueOf(j)+") --> RED>. :|: %"+String.valueOf(fred)+"%"; nar.input(st); } // Here we can generate NAL, since we know all of the required values. ImageMiscOps.fillRectangle(output.getBand(0), red, x, y, blockXSize, blockYSize); ImageMiscOps.fillRectangle(output.getBand(1), green, x, y, blockXSize, blockYSize); ImageMiscOps.fillRectangle(output.getBand(2), blue, x, y, blockXSize, blockYSize); } } } //search for maximum vote to move heuristic if(putin) { Value maxvalue = null; float threshold = 0.05f; for (Integer key : voter.keySet()) { Value value = voter.get(key); if (maxvalue == null || value.value > maxvalue.value) { if (value.value > threshold) maxvalue = value; } } if (maxvalue != null && maxvalue.x!=0 && maxvalue.y!=0) { this.setFocus(maxvalue.x, maxvalue.y); } } ConvertBufferedImage.convertTo(output, rasterizedImage, true); return rasterizedImage; } /** * Invoke to start the main processing loop. */ public void process() { Webcam webcam = UtilWebcamCapture.openDefault(frameWidth, frameHeight); // adjust the window size and let the GUI know it has changed Dimension actualSize = webcam.getViewSize(); setPreferredSize(actualSize); setMinimumSize(actualSize); window.setMinimumSize(actualSize); window.setPreferredSize(actualSize); window.setVisible(true); BufferedImage input, buffered; workImage = new BufferedImage(actualSize.width, actualSize.height, BufferedImage.TYPE_INT_RGB); //int counter = 0; while( true ) { /* * Uncomment this section to scan the focal point across the frame * automatically - just for demo purposes.
*/ /* int xx = this.focusPoint.getX(); int yy = this.focusPoint.getY(); xx += 1; if(xx > frameWidth) { xx = 0; yy += 1; if (yy > frameHeight) yy = 0; } this.setFocus(xx, yy); */ input = webcam.getImage(); synchronized( workImage ) { // copy the latest image into the work buffer Graphics2D g2 = workImage.createGraphics(); buffered = this.rasterizeImage(input); g2.drawImage(buffered,0,0,null); } repaint(); } } @Override public void paint (Graphics g) { if( workImage != null ) { // draw the work image and be careful to make sure it isn't being manipulated at the same time synchronized (workImage) { ((Graphics2D) g).drawImage(workImage, 0, 0, null); } } } static NAR nar; public static void main(String[] args) { //RasterHierarchy rh = new RasterHierarchy(8, 640, 480, 12, 2); // RasterHierarchy rh = new RasterHierarchy(3, 640, 480, 5, 2); nar = new NAR(new Default.CommandLineNARBuilder(args)); NARSwing swing = new NARSwing(nar); RasterHierarchy rh = new RasterHierarchy(3, 640, 480, 4, 3); rh.process(); } public int getNumberRasters() { return numberRasters; } public void setNumberRasters(int numberRasters) { this.numberRasters = numberRasters; } public int getDivisions() { return divisions; } public void setDivisions(int divisions) { this.divisions = divisions; } public int getScalingFactor() { return scalingFactor; } public void setScalingFactor(int scalingFactor) { this.scalingFactor = scalingFactor; } }
package com.ironz.binaryprefs; import com.ironz.binaryprefs.cache.CacheProvider; import com.ironz.binaryprefs.encryption.ByteEncryption; import com.ironz.binaryprefs.events.EventBridge; import com.ironz.binaryprefs.file.transaction.FileTransaction; import com.ironz.binaryprefs.file.transaction.TransactionElement; import com.ironz.binaryprefs.serialization.SerializerFactory; import com.ironz.binaryprefs.serialization.serializer.persistable.Persistable; import com.ironz.binaryprefs.serialization.strategy.SerializationStrategy; import com.ironz.binaryprefs.serialization.strategy.impl.*; import com.ironz.binaryprefs.task.Completable; import com.ironz.binaryprefs.task.TaskExecutor; import java.util.*; import java.util.concurrent.locks.Lock; @SuppressWarnings("WeakerAccess") final class BinaryPreferencesEditor implements PreferencesEditor { private final Map<String, SerializationStrategy> strategyMap = new HashMap<>(0); private final Set<String> removeSet = new HashSet<>(0); private final Preferences preferences; private final FileTransaction fileTransaction; private final EventBridge bridge; private final TaskExecutor taskExecutor; private final SerializerFactory serializerFactory; private final CacheProvider cacheProvider; private final Lock writeLock; private final ByteEncryption byteEncryption; private boolean clear; BinaryPreferencesEditor(Preferences preferences, FileTransaction fileTransaction, EventBridge bridge, TaskExecutor taskExecutor, SerializerFactory serializerFactory, CacheProvider cacheProvider, Lock writeLock, ByteEncryption byteEncryption) { this.preferences = preferences; this.fileTransaction = fileTransaction; this.bridge = bridge; this.taskExecutor = taskExecutor; this.serializerFactory = serializerFactory; this.cacheProvider = cacheProvider; this.writeLock = writeLock; this.byteEncryption = byteEncryption; } @Override public PreferencesEditor putString(String key, String value) { if (value == null) { return remove(key); } writeLock.lock(); try { SerializationStrategy strategy = new StringSerializationStrategyImpl(value, serializerFactory); strategyMap.put(key, strategy); return this; } finally { writeLock.unlock(); } } @Override public PreferencesEditor putStringSet(String key, Set<String> value) { if (value == null) { return remove(key); } writeLock.lock(); try { SerializationStrategy strategy = new StringSetSerializationStrategyImpl(value, serializerFactory); strategyMap.put(key, strategy); return this; } finally { writeLock.unlock(); } } @Override public PreferencesEditor putInt(String key, int value) { writeLock.lock(); try { SerializationStrategy strategy = new IntegerSerializationStrategyImpl(value, serializerFactory); strategyMap.put(key, strategy); return this; } finally { writeLock.unlock(); } } @Override public PreferencesEditor putLong(String key, long value) { writeLock.lock(); try { SerializationStrategy strategy = new LongSerializationStrategyImpl(value, serializerFactory); strategyMap.put(key, strategy); return this; } finally { writeLock.unlock(); } } @Override public PreferencesEditor putFloat(String key, float value) { writeLock.lock(); try { SerializationStrategy strategy = new FloatSerializationStrategyImpl(value, serializerFactory); strategyMap.put(key, strategy); return this; } finally { writeLock.unlock(); } } @Override public PreferencesEditor putBoolean(String key, boolean value) { writeLock.lock(); try { SerializationStrategy strategy = new BooleanSerializationStrategyImpl(value, serializerFactory); strategyMap.put(key, strategy); return this; } 
finally { writeLock.unlock(); } } @Override public <T extends Persistable> PreferencesEditor putPersistable(String key, T value) { if (value == null) { return remove(key); } writeLock.lock(); try { SerializationStrategy strategy = new PersistableSerializationStrategyImpl(value, serializerFactory); strategyMap.put(key, strategy); return this; } finally { writeLock.unlock(); } } @Override public PreferencesEditor putByte(String key, byte value) { writeLock.lock(); try { SerializationStrategy strategy = new ByteSerializationStrategyImpl(value, serializerFactory); strategyMap.put(key, strategy); return this; } finally { writeLock.unlock(); } } @Override public PreferencesEditor putShort(String key, short value) { writeLock.lock(); try { SerializationStrategy strategy = new ShortSerializationStrategyImpl(value, serializerFactory); strategyMap.put(key, strategy); return this; } finally { writeLock.unlock(); } } @Override public PreferencesEditor putChar(String key, char value) { writeLock.lock(); try { SerializationStrategy strategy = new CharSerializationStrategyImpl(value, serializerFactory); strategyMap.put(key, strategy); return this; } finally { writeLock.unlock(); } } @Override public PreferencesEditor putDouble(String key, double value) { writeLock.lock(); try { SerializationStrategy strategy = new DoubleSerializationStrategyImpl(value, serializerFactory); strategyMap.put(key, strategy); return this; } finally { writeLock.unlock(); } } @Override public PreferencesEditor remove(String key) { writeLock.lock(); try { removeSet.add(key); return this; } finally { writeLock.unlock(); } } @Override public PreferencesEditor clear() { writeLock.lock(); try { clear = true; return this; } finally { writeLock.unlock(); } } @Override public void apply() { writeLock.lock(); try { clearCache(); removeCache(); storeCache(); taskExecutor.submit(new Runnable() { @Override public void run() { transact(); } }); } finally { writeLock.unlock(); } } @Override public boolean commit() { writeLock.lock(); try { clearCache(); removeCache(); storeCache(); Completable submit = taskExecutor.submit(new Runnable() { @Override public void run() { transact(); } }); return submit.completeBlocking(); } finally { writeLock.unlock(); } } private void clearCache() { if (!clear) { return; } for (String name : cacheProvider.keys()) { cacheProvider.remove(name); } } private void removeCache() { if (clear) { return; } for (String name : removeSet) { cacheProvider.remove(name); } } private void storeCache() { for (String name : strategyMap.keySet()) { SerializationStrategy strategy = strategyMap.get(name); Object value = strategy.getValue(); cacheProvider.put(name, value); } } private void transact() { List<TransactionElement> transaction = createTransaction(); fileTransaction.commit(transaction); notifyListeners(transaction); } private List<TransactionElement> createTransaction() { List<TransactionElement> elements = new ArrayList<>(); elements.addAll(clearPersistence()); elements.addAll(removePersistence()); elements.addAll(storePersistence()); return elements; } private List<TransactionElement> clearPersistence() { if (!clear) { return Collections.emptyList(); } List<TransactionElement> elements = new ArrayList<>(); for (String name : cacheProvider.keys()) { TransactionElement e = TransactionElement.createRemoveElement(name); elements.add(e); } return elements; } private List<TransactionElement> removePersistence() { if (clear) { return Collections.emptyList(); } List<TransactionElement> elements = new ArrayList<>(); for (String
name : removeSet) { TransactionElement e = TransactionElement.createRemoveElement(name); elements.add(e); } return elements; } private List<TransactionElement> storePersistence() { Set<String> strings = strategyMap.keySet(); List<TransactionElement> elements = new ArrayList<>(strings.size()); for (String key : strings) { SerializationStrategy strategy = strategyMap.get(key); byte[] bytes = strategy.serialize(); byte[] encrypt = byteEncryption.encrypt(bytes); TransactionElement e = TransactionElement.createUpdateElement(key, encrypt); elements.add(e); } return elements; } private void notifyListeners(List<TransactionElement> transaction) { for (TransactionElement element : transaction) { String name = element.getName(); byte[] bytes = element.getContent(); if (element.getAction() == TransactionElement.ACTION_REMOVE) { bridge.notifyListenersRemove(preferences, name); } if (element.getAction() == TransactionElement.ACTION_UPDATE) { bridge.notifyListenersUpdate(preferences, name, bytes); } } } }
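/*
 * Illustrative usage sketch for the editor above (added for clarity, not part of the original source).
 * It assumes the editor is handed out by a SharedPreferences-style Preferences#edit() factory, which is
 * not shown in this file.
 *
 *   PreferencesEditor editor = preferences.edit();
 *   editor.putString("token", "abc123")
 *         .putInt("launch_count", 5)
 *         .remove("obsolete_key")
 *         .apply();   // updates the cache on the calling thread, persists asynchronously on the TaskExecutor
 *   // commit() performs the same work but blocks until the file transaction has completed
 */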
package railo.runtime.functions.image; import java.awt.Composite; import java.awt.Font; import java.awt.Paint; import java.awt.Point; import java.awt.geom.AffineTransform; import java.awt.geom.Point2D; import java.awt.image.BufferedImage; import java.awt.image.Kernel; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.HashMap; import java.util.Map; import railo.commons.io.IOUtil; import railo.commons.io.res.Resource; import railo.commons.io.res.ResourceProvider; import railo.commons.io.res.ResourcesImpl; import railo.commons.lang.StringUtil; import railo.runtime.PageContext; import railo.runtime.exp.FunctionException; import railo.runtime.exp.PageException; import railo.runtime.img.Image; import railo.runtime.img.filter.*; import railo.runtime.img.math.BinaryFunction; import railo.runtime.img.math.Function2D; import railo.runtime.op.Caster; import railo.runtime.type.List; import railo.runtime.type.Struct; import railo.runtime.type.StructImpl; import edu.emory.mathcs.backport.java.util.Arrays; public class ImageFilter { private static final Struct EMPTY_STRUCT = new StructImpl(); private static Map<String,Class> filters=new HashMap<String, Class>(); static { filters.put("applymask",ApplyMaskFilter.class); filters.put("average",AverageFilter.class); filters.put("bicubicscale",BicubicScaleFilter.class); filters.put("block",BlockFilter.class); filters.put("blur",BlurFilter.class); filters.put("border",BorderFilter.class); filters.put("boxblur",BoxBlurFilter.class); filters.put("brushedmetal",BrushedMetalFilter.class); filters.put("bump",BumpFilter.class); filters.put("caustics",CausticsFilter.class); filters.put("cellular",CellularFilter.class); filters.put("channelmix",ChannelMixFilter.class); filters.put("check",CheckFilter.class); filters.put("chromakey",ChromaKeyFilter.class); filters.put("chrome",ChromeFilter.class); filters.put("circle",CircleFilter.class); filters.put("composite",CompositeFilter.class); //filters.put("compound",CompoundFilter.class); filters.put("contour",ContourFilter.class); filters.put("contrast",ContrastFilter.class); filters.put("convolve",ConvolveFilter.class); filters.put("crop",CropFilter.class); filters.put("crystallize",CrystallizeFilter.class); filters.put("curl",CurlFilter.class); filters.put("curves",CurvesFilter.class); filters.put("despeckle",DespeckleFilter.class); filters.put("diffuse",DiffuseFilter.class); filters.put("diffusion",DiffusionFilter.class); filters.put("dilate",DilateFilter.class); filters.put("displace",DisplaceFilter.class); filters.put("dissolve",DissolveFilter.class); filters.put("dither",DitherFilter.class); filters.put("edge",EdgeFilter.class); filters.put("emboss",EmbossFilter.class); filters.put("equalize",EqualizeFilter.class); filters.put("erodealpha",ErodeAlphaFilter.class); filters.put("erode",ErodeFilter.class); filters.put("exposure",ExposureFilter.class); filters.put("fade",FadeFilter.class); filters.put("fbm",FBMFilter.class); filters.put("feedback",FeedbackFilter.class); filters.put("fieldwarp",FieldWarpFilter.class); filters.put("fill",FillFilter.class); filters.put("flare",FlareFilter.class); filters.put("flip",FlipFilter.class); filters.put("flush3d",Flush3DFilter.class); filters.put("fourcolor",FourColorFilter.class); filters.put("gain",GainFilter.class); filters.put("gamma",GammaFilter.class); filters.put("gaussian",GaussianFilter.class); filters.put("glint",GlintFilter.class); filters.put("glow",GlowFilter.class); filters.put("gradient",GradientFilter.class); 
filters.put("gradientwipe",GradientWipeFilter.class); filters.put("gray",GrayFilter.class); filters.put("grayscale",GrayscaleFilter.class); filters.put("halftone",HalftoneFilter.class); filters.put("hsbadjust",HSBAdjustFilter.class); filters.put("interpolate",InterpolateFilter.class); filters.put("invertalpha",InvertAlphaFilter.class); filters.put("invert",InvertFilter.class); //filters.put("iterated",IteratedFilter.class); filters.put("javalnf",JavaLnFFilter.class); filters.put("kaleidoscope",KaleidoscopeFilter.class); filters.put("key",KeyFilter.class); filters.put("lensblur",LensBlurFilter.class); filters.put("levels",LevelsFilter.class); filters.put("life",LifeFilter.class); filters.put("light",LightFilter.class); filters.put("lookup",LookupFilter.class); filters.put("mapcolors",MapColorsFilter.class); filters.put("map",MapFilter.class); filters.put("marble",MarbleFilter.class); filters.put("marbletex",MarbleTexFilter.class); filters.put("mask",MaskFilter.class); filters.put("maximum",MaximumFilter.class); filters.put("median",MedianFilter.class); filters.put("minimum",MinimumFilter.class); filters.put("mirror",MirrorFilter.class); filters.put("motionblur",MotionBlurFilter.class); //filters.put("mutatable",MutatableFilter.class); filters.put("noise",NoiseFilter.class); filters.put("offset",OffsetFilter.class); filters.put("oil",OilFilter.class); filters.put("opacity",OpacityFilter.class); filters.put("outline",OutlineFilter.class); filters.put("perspective",PerspectiveFilter.class); filters.put("pinch",PinchFilter.class); filters.put("plasma",PlasmaFilter.class); filters.put("pointillize",PointillizeFilter.class); filters.put("polar",PolarFilter.class); filters.put("posterize",PosterizeFilter.class); filters.put("premultiply",PremultiplyFilter.class); filters.put("quantize",QuantizeFilter.class); filters.put("quilt",QuiltFilter.class); filters.put("rays",RaysFilter.class); filters.put("reducenoise",ReduceNoiseFilter.class); filters.put("rendertext",RenderTextFilter.class); filters.put("rescale",RescaleFilter.class); filters.put("rgbadjust",RGBAdjustFilter.class); filters.put("ripple",RippleFilter.class); filters.put("rotate",RotateFilter.class); filters.put("saturation",SaturationFilter.class); filters.put("scale",ScaleFilter.class); filters.put("scratch",ScratchFilter.class); filters.put("shade",ShadeFilter.class); filters.put("shadow",ShadowFilter.class); filters.put("shape",ShapeFilter.class); filters.put("sharpen",SharpenFilter.class); filters.put("shatter",ShatterFilter.class); filters.put("shear",ShearFilter.class); filters.put("shine",ShineFilter.class); filters.put("skeleton",SkeletonFilter.class); //filters.put("sky",SkyFilter.class); filters.put("smartblur",SmartBlurFilter.class); filters.put("smear",SmearFilter.class); filters.put("solarize",SolarizeFilter.class); filters.put("sparkle",SparkleFilter.class); filters.put("sphere",SphereFilter.class); filters.put("stamp",StampFilter.class); filters.put("swim",SwimFilter.class); filters.put("texture",TextureFilter.class); filters.put("threshold",ThresholdFilter.class); filters.put("tileimage",TileImageFilter.class); //filters.put("transfer",TransferFilter.class); //filters.put("transform",TransformFilter.class); //filters.put("transition",TransitionFilter.class); filters.put("twirl",TwirlFilter.class); filters.put("unpremultiply",UnpremultiplyFilter.class); filters.put("unsharp",UnsharpFilter.class); filters.put("variableblur",VariableBlurFilter.class); filters.put("warp",WarpFilter.class); 
filters.put("water",WaterFilter.class); filters.put("weave",WeaveFilter.class); filters.put("wholeimage",WholeImageFilter.class); filters.put("wood",WoodFilter.class); } public static String call(PageContext pc, Object name, String filterName) throws PageException { return call(pc, name, filterName, EMPTY_STRUCT); } public static String call(PageContext pc, Object name, String filterName, Struct parameters) throws PageException { if(name instanceof String) name=pc.getVariable(Caster.toString(name)); Image img = Image.toImage(name); BufferedImage bi = img.getBufferedImage(); String lcFilterName = filterName.trim().toLowerCase(); // get filter class Class clazz = filters.get(lcFilterName); if(clazz==null) { String[] keys = filters.keySet().toArray(new String[filters.size()]); Arrays.sort(keys); String list=List.arrayToList(keys, ", "); String soundex = StringUtil.soundex(filterName); java.util.List<String> similar=new ArrayList<String>(); for(int i=0;i<keys.length;i++){ if(StringUtil.soundex(keys[i]).equals(soundex)) similar.add(keys[i]); } if(similar.size()>0) { list=List.arrayToList(similar.toArray(new String[similar.size()]), ", "); throw new FunctionException(pc, "ImageFilter", 2, "filtername", "invalid filter name ["+filterName+"], did you mean ["+list+"]"); } throw new FunctionException(pc, "ImageFilter", 2, "filtername", "invalid filter name ["+filterName+"], valid filter names are ["+list+"]"); } // load filter DynFiltering filter=null; try { filter=(DynFiltering) clazz.newInstance(); } catch (Throwable t) { throw Caster.toPageException(t); } // execute filter filter.filter(bi, bi, parameters); /*InvertFilter _if=new InvertFilter(); _if.filter(bi, bi); if(true) return null; GlowFilter gf=new GlowFilter(); gf.setAmount(0.5F); gf.filter(bi, bi); img.image(bi);*/ return null; } public static void main(String[] args) throws Throwable { ResourceProvider frp = ResourcesImpl.getFileResourceProvider(); Resource filter = frp.getResource("/Users/mic/Projects/Railo/Source2/railo/railo-java/railo-core/src/railo/runtime/img/filter/"); Resource[] children = filter.listResources(); Resource child; String name; for(int i=0;i<children.length;i++){ child=children[i]; name=child.getName(); if(name.endsWith("Filter.java")){ String key=name.substring(0,name.length()-11); //print.o("filters.put(\""+key.toLowerCase()+"\","+key+"Filter.class);"); StringBuilder sb=new StringBuilder(); //Class clazz = ClassUtil.loadClass("railo.runtime.img.filter."+key+"Filter"); //if(clazz!=GlowFilter.class) continue; // create filter method content //setters(key,clazz,sb); String content = IOUtil.toString(child,null); // set implements //content=StringUtil.replace(content, "{", " implements DynFiltering {", true); //IOUtil.write(child, content, null, false); // add method filter //int index = content.lastIndexOf('}'); //content=content.substring(0,index)+sb+content.substring(index); //IOUtil.write(child, content, null, false); // imports content=StringUtil.replace(content, "package railo.runtime.img.filter;", "package railo.runtime.img.filter;\nimport railo.runtime.type.KeyImpl;\nimport railo.runtime.engine.ThreadLocalPageContext;\nimport railo.runtime.exp.PageException;\nimport railo.runtime.type.Struct;\nimport java.awt.image.BufferedImage;\nimport railo.runtime.type.List;\nimport railo.runtime.exp.FunctionException;\n", true); IOUtil.write(child, content, null, false); } } } private static void setters(String key, Class clazz, StringBuilder sb) { //sb.append("Object o;\n"); sb.append(" public BufferedImage 
filter(BufferedImage src, BufferedImage dst ,Struct parameters) throws PageException {\n"); sb.append(" Object o;\n"); Method[] methods = clazz.getMethods(); Method method; StringBuilder names=new StringBuilder(); for(int i=0;i<methods.length;i++){ method=methods[i]; if(method.getName().startsWith("set") && !method.getName().equals("setRGB") && !method.getName().equals("setDestination")){ String name=method.getName().substring(3); args(key,name,method,sb,i); if(names.length()>0) names.append(", "); names.append(name); } } sb.append("\n"); sb.append(" // check for arguments not supported\n"); sb.append(" if(parameters.size()>0) {\n"); sb.append(" throw new FunctionException(ThreadLocalPageContext.get(), \"ImageFilter\", 3, \"parameters\", \"the parameter\"+(parameters.size()>1?\"s\":\"\")+\" [\"+List.arrayToList(parameters.keysAsString(),\", \")+\"] \"+(parameters.size()>1?\"are\":\"is\")+\" not allowed, only the following parameters are supported ["+names+"]\");\n"); sb.append(" }\n"); sb.append("\n"); sb.append(" return filter(src, dst);\n"); sb.append(" }\n"); } private static void args(String className, String name, Method method, StringBuilder sb, int methodIndex) { Class[] params = method.getParameterTypes(); if(params.length==1){ sb.append(" if((o=parameters.removeEL(KeyImpl.init(\""+name+"\")))!=null)"); sb.append(method.getName()+"("); arg(name, params[0], method,sb,methodIndex); sb.append(");\n"); } else if(params.length==2 && name.equals("Dimensions")){ sb.append(" if((o=parameters.removeEL(KeyImpl.init(\""+name+"\")))!=null){\n"); sb.append(" int[] dim=ImageFilterUtil.toDimensions(o,\"Dimensions\");\n"); sb.append(" "+method.getName()+"(dim[0],dim[1]"); sb.append(");\n"); sb.append(" }\n"); } //else print.e(className+"->"+method); } private static void arg(String name, Class param, Method method, StringBuilder sb, int methodIndex) { sb.append("ImageFilterUtil."); if(param==float.class) sb.append("toFloatValue"); else if(param==boolean.class) sb.append("toBooleanValue"); else if(param==int.class) sb.append("toIntValue"); else if(param==Point2D.class) sb.append("toPoint2D"); else if(param==WarpGrid.class) sb.append("toWarpGrid"); else if(param==Kernel.class) sb.append("toKernel"); else if(param==Colormap.class) sb.append("toColormap"); else if(param==Function2D.class) sb.append("toFunction2D"); else if(param==BufferedImage.class) sb.append("toBufferedImage"); else if(param==BinaryFunction.class) sb.append("toBinaryFunction"); else if(param==String.class) sb.append("toString"); else if(param==Paint.class) sb.append("toPaint"); else if(param==Font.class) sb.append("toFont"); else if(param==AffineTransform.class) sb.append("toAffineTransform"); else if(param==Composite.class) sb.append("toComposite"); else if(param==LightFilter.Material.class) sb.append("toLightFilter$Material"); //else if(param==FieldWarpFilter.Line.class) sb.append("toFieldWarpFilter$Line"); else if(param==FieldWarpFilter.Line[].class) sb.append("toAFieldWarpFilter$Line"); else if(param==CurvesFilter.Curve.class) sb.append("toCurvesFilter$Curve"); else if(param==CurvesFilter.Curve[].class) sb.append("toACurvesFilter$Curve"); else if(param==Point.class) sb.append("toPoint"); else if(param==int[].class) sb.append("toAInt"); else if(param==int[][].class) sb.append("toAAInt"); else if(param==float[].class) sb.append("toAFloat"); else { new RuntimeException(name+"!!!!!!!!!!!!!"+param.getName()).printStackTrace(); } sb.append("(o,\""+name+"\")"); } }
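/*
 * Illustrative usage sketch for ImageFilter.call above (added for clarity, not part of the original
 * source). "pc" stands for the current PageContext and "img" for an image variable, both provided by
 * the surrounding Railo runtime rather than by this file; the parameter struct is matched against the
 * chosen filter's setters by the generated filter(BufferedImage, BufferedImage, Struct) methods.
 *
 *   Struct params = new StructImpl();
 *   ImageFilter.call(pc, "img", "boxblur", params);   // applies BoxBlurFilter to the image behind the "img" variable
 *   // a misspelled name such as "boxblurr" raises a FunctionException suggesting the closest (soundex) matches
 */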
package blue.mesh; import java.io.IOException; import java.util.Set; import android.bluetooth.BluetoothAdapter; import android.bluetooth.BluetoothDevice; import android.bluetooth.BluetoothSocket; import android.util.Log; public class ClientThread extends Thread{ private static final String TAG = "ClientThread"; private BluetoothAdapter adapter; private RouterObject router; protected ClientThread( BluetoothAdapter mAdapter, RouterObject mRouter ) { adapter = mAdapter; router = mRouter; } //function run gets list of paired devices, and attempts to //open and connect a socket for that device, which is then //passed to the router object public void run() { while (true) { if(this.isInterrupted()){ if(Constants.DEBUG) Log.d(TAG, "interrupted"); return; } //get list of all paired devices Set <BluetoothDevice> pairedDevices = adapter.getBondedDevices(); for (BluetoothDevice d : pairedDevices) { BluetoothSocket clientSocket = null; try { Log.d(TAG, "Device: " + d.getName() ); if( router.getDeviceState(d) == Constants.STATE_CONNECTED) continue; clientSocket = d.createRfcommSocketToServiceRecord( Constants.MY_UUID); } catch (IOException e) { Log.e(TAG, "Socket create() failed", e); //TODO: throw exception return; } //once a socket is opened, try to connect and then pass to router try { clientSocket.connect(); router.beginConnection(clientSocket); } catch (IOException e) { if(this.isInterrupted()){ if(Constants.DEBUG) Log.d(TAG, "interrupted"); return; } Log.e(TAG, "Socket connect() failed", e); } } } } protected int closeSocket(){ //TODO use this function to close any socket that is in a blocking //call in order to kill this thread return Constants.SUCCESS; } protected int kill(){ this.closeSocket(); //TODO: this thread does not get interrupted correctly this.interrupt(); Log.d(TAG, "kill success"); return Constants.SUCCESS; } };
import org.apache.log4j.ConsoleAppender; import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.apache.log4j.PatternLayout; import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.WatchedEvent; import org.apache.zookeeper.Watcher; import org.apache.zookeeper.ZooKeeper; import java.io.IOException; public class Executor implements Watcher, Runnable, DataMonitorListener { private static final String LOCALHOST_ADDRESS = "127.0.0.1"; private static final Logger logger = Logger.getLogger(Executor.class); private String znode; private DataMonitor dm; private ZooKeeper zk; private String exec[]; private Process child; private Executor(String port, String znode, String exec[]) throws KeeperException, IOException { this.exec = exec; int sessionTimeout = 3000; zk = new ZooKeeper(LOCALHOST_ADDRESS + ":" + port, sessionTimeout, this); dm = new DataMonitor(zk, znode, null, this); } public static void main(String[] args) { loggerInit(); if (args.length < 2) { System.err.println("USAGE: port program [args ...]"); System.exit(2); } String hostPort = args[0]; String znode = "/znode_testowy"; String exec[] = new String[args.length - 1]; System.arraycopy(args, 1, exec, 0, exec.length); /* the child command starts at args[1]: the program itself plus its arguments */ try { new Executor(hostPort, znode, exec).run(); } catch (Exception e) { e.printStackTrace(); } } private static void loggerInit() { ConsoleAppender console = new ConsoleAppender(); String PATTERN = "%d [%p|%c|%C{1}] %m%n"; console.setLayout(new PatternLayout(PATTERN)); console.setThreshold(Level.INFO); console.activateOptions(); Logger.getRootLogger().addAppender(console); } public void process(WatchedEvent event) { dm.process(event); } public void run() { try { synchronized (this) { while (!dm.dead) { wait(); } } } catch (InterruptedException e) { logger.error("Error while running process", e); } } public void closing(int rc) { synchronized (this) { notifyAll(); } } public void exists(byte[] data) { if (data == null) { if (child != null) { System.out.println("Killing process"); child.destroy(); try { child.waitFor(); } catch (InterruptedException e) { logger.error("Error while killing process", e); } } child = null; } else { if (child != null) { System.out.println("Stopping child"); child.destroy(); try { child.waitFor(); } catch (InterruptedException e) { logger.error("Error while stopping process", e); } } try { System.out.println("Starting child"); child = Runtime.getRuntime().exec(exec); } catch (IOException e) { e.printStackTrace(); } } } }
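/*
 * Illustrative usage sketch for the Executor above (added for clarity, not part of the original source).
 * It assumes a ZooKeeper server is listening on localhost at the given port and that DataMonitor /
 * DataMonitorListener are the companion classes of the standard ZooKeeper watcher example.
 *
 *   java Executor 2181 /bin/cat /tmp/out
 *
 * The first argument is the ZooKeeper port, the rest is the child command line. Whenever the hard-coded
 * znode "/znode_testowy" is created or changed, exists(byte[]) (re)starts the child process; when the
 * znode is deleted, the child is killed.
 */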
package blue.mesh; import java.io.IOException; import java.util.Set; import android.bluetooth.BluetoothAdapter; import android.bluetooth.BluetoothDevice; import android.bluetooth.BluetoothSocket; import android.os.Handler; import android.os.ParcelUuid; import android.util.Log; public class ClientThread extends Thread{ private static final String TAG = "BluetoothChatService"; private Handler handler; private BluetoothAdapter adapter; private RouterObject router; ClientThread( Handler mHandler, BluetoothAdapter mAdapter, RouterObject mRouter ) { handler = mHandler; adapter = mAdapter; router = mRouter; } //function run gets list of paired devices, and attempts to //open and connect a socket for that device, which is then //passed to the router object public void run() { while (true) { if(this.isInterrupted()){ Log.e(TAG, "Connect thread interrupted", null); return; } //get list of all paired devices Set <BluetoothDevice> pairedDevices = adapter.getBondedDevices(); for (BluetoothDevice d : pairedDevices) { //for each paired device, get uuids //for each uuid, try to open a socket on it BluetoothSocket clientSocket = null; try { clientSocket = d.createRfcommSocketToServiceRecord(Constants.MY_UUID_SECURE); } catch (IOException e) { Log.e(TAG, "Socket create() failed", e); continue; /* no socket for this device; calling connect() below would throw a NullPointerException */ } //once a socket is opened, try to connect and then pass to router try { clientSocket.connect(); router.BeginConnection(clientSocket); } catch (IOException e) { Log.e(TAG, "Socket connect() failed", e); } } } } };
package blue.mesh; import android.bluetooth.BluetoothAdapter; import android.bluetooth.BluetoothSocket; import android.os.Handler; public class RouterObject { private Handler handler; private BluetoothAdapter adapter; RouterObject( Handler mHandler, BluetoothAdapter mAdapter) { handler = mHandler; adapter= mAdapter; } public int BeginConnection(BluetoothSocket mSocket) { //TODO: Get rid of these two lines handler.notify(); adapter.enable(); //Great Success! return 1; } public int WriteAll(byte buff[]) { //Great Success! return 1; } public byte [] GetUserMessage() { byte arr[] = null; return arr; } public int stop() { //Great Success! return 1; } }
package com.example.vladimirsinicyn.thermostat; import java.text.SimpleDateFormat; import java.util.Calendar; import java.util.Timer; import java.util.TimerTask; public class TCConroller { private Time time; private final int timeFactor = 300; //private ThermostateState state; private int dayOfWeek; // 0 = Monday, 6 = Sunday public TCConroller() { String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(Calendar.getInstance().getTime()); String[] arr = timeStamp.split("_"); int realHours = Integer.parseInt(arr[1].substring(0, 2)); int realMins = Integer.parseInt(arr[1].substring(2, 4)); int realMinsTotal = realHours * 60 + realMins; time = new Time(realMinsTotal); int msInMin = 60 * 1000; // our time is 300 times faster than real time // which means that: // one minute of our time // 200 ms of real time Timer timer = new Timer(); timer.scheduleAtFixedRate(new TCTimerTask(), 0, msInMin / timeFactor); } private class TCTimerTask extends TimerTask { @Override public void run() { // // get current time // int hours = time.getHours(); // int mins = time.getMinutes(); // int realMinsTotal = hours * 60 + mins; // time.toMinutes(); // Time currentTime = new Time(realMinsTotal); // // check whether it is time to change // DaySchedule daySchedule = state.getDayShedule(); // TemperatureChange change = daySchedule.find(currentTime); // if (change != null) { // state.change(change.getType()); // if (time.checkMidnight()) { // dayOfWeek++; // if (dayOfWeek >= 7) { // dayOfWeek = 0; time.incrementTime(); } } } //class ThermostateState{ // private Temperature targetTemperature; // private boolean night; // public Temperature getTemperature() { // return targetTemperature; // public void change(ChangeType type) { // public boolean getDayPart() { // return night; // public DaySchedule getDayShedule() {return new DaySchedule();} // public void incrementDay() {
package ru.nsu.ccfit.bogush; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.core.config.Configurator; import ru.nsu.ccfit.bogush.msg.TextMessage; import java.io.*; import java.net.ServerSocket; import java.net.Socket; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.HashSet; import java.util.Properties; import java.util.concurrent.ArrayBlockingQueue; public class Server { static { LoggingConfiguration.addConfigFile(LoggingConfiguration.DEFAULT_LOGGER_CONFIG_FILE); } private static final Logger logger = LogManager.getLogger(); private static final String DO_LOGGING_KEY = "log"; private static final String SERVER_PORT_KEY = "server-port"; private static final String DO_LOGGING_DEFAULT = "true"; private static final String SERVER_PORT_DEFAULT = "0"; private static final String PROPERTIES_FILE = "server.properties"; private static final String PROPERTIES_COMMENT = "Server properties file"; private static final int HISTORY_CAPACITY = 50; private static final Properties DEFAULT_PROPERTIES = new Properties(); static { DEFAULT_PROPERTIES.setProperty(DO_LOGGING_KEY, DO_LOGGING_DEFAULT); DEFAULT_PROPERTIES.setProperty(SERVER_PORT_KEY, SERVER_PORT_DEFAULT); } private Properties properties = new Properties(DEFAULT_PROPERTIES); private HashSet<ConnectedUser> connectedUsers = new HashSet<>(); private ArrayBlockingQueue<TextMessage> history = new ArrayBlockingQueue<>(HISTORY_CAPACITY); private int port; private Thread thread; public static void main(String[] args) { logger.traceEntry("main"); Server server = new Server(); server.start(); try { server.getThread().join(); } catch (InterruptedException e) { logger.trace("Server interrupted"); e.printStackTrace(); } logger.traceExit("main", null); } private Server() { configure(); } private Thread getThread() { return thread; } private void start() { logger.info("Start server"); thread = new Thread(() -> { try (ServerSocket serverSocket = new ServerSocket(port)){ if (port == 0) { port = serverSocket.getLocalPort(); logger.info("Port set automatically to {}", port); } logger.info("Created socket on port {}", port); logger.info("Server ip: {}", serverSocket.getInetAddress().getHostAddress()); while (!Thread.interrupted()) { Socket socket = serverSocket.accept(); logger.info("Socket [{}] accepted", socket.getInetAddress().getHostAddress()); ConnectedUser connectedUser = new ConnectedUser(this, socket); if (connectedUsers.contains(connectedUser)) { logger.debug("User already connected"); } else { connectedUsers.add(connectedUser); connectedUser.start(); } } } catch (IOException e) { e.printStackTrace(); } finally { logger.info("Stop server"); } }); thread.start(); /* start the accept loop so that getThread().join() in main actually waits for it */ logger.info("Server started"); } public void logout(ConnectedUser connectedUser) { connectedUsers.remove(connectedUser); connectedUser.stop(); } public void addToHistory(TextMessage message) { logger.trace("Add \"{}\" to history", message.toString()); try { if (history.remainingCapacity() == 0) { history.take(); } history.put(message); } catch (InterruptedException e) { e.printStackTrace(); } } public HashSet<ConnectedUser> getConnectedUsers() { return connectedUsers; } private void configure() { logger.traceEntry("configure"); loadProperties(); boolean doLogging = Boolean.parseBoolean(properties.getProperty(DO_LOGGING_KEY)); if (!doLogging) { Configurator.setRootLevel(Level.OFF); } port = Integer.parseInt(properties.getProperty(SERVER_PORT_KEY));
storeProperties(); logger.traceExit("configure", null); } private void loadProperties() { logger.traceEntry("loadProperties"); Path path = Paths.get(PROPERTIES_FILE); if (Files.exists(path)) { try (InputStream is = new FileInputStream(PROPERTIES_FILE)) { properties.load(is); } catch (FileNotFoundException e) { logger.error("File \"{}\" exists but not found! (Shouldn't get here normally)", PROPERTIES_FILE); } catch (IOException e) { logger.error("Problems with loading properties file \"{}\"", PROPERTIES_FILE); } } else { logger.warn("Properties file \"{}\" doesn't exist", PROPERTIES_FILE); } logger.traceExit("loadProperties", null); } private void storeProperties() { logger.traceEntry("storeProperties"); // force store each key-value pair for (String key : properties.stringPropertyNames()) { properties.setProperty(key, properties.getProperty(key)); } try (OutputStream os = new FileOutputStream(PROPERTIES_FILE)) { properties.store(os, PROPERTIES_COMMENT); } catch (IOException e) { logger.error("Problems with storing properties file \"{}\"", PROPERTIES_FILE); } finally { logger.traceExit("storeProperties", null); } } }
package fitnesse.responders.editing; import static fitnesse.wiki.PageData.PropertyLAST_MODIFIED; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import fitnesse.Responder; import fitnesse.http.MockRequest; import fitnesse.http.Response; import fitnesse.testutil.FitNesseUtil; import fitnesse.wiki.PageData; import fitnesse.wiki.PathParser; import fitnesse.wiki.WikiPage; import fitnesse.wiki.WikiPageProperties; import fitnesse.wiki.WikiPageUtil; import fitnesse.wiki.mem.InMemoryPage; import org.junit.Before; import org.junit.Test; public class SavePropertiesResponderTest { private static final String PAGE_NAME = "PageOne"; private WikiPage root; private MockRequest request; private WikiPage page; private Responder responder; @Before public void setUp() throws Exception { root = InMemoryPage.makeRoot("RooT"); responder = new SavePropertiesResponder(); } private void createRequest() throws Exception { page = WikiPageUtil.addPage(root, PathParser.parse(PAGE_NAME)); request = new MockRequest(); request.addInput("PageType", "Test"); request.addInput("Properties", "on"); request.addInput("Search", "on"); request.addInput("RecentChanges", "on"); request.addInput(PageData.PropertyPRUNE,"on"); request.addInput(PageData.PropertySECURE_READ, "on"); request.addInput("Suites", "Suite A, Suite B"); request.addInput("HelpText", "Help text literal"); request.setResource(PAGE_NAME); } @Test public void testResponse() throws Exception { createRequest(); Response response = responder.makeResponse(FitNesseUtil.makeTestContext(root), request); PageData data = page.getData(); assertTrue(data.hasAttribute("Test")); assertTrue(data.hasAttribute("Properties")); assertTrue(data.hasAttribute("Search")); assertFalse(data.hasAttribute("Edit")); assertTrue(data.hasAttribute("RecentChanges")); assertTrue(data.hasAttribute(PageData.PropertySECURE_READ)); assertFalse(data.hasAttribute(PageData.PropertySECURE_WRITE)); assertTrue(data.hasAttribute(PageData.PropertyPRUNE)); assertEquals("Suite A, Suite B", data.getAttribute(PageData.PropertySUITES)); assertEquals("Help text literal", data.getAttribute(PageData.PropertyHELP)); assertEquals(303, response.getStatus()); assertEquals("/" + PAGE_NAME, response.getHeader("Location")); } @Test public void testRemovesHelpAndSuitesAttributeIfEmpty() throws Exception { createRequest(); request.addInput("Suites", ""); request.addInput("HelpText", ""); responder.makeResponse(FitNesseUtil.makeTestContext(root), request); PageData data = page.getData(); assertFalse("should not have help attribute", data.hasAttribute(PageData.PropertyHELP)); assertFalse("should not have suites attribute", data.hasAttribute(PageData.PropertySUITES)); } @Test public void testPageDataDefaultAttributesAreKeptWhenSavedThroughSavePropertiesResponder() throws Exception { // The old way the default attributes were set in PageData.initializeAttributes() // was to set them with a value of "true" // The SavePropertiesResponder saves them by setting the attribute without a value. // This test ensures that the behavior is the same (i.e. 
without value) page = WikiPageUtil.addPage(root, PathParser.parse(PAGE_NAME)); PageData defaultData = new PageData(page); request = new MockRequest(); request.setResource(PAGE_NAME); setBooleanAttributesOnRequest(defaultData, PageData.NON_SECURITY_ATTRIBUTES); setBooleanAttributesOnRequest(defaultData, PageData.SECURITY_ATTRIBUTES); responder.makeResponse(FitNesseUtil.makeTestContext(root), request); PageData dataToSave = page.getData(); // The LasModified Attribute is the only one that might be different, so fix it here dataToSave.setAttribute(PropertyLAST_MODIFIED, defaultData.getAttribute(PropertyLAST_MODIFIED)); WikiPageProperties defaultWikiPagePropertiesDefault = new WikiPageProperties(defaultData.getProperties()); WikiPageProperties wikiPagePropertiesToSave = new WikiPageProperties(dataToSave.getProperties()); assertEquals(defaultWikiPagePropertiesDefault.toXml(), wikiPagePropertiesToSave.toXml()); } private void setBooleanAttributesOnRequest(PageData data, String[] booleanAttributes) { for (String booleanAttribute : booleanAttributes) { if (data.hasAttribute(booleanAttribute)) { request.addInput(booleanAttribute, "on"); } } } }
package com.matthewlogan.pushertabstrip.app; import android.content.Context; import android.support.v4.view.ViewPager; import android.util.AttributeSet; import android.util.Log; import android.widget.RelativeLayout; import android.widget.TextView; public class PusherTabStrip extends RelativeLayout implements ViewPager.OnPageChangeListener { private Context mContext; private ViewPager mViewPager; private TextView[] mTextViews; private int mCurrentPosition; private int mCurrentOffsetPixels; public PusherTabStrip(Context context) { this(context, null); } public PusherTabStrip(Context context, AttributeSet attrs) { this(context, attrs, 0); } public PusherTabStrip(Context context, AttributeSet attrs, int defStyle) { super(context, attrs, defStyle); mContext = context; setWillNotDraw(false); } public void bindViewPager(ViewPager viewPager, String[] titles) { mViewPager = viewPager; mViewPager.setOnPageChangeListener(this); mTextViews = new TextView[titles.length]; for (int i = 0; i < titles.length; i++) { TextView textView = new TextView(mContext); textView.setText(titles[i]); addView(textView); RelativeLayout.LayoutParams lp = (RelativeLayout.LayoutParams) textView.getLayoutParams(); if (lp != null) { lp.addRule(CENTER_VERTICAL); textView.setLayoutParams(lp); } mTextViews[i] = textView; } layoutTextViews(); } @Override public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) { Log.d("testing", "\nposition: " + position + "\noffset: " + positionOffset + "\n pixels: " + positionOffsetPixels); mCurrentPosition = position; mCurrentOffsetPixels = positionOffsetPixels; layoutTextViews(); } @Override public void onPageSelected(int position) { } @Override public void onPageScrollStateChanged(int state) { } private void layoutTextViews() { for (int i = 0; i < mTextViews.length; i++) { if (i < mCurrentPosition - 1) { TextView behindTextView = mTextViews[i]; behindTextView.setX(0.f - behindTextView.getMeasuredWidth()); } else if (i == mCurrentPosition - 1) { TextView prevTextView = mTextViews[i]; prevTextView.setX(0.f); } else if (i == mCurrentPosition) { TextView currentTextView = mTextViews[i]; float center = getWidth() / 2.f - currentTextView.getMeasuredWidth() / 2.f; float x = center - mCurrentOffsetPixels; if (x < 0) { x = 0; } currentTextView.setX(x); } else if (i == mCurrentPosition + 1) { TextView nextTextView = mTextViews[i]; float right = getWidth() - nextTextView.getMeasuredWidth(); float center = getWidth() / 2.f - nextTextView.getMeasuredWidth() / 2.f; float distanceToMove = right - center; float x = right; if (getWidth() - mCurrentOffsetPixels <= distanceToMove) { x = center + (getWidth() - mCurrentOffsetPixels); } nextTextView.setX(x); } else if (i > mCurrentPosition + 1) { TextView aheadTextView = mTextViews[i]; aheadTextView.setX(getWidth()); } } } }
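/*
 * Illustrative usage sketch for PusherTabStrip above (added for clarity, not part of the original
 * source). The layout ids and the pager adapter are placeholders; only bindViewPager(ViewPager, String[])
 * from this file is assumed.
 *
 *   PusherTabStrip tabStrip = (PusherTabStrip) findViewById(R.id.tab_strip);
 *   ViewPager pager = (ViewPager) findViewById(R.id.view_pager);
 *   pager.setAdapter(new MyPagerAdapter(getSupportFragmentManager()));
 *   tabStrip.bindViewPager(pager, new String[]{"Feed", "Friends", "Profile"});
 *   // the strip registers itself as the pager's OnPageChangeListener and slides the titles with the scroll offset
 */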
package net.ripe.commons.ip; import java.math.BigInteger; import static java.math.BigInteger.ONE; public final class Ipv6 extends AbstractIp<Ipv6, Ipv6Range> { private static final long serialVersionUID = -1L; public static final BigInteger FOUR_OCTECT_MASK = BigInteger.valueOf(0xFFFF); public static final int NUMBER_OF_BITS = 128; public static final BigInteger MINIMUM_VALUE = BigInteger.ZERO; public static final BigInteger MAXIMUM_VALUE = new BigInteger(String.valueOf((ONE.shiftLeft(NUMBER_OF_BITS)).subtract(ONE))); public static final Ipv6 FIRST_IPV6_ADDRESS = Ipv6.of(MINIMUM_VALUE); public static final Ipv6 LAST_IPV6_ADDRESS = Ipv6.of(MAXIMUM_VALUE); private static final int MIN_PART_VALUE = 0x0; private static final int MAX_PART_VALUE = 0xFFFF; private static final int MAX_PART_LENGTH = 4; private static final String DEFAULT_PARSING_ERROR_MESSAGE = "Invalid IPv6 address: "; private static final String COLON = ":"; private static final String ZERO = "0"; private static final int BITS_PER_PART = 16; private static final int TOTAL_OCTETS = 8; private static final int COLON_COUNT_IPV6 = 7; private static final BigInteger MINUS_ONE = BigInteger.valueOf(-1); private final BigInteger value; protected Ipv6(BigInteger value) { this.value = Validate.notNull(value, "value is required"); Validate.isTrue(value.compareTo(MINIMUM_VALUE) >= 0, "Value of IPv6 has to be greater than or equal to " + MINIMUM_VALUE); Validate.isTrue(value.compareTo(MAXIMUM_VALUE) <= 0, "Value of IPv6 has to be less than or equal to " + MAXIMUM_VALUE); } BigInteger value() { return value; } public static Ipv6 of(BigInteger value) { return new Ipv6(value); } public static Ipv6 of(String value) { return parse(value); } @Override public int compareTo(Ipv6 other) { return value.compareTo(other.value); } @Override public Ipv6 next() { return new Ipv6(value.add(ONE)); } @Override public Ipv6 previous() { return new Ipv6(value.subtract(ONE)); } @Override public boolean hasNext() { return this.compareTo(LAST_IPV6_ADDRESS) < 0; } @Override public boolean hasPrevious() { return this.compareTo(FIRST_IPV6_ADDRESS) > 0; } @Override public Ipv6Range asRange() { return new Ipv6Range(this, this); } @Override public String toString() { long[] parts = new long[8]; // Find longest sequence of zeroes. Use the first one if there are // multiple sequences of zeroes with the same length. 
int currentZeroPartsLength = 0; int currentZeroPartsStart = 0; int maxZeroPartsLength = 0; int maxZeroPartsStart = 0; for (int i = 0; i < parts.length; ++i) { parts[i] = value().shiftRight((7 - i) * BITS_PER_PART).and(FOUR_OCTECT_MASK).longValue(); if (parts[i] == 0) { if (currentZeroPartsLength == 0) { currentZeroPartsStart = i; } ++currentZeroPartsLength; if (currentZeroPartsLength > maxZeroPartsLength) { maxZeroPartsLength = currentZeroPartsLength; maxZeroPartsStart = currentZeroPartsStart; } } else { currentZeroPartsLength = 0; } } StringBuilder sb = new StringBuilder(39); if (maxZeroPartsStart == 0 && maxZeroPartsLength > 1) { sb.append(COLON); } String delimiter = ""; for (int i = 0; i < parts.length; ++i) { if (i == maxZeroPartsStart && maxZeroPartsLength > 1) { i += maxZeroPartsLength; sb.append(COLON); } sb.append(delimiter); if (i <= 7) { sb.append(Long.toHexString(parts[i])); } else { break; } delimiter = COLON; } return sb.toString(); } public static Ipv6 parse(final String ipv6Address) { try { String ipv6String = Validate.notNull(ipv6Address, "IPv6 address must not be null").trim(); Validate.isTrue(!ipv6String.isEmpty(), "IPv6 address must not be empty"); final boolean isIpv6AddressWithEmbeddedIpv4 = ipv6String.contains("."); if (isIpv6AddressWithEmbeddedIpv4) { ipv6String = getIpv6AddressWithIpv4SectionInIpv6Notation(ipv6String); } final int indexOfDoubleColons = ipv6String.indexOf("::"); final boolean isShortened = indexOfDoubleColons != -1; if (isShortened) { Validate.isTrue(indexOfDoubleColons == ipv6String.lastIndexOf("::"), DEFAULT_PARSING_ERROR_MESSAGE + ipv6Address); ipv6String = expandMissingColons(ipv6String, indexOfDoubleColons, countColons(ipv6String), ipv6Address); } String[] split = ipv6String.split(COLON, TOTAL_OCTETS); Validate.isTrue(split.length == TOTAL_OCTETS, DEFAULT_PARSING_ERROR_MESSAGE + ipv6Address); BigInteger ipv6value = BigInteger.ZERO; for (String part : split) { Validate.isTrue(part.length() <= MAX_PART_LENGTH, DEFAULT_PARSING_ERROR_MESSAGE + ipv6Address); Validate.checkRange(Integer.parseInt(part, BITS_PER_PART), MIN_PART_VALUE, MAX_PART_VALUE); ipv6value = ipv6value.shiftLeft(BITS_PER_PART).add(new BigInteger(part, BITS_PER_PART)); } return new Ipv6(ipv6value); } catch (IllegalArgumentException e) { throw new IllegalArgumentException(DEFAULT_PARSING_ERROR_MESSAGE + ipv6Address, e); } } private static String expandMissingColons(final String ipv6String, final int indexOfDoubleColons, final int colonCount, final String ipv6Address) { Validate.isTrue(colonCount >= 2 && colonCount <= COLON_COUNT_IPV6, DEFAULT_PARSING_ERROR_MESSAGE + ipv6Address); final int missingZeros = COLON_COUNT_IPV6 - colonCount + 1; String leftPart = ipv6String.substring(0, indexOfDoubleColons); String rightPart = ipv6String.substring(indexOfDoubleColons + 2); if (leftPart.isEmpty()) { leftPart = ZERO; } if (rightPart.isEmpty()) { rightPart = ZERO; } StringBuilder sb = new StringBuilder(); sb.append(leftPart); for (int i = 0; i < missingZeros; i++) { sb.append(COLON).append(ZERO); } sb.append(COLON).append(rightPart); return sb.toString(); } private static int countColons(String ipv6String) { int count = 0; for (char c : ipv6String.toCharArray()) { if (c == ':') { count++; } } return count; } private static String getIpv6AddressWithIpv4SectionInIpv6Notation(String ipv6String) { try { final int indexOfLastColon = ipv6String.lastIndexOf(COLON); final String ipv6Section = ipv6String.substring(0, indexOfLastColon); final String ipv4Section = ipv6String.substring(indexOfLastColon + 
1); final Ipv4 ipv4 = Ipv4.parse(ipv4Section); final Ipv6 ipv6FromIpv4 = new Ipv6(BigInteger.valueOf(ipv4.value())); return ipv6Section + ipv6FromIpv4.toString().substring(1); } catch(IllegalArgumentException e) { throw new IllegalArgumentException(DEFAULT_PARSING_ERROR_MESSAGE + ipv6String, e); } } @Override public int bitSize() { return NUMBER_OF_BITS; } @Override public BigInteger asBigInteger() { return value; } @Override public Ipv6 lowerBoundForPrefix(int prefixLength) { Validate.checkRange(prefixLength, 0, NUMBER_OF_BITS); BigInteger mask = bitMask(0).xor(bitMask(prefixLength)); return new Ipv6(value.and(mask)); } @Override public Ipv6 upperBoundForPrefix(int prefixLength) { Validate.checkRange(prefixLength, 0, NUMBER_OF_BITS); return new Ipv6(value.or(bitMask(prefixLength))); } private BigInteger bitMask(int prefixLength) { return ONE.shiftLeft(NUMBER_OF_BITS - prefixLength).add(MINUS_ONE); } @Override public int getCommonPrefixLength(Ipv6 other) { BigInteger temp = value.xor(other.value); return NUMBER_OF_BITS - temp.bitLength(); } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } Ipv6 that = (Ipv6) o; return value.equals(that.value); } @Override public int hashCode() { return value.hashCode(); } }
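/*
 * Illustrative usage sketch for the Ipv6 class above (added for clarity, not part of the original
 * source); only methods defined in this file are used and the sample addresses are arbitrary.
 *
 *   Ipv6 addr = Ipv6.parse("2001:db8::1");
 *   addr.toString();                            // "2001:db8::1" - the longest zero run is compressed
 *   addr.lowerBoundForPrefix(64);               // 2001:db8::
 *   addr.upperBoundForPrefix(64);               // 2001:db8::ffff:ffff:ffff:ffff
 *   addr.next();                                // 2001:db8::2
 *   Ipv6.parse("::ffff:192.0.2.1");             // IPv6 with an embedded IPv4 section is accepted as well
 */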
package com.noprestige.kanaquiz.questions; import org.junit.Test; import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.assertThat; public class KanaQuestionTest {
private static final KanaQuestion[] HIRAGANA_1 = { new KanaQuestion("あ", "a"), new KanaQuestion("い", "i"), new KanaQuestion("う", "u"), new KanaQuestion("え", "e"), new KanaQuestion("お", "o") };
private static final KanaQuestion[] HIRAGANA_2_BASE = { new KanaQuestion("か", "ka"), new KanaQuestion("き", "ki"), new KanaQuestion("く", "ku"), new KanaQuestion("け", "ke"), new KanaQuestion("こ", "ko") };
private static final KanaQuestion[] HIRAGANA_2_DAKUTEN = { new KanaQuestion("が", "ga"), new KanaQuestion("ぎ", "gi"), new KanaQuestion("ぐ", "gu"), new KanaQuestion("げ", "ge"), new KanaQuestion("ご", "go") };
private static final KanaQuestion[] HIRAGANA_2_BASE_DIGRAPHS = { new KanaQuestion("きゃ", "kya"), new KanaQuestion("きゅ", "kyu"), new KanaQuestion("きょ", "kyo") };
private static final KanaQuestion[] HIRAGANA_2_DAKUTEN_DIGRAPHS = { new KanaQuestion("ぎゃ", "gya"), new KanaQuestion("ぎゅ", "gyu"), new KanaQuestion("ぎょ", "gyo") };
private static final KanaQuestion[] HIRAGANA_3_BASE = { new KanaQuestion("さ", "sa"), new KanaQuestion("し", "shi"), new KanaQuestion("す", "su"), new KanaQuestion("せ", "se"), new KanaQuestion("そ", "so") };
private static final KanaQuestion[] HIRAGANA_3_DAKUTEN = { new KanaQuestion("ざ", "za"), new KanaQuestion("じ", "ji"), new KanaQuestion("ず", "zu"), new KanaQuestion("ぜ", "ze"), new KanaQuestion("ぞ", "zo") };
private static final KanaQuestion[] HIRAGANA_3_BASE_DIGRAPHS = { new KanaQuestion("しゃ", "sha"), new KanaQuestion("しゅ", "shu"), new KanaQuestion("しょ", "sho") };
private static final KanaQuestion[] HIRAGANA_3_DAKUTEN_DIGRAPHS = { new KanaQuestion("じゃ", "ja"), new KanaQuestion("じゅ", "ju"), new KanaQuestion("じょ", "jo") };
private static final KanaQuestion[] HIRAGANA_4_BASE = { new KanaQuestion("た", "ta"), new KanaQuestion("ち", "chi"), new KanaQuestion("つ", "tsu"), new KanaQuestion("て", "te"), new KanaQuestion("と", "to") };
private static final KanaQuestion[] HIRAGANA_4_DAKUTEN = { new KanaQuestion("だ", "da"), new KanaQuestion("ぢ", "ji"), new KanaQuestion("づ", "zu"), new KanaQuestion("で", "de"), new KanaQuestion("ど", "do") };
private static final KanaQuestion[] HIRAGANA_4_BASE_DIGRAPHS = { new KanaQuestion("ちゃ", "cha"), new KanaQuestion("ちゅ", "chu"), new KanaQuestion("ちょ", "cho") };
private static final KanaQuestion[] HIRAGANA_4_DAKUTEN_DIGRAPHS = { new KanaQuestion("ぢゃ", "ja"), new KanaQuestion("ぢゅ", "ju"), new KanaQuestion("ぢょ", "jo") };
private static final KanaQuestion[] HIRAGANA_5 = { new KanaQuestion("な", "na"), new KanaQuestion("に", "ni"), new KanaQuestion("ぬ", "nu"), new KanaQuestion("ね", "ne"), new KanaQuestion("の", "no") };
private static final KanaQuestion[] HIRAGANA_5_DIGRAPHS = { new KanaQuestion("にゃ", "nya"), new KanaQuestion("にゅ", "nyu"), new KanaQuestion("にょ", "nyo") };
private static final KanaQuestion[] HIRAGANA_6_BASE = { new KanaQuestion("は", "ha"), new KanaQuestion("ひ", "hi"), new KanaQuestion("ふ", "fu"), new KanaQuestion("へ", "he"), new KanaQuestion("ほ", "ho") };
private static final KanaQuestion[] HIRAGANA_6_DAKUTEN = { new KanaQuestion("ば", "ba"), new KanaQuestion("び", "bi"), new KanaQuestion("ぶ", "bu"), new KanaQuestion("べ", "be"), new KanaQuestion("ぼ", "bo") };
private static final KanaQuestion[] HIRAGANA_6_HANDAKUTEN = { new KanaQuestion("ぱ", "pa"), new KanaQuestion("ぴ", "pi"), new KanaQuestion("ぷ", "pu"), new KanaQuestion("ぺ", "pe"), new KanaQuestion("ぽ", "po") };
private static final KanaQuestion[] HIRAGANA_6_BASE_DIGRAPHS = { new KanaQuestion("ひゃ", "hya"), new KanaQuestion("ひゅ", "hyu"), new KanaQuestion("ひょ", "hyo") };
private static final KanaQuestion[] HIRAGANA_6_DAKUTEN_DIGRAPHS = { new KanaQuestion("びゃ", "bya"), new KanaQuestion("びゅ", "byu"), new KanaQuestion("びょ", "byo") };
private static final KanaQuestion[] HIRAGANA_6_HANDAKUTEN_DIGRAPHS = { new KanaQuestion("ぴゃ", "pya"), new KanaQuestion("ぴゅ", "pyu"), new KanaQuestion("ぴょ", "pyo") };
private static final KanaQuestion[] HIRAGANA_7 = { new KanaQuestion("ま", "ma"), new KanaQuestion("み", "mi"), new KanaQuestion("む", "mu"), new KanaQuestion("め", "me"), new KanaQuestion("も", "mo") };
private static final KanaQuestion[] HIRAGANA_7_DIGRAPHS = { new KanaQuestion("みゃ", "mya"), new KanaQuestion("みゅ", "myu"), new KanaQuestion("みょ", "myo") };
private static final KanaQuestion[] HIRAGANA_8 = { new KanaQuestion("ら", "ra"), new KanaQuestion("り", "ri"), new KanaQuestion("る", "ru"), new KanaQuestion("れ", "re"), new KanaQuestion("ろ", "ro") };
private static final KanaQuestion[] HIRAGANA_8_DIGRAPHS = { new KanaQuestion("りゃ", "rya"), new KanaQuestion("りゅ", "ryu"), new KanaQuestion("りょ", "ryo") };
private static final KanaQuestion[] HIRAGANA_9 = { new KanaQuestion("や", "ya"), new KanaQuestion("ゆ", "yu"), new KanaQuestion("よ", "yo") };
private static final KanaQuestion[] HIRAGANA_10_W_GROUP = { new KanaQuestion("わ", "wa"), new KanaQuestion("を", "wo") };
private static final KanaQuestion[] HIRAGANA_10_N_CONSONANT = { new KanaQuestion("ん", "n") };
private static final KanaQuestion[] KATAKANA_1 = { new KanaQuestion("ア", "a"), new KanaQuestion("イ", "i"), new KanaQuestion("ウ", "u"), new KanaQuestion("エ", "e"), new KanaQuestion("オ", "o") };
private static final KanaQuestion[] KATAKANA_2_BASE = { new KanaQuestion("カ", "ka"), new KanaQuestion("キ", "ki"), new KanaQuestion("ク", "ku"), new KanaQuestion("ケ", "ke"), new KanaQuestion("コ", "ko") };
private static final KanaQuestion[] KATAKANA_2_DAKUTEN = { new KanaQuestion("ガ", "ga"), new KanaQuestion("ギ", "gi"), new KanaQuestion("グ", "gu"), new KanaQuestion("ゲ", "ge"), new KanaQuestion("ゴ", "go") };
private static final KanaQuestion[] KATAKANA_2_BASE_DIGRAPHS = { new KanaQuestion("キャ", "kya"), new KanaQuestion("キュ", "kyu"), new KanaQuestion("キョ", "kyo") };
private static final KanaQuestion[] KATAKANA_2_DAKUTEN_DIGRAPHS = { new KanaQuestion("ギャ", "gya"), new KanaQuestion("ギュ", "gyu"), new KanaQuestion("ギョ", "gyo") };
private static final KanaQuestion[] KATAKANA_3_BASE = { new KanaQuestion("サ", "sa"), new KanaQuestion("シ", "shi"), new KanaQuestion("ス", "su"), new KanaQuestion("セ", "se"), new KanaQuestion("ソ", "so") };
private static final KanaQuestion[] KATAKANA_3_DAKUTEN = { new KanaQuestion("ザ", "za"), new KanaQuestion("ジ", "ji"), new KanaQuestion("ズ", "zu"), new KanaQuestion("ゼ", "ze"), new KanaQuestion("ゾ", "zo") };
private static final KanaQuestion[] KATAKANA_3_BASE_DIGRAPHS = { new KanaQuestion("シャ", "sha"), new KanaQuestion("シュ", "shu"), new KanaQuestion("ショ", "sho") };
private static final KanaQuestion[] KATAKANA_3_DAKUTEN_DIGRAPHS = { new KanaQuestion("ジャ", "ja"), new KanaQuestion("ジュ", "ju"), new KanaQuestion("ジョ", "jo") };
private static final KanaQuestion[] KATAKANA_4_BASE = { new KanaQuestion("タ", "ta"), new KanaQuestion("チ", "chi"), new KanaQuestion("ツ", "tsu"), new KanaQuestion("テ", "te"), new KanaQuestion("ト", "to") };
private static final KanaQuestion[] KATAKANA_4_DAKUTEN = { new KanaQuestion("ダ", "da"), new KanaQuestion("ヂ", "ji"), new KanaQuestion("ヅ", "zu"), new KanaQuestion("デ", "de"), new KanaQuestion("ド", "do") };
private static final KanaQuestion[] KATAKANA_4_BASE_DIGRAPHS = { new KanaQuestion("チャ", "cha"), new KanaQuestion("チュ", "chu"), new KanaQuestion("チョ", "cho") };
private static final KanaQuestion[] KATAKANA_4_DAKUTEN_DIGRAPHS = { new KanaQuestion("ヂャ", "ja"), new KanaQuestion("ヂュ", "ju"), new KanaQuestion("ヂョ", "jo") };
private static final KanaQuestion[] KATAKANA_5 = { new KanaQuestion("ナ", "na"), new KanaQuestion("ニ", "ni"), new KanaQuestion("ヌ", "nu"), new KanaQuestion("ネ", "ne"), new KanaQuestion("ノ", "no") };
private static final KanaQuestion[] KATAKANA_5_DIGRAPHS = { new KanaQuestion("ニャ", "nya"), new KanaQuestion("ニュ", "nyu"), new KanaQuestion("ニョ", "nyo") };
private static final KanaQuestion[] KATAKANA_6_BASE = { new KanaQuestion("ハ", "ha"), new KanaQuestion("ヒ", "hi"), new KanaQuestion("フ", "fu"), new KanaQuestion("ヘ", "he"), new KanaQuestion("ホ", "ho") };
private static final KanaQuestion[] KATAKANA_6_DAKUTEN = { new KanaQuestion("バ", "ba"), new KanaQuestion("ビ", "bi"), new KanaQuestion("ブ", "bu"), new KanaQuestion("ベ", "be"), new KanaQuestion("ボ", "bo") };
private static final KanaQuestion[] KATAKANA_6_HANDAKUTEN = { new KanaQuestion("パ", "pa"), new KanaQuestion("ピ", "pi"), new KanaQuestion("プ", "pu"), new KanaQuestion("ペ", "pe"), new KanaQuestion("ポ", "po") };
private static final KanaQuestion[] KATAKANA_6_BASE_DIGRAPHS = { new KanaQuestion("ヒャ", "hya"), new KanaQuestion("ヒュ", "hyu"), new KanaQuestion("ヒョ", "hyo") };
private static final KanaQuestion[] KATAKANA_6_DAKUTEN_DIGRAPHS = { new KanaQuestion("ビャ", "bya"), new KanaQuestion("ビュ", "byu"), new KanaQuestion("ビョ", "byo") };
private static final KanaQuestion[] KATAKANA_6_HANDAKUTEN_DIGRAPHS = { new KanaQuestion("ピャ", "pya"), new KanaQuestion("ピュ", "pyu"), new KanaQuestion("ピョ", "pyo") };
private static final KanaQuestion[] KATAKANA_7 = { new KanaQuestion("マ", "ma"), new KanaQuestion("ミ", "mi"), new KanaQuestion("ム", "mu"), new KanaQuestion("メ", "me"), new KanaQuestion("モ", "mo") };
private static final KanaQuestion[] KATAKANA_7_DIGRAPHS = { new KanaQuestion("ミャ", "mya"), new KanaQuestion("ミュ", "myu"), new KanaQuestion("ミョ", "myo") };
private static final KanaQuestion[] KATAKANA_8 = { new KanaQuestion("ラ", "ra"), new KanaQuestion("リ", "ri"), new KanaQuestion("ル", "ru"), new KanaQuestion("レ", "re"), new KanaQuestion("ロ", "ro") };
private static final KanaQuestion[] KATAKANA_8_DIGRAPHS = { new KanaQuestion("リャ", "rya"), new KanaQuestion("リュ", "ryu"), new KanaQuestion("リョ", "ryo") };
private static final KanaQuestion[] KATAKANA_9 = { new KanaQuestion("ヤ", "ya"), new KanaQuestion("ユ", "yu"), new KanaQuestion("ヨ", "yo") };
private static final KanaQuestion[] KATAKANA_10_W_GROUP = { new KanaQuestion("ワ", "wa"), new KanaQuestion("ヲ", "wo") };
private static final KanaQuestion[] KATAKANA_10_N_CONSONANT = { new KanaQuestion("ン", "n") };
@Test public void isDigraphTest() { assertDigraphTest(HIRAGANA_1, false); assertDigraphTest(HIRAGANA_2_BASE, false); assertDigraphTest(HIRAGANA_2_BASE_DIGRAPHS, true); assertDigraphTest(HIRAGANA_2_DAKUTEN, false); assertDigraphTest(HIRAGANA_2_DAKUTEN_DIGRAPHS, true); assertDigraphTest(HIRAGANA_3_BASE, false); assertDigraphTest(HIRAGANA_3_BASE_DIGRAPHS, true); assertDigraphTest(HIRAGANA_3_DAKUTEN, false); assertDigraphTest(HIRAGANA_3_DAKUTEN_DIGRAPHS, true); assertDigraphTest(HIRAGANA_4_BASE, false); assertDigraphTest(HIRAGANA_4_BASE_DIGRAPHS, true); assertDigraphTest(HIRAGANA_4_DAKUTEN, false); assertDigraphTest(HIRAGANA_4_DAKUTEN_DIGRAPHS, true); assertDigraphTest(HIRAGANA_5, false); assertDigraphTest(HIRAGANA_5_DIGRAPHS, true); assertDigraphTest(HIRAGANA_6_BASE, false); assertDigraphTest(HIRAGANA_6_BASE_DIGRAPHS, true); assertDigraphTest(HIRAGANA_6_DAKUTEN, false);
assertDigraphTest(HIRAGANA_6_DAKUTEN_DIGRAPHS, true); assertDigraphTest(HIRAGANA_6_HANDAKUTEN, false); assertDigraphTest(HIRAGANA_6_HANDAKUTEN_DIGRAPHS, true); assertDigraphTest(HIRAGANA_7, false); assertDigraphTest(HIRAGANA_7_DIGRAPHS, true); assertDigraphTest(HIRAGANA_8, false); assertDigraphTest(HIRAGANA_8_DIGRAPHS, true); assertDigraphTest(HIRAGANA_9, false); assertDigraphTest(HIRAGANA_10_W_GROUP, false); assertDigraphTest(HIRAGANA_10_N_CONSONANT, false); assertDigraphTest(KATAKANA_1, false); assertDigraphTest(KATAKANA_2_BASE, false); assertDigraphTest(KATAKANA_2_BASE_DIGRAPHS, true); assertDigraphTest(KATAKANA_2_DAKUTEN, false); assertDigraphTest(KATAKANA_2_DAKUTEN_DIGRAPHS, true); assertDigraphTest(KATAKANA_3_BASE, false); assertDigraphTest(KATAKANA_3_BASE_DIGRAPHS, true); assertDigraphTest(KATAKANA_3_DAKUTEN, false); assertDigraphTest(KATAKANA_3_DAKUTEN_DIGRAPHS, true); assertDigraphTest(KATAKANA_4_BASE, false); assertDigraphTest(KATAKANA_4_BASE_DIGRAPHS, true); assertDigraphTest(KATAKANA_4_DAKUTEN, false); assertDigraphTest(KATAKANA_4_DAKUTEN_DIGRAPHS, true); assertDigraphTest(KATAKANA_5, false); assertDigraphTest(KATAKANA_5_DIGRAPHS, true); assertDigraphTest(KATAKANA_6_BASE, false); assertDigraphTest(KATAKANA_6_BASE_DIGRAPHS, true); assertDigraphTest(KATAKANA_6_DAKUTEN, false); assertDigraphTest(KATAKANA_6_DAKUTEN_DIGRAPHS, true); assertDigraphTest(KATAKANA_6_HANDAKUTEN, false); assertDigraphTest(KATAKANA_6_HANDAKUTEN_DIGRAPHS, true); assertDigraphTest(KATAKANA_7, false); assertDigraphTest(KATAKANA_7_DIGRAPHS, true); assertDigraphTest(KATAKANA_8, false); assertDigraphTest(KATAKANA_8_DIGRAPHS, true); assertDigraphTest(KATAKANA_9, false); assertDigraphTest(KATAKANA_10_W_GROUP, false); assertDigraphTest(KATAKANA_10_N_CONSONANT, false); } @Test public void isDiacriticTest() { assertDiacriticTest(HIRAGANA_1, false); assertDiacriticTest(HIRAGANA_2_BASE, false); assertDiacriticTest(HIRAGANA_2_BASE_DIGRAPHS, false); assertDiacriticTest(HIRAGANA_2_DAKUTEN, true); assertDiacriticTest(HIRAGANA_2_DAKUTEN_DIGRAPHS, true); assertDiacriticTest(HIRAGANA_3_BASE, false); assertDiacriticTest(HIRAGANA_3_BASE_DIGRAPHS, false); assertDiacriticTest(HIRAGANA_3_DAKUTEN, true); assertDiacriticTest(HIRAGANA_3_DAKUTEN_DIGRAPHS, true); assertDiacriticTest(HIRAGANA_4_BASE, false); assertDiacriticTest(HIRAGANA_4_BASE_DIGRAPHS, false); assertDiacriticTest(HIRAGANA_4_DAKUTEN, true); assertDiacriticTest(HIRAGANA_4_DAKUTEN_DIGRAPHS, true); assertDiacriticTest(HIRAGANA_5, false); assertDiacriticTest(HIRAGANA_5_DIGRAPHS, false); assertDiacriticTest(HIRAGANA_6_BASE, false); assertDiacriticTest(HIRAGANA_6_BASE_DIGRAPHS, false); assertDiacriticTest(HIRAGANA_6_DAKUTEN, true); assertDiacriticTest(HIRAGANA_6_DAKUTEN_DIGRAPHS, true); assertDiacriticTest(HIRAGANA_6_HANDAKUTEN, true); assertDiacriticTest(HIRAGANA_6_HANDAKUTEN_DIGRAPHS, true); assertDiacriticTest(HIRAGANA_7, false); assertDiacriticTest(HIRAGANA_7_DIGRAPHS, false); assertDiacriticTest(HIRAGANA_8, false); assertDiacriticTest(HIRAGANA_8_DIGRAPHS, false); assertDiacriticTest(HIRAGANA_9, false); assertDiacriticTest(HIRAGANA_10_W_GROUP, false); assertDiacriticTest(HIRAGANA_10_N_CONSONANT, false); assertDiacriticTest(KATAKANA_1, false); assertDiacriticTest(KATAKANA_2_BASE, false); assertDiacriticTest(KATAKANA_2_BASE_DIGRAPHS, false); assertDiacriticTest(KATAKANA_2_DAKUTEN, true); assertDiacriticTest(KATAKANA_2_DAKUTEN_DIGRAPHS, true); assertDiacriticTest(KATAKANA_3_BASE, false); assertDiacriticTest(KATAKANA_3_BASE_DIGRAPHS, false); 
assertDiacriticTest(KATAKANA_3_DAKUTEN, true); assertDiacriticTest(KATAKANA_3_DAKUTEN_DIGRAPHS, true); assertDiacriticTest(KATAKANA_4_BASE, false); assertDiacriticTest(KATAKANA_4_BASE_DIGRAPHS, false); assertDiacriticTest(KATAKANA_4_DAKUTEN, true); assertDiacriticTest(KATAKANA_4_DAKUTEN_DIGRAPHS, true); assertDiacriticTest(KATAKANA_5, false); assertDiacriticTest(KATAKANA_5_DIGRAPHS, false); assertDiacriticTest(KATAKANA_6_BASE, false); assertDiacriticTest(KATAKANA_6_BASE_DIGRAPHS, false); assertDiacriticTest(KATAKANA_6_DAKUTEN, true); assertDiacriticTest(KATAKANA_6_DAKUTEN_DIGRAPHS, true); assertDiacriticTest(KATAKANA_6_HANDAKUTEN, true); assertDiacriticTest(KATAKANA_6_HANDAKUTEN_DIGRAPHS, true); assertDiacriticTest(KATAKANA_7, false); assertDiacriticTest(KATAKANA_7_DIGRAPHS, false); assertDiacriticTest(KATAKANA_8, false); assertDiacriticTest(KATAKANA_8_DIGRAPHS, false); assertDiacriticTest(KATAKANA_9, false); assertDiacriticTest(KATAKANA_10_W_GROUP, false); assertDiacriticTest(KATAKANA_10_N_CONSONANT, false); } @Test public void isDiacriticCharTest() { /* Unicode chart: */ assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), 
is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); 
assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); 
assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(false)); assertThat(KanaQuestion.isDiacritic(''), is(true)); assertThat(KanaQuestion.isDiacritic(''), is(false)); } private void assertDigraphTest(KanaQuestion[] questions, boolean expected) { for (KanaQuestion question : questions) assertThat(question.isDigraph(), is(expected)); } private void assertDiacriticTest(KanaQuestion[] questions, boolean expected) { for (KanaQuestion question : questions) assertThat(question.isDiacritic(), is(expected)); } }
package org.neo4j.shell; import java.rmi.RemoteException; /** * A {@link ShellServer} with the addition of executing apps. */ public interface AppShellServer extends ShellServer { /** * Adds an {@link App} (given its class) to the list of apps this * shell server can execute. * @param appClass the class of the {@link App} to add to the list. * @throws RemoteException if an RMI exception occurs. */ void addApp( Class<? extends App> appClass ) throws RemoteException; /** * Finds and returns an {@link App} implementation with a given name. * @param name the name of the app. * @return an {@link App} instance for {@code name}. * @throws RemoteException if an RMI exception occurs. */ App findApp( String name ) throws RemoteException; }
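// A hedged sketch of how a caller might use the interface above: register an App class and then
// resolve an app by name. It assumes it sits in the same package as AppShellServer; the concrete
// App implementation, the server instance and the app name (e.g. "ls") are supplied by the caller.
class AppShellServerSketch {
    static App registerAndFind(AppShellServer server, Class<? extends App> appClass, String name) throws java.rmi.RemoteException {
        server.addApp(appClass);      // make the app available to this shell server
        return server.findApp(name);  // look the app up by its name
    }
}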
package org.exist.xquery.functions.request; import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.List; import org.apache.commons.httpclient.HttpException; import org.apache.commons.httpclient.HttpMethod; import org.apache.commons.httpclient.HttpStatus; import org.apache.commons.httpclient.NameValuePair; import org.apache.commons.httpclient.methods.GetMethod; import org.apache.commons.httpclient.methods.PostMethod; import org.exist.http.RESTTest; import org.exist.xmldb.EXistResource; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.xml.sax.SAXException; import org.xmldb.api.DatabaseManager; import org.xmldb.api.base.Collection; import org.xmldb.api.base.Resource; import org.xmldb.api.base.XMLDBException; import org.xmldb.api.modules.BinaryResource; /** * Tests expected behaviour of request:get-parameter() XQuery function * * @author Adam Retter <adam@exist-db.org> * @version 1.0 */ public class GetParameterTest extends RESTTest { private final static String XQUERY = "for $param-name in request:get-parameter-names() return for $param-value in request:get-parameter($param-name, ()) return fn:concat($param-name, '=', $param-value)"; private final static String XQUERY_FILENAME = "test-get-parameter.xql"; private static Collection root; @BeforeClass public static void beforeClass() throws XMLDBException { root = DatabaseManager.getCollection("xmldb:exist://localhost:8088/xmlrpc/db", "admin", ""); BinaryResource res = (BinaryResource)root.createResource(XQUERY_FILENAME, "BinaryResource"); ((EXistResource) res).setMimeType("application/xquery"); res.setContent(XQUERY); root.storeResource(res); } @AfterClass public static void afterClass() throws XMLDBException { BinaryResource res = (BinaryResource)root.getResource(XQUERY_FILENAME); root.removeResource(res); } @Test public void testGetNoParameter() throws XMLDBException { testGet(null); } @Test public void testPostNoParameter() throws XMLDBException { testPost(null); } @Test public void testGetEmptyParameter() { testGet(new NameValues[] { new NameValues("param1", new String[]{}) }); } @Test public void testPostEmptyParameter() { testPost(new NameValues[] { new NameValues("param1", new String[]{}) }); } @Test public void testGetSingleValueParameter() { testGet(new NameValues[] { new NameValues("param1", new String[] { "value1" }) }); } @Test public void testPostSingleValueParameter() { testPost(new NameValues[] { new NameValues("param1", new String[] { "value1" }) }); } @Test public void testGetMultiValueParameter() { testGet(new NameValues[]{ new NameValues("param1", new String[] { "value1", "value2", "value3", "value4" }) }); } @Test public void testPostMultiValueParameter() { testPost(new NameValues[]{ new NameValues("param1", new String[] { "value1", "value2", "value3", "value4" }) }); } private void testGet(NameValues queryStringParams[]) { StringBuilder expectedResponse = new StringBuilder(); NameValuePair qsParams[] = convertNameValuesToNameValuePairs(queryStringParams, expectedResponse); GetMethod get = new GetMethod(COLLECTION_ROOT_URL + "/" + XQUERY_FILENAME); if(qsParams.length > 0) { get.setQueryString(qsParams); } testRequest(get, expectedResponse); } private void testPost(NameValues formParams[]) { StringBuilder expectedResponse = new StringBuilder(); NameValuePair fParams[] = convertNameValuesToNameValuePairs(formParams, expectedResponse); PostMethod post = new 
PostMethod(COLLECTION_ROOT_URL + "/" + XQUERY_FILENAME); if(fParams.length > 0) { post.setRequestBody(fParams); } testRequest(post, expectedResponse); } private void testRequest(HttpMethod method, StringBuilder expectedResponse) { try { int httpResult = client.executeMethod(method); byte buf[] = new byte[1024]; int read = -1; StringBuilder responseBody = new StringBuilder(); InputStream is = method.getResponseBodyAsStream(); while((read = is.read(buf)) > -1) { responseBody.append(new String(buf, 0, read)); } assertEquals(HttpStatus.SC_OK, httpResult); assertEquals(expectedResponse.toString(), responseBody.toString()); } catch(HttpException he) { fail(he.getMessage()); } catch(IOException ioe) { fail(ioe.getMessage()); } finally { method.releaseConnection(); } } private NameValuePair[] convertNameValuesToNameValuePairs(NameValues nameValues[], StringBuilder expectedResponse) { List<NameValuePair> nameValuePairs = new ArrayList<NameValuePair>(); if(nameValues != null) { for(NameValues param : nameValues) { for(String paramValue : param.getValues()) { nameValuePairs.add(new NameValuePair(param.getName(), paramValue)); expectedResponse.append(param.getName()); expectedResponse.append("="); expectedResponse.append(paramValue); } } } return nameValuePairs.toArray(new NameValuePair[nameValuePairs.size()]); } public class NameValues { final String name; final String values[]; public NameValues(String name, String values[]) { this.name = name; this.values = values; } public String getName() { return name; } public String[] getValues() { return values; } } }
// NikonReader.java package loci.formats.in; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.util.Arrays; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import loci.common.RandomAccessInputStream; import loci.formats.FormatException; import loci.formats.FormatTools; import loci.formats.ImageTools; import loci.formats.MetadataTools; import loci.formats.codec.BitBuffer; import loci.formats.codec.NikonCodec; import loci.formats.codec.NikonCodecOptions; import loci.formats.meta.MetadataStore; import loci.formats.tiff.IFD; import loci.formats.tiff.IFDList; import loci.formats.tiff.PhotoInterp; import loci.formats.tiff.TiffCompression; import loci.formats.tiff.TiffParser; import loci.formats.tiff.TiffRational; public class NikonReader extends BaseTiffReader { // -- Constants -- /** Logger for this class. */ private static final Logger LOGGER = LoggerFactory.getLogger(NikonReader.class); public static final String[] NEF_SUFFIX = {"nef"}; // Tags that give a good indication of whether this is an NEF file. private static final int TIFF_EPS_STANDARD = 37398; private static final int COLOR_MAP = 33422; // Maker Note tags. private static final int FIRMWARE_VERSION = 1; private static final int ISO = 2; private static final int QUALITY = 4; private static final int MAKER_WHITE_BALANCE = 5; private static final int SHARPENING = 6; private static final int FOCUS_MODE = 7; private static final int FLASH_SETTING = 8; private static final int FLASH_MODE = 9; private static final int WHITE_BALANCE_FINE = 11; private static final int WHITE_BALANCE_RGB_COEFFS = 12; private static final int FLASH_COMPENSATION = 18; private static final int TONE_COMPENSATION = 129; private static final int LENS_TYPE = 131; private static final int LENS = 132; private static final int FLASH_USED = 135; private static final int CURVE = 140; private static final int COLOR_MODE = 141; private static final int LIGHT_TYPE = 144; private static final int HUE = 146; private static final int CAPTURE_EDITOR_DATA = 3585; // -- Fields -- /** Offset to the Nikon Maker Note. */ protected int makerNoteOffset; /** The original IFD. */ protected IFD original; private TiffRational[] whiteBalance; private Object cfaPattern; private int[] curve; private int[] vPredictor; private boolean lossyCompression; private int split = -1; private byte[] lastPlane = null; private int lastIndex = -1; // -- Constructor -- /** Constructs a new Nikon reader. 
*/ public NikonReader() { super("Nikon NEF", new String[] {"nef", "tif", "tiff"}); suffixSufficient = false; domains = new String[] {FormatTools.GRAPHICS_DOMAIN}; } // -- IFormatReader API methods -- /* @see loci.formats.IFormatReader#isThisType(String, boolean) */ public boolean isThisType(String name, boolean open) { // extension is sufficient as long as it is NEF if (checkSuffix(name, NEF_SUFFIX)) return true; return super.isThisType(name, open); } /* @see loci.formats.IFormatReader#isThisType(RandomAccessInputStream) */ public boolean isThisType(RandomAccessInputStream stream) throws IOException { TiffParser tp = new TiffParser(stream); IFD ifd = tp.getFirstIFD(); if (ifd == null) return false; if (ifd.containsKey(TIFF_EPS_STANDARD)) return true; String make = ifd.getIFDTextValue(IFD.MAKE); return make != null && make.indexOf("Nikon") != -1; } /** * @see loci.formats.IFormatReader#openBytes(int, byte[], int, int, int, int) */ public byte[] openBytes(int no, byte[] buf, int x, int y, int w, int h) throws FormatException, IOException { FormatTools.checkPlaneParameters(this, no, buf.length, x, y, w, h); IFD ifd = ifds.get(no); int[] bps = ifd.getBitsPerSample(); int dataSize = bps[0]; long[] byteCounts = ifd.getStripByteCounts(); long totalBytes = 0; for (long b : byteCounts) { totalBytes += b; } if (totalBytes == FormatTools.getPlaneSize(this) || bps.length > 1) { return super.openBytes(no, buf, x, y, w, h); } if (lastPlane == null || lastIndex != no) { long[] offsets = ifd.getStripOffsets(); boolean maybeCompressed = ifd.getCompression() == TiffCompression.NIKON; boolean compressed = vPredictor != null && curve != null && maybeCompressed; if (!maybeCompressed && dataSize == 14) dataSize = 16; ByteArrayOutputStream src = new ByteArrayOutputStream(); NikonCodec codec = new NikonCodec(); NikonCodecOptions options = new NikonCodecOptions(); options.width = getSizeX(); options.height = getSizeY(); options.bitsPerSample = dataSize; options.curve = curve; if (vPredictor != null) { options.vPredictor = new int[vPredictor.length]; } options.lossless = !lossyCompression; options.split = split; for (int i=0; i<byteCounts.length; i++) { byte[] t = new byte[(int) byteCounts[i]]; in.seek(offsets[i]); in.read(t); if (compressed) { options.maxBytes = (int) byteCounts[i]; System.arraycopy(vPredictor, 0, options.vPredictor, 0, vPredictor.length); t = codec.decompress(t, options); } src.write(t); } BitBuffer bb = new BitBuffer(src.toByteArray()); short[] pix = new short[getSizeX() * getSizeY() * 3]; src.close(); int[] colorMap = {1, 0, 2, 1}; // default color map short[] ifdColors = (short[]) ifd.get(COLOR_MAP); if (ifdColors != null && ifdColors.length >= colorMap.length) { boolean colorsValid = true; for (int q=0; q<colorMap.length; q++) { if (ifdColors[q] < 0 || ifdColors[q] > 2) { // found invalid channel index, use default color map instead colorsValid = false; break; } } if (colorsValid) { for (int q=0; q<colorMap.length; q++) { colorMap[q] = ifdColors[q]; } } } boolean interleaveRows = offsets.length == 1 && !maybeCompressed && colorMap[0] != 0; for (int row=0; row<getSizeY(); row++) { int realRow = interleaveRows ? (row < (getSizeY() / 2) ? 
row * 2 : (row - (getSizeY() / 2)) * 2 + 1) : row; for (int col=0; col<getSizeX(); col++) { short val = (short) (bb.getBits(dataSize) & 0xffff); int mapIndex = (realRow % 2) * 2 + (col % 2); int redOffset = realRow * getSizeX() + col; int greenOffset = (getSizeY() + realRow) * getSizeX() + col; int blueOffset = (2 * getSizeY() + realRow) * getSizeX() + col; if (colorMap[mapIndex] == 0) { pix[redOffset] = adjustForWhiteBalance(val, 0); } else if (colorMap[mapIndex] == 1) { pix[greenOffset] = adjustForWhiteBalance(val, 1); } else if (colorMap[mapIndex] == 2) { pix[blueOffset] = adjustForWhiteBalance(val, 2); } if (maybeCompressed && !compressed) { int toSkip = 0; if ((col % 10) == 9) { toSkip = 1; } if (col == getSizeX() - 1) { toSkip = 10; } bb.skipBits(toSkip * 8); } } } lastPlane = new byte[FormatTools.getPlaneSize(this)]; ImageTools.interpolate(pix, lastPlane, colorMap, getSizeX(), getSizeY(), isLittleEndian()); lastIndex = no; } int bpp = FormatTools.getBytesPerPixel(getPixelType()) * 3; int rowLen = w * bpp; int width = getSizeX() * bpp; for (int row=0; row<h; row++) { System.arraycopy( lastPlane, (row + y) * width + x * bpp, buf, row * rowLen, rowLen); } return buf; } /* @see loci.formats.IFormatReader#close(boolean) */ public void close(boolean fileOnly) throws IOException { super.close(fileOnly); if (!fileOnly) { makerNoteOffset = 0; original = null; split = -1; whiteBalance = null; cfaPattern = null; curve = null; vPredictor = null; lossyCompression = false; lastPlane = null; lastIndex = -1; } } // -- Internal BaseTiffReader API methods -- /* @see BaseTiffReader#initStandardMetadata() */ protected void initStandardMetadata() throws FormatException, IOException { super.initStandardMetadata(); // reset image dimensions // the actual image data is stored in IFDs referenced by the SubIFD tag // in the 'real' IFD core[0].imageCount = ifds.size(); IFD firstIFD = ifds.get(0); PhotoInterp photo = firstIFD.getPhotometricInterpretation(); int samples = firstIFD.getSamplesPerPixel(); core[0].rgb = samples > 1 || photo == PhotoInterp.RGB || photo == PhotoInterp.CFA_ARRAY; if (photo == PhotoInterp.CFA_ARRAY) samples = 3; core[0].sizeX = (int) firstIFD.getImageWidth(); core[0].sizeY = (int) firstIFD.getImageLength(); core[0].sizeZ = 1; core[0].sizeC = isRGB() ? samples : 1; core[0].sizeT = ifds.size(); core[0].pixelType = firstIFD.getPixelType(); core[0].indexed = false; // now look for the EXIF IFD pointer IFDList exifIFDs = tiffParser.getExifIFDs(); if (exifIFDs.size() > 0) { IFD exifIFD = exifIFDs.get(0); tiffParser.fillInIFD(exifIFD); // put all the EXIF data in the metadata hashtable for (Integer key : exifIFD.keySet()) { int tag = key.intValue(); String name = IFD.getIFDTagName(tag); if (tag == IFD.CFA_PATTERN) { byte[] cfa = (byte[]) exifIFD.get(key); int[] colorMap = new int[cfa.length]; for (int i=0; i<cfa.length; i++) colorMap[i] = (int) cfa[i]; addGlobalMeta(name, colorMap); cfaPattern = colorMap; } else { addGlobalMeta(name, exifIFD.get(key)); if (name.equals("MAKER_NOTE")) { byte[] b = (byte[]) exifIFD.get(key); int extra = new String(b, 0, 10).startsWith("Nikon") ? 
10 : 0; byte[] buf = new byte[b.length]; System.arraycopy(b, extra, buf, 0, buf.length - extra); RandomAccessInputStream makerNote = new RandomAccessInputStream(buf); TiffParser tp = new TiffParser(makerNote); IFD note = null; try { note = tp.getFirstIFD(); } catch (Exception e) { LOGGER.debug("Failed to parse first IFD", e); } if (note != null) { for (Integer nextKey : note.keySet()) { int nextTag = nextKey.intValue(); addGlobalMeta(name, note.get(nextKey)); if (nextTag == 150) { b = (byte[]) note.get(nextKey); RandomAccessInputStream s = new RandomAccessInputStream(b); byte check1 = s.readByte(); byte check2 = s.readByte(); lossyCompression = check1 != 0x46; vPredictor = new int[4]; for (int q=0; q<vPredictor.length; q++) { vPredictor[q] = s.readShort(); } curve = new int[16385]; int bps = ifds.get(0).getBitsPerSample()[0]; int max = 1 << bps & 0x7fff; int step = 0; int csize = s.readShort(); if (csize > 1) { step = max / (csize - 1); } if (check1 == 0x44 && check2 == 0x20 && step > 0) { for (int i=0; i<csize; i++) { curve[i * step] = s.readShort(); } for (int i=0; i<max; i++) { int n = i % step; curve[i] = (curve[i - n] * (step - n) + curve[i - n + step] * n) / step; } s.seek(562); split = s.readShort(); } else { int maxValue = (int) Math.pow(2, bps) - 1; Arrays.fill(curve, maxValue); int nElements = (int) (s.length() - s.getFilePointer()) / 2; if (nElements < 100) { for (int i=0; i<curve.length; i++) { curve[i] = (short) i; } } else { for (int q=0; q<nElements; q++) { curve[q] = s.readShort(); } } } s.close(); } else if (nextTag == WHITE_BALANCE_RGB_COEFFS) { whiteBalance = (TiffRational[]) note.get(nextKey); } } } makerNote.close(); } } } } } // -- Internal FormatReader API methods -- /* @see loci.formats.FormatReader#initFile(String) */ protected void initFile(String id) throws FormatException, IOException { super.initFile(id); original = ifds.get(0); if (cfaPattern != null) { original.putIFDValue(IFD.COLOR_MAP, (int[]) cfaPattern); } ifds.set(0, original); core[0].imageCount = 1; core[0].sizeT = 1; if (ifds.get(0).getSamplesPerPixel() == 1) { core[0].interleaved = true; } MetadataStore store = makeFilterMetadata(); MetadataTools.populatePixels(store, this); } // -- Helper methods -- private short adjustForWhiteBalance(short val, int index) { if (whiteBalance != null && whiteBalance.length == 3) { return (short) (val * whiteBalance[index].doubleValue()); } return val; } }
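// A minimal read sketch using the standard Bio-Formats reader lifecycle that NikonReader inherits
// (setId / openBytes / close); the file path is a placeholder supplied by the caller, and the sketch
// is assumed to live alongside the reader class above.
class NikonReaderSketch {
    public static void main(String[] args) throws Exception {
        NikonReader reader = new NikonReader();
        reader.setId(args[0]);                   // path to a .nef file
        byte[] firstPlane = reader.openBytes(0); // decodes, interpolates and white-balances plane 0
        System.out.println(reader.getSizeX() + "x" + reader.getSizeY() + " -> " + firstPlane.length + " bytes");
        reader.close();
    }
}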
package gov.nih.nci.gss.grid; import gov.nih.nci.gss.domain.GridService; import gov.nih.nci.gss.util.GSSUtil; import java.io.InputStream; import java.util.concurrent.Callable; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.commons.io.IOUtils; import org.apache.http.HttpHost; import org.apache.http.HttpRequest; import org.apache.http.HttpResponse; import org.apache.http.client.methods.HttpGet; import org.apache.http.impl.client.DefaultHttpClient; import org.apache.log4j.Logger;
/** * Utility class for retrieving the WSDL from a data service to * verify its accessibility. * * @author <a href="mailto:rokickik@mail.nih.gov">Konrad Rokicki</a> */
public class GridServiceVerifier implements Callable<Boolean> { private static Logger logger = Logger.getLogger(GridServiceVerifier.class); private boolean defunct = false; private GridService gridService; public GridServiceVerifier(GridService gridService) { this.gridService = gridService; } public void disregard() { this.defunct = true; }
/** * Callable callback method called when this update is actually run. */
public Boolean call() throws Exception { String url = gridService.getUrl(); logger.debug("Verifying service: "+url); try { String wsdl = GridServiceVerifier.getWSDL(url); synchronized (this) { if (defunct == true) { logger.warn("WSDL query for service "+url+ " returned but is no longer needed"); return false; } else if (wsdl.contains("<wsdl")) { logger.info("Retrieved WSDL for service: "+url); gridService.setAccessible(true); return true; } else { logger.warn("Retrieved unrecognizable WSDL for service: "+url); gridService.setAccessible(false); return false; } } } catch (Exception e) { synchronized (this) { logger.warn("Could not get WSDL for service "+url+": "+e.getMessage()); gridService.setAccessible(false); } return false; } }
public static String getWSDL(String dataServiceUrl) throws Exception { DefaultHttpClient httpclient = new DefaultHttpClient(); GSSUtil.useTrustingTrustManager(httpclient); try { String wsdlURL = dataServiceUrl+"?wsdl"; logger.debug("Getting "+wsdlURL);
// Have to parse hostname manually because httpclient has a bug
// in dealing with underscores in hostnames.
String scheme = null; String hostname = null; int port = 80;
// matches scheme://host[:port]/..., so that group 1 = scheme, group 2 = hostname, group 4 = port
Pattern p = Pattern.compile("^(\\w+?)://([^/:]+)(:(\\d+))?.*$");
Matcher m = p.matcher(wsdlURL); if (m.matches()) { scheme = m.group(1); hostname = m.group(2); String strPort = m.group(4); if (strPort != null) { port = Integer.parseInt(strPort); } } HttpHost httpHost = new HttpHost(hostname, port, scheme); HttpRequest httpRequest = new HttpGet(wsdlURL); HttpResponse response = httpclient.execute(httpHost, httpRequest); InputStream responseStream = response.getEntity().getContent(); byte[] responseBytes = IOUtils.toByteArray(responseStream); return new String(responseBytes, "UTF-8"); } finally { httpclient.getConnectionManager().shutdown(); } }
/** * @param args */
public static void main(String[] args) throws Exception { String url = "https://stylus_157.stylusinternet.net:9600/wsrf/services/cagrid/OwlgenService"; String wsdl = GridServiceVerifier.getWSDL(url); System.out.println("wsdl="+wsdl); if (wsdl.contains("<wsdl")) { System.out.println("Success"); } } }
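// A sketch of how the verifier might be scheduled: since GridServiceVerifier implements
// Callable<Boolean>, it can be submitted to any ExecutorService. The GridService instance is
// assumed to be loaded elsewhere, and this sketch is assumed to sit in the same package as the
// verifier above.
class GridServiceVerifierSketch {
    static java.util.concurrent.Future<Boolean> verifyAsync(java.util.concurrent.ExecutorService pool, GridService service) {
        GridServiceVerifier verifier = new GridServiceVerifier(service);
        return pool.submit(verifier); // call() fetches the WSDL and records accessibility on the service
    }
}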
// ScanrReader.java package loci.formats.in; import java.io.IOException; import java.util.Hashtable; import java.util.Vector; import loci.common.DataTools; import loci.common.Location; import loci.common.RandomAccessInputStream; import loci.common.xml.XMLTools; import loci.formats.CoreMetadata; import loci.formats.FormatException; import loci.formats.FormatReader; import loci.formats.FormatTools; import loci.formats.MetadataTools; import loci.formats.meta.FilterMetadata; import loci.formats.meta.MetadataStore; import loci.formats.tiff.IFD; import loci.formats.tiff.TiffParser; import org.xml.sax.Attributes; import org.xml.sax.helpers.DefaultHandler; public class ScanrReader extends FormatReader { // -- Constants -- private static final String XML_FILE = "experiment_descriptor.xml"; private static final String EXPERIMENT_FILE = "experiment_descriptor.dat"; private static final String ACQUISITION_FILE = "AcquisitionLog.dat"; // -- Fields -- private Vector<String> metadataFiles = new Vector<String>(); private int wellRows, wellColumns; private int fieldRows, fieldColumns; private Vector<String> channelNames = new Vector<String>(); private Hashtable<String, Integer> wellLabels = new Hashtable<String, Integer>(); private String plateName; private String[] tiffs; private MinimalTiffReader reader; // -- Constructor -- /** Constructs a new ScanR reader. */ public ScanrReader() { super("Olympus ScanR", new String[] {"dat", "xml", "tif"}); domains = new String[] {FormatTools.HCS_DOMAIN}; suffixSufficient = false; } // -- IFormatReader API methods -- /* @see loci.formats.IFormatReader#fileGroupOption(String) */ public int fileGroupOption(String id) throws FormatException, IOException { return FormatTools.MUST_GROUP; } /* @see loci.formats.IFormatReader#isThisType(String, boolean) */ public boolean isThisType(String name, boolean open) { String localName = new Location(name).getName(); if (localName.equals(XML_FILE) || localName.equals(EXPERIMENT_FILE) || localName.equals(ACQUISITION_FILE)) { return true; } return super.isThisType(name, open); } /* @see loci.formats.IFormatReader#isThisType(RandomAccessInputStream) */ public boolean isThisType(RandomAccessInputStream stream) throws IOException { TiffParser p = new TiffParser(stream); IFD ifd = p.getFirstIFD(); if (ifd == null) return false; Object s = ifd.getIFDValue(IFD.SOFTWARE); if (s == null) return false; String software = s instanceof String[] ? 
((String[]) s)[0] : s.toString(); return software.trim().equals("National Instruments IMAQ"); } /* @see loci.formats.IFormatReader#getSeriesUsedFiles(boolean) */ public String[] getSeriesUsedFiles(boolean noPixels) { FormatTools.assertId(currentId, true, 1); Vector<String> files = new Vector<String>(); for (String file : metadataFiles) { if (file != null) files.add(file); } if (!noPixels && tiffs != null) { int offset = getSeries() * getImageCount(); for (int i=0; i<getImageCount(); i++) { if (tiffs[offset + i] != null) { files.add(tiffs[offset + i]); } } } return files.toArray(new String[files.size()]); } /* @see loci.formats.IFormatReader#close(boolean) */ public void close(boolean fileOnly) throws IOException { super.close(fileOnly); if (!fileOnly) { if (reader != null) { reader.close(); } reader = null; tiffs = null; plateName = null; channelNames.clear(); fieldRows = fieldColumns = 0; wellRows = wellColumns = 0; metadataFiles.clear(); wellLabels.clear(); } } /** * @see loci.formats.IFormatReader#openBytes(int, byte[], int, int, int, int) */ public byte[] openBytes(int no, byte[] buf, int x, int y, int w, int h) throws FormatException, IOException { FormatTools.checkPlaneParameters(this, no, buf.length, x, y, w, h); int index = getSeries() * getImageCount() + no; if (tiffs[index] != null) { reader.setId(tiffs[index]); reader.openBytes(0, buf, x, y, w, h); reader.close(); } return buf; } // -- Internal FormatReader API methods -- /* @see loci.formats.FormatReader#initFile(String) */ protected void initFile(String id) throws FormatException, IOException { super.initFile(id); // make sure we have the .xml file if (!checkSuffix(id, "xml") && isGroupFiles()) { Location parent = new Location(id).getAbsoluteFile().getParentFile(); if (checkSuffix(id, "tif")) { parent = parent.getParentFile(); } String[] list = parent.list(); for (String file : list) { if (file.equals(XML_FILE)) { id = new Location(parent, file).getAbsolutePath(); super.initFile(id); break; } } if (!checkSuffix(id, "xml")) { throw new FormatException("Could not find " + XML_FILE + " in " + parent.getAbsolutePath()); } } else if (!isGroupFiles() && checkSuffix(id, "tif")) { TiffReader r = new TiffReader(); r.setMetadataStore(getMetadataStore()); r.setId(id); core = r.getCoreMetadata(); metadataStore = r.getMetadataStore(); Hashtable globalMetadata = r.getGlobalMetadata(); for (Object key : globalMetadata.keySet()) { addGlobalMeta(key.toString(), globalMetadata.get(key)); } r.close(); tiffs = new String[] {id}; reader = new MinimalTiffReader(); return; } Location dir = new Location(id).getAbsoluteFile().getParentFile(); String[] list = dir.list(true); for (String file : list) { Location f = new Location(dir, file); if (!f.isDirectory()) { metadataFiles.add(f.getAbsolutePath()); } } // parse XML metadata String xml = DataTools.readFile(id).trim(); // add the appropriate encoding, as some ScanR XML files use non-UTF8 // characters without specifying an encoding if (xml.startsWith("<?")) { xml = xml.substring(xml.indexOf("?>") + 2); } xml = "<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?>" + xml; XMLTools.parseXML(xml, new ScanrHandler()); Vector<String> uniqueRows = new Vector<String>(); Vector<String> uniqueColumns = new Vector<String>(); for (String well : wellLabels.keySet()) { if (!Character.isLetter(well.charAt(0))) continue; String row = well.substring(0, 1).trim(); String column = well.substring(1).trim(); if (!uniqueRows.contains(row) && row.length() > 0) uniqueRows.add(row); if (!uniqueColumns.contains(column) && 
column.length() > 0) { uniqueColumns.add(column); } } wellRows = uniqueRows.size(); wellColumns = uniqueColumns.size(); if (wellRows * wellColumns == 0) { if (wellLabels.size() <= 96) { wellColumns = 12; } else if (wellLabels.size() <= 384) { wellColumns = 24; } wellRows = wellLabels.size() / wellColumns; if (wellRows * wellColumns < wellLabels.size()) wellRows++; } int nChannels = getSizeC() == 0 ? channelNames.size() : getSizeC(); if (nChannels == 0) nChannels = 1; int nSlices = getSizeZ() == 0 ? 1 : getSizeZ(); int nTimepoints = getSizeT(); int nWells = wellRows * wellColumns; int nPos = fieldRows * fieldColumns; if (nPos == 0) nPos = 1; // get list of TIFF files Location dataDir = new Location(dir, "data"); list = dataDir.list(true); if (list == null) { // try to find the TIFFs in the current directory list = dir.list(true); } else dir = dataDir; if (nTimepoints == 0) { nTimepoints = list.length / (nChannels * nWells * nPos * nSlices); if (nTimepoints == 0) nTimepoints = 1; } tiffs = new String[nChannels * nWells * nPos * nTimepoints * nSlices]; int next = 0; String[] keys = wellLabels.keySet().toArray(new String[wellLabels.size()]); int realPosCount = 0; for (int well=0; well<nWells; well++) { Integer w = wellLabels.get(keys[well]); int wellIndex = w == null ? well + 1 : w.intValue(); String wellPos = getBlock(wellIndex, "W"); int originalIndex = next; for (int pos=0; pos<nPos; pos++) { String posPos = getBlock(pos + 1, "P"); int posIndex = next; for (int z=0; z<nSlices; z++) { String zPos = getBlock(z, "Z"); for (int t=0; t<nTimepoints; t++) { String tPos = getBlock(t, "T"); for (int c=0; c<nChannels; c++) { for (String file : list) { if (file.indexOf(wellPos) != -1 && file.indexOf(zPos) != -1 && file.indexOf(posPos) != -1 && file.indexOf(tPos) != -1 && file.indexOf(channelNames.get(c)) != -1) { tiffs[next++] = new Location(dir, file).getAbsolutePath(); break; } } } } } if (posIndex != next) realPosCount++; } if (next == originalIndex) { wellLabels.remove(keys[well]); } } if (wellLabels.size() != nWells) { uniqueRows.clear(); uniqueColumns.clear(); for (String well : wellLabels.keySet()) { if (!Character.isLetter(well.charAt(0))) continue; String row = well.substring(0, 1).trim(); String column = well.substring(1).trim(); if (!uniqueRows.contains(row) && row.length() > 0) uniqueRows.add(row); if (!uniqueColumns.contains(column) && column.length() > 0) { uniqueColumns.add(column); } } wellRows = uniqueRows.size(); wellColumns = uniqueColumns.size(); nWells = wellRows * wellColumns; } nPos = realPosCount; reader = new MinimalTiffReader(); reader.setId(tiffs[0]); int sizeX = reader.getSizeX(); int sizeY = reader.getSizeY(); int pixelType = reader.getPixelType(); // we strongly suspect that ScanR incorrectly records the // signedness of the pixels switch (pixelType) { case FormatTools.INT8: pixelType = FormatTools.UINT8; break; case FormatTools.UINT8: pixelType = FormatTools.INT8; break; case FormatTools.INT16: pixelType = FormatTools.UINT16; break; case FormatTools.UINT16: pixelType = FormatTools.INT16; break; } boolean rgb = reader.isRGB(); boolean interleaved = reader.isInterleaved(); boolean indexed = reader.isIndexed(); boolean littleEndian = reader.isLittleEndian(); reader.close(); core = new CoreMetadata[nWells * nPos]; for (int i=0; i<getSeriesCount(); i++) { core[i] = new CoreMetadata(); core[i].sizeC = nChannels; core[i].sizeZ = nSlices; core[i].sizeT = nTimepoints; core[i].sizeX = sizeX; core[i].sizeY = sizeY; core[i].pixelType = pixelType; core[i].rgb = rgb; 
core[i].interleaved = interleaved; core[i].indexed = indexed; core[i].littleEndian = littleEndian; core[i].dimensionOrder = "XYCTZ"; core[i].imageCount = nSlices * nTimepoints * nChannels; } MetadataStore store = new FilterMetadata(getMetadataStore(), isMetadataFiltered()); MetadataTools.populatePixels(store, this); int nFields = fieldRows * fieldColumns; for (int i=0; i<getSeriesCount(); i++) { MetadataTools.setDefaultCreationDate(store, id, i); int field = i % nFields; int well = i / nFields; int wellRow = well / wellColumns; int wellCol = well % wellColumns; store.setWellColumn(wellCol, 0, well); store.setWellRow(wellRow, 0, well); store.setWellSampleIndex(i, 0, well, field); String imageID = MetadataTools.createLSID("Image", i); store.setWellSampleImageRef(imageID, 0, well, field); store.setImageID(imageID, i); String row = String.valueOf(wellRows > 26 ? wellRow + 1 : (char) ('A' + wellRow)); String col = String.valueOf(wellRows > 26 ? (char) ('A' + wellCol) : wellCol + 1); String name = "Well " + row + col + ", Field " + (field + 1) + " (Spot " + (i + 1) + ")"; store.setImageName(name, i); } if (getMetadataOptions().getMetadataLevel() == MetadataLevel.ALL) { // populate LogicalChannel data for (int i=0; i<getSeriesCount(); i++) { for (int c=0; c<getSizeC(); c++) { store.setLogicalChannelName(channelNames.get(c), i, c); } } if (wellRows > 26) { store.setPlateRowNamingConvention("1", 0); store.setPlateColumnNamingConvention("A", 0); } else { store.setPlateRowNamingConvention("A", 0); store.setPlateColumnNamingConvention("1", 0); } store.setPlateName(plateName, 0); } } // -- Helper class -- class ScanrHandler extends DefaultHandler { private String key, value; private String qName; private String wellIndex; // -- DefaultHandler API methods -- public void characters(char[] ch, int start, int length) { String v = new String(ch, start, length); if (v.trim().length() == 0) return; if (qName.equals("Name")) { key = v; } else if (qName.equals("Val")) { value = v.trim(); addGlobalMeta(key, value); if (key.equals("columns/well")) { fieldColumns = Integer.parseInt(value); } else if (key.equals("rows/well")) { fieldRows = Integer.parseInt(value); } else if (key.equals("# slices")) { core[0].sizeZ = Integer.parseInt(value); } else if (key.equals("timeloop real")) { core[0].sizeT = Integer.parseInt(value); } else if (key.equals("name")) { channelNames.add(value); } else if (key.equals("plate name")) { plateName = value; } else if (key.equals("idle")) { int lastIndex = channelNames.size() - 1; if (value.equals("0") && !channelNames.get(lastIndex).equals("Autofocus")) { core[0].sizeC++; } else channelNames.remove(lastIndex); } else if (key.equals("well selection table + cDNA")) { if (Character.isDigit(value.charAt(0))) { wellIndex = value; } else { wellLabels.put(value, new Integer(wellIndex)); } } } } public void startElement(String uri, String localName, String qName, Attributes attributes) { this.qName = qName; } } // -- Helper methods -- private String getBlock(int index, String axis) { String b = String.valueOf(index); while (b.length() < 5) b = "0" + b; return axis + b; } }
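// A small worked example of the file-name block convention that initFile(...) above relies on when
// matching TIFF names: getBlock(index, axis) zero-pads the index to five digits and prefixes the
// axis letter. The helper below simply mirrors that private method for illustration.
class ScanrBlockNameSketch {
    static String block(int index, String axis) {
        String b = String.valueOf(index);
        while (b.length() < 5) b = "0" + b;
        return axis + b;
    }
    public static void main(String[] args) {
        System.out.println(block(3, "W")); // W00003 - matched against well labels
        System.out.println(block(0, "T")); // T00000 - first timepoint
    }
}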
package org.voltdb.planner; import java.util.List; import org.voltdb.expressions.AbstractExpression; import org.voltdb.plannodes.AbstractJoinPlanNode; import org.voltdb.plannodes.AbstractPlanNode; import org.voltdb.plannodes.AbstractScanPlanNode; import org.voltdb.plannodes.IndexScanPlanNode; import org.voltdb.plannodes.NestLoopPlanNode; import org.voltdb.plannodes.PlanNodeTree; import org.voltdb.plannodes.ReceivePlanNode; import org.voltdb.types.ExpressionType; import org.voltdb.types.IndexLookupType; import org.voltdb.types.JoinType; import org.voltdb.types.PlanNodeType; public class TestMultipleOuterJoinPlans extends PlannerTestCase { private void verifyJoinNode(AbstractPlanNode n, PlanNodeType nodeType, JoinType joinType, ExpressionType preJoinExpressionType, ExpressionType joinExpressionType, ExpressionType whereExpressionType, PlanNodeType outerNodeType, PlanNodeType innerNodeType, String outerTableAlias, String innerTableAlias) { assertEquals(nodeType, n.getPlanNodeType()); AbstractJoinPlanNode jn = (AbstractJoinPlanNode) n; assertEquals(joinType, jn.getJoinType()); if (preJoinExpressionType != null) { assertEquals(preJoinExpressionType, jn.getPreJoinPredicate().getExpressionType()); } else { assertNull(jn.getPreJoinPredicate()); } if (joinExpressionType != null) { assertEquals(joinExpressionType, jn.getJoinPredicate().getExpressionType()); } else { assertNull(jn.getJoinPredicate()); } if (whereExpressionType != null) { assertEquals(whereExpressionType, jn.getWherePredicate().getExpressionType()); } else { assertNull(jn.getWherePredicate()); } assertEquals(outerNodeType, jn.getChild(0).getPlanNodeType()); if (outerTableAlias != null) { assertEquals(outerTableAlias, ((AbstractScanPlanNode) jn.getChild(0)).getTargetTableAlias()); } if (nodeType == PlanNodeType.NESTLOOP) { assertEquals(innerNodeType, jn.getChild(1).getPlanNodeType()); } if (innerTableAlias != null) { if (nodeType == PlanNodeType.NESTLOOP) { assertEquals(innerTableAlias, ((AbstractScanPlanNode) jn.getChild(1)).getTargetTableAlias()); } else { IndexScanPlanNode sn = (IndexScanPlanNode) jn.getInlinePlanNode(PlanNodeType.INDEXSCAN); assertEquals(innerTableAlias, sn.getTargetTableAlias()); } } } private void verifyJoinNode(AbstractPlanNode n, PlanNodeType nodeType, JoinType joinType, ExpressionType preJoinExpressionType, ExpressionType joinExpressionType, ExpressionType whereExpressionType, PlanNodeType outerNodeType, PlanNodeType innerNodeType) { verifyJoinNode(n, nodeType, joinType, preJoinExpressionType, joinExpressionType, whereExpressionType, outerNodeType, innerNodeType, null, null); } private void verifyIndexScanNode(AbstractPlanNode n, IndexLookupType lookupType, ExpressionType predExpressionType) { assertNotNull(n); assertEquals(PlanNodeType.INDEXSCAN, n.getPlanNodeType()); IndexScanPlanNode isn = (IndexScanPlanNode) n; assertEquals(lookupType, isn.getLookupType()); if (predExpressionType != null) { assertEquals(predExpressionType, isn.getPredicate().getExpressionType()); } else { assertNull(isn.getPredicate()); } } public void testInnerOuterJoin() { AbstractPlanNode pn; AbstractPlanNode n; pn = compile("select * FROM R1 INNER JOIN R2 ON R1.A = R2.A LEFT JOIN R3 ON R3.C = R2.C"); n = pn.getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN, null, "R3"); n = n.getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, 
PlanNodeType.SEQSCAN); pn = compile("select * FROM R1, R2 LEFT JOIN R3 ON R3.C = R2.C WHERE R1.A = R2.A"); n = pn.getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN, null, "R3"); n = n.getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN); } public void testOuterOuterJoin() { AbstractPlanNode pn; AbstractPlanNode n; pn = compile("select * FROM R1 LEFT JOIN R2 ON R1.A = R2.A LEFT JOIN R3 ON R3.C = R1.C"); n = pn.getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN, null, "R3"); n = n.getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R1", "R2"); pn = compile("select * FROM R1 LEFT JOIN R2 ON R1.A = R2.A RIGHT JOIN R3 ON R3.C = R1.C"); n = pn.getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.NESTLOOP, "R3", null); n = n.getChild(1); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R1", "R2"); pn = compile("select * FROM R1 RIGHT JOIN R2 ON R1.A = R2.A RIGHT JOIN R3 ON R3.C = R2.C"); n = pn.getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.NESTLOOP, "R3", null); n = n.getChild(1); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R2", "R1"); pn = compile("select * FROM R1 RIGHT JOIN R2 ON R1.A = R2.A LEFT JOIN R3 ON R3.C = R1.C"); n = pn.getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN, null, "R3"); n = n.getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R2", "R1"); pn = compile("select * FROM R1 RIGHT JOIN R2 ON R1.A = R2.A LEFT JOIN R3 ON R3.C = R1.C WHERE R1.A > 0"); n = pn.getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN, null, "R3"); n = n.getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN); } public void testMultiTableJoinExpressions() { AbstractPlanNode pn = compile("select * FROM R1, R2 LEFT JOIN R3 ON R3.A = R2.C OR R3.A = R1.A WHERE R1.C = R2.C"); AbstractPlanNode n = pn.getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.CONJUNCTION_OR, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN, null, "R3"); NestLoopPlanNode nlj = (NestLoopPlanNode) n; AbstractExpression p = nlj.getJoinPredicate(); assertEquals(ExpressionType.CONJUNCTION_OR, p.getExpressionType()); } public void testPushDownExprJoin() { AbstractPlanNode pn; AbstractPlanNode n; // R3.A > 0 gets pushed down all the way to the R3 scan node and used as an index pn = compile("select * FROM R3, R2 LEFT JOIN R1 ON R1.C = 
R2.C WHERE R3.C = R2.C AND R3.A > 0"); n = pn.getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN, null, "R1"); n = n.getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.INDEXSCAN, PlanNodeType.SEQSCAN, "R3", "R2"); // R3.A > 0 is now outer join expression and must stay at the LEFT join pn = compile("select * FROM R3, R2 LEFT JOIN R1 ON R1.C = R2.C AND R3.A > 0 WHERE R3.C = R2.C"); n = pn.getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, ExpressionType.COMPARE_GREATERTHAN, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN, null, "R1"); n = n.getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R3", "R2"); pn = compile("select * FROM R3 JOIN R2 ON R3.C = R2.C RIGHT JOIN R1 ON R1.C = R2.C AND R3.A > 0"); n = pn.getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.CONJUNCTION_AND, null, PlanNodeType.SEQSCAN, PlanNodeType.NESTLOOP, "R1", null); n = n.getChild(1); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R3", "R2"); // R3.A > 0 gets pushed down all the way to the R3 scan node and used as an index pn = compile("select * FROM R2, R3 LEFT JOIN R1 ON R1.C = R2.C WHERE R3.C = R2.C AND R3.A > 0"); n = pn.getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN, null, "R1"); n = n.getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.INDEXSCAN, "R2", "R3"); // R3.A = R2.C gets pushed down to the R2, R3 join node scan node and used as an index pn = compile("select * FROM R2, R3 LEFT JOIN R1 ON R1.C = R2.C WHERE R3.A = R2.C"); n = pn.getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOPINDEX, PlanNodeType.SEQSCAN, null, "R1"); n = n.getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.INNER, null, null, null, PlanNodeType.SEQSCAN, null, "R2", "R3"); } public void testOuterSimplificationJoin() { // NULL_rejection simplification is the first transformation - // before the LEFT-to-RIGHT and the WHERE expressions push down AbstractPlanNode pn; AbstractPlanNode n; pn = compile("select * FROM R1, R3 RIGHT JOIN R2 ON R1.A = R2.A WHERE R3.C = R1.C"); n = pn.getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN); n = n.getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN); // The second R3.C = R2.C join condition is NULL-rejecting for the outer table // from the first LEFT join - can't simplify (not the inner table) pn = compile("select * FROM R1 LEFT JOIN R2 ON R1.A = R2.A LEFT JOIN R3 ON R3.C = R2.C"); n = pn.getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN, null, "R3"); n = 
n.getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R1", "R2"); // The second R3.C = R2.C join condition is NULL-rejecting for the first LEFT join pn = compile("select * FROM R1 LEFT JOIN R2 ON R1.A = R2.A RIGHT JOIN R3 ON R3.C = R2.C"); n = pn.getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.NESTLOOP, "R3", null); n = n.getChild(1); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN); // The R3.A = R1.A join condition is NULL-rejecting for the FULL join OUTER (R1) table // simplifying it to R1 LEFT JOIN R2 pn = compile("select * FROM " + "R1 FULL JOIN R2 ON R1.A = R2.A " + "RIGHT JOIN R3 ON R3.A = R1.A"); n = pn.getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.NESTLOOP, "R3", null); n = n.getChild(1); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R1", "R2"); // The R3.A = R2.A join condition is NULL-rejecting for the FULL join INNER (R2) table // simplifying it to R1 RIGHT JOIN R2 which gets converted to R2 LEFT JOIN R1 pn = compile("select * FROM " + "R1 FULL JOIN R2 ON R1.A = R2.A " + "RIGHT JOIN R3 ON R3.A = R2.A"); n = pn.getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.NESTLOOP, "R3", null); n = n.getChild(1); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R2", "R1"); // The R1-R2 FULL join is an outer node in the top LEFT join - not simplified pn = compile("select * FROM " + "R1 FULL JOIN R2 ON R1.A = R2.A " + "LEFT JOIN R3 ON R3.A = R2.A"); n = pn.getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.LEFT, null, null, null, PlanNodeType.NESTLOOP, PlanNodeType.INDEXSCAN, null, "R3"); n = n.getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R1", "R2"); // The R3.A = R2.A AND R3.A = R1.A join condition is NULL-rejecting for the FULL join // OUTER (R1) and INNER (R1) tables simplifying it to R1 JOIN R2 pn = compile("select * FROM " + "R1 FULL JOIN R2 ON R1.A = R2.A " + "RIGHT JOIN R3 ON R3.A = R2.A AND R3.A = R1.A"); n = pn.getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.CONJUNCTION_AND, null, PlanNodeType.SEQSCAN, PlanNodeType.NESTLOOP, "R3", null); n = n.getChild(1); // HSQL doubles the join expression for the first join. 
Once it's corrected the join expression type // should be ExpressionType.COMPARE_EQUAL verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.CONJUNCTION_AND, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN); // The R4 FULL join is an outer node in the R5 FULL join and can not be simplified by the R1.A = R5.A ON expression // R1 RIGHT JOIN R2 ON R1.A = R2.A R1 JOIN R3 ON R1.A = R3.A // JOIN R3 ON R1.A = R3.A ==> JOIN R2 ON R1.A = R2.A // FULL JOIN R4 ON R1.A = R4.A FULL JOIN R4 ON R1.A = R4.A // FULL JOIN R5 ON R1.A = R5.A FULL JOIN R5 ON R1.A = R5.A pn = compile("select * FROM " + "R1 RIGHT JOIN R2 ON R1.A = R2.A " + "JOIN R3 ON R1.A = R3.A " + "FULL JOIN R4 ON R1.A = R4.A " + "FULL JOIN R5 ON R1.A = R5.A"); n = pn.getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.FULL, null, null, null, PlanNodeType.NESTLOOPINDEX, PlanNodeType.INDEXSCAN, null, "R5"); n = n.getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.FULL, null, null, null, PlanNodeType.NESTLOOP, PlanNodeType.INDEXSCAN, null, "R4"); n = n.getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOPINDEX, PlanNodeType.SEQSCAN, null, "R2"); n = n.getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.INNER, null, null, null, PlanNodeType.SEQSCAN, PlanNodeType.INDEXSCAN, "R1", "R3"); // The R1-R2 LEFT JOIN belongs to the outer node of the top FULL join // and can't be simplified by the R2.A = R4.A ON join condition pn = compile("select * FROM " + "R1 LEFT JOIN R2 ON R1.A = R2.A " + "JOIN R3 ON R1.A = R3.A " + "FULL JOIN R4 ON R2.A = R4.A"); n = pn.getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.FULL, null, null, null, PlanNodeType.NESTLOOPINDEX, PlanNodeType.INDEXSCAN, null, "R4"); n = n.getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.INNER, null, null, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN, null, "R3"); n = n.getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R1", "R2"); // The R2.A > 0 WHERE expression is NULL rejecting for all outer joins pn = compile("select * FROM " + "R1 LEFT JOIN R2 ON R1.A = R2.A " + "JOIN R3 ON R1.A = R3.A " + "FULL JOIN R4 ON R1.A = R4.A WHERE R2.A > 0"); n = pn.getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.LEFT, null, null, null, PlanNodeType.NESTLOOP, PlanNodeType.INDEXSCAN, null, "R4"); n = n.getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOPINDEX, PlanNodeType.SEQSCAN, null, "R2"); n = n.getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.INNER, null, null, null, PlanNodeType.SEQSCAN, PlanNodeType.INDEXSCAN, "R1", "R3"); // The R1-R2 RIGHT join is an outer node in the top FULL join - not simplified pn = compile("SELECT * FROM R1 RIGHT JOIN R2 ON R1.A = R2.A FULL JOIN R3 ON R3.A = R1.A"); n = pn.getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.FULL, null, null, null, PlanNodeType.NESTLOOP, PlanNodeType.INDEXSCAN, null, "R3"); n = n.getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R2", "R1"); // The R1-R2 LEFT join is an outer node in the top FULL join - not simplified pn = compile("SELECT * FROM R1 LEFT JOIN R2 ON R1.A 
= R2.A FULL JOIN R3 ON R3.A = R2.A"); n = pn.getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.FULL, null, null, null, PlanNodeType.NESTLOOP, PlanNodeType.INDEXSCAN, null, "R3"); n = n.getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R1", "R2"); } public void testMultitableDistributedJoin() { List<AbstractPlanNode> lpn; AbstractPlanNode n; // One distributed table lpn = compileToFragments("select * FROM R3,R1 LEFT JOIN P2 ON R3.A = P2.A WHERE R3.A=R1.A "); assertTrue(lpn.size() == 2); n = lpn.get(0).getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOPINDEX, PlanNodeType.RECEIVE); // R3.A and P2.A have an index. P2,R1 is NLIJ/inlined IndexScan because it's an inner join even P2 is distributed lpn = compileToFragments("select * FROM P2,R1 LEFT JOIN R3 ON R3.A = P2.A WHERE P2.A=R1.A "); assertTrue(lpn.size() == 2); n = lpn.get(0).getChild(0).getChild(0); assertTrue(n instanceof ReceivePlanNode); n = lpn.get(1).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.LEFT, null, null, null, PlanNodeType.NESTLOOPINDEX, PlanNodeType.INDEXSCAN); // R3.A has an index. R3,P2 is NLJ because it's an outer join and P2 is distributed lpn = compileToFragments("select * FROM R3,R1 LEFT JOIN P2 ON R3.A = P2.A WHERE R3.A=R1.A "); assertTrue(lpn.size() == 2); // to debug */ System.out.println("DEBUG 0.0: " + lpn.get(0).toExplainPlanString()); // to debug */ System.out.println("DEBUG 0.1: " + lpn.get(1).toExplainPlanString()); n = lpn.get(0).getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOPINDEX, PlanNodeType.RECEIVE); n = n.getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.INNER, null, null, null, PlanNodeType.SEQSCAN, PlanNodeType.INDEXSCAN); n = lpn.get(1).getChild(0); // For determinism reason assertTrue(n instanceof IndexScanPlanNode); // R3.A has an index. 
P2,R1 is NLJ because P2 is distributed and it's an outer join lpn = compileToFragments("select * FROM R1 LEFT JOIN P2 ON R1.A = P2.A, R3 WHERE R1.A=R3.A "); assertTrue(lpn.size() == 2); // to debug */ System.out.println("DEBUG 1.0: " + lpn.get(0).toExplainPlanString()); // to debug */ System.out.println("DEBUG 1.1: " + lpn.get(1).toExplainPlanString()); n = lpn.get(0).getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.INNER, null, null, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN); n = n.getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.RECEIVE); n = lpn.get(1).getChild(0); // For determinism reason assertTrue(n instanceof IndexScanPlanNode); // Two distributed table lpn = compileToFragments("select * FROM R3,P1 LEFT JOIN P2 ON R3.A = P2.A WHERE R3.A=P1.A "); assertTrue(lpn.size() == 2); n = lpn.get(0).getChild(0).getChild(0); assertTrue(n instanceof ReceivePlanNode); n = lpn.get(1).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.LEFT, null, null, null, PlanNodeType.NESTLOOPINDEX, PlanNodeType.INDEXSCAN); n = n.getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.INNER, null, null, null, PlanNodeType.SEQSCAN, PlanNodeType.INDEXSCAN); } public void testFullJoinExpressions() { AbstractPlanNode pn; AbstractPlanNode n; // WHERE outer and inner expressions stay at the FULL NLJ node pn = compile("select * FROM " + "R1 FULL JOIN R2 ON R1.A = R2.A WHERE R2.C IS NULL AND R1.C is NULL"); n = pn.getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, null, ExpressionType.COMPARE_EQUAL, ExpressionType.CONJUNCTION_AND, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN); // WHERE outer and inner expressions stay at the FULL NLJ node // The outer node is a join itself pn = compile("select * FROM " + "R1 JOIN R2 ON R1.A = R2.A FULL JOIN R3 ON R3.C = R2.C WHERE R1.C is NULL"); n = pn.getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, null, ExpressionType.COMPARE_EQUAL, ExpressionType.OPERATOR_IS_NULL, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN); // WHERE outer-inner expressions stay at the FULL NLJ node pn = compile("select * FROM " + "R1 FULL JOIN R2 ON R1.A = R2.A WHERE R2.C IS NULL OR R1.C is NULL"); n = pn.getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, null, ExpressionType.COMPARE_EQUAL, ExpressionType.CONJUNCTION_OR, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN); // WHERE outer and inner expressions push down process stops at the FULL join (R1,R2) node - // FULL join is itself an outer node pn = compile("select * FROM " + "R1 FULL JOIN R2 ON R1.A = R2.A LEFT JOIN R3 ON R3.C = R2.C WHERE R1.C is NULL"); n = pn.getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.LEFT, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN); n = n.getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, null, ExpressionType.COMPARE_EQUAL, ExpressionType.OPERATOR_IS_NULL, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN); // OUTER JOIN expression (R1.A > 0) is pre-predicate, inner and inner - outer expressions R3.C = R2.C AND R3.C < 0 are predicate pn = compile("select * FROM R1 JOIN R2 ON R1.A = R2.C FULL JOIN R3 ON R3.C = R2.C AND R1.A > 0 AND R3.C < 0"); n = pn.getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, ExpressionType.COMPARE_GREATERTHAN, ExpressionType.CONJUNCTION_AND, null, 
PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN, null, "R3"); n = n.getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R1", "R2"); // NLJ JOIN outer expression is pre-join expression, NLJ JOIN inner expression together with // JOIN inner-outer one are part of the join predicate pn = compile("select * FROM " + "R1 FULL JOIN R2 ON R1.A = R2.A AND R1.C = R2.C"); n = pn.getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, null, ExpressionType.CONJUNCTION_AND, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN); // NLJ JOIN outer expression is pre-join expression, NLJ JOIN inner expression together with // JOIN inner-outer one are part of the join predicate pn = compile("select * FROM " + "R1 FULL JOIN R2 ON R1.A = R2.A AND R1.C < 0 AND R2.C > 0"); n = pn.getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, ExpressionType.COMPARE_LESSTHAN, ExpressionType.CONJUNCTION_AND, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN); // NLJ JOIN outer expression is pre-join expression, NLJ JOIN inner expression together with // JOIN inner-outer one are part of the join predicate pn = compile("select * FROM " + "R1 JOIN R2 ON R1.A = R2.A FULL JOIN R3 ON R1.A = R3.C AND R1.C is NULL"); n = pn.getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, ExpressionType.OPERATOR_IS_NULL, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.NESTLOOP, PlanNodeType.SEQSCAN); } public void testFullIndexJoinExpressions() { AbstractPlanNode pn; AbstractPlanNode n; // Simple FULL NLIJ pn = compile("select * FROM " + "R3 FULL JOIN R1 ON R3.A = R1.A WHERE R3.C IS NULL"); n = pn.getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.FULL, null, null, ExpressionType.OPERATOR_IS_NULL, PlanNodeType.SEQSCAN, PlanNodeType.INDEXSCAN); String json = (new PlanNodeTree(pn)).toJSONString(); // Same Join as above but using FULL OUTER JOIN syntax pn = compile("select * FROM " + "R3 FULL OUTER JOIN R1 ON R3.A = R1.A WHERE R3.C IS NULL"); String json1 = (new PlanNodeTree(pn)).toJSONString(); assertEquals(json, json1); // FULL NLJ. 
R3.A is an index column but R3.A > 0 expression is used as a PREDICATE only pn = compile("select * FROM " + "R1 FULL JOIN R3 ON R3.C = R1.A AND R3.A > 0"); n = pn.getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, null, ExpressionType.CONJUNCTION_AND, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "R1", "R3"); // FULL NLIJ, inner join R3.A > 0 is added as a post-predicate to the inline Index scan pn = compile("select * FROM R1 FULL JOIN R3 ON R3.A = R1.A AND R3.A > 55"); n = pn.getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.FULL, null, null, null, PlanNodeType.SEQSCAN, PlanNodeType.INDEXSCAN, "R1", "R3"); verifyIndexScanNode(n.getInlinePlanNode(PlanNodeType.INDEXSCAN), IndexLookupType.EQ, ExpressionType.COMPARE_GREATERTHAN); // FULL NLIJ, inner join L.A > 0 is added as a pre-predicate to the NLIJ pn = compile("select * FROM R3 L FULL JOIN R3 R ON L.A = R.A AND L.A > 55"); n = pn.getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.FULL, ExpressionType.COMPARE_GREATERTHAN, null, null, PlanNodeType.SEQSCAN, PlanNodeType.INDEXSCAN, "L", "R"); verifyIndexScanNode(n.getInlinePlanNode(PlanNodeType.INDEXSCAN), IndexLookupType.EQ, null); // FULL NLIJ, inner-outer join R3.c = R1.c is a post-predicate for the inline Index scan pn = compile("select * FROM R1 FULL JOIN R3 ON R3.A = R1.A AND R3.C = R1.C"); n = pn.getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.FULL, null, null, null, PlanNodeType.SEQSCAN, PlanNodeType.INDEXSCAN, "R1", "R3"); verifyIndexScanNode(n.getInlinePlanNode(PlanNodeType.INDEXSCAN), IndexLookupType.EQ, ExpressionType.COMPARE_EQUAL); // FULL NLIJ, outer join (R1, R2) expression R1.A > 0 is a pre-predicate pn = compile("select * FROM R1 JOIN R2 ON R1.A = R2.C FULL JOIN R3 ON R3.A = R2.C AND R1.A > 0"); n = pn.getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.FULL, ExpressionType.COMPARE_GREATERTHAN, null, null, PlanNodeType.NESTLOOP, PlanNodeType.INDEXSCAN, null, "R3"); verifyIndexScanNode(n.getInlinePlanNode(PlanNodeType.INDEXSCAN), IndexLookupType.EQ, null); n = n.getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.INNER, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN); } public void testDistributedFullJoin() { List<AbstractPlanNode> lpn; AbstractPlanNode n; // FULL join on partition column lpn = compileToFragments("select * FROM " + "P1 FULL JOIN R2 ON P1.A = R2.A "); assertEquals(2, lpn.size()); n = lpn.get(0).getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.RECEIVE, "R2", null); // FULL join on partition column lpn = compileToFragments("select * FROM " + "R2 FULL JOIN P1 ON P1.A = R2.A "); assertEquals(2, lpn.size()); n = lpn.get(0).getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.RECEIVE, "R2", null); // FULL join on non-partition column lpn = compileToFragments("select * FROM " + "P1 FULL JOIN R2 ON P1.C = R2.A "); assertEquals(2, lpn.size()); n = lpn.get(0).getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.RECEIVE, "R2", null); // NLJ FULL join (R2, P2) on partition column R2.A > 0 is a pre-predicate, P2.A = R2.A AND P2.E < 0 are join predicate // It 
can't be a NLIJ because P2 is partitioned - P2.A index is not used lpn = compileToFragments("select * FROM " + "P2 FULL JOIN R2 ON P2.A = R2.A AND R2.A > 0 AND P2.E < 0"); assertEquals(2, lpn.size()); n = lpn.get(0).getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, ExpressionType.COMPARE_GREATERTHAN, ExpressionType.CONJUNCTION_AND, null, PlanNodeType.SEQSCAN, PlanNodeType.RECEIVE, "R2", null); // NLJ FULL join (R2, P2) on partition column P2.E = R2.A AND P2.A > 0 are join predicate // Inner join expression P2.A > 0 can't be used as index expression with NLJ lpn = compileToFragments("select * FROM " + "P2 FULL JOIN R2 ON P2.E = R2.A AND P2.A > 0"); assertEquals(2, lpn.size()); n = lpn.get(0).getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, null, ExpressionType.CONJUNCTION_AND, null, PlanNodeType.SEQSCAN, PlanNodeType.RECEIVE, "R2", null); // NLJ (R3, P2) on partition column P2.A. R3.A > 0 is a PRE_PREDICTAE // NLIJ (P2,R3) on partition column P2.A using index R3.A is an invalid plan for a FULL join lpn = compileToFragments("select * FROM " + "P2 FULL JOIN R3 ON P2.A = R3.A AND R3.A > 0 AND P2.E < 0"); assertEquals(2, lpn.size()); n = lpn.get(0).getChild(0).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, ExpressionType.COMPARE_GREATERTHAN, ExpressionType.CONJUNCTION_AND, null, PlanNodeType.SEQSCAN, PlanNodeType.RECEIVE, "R3", null); // FULL NLJ join of two partition tables on partition column lpn = compileToFragments("select * FROM P1 FULL JOIN P4 ON P1.A = P4.A "); assertEquals(2, lpn.size()); n = lpn.get(1).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOP, JoinType.FULL, null, ExpressionType.COMPARE_EQUAL, null, PlanNodeType.SEQSCAN, PlanNodeType.SEQSCAN, "P1", "P4"); // FULL NLIJ (P1,P2) on partition column P2.A lpn = compileToFragments("select * FROM P2 FULL JOIN P1 ON P1.A = P2.A AND P2.A > 0"); assertEquals(2, lpn.size()); n = lpn.get(1).getChild(0); verifyJoinNode(n, PlanNodeType.NESTLOOPINDEX, JoinType.FULL, null, null, null, PlanNodeType.SEQSCAN, PlanNodeType.INDEXSCAN, "P1", "P2"); verifyIndexScanNode(n.getInlinePlanNode(PlanNodeType.INDEXSCAN), IndexLookupType.EQ, ExpressionType.COMPARE_GREATERTHAN); // FULL join of two partition tables on non-partition column failToCompile("select * FROM P1 FULL JOIN P4 ON P1.C = P4.A ", "Join of multiple partitioned tables has insufficient join criteria"); } @Override protected void setUp() throws Exception { setupSchema(TestJoinOrder.class.getResource("testplans-join-ddl.sql"), "testplansjoin", false); } }
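// ---------------------------------------------------------------------------
// Editor's sketch (not part of the test class above): several cases in
// TestMultipleOuterJoinPlans hinge on "NULL rejection" -- a WHERE predicate on
// an inner-table column that a NULL-padded row can never satisfy lets the
// planner treat a LEFT join as an inner join. The standalone example below
// illustrates that equivalence on plain Java lists; the data and the class
// name NullRejectionSketch are invented for the demonstration and say nothing
// about the planner's actual implementation.
// ---------------------------------------------------------------------------
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

class NullRejectionSketch {
  public static void main(String[] args) {
    List<Integer> r1 = Arrays.asList(1, 2, 3); // outer table column values
    List<Integer> r2 = Arrays.asList(2, 3, 4); // inner table column values

    // R1 LEFT JOIN R2 ON r1 = r2: unmatched outer rows get a NULL inner side.
    List<Integer[]> leftJoin = new ArrayList<Integer[]>();
    for (Integer a : r1) {
      boolean matched = false;
      for (Integer b : r2) {
        if (a.equals(b)) {
          leftJoin.add(new Integer[] {a, b});
          matched = true;
        }
      }
      if (!matched) leftJoin.add(new Integer[] {a, null}); // NULL padding
    }

    // WHERE inner > 0 is NULL-rejecting: padded rows never pass, so the
    // surviving rows are exactly the inner-join rows (2 and 3 here).
    int kept = 0;
    for (Integer[] row : leftJoin) {
      if (row[1] != null && row[1] > 0) kept++;
    }
    System.out.println("rows surviving NULL-rejecting WHERE: " + kept); // 2
  }
}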
// MainDialog.java package loci.plugins.in; import com.jgoodies.forms.builder.PanelBuilder; import com.jgoodies.forms.layout.CellConstraints; import com.jgoodies.forms.layout.FormLayout; import ij.gui.GenericDialog; import java.awt.Checkbox; import java.awt.Choice; import java.awt.Color; import java.awt.Component; import java.awt.KeyboardFocusManager; import java.awt.Label; import java.awt.event.FocusEvent; import java.awt.event.FocusListener; import java.awt.event.ItemEvent; import java.awt.event.ItemListener; import java.awt.event.MouseEvent; import java.awt.event.MouseListener; import java.util.HashMap; import java.util.Map; import java.util.Vector; import javax.swing.JEditorPane; import javax.swing.JScrollPane; import loci.plugins.util.WindowTools; public class MainDialog extends ImporterDialog implements FocusListener, ItemListener, MouseListener { // -- Constants -- /** Initial message to display in help text box. */ public static final String INFO_DEFAULT = "<i>Select an option for a detailed explanation. " + "Documentation written by Glen MacDonald and Curtis Rueden.</i>"; // -- Fields -- protected Checkbox autoscaleBox; protected Choice colorModeChoice; protected Checkbox concatenateBox; protected Checkbox cropBox; protected Checkbox groupFilesBox; protected Checkbox ungroupFilesBox; protected Checkbox openAllSeriesBox; //protected Checkbox recordBox; protected Checkbox showMetadataBox; protected Checkbox showOMEXMLBox; protected Checkbox showROIsBox; protected Checkbox specifyRangesBox; protected Checkbox splitZBox; protected Checkbox splitTBox; protected Checkbox splitCBox; protected Choice stackFormatChoice; protected Choice stackOrderChoice; protected Checkbox swapDimsBox; protected Checkbox virtualBox; protected Checkbox stitchTilesBox; protected Map<Component, String> infoTable; protected JEditorPane infoPane; // -- Constructor -- /** Creates a general options dialog for the Bio-Formats Importer. 
*/ public MainDialog(ImportProcess process) { super(process); } // -- ImporterDialog methods -- @Override protected boolean needPrompt() { return !process.isWindowless(); } @Override protected GenericDialog constructDialog() { GenericDialog gd = new GenericDialog("Bio-Formats Import Options"); addCheckbox(gd, ImporterOptions.KEY_AUTOSCALE); addChoice(gd, ImporterOptions.KEY_COLOR_MODE); addCheckbox(gd, ImporterOptions.KEY_CONCATENATE); addCheckbox(gd, ImporterOptions.KEY_CROP); addCheckbox(gd, ImporterOptions.KEY_GROUP_FILES); addCheckbox(gd, ImporterOptions.KEY_UNGROUP_FILES); addCheckbox(gd, ImporterOptions.KEY_OPEN_ALL_SERIES); addCheckbox(gd, ImporterOptions.KEY_QUIET); // NB: invisible //addCheckbox(gd, ImporterOptions.KEY_RECORD); addCheckbox(gd, ImporterOptions.KEY_SHOW_METADATA); addCheckbox(gd, ImporterOptions.KEY_SHOW_OME_XML); addCheckbox(gd, ImporterOptions.KEY_SHOW_ROIS); addCheckbox(gd, ImporterOptions.KEY_SPECIFY_RANGES); addCheckbox(gd, ImporterOptions.KEY_SPLIT_Z); addCheckbox(gd, ImporterOptions.KEY_SPLIT_T); addCheckbox(gd, ImporterOptions.KEY_SPLIT_C); addChoice(gd, ImporterOptions.KEY_STACK_FORMAT); addChoice(gd, ImporterOptions.KEY_STACK_ORDER); addCheckbox(gd, ImporterOptions.KEY_SWAP_DIMS); addCheckbox(gd, ImporterOptions.KEY_VIRTUAL); addCheckbox(gd, ImporterOptions.KEY_STITCH_TILES); rebuildDialog(gd); return gd; } @Override protected boolean harvestResults(GenericDialog gd) { options.setAutoscale(gd.getNextBoolean()); options.setColorMode(options.getColorModes()[gd.getNextChoiceIndex()]); options.setConcatenate(gd.getNextBoolean()); options.setCrop(gd.getNextBoolean()); options.setGroupFiles(gd.getNextBoolean()); options.setUngroupFiles(gd.getNextBoolean()); options.setOpenAllSeries(gd.getNextBoolean()); options.setQuiet(gd.getNextBoolean()); // NB: invisible //options.setRecord(gd.getNextBoolean()); options.setShowMetadata(gd.getNextBoolean()); options.setShowOMEXML(gd.getNextBoolean()); options.setShowROIs(gd.getNextBoolean()); options.setSpecifyRanges(gd.getNextBoolean()); options.setSplitFocalPlanes(gd.getNextBoolean()); options.setSplitTimepoints(gd.getNextBoolean()); options.setSplitChannels(gd.getNextBoolean()); options.setStackFormat(options.getStackFormats()[gd.getNextChoiceIndex()]); options.setStackOrder(options.getStackOrders()[gd.getNextChoiceIndex()]); options.setSwapDimensions(gd.getNextBoolean()); options.setVirtual(gd.getNextBoolean()); options.setStitchTiles(gd.getNextBoolean()); return true; } // -- FocusListener methods -- /** Handles information pane updates when component focus changes. */ public void focusGained(FocusEvent e) { Object src = e.getSource(); String text = infoTable.get(src); infoPane.setText("<html>" + text); infoPane.setCaretPosition(0); } public void focusLost(FocusEvent e) { } // -- ItemListener methods -- /** Handles toggling of mutually exclusive options. */ public void itemStateChanged(ItemEvent e) { verifyOptions(e.getSource()); } // -- MouseListener methods -- /** Focuses the component upon mouseover. */ public void mouseEntered(MouseEvent e) { Object src = e.getSource(); if (src instanceof Component) { ((Component) src).requestFocusInWindow(); } } public void mouseClicked(MouseEvent e) { } public void mouseExited(MouseEvent e) { } public void mousePressed(MouseEvent e) { } public void mouseReleased(MouseEvent e) { } // -- Helper methods -- /** Fancies up the importer dialog to look much nicer. 
*/ private void rebuildDialog(GenericDialog gd) { // extract GUI components from dialog and add listeners Vector<Checkbox> boxes = null; Vector<Choice> choices = null; Vector<Label> labels = null; Label colorModeLabel = null; Label stackFormatLabel = null; Label stackOrderLabel = null; Component[] c = gd.getComponents(); if (c != null) { boxes = new Vector<Checkbox>(); choices = new Vector<Choice>(); labels = new Vector<Label>(); for (int i=0; i<c.length; i++) { if (c[i] instanceof Checkbox) { Checkbox item = (Checkbox) c[i]; item.addFocusListener(this); item.addItemListener(this); item.addMouseListener(this); boxes.add(item); } else if (c[i] instanceof Choice) { Choice item = (Choice) c[i]; item.addFocusListener(this); item.addItemListener(this); item.addMouseListener(this); choices.add(item); } else if (c[i] instanceof Label) labels.add((Label) c[i]); } int boxIndex = 0, choiceIndex = 0, labelIndex = 0; autoscaleBox = boxes.get(boxIndex++); colorModeChoice = choices.get(choiceIndex++); colorModeLabel = labels.get(labelIndex++); concatenateBox = boxes.get(boxIndex++); cropBox = boxes.get(boxIndex++); groupFilesBox = boxes.get(boxIndex++); ungroupFilesBox = boxes.get(boxIndex++); openAllSeriesBox = boxes.get(boxIndex++); boxIndex++; // quiet //recordBox = boxes.get(boxIndex++); showMetadataBox = boxes.get(boxIndex++); showOMEXMLBox = boxes.get(boxIndex++); showROIsBox = boxes.get(boxIndex++); specifyRangesBox = boxes.get(boxIndex++); splitZBox = boxes.get(boxIndex++); splitTBox = boxes.get(boxIndex++); splitCBox = boxes.get(boxIndex++); stackFormatChoice = choices.get(choiceIndex++); stackFormatLabel = labels.get(labelIndex++); stackOrderChoice = choices.get(choiceIndex++); stackOrderLabel = labels.get(labelIndex++); swapDimsBox = boxes.get(boxIndex++); virtualBox = boxes.get(boxIndex++); stitchTilesBox = boxes.get(boxIndex++); } verifyOptions(null); // TODO: The info table and focus logic could be split into // its own class, rather than being specific to this dialog. 
// associate information for each option infoTable = new HashMap<Component, String>(); infoTable.put(autoscaleBox, options.getAutoscaleInfo()); infoTable.put(colorModeChoice, options.getColorModeInfo()); infoTable.put(colorModeLabel, options.getColorModeInfo()); infoTable.put(concatenateBox, options.getConcatenateInfo()); infoTable.put(cropBox, options.getCropInfo()); infoTable.put(groupFilesBox, options.getGroupFilesInfo()); infoTable.put(ungroupFilesBox, options.getUngroupFilesInfo()); infoTable.put(openAllSeriesBox, options.getOpenAllSeriesInfo()); //infoTable.put(recordBox, options.getRecordInfo()); infoTable.put(showMetadataBox, options.getShowMetadataInfo()); infoTable.put(showOMEXMLBox, options.getShowOMEXMLInfo()); infoTable.put(showROIsBox, options.getShowROIsInfo()); infoTable.put(specifyRangesBox, options.getSpecifyRangesInfo()); infoTable.put(splitZBox, options.getSplitFocalPlanesInfo()); infoTable.put(splitTBox, options.getSplitTimepointsInfo()); infoTable.put(splitCBox, options.getSplitChannelsInfo()); infoTable.put(stackFormatChoice, options.getStackFormatInfo()); infoTable.put(stackFormatLabel, options.getStackFormatInfo()); infoTable.put(stackOrderChoice, options.getStackOrderInfo()); infoTable.put(stackOrderLabel, options.getStackOrderInfo()); infoTable.put(swapDimsBox, options.getSwapDimensionsInfo()); infoTable.put(virtualBox, options.getVirtualInfo()); infoTable.put(stitchTilesBox, options.getStitchTilesInfo()); // rebuild dialog using FormLayout to organize things more nicely String cols = // first column "pref, 3dlu, pref:grow, " + // second column "10dlu, pref, " + // third column "10dlu, fill:150dlu"; String rows = // Stack viewing | Metadata viewing "pref, 3dlu, pref, 3dlu, pref, 3dlu, pref, " + // Dataset organization | Memory management "9dlu, pref, 3dlu, pref, 3dlu, pref, 3dlu, pref, 3dlu, pref, " + "3dlu, pref, " + // Color options | Split into separate windows "9dlu, pref, 3dlu, pref, 3dlu, pref, 3dlu, pref, 3dlu, pref"; // TODO: Change "Use virtual stack" and "Record modifications to virtual // stack" checkboxes to "Stack type" choice with options: // "Normal", "Virtual" or "Smart virtual" PanelBuilder builder = new PanelBuilder(new FormLayout(cols, rows)); CellConstraints cc = new CellConstraints(); // populate 1st column int row = 1; builder.addSeparator("Stack viewing", cc.xyw(1, row, 3)); row += 2; builder.add(stackFormatLabel, cc.xy(1, row)); builder.add(stackFormatChoice, cc.xy(3, row)); row += 2; builder.add(stackOrderLabel, cc.xy(1, row)); builder.add(stackOrderChoice, cc.xy(3, row)); row += 4; builder.addSeparator("Dataset organization", cc.xyw(1, row, 3)); row += 2; builder.add(groupFilesBox, xyw(cc, 1, row, 3)); row += 2; builder.add(ungroupFilesBox, xyw(cc, 1, row, 3)); row += 2; builder.add(swapDimsBox, xyw(cc, 1, row, 3)); row += 2; builder.add(openAllSeriesBox, xyw(cc, 1, row, 3)); row += 2; builder.add(concatenateBox, xyw(cc, 1, row, 3)); row += 2; builder.add(stitchTilesBox, xyw(cc, 1, row, 3)); row += 2; builder.addSeparator("Color options", cc.xyw(1, row, 3)); row += 2; builder.add(colorModeLabel, cc.xy(1, row)); builder.add(colorModeChoice, cc.xy(3, row)); row += 2; builder.add(autoscaleBox, xyw(cc, 1, row, 3)); row += 2; // populate 2nd column row = 1; builder.addSeparator("Metadata viewing", cc.xy(5, row)); row += 2; builder.add(showMetadataBox, xyw(cc, 5, row, 1)); row += 2; builder.add(showOMEXMLBox, xyw(cc, 5, row, 1)); row += 2; builder.add(showROIsBox, xyw(cc, 5, row, 1)); row += 2; builder.addSeparator("Memory management", 
cc.xy(5, row)); row += 2; builder.add(virtualBox, xyw(cc, 5, row, 1)); row += 2; //builder.add(recordBox, xyw(cc, 5, row, 1)); //row += 2; builder.add(specifyRangesBox, xyw(cc, 5, row, 1)); row += 2; builder.add(cropBox, xyw(cc, 5, row, 1)); row += 4; builder.addSeparator("Split into separate windows", cc.xy(5, row)); row += 2; builder.add(splitCBox, xyw(cc, 5, row, 1)); row += 2; builder.add(splitZBox, xyw(cc, 5, row, 1)); row += 2; builder.add(splitTBox, xyw(cc, 5, row, 1)); //row += 4; // information section builder.addSeparator("Information", cc.xy(7, 1)); //row += 2; infoPane = new JEditorPane(); infoPane.setContentType("text/html"); infoPane.setEditable(false); infoPane.setText("<html>" + INFO_DEFAULT); builder.add(new JScrollPane(infoPane), cc.xywh(7, 3, 1, row)); //row += 2; gd.removeAll(); gd.add(builder.getPanel()); WindowTools.addScrollBars(gd); gd.setBackground(Color.white); // HACK: workaround for JPanel in a Dialog } /** * Convenience method for creating a left-aligned, * vertically centered cell constraints object. */ private CellConstraints xyw(CellConstraints cc, int x, int y, int w) { return cc.xyw(x, y, w, CellConstraints.LEFT, CellConstraints.CENTER); } /** Ensures that the options dialog has no mutually exclusive options. */ private void verifyOptions(Object src) { // record GUI state boolean autoscaleEnabled = autoscaleBox.isEnabled(); boolean colorModeEnabled = colorModeChoice.isEnabled(); boolean concatenateEnabled = concatenateBox.isEnabled(); boolean cropEnabled = cropBox.isEnabled(); boolean groupFilesEnabled = groupFilesBox.isEnabled(); boolean ungroupFilesEnabled = ungroupFilesBox.isEnabled(); boolean openAllSeriesEnabled = openAllSeriesBox.isEnabled(); //boolean recordEnabled = recordBox.isEnabled(); boolean showMetadataEnabled = showMetadataBox.isEnabled(); boolean showOMEXMLEnabled = showOMEXMLBox.isEnabled(); boolean specifyRangesEnabled = specifyRangesBox.isEnabled(); boolean splitZEnabled = splitZBox.isEnabled(); boolean splitTEnabled = splitTBox.isEnabled(); boolean splitCEnabled = splitCBox.isEnabled(); //boolean stackFormatEnabled = stackFormatChoice.isEnabled(); boolean stackOrderEnabled = stackOrderChoice.isEnabled(); boolean swapDimsEnabled = swapDimsBox.isEnabled(); boolean virtualEnabled = virtualBox.isEnabled(); boolean isAutoscale = autoscaleBox.getState(); String colorModeValue = colorModeChoice.getSelectedItem(); boolean isConcatenate = concatenateBox.getState(); boolean isCrop = cropBox.getState(); boolean isGroupFiles = groupFilesBox.getState(); boolean isUngroupFiles = ungroupFilesBox.getState(); boolean isOpenAllSeries = openAllSeriesBox.getState(); //boolean isRecord = recordBox.getState(); boolean isShowMetadata = showMetadataBox.getState(); boolean isShowOMEXML = showOMEXMLBox.getState(); boolean isSpecifyRanges = specifyRangesBox.getState(); boolean isSplitZ = splitZBox.getState(); boolean isSplitT = splitTBox.getState(); boolean isSplitC = splitCBox.getState(); String stackFormatValue = stackFormatChoice.getSelectedItem(); boolean isStackNone = stackFormatValue.equals(ImporterOptions.VIEW_NONE); boolean isStackStandard = stackFormatValue.equals(ImporterOptions.VIEW_STANDARD); boolean isStackHyperstack = stackFormatValue.equals(ImporterOptions.VIEW_HYPERSTACK); boolean isStackBrowser = stackFormatValue.equals(ImporterOptions.VIEW_BROWSER); boolean isStackImage5D = stackFormatValue.equals(ImporterOptions.VIEW_IMAGE_5D); boolean isStackView5D = stackFormatValue.equals(ImporterOptions.VIEW_VIEW_5D); String stackOrderValue = 
stackOrderChoice.getSelectedItem(); boolean isSwap = swapDimsBox.getState(); boolean isVirtual = virtualBox.getState(); // toggle availability of each option based on state of earlier options // NB: The order the options are examined here defines their order of // precedence. This ordering is necessary because it affects which // component states are capable of graying out other components. // For example, we want to disable autoscaleBox when virtualBox is checked, // so the virtualBox logic must appear before the autoscaleBox logic. // To make it more intuitive for the user, the order of precedence should // match the component layout from left to right, top to bottom, according // to subsection. // == Stack viewing == // stackOrderChoice stackOrderEnabled = isStackStandard; if (src == stackFormatChoice) { if (isStackHyperstack || isStackBrowser || isStackImage5D) { stackOrderValue = ImporterOptions.ORDER_XYCZT; } else if (isStackView5D) stackOrderValue = ImporterOptions.ORDER_XYZCT; else stackOrderValue = ImporterOptions.ORDER_DEFAULT; } // == Metadata viewing == // showMetadataBox showMetadataEnabled = !isStackNone; if (!showMetadataEnabled) isShowMetadata = true; // showOMEXMLBox // NB: no other options affect showOMEXMLBox // == Dataset organization == // groupFilesBox if (src == stackFormatChoice && isStackBrowser) { isGroupFiles = true; } else if (options.isOMERO()) { isGroupFiles = false; groupFilesEnabled = false; } // ungroupFilesBox if (options.isOMERO()) { isUngroupFiles = false; ungroupFilesEnabled = false; } // swapDimsBox // NB: no other options affect swapDimsBox // openAllSeriesBox // NB: no other options affect openAllSeriesBox // concatenateBox // NB: no other options affect concatenateBox // == Memory management == // virtualBox virtualEnabled = !isStackNone && !isStackImage5D && !isStackView5D; if (!virtualEnabled) isVirtual = false; else if (src == stackFormatChoice && isStackBrowser) isVirtual = true; // recordBox //recordEnabled = isVirtual; //if (!recordEnabled) isRecord = false; // specifyRangesBox specifyRangesEnabled = !isStackNone && !isVirtual; if (!specifyRangesEnabled) isSpecifyRanges = false; // cropBox cropEnabled = !isStackNone && !isVirtual; if (!cropEnabled) isCrop = false; // == Color options == // colorModeChoice colorModeEnabled = !isStackImage5D && !isStackView5D && !isStackStandard; if (!colorModeEnabled) colorModeValue = ImporterOptions.COLOR_MODE_DEFAULT; // autoscaleBox autoscaleEnabled = !isVirtual; if (!autoscaleEnabled) isAutoscale = false; // == Split into separate windows == boolean splitEnabled = !isStackNone && !isStackBrowser && !isStackImage5D && !isStackView5D; // TODO: Make splitting work with Data Browser. 
// splitCBox splitCEnabled = splitEnabled; if (!splitCEnabled) isSplitC = false; // splitZBox splitZEnabled = splitEnabled; if (!splitZEnabled) isSplitZ = false; // splitTBox splitTEnabled = splitEnabled; if (!splitTEnabled) isSplitT = false; // update state of each option, in case anything changed autoscaleBox.setEnabled(autoscaleEnabled); colorModeChoice.setEnabled(colorModeEnabled); concatenateBox.setEnabled(concatenateEnabled); cropBox.setEnabled(cropEnabled); groupFilesBox.setEnabled(groupFilesEnabled); ungroupFilesBox.setEnabled(ungroupFilesEnabled); openAllSeriesBox.setEnabled(openAllSeriesEnabled); //recordBox.setEnabled(recordEnabled); showMetadataBox.setEnabled(showMetadataEnabled); showOMEXMLBox.setEnabled(showOMEXMLEnabled); specifyRangesBox.setEnabled(specifyRangesEnabled); splitZBox.setEnabled(splitZEnabled); splitTBox.setEnabled(splitTEnabled); splitCBox.setEnabled(splitCEnabled); //stackFormatChoice.setEnabled(stackFormatEnabled); stackOrderChoice.setEnabled(stackOrderEnabled); swapDimsBox.setEnabled(swapDimsEnabled); virtualBox.setEnabled(virtualEnabled); autoscaleBox.setState(isAutoscale); colorModeChoice.select(colorModeValue); concatenateBox.setState(isConcatenate); cropBox.setState(isCrop); groupFilesBox.setState(isGroupFiles); ungroupFilesBox.setState(isUngroupFiles); openAllSeriesBox.setState(isOpenAllSeries); //recordBox.setState(isRecord); showMetadataBox.setState(isShowMetadata); showOMEXMLBox.setState(isShowOMEXML); specifyRangesBox.setState(isSpecifyRanges); splitZBox.setState(isSplitZ); splitTBox.setState(isSplitT); splitCBox.setState(isSplitC); //stackFormatChoice.select(stackFormatValue); stackOrderChoice.select(stackOrderValue); swapDimsBox.setState(isSwap); virtualBox.setState(isVirtual); if (IS_GLITCHED) { // HACK - work around a Mac OS X bug where GUI components do not update // list of affected components Component[] c = { autoscaleBox, colorModeChoice, concatenateBox, cropBox, groupFilesBox, ungroupFilesBox, openAllSeriesBox, //recordBox, showMetadataBox, showOMEXMLBox, specifyRangesBox, splitZBox, splitTBox, splitCBox, stackFormatChoice, stackOrderChoice, swapDimsBox, virtualBox }; // identify currently focused component Component focused = null; for (int i=0; i<c.length; i++) { if (c[i].isFocusOwner()) focused = c[i]; } // temporarily disable focus events for (int i=0; i<c.length; i++) c[i].removeFocusListener(this); // cycle through focus on all components for (int i=0; i<c.length; i++) c[i].requestFocusInWindow(); // clear the focus globally KeyboardFocusManager kfm = KeyboardFocusManager.getCurrentKeyboardFocusManager(); kfm.clearGlobalFocusOwner(); sleep(100); // doesn't work if this value is too small // refocus the originally focused component if (focused != null) focused.requestFocusInWindow(); // reenable focus events for (int i=0; i<c.length; i++) c[i].addFocusListener(this); } } }
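// ---------------------------------------------------------------------------
// Editor's sketch (not part of MainDialog above): rebuildDialog() lays the
// importer options out with JGoodies FormLayout -- column and row specs are
// encoded as strings and CellConstraints addresses individual cells. The
// minimal self-contained example below shows that technique in isolation; the
// column/row specs, labels and the class name FormLayoutSketch are invented
// for illustration only.
// ---------------------------------------------------------------------------
import com.jgoodies.forms.builder.PanelBuilder;
import com.jgoodies.forms.layout.CellConstraints;
import com.jgoodies.forms.layout.FormLayout;

import javax.swing.JCheckBox;
import javax.swing.JFrame;

class FormLayoutSketch {
  public static void main(String[] args) {
    // two content columns separated by a 10dlu gap; three preferred-size rows
    FormLayout layout = new FormLayout(
      "pref, 10dlu, pref",            // columns
      "pref, 3dlu, pref, 3dlu, pref"  // rows
    );
    PanelBuilder builder = new PanelBuilder(layout);
    CellConstraints cc = new CellConstraints();

    // left column
    builder.addSeparator("Stack viewing", cc.xy(1, 1));
    builder.add(new JCheckBox("Option A"), cc.xy(1, 3));
    builder.add(new JCheckBox("Option B"), cc.xy(1, 5));

    // right column
    builder.addSeparator("Metadata viewing", cc.xy(3, 1));
    builder.add(new JCheckBox("Option C"), cc.xy(3, 3));

    JFrame frame = new JFrame("FormLayout sketch");
    frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
    frame.getContentPane().add(builder.getPanel());
    frame.pack();
    frame.setVisible(true);
  }
}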
package org.voltdb.regressionsuites; import java.io.IOException; import org.voltdb.BackendTarget; import org.voltdb.VoltTable; import org.voltdb.VoltType; import org.voltdb.client.Client; import org.voltdb.client.ClientResponse; import org.voltdb.client.NoConnectionsException; import org.voltdb.client.ProcCallException; import org.voltdb.client.ProcedureCallback; import org.voltdb.compiler.VoltProjectBuilder; import org.voltdb.types.TimestampType; import org.voltdb_testprocs.regressionsuites.fixedsql.Insert; import org.voltdb_testprocs.regressionsuites.fixedsql.TestENG1232; import org.voltdb_testprocs.regressionsuites.fixedsql.TestENG1232_2; import org.voltdb_testprocs.regressionsuites.fixedsql.TestENG2423; /** * Actual regression tests for SQL that I found that was broken and * have fixed. Didn't like any of the other potential homes that already * existed for this for one reason or another. */ public class TestFixedSQLSuite extends RegressionSuite { /** Procedures used by this suite */ static final Class<?>[] PROCEDURES = { Insert.class, TestENG1232.class, TestENG1232_2.class, TestENG2423.InnerProc.class }; static final int VARCHAR_VARBINARY_THRESHOLD = 100; public void testTicketEng2250_IsNull() throws Exception { System.out.println("STARTING testTicketEng2250_IsNull"); Client client = getClient(); ProcedureCallback callback = new ProcedureCallback() { @Override public void clientCallback(ClientResponse clientResponse) throws Exception { if (clientResponse.getStatus() != ClientResponse.SUCCESS) { throw new RuntimeException("Failed with response: " + clientResponse.getStatusString()); } } }; /* CREATE TABLE P1 ( ID INTEGER DEFAULT '0' NOT NULL, DESC VARCHAR(300), NUM INTEGER, RATIO FLOAT, PRIMARY KEY (ID) ); */ System.out.println("Eng2250: null entries."); for(int id=0; id < 5; id++) { client.callProcedure(callback, "P1.insert", id, null, 10, 1.1); client.drain(); } System.out.println("Eng2250: not null entries."); for (int id=5; id < 8; id++) { client.callProcedure(callback, "P1.insert", id,"description", 10, 1.1); client.drain(); } VoltTable r1 = client.callProcedure("@AdHoc", "select count(*) from P1 where desc is null").getResults()[0]; //* enable for debugging */ System.out.println(r1); assertEquals(5, r1.asScalarLong()); VoltTable r2 = client.callProcedure("@AdHoc", "select count(*) from P1 where not desc is null").getResults()[0]; //* enable for debugging */ System.out.println(r2); assertEquals(3, r2.asScalarLong()); VoltTable r3 = client.callProcedure("@AdHoc", "select count(*) from P1 where NOT (id=2 and desc is null)").getResults()[0]; //* enable for debugging */ System.out.println(r3); assertEquals(7, r3.asScalarLong()); VoltTable r4 = client.callProcedure("@AdHoc", "select count(*) from P1 where NOT (id=6 and desc is null)").getResults()[0]; //* enable for debugging */ System.out.println(r4); assertEquals(8, r4.asScalarLong()); VoltTable r5 = client.callProcedure("@AdHoc", "select count(*) from P1 where id < 6 and NOT desc is null;").getResults()[0]; //* enable for debugging */ System.out.println(r5); assertEquals(1, r5.asScalarLong()); } public void testTicketEng1850_WhereOrderBy() throws Exception { System.out.println("STARTING testTicketENG1850_WhereOrderBy"); ProcedureCallback callback = new ProcedureCallback() { @Override public void clientCallback(ClientResponse clientResponse) throws Exception { if (clientResponse.getStatus() != ClientResponse.SUCCESS) { throw new RuntimeException("Failed with response: " + clientResponse.getStatusString()); } } }; Client client = 
getClient(); int cid=0; do { for (int aid = 0; aid < 5; aid++) { int pid = cid % 10; client.callProcedure(callback, "ENG1850.insert", cid++, aid, pid, (pid+aid)); } } while (cid < 1000); client.drain(); VoltTable r1 = client.callProcedure("@AdHoc", "select count(*) from ENG1850;").getResults()[0]; assertEquals(1000, r1.asScalarLong()); VoltTable r2 = client.callProcedure("@AdHoc", "select count(*) from ENG1850 where pid =2;").getResults()[0]; assertEquals(100, r2.asScalarLong()); VoltTable r3 = client.callProcedure("@AdHoc", "select * from ENG1850 where pid = 2 limit 1;").getResults()[0]; //* enable for debugging */ System.out.println("r3\n" + r3); assertEquals(1, r3.getRowCount()); // this failed, returning 0 rows. VoltTable r4 = client.callProcedure("@AdHoc", "select * from ENG1850 where pid = 2 order by pid, aid").getResults()[0]; //* enable for debugging */ System.out.println("r4\n:" + r4); assertEquals(100, r4.getRowCount()); // this is the failing condition reported in the defect report (as above but with the limit) VoltTable r5 = client.callProcedure("@AdHoc", "select * from ENG1850 where pid = 2 order by pid, aid limit 1").getResults()[0]; //* enable for debugging */ System.out.println("r5\n" + r5); assertEquals(1, r5.getRowCount()); } public void testTicketEng1850_WhereOrderBy2() throws Exception { System.out.println("STARTING testTIcketEng1850_WhereOrderBy2"); // verify that selecting * where pid = 2 order by pid, aid gets the right number // of tuples when <pid, null> exists in the relation (as this would be the first // key found by moveToKeyOrGreater - verify this key is added to the output if // it really exists Client client = getClient(); // index is (pid, aid) // schema: insert (cid, aid, pid, attr) client.callProcedure("ENG1850.insert", 0, 1, 1, 0); if (!isHSQL()) { // unsure why HSQL throws out-of-range exception here. // there are sql coverage tests for this case. skip it here. client.callProcedure("ENG1850.insert", 1, null, 2, 0); } client.callProcedure("ENG1850.insert", 2, 1, 2, 0); client.callProcedure("ENG1850.insert", 3, 2, 2, 0); client.callProcedure("ENG1850.insert", 4, 3, 3, 0); VoltTable r1 = client.callProcedure("@AdHoc", "select * from ENG1850 where pid = 2 order by pid, aid").getResults()[0]; //* enable for debugging */ System.out.println(r1); assertEquals(isHSQL() ? 2: 3, r1.getRowCount()); VoltTable r2 = client.callProcedure("@AdHoc", "select * from ENG1850 where pid = 2 order by aid, pid").getResults()[0]; //* enable for debugging */ System.out.println(r2); assertEquals(isHSQL() ? 2 : 3, r2.getRowCount()); VoltTable r3 = client.callProcedure("@AdHoc", "select * from ENG1850 where pid > 1 order by pid, aid").getResults()[0]; //* enable for debugging */ System.out.println(r3); assertEquals(isHSQL() ? 3 : 4, r3.getRowCount()); VoltTable r4 = client.callProcedure("@AdHoc", "select * from ENG1850 where pid = 2").getResults()[0]; //* enable for debugging */ System.out.println(r4); assertEquals(isHSQL() ? 
2 : 3, r4.getRowCount()); } public void testTicketENG1232() throws Exception { Client client = getClient(); client.callProcedure("@AdHoc", "insert into test_eng1232 VALUES(9);"); VoltTable result[] = client.callProcedure("TestENG1232", 9).getResults(); assertTrue(result[0].advanceRow()); assertEquals(9, result[0].getLong(0)); assertTrue(result[1].advanceRow()); assertEquals(1, result[1].getLong(0)); client.callProcedure("@AdHoc", "insert into test_eng1232 VALUES(9);"); result = client.callProcedure("TestENG1232_2", 9).getResults(); assertTrue(result[0].advanceRow()); assertEquals(1, result[0].getLong(0)); assertFalse(result[1].advanceRow()); } public void testInsertNullPartitionString() throws IOException, ProcCallException { // This test is for issue ENG-697 Client client = getClient(); boolean caught = false; try { client.callProcedure("InsertNullString", null, 0, 1); } catch (final ProcCallException e) { if (e.getMessage().contains("CONSTRAINT VIOLATION")) caught = true; else { e.printStackTrace(); fail(); } } assertTrue(caught); } public void testTicket309() throws IOException, ProcCallException { String[] tables = {"P1", "R1", "P2", "R2"}; Client client = getClient(); for (String table : tables) { client.callProcedure("Insert", table, 1, "desc", 100, 14.5); client.callProcedure("Insert", table, 2, "desc", 100, 14.5); client.callProcedure("Insert", table, 3, "desc", 100, 14.5); client.callProcedure("Insert", table, 6, "desc", 300, 14.5); client.callProcedure("Insert", table, 7, "desc", 300, 14.5); client.callProcedure("Insert", table, 8, "desc", 500, 14.5); String query = String.format("select count(*), %s.NUM from %s group by %s.NUM", table, table, table); VoltTable[] results = client.callProcedure("@AdHoc", query).getResults(); assertEquals(3, results[0].getRowCount()); while (results[0].advanceRow()) { if (results[0].getLong(1) == 100) { assertEquals(3, results[0].getLong(0)); } else if (results[0].getLong(1) == 300) { assertEquals(2, results[0].getLong(0)); } else if (results[0].getLong(1) == 500) { assertEquals(1, results[0].getLong(0)); } else { fail(); } } } } // Regression test for broken SQL of the variety: // select * from TABLE where (TABLE.ID = value) and // (TABLE.col1 compared_to TABLE.col2) // which would return results any time TABLE.ID = value was true, // regardless of whether the second expression was true. 
public void testAndExpressionComparingSameTableColumns() throws IOException, ProcCallException { String[] tables = {"P1", "R1"}; for (String table : tables) { Client client = getClient(); client.callProcedure("Insert", table, 5, "desc", 10, 14.5); client.callProcedure("Insert", table, 15, "desc2", 10, 14.5); // These queries should result in no rows, but the defect in // SubPlanAssembler resulted in only the NO_NULLS.PKEY = 5 expression // being used String query = "select * from " + table + " where (" + table + ".ID = 5) and (" + table + ".NUM < " + table +".ID)"; VoltTable[] results = client.callProcedure("@AdHoc", query).getResults(); assertEquals(0, results[0].getRowCount()); query = "select * from " + table + " where (" + table + ".ID = 5) and (" + table + ".NUM <= " + table +".ID)"; results = client.callProcedure("@AdHoc", query).getResults(); assertEquals(0, results[0].getRowCount()); query = "select * from " + table + " where (" + table + ".ID = 15) and (" + table + ".NUM > " + table +".ID)"; results = client.callProcedure("@AdHoc", query).getResults(); assertEquals(0, results[0].getRowCount()); query = "select * from " + table + " where (" + table + ".ID = 15) and (" + table + ".NUM >= " + table +".ID)"; results = client.callProcedure("@AdHoc", query).getResults(); assertEquals(0, results[0].getRowCount()); } } // Regression test for broken SQL of the variety: // select * from replicated_table where (predicate) LIMIT n // For replicated tables, LIMIT is inlined in seqscan; the tuple count was // being incremented for each input tuple regardless of the predicate // result, which was resulting in the wrong number of rows returned in some // cases. // @throws IOException // @throws ProcCallException public void testSeqScanFailedPredicateDoesntCountAgainstLimit() throws IOException, ProcCallException { String[] tables = {"P1", "R1"}; for (String table : tables) { Client client = getClient(); // our predicate is going to be ID < NUM. // Insert one row where this is false client.callProcedure("Insert", table, 1, "desc", -1, 14.5); // And two where it is true client.callProcedure("Insert", table, 2, "desc", 100, 14.5); client.callProcedure("Insert", table, 3, "desc", 100, 14.5); String query = "select * from " + table + " where " + table + ".ID < " + table +".NUM limit 2"; VoltTable[] results = client.callProcedure("@AdHoc", query).getResults(); // we should get 2 rows but this bug would result in only 1 returned assertEquals(2, results[0].getRowCount()); } } // Regression test for broken SQL of the variety: // select (non-aggregating expression) from table // e.g. select col1 + col2 from table // PlanAssembler extracts the left side of the expression to discard // aggregation-type expressions from the parsed SQL, but was basically // assuming that anything not a VALUE_TUPLE was an aggregate. 
// Note: Adding 5.5 in the third test here also tests a "fix" in // HSQL where we coerce the type of numeric literals from NUMERIC to DOUBLE public void testSelectExpression() throws IOException, ProcCallException { String[] tables = {"P1", "R1"}; for (String table : tables) { Client client = getClient(); client.callProcedure("Insert", table, 1, "desc", 2, 14.5); String query = String.format("select %s.ID + 10 from %s", table, table); VoltTable[] results = client.callProcedure("@AdHoc", query).getResults(); results[0].advanceRow(); assertEquals(11, results[0].getLong(0)); query = String.format("select %s.NUM + 20 from %s", table, table); results = client.callProcedure("@AdHoc", query).getResults(); results[0].advanceRow(); assertEquals(22, results[0].getLong(0)); query = String.format("select %s.RATIO + 5.5 from %s", table, table); results = client.callProcedure("@AdHoc", query).getResults(); results[0].advanceRow(); assertEquals(20.0, results[0].getDouble(0)); query = String.format("select %s.ID + %s.NUM from %s", table, table, table); results = client.callProcedure("@AdHoc", query).getResults(); results[0].advanceRow(); assertEquals(3, results[0].getLong(0)); // ENG-5035 query = String.format("select '%s' from %s", table, table); results = client.callProcedure("@AdHoc", query).getResults(); results[0].advanceRow(); assertEquals(table, results[0].getString(0)); query = String.format("select '%s' from %s", "qwertyuiop", table); results = client.callProcedure("@AdHoc", query).getResults(); results[0].advanceRow(); assertEquals("qwertyuiop", results[0].getString(0)); query = String.format("select %s.RATIO, '%s' from %s", table, "qwertyuiop", table); results = client.callProcedure("@AdHoc", query).getResults(); results[0].advanceRow(); assertEquals("qwertyuiop", results[0].getString(1)); } } // Regression test for broken SQL of the variety: // trac #166 // When evaluating the nest loop join predicate, insufficient // information was available to tuplevalue expression nodes to // understand which column(s) needed to be evaluated by the TVE's // operators. public void testNestLoopJoinPredicates() throws IOException, ProcCallException { Client client = getClient(); for (int id=0; id < 5; id++) { // insert id, (5-id) in to P1 client.callProcedure("Insert", "P1", id, "desc", (5-id), 2.5); // insert id, (id) in to R1 client.callProcedure("Insert", "R1", id, "desc", (id), 2.5); } // join on the (5-id), (id) columns String query = "select * from P1, R1 where P1.NUM = R1.NUM"; VoltTable vts[] = client.callProcedure("@AdHoc", query).getResults(); nestLoopJoinPredicates_verify(vts); // same thing using inner join syntax query = "select * from P1 INNER JOIN R1 on P1.NUM = R1.NUM"; vts = client.callProcedure("@AdHoc", query).getResults(); nestLoopJoinPredicates_verify(vts); // join on ID and verify NUM. 
(ID is indexed) query = "select * from P1, R1 where P1.ID = R1.ID"; vts = client.callProcedure("@AdHoc", query).getResults(); nestLoopJoinPredicates_verifyid(vts); // as above with inner join syntax query = "select * from P1 INNER JOIN R1 on P1.ID = R1.ID"; vts = client.callProcedure("@AdHoc", query).getResults(); nestLoopJoinPredicates_verifyid(vts); } private void nestLoopJoinPredicates_verifyid(VoltTable[] vts) { assertEquals(1, vts.length); //* enable for debugging */ System.out.println("verifyid: " + vts[0]); assertEquals(5, vts[0].getRowCount()); while(vts[0].advanceRow()) { int p_id = ((Integer)vts[0].get(0, VoltType.INTEGER)).intValue(); int r_id = ((Integer)vts[0].get(4, VoltType.INTEGER)).intValue(); int p_n = ((Integer)vts[0].get(2, VoltType.INTEGER)).intValue(); int r_n = ((Integer)vts[0].get(6, VoltType.INTEGER)).intValue(); assertEquals(p_id, r_id); assertEquals(5 - p_n, r_n); } } private void nestLoopJoinPredicates_verify(VoltTable[] vts) { assertEquals(1, vts.length); //* enable for debugging */ System.out.println(vts[0]); assertEquals(4, vts[0].getRowCount()); // the id of the first should be (5-id) in the second // because of the insertion trickery done above // verifies trac #125 while(vts[0].advanceRow()) { int id1 = ((Integer)vts[0].get(0, VoltType.INTEGER)).intValue(); int id2 = ((Integer)vts[0].get(4, VoltType.INTEGER)).intValue(); assertEquals(id1, (5 - id2)); } } // Regression test for broken SQL of the variety: // trac #125. (verification in addition to testNestLoopJoinPredicates). // Select a complex expression (not just a TupleValueExpression) // to verify that non-root TVEs are correctly offset. public void nestLoopJoinPredicatesWithExpressions() throws IOException, ProcCallException { Client client = getClient(); for (int id=0; id < 5; id++) { // insert id, (5-id) in to P1 client.callProcedure("Insert", "P1", id, "desc", (5-id), 2.5); // insert id, (id) in to R1 client.callProcedure("Insert", "R1", id, "desc", (id), 2.5); } String query = "select (P1.ID + 20), (R1.ID + 40) from P1, R1 where P1.NUM = R1.NUM"; VoltTable vts[] = client.callProcedure("@AdHoc", query).getResults(); nestLoopJoinPredicatesWithExpressions_verify(vts); // same thing using inner join syntax query = "select (P1.ID + 20), (R1.ID + 40) from P1 INNER JOIN R1 on P1.NUM = R1.NUM"; vts = client.callProcedure("@AdHoc", query).getResults(); nestLoopJoinPredicatesWithExpressions_verify(vts); } private void nestLoopJoinPredicatesWithExpressions_verify( VoltTable[] vts) { assertEquals(1, vts.length); //* enable for debugging */ System.out.println(vts[0]); assertEquals(4, vts[0].getRowCount()); // the id of the first should be (5-id) in the second once the addition // done in the select expression is un-done. while(vts[0].advanceRow()) { int p1_id = ((Integer)vts[0].get(0, VoltType.INTEGER)).intValue(); int r1_id = ((Integer)vts[0].get(1, VoltType.INTEGER)).intValue(); assertEquals( (p1_id - 20), (5 - (r1_id - 40)) ); // and verify that the addition actually happened. assertTrue(p1_id >= 20); assertTrue(p1_id <= 24); assertTrue(r1_id >= 40); assertTrue(r1_id <= 44); } } // Regression test for broken SQL of the variety: // trac #125. (additional verification). // Select columns and expressions with aliases. 
public void testNestLoopJoinPredicatesWithAliases() throws IOException, ProcCallException { Client client = getClient(); for (int id=0; id < 5; id++) { // insert id, (5-id) in to P1 client.callProcedure("Insert", "P1", id, "desc", (5-id), 2.5); // insert id, (id) in to R1 client.callProcedure("Insert", "R1", id, "desc", (id), 2.5); } // use an alias that would select an invalid column. (be a jerk). String query = "select R1.ID AS DESC, (P1.ID + 20) AS THOMAS from P1, R1 where P1.NUM = R1.NUM"; VoltTable vts[] = client.callProcedure("@AdHoc", query).getResults(); nestLoopJoinPredicatesWithAliases_verify(vts); // same thing using inner join syntax query = "select R1.ID AS DESC, (P1.ID + 20) AS THOMAS from P1 INNER JOIN R1 on P1.NUM = R1.NUM"; vts = client.callProcedure("@AdHoc", query).getResults(); nestLoopJoinPredicatesWithAliases_verify(vts); } private void nestLoopJoinPredicatesWithAliases_verify(VoltTable[] vts) { assertEquals(1, vts.length); //* enable for debugging */ System.out.println(vts[0]); assertEquals(4, vts[0].getRowCount()); // the id of the first should be (5-id) in the second once the addition // done in the select expression is un-done. while(vts[0].advanceRow()) { int p1_id = ((Integer)vts[0].get(1, VoltType.INTEGER)).intValue(); int r1_id = ((Integer)vts[0].get(0, VoltType.INTEGER)).intValue(); assertEquals( (p1_id - 20), (5 - r1_id) ); // and verify that the addition actually happened. assertTrue(p1_id >= 20); assertTrue(p1_id <= 24); assertTrue(r1_id >= 0); assertTrue(r1_id <= 4); } } // Regression test for broken SQL of the sort // select * from TABLE where COL_WITH_ORDERED_INDEX > n // The bug is that indexscanexecutor and indexes treat > as >= // @throws IOException // @throws ProcCallException public void testGreaterThanOnOrderedIndex() throws IOException, ProcCallException { String[] tables = {"P2", "R2"}; Client client = getClient(); for (String table : tables) { client.callProcedure("Insert", table, 1, "desc", 100, 14.5); client.callProcedure("Insert", table, 2, "desc", 100, 14.5); client.callProcedure("Insert", table, 3, "desc", 100, 14.5); client.callProcedure("Insert", table, 6, "desc", 100, 14.5); client.callProcedure("Insert", table, 7, "desc", 100, 14.5); client.callProcedure("Insert", table, 8, "desc", 100, 14.5); String query = "select * from " + table + " where " + table + ".ID > 1"; VoltTable[] results = client.callProcedure("@AdHoc", query).getResults(); // we should get 5 rows but this bug would result in all 6 returned assertEquals(5, results[0].getRowCount()); // make sure that we work if the value we want isn't present query = "select * from " + table + " where " + table + ".ID > 4"; results = client.callProcedure("@AdHoc", query).getResults(); assertEquals(3, results[0].getRowCount()); query = "select * from " + table + " where " + table + ".ID > 8"; results = client.callProcedure("@AdHoc", query).getResults(); assertEquals(0, results[0].getRowCount()); } } public void testTicket196() throws IOException, ProcCallException { String[] tables = {"P1", "R1", "P2", "R2"}; Client client = getClient(); for (String table : tables) { client.callProcedure("Insert", table, 1, "desc", 100, 14.5); client.callProcedure("Insert", table, 2, "desc", 100, 14.5); client.callProcedure("Insert", table, 3, "desc", 100, 14.5); client.callProcedure("Insert", table, 6, "desc", 300, 14.5); client.callProcedure("Insert", table, 7, "desc", 300, 14.5); client.callProcedure("Insert", table, 8, "desc", 500, 14.5); String query = String.format("select count(*) from %s", table); 
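// the ungrouped count(*) below should come back as a single row reporting all six rows inserted above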
VoltTable[] results = client.callProcedure("@AdHoc", query).getResults(); assertEquals(1, results[0].getRowCount()); results[0].advanceRow(); assertEquals(6, results[0].getLong(0)); query = String.format("select %s.NUM, count(*) from %s group by %s.NUM", table, table, table); results = client.callProcedure("@AdHoc", query).getResults(); assertEquals(3, results[0].getRowCount()); while (results[0].advanceRow()) { if (results[0].getLong(0) == 100) { assertEquals(3, results[0].getLong(1)); } else if (results[0].getLong(0) == 300) { assertEquals(2, results[0].getLong(1)); } else if (results[0].getLong(0) == 500) { assertEquals(1, results[0].getLong(1)); } else { fail(); } } } // SO, given our current count(*) hack (replace * with the first column // in the input to the aggregator, this is a test that will // FAIL when we go and implement COUNT to do the right thing with null // values. If this test breaks for you, don't blow it off. String query = "insert into COUNT_NULL values (10, 0, 100)"; client.callProcedure("@AdHoc", query); query = "insert into COUNT_NULL values (NULL, 1, 200)"; client.callProcedure("@AdHoc", query); query = "insert into COUNT_NULL values (10, 2, 300)"; client.callProcedure("@AdHoc", query); query = "insert into COUNT_NULL values (NULL, 3, 400)"; client.callProcedure("@AdHoc", query); query = "select count(*) from COUNT_NULL"; VoltTable[] results = client.callProcedure("@AdHoc", query).getResults(); assertEquals(1, results[0].getRowCount()); results[0].advanceRow(); assertEquals(4, results[0].getLong(0)); } public void testTicket201() throws IOException, ProcCallException { String[] tables = {"P1", "R1", "P2", "R2"}; Client client = getClient(); for (String table : tables) { client.callProcedure("Insert", table, 1, "desc", 100, 14.5); client.callProcedure("Insert", table, 2, "desc", 200, 14.5); client.callProcedure("Insert", table, 3, "desc", 300, 14.5); client.callProcedure("Insert", table, 6, "desc", 400, 14.5); client.callProcedure("Insert", table, 7, "desc", 500, 14.5); client.callProcedure("Insert", table, 8, "desc", 600, 14.5); String query = String.format("select * from %s where (%s.ID + 1) = 2", table, table); VoltTable[] results = client.callProcedure("@AdHoc", query).getResults(); assertEquals(1, results[0].getRowCount()); query = String.format("select * from %s where (%s.ID + 1) > 2", table, table); results = client.callProcedure("@AdHoc", query).getResults(); assertEquals(5, results[0].getRowCount()); query = String.format("select * from %s where (%s.ID + 1) >= 2", table, table); results = client.callProcedure("@AdHoc", query).getResults(); assertEquals(6, results[0].getRowCount()); } } //public void testTicket205() throws IOException, ProcCallException // String[] tables = {"P1", "R1", "P2", "R2"}; // Client client = getClient(); // for (String table : tables) // client.callProcedure("Insert", table, 1, "desc", 100, 14.5); // client.callProcedure("Insert", table, 2, "desc", 200, 14.5); // client.callProcedure("Insert", table, 3, "desc", 300, 14.5); // client.callProcedure("Insert", table, 6, "desc", 400, 14.5); // client.callProcedure("Insert", table, 7, "desc", 500, 14.5); // client.callProcedure("Insert", table, 8, "desc", 600, 14.5); // String query = String.format("select sum(%s.NUM + 1) from %s", // table, table); // VoltTable[] results = client.callProcedure("@AdHoc", query); // assertEquals(1, results[0].getRowCount()); // query = String.format("select sum(%s.NUM + %s.ID) from %s", // table, table); // results = client.callProcedure("@AdHoc", query); 
// assertEquals(1, results[0].getRowCount()); public void testTicket216() throws IOException, ProcCallException { String[] tables = {"P1", "R1", "P2", "R2"}; Client client = getClient(); for (String table : tables) { client.callProcedure("Insert", table, 1, "desc", 100, 100.0); client.callProcedure("Insert", table, 2, "desc", 200, 200.0); client.callProcedure("Insert", table, 3, "desc", 300, 300.0); client.callProcedure("Insert", table, 6, "desc", 400, 400.0); client.callProcedure("Insert", table, 7, "desc", 500, 500.0); client.callProcedure("Insert", table, 8, "desc", 600, 600.0); String query = String.format("select %s.RATIO / 2.0 from %s order by ID", table, table); VoltTable[] results = client.callProcedure("@AdHoc", query).getResults(); assertEquals(6, results[0].getRowCount()); for (double f=50.0; results[0].advanceRow(); f+=50.0) { double num = (results[0].getDouble(0)); assertEquals(f, num); } query = String.format("select * from %s where %s.RATIO >= 400.0", table, table); results = client.callProcedure("@AdHoc", query).getResults(); assertEquals(3, results[0].getRowCount()); } } public void testTicket194() throws IOException, ProcCallException { String[] tables = {"P1", "R1", "P2", "R2"}; Client client = getClient(); for (String table : tables) { client.callProcedure("Insert", table, 1, "desc", 100, 14.5); client.callProcedure("Insert", table, 2, "desc", 200, 14.5); client.callProcedure("Insert", table, 3, "desc", 300, 14.5); client.callProcedure("Insert", table, 6, "desc", 400, 14.5); client.callProcedure("Insert", table, 7, "desc", 500, 14.5); client.callProcedure("Insert", table, 8, "desc", 600, 14.5); String query = String.format("select * from %s where %s.ID >= 2.1", table, table); VoltTable[] results = client.callProcedure("@AdHoc", query).getResults(); assertEquals(4, results[0].getRowCount()); query = String.format("select * from %s where %s.ID >= 4.0", table, table); results = client.callProcedure("@AdHoc", query).getResults(); assertEquals(3, results[0].getRowCount()); } } public void testTickets227And228() throws IOException, ProcCallException { String[] tables = {"P2", "R2"}; Client client = getClient(); for (String table : tables) { client.callProcedure("Insert", table, 1, "desc", 100, 14.5); client.callProcedure("Insert", table, 2, "desc", 100, 14.5); client.callProcedure("Insert", table, 3, "desc", 100, 14.5); client.callProcedure("Insert", table, 6, "desc", 100, 14.5); client.callProcedure("Insert", table, 7, "desc", 100, 14.5); client.callProcedure("Insert", table, 8, "desc", 100, 14.5); } // test > on the join (ticket 227) String query = "select * from R2, P2 where R2.ID > 1"; VoltTable[] results = client.callProcedure("@AdHoc", query).getResults(); assertEquals(30, results[0].getRowCount()); query = "select * from P2, R2 where R2.ID > 1"; results = client.callProcedure("@AdHoc", query).getResults(); assertEquals(30, results[0].getRowCount()); // test >= on the join (ticket 228) query = "select * from R2, P2 where R2.ID >= 3"; results = client.callProcedure("@AdHoc", query).getResults(); assertEquals(24, results[0].getRowCount()); query = "select * from P2, R2 where R2.ID >= 3"; results = client.callProcedure("@AdHoc", query).getResults(); assertEquals(24, results[0].getRowCount()); query = "select * from R2, P2 where R2.ID >= 4"; results = client.callProcedure("@AdHoc", query).getResults(); assertEquals(18, results[0].getRowCount()); query = "select * from P2, R2 where R2.ID >= 4"; results = client.callProcedure("@AdHoc", query).getResults(); assertEquals(18, 
results[0].getRowCount()); } public void testTicket220() throws IOException, ProcCallException { String[] tables = {"P1", "R1"}; Client client = getClient(); int id = 0; for (String table : tables) { client.callProcedure("Insert", table, id++, "desc", 100, 14.5); client.callProcedure("Insert", table, id++, "desc", 100, 14.5); client.callProcedure("Insert", table, id++, "desc", 100, 14.5); } String query = "select R1.ID + 5 from R1, P1 order by R1.ID"; VoltTable[] results = client.callProcedure("@AdHoc", query).getResults(); assertEquals(9, results[0].getRowCount()); for (int i = 0; i < 3; i++) { for (int j = 0; j < 3; j++) { results[0].advanceRow(); assertEquals(i + 3 + 5, results[0].getLong(0)); } } } // At first pass, HSQL barfed on decimal in sql-coverage. Debug/test that here. public void testForHSQLDecimalFailures() throws IOException, ProcCallException { Client client = getClient(); String sql = "INSERT INTO R1_DECIMAL VALUES (26, 307473.174514, 289429.605067, 9.71903320295135486617e-01)"; client.callProcedure("@AdHoc", sql); sql = "select R1_DECIMAL.CASH + 2.0 from R1_DECIMAL"; VoltTable[] results = client.callProcedure("@AdHoc", sql).getResults(); assertEquals(1, results.length); } public void testTicket310() throws IOException, ProcCallException { Client client = getClient(); String sql = "INSERT INTO R1_DECIMAL VALUES (26, 307473.174514, 289429.605067, 9.71903320295135486617e-01)"; client.callProcedure("@AdHoc", sql); boolean caught = false; // HSQL doesn't choke the same way Volt does at the moment. // Fake the test out. if (isHSQL()) { caught = true; } try { sql = "SELECT * FROM R1_DECIMAL WHERE " + "(R1_DECIMAL.CASH <= 0.0622493314185)" + " AND (R1_DECIMAL.ID > R1_DECIMAL.CASH)"; client.callProcedure("@AdHoc", sql); } catch (ProcCallException e) { caught = true; } assertTrue(caught); } public void testNumericExpressionConversion() throws IOException, ProcCallException { VoltTable[] results; Client client = getClient(); String sql = "INSERT INTO R1_DECIMAL VALUES " + "(26, 307473.174514, 289429.605067, 9.71903320295135486617e-01)"; results = client.callProcedure("@AdHoc", sql).getResults(); assertEquals(1, results.length); assertEquals(1, results[0].asScalarLong()); sql = "UPDATE R1_DECIMAL SET CASH = CASH * 5 WHERE " + "R1_DECIMAL.CASH != 88687.224073"; results = client.callProcedure("@AdHoc", sql).getResults(); assertEquals(1, results.length); assertEquals(1, results[0].asScalarLong()); sql = "UPDATE R1_DECIMAL SET CASH = CASH + 5.5 WHERE " + "R1_DECIMAL.CASH != 88687.224073"; results = client.callProcedure("@AdHoc", sql).getResults(); assertEquals(1, results.length); assertEquals(1, results[0].asScalarLong()); } public void testTicket221() throws IOException, ProcCallException { String[] tables = {"P1", "R1"}; Client client = getClient(); int id = 0; for (String table : tables) { client.callProcedure("Insert", table, id++, "desc", 100, 14.5); client.callProcedure("Insert", table, id++, "desc", 200, 15.5); client.callProcedure("Insert", table, id++, "desc", 300, 16.5); } String query = "select distinct P1.NUM from R1, P1 order by P1.NUM"; VoltTable[] results = client.callProcedure("@AdHoc", query).getResults(); results = client.callProcedure("@AdHoc", query).getResults(); assertEquals(3, results[0].getRowCount()); for (int i = 100; results[0].advanceRow(); i+=100) { assertEquals(i, results[0].getLong(0)); //* enable for debugging */ System.out.println("i: " + results[0].getLong(0)); } } public void testTicket222() throws IOException, ProcCallException { String[] tables = 
{"P1", "R1"}; Client client = getClient(); int id = 0; for (String table : tables) { client.callProcedure("Insert", table, id++, "desc", 100, 14.5); client.callProcedure("Insert", table, id++, "desc", 200, 15.5); client.callProcedure("Insert", table, id++, "desc", 300, 16.5); } String query = "select max(P1.ID) from R1, P1"; VoltTable[] results = client.callProcedure("@AdHoc", query).getResults(); results = client.callProcedure("@AdHoc", query).getResults(); assertEquals(1, results[0].getRowCount()); results[0].advanceRow(); assertEquals(2, results[0].getLong(0)); //* enable for debugging */ System.out.println("i: " + results[0].getLong(0)); } public void testTicket224() throws IOException, ProcCallException { String[] tables = {"P1", "R1"}; Client client = getClient(); int id = 0; for (String table : tables) { client.callProcedure("Insert", table, id++, "desc", 100, 14.5); client.callProcedure("Insert", table, id++, "desc", 200, 15.5); client.callProcedure("Insert", table, id++, "desc", 300, 16.5); } String query = "select P1.ID from R1, P1 group by P1.ID order by P1.ID"; VoltTable[] results = client.callProcedure("@AdHoc", query).getResults(); results = client.callProcedure("@AdHoc", query).getResults(); assertEquals(3, results[0].getRowCount()); assertEquals(1, results[0].getColumnCount()); //* enable for debugging */ System.out.println(results[0].toFormattedString()); for (int i = 0; results[0].advanceRow(); i++) { assertEquals(i, results[0].getLong(0)); //* enable for debugging */ System.out.println("i: " + results[0].getLong(0)); } } public void testTicket226() throws IOException, ProcCallException { String[] tables = {"P1", "R1"}; Client client = getClient(); int id = 0; for (String table : tables) { client.callProcedure("Insert", table, id++, "desc", 100, 14.5); client.callProcedure("Insert", table, id++, "desc", 200, 15.5); client.callProcedure("Insert", table, id++, "desc", 300, 16.5); } String query = "select P1.ID from P1, R1 order by P1.ID"; VoltTable[] results = client.callProcedure("@AdHoc", query).getResults(); results = client.callProcedure("@AdHoc", query).getResults(); assertEquals(9, results[0].getRowCount()); assertEquals(1, results[0].getColumnCount()); for (int i = 0; i < 3; i++) { for (int j = 0; j < 3; j++) { results[0].advanceRow(); assertEquals(i, results[0].getLong(0)); //* enable for debugging */ System.out.println("i: " + results[0].getLong(0)); } } } public void testTicket231() throws IOException, ProcCallException { String[] tables = {"P1", "R1", "P2", "R2"}; Client client = getClient(); for (String table : tables) { client.callProcedure("Insert", table, 1, "desc", 100, 14.5); client.callProcedure("Insert", table, 2, "desc", 100, 14.5); client.callProcedure("Insert", table, 3, "desc", 200, 14.5); client.callProcedure("Insert", table, 6, "desc", 200, 14.5); client.callProcedure("Insert", table, 7, "desc", 300, 14.5); client.callProcedure("Insert", table, 8, "desc", 300, 14.5); // This statement is a test case for one of the ticket 231 // work-arounds String query = String.format("select (%s.NUM + %s.NUM) as NUMSUM from %s where (%s.NUM + %s.NUM) > 400", table, table, table, table, table); VoltTable[] results = client.callProcedure("@AdHoc", query).getResults(); assertEquals(2, results[0].getRowCount()); // This failing statement is the current ticket 231 failing behavior. 
// query = // String.format("select (%s.NUM + %s.NUM) as NUMSUM from %s order by (%s.NUM + %s.NUM)", // table, table, table, table, table); // results = client.callProcedure("@AdHoc", query); // assertEquals(6, results[0].getRowCount()); } } public void testTicket232() throws IOException, ProcCallException { String[] tables = {"P1", "R1", "P2", "R2"}; Client client = getClient(); for (String table : tables) { client.callProcedure("Insert", table, 1, "desc", 100, 14.5); client.callProcedure("Insert", table, 2, "desc", 100, 14.5); client.callProcedure("Insert", table, 3, "desc", 200, 14.5); client.callProcedure("Insert", table, 6, "desc", 200, 14.5); client.callProcedure("Insert", table, 7, "desc", 300, 14.5); client.callProcedure("Insert", table, 8, "desc", 300, 14.5); String query = String.format("select %s.NUM from %s group by %s.NUM order by %s.NUM", table, table, table, table); VoltTable[] results = client.callProcedure("@AdHoc", query).getResults(); assertEquals(3, results[0].getRowCount()); } } public void testTicket293() throws IOException, ProcCallException { String[] tables = {"P1", "R1", "P2", "R2"}; Client client = getClient(); int id = 0; for (String table : tables) { client.callProcedure("Insert", table, id++, "desc", 100, 14.5); client.callProcedure("Insert", table, id++, "desc", 200, 15.5); client.callProcedure("Insert", table, id++, "desc", 300, 16.5); client.callProcedure("Insert", table, id++, "desc", 300, 17.5); client.callProcedure("Insert", table, id++, "desc", 400, 18.5); String query = String.format("select distinct %s.NUM from %s order by %s.NUM", table, table, table); VoltTable[] results = client.callProcedure("@AdHoc", query).getResults(); assertEquals(4, results[0].getRowCount()); } String query = "select distinct P1.NUM from R1, P1 order by P1.NUM"; VoltTable[] results = client.callProcedure("@AdHoc", query).getResults(); results = client.callProcedure("@AdHoc", query).getResults(); assertEquals(4, results[0].getRowCount()); } public void testTicketEng397() throws IOException, ProcCallException { Client client = getClient(); for (int i=0; i < 20; i++) { client.callProcedure("Insert", "P1", i, "desc", 100 + i, 4.5); } // base case VoltTable[] results = client.callProcedure("Eng397Limit1", new Integer(10)).getResults(); assertEquals(10, results[0].getRowCount()); // negative limit rollsback boolean caught = false; try { results = client.callProcedure("Eng397Limit1", new Integer(-1)).getResults(); } catch (ProcCallException ignored) { caught = true; } catch (Exception ex) { ex.printStackTrace(); } assertTrue(caught); } // RE-ENABLE ONCE ENG-490 IS FIXED //public void testTicketEng490() throws IOException, ProcCallException { // Client client = getClient(); // VoltTable[] results = client.callProcedure("Eng490Select"); // assertEquals(1, results.length); // String query = "SELECT A.ASSET_ID, A.OBJECT_DETAIL_ID, OD.OBJECT_DETAIL_ID " + // "FROM ASSET A, OBJECT_DETAIL OD WHERE A.OBJECT_DETAIL_ID = OD.OBJECT_DETAIL_ID"; // results = client.callProcedure("@AdHoc", query); // assertEquals(1, results.length); public void testTicketEng993() throws IOException, ProcCallException { Client client = getClient(); // this tests some other mumbo jumbo as well like ENG-999 and ENG-1001 ClientResponse response = client.callProcedure("Eng993Insert", 5, 5.5); assertTrue(response.getStatus() == ClientResponse.SUCCESS); // Verify ENG-999 (Literal string 'NULL' round-trips as literal string // and doesn't transform into a SQL NULL value) response = client.callProcedure("@AdHoc", "select 
DESC from P1 where ID = 6"); VoltTable result = response.getResults()[0]; assertEquals("NULL", result.fetchRow(0).get(0, VoltType.STRING)); // Additional verification that inserts are not bothered by math that used to // generate unexpectedly formatted temp tuples and garbled persistent tuples. // ENG-5926 response = client.callProcedure("@AdHoc", "select * from P1"); result = response.getResults()[0]; result.advanceRow(); assertEquals(6, result.getLong(0)); assertEquals("NULL", result.getString(1)); result.getLong(2); // Not sure what's up with HSQL failing to find null here. if ( ! isHSQL()) { assertTrue(result.wasNull()); } assertEquals(6.5, result.getDouble(3)); // Further verify that inline varchar columns still properly handle potentially larger values // even after the temp tuple formatting fix for ENG-5926. response = client.callProcedure("Eng5926Insert", 5, "", 5.5); assertTrue(response.getStatus() == ClientResponse.SUCCESS); try { response = client.callProcedure("Eng5926Insert", 7, "HOO", 7.5); fail("Failed to throw ProcCallException for runtime varchar length exceeded."); } catch(ProcCallException pce) { } response = client.callProcedure("@AdHoc", "select * from PWEE ORDER BY ID DESC"); result = response.getResults()[0]; result.advanceRow(); assertEquals(6, result.getLong(0)); assertEquals("WEE", result.getString(1)); result.getLong(2); // Not sure what's up with HSQL failing to find null here. if ( ! isHSQL()) { assertTrue(result.wasNull()); } assertEquals(6.5, result.getDouble(3)); // this is the actual bug try { client.callProcedure("@AdHoc", "insert into P1 (ID,DESC,NUM,RATIO) VALUES('?',?,?,?);"); fail(); } catch (Exception e) { assertTrue(e.getMessage().contains("invalid format for a constant")); } // test that missing parameters don't work (ENG-1000) try { client.callProcedure("@AdHoc", "insert into P1 (ID,DESC,NUM,RATIO) VALUES(?,?,?,?);"); fail(); } catch (Exception e) { assertTrue(e.getMessage().contains("Number of arguments provided was 0 where 4 was expected")); } //VoltTable results = client.callProcedure("@AdHoc", "select * from P1;").getResults()[0]; //System.out.println(results.toJSONString()); } /** * Verify that DML returns correctly named "modified_tuple" column name * @throws IOException * @throws ProcCallException */ public void testTicketEng1316() throws IOException, ProcCallException { // Fake HSQL. Only care about Volt column naming code. 
if (isHSQL()) { assertTrue(true); return; } Client client = getClient(); ClientResponse rsp = null; // Test partitioned tables (multipartition query) rsp = client.callProcedure("Eng1316Insert_P", 100, "varcharvalue", 120, 1.0); assertEquals(1, rsp.getResults()[0].asScalarLong()); assertEquals("modified_tuples", rsp.getResults()[0].getColumnName(0)); rsp = client.callProcedure("Eng1316Insert_P", 101, "varcharvalue2", 121, 1.1); rsp = client.callProcedure("Eng1316Insert_P", 102, "varcharvalue2", 122, 1.2); rsp = client.callProcedure("Eng1316Insert_P", 103, "varcharvalue2", 123, 1.3); rsp = client.callProcedure("Eng1316Insert_P", 104, "varcharvalue2", 124, 1.4); rsp = client.callProcedure("Eng1316Update_P"); // update where id < 124 assertEquals(4, rsp.getResults()[0].asScalarLong()); assertEquals("modified_tuples", rsp.getResults()[0].getColumnName(0)); // Test partitioned tables (single partition query) rsp = client.callProcedure("Eng1316Insert_P1", 200, "varcharvalue", 120, 1.0); assertEquals(1, rsp.getResults()[0].asScalarLong()); assertEquals("modified_tuples", rsp.getResults()[0].getColumnName(0)); rsp = client.callProcedure("Eng1316Insert_P1", 201, "varcharvalue2", 121, 1.1); rsp = client.callProcedure("Eng1316Insert_P1", 202, "varcharvalue2", 122, 1.2); rsp = client.callProcedure("Eng1316Insert_P1", 203, "varcharvalue2", 123, 1.3); rsp = client.callProcedure("Eng1316Insert_P1", 204, "varcharvalue2", 124, 1.4); rsp = client.callProcedure("Eng1316Update_P1", 201); // update where id == ? assertEquals(1, rsp.getResults()[0].asScalarLong()); assertEquals("modified_tuples", rsp.getResults()[0].getColumnName(0)); // Test replicated tables. rsp = client.callProcedure("Eng1316Insert_R", 100, "varcharvalue", 120, 1.0); assertEquals(1, rsp.getResults()[0].asScalarLong()); assertEquals("modified_tuples", rsp.getResults()[0].getColumnName(0)); rsp = client.callProcedure("Eng1316Insert_R", 101, "varcharvalue2", 121, 1.1); rsp = client.callProcedure("Eng1316Insert_R", 102, "varcharvalue2", 122, 1.2); rsp = client.callProcedure("Eng1316Insert_R", 103, "varcharvalue2", 123, 1.3); rsp = client.callProcedure("Eng1316Insert_R", 104, "varcharvalue2", 124, 1.4); rsp = client.callProcedure("Eng1316Update_R"); // update where id < 104 assertEquals(4, rsp.getResults()[0].asScalarLong()); assertEquals("modified_tuples", rsp.getResults()[0].getColumnName(0)); } // make sure we can call an inner proc public void testTicket2423() throws NoConnectionsException, IOException, ProcCallException, InterruptedException { Client client = getClient(); client.callProcedure("TestENG2423$InnerProc"); releaseClient(client); // get it again to make sure the server is all good client = getClient(); client.callProcedure("TestENG2423$InnerProc"); } // Ticket: ENG-5151 public void testColumnDefaultNull() throws IOException, ProcCallException { System.out.println("STARTING default null test..."); Client client = getClient(); VoltTable result = null; // It used to throw errors from EE when inserting without giving explicit values for default null columns. 
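// The insert below supplies a value only for ID, so the remaining columns fall back to
// their DEFAULT NULL values; the select that follows verifies they really are NULL
// (the explicit null checks are skipped on the HSQL backend).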
result = client.callProcedure("@AdHoc", " INSERT INTO DEFAULT_NULL(id) VALUES (1);").getResults()[0]; result = client.callProcedure("@AdHoc", " select id, num1, num2, ratio from DEFAULT_NULL;").getResults()[0]; assertTrue(result.advanceRow()); assertEquals(1, result.getLong(0)); if (!isHSQL()) { result.getLong(1); assertTrue(result.wasNull()); result.getLong(2); assertTrue(result.wasNull()); result.getDouble(3); assertTrue(result.wasNull()); } } // Ticket: ENG-5486 public void testNULLcomparison() throws IOException, ProcCallException { System.out.println("STARTING default null test..."); Client client = getClient(); VoltTable result = null; /** CREATE TABLE DEFAULT_NULL ( ID INTEGER NOT NULL, num1 INTEGER DEFAULT NULL, num2 INTEGER , ratio FLOAT DEFAULT NULL, num3 INTEGER DEFAULT NULL, desc VARCHAR(300) DEFAULT NULL, PRIMARY KEY (ID) ); create index idx_num3 on DEFAULT_NULL (num3); */ result = client.callProcedure("@AdHoc", " INSERT INTO DEFAULT_NULL(id) VALUES (1);").getResults()[0]; validateTableOfScalarLongs(result, new long[]{1}); // Test null column comparison result = client.callProcedure("@AdHoc", " select count(*), count(num1) from DEFAULT_NULL where num1 < 3;").getResults()[0]; validateTableOfLongs(result, new long[][]{{0, 0}}); result = client.callProcedure("@AdHoc", " select count(*), count(num1) from DEFAULT_NULL where num1 <= 3;").getResults()[0]; validateTableOfLongs(result, new long[][]{{0, 0}}); result = client.callProcedure("@AdHoc", " select count(*), count(num1) from DEFAULT_NULL where num1 > 3;").getResults()[0]; validateTableOfLongs(result, new long[][]{{0, 0}}); // Test null column comparison with index result = client.callProcedure("@AdHoc", " select count(*), count(num3) from DEFAULT_NULL where num3 > 3;").getResults()[0]; validateTableOfLongs(result, new long[][]{{0, 0}}); result = client.callProcedure("@AdHoc", " select count(*), count(num3) from DEFAULT_NULL where num3 < 3;").getResults()[0]; validateTableOfLongs(result, new long[][]{{0, 0}}); result = client.callProcedure("@AdHoc", " select count(*), count(num3) from DEFAULT_NULL where num3 <= 3;").getResults()[0]; validateTableOfLongs(result, new long[][]{{0, 0}}); result = client.callProcedure("@Explain", "select count(*) from DEFAULT_NULL where num3 < 3;").getResults()[0]; //* enable for debugging */ System.out.println(result); // Reverse scan, count(*) result = client.callProcedure("@AdHoc", " select count(*) from DEFAULT_NULL where num3 < 3;").getResults()[0]; validateTableOfScalarLongs(result, new long[]{0}); } public void testENG4146() throws IOException, ProcCallException { System.out.println("STARTING insert no json string..."); Client client = getClient(); VoltTable result = null; if (!isHSQL()) { // it used to throw EE exception // when inserting a non-json encoded var char into a column that has a field() index; client.callProcedure("NO_JSON.insert", 1, "jpiekos1", "foo", "no json"); result = client.callProcedure("@AdHoc","select id, var1, var2, var3 from no_json;").getResults()[0]; assertTrue(result.advanceRow()); assertEquals(1, result.getLong(0)); assertEquals("jpiekos1", result.getString(1)); assertEquals("foo", result.getString(2)); assertEquals("no json", result.getString(3)); client.callProcedure("NO_JSON.insert", 2, "jpiekos2", "foo2", "no json2"); result = client.callProcedure("@AdHoc","select id from no_json " + "order by var2, field(var3,'color');").getResults()[0]; validateTableOfLongs(result, new long[][] {{1},{2}}); result = client.callProcedure("@AdHoc","select id from no_json " + 
"where var2 = 'foo' and field(var3,'color') = 'red';").getResults()[0]; assertEquals(0, result.getRowCount()); } } // SQL HAVING bug on partitioned materialized table public void testENG5669() throws IOException, ProcCallException { System.out.println("STARTING testing HAVING......"); Client client = getClient(); VoltTable vt = null; String sqlArray = "INSERT INTO P3 VALUES (0, -5377, 837, -21764, 18749);" + "INSERT INTO P3 VALUES (1, -5377, 837, -21764, 26060);" + "INSERT INTO P3 VALUES (2, -5377, 837, -10291, 30855);" + "INSERT INTO P3 VALUES (3, -5377, 837, -10291, 10718);" + "INSERT INTO P3 VALUES (4, -5377, 24139, -12116, -26619);" + "INSERT INTO P3 VALUES (5, -5377, 24139, -12116, -28421);" + "INSERT INTO P3 VALUES (6, -5377, 24139, 26580, 21384);" + "INSERT INTO P3 VALUES (7, -5377, 24139, 26580, 16131);" + "INSERT INTO P3 VALUES (8, 24862, -32179, 17651, 15165);" + "INSERT INTO P3 VALUES (9, 24862, -32179, 17651, -27633);" + "INSERT INTO P3 VALUES (10, 24862, -32179, 12941, 12036);" + "INSERT INTO P3 VALUES (11, 24862, -32179, 12941, 18363);" + "INSERT INTO P3 VALUES (12, 24862, -25522, 7979, 3903);" + "INSERT INTO P3 VALUES (13, 24862, -25522, 7979, 19380);" + "INSERT INTO P3 VALUES (14, 24862, -25522, 29263, 2730);" + "INSERT INTO P3 VALUES (15, 24862, -25522, 29263, -19078);" + "INSERT INTO P3 VALUES (32, 1010, 1010, 1010, 1010);" + "INSERT INTO P3 VALUES (34, 1020, 1020, 1020, 1020);" + "INSERT INTO P3 VALUES (36, -1010, 1010, 1010, 1010);" + "INSERT INTO P3 VALUES (38, -1020, 1020, 1020, 1020);" + "INSERT INTO P3 VALUES (40, 3620, 5836, 10467, 31123);" + "INSERT INTO P3 VALUES (41, 3620, 5836, 10467, -28088);" + "INSERT INTO P3 VALUES (42, 3620, 5836, -29791, -8520);" + "INSERT INTO P3 VALUES (43, 3620, 5836, -29791, 24495);" + "INSERT INTO P3 VALUES (44, 3620, 4927, 18147, -27779);" + "INSERT INTO P3 VALUES (45, 3620, 4927, 18147, -30914);" + "INSERT INTO P3 VALUES (46, 3620, 4927, 8494, -30592);" + "INSERT INTO P3 VALUES (47, 3620, 4927, 8494, 20340);" + "INSERT INTO P3 VALUES (48, -670, 26179, -25323, -23185);" + "INSERT INTO P3 VALUES (49, -670, 26179, -25323, 22429);" + "INSERT INTO P3 VALUES (50, -670, 26179, -17828, 24248);" + "INSERT INTO P3 VALUES (51, -670, 26179, -17828, 4962);" + "INSERT INTO P3 VALUES (52, -670, -14477, -14488, 13599);" + "INSERT INTO P3 VALUES (53, -670, -14477, -14488, -14801);" + "INSERT INTO P3 VALUES (54, -670, -14477, 16827, -12008);" + "INSERT INTO P3 VALUES (55, -670, -14477, 16827, 27722);"; // Test Default String []sqls = sqlArray.split(";"); //* enable for debugging */ System.out.println(sqls); for (String sql: sqls) { sql = sql.trim(); vt = client.callProcedure("@AdHoc", sql).getResults()[0]; } vt = client.callProcedure("@AdHoc", "SELECT SUM(V_SUM_RENT), SUM(V_G2) FROM V_P3;").getResults()[0]; validateTableOfLongs(vt, new long[][] { {90814,-6200}}); vt = client.callProcedure("@AdHoc", "SELECT SUM(V_SUM_RENT) FROM V_P3 HAVING SUM(V_G2) < 42").getResults()[0]; validateTableOfLongs(vt, new long[][] { {90814}}); //* enable for debugging */ System.out.println(vt); } public void testVarcharByBytes() throws IOException, ProcCallException { System.out.println("STARTING testing varchar by BYTES ......"); Client client = getClient(); VoltTable vt = null; String var; var = "VO"; client.callProcedure("@AdHoc", "Insert into VarcharBYTES (id, var2) VALUES (0,'" + var + "')"); vt = client.callProcedure("@AdHoc", "select var2 from VarcharBYTES where id = 0").getResults()[0]; validateTableColumnOfScalarVarchar(vt, new String[] {var}); if (isHSQL()) 
return; var = "VOLT"; try { client.callProcedure("@AdHoc", "Insert into VarcharBYTES (id, var2) VALUES (1,'" + var + "')"); fail(); } catch(Exception ex) { assertTrue(ex.getMessage().contains( String.format("The size %d of the value '%s' exceeds the size of the VARCHAR(%d BYTES) column.", var.length(), var, 2))); } var = "贾鑫"; try { // assert here that this two-character string decodes via UTF8 to a bytebuffer longer than 2 bytes. assertEquals(2, var.length()); assertEquals(6, var.getBytes("UTF-8").length); client.callProcedure("@AdHoc", "Insert into VarcharBYTES (id, var2) VALUES (1,'" + var + "')"); fail(); } catch(Exception ex) { assertTrue(ex.getMessage().contains( String.format("The size %d of the value '%s' exceeds the size of the VARCHAR(%d BYTES) column.", 6, var, 2))); } var = "Voltdb is great | Voltdb is great " + "| Voltdb is great | Voltdb is great| Voltdb is great | Voltdb is great" + "| Voltdb is great | Voltdb is great| Voltdb is great | Voltdb is great"; try { client.callProcedure("VARCHARBYTES.insert", 2, null, var); fail(); } catch(Exception ex) { assertTrue(ex.getMessage().contains( String.format("The size %d of the value '%s...' exceeds the size of the VARCHAR(%d BYTES) column.", var.length(), var.substring(0, VARCHAR_VARBINARY_THRESHOLD), 80))); } var = var.substring(0, 70); client.callProcedure("VARCHARBYTES.insert", 2, null, var); vt = client.callProcedure("@AdHoc", "select var80 from VarcharBYTES where id = 2").getResults()[0]; validateTableColumnOfScalarVarchar(vt, new String[] {var}); } public void testVarcharByCharacter() throws IOException, ProcCallException { System.out.println("STARTING testing varchar by character ......"); Client client = getClient(); VoltTable vt = null; String var; var = "VO"; client.callProcedure("@AdHoc", "Insert into VarcharTB (id, var2) VALUES (0,'" + var + "')"); vt = client.callProcedure("@AdHoc", "select var2 from VarcharTB where id = 0").getResults()[0]; validateTableColumnOfScalarVarchar(vt, new String[] {var}); var = "V"; client.callProcedure("@AdHoc", "Insert into VarcharTB (id, var2) VALUES (1,'" + var + "')"); vt = client.callProcedure("@AdHoc", "select var2 from VarcharTB where id = 1").getResults()[0]; validateTableColumnOfScalarVarchar(vt, new String[] {var}); // It used to fail to insert if VARCHAR column is calculated by BYTEs.
var = ""; client.callProcedure("@AdHoc", "Insert into VarcharTB (id, var2) VALUES (2,'" + var + "')"); vt = client.callProcedure("@AdHoc", "select var2 from VarcharTB where id = 2").getResults()[0]; validateTableColumnOfScalarVarchar(vt, new String[] {var}); var = "VoltDB."; try { client.callProcedure("VARCHARTB.insert", 3, var, null); fail(); } catch(Exception ex) { System.err.println(ex.getMessage()); if (isHSQL()) { assertTrue(ex.getMessage().contains("HSQLDB Backend DML Error (data exception: string data, right truncation)")); } else { assertTrue(ex.getMessage().contains( String.format("The size %d of the value '%s' exceeds the size of the VARCHAR(%d) column.", var.length(), var, 2))); // var.length is 26; } } // insert into client.callProcedure("VARCHARTB.insert", 3, null, var); vt = client.callProcedure("@AdHoc", "select var80 from VarcharTB where id = 3").getResults()[0]; validateTableColumnOfScalarVarchar(vt, new String[] {var}); // Test threshold var += "PostgresIngresMike Stonebraker" + "VoltDB "; try { client.callProcedure("VARCHARTB.insert", 4, null, var); fail(); } catch(Exception ex) { System.err.println(ex.getMessage()); if (isHSQL()) { assertTrue(ex.getMessage().contains("HSQLDB Backend DML Error (data exception: string data, right truncation)")); } else { assertTrue(ex.getMessage().contains( String.format("The size %d of the value '%s...' exceeds the size of the VARCHAR(%d) column.", var.length(), var.substring(0, 100), 80))); } } } public void testENG5637_VarcharVarbinaryErrorMessage() throws IOException, ProcCallException { System.out.println("STARTING testing error message......"); if (isHSQL()) { return; } Client client = getClient(); // Test Varchar // Test AdHoc String var1 = "Voltdb is a great database product"; try { client.callProcedure("@AdHoc", "Insert into VARLENGTH (id, var1) VALUES (2,'" + var1 + "')"); fail(); } catch(Exception ex) { assertTrue(ex.getMessage().contains("Value ("+var1+") is too wide for a constant varchar value of size 10")); } try { client.callProcedure("@AdHoc", "Insert into VARLENGTH (id, var1) VALUES (2,'" + var1 + "' || 'abc')"); fail(); } catch(Exception ex) { //* enable for debugging */ System.out.println(ex.getMessage()); assertTrue(ex.getMessage().contains("Value ("+var1+"abc) is too wide for a constant varchar value of size 10")); } // Test inlined varchar with stored procedure try { client.callProcedure("VARLENGTH.insert", 1, var1, null, null, null); fail(); } catch(Exception ex) { //* enable for debugging */ System.out.println(ex.getMessage()); assertTrue(ex.getMessage().contains( String.format("The size %d of the value '%s' exceeds the size of the VARCHAR(%d) column.", var1.length(), var1, 10))); } // Test non-inlined varchar with stored procedure and threshold String var2 = "Voltdb is great | Voltdb is great " + "| Voltdb is great | Voltdb is great| Voltdb is great | Voltdb is great" + "| Voltdb is great | Voltdb is great| Voltdb is great | Voltdb is great"; try { client.callProcedure("VARLENGTH.insert", 2, null, var2, null, null); fail(); } catch(Exception ex) { //* enable for debugging */ System.out.println(ex.getMessage()); assertTrue(ex.getMessage().contains( String.format("The size %d of the value '%s...' 
exceeds the size of the VARCHAR(%d) column.", 174, var2.substring(0, VARCHAR_VARBINARY_THRESHOLD), 80))); } // Test non-inlined varchar with stored procedure var2 = "Voltdb is great | Voltdb is great " + "| Voltdb is great | Voltdb is great| Voltdb is great"; try { client.callProcedure("VARLENGTH.insert", 21, null, var2, null, null); fail(); } catch(Exception ex) { //* enable for debugging */ System.out.println(ex.getMessage()); assertTrue(ex.getMessage().contains( String.format("The size %d of the value '%s' exceeds the size of the VARCHAR(%d) column.", 86, var2, 80))); } // Test update client.callProcedure("VARLENGTH.insert", 1, "voltdb", null, null, null); try { client.callProcedure("VARLENGTH.update", 1, var1, null, null, null, 1); fail(); } catch(Exception ex) { //* enable for debugging */ System.out.println(ex.getMessage()); assertTrue(ex.getMessage().contains( String.format("The size %d of the value '%s' exceeds the size of the VARCHAR(%d) column.", var1.length(), var1, 10))); } // Test varbinary // Test AdHoc String bin1 = "1111111111111111111111000000"; try { client.callProcedure("@AdHoc", "Insert into VARLENGTH (id, bin1) VALUES (6,'" + bin1 + "')"); fail(); } catch(Exception ex) { //* enable for debugging */ System.out.println(ex.getMessage()); assertTrue(ex.getMessage().contains("Value ("+bin1+") is too wide for a constant varbinary value of size 10")); } // Test inlined varchar with stored procedure try { client.callProcedure("VARLENGTH.insert", 7, null, null, bin1, null); fail(); } catch(Exception ex) { //* enable for debugging */ System.out.println(ex.getMessage()); assertTrue(ex.getMessage().contains( String.format("The size %d of the value exceeds the size of the VARBINARY(%d) column.", bin1.length()/2, 10))); } // Test non-inlined varchar with stored procedure String bin2 = "111111111111111111111100000011111111111111111111110000001111111111111111111111000000" + "111111111111111111111100000011111111111111111111110000001111111111111111111111000000" + "111111111111111111111100000011111111111111111111110000001111111111111111111111000000"; try { client.callProcedure("VARLENGTH.insert", 2, null, null, null, bin2); fail(); } catch(Exception ex) { //* enable for debugging */ System.out.println(ex.getMessage()); assertTrue(ex.getMessage().contains( String.format("The size %d of the value exceeds the size of the VARBINARY(%d) column.", bin2.length() / 2, 80))); } // Test update client.callProcedure("VARLENGTH.insert", 7, null, null, "1010", null); try { client.callProcedure("VARLENGTH.update", 7, null, null, bin1, null, 7); fail(); } catch(Exception ex) { //* enable for debugging */ System.out.println(ex.getMessage()); assertTrue(ex.getMessage().contains( String.format("The size %d of the value exceeds the size of the VARBINARY(%d) column.", bin1.length()/2, 10))); } } // This is a regression test for ENG-6792 public void testInlineVarcharAggregation() throws IOException, ProcCallException { Client client = getClient(); ClientResponse cr; cr = client.callProcedure("VARCHARTB.insert", 1, "zz", "panda"); assertEquals(ClientResponse.SUCCESS, cr.getStatus()); cr = client.callProcedure("VARCHARTB.insert", 6, "a", "panda"); assertEquals(ClientResponse.SUCCESS, cr.getStatus()); cr = client.callProcedure("VARCHARTB.insert", 7, "mm", "panda"); assertEquals(ClientResponse.SUCCESS, cr.getStatus()); cr = client.callProcedure("VARCHARTB.insert", 8, "z", "orangutan"); assertEquals(ClientResponse.SUCCESS, cr.getStatus()); cr = client.callProcedure("VARCHARTB.insert", 9, "aa", "orangutan"); 
assertEquals(ClientResponse.SUCCESS, cr.getStatus()); cr = client.callProcedure("VARCHARTB.insert", 10, "n", "orangutan"); assertEquals(ClientResponse.SUCCESS, cr.getStatus()); cr = client.callProcedure("@AdHoc", "select max(var2), min(var2) from VarcharTB"); assertEquals(ClientResponse.SUCCESS, cr.getStatus()); VoltTable vt = cr.getResults()[0]; assertTrue(vt.advanceRow()); assertEquals("zz", vt.getString(0)); assertEquals("a", vt.getString(1)); // Hash aggregation may have the same problem, so let's // test it here as well. String sql = "select var80, max(var2) as maxvar2, min(var2) as minvar2 " + "from VarcharTB " + "group by var80 " + "order by maxvar2, minvar2"; cr = client.callProcedure("@AdHoc", sql); assertEquals(ClientResponse.SUCCESS, cr.getStatus()); vt = cr.getResults()[0]; assertTrue(vt.advanceRow()); // row 1: panda, zz, a // row 2: orangutan, z, aa assertEquals("orangutan", vt.getString(0)); assertEquals("z", vt.getString(1)); assertEquals("aa", vt.getString(2)); assertTrue(vt.advanceRow()); assertEquals("panda", vt.getString(0)); assertEquals("zz", vt.getString(1)); assertEquals("a", vt.getString(2)); cr = client.callProcedure("PWEE_WITH_INDEX.insert", 0, "MM", 88); assertEquals(ClientResponse.SUCCESS, cr.getStatus()); cr = client.callProcedure("PWEE_WITH_INDEX.insert", 1, "ZZ", 88); assertEquals(ClientResponse.SUCCESS, cr.getStatus()); cr = client.callProcedure("PWEE_WITH_INDEX.insert", 2, "AA", 88); assertEquals(ClientResponse.SUCCESS, cr.getStatus()); cr = client.callProcedure("PWEE_WITH_INDEX.insert", 3, "NN", 88); assertEquals(ClientResponse.SUCCESS, cr.getStatus()); cr = client.callProcedure("@AdHoc", "select num, max(wee), min(wee) " + "from pwee_with_index group by num order by num"); assertEquals(ClientResponse.SUCCESS, cr.getStatus()); vt = cr.getResults()[0]; assertTrue(vt.advanceRow()); assertEquals("ZZ", vt.getString(1)); assertEquals("AA", vt.getString(2)); } // Bug: parser drops extra predicates over certain numbers e.g. 10. public void testENG6870() throws IOException, ProcCallException { System.out.println("test ENG6870..."); Client client = this.getClient(); VoltTable vt; String sql; client.callProcedure("ENG6870.insert", 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, null, 1, 1); client.callProcedure("ENG6870.insert", 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1); client.callProcedure("ENG6870.insert", 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1); sql = "SELECT COUNT(*) FROM ENG6870 " + "WHERE C14 = 1 AND C1 IS NOT NULL AND C2 IS NOT NULL " + "AND C5 = 3 AND C7 IS NOT NULL AND C8 IS NOT NULL " + "AND C0 IS NOT NULL AND C10 IS NOT NULL " + "AND C11 IS NOT NULL AND C13 IS NOT NULL " + "AND C12 IS NOT NULL;"; vt = client.callProcedure("@AdHoc", sql).getResults()[0]; System.err.println(vt); validateTableOfScalarLongs(vt, new long[]{0}); } public void testInsertWithCast() throws Exception { Client client = getClient(); client.callProcedure("@AdHoc", "delete from p1"); // in ENG-5929, this would cause a null pointer exception, // because OperatorException.refineValueType was not robust to casts. String stmt = "insert into p1 (id, num) values (1, cast(1 + ? as integer))"; VoltTable vt = client.callProcedure("@AdHoc", stmt, 100).getResults()[0]; validateTableOfScalarLongs(vt, new long[] {1}); // This should even work when assigning the expression to the partitioning column: // Previously this would fail with a mispartitioned tuple error. stmt = "insert into p1 (id, num) values (cast(1 + ? 
as integer), 1)"; vt = client.callProcedure("@AdHoc", stmt, 100).getResults()[0]; validateTableOfScalarLongs(vt, new long[] {1}); stmt = "select id, num from p1 order by id"; vt = client.callProcedure("@AdHoc", stmt).getResults()[0]; validateTableOfLongs(vt, new long[][] {{1, 101}, {101, 1}}); } public void testENG6926() throws Exception { // Aggregation of a joined table was not ordered // according to ORDER BY clause when the OB column // was not first in the select list. Client client = getClient(); String insStmt = "insert into eng6926_ipuser(ip, countrycode, province) values (?, ?, ?)"; client.callProcedure("@AdHoc", insStmt, "23.101.135.101", "US", "District of Columbia"); client.callProcedure("@AdHoc", insStmt, "23.101.142.5", "US", "District of Columbia"); client.callProcedure("@AdHoc", insStmt, "23.101.143.89", "US", "District of Columbia"); client.callProcedure("@AdHoc", insStmt, "23.101.138.62", "US", "District of Columbia"); client.callProcedure("@AdHoc", insStmt, "69.67.23.26", "US", "Minnesota"); client.callProcedure("@AdHoc", insStmt, "198.179.137.202", "US", "Minnesota"); client.callProcedure("@AdHoc", insStmt, "23.99.35.61", "US", "Washington"); insStmt = "insert into eng6926_hits(ip, week) values (?, ?)"; client.callProcedure("@AdHoc", insStmt, "23.101.135.101", 20140914); client.callProcedure("@AdHoc", insStmt, "23.101.142.5", 20140914); client.callProcedure("@AdHoc", insStmt, "23.101.143.89", 20140914); client.callProcedure("@AdHoc", insStmt, "23.101.138.62", 20140914); client.callProcedure("@AdHoc", insStmt, "69.67.23.26", 20140914); client.callProcedure("@AdHoc", insStmt, "198.179.137.202", 20140914); client.callProcedure("@AdHoc", insStmt, "23.99.35.61", 20140914); String query = "select count(ip.ip), ip.province as state " + "from eng6926_hits as h, eng6926_ipuser as ip " + "where ip.ip=h.ip and ip.countrycode='US' " + "group by ip.province " + "order by count(ip.ip) desc"; VoltTable vt = client.callProcedure("@AdHoc", query).getResults()[0]; long[] col0Expected = new long[] {4, 2, 1}; String[] col1Expected = new String[] {"District of Columbia", "Minnesota", "Washington"}; int i = 0; while (vt.advanceRow()) { assertEquals(col0Expected[i], vt.getLong(0)); assertEquals(col1Expected[i], vt.getString(1)); ++i; } } public void testENG7041ViewAndExportTable() throws Exception { Client client = getClient(); // Materialized view wasn't being updated, because the // connection with its source table wasn't getting created // when there was a (completely unrelated) export table in the // database. // When loading the catalog in the EE, we were erroneously // aborting view processing when encountering an export table. client.callProcedure("TRANSACTION.insert", 1, 99, 100.0, "NH", "Manchester", new TimestampType(), 20); validateTableOfLongs(client, "select count(*) from transaction", new long[][] {{1}}); // The buggy behavior would show zero rows in the view. validateTableOfLongs(client, "select count(*) from acct_vendor_totals", new long[][] {{1}}); } public void testInnerJoinWithOverflow() throws Exception { // In this bug, ENG-7349, we would fail an erroneous assertion // in the EE that we must have more than one active index key when // joining with a multi-component index. 
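// The self-join below compares the TI1 column against the BI column of the single
// inserted row (1, 1, 1000), so the join should produce no rows.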
Client client = getClient(); VoltTable vt = client.callProcedure("SM_IDX_TBL.insert", 1, 1, 1000) .getResults()[0]; validateTableOfScalarLongs(vt, new long[] {1}); validateTableOfLongs(client, "select * " + "from sm_idx_tbl as t1 inner join sm_idx_tbl as t2 " + "on t1.ti1 = t2.bi", new long[][] {}); } private void insertForInParamsTests(Client client) throws Exception { for (int i = 0; i < 10; ++i) { VoltTable vt = client.callProcedure("P1.insert", i, Integer.toString(i), i * 10, i * 100.0) .getResults()[0]; validateTableOfScalarLongs(vt, new long[] {1}); } } // Note: the following tests for IN with parameters should at some point // be moved into their own suite along with existing tests for IN // that now live in TestIndexesSuite. This is ENG-7607. public void testInWithIntParams() throws Exception { // HSQL does not support WHERE f IN ? if (isHSQL()) return; Client client = getClient(); insertForInParamsTests(client); VoltTable vt = client.callProcedure("one_list_param", new int[] {1, 2}) .getResults()[0]; validateTableOfScalarLongs(vt, new long[] {1, 2}); // The following error message characterizes what happens if the // users passes long array to an IN parameter on an INTEGER column. // VoltDB requires that the data types match exactly here. // This error message isn't that friendly (ENG-7606). verifyProcFails(client, "tryScalarMakeCompatible: " + "Unable to match parameter array:int to provided long", "one_list_param", new long[] {1, 2}); // scalar param where list should be provided fails verifyProcFails(client, "Array / Scalar parameter mismatch", "one_list_param", 1); vt = client.callProcedure("one_scalar_param", 5) .getResults()[0]; validateTableOfScalarLongs(vt, new long[] {5}); // passing a list to a scalar int parameter fails verifyProcFails(client , "Array / Scalar parameter mismatch", "one_scalar_param", new long[] {1, 2}); } public void testInWithStringParams() throws Exception { if (isHSQL()) return; Client client = getClient(); insertForInParamsTests(client); String[] stringArgs = {"7", "8"}; // For vararg methods like callProcedure, when there is an array of objects // (not an array of native types) passed as the only vararg argument, the // compile-time type affects how the compiler presents the arguments to the // callee: // cast to Object - callProcedure sees just one param (which is an array) // cast to Object[] - (or a subclass of Object[]) callee sees each array // element as its own parameter value // where desc in ? // Cast parameter value as an object and it's treated as a single parameter in the callee. VoltTable vt = client.callProcedure("one_string_list_param", (Object)stringArgs) .getResults()[0]; validateTableOfScalarLongs(vt, new long[] {7, 8}); // where desc in ? // Casting the argument to object array means it's treated // as two arguments in the callee. verifyProcFails(client, "EXPECTS 1 PARAMS, BUT RECEIVED 2", "one_string_list_param", (Object[])stringArgs); // where desc in ? // scalar parameter fails verifyProcFails(client, "Array / Scalar parameter mismatch", "one_string_list_param", "scalar param"); // where desc in (?) // Caller treats this as a single list parameter. verifyProcFails(client, "Array / Scalar parameter mismatch", "one_string_scalar_param", (Object)stringArgs); // where desc in (?) // Cast to an array type makes caller treat this as two arguments. verifyProcFails(client, "EXPECTS 1 PARAMS, BUT RECEIVED 2", "one_string_scalar_param", (Object[])stringArgs); // where desc in (?) 
// This succeeds as it should vt = client.callProcedure("one_string_scalar_param", "9") .getResults()[0]; validateTableOfScalarLongs(vt, new long[] {9}); } public void testInWithStringParamsAdHoc() throws Exception { if (isHSQL()) return; Client client = getClient(); insertForInParamsTests(client); String[] stringArgs = {"7", "8"}; String adHocQueryWithListParam = "select id from P1 where desc in ?"; String adHocQueryWithScalarParam = "select id from P1 where desc in (?)"; VoltTable vt; verifyProcFails(client, "Array / Scalar parameter mismatch", "@AdHoc", adHocQueryWithListParam, stringArgs); // where desc in ? // scalar parameter fails verifyProcFails(client, "rhs of IN expression is of a non-list type varchar", "@AdHoc", adHocQueryWithListParam, "scalar param"); // where desc in (?) // Caller treats this as a single list parameter. verifyProcFails(client, "Array / Scalar parameter mismatch", "@AdHoc", adHocQueryWithScalarParam, stringArgs); // where desc in (?) // This succeeds as it should vt = client.callProcedure("@AdHoc", adHocQueryWithScalarParam, "9") .getResults()[0]; validateTableOfScalarLongs(vt, new long[] {9}); } static private final class SimpleCallback implements ProcedureCallback { private ClientResponse m_clientResponse = null; @Override public void clientCallback(ClientResponse clientResponse) throws Exception { m_clientResponse = clientResponse; } public ClientResponse getClientResponse() { return m_clientResponse; } } public void testInWithStringParamsAsync() throws Exception { if (isHSQL()) return; // There is nothing particularly special about asynchronous procedure calls // with IN and parameters. I wrote these test cases to try and // reproduce ENG-7354, which was closed as "not a bug." // There doesn't seem to be a lot of tests for async call error recovery, // so these tests are preserved here (hopefully they can find a better // home someday). Client client = getClient(); insertForInParamsTests(client); String[] stringArgs = {"7", "8"}; // Try with the async version of callProcedure. boolean b; SimpleCallback callback = new SimpleCallback(); b = client.callProcedure(callback, "one_string_scalar_param", (Object)stringArgs); // This is queued, but execution fails as it should. assertTrue(b); client.drain(); assertEquals(ClientResponse.GRACEFUL_FAILURE, callback.getClientResponse().getStatus()); assertTrue(callback.getClientResponse().getStatusString().contains( "Array / Scalar parameter mismatch")); b = client.callProcedure(callback, "one_string_scalar_param", (Object[])stringArgs); // This is queued, but execution fails as it should. assertTrue(b); client.drain(); assertEquals(ClientResponse.GRACEFUL_FAILURE, callback.getClientResponse().getStatus()); assertTrue(callback.getClientResponse().getStatusString().contains( "EXPECTS 1 PARAMS, BUT RECEIVED 2")); // This should succeed b = client.callProcedure(callback, "one_string_list_param", (Object)stringArgs); assertTrue(b); client.drain(); assertEquals(ClientResponse.SUCCESS, callback.getClientResponse().getStatus()); VoltTable vt = callback.getClientResponse().getResults()[0]; validateTableOfScalarLongs(vt, new long[] {7, 8}); // Try some ad hoc queries as well. String adHocQueryWithListParam = "select id from P1 where desc in ?"; String adHocQueryWithScalarParam = "select id from P1 where desc in (?)"; // Here's what happens with too many parameters b = client.callProcedure(callback, "one_string_scalar_param", "dog", "cat"); // This is queued, but execution fails as it should. 
assertTrue(b); client.drain(); assertEquals(ClientResponse.GRACEFUL_FAILURE, callback.getClientResponse().getStatus()); assertTrue(callback.getClientResponse().getStatusString().contains( "EXPECTS 1 PARAMS, BUT RECEIVED 2")); b = client.callProcedure(callback, "@AdHoc", adHocQueryWithScalarParam, stringArgs); assertTrue(b); client.drain(); assertEquals(ClientResponse.GRACEFUL_FAILURE, callback.getClientResponse().getStatus()); assertTrue(callback.getClientResponse().getStatusString().contains( "Array / Scalar parameter mismatch")); // This should succeed, but doesn't (ENG-7604 again) b = client.callProcedure(callback, "@AdHoc", adHocQueryWithListParam, stringArgs); assertTrue(b); client.drain(); assertEquals(ClientResponse.GRACEFUL_FAILURE, callback.getClientResponse().getStatus()); assertTrue(callback.getClientResponse().getStatusString().contains( "Array / Scalar parameter mismatch")); } public void testENG7724() throws Exception { Client client = getClient(); VoltTable vt = client.callProcedure("voltdbSelectProductChanges", 1, 1).getResults()[0]; assertEquals(13, vt.getColumnCount()); } private void runQueryGetDecimal(Client client, String sql, double value) throws Exception { VoltTable vt = client.callProcedure("@AdHoc", sql).getResults()[0]; assertTrue(vt.advanceRow()); assertEquals(value, vt.getDecimalAsBigDecimal(0).doubleValue(), 0.0001); } private void runQueryGetDouble(Client client, String sql, double value) throws Exception { VoltTable vt = client.callProcedure("@AdHoc", sql).getResults()[0]; assertTrue(vt.advanceRow()); assertEquals(value, vt.getDouble(0), 0.0001); } public void testENG7480() throws Exception { Client client = getClient(); String sql; sql = "insert into R1 Values(1, 'MA', 2, 2.2);"; client.callProcedure("@AdHoc", sql); // query constants interpreted as DECIMAL // operation between float and decimal sql = "SELECT 0.1 + (1-0.1) + ratio FROM R1"; runQueryGetDouble(client, sql, 3.2); sql = "SELECT 0.1 + (1-0.1) - ratio FROM R1"; runQueryGetDouble(client, sql, -1.2); sql = "SELECT 0.1 + (1-0.1) / ratio FROM R1"; runQueryGetDouble(client, sql, 0.509090909091); sql = "SELECT 0.1 + (1-0.1) * ratio FROM R1"; runQueryGetDouble(client, sql, 2.08); // reverse order sql = "SELECT 0.1 + ratio + (1-0.1) FROM R1"; runQueryGetDouble(client, sql, 3.2); sql = "SELECT 0.1 + ratio - (1-0.1) FROM R1"; runQueryGetDouble(client, sql, 1.4); sql = "SELECT 0.1 + ratio / (1-0.1) FROM R1"; runQueryGetDouble(client, sql, 2.544444444444); sql = "SELECT 0.1 + ratio * (1-0.1) FROM R1"; runQueryGetDouble(client, sql, 2.08); // operation between decimal and integer if (isHSQL()) { // not compatible with Hsql return; } sql = "SELECT 0.1 + (1-0.1) + NUM FROM R1"; runQueryGetDecimal(client, sql, 3.0); sql = "SELECT 0.1 + (1-0.1) - NUM FROM R1"; runQueryGetDecimal(client, sql, -1.0); sql = "SELECT 0.1 + (1-0.1) / NUM FROM R1"; runQueryGetDouble(client, sql, 0.55); sql = "SELECT 0.1 + (1-0.1) * NUM FROM R1"; runQueryGetDouble(client, sql, 1.9); // reverse order sql = "SELECT 0.1 + NUM + (1-0.1) FROM R1"; runQueryGetDouble(client, sql, 3.0); sql = "SELECT 0.1 + NUM - (1-0.1) FROM R1"; runQueryGetDouble(client, sql, 1.2); sql = "SELECT 0.1 + NUM / (1-0.1) FROM R1"; runQueryGetDouble(client, sql, 2.322222222222); sql = "SELECT 0.1 + NUM * (1-0.1) FROM R1"; runQueryGetDouble(client, sql, 1.9); } private void nullIndexSearchKeyChecker(Client client, String sql) throws Exception { VoltTable vt; vt = client.callProcedure("@AdHoc", sql, null).getResults()[0]; validateTableOfScalarLongs(vt, new long[]{}); } 
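    // Editorial sketch (not part of the original suite): the Object vs. Object[]
    // cast distinction exercised in testInWithStringParams above is plain Java
    // vararg behavior, independent of the VoltDB client. The hypothetical helpers
    // below only illustrate why the same String[] argument is seen as one
    // parameter in one case and as two parameters in the other.
    private static int countVarargs(Object... params) {
        return params.length;
    }

    @SuppressWarnings("unused")
    private static void demoVarargCounting() {
        String[] args = {"7", "8"};
        // Cast to Object: the whole array is wrapped as a single vararg element -> 1
        int asOneParam = countVarargs((Object) args);
        // Cast to Object[]: the array itself supplies the varargs, one per element -> 2
        int asTwoParams = countVarargs((Object[]) args);
        assert asOneParam == 1 && asTwoParams == 2;
    }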
public void testENG8120() throws Exception { // hsqldb does not handle null if (isHSQL()) { return; } Client client = getClient(); VoltTable vt; String sql; String[] tables = {"R1", "R3", "R4"}; for (String tb : tables) { sql = "insert into " + tb + " (id, num) Values(?, ?);"; client.callProcedure("@AdHoc", sql, 1, null); client.callProcedure("@AdHoc", sql, 2, null); client.callProcedure("@AdHoc", sql, 3, 3); client.callProcedure("@AdHoc", sql, 4, 4); sql = "select count(*) from " + tb; vt = client.callProcedure("@AdHoc", sql).getResults()[0]; validateTableOfScalarLongs(vt, new long[]{4}); // activate # of searchkey is 1 sql = "SELECT ID FROM " + tb + " B WHERE B.ID > ?;"; nullIndexSearchKeyChecker(client, sql); sql = "SELECT ID FROM " + tb + " B WHERE B.ID >= ?;"; nullIndexSearchKeyChecker(client, sql); sql = "SELECT ID FROM " + tb + " B WHERE B.ID = ?;"; nullIndexSearchKeyChecker(client, sql); sql = "SELECT ID FROM " + tb + " B WHERE B.ID < ?;"; nullIndexSearchKeyChecker(client, sql); sql = "SELECT ID FROM " + tb + " B WHERE B.ID <= ?;"; nullIndexSearchKeyChecker(client, sql); // activate # of searchkey is 2 sql = "SELECT ID FROM " + tb + " B WHERE B.ID = 3 and num > ?;"; nullIndexSearchKeyChecker(client, sql); sql = "SELECT ID FROM " + tb + " B WHERE B.ID = 3 and num >= ?;"; nullIndexSearchKeyChecker(client, sql); sql = "SELECT ID FROM " + tb + " B WHERE B.ID = 3 and num = ?;"; nullIndexSearchKeyChecker(client, sql); sql = "SELECT ID FROM " + tb + " B WHERE B.ID = 3 and num < ?;"; nullIndexSearchKeyChecker(client, sql); sql = "SELECT ID FROM " + tb + " B WHERE B.ID = 3 and num <= ?;"; nullIndexSearchKeyChecker(client, sql); // post predicate sql = "SELECT ID FROM " + tb + " B WHERE B.ID > ? and num > 1;"; nullIndexSearchKeyChecker(client, sql); sql = "SELECT ID FROM " + tb + " B WHERE B.ID = ? and num > 1;"; nullIndexSearchKeyChecker(client, sql); sql = "SELECT ID FROM " + tb + " B WHERE B.ID < ? and num > 1;"; nullIndexSearchKeyChecker(client, sql); // nest loop index join sql = "SELECT ID FROM R4 A, " + tb + " B WHERE B.ID = A.ID and B.num > ?;"; if (tb != "R4") { vt = client.callProcedure("@Explain", sql, null).getResults()[0]; assertTrue(vt.toString().contains("inline INDEX SCAN of \"" + tb)); assertTrue(vt.toString().contains("SEQUENTIAL SCAN of \"R4\"")); } nullIndexSearchKeyChecker(client, sql); sql = "SELECT ID FROM R4 A, " + tb + " B WHERE B.ID = A.ID and B.num >= ?;"; nullIndexSearchKeyChecker(client, sql); sql = "SELECT ID FROM R4 A, " + tb + " B WHERE B.ID = A.ID and B.num = ?;"; nullIndexSearchKeyChecker(client, sql); sql = "SELECT ID FROM R4 A, " + tb + " B WHERE B.ID = A.ID and B.num < ?;"; nullIndexSearchKeyChecker(client, sql); sql = "SELECT ID FROM R4 A, " + tb + " B WHERE B.ID = A.ID and B.num <= ?;"; nullIndexSearchKeyChecker(client, sql); } } // JUnit / RegressionSuite boilerplate public TestFixedSQLSuite(String name) { super(name); } static public junit.framework.Test suite() { VoltServerConfig config = null; MultiConfigSuiteBuilder builder = new MultiConfigSuiteBuilder(TestFixedSQLSuite.class); boolean success; VoltProjectBuilder project = new VoltProjectBuilder(); project.addSchema(Insert.class.getResource("fixed-sql-ddl.sql")); project.addProcedures(PROCEDURES); // Now that this fails to compile with an overflow error, it should be migrated to a // Failures suite. 
//project.addStmtProcedure("Crap", "insert into COUNT_NULL values (" + Long.MIN_VALUE + ", 1, 200)"); project.addStmtProcedure("Eng397Limit1", "Select P1.NUM from P1 order by P1.NUM limit ?;"); //project.addStmtProcedure("Eng490Select", "SELECT A.ASSET_ID, A.OBJECT_DETAIL_ID, OD.OBJECT_DETAIL_ID FROM ASSET A, OBJECT_DETAIL OD WHERE A.OBJECT_DETAIL_ID = OD.OBJECT_DETAIL_ID;"); project.addStmtProcedure("InsertNullString", "Insert into STRINGPART values (?, ?, ?);", "STRINGPART.NAME: 0"); project.addStmtProcedure("Eng993Insert", "insert into P1 (ID,DESC,NUM,RATIO) VALUES(1+?,'NULL',NULL,1+?);"); project.addStmtProcedure("Eng5926Insert", "insert into PWEE (ID,WEE,NUM,RATIO) VALUES(1+?,?||'WEE',NULL,1+?);"); project.addStmtProcedure("Eng1316Insert_R", "insert into R1 values (?, ?, ?, ?);"); project.addStmtProcedure("Eng1316Update_R", "update R1 set num = num + 1 where id < 104"); project.addStmtProcedure("Eng1316Insert_P", "insert into P1 values (?, ?, ?, ?);"); project.addStmtProcedure("Eng1316Update_P", "update P1 set num = num + 1 where id < 104"); project.addStmtProcedure("Eng1316Insert_P1", "insert into P1 values (?, ?, ?, ?);", "P1.ID: 0"); project.addStmtProcedure("Eng1316Update_P1", "update P1 set num = num + 1 where id = ?", "P1.ID: 0"); /*/ // CONFIG #1b: IPC -- keep this normally disabled with / * vs. // //* CONFIG #1: JNI -- keep this enabled by default with / / vs. / * config = new LocalCluster("fixedsql-threesite.jar", 3, 1, 0, BackendTarget.NATIVE_EE_JNI); success = config.compile(project); assertTrue(success); builder.addServerConfig(config); config = new LocalCluster("fixedsql-onesite.jar", 1, 1, 0, BackendTarget.NATIVE_EE_IPC); success = config.compile(project); assertTrue(success); builder.addServerConfig(config); // end of normally disabled section */ // CONFIG #2: HSQL config = new LocalCluster("fixedsql-hsql.jar", 1, 1, 0, BackendTarget.HSQLDB_BACKEND); success = config.compile(project); assertTrue(success); builder.addServerConfig(config); return builder; } }
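// Editorial sketch (not part of the test suite above): once the catalog built in
// suite() is deployed, the statement procedures registered there are invoked by
// name through the ordinary VoltDB client API, with each '?' placeholder bound
// positionally. The host name and this wrapper class are hypothetical.
class Eng397Limit1UsageSketch {
    static org.voltdb.VoltTable firstNums(String host, int limit) throws Exception {
        org.voltdb.client.Client client = org.voltdb.client.ClientFactory.createClient();
        client.createConnection(host);
        try {
            // "Eng397Limit1" was registered above as
            // "Select P1.NUM from P1 order by P1.NUM limit ?;"
            return client.callProcedure("Eng397Limit1", limit).getResults()[0];
        } finally {
            client.close();
        }
    }
}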
package ome.xml.meta; import io.scif.AbstractChecker; import io.scif.AbstractFormat; import io.scif.AbstractParser; import io.scif.AbstractTranslator; import io.scif.ByteArrayPlane; import io.scif.ByteArrayReader; import io.scif.DefaultImageMetadata; import io.scif.DependencyException; import io.scif.Format; import io.scif.FormatException; import io.scif.ImageMetadata; import io.scif.Plane; import io.scif.SCIFIO; import io.scif.Translator; import io.scif.formats.MinimalTIFFFormat; import io.scif.formats.TIFFFormat; import io.scif.formats.tiff.IFD; import io.scif.formats.tiff.IFDList; import io.scif.formats.tiff.PhotoInterp; import io.scif.formats.tiff.TiffIFDEntry; import io.scif.formats.tiff.TiffParser; import io.scif.formats.tiff.TiffSaver; import io.scif.io.Location; import io.scif.io.RandomAccessInputStream; import io.scif.io.RandomAccessOutputStream; import io.scif.services.ServiceException; import io.scif.util.FormatTools; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Enumeration; import java.util.HashMap; import java.util.HashSet; import java.util.Hashtable; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.UUID; import java.util.Vector; import net.imglib2.display.ColorTable; import net.imglib2.meta.Axes; import ome.xml.model.primitives.NonNegativeInteger; import ome.xml.model.primitives.PositiveInteger; import ome.xml.model.primitives.Timestamp; import ome.xml.services.OMEXMLMetadataService; import ome.xml.services.OMEXMLService; import org.scijava.Context; import org.scijava.Priority; import org.scijava.plugin.Attr; import org.scijava.plugin.Plugin; @Plugin(type = OMETIFFFormat.class, priority = TIFFFormat.PRIORITY + 1) public class OMETIFFFormat extends AbstractFormat { // -- Fields -- private static OMEXMLService service; private static OMEXMLMetadataService metaService; // -- Format API Methods -- /* * @see io.scif.Format#getFormatName() */ public String getFormatName() { return "OME-TIFF"; } /* * @see io.scif.Format#getSuffixes() */ public String[] getSuffixes() { return new String[] {"ome.tif", "ome.tiff"}; } // -- Nested Classes -- /** * @author Mark Hiner hinerm at gmail.com * */ public static class Metadata extends TIFFFormat.Metadata { // -- Constants -- public static final String CNAME = "ome.xml.meta.OMETIFFFormat$Metadata"; // -- Fields -- /** Mapping from series and plane numbers to files and IFD entries. */ protected OMETIFFPlane[][] info; // dimensioned [numSeries][numPlanes] private IFD firstIFD; private List<Integer> samples; private List<Boolean> adjustedSamples; /** List of used files. */ protected String[] used; // TODO maybe this should be an o mexmlmetadata... private OMEMetadata omeMeta; private int lastPlane = 0; private boolean hasSPW; private int[] tileWidth; private int[] tileHeight; // -- OMETIFF Metadata API methods -- /** * Returns a MetadataStore that is populated in such a way as to * produce valid OME-XML. The returned MetadataStore cannot be used * by an IFormatWriter, as it will not contain the required * BinData.BigEndian attributes. 
*/ public MetadataStore getMetadataStoreForDisplay() { MetadataStore store = omeMeta.getRoot(); if (service.isOMEXMLMetadata(store)) { service.removeBinData((OMEXMLMetadata) store); for (int i=0; i<getImageCount(); i++) { if (((OMEXMLMetadata) store).getTiffDataCount(i) == 0) { service.addMetadataOnly((OMEXMLMetadata) store, i); } } } return store; } /** * Returns a MetadataStore that is populated in such a way as to be * usable by an IFormatWriter. Any OME-XML generated from this * MetadataStore is <em>very unlikely</em> to be valid, as more than * likely both BinData and TiffData element will be present. */ public MetadataStore getMetadataStoreForConversion() { MetadataStore store = omeMeta.getRoot(); for (int i=0; i<getImageCount(); i++) { store.setPixelsBinDataBigEndian(new Boolean(!isLittleEndian(i)), i, 0); } return store; } // -- OMETIFFMetadata getters and setters -- public OMETIFFPlane[][] getInfo() { return info; } public void setInfo(OMETIFFPlane[][] info) { this.info = info; } public String[] getUsed() { return used; } public void setUsed(String[] used) { this.used = used; } public OMEMetadata getOmeMeta() { return omeMeta; } public void setOmeMeta(OMEMetadata omeMeta) { this.omeMeta = omeMeta; } public int getLastPlane() { return lastPlane; } public void setLastPlane(int lastPlane) { this.lastPlane = lastPlane; } public IFD getFirstIFD() { return firstIFD; } public void setFirstIFD(IFD firstIFD) { this.firstIFD = firstIFD; } public boolean isHasSPW() { return hasSPW; } public void setHasSPW(boolean hasSPW) { this.hasSPW = hasSPW; } public int[] getTileWidth() { return tileWidth; } public void setTileWidth(int[] tileWidth) { this.tileWidth = tileWidth; } public int[] getTileHeight() { return tileHeight; } public void setTileHeight(int[] tileHeight) { this.tileHeight = tileHeight; } // -- Metadata API Methods -- /* * @see io.scif.Metadata#populateImageMetadata() */ public void populateImageMetadata() { // populate core metadata OMEXMLMetadata omexmlMeta = getOmeMeta().getRoot(); for (int s=0; s<getImageCount(); s++) { ImageMetadata m = get(s); try { m.setAxisLength(Axes.X, omexmlMeta.getPixelsSizeX(s).getValue().intValue()); int tiffWidth = (int) firstIFD.getImageWidth(); if (m.getAxisLength(Axes.X) != tiffWidth && s == 0) { LOGGER.warn("SizeX mismatch: OME={}, TIFF={}", m.getAxisLength(Axes.X), tiffWidth); } m.setAxisLength(Axes.Y, omexmlMeta.getPixelsSizeY(s).getValue().intValue()); int tiffHeight = (int) firstIFD.getImageLength(); if (m.getAxisLength(Axes.Y) != tiffHeight && s == 0) { LOGGER.warn("SizeY mismatch: OME={}, TIFF={}", m.getAxisLength(Axes.Y), tiffHeight); } m.setAxisLength(Axes.Z, omexmlMeta.getPixelsSizeZ(s).getValue().intValue()); m.setAxisLength(Axes.CHANNEL, omexmlMeta.getPixelsSizeC(s).getValue().intValue()); m.setAxisLength(Axes.TIME, omexmlMeta.getPixelsSizeT(s).getValue().intValue()); m.setPixelType(FormatTools.pixelTypeFromString(omexmlMeta.getPixelsType(s).toString())); int tiffPixelType = firstIFD.getPixelType(); if (m.getPixelType() != tiffPixelType && (s == 0 || adjustedSamples.get(s))) { LOGGER.warn("PixelType mismatch: OME={}, TIFF={}", m.getPixelType(), tiffPixelType); m.setPixelType(tiffPixelType); } m.setBitsPerPixel(FormatTools.getBitsPerPixel(m.getPixelType())); m.setPlaneCount(info[s].length); String dimensionOrder = omexmlMeta.getPixelsDimensionOrder(s).toString(); // hackish workaround for files exported by OMERO that have an // incorrect dimension order String uuidFileName = ""; try { if (omexmlMeta.getTiffDataCount(s) > 0) { uuidFileName 
= omexmlMeta.getUUIDFileName(s, 0); } } catch (NullPointerException e) { } if (omexmlMeta.getChannelCount(s) > 0 && omexmlMeta.getChannelName(s, 0) == null && omexmlMeta.getTiffDataCount(s) > 0 && uuidFileName.indexOf("__omero_export") != -1) { dimensionOrder = "XYZCT"; } m.setAxisTypes(FormatTools.findDimensionList(dimensionOrder)); m.setOrderCertain(true); PhotoInterp photo = firstIFD.getPhotometricInterpretation(); m.setRGB(samples.get(s) > 1 || photo == PhotoInterp.RGB); if ((samples.get(s) != m.getAxisLength(Axes.CHANNEL) && (samples.get(s) % m.getAxisLength(Axes.CHANNEL)) != 0 && (m.getAxisLength(Axes.CHANNEL) % samples.get(s)) != 0) || m.getAxisLength(Axes.CHANNEL) == 1 || adjustedSamples.get(s)) { m.setAxisLength(Axes.CHANNEL, m.getAxisLength(Axes.CHANNEL) * samples.get(s)); } if (m.getAxisLength(Axes.Z) * m.getAxisLength(Axes.TIME) * m.getAxisLength(Axes.CHANNEL) > m.getPlaneCount() && !m.isRGB()) { if (m.getAxisLength(Axes.Z) == m.getPlaneCount()) { m.setAxisLength(Axes.TIME, 1); m.setAxisLength(Axes.CHANNEL, 1); } else if (m.getAxisLength(Axes.TIME) == m.getPlaneCount()) { m.setAxisLength(Axes.Z, 1); m.setAxisLength(Axes.CHANNEL, 1); } else if (m.getAxisLength(Axes.CHANNEL) == m.getPlaneCount()) { m.setAxisLength(Axes.TIME, 1); m.setAxisLength(Axes.Z, 1); } } if (omexmlMeta.getPixelsBinDataCount(s) > 1) { LOGGER.warn("OME-TIFF Pixels element contains BinData elements! " + "Ignoring."); } m.setLittleEndian(firstIFD.isLittleEndian()); m.setInterleaved(false); m.setIndexed(photo == PhotoInterp.RGB_PALETTE && firstIFD.getIFDValue(IFD.COLOR_MAP) != null); if (m.isIndexed()) { m.setRGB(false); } m.setFalseColor(true); m.setMetadataComplete(true); } catch (NullPointerException exc) { LOGGER.error("Incomplete Pixels metadata", exc); } catch (FormatException exc) { LOGGER.error("Format exception when creating ImageMetadata", exc); } } // if (getImageCount() == 1) { // CoreMetadata ms0 = core.get(0); // ms0.sizeZ = 1; // if (!ms0.rgb) { // ms0.sizeC = 1; // ms0.sizeT = 1; // metaService.populatePixels(getOmeMeta().getRoot(), this, false, false); getOmeMeta().setRoot((OMEXMLMetadata) getMetadataStoreForConversion()); } @Override public void close(boolean fileOnly) throws IOException { super.close(fileOnly); if (info != null) { for (OMETIFFPlane[] dimension : info) { for (OMETIFFPlane plane : dimension) { if (plane.reader != null) { try { plane.reader.close(); } catch (Exception e) { LOGGER.error("Plane closure failure!", e); } } } } } if (!fileOnly) { info = null; lastPlane = 0; tileWidth = null; tileHeight = null; } } // -- HasColorTable API Methods -- public ColorTable getColorTable(int imageIndex, int planeIndex) { if (info[imageIndex][lastPlane] == null || info[imageIndex][lastPlane].reader == null || info[imageIndex][lastPlane].id == null) { return null; } try { info[imageIndex][lastPlane].reader.setSource(info[imageIndex][lastPlane].id); return info[imageIndex][lastPlane].reader.getMetadata().getColorTable(imageIndex, planeIndex); } catch (IOException e) { LOGGER.error("IOException when trying to read color table", e); return null; } } } /** * @author Mark Hiner hinerm at gmail.com * */ public static class Checker extends AbstractChecker { // -- Constructor -- public Checker() { suffixNecessary = false; suffixSufficient = false; } // -- Checker API Methods -- @Override public boolean isFormat(RandomAccessInputStream stream) throws IOException { TiffParser tp = new TiffParser(getContext(), stream); tp.setDoCaching(false); boolean validHeader = tp.isValidHeader(); if (!validHeader) 
return false; // look for OME-XML in first IFD's comment IFD ifd = tp.getFirstIFD(); if (ifd == null) return false; Object description = ifd.get(IFD.IMAGE_DESCRIPTION); if (description == null) { return false; } String comment = null; if (description instanceof TiffIFDEntry) { comment = tp.getIFDValue((TiffIFDEntry) description).toString(); } else if (description instanceof String) { comment = (String) description; } if (comment == null || comment.trim().length() == 0) return false; comment = comment.trim(); // do a basic sanity check before attempting to parse the comment as XML // the parsing step is a bit slow, so there is no sense in trying unless // we are reasonably sure that the comment contains XML if (!comment.startsWith("<") || !comment.endsWith(">")) { return false; } try { if (service == null) setupServices(getContext()); IMetadata meta = service.createOMEXMLMetadata(comment); for (int i=0; i<meta.getImageCount(); i++) { meta.setPixelsBinDataBigEndian(Boolean.TRUE, i, 0); metaService.verifyMinimumPopulated(meta, i); } return meta.getImageCount() > 0; } catch (ServiceException se) { LOGGER.debug("OME-XML parsing failed", se); } catch (NullPointerException e) { LOGGER.debug("OME-XML parsing failed", e); } catch (FormatException e) { LOGGER.debug("OME-XML parsing failed", e); } catch (IndexOutOfBoundsException e) { LOGGER.debug("OME-XML parsing failed", e); } return false; } } /** * @author Mark Hiner hinerm at gmail.com * */ public static class Parser extends AbstractParser<Metadata> { // -- Parser API Methods -- @Override public String[] getImageUsedFiles(int imageIndex, boolean noPixels) { FormatTools.assertId(currentId, true, 1); if (noPixels) return null; Vector<String> usedFiles = new Vector<String>(); for (int i=0; i<metadata.info[imageIndex].length; i++) { if (!usedFiles.contains(metadata.info[imageIndex][i].id)) { usedFiles.add(metadata.info[imageIndex][i].id); } } return usedFiles.toArray(new String[usedFiles.size()]); } @Override public Metadata parse(String fileName, Metadata meta) throws IOException, FormatException { return super.parse(normalizeFilename(null, fileName), meta); } @Override public Metadata parse(File file, Metadata meta) throws IOException, FormatException { return super.parse(normalizeFilename(null, file.getPath()), meta); } @Override public int fileGroupOption(String id) throws FormatException, IOException { boolean single = isSingleFile(id); return single ? 
FormatTools.CAN_GROUP : FormatTools.MUST_GROUP; } @Override public Metadata parse(RandomAccessInputStream stream, Metadata meta) throws IOException, FormatException { super.parse(stream, meta); for (int s=0; s<meta.getImageCount(); s++) { OMETIFFPlane[][] info = meta.getInfo(); try { if (!info[s][0].reader.getFormat().createChecker().isFormat(info[s][0].id)) { info[s][0].id = meta.getSource().getFileName(); } for (int plane=0; plane<info[s].length; plane++) { if (!info[s][plane].reader.getFormat().createChecker().isFormat(info[s][plane].id)) { info[s][plane].id = info[s][0].id; } } info[s][0].reader.setSource(info[s][0].id); meta.getTileWidth()[s] = info[s][0].reader.getOptimalTileWidth(s); meta.getTileHeight()[s] = info[s][0].reader.getOptimalTileHeight(s); } catch (FormatException e) { LOGGER.debug("OME-XML parsing failed", e); } } return meta; } // -- Groupable API Methods -- @Override public boolean isSingleFile(String id) throws FormatException, IOException { return OMETIFFFormat.isSingleFile(getContext(), id); } // -- Abstract Parser API Methods -- @Override protected void typedParse(io.scif.io.RandomAccessInputStream stream, Metadata meta) throws IOException, io.scif.FormatException { // normalize file name String id = stream.getFileName(); String dir = new File(id).getParent(); // parse and populate OME-XML metadata String fileName = new Location(getContext(), id).getAbsoluteFile().getAbsolutePath(); if (!new File(fileName).exists()) { fileName = currentId; } RandomAccessInputStream ras = new RandomAccessInputStream(getContext(), fileName); String xml; IFD firstIFD; try { TiffParser tp = new TiffParser(getContext(), ras); firstIFD = tp.getFirstIFD(); xml = firstIFD.getComment(); } finally { ras.close(); } meta.setFirstIFD(firstIFD); if (service == null) setupServices(getContext()); OMEXMLMetadata omexmlMeta; try { omexmlMeta = service.createOMEXMLMetadata(xml); } catch (ServiceException se) { throw new FormatException(se); } meta.setHasSPW(omexmlMeta.getPlateCount() > 0); for (int i=0; i<meta.getImageCount(); i++) { int sizeC = omexmlMeta.getPixelsSizeC(i).getValue().intValue(); service.removeChannels(omexmlMeta, i, sizeC); } Hashtable<String, Object> originalMetadata = service.getOriginalMetadata(omexmlMeta); if (originalMetadata != null) meta.getTable().putAll(originalMetadata); LOGGER.trace(xml); if (omexmlMeta.getRoot() == null) { throw new FormatException("Could not parse OME-XML from TIFF comment"); } meta.setOmeMeta(new OMEMetadata(getContext(), omexmlMeta)); String[] acquiredDates = new String[meta.getImageCount()]; for (int i=0; i<acquiredDates.length; i++) { Timestamp acquisitionDate = omexmlMeta.getImageAcquisitionDate(i); if (acquisitionDate != null) { acquiredDates[i] = acquisitionDate.getValue(); } } String currentUUID = omexmlMeta.getUUID(); // determine series count from Image and Pixels elements int imageCount = omexmlMeta.getImageCount(); meta.createImageMetadata(imageCount); OMETIFFPlane[][] info = new OMETIFFPlane[imageCount][]; meta.setInfo(info); int[] tileWidth = new int[imageCount]; int[] tileHeight = new int[imageCount]; meta.setTileWidth(tileWidth); meta.setTileHeight(tileHeight); // compile list of file/UUID mappings Hashtable<String, String> files = new Hashtable<String, String>(); boolean needSearch = false; for (int i=0; i<imageCount; i++) { int tiffDataCount = omexmlMeta.getTiffDataCount(i); for (int td=0; td<tiffDataCount; td++) { String uuid = null; try { uuid = omexmlMeta.getUUIDValue(i, td); } catch (NullPointerException e) { } String filename = 
null; if (uuid == null) { // no UUID means that TiffData element refers to this file uuid = ""; filename = id; } else { filename = omexmlMeta.getUUIDFileName(i, td); if (!new Location(getContext(), dir, filename).exists()) filename = null; if (filename == null) { if (uuid.equals(currentUUID) || currentUUID == null) { // UUID references this file filename = id; } else { // will need to search for this UUID filename = ""; needSearch = true; } } else filename = normalizeFilename(dir, filename); } String existing = files.get(uuid); if (existing == null) files.put(uuid, filename); else if (!existing.equals(filename)) { throw new FormatException("Inconsistent UUID filenames"); } } } // search for missing filenames if (needSearch) { Enumeration<String> en = files.keys(); while (en.hasMoreElements()) { String uuid = (String) en.nextElement(); String filename = files.get(uuid); if (filename.equals("")) { // TODO search... // should scan only other .ome.tif files // to make this work with OME server may be a little tricky? throw new FormatException("Unmatched UUID: " + uuid); } } } // build list of used files Enumeration<String> en = files.keys(); int numUUIDs = files.size(); HashSet<String> fileSet = new HashSet<String>(); // ensure no duplicate filenames for (int i=0; i<numUUIDs; i++) { String uuid = (String) en.nextElement(); String filename = files.get(uuid); fileSet.add(filename); } String[] used = new String[fileSet.size()]; meta.setUsed(used); Iterator<String> iter = fileSet.iterator(); for (int i=0; i<used.length; i++) used[i] = (String) iter.next(); // process TiffData elements Hashtable<String, MinimalTIFFFormat.Reader<?>> readers = new Hashtable<String, MinimalTIFFFormat.Reader<?>>(); List<Boolean> adjustedSamples = new ArrayList<Boolean>(); List<Integer> samples = new ArrayList<Integer>(); meta.adjustedSamples = adjustedSamples; meta.samples = samples; for (int i=0; i<imageCount; i++) { int s = i; LOGGER.debug("Image[{}] {", i); LOGGER.debug(" id = {}", omexmlMeta.getImageID(i)); adjustedSamples.add(false); String order = omexmlMeta.getPixelsDimensionOrder(i).toString(); PositiveInteger samplesPerPixel = null; if (omexmlMeta.getChannelCount(i) > 0) { samplesPerPixel = omexmlMeta.getChannelSamplesPerPixel(i, 0); } samples.add(i, samplesPerPixel == null ? 
-1 : samplesPerPixel.getValue()); int tiffSamples = firstIFD.getSamplesPerPixel(); if (adjustedSamples.get(i) || (samples.get(i) != tiffSamples && (i == 0 || samples.get(i) < 0))) { LOGGER.warn("SamplesPerPixel mismatch: OME={}, TIFF={}", samples.get(i), tiffSamples); samples.set(i, tiffSamples); adjustedSamples.set(i, true); } else { adjustedSamples.set(i, false); } if (adjustedSamples.get(i) && omexmlMeta.getChannelCount(i) <= 1) { adjustedSamples.set(i, false); } int effSizeC = omexmlMeta.getPixelsSizeC(i).getValue().intValue(); if (!adjustedSamples.get(i)) { effSizeC /= samples.get(i); } if (effSizeC == 0) effSizeC = 1; if (effSizeC * samples.get(i) != omexmlMeta.getPixelsSizeC(i).getValue().intValue()) { effSizeC = omexmlMeta.getPixelsSizeC(i).getValue().intValue(); } int sizeT = omexmlMeta.getPixelsSizeT(i).getValue().intValue(); int sizeZ = omexmlMeta.getPixelsSizeZ(i).getValue().intValue(); int num = effSizeC * sizeT * sizeZ; OMETIFFPlane[] planes = new OMETIFFPlane[num]; for (int no=0; no<num; no++) planes[no] = new OMETIFFPlane(); int tiffDataCount = omexmlMeta.getTiffDataCount(i); Boolean zOneIndexed = null; Boolean cOneIndexed = null; Boolean tOneIndexed = null; // pre-scan TiffData indices to see if any of them are indexed from 1 for (int td=0; td<tiffDataCount; td++) { NonNegativeInteger firstC = omexmlMeta.getTiffDataFirstC(i, td); NonNegativeInteger firstT = omexmlMeta.getTiffDataFirstT(i, td); NonNegativeInteger firstZ = omexmlMeta.getTiffDataFirstZ(i, td); int c = firstC == null ? 0 : firstC.getValue(); int t = firstT == null ? 0 : firstT.getValue(); int z = firstZ == null ? 0 : firstZ.getValue(); if (c >= effSizeC && cOneIndexed == null) { cOneIndexed = true; } else if (c == 0) { cOneIndexed = false; } if (z >= sizeZ && zOneIndexed == null) { zOneIndexed = true; } else if (z == 0) { zOneIndexed = false; } if (t >= sizeT && tOneIndexed == null) { tOneIndexed = true; } else if (t == 0) { tOneIndexed = false; } } for (int td=0; td<tiffDataCount; td++) { LOGGER.debug(" TiffData[{}] {", td); // extract TiffData parameters String filename = null; String uuid = null; try { filename = omexmlMeta.getUUIDFileName(i, td); } catch (NullPointerException e) { LOGGER.debug("Ignoring null UUID object when retrieving filename."); } try { uuid = omexmlMeta.getUUIDValue(i, td); } catch (NullPointerException e) { LOGGER.debug("Ignoring null UUID object when retrieving value."); } NonNegativeInteger tdIFD = omexmlMeta.getTiffDataIFD(i, td); int ifd = tdIFD == null ? 0 : tdIFD.getValue(); NonNegativeInteger numPlanes = omexmlMeta.getTiffDataPlaneCount(i, td); NonNegativeInteger firstC = omexmlMeta.getTiffDataFirstC(i, td); NonNegativeInteger firstT = omexmlMeta.getTiffDataFirstT(i, td); NonNegativeInteger firstZ = omexmlMeta.getTiffDataFirstZ(i, td); int c = firstC == null ? 0 : firstC.getValue(); int t = firstT == null ? 0 : firstT.getValue(); int z = firstZ == null ? 0 : firstZ.getValue(); // NB: some writers index FirstC, FirstZ and FirstT from 1 if (cOneIndexed != null && cOneIndexed) c--; if (zOneIndexed != null && zOneIndexed) z--; if (tOneIndexed != null && tOneIndexed) t--; if (z >= sizeZ || c >= effSizeC || t >= sizeT) { LOGGER.warn("Found invalid TiffData: Z={}, C={}, T={}", new Object[] {z, c, t}); break; } int index = FormatTools.getIndex(order, sizeZ, effSizeC, sizeT, num, z, c, t); int count = numPlanes == null ?
1 : numPlanes.getValue(); if (count == 0) { meta.get(s); break; } // get reader object for this filename if (filename == null) { if (uuid == null) filename = id; else filename = files.get(uuid); } else filename = normalizeFilename(dir, filename); MinimalTIFFFormat.Reader<?> r = readers.get(filename); if (r == null) { r = getReader(scifio(), MinimalTIFFFormat.class); readers.put(filename, r); } Location file = new Location(getContext(), filename); if (!file.exists()) { // if this is an absolute file name, try using a relative name // old versions of OMETiffWriter wrote an absolute path to // UUID.FileName, which causes problems if the file is moved to // a different directory filename = filename.substring(filename.lastIndexOf(File.separator) + 1); filename = dir + File.separator + filename; if (!new Location(getContext(), filename).exists()) { filename = currentId; } } // populate plane index -> IFD mapping for (int q=0; q<count; q++) { int no = index + q; planes[no].reader = r; planes[no].id = filename; planes[no].ifd = ifd + q; planes[no].certain = true; LOGGER.debug(" Plane[{}]: file={}, IFD={}", new Object[] {no, planes[no].id, planes[no].ifd}); } if (numPlanes == null) { // unknown number of planes; fill down for (int no=index+1; no<num; no++) { if (planes[no].certain) break; planes[no].reader = r; planes[no].id = filename; planes[no].ifd = planes[no - 1].ifd + 1; LOGGER.debug(" Plane[{}]: FILLED", no); } } else { // known number of planes; clear anything subsequently filled for (int no=index+count; no<num; no++) { if (planes[no].certain) break; planes[no].reader = null; planes[no].id = null; planes[no].ifd = -1; LOGGER.debug(" Plane[{}]: CLEARED", no); } } LOGGER.debug(" }"); } if (meta.get(s) == null) continue; // verify that all planes are available LOGGER.debug("    --------------------------------"); for (int no=0; no<num; no++) { LOGGER.debug(" Plane[{}]: file={}, IFD={}", new Object[] {no, planes[no].id, planes[no].ifd}); if (planes[no].reader == null) { LOGGER.warn("Image ID '{}': missing plane #{}. " + "Using TiffReader to determine the number of planes.", omexmlMeta.getImageID(i), no); TIFFFormat.Reader<?> r = getReader(scifio(), TIFFFormat.class); r.setSource(currentId); try { planes = new OMETIFFPlane[r.getImageCount()]; for (int plane=0; plane<planes.length; plane++) { planes[plane] = new OMETIFFPlane(); planes[plane].id = currentId; planes[plane].reader = r; planes[plane].ifd = plane; } num = planes.length; } finally { r.close(); } } } info[i] = planes; LOGGER.debug(" }"); } // remove null CoreMetadata entries Vector<OMETIFFPlane[]> planeInfo = new Vector<OMETIFFPlane[]>(); for (int i=meta.getImageCount() - 1; i>=0; i--) { if (meta.get(i) == null) { meta.getAll().remove(i); adjustedSamples.remove(i); samples.remove(i); } else { planeInfo.add(0, info[i]); } } info = planeInfo.toArray(new OMETIFFPlane[0][0]); // meta.getOmeMeta().populateImageMetadata(); } // -- Helper methods -- private String normalizeFilename(String dir, String name) { File file = new File(dir, name); if (file.exists()) return file.getAbsolutePath(); return name; } } /** * @author Mark Hiner hinerm at gmail.com * */ public static class Reader extends ByteArrayReader<Metadata> { // -- Fields -- // -- Constructor -- public Reader() { domains = FormatTools.NON_GRAPHICS_DOMAINS; hasCompanionFiles = true; } // -- Reader API Methods -- @Override public int getOptimalTileWidth(int imageIndex) { FormatTools.assertId(currentId, true, 1); return getMetadata().getTileWidth()[imageIndex]; } @Override public int getOptimalTileHeight(int imageIndex) {
FormatTools.assertId(currentId, true, 1); return getMetadata().getTileHeight()[imageIndex]; } @Override public String[] getDomains() { FormatTools.assertId(currentId, true, 1); return getMetadata().isHasSPW() ? new String[] {FormatTools.HCS_DOMAIN} : FormatTools.NON_SPECIAL_DOMAINS; } /* * @see io.scif.TypedReader#openPlane(int, int, io.scif.DataPlane, int, int, int, int) */ public ByteArrayPlane openPlane(int imageIndex, int planeIndex, ByteArrayPlane plane, int x, int y, int w, int h) throws io.scif.FormatException, IOException { Metadata meta = getMetadata(); byte[] buf = plane.getBytes(); OMETIFFPlane[][] info = meta.getInfo(); FormatTools.checkPlaneParameters(this, imageIndex, planeIndex, buf.length, x, y, w, h); meta.setLastPlane(planeIndex); int i = info[imageIndex][planeIndex].ifd; MinimalTIFFFormat.Reader<?> r = (MinimalTIFFFormat.Reader<?>) info[imageIndex][planeIndex].reader; if (r.getCurrentFile() == null) { r.setSource(info[imageIndex][planeIndex].id); } IFDList ifdList = r.getMetadata().getIfds(); if (i >= ifdList.size()) { LOGGER.warn("Error untangling IFDs; the OME-TIFF file may be malformed."); return plane; } IFD ifd = ifdList.get(i); RandomAccessInputStream s = new RandomAccessInputStream(getContext(), info[imageIndex][planeIndex].id); TiffParser p = new TiffParser(getContext(), s); p.getSamples(ifd, buf, x, y, w, h); s.close(); return plane; } // -- Groupable API Methods -- public boolean isSingleFile(String id) throws FormatException, IOException { return OMETIFFFormat.isSingleFile(getContext(), id); } } /** * @author Mark Hiner hinerm at gmail.com * */ public static class Writer extends TIFFFormat.Writer<Metadata> { // -- Constants -- private static final String WARNING_COMMENT = "<!-- Warning: this comment is an OME-XML metadata block, which " + "contains crucial dimensional parameters and other important metadata. " + "Please edit cautiously (if at all), and back up the original data " + "before doing so. For more information, see the OME-TIFF web site: " + FormatTools.URL_OME_TIFF + ". -->"; // -- Fields -- private List<Integer> imageMap; private String[][] imageLocations; private OMEXMLMetadata omeMeta; private OMEXMLService service; private Map<String, Integer> ifdCounts = new HashMap<String, Integer>(); private Map<String, String> uuids = new HashMap<String, String>(); // -- Writer API Methods -- /* @see IFormatHandler#setId(String) */ public void setDest(RandomAccessOutputStream out, int imageIndex) throws FormatException, IOException { //TODO if already set, return super.setDest(out, imageIndex); if (imageLocations == null) { MetadataRetrieve r = getMetadata().getOmeMeta().getRoot(); imageLocations = new String[r.getImageCount()][]; for (int i=0; i<imageLocations.length; i++) { imageLocations[i] = new String[planeCount(imageIndex)]; } } } /* * @see io.scif.Writer#savePlane(int, int, io.scif.Plane, int, int, int, int) */ public void savePlane(final int imageIndex, final int planeIndex, final Plane plane, final int x, final int y, final int w, final int h) throws FormatException, IOException { savePlane(imageIndex, planeIndex, plane, null, x, y, w, h); } public void savePlane(int imageIndex, int planeIndex, Plane plane, IFD ifd, int x, int y, int w, int h) throws io.scif.FormatException, IOException { if (imageMap == null) imageMap = new ArrayList<Integer>(); if (!imageMap.contains(imageIndex)) { imageMap.add(new Integer(imageIndex)); } super.savePlane(imageIndex, planeIndex, plane, ifd, x, y, w, h); // TODO should this be the output id?
imageLocations[imageIndex][planeIndex] = getMetadata().getDatasetName(); } /* @see loci.formats.IFormatHandler#close() */ public void close() throws IOException { try { if (this.out != null) { setupServiceAndMetadata(); // remove any BinData elements from the OME-XML service.removeBinData(omeMeta); for (int series=0; series<omeMeta.getImageCount(); series++) { populateImage(omeMeta, series); } List<String> files = new ArrayList<String>(); for (String[] s : imageLocations) { for (String f : s) { if (!files.contains(f) && f != null) { files.add(f); String xml = getOMEXML(f); // write OME-XML to the first IFD's comment saveComment(f, xml); } } } } } catch (DependencyException de) { throw new RuntimeException(de); } catch (ServiceException se) { throw new RuntimeException(se); } catch (FormatException fe) { throw new RuntimeException(fe); } catch (IllegalArgumentException iae) { throw new RuntimeException(iae); } finally { super.close(); boolean canReallyClose = omeMeta == null || ifdCounts.size() == omeMeta.getImageCount(); if (omeMeta != null && canReallyClose) { int omePlaneCount = 0; for (int i=0; i<omeMeta.getImageCount(); i++) { int sizeZ = omeMeta.getPixelsSizeZ(i).getValue(); int sizeC = omeMeta.getPixelsSizeC(i).getValue(); int sizeT = omeMeta.getPixelsSizeT(i).getValue(); omePlaneCount += sizeZ * sizeC * sizeT; } int ifdCount = 0; for (String key : ifdCounts.keySet()) { ifdCount += ifdCounts.get(key); } canReallyClose = omePlaneCount == ifdCount; } if (canReallyClose) { imageMap = null; imageLocations = null; omeMeta = null; service = null; ifdCounts.clear(); } else { for(String k : ifdCounts.keySet()) ifdCounts.put(k, 0); } } } // -- Helper methods -- /** Gets the UUID corresponding to the given filename. */ private String getUUID(String filename) { String uuid = uuids.get(filename); if (uuid == null) { uuid = UUID.randomUUID().toString(); uuids.put(filename, uuid); } return uuid; } private void setupServiceAndMetadata() throws DependencyException, ServiceException { // extract OME-XML string from metadata object MetadataRetrieve retrieve = getMetadata().getOmeMeta().getRoot(); service = getContext().getService(OMEXMLService.class); OMEXMLMetadata originalOMEMeta = service.getOMEMetadata(retrieve); originalOMEMeta.resolveReferences(); String omexml = service.getOMEXML(originalOMEMeta); omeMeta = service.createOMEXMLMetadata(omexml); } private String getOMEXML(String file) throws FormatException, IOException { // generate UUID and add to OME element String uuid = "urn:uuid:" + getUUID(new Location(getContext(), file).getName()); omeMeta.setUUID(uuid); String xml; try { xml = service.getOMEXML(omeMeta); } catch (ServiceException se) { throw new FormatException(se); } // insert warning comment String prefix = xml.substring(0, xml.indexOf(">") + 1); String suffix = xml.substring(xml.indexOf(">") + 1); return prefix + WARNING_COMMENT + suffix; } private void saveComment(String file, String xml) throws IOException { if (out != null) out.close(); out = new RandomAccessOutputStream(getContext(), file); RandomAccessInputStream in = null; try { TiffSaver saver = new TiffSaver(getContext(), out, file); saver.setBigTiff(isBigTiff); in = new RandomAccessInputStream(getContext(), file); saver.overwriteLastIFDOffset(in); saver.overwriteComment(in, xml); in.close(); } catch (FormatException exc) { IOException io = new IOException("Unable to append OME-XML comment"); io.initCause(exc); throw io; } finally { if (out != null) out.close(); if (in != null) in.close(); } } private void 
populateTiffData(OMEXMLMetadata omeMeta, int[] zct, int ifd, int series, int plane) { omeMeta.setTiffDataFirstZ(new NonNegativeInteger(zct[0]), series, plane); omeMeta.setTiffDataFirstC(new NonNegativeInteger(zct[1]), series, plane); omeMeta.setTiffDataFirstT(new NonNegativeInteger(zct[2]), series, plane); omeMeta.setTiffDataIFD(new NonNegativeInteger(ifd), series, plane); omeMeta.setTiffDataPlaneCount(new NonNegativeInteger(1), series, plane); } private void populateImage(OMEXMLMetadata omeMeta, int imageIndex) { String dimensionOrder = omeMeta.getPixelsDimensionOrder(imageIndex).toString(); int sizeZ = omeMeta.getPixelsSizeZ(imageIndex).getValue().intValue(); int sizeC = omeMeta.getPixelsSizeC(imageIndex).getValue().intValue(); int sizeT = omeMeta.getPixelsSizeT(imageIndex).getValue().intValue(); int planeCount = getPlaneCount(imageIndex); int ifdCount = imageMap.size(); if (planeCount == 0) { omeMeta.setTiffDataPlaneCount(new NonNegativeInteger(0), imageIndex, 0); return; } PositiveInteger samplesPerPixel = new PositiveInteger((sizeZ * sizeC * sizeT) / planeCount); for (int c=0; c<omeMeta.getChannelCount(imageIndex); c++) { omeMeta.setChannelSamplesPerPixel(samplesPerPixel, imageIndex, c); } sizeC /= samplesPerPixel.getValue(); int nextPlane = 0; for (int plane=0; plane<planeCount; plane++) { int[] zct = FormatTools.getZCTCoords(dimensionOrder, sizeZ, sizeC, sizeT, planeCount, imageIndex, plane); int planeIndex = plane; if (imageLocations[imageIndex].length < planeCount) { planeIndex /= (planeCount / imageLocations[imageIndex].length); } String filename = imageLocations[imageIndex][planeIndex]; if (filename != null) { filename = new Location(getContext(), filename).getName(); Integer ifdIndex = ifdCounts.get(filename); int ifd = ifdIndex == null ? 0 : ifdIndex.intValue(); omeMeta.setUUIDFileName(filename, imageIndex, nextPlane); String uuid = "urn:uuid:" + getUUID(filename); omeMeta.setUUIDValue(uuid, imageIndex, nextPlane); // fill in any non-default TiffData attributes populateTiffData(omeMeta, zct, ifd, imageIndex, nextPlane); ifdCounts.put(filename, ifd + 1); nextPlane++; } } } private int planeCount(int imageIndex) { MetadataRetrieve r = getMetadata().getOmeMeta().getRoot(); int z = r.getPixelsSizeZ(imageIndex).getValue().intValue(); int t = r.getPixelsSizeT(imageIndex).getValue().intValue(); int c = r.getChannelCount(imageIndex); String pixelType = r.getPixelsType(imageIndex).getValue(); int bytes = FormatTools.getBytesPerPixel(pixelType); if (bytes > 1 && c == 1) { c = r.getChannelSamplesPerPixel(imageIndex, 0).getValue(); } return z * c * t; } } // -- Helper Methods -- @SuppressWarnings("unchecked") private static <T extends MinimalTIFFFormat.Reader<?>> T getReader(SCIFIO scifio, Class<? 
extends Format> formatClass) throws FormatException { return (T) scifio.format().getFormatFromClass(formatClass).createReader(); } private static void setupServices(Context ctx) { service = ctx.getService(OMEXMLService.class); metaService = ctx.getService(OMEXMLMetadataService.class); } private static boolean isSingleFile(Context context, String id) throws FormatException, IOException { // parse and populate OME-XML metadata String fileName = new Location(context, id).getAbsoluteFile().getAbsolutePath(); RandomAccessInputStream ras = new RandomAccessInputStream(context, fileName); TiffParser tp = new TiffParser(context, ras); IFD ifd = tp.getFirstIFD(); long[] ifdOffsets = tp.getIFDOffsets(); ras.close(); String xml = ifd.getComment(); if (service == null) setupServices(context); OMEXMLMetadata meta; try { meta = service.createOMEXMLMetadata(xml); } catch (ServiceException se) { throw new FormatException(se); } if (meta.getRoot() == null) { throw new FormatException("Could not parse OME-XML from TIFF comment"); } int nImages = 0; for (int i=0; i<meta.getImageCount(); i++) { int nChannels = meta.getChannelCount(i); if (nChannels == 0) nChannels = 1; int z = meta.getPixelsSizeZ(i).getValue().intValue(); int t = meta.getPixelsSizeT(i).getValue().intValue(); nImages += z * t * nChannels; } return nImages <= ifdOffsets.length; } /** * This class can be used for translating any io.scif.Metadata * to Metadata for writing OME-TIFF. * files. * <p> * Note that Metadata translated from Core is only write-safe. * </p> * <p> * If trying to read, there should already exist an originally-parsed OME-TIFF * Metadata object which can be used. * </p> * <p> * Note also that any OME-TIFF image written must be reparsed, as the Metadata used * to write it can not be guaranteed valid. * </p> */ @Plugin(type = Translator.class, attrs = {@Attr(name = OMETIFFTranslator.SOURCE, value = io.scif.Metadata.CNAME), @Attr(name = OMETIFFTranslator.DEST, value = Metadata.CNAME)}, priority = TIFFFormat.PRIORITY + 1) public static class OMETIFFTranslator extends AbstractTranslator<io.scif.Metadata, Metadata> { // -- Translator API Methods -- public void typedTranslate(io.scif.Metadata source, Metadata dest) { if (dest.getOmeMeta() == null) { OMEMetadata omeMeta = new OMEMetadata(getContext()); scifio().translator().translate(source, omeMeta, false); dest.setOmeMeta(omeMeta); } try { TIFFFormat.Metadata tiffMeta = (TIFFFormat.Metadata) scifio().format().getFormatFromClass(TIFFFormat.class).createMetadata(); scifio().translator().translate(source, tiffMeta, false); dest.setFirstIFD(tiffMeta.getIfds().get(0)); } catch (FormatException e) { LOGGER.error("Failed to generate TIFF data", e); } OMETIFFPlane[][] info = new OMETIFFPlane[source.getImageCount()][]; dest.setInfo(info); List<Integer> samples = new ArrayList<Integer>(); List<Boolean> adjustedSamples = new ArrayList<Boolean>(); dest.samples = samples; dest.adjustedSamples = adjustedSamples; dest.createImageMetadata(0); for (int i=0; i<source.getImageCount(); i++) { info[i] = new OMETIFFPlane[source.getPlaneCount(i)]; for (int j=0; j<source.getPlaneCount(i); j++) { info[i][j] = new OMETIFFPlane(); } dest.add(new DefaultImageMetadata()); samples.add(source.getRGBChannelCount(i)); adjustedSamples.add(false); } } } // -- Helper classes -- /** Structure containing details on where to find a particular image plane. */ private static class OMETIFFPlane { /** Reader to use for accessing this plane. */ public MinimalTIFFFormat.Reader<?> reader; /** File containing this plane. 
*/ public String id; /** IFD number of this plane. */ public int ifd = -1; /** Certainty flag, for dealing with unspecified NumPlanes. */ public boolean certain = false; } }
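// Editorial sketch (not part of the format plugin above): in the SCIFIO framework
// of this era, a Format such as OMETIFFFormat is normally obtained through the
// SCIFIO gateway rather than constructed directly; the gateway selects the
// highest-priority Format whose Checker accepts the file. The file path is a
// placeholder, and the exact gateway/service calls are assumptions based on the
// API used elsewhere in this source.
class OmeTiffProbeSketch {
    static void describe(String path) throws Exception {
        io.scif.SCIFIO scifio = new io.scif.SCIFIO();
        // Resolve the Format for this file (OMETIFFFormat registers itself
        // with a priority above TIFFFormat, so .ome.tif files match it first).
        io.scif.Format format = scifio.format().getFormat(path);
        if (format.createChecker().isFormat(path)) {
            io.scif.Metadata meta = format.createParser().parse(path);
            System.out.println(format.getFormatName() + ": "
                + meta.getImageCount() + " image(s)");
        }
        scifio.getContext().dispose();
    }
}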
package ome.xml.meta; import java.io.BufferedReader; import java.io.ByteArrayOutputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStreamReader; import java.net.URL; import java.util.Arrays; import java.util.Hashtable; import java.util.StringTokenizer; import java.util.Vector; import net.imglib2.meta.Axes; import ome.scifio.FormatException; import ome.scifio.Parser; import ome.scifio.Reader; import ome.scifio.common.Constants; import ome.scifio.io.Location; import ome.scifio.services.DependencyException; import ome.scifio.services.ServiceException; import ome.scifio.services.ServiceFactory; import ome.scifio.util.FormatTools; import ome.scifio.filters.ChannelFiller; import ome.scifio.filters.ChannelSeparator; import ome.scifio.filters.FileStitcher; import ome.scifio.filters.ReaderFilter; import ome.xml.DOMUtil; import ome.xml.r2003fc.ome.OMENode; import ome.xml.services.OMEXMLService; import org.w3c.dom.Document; import org.w3c.dom.Element; public class OmeisImporter { // -- Constants -- /** Debugging flag. */ private static final boolean DEBUG = false; /** Network path to OMEIS. */ private static final String OMEIS_PATH = "http://localhost/cgi-bin/omeis"; // -- Static fields -- /** * Whether or not to print an HTTP header, * specified by -http-response CLI flag. */ private static boolean http = false; // -- Fields -- /** Reader for handling file formats. */ private Reader reader; /** Parser for interrogating files. */ private Parser parser; /** Metadata object, for gathering OME-XML metadata. */ private ome.xml.meta.AbstractOMEXMLMetadata omexmlMeta; private boolean stitch; // -- Constructor -- public OmeisImporter() { this(true); } public OmeisImporter(boolean stitchFiles) { stitch = stitchFiles; // TODO replace this with a general-purpose reader instantiated in the context ReaderFilter rf = new ReaderFilter(null); reader = rf; rf.enable(ChannelFiller.class); rf.enable(ChannelSeparator.class); try { parser = reader.getFormat().createParser(); } catch (FormatException e) { if(DEBUG) log("Failed to create a parser for format: " + reader.getFormat() + e.getMessage()); } if (stitch) reader = rf.enable(FileStitcher.class); try { ServiceFactory factory = new ServiceFactory(); OMEXMLService service = factory.getInstance(OMEXMLService.class); omexmlMeta = (ome.xml.meta.AbstractOMEXMLMetadata) service.createOMEXMLMetadata(); } catch (DependencyException de) { } catch (ServiceException se) { } } // -- OmeisImporter API methods - main functionality -- /** Prints out the build date for the Bio-Formats OMEIS utility. */ public void printVersion() { if (http) printHttpResponseHeader(); System.out.println("Bio-Formats OMEIS importer, built on @date@."); } /** * Tests whether Bio-Formats is potentially capable of importing the given * file IDs. Outputs the IDs it can potentially import, one group per line, * with elements of the each group separated by spaces. 
*/ public void testIds(int[] fileIds) throws OmeisException, FormatException, IOException { Arrays.sort(fileIds); // set up file path mappings String[] ids = new String[fileIds.length]; for (int i=0; i<fileIds.length; i++) { Hashtable fileInfo = getFileInfo(fileIds[i]); ids[i] = (String) fileInfo.get("Name"); String path = getLocalFilePath(fileIds[i]); Location.mapId(ids[i], path); } // check types and groups if (http) printHttpResponseHeader(); boolean[] done = new boolean[fileIds.length]; StringBuffer sb = new StringBuffer(); for (int i=0; i<fileIds.length; i++) { if (done[i]) continue; // already part of another group if (ids[i] == null) continue; // invalid id if (!reader.getFormat().createChecker().isFormat(ids[i])) continue; // unknown format reader.setSource(ids[i]); parser.parse(ids[i]); String[] files = parser.getUsedFiles(); if (files == null) continue; // invalid files list sb.setLength(0); for (int j=files.length - 1; j>=0; j--) { for (int ii=i; ii<fileIds.length; ii++) { if (files[j] == null) { log("Warning: FileID " + fileIds[ii] + " ('" + ids[ii] + "') has null used file"); } else if (files[j].equals(ids[ii])) { if (done[ii]) { log("Warning: FileID " + fileIds[ii] + " ('" + ids[ii] + "') already belongs to a group"); } done[ii] = true; if (j < files.length - 1) sb.append(" "); sb.append(fileIds[ii]); break; } } } System.out.println(sb.toString()); } } /** * Attempts to import the given file IDs using Bio-Formats, as a single * group. Pixels are saved to the pixels file designated by OMEIS, and an * OME-XML metadata block describing the successfully imported data is * dumped to standard output. */ public void importIds(int[] fileIds) throws OmeisException, FormatException, IOException { boolean doLittle = isLittleEndian(); Arrays.sort(fileIds); // set up file path mappings String[] ids = new String[fileIds.length]; for (int i=0; i<fileIds.length; i++) { Hashtable fileInfo = getFileInfo(fileIds[i]); ids[i] = (String) fileInfo.get("Name"); String path = getLocalFilePath(fileIds[i]); Location.mapId(ids[i], path); } // read file group String id = ids[0]; String path = Location.getMappedId(id); if (DEBUG) log("Reading file '" + id + "' --> " + path); // verify that all given file IDs were grouped by the reader reader.setSource(id); parser.parse(id); String[] used = parser.getUsedFiles(); if (used == null) { throw new FormatException("Invalid file list for " + path); } if (used.length != ids.length) { throw new FormatException("File list length mismatch for " + path + ": used=" + a2s(used) + "; ids=" + a2s(ids)); } boolean[] done = new boolean[ids.length]; int numLeft = ids.length; for (int i=0; i<used.length; i++) { for (int j=0; j<ids.length; j++) { if (done[j]) continue; if (used[i].equals(ids[j])) { done[j] = true; numLeft--; break; } } } if (numLeft > 0) { throw new FormatException( "File list does not correspond to ID list for " + path); } int imageCount = reader.getImageCount(); // get DOM and Pixels elements for the file's OME-XML metadata OMENode ome = (OMENode) omexmlMeta.getRoot(); Document omeDoc = ome.getDOMElement().getOwnerDocument(); Vector pix = DOMUtil.findElementList("Pixels", omeDoc); if (pix.size() != imageCount) { throw new FormatException("Pixels element count (" + pix.size() + ") does not match series count (" + imageCount + ") for '" + id + "'"); } if (DEBUG) log(imageCount + " series detected."); for (int i=0; i<imageCount; i++) { // gather pixels information for this series int sizeX = reader.getDatasetMetadata().getAxisLength(i, Axes.X); int sizeY =
reader.getDatasetMetadata().getAxisLength(i, Axes.Y); int sizeZ = reader.getDatasetMetadata().getAxisLength(i, Axes.Z); int sizeC = reader.getDatasetMetadata().getAxisLength(i, Axes.CHANNEL); int sizeT = reader.getDatasetMetadata().getAxisLength(i, Axes.TIME); int pixelType = reader.getDatasetMetadata().getPixelType(i); int bytesPerPixel; boolean isSigned, isFloat; switch (pixelType) { case FormatTools.INT8: bytesPerPixel = 1; isSigned = true; isFloat = false; break; case FormatTools.UINT8: bytesPerPixel = 1; isSigned = false; isFloat = false; break; case FormatTools.INT16: bytesPerPixel = 2; isSigned = true; isFloat = false; break; case FormatTools.UINT16: bytesPerPixel = 2; isSigned = false; isFloat = false; break; case FormatTools.INT32: bytesPerPixel = 4; isSigned = true; isFloat = false; break; case FormatTools.UINT32: bytesPerPixel = 4; isSigned = false; isFloat = false; break; case FormatTools.FLOAT: bytesPerPixel = 4; isSigned = true; isFloat = true; break; case FormatTools.DOUBLE: bytesPerPixel = 8; isSigned = true; isFloat = true; break; default: throw new FormatException("Unknown pixel type for '" + id + "' series #" + i + ": " + pixelType); } boolean little = reader.getDatasetMetadata().isLittleEndian(i); boolean swap = doLittle != little && bytesPerPixel > 1 && !isFloat; // ask OMEIS to allocate new pixels file int pixelsId = newPixels(sizeX, sizeY, sizeZ, sizeC, sizeT, bytesPerPixel, isSigned, isFloat); String pixelsPath = getLocalPixelsPath(pixelsId); if (DEBUG) { log("Series #" + i + ": id=" + pixelsId + ", path=" + pixelsPath); } // write pixels to file FileOutputStream out = new FileOutputStream(pixelsPath); imageCount = reader.getImageCount(); if (DEBUG) { log("Processing " + imageCount + " planes (sizeZ=" + sizeZ + ", sizeC=" + sizeC + ", sizeT=" + sizeT + "): "); } // OMEIS expects XYZCT order -- // interleaved RGB files will be handled a bit more slowly due to this // ordering (ChannelSeparator must read each plane three times), but // caching performed by the OS helps some for (int t=0; t<sizeT; t++) { for (int c=0; c<sizeC; c++) { for (int z=0; z<sizeZ; z++) { int ndx = FormatTools.getIndex(reader, i, z, c, t); if (DEBUG) { log("Reading plane #" + ndx + ": z=" + z + ", c=" + c + ", t=" + t); } byte[] plane = reader.openPlane(i, ndx).getBytes(); if (swap) { // swap endianness for (int b=0; b<plane.length; b+=bytesPerPixel) { for (int k=0; k<bytesPerPixel/2; k++) { int i1 = b + k; int i2 = b + bytesPerPixel - k - 1; byte b1 = plane[i1]; byte b2 = plane[i2]; plane[i1] = b2; plane[i2] = b1; } } } out.write(plane); } } } out.close(); if (DEBUG) log("[done]"); // tell OMEIS we're done pixelsId = finishPixels(pixelsId); if (DEBUG) log("finishPixels called (new id=" + pixelsId + ")"); // get SHA1 hash for finished pixels String sha1 = getPixelsSHA1(pixelsId); if (DEBUG) log("SHA1=" + sha1); // inject important extra attributes into proper Pixels element Element pixels = (Element) pix.elementAt(i); pixels.setAttribute("FileSHA1", sha1); pixels.setAttribute("ImageServerID", "" + pixelsId); pixels.setAttribute("DimensionOrder", "XYZCT"); // ignored anyway String pType = pixels.getAttribute("PixelType"); if (pType.startsWith("u")) { pixels.setAttribute("PixelType", pType.replace('u', 'U')); } if (DEBUG) log("Pixel attributes injected."); } reader.close(); // accumulate XML into buffer ByteArrayOutputStream xml = new ByteArrayOutputStream(); try { DOMUtil.writeXML(xml, omeDoc); } catch (javax.xml.transform.TransformerException exc) { throw new FormatException(exc); } // output 
OME-XML to standard output xml.close(); String xmlString = new String(xml.toByteArray(), Constants.ENCODING); if (DEBUG) log(xmlString); if (http) printHttpResponseHeader(); System.out.println(xmlString); } // -- OmeisImporter API methods - OMEIS method calls -- /** Gets path to original file corresponding to the given file ID. */ public String getLocalFilePath(int fileId) throws OmeisException { // ./omeis Method=GetLocalPath FileID=fid String[] s; try { s = omeis("GetLocalPath", "FileID=" + fileId); } catch (IOException exc) { throw new OmeisException(exc); } if (s.length > 1) { log("Warning: ignoring " + (s.length - 1) + " extraneous lines in OMEIS GetLocalPath call"); } else if (s.length < 1) { throw new OmeisException( "Failed to obtain local path for file ID " + fileId); } return s[0]; } /** * Gets information about the file corresponding to the given file ID. * @return hashtable containing the information as key/value pairs */ public Hashtable getFileInfo(int fileId) throws OmeisException { // ./omeis Method=FileInfo FileID=fid String[] s; try { s = omeis("FileInfo", "FileID=" + fileId); } catch (IOException exc) { throw new OmeisException(exc); } Hashtable info = new Hashtable(); for (int i=0; i<s.length; i++) { int equals = s[i].indexOf("="); if (equals < 0) { log("Warning: ignoring extraneous line in OMEIS FileInfo call: " + s[i]); } else { String key = s[i].substring(0, equals); String value = s[i].substring(equals + 1); info.put(key, value); } } return info; } /** * Instructs OMEIS to construct a new Pixels object. * @return pixels ID of the newly created pixels */ public int newPixels(int sizeX, int sizeY, int sizeZ, int sizeC, int sizeT, int bytesPerPixel, boolean isSigned, boolean isFloat) throws OmeisException { // ./omeis Method=NewPixels Dims=sx,sy,sz,sc,st,Bpp IsSigned=0 IsFloat=0 String[] s; try { s = omeis("NewPixels", "Dims=" + sizeX + "," + sizeY + "," + sizeZ + "," + sizeC + "," + sizeT + "," + bytesPerPixel + " IsSigned=" + (isSigned ? 1 : 0) + " IsFloat=" + (isFloat ? 1 : 0)); } catch (IOException exc) { throw new OmeisException(exc); } if (s.length > 1) { log("Warning: ignoring " + (s.length - 1) + " extraneous lines in OMEIS NewPixels call output"); } else if (s.length < 1) { throw new OmeisException("Failed to obtain pixels ID from NewPixels"); } int pid = -1; try { pid = Integer.parseInt(s[0]); } catch (NumberFormatException exc) { } if (pid <= 0) { throw new OmeisException("Invalid pixels ID from NewPixels: " + s[0]); } return pid; } /** Gets whether the local system uses little-endian byte order. */ public boolean isLittleEndian() throws OmeisException { // ./omeis Method=GetNativeEndian String[] s; try { s = omeis("GetNativeEndian", ""); } catch (IOException exc) { throw new OmeisException(exc); } if (s.length > 1) { log("Warning: ignoring " + (s.length - 1) + " extraneous lines in OMEIS GetLocalPath call output"); } else if (s.length < 1) { throw new OmeisException("Failed to obtain endianness value"); } if ("little".equalsIgnoreCase(s[0])) return true; else if ("big".equalsIgnoreCase(s[0])) return false; else throw new OmeisException("Invalid endianness value: " + s[0]); } /** Gets path to Pixels file corresponding to the given pixels ID. 
*/ public String getLocalPixelsPath(int pixelsId) throws OmeisException { // ./omeis Method=GetLocalPath PixelsID=pid String[] s; try { s = omeis("GetLocalPath", "PixelsID=" + pixelsId); } catch (IOException exc) { throw new OmeisException(exc); } if (s.length > 1) { log("Warning: ignoring " + (s.length - 1) + " extraneous lines in OMEIS GetLocalPath call"); } else if (s.length < 1) { throw new OmeisException( "Failed to obtain local path for pixels ID " + pixelsId); } return s[0]; } /** * Instructs OMEIS to process the Pixels file * corresponding to the given pixels ID. * @return final (possibly changed) pixels ID of the processed pixels */ public int finishPixels(int pixelsId) throws OmeisException { // ./omeis Method=FinishPixels PixelsID=pid String[] s; try { s = omeis("FinishPixels", "PixelsID=" + pixelsId); } catch (IOException exc) { throw new OmeisException(exc); } if (s.length > 1) { log("Warning: ignoring " + (s.length - 1) + " extraneous lines in OMEIS FinishPixels call output"); } else if (s.length < 1) { throw new OmeisException("Failed to obtain pixels ID from FinishPixels"); } int pid = -1; try { pid = Integer.parseInt(s[0]); } catch (NumberFormatException exc) { } if (pid <= 0) { throw new OmeisException("Invalid pixels ID from FinishPixels: " + s[0]); } return pid; } /** Gets SHA1 hash for the pixels corresponding to the given pixels ID. */ public String getPixelsSHA1(int pixelsId) throws OmeisException { // ./omeis Method=PixelsSHA1 PixelsID=pid String[] s; try { s = omeis("PixelsSHA1", "PixelsID=" + pixelsId); } catch (IOException exc) { throw new OmeisException(exc); } if (s.length > 1) { log("Warning: ignoring " + (s.length - 1) + " extraneous lines in OMEIS PixelsSHA1 call"); } else if (s.length < 1) { throw new OmeisException( "Failed to obtain SHA1 for pixels ID " + pixelsId); } return s[0]; } // -- Helper methods -- /** Calls OMEIS, returning an array of strings (one per line of output). */ private String[] omeis(String method, String params) throws IOException { // build OMEIS URL StringBuffer sb = new StringBuffer(OMEIS_PATH); sb.append("?Method="); sb.append(method); StringTokenizer st = new StringTokenizer(params); while (st.hasMoreTokens()) { sb.append("&"); sb.append(st.nextToken()); } String url = sb.toString(); // call OMEIS via HTTP BufferedReader in = new BufferedReader( new InputStreamReader(new URL(url).openStream(), Constants.ENCODING)); Vector v = new Vector(); while (true) { String line = in.readLine(); if (line == null) break; v.add(line); } String[] results = new String[v.size()]; v.copyInto(results); return results; } /** Prints a debugging message. */ private void log(String msg) { System.err.println("Bio-Formats: " + msg); } /** Gets a printable version of the given array of strings. */ private String a2s(String[] s) { StringBuffer sb = new StringBuffer(); if (s == null) return "null"; sb.append("["); if (s.length > 0) sb.append(s[0]); for (int i=1; i<s.length; i++) { sb.append(" "); sb.append(s[i]); } sb.append("]"); return sb.toString(); } /** Prints an HTTP error response header. */ private void printHttpErrorHeader() { System.out.print("Status: 500 Server Error\r\n"); System.out.print("Content-Type: text/plain\r\n\r\n"); } /** Prints an HTTP response header. */ private void printHttpResponseHeader() { System.out.print("Status: 200 OK\r\n"); System.out.print("Content-Type: text/plain\r\n\r\n"); } // -- Main method -- /** * Run ./omebf with a list of file IDs to import those IDs. 
* Run with the -test flag to ask Bio-Formats whether it * thinks it can import those files. * @throws FormatException */ public static void main(String[] args) { boolean version = false, test = false, stitch = true; int[] fileIds = new int[args.length]; // parse command line arguments int num = 0; for (int i=0; i<args.length; i++) { if ("-version".equalsIgnoreCase(args[i])) version = true; else if ("-test".equalsIgnoreCase(args[i])) test = true; else if ("-http-response".equalsIgnoreCase(args[i])) http = true; else if ("-nostitch".equalsIgnoreCase(args[i])) stitch = false; else { try { int q = Integer.parseInt(args[i]); fileIds[num++] = q; } catch (NumberFormatException exc) { System.err.println("Warning: ignoring parameter: " + args[i]); } } } int[] trimIds = new int[num]; System.arraycopy(fileIds, 0, trimIds, 0, num); fileIds = trimIds; OmeisImporter importer = new OmeisImporter(stitch); // process the IDs try { if (version) importer.printVersion(); else if (test) importer.testIds(fileIds); else importer.importIds(fileIds); } catch (Throwable t) { // NB: We really do want to catch all exception types here, // to redirect output properly for the OME server. if (http) { importer.printHttpErrorHeader(); System.out.println("An exception occurred while processing FileIDs:"); t.printStackTrace(System.out); } System.err.println("An exception occurred:"); t.printStackTrace(); System.exit(1); } } }
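// The importer above is normally driven by the OMEIS server through its main() entry point,
// but the same flow can be exercised in-process. The sketch below is illustrative only: the
// file IDs are hypothetical, the class name OmeisImporterSketch is not part of Bio-Formats,
// and it assumes an OMEIS instance is reachable at the OMEIS_PATH hard-coded above
// (http://localhost/cgi-bin/omeis). Equivalent CLI calls would be roughly
// "./omebf -test 101 102 103" followed by "./omebf 101 102 103".
package ome.xml.meta;

class OmeisImporterSketch {
  public static void main(String[] args) throws Exception {
    // stitch related files into one group, as the default constructor does
    OmeisImporter importer = new OmeisImporter(true);
    int[] fileIds = { 101, 102, 103 }; // hypothetical OMEIS file IDs
    // print the groups of IDs that Bio-Formats believes it can import
    importer.testIds(fileIds);
    // import the group: pixels go to OMEIS, OME-XML is written to standard output
    importer.importIds(fileIds);
  }
}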
package edu.msu.nscl.olog; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.Multimap; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.ws.rs.core.MultivaluedMap; import javax.jcr.RepositoryException; import javax.ws.rs.core.Response; import org.apache.ibatis.exceptions.PersistenceException; import org.apache.ibatis.session.SqlSession; import org.apache.ibatis.session.SqlSessionFactory; /** * MyBatis-based query to retrieve logs from the log database. * * @author Eric Berryman taken from Ralph Lange <Ralph.Lange@bessy.de> */ public class FindLogsQuery { private enum SearchType { LOG, TAG }; private Multimap<String, String> value_matches = ArrayListMultimap.create(); private Multimap<String, String> logPaginate_matches = ArrayListMultimap.create(); private Multimap<String, String> date_matches = ArrayListMultimap.create(); private List<String> log_matches = new ArrayList<String>(); private List<Long> logId_matches = new ArrayList<Long>(); private List<String> logbook_matches = new ArrayList<String>(); private List<String> tag_matches = new ArrayList<String>(); private List<String> tag_patterns = new ArrayList<String>(); private List<Long> jcr_search_ids = new ArrayList<Long>(); private static SqlSessionFactory ssf = MyBatisSession.getSessionFactory(); /** * Creates a new instance of FindLogsQuery, sorting the query parameters. * Logbook matches and tag string matches go to the first inner query, * tag pattern matches are queried separately, * name matches go to the outer query. * Logbook and tag names are converted to lowercase before being matched.
* * @param matches the map of matches to apply */ private FindLogsQuery(MultivaluedMap<String, String> matches) throws RepositoryException { for (Map.Entry<String, List<String>> match : matches.entrySet()) { String key = match.getKey().toLowerCase(); if (key.equals("search")) { log_matches.addAll(match.getValue()); JcrSearch js = new JcrSearch(); jcr_search_ids = js.searchForIds(match.getValue().get(0)); } else if (key.equals("tag")) { addTagMatches(match.getValue()); } else if (key.equals("logbook")) { addLogbookMatches(match.getValue()); } else if (key.equals("page")) { logPaginate_matches.putAll(key, match.getValue()); } else if (key.equals("limit")) { logPaginate_matches.putAll(key, match.getValue()); } else if (key.equals("start")) { date_matches.putAll(key, match.getValue()); } else if (key.equals("end")) { date_matches.putAll(key, match.getValue()); } else { value_matches.putAll(key, match.getValue()); } } } private FindLogsQuery(SearchType type, Collection<String> matches) { if (type == SearchType.LOG) { log_matches.addAll(matches); } else { addTagMatches(matches); } } private FindLogsQuery(SearchType type, String name) { if (type == SearchType.LOG) { log_matches.add(name); } else { addTagMatches(Collections.singleton(name)); } } private FindLogsQuery(SearchType type, Long logId) { if (type == SearchType.LOG) { logId_matches.add(logId); } } private void addLogbookMatches(Collection<String> matches) { for (String m : matches) { logbook_matches.add(m); } if (logbook_matches.size() == 1) { String match = logbook_matches.get(0); logbook_matches.clear(); logbook_matches.addAll(Arrays.asList(match.split(","))); } } private void addTagMatches(Collection<String> matches) { for (String m : matches) { if (m.contains("?") || m.contains("*")) { tag_patterns.add(m); } else { tag_matches.add(m); } } } /** * Creates and executes the logbook and tag string match subquery using GROUP. * * @param con connection to use * @return a set of log ids that match */ //TODO: need to add search params like olog; logs between dates, search all fields, files, etc. private Set<Long> getIdsFromLogbookAndTagMatch() throws CFException { SqlSession ss = ssf.openSession(); try { Set<Long> ids = new HashSet<Long>(); // set of matching log ids List<String> params = new ArrayList<String>(); // parameter list for this query for (String tag : tag_matches) { params.add(tag); } int size = tag_matches.size(); HashMap<String, Object> hm = new HashMap<String, Object>(); hm.put("list", params); hm.put("size", size); ArrayList<XmlLog> logs = (ArrayList<XmlLog>) ss.selectList("mappings.LogMapping.getIdsFromLogbookAndTagMatch", hm); if (logs != null) { Iterator<XmlLog> iterator = logs.iterator(); while (iterator.hasNext()) { XmlLog log = iterator.next(); ids.add(log.getId()); } } return ids; } catch (PersistenceException e) { throw new CFException(Response.Status.INTERNAL_SERVER_ERROR, "MyBatis exception: " + e); } finally { ss.close(); } } /** * Creates and executes the properties string match subquery using GROUP. 
* * @param con connection to use * @return a set of log ids that match */ private Set<Long> getIdsFromPropertiesMatch() throws CFException { SqlSession ss = ssf.openSession(); try { Set<Long> ids = new HashSet<Long>(); // set of matching log ids List<String> values = new ArrayList<String>(); List<String> names = new ArrayList<String>(); for (Map.Entry<String, Collection<String>> match : value_matches.asMap().entrySet()) { names.add(match.getKey().toLowerCase()); for (String value : match.getValue()) { values.add(convertFileGlobToSQLPattern(value)); } } int size = value_matches.asMap().size(); HashMap<String, Object> hm = new HashMap<String, Object>(); hm.put("propNameList", names); hm.put("propValueList", values); hm.put("size", size); ArrayList<XmlLog> logs = (ArrayList<XmlLog>) ss.selectList("mappings.LogMapping.getIdsFromPropertiesMatch", hm); if (logs != null) { Iterator<XmlLog> iterator = logs.iterator(); while (iterator.hasNext()) { XmlLog log = iterator.next(); ids.add(log.getId()); } } return ids; } catch (PersistenceException e) { throw new CFException(Response.Status.INTERNAL_SERVER_ERROR, "MyBatis exception: " + e); } finally { ss.close(); } } /** * Creates and executes the tag string match subquery using GROUP. * * @param con connection to use * @return a set of log ids that match */ private Set<Long> getIdsFromTagMatch(String match) throws CFException { SqlSession ss = ssf.openSession(); try { Set<Long> ids = new HashSet<Long>(); ArrayList<XmlLog> logs = (ArrayList<XmlLog>) ss.selectList("mappings.LogMapping.getIdsFromTagMatch", match); if (logs != null) { Iterator<XmlLog> iterator = logs.iterator(); while (iterator.hasNext()) { XmlLog log = iterator.next(); ids.add(log.getId()); } } return ids; } catch (PersistenceException e) { throw new CFException(Response.Status.INTERNAL_SERVER_ERROR, "MyBatis exception: " + e); } finally { ss.close(); } } /** * Creates and executes the logbook string match subquery using GROUP. * * @return a set of log ids that match */ private Set<Long> getIdsFromLogbookMatch(String match) throws CFException { SqlSession ss = ssf.openSession(); try { Set<Long> ids = new HashSet<Long>(); ArrayList<XmlLog> logs = (ArrayList<XmlLog>) ss.selectList("mappings.LogMapping.getIdsFromLogbookMatch", match); if (logs != null) { Iterator<XmlLog> iterator = logs.iterator(); while (iterator.hasNext()) { XmlLog log = iterator.next(); ids.add(log.getId()); } } return ids; } catch (PersistenceException e) { throw new CFException(Response.Status.INTERNAL_SERVER_ERROR, "MyBatis exception: " + e); } finally { ss.close(); } } /** * Creates and executes the pagination subquery using GROUP BY. 
* * @param con connection to use * @return a set of log ids that match */ Set<Long> getIdsFromPagination() throws CFException { SqlSession ss = ssf.openSession(); try { Set<Long> idsList = new HashSet<Long>(); Set<Long> idsSearchList = new HashSet<Long>(); Set<String> valuesList = new HashSet<String>(); Set<Long> returnIds = new HashSet<Long>(); HashMap<String, Object> hm = new HashMap<String, Object>(); if (!tag_matches.isEmpty()) { for (String tag : tag_matches) { Set<Long> ids = getIdsFromTagMatch(tag); if (ids.isEmpty()) { return null; } idsList.addAll(ids); } } if (!value_matches.isEmpty()) { Set<Long> ids = getIdsFromPropertiesMatch(); if (ids.isEmpty()) { return null; } idsList.addAll(ids); } if (!tag_patterns.isEmpty()) { for (String p : tag_patterns) { Set<Long> ids = getIdsFromTagMatch(p); if (ids.isEmpty()) { return null; } idsList.addAll(ids); } } if (!logbook_matches.isEmpty()) { if (idsList.isEmpty()) { for (String logbook : logbook_matches) { Set<Long> ids = getIdsFromLogbookMatch(logbook); if (ids.isEmpty()) { return null; } idsList.addAll(ids); } } else { Set<Long> id_results = new HashSet<Long>(); for (String logbook : logbook_matches) { Set<Long> ids = getIdsFromLogbookMatch(logbook); if (ids.isEmpty()) { return null; } id_results.addAll(ids); } Set<Long> temp_set = new HashSet<Long>(); for (Long id : idsList) { if (id_results.contains(id)) { temp_set.add(id); } } idsList.clear(); idsList.addAll(temp_set); } } if (!date_matches.isEmpty()) { String start = null, end = null; for (Map.Entry<String, Collection<String>> match : date_matches.asMap().entrySet()) { if (match.getKey().toLowerCase().equals("start")) { start = match.getValue().iterator().next(); } if (match.getKey().toLowerCase().equals("end")) { end = match.getValue().iterator().next(); } } if (start != null && end != null) { hm.put("start", Long.valueOf(start)); hm.put("end", Long.valueOf(end)); } } if (!logId_matches.isEmpty()) { for (long i : logId_matches) { idsList.add(i); } } if (!log_matches.isEmpty()) { for (String value : log_matches) { valuesList.add(convertFileGlobToSQLPattern(value)); } } if (!jcr_search_ids.isEmpty()) { for (long i : jcr_search_ids) { idsSearchList.add(i); } } if (!logPaginate_matches.isEmpty()) { String limit = null, offset = null; for (Map.Entry<String, Collection<String>> match : logPaginate_matches.asMap().entrySet()) { if (match.getKey().toLowerCase().equals("limit")) { limit = match.getValue().iterator().next(); } if (match.getKey().toLowerCase().equals("page")) { offset = match.getValue().iterator().next(); } } if (limit != null && offset != null) { Long longOffset = Long.valueOf(offset) * Long.valueOf(limit) - Long.valueOf(limit); hm.put("limit", Long.valueOf(limit)); hm.put("offset", longOffset); } } if (idsSearchList.size() > 0) { hm.put("idsSearchList", idsSearchList); } if (idsList.size() > 0) { hm.put("idsList", idsList); } if (valuesList.size() > 0) { hm.put("valuesList", valuesList); } ArrayList<XmlLog> logs = (ArrayList<XmlLog>) ss.selectList("mappings.LogMapping.getIdsFromPagination", hm); if (logs != null) { Iterator<XmlLog> iterator = logs.iterator(); while (iterator.hasNext()) { XmlLog log = iterator.next(); returnIds.add(log.getId()); } } return returnIds; } catch (PersistenceException e) { throw new CFException(Response.Status.INTERNAL_SERVER_ERROR, "MyBatis exception: " + e); } finally { ss.close(); } } /** * Creates and executes a JDBC based query using subqueries for * logbook and tag matches. 
* * @param con connection to use * @return result set with columns named <tt>log</tt>, <tt>logbook</tt>, * <tt>value</tt>, null if no results * @throws CFException wrapping an SQLException */ private ArrayList<XmlLog> executeQuery() throws CFException { SqlSession ss = ssf.openSession(); try { List<Long> idsList = new ArrayList<Long>(); Set<Long> paginate_result = new HashSet<Long>(); Set<Long> ids = getIdsFromPagination(); if (ids == null || ids.isEmpty()) { return null; } else { paginate_result.addAll(ids); } if (!paginate_result.isEmpty()) { for (long i : paginate_result) { idsList.add(i); } } ArrayList<XmlLog> logs = (ArrayList<XmlLog>) ss.selectList("mappings.LogMapping.getLogsFromIds", idsList); return logs; } catch (PersistenceException e) { throw new CFException(Response.Status.INTERNAL_SERVER_ERROR, "MyBatis exception: " + e); } finally { ss.close(); } } /* Regexp for this pattern: "((\\\\)*)((\\\*)|(\*)|(\\\?)|(\?)|(%)|(_))" * i.e. any number of "\\" (group 1) -> same number of "\\" * then any of "\*" (group 4) -> "*" * "*" (group 5) -> "%" * "\?" (group 6) -> "?" * "?" (group 7) -> "_" * "%" (group 8) -> "\%" * "_" (group 9) -> "\_" */ private static Pattern pat = Pattern.compile("((\\\\\\\\)*)((\\\\\\*)|(\\*)|(\\\\\\?)|(\\?)|(%)|(_))"); private static final int grp[] = {4, 5, 6, 7, 8, 9}; private static final String rpl[] = {"*", "%", "?", "_", "\\%", "\\_"}; /** * Translates the specified file glob pattern <tt>in</tt> * into the corresponding SQL pattern. * * @param in file glob pattern * @return SQL pattern */ private static String convertFileGlobToSQLPattern(String in) { StringBuffer out = new StringBuffer(); Matcher m = pat.matcher(in); while (m.find()) { StringBuilder rep = new StringBuilder(); if (m.group(1) != null) { rep.append(m.group(1)); } for (int i = 0; i < grp.length; i++) { if (m.group(grp[i]) != null) { rep.append(rpl[i]); break; } } m.appendReplacement(out, rep.toString()); } m.appendTail(out); return out.toString(); } /** * Finds logs by matching logbook/tag values and/or log and/or tag names. * * @param matches MultiMap of query parameters * @return XmlLogs container with all found logs and their logbooks/tags */ public static XmlLogs findLogsByMultiMatch(MultivaluedMap<String, String> matches) throws CFException, RepositoryException { FindLogsQuery q = new FindLogsQuery(matches); XmlLogs xmlLogs = new XmlLogs(); ArrayList<XmlLog> logs = q.executeQuery(); if (logs != null) { Iterator<XmlLog> iterator = logs.iterator(); while (iterator.hasNext()) { xmlLogs.addXmlLog(iterator.next()); } } return xmlLogs; } /** * Returns logs found by matching logbook/tag and/or log names. * * @param name query to be used for matching * @return XmlLogs container with all found logs and their logbooks/tags */ public static XmlLogs findLogsByLogbookName(String name) throws CFException { FindLogsQuery q = new FindLogsQuery(SearchType.TAG, name); XmlLogs xmlLogs = null; ArrayList<XmlLog> logs = q.executeQuery(); if (logs != null) { Iterator<XmlLog> iterator = logs.iterator(); while (iterator.hasNext()) { xmlLogs.addXmlLog(iterator.next()); } } return xmlLogs; } /** * Return single log found by log id. 
* * @param logId id to look for * @return XmlLog with found log and its logbooks * @throws CFException wrapping an underlying persistence exception */ public static XmlLog findLogById(Long logId) throws CFException { FindLogsQuery q = new FindLogsQuery(SearchType.LOG, logId); XmlLog xmlLog = null; ArrayList<XmlLog> logs = q.executeQuery(); if (logs != null) { Iterator<XmlLog> iterator = logs.iterator(); while (iterator.hasNext()) { xmlLog = iterator.next(); } } return xmlLog; } }
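// convertFileGlobToSQLPattern() above rewrites file-glob wildcards into SQL LIKE wildcards
// ("*" -> "%", "?" -> "_") while escaping literal "%" and "_" and keeping "\*" / "\?" literal.
// The stand-alone sketch below re-implements that same mapping with a plain character loop so
// the intended translation is easy to verify; the class and method names are illustrative and
// are not part of the Olog service itself.
package edu.msu.nscl.olog;

class GlobToSqlSketch {
  /** Translates a file-glob pattern into an SQL LIKE pattern (same mapping as above). */
  static String globToSql(String glob) {
    StringBuilder out = new StringBuilder();
    for (int i = 0; i < glob.length(); i++) {
      char c = glob.charAt(i);
      if (c == '\\' && i + 1 < glob.length()
          && (glob.charAt(i + 1) == '*' || glob.charAt(i + 1) == '?')) {
        out.append(glob.charAt(i + 1)); // "\*" and "\?" stay literal
        i++;
      }
      else if (c == '*') out.append('%');   // glob "any string" -> SQL "%"
      else if (c == '?') out.append('_');   // glob "any char"   -> SQL "_"
      else if (c == '%') out.append("\\%"); // literal "%" must be escaped for LIKE
      else if (c == '_') out.append("\\_"); // literal "_" must be escaped for LIKE
      else out.append(c);
    }
    return out.toString();
  }

  public static void main(String[] args) {
    System.out.println(globToSql("run_*.log"));    // run\_%.log
    System.out.println(globToSql("beam?current")); // beam_current
    System.out.println(globToSql("100\\*gain"));   // 100*gain
  }
}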
package org.openoffice.test.tools; import com.sun.star.beans.PropertyState; import com.sun.star.beans.PropertyValue; import com.sun.star.document.MacroExecMode; import com.sun.star.drawing.XDrawPage; import com.sun.star.drawing.XDrawPageSupplier; import com.sun.star.drawing.XDrawPages; import com.sun.star.drawing.XDrawPagesSupplier; import com.sun.star.frame.XComponentLoader; import com.sun.star.frame.XController; import com.sun.star.frame.XFrame; import com.sun.star.frame.XModel; import com.sun.star.lang.XComponent; import com.sun.star.lang.XMultiServiceFactory; import com.sun.star.lang.XServiceInfo; import com.sun.star.uno.UnoRuntime; import com.sun.star.uno.XInterface; import com.sun.star.util.CloseVetoException; import com.sun.star.util.XCloseable; import com.sun.star.util.XModifiable; import java.util.logging.Level; import java.util.logging.Logger; /** provides a small wrapper around a document */ public class OfficeDocument { public OfficeDocument( XMultiServiceFactory orb, XComponent document ) { m_orb = orb; m_documentComponent = document; } protected static XComponent implLoadAsComponent( XMultiServiceFactory orb, String documentOrFactoryURL ) throws com.sun.star.uno.Exception { return implLoadAsComponent( orb, documentOrFactoryURL, new PropertyValue[0] ); } protected static XComponent implLoadAsComponent( XMultiServiceFactory orb, String documentOrFactoryURL, final PropertyValue[] i_args ) throws com.sun.star.uno.Exception { XComponentLoader aLoader = UnoRuntime.queryInterface( XComponentLoader.class, orb.createInstance( "com.sun.star.frame.Desktop" ) ); XComponent document = UnoRuntime.queryInterface( XComponent.class, aLoader.loadComponentFromURL( documentOrFactoryURL, "_blank", 0, i_args ) ); return document; } private static OfficeDocument implLoadDocument( XMultiServiceFactory orb, String documentOrFactoryURL ) throws com.sun.star.uno.Exception { return implLoadDocument( orb, documentOrFactoryURL, new PropertyValue[0] ); } private static OfficeDocument implLoadDocument( XMultiServiceFactory orb, String documentOrFactoryURL, final PropertyValue[] i_args ) throws com.sun.star.uno.Exception { XComponent document = implLoadAsComponent( orb, documentOrFactoryURL, i_args ); XServiceInfo xSI = UnoRuntime.queryInterface( XServiceInfo.class, document ); if ( xSI.supportsService( "com.sun.star.sheet.SpreadsheetDocument" ) ) return new SpreadsheetDocument( orb, document ); return new OfficeDocument( orb, document ); } public static OfficeDocument loadDocument( XMultiServiceFactory orb, String documentURL ) throws com.sun.star.uno.Exception { return implLoadDocument( orb, documentURL ); } public static OfficeDocument blankTextDocument( XMultiServiceFactory orb ) throws com.sun.star.uno.Exception { return blankDocument( orb, DocumentType.WRITER ); } public static OfficeDocument blankXMLForm( XMultiServiceFactory orb ) throws com.sun.star.uno.Exception { return blankDocument( orb, DocumentType.XMLFORM ); } public static OfficeDocument blankDocument( XMultiServiceFactory orb, DocumentType eType ) throws com.sun.star.uno.Exception { final PropertyValue[] args = new PropertyValue[] { new PropertyValue( "MacroExecutionMode", -1, MacroExecMode.ALWAYS_EXECUTE, PropertyState.DIRECT_VALUE ) }; return implLoadDocument( orb, getDocumentFactoryURL( eType ), args ); } public boolean close() { try { XCloseable closeDoc = UnoRuntime.queryInterface( XCloseable.class, m_documentComponent ); closeDoc.close( true ); return true; } catch ( CloseVetoException e ) { Logger.getLogger( 
OfficeDocument.class.getName() ).log( Level.SEVERE, "closing the document was vetoed", e ); } return false; } public XComponent getDocument( ) { return m_documentComponent; } public boolean isModified() { XModifiable modify = (XModifiable)query( XModifiable.class ); return modify.isModified(); } public Object query( Class aInterfaceClass ) { return UnoRuntime.queryInterface( aInterfaceClass, m_documentComponent ); } public XMultiServiceFactory getOrb( ) { return m_orb; } /** retrieves the current view of the document @return the view component, queried for the interface described by aInterfaceClass */ public OfficeDocumentView getCurrentView( ) { // get the model interface for the document XModel xDocModel = UnoRuntime.queryInterface( XModel.class, m_documentComponent ); // get the current controller for the document - as a controller is tied to a view, // this gives us the currently active view for the document. XController xController = xDocModel.getCurrentController(); if ( classify() == DocumentType.CALC ) return new SpreadsheetView( m_orb, this, xController ); return new OfficeDocumentView( m_orb, this, xController ); } /** reloads the document * * The reload is done by dispatching the respective URL at a frame of the document. * As a consequence, if you have references to a view of the document, or any interface * of the document, they will become invalid. * The Model instance itself, at which you called reload, will still be valid, it will * automatically update its internal state after the reload. * * Another consequence is that if the document does not have a view at all, it cannot * be reloaded. */ public void reload() throws Exception { OfficeDocumentView view = getCurrentView(); XFrame frame = view.getController().getFrame(); XModel oldModel = frame.getController().getModel(); getCurrentView().dispatch( ".uno:Reload" ); m_documentComponent = UnoRuntime.queryInterface( XComponent.class, frame.getController().getModel() ); XModel newModel = getCurrentView().getController().getModel(); if ( UnoRuntime.areSame( oldModel, newModel ) ) throw new java.lang.IllegalStateException( "reload failed" ); } /** returns a URL which can be used to create a document of a certain type */ public static String getDocumentFactoryURL( DocumentType eType ) { if ( eType == DocumentType.WRITER ) return "private:factory/swriter"; if ( eType == DocumentType.CALC ) return "private:factory/scalc"; if ( eType == DocumentType.DRAWING ) return "private:factory/sdraw"; if ( eType == DocumentType.XMLFORM ) return "private:factory/swriter?slot=21053"; return "private:factory/swriter"; } /** classifies a document */ public DocumentType classify( ) { XServiceInfo xSI = UnoRuntime.queryInterface( XServiceInfo.class, m_documentComponent ); if ( xSI.supportsService( "com.sun.star.text.TextDocument" ) ) return DocumentType.WRITER; else if ( xSI.supportsService( "com.sun.star.sheet.SpreadsheetDocument" ) ) return DocumentType.CALC; else if ( xSI.supportsService( "com.sun.star.drawing.DrawingDocument" ) ) return DocumentType.DRAWING; return DocumentType.UNKNOWN; } /** retrieves a com.sun.star.drawing.DrawPage of the document, denoted by index * @param index * the index of the draw page * @throws * com.sun.star.lang.IndexOutOfBoundsException * com.sun.star.lang.WrappedTargetException */ protected XDrawPage getDrawPage( int index ) throws com.sun.star.lang.IndexOutOfBoundsException, com.sun.star.lang.WrappedTargetException { XDrawPagesSupplier xSuppPages = UnoRuntime.queryInterface( XDrawPagesSupplier.class, getDocument() 
); XDrawPages xPages = xSuppPages.getDrawPages(); return UnoRuntime.queryInterface( XDrawPage.class, xPages.getByIndex( index ) ); } /** retrieves the <type scope="com.sun.star.drawing">DrawPage</type> of the document */ protected XDrawPage getMainDrawPage( ) throws com.sun.star.uno.Exception { XDrawPage xReturn; // in case of a Writer document, this is rather easy: simply ask the XDrawPageSupplier XDrawPageSupplier xSuppPage = UnoRuntime.queryInterface( XDrawPageSupplier.class, getDocument() ); if ( null != xSuppPage ) xReturn = xSuppPage.getDrawPage(); else { // the model itself is no draw page supplier - okay, it may be a Writer or Calc document // (or any other multi-page document) XDrawPagesSupplier xSuppPages = UnoRuntime.queryInterface( XDrawPagesSupplier.class, getDocument() ); XDrawPages xPages = xSuppPages.getDrawPages(); xReturn = UnoRuntime.queryInterface( XDrawPage.class, xPages.getByIndex( 0 ) ); // Note that this is no really error-proof code: If the document model does not support the // XDrawPagesSupplier interface, or if the pages collection returned is empty, this will break. } return xReturn; } /** creates a component at the service factory provided by the document */ public XInterface createInstance( String serviceSpecifier ) throws com.sun.star.uno.Exception { XMultiServiceFactory xORB = UnoRuntime.queryInterface( XMultiServiceFactory.class, m_documentComponent ); return (XInterface)xORB.createInstance( serviceSpecifier ); } /** creates a component at the service factory provided by the document, queried for a given interface type */ public <T> T createInstance( String i_serviceSpecifier, Class<T> i_interfaceClass ) throws com.sun.star.uno.Exception { return UnoRuntime.queryInterface( i_interfaceClass, createInstance( i_serviceSpecifier ) ); } /** creates a component at the service factory provided by the document */ public XInterface createInstanceWithArguments( String serviceSpecifier, Object[] arguments ) throws com.sun.star.uno.Exception { XMultiServiceFactory xORB = UnoRuntime.queryInterface( XMultiServiceFactory.class, m_documentComponent ); return (XInterface) xORB.createInstanceWithArguments( serviceSpecifier, arguments ); } private XMultiServiceFactory m_orb; private XComponent m_documentComponent; };
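// A minimal usage sketch for the OfficeDocument wrapper above. It assumes the UNO Java
// libraries are on the classpath and that a local office installation can be bootstrapped
// in-process; the Bootstrap-based connection code and the class name OfficeDocumentSketch
// are illustrative assumptions, not part of the wrapper itself.
package org.openoffice.test.tools;

import com.sun.star.comp.helper.Bootstrap;
import com.sun.star.lang.XMultiServiceFactory;
import com.sun.star.uno.UnoRuntime;
import com.sun.star.uno.XComponentContext;

class OfficeDocumentSketch {
  public static void main(String[] args) throws Exception {
    // start (or connect to) a local office instance and obtain its service factory
    XComponentContext context = Bootstrap.bootstrap();
    XMultiServiceFactory orb = UnoRuntime.queryInterface(
      XMultiServiceFactory.class, context.getServiceManager());
    // create an empty Writer document through the wrapper's factory-URL handling
    OfficeDocument doc = OfficeDocument.blankTextDocument(orb);
    System.out.println("type: " + doc.classify());        // expected: WRITER
    System.out.println("modified: " + doc.isModified());  // expected: false for a new document
    // discard the document again without saving
    doc.close();
  }
}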
package javaslang.collection; import javaslang.Kind2; import javaslang.Tuple; import javaslang.Tuple2; import javaslang.control.Option; import java.io.Serializable; import java.util.ArrayList; import java.util.Comparator; import java.util.NoSuchElementException; import java.util.Objects; import java.util.function.*; import java.util.stream.Collector; import static javaslang.collection.Comparators.naturalComparator; /** * SortedMap implementation, backed by a Red/Black Tree. * * @param <K> Key type * @param <V> Value type * @author Daniel Dietrich * @since 2.0.0 */ // DEV-NOTE: use entries.min().get() in favor of iterator().next(), it is faster! public final class TreeMap<K, V> implements Kind2<TreeMap<?, ?>, K, V>, SortedMap<K, V>, Serializable { private static final long serialVersionUID = 1L; private final RedBlackTree<Tuple2<K, V>> entries; private TreeMap(RedBlackTree<Tuple2<K, V>> entries) { this.entries = entries; } /** * Returns a {@link Collector} which may be used in conjunction with * {@link java.util.stream.Stream#collect(Collector)} to obtain a * {@link TreeMap}. * <p> * The natural comparator is used to compare TreeMap keys. * * @param <K> The key type * @param <V> The value type * @return A {@link TreeMap} Collector. */ public static <K extends Comparable<? super K>, V> Collector<Tuple2<K, V>, ArrayList<Tuple2<K, V>>, TreeMap<K, V>> collector() { return collector((Comparator<? super K> & Serializable) K::compareTo); } /** * Returns a {@link Collector} which may be used in conjunction with * {@link java.util.stream.Stream#collect(Collector)} to obtain a * {@link TreeMap}. * * @param <K> The key type * @param <V> The value type * @param keyComparator A key comparator * @return A {@link TreeMap} Collector. */ public static <K, V> Collector<Tuple2<K, V>, ArrayList<Tuple2<K, V>>, TreeMap<K, V>> collector(Comparator<? super K> keyComparator) { Objects.requireNonNull(keyComparator, "keyComparator is null"); final Supplier<ArrayList<Tuple2<K, V>>> supplier = ArrayList::new; final BiConsumer<ArrayList<Tuple2<K, V>>, Tuple2<K, V>> accumulator = ArrayList::add; final BinaryOperator<ArrayList<Tuple2<K, V>>> combiner = (left, right) -> { left.addAll(right); return left; }; final Function<ArrayList<Tuple2<K, V>>, TreeMap<K, V>> finisher = list -> ofEntries(keyComparator, list); return Collector.of(supplier, accumulator, combiner, finisher); } /** * Returns the empty TreeMap. The underlying key comparator is the natural comparator of K. * * @param <K> The key type * @param <V> The value type * @return A new empty TreeMap. */ public static <K extends Comparable<? super K>, V> TreeMap<K, V> empty() { return empty((Comparator<? super K> & Serializable) K::compareTo); } /** * Returns the empty TreeMap using the given key comparator. * * @param <K> The key type * @param <V> The value type * @param keyComparator The comparator used to sort the entries by their key. * @return A new empty TreeMap. */ public static <K, V> TreeMap<K, V> empty(Comparator<? super K> keyComparator) { Objects.requireNonNull(keyComparator, "keyComparator is null"); return new TreeMap<>(RedBlackTree.empty(new EntryComparator<>(keyComparator))); } /** * Narrows a widened {@code TreeMap<? extends K, ? extends V>} to {@code TreeMap<K, V>} * by performing a type-safe cast. This is eligible because immutable/read-only * collections are covariant. * <p> * CAUTION: If {@code K} is narrowed, the underlying {@code Comparator} might fail! * * @param treeMap A {@code TreeMap}. 
* @param <K> Key type * @param <V> Value type * @return the given {@code treeMap} instance as narrowed type {@code TreeMap<K, V>}. */ @SuppressWarnings("unchecked") public static <K, V> TreeMap<K, V> narrow(TreeMap<? extends K, ? extends V> treeMap) { return (TreeMap<K, V>) treeMap; } /** * Returns a singleton {@code TreeMap}, i.e. a {@code TreeMap} of one entry. * The underlying key comparator is the natural comparator of K. * * @param <K> The key type * @param <V> The value type * @param entry A map entry. * @return A new TreeMap containing the given entry. */ public static <K extends Comparable<? super K>, V> TreeMap<K, V> of(Tuple2<? extends K, ? extends V> entry) { return of((Comparator<? super K> & Serializable) K::compareTo, entry); } /** * Returns a {@code TreeMap}, from a source java.util.Map. * * @param map A map entry. * @param <K> The key type * @param <V> The value type * @return A new Map containing the given map */ public static <K extends Comparable<? super K>, V> TreeMap<K, V> ofAll(java.util.Map<? extends K, ? extends V> map) { Objects.requireNonNull(map, "map is null"); RedBlackTree<Tuple2<K, V>> result = RedBlackTree.empty(new EntryComparator<>((Comparator<? super K> & Serializable) K::compareTo)); for (java.util.Map.Entry<? extends K, ? extends V> entry : map.entrySet()) { result = result.insert(Tuple.of(entry.getKey(), entry.getValue())); } return new TreeMap<>(result); } /** * Returns a singleton {@code TreeMap}, i.e. a {@code TreeMap} of one entry using a specific key comparator. * * @param <K> The key type * @param <V> The value type * @param entry A map entry. * @param keyComparator The comparator used to sort the entries by their key. * @return A new TreeMap containing the given entry. */ public static <K, V> TreeMap<K, V> of(Comparator<? super K> keyComparator, Tuple2<? extends K, ? extends V> entry) { Objects.requireNonNull(keyComparator, "keyComparator is null"); Objects.requireNonNull(entry, "entry is null"); return TreeMap.<K, V> empty(keyComparator).put(entry); } /** * Returns a singleton {@code TreeMap}, i.e. a {@code TreeMap} of one element. * * @param key A singleton map key. * @param value A singleton map value. * @param <K> The key type * @param <V> The value type * @return A new Map containing the given entry */ public static <K extends Comparable<? super K>, V> TreeMap<K, V> of(K key, V value) { return of((Comparator<? super K> & Serializable) K::compareTo, key, value); } /** * Creates a {@code TreeMap} of the given list of key-value pairs. * * @param <K> The key type * @param <V> The value type * @return A new Map containing the given entries */ public static <K extends Comparable<? super K>, V> TreeMap<K, V> of(K k1, V v1, K k2, V v2) { return of((Comparator<? super K> & Serializable) K::compareTo, k1, v1, k2, v2); } /** * Creates a {@code TreeMap} of the given list of key-value pairs. * * @param <K> The key type * @param <V> The value type * @return A new Map containing the given entries */ public static <K extends Comparable<? super K>, V> TreeMap<K, V> of(K k1, V v1, K k2, V v2, K k3, V v3) { return of((Comparator<? super K> & Serializable) K::compareTo, k1, v1, k2, v2, k3, v3); } /** * Creates a {@code TreeMap} of the given list of key-value pairs. * * @param <K> The key type * @param <V> The value type * @return A new Map containing the given entries */ public static <K extends Comparable<? super K>, V> TreeMap<K, V> of(K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4) { return of((Comparator<? 
super K> & Serializable) K::compareTo, k1, v1, k2, v2, k3, v3, k4, v4); } /** * Creates a {@code TreeMap} of the given list of key-value pairs. * * @param <K> The key type * @param <V> The value type * @return A new Map containing the given entries */ public static <K extends Comparable<? super K>, V> TreeMap<K, V> of(K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5) { return of((Comparator<? super K> & Serializable) K::compareTo, k1, v1, k2, v2, k3, v3, k4, v4, k5, v5); } /** * Creates a {@code TreeMap} of the given list of key-value pairs. * * @param <K> The key type * @param <V> The value type * @return A new Map containing the given entries */ public static <K extends Comparable<? super K>, V> TreeMap<K, V> of(K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5, K k6, V v6) { return of((Comparator<? super K> & Serializable) K::compareTo, k1, v1, k2, v2, k3, v3, k4, v4, k5, v5, k6, v6); } /** * Creates a {@code TreeMap} of the given list of key-value pairs. * * @param <K> The key type * @param <V> The value type * @return A new Map containing the given entries */ public static <K extends Comparable<? super K>, V> TreeMap<K, V> of(K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5, K k6, V v6, K k7, V v7) { return of((Comparator<? super K> & Serializable) K::compareTo, k1, v1, k2, v2, k3, v3, k4, v4, k5, v5, k6, v6, k7, v7); } /** * Creates a {@code TreeMap} of the given list of key-value pairs. * * @param <K> The key type * @param <V> The value type * @return A new Map containing the given entries */ public static <K extends Comparable<? super K>, V> TreeMap<K, V> of(K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5, K k6, V v6, K k7, V v7, K k8, V v8) { return of((Comparator<? super K> & Serializable) K::compareTo, k1, v1, k2, v2, k3, v3, k4, v4, k5, v5, k6, v6, k7, v7, k8, v8); } /** * Creates a {@code TreeMap} of the given list of key-value pairs. * * @param <K> The key type * @param <V> The value type * @return A new Map containing the given entries */ public static <K extends Comparable<? super K>, V> TreeMap<K, V> of(K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5, K k6, V v6, K k7, V v7, K k8, V v8, K k9, V v9) { return of((Comparator<? super K> & Serializable) K::compareTo, k1, v1, k2, v2, k3, v3, k4, v4, k5, v5, k6, v6, k7, v7, k8, v8, k9, v9); } /** * Creates a {@code TreeMap} of the given list of key-value pairs. * * @param <K> The key type * @param <V> The value type * @return A new Map containing the given entries */ public static <K extends Comparable<? super K>, V> TreeMap<K, V> of(K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5, K k6, V v6, K k7, V v7, K k8, V v8, K k9, V v9, K k10, V v10) { return of((Comparator<? super K> & Serializable) K::compareTo, k1, v1, k2, v2, k3, v3, k4, v4, k5, v5, k6, v6, k7, v7, k8, v8, k9, v9, k10, v10); } /** * Returns a singleton {@code TreeMap}, i.e. a {@code TreeMap} of one element. * * @param key A singleton map key. * @param value A singleton map value. * @param <K> The key type * @param <V> The value type * @param keyComparator The comparator used to sort the entries by their key. * @return A new Map containing the given entry */ public static <K, V> TreeMap<K, V> of(Comparator<? super K> keyComparator, K key, V value) { Objects.requireNonNull(keyComparator, "keyComparator is null"); return TreeMap.<K, V> empty(keyComparator).put(key, value); } /** * Creates a {@code TreeMap} of the given list of key-value pairs. 
* * @param keyComparator The comparator used to sort the entries by their key. * @param <K> The key type * @param <V> The value type * @return A new Map containing the given entries */ public static <K, V> TreeMap<K, V> of(Comparator<? super K> keyComparator, K k1, V v1, K k2, V v2) { return of(keyComparator, k1, v1).put(k2, v2); } /** * Creates a {@code TreeMap} of the given list of key-value pairs. * * @param keyComparator The comparator used to sort the entries by their key. * @param <K> The key type * @param <V> The value type * @return A new Map containing the given entries */ public static <K, V> TreeMap<K, V> of(Comparator<? super K> keyComparator, K k1, V v1, K k2, V v2, K k3, V v3) { return of(keyComparator, k1, v1, k2, v2).put(k3, v3); } /** * Creates a {@code TreeMap} of the given list of key-value pairs. * * @param keyComparator The comparator used to sort the entries by their key. * @param <K> The key type * @param <V> The value type * @return A new Map containing the given entries */ public static <K, V> TreeMap<K, V> of(Comparator<? super K> keyComparator, K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4) { return of(keyComparator, k1, v1, k2, v2, k3, v3).put(k4, v4); } /** * Creates a {@code TreeMap} of the given list of key-value pairs. * * @param keyComparator The comparator used to sort the entries by their key. * @param <K> The key type * @param <V> The value type * @return A new Map containing the given entries */ public static <K, V> TreeMap<K, V> of(Comparator<? super K> keyComparator, K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5) { return of(keyComparator, k1, v1, k2, v2, k3, v3, k4, v4).put(k5, v5); } /** * Creates a {@code TreeMap} of the given list of key-value pairs. * * @param keyComparator The comparator used to sort the entries by their key. * @param <K> The key type * @param <V> The value type * @return A new Map containing the given entries */ public static <K, V> TreeMap<K, V> of(Comparator<? super K> keyComparator, K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5, K k6, V v6) { return of(keyComparator, k1, v1, k2, v2, k3, v3, k4, v4, k5, v5).put(k6, v6); } /** * Creates a {@code TreeMap} of the given list of key-value pairs. * * @param keyComparator The comparator used to sort the entries by their key. * @param <K> The key type * @param <V> The value type * @return A new Map containing the given entries */ public static <K, V> TreeMap<K, V> of(Comparator<? super K> keyComparator, K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5, K k6, V v6, K k7, V v7) { return of(keyComparator, k1, v1, k2, v2, k3, v3, k4, v4, k5, v5, k6, v6).put(k7, v7); } /** * Creates a {@code TreeMap} of the given list of key-value pairs. * * @param keyComparator The comparator used to sort the entries by their key. * @param <K> The key type * @param <V> The value type * @return A new Map containing the given entries */ public static <K, V> TreeMap<K, V> of(Comparator<? super K> keyComparator, K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5, K k6, V v6, K k7, V v7, K k8, V v8) { return of(keyComparator, k1, v1, k2, v2, k3, v3, k4, v4, k5, v5, k6, v6, k7, v7).put(k8, v8); } /** * Creates a {@code TreeMap} of the given list of key-value pairs. * * @param keyComparator The comparator used to sort the entries by their key. * @param <K> The key type * @param <V> The value type * @return A new Map containing the given entries */ public static <K, V> TreeMap<K, V> of(Comparator<? 
super K> keyComparator, K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5, K k6, V v6, K k7, V v7, K k8, V v8, K k9, V v9) { return of(keyComparator, k1, v1, k2, v2, k3, v3, k4, v4, k5, v5, k6, v6, k7, v7, k8, v8).put(k9, v9); } /** * Creates a {@code TreeMap} of the given list of key-value pairs. * * @param keyComparator The comparator used to sort the entries by their key. * @param <K> The key type * @param <V> The value type * @return A new Map containing the given entries */ public static <K, V> TreeMap<K, V> of(Comparator<? super K> keyComparator, K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5, K k6, V v6, K k7, V v7, K k8, V v8, K k9, V v9, K k10, V v10) { return of(keyComparator, k1, v1, k2, v2, k3, v3, k4, v4, k5, v5, k6, v6, k7, v7, k8, v8, k9, v9).put(k10, v10); } /** * Returns a TreeMap containing {@code n} values of a given Function {@code f} * over a range of integer values from 0 to {@code n - 1}. * * @param <K> The key type * @param <V> The value type * @param keyComparator The comparator used to sort the entries by their key * @param n The number of elements in the TreeMap * @param f The Function computing element values * @return A TreeMap consisting of elements {@code f(0),f(1), ..., f(n - 1)} * @throws NullPointerException if {@code keyComparator} or {@code f} are null */ @SuppressWarnings("unchecked") public static <K, V> TreeMap<K, V> tabulate(Comparator<? super K> keyComparator, int n, Function<? super Integer, ? extends Tuple2<? extends K, ? extends V>> f) { Objects.requireNonNull(keyComparator, "keyComparator is null"); Objects.requireNonNull(f, "f is null"); return ofEntries(keyComparator, Collections.tabulate(n, (Function<? super Integer, ? extends Tuple2<K, V>>) f)); } /** * Returns a TreeMap containing {@code n} values of a given Function {@code f} * over a range of integer values from 0 to {@code n - 1}. * The underlying key comparator is the natural comparator of K. * * @param <K> The key type * @param <V> The value type * @param n The number of elements in the TreeMap * @param f The Function computing element values * @return A TreeMap consisting of elements {@code f(0),f(1), ..., f(n - 1)} * @throws NullPointerException if {@code f} is null */ public static <K extends Comparable<? super K>, V> TreeMap<K, V> tabulate(int n, Function<? super Integer, ? extends Tuple2<? extends K, ? extends V>> f) { Objects.requireNonNull(f, "f is null"); return tabulate((Comparator<? super K> & Serializable) K::compareTo, n, f); } /** * Returns a TreeMap containing {@code n} values supplied by a given Supplier {@code s}. * * @param <K> The key type * @param <V> The value type * @param keyComparator The comparator used to sort the entries by their key * @param n The number of elements in the TreeMap * @param s The Supplier computing element values * @return A TreeMap of size {@code n}, where each element contains the result supplied by {@code s}. * @throws NullPointerException if {@code keyComparator} or {@code s} are null */ @SuppressWarnings("unchecked") public static <K, V> TreeMap<K, V> fill(Comparator<? super K> keyComparator, int n, Supplier<? extends Tuple2<? extends K, ? extends V>> s) { Objects.requireNonNull(keyComparator, "keyComparator is null"); Objects.requireNonNull(s, "s is null"); return ofEntries(keyComparator, Collections.fill(n, (Supplier<? extends Tuple2<K, V>>) s)); } /** * Returns a TreeMap containing {@code n} values supplied by a given Supplier {@code s}. * The underlying key comparator is the natural comparator of K. 
* * @param <K> The key type * @param <V> The value type * @param n The number of elements in the TreeMap * @param s The Supplier computing element values * @return A TreeMap of size {@code n}, where each element contains the result supplied by {@code s}. * @throws NullPointerException if {@code s} is null */ public static <K extends Comparable<? super K>, V> TreeMap<K, V> fill(int n, Supplier<? extends Tuple2<? extends K, ? extends V>> s) { Objects.requireNonNull(s, "s is null"); return fill((Comparator<? super K> & Serializable) K::compareTo, n, s); } /** * Creates a {@code TreeMap} of the given entries using the natural key comparator. * * @param <K> The key type * @param <V> The value type * @param entries Map entries * @return A new TreeMap containing the given entries. */ @SuppressWarnings("varargs") @SafeVarargs public static <K extends Comparable<? super K>, V> TreeMap<K, V> ofEntries(Tuple2<? extends K, ? extends V>... entries) { return ofEntries((Comparator<? super K> & Serializable) K::compareTo, entries); } /** * Creates a {@code TreeMap} of the given entries using the natural key comparator. * * @param <K> The key type * @param <V> The value type * @param entries Map entries * @return A new TreeMap containing the given entries. */ @SuppressWarnings("varargs") @SafeVarargs public static <K extends Comparable<? super K>, V> TreeMap<K, V> ofEntries(java.util.Map.Entry<? extends K, ? extends V>... entries) { return ofEntries((Comparator<? super K> & Serializable) K::compareTo, entries); } /** * Creates a {@code TreeMap} of the given entries using the given key comparator. * * @param <K> The key type * @param <V> The value type * @param entries Map entries * @param keyComparator A key comparator * @return A new TreeMap containing the given entries. */ @SuppressWarnings("unchecked") @SafeVarargs public static <K, V> TreeMap<K, V> ofEntries(Comparator<? super K> keyComparator, Tuple2<? extends K, ? extends V>... entries) { Objects.requireNonNull(keyComparator, "keyComparator is null"); Objects.requireNonNull(entries, "entries is null"); RedBlackTree<Tuple2<K, V>> tree = RedBlackTree.empty(new EntryComparator<>(keyComparator)); for (Tuple2<? extends K, ? extends V> entry : entries) { tree = tree.insert((Tuple2<K, V>) entry); } return tree.isEmpty() ? empty(keyComparator) : new TreeMap<>(tree); } /** * Creates a {@code TreeMap} of the given entries using the given key comparator. * * @param <K> The key type * @param <V> The value type * @param entries Map entries * @param keyComparator A key comparator * @return A new TreeMap containing the given entries. */ @SafeVarargs public static <K, V> TreeMap<K, V> ofEntries(Comparator<? super K> keyComparator, java.util.Map.Entry<? extends K, ? extends V>... entries) { Objects.requireNonNull(keyComparator, "keyComparator is null"); Objects.requireNonNull(entries, "entries is null"); RedBlackTree<Tuple2<K, V>> tree = RedBlackTree.empty(new EntryComparator<>(keyComparator)); for (java.util.Map.Entry<? extends K, ? extends V> entry : entries) { tree = tree.insert(Tuple.of(entry.getKey(), entry.getValue())); } return tree.isEmpty() ? empty(keyComparator) : new TreeMap<>(tree); } /** * Creates a {@code TreeMap} of the given entries. * * @param <K> The key type * @param <V> The value type * @param entries Map entries * @return A new TreeMap containing the given entries. */ public static <K extends Comparable<? super K>, V> TreeMap<K, V> ofEntries(Iterable<? extends Tuple2<? extends K, ? extends V>> entries) { return ofEntries((Comparator<? 
super K> & Serializable) K::compareTo, entries); } /** * Creates a {@code TreeMap} of the given entries. * * @param <K> The key type * @param <V> The value type * @param entries Map entries * @param keyComparator A key comparator * @return A new TreeMap containing the given entries. */ @SuppressWarnings("unchecked") public static <K, V> TreeMap<K, V> ofEntries(Comparator<? super K> keyComparator, Iterable<? extends Tuple2<? extends K, ? extends V>> entries) { Objects.requireNonNull(keyComparator, "keyComparator is null"); Objects.requireNonNull(entries, "entries is null"); if (entries instanceof TreeMap) { return (TreeMap<K, V>) entries; } else { RedBlackTree<Tuple2<K, V>> tree = RedBlackTree.empty(new EntryComparator<>(keyComparator)); for (Tuple2<? extends K, ? extends V> entry : entries) { tree = tree.insert((Tuple2<K, V>) entry); } return new TreeMap<>(tree); } } @Override public <K2, V2> TreeMap<K2, V2> bimap(Function<? super K, ? extends K2> keyMapper, Function<? super V, ? extends V2> valueMapper) { return bimap(naturalComparator(), keyMapper, valueMapper); } @Override public Tuple2<V, TreeMap<K, V>> computeIfAbsent(K key, Function<? super K, ? extends V> mappingFunction) { return Maps.computeIfAbsent(this, key, mappingFunction); } @Override public Tuple2<Option<V>, TreeMap<K, V>> computeIfPresent(K key, BiFunction<? super K, ? super V, ? extends V> remappingFunction) { return Maps.computeIfPresent(this, key, remappingFunction); } @Override public <K2, V2> TreeMap<K2, V2> bimap(Comparator<? super K2> keyComparator, Function<? super K, ? extends K2> keyMapper, Function<? super V, ? extends V2> valueMapper) { Objects.requireNonNull(keyMapper, "keyMapper is null"); Objects.requireNonNull(valueMapper, "valueMapper is null"); return createTreeMap(new EntryComparator<>(keyComparator), entries.iterator().map(entry -> Tuple.of(keyMapper.apply(entry._1), valueMapper.apply(entry._2)))); } @Override public boolean containsKey(K key) { final V ignored = null; return entries.contains(new Tuple2<>(key, ignored)); } @Override public TreeMap<K, V> distinct() { return Maps.distinct(this); } @Override public TreeMap<K, V> distinctBy(Comparator<? super Tuple2<K, V>> comparator) { return Maps.distinctBy(this, this::createFromEntries, comparator); } @Override public <U> TreeMap<K, V> distinctBy(Function<? super Tuple2<K, V>, ? extends U> keyExtractor) { return Maps.distinctBy(this, this::createFromEntries, keyExtractor); } @Override public TreeMap<K, V> drop(int n) { return Maps.drop(this, this::createFromEntries, this::emptyInstance, n); } @Override public TreeMap<K, V> dropRight(int n) { return Maps.dropRight(this, this::createFromEntries, this::emptyInstance, n); } @Override public TreeMap<K, V> dropUntil(Predicate<? super Tuple2<K, V>> predicate) { return Maps.dropUntil(this, this::createFromEntries, predicate); } @Override public TreeMap<K, V> dropWhile(Predicate<? super Tuple2<K, V>> predicate) { return Maps.dropWhile(this, this::createFromEntries, predicate); } @Override public TreeMap<K, V> filter(BiPredicate<? super K, ? super V> predicate) { return Maps.filter(this, this::createFromEntries, predicate); } @Override public TreeMap<K, V> filter(Predicate<? super Tuple2<K, V>> predicate) { return Maps.filter(this, this::createFromEntries, predicate); } @Override public TreeMap<K, V> filterKeys(Predicate<? super K> predicate) { return Maps.filterKeys(this, this::createFromEntries, predicate); } @Override public TreeMap<K, V> filterValues(Predicate<? 
super V> predicate) { return Maps.filterValues(this, this::createFromEntries, predicate); } @Override public <K2, V2> TreeMap<K2, V2> flatMap(BiFunction<? super K, ? super V, ? extends Iterable<Tuple2<K2, V2>>> mapper) { return flatMap(naturalComparator(), mapper); } @Override public <K2, V2> TreeMap<K2, V2> flatMap(Comparator<? super K2> keyComparator, BiFunction<? super K, ? super V, ? extends Iterable<Tuple2<K2, V2>>> mapper) { Objects.requireNonNull(mapper, "mapper is null"); return createTreeMap(new EntryComparator<>(keyComparator), entries.iterator().flatMap(entry -> mapper.apply(entry._1, entry._2))); } @Override public Option<V> get(K key) { final V ignored = null; return entries.find(new Tuple2<>(key, ignored)).map(Tuple2::_2); } @Override public V getOrElse(K key, V defaultValue) { return get(key).getOrElse(defaultValue); } @Override public <C> Map<C, TreeMap<K, V>> groupBy(Function<? super Tuple2<K, V>, ? extends C> classifier) { return Maps.groupBy(this, this::createFromEntries, classifier); } @Override public Iterator<TreeMap<K, V>> grouped(int size) { return Maps.grouped(this, this::createFromEntries, size); } @Override public Tuple2<K, V> head() { if (isEmpty()) { throw new NoSuchElementException("head of empty TreeMap"); } else { return entries.min().get(); } } @Override public TreeMap<K, V> init() { if (isEmpty()) { throw new UnsupportedOperationException("init of empty TreeMap"); } else { final Tuple2<K, V> max = entries.max().get(); return new TreeMap<>(entries.delete(max)); } } @Override public Option<TreeMap<K, V>> initOption() { return Maps.initOption(this); } @Override public boolean isEmpty() { return entries.isEmpty(); } @Override public Iterator<Tuple2<K, V>> iterator() { return entries.iterator(); } @SuppressWarnings("unchecked") @Override public Comparator<K> keyComparator() { return ((EntryComparator<K, V>) entries.comparator()).keyComparator; } @Override public SortedSet<K> keySet() { return TreeSet.ofAll(keyComparator(), iterator().map(Tuple2::_1)); } @Override public <K2, V2> TreeMap<K2, V2> map(BiFunction<? super K, ? super V, Tuple2<K2, V2>> mapper) { return map(naturalComparator(), mapper); } @Override public <K2, V2> TreeMap<K2, V2> map(Comparator<? super K2> keyComparator, BiFunction<? super K, ? super V, Tuple2<K2, V2>> mapper) { Objects.requireNonNull(mapper, "mapper is null"); return createTreeMap(new EntryComparator<>(keyComparator), entries.iterator().map(entry -> mapper.apply(entry._1, entry._2))); } @Override public <K2> TreeMap<K2, V> mapKeys(Function<? super K, ? extends K2> keyMapper) { Objects.requireNonNull(keyMapper, "keyMapper is null"); return map((k, v) -> Tuple.of(keyMapper.apply(k), v)); } @Override public <K2> TreeMap<K2, V> mapKeys(Function<? super K, ? extends K2> keyMapper, BiFunction<? super V, ? super V, ? extends V> valueMerge) { final Comparator<K2> comparator = naturalComparator(); return Collections.mapKeys(this, TreeMap.<K2, V> empty(comparator), keyMapper, valueMerge); } @Override public <W> TreeMap<K, W> mapValues(Function<? super V, ? extends W> valueMapper) { Objects.requireNonNull(valueMapper, "valueMapper is null"); return map(keyComparator(), (k, v) -> Tuple.of(k, valueMapper.apply(v))); } @Override public TreeMap<K, V> merge(Map<? extends K, ? extends V> that) { return Maps.merge(this, this::createFromEntries, that); } @Override public <U extends V> TreeMap<K, V> merge(Map<? extends K, U> that, BiFunction<? super V, ? super U, ? 
extends V> collisionResolution) { return Maps.merge(this, this::createFromEntries, that, collisionResolution); } @Override public Tuple2<TreeMap<K, V>, TreeMap<K, V>> partition(Predicate<? super Tuple2<K, V>> predicate) { return Maps.partition(this, this::createFromEntries, predicate); } @Override public TreeMap<K, V> peek(Consumer<? super Tuple2<K, V>> action) { return Maps.peek(this, action); } @Override public <U extends V> TreeMap<K, V> put(K key, U value, BiFunction<? super V, ? super U, ? extends V> merge) { return Maps.put(this, key, value, merge); } @Override public TreeMap<K, V> put(K key, V value) { return new TreeMap<>(entries.insert(new Tuple2<>(key, value))); } @Override public TreeMap<K, V> put(Tuple2<? extends K, ? extends V> entry) { return Maps.put(this, entry); } @Override public <U extends V> TreeMap<K, V> put(Tuple2<? extends K, U> entry, BiFunction<? super V, ? super U, ? extends V> merge) { return Maps.put(this, entry, merge); } @Override public TreeMap<K, V> remove(K key) { final V ignored = null; final Tuple2<K, V> entry = new Tuple2<>(key, ignored); if (entries.contains(entry)) { return new TreeMap<>(entries.delete(entry)); } else { return this; } } @Override public TreeMap<K, V> removeAll(BiPredicate<? super K, ? super V> predicate) { return Maps.removeAll(this, this::createFromEntries, predicate); } @Override public TreeMap<K, V> removeAll(Iterable<? extends K> keys) { final V ignored = null; RedBlackTree<Tuple2<K, V>> removed = entries; for (K key : keys) { final Tuple2<K, V> entry = new Tuple2<>(key, ignored); if (removed.contains(entry)) { removed = removed.delete(entry); } } if (removed.size() == entries.size()) { return this; } else { return new TreeMap<>(removed); } } @Override public TreeMap<K, V> removeKeys(Predicate<? super K> predicate) { return Maps.removeKeys(this, this::createFromEntries, predicate); } @Override public TreeMap<K, V> removeValues(Predicate<? super V> predicate) { return Maps.removeValues(this, this::createFromEntries, predicate); } @Override public TreeMap<K, V> replace(Tuple2<K, V> currentElement, Tuple2<K, V> newElement) { return Maps.replace(this, currentElement, newElement); } @Override public TreeMap<K, V> replaceAll(Tuple2<K, V> currentElement, Tuple2<K, V> newElement) { return Maps.replaceAll(this, currentElement, newElement); } @Override public TreeMap<K, V> retainAll(Iterable<? extends Tuple2<K, V>> elements) { Objects.requireNonNull(elements, "elements is null"); RedBlackTree<Tuple2<K, V>> tree = RedBlackTree.empty(entries.comparator()); for (Tuple2<K, V> entry : elements) { if (contains(entry)) { tree = tree.insert(entry); } } return new TreeMap<>(tree); } @Override public TreeMap<K, V> scan( Tuple2<K, V> zero, BiFunction<? super Tuple2<K, V>, ? super Tuple2<K, V>, ? extends Tuple2<K, V>> operation) { return Maps.scan(this, this::emptyInstance, zero, operation); } @Override public int size() { return entries.size(); } @Override public Iterator<TreeMap<K, V>> sliding(int size) { return Maps.sliding(this, this::createFromEntries, size); } @Override public Iterator<TreeMap<K, V>> sliding(int size, int step) { return Maps.sliding(this, this::createFromEntries, size, step); } @Override public Tuple2<TreeMap<K, V>, TreeMap<K, V>> span(Predicate<? 
super Tuple2<K, V>> predicate) { return Maps.span(this, this::createFromEntries, predicate); } @Override public TreeMap<K, V> tail() { if (isEmpty()) { throw new UnsupportedOperationException("tail of empty TreeMap"); } else { final Tuple2<K, V> min = entries.min().get(); return new TreeMap<>(entries.delete(min)); } } @Override public Option<TreeMap<K, V>> tailOption() { return Maps.tailOption(this); } @Override public TreeMap<K, V> take(int n) { return Maps.take(this, this::createFromEntries, n); } @Override public TreeMap<K, V> takeRight(int n) { return Maps.takeRight(this, this::createFromEntries, n); } @Override public TreeMap<K, V> takeUntil(Predicate<? super Tuple2<K, V>> predicate) { return Maps.takeUntil(this, this::createFromEntries, predicate); } @Override public TreeMap<K, V> takeWhile(Predicate<? super Tuple2<K, V>> predicate) { return Maps.takeWhile(this, this::createFromEntries, predicate); } @Override public java.util.TreeMap<K, V> toJavaMap() { return toJavaMap(() -> new java.util.TreeMap<>(keyComparator()), t -> t); } @Override public Seq<V> values() { return iterator().map(Tuple2::_2).toStream(); } /** * Internal factory method, used with Tuple2 comparator instead of a key comparator. * * @param comparator An Tuple2 comparator * @param entries Map entries * @param <K> Key type * @param <V> Value type * @return A new TreeMap. */ @SuppressWarnings("unchecked") private static <K, V> TreeMap<K, V> createTreeMap(Comparator<? super Tuple2<K, V>> comparator, Iterable<? extends Tuple2<? extends K, ? extends V>> entries) { RedBlackTree<Tuple2<K, V>> tree = RedBlackTree.empty(comparator); for (Tuple2<? extends K, ? extends V> entry : entries) { tree = tree.insert((Tuple2<K, V>) entry); } return tree.isEmpty() ? (TreeMap<K, V>) empty() : new TreeMap<>(tree); } // -- Object @Override public boolean equals(Object o) { if (o == this) { return true; } else if (o instanceof TreeMap) { final TreeMap<?, ?> that = (TreeMap<?, ?>) o; return entries.equals(that.entries); } else { return false; } } @Override public int hashCode() { return entries.hashCode(); } @Override public String stringPrefix() { return "TreeMap"; } @Override public String toString() { return mkString(stringPrefix() + "(", ", ", ")"); } private TreeMap<K, V> createFromEntries(Iterable<Tuple2<K, V>> tuples) { return createTreeMap(entries.comparator(), tuples); } private TreeMap<K, V> emptyInstance() { return isEmpty() ? this : new TreeMap<>(entries.emptyInstance()); } /** * Used to compare entries by key and store the keyComparator for later access. * * @param <K> key type * @param <V> value type, needed at compile time for the Comparator interface */ static class EntryComparator<K, V> implements Comparator<Tuple2<K, V>>, Serializable { private static final long serialVersionUID = 1L; final Comparator<K> keyComparator; @SuppressWarnings("unchecked") EntryComparator(Comparator<? super K> keyComparator) { this.keyComparator = (Comparator<K>) keyComparator; } @Override public int compare(Tuple2<K, V> e1, Tuple2<K, V> e2) { return keyComparator.compare(e1._1, e2._1); } } }
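// -- Usage sketch (editorial addition, not part of the original javaslang sources): a minimal example of the
// -- TreeMap factory and query API defined above. The example class name is hypothetical; it only exercises
// -- ofEntries, put, get, head and keySet, once with the natural key comparator and once with an explicit one.
class TreeMapUsageExample {
    public static void main(String[] args) {
        // Natural key order: entries are kept sorted by key, so head() yields the smallest key.
        TreeMap<Integer, String> byKey = TreeMap.ofEntries(
                javaslang.Tuple.of(2, "two"),
                javaslang.Tuple.of(1, "one"));
        System.out.println(byKey.head());                    // (1, one)
        System.out.println(byKey.get(2));                    // Some(two)
        System.out.println(byKey.put(3, "three").keySet());  // TreeSet(1, 2, 3)

        // Explicit key comparator: the same API, but entries are iterated in descending key order.
        java.util.Comparator<String> descending = java.util.Comparator.reverseOrder();
        TreeMap<String, Integer> reversed = TreeMap.ofEntries(descending,
                javaslang.Tuple.of("a", 1),
                javaslang.Tuple.of("b", 2));
        System.out.println(reversed.head());                 // (b, 2)
    }
}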
package javaslang.control; import javaslang.*; import javaslang.collection.Iterator; import javaslang.collection.List; import javaslang.collection.Seq; import java.util.NoSuchElementException; import java.util.Objects; import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Predicate; import java.util.function.Supplier; public interface Validation<E, T> extends Value<T> { /** * Creates a {@link Valid} that contains the given {@code value}. * * @param <E> type of the error * @param <T> type of the given {@code value} * @param value A value * @return {@code Valid(value)} * @throws NullPointerException if value is null */ static <E, T> Validation<E, T> valid(T value) { Objects.requireNonNull(value, "value is null"); return new Valid<>(value); } /** * Creates an {@link Invalid} that contains the given {@code error}. * * @param <E> type of the given {@code error} * @param <T> type of the value * @param error An error * @return {@code Invalid(error)} * @throws NullPointerException if error is null */ static <E, T> Validation<E, T> invalid(E error) { Objects.requireNonNull(error, "error is null"); return new Invalid<>(error); } /** * Creates a {@code Validation} of an {@code Either}. * * @param either An {@code Either} * @param <E> type of the given {@code error} * @param <T> type of the value * @return A {@code Valid(either.get())} if either is a Right, otherwise {@code Invalid(either.getLeft())}. * @throws NullPointerException if either is null */ static <E, T> Validation<E, T> fromEither(Either<E, T> either) { Objects.requireNonNull(either, "either is null"); return either.isRight() ? valid(either.get()) : invalid(either.getLeft()); } /** * Reduces many {@code Validation} instances into a single {@code Validation} by transforming an * {@code Iterable<Validation<? extends T>>} into a {@code Validation<Seq<T>>}. * * @param <E> value type in the case of invalid * @param <T> value type in the case of valid * @param values An iterable of Validation instances. * @return A valid Validation of a sequence of values if all Validation instances are valid * or an invalid Validation containing an accumulated List of errors. * @throws NullPointerException if values is null */ static <E, T> Validation<List<E>, Seq<T>> sequence(Iterable<? extends Validation<List<E>, T>> values) { Objects.requireNonNull(values, "values is null"); List<E> errors = List.empty(); List<T> list = List.empty(); for (Validation<List<E>, T> value : values) { if (value.isInvalid()) { errors = errors.prependAll(value.getError().reverse()); } else if (errors.isEmpty()) { list = list.prepend(value.get()); } } return errors.isEmpty() ? valid(list.reverse()) : invalid(errors.reverse()); } /** * Narrows a widened {@code Validation<? extends E, ? extends T>} to {@code Validation<E, T>} * by performing a type safe-cast. This is eligible because immutable/read-only * collections are covariant. * * @param validation A {@code Validation}. * @param <E> type of error * @param <T> type of valid value * @return the given {@code validation} instance as narrowed type {@code Validation<E, T>}. */ @SuppressWarnings("unchecked") static <E, T> Validation<E, T> narrow(Validation<? extends E, ? extends T> validation) { return (Validation<E, T>) validation; } /** * Combines two {@code Validation}s into a {@link Builder}. 
* * @param <E> type of error * @param <T1> type of first valid value * @param <T2> type of second valid value * @param validation1 first validation * @param validation2 second validation * @return an instance of Builder&lt;E,T1,T2&gt; * @throws NullPointerException if validation1 or validation2 is null */ static <E, T1, T2> Builder<E, T1, T2> combine(Validation<E, T1> validation1, Validation<E, T2> validation2) { Objects.requireNonNull(validation1, "validation1 is null"); Objects.requireNonNull(validation2, "validation2 is null"); return new Builder<>(validation1, validation2); } /** * Combines three {@code Validation}s into a {@link Builder3}. * * @param <E> type of error * @param <T1> type of first valid value * @param <T2> type of second valid value * @param <T3> type of third valid value * @param validation1 first validation * @param validation2 second validation * @param validation3 third validation * @return an instance of Builder3&lt;E,T1,T2,T3&gt; * @throws NullPointerException if validation1, validation2 or validation3 is null */ static <E, T1, T2, T3> Builder3<E, T1, T2, T3> combine(Validation<E, T1> validation1, Validation<E, T2> validation2, Validation<E, T3> validation3) { Objects.requireNonNull(validation1, "validation1 is null"); Objects.requireNonNull(validation2, "validation2 is null"); Objects.requireNonNull(validation3, "validation3 is null"); return new Builder3<>(validation1, validation2, validation3); } /** * Combines four {@code Validation}s into a {@link Builder4}. * * @param <E> type of error * @param <T1> type of first valid value * @param <T2> type of second valid value * @param <T3> type of third valid value * @param <T4> type of fourth valid value * @param validation1 first validation * @param validation2 second validation * @param validation3 third validation * @param validation4 fourth validation * @return an instance of Builder4&lt;E,T1,T2,T3,T4&gt; * @throws NullPointerException if validation1, validation2, validation3 or validation4 is null */ static <E, T1, T2, T3, T4> Builder4<E, T1, T2, T3, T4> combine(Validation<E, T1> validation1, Validation<E, T2> validation2, Validation<E, T3> validation3, Validation<E, T4> validation4) { Objects.requireNonNull(validation1, "validation1 is null"); Objects.requireNonNull(validation2, "validation2 is null"); Objects.requireNonNull(validation3, "validation3 is null"); Objects.requireNonNull(validation4, "validation4 is null"); return new Builder4<>(validation1, validation2, validation3, validation4); } /** * Combines five {@code Validation}s into a {@link Builder5}. 
* * @param <E> type of error * @param <T1> type of first valid value * @param <T2> type of second valid value * @param <T3> type of third valid value * @param <T4> type of fourth valid value * @param <T5> type of fifth valid value * @param validation1 first validation * @param validation2 second validation * @param validation3 third validation * @param validation4 fourth validation * @param validation5 fifth validation * @return an instance of Builder5&lt;E,T1,T2,T3,T4,T5&gt; * @throws NullPointerException if validation1, validation2, validation3, validation4 or validation5 is null */ static <E, T1, T2, T3, T4, T5> Builder5<E, T1, T2, T3, T4, T5> combine(Validation<E, T1> validation1, Validation<E, T2> validation2, Validation<E, T3> validation3, Validation<E, T4> validation4, Validation<E, T5> validation5) { Objects.requireNonNull(validation1, "validation1 is null"); Objects.requireNonNull(validation2, "validation2 is null"); Objects.requireNonNull(validation3, "validation3 is null"); Objects.requireNonNull(validation4, "validation4 is null"); Objects.requireNonNull(validation5, "validation5 is null"); return new Builder5<>(validation1, validation2, validation3, validation4, validation5); } /** * Combines six {@code Validation}s into a {@link Builder6}. * * @param <E> type of error * @param <T1> type of first valid value * @param <T2> type of second valid value * @param <T3> type of third valid value * @param <T4> type of fourth valid value * @param <T5> type of fifth valid value * @param <T6> type of sixth valid value * @param validation1 first validation * @param validation2 second validation * @param validation3 third validation * @param validation4 fourth validation * @param validation5 fifth validation * @param validation6 sixth validation * @return an instance of Builder6&lt;E,T1,T2,T3,T4,T5,T6&gt; * @throws NullPointerException if validation1, validation2, validation3, validation4, validation5 or validation6 is null */ static <E, T1, T2, T3, T4, T5, T6> Builder6<E, T1, T2, T3, T4, T5, T6> combine(Validation<E, T1> validation1, Validation<E, T2> validation2, Validation<E, T3> validation3, Validation<E, T4> validation4, Validation<E, T5> validation5, Validation<E, T6> validation6) { Objects.requireNonNull(validation1, "validation1 is null"); Objects.requireNonNull(validation2, "validation2 is null"); Objects.requireNonNull(validation3, "validation3 is null"); Objects.requireNonNull(validation4, "validation4 is null"); Objects.requireNonNull(validation5, "validation5 is null"); Objects.requireNonNull(validation6, "validation6 is null"); return new Builder6<>(validation1, validation2, validation3, validation4, validation5, validation6); } /** * Combines seven {@code Validation}s into a {@link Builder7}. 
* * @param <E> type of error * @param <T1> type of first valid value * @param <T2> type of second valid value * @param <T3> type of third valid value * @param <T4> type of fourth valid value * @param <T5> type of fifth valid value * @param <T6> type of sixth valid value * @param <T7> type of seventh valid value * @param validation1 first validation * @param validation2 second validation * @param validation3 third validation * @param validation4 fourth validation * @param validation5 fifth validation * @param validation6 sixth validation * @param validation7 seventh validation * @return an instance of Builder7&lt;E,T1,T2,T3,T4,T5,T6,T7&gt; * @throws NullPointerException if validation1, validation2, validation3, validation4, validation5, validation6 or validation7 is null */ static <E, T1, T2, T3, T4, T5, T6, T7> Builder7<E, T1, T2, T3, T4, T5, T6, T7> combine(Validation<E, T1> validation1, Validation<E, T2> validation2, Validation<E, T3> validation3, Validation<E, T4> validation4, Validation<E, T5> validation5, Validation<E, T6> validation6, Validation<E, T7> validation7) { Objects.requireNonNull(validation1, "validation1 is null"); Objects.requireNonNull(validation2, "validation2 is null"); Objects.requireNonNull(validation3, "validation3 is null"); Objects.requireNonNull(validation4, "validation4 is null"); Objects.requireNonNull(validation5, "validation5 is null"); Objects.requireNonNull(validation6, "validation6 is null"); Objects.requireNonNull(validation7, "validation7 is null"); return new Builder7<>(validation1, validation2, validation3, validation4, validation5, validation6, validation7); } /** * Combines eight {@code Validation}s into a {@link Builder8}. * * @param <E> type of error * @param <T1> type of first valid value * @param <T2> type of second valid value * @param <T3> type of third valid value * @param <T4> type of fourth valid value * @param <T5> type of fifth valid value * @param <T6> type of sixth valid value * @param <T7> type of seventh valid value * @param <T8> type of eighth valid value * @param validation1 first validation * @param validation2 second validation * @param validation3 third validation * @param validation4 fourth validation * @param validation5 fifth validation * @param validation6 sixth validation * @param validation7 seventh validation * @param validation8 eighth validation * @return an instance of Builder8&lt;E,T1,T2,T3,T4,T5,T6,T7,T8&gt; * @throws NullPointerException if validation1, validation2, validation3, validation4, validation5, validation6, validation7 or validation8 is null */ static <E, T1, T2, T3, T4, T5, T6, T7, T8> Builder8<E, T1, T2, T3, T4, T5, T6, T7, T8> combine(Validation<E, T1> validation1, Validation<E, T2> validation2, Validation<E, T3> validation3, Validation<E, T4> validation4, Validation<E, T5> validation5, Validation<E, T6> validation6, Validation<E, T7> validation7, Validation<E, T8> validation8) { Objects.requireNonNull(validation1, "validation1 is null"); Objects.requireNonNull(validation2, "validation2 is null"); Objects.requireNonNull(validation3, "validation3 is null"); Objects.requireNonNull(validation4, "validation4 is null"); Objects.requireNonNull(validation5, "validation5 is null"); Objects.requireNonNull(validation6, "validation6 is null"); Objects.requireNonNull(validation7, "validation7 is null"); Objects.requireNonNull(validation8, "validation8 is null"); return new Builder8<>(validation1, validation2, validation3, validation4, validation5, validation6, validation7, validation8); } /** * Check whether this is 
of type {@code Valid} * * @return true if this is a Valid, false if this is an Invalid */ boolean isValid(); /** * Check whether this is of type {@code Invalid} * * @return true if this is an Invalid, false if this is a Valid */ boolean isInvalid(); /** * Returns this {@code Validation} if it is valid, otherwise returns the alternative. * * @param other An alternative {@code Validation} * @return this {@code Validation} if it is valid, otherwise the alternative. */ @SuppressWarnings("unchecked") default Validation<E, T> orElse(Validation<? extends E, ? extends T> other) { Objects.requireNonNull(other, "other is null"); return isValid() ? this : (Validation<E, T>) other; } /** * Returns this {@code Validation} if it is valid, otherwise returns the result of evaluating the supplier. * * @param supplier An alternative {@code Validation} supplier * @return this {@code Validation} if it is valid, otherwise the result of evaluating the supplier. */ @SuppressWarnings("unchecked") default Validation<E, T> orElse(Supplier<Validation<? extends E, ? extends T>> supplier) { Objects.requireNonNull(supplier, "supplier is null"); return isValid() ? this : (Validation<E, T>) supplier.get(); } @Override default boolean isEmpty() { return isInvalid(); } /** * Gets the value of this Validation if this is a Valid, or throws if this is an Invalid * * @return The value of this Validation * @throws NoSuchElementException if this is an Invalid */ @Override T get(); /** * Gets the error of this Validation if this is an Invalid, or throws if this is a Valid * * @return The error of this Invalid * @throws RuntimeException if this is a Valid */ E getError(); /** * Returns this as {@code Either}. * * @return {@code Either.right(get())} if this is valid, otherwise {@code Either.left(getError())}. */ default Either<E, T> toEither() { return isValid() ? Either.right(get()) : Either.left(getError()); } @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); /** * Performs the given action for the value contained in {@code Valid}, or does nothing * if this is an Invalid. * * @param action the action to be performed on the contained value * @throws NullPointerException if action is null */ @Override default void forEach(Consumer<? super T> action) { Objects.requireNonNull(action, "action is null"); if (isValid()) { action.accept(get()); } } /** * Performs the action in {@code fInvalid} on {@code error} if this is an {@code Invalid}, * or {@code fValid} on {@code value} if this is a {@code Valid}. * Returns an object of type U. * * <p> * <code> * For example:<br> * Validation&lt;List&lt;String&gt;,String&gt; valid = ...;<br> * Integer i = valid.fold(List::length, String::length); * </code> * </p> * * @param <U> the fold result type * @param fInvalid the invalid fold operation * @param fValid the valid fold operation * @return an instance of type U * @throws NullPointerException if fInvalid or fValid is null */ default <U> U fold(Function<? super E, ? extends U> fInvalid, Function<? super T, ? extends U> fValid) { Objects.requireNonNull(fInvalid, "function fInvalid null"); Objects.requireNonNull(fValid, "function fValid null"); if (isInvalid()) { E error = this.getError(); return fInvalid.apply(error); } else { T value = this.get(); return fValid.apply(value); } } /** * Flips the valid/invalid values for this Validation. If this is a Valid&lt;E,T&gt;, returns an Invalid&lt;T,E&gt;. * Or if this is an Invalid&lt;E,T&gt;, returns a Valid&lt;T,E&gt;. 
* * @return a flipped instance of Validation */ default Validation<T, E> swap() { if (isInvalid()) { E error = this.getError(); return Validation.valid(error); } else { T value = this.get(); return Validation.invalid(value); } } @Override default <U> Validation<E, U> map(Function<? super T, ? extends U> f) { Objects.requireNonNull(f, "function f is null"); if (isInvalid()) { return Validation.invalid(this.getError()); } else { T value = this.get(); return Validation.valid(f.apply(value)); } } /** * Whereas map only performs a mapping on a valid Validation, and leftMap performs a mapping on an invalid * Validation, bimap allows you to provide mapping actions for both, and will give you the result based * on what type of Validation this is. Without this, you would have to do something like: * * validation.map(...).leftMap(...); * * @param <E2> type of the mapping result if this is an invalid * @param <T2> type of the mapping result if this is a valid * @param errorMapper the invalid mapping operation * @param valueMapper the valid mapping operation * @return an instance of Validation&lt;E2,T2&gt; * @throws NullPointerException if errorMapper or valueMapper is null */ default <E2, T2> Validation<E2, T2> bimap(Function<? super E, ? extends E2> errorMapper, Function<? super T, ? extends T2> valueMapper) { Objects.requireNonNull(errorMapper, "errorMapper is null"); Objects.requireNonNull(valueMapper, "valueMapper is null"); if (isInvalid()) { E error = this.getError(); return Validation.invalid(errorMapper.apply(error)); } else { T value = this.get(); return Validation.valid(valueMapper.apply(value)); } } /** * Applies a function f to the error of this Validation if this is an Invalid. Otherwise does nothing * if this is a Valid. * * @param <U> type of the error resulting from the mapping * @param f a function that maps the error in this Invalid * @return an instance of Validation&lt;U,T&gt; * @throws NullPointerException if mapping operation f is null */ default <U> Validation<U, T> leftMap(Function<? super E, ? extends U> f) { Objects.requireNonNull(f, "function f is null"); if (isInvalid()) { E error = this.getError(); return Validation.invalid(f.apply(error)); } else { return Validation.valid(this.get()); } } default <U> Validation<List<E>, U> ap(Validation<List<E>, ? extends Function<? super T, ? extends U>> validation) { Objects.requireNonNull(validation, "validation is null"); if (isValid()) { if (validation.isValid()) { Function<? super T, ? extends U> f = validation.get(); U u = f.apply(this.get()); return valid(u); } else { List<E> errors = validation.getError(); return invalid(errors); } } else { if (validation.isValid()) { E error = this.getError(); return invalid(List.of(error)); } else { List<E> errors = validation.getError(); E error = this.getError(); return invalid(errors.append(error)); } } } /** * Combines two {@code Validation}s to form a {@link Builder}, which can then be used to perform further * combines, or apply a function to it in order to transform the {@link Builder} into a {@code Validation}. * * @param <U> type of the value contained in validation * @param validation the validation object to combine this with * @return an instance of Builder */ default <U> Builder<E, T, U> combine(Validation<E, U> validation) { return new Builder<>(this, validation); } // -- Implementation of Value default Option<Validation<E, T>> filter(Predicate<? super T> predicate) { Objects.requireNonNull(predicate, "predicate is null"); return isInvalid() || predicate.test(get()) ? 
Option.some(this) : Option.none(); } @SuppressWarnings("unchecked") default <U> Validation<E, U> flatMap(Function<? super T, ? extends Validation<E, ? extends U>> mapper) { Objects.requireNonNull(mapper, "mapper is null"); return isInvalid() ? (Validation<E, U>) this : (Validation<E, U>) mapper.apply(get()); } @Override default Validation<E, T> peek(Consumer<? super T> action) { if (isValid()) { action.accept(get()); } return this; } @Override default boolean isSingleValued() { return true; } @Override default Iterator<T> iterator() { return isValid() ? Iterator.of(get()) : Iterator.empty(); } /** * A valid Validation * * @param <E> type of the error of this Validation * @param <T> type of the value of this Validation */ final class Valid<E, T> implements Validation<E, T> { private final T value; /** * Construct a {@code Valid} * * @param value The value of this success */ private Valid(T value) { this.value = value; } @Override public boolean isValid() { return true; } @Override public boolean isInvalid() { return false; } @Override public T get() { return value; } @Override public E getError() throws RuntimeException { throw new NoSuchElementException("error of 'valid' Validation"); } @Override public boolean equals(Object obj) { return (obj == this) || (obj instanceof Valid && Objects.equals(value, ((Valid<?, ?>) obj).value)); } @Override public int hashCode() { return Objects.hashCode(value); } @Override public String stringPrefix() { return "Valid"; } @Override public String toString() { return stringPrefix() + "(" + value + ")"; } } /** * An invalid Validation * * @param <E> type of the error of this Validation * @param <T> type of the value of this Validation */ final class Invalid<E, T> implements Validation<E, T> { private final E error; /** * Construct an {@code Invalid} * * @param error The value of this error */ private Invalid(E error) { this.error = error; } @Override public boolean isValid() { return false; } @Override public boolean isInvalid() { return true; } @Override public T get() throws RuntimeException { throw new NoSuchElementException("get of 'invalid' Validation"); } @Override public E getError() { return error; } @Override public boolean equals(Object obj) { return (obj == this) || (obj instanceof Invalid && Objects.equals(error, ((Invalid<?, ?>) obj).error)); } @Override public int hashCode() { return Objects.hashCode(error); } @Override public String stringPrefix() { return "Invalid"; } @Override public String toString() { return stringPrefix() + "(" + error + ")"; } } final class Builder<E, T1, T2> { private Validation<E, T1> v1; private Validation<E, T2> v2; private Builder(Validation<E, T1> v1, Validation<E, T2> v2) { this.v1 = v1; this.v2 = v2; } public <R> Validation<List<E>, R> ap(Function2<T1, T2, R> f) { return v2.ap(v1.ap(Validation.valid(f.curried()))); } public <T3> Builder3<E, T1, T2, T3> combine(Validation<E, T3> v3) { return new Builder3<>(v1, v2, v3); } } final class Builder3<E, T1, T2, T3> { private Validation<E, T1> v1; private Validation<E, T2> v2; private Validation<E, T3> v3; private Builder3(Validation<E, T1> v1, Validation<E, T2> v2, Validation<E, T3> v3) { this.v1 = v1; this.v2 = v2; this.v3 = v3; } public <R> Validation<List<E>, R> ap(Function3<T1, T2, T3, R> f) { return v3.ap(v2.ap(v1.ap(Validation.valid(f.curried())))); } public <T4> Builder4<E, T1, T2, T3, T4> combine(Validation<E, T4> v4) { return new Builder4<>(v1, v2, v3, v4); } } final class Builder4<E, T1, T2, T3, T4> { private Validation<E, T1> v1; private Validation<E, T2> v2; 
private Validation<E, T3> v3; private Validation<E, T4> v4; private Builder4(Validation<E, T1> v1, Validation<E, T2> v2, Validation<E, T3> v3, Validation<E, T4> v4) { this.v1 = v1; this.v2 = v2; this.v3 = v3; this.v4 = v4; } public <R> Validation<List<E>, R> ap(Function4<T1, T2, T3, T4, R> f) { return v4.ap(v3.ap(v2.ap(v1.ap(Validation.valid(f.curried()))))); } public <T5> Builder5<E, T1, T2, T3, T4, T5> combine(Validation<E, T5> v5) { return new Builder5<>(v1, v2, v3, v4, v5); } } final class Builder5<E, T1, T2, T3, T4, T5> { private Validation<E, T1> v1; private Validation<E, T2> v2; private Validation<E, T3> v3; private Validation<E, T4> v4; private Validation<E, T5> v5; private Builder5(Validation<E, T1> v1, Validation<E, T2> v2, Validation<E, T3> v3, Validation<E, T4> v4, Validation<E, T5> v5) { this.v1 = v1; this.v2 = v2; this.v3 = v3; this.v4 = v4; this.v5 = v5; } public <R> Validation<List<E>, R> ap(Function5<T1, T2, T3, T4, T5, R> f) { return v5.ap(v4.ap(v3.ap(v2.ap(v1.ap(Validation.valid(f.curried())))))); } public <T6> Builder6<E, T1, T2, T3, T4, T5, T6> combine(Validation<E, T6> v6) { return new Builder6<>(v1, v2, v3, v4, v5, v6); } } final class Builder6<E, T1, T2, T3, T4, T5, T6> { private Validation<E, T1> v1; private Validation<E, T2> v2; private Validation<E, T3> v3; private Validation<E, T4> v4; private Validation<E, T5> v5; private Validation<E, T6> v6; private Builder6(Validation<E, T1> v1, Validation<E, T2> v2, Validation<E, T3> v3, Validation<E, T4> v4, Validation<E, T5> v5, Validation<E, T6> v6) { this.v1 = v1; this.v2 = v2; this.v3 = v3; this.v4 = v4; this.v5 = v5; this.v6 = v6; } public <R> Validation<List<E>, R> ap(Function6<T1, T2, T3, T4, T5, T6, R> f) { return v6.ap(v5.ap(v4.ap(v3.ap(v2.ap(v1.ap(Validation.valid(f.curried()))))))); } public <T7> Builder7<E, T1, T2, T3, T4, T5, T6, T7> combine(Validation<E, T7> v7) { return new Builder7<>(v1, v2, v3, v4, v5, v6, v7); } } final class Builder7<E, T1, T2, T3, T4, T5, T6, T7> { private Validation<E, T1> v1; private Validation<E, T2> v2; private Validation<E, T3> v3; private Validation<E, T4> v4; private Validation<E, T5> v5; private Validation<E, T6> v6; private Validation<E, T7> v7; private Builder7(Validation<E, T1> v1, Validation<E, T2> v2, Validation<E, T3> v3, Validation<E, T4> v4, Validation<E, T5> v5, Validation<E, T6> v6, Validation<E, T7> v7) { this.v1 = v1; this.v2 = v2; this.v3 = v3; this.v4 = v4; this.v5 = v5; this.v6 = v6; this.v7 = v7; } public <R> Validation<List<E>, R> ap(Function7<T1, T2, T3, T4, T5, T6, T7, R> f) { return v7.ap(v6.ap(v5.ap(v4.ap(v3.ap(v2.ap(v1.ap(Validation.valid(f.curried())))))))); } public <T8> Builder8<E, T1, T2, T3, T4, T5, T6, T7, T8> combine(Validation<E, T8> v8) { return new Builder8<>(v1, v2, v3, v4, v5, v6, v7, v8); } } final class Builder8<E, T1, T2, T3, T4, T5, T6, T7, T8> { private Validation<E, T1> v1; private Validation<E, T2> v2; private Validation<E, T3> v3; private Validation<E, T4> v4; private Validation<E, T5> v5; private Validation<E, T6> v6; private Validation<E, T7> v7; private Validation<E, T8> v8; private Builder8(Validation<E, T1> v1, Validation<E, T2> v2, Validation<E, T3> v3, Validation<E, T4> v4, Validation<E, T5> v5, Validation<E, T6> v6, Validation<E, T7> v7, Validation<E, T8> v8) { this.v1 = v1; this.v2 = v2; this.v3 = v3; this.v4 = v4; this.v5 = v5; this.v6 = v6; this.v7 = v7; this.v8 = v8; } public <R> Validation<List<E>, R> ap(Function8<T1, T2, T3, T4, T5, T6, T7, T8, R> f) { return 
v8.ap(v7.ap(v6.ap(v5.ap(v4.ap(v3.ap(v2.ap(v1.ap(Validation.valid(f.curried()))))))))); } } }
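// -- Usage sketch (editorial addition, not part of the original javaslang sources): shows how the
// -- combine(...).ap(...) builders defined above accumulate errors instead of failing fast. The class
// -- name and helper methods are hypothetical.
class ValidationUsageExample {
    static Validation<String, String> nonEmpty(String s) {
        return (s == null || s.isEmpty()) ? Validation.invalid("name is empty") : Validation.valid(s);
    }
    static Validation<String, Integer> positive(int age) {
        return age > 0 ? Validation.valid(age) : Validation.invalid("age must be positive");
    }
    public static void main(String[] args) {
        // Both parts valid: the combining function is applied and the result is Valid.
        Validation<List<String>, String> ok =
                Validation.combine(nonEmpty("Alice"), positive(30)).ap((name, age) -> name + " is " + age);
        System.out.println(ok);   // Valid(Alice is 30)

        // Both parts invalid: the errors of all parts are collected into a List.
        Validation<List<String>, String> bad =
                Validation.combine(nonEmpty(""), positive(-1)).ap((name, age) -> name + " is " + age);
        System.out.println(bad);  // Invalid(List(name is empty, age must be positive))
    }
}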
package org.apache.batik.transcoder.image; import java.awt.AlphaComposite; import java.awt.Cursor; import java.awt.Dimension; import java.awt.Graphics2D; import java.awt.Paint; import java.awt.Point; import java.awt.Rectangle; import java.awt.Shape; import java.awt.geom.AffineTransform; import java.awt.geom.Dimension2D; import java.awt.geom.Rectangle2D; import java.awt.image.BufferedImage; import org.apache.batik.bridge.BridgeContext; import org.apache.batik.bridge.BridgeException; import org.apache.batik.bridge.GVTBuilder; import org.apache.batik.bridge.UserAgent; import org.apache.batik.bridge.ViewBox; import org.apache.batik.dom.svg.DefaultSVGContext; import org.apache.batik.dom.svg.SAXSVGDocumentFactory; import org.apache.batik.dom.svg.SVGDOMImplementation; import org.apache.batik.dom.svg.SVGOMDocument; import org.apache.batik.dom.util.DocumentFactory; import org.apache.batik.ext.awt.image.GraphicsUtil; import org.apache.batik.gvt.GraphicsNode; import org.apache.batik.gvt.GraphicsNodeRenderContext; import org.apache.batik.gvt.event.EventDispatcher; import org.apache.batik.gvt.renderer.ImageRenderer; import org.apache.batik.gvt.renderer.ImageRendererFactory; import org.apache.batik.transcoder.TranscoderException; import org.apache.batik.transcoder.TranscoderOutput; import org.apache.batik.transcoder.TranscodingHints; import org.apache.batik.transcoder.XMLAbstractTranscoder; import org.apache.batik.transcoder.image.resources.Messages; import org.apache.batik.transcoder.keys.BooleanKey; import org.apache.batik.transcoder.keys.FloatKey; import org.apache.batik.transcoder.keys.LengthKey; import org.apache.batik.transcoder.keys.PaintKey; import org.apache.batik.transcoder.keys.Rectangle2DKey; import org.apache.batik.transcoder.keys.StringKey; import org.apache.batik.util.SVGConstants; import org.w3c.dom.DOMException; import org.w3c.dom.DOMImplementation; import org.w3c.dom.Document; import org.w3c.dom.svg.SVGAElement; import org.w3c.dom.svg.SVGDocument; import org.w3c.dom.svg.SVGSVGElement; // <!> FIXME : Those import clauses will change with new design import org.apache.batik.gvt.renderer.StaticRendererFactory; /** * This class enables the transcoding of an input to an image of any format. * * <p>Two transcoding hints (<tt>KEY_WIDTH</tt> and * <tt>KEY_HEIGHT</tt>) can be used to respectively specify the image * width and the image height. If only one of these keys is specified, * the transcoder preserves the aspect ratio of the original image. * * <p>The <tt>KEY_BACKGROUND_COLOR</tt> defines the background color * to use for opaque image formats, or the background color that may * be used for image formats that support alpha channel. * * <p>The <tt>KEY_AOI</tt> represents the area of interest to paint * in device space. * * <p>Three additional transcoding hints that act on the SVG * processor can be specified: * * <p><tt>KEY_LANGUAGE</tt> to set the default language to use (may be * used by a &lt;switch> SVG element for example), * <tt>KEY_USER_STYLESHEET_URI</tt> to fix the URI of a user * stylesheet, and <tt>KEY_PIXEL_TO_MM</tt> to specify the pixel to * millimeter conversion factor. * * @author <a href="mailto:Thierry.Kormann@sophia.inria.fr">Thierry Kormann</a> * @version $Id$ */ public abstract class ImageTranscoder extends XMLAbstractTranscoder { /** The user agent dedicated to an <tt>ImageTranscoder</tt>. */ protected UserAgent userAgent = new ImageTranscoderUserAgent(); /** * Constructs a new <tt>ImageTranscoder</tt>. 
*/ protected ImageTranscoder() { hints.put(KEY_DOCUMENT_ELEMENT_NAMESPACE_URI, SVGConstants.SVG_NAMESPACE_URI); hints.put(KEY_DOCUMENT_ELEMENT, SVGConstants.SVG_SVG_TAG); hints.put(KEY_DOM_IMPLEMENTATION, SVGDOMImplementation.getDOMImplementation()); } /** * Transcodes the specified Document as an image in the specified output. * @param document the document to transcode * @param output the output where to write the transcoding result * @exception TranscoderException if an error occurred while transcoding */ protected void transcode(Document document, TranscoderOutput output) throws TranscoderException { if (!(document instanceof SVGOMDocument)) { throw new TranscoderException( Messages.formatMessage("notsvg", null)); } SVGDocument svgDoc = (SVGDocument)document; SVGSVGElement root = svgDoc.getRootElement(); // initialize the SVG document with the appropriate context String parserClassname = (String)hints.get(KEY_XML_PARSER_CLASSNAME); DefaultSVGContext svgCtx = new DefaultSVGContext(); svgCtx.setPixelToMM(userAgent.getPixelToMM()); ((SVGOMDocument)document).setSVGContext(svgCtx); // build the GVT tree GVTBuilder builder = new GVTBuilder(); ImageRendererFactory rendFactory = new StaticRendererFactory(); GraphicsNodeRenderContext rc = rendFactory.getRenderContext(); BridgeContext ctx = new BridgeContext(userAgent, rc); GraphicsNode gvtRoot; try { gvtRoot = builder.build(ctx, svgDoc); } catch (BridgeException ex) { throw new TranscoderException(ex); } // get the 'width' and 'height' attributes of the SVG document float docWidth = (float)ctx.getDocumentSize().getWidth(); float docHeight = (float)ctx.getDocumentSize().getHeight(); ctx = null; builder = null; // compute the image's width and height according to the hints float imgWidth = -1; if (hints.containsKey(KEY_WIDTH)) { imgWidth = ((Float)hints.get(KEY_WIDTH)).floatValue(); } float imgHeight = -1; if (hints.containsKey(KEY_HEIGHT)) { imgHeight = ((Float)hints.get(KEY_HEIGHT)).floatValue(); } float width, height; if (imgWidth > 0 && imgHeight > 0) { width = imgWidth; height = imgHeight; } else if (imgHeight > 0) { width = (docWidth * imgHeight) / docHeight; height = imgHeight; } else if (imgWidth > 0) { width = imgWidth; height = (docHeight * imgWidth) / docWidth; } else { width = docWidth; height = docHeight; } // compute the preserveAspectRatio matrix AffineTransform Px = ViewBox.getPreserveAspectRatioTransform(root, width, height); if (Px.isIdentity() && (width != docWidth || height != docHeight)) { // The document has no viewBox, we need to resize it by hand. 
// we want to keep the document size ratio float d = Math.max(docWidth, docHeight); float dd = Math.max(width, height); float scale = dd/d; Px = AffineTransform.getScaleInstance(scale, scale); } // take the AOI into account if any if (hints.containsKey(KEY_AOI)) { Rectangle2D aoi = (Rectangle2D)hints.get(KEY_AOI); // transform the AOI into the image's coordinate system aoi = Px.createTransformedShape(aoi).getBounds2D(); AffineTransform Mx = new AffineTransform(); double sx = width / aoi.getWidth(); double sy = height / aoi.getHeight(); Mx.scale(sx, sy); double tx = -aoi.getX(); double ty = -aoi.getY(); Mx.translate(tx, ty); // take the AOI transformation matrix into account // we apply first the preserveAspectRatio matrix Px.preConcatenate(Mx); } // prepare the image to be painted int w = (int)width; int h = (int)height; // paint the SVG document using the bridge package // create the appropriate renderer ImageRenderer renderer = rendFactory.createImageRenderer(); renderer.updateOffScreen(w, h); renderer.setTransform(Px); renderer.setTree(gvtRoot); gvtRoot = null; // We're done with it... try { // now we are sure that the aoi is the image size Shape raoi = new Rectangle2D.Float(0, 0, width, height); // Warning: the renderer's AOI must be in user space renderer.repaint(Px.createInverse().createTransformedShape(raoi)); BufferedImage rend = renderer.getOffScreen(); renderer = null; // We're done with it... BufferedImage dest = createImage(w, h); Graphics2D g2d = GraphicsUtil.createGraphics(dest); if (hints.containsKey(KEY_BACKGROUND_COLOR)) { Paint bgcolor = (Paint)hints.get(KEY_BACKGROUND_COLOR); g2d.setComposite(AlphaComposite.SrcOver); g2d.setPaint(bgcolor); g2d.fillRect(0, 0, w, h); } g2d.drawRenderedImage(rend, new AffineTransform()); g2d.dispose(); rend = null; // We're done with it... writeImage(dest, output); } catch (Exception ex) { ex.printStackTrace(); throw new TranscoderException(ex); } } /** * Creates a <tt>DocumentFactory</tt> that is used to create an SVG DOM * tree. The specified DOM Implementation is ignored and the Batik * SVG DOM Implementation is automatically used. * * @param domImpl the DOM Implementation (not used) * @param parserClassname the XML parser classname */ protected DocumentFactory createDocumentFactory(DOMImplementation domImpl, String parserClassname) { return new SAXSVGDocumentFactory(parserClassname); } /** * Creates a new image with the specified dimension. * @param width the image width in pixels * @param height the image height in pixels */ public abstract BufferedImage createImage(int width, int height); /** * Writes the specified image to the specified output. * @param img the image to write * @param output the output where to store the image * @throws TranscoderException if an error occurred while storing the image */ public abstract void writeImage(BufferedImage img, TranscoderOutput output) throws TranscoderException; // UserAgent implementation /** * A user agent implementation for <tt>ImageTranscoder</tt>. */ protected class ImageTranscoderUserAgent implements UserAgent { /** * Returns the default size of this user agent (400x400). */ public Dimension2D getViewportSize() { return new Dimension(400, 400); } /** * Displays the specified error message using the <tt>ErrorHandler</tt>. */ public void displayError(String message) { try { handler.error(new TranscoderException(message)); } catch (TranscoderException ex) { throw new RuntimeException(); } } /** * Displays the specified error using the <tt>ErrorHandler</tt>. 
*/ public void displayError(Exception e) { try { handler.error(new TranscoderException(e)); } catch (TranscoderException ex) { throw new RuntimeException(); } } /** * Displays the specified message using the <tt>ErrorHandler</tt>. */ public void displayMessage(String message) { try { handler.warning(new TranscoderException(message)); } catch (TranscoderException ex) { throw new RuntimeException(); } } /** * Returns the pixel to millimeter conversion factor specified in the * <tt>TranscodingHints</tt>, or 0.26458 (96 dpi) if none is specified. */ public float getPixelToMM() { if (hints.containsKey(KEY_PIXEL_TO_MM)) { return ((Float)hints.get(KEY_PIXEL_TO_MM)).floatValue(); } else { // return 0.3528f; // 72 dpi return 0.26458333333333333333333333333333f; // 96dpi } } /** * Returns the user language specified in the * <tt>TranscodingHints</tt>, or "en" (English) if none is specified. */ public String getLanguages() { if (hints.containsKey(KEY_LANGUAGE)) { return (String)hints.get(KEY_LANGUAGE); } else { return "en"; } } /** * Returns the user stylesheet specified in the * <tt>TranscodingHints</tt>, or null if none is specified. */ public String getUserStyleSheetURI() { return (String)hints.get(KEY_USER_STYLESHEET_URI); } /** * Returns the XML parser to use from the TranscodingHints. */ public String getXMLParserClassName() { return (String)hints.get(KEY_XML_PARSER_CLASSNAME); } /** * Unsupported operation. */ public EventDispatcher getEventDispatcher() { return null; } /** * Unsupported operation. */ public void openLink(SVGAElement elt) { } /** * Unsupported operation. */ public void setSVGCursor(Cursor cursor) { } /** * Unsupported operation. */ public void runThread(Thread t) { } /** * Unsupported operation. */ public AffineTransform getTransform() { return null; } /** * Unsupported operation. */ public Point getClientAreaLocationOnScreen() { return new Point(); } } // Keys definition /** * The image width key. * <TABLE BORDER="0" CELLSPACING="0" CELLPADDING="1"> * <TR> * <TH VALIGN="TOP" ALIGN="RIGHT"><P ALIGN="RIGHT">Key: </TH> * <TD VALIGN="TOP">KEY_WIDTH</TD></TR> * <TR> * <TH VALIGN="TOP" ALIGN="RIGHT"><P ALIGN="RIGHT">Value: </TH> * <TD VALIGN="TOP">Float</TD></TR> * <TR> * <TH VALIGN="TOP" ALIGN="RIGHT"><P ALIGN="RIGHT">Default: </TH> * <TD VALIGN="TOP">The width of the top most svg element</TD></TR> * <TR> * <TH VALIGN="TOP" ALIGN="RIGHT"><P ALIGN="RIGHT">Required: </TH> * <TD VALIGN="TOP">No</TD></TR> * <TR> * <TH VALIGN="TOP" ALIGN="RIGHT"><P ALIGN="RIGHT">Description: </TH> * <TD VALIGN="TOP">Specify the width of the image to create.</TD></TR> * </TABLE> */ public static final TranscodingHints.Key KEY_WIDTH = new LengthKey(); /** * The image height key. * <TABLE BORDER="0" CELLSPACING="0" CELLPADDING="1"> * <TR> * <TH VALIGN="TOP" ALIGN="RIGHT"><P ALIGN="RIGHT">Key: </TH> * <TD VALIGN="TOP">KEY_HEIGHT</TD></TR> * <TR> * <TH VALIGN="TOP" ALIGN="RIGHT"><P ALIGN="RIGHT">Value: </TH> * <TD VALIGN="TOP">Float</TD></TR> * <TR> * <TH VALIGN="TOP" ALIGN="RIGHT"><P ALIGN="RIGHT">Default: </TH> * <TD VALIGN="TOP">The height of the top most svg element</TD></TR> * <TR> * <TH VALIGN="TOP" ALIGN="RIGHT"><P ALIGN="RIGHT">Required: </TH> * <TD VALIGN="TOP">No</TD></TR> * <TR> * <TH VALIGN="TOP" ALIGN="RIGHT"><P ALIGN="RIGHT">Description: </TH> * <TD VALIGN="TOP">Specify the height of the image to create.</TD></TR> * </TABLE> */ public static final TranscodingHints.Key KEY_HEIGHT = new LengthKey(); /** * The area of interest key. 
* <TABLE BORDER="0" CELLSPACING="0" CELLPADDING="1"> * <TR> * <TH VALIGN="TOP" ALIGN="RIGHT"><P ALIGN="RIGHT">Key: </TH> * <TD VALIGN="TOP">KEY_AOI</TD></TR> * <TR> * <TH VALIGN="TOP" ALIGN="RIGHT"><P ALIGN="RIGHT">Value: </TH> * <TD VALIGN="TOP">Rectangle2D</TD></TR> * <TR> * <TH VALIGN="TOP" ALIGN="RIGHT"><P ALIGN="RIGHT">Default: </TH> * <TD VALIGN="TOP">The document's size</TD></TR> * <TR> * <TH VALIGN="TOP" ALIGN="RIGHT"><P ALIGN="RIGHT">Required: </TH> * <TD VALIGN="TOP">No</TD></TR> * <TR> * <TH VALIGN="TOP" ALIGN="RIGHT"><P ALIGN="RIGHT">Description: </TH> * <TD VALIGN="TOP">Specify the area of interest to render. The * rectangle coordinates must be specified in pixels and in the * document coordinates system.</TD></TR> * </TABLE> */ public static final TranscodingHints.Key KEY_AOI = new Rectangle2DKey(); /** * The language key. * <TABLE BORDER="0" CELLSPACING="0" CELLPADDING="1"> * <TR> * <TH VALIGN="TOP" ALIGN="RIGHT"><P ALIGN="RIGHT">Key: </TH> * <TD VALIGN="TOP">KEY_LANGUAGE</TD></TR> * <TR> * <TH VALIGN="TOP" ALIGN="RIGHT"><P ALIGN="RIGHT">Value: </TH> * <TD VALIGN="TOP">String</TD></TR> * <TR> * <TH VALIGN="TOP" ALIGN="RIGHT"><P ALIGN="RIGHT">Default: </TH> * <TD VALIGN="TOP">"en"</TD></TR> * <TR> * <TH VALIGN="TOP" ALIGN="RIGHT"><P ALIGN="RIGHT">Required: </TH> * <TD VALIGN="TOP">No</TD></TR> * <TR> * <TH VALIGN="TOP" ALIGN="RIGHT"><P ALIGN="RIGHT">Description: </TH> * <TD VALIGN="TOP">Specify the preferred language of the document. * </TD></TR> * </TABLE> */ public static final TranscodingHints.Key KEY_LANGUAGE = new StringKey(); /** * The user stylesheet URI key. * <TABLE BORDER="0" CELLSPACING="0" CELLPADDING="1"> * <TR> * <TH VALIGN="TOP" ALIGN="RIGHT"><P ALIGN="RIGHT">Key: </TH> * <TD VALIGN="TOP">KEY_USER_STYLESHEET_URI</TD></TR> * <TR> * <TH VALIGN="TOP" ALIGN="RIGHT"><P ALIGN="RIGHT">Value: </TH> * <TD VALIGN="TOP">String</TD></TR> * <TR> * <TH VALIGN="TOP" ALIGN="RIGHT"><P ALIGN="RIGHT">Default: </TH> * <TD VALIGN="TOP">null</TD></TR> * <TR> * <TH VALIGN="TOP" ALIGN="RIGHT"><P ALIGN="RIGHT">Required: </TH> * <TD VALIGN="TOP">No</TD></TR> * <TR> * <TH VALIGN="TOP" ALIGN="RIGHT"><P ALIGN="RIGHT">Description: </TH> * <TD VALIGN="TOP">Specify the user style sheet.</TD></TR> * </TABLE> */ public static final TranscodingHints.Key KEY_USER_STYLESHEET_URI = new StringKey(); /** * The pixel to millimeter conversion factor key. * <TABLE BORDER="0" CELLSPACING="0" CELLPADDING="1"> * <TR> * <TH VALIGN="TOP" ALIGN="RIGHT"><P ALIGN="RIGHT">Key: </TH> * <TD VALIGN="TOP">KEY_PIXEL_TO_MM</TD></TR> * <TR> * <TH VALIGN="TOP" ALIGN="RIGHT"><P ALIGN="RIGHT">Value: </TH> * <TD VALIGN="TOP">Float</TD></TR> * <TR> * <TH VALIGN="TOP" ALIGN="RIGHT"><P ALIGN="RIGHT">Default: </TH> * <TD VALIGN="TOP">0.33</TD></TR> * <TR> * <TH VALIGN="TOP" ALIGN="RIGHT"><P ALIGN="RIGHT">Required: </TH> * <TD VALIGN="TOP">No</TD></TR> * <TR> * <TH VALIGN="TOP" ALIGN="RIGHT"><P ALIGN="RIGHT">Description: </TH> * <TD VALIGN="TOP">Specify the pixel to millimeter conversion factor. * </TD></TR> * </TABLE> */ public static final TranscodingHints.Key KEY_PIXEL_TO_MM = new FloatKey(); /** * The image background paint key. 
* <TABLE BORDER="0" CELLSPACING="0" CELLPADDING="1"> * <TR> * <TH VALIGN="TOP" ALIGN="RIGHT"><P ALIGN="RIGHT">Key: </TH> * <TD VALIGN="TOP">KEY_BACKGROUND_COLOR</TD></TR> * <TR> * <TH VALIGN="TOP" ALIGN="RIGHT"><P ALIGN="RIGHT">Value: </TH> * <TD VALIGN="TOP">Paint</TD></TR> * <TR> * <TH VALIGN="TOP" ALIGN="RIGHT"><P ALIGN="RIGHT">Default: </TH> * <TD VALIGN="TOP">null</TD></TR> * <TR> * <TH VALIGN="TOP" ALIGN="RIGHT"><P ALIGN="RIGHT">Required: </TH> * <TD VALIGN="TOP">No</TD></TR> * <TR> * <TH VALIGN="TOP" ALIGN="RIGHT"><P ALIGN="RIGHT">Description: </TH> * <TD VALIGN="TOP">Specify the background color to use. * The color is required by opaque image formats and is used by * image formats that support alpha channel.</TD></TR> * </TABLE> */ public static final TranscodingHints.Key KEY_BACKGROUND_COLOR = new PaintKey(); }
package org.ccnx.ccn.io.content; import java.io.IOException; import java.io.InvalidObjectException; import java.util.Arrays; import java.util.EnumSet; import java.util.HashSet; import java.util.logging.Level; import org.ccnx.ccn.CCNHandle; import org.ccnx.ccn.CCNInterestListener; import org.ccnx.ccn.ContentVerifier; import org.ccnx.ccn.config.ConfigurationException; import org.ccnx.ccn.config.SystemConfiguration; import org.ccnx.ccn.impl.CCNFlowControl; import org.ccnx.ccn.impl.CCNFlowControl.SaveType; import org.ccnx.ccn.impl.CCNFlowControl.Shape; import org.ccnx.ccn.impl.repo.RepositoryFlowControl; import org.ccnx.ccn.impl.security.crypto.ContentKeys; import org.ccnx.ccn.impl.support.Log; import org.ccnx.ccn.impl.support.Tuple; import org.ccnx.ccn.io.CCNInputStream; import org.ccnx.ccn.io.CCNVersionedInputStream; import org.ccnx.ccn.io.CCNVersionedOutputStream; import org.ccnx.ccn.io.ErrorStateException; import org.ccnx.ccn.io.LinkCycleException; import org.ccnx.ccn.io.NoMatchingContentFoundException; import org.ccnx.ccn.io.CCNAbstractInputStream.FlagTypes; import org.ccnx.ccn.io.content.Link.LinkObject; import org.ccnx.ccn.profiles.SegmentationProfile; import org.ccnx.ccn.profiles.VersioningProfile; import org.ccnx.ccn.protocol.CCNTime; import org.ccnx.ccn.protocol.ContentName; import org.ccnx.ccn.protocol.ContentObject; import org.ccnx.ccn.protocol.Interest; import org.ccnx.ccn.protocol.KeyLocator; import org.ccnx.ccn.protocol.PublisherPublicKeyDigest; import org.ccnx.ccn.protocol.SignedInfo.ContentType; /** * Extends a NetworkObject to add specifics for using a CCN-based backing store. Each time * the object is saved, a new CCN version is created. Readers can open a specific version or * not specify a version, in which case the latest available version is read. Defaults * allow for saving data to a repository or directly to the network. * * Need to support four use models: * dimension 1: synchronous - ask for and block, the latest version or a specific version * dimension 2: asynchronous - ask for and get in the background, the latest version or a specific * version * When possible, keep track of the latest version known so that the latest version queries * can attempt to do better than that. Start by using only in the background load case, as until * something comes back we can keep using the old one and the propensity for blocking is high. * * Support for subclasses or users specifying different flow controllers with * different behavior. Build in support for either the simplest standard flow * controller, or a standard repository-backed flow controller. * * These objects attempt to maintain a CCN copy of the current state of their data. In descriptions * below, an object that is "dirty" is one whose data has been modified locally, but not yet * saved to the network. * * While CCNNetworkObject could be used directly, it almost never is; it is usually * more effective to define a subclass specialized to save/retrieve a specific object * type. * * Updates, 12/09: Move to creating a flow controller in the write constructor if * one isn't passed in. Read constructors still lazily create flow controllers on * first write (tradeoff); preemptive construction (and registering for interests) * can be achieved by calling the setupSave() method which creates a flow controller * if one hasn't been created already. Move to a strong default of saving * to a repository, unless overridden by the subclass itself. 
Change of repository/raw * nature can be made with the setRawSave() and setRepositorySave() methods. * * TODO: Note that the CCNNetworkObject class hierarchy currently has a plethora of constructors. * It is also missing some important functionality -- encryption, the ability to specify * freshness, and so on. Expect new constructors to deal with the latter deficiencies, and * a cleanup of the constructor architecture overall in the near term. */ public abstract class CCNNetworkObject<E> extends NetworkObject<E> implements CCNInterestListener { protected static final byte [] GONE_OUTPUT = "GONE".getBytes(); /** * Unversioned "base" name. */ protected ContentName _baseName; /** * The most recent version we have read/written. */ protected byte [] _currentVersionComponent; /** * Cached versioned name. */ protected ContentName _currentVersionName; /** * Flag to indicate whether content has been explicitly marked as GONE * in the latest version we know about. Use an explicit flag to separate from * the option for valid null content, or content that has not yet been updated. */ protected boolean _isGone = false; /** * The first segment for the stored data */ protected ContentObject _firstSegment = null; /** * If the name we started with was actually a link, detect that, store the link, * and dereference it to get the content. Call updateLink() to update the link * itself, and if updated, to update the dereferenced value. * * If the initial link is a link, recursion should push that into the link of * this LinkObject, and read its data. If that is a link, it should push again -- * this should chain through links till we reach an object of the desired type, * or blow up. (It won't handle encrypted links, though; we may need to distinguish * between ENCR and ENCRL. Having encrypted links would be handy, to send people * off in random directions. But it matters a lot to be able to tell if the decryption * is a LINK or not.) * * Writing linked objects is better done by separately writing the object and * the link, as it gives you more control over what is happening. If you attempt * to save this object, it may break the link (as the link may link to the particular * version retrieved). You can use this inner link object to manually update the link * to the target; but there are no good defaults about how to update the data. So * you need to specify the new link value yourself. For now we don't prevent users * from getting their data and their links de-synchronized. */ protected LinkObject _dereferencedLink; protected PublisherPublicKeyDigest _currentPublisher; protected KeyLocator _currentPublisherKeyLocator; protected CCNHandle _handle; protected CCNFlowControl _flowControl; protected boolean _disableFlowControlRequest = false; protected PublisherPublicKeyDigest _publisher; // publisher we write under, if null, use handle defaults protected KeyLocator _keyLocator; // locator to find publisher key protected SaveType _saveType = null; // what kind of flow controller to make if we don't have one protected Integer _freshnessSeconds = null; // if we want to set short freshness protected ContentKeys _keys; protected ContentVerifier _verifier; /** * Controls ongoing update. */ Interest _currentInterest = null; boolean _continuousUpdates = false; HashSet<UpdateListener> _updateListeners = null; /** * Basic write constructor. This will set the object's internal data but it will not save it * until save() is called. Unless overridden by the subclass, will default to save to * a repository. 
Can be changed to save directly to the network using setRawSave(). * If a subclass sets the default behavior to raw saves, this can be overridden on a * specific instance using setRepositorySave(). * @param type Wrapped class type. * @param contentIsMutable is the wrapped class type mutable or not * @param name Name under which to save object. * @param data Data to save. * @param handle CCNHandle to use for network operations. If null, a new one is created using CCNHandle#open(). * @throws IOException If there is an error setting up network backing store. */ public CCNNetworkObject(Class<E> type, boolean contentIsMutable, ContentName name, E data, SaveType saveType, CCNHandle handle) throws IOException { this(type, contentIsMutable, name, data, saveType, null, null, handle); } /** * Basic write constructor. This will set the object's internal data but it will not save it * until save() is called. Unless overridden by the subclass, will default to save to * a repository. Can be changed to save directly to the network using setRawSave(). * If a subclass sets the default behavior to raw saves, this can be overridden on a * specific instance using setRepositorySave(). * @param type Wrapped class type. * @param contentIsMutable is the wrapped class type mutable or not * @param name Name under which to save object. * @param data Data to save. * @param raw If true, saves to network by default, if false, saves to repository by default. * @param publisher The key to use to sign this data, or our default if null. * @param locator The key locator to use to let others know where to get our key. * @param handle CCNHandle to use for network operations. If null, a new one is created using CCNHandle#open(). * @throws IOException If there is an error setting up network backing store. */ public CCNNetworkObject(Class<E> type, boolean contentIsMutable, ContentName name, E data, SaveType saveType, PublisherPublicKeyDigest publisher, KeyLocator locator, CCNHandle handle) throws IOException { super(type, contentIsMutable, data); if (null == handle) { try { handle = CCNHandle.open(); } catch (ConfigurationException e) { throw new IllegalArgumentException("handle null, and cannot create one: " + e.getMessage(), e); } } _handle = handle; _verifier = handle.defaultVerifier(); _baseName = name; _publisher = publisher; _keyLocator = locator; _saveType = saveType; // Make our flow controller and register interests for our base name, if we have one. // Otherwise, create flow controller when we need one. if (null != name) { createFlowController(); } } /** * Specialized constructor, allowing subclasses to override default flow controller * (and hence backing store) behavior. * @param type Wrapped class type. * @param contentIsMutable is the wrapped class type mutable or not * @param name Name under which to save object. * @param data Data to save. * @param publisher The key to use to sign this data, or our default if null. * @param locator The key locator to use to let others know where to get our key. * @param flowControl Flow controller to use. A single flow controller object * is used for all this instance's writes, we use underlying streams to call * CCNFlowControl#startWrite(ContentName, Shape) on each save. Calls to * setRawSave() and setRepositorySave() will replace this flow controller * with a raw or repository flow controller, and should not be used with * this type of object (which obviously cares about what flow controller to use). * @throws IOException If there is an error setting up network backing store. 
*/ protected CCNNetworkObject(Class<E> type, boolean contentIsMutable, ContentName name, E data, PublisherPublicKeyDigest publisher, KeyLocator locator, CCNFlowControl flowControl) throws IOException { super(type, contentIsMutable, data); _baseName = name; _publisher = publisher; _keyLocator = locator; if (null == flowControl) { throw new IOException("FlowControl cannot be null!"); } _flowControl = flowControl; _handle = _flowControl.getHandle(); _saveType = _flowControl.saveType(); _verifier = _handle.defaultVerifier(); // Register interests for our base name, if we have one. if (null != name) { flowControl.addNameSpace(name); } } /** * Read constructor. Will try to pull latest version of this object, or a specific * named version if specified in the name. If read times out, will leave object in * its uninitialized state. * @param type Wrapped class type. * @param contentIsMutable is the wrapped class type mutable or not * @param name Name from which to read the object. If versioned, will read that specific * version. If unversioned, will attempt to read the latest version available. * @param handle CCNHandle to use for network operations. If null, a new one is created using CCNHandle#open(). * @throws ContentDecodingException if there is a problem decoding the object. * @throws IOException if there is an error setting up network backing store. */ public CCNNetworkObject(Class<E> type, boolean contentIsMutable, ContentName name, CCNHandle handle) throws ContentDecodingException, IOException { this(type, contentIsMutable, name, (PublisherPublicKeyDigest)null, handle); } /** * Read constructor. Will try to pull latest version of this object, or a specific * named version if specified in the name. If read times out, will leave object in * its uninitialized state. * * @param type Wrapped class type. * @param contentIsMutable is the wrapped class type mutable or not * @param name Name from which to read the object. If versioned, will read that specific * version. If unversioned, will attempt to read the latest version available. * @param publisher Particular publisher we require to have signed the content, or null for any publisher. * @param flowControl Flow controller to use. A single flow controller object * is used for all this instance's writes, we use underlying streams to call * CCNFlowControl#startWrite(ContentName, Shape) on each save. * @throws ContentDecodingException if there is a problem decoding the object. * @throws IOException if there is an error setting up network backing store. */ protected CCNNetworkObject(Class<E> type, boolean contentIsMutable, ContentName name, PublisherPublicKeyDigest publisher, CCNFlowControl flowControl) throws ContentDecodingException, IOException { super(type, contentIsMutable); if (null == flowControl) { throw new IOException("FlowControl cannot be null!"); } _flowControl = flowControl; _handle = _flowControl.getHandle(); _saveType = _flowControl.saveType(); _verifier = _handle.defaultVerifier(); update(name, publisher); } /** * Read constructor. Will try to pull latest version of this object, or a specific * named version if specified in the name. If read times out, will leave object in * its uninitialized state. * * @param type Wrapped class type. * @param contentIsMutable is the wrapped class type mutable or not * @param name Name from which to read the object. If versioned, will read that specific * version. If unversioned, will attempt to read the latest version available. 
* @param publisher Particular publisher we require to have signed the content, or null for any publisher. * @param handle CCNHandle to use for network operations. If null, a new one is created using CCNHandle#open(). * @throws ContentDecodingException if there is a problem decoding the object. * @throws IOException if there is an error setting up network backing store. */ public CCNNetworkObject(Class<E> type, boolean contentIsMutable, ContentName name, PublisherPublicKeyDigest publisher, CCNHandle handle) throws ContentDecodingException, IOException { super(type, contentIsMutable); if (null == handle) { try { handle = CCNHandle.open(); } catch (ConfigurationException e) { throw new IllegalArgumentException("handle null, and cannot create one: " + e.getMessage(), e); } } _handle = handle; _verifier = handle.defaultVerifier(); _baseName = name; update(name, publisher); } /** * Read constructor if you already have a segment of the object. Used by streams. * @param type Wrapped class type. * @param contentIsMutable is the wrapped class type mutable or not * @param firstSegment First segment of the object, retrieved by other means. * @param raw If true, defaults to raw network writes, if false, repository writes. * @param handle CCNHandle to use for network operations. If null, a new one is created using CCNHandle#open(). * @throws ContentDecodingException if there is a problem decoding the object. * @throws IOException if there is an error setting up network backing store. */ public CCNNetworkObject(Class<E> type, boolean contentIsMutable, ContentObject firstSegment, CCNHandle handle) throws ContentDecodingException, IOException { super(type, contentIsMutable); if (null == handle) { try { handle = CCNHandle.open(); } catch (ConfigurationException e) { throw new IllegalArgumentException("handle null, and cannot create one: " + e.getMessage(), e); } } _handle = handle; _verifier = handle.defaultVerifier(); update(firstSegment); } /** * Read constructor if you already have a segment of the object. Used by streams. * @param type Wrapped class type. * @param contentIsMutable is the wrapped class type mutable or not * @param firstSegment First segment of the object, retrieved by other means. * @param flowControl Flow controller to use. A single flow controller object * is used for all this instance's writes, we use underlying streams to call * CCNFlowControl#startWrite(ContentName, Shape) on each save. * @throws ContentDecodingException if there is a problem decoding the object. * @throws IOException if there is an error setting up network backing store. */ protected CCNNetworkObject(Class<E> type, boolean contentIsMutable, ContentObject firstSegment, CCNFlowControl flowControl) throws ContentDecodingException, IOException { super(type, contentIsMutable); if (null == flowControl) throw new IllegalArgumentException("flowControl cannot be null!"); _flowControl = flowControl; _handle = _flowControl.getHandle(); _saveType = _flowControl.saveType(); _verifier = _handle.defaultVerifier(); update(firstSegment); } /** * Copy constructor. Handle it piece by piece, though it means * updating this whenever the structure changes (rare). */ protected CCNNetworkObject(Class<E> type, CCNNetworkObject<? 
extends E> other) { super(type, other); _baseName = other._baseName; _currentVersionComponent = other._currentVersionComponent; _currentVersionName = other._currentVersionName; _isGone = other._isGone; _currentPublisher = other._currentPublisher; _currentPublisherKeyLocator = other._currentPublisherKeyLocator; _handle = other._handle; _flowControl = other._flowControl; _disableFlowControlRequest = other._disableFlowControlRequest; _publisher = other._publisher; _keyLocator = other._keyLocator; _saveType = other._saveType; _keys = (null != other._keys) ? other._keys.clone() : null; _firstSegment = other._firstSegment; _verifier = other._verifier; // Do not copy update behavior. Even if other one is updating, we won't // pick that up. Have to kick off manually. } /** * Maximize laziness of flow controller creation, to make it easiest for client code to * decide how to store this object. * When we create the flow controller, we add the base name namespace, so it will respond * to requests for latest version. Create them immediately in write constructors, * when we have a strong expectation that we will save data, if we have a namespace * to start listening on. Otherwise wait till we are going to write. * @return * @throws IOException */ protected synchronized void createFlowController() throws IOException { if (null == _flowControl) { if (null == _saveType) { Log.finer("Not creating flow controller yet, no saveType set."); return; } switch (_saveType) { case RAW: _flowControl = new CCNFlowControl(_handle); break; case REPOSITORY: _flowControl = new RepositoryFlowControl(_handle); break; default: throw new IOException("Unknown save type: " + _saveType); } if (_disableFlowControlRequest) _flowControl.disable(); // Have to register the version root. If we just register this specific version, we won't // see any shorter interests -- i.e. for get latest version. _flowControl.addNameSpace(_baseName); if (Log.isLoggable(Level.INFO)) Log.info("Created " + _saveType + " flow controller, for prefix {0}, save type " + _flowControl.saveType(), _baseName); } } /** * Start listening to interests on our base name, if we aren't already. * @throws IOException */ public synchronized void setupSave(SaveType saveType) throws IOException { setSaveType(saveType); setupSave(); } public synchronized void setupSave() throws IOException { if (null != _flowControl) { if (null != _baseName) { _flowControl.addNameSpace(_baseName); } return; } createFlowController(); } /** * Finalizer. Somewhat dangerous, but currently best way to close * lingering open registrations. Can't close the handle, till we ref count. */ @Override protected void finalize() throws Throwable { close(); } /** * Close flow controller, remove listeners. Have to call setupSave to save with this object again, * re-add listeners. * @return */ public synchronized void close() { clearListeners(); if (null != _flowControl) { _flowControl.close(); } } public SaveType saveType() { return _saveType; } /** * Used by subclasses to specify a mandatory save type in * read constructors. Only works on objects whose flow * controller has not yet been set, to not override * manually-set FC's. */ protected void setSaveType(SaveType saveType) throws IOException { if (null == _flowControl) { _saveType = saveType; } else if (saveType != _saveType){ throw new IOException("Cannot change save type, flow controller already set!"); } } /** * If you want to set the lifetime of objects saved with this instance. 
* @param freshnessSeconds If null, will unset any freshness seconds (will * write objects that stay in cache till forced out); if a value will constrain * how long objects will stay in cache. */ public void setFreshnessSeconds(Integer freshnessSeconds) { _freshnessSeconds = freshnessSeconds; } /** * Override point where subclasses can modify each input stream before * it is read. Subclasses should at least set the flags using getInputStreamFlags, * or call super.setInputStreamProperties. */ protected void setInputStreamProperties(CCNInputStream inputStream) { // default -- just set any flags inputStream.setFlags(getInputStreamFlags()); } /** * Override point where subclasses can specify set of flags on input stream * at point it is read or where necessary created. * @return */ protected EnumSet<FlagTypes> getInputStreamFlags() { return null; } /** * Allow verifier to be specified. Could put this in the constructors; though they * are already complicated enough. If not set, the default verifier for the key manager * used by the object's handle is used. * @param verifier the verifier to use. Cannot be null. */ public void setVerifier(ContentVerifier verifier) { if (null != verifier) _verifier = verifier; } /** * Attempts to find a version after the latest one we have, or times out. If * it times out, it simply leaves the object unchanged. * @return returns true if it found an update, false if not * @throws ContentDecodingException if there is a problem decoding the object. * @throws IOException if there is an error setting up network backing store. */ public boolean update(long timeout) throws ContentDecodingException, IOException { if (null == _baseName) { throw new IllegalStateException("Cannot retrieve an object without giving a name!"); } // Look for first segment of version after ours, or first version if we have none. ContentObject firstSegment = VersioningProfile.getFirstBlockOfLatestVersion(getVersionedName(), null, null, timeout, _handle.defaultVerifier(), _handle); if (null != firstSegment) { return update(firstSegment); } return false; } /** * The regular update does a call to do multi-hop get latest version -- i.e. it will try * multiple times to find the latest version of a piece of content, even if interposed caches * have something older. While that's great when you really need the latest, sometimes you are * happy with the latest available version available in your local ccnd cache; or you really * know there is only one version available and you don't want to try multiple times (and incur * a timeout) in an attempt to get a later version that does not exist. This call, updateAny, * claims to get "any" version available. In reality, it will do a single-hop latest version; * i.e. if there are two versions say in your local ccnd cache (or repo with nothing in the ccnd * cache), it will pull the later one. But * it won't move beyond those to find a newer version available at a writer, or to find a later * version in the repo than one in the ccnd cache. Use this if you know there is only one version * of something, or you want a fast path to the latest version where it really doesn't have to * be the "absolute" latest. * * Like all update methods, it will start from the version you've got -- so it is guaranteed to find * something after the current version this object knows about (if it has already found something), * and to time out and return false if there isn't anything later. 
*/ public boolean updateAny(long timeout) throws ContentDecodingException, IOException { if (null == _baseName) { throw new IllegalStateException("Cannot retrieve an object without giving a name!"); } // Look for first segment of version after ours, or first version if we have none. ContentObject firstSegment = VersioningProfile.getFirstBlockOfAnyLaterVersion(getVersionedName(), null, null, timeout, _verifier, _handle); if (null != firstSegment) { return update(firstSegment); } return false; } public boolean updateAny() throws ContentDecodingException, IOException { return updateAny(SystemConfiguration.getDefaultTimeout()); } /** * Calls update(long) with the default timeout SystemConfiguration.getDefaultTimeout(). * @return see update(long). * @throws ContentDecodingException if there is a problem decoding the object. * @throws IOException if there is an error setting up network backing store. */ public boolean update() throws ContentDecodingException, IOException { return update(SystemConfiguration.getDefaultTimeout()); } /** * Load data into object. If name is versioned, load that version. If * name is not versioned, look for latest version. * @param name Name of object to read. * @param publisher Desired publisher, or null for any. * @throws ContentDecodingException if there is a problem decoding the object. * @throws IOException if there is an error setting up network backing store. */ public boolean update(ContentName name, PublisherPublicKeyDigest publisher) throws ContentDecodingException, IOException { Log.info("Updating object to {0}.", name); CCNVersionedInputStream is = new CCNVersionedInputStream(name, publisher, _handle); return update(is); } /** * Load a stream starting with a specific object. * @param object * @throws ContentDecodingException if there is a problem decoding the object. * @throws IOException if there is an error setting up network backing store. */ public boolean update(ContentObject object) throws ContentDecodingException, IOException { CCNInputStream is = new CCNInputStream(object, getInputStreamFlags(), _handle); setInputStreamProperties(is); is.seek(0); // in case it wasn't the first segment return update(is); } /** * Updates the object from a CCNInputStream or one of its subclasses. Used predominantly * by internal methods, most clients should use update() or update(long). Exposed for * special-purpose use and experimentation. * @param inputStream Stream to read object from. * @return true if an update found, false if not. * @throws ContentDecodingException if there is a problem decoding the object. * @throws IOException if there is an error setting up network backing store. */ public synchronized boolean update(CCNInputStream inputStream) throws ContentDecodingException, IOException { // Allow subclasses to modify input stream processing prior to first read. 
setInputStreamProperties(inputStream); Tuple<ContentName, byte []> nameAndVersion = null; try { if (inputStream.isGone()) { if (Log.isLoggable(Level.FINE)) Log.fine("Reading from GONE stream: {0}", inputStream.getBaseName()); _data = null; // This will have a final version and a segment nameAndVersion = VersioningProfile.cutTerminalVersion(inputStream.deletionInformation().name()); _currentPublisher = inputStream.deletionInformation().signedInfo().getPublisherKeyID(); _currentPublisherKeyLocator = inputStream.deletionInformation().signedInfo().getKeyLocator(); _available = true; _isGone = true; _isDirty = false; _lastSaved = digestContent(); } else { super.update(inputStream); nameAndVersion = VersioningProfile.cutTerminalVersion(inputStream.getBaseName()); _currentPublisher = inputStream.publisher(); _currentPublisherKeyLocator = inputStream.publisherKeyLocator(); _isGone = false; } _firstSegment = inputStream.getFirstSegment(); // preserve first segment } catch (NoMatchingContentFoundException nme) { if (Log.isLoggable(Level.INFO)) Log.info("NoMatchingContentFoundException in update from input stream {0}, timed out before data was available.", inputStream.getBaseName()); nameAndVersion = VersioningProfile.cutTerminalVersion(inputStream.getBaseName()); _baseName = nameAndVersion.first(); // used to fire off an updateInBackground here, to hopefully get a second // chance on scooping up the content. But that seemed likely to confuse // people and leave the object in an undetermined state. So allow caller // to manage that themselves. // not an error state, merely a not ready state. return false; } catch (LinkCycleException lce) { if (Log.isLoggable(Level.INFO)) Log.info("Link cycle exception: {0}", lce.getMessage()); setError(lce); throw lce; } _baseName = nameAndVersion.first(); _currentVersionComponent = nameAndVersion.second(); _currentVersionName = null; // cached if used _dereferencedLink = inputStream.getDereferencedLink(); // gets stack of links used, if any clearError(); // Signal readers. newVersionAvailable(false); return true; } /** * Update this object in the background -- asynchronously. This call updates the * object a single time, after the first update (the requested version or the * latest version), the object will not self-update again unless requested. * To use, create an object using a write constructor, setting the data field * to null. Then call updateInBackground() to retrieve the object's data asynchronously. * To wait on data arrival, call either waitForData() or wait() on the object itself. * @throws IOException */ public void updateInBackground() throws IOException { updateInBackground(false); } /** * Update this object in the background -- asynchronously. * To use, create an object using a write constructor, setting the data field * to null. Then call updateInBackground() to retrieve the object's data asynchronously. * To wait for an update to arrive, call wait() on this object itself. * @param continuousUpdates If true, updates the * object continuously to the latest version available, a single time if it is false. * @throws IOException */ public void updateInBackground(boolean continuousUpdates) throws IOException { if (null == _baseName) { throw new IllegalStateException("Cannot retrieve an object without giving a name!"); } // Look for latest version. 
updateInBackground(getVersionedName(), continuousUpdates, null); } public void updateInBackground(ContentName latestVersionKnown, boolean continuousUpdates) throws IOException { updateInBackground(latestVersionKnown, continuousUpdates, null); } public void updateInBackground(boolean continuousUpdates, UpdateListener listener) throws IOException { updateInBackground(getVersionedName(), continuousUpdates, listener); } /** * Update this object in the background -- asynchronously. * To use, create an object using a write constructor, setting the data field * to null. Then call updateInBackground() to retrieve the object's data asynchronously. * To wait for an update to arrive, call wait() on this object itself. * @param latestVersionKnown the name of the latest version we know of, or an unversioned * name if no version known * @param continuousUpdates If true, updates the * object continuously to the latest version available, a single time if it is false. * @throws IOException */ public synchronized void updateInBackground(ContentName latestVersionKnown, boolean continuousUpdates, UpdateListener listener) throws IOException { Log.info("updateInBackground: getting latest version after {0} in background.", latestVersionKnown); cancelInterest(); if (null != listener) { addListener(listener); } _continuousUpdates = continuousUpdates; _currentInterest = VersioningProfile.firstBlockLatestVersionInterest(latestVersionKnown, null); Log.info("updateInBackground: initial interest: {0}", _currentInterest); _handle.expressInterest(_currentInterest, this); } /** * Cancel an outstanding updateInBackground(). */ public synchronized void cancelInterest() { _continuousUpdates = false; if (null != _currentInterest) { _handle.cancelInterest(_currentInterest, this); } } public synchronized void addListener(UpdateListener listener) { if (null == _updateListeners) { _updateListeners = new HashSet<UpdateListener>(); } else if (_updateListeners.contains(listener)) { return; // don't re-add } _updateListeners.add(listener); } /** * Does this object already have this listener. Uses Object.equals * for comparison; so will only say yes if it has this *exact* listener * instance already registered. * @param listener * @return */ public synchronized boolean hasListener(UpdateListener listener) { if (null == _updateListeners) { return false; } return (_updateListeners.contains(listener)); } public void removeListener(UpdateListener listener) { if (null == _updateListeners) return; synchronized (this) { _updateListeners.remove(listener); } } public void clearListeners() { if (null == _updateListeners) return; synchronized(_updateListeners) { _updateListeners.clear(); } } /** * Save to existing name, if content is dirty. Update version. * This is the default form of save -- if the object has been told to use * a repository backing store, by either giving it a repository flow controller, * calling saveToRepository() on it for its first save, or specifying false * to a constructor that allows a raw argument, it will save to a repository. * Otherwise will perform a raw save. * @throws ContentEncodingException if there is an error encoding the content * @throws IOException if there is an error reading the content from the network */ public boolean save() throws ContentEncodingException, IOException { return saveInternal(null, false, null); } /** * Method for CCNFilterListeners to save an object in response to an Interest * callback. 
An Interest has already been received, so the object can output * one ContentObject as soon as one is ready. Ideally this Interest will have * been received on the CCNHandle the object is using for output. If the object * is not dirty, it will not be saved, and the Interest will not be consumed. * If the Interest does not match this object, the Interest will not be consumed; * it is up to the caller to ensure that the Interest would be matched by writing * this object. (If the Interest doesn't match, no initial block will be output * even if the object is saved; the object will wait for matching Interests prior * to writing its blocks.) */ public boolean save(Interest outstandingInterest) throws ContentEncodingException, IOException { return saveInternal(null, false, outstandingInterest); } /** * Save to existing name, if content is dirty. Saves to specified version. * This is the default form of save -- if the object has been told to use * a repository backing store, by either giving it a repository flow controller, * calling saveToRepository() on it for its first save, or specifying false * to a constructor that allows a raw argument, it will save to a repository. * Otherwise will perform a raw save. * @param version Version to save to. * @return true if object was saved, false if it was not (if it was not dirty). * @throws ContentEncodingException if there is an error encoding the content * @throws IOException if there is an error reading the content from the network */ public boolean save(CCNTime version) throws ContentEncodingException, IOException { return saveInternal(version, false, null); } /** * Save to existing name, if content is dirty. Saves to specified version. * Method for CCNFilterListeners to save an object in response to an Interest * callback. An Interest has already been received, so the object can output * one ContentObject as soon as one is ready. Ideally this Interest will have * been received on the CCNHandle the object is using for output. If the object * is not dirty, it will not be saved, and the Interest will not be consumed. * If the Interest does not match this object, the Interest will not be consumed; * it is up to the caller to ensure that the Interest would be matched by writing * this object. (If the Interest doesn't match, no initial block will be output * even if the object is saved; the object will wait for matching Interests prior * to writing its blocks.) */ public boolean save(CCNTime version, Interest outstandingInterest) throws ContentEncodingException, IOException { return saveInternal(version, false, outstandingInterest); } /** * Save content to specific version. Internal form that performs actual save. * @param version If version is non-null, assume that is the desired * version. If not, set version based on current time. * @param gone Are we saving this content as gone or not. * @return return Returns true if it saved data, false if it thought data was not dirty and didn't * save. 
* TODO allow freshness specification * @throws ContentEncodingException if there is an error encoding the content * @throws IOException if there is an error reading the content from the network */ protected synchronized boolean saveInternal(CCNTime version, boolean gone, Interest outstandingInterest) throws ContentEncodingException, IOException { if (null == _baseName) { throw new IllegalStateException("Cannot save an object without giving it a name!"); } // move object to this name // need to make sure we get back the actual name we're using, // even if output stream does automatic versioning // probably need to refactor save behavior -- right now, internalWriteObject // either writes the object or not; we need to only make a new name if we do // write the object, and figure out if that's happened. Also need to make // parent behavior just write, put the dirty check higher in the state. if (!gone && !isDirty()) { Log.info("Object not dirty. Not saving."); return false; } if (!gone && (null == _data)) { // skip some of the prep steps that have side effects rather than getting this exception later from superclass throw new InvalidObjectException("No data to save!"); } // Create the flow controller, if we haven't already. createFlowController(); // This is the point at which we care if we don't have a flow controller if (null == _flowControl) { throw new IOException("Cannot create flow controller! Specified save type is " + _saveType + "!"); } // Handle versioning ourselves to make name handling easier. VOS should respect it. // We might have been handed a _baseName that was versioned. For most general behavior, // have to treat it as a normal name and that we are supposed to put our own version // underneath it. To save as a specific version, need to use save(version). ContentName name = _baseName; if (null != version) { name = VersioningProfile.addVersion(_baseName, version); } else { name = VersioningProfile.addVersion(_baseName); } // DKS if we add the versioned name, we don't handle get latest version. // We re-add the baseName here in case an update has changed it. // TODO -- perhaps disallow updates for unrelated names. _flowControl.addNameSpace(_baseName); if (!gone) { // CCNVersionedOutputStream will version an unversioned name. // If it gets a versioned name, will respect it. // This will call startWrite on the flow controller. CCNVersionedOutputStream cos = new CCNVersionedOutputStream(name, _keyLocator, _publisher, contentType(), _keys, _flowControl); cos.setFreshnessSeconds(_freshnessSeconds); if (null != outstandingInterest) { cos.addOutstandingInterest(outstandingInterest); } save(cos); // superclass stream save. 
calls flush but not close on a wrapping // digest stream; want to make sure we end up with a single non-MHT signed // segment and no header on small objects cos.close(); // Grab digest and segment number after close because for short objects there may not be // a segment generated until the close _firstSegment = cos.getFirstSegment(); } else { // saving object as gone, currently this is always one empty segment so we don't use an OutputStream ContentName segmentedName = SegmentationProfile.segmentName(name, SegmentationProfile.BASE_SEGMENT ); byte [] empty = new byte[0]; byte [] finalBlockID = SegmentationProfile.getSegmentNumberNameComponent(SegmentationProfile.BASE_SEGMENT); ContentObject goneObject = ContentObject.buildContentObject(segmentedName, ContentType.GONE, empty, _publisher, _keyLocator, null, finalBlockID); // The segmenter in the stream does an addNameSpace of the versioned name. Right now // this not only adds the prefix (ignored) but triggers the repo start write. _flowControl.addNameSpace(name); _flowControl.startWrite(name, Shape.STREAM); // Streams take care of this for the non-gone case. _flowControl.put(goneObject); _firstSegment = goneObject; _flowControl.beforeClose(); _flowControl.afterClose(); _lastSaved = GONE_OUTPUT; } _currentPublisher = _firstSegment.signedInfo().getPublisherKeyID(); _currentPublisherKeyLocator = _firstSegment.signedInfo().getKeyLocator(); _currentVersionComponent = name.lastComponent(); _currentVersionName = name; setDirty(false); _available = true; newVersionAvailable(true); Log.finest("Saved object {0} publisher {1} key locator {2}", name, _currentPublisher, _currentPublisherKeyLocator); return true; } /** * Convenience method to the data and save it in a single operation. * @param data new data for object, set with setData * @return * @throws ContentEncodingException if there is an error encoding the content * @throws IOException if there is an error reading the content from the network */ public boolean save(E data) throws ContentEncodingException, IOException { return save(null, data); } /** * Convenience method to the data and save it as a particular version in a single operation. * @param version the desired version * @param data new data for object, set with setData * @return * @throws ContentEncodingException if there is an error encoding the content * @throws IOException if there is an error reading the content from the network */ public synchronized boolean save(CCNTime version, E data) throws ContentEncodingException, IOException { setData(data); return save(version); } /** * Deprecated; use either object defaults or setRepositorySave() to indicate writes * should go to a repository, then call save() to write. * If raw=true or DEFAULT_RAW=true specified, this must be the first call to save made * for this object to force repository storage (overriding default). * @throws ContentEncodingException if there is an error encoding the content * @throws IOException if there is an error reading the content from the network */ @Deprecated public synchronized boolean saveToRepository(CCNTime version) throws ContentEncodingException, IOException { if (null == _baseName) { throw new IllegalStateException("Cannot save an object without giving it a name!"); } setSaveType(SaveType.REPOSITORY); return save(version); } /** * Deprecated; use either object defaults or setRepositorySave() to indicate writes * should go to a repository, then call save() to write. 
* @throws ContentEncodingException if there is an error encoding the content * @throws IOException if there is an error reading the content from the network */ @Deprecated public boolean saveToRepository() throws ContentEncodingException, IOException { return saveToRepository((CCNTime)null); } /** * Deprecated; use either object defaults or setRepositorySave() to indicate writes * should go to a repository, then call save() to write. * @throws ContentEncodingException if there is an error encoding the content * @throws IOException if there is an error reading the content from the network */ @Deprecated public boolean saveToRepository(E data) throws ContentEncodingException, IOException { return saveToRepository(null, data); } /** * Deprecated; use either object defaults or setRepositorySave() to indicate writes * should go to a repository, then call save() to write. * @throws ContentEncodingException if there is an error encoding the content * @throws IOException if there is an error reading the content from the network */ @Deprecated public synchronized boolean saveToRepository(CCNTime version, E data) throws ContentEncodingException, IOException { setData(data); return saveToRepository(version); } /** * Save this object as GONE. Intended to mark the latest version, rather * than a specific version as GONE. So for now, require that name handed in * is *not* already versioned; throw an IOException if it is. * @throws ContentEncodingException if there is an error encoding the content * @throws IOException if there is an error reading the content from the network */ public synchronized boolean saveAsGone() throws ContentEncodingException, IOException { return saveAsGone(null); } /** * For use by CCNFilterListeners, saves a GONE object and emits an inital * block in response to an already-received Interest. * Save this object as GONE. Intended to mark the latest version, rather * than a specific version as GONE. So for now, require that name handed in * is *not* already versioned; throw an IOException if it is. * @throws IOException */ public synchronized boolean saveAsGone(Interest outstandingInterest) throws ContentEncodingException, IOException { if (null == _baseName) { throw new IllegalStateException("Cannot save an object without giving it a name!"); } _data = null; _isGone = true; setDirty(true); return saveInternal(null, true, outstandingInterest); } /** * Deprecated; use either object defaults or setRepositorySave() to indicate writes * should go to a repository, then call save() to write. * If raw=true or DEFAULT_RAW=true specified, this must be the first call to save made * for this object. * @throws ContentEncodingException if there is an error encoding the content * @throws IOException if there is an error reading the content from the network */ @Deprecated public synchronized boolean saveToRepositoryAsGone() throws ContentEncodingException, IOException { setSaveType(SaveType.REPOSITORY); return saveAsGone(); } /** * Turn off flow control for this object. Warning - calling this risks packet drops. It should only * be used for tests or other special circumstances in which * you "know what you are doing". */ public synchronized void disableFlowControl() { if (null != _flowControl) _flowControl.disable(); _disableFlowControlRequest = true; } /** * Used to signal waiters and listeners that a new version is available. * @param wasSave is a new version available because we were saved, or because * we found a new version on the network? 
*/ protected void newVersionAvailable(boolean wasSave) { if (Log.isLoggable(Level.FINER)) { Log.finer("newVersionAvailable: New version of object available: {0}", getVersionedName()); } // by default signal all waiters this.notifyAll(); // and any registered listeners if (null != _updateListeners) { for (UpdateListener listener : _updateListeners) { listener.newVersionAvailable(this, wasSave); } } } /** * Will return immediately if this object already has data, otherwise * will wait indefinitely for the initial data to appear. */ public void waitForData() { if (available()) return; synchronized (this) { while (!available()) { try { wait(); } catch (InterruptedException e) { } } } } /** * Will wait for data to arrive. Callers should use * available() to determine whether data has arrived or not. * If data already available, will return immediately (in other * words, this is only useful to wait for the first update to * an object, or to ensure that it has data). To wait for later * updates, call wait() on the object itself. * @param timeout In milliseconds. If 0, will wait forever (if data does not arrive). */ public void waitForData(long timeout) { if (available()) return; synchronized (this) { long startTime = System.currentTimeMillis(); boolean keepTrying = true; while (!available() && keepTrying) { // deal with spontaneous returns from wait() try { long waitTime = timeout - (System.currentTimeMillis() - startTime); if (waitTime > 0) wait(waitTime); else keepTrying = false; } catch (InterruptedException ie) {} } } } public boolean isGone() { return _isGone; } @Override protected byte [] digestContent() throws IOException { if (isGone()) { return GONE_OUTPUT; } return super.digestContent(); } @Override protected synchronized E data() throws ContentNotReadyException, ContentGoneException, ErrorStateException { if (isGone()) { throw new ContentGoneException("Content is gone!"); } return super.data(); } @Override public synchronized void setData(E newData) { _isGone = false; // clear gone, even if we're setting to null; only saveAsGone can set as gone super.setData(newData); } public synchronized CCNTime getVersion() throws IOException { if (isSaved()) return VersioningProfile.getVersionComponentAsTimestamp(getVersionComponent()); return null; } public synchronized ContentName getBaseName() { return _baseName; } public CCNHandle getHandle() { return _handle; } public synchronized byte [] getVersionComponent() throws IOException { if (isSaved()) return _currentVersionComponent; return null; } /** * Returns the first segment number for this object. * @return The index of the first segment of stream data or null if no segments generated yet. */ public Long firstSegmentNumber() { if (null != _firstSegment) { return SegmentationProfile.getSegmentNumber(_firstSegment.name()); } else { return null; } } /** * Returns the digest of the first segment of this object which may be used * to help identify object instance unambiguously. * * @return The digest of the first segment of this object if available, null otherwise */ public byte[] getFirstDigest() { // Do not attempt to force update here to leave control over whether reading // or writing with the object creator. The return value may be null if the // object is not in a state of having a first segment if (null != _firstSegment) { return _firstSegment.digest(); } else { return null; } } /** * Returns the first segment of this object. 
*/ public ContentObject getFirstSegment() { return _firstSegment; } /** * If we traversed a link to get this object, make it available. */ public synchronized LinkObject getDereferencedLink() { return _dereferencedLink; } /** * Use only if you know what you are doing. */ public synchronized void setDereferencedLink(LinkObject dereferencedLink) { _dereferencedLink = dereferencedLink; } /** * Add a LinkObject to the stack we had to dereference to get here. */ public synchronized void pushDereferencedLink(LinkObject dereferencedLink) { if (null == dereferencedLink) { return; } if (null != _dereferencedLink) { if (null != dereferencedLink.getDereferencedLink()) { if (Log.isLoggable(Level.WARNING)) { Log.warning("Merging two link stacks -- {0} already has a dereferenced link from {1}. Behavior unpredictable.", dereferencedLink.getVersionedName(), dereferencedLink.getDereferencedLink().getVersionedName()); } } dereferencedLink.pushDereferencedLink(_dereferencedLink); } setDereferencedLink(dereferencedLink); } /** * If the object has been saved or read from the network, returns the (cached) versioned * name. Otherwise returns the base name. * @return */ public synchronized ContentName getVersionedName() { try { if (isSaved()) { if ((null == _currentVersionName) && (null != _currentVersionComponent)) // cache; only read lock necessary _currentVersionName = new ContentName(_baseName, _currentVersionComponent); return _currentVersionName; } return getBaseName(); } catch (IOException e) { if (Log.isLoggable(Level.WARNING)) Log.warning("Invalid state for object {0}, cannot get current version name: {1}", getBaseName(), e); return getBaseName(); } } public synchronized PublisherPublicKeyDigest getContentPublisher() throws IOException { if (isSaved()) return _currentPublisher; return null; } public synchronized KeyLocator getPublisherKeyLocator() throws IOException { if (isSaved()) return _currentPublisherKeyLocator; return null; } /** * Change the publisher information we use when we sign commits to this object. * Takes effect on the next save(). Useful for objects created with a read constructor, * but who want to override default publisher information. * @param signingKey indicates the identity we want to use to sign future writes to this * object. If null, will default to key manager's (user's) default key. * @param locator the key locator (key lookup location) information to attach to future * writes to this object. If null, will be the default value associated with the * chosen signing key. */ public synchronized void setOurPublisherInformation(PublisherPublicKeyDigest publisherIdentity, KeyLocator keyLocator) { _publisher = publisherIdentity; _keyLocator = keyLocator; } public synchronized Interest handleContent(ContentObject co, Interest interest) { try { boolean hasNewVersion = false; byte [][] excludes = null; try { if (Log.isLoggable(Level.INFO)) Log.info("updateInBackground: handleContent: " + _currentInterest + " retrieved " + co.name()); if (VersioningProfile.startsWithLaterVersionOf(co.name(), _currentInterest.name())) { // OK, we have something that is a later version of our desired object. // We're not sure it's actually the first content segment. hasNewVersion = true; if (VersioningProfile.isVersionedFirstSegment(_currentInterest.name(), co, null)) { if (Log.isLoggable(Level.INFO)) Log.info("updateInBackground: Background updating of {0}, got first segment: {1}", getVersionedName(), co.name()); // Streams assume caller has verified. So we verify here. 
// TODO add support for settable verifiers if (!_verifier.verify(co)) { if (Log.isLoggable(Log.FAC_SIGNING, Level.WARNING)) { Log.warning(Log.FAC_SIGNING, "CCNNetworkObject: content object received from background update did not verify! Ignoring object: {0}", co.fullName()); } hasNewVersion = false; // TODO -- exclude this one by digest, otherwise we're going // to get it back! For now, just copy the top-level part of GLV // behavior and exclude this version component. This isn't the right // answer, malicious objects can exclude new versions. But it's not clear // if the right answer is to do full gLV here and let that machinery // handle things, pulling potentially multiple objects in a callback, // or we just have to wait for issue #100011, and the ability to selectively // exclude content digests. excludes = new byte [][]{co.name().component(_currentInterest.name().count())}; if (Log.isLoggable(Level.INFO)) Log.info("updateInBackground: handleContent: got content for {0} that doesn't verify ({1}), excluding bogus version {2} as temporary workaround FIX WHEN POSSIBLE", _currentInterest.name(), co.fullName(), ContentName.componentPrintURI(excludes[0])); } else { update(co); } } else { // Have something that is not the first segment, like a repo write or a later segment. Go back // for first segment. ContentName latestVersionName = co.name().cut(_currentInterest.name().count() + 1); Log.info("updateInBackground: handleContent (network object): Have version information, now querying first segment of {0}", latestVersionName); // This should verify the first segment when we get it. update(latestVersionName, co.signedInfo().getPublisherKeyID()); } } else { excludes = new byte [][]{co.name().component(_currentInterest.name().count() - 1)}; if (Log.isLoggable(Level.INFO)) Log.info("updateInBackground: handleContent: got content for {0} that doesn't match: {1}", _currentInterest.name(), co.name()); } } catch (IOException ex) { if (Log.isLoggable(Level.INFO)) Log.info("updateInBackground: Exception {0}: {1} attempting to update based on object : {2}", ex.getClass().getName(), ex.getMessage(), co.name()); // alright, that one didn't work, try to go on. } if (hasNewVersion) { if (_continuousUpdates) { if (Log.isLoggable(Level.INFO)) Log.info("updateInBackground: handleContent: got a new version, continuous updates, calling updateInBackground recursively then returning null."); updateInBackground(true); } else { if (Log.isLoggable(Level.INFO)) Log.info("updateInBackground: handleContent: got a new version, not continuous updates, returning null."); _continuousUpdates = false; } // the updates above call newVersionAvailable return null; // implicit cancel of interest } else { if (null != excludes) { _currentInterest.exclude().add(excludes); } if (Log.isLoggable(Level.INFO)) Log.info("updateInBackground: handleContent: no new version, returning new interest for expression: {0}", _currentInterest); return _currentInterest; } } catch (IOException ex) { if (Log.isLoggable(Level.INFO)) Log.info("updateInBackground: handleContent: Exception {0}: {1} attempting to request further updates : {2}", ex.getClass().getName(), ex.getMessage(), _currentInterest); return null; } } /** * Subclasses that need to write an object of a particular type can override. * DKS TODO -- verify type on read, modulo that ENCR overrides everything. 
* @return */ public ContentType contentType() { return ContentType.DATA; } @Override public int hashCode() { final int prime = 31; int result = super.hashCode(); result = prime * result + ((_baseName == null) ? 0 : _baseName.hashCode()); result = prime * result + ((_currentPublisher == null) ? 0 : _currentPublisher .hashCode()); result = prime * result + Arrays.hashCode(_currentVersionComponent); return result; } @SuppressWarnings("unchecked") // cast to obj<E> @Override public boolean equals(Object obj) { // should hold read lock? if (this == obj) return true; if (!super.equals(obj)) return false; if (getClass() != obj.getClass()) return false; CCNNetworkObject<E> other = (CCNNetworkObject<E>) obj; if (_baseName == null) { if (other._baseName != null) return false; } else if (!_baseName.equals(other._baseName)) return false; if (_currentPublisher == null) { if (other._currentPublisher != null) return false; } else if (!_currentPublisher.equals(other._currentPublisher)) return false; if (!Arrays.equals(_currentVersionComponent, other._currentVersionComponent)) return false; return true; } @Override public String toString() { try { if (isSaved()) { return getVersionedName() + ": " + (isGone() ? "GONE" : "\nData:" + data()) + "\n Publisher: " + getContentPublisher() + "\n Publisher KeyLocator: " + getPublisherKeyLocator() + "\n"; } else if (available()) { return getBaseName() + " (unsaved): " + data(); } else { return getBaseName() + " (unsaved, no data)"; } } catch (IOException e) { Log.info("Unexpected exception retrieving object information: {0}", e); return getBaseName() + ": unexpected exception " + e; } } }
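/*
 * Usage sketch: illustrates the synchronous read/write and background-update
 * lifecycle described in the CCNNetworkObject class comment above. It assumes
 * the CCNStringObject subclass (same package) mirrors the constructor pattern
 * defined by CCNNetworkObject; the namespace and payload below are hypothetical
 * example values, not taken from any real application.
 */
class CCNNetworkObjectUsageSketch {
    public static void main(String[] args) throws Exception {
        org.ccnx.ccn.CCNHandle handle = org.ccnx.ccn.CCNHandle.open();
        // Hypothetical, application-chosen namespace.
        org.ccnx.ccn.protocol.ContentName name =
                org.ccnx.ccn.protocol.ContentName.fromNative("/example/app/status");

        // Write side: set the data locally, then save() publishes it as a new
        // version (here to a repository, per the SaveType passed in).
        CCNStringObject writer = new CCNStringObject(name, "hello",
                org.ccnx.ccn.impl.CCNFlowControl.SaveType.REPOSITORY, handle);
        writer.save();

        // Read side: the unversioned read constructor pulls the latest
        // available version; string() then exposes the decoded payload.
        CCNStringObject reader = new CCNStringObject(name, handle);
        System.out.println("latest version: " + reader.getVersion() + " -> " + reader.string());

        // Asynchronous use: keep tracking newer versions as they appear;
        // callers can wait() on the object or register an UpdateListener.
        reader.updateInBackground(true);

        // Close both objects to cancel outstanding interests and release flow controllers.
        writer.close();
        reader.close();
    }
}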
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package com.jme3.gde.android; import java.io.BufferedOutputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.util.ArrayList; import java.util.List; import org.netbeans.api.project.Project; import org.netbeans.api.project.ProjectInformation; import org.openide.DialogDisplayer; import org.openide.NotifyDescriptor; import org.openide.NotifyDescriptor.Message; import org.openide.filesystems.FileChooserBuilder; import org.openide.filesystems.FileLock; import org.openide.filesystems.FileObject; import org.openide.filesystems.FileUtil; import org.openide.util.Exceptions; import org.openide.util.NbPreferences; import org.openide.util.Utilities; import org.openide.xml.XMLUtil; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.xml.sax.InputSource; import org.xml.sax.SAXException; /** * * @author normenhansen */ public class AndroidSdkTool { /** * Starts the Android target configuration utility. */ public static void startAndroidTool() { startAndroidTool(false); } public static void startAndroidTool(boolean modal) { final String path = getAndroidToolPath(); if (path == null) { return; } Thread thread = new Thread(new Runnable() { @Override public void run() { String[] command = new String[]{path}; ProcessBuilder builder = new ProcessBuilder(command); try { Process proc = builder.start(); OutputReader outReader = new OutputReader(proc.getInputStream()); OutputReader errReader = new OutputReader(proc.getErrorStream()); outReader.start(); errReader.start(); proc.waitFor(); } catch (InterruptedException ex) { Exceptions.printStackTrace(ex); } catch (IOException ex) { Exceptions.printStackTrace(ex); } } }); if (modal) { thread.run(); } else { thread.start(); } } /** * Returns a FileObject for the android SDK folder, null if none is specified * @return */ public static FileObject getSdkFolder() { String path = getSdkPath(); if (path == null) { return null; } FileObject fileObj = FileUtil.toFileObject(new File(path)); if (fileObj == null) { return null; } return fileObj; } /** * Returns a String with the path to the SDK or null if none is specified. 
* @return */ public static String getSdkPath() { String path = NbPreferences.forModule(AndroidSdkTool.class).get("sdk_path", null); if (path == null) { FileChooserBuilder builder = new FileChooserBuilder(AndroidSdkTool.class); builder.setTitle("Please select Android SDK Folder"); builder.setDirectoriesOnly(true); File file = builder.showOpenDialog(); if (file != null) { FileObject folder = FileUtil.toFileObject(file); if (folder.getFileObject("tools") == null) { Message msg = new NotifyDescriptor.Message( "Not a valid SDK folder!", NotifyDescriptor.ERROR_MESSAGE); DialogDisplayer.getDefault().notifyLater(msg); } else { String name = file.getPath(); NbPreferences.forModule(AndroidSdkTool.class).put("sdk_path", name); return name; } } } else { return path; } return null; } /** * Returns a string with the path to the android tool, specific for platform (.exe for windows) * @return */ public static String getAndroidToolPath() { FileObject executable = null; FileObject folder = getSdkFolder(); if (folder == null) { return null; } if (Utilities.isWindows()) { executable = folder.getFileObject("tools/android.bat"); } else { executable = folder.getFileObject("tools/android"); } if (executable != null) { return FileUtil.toFile(executable).getPath(); } else { return null; } } /** * Gets a list of android targets registered in the SDK * @return */ public static List<AndroidTarget> getTargetList() { ArrayList<AndroidTarget> list = new ArrayList<AndroidTarget>(); final String path = getAndroidToolPath(); if (path == null) { return list; } String[] command = new String[]{path, "list", "targets"}; ProcessBuilder builder = new ProcessBuilder(command); try { Process proc = builder.start(); ListReader outReader = new ListReader(proc.getInputStream(), list); OutputReader errReader = new OutputReader(proc.getErrorStream()); outReader.start(); errReader.start(); proc.waitFor(); } catch (InterruptedException ex) { Exceptions.printStackTrace(ex); } catch (IOException ex) { Exceptions.printStackTrace(ex); } return list; } //TODO: check mainJmeClass public static void checkProject(Project project, String target, String name, String activity, String packag, String mainJmeClass) { final String path = getAndroidToolPath(); if (path == null) { return; } FileObject folder = project.getProjectDirectory().getFileObject("mobile"); if (folder == null) { try { folder = project.getProjectDirectory().createFolder("mobile"); createProject(project, target, name, activity, packag, mainJmeClass); } catch (IOException ex) { Exceptions.printStackTrace(ex); return; } } else { updateProject(project, target, name); } } public static void createProject(Project project, String target, String name, String activity, String packag, String mainJmeClass) { final String path = getAndroidToolPath(); if (path == null) { return; } FileObject folder = project.getProjectDirectory().getFileObject("mobile"); if (folder == null) { try { folder = project.getProjectDirectory().createFolder("mobile"); } catch (IOException ex) { Exceptions.printStackTrace(ex); return; } } String[] command = new String[]{path, "create", "project", "--target", target, "--name", name, "--path", FileUtil.toFile(folder).getPath(), "--activity", activity, "--package", packag}; ProcessBuilder builder = new ProcessBuilder(command); FileLock lock = null; try { Process proc = builder.start(); OutputReader outReader = new OutputReader(proc.getInputStream()); OutputReader errReader = new OutputReader(proc.getErrorStream()); outReader.start(); errReader.start(); proc.waitFor(); 
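            // The command array assembled above corresponds to an invocation like
            //   android create project --target <target-id> --name <AppName>
            //       --path <project-dir>/mobile --activity <ActivityName> --package <com.example.app>
            // (placeholder values shown). Once the SDK tool has generated the project
            // skeleton, the generated main activity source is overwritten below with
            // the jME3 AndroidHarness subclass produced by mainActivityString().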
String mainActName = "mobile/src/" + packag.replaceAll("\\.", "/") + "/MainActivity.java"; FileObject mainAct = project.getProjectDirectory().getFileObject(mainActName); if (mainAct != null) { lock = mainAct.lock(); OutputStreamWriter out = new OutputStreamWriter(new BufferedOutputStream(mainAct.getOutputStream(lock))); out.write(mainActivityString(mainJmeClass, packag)); out.close(); lock.releaseLock(); } else { throw new IOException("Cannot find " + mainActName); } } catch (InterruptedException ex) { Exceptions.printStackTrace(ex); } catch (IOException ex) { if (lock != null) { lock.releaseLock(); } Exceptions.printStackTrace(ex); } updateAndroidManifest(project); updateAndroidApplicationName(project, name); } public static void updateProject(Project project, String target, String name) { final String path = getAndroidToolPath(); if (path == null) { return; } FileObject folder = project.getProjectDirectory().getFileObject("mobile"); if (folder == null) { return; } String[] command = new String[]{path, "update", "project", "--target", target, "--name", name, "--path", FileUtil.toFile(folder).getPath()}; ProcessBuilder builder = new ProcessBuilder(command); try { Process proc = builder.start(); OutputReader outReader = new OutputReader(proc.getInputStream()); OutputReader errReader = new OutputReader(proc.getErrorStream()); outReader.start(); errReader.start(); proc.waitFor(); } catch (InterruptedException ex) { Exceptions.printStackTrace(ex); } catch (IOException ex) { Exceptions.printStackTrace(ex); } updateAndroidApplicationName(project, name); } private static void updateAndroidManifest(Project project) { FileObject manifest = project.getProjectDirectory().getFileObject("mobile/AndroidManifest.xml"); if (manifest == null) { return; } InputStream in = null; FileLock lock = null; OutputStream out = null; try { in = manifest.getInputStream(); Document configuration = XMLUtil.parse(new InputSource(in), false, false, null, null); in.close(); in = null; boolean changed = false; Element sdkElement = XmlHelper.findChildElement(configuration.getDocumentElement(), "uses-sdk"); if (sdkElement == null) { sdkElement = configuration.createElement("uses-sdk"); configuration.getDocumentElement().appendChild(sdkElement); changed = true; } if (!"8".equals(sdkElement.getAttribute("android:minSdkVersion"))) { sdkElement.setAttribute("android:minSdkVersion", "8"); changed = true; } Element screensElement = XmlHelper.findChildElement(configuration.getDocumentElement(), "supports-screens"); if (screensElement == null) { screensElement = configuration.createElement("supports-screens"); screensElement.setAttribute("android:anyDensity", "true"); screensElement.setAttribute("android:xlargeScreens", "true"); screensElement.setAttribute("android:largeScreens", "true"); screensElement.setAttribute("android:smallScreens", "true"); screensElement.setAttribute("android:normalScreens", "true"); configuration.getDocumentElement().appendChild(screensElement); changed = true; } if (changed) { lock = manifest.lock(); out = manifest.getOutputStream(lock); XMLUtil.write(configuration, out, "UTF-8"); out.close(); out = null; lock.releaseLock(); lock = null; } } catch (SAXException ex) { Exceptions.printStackTrace(ex); } catch (IOException ex) { Exceptions.printStackTrace(ex); } finally { if (lock != null) { lock.releaseLock(); } try { if (in != null) { in.close(); } if (out != null) { out.close(); } } catch (IOException ex1) { Exceptions.printStackTrace(ex1); } } } private static void updateAndroidApplicationName(Project 
project, String name) { FileObject manifest = project.getProjectDirectory().getFileObject("mobile/res/values/strings.xml"); if (manifest == null) { return; } InputStream in = null; FileLock lock = null; OutputStream out = null; try { in = manifest.getInputStream(); Document configuration = XMLUtil.parse(new InputSource(in), false, false, null, null); in.close(); in = null; Element sdkElement = XmlHelper.findChildElementWithAttribute(configuration.getDocumentElement(), "string", "name", "app_name"); if (sdkElement == null) { sdkElement = configuration.createElement("string"); sdkElement.setAttribute("name", "app_name"); configuration.getDocumentElement().appendChild(sdkElement); } if (!sdkElement.getTextContent().trim().equals(name)) { sdkElement.setTextContent(name); lock = manifest.lock(); out = manifest.getOutputStream(lock); XMLUtil.write(configuration, out, "UTF-8"); out.close(); out = null; lock.releaseLock(); lock = null; } } catch (SAXException ex) { Exceptions.printStackTrace(ex); } catch (IOException ex) { Exceptions.printStackTrace(ex); } finally { if (lock != null) { lock.releaseLock(); } try { if (in != null) { in.close(); } if (out != null) { out.close(); } } catch (IOException ex1) { Exceptions.printStackTrace(ex1); } } } private static String mainActivityString(String mainClass, String packag) { String str = "package " + packag + ";\n" + " \n" + "import com.jme3.app.AndroidHarness;\n" + "import android.content.pm.ActivityInfo;\n" + "import com.jme3.system.android.AndroidConfigChooser.ConfigType;\n" + " \n" + "public class MainActivity extends AndroidHarness{\n" + " \n" + " /*\n" + " * Note that you can ignore the errors displayed in this file,\n" + " * the android project will build regardless.\n" + " * Install the 'Android' plugin under Tools->Plugins->Available Plugins\n" + " * to get error checks and code completion for the Android project files.\n" + " */\n" + " \n" + " public MainActivity(){\n" + " // Set the application class to run\n" + " appClass = \"" + mainClass + "\";\n" + " // Try ConfigType.FASTEST; or ConfigType.LEGACY if you have problems\n" + " eglConfigType = ConfigType.BEST;\n" + " // Exit Dialog title & message\n" + " exitDialogTitle = \"Exit?\";\n" + " exitDialogMessage = \"Press Yes\";\n" + " // Enable verbose logging\n" + " eglConfigVerboseLogging = false;\n" + " // Choose screen orientation\n" + " screenOrientation = ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE;\n" + " // Invert the MouseEvents X (default = true)\n" + " mouseEventsInvertX = true;\n" + " // Invert the MouseEvents Y (default = true)\n" + " mouseEventsInvertY = true;\n" + " }\n" + " \n" + "}\n"; return str; } public static class AndroidTarget { private int id; private String name; private String title; private String platform; private int apiLevel; private int revision; private String skins; public AndroidTarget() { } public AndroidTarget(String name) { this.name = name; } public int getId() { return id; } public void setId(int id) { this.id = id; } public String getName() { return name; } public void setName(String name) { this.name = name; } public String getTitle() { return title; } public void setTitle(String title) { this.title = title; } public String getPlatform() { return platform; } public void setPlatform(String platform) { this.platform = platform; } public int getApiLevel() { return apiLevel; } public void setApiLevel(int apiLevel) { this.apiLevel = apiLevel; } public int getRevision() { return revision; } public void setRevision(int revision) { this.revision = revision; } public 
String getSkins() { return skins; } public void setSkins(String skins) { this.skins = skins; } @Override public String toString() { return getTitle(); } @Override public boolean equals(Object obj) { if (obj instanceof String && getName() != null) { return getName().equals(obj); } else { return super.equals(obj); } } } }
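/*
 * A minimal usage sketch, assuming a NetBeans Project instance is available from the
 * calling IDE code; the project name, activity, package and main class below are
 * placeholders, and the first reported target is picked arbitrarily.
 */
class AndroidSdkToolUsageExample {

    static void setupMobileProject(Project project) {
        // Targets are whatever "android list targets" reported for the configured SDK.
        List<AndroidSdkTool.AndroidTarget> targets = AndroidSdkTool.getTargetList();
        if (targets.isEmpty()) {
            return; // no SDK configured or no targets installed
        }
        String target = targets.get(0).getName();

        // Creates the "mobile" folder and android project on first run,
        // updates the existing project on subsequent runs (see checkProject above).
        AndroidSdkTool.checkProject(project, target, "MyGame",
                "MainActivity", "com.example.mygame", "com.example.mygame.Main");
    }
}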
// modification, are permitted provided that the following conditions are met: // documentation and/or other materials provided with the distribution. // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE // LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR // CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF // SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN // CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE // POSSIBILITY OF SUCH DAMAGE. package jodd.io.findfile; import jodd.io.FileNameUtil; import jodd.util.ClassLoaderUtil; import jodd.util.InExRules; import jodd.util.StringUtil; import jodd.util.Wildcard; import jodd.util.ArraysUtil; import jodd.io.FileUtil; import jodd.io.StreamUtil; import jodd.io.ZipUtil; import java.net.URL; import java.util.zip.ZipFile; import java.util.zip.ZipEntry; import java.util.Enumeration; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.io.FileNotFoundException; import static jodd.util.InExRuleMatcher.WILDCARD_PATH_RULE_MATCHER; import static jodd.util.InExRuleMatcher.WILDCARD_RULE_MATCHER; /** * Simple utility that scans <code>URL</code>s for classes. * Its purpose is to help scanning class paths for some classes. * Content of Jar files is also examined. * <p> * Scanning starts in included all mode (blacklist mode) for both jars and lists. * User can set explicit excludes. Of course, mode can be changed. * <p> * All paths are matched using {@link Wildcard#matchPath(String, String) path-style} * wildcard matcher. All entries are matched using {@link Wildcard#match(String, String) common-style} * wildcard matcher. * * @see ClassScanner */ public abstract class ClassFinder { private static final String CLASS_FILE_EXT = ".class"; private static final String JAR_FILE_EXT = ".jar"; /** * Array of system jars that are excluded from the search. * By default, these paths are common for linux, windows and mac. */ protected static String[] systemJars = new String[] { "**/jre/lib/*.jar", "**/jre/lib/ext/*.jar", "**/Java/Extensions/*.jar", "**/Classes/*.jar" }; protected final InExRules<String, String> rulesJars = createJarRules(); /** * Creates JAR rules. By default, excludes all system jars. */ protected InExRules<String, String> createJarRules() { InExRules<String, String> rulesJars = new InExRules<>(WILDCARD_PATH_RULE_MATCHER); for (String systemJar : systemJars) { rulesJars.exclude(systemJar); } return rulesJars; } /** * Returns system jars. */ public static String[] getSystemJars() { return systemJars; } /** * Specify excluded jars. */ public void setExcludedJars(String... excludedJars) { for (String excludedJar : excludedJars) { rulesJars.exclude(excludedJar); } } /** * Specify included jars. */ public void setIncludedJars(String... includedJars) { for (String includedJar : includedJars) { rulesJars.include(includedJar); } } /** * Sets white/black list mode for jars. */ public void setIncludeAllJars(boolean blacklist) { if (blacklist) { rulesJars.blacklist(); } else { rulesJars.whitelist(); } } /** * Sets white/black list mode for jars. 
*/ public void setExcludeAllJars(boolean whitelist) { if (whitelist) { rulesJars.whitelist(); } else { rulesJars.blacklist(); } } protected final InExRules<String, String> rulesEntries = createEntriesRules(); protected InExRules<String, String> createEntriesRules() { return new InExRules<>(WILDCARD_RULE_MATCHER); } /** * Sets included set of names that will be considered during configuration. * @see jodd.util.InExRules */ public void setIncludedEntries(String... includedEntries) { for (String includedEntry : includedEntries) { rulesEntries.include(includedEntry); } } /** * Sets white/black list mode for entries. */ public void setIncludeAllEntries(boolean blacklist) { if (blacklist) { rulesEntries.blacklist(); } else { rulesEntries.whitelist(); } } /** * Sets white/black list mode for entries. */ public void setExcludeAllEntries(boolean whitelist) { if (whitelist) { rulesEntries.whitelist(); } else { rulesEntries.blacklist(); } } /** * Sets excluded names that narrows included set of packages. * @see jodd.util.InExRules */ public void setExcludedEntries(String... excludedEntries) { for (String excludedEntry : excludedEntries) { rulesEntries.exclude(excludedEntry); } } /** * If set to <code>true</code> all files will be scanned and not only classes. */ protected boolean includeResources; /** * If set to <code>true</code> exceptions for entry scans are ignored. */ protected boolean ignoreException; public boolean isIncludeResources() { return includeResources; } public void setIncludeResources(boolean includeResources) { this.includeResources = includeResources; } public boolean isIgnoreException() { return ignoreException; } /** * Sets if exceptions during scanning process should be ignored or not. */ public void setIgnoreException(boolean ignoreException) { this.ignoreException = ignoreException; } /** * Scans several URLs. If (#ignoreExceptions} is set, exceptions * per one URL will be ignored and loops continues. */ protected void scanUrls(URL... urls) { for (URL path : urls) { scanUrl(path); } } /** * Scans single URL for classes and jar files. * Callback {@link #onEntry(EntryData)} is called on * each class name. */ protected void scanUrl(URL url) { File file = FileUtil.toFile(url); if (file == null) { if (ignoreException == false) { throw new FindFileException("URL is not a valid file: " + url); } } scanPath(file); } protected void scanPaths(File... paths) { for (File path : paths) { scanPath(path); } } protected void scanPaths(String... paths) { for (String path : paths) { scanPath(path); } } protected void scanPath(String path) { scanPath(new File(path)); } /** * Returns <code>true</code> if some JAR file has to be accepted. */ protected boolean acceptJar(File jarFile) { String path = jarFile.getAbsolutePath(); path = FileNameUtil.separatorsToUnix(path); return rulesJars.match(path); } /** * Scans single path. */ protected void scanPath(File file) { String path = file.getAbsolutePath(); if (StringUtil.endsWithIgnoreCase(path, JAR_FILE_EXT) == true) { if (acceptJar(file) == false) { return; } scanJarFile(file); } else if (file.isDirectory() == true) { scanClassPath(file); } } /** * Scans classes inside single JAR archive. Archive is scanned as a zip file. 
* @see #onEntry(EntryData) */ protected void scanJarFile(File file) { ZipFile zipFile; try { zipFile = new ZipFile(file); } catch (IOException ioex) { if (ignoreException == false) { throw new FindFileException("Invalid zip: " + file.getName(), ioex); } return; } Enumeration entries = zipFile.entries(); while (entries.hasMoreElements()) { ZipEntry zipEntry = (ZipEntry) entries.nextElement(); String zipEntryName = zipEntry.getName(); try { if (StringUtil.endsWithIgnoreCase(zipEntryName, CLASS_FILE_EXT)) { String entryName = prepareEntryName(zipEntryName, true); EntryData entryData = new EntryData(entryName, zipFile, zipEntry); try { scanEntry(entryData); } finally { entryData.closeInputStreamIfOpen(); } } else if (includeResources == true) { String entryName = prepareEntryName(zipEntryName, false); EntryData entryData = new EntryData(entryName, zipFile, zipEntry); try { scanEntry(entryData); } finally { entryData.closeInputStreamIfOpen(); } } } catch (RuntimeException rex) { if (ignoreException == false) { ZipUtil.close(zipFile); throw rex; } } } ZipUtil.close(zipFile); } /** * Scans single classpath directory. * @see #onEntry(EntryData) */ protected void scanClassPath(File root) { String rootPath = root.getAbsolutePath(); if (rootPath.endsWith(File.separator) == false) { rootPath += File.separatorChar; } FindFile ff = new FindFile().setIncludeDirs(false).setRecursive(true).searchPath(rootPath); File file; while ((file = ff.nextFile()) != null) { String filePath = file.getAbsolutePath(); try { if (StringUtil.endsWithIgnoreCase(filePath, CLASS_FILE_EXT)) { scanClassFile(filePath, rootPath, file, true); } else if (includeResources == true) { scanClassFile(filePath, rootPath, file, false); } } catch (RuntimeException rex) { if (ignoreException == false) { throw rex; } } } } protected void scanClassFile(String filePath, String rootPath, File file, boolean isClass) { if (StringUtil.startsWithIgnoreCase(filePath, rootPath) == true) { String entryName = prepareEntryName(filePath.substring(rootPath.length()), isClass); EntryData entryData = new EntryData(entryName, file); try { scanEntry(entryData); } finally { entryData.closeInputStreamIfOpen(); } } } /** * Prepares resource and class names. For classes, it strips '.class' from the end and converts * all (back)slashes to dots. For resources, it replaces all backslashes to slashes. */ protected String prepareEntryName(String name, boolean isClass) { String entryName = name; if (isClass) { entryName = name.substring(0, name.length() - 6); // 6 == ".class".length() entryName = StringUtil.replaceChar(entryName, '/', '.'); entryName = StringUtil.replaceChar(entryName, '\\', '.'); } else { entryName = '/' + StringUtil.replaceChar(entryName, '\\', '/'); } return entryName; } /** * Returns <code>true</code> if some entry name has to be accepted. * @see #prepareEntryName(String, boolean) * @see #scanEntry(EntryData) */ protected boolean acceptEntry(String entryName) { return rulesEntries.match(entryName); } /** * If entry name is {@link #acceptEntry(String) accepted} invokes {@link #onEntry(EntryData)} a callback}. */ protected void scanEntry(EntryData entryData) { if (acceptEntry(entryData.getName()) == false) { return; } try { onEntry(entryData); } catch (Exception ex) { throw new FindFileException("Scan entry error: " + entryData, ex); } } /** * Called during classpath scanning when class or resource is found. 
* <ul> * <li>Class name is java-alike class name (pk1.pk2.class) that may be immediately used * for dynamic loading.</li> * <li>Resource name starts with '\' and represents either jar path (\pk1/pk2/res) or relative file path (\pk1\pk2\res).</li> * </ul> * * <code>InputStream</code> is provided by InputStreamProvider and opened lazy. * Once opened, input stream doesn't have to be closed - this is done by this class anyway. */ protected abstract void onEntry(EntryData entryData) throws Exception; /** * Returns type signature bytes used for searching in class file. */ protected byte[] getTypeSignatureBytes(Class type) { String name = 'L' + type.getName().replace('.', '/') + ';'; return name.getBytes(); } /** * Returns <code>true</code> if class contains {@link #getTypeSignatureBytes(Class) type signature}. * It searches the class content for bytecode signature. This is the fastest way of finding if come * class uses some type. Please note that if signature exists it still doesn't means that class uses * it in expected way, therefore, class should be loaded to complete the scan. */ protected boolean isTypeSignatureInUse(InputStream inputStream, byte[] bytes) { try { byte[] data = StreamUtil.readBytes(inputStream); int index = ArraysUtil.indexOf(data, bytes); return index != -1; } catch (IOException ioex) { throw new FindFileException("Read error", ioex); } } /** * Loads class by its name. If {@link #ignoreException} is set, * no exception is thrown, but <code>null</code> is returned. */ protected Class loadClass(String className) throws ClassNotFoundException { try { return ClassLoaderUtil.loadClass(className); } catch (ClassNotFoundException cnfex) { if (ignoreException) { return null; } throw cnfex; } catch (Error error) { if (ignoreException) { return null; } throw error; } } /** * Provides input stream on demand. Input stream is not open until get(). */ protected static class EntryData { private final File file; private final ZipFile zipFile; private final ZipEntry zipEntry; private final String name; EntryData(String name, ZipFile zipFile, ZipEntry zipEntry) { this.name = name; this.zipFile = zipFile; this.zipEntry = zipEntry; this.file = null; inputStream = null; } EntryData(String name, File file) { this.name = name; this.file = file; this.zipEntry = null; this.zipFile = null; inputStream = null; } private InputStream inputStream; /** * Returns entry name. */ public String getName() { return name; } /** * Returns <code>true</code> if archive. */ public boolean isArchive() { return zipFile != null; } /** * Returns archive name or <code>null</code> if entry is not inside archived file. */ public String getArchiveName() { if (zipFile != null) { return zipFile.getName(); } return null; } /** * Opens zip entry or plain file and returns its input stream. */ public InputStream openInputStream() { if (zipFile != null) { try { inputStream = zipFile.getInputStream(zipEntry); return inputStream; } catch (IOException ioex) { throw new FindFileException("Input stream error: '" + zipFile.getName() + "', entry: '" + zipEntry.getName() + "'." , ioex); } } try { inputStream = new FileInputStream(file); return inputStream; } catch (FileNotFoundException fnfex) { throw new FindFileException("Unable to open: " + file.getAbsolutePath(), fnfex); } } /** * Closes input stream if opened. */ void closeInputStreamIfOpen() { if (inputStream == null) { return; } StreamUtil.close(inputStream); inputStream = null; } @Override public String toString() { return "EntryData{" + name + '\'' +'}'; } } }
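/*
 * A minimal sketch of a concrete scanner, assuming it lives in the same package so it
 * can reach the protected scanPath(...) helpers; the "jodd.io.*" filter and the field
 * name are placeholders chosen for illustration.
 */
class ExampleClassFinder extends ClassFinder {

    /** Collects the names of classes found under the scanned classpath roots. */
    final java.util.List<String> foundClasses = new java.util.ArrayList<>();

    ExampleClassFinder() {
        // Switch entry matching to whitelist-style filtering and only keep jodd.io.* classes.
        setIncludeAllEntries(false);
        setIncludedEntries("jodd.io.*");
        setIgnoreException(true);
    }

    void scan(java.io.File classpathRoot) {
        scanPath(classpathRoot);
    }

    @Override
    protected void onEntry(EntryData entryData) {
        // Class entry names are already dot-separated with the '.class' suffix stripped.
        foundClasses.add(entryData.getName());
    }
}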
package com.hexa.client.ui.miracle.editors;

import com.google.gwt.event.dom.client.BlurEvent;
import com.google.gwt.event.dom.client.BlurHandler;
import com.google.gwt.event.dom.client.KeyCodes;
import com.google.gwt.event.dom.client.KeyUpEvent;
import com.google.gwt.event.dom.client.KeyUpHandler;
import com.google.gwt.event.shared.HandlerRegistration;
import com.google.gwt.user.client.ui.Composite;
import com.google.gwt.user.client.ui.TextBox;
import com.hexa.client.interfaces.IAsyncCallback;
import com.hexa.client.ui.css.Css;

public class TextEditor extends Composite {
	TextBox tb = new TextBox();

	HandlerRegistration blurRegistration;
	HandlerRegistration keyUpRegistration;

	public TextEditor() {
		tb.addStyleName( Css.css().borderBoxSizing() );
		initWidget( tb );
	}

	@Override
	protected void onAttach() {
		super.onAttach();
		tb.setFocus( true );
		tb.selectAll();
	}

	public void edit( String currentText, final IAsyncCallback<String> callback, int width, int height ) {
		tb.setWidth( width + "px" );
		tb.setText( currentText );

		// on lost focus, cancel editing
		if( blurRegistration != null )
			blurRegistration.removeHandler();
		blurRegistration = tb.addBlurHandler( new BlurHandler() {
			@Override
			public void onBlur( BlurEvent event ) {
				callback.onSuccess( null );
			}
		} );

		// on ENTER, commit the edited text
		if( keyUpRegistration != null )
			keyUpRegistration.removeHandler();
		keyUpRegistration = tb.addKeyUpHandler( new KeyUpHandler() {
			@Override
			public void onKeyUp( KeyUpEvent event ) {
				if( event.getNativeKeyCode() == KeyCodes.KEY_ENTER ) {
					event.preventDefault();
					event.stopPropagation();
					callback.onSuccess( tb.getText() );
				}
			}
		} );
	}
}
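/*
 * A minimal usage sketch: the editor is attached to some parent panel (assumed here to
 * be a FlowPanel) and edit(...) is handed the callback that receives the committed text,
 * or null when editing is cancelled by losing focus. The 150x20 size is a placeholder;
 * note that edit() currently ignores the height argument.
 */
class TextEditorUsageExample {

	void startEditing( com.google.gwt.user.client.ui.FlowPanel parent, String currentValue,
			IAsyncCallback<String> onDone ) {
		TextEditor editor = new TextEditor();
		parent.add( editor ); // onAttach() focuses the text box and selects its content
		editor.edit( currentValue, onDone, 150, 20 );
	}
}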
package net.runelite.client.plugins.cannon; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import lombok.Getter; import net.runelite.api.coords.WorldPoint; enum CannonSpots { ABERRANT_SPECTRES(new WorldPoint(2456, 9791, 0)), ANKOU(new WorldPoint(3177, 10193, 0)), BANDIT(new WorldPoint(3037, 3700, 0)), BEAR(new WorldPoint(3113, 3672, 0)), BLACK_DEMONS(new WorldPoint(2859, 9778, 0), new WorldPoint(2841, 9791, 0), new WorldPoint(1421, 10089, 1), new WorldPoint(3174, 10154, 0), new WorldPoint(3089, 9960, 0)), BLACK_DRAGON(new WorldPoint(3239, 10206, 0)), BLACK_KNIGHTS(new WorldPoint(2906, 9685, 0), new WorldPoint(3053, 3852, 0)), BLOODVELDS(new WorldPoint(2439, 9821, 0), new WorldPoint(2448, 9821, 0), new WorldPoint(2472, 9832, 0), new WorldPoint(2453, 9817, 0), new WorldPoint(3597, 9743, 0)), BLUE_DRAGON(new WorldPoint(1933, 8973, 1)), BRINE_RAT(new WorldPoint(2707, 10132, 0)), CAVE_HORROR(new WorldPoint(3785, 9460, 0)), DAGGANOTH(new WorldPoint(2524, 10020, 0)), DARK_BEAST(new WorldPoint(1992, 4655, 0)), DARK_WARRIOR(new WorldPoint(3030, 3632, 0)), DUST_DEVIL(new WorldPoint(3218, 9366, 0)), EARTH_WARRIOR(new WorldPoint(3120, 9987, 0)), ELDER_CHAOS_DRUID(new WorldPoint(3237, 3622, 0)), ELVES(new WorldPoint(3278, 6098, 0)), FIRE_GIANTS(new WorldPoint(2393, 9782, 0), new WorldPoint(2412, 9776, 0), new WorldPoint(2401, 9780, 0), new WorldPoint(3047, 10340, 0)), GREATER_DEMONS(new WorldPoint(1435, 10086, 2), new WorldPoint(3224, 10132, 0)), GREEN_DRAGON(new WorldPoint(3225, 10068, 0)), HELLHOUNDS(new WorldPoint(2431, 9776, 0), new WorldPoint(2413, 9786, 0), new WorldPoint(2783, 9686, 0), new WorldPoint(3198, 10071, 0)), HILL_GIANT(new WorldPoint(3044, 10318, 0)), ICE_GIANT(new WorldPoint(3207, 10164, 0)), ICE_WARRIOR(new WorldPoint(2955, 3876, 0)), KALPHITE(new WorldPoint(3307, 9528, 0)), LESSER_DEMON(new WorldPoint(2838, 9559, 0), new WorldPoint(3163, 10114, 0)), LIZARDMEN(new WorldPoint(1500, 3703, 0)), LIZARDMEN_SHAMAN(new WorldPoint(1423, 3715, 0)), MAGIC_AXE(new WorldPoint(3190, 3960, 0)), MAMMOTH(new WorldPoint(3168, 3595, 0)), MINIONS_OF_SCARABAS(new WorldPoint(3297, 9252, 0)), ROGUE(new WorldPoint(3285, 3930, 0)), SCORPION(new WorldPoint(3233, 10335, 0)), SKELETON(new WorldPoint(3018, 3592, 0)), SMOKE_DEVIL(new WorldPoint(2398, 9444, 0)), SPIDER(new WorldPoint(3169, 3886, 0)), SUQAHS(new WorldPoint(2114, 3943, 0)), TROLLS(new WorldPoint(2401, 3856, 0), new WorldPoint(1242, 3517, 0)), ZOMBIE(new WorldPoint(3172, 3677, 0)); @Getter private static final List<WorldPoint> cannonSpots = new ArrayList<>(); static { for (CannonSpots cannonSpot : values()) { cannonSpots.addAll(Arrays.asList(cannonSpot.spots)); } } private final WorldPoint[] spots; CannonSpots(WorldPoint... spots) { this.spots = spots; } }
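/*
 * A minimal sketch of how the aggregated spot list might be consulted, e.g. to decide
 * whether a location is close to a known cannon spot; the 5-tile radius is an arbitrary
 * placeholder, and distanceTo is assumed to yield a tile distance (very large when the
 * planes differ).
 */
class CannonSpotsUsageExample {

	static boolean isNearKnownSpot(WorldPoint location) {
		for (WorldPoint spot : CannonSpots.getCannonSpots()) {
			if (location.distanceTo(spot) <= 5) {
				return true;
			}
		}
		return false;
	}
}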
package com.marcobehler.saito.core.processing;

import com.marcobehler.saito.core.SaitoModel;
import com.marcobehler.saito.core.files.DataFile;
import com.marcobehler.saito.core.files.Layout;
import com.marcobehler.saito.core.files.Other;
import com.marcobehler.saito.core.files.Template;
import com.marcobehler.saito.core.util.PathUtils;
import lombok.extern.slf4j.Slf4j;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.regex.Pattern;

/**
 * @author Marco Behler <marco@marcobehler.com>
 */
@Slf4j
public class SourceScanner {

    private static final Pattern layoutPattern = Pattern.compile("(?i)layouts[\\\\|/][^_].+\\.ftl");
    private static final Pattern templatePattern = Pattern.compile("(?i).+\\.html\\.ftl");
    private static final Pattern filePattern = Pattern.compile("(?i).+\\..+");
    private static final Pattern dataPattern = Pattern.compile("(?i).+\\.json");

    public SaitoModel scan(Path directory) {
        SaitoModel result = new SaitoModel();

        Path sourcesDir = directory.resolve("source");
        scanSourceDirectory(sourcesDir, result);

        Path dataDir = directory.resolve("data");
        scanDataDirectory(dataDir, result);

        return result;
    }

    private void scanSourceDirectory(Path directory, SaitoModel result) {
        Path absoluteDirectory = directory.toAbsolutePath().normalize();
        try {
            Files.walk(directory).parallel().forEach(p -> {
                Path relativePath = PathUtils.relativize(absoluteDirectory, p);
                if (layoutPattern.matcher(relativePath.toString()).matches()) {
                    result.getLayouts().add(new Layout(directory, relativePath));
                } else if (templatePattern.matcher(relativePath.toString()).matches()) {
                    result.getTemplates().add(new Template(directory, relativePath));
                } else if (filePattern.matcher(relativePath.toString()).matches()) {
                    result.getOthers().add(new Other(directory, relativePath));
                }
            });
        } catch (IOException e) {
            log.error("Problem walking {}", directory, e);
        }
    }

    private void scanDataDirectory(Path directory, SaitoModel result) {
        // Skip silently when the project has no 'data' directory.
        if (!Files.exists(directory)) {
            log.info("No 'data' directory found in project dir, skipping...");
            return;
        }
        Path absoluteDirectory = directory.toAbsolutePath().normalize();
        try {
            Files.walk(directory).parallel().forEach(p -> {
                Path relativePath = PathUtils.relativize(absoluteDirectory, p);
                if (dataPattern.matcher(relativePath.toString()).matches()) {
                    result.getDataFiles().add(new DataFile(directory, relativePath));
                }
            });
        } catch (IOException e) {
            log.error("Problem walking {}", directory, e);
        }
    }
}
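/*
 * A minimal usage sketch, assuming a Saito project layout with "source" and "data"
 * subdirectories under the given root; the path string below is a placeholder.
 */
class SourceScannerUsageExample {

    static SaitoModel scanProject() {
        SourceScanner scanner = new SourceScanner();
        // Produces a SaitoModel whose layouts/templates/others/dataFiles lists are filled
        // according to the file-name patterns declared above.
        return scanner.scan(java.nio.file.Paths.get("my-saito-project"));
    }
}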
package uk.org.taverna.scufl2.rdfxml; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import java.net.URI; import java.net.URISyntaxException; import java.net.URL; import java.util.Stack; import org.apache.commons.io.IOUtils; import org.apache.commons.io.output.ByteArrayOutputStream; import org.junit.Before; import org.junit.Test; import uk.org.taverna.scufl2.api.ExampleWorkflow; import uk.org.taverna.scufl2.api.activity.Activity; import uk.org.taverna.scufl2.api.common.WorkflowBean; import uk.org.taverna.scufl2.api.configurations.Configuration; import uk.org.taverna.scufl2.api.container.WorkflowBundle; import uk.org.taverna.scufl2.api.port.InputActivityPort; import uk.org.taverna.scufl2.api.port.OutputActivityPort; import uk.org.taverna.scufl2.api.profiles.Profile; import uk.org.taverna.scufl2.api.property.PropertyResource; public class TestPropertyParsing { private static final String PROFILE_RDF = "megaProfile.rdf"; protected ProfileParser profileParser = new ProfileParser(); private URL profileUrl; private WorkflowBundle bundle; private Profile profile; @Test public void activity() throws Exception { assertEquals(17, profile.getActivities().size()); Activity contentList = profile.getActivities() .getByName("Content_list"); assertEquals("Content_list", contentList.getName()); assertEquals("http://ns.taverna.org.uk/2010/activity/xml-splitter/in", contentList.getConfigurableType().toASCIIString()); assertEquals(1, contentList.getInputPorts().size()); InputActivityPort personName = contentList.getInputPorts().getByName( "WSArrayofData"); assertEquals("WSArrayofData", personName.getName()); assertEquals(1, personName.getDepth().intValue()); assertEquals(1, contentList.getOutputPorts().size()); OutputActivityPort hello = contentList.getOutputPorts().getByName( "output"); assertEquals("output", hello.getName()); assertEquals(0, hello.getDepth().intValue()); assertEquals(0, hello.getGranularDepth().intValue()); } @Test public void configuration() throws Exception { assertEquals(17, profile.getConfigurations().size()); Configuration get_XML_result = profile.getConfigurations().getByName( "Get_XML_result"); assertEquals("Get_XML_result", get_XML_result.getName()); assertEquals(profile.getActivities().getByName("Get_XML_result"), get_XML_result.getConfigures()); assertEquals("http://ns.taverna.org.uk/2010/activity/wsdl#Config", get_XML_result.getConfigurableType().toASCIIString()); } public void loadProfileDocument() { profileUrl = getClass().getResource(PROFILE_RDF); assertNotNull("Could not find profile document " + PROFILE_RDF, profileUrl); } @Test public void parserStackEmpty() throws Exception { Stack<WorkflowBean> stack = profileParser.getParserState().getStack(); assertEquals(1, stack.size()); assertEquals(bundle, stack.peek()); } public void prepareParserState() throws URISyntaxException { bundle = new ExampleWorkflow().makeWorkflowBundle(); bundle.getProfiles().clear(); bundle.setMainProfile(null); profileParser.getParserState().setLocation(URI.create("/")); profileParser.getParserState().push(bundle); } @Test public void propertyResource() throws Exception { Configuration get_XML_result = profile.getConfigurations().getByName( "Get_XML_result"); PropertyResource propResource = get_XML_result.getPropertyResource(); assertEquals("http://ns.taverna.org.uk/2010/activity/wsdl#Config", propResource.getTypeURI().toASCIIString()); assertNull(propResource.getResourceURI()); assertEquals(1, 
propResource.getProperties().size()); URI wsdlOperation = URI .create("http://ns.taverna.org.uk/2010/activity/wsdl#operation"); PropertyResource operation = (PropertyResource) propResource .getProperty(wsdlOperation); URI wsdl = URI .create("http://ns.taverna.org.uk/2010/activity/wsdl/operation"); assertEquals(wsdl, operation.getTypeURI()); assertEquals("poll", operation.getPropertyAsString(wsdl.resolve("#name"))); assertEquals( "http: operation.getPropertyAsReference(wsdl.resolve("#wsdl")) .getResourceURI().toASCIIString()); } @Before public void readProfile() throws Exception { loadProfileDocument(); prepareParserState(); profileParser.readProfile(URI.create("/profile/tavernaWorkbench/"), URI.create("profile/tavernaWorkbench.rdf"), profileUrl.openStream()); profile = bundle.getProfiles().getByName("taverna-2.2.0"); assertNotNull(profile); } @Test public void xmlOutput() throws Exception { ByteArrayOutputStream output = new ByteArrayOutputStream(); new RDFXMLSerializer(bundle).profileDoc(output, profile, URI.create("profile/profile.rdf")); String profileStr = new String(output.toByteArray(), "UTF-8"); String expectedProfile = IOUtils.toString(profileUrl.openStream(), "UTF-8"); assertEquals(expectedProfile, profileStr); // System.out.println(profileStr); } }
package jolie.net; import com.google.gwt.user.client.rpc.SerializationException; import com.google.gwt.user.server.rpc.RPC; import com.google.gwt.user.server.rpc.RPCRequest; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.net.URI; import java.net.URLDecoder; import java.net.URLEncoder; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.zip.DeflaterOutputStream; import java.util.zip.GZIPOutputStream; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.OutputKeys; import javax.xml.transform.Result; import javax.xml.transform.Source; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerConfigurationException; import javax.xml.transform.TransformerException; import javax.xml.transform.TransformerFactory; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.stream.StreamResult; import jolie.Interpreter; import jolie.lang.Constants; import jolie.lang.NativeType; import jolie.net.http.HttpMessage; import jolie.net.http.HttpParser; import jolie.net.http.HttpUtils; import jolie.net.http.Method; import jolie.net.http.MultiPartFormDataParser; import jolie.net.http.json.JsonUtils; import jolie.net.ports.Interface; import jolie.net.protocols.CommProtocol; import jolie.runtime.ByteArray; import jolie.runtime.Value; import jolie.runtime.ValueVector; import jolie.runtime.VariablePath; import jolie.runtime.typing.OneWayTypeDescription; import jolie.runtime.typing.OperationTypeDescription; import jolie.runtime.typing.RequestResponseTypeDescription; import jolie.runtime.typing.Type; import jolie.runtime.typing.TypeCastingException; import jolie.util.LocationParser; import jolie.xml.XmlUtils; import joliex.gwt.client.JolieService; import joliex.gwt.server.JolieGWTConverter; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.xml.sax.InputSource; import org.xml.sax.SAXException; /** * HTTP protocol implementation * @author Fabrizio Montesi * 14 Nov 2012 - Saverio Giallorenzo - Fabrizio Montesi: support for status codes */ public class HttpProtocol extends CommProtocol { private static final byte[] NOT_IMPLEMENTED_HEADER = "HTTP/1.1 501 Not Implemented".getBytes(); private static final int DEFAULT_STATUS_CODE = 200; private static final int DEFAULT_REDIRECTION_STATUS_CODE = 303; private static final Map< Integer, String > statusCodeDescriptions = new HashMap< Integer, String >(); private static final Set< Integer > locationRequiredStatusCodes = new HashSet< Integer >(); static { locationRequiredStatusCodes.add( 301 ); locationRequiredStatusCodes.add( 302 ); locationRequiredStatusCodes.add( 303 ); locationRequiredStatusCodes.add( 307 ); locationRequiredStatusCodes.add( 308 ); } static { // Initialise the HTTP Status code map. 
statusCodeDescriptions.put( 100,"Continue" ); statusCodeDescriptions.put( 101,"Switching Protocols" ); statusCodeDescriptions.put( 102,"Processing" ); statusCodeDescriptions.put( 200,"OK" ); statusCodeDescriptions.put( 201,"Created" ); statusCodeDescriptions.put( 202,"Accepted" ); statusCodeDescriptions.put( 203,"Non-Authoritative Information" ); statusCodeDescriptions.put( 204,"No Content" ); statusCodeDescriptions.put( 205,"Reset Content" ); statusCodeDescriptions.put( 206,"Partial Content" ); statusCodeDescriptions.put( 207,"Multi-Status" ); statusCodeDescriptions.put( 208,"Already Reported" ); statusCodeDescriptions.put( 226,"IM Used" ); statusCodeDescriptions.put( 300,"Multiple Choices" ); statusCodeDescriptions.put( 301,"Moved Permanently" ); statusCodeDescriptions.put( 302,"Found" ); statusCodeDescriptions.put( 303,"See Other" ); statusCodeDescriptions.put( 304,"Not Modified" ); statusCodeDescriptions.put( 305,"Use Proxy" ); statusCodeDescriptions.put( 306,"Reserved" ); statusCodeDescriptions.put( 307,"Temporary Redirect" ); statusCodeDescriptions.put( 308,"Permanent Redirect" ); statusCodeDescriptions.put( 400,"Bad Request" ); statusCodeDescriptions.put( 401,"Unauthorized" ); statusCodeDescriptions.put( 402,"Payment Required" ); statusCodeDescriptions.put( 403,"Forbidden" ); statusCodeDescriptions.put( 404,"Not Found" ); statusCodeDescriptions.put( 405,"Method Not Allowed" ); statusCodeDescriptions.put( 406,"Not Acceptable" ); statusCodeDescriptions.put( 407,"Proxy Authentication Required" ); statusCodeDescriptions.put( 408,"Request Timeout" ); statusCodeDescriptions.put( 409,"Conflict" ); statusCodeDescriptions.put( 410,"Gone" ); statusCodeDescriptions.put( 411,"Length Required" ); statusCodeDescriptions.put( 412,"Precondition Failed" ); statusCodeDescriptions.put( 413,"Request Entity Too Large" ); statusCodeDescriptions.put( 414,"Request-URI Too Long" ); statusCodeDescriptions.put( 415,"Unsupported Media Type" ); statusCodeDescriptions.put( 416,"Requested Range Not Satisfiable" ); statusCodeDescriptions.put( 417,"Expectation Failed" ); statusCodeDescriptions.put( 422,"Unprocessable Entity" ); statusCodeDescriptions.put( 423,"Locked" ); statusCodeDescriptions.put( 424,"Failed Dependency" ); statusCodeDescriptions.put( 426,"Upgrade Required" ); statusCodeDescriptions.put( 427,"Unassigned" ); statusCodeDescriptions.put( 428,"Precondition Required" ); statusCodeDescriptions.put( 429,"Too Many Requests" ); statusCodeDescriptions.put( 430,"Unassigned" ); statusCodeDescriptions.put( 431,"Request Header Fields Too Large" ); statusCodeDescriptions.put( 500,"Internal Server Error" ); statusCodeDescriptions.put( 501,"Not Implemented" ); statusCodeDescriptions.put( 502,"Bad Gateway" ); statusCodeDescriptions.put( 503,"Service Unavailable" ); statusCodeDescriptions.put( 504,"Gateway Timeout" ); statusCodeDescriptions.put( 505,"HTTP Version Not Supported" ); statusCodeDescriptions.put( 507,"Insufficient Storage" ); statusCodeDescriptions.put( 508,"Loop Detected" ); statusCodeDescriptions.put( 509,"Unassigned" ); statusCodeDescriptions.put( 510,"Not Extended" ); statusCodeDescriptions.put( 511,"Network Authentication Required" ); } private static class Parameters { private static final String DEBUG = "debug"; private static final String COOKIES = "cookies"; private static final String METHOD = "method"; private static final String ALIAS = "alias"; private static final String MULTIPART_HEADERS = "multipartHeaders"; private static final String CONCURRENT = "concurrent"; private static final 
String USER_AGENT = "userAgent"; private static final String HOST = "host"; private static final String HEADERS = "headers"; private static final String STATUS_CODE = "statusCode"; private static final String REDIRECT = "redirect"; private static final String DEFAULT_OPERATION = "default"; private static class MultiPartHeaders { private static final String FILENAME = "filename"; } } private static class Headers { private static final String JOLIE_MESSAGE_ID = "X-Jolie-MessageID"; } private String inputId = null; private final Transformer transformer; private final DocumentBuilderFactory docBuilderFactory; private final DocumentBuilder docBuilder; private final URI uri; private final boolean inInputPort; private MultiPartFormDataParser multiPartFormDataParser = null; public final static String CRLF = new String( new char[] { 13, 10 } ); public String name() { return "http"; } public boolean isThreadSafe() { return checkBooleanParameter( Parameters.CONCURRENT ); } public HttpProtocol( VariablePath configurationPath, URI uri, boolean inInputPort, TransformerFactory transformerFactory, DocumentBuilderFactory docBuilderFactory, DocumentBuilder docBuilder ) throws TransformerConfigurationException { super( configurationPath ); this.uri = uri; this.inInputPort = inInputPort; this.transformer = transformerFactory.newTransformer(); this.docBuilderFactory = docBuilderFactory; this.docBuilder = docBuilder; transformer.setOutputProperty( OutputKeys.OMIT_XML_DECLARATION, "yes" ); } private void valueToDocument( Value value, Node node, Document doc ) { node.appendChild( doc.createTextNode( value.strValue() ) ); Element currentElement; for( Entry< String, ValueVector > entry : value.children().entrySet() ) { if ( !entry.getKey().startsWith( "@" ) ) { for( Value val : entry.getValue() ) { currentElement = doc.createElement( entry.getKey() ); node.appendChild( currentElement ); Map< String, ValueVector > attrs = jolie.xml.XmlUtils.getAttributesOrNull( val ); if ( attrs != null ) { for( Entry< String, ValueVector > attrEntry : attrs.entrySet() ) { currentElement.setAttribute( attrEntry.getKey(), attrEntry.getValue().first().strValue() ); } } valueToDocument( val, currentElement, doc ); } } } } public String getMultipartHeaderForPart( String operationName, String partName ) { if ( hasOperationSpecificParameter( operationName, Parameters.MULTIPART_HEADERS ) ) { Value v = getOperationSpecificParameterFirstValue( operationName, Parameters.MULTIPART_HEADERS ); if ( v.hasChildren( partName ) ) { v = v.getFirstChild( partName ); if ( v.hasChildren( Parameters.MultiPartHeaders.FILENAME ) ) { v = v.getFirstChild( Parameters.MultiPartHeaders.FILENAME ); return v.strValue(); } } } return null; } private final static String BOUNDARY = "----Jol13H77p77Bound4r155"; private void send_appendCookies( CommMessage message, String hostname, StringBuilder headerBuilder ) { Value cookieParam = null; if ( hasOperationSpecificParameter( message.operationName(), Parameters.COOKIES ) ) { cookieParam = getOperationSpecificParameterFirstValue( message.operationName(), Parameters.COOKIES ); } else if ( hasParameter( Parameters.COOKIES ) ) { cookieParam = getParameterFirstValue( Parameters.COOKIES ); } if ( cookieParam != null ) { Value cookieConfig; String domain; StringBuilder cookieSB = new StringBuilder(); for( Entry< String, ValueVector > entry : cookieParam.children().entrySet() ) { cookieConfig = entry.getValue().first(); if ( message.value().hasChildren( cookieConfig.strValue() ) ) { domain = cookieConfig.hasChildren( "domain" ) 
? cookieConfig.getFirstChild( "domain" ).strValue() : ""; if ( domain.isEmpty() || hostname.endsWith( domain ) ) { cookieSB .append( entry.getKey() ) .append( '=' ) .append( message.value().getFirstChild( cookieConfig.strValue() ).strValue() ) .append( ";" ); } } } if ( cookieSB.length() > 0 ) { headerBuilder .append( "Cookie: " ) .append( cookieSB ) .append( CRLF ); } } } private void send_appendSetCookieHeader( CommMessage message, StringBuilder headerBuilder ) { Value cookieParam = null; if ( hasOperationSpecificParameter( message.operationName(), Parameters.COOKIES ) ) { cookieParam = getOperationSpecificParameterFirstValue( message.operationName(), Parameters.COOKIES ); } else if ( hasParameter( Parameters.COOKIES ) ) { cookieParam = getParameterFirstValue( Parameters.COOKIES ); } if ( cookieParam != null ) { Value cookieConfig; for( Entry< String, ValueVector > entry : cookieParam.children().entrySet() ) { cookieConfig = entry.getValue().first(); if ( message.value().hasChildren( cookieConfig.strValue() ) ) { headerBuilder .append( "Set-Cookie: " ) .append( entry.getKey() ).append( '=' ) .append( message.value().getFirstChild( cookieConfig.strValue() ).strValue() ) .append( "; expires=" ) .append( cookieConfig.hasChildren( "expires" ) ? cookieConfig.getFirstChild( "expires" ).strValue() : "" ) .append( "; domain=" ) .append( cookieConfig.hasChildren( "domain" ) ? cookieConfig.getFirstChild( "domain" ).strValue() : "" ) .append( "; path=" ) .append( cookieConfig.hasChildren( "path" ) ? cookieConfig.getFirstChild( "path" ).strValue() : "" ); if ( cookieConfig.hasChildren( "secure" ) && cookieConfig.getFirstChild( "secure" ).intValue() > 0 ) { headerBuilder.append( "; secure" ); } headerBuilder.append( CRLF ); } } } } private String encoding = null; private String requestFormat = null; private void send_appendQuerystring( Value value, String charset, StringBuilder headerBuilder ) throws IOException { if ( value.children().isEmpty() == false ) { headerBuilder.append( '?' 
); for( Entry< String, ValueVector > entry : value.children().entrySet() ) { for( Value v : entry.getValue() ) { headerBuilder .append( entry.getKey() ) .append( '=' ) .append( URLEncoder.encode( v.strValue(), charset ) ) .append( '&' ); } } } } private void send_appendJsonQueryString( CommMessage message, String charset, StringBuilder headerBuilder ) throws IOException { if ( message.value().hasChildren() == false ) { headerBuilder.append( "?=" ); JsonUtils.valueToJsonString( message.value(), getSendType( message ), headerBuilder ); } } private void send_appendParsedAlias( String alias, Value value, String charset, StringBuilder headerBuilder ) throws IOException { int offset = 0; ArrayList<String> aliasKeys = new ArrayList<String>(); String currStrValue; String currKey; StringBuilder result = new StringBuilder( alias ); Matcher m = Pattern.compile( "%(!)?\\{[^\\}]*\\}" ).matcher( alias ); while( m.find() ) { if ( m.group( 1 ) == null ) { // We have to use URLEncoder currKey = alias.substring( m.start() + 2, m.end() - 1 ); if ( "$".equals( currKey ) ) { currStrValue = URLEncoder.encode( value.strValue(), charset ); } else { currStrValue = URLEncoder.encode( value.getFirstChild( currKey ).strValue(), charset ); aliasKeys.add( currKey ); } } else { // We have to insert the string raw currKey = alias.substring( m.start() + 3, m.end() - 1 ); if ( "$".equals( currKey ) ) { currStrValue = value.strValue(); } else { currStrValue = value.getFirstChild( currKey ).strValue(); aliasKeys.add( currKey ); } } result.replace( m.start() + offset, m.end() + offset, currStrValue ); offset += currStrValue.length() - 3 - currKey.length(); } // removing used keys for( int k = 0; k < aliasKeys.size(); k++ ) { value.children().remove( aliasKeys.get( k ) ); } headerBuilder.append( result ); } private String getCharset() { String charset = "UTF-8"; if ( hasParameter( "charset" ) ) { charset = getStringParameter( "charset" ); } return charset; } private String send_getFormat() { String format = "xml"; if ( inInputPort && requestFormat != null ) { format = requestFormat; requestFormat = null; } else if ( hasParameter( "format" ) ) { format = getStringParameter( "format" ); } return format; } private static class EncodedContent { private ByteArray content = null; private String contentType = ""; private String contentDisposition = ""; } private EncodedContent send_encodeContent( CommMessage message, Method method, String charset, String format ) throws IOException { EncodedContent ret = new EncodedContent(); if ( inInputPort == false && method == Method.GET ) { // We are building a GET request return ret; } if ( "xml".equals( format ) ) { Document doc = docBuilder.newDocument(); Element root = doc.createElement( message.operationName() + (( inInputPort ) ? 
"Response" : "") ); doc.appendChild( root ); if ( message.isFault() ) { Element faultElement = doc.createElement( message.fault().faultName() ); root.appendChild( faultElement ); valueToDocument( message.fault().value(), faultElement, doc ); } else { valueToDocument( message.value(), root, doc ); } Source src = new DOMSource( doc ); ByteArrayOutputStream tmpStream = new ByteArrayOutputStream(); Result dest = new StreamResult( tmpStream ); try { transformer.transform( src, dest ); } catch( TransformerException e ) { throw new IOException( e ); } ret.content = new ByteArray( tmpStream.toByteArray() ); ret.contentType = "text/xml"; } else if ( "binary".equals( format ) ) { if ( message.value().isByteArray() ) { ret.content = (ByteArray) message.value().valueObject(); ret.contentType = "application/octet-stream"; } } else if ( "html".equals( format ) ) { ret.content = new ByteArray( message.value().strValue().getBytes( charset ) ); ret.contentType = "text/html"; } else if ( "multipart/form-data".equals( format ) ) { ret.contentType = "multipart/form-data; boundary=" + BOUNDARY; ByteArrayOutputStream bStream = new ByteArrayOutputStream(); StringBuilder builder = new StringBuilder(); for( Entry< String, ValueVector > entry : message.value().children().entrySet() ) { if ( !entry.getKey().startsWith( "@" ) ) { builder.append( "--" ).append( BOUNDARY ).append( CRLF ); builder.append( "Content-Disposition: form-data; name=\"" ).append( entry.getKey() ).append( '\"' ); boolean isBinary = false; if ( hasOperationSpecificParameter( message.operationName(), Parameters.MULTIPART_HEADERS ) ) { Value specOpParam = getOperationSpecificParameterFirstValue( message.operationName(), Parameters.MULTIPART_HEADERS ); if ( specOpParam.hasChildren( "partName" ) ) { ValueVector partNames = specOpParam.getChildren( "partName" ); for( int p = 0; p < partNames.size(); p++ ) { if ( partNames.get( p ).hasChildren( "part" ) ) { if ( partNames.get( p ).getFirstChild( "part" ).strValue().equals( entry.getKey() ) ) { isBinary = true; if ( partNames.get( p ).hasChildren( "filename" ) ) { builder.append( "; filename=\"" ).append( partNames.get( p ).getFirstChild( "filename" ).strValue() ).append( "\"" ); } if ( partNames.get( p ).hasChildren( "contentType" ) ) { builder.append( CRLF ).append( "Content-Type:" ).append( partNames.get( p ).getFirstChild( "contentType" ).strValue() ); } } } } } } builder.append( CRLF ).append( CRLF ); if ( isBinary ) { bStream.write( builder.toString().getBytes( charset ) ); bStream.write( entry.getValue().first().byteArrayValue().getBytes() ); builder.delete( 0, builder.length() - 1 ); builder.append( CRLF ); } else { builder.append( entry.getValue().first().strValue() ).append( CRLF ); } } } builder.append( "--" + BOUNDARY + "--" ); bStream.write( builder.toString().getBytes( charset )); ret.content = new ByteArray( bStream.toByteArray() ); } else if ( "x-www-form-urlencoded".equals( format ) ) { ret.contentType = "application/x-www-form-urlencoded"; Iterator< Entry< String, ValueVector > > it = message.value().children().entrySet().iterator(); Entry< String, ValueVector > entry; StringBuilder builder = new StringBuilder(); while( it.hasNext() ) { entry = it.next(); builder.append( entry.getKey() ) .append( "=" ) .append( URLEncoder.encode( entry.getValue().first().strValue(), "UTF-8" ) ); if ( it.hasNext() ) { builder.append( '&' ); } } ret.content = new ByteArray( builder.toString().getBytes( charset ) ); } else if ( "text/x-gwt-rpc".equals( format ) ) { ret.contentType = "text/x-gwt-rpc"; try 
{ if ( message.isFault() ) { ret.content = new ByteArray( RPC.encodeResponseForFailure( JolieService.class.getMethods()[0], JolieGWTConverter.jolieToGwtFault( message.fault() ) ).getBytes( charset ) ); } else { joliex.gwt.client.Value v = new joliex.gwt.client.Value(); JolieGWTConverter.jolieToGwtValue( message.value(), v ); ret.content = new ByteArray( RPC.encodeResponseForSuccess( JolieService.class.getMethods()[0], v ).getBytes( charset ) ); } } catch( SerializationException e ) { throw new IOException( e ); } } else if ( "json".equals( format ) || "application/json".equals( format ) ) { ret.contentType = "application/json"; StringBuilder jsonStringBuilder = new StringBuilder(); if ( message.isFault() ) { Value jolieJSONFault = Value.create(); jolieJSONFault.getFirstChild( "jolieFault" ).getFirstChild( "faultName" ).setValue( message.fault().faultName() ); if ( message.fault().value().hasChildren() ) { jolieJSONFault.getFirstChild( "jolieFault" ).getFirstChild( "data" ).deepCopy( message.fault().value() ); } JsonUtils.valueToJsonString( jolieJSONFault, getSendType( message ), jsonStringBuilder ); } else { JsonUtils.valueToJsonString( message.value(), getSendType( message ), jsonStringBuilder ); } ret.content = new ByteArray( jsonStringBuilder.toString().getBytes( charset ) ); } else if ( "raw".equals( format ) ) { ret.content = new ByteArray( message.value().strValue().getBytes( charset ) ); } return ret; } private boolean isLocationNeeded( int statusCode ) { return locationRequiredStatusCodes.contains( statusCode ); } private void send_appendResponseHeaders( CommMessage message, StringBuilder headerBuilder ) { int statusCode = DEFAULT_STATUS_CODE; String statusDescription = null; if( hasParameter( Parameters.STATUS_CODE ) ) { statusCode = getIntParameter( Parameters.STATUS_CODE ); if ( !statusCodeDescriptions.containsKey( statusCode ) ) { Interpreter.getInstance().logWarning( "HTTP protocol for operation " + message.operationName() + " is sending a message with status code " + statusCode + ", which is not in the HTTP specifications." ); statusDescription = "Internal Server Error"; } else if ( isLocationNeeded( statusCode ) && !hasParameter( Parameters.REDIRECT ) ) { // if statusCode is a redirection code, location parameter is needed Interpreter.getInstance().logWarning( "HTTP protocol for operation " + message.operationName() + " is sending a message with status code " + statusCode + ", which expects a redirect parameter but the latter is not set." 
); } } else if ( hasParameter( Parameters.REDIRECT ) ) { statusCode = DEFAULT_REDIRECTION_STATUS_CODE; } if ( statusDescription == null ) { statusDescription = statusCodeDescriptions.get( statusCode ); } headerBuilder.append( "HTTP/1.1 " + statusCode + " " + statusDescription + CRLF ); // if redirect has been set, the redirect location parameter is set if ( hasParameter( Parameters.REDIRECT ) ) { headerBuilder.append( "Location: " + getStringParameter( Parameters.REDIRECT ) + CRLF ); } send_appendSetCookieHeader( message, headerBuilder ); headerBuilder.append( "Server: Jolie" ).append( CRLF ); StringBuilder cacheControlHeader = new StringBuilder(); if ( hasParameter( "cacheControl" ) ) { Value cacheControl = getParameterFirstValue( "cacheControl" ); if ( cacheControl.hasChildren( "maxAge" ) ) { cacheControlHeader.append( "max-age=" ).append( cacheControl.getFirstChild( "maxAge" ).intValue() ); } } if ( cacheControlHeader.length() > 0 ) { headerBuilder.append( "Cache-Control: " ).append( cacheControlHeader ).append( CRLF ); } } private void send_appendRequestMethod( Method method, StringBuilder headerBuilder ) { headerBuilder.append( method.id() ); } private void send_appendRequestPath( CommMessage message, Method method, StringBuilder headerBuilder, String charset ) throws IOException { if ( uri.getPath().length() < 1 || uri.getPath().charAt( 0 ) != '/' ) { headerBuilder.append( '/' ); } headerBuilder.append( uri.getPath() ); String alias = getOperationSpecificStringParameter( message.operationName(), Parameters.ALIAS ); if ( alias.isEmpty() ) { headerBuilder.append( message.operationName() ); } else { send_appendParsedAlias( alias, message.value(), charset, headerBuilder ); } if ( method == Method.GET ) { boolean jsonFormat = false; if ( getParameterFirstValue( "method" ).hasChildren( "queryFormat" ) ) { if ( getParameterFirstValue( "method" ).getFirstChild( "queryFormat" ).strValue().equals( "json" ) ) { jsonFormat = true; send_appendJsonQueryString( message, charset, headerBuilder ); } } if ( !jsonFormat ) { send_appendQuerystring( message.value(), charset, headerBuilder ); } } } private static void send_appendAuthorizationHeader( CommMessage message, StringBuilder headerBuilder ) { if ( message.value().hasChildren( jolie.lang.Constants.Predefined.HTTP_BASIC_AUTHENTICATION.token().content() ) ) { Value v = message.value().getFirstChild( jolie.lang.Constants.Predefined.HTTP_BASIC_AUTHENTICATION.token().content() ); //String realm = v.getFirstChild( "realm" ).strValue(); String userpass = v.getFirstChild( "userid" ).strValue() + ":" + v.getFirstChild( "password" ).strValue(); sun.misc.BASE64Encoder encoder = new sun.misc.BASE64Encoder(); userpass = encoder.encode( userpass.getBytes() ); headerBuilder.append( "Authorization: Basic " ).append( userpass ).append( CRLF ); } } private void send_appendHeader( CommMessage message, StringBuilder headerBuilder ) { Value v = getParameterFirstValue( "addHeader" ); if ( v != null ) { if ( v.hasChildren("header") ) { for( Value head : v.getChildren("header") ) { String header = head.strValue() + ": " + head.getFirstChild( "value" ).strValue(); headerBuilder.append( header ).append( CRLF ); } } } } private Method send_getRequestMethod( CommMessage message ) throws IOException { try { Method method = hasOperationSpecificParameter( message.operationName(), Parameters.METHOD ) ? Method.fromString( getOperationSpecificStringParameter( message.operationName(), Parameters.METHOD ).toUpperCase() ) : hasParameter( Parameters.METHOD ) ? 
Method.fromString( getStringParameter( Parameters.METHOD ).toUpperCase() ) : Method.POST; return method; } catch( Method.UnsupportedMethodException e ) { throw new IOException( e ); } } private void send_appendRequestHeaders( CommMessage message, Method method, StringBuilder headerBuilder, String charset ) throws IOException { send_appendRequestMethod( method, headerBuilder ); headerBuilder.append( ' ' ); send_appendRequestPath( message, method, headerBuilder, charset ); headerBuilder.append( " HTTP/1.1" + CRLF ); headerBuilder.append( "Host: " + uri.getHost() + CRLF ); send_appendCookies( message, uri.getHost(), headerBuilder ); send_appendAuthorizationHeader( message, headerBuilder ); if ( checkBooleanParameter( "compression", true ) ) { headerBuilder.append( "Accept-Encoding: gzip, deflate" + CRLF ); } send_appendHeader( message, headerBuilder ); } private void send_appendGenericHeaders( CommMessage message, EncodedContent encodedContent, String charset, StringBuilder headerBuilder ) throws IOException { String param; if ( checkBooleanParameter( "keepAlive", true ) == false || channel().toBeClosed() ) { channel().setToBeClosed( true ); headerBuilder.append( "Connection: close" + CRLF ); } if ( checkBooleanParameter( Parameters.CONCURRENT, true ) ) { headerBuilder.append( Headers.JOLIE_MESSAGE_ID ).append( ": " ).append( message.id() ).append( CRLF ); } if ( encodedContent.content != null ) { String contentType = getStringParameter( "contentType" ); if ( contentType.length() > 0 ) { encodedContent.contentType = contentType; } headerBuilder.append( "Content-Type: " + encodedContent.contentType ); if ( charset != null ) { headerBuilder.append( "; charset=" + charset.toLowerCase() ); } headerBuilder.append( CRLF ); param = getStringParameter( "contentTransferEncoding" ); if ( !param.isEmpty() ) { headerBuilder.append( "Content-Transfer-Encoding: " + param + CRLF ); } String contentDisposition = getStringParameter( "contentDisposition" ); if ( contentDisposition.length() > 0 ) { encodedContent.contentDisposition = contentDisposition; headerBuilder.append( "Content-Disposition: " + encodedContent.contentDisposition + CRLF ); } boolean compression = ( encoding != null ) && checkBooleanParameter( "compression", true ); String compressionTypes = getStringParameter( "compressionTypes", "text/html text/css text/plain text/xml text/x-js application/json application/javascript" ); if ( compressionTypes.length() > 0 && !compressionTypes.contains( encodedContent.contentType ) ) { compression = false; } if ( compression ) { if ( encoding.contains( "gzip" ) ) { ByteArrayOutputStream baOutStream = new ByteArrayOutputStream(); GZIPOutputStream outStream = new GZIPOutputStream( baOutStream ); outStream.write( encodedContent.content.getBytes() ); outStream.close(); encodedContent.content = new ByteArray( baOutStream.toByteArray() ); headerBuilder.append( "Content-Encoding: gzip" + CRLF ); } else if ( encoding.contains( "deflate" ) ) { ByteArrayOutputStream baOutStream = new ByteArrayOutputStream(); DeflaterOutputStream outStream = new DeflaterOutputStream( baOutStream ); outStream.write( encodedContent.content.getBytes() ); outStream.close(); encodedContent.content = new ByteArray( baOutStream.toByteArray() ); headerBuilder.append( "Content-Encoding: deflate" + CRLF ); } } headerBuilder.append( "Content-Length: " + (encodedContent.content.size()) + CRLF ); //headerBuilder.append( "Content-Length: " + (encodedContent.content.size() + 2) + CRLF ); } else { headerBuilder.append( "Content-Length: 0" + CRLF ); } 
} private void send_logDebugInfo( CharSequence header, EncodedContent encodedContent ) { if ( checkBooleanParameter( "debug" ) ) { StringBuilder debugSB = new StringBuilder(); debugSB.append( "[HTTP debug] Sending:\n" ); debugSB.append( header ); if ( getParameterVector( "debug" ).first().getFirstChild( "showContent" ).intValue() > 0 && encodedContent.content != null ) { debugSB.append( encodedContent.content.toString() ); } Interpreter.getInstance().logInfo( debugSB.toString() ); } } public void send( OutputStream ostream, CommMessage message, InputStream istream ) throws IOException { Method method = send_getRequestMethod( message ); String charset = getCharset(); String format = send_getFormat(); EncodedContent encodedContent = send_encodeContent( message, method, charset, format ); StringBuilder headerBuilder = new StringBuilder(); if ( inInputPort ) { // We're responding to a request send_appendResponseHeaders( message, headerBuilder ); } else { // We're sending a notification or a solicit send_appendRequestHeaders( message, method, headerBuilder, charset ); } send_appendGenericHeaders( message, encodedContent, charset, headerBuilder ); headerBuilder.append( CRLF ); send_logDebugInfo( headerBuilder, encodedContent ); inputId = message.operationName(); /*if ( charset == null ) { charset = "UTF8"; }*/ ostream.write( headerBuilder.toString().getBytes( charset ) ); if ( encodedContent.content != null ) { ostream.write( encodedContent.content.getBytes() ); //ostream.write( CRLF.getBytes( charset ) ); } } private void parseXML( HttpMessage message, Value value ) throws IOException { try { if ( message.size() > 0 ) { DocumentBuilder builder = docBuilderFactory.newDocumentBuilder(); InputSource src = new InputSource( new ByteArrayInputStream( message.content() ) ); Document doc = builder.parse( src ); XmlUtils.documentToValue( doc, value ); } } catch( ParserConfigurationException pce ) { throw new IOException( pce ); } catch( SAXException saxe ) { throw new IOException( saxe ); } } private static void parseJson( HttpMessage message, Value value, boolean strictEncoding ) throws IOException { JsonUtils.parseJsonIntoValue( new InputStreamReader( new ByteArrayInputStream( message.content() ) ), value, strictEncoding ); } private static void parseForm( HttpMessage message, Value value, String charset ) throws IOException { String line = new String( message.content(), "UTF8" ); String[] pair; for( String item : line.split( "&" ) ) { pair = item.split( "=", 2 ); value.getChildren( pair[0] ).first().setValue( URLDecoder.decode( pair[1], charset ) ); } } private void parseMultiPartFormData( HttpMessage message, Value value ) throws IOException { multiPartFormDataParser = new MultiPartFormDataParser( message, value ); multiPartFormDataParser.parse(); } private static String parseGWTRPC( HttpMessage message, Value value ) throws IOException { RPCRequest request = RPC.decodeRequest( new String( message.content(), "UTF8" ) ); String operationName = (String)request.getParameters()[0]; joliex.gwt.client.Value requestValue = (joliex.gwt.client.Value)request.getParameters()[1]; JolieGWTConverter.gwtToJolieValue( requestValue, value ); return operationName; } private void recv_checkForSetCookie( HttpMessage message, Value value ) throws IOException { if ( hasParameter( Parameters.COOKIES ) ) { String type; Value cookies = getParameterFirstValue( Parameters.COOKIES ); Value cookieConfig; Value v; for( HttpMessage.Cookie cookie : message.setCookies() ) { if ( cookies.hasChildren( cookie.name() ) ) { cookieConfig 
= cookies.getFirstChild( cookie.name() ); if ( cookieConfig.isString() ) { v = value.getFirstChild( cookieConfig.strValue() ); type = cookieConfig.hasChildren( "type" ) ? cookieConfig.getFirstChild( "type" ).strValue() : "string"; recv_assignCookieValue( cookie.value(), v, type ); } } /*currValue = Value.create(); currValue.getNewChild( "expires" ).setValue( cookie.expirationDate() ); currValue.getNewChild( "path" ).setValue( cookie.path() ); currValue.getNewChild( "name" ).setValue( cookie.name() ); currValue.getNewChild( "value" ).setValue( cookie.value() ); currValue.getNewChild( "domain" ).setValue( cookie.domain() ); currValue.getNewChild( "secure" ).setValue( (cookie.secure() ? 1 : 0) ); cookieVec.add( currValue );*/ } } } private void recv_assignCookieValue( String cookieValue, Value value, String typeKeyword ) throws IOException { NativeType type = NativeType.fromString( typeKeyword ); if ( NativeType.INT == type ) { try { value.setValue( new Integer( cookieValue ) ); } catch( NumberFormatException e ) { throw new IOException( e ); } } else if ( NativeType.LONG == type ) { try { value.setValue( new Long( cookieValue ) ); } catch( NumberFormatException e ) { throw new IOException( e ); } } else if ( NativeType.STRING == type ) { value.setValue( cookieValue ); } else if ( NativeType.DOUBLE == type ) { try { value.setValue( new Double( cookieValue ) ); } catch( NumberFormatException e ) { throw new IOException( e ); } } else if ( NativeType.BOOL == type ) { value.setValue( Boolean.valueOf( cookieValue ) ); } else { value.setValue( cookieValue ); } } private void recv_checkForCookies( HttpMessage message, DecodedMessage decodedMessage ) throws IOException { Value cookies = null; if ( hasOperationSpecificParameter( decodedMessage.operationName, Parameters.COOKIES ) ) { cookies = getOperationSpecificParameterFirstValue( decodedMessage.operationName, Parameters.COOKIES ); } else if ( hasParameter( Parameters.COOKIES ) ) { cookies = getParameterFirstValue( Parameters.COOKIES ); } if ( cookies != null ) { Value v; String type; for( Entry< String, String > entry : message.cookies().entrySet() ) { if ( cookies.hasChildren( entry.getKey() ) ) { Value cookieConfig = cookies.getFirstChild( entry.getKey() ); if ( cookieConfig.isString() ) { v = decodedMessage.value.getFirstChild( cookieConfig.strValue() ); if ( cookieConfig.hasChildren( "type" ) ) { type = cookieConfig.getFirstChild( "type" ).strValue(); } else { type = "string"; } recv_assignCookieValue( entry.getValue(), v, type ); } } } } } private void recv_checkForGenericHeader( HttpMessage message, DecodedMessage decodedMessage ) throws IOException { Value headers = null; if ( hasOperationSpecificParameter( decodedMessage.operationName, Parameters.HEADERS ) ) { headers = getOperationSpecificParameterFirstValue( decodedMessage.operationName, Parameters.HEADERS ); } else if ( hasParameter( Parameters.HEADERS ) ) { headers = getParameterFirstValue( Parameters.HEADERS ); } if ( headers != null ) { for( String headerName : headers.children().keySet() ) { String headerAlias = headers.getFirstChild( headerName ).strValue(); headerName = headerName.replace( "_", "-" ); decodedMessage.value.getFirstChild( headerAlias ).setValue( message.getPropertyOrEmptyString( headerName ) ); } } } private static void recv_parseQueryString( HttpMessage message, Value value ) { Map< String, Integer > indexes = new HashMap< String, Integer >(); String queryString = message.requestPath() == null ? "" : message.requestPath(); String[] kv = queryString.split( "\\?" 
); Integer index; if ( kv.length > 1 ) { queryString = kv[1]; String[] params = queryString.split( "&" ); for( String param : params ) { kv = param.split( "=", 2 ); if ( kv.length > 1 ) { index = indexes.get( kv[0] ); if ( index == null ) { index = 0; indexes.put( kv[0], index ); } value.getChildren( kv[0] ).get( index ).setValue( kv[1] ); indexes.put( kv[0], index + 1 ); } } } } /* * Prints debug information about a received message */ private void recv_logDebugInfo( HttpMessage message ) { StringBuilder debugSB = new StringBuilder(); debugSB.append( "[HTTP debug] Receiving:\n" ); debugSB.append( "HTTP Code: " + message.statusCode() + "\n" ); debugSB.append( "Resource: " + message.requestPath() + "\n" ); debugSB.append( "--> Header properties\n" ); for( Entry< String, String > entry : message.properties() ) { debugSB.append( '\t' + entry.getKey() + ": " + entry.getValue() + '\n' ); } for( HttpMessage.Cookie cookie : message.setCookies() ) { debugSB.append( "\tset-cookie: " + cookie.toString() + '\n' ); } for( Entry< String, String > entry : message.cookies().entrySet() ) { debugSB.append( "\tcookie: " + entry.getKey() + '=' + entry.getValue() + '\n' ); } if ( getParameterFirstValue( "debug" ).getFirstChild( "showContent" ).intValue() > 0 && message.content() != null ) { debugSB.append( "--> Message content\n" ); debugSB.append( new String( message.content() ) ); } Interpreter.getInstance().logInfo( debugSB.toString() ); } private void recv_parseRequestFormat( HttpMessage message ) throws IOException { requestFormat = null; String type = message.getPropertyOrEmptyString( "content-type" ).split( ";" )[0]; if ( "text/x-gwt-rpc".equals( type ) ) { requestFormat = "text/x-gwt-rpc"; } else if ( "application/json".equals( type ) ) { requestFormat = "application/json"; } } private void recv_parseMessage( HttpMessage message, DecodedMessage decodedMessage, String charset ) throws IOException { String format = "xml"; if ( hasParameter( "format" ) ) { format = getStringParameter( "format" ); } String type = message.getProperty( "content-type" ).split( ";" )[0]; if ( "text/html".equals( type ) ) { decodedMessage.value.setValue( new String( message.content() ) ); } else if ( "application/x-www-form-urlencoded".equals( type ) ) { parseForm( message, decodedMessage.value, charset ); } else if ( "text/xml".equals( type ) ) { parseXML( message, decodedMessage.value ); } else if ( "text/x-gwt-rpc".equals( type ) ) { decodedMessage.operationName = parseGWTRPC( message, decodedMessage.value ); } else if ( "multipart/form-data".equals( type ) ) { parseMultiPartFormData( message, decodedMessage.value ); } else if ( "application/octet-stream".equals( type ) || type.startsWith( "image/" ) || "application/zip".equals( type ) ) { decodedMessage.value.setValue( new ByteArray( message.content() ) ); } else if ( "application/json".equals( type ) || "json".equals( format ) ) { boolean strictEncoding = checkStringParameter( "json_encoding", "strict" ); parseJson( message, decodedMessage.value, strictEncoding ); } else if ( "xml".equals( format ) || "rest".equals( format ) ) { parseXML( message, decodedMessage.value ); } else { decodedMessage.value.setValue( new String( message.content() ) ); } } private String getDefaultOperation( HttpMessage.Type t ) { if ( hasParameter( Parameters.DEFAULT_OPERATION ) ) { Value dParam = getParameterFirstValue( Parameters.DEFAULT_OPERATION ); String method = t == HttpMessage.Type.GET ? "get" : t == HttpMessage.Type.HEAD ? "head" : t == HttpMessage.Type.POST ? 
"post" : t == HttpMessage.Type.PUT ? "put" : t == HttpMessage.Type.DELETE ? "delete" : null; if ( method == null || dParam.hasChildren( method ) == false ) { return dParam.strValue(); } else { return dParam.getFirstChild( method ).strValue(); } } return null; } private void recv_checkReceivingOperation( HttpMessage message, DecodedMessage decodedMessage ) { if ( decodedMessage.operationName == null ) { String requestPath = message.requestPath().split( "\\?" )[0]; decodedMessage.operationName = requestPath; Matcher m = LocationParser.RESOURCE_SEPARATOR_PATTERN.matcher( decodedMessage.operationName ); if ( m.find() ) { int resourceStart = m.end(); if ( m.find() ) { decodedMessage.resourcePath = requestPath.substring( resourceStart - 1, m.start() ); decodedMessage.operationName = requestPath.substring( m.end(), requestPath.length() ); } } } if ( decodedMessage.resourcePath.equals( "/" ) && !channel().parentInputPort().canHandleInputOperation( decodedMessage.operationName ) ) { String defaultOpId = getDefaultOperation( message.type() ); if ( defaultOpId != null ) { Value body = decodedMessage.value; decodedMessage.value = Value.create(); decodedMessage.value.getChildren( "data" ).add( body ); decodedMessage.value.getFirstChild( "operation" ).setValue( decodedMessage.operationName ); if ( message.userAgent() != null ) { decodedMessage.value.getFirstChild( Parameters.USER_AGENT ).setValue( message.userAgent() ); } Value cookies = decodedMessage.value.getFirstChild( "cookies" ); for( Entry< String, String > cookie : message.cookies().entrySet() ) { cookies.getFirstChild( cookie.getKey() ).setValue( cookie.getValue() ); } decodedMessage.operationName = defaultOpId; } } } private void recv_checkForMultiPartHeaders( DecodedMessage decodedMessage ) { if ( multiPartFormDataParser != null ) { String target; for( Entry< String, MultiPartFormDataParser.PartProperties > entry : multiPartFormDataParser.getPartPropertiesSet() ) { if ( entry.getValue().filename() != null ) { target = getMultipartHeaderForPart( decodedMessage.operationName, entry.getKey() ); if ( target != null ) { decodedMessage.value.getFirstChild( target ).setValue( entry.getValue().filename() ); } } } multiPartFormDataParser = null; } } private void recv_checkForMessageProperties( HttpMessage message, DecodedMessage decodedMessage ) throws IOException { recv_checkForCookies( message, decodedMessage ); recv_checkForGenericHeader( message, decodedMessage ); recv_checkForMultiPartHeaders( decodedMessage ); if ( message.userAgent() != null && hasParameter( Parameters.USER_AGENT ) ) { getParameterFirstValue( Parameters.USER_AGENT ).setValue( message.userAgent() ); } if ( getParameterVector( Parameters.HOST ) != null ) { getParameterFirstValue( Parameters.HOST ).setValue( message.getPropertyOrEmptyString( Parameters.HOST ) ); } } private static class DecodedMessage { private String operationName = null; private Value value = Value.create(); private String resourcePath = "/"; private long id = CommMessage.GENERIC_ID; } private void recv_checkForStatusCode( HttpMessage message ) { if ( hasParameter( Parameters.STATUS_CODE ) ) { getParameterFirstValue( Parameters.STATUS_CODE ).setValue( message.statusCode() ); } } public CommMessage recv( InputStream istream, OutputStream ostream ) throws IOException { CommMessage retVal = null; DecodedMessage decodedMessage = new DecodedMessage(); HttpMessage message = new HttpParser( istream ).parse(); if ( message.isSupported() == false ) { ostream.write( NOT_IMPLEMENTED_HEADER ); ostream.write( CRLF.getBytes() 
); ostream.write( CRLF.getBytes() ); ostream.flush(); return null; } if ( message.getProperty( "connection" ) != null ) { HttpUtils.recv_checkForChannelClosing( message, channel() ); } else { channel().setToBeClosed( checkBooleanParameter( "keepAlive", true ) == false ); } if ( checkBooleanParameter( Parameters.DEBUG ) ) { recv_logDebugInfo( message ); } recv_checkForStatusCode( message ); String charset = getCharset(); encoding = message.getProperty( "accept-encoding" ); recv_parseRequestFormat( message ); if ( message.size() > 0 ) { recv_parseMessage( message, decodedMessage, charset ); } if ( checkBooleanParameter( Parameters.CONCURRENT ) ) { String messageId = message.getProperty( Headers.JOLIE_MESSAGE_ID ); if ( messageId != null ) { try { decodedMessage.id = Long.parseLong( messageId ); } catch( NumberFormatException e ) {} } } if ( message.isResponse() ) { recv_checkForSetCookie( message, decodedMessage.value ); retVal = new CommMessage( decodedMessage.id, inputId, decodedMessage.resourcePath, decodedMessage.value, null ); } else if ( message.isError() == false ) { if ( message.isGet() ) { recv_parseQueryString( message, decodedMessage.value ); } recv_checkReceivingOperation( message, decodedMessage ); recv_checkForMessageProperties( message, decodedMessage ); retVal = new CommMessage( decodedMessage.id, decodedMessage.operationName, decodedMessage.resourcePath, decodedMessage.value, null ); } if ( retVal != null && "/".equals( retVal.resourcePath() ) && channel().parentPort() != null && (channel().parentPort().getInterface().containsOperation( retVal.operationName() ) || channel().parentInputPort().getAggregatedOperation( retVal.operationName() ) != null) ) { try { // The message is for this service boolean hasInput = false; OneWayTypeDescription oneWayTypeDescription = null; if ( channel().parentInputPort() != null ) { if ( channel().parentInputPort().getAggregatedOperation( retVal.operationName() ) != null ) { oneWayTypeDescription = channel().parentInputPort().getAggregatedOperation( retVal.operationName() ).getOperationTypeDescription().asOneWayTypeDescription(); hasInput = true; } } if ( !hasInput ) { Interface iface = channel().parentPort().getInterface(); oneWayTypeDescription = iface.oneWayOperations().get( retVal.operationName() ); } if ( oneWayTypeDescription != null ) { // We are receiving a One-Way message oneWayTypeDescription.requestType().cast( retVal.value() ); } else { hasInput = false; RequestResponseTypeDescription rrTypeDescription = null; if ( channel().parentInputPort() != null ) { if ( channel().parentInputPort().getAggregatedOperation( retVal.operationName() ) != null ) { rrTypeDescription = channel().parentInputPort().getAggregatedOperation( retVal.operationName() ).getOperationTypeDescription().asRequestResponseTypeDescription(); hasInput = true; } } if ( !hasInput ) { Interface iface = channel().parentPort().getInterface(); rrTypeDescription = iface.requestResponseOperations().get( retVal.operationName() ); } if ( retVal.isFault() ) { Type faultType = rrTypeDescription.faults().get( retVal.fault().faultName() ); if ( faultType != null ) { faultType.cast( retVal.value() ); } } else { if ( message.isResponse() ) { rrTypeDescription.responseType().cast( retVal.value() ); } else { rrTypeDescription.requestType().cast( retVal.value() ); } } } } catch( TypeCastingException e ) { // TODO: do something here? 
} } return retVal; } private Type getSendType( CommMessage message ) throws IOException { Type ret = null; if ( channel().parentPort() == null ) { throw new IOException( "Could not retrieve communication port for HTTP protocol" ); } OperationTypeDescription opDesc = channel().parentPort().getOperationTypeDescription( message.operationName(), Constants.ROOT_RESOURCE_PATH ); if ( opDesc == null ) { throw new IOException( "Operation " + message.operationName() + " not declared in port interface for HTTP protocol" ); } if ( opDesc.asOneWayTypeDescription() != null ) { if ( message.isFault() ) { ret = Type.UNDEFINED; } else { OneWayTypeDescription ow = opDesc.asOneWayTypeDescription(); ret = ow.requestType(); } } else if ( opDesc.asRequestResponseTypeDescription() != null ) { RequestResponseTypeDescription rr = opDesc.asRequestResponseTypeDescription(); if ( message.isFault() ) { ret = rr.getFaultType( message.fault().faultName() ); if ( ret == null ) { ret = Type.UNDEFINED; } } else { ret = ( inInputPort ) ? rr.responseType() : rr.requestType(); } } else { throw new IOException( "Internal error" ); } return ret; } }
package edu.co.sena;

/**
 *
 * @author duvan
 */
public class linux {

    /**
     * @param args the command line arguments
     */
    public static void main(String[] args) {
        System.out.println("Linux Version 2.0");
    }
}
package io.particle.cloudsdk.example_app; import android.annotation.SuppressLint; import android.content.Intent; import android.os.AsyncTask; import android.os.Bundle; import android.util.Log; import android.widget.EditText; import java.io.IOException; import androidx.annotation.NonNull; import androidx.appcompat.app.AppCompatActivity; import io.particle.android.sdk.cloud.ParticleCloud; import io.particle.android.sdk.cloud.ParticleCloudSDK; import io.particle.android.sdk.cloud.ParticleDevice; import io.particle.android.sdk.cloud.exceptions.ParticleCloudException; import io.particle.android.sdk.utils.Async; import io.particle.android.sdk.utils.Toaster; public class LoginActivity extends AppCompatActivity { @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); ParticleCloudSDK.init(this); setContentView(R.layout.activity_login); findViewById(R.id.login_button).setOnClickListener( v -> { final String email = ((EditText) findViewById(R.id.email)).getText().toString(); final String password = ((EditText) findViewById(R.id.password)).getText().toString(); // Don't (This is safe, but more work!) // @SuppressLint("StaticFieldLeak") // AsyncTask task = new AsyncTask() { // @Override // protected Object doInBackground(Object[] params) { // try { // ParticleCloudSDK.getCloud().logIn(email, password); // } catch (final ParticleCloudException e) { // Runnable mainThread = () -> { // Toaster.l(LoginActivity.this, e.getBestMessage()); // e.printStackTrace(); // Log.d("info", e.getBestMessage()); //// Log.d("info", e.getCause().toString()); // runOnUiThread(mainThread); // return null; // task.execute(); Async.executeAsync(ParticleCloudSDK.getCloud(), new Async.ApiWork<ParticleCloud, Object>() { private ParticleDevice mDevice; @Override public Object callApi(@NonNull ParticleCloud sparkCloud) throws ParticleCloudException, IOException { sparkCloud.logIn(email, password); sparkCloud.getDevices(); try { mDevice = sparkCloud.getDevices().get(0); } catch (IndexOutOfBoundsException iobEx) { throw new RuntimeException("Your account must have at least one device for this example app to work"); } Object obj; try { obj = mDevice.getVariable("analogvalue"); Log.d("BANANA", "analogvalue: " + obj); } catch (ParticleDevice.VariableDoesNotExistException e) { Toaster.s(LoginActivity.this, "Error reading variable"); } try { String strVariable = mDevice.getStringVariable("stringvalue"); Log.d("BANANA", "stringvalue: " + strVariable); } catch (ParticleDevice.VariableDoesNotExistException e) { Toaster.s(LoginActivity.this, "Error reading variable"); } try { double dVariable = mDevice.getDoubleVariable("doublevalue"); Log.d("BANANA", "doublevalue: " + dVariable); } catch (ParticleDevice.VariableDoesNotExistException e) { Toaster.s(LoginActivity.this, "Error reading variable"); } try { int intVariable = mDevice.getIntVariable("analogvalue"); Log.d("BANANA", "int analogvalue: " + intVariable); } catch (ParticleDevice.VariableDoesNotExistException e) { Toaster.s(LoginActivity.this, "Error reading variable"); } return -1; } @Override public void onSuccess(@NonNull Object value) { Toaster.l(LoginActivity.this, "Logged in"); Intent intent = ValueActivity.buildIntent(LoginActivity.this, 123, mDevice.getID()); startActivity(intent); } @Override public void onFailure(@NonNull ParticleCloudException e) { Toaster.l(LoginActivity.this, e.getBestMessage()); e.printStackTrace(); Log.d("info", e.getBestMessage()); } }); } ); } }
package org.voltdb.export; import static org.hamcrest.Matchers.arrayContaining; import java.text.SimpleDateFormat; import java.util.ArrayDeque; import java.util.Arrays; import org.hamcrest.Description; import org.hamcrest.Matcher; import org.hamcrest.TypeSafeDiagnosingMatcher; import org.voltdb.exportclient.ExportToFileClient; import org.voltdb.types.TimestampType; import org.voltdb.utils.Encoder; import com.google.common.base.Preconditions; public class ExportToFileTestVerifier { private final ArrayDeque<String[]> m_data = new ArrayDeque<String[]>(); protected final ThreadLocal<SimpleDateFormat> m_ODBCDateformat = new ThreadLocal<SimpleDateFormat>() { @Override protected SimpleDateFormat initialValue() { return new SimpleDateFormat(ExportToFileClient.ODBC_DATE_FORMAT_STRING); } }; public ExportToFileTestVerifier() { } void addRow( String [] data) { Preconditions.checkArgument( data == null || data.length > 0, "row size does not match expected row size" ); m_data.offer(data); } void addRow( Object [] data) { Preconditions.checkArgument(data != null && data.length > 0, "row without column data"); String [] row = new String[data.length]; for (int i = 0; i < data.length; ++i) { Object cval = data[i]; if (cval == null) { row[i] = "NULL"; } else if (cval instanceof byte[]) { row[i] = Encoder.hexEncode((byte[])cval); } else if (cval instanceof String) { row[i] = (String)cval; } else if (cval instanceof TimestampType) { row[i] = m_ODBCDateformat.get().format( ((TimestampType)cval).asApproximateJavaDate() ); } else { row[i] = cval.toString(); } } m_data.offer(row); } Matcher<String[]> isExpectedRow() { return new TypeSafeDiagnosingMatcher<String[]>() { String [] expected = m_data.peek(); @Override public void describeTo(Description d) { d.appendText("row ["); if (expected != null) { d.appendValueList("", ", ", "", Arrays.<String>asList(expected)); } } @Override protected boolean matchesSafely(String[] gotten, Description d) { d.appendText(" row ["); boolean match = expected != null; if( ! match) { d.appendText("{ EOD exhausted expected rows }"); } if( match) { String [] atHead = m_data.poll(); String [] toBeMatched = Arrays.copyOfRange( gotten, ExportToFileClient.INTERNAL_FIELD_COUNT, gotten.length ); match = arrayContaining(atHead).matches(toBeMatched); if( ! match) { d.appendValueList("", ", ", "", Arrays.asList(toBeMatched)); } } d.appendText("]"); return match; } }; } }
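// A minimal usage sketch for the verifier above (not part of the original sources),
// assuming Hamcrest is on the classpath. addRow(...) and isExpectedRow() are
// package-private, so the sketch sits in the same package. "csvColumns" is a
// hypothetical row read back from the exported file; it still carries the leading
// ExportToFileClient.INTERNAL_FIELD_COUNT metadata columns, which the matcher strips
// before comparing against the head of the queue of expected rows.
package org.voltdb.export;

import static org.hamcrest.MatcherAssert.assertThat;

public class ExportToFileTestVerifierUsageSketch {

    public static void verifyOneRow(String[] csvColumns) {
        ExportToFileTestVerifier verifier = new ExportToFileTestVerifier();

        // Expected rows, queued in the order they should appear in the export file.
        verifier.addRow(new Object[] { 42, "alpha" });
        verifier.addRow(new Object[] { 43, "beta" });

        // The matcher compares csvColumns (minus the metadata columns) against the
        // oldest queued expected row, consuming it in the process.
        assertThat(csvColumns, verifier.isExpectedRow());
    }
}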
package dr.app.beast; import beagle.BeagleFactory; import beagle.BeagleFlag; import beagle.BeagleInfo; import dr.app.plugin.Plugin; import dr.app.plugin.PluginLoader; import dr.app.util.Arguments; import dr.app.util.Utils; import dr.inference.mcmc.MCMC; import dr.inference.mcmcmc.MCMCMC; import dr.inference.mcmcmc.MCMCMCOptions; import dr.math.MathUtils; import dr.util.ErrorLogHandler; import dr.util.MessageLogHandler; import dr.util.Version; import dr.xml.XMLObjectParser; import dr.xml.XMLParser; import jam.util.IconUtils; import javax.swing.*; import java.io.File; import java.io.FileReader; import java.io.IOException; import java.util.*; import java.util.logging.*; public class BeastMain { private final static Version version = new BeastVersion(); public static final double DEFAULT_DELTA = 1.0; public static final int DEFAULT_SWAP_CHAIN_EVERY = 100; static class BeastConsoleApp extends jam.console.ConsoleApplication { XMLParser parser = null; public BeastConsoleApp(String nameString, String aboutString, javax.swing.Icon icon) throws IOException { super(nameString, aboutString, icon, false); getDefaultFrame().setDefaultCloseOperation(WindowConstants.DO_NOTHING_ON_CLOSE); } public void doStop() { Iterator iter = parser.getThreads(); while (iter.hasNext()) { Thread thread = (Thread) iter.next(); thread.stop(); } } public void setTitle(String title) { getDefaultFrame().setTitle(title); } } public BeastMain(File inputFile, BeastConsoleApp consoleApp, int maxErrorCount, final boolean verbose, boolean parserWarning, boolean strictXML, List<String> additionalParsers, boolean useMC3, double[] chainTemperatures, int swapChainsEvery) { if (inputFile == null) { throw new RuntimeException("Error: no input file specified"); } String fileName = inputFile.getName(); final Logger infoLogger = Logger.getLogger("dr.app.beast"); try { FileReader fileReader = new FileReader(inputFile); XMLParser parser = new BeastParser(new String[]{fileName}, additionalParsers, verbose, parserWarning, strictXML); if (consoleApp != null) { consoleApp.parser = parser; } // Add a handler to handle warnings and errors. This is a ConsoleHandler // so the messages will go to StdOut.. Logger logger = Logger.getLogger("dr"); Handler messageHandler = new MessageLogHandler(); messageHandler.setFilter(new Filter() { public boolean isLoggable(LogRecord record) { return record.getLevel().intValue() < Level.WARNING.intValue(); } }); logger.addHandler(messageHandler); // // Add a handler to handle warnings and errors. This is a ConsoleHandler // // so the messages will go to StdErr.. // handler = new ConsoleHandler(); // handler.setFilter(new Filter() { // public boolean isLoggable(LogRecord record) { // if (verbose) { // return record.getLevel().intValue() >= Level.WARNING.intValue(); // } else { // return record.getLevel().intValue() >= Level.SEVERE.intValue(); // logger.addHandler(handler); logger.setUseParentHandlers(false); infoLogger.info("Parsing XML file: " + fileName); infoLogger.info(" File encoding: " + fileReader.getEncoding()); // This is a special logger that is for logging numerical and statistical errors // during the MCMC run. It will tolerate up to maxErrorCount before throwing a // RuntimeException to shut down the run. 
//Logger errorLogger = Logger.getLogger("error"); messageHandler = new ErrorLogHandler(maxErrorCount); messageHandler.setLevel(Level.WARNING); logger.addHandler(messageHandler); for (String pluginName : PluginLoader.getAvailablePlugins()) { Plugin plugin = PluginLoader.loadPlugin(pluginName); if (plugin != null) { Set<XMLObjectParser> parserSet = plugin.getParsers(); for (XMLObjectParser pluginParser : parserSet) { parser.addXMLObjectParser(pluginParser); } } } if (!useMC3) { // just parse the file running all threads... parser.parse(fileReader, true); } else { int chainCount = chainTemperatures.length; MCMC[] chains = new MCMC[chainCount]; MCMCMCOptions options = new MCMCMCOptions(chainTemperatures, swapChainsEvery); Logger.getLogger("dr.apps.beast").info("Starting cold chain plus hot chains with temperatures: "); for (int i = 1; i < chainTemperatures.length; i++) { Logger.getLogger("dr.apps.beast").info("Hot Chain " + i + ": " + chainTemperatures[i]); } Logger.getLogger("dr.apps.beast").info("Parsing XML file: " + fileName); // parse the file for the initial cold chain returning the MCMC object chains[0] = (MCMC) parser.parse(fileReader, MCMC.class); if (chains[0] == null) { throw new dr.xml.XMLParseException("BEAST XML file is missing an MCMC element"); } fileReader.close(); chainTemperatures[0] = 1.0; for (int i = 1; i < chainCount; i++) { // parse the file once for each hot chain fileReader = new FileReader(inputFile); // turn off all messages for subsequent reads of the file (they will be the same as the // first time). messageHandler.setLevel(Level.OFF); parser = new BeastParser(new String[]{fileName}, null, verbose, parserWarning, strictXML); chains[i] = (MCMC) parser.parse(fileReader, MCMC.class); if (chains[i] == null) { throw new dr.xml.XMLParseException("BEAST XML file is missing an MCMC element"); } fileReader.close(); } // restart messages messageHandler.setLevel(Level.ALL); MCMCMC mc3 = new MCMCMC(chains, options); Thread thread = new Thread(mc3); thread.start(); } } catch (java.io.IOException ioe) { infoLogger.severe("File error: " + ioe.getMessage()); throw new RuntimeException("Terminate"); } catch (org.xml.sax.SAXParseException spe) { if (spe.getMessage() != null && spe.getMessage().equals("Content is not allowed in prolog")) { infoLogger.severe("Parsing error - the input file, " + fileName + ", is not a valid XML file."); } else { infoLogger.severe("Error running file: " + fileName); infoLogger.severe("Parsing error - poorly formed XML (possibly not an XML file):\n" + spe.getMessage()); } throw new RuntimeException("Terminate"); } catch (org.w3c.dom.DOMException dome) { infoLogger.severe("Error running file: " + fileName); infoLogger.severe("Parsing error - poorly formed XML:\n" + dome.getMessage()); throw new RuntimeException("Terminate"); } catch (dr.xml.XMLParseException pxe) { if (pxe.getMessage() != null && pxe.getMessage().equals("Unknown root document element, beauti")) { infoLogger.severe("Error running file: " + fileName); infoLogger.severe( "The file you just tried to run in BEAST is actually a BEAUti document.\n" + "Although this uses XML, it is not a format that BEAST understands.\n" + "These files are used by BEAUti to save and load your settings so that\n" + "you can go back and alter them. 
To generate a BEAST file you must\n" +
                                "select the 'Generate BEAST File' option, either from the File menu or\n" +
                                "the button at the bottom right of the window.");
            } else {
                infoLogger.severe("Parsing error - poorly formed BEAST file, " + fileName + ":\n" + pxe.getMessage());
            }
            throw new RuntimeException("Terminate");
        } catch (RuntimeException rex) {
            if (rex.getMessage() != null && rex.getMessage().startsWith("The initial posterior is zero")) {
                infoLogger.warning("Error running file: " + fileName);
                infoLogger.severe(
                        "The initial model is invalid because state has a zero probability.\n\n" +
                                "If the log likelihood of the tree is -Inf, this may be because the\n" +
                                "initial, random tree is so large that it has an extremely bad\n" +
                                "likelihood which is being rounded to zero.\n\n" +
                                "Alternatively, it may be that the product of starting mutation rate\n" +
                                "and tree height is extremely small or extremely large. \n\n" +
                                "Finally, it may be that the initial state is incompatible with\n" +
                                "one or more 'hard' constraints (on monophyly or bounds on parameter\n" +
                                "values). This will result in Priors with zero probability.\n\n" +
                                "The individual components of the posterior are as follows:\n" +
                                rex.getMessage() + "\n" +
                                "For more information go to <http://beast.bio.ed.ac.uk/>.");
            } else {
                // This call never returns as another RuntimeException is raised by
                // the error log handler???
                infoLogger.warning("Error running file: " + fileName);
                System.err.println("Fatal exception: " + rex.getMessage());
                rex.printStackTrace(System.err);
            }
            throw new RuntimeException("Terminate");
        } catch (Exception ex) {
            infoLogger.warning("Error running file: " + fileName);
            infoLogger.severe("Fatal exception: " + ex.getMessage());
            System.err.println("Fatal exception: " + ex.getMessage());
            ex.printStackTrace(System.err);
            throw new RuntimeException("Terminate");
        }
    }

    public static void centreLine(String line, int pageWidth) {
        int n = pageWidth - line.length();
        int n1 = n / 2;
        for (int i = 0; i < n1; i++) {
            System.out.print(" ");
        }
        System.out.println(line);
    }

    public static void printTitle() {
        System.out.println();
        centreLine("BEAST " + version.getVersionString() + ", " + version.getDateString(), 60);
        centreLine("Bayesian Evolutionary Analysis Sampling Trees", 60);
        for (String creditLine : version.getCredits()) {
            centreLine(creditLine, 60);
        }
        System.out.println();
    }

    public static void printUsage(Arguments arguments) {
        arguments.printUsage("beast", "[<input-file-name>]");
        System.out.println();
        System.out.println(" Example: beast test.xml");
        System.out.println(" Example: beast -window test.xml");
        System.out.println(" Example: beast -help");
        System.out.println();
    }

    private static long updateSeedByRank(long seed, int rank) {
        return seed + 1000 * 1000 * rank;
    }

    // Main method
    public static void main(String[] args) throws java.io.IOException {
        // There is a major issue with languages that use the comma as a decimal separator.
        // To ensure compatibility between programs in the package, enforce the US locale.
Locale.setDefault(Locale.US); Arguments arguments = new Arguments( new Arguments.Option[]{ new Arguments.Option("verbose", "Give verbose XML parsing messages"), new Arguments.Option("warnings", "Show warning messages about BEAST XML file"), new Arguments.Option("strict", "Fail on non-conforming BEAST XML file"), new Arguments.Option("window", "Provide a console window"), new Arguments.Option("options", "Display an options dialog"), new Arguments.Option("working", "Change working directory to input file's directory"), new Arguments.LongOption("seed", "Specify a random number generator seed"), new Arguments.StringOption("prefix", "PREFIX", "Specify a prefix for all output log filenames"), new Arguments.Option("overwrite", "Allow overwriting of log files"), new Arguments.IntegerOption("errors", "Specify maximum number of numerical errors before stopping"), new Arguments.IntegerOption("threads", "The number of computational threads to use (default auto)"), new Arguments.Option("java", "Use Java only, no native implementations"), new Arguments.RealOption("threshold", 0.0, Double.MAX_VALUE, "Full evaluation test threshold (default 1E-6)"), new Arguments.Option("beagle", "Use beagle library if available"), new Arguments.Option("beagle_info", "BEAGLE: show information on available resources"), new Arguments.StringOption("beagle_order", "order", "BEAGLE: set order of resource use"), new Arguments.IntegerOption("beagle_instances", "BEAGLE: divide site patterns amongst instances"), new Arguments.Option("beagle_CPU", "BEAGLE: use CPU instance"), new Arguments.Option("beagle_GPU", "BEAGLE: use GPU instance if available"), new Arguments.Option("beagle_SSE", "BEAGLE: use SSE extensions if available"), new Arguments.Option("beagle_cuda", "BEAGLE: use CUDA parallization if available"), new Arguments.Option("beagle_opencl", "BEAGLE: use OpenCL parallization if available"), new Arguments.Option("beagle_single", "BEAGLE: use single precision if available"), new Arguments.Option("beagle_double", "BEAGLE: use double precision if available"), new Arguments.StringOption("beagle_scaling", new String[]{"default", "dynamic", "delayed", "always", "none"}, false, "BEAGLE: specify scaling scheme to use"), new Arguments.IntegerOption("beagle_rescale", "BEAGLE: frequency of rescaling (dynamic scaling only)"), new Arguments.Option("mpi", "Use MPI rank to label output"), new Arguments.IntegerOption("mc3_chains", 1, Integer.MAX_VALUE, "number of chains"), new Arguments.RealOption("mc3_delta", 0.0, Double.MAX_VALUE, "temperature increment parameter"), new Arguments.RealArrayOption("mc3_temperatures", -1, "a comma-separated list of the hot chain temperatures"), new Arguments.IntegerOption("mc3_swap", 1, Integer.MAX_VALUE, "frequency at which chains temperatures will be swapped"), new Arguments.Option("version", "Print the version and credits and stop"), new Arguments.Option("help", "Print this information and stop"), }); int argumentCount = 0; try { argumentCount = arguments.parseArguments(args); } catch (Arguments.ArgumentException ae) { System.out.println(); System.out.println(ae.getMessage()); System.out.println(); printUsage(arguments); System.exit(1); } if (arguments.hasOption("version")) { printTitle(); } if (arguments.hasOption("help")) { printUsage(arguments); } if (arguments.hasOption("version") || arguments.hasOption("help")) { System.exit(0); } List<String> additionalParsers = new ArrayList<String>(); final boolean verbose = arguments.hasOption("verbose"); final boolean parserWarning = 
arguments.hasOption("warnings"); // if dev, then auto turn on, otherwise default to turn off final boolean strictXML = arguments.hasOption("strict"); final boolean window = arguments.hasOption("window"); final boolean options = arguments.hasOption("options") || (argumentCount == 0); final boolean working = arguments.hasOption("working"); String fileNamePrefix = null; boolean allowOverwrite = arguments.hasOption("overwrite"); boolean useMPI = arguments.hasOption("mpi"); long seed = MathUtils.getSeed(); boolean useJava = false; if (arguments.hasOption("threshold")) { double evaluationThreshold = arguments.getRealOption("threshold"); System.setProperty("mcmc.evaluation.threshold", Double.toString(evaluationThreshold)); } int threadCount = -1; if (arguments.hasOption("java")) { useJava = true; } if (arguments.hasOption("prefix")) { fileNamePrefix = arguments.getStringOption("prefix"); } int chainCount = 1; if (arguments.hasOption("mc3_chains")) { chainCount = arguments.getIntegerOption("mc3_chains"); } else if (arguments.hasOption("mc3_temperatures")) { chainCount = 1 + arguments.getRealArrayOption("mc3_temperatures").length; } double delta = DEFAULT_DELTA; if (arguments.hasOption("mc3_delta")) { if (arguments.hasOption("mc3_temperatures")) { System.err.println("Either the -mc3_delta or the -mc3_temperatures option should be used, not both"); System.err.println(); printUsage(arguments); System.exit(1); } delta = arguments.getRealOption("mc3_delta"); } double[] chainTemperatures = new double[chainCount]; chainTemperatures[0] = 1.0; if (arguments.hasOption("mc3_temperatures")) { double[] hotChainTemperatures = arguments.getRealArrayOption("mc3_temperatures"); assert hotChainTemperatures.length == chainCount - 1; System.arraycopy(hotChainTemperatures, 0, chainTemperatures, 1, chainCount - 1); } else { for (int i = 1; i < chainCount; i++) { chainTemperatures[i] = 1.0 / (1.0 + (delta * i)); } } int swapChainsEvery = DEFAULT_SWAP_CHAIN_EVERY; if (arguments.hasOption("mc3_swap")) { swapChainsEvery = arguments.getIntegerOption("mc3_swap"); } boolean useMC3 = chainCount > 1; long beagleFlags = 0; boolean beagleShowInfo = arguments.hasOption("beagle_info"); // if any beagle flag is specified then use beagle... 
boolean useBeagle = arguments.hasOption("beagle") || arguments.hasOption("beagle_CPU") || arguments.hasOption("beagle_GPU") || arguments.hasOption("beagle_SSE") || arguments.hasOption("beagle_cuda") || arguments.hasOption("beagle_opencl") || arguments.hasOption("beagle_double") || arguments.hasOption("beagle_single") || arguments.hasOption("beagle_order") || arguments.hasOption("beagle_scaling") || arguments.hasOption("beagle_rescale") || arguments.hasOption("beagle_instances") || beagleShowInfo; if (arguments.hasOption("beagle_CPU")) { beagleFlags |= BeagleFlag.PROCESSOR_CPU.getMask(); } if (arguments.hasOption("beagle_GPU")) { beagleFlags |= BeagleFlag.PROCESSOR_GPU.getMask(); } if (arguments.hasOption("beagle_cuda")) { beagleFlags |= BeagleFlag.FRAMEWORK_CUDA.getMask(); } if (arguments.hasOption("beagle_opencl")) { beagleFlags |= BeagleFlag.FRAMEWORK_OPENCL.getMask(); } if (arguments.hasOption("beagle_SSE")) { beagleFlags |= BeagleFlag.PROCESSOR_CPU.getMask(); beagleFlags |= BeagleFlag.VECTOR_SSE.getMask(); } if (arguments.hasOption("beagle_double")) { beagleFlags |= BeagleFlag.PRECISION_DOUBLE.getMask(); } if (arguments.hasOption("beagle_single")) { beagleFlags |= BeagleFlag.PRECISION_SINGLE.getMask(); } if (arguments.hasOption("beagle_order")) { System.setProperty("beagle.resource.order", arguments.getStringOption("beagle_order")); } if (arguments.hasOption("beagle_instances")) { System.setProperty("beagle.instance.count", Integer.toString(arguments.getIntegerOption("beagle_instances"))); } if (arguments.hasOption("beagle_scaling")) { System.setProperty("beagle.scaling", arguments.getStringOption("beagle_scaling")); } if (arguments.hasOption("beagle_rescale")) { System.setProperty("beagle.rescale", Integer.toString(arguments.getIntegerOption("beagle_rescale"))); } if (arguments.hasOption("threads")) { // threadCount defaults to -1 unless the user specifies an option threadCount = arguments.getIntegerOption("threads"); if (threadCount < 0) { printTitle(); System.err.println("The the number of threads should be >= 0"); System.exit(1); } } if (arguments.hasOption("seed")) { seed = arguments.getLongOption("seed"); if (seed <= 0) { printTitle(); System.err.println("The random number seed should be > 0"); System.exit(1); } } if (useMPI) { String[] nullArgs = new String[0]; try { BeastMPI.Init(nullArgs); } catch (Exception e) { throw new RuntimeException("Unable to access MPI."); } int rank = BeastMPI.COMM_WORLD.Rank(); System.setProperty("mpi.rank.postfix", String.valueOf(rank)); } String rankProp = System.getProperty("mpi.rank.postfix"); if (rankProp != null) { int rank = Integer.valueOf(rankProp); seed = updateSeedByRank(seed, rank); } int maxErrorCount = 0; if (arguments.hasOption("errors")) { maxErrorCount = arguments.getIntegerOption("errors"); if (maxErrorCount < 0) { maxErrorCount = 0; } } BeastConsoleApp consoleApp = null; String nameString = "BEAST " + version.getVersionString(); if (window) { System.setProperty("com.apple.macos.useScreenMenuBar", "true"); System.setProperty("apple.laf.useScreenMenuBar", "true"); System.setProperty("apple.awt.showGrowBox", "true"); javax.swing.Icon icon = IconUtils.getIcon(BeastMain.class, "images/beast.png"); String aboutString = "<html><div style=\"font-family:sans-serif;\"><center>" + "<div style=\"font-size:12;\"><p>Bayesian Evolutionary Analysis Sampling Trees<br>" + "Version " + version.getVersionString() + ", " + version.getDateString() + "</p>" + version.getHTMLCredits() + "</div></center></div></html>"; consoleApp = new 
BeastConsoleApp(nameString, aboutString, icon); } printTitle(); File inputFile = null; if (options && !beagleShowInfo) { String titleString = "<html><center><p>Bayesian Evolutionary Analysis Sampling Trees<br>" + "Version " + version.getVersionString() + ", " + version.getDateString() + "</p></center></html>"; javax.swing.Icon icon = IconUtils.getIcon(BeastMain.class, "images/beast.png"); BeastDialog dialog = new BeastDialog(new JFrame(), titleString, icon); dialog.setAllowOverwrite(allowOverwrite); dialog.setSeed(seed); dialog.setUseBeagle(useBeagle); if (BeagleFlag.PROCESSOR_GPU.isSet(beagleFlags)) { dialog.setPreferBeagleGPU(); } dialog.setPreferBeagleSSE(BeagleFlag.VECTOR_SSE.isSet(beagleFlags)); if (BeagleFlag.PRECISION_SINGLE.isSet(beagleFlags)) { dialog.setPreferBeagleSingle(); } if (!dialog.showDialog(nameString)) { return; } if (dialog.allowOverwrite()) { allowOverwrite = true; } seed = dialog.getSeed(); threadCount = dialog.getThreadPoolSize(); useBeagle = dialog.useBeagle(); if (useBeagle) { beagleShowInfo = dialog.showBeagleInfo(); if (dialog.preferBeagleCPU()) { beagleFlags |= BeagleFlag.PROCESSOR_CPU.getMask(); } if (dialog.preferBeagleSSE()) { beagleFlags |= BeagleFlag.VECTOR_SSE.getMask(); } if (dialog.preferBeagleGPU()) { beagleFlags |= BeagleFlag.PROCESSOR_GPU.getMask(); } if (dialog.preferBeagleDouble()) { beagleFlags |= BeagleFlag.PRECISION_DOUBLE.getMask(); } if (dialog.preferBeagleSingle()) { beagleFlags |= BeagleFlag.PRECISION_SINGLE.getMask(); } System.setProperty("beagle.scaling", dialog.scalingScheme()); } inputFile = dialog.getInputFile(); if (!beagleShowInfo && inputFile == null) { System.err.println("No input file specified"); return; } } if (useBeagle) { BeagleInfo.printVersionInformation(); if (BeagleInfo.getVersion().startsWith("1.")) { System.err.println("WARNING: You are currenly using BEAGLE v1.x. For best performance and compatibility\n" + "with models in BEAST, please upgrade to BEAGLE v2.0 at http://beagle-lib.googlecode.com/\n"); } } if (beagleShowInfo) { BeagleInfo.printResourceList(); return; } if (inputFile == null) { String[] args2 = arguments.getLeftoverArguments(); if (args2.length > 1) { System.err.println("Unknown option: " + args2[1]); System.err.println(); printUsage(arguments); return; } String inputFileName = null; if (args2.length > 0) { inputFileName = args2[0]; inputFile = new File(inputFileName); } if (inputFileName == null) { // No input file name was given so throw up a dialog box... 
inputFile = Utils.getLoadFile("BEAST " + version.getVersionString() + " - Select XML input file"); } } if (inputFile != null && inputFile.getParent() != null && working) { System.setProperty("user.dir", inputFile.getParent()); } if (window) { if (inputFile == null) { consoleApp.setTitle("null"); } else { consoleApp.setTitle(inputFile.getName()); } } if (useJava) { System.setProperty("java.only", "true"); } if (fileNamePrefix != null && fileNamePrefix.trim().length() > 0) { System.setProperty("file.name.prefix", fileNamePrefix.trim()); } if (allowOverwrite) { System.setProperty("log.allow.overwrite", "true"); } if (useBeagle) { additionalParsers.add("beagle"); } if (beagleFlags != 0) { System.setProperty("beagle.preferred.flags", Long.toString(beagleFlags)); } if (threadCount >= 0) { System.setProperty("thread.count", String.valueOf(threadCount)); } MathUtils.setSeed(seed); System.out.println(); System.out.println("Random number seed: " + seed); System.out.println(); try { new BeastMain(inputFile, consoleApp, maxErrorCount, verbose, parserWarning, strictXML, additionalParsers, useMC3, chainTemperatures, swapChainsEvery); } catch (RuntimeException rte) { if (window) { System.out.println(); System.out.println("BEAST has terminated with an error. Please select QUIT from the menu."); // logger.severe will throw a RTE but we want to keep the console visible } else { System.exit(1); } } if (useMPI) { BeastMPI.Finalize(); } if (!window) { System.exit(0); } } }
package uk.ac.ic.wlgitbridge.snapshot.base; import com.google.api.client.http.*; import com.google.gson.JsonElement; import com.google.gson.JsonObject; import org.asynchttpclient.AsyncHttpClient; import static org.asynchttpclient.Dsl.*; import uk.ac.ic.wlgitbridge.snapshot.exception.FailedConnectionException; import uk.ac.ic.wlgitbridge.util.Instance; import uk.ac.ic.wlgitbridge.util.Log; import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.util.concurrent.*; public abstract class Request<T extends Result> { public static final AsyncHttpClient httpClient = asyncHttpClient(); private static final Executor executor = Executors.newCachedThreadPool(); private final String url; private Future<HttpResponse> future; public Request(String url) { this.url = url; } public CompletableFuture<T> request() { switch (httpMethod()) { case GET: performGetRequest(); break; case POST: performPostRequest(); break; default: break; } CompletableFuture<T> ret = new CompletableFuture<>(); executor.execute(() -> { try { ret.complete(getResult()); } catch (Throwable t) { ret.completeExceptionally(t); } }); return ret; } private T getResult() throws MissingRepositoryException, FailedConnectionException, ForbiddenException { try { HttpResponse response = future.get(); Log.info( "{} {} ({}B) -> " + url, response.getStatusCode(), response.getStatusMessage(), response.getHeaders().getContentLength() ); JsonElement json = Instance.gson.fromJson( response.parseAsString(), JsonElement.class ); return parseResponse(json); } catch (InterruptedException e) { throw new FailedConnectionException(); } catch (ExecutionException e) { Throwable cause = e.getCause(); if (cause instanceof HttpResponseException) { HttpResponseException httpCause = (HttpResponseException) cause; int sc = httpCause.getStatusCode(); if (sc == HttpServletResponse.SC_UNAUTHORIZED || sc == HttpServletResponse.SC_FORBIDDEN) { throw new ForbiddenException(); } else if (sc == HttpServletResponse.SC_NOT_FOUND) { try { JsonObject json = Instance.gson.fromJson(httpCause.getContent(), JsonObject.class); String message = json.get("message").getAsString(); String newRemote; if (json.has("newRemote")) { newRemote = json.get("newRemote").getAsString(); } else { newRemote = null; } if ("Exported to v2".equals(message)) { throw new MissingRepositoryException( MissingRepositoryException.buildExportedToV2Message(newRemote) ); } } catch (IllegalStateException | ClassCastException | NullPointerException _) { // disregard any errors that arose while handling the JSON } throw new MissingRepositoryException(); } else if (sc >= 400 && sc < 500) { throw new MissingRepositoryException(MissingRepositoryException.GENERIC_REASON); } throw new FailedConnectionException(cause); } else { throw new FailedConnectionException(cause); } } catch (IOException e) { Log.error("Failed to parse JSON.", e); throw new FailedConnectionException(); } } protected abstract HTTPMethod httpMethod(); protected void onBeforeRequest(HttpRequest request) throws IOException { } protected abstract T parseResponse(JsonElement json) throws FailedConnectionException; protected String getPostBody() { return null; } private void performGetRequest() { Log.info("GET -> " + url); try { HttpRequest request = Instance.httpRequestFactory.buildGetRequest( new GenericUrl(url) ); setTimeouts(request); request(request); } catch (IOException e) { e.printStackTrace(); throw new RuntimeException(e); } } private void performPostRequest() { Log.info("POST -> " + url); try { HttpRequest request = 
Instance.httpRequestFactory.buildPostRequest( new GenericUrl(url), new ByteArrayContent( "application/json", getPostBody().getBytes() ) ); setTimeouts(request); request(request); } catch (IOException e) { e.printStackTrace(); throw new RuntimeException(e); } } private void request(HttpRequest request) throws IOException { onBeforeRequest(request); future = request.executeAsync(); } private void setTimeouts(HttpRequest request) { // timeouts are 20s by default int threeMinutesInMs = 1000 * 60 * 3; request.setConnectTimeout(threeMinutesInMs); request.setReadTimeout(threeMinutesInMs); } }
package GUI;

import Information.ClassYear;
import Information.Student;

import javafx.geometry.Insets;
import javafx.geometry.Pos;
import javafx.scene.control.ChoiceBox;
import javafx.scene.control.Label;
import javafx.scene.control.TextField;
import javafx.scene.layout.HBox;
import javafx.scene.layout.VBox;

public class RegistrationStudentInfoEntryBox extends VBox {

    private Label emplIdLabel, firstNameLabel, lastNameLabel, classyearLabel;
    private TextField emplIdTextField;
    private TextField firstNameTextField;
    private TextField lastNameTextField;
    private ChoiceBox<ClassYear> yearChoiceBox;
    private HBox horizontalEmplId, horizontalFirstName, horizontalLastName, horizontalChoiceBox;

    public RegistrationStudentInfoEntryBox() {
        this(0);
    }

    public RegistrationStudentInfoEntryBox(double spacing) {
        super(spacing);
        initializeFields();
        initializeLayout();
    }

    private void initializeFields() {
        emplIdLabel = makeNewLabel("EMPL ID");
        firstNameLabel = makeNewLabel("First Name");
        lastNameLabel = makeNewLabel("Last Name");
        classyearLabel = makeNewLabel("Class Year");
        emplIdTextField = new TextField();
        firstNameTextField = new TextField();
        lastNameTextField = new TextField();
        yearChoiceBox = new ChoiceBox<>();
        yearChoiceBox.getItems().addAll(ClassYear.values());
        yearChoiceBox.setValue(ClassYear.FRESHMAN);
        horizontalEmplId = makeNewHorizontalLayout();
        horizontalFirstName = makeNewHorizontalLayout();
        horizontalLastName = makeNewHorizontalLayout();
        horizontalChoiceBox = makeNewHorizontalLayout();
    }

    private Label makeNewLabel(String text) {
        Label label = new Label(text);
        label.setMinWidth(80);
        return label;
    }

    private HBox makeNewHorizontalLayout() {
        HBox hBox = new HBox();
        hBox.setAlignment(Pos.CENTER);
        return hBox;
    }

    private void initializeLayout() {
        horizontalEmplId.getChildren().addAll(emplIdLabel, emplIdTextField);
        horizontalFirstName.getChildren().addAll(firstNameLabel, firstNameTextField);
        horizontalLastName.getChildren().addAll(lastNameLabel, lastNameTextField);
        horizontalChoiceBox.getChildren().addAll(classyearLabel, yearChoiceBox);
        getChildren().addAll(horizontalEmplId, horizontalFirstName, horizontalLastName, horizontalChoiceBox);
        setPadding(new Insets(20));
        setAlignment(Pos.CENTER);
    }

    public Integer getEmplId() {
        // Throws NumberFormatException if the field does not contain a valid integer.
        return Integer.parseInt(emplIdTextField.getText());
    }

    public String getFirstNameText() {
        return firstNameTextField.getText();
    }

    public String getLastNameText() {
        return lastNameTextField.getText();
    }

    public ClassYear getClassYear() {
        return yearChoiceBox.getValue();
    }

    public Student getStudent() {
        return new Student(getEmplId(), getFirstNameText(), getLastNameText(), getClassYear());
    }

    public TextField getEmplIdTextField() {
        return emplIdTextField;
    }

    public TextField getFirstNameTextField() {
        return firstNameTextField;
    }

    public TextField getLastNameTextField() {
        return lastNameTextField;
    }

    public ChoiceBox<ClassYear> getYearChoiceBox() {
        return yearChoiceBox;
    }
}
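// A minimal sketch (not part of the original sources; the class name is illustrative)
// showing how the entry box above might be hosted in a JavaFX scene and read back into
// a Student via getStudent(). Note that getEmplId() uses Integer.parseInt(), so
// non-numeric input throws NumberFormatException; real callers should validate or catch it.
package GUI;

import javafx.application.Application;
import javafx.scene.Scene;
import javafx.stage.Stage;

public class RegistrationEntrySketch extends Application {

    @Override
    public void start(Stage stage) {
        RegistrationStudentInfoEntryBox entryBox = new RegistrationStudentInfoEntryBox(10);
        stage.setScene(new Scene(entryBox, 320, 240));
        stage.setTitle("Student Registration");
        stage.show();

        // Typically wired to a Submit button's onAction handler:
        // Information.Student student = entryBox.getStudent();
    }

    public static void main(String[] args) {
        launch(args);
    }
}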
package com.a.eye.skywalking.registry.api; public interface Register { void subscribe(NotifyListener listener); void unSubscribe(); void registry(); }
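The Register contract above is small; the sketch below is a hypothetical no-op implementation that only shows how the three methods relate, not one of the project's real registry backends.

package com.a.eye.skywalking.registry.api;

// Hypothetical implementation for illustration only.
public class NoopRegister implements Register {

    private NotifyListener listener;

    @Override
    public void subscribe(NotifyListener listener) {
        // keep the listener so a real backend could push registry changes to it
        this.listener = listener;
    }

    @Override
    public void unSubscribe() {
        this.listener = null;
    }

    @Override
    public void registry() {
        // a real backend would publish this node's address to the registry here
    }
}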
package org.slc.sli.search.process; import java.util.ArrayList; import java.util.List; import java.util.concurrent.Executors; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import org.apache.commons.codec.binary.Base64; import org.slc.sli.search.entity.IndexEntity; import org.slc.sli.search.util.IndexEntityConverter; import org.slc.sli.search.util.SearchIndexerException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpEntity; import org.springframework.http.HttpHeaders; import org.springframework.http.HttpMethod; import org.springframework.web.client.RestClientException; import org.springframework.web.client.RestTemplate; /** * Indexer is responsible for building elasticsearch index requests and * sending them to the elasticsearch server for processing. * * @author dwu * */ public class Indexer { private final Logger logger = LoggerFactory.getLogger(getClass()); private static final int DEFAULT_BULK_SIZE = 5000; private static final int MAX_AGGREGATE_PERIOD = 2000; @Autowired IndexEntityConverter indexEntityConverter; private String esUri; private RestTemplate searchTemplate; private String esUsername; private String esPassword; private int bulkSize = DEFAULT_BULK_SIZE; // queue of index requests limited to bulkSize LinkedBlockingQueue<IndexEntity> indexRequests = new LinkedBlockingQueue<IndexEntity>(DEFAULT_BULK_SIZE * 2); private ScheduledExecutorService indexExecutor = Executors.newScheduledThreadPool(1); // last message timestamp private long lastUpdate = 0L; /** * Issues a bulk index request when the queue has reached the bulk size, or when it is non-empty and the last update is older than the tolerance period. * */ private class IndexQueueMonitor implements Runnable { public void run() { try { if (indexRequests.size() >= bulkSize || (indexRequests.size() > 0 && (System.currentTimeMillis() - lastUpdate > MAX_AGGREGATE_PERIOD))) { final List<IndexEntity> col = new ArrayList<IndexEntity>(); indexRequests.drainTo(col); indexExecutor.schedule(new Runnable() {public void run() {executeBulkHttp(col);}}, 10, TimeUnit.MILLISECONDS); } } catch (Throwable t) { logger.info("Unable to index with elasticsearch", t); } } } public void init() { searchTemplate = new RestTemplate(); indexExecutor.scheduleAtFixedRate(new IndexQueueMonitor(), MAX_AGGREGATE_PERIOD, MAX_AGGREGATE_PERIOD, TimeUnit.MILLISECONDS); } public void destroy() { indexExecutor.shutdown(); } public void index(IndexEntity entity) { try { indexRequests.put(entity); lastUpdate = System.currentTimeMillis(); } catch (InterruptedException e) { throw new SearchIndexerException("Shutting down..."); } } /** * Takes a collection of index requests, builds a bulk http message to send to elastic search * * @param indexRequests */ public void executeBulkHttp(List<IndexEntity> indexRequests) { logger.info("Sending bulk index request with " + indexRequests.size() + " records"); // create bulk http message StringBuilder message = new StringBuilder(); /* * format of message data * { "index" : { "_index" : "test", "_type" : "type1", "_id" : "1" } } * { "field1" : "value1" } */ // add each index request to the message while (!indexRequests.isEmpty()) { message.append(indexEntityConverter.toIndexJson(indexRequests.remove(0))); } // send the message HttpEntity<String> response = sendRESTCall(message.toString()); logger.info("Bulk index response: " +
response.getBody()); // TODO: do we need to check the response status of each part of the bulk request? } /** * Send REST query to elasticsearch server * * @param query * @return */ private HttpEntity<String> sendRESTCall(String query) { HttpMethod method = HttpMethod.POST; HttpHeaders headers = new HttpHeaders(); // Basic Authentication when username and password are provided if (esUsername != null && esPassword != null) { headers.set("Authorization", "Basic " + Base64.encodeBase64String((esUsername + ":" + esPassword).getBytes())); } HttpEntity<String> entity = new HttpEntity<String>(query, headers); // make the REST call try { return searchTemplate.exchange(esUri, method, entity, String.class); } catch (RestClientException rce) { logger.error("Error sending elastic search request!", rce); throw rce; } } public void setSearchUrl(String esUrl) { this.esUri = esUrl + "/_bulk"; } public void setSearchUsername(String esUsername) { this.esUsername = esUsername; } public void setSearchPassword(String esPassword) { this.esPassword = esPassword; } public void setBulkSize(int bulkSize) { this.bulkSize = bulkSize; } }
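A rough wiring sketch for the Indexer outside of Spring; it assumes IndexEntityConverter has a usable default constructor, and it leaves the IndexEntity construction commented out because that type's constructor is project-specific.

package org.slc.sli.search.process;

import org.slc.sli.search.util.IndexEntityConverter;

// Illustrative wiring only; in the real project these dependencies are injected by Spring.
public class IndexerDemo {

    public static void main(String[] args) throws InterruptedException {
        Indexer indexer = new Indexer();
        indexer.indexEntityConverter = new IndexEntityConverter(); // package-visible field; default constructor is an assumption
        indexer.setSearchUrl("http://localhost:9200");             // "/_bulk" is appended internally
        indexer.setSearchUsername("elastic");
        indexer.setSearchPassword("changeme");
        indexer.setBulkSize(100);
        indexer.init(); // creates the RestTemplate and starts the scheduled queue monitor

        // IndexEntity construction is project-specific and therefore not shown here:
        // IndexEntity entity = ...;
        // indexer.index(entity); // queued, then flushed in bulk by IndexQueueMonitor

        Thread.sleep(5000); // give the scheduled monitor a chance to flush
        indexer.destroy();
    }
}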
package eg.document; import javax.swing.event.DocumentListener; import javax.swing.event.DocumentEvent; import javax.swing.event.UndoableEditEvent; import javax.swing.event.UndoableEditListener; import javax.swing.event.CaretListener; import javax.swing.event.CaretEvent; import javax.swing.undo.UndoManager; import javax.swing.undo.CompoundEdit; import javax.swing.undo.UndoableEdit; import javax.swing.undo.CannotUndoException; import javax.swing.undo.CannotRedoException; import javax.swing.JTextPane; import javax.swing.SwingWorker; import java.awt.EventQueue; //--Eadgyth--// import eg.Languages; import eg.syntax.Lexer; import eg.syntax.Coloring; import eg.ui.EditArea; import eg.utils.FileUtils; import eg.utils.Finder; /* * Mediates the editing in the {@code EditArea} that shall happen during * typing. * <p> * Methods are used in other classes that show line numbering, syntax * coloring, auto-indentation and undo/redo editing (the latter an inner * class). */ class TypingEdit { private final static char[] UNDO_SEP = {' ', '(', ')', '{', '}', '\n'}; private final EditArea editArea; private final UndoManager undomanager = new DocUndoManager(); private final Lexer lex; private final Coloring col; private final AutoIndent autoInd; private final LineNumbers lineNum; private boolean evaluateText = true; private boolean isTypeEdit = false; private char typed; private int eventType; //0: change, 1: insert, 2: remove private int pos; private int changeLength = 0; private int caret; TypingEdit(EditArea editArea) { this.editArea = editArea; editArea.getDoc().addDocumentListener(docListen); editArea.getDoc().addUndoableEditListener(undomanager); editArea.textArea().addCaretListener(new UndoStopper()); undomanager.setLimit(1000); lex = new Lexer(editArea.getDoc(), editArea.getNormalSet()); col = new Coloring(lex); lineNum = new LineNumbers(editArea); autoInd = new AutoIndent(editArea); } void setDefaultDoc() { enableEvaluateText(false); editArea.setDefDoc(); editArea.getDefDoc().addDocumentListener(docListen); editArea.getDoc().removeUndoableEditListener(undomanager); } void setDoc() { enableEvaluateText(true); editArea.setDoc(); editArea.getDoc().addDocumentListener(docListen); editArea.getDoc().addUndoableEditListener(undomanager); } void enableTypeEdit(boolean isEnabled) { isTypeEdit = isEnabled; lex.enableTypeMode(isEnabled); } void setUpEditing(Languages lang) { undomanager.discardAllEdits(); if (lang == Languages.PLAIN_TEXT) { editArea.allTextToBlack(); enableTypeEdit(false); lex.enableTypeMode(false); autoInd.enableIndent(false); } else { col.selectColorable(lang); colorSection(editArea.getDocText(), null, 0); autoInd.enableIndent(true); } } void changeIndentUnit(String indentUnit) { autoInd.changeIndentUnit(indentUnit); } String getIndentUnit() { return autoInd.getIndentUnit(); } void addAllLineNumbers(String in) { lineNum.addAllLineNumbers(in); } void updateLineNumber(String content) { lineNum.updateLineNumber(content); } void colorSection(String allText, String section, int posStart) { enableTypeEdit(false); col.colorSection(allText, section, posStart); enableTypeEdit(true); } synchronized void undo() { try { int prevLineNr = lineNum.getCurrLineNr(); enableEvaluateText(false); if (undomanager.canUndo()) { undomanager.undo(); } updateAfterUndoRedo(prevLineNr); } catch (CannotUndoException e) { FileUtils.logStack(e); } } synchronized void redo() { try { int prevLineNr = lineNum.getCurrLineNr(); enableEvaluateText(false); if (undomanager.canRedo()) { undomanager.redo(); } 
updateAfterUndoRedo(prevLineNr); } catch (CannotRedoException e) { FileUtils.logStack(e); } } //--private methods/classes--// private void updateAfterUndoRedo(int prevLineNr) { String allText = editArea.getDocText(); updateLineNumber(allText); if (isTypeEdit) { int newLineNr = lineNum.getCurrLineNr(); if (newLineNr > prevLineNr) { colorSection(allText, null, 0); } else if (newLineNr < prevLineNr) { undomanager.discardAllEdits(); } else { if (pos > 0 & pos < allText.length()) { color(allText, pos); } } } enableEvaluateText(true); } private void enableEvaluateText(boolean enable) { evaluateText = enable; } private void color(String allText, int pos) { EventQueue.invokeLater(() -> { col.colorLine(allText, pos); }); } private final DocumentListener docListen = new DocumentListener() { @Override public void insertUpdate(DocumentEvent de) { pos = de.getOffset(); eventType = 1; if (evaluateText) { changeLength = de.getLength(); String in = editArea.getDocText(); typed = in.charAt(pos); updateLineNumber(in); if (isTypeEdit) { autoInd.setText(in); if (typed != '\n') { color(in, pos); } EventQueue.invokeLater(() -> { autoInd.closeBracketIndent(pos); }); } } } @Override public void removeUpdate(DocumentEvent de) { pos = de.getOffset(); eventType = 2; if (evaluateText) { changeLength = - de.getLength(); typed = '\0'; String in = editArea.getDocText(); updateLineNumber(in); if (isTypeEdit) { color(in, pos); } } } @Override public void changedUpdate(DocumentEvent de) { if (evaluateText) { eventType = 0; } } }; private class UndoStopper implements CaretListener { @Override public void caretUpdate(CaretEvent ce) { caret = editArea.shiftToSelectionStart(ce.getDot()); if (caret > 0) { boolean isStop = true; if (eventType == 1) { isStop = caret - pos != 1 && caret - pos != changeLength; } else if (eventType == 2) { isStop = caret - pos != 0 && caret - pos != changeLength; } if (isStop) { undomanager.discardAllEdits(); } } changeLength = 0; } } private final class DocUndoManager extends UndoManager implements UndoableEditListener { CompoundEdit comp = null; @Override public synchronized void undoableEditHappened(UndoableEditEvent e) { if (!evaluateText) { return; } UndoableEdit ed = e.getEdit(); if (eventType != 0) { addAnEdit(ed); } } @Override public synchronized boolean canUndo() { commitCompound(); return super.canUndo(); } @Override public synchronized boolean canRedo() { commitCompound(); return super.canRedo(); } @Override public synchronized void undo() { super.undo(); } @Override public synchronized void redo() { super.redo(); } @Override public synchronized void discardAllEdits() { if (comp != null) { comp = null; } super.discardAllEdits(); } private synchronized void addAnEdit(UndoableEdit anEdit) { if (comp == null) { comp = new CompoundEdit(); } if ((typed != '\0' & isEditSeparator()) || typed == '\0') { commitCompound(); super.addEdit(anEdit); } else { comp.addEdit(anEdit); } } private synchronized void commitCompound() { if (comp != null) { comp.end(); super.addEdit(comp); comp = null; } } private synchronized boolean isEditSeparator() { int i = 0; for (i = 0; i < UNDO_SEP.length; i++) { if (UNDO_SEP[i] == typed) { break; } } return i != UNDO_SEP.length; } } }
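The core trick in DocUndoManager above is collecting per-keystroke edits into a CompoundEdit until a separator character arrives, so a single undo removes a whole word rather than one character. The standalone sketch below demonstrates that grouping idea with plain Swing classes; the class name and the simplified last-character check are illustrative only, not part of the editor project.

package eg.document;

import javax.swing.JFrame;
import javax.swing.JTextArea;
import javax.swing.SwingUtilities;
import javax.swing.undo.CompoundEdit;
import javax.swing.undo.UndoManager;

// Standalone sketch of keystroke grouping for undo.
public class CompoundUndoSketch {

    public static void main(String[] args) {
        SwingUtilities.invokeLater(() -> {
            JTextArea area = new JTextArea(10, 40);
            UndoManager undo = new UndoManager();
            CompoundEdit[] pending = {null};

            area.getDocument().addUndoableEditListener(e -> {
                // simplified: look at the last character of the document instead of the caret position
                String text = area.getText();
                char last = text.isEmpty() ? '\0' : text.charAt(text.length() - 1);
                boolean separator = last == ' ' || last == '\n' || last == '(' || last == ')';
                if (pending[0] == null) {
                    pending[0] = new CompoundEdit();
                }
                pending[0].addEdit(e.getEdit());
                if (separator) {
                    // commit the group: one undo step now covers the whole word
                    pending[0].end();
                    undo.addEdit(pending[0]);
                    pending[0] = null;
                }
            });

            // a key binding for Ctrl+Z could call undo.undo() on the committed groups
            JFrame frame = new JFrame("Compound undo sketch");
            frame.add(area);
            frame.pack();
            frame.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
            frame.setVisible(true);
        });
    }
}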
package gridlab.View; import gridlab.ExecuteShellCommand; import gridlab.ModulesItems.Clock; import gridlab.ModulesItems.Generator.Inverter; import gridlab.ModulesItems.Generator.Solar; import gridlab.ModulesItems.Powerflow.*; import gridlab.ModulesItems.Property; import gridlab.ModulesItems.Residental.*; import gridlab.ModulesItems.Tape.Player; import gridlab.ModulesItems.Tape.Recorder; import gridlab.ModulesItems.ToGLMParser; import gridlab.ParentChild; import javax.swing.*; import javax.swing.filechooser.FileNameExtensionFilter; import java.awt.*; import java.awt.event.*; import java.io.*; import java.util.*; public class MainWindow extends JFrame { private JFrame mainFrame; private JPanel modulesPanel; private JPanel objectPanel; private JPanel addedObjectsPanel; private JScrollPane propertiesPanel; private JScrollPane consolePanel; private JButton addButton; private JButton removeButton; private JButton addClock; private JButton connectButton; private JButton connectFTButton; private JButton startSimulationButton; private JMenuBar menuBar; private JToolBar toolBar; private JTextArea fileNameJTextArea = new JTextArea("HelloWorld"); private JPanel drag_drop; private String fileName="Hello1.glm"; private JFileChooser fileChooser; private JList<String> modulesJList; private JList<String> objectsJList; private JList<String> addedObjectsJList; private JList<String> propertiesJList; DefaultListModel<String> modulesItems; DefaultListModel<String> powerflowItems; DefaultListModel<String> residentalItems; DefaultListModel<String> tapeItems; DefaultListModel<String> generatorItems; DefaultListModel<String> objectsItems; DefaultListModel<String> addedObjectsItems; DefaultListModel<String> propertiesItems; HashMap<String,ToGLMParser> objectTable; HashMap<String,JLabel> imagesTable; ArrayList<ParentChild> listOfConn; HashMap<String,String> hashChildParent; Map<String,Point> map; private int objectCount=0; //private JLabel[] labelsGlobal; private JTextField[] textFieldsGlobal; private int currentObject=0; private String stringCurrentObject=""; private JTextArea consoleOutput=new JTextArea(); private JPopupMenu popupTape; private JPopupMenu popupResidental; private JPopupMenu popupPowerflow; private JPopupMenu popupGenerator; //private JPopupMenu popup; public MainWindow() { hashChildParent=new HashMap<String,String>(); listOfConn=new ArrayList<ParentChild>(); map=new LinkedHashMap<String,Point>(); objectTable=new HashMap<String,ToGLMParser>(); imagesTable=new HashMap<String,JLabel>(); fileChooser=new JFileChooser(); FileNameExtensionFilter filterGLM=new FileNameExtensionFilter("GLM files","glm"); fileChooser.addChoosableFileFilter(filterGLM); FileNameExtensionFilter filterSer=new FileNameExtensionFilter("Save files","ser"); fileChooser.addChoosableFileFilter(filterSer); loadLists(); modulesJList=new JList<String>(modulesItems); objectsJList=new JList<String>(objectsItems); addedObjectsJList=new JList<String>(addedObjectsItems); propertiesJList=new JList<String>(propertiesItems); objectsItems.addElement(" "); objectsJList.setSelectedIndex(0); mainFrame = new JFrame(); modulesPanel = new ModulesPanel(modulesJList); objectPanel = new ModulesPanel(objectsJList); addedObjectsPanel = new ModulesPanel(addedObjectsJList); propertiesPanel = new JScrollPane(); consolePanel = new JScrollPane(consoleOutput); //propertiesPanel.setLayout(null); propertiesPanel.setPreferredSize(new Dimension(450, 300)); consolePanel.setPreferredSize(new Dimension(500,300)); addButton = new JButton("+"); removeButton = new
JButton("-"); connectButton=new JButton("Parent Connect"); connectFTButton=new JButton("From To Connect"); startSimulationButton=new JButton("Start symulacji",new ImageIcon("C:\\Users\\Dylek\\Documents\\GitHub\\SP_gridlab-D_GUI\\Gridlab\\Icons\\start16x16.png")); Icon clock = new ImageIcon("Gridlab\\resources\\clock.png"); addClock = new JButton("Add clock"); addClock.setIcon(clock); drag_drop = new MyJPanel(); drag_drop.setPreferredSize(new Dimension(500,300)); loadToolBox(); Container container = mainFrame.getContentPane(); container.setLayout(new FlowLayout(FlowLayout.LEFT)); container.add(toolBar); //container.add(modulesPanel); //container.add(objectPanel); //container.add(addButton); container.add(removeButton); container.add(connectButton); container.add(connectFTButton); //container.add(addClock); container.add(addedObjectsPanel); container.add(propertiesPanel); container.add(drag_drop); container.add(consolePanel); loadListers(); menuBar=new JMenuBar(); mainFrame.setJMenuBar(menuBar); loadMenu(); mainFrame.setSize(new Dimension(1300,600)); mainFrame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); mainFrame.setLocationRelativeTo(null); mainFrame.setTitle("Gridlab-D GUI"); mainFrame.setResizable(false); mainFrame.setVisible(true); } private void loadLists(){ objectsItems=new DefaultListModel<String>(); addedObjectsItems=new DefaultListModel<String>(); propertiesItems=new DefaultListModel<String>(); modulesItems=new DefaultListModel<String>(); modulesItems.addElement("Generator"); modulesItems.addElement("Powerflow"); modulesItems.addElement("Residental"); modulesItems.addElement("Tape"); tapeItems=new DefaultListModel<String>(); tapeItems.addElement("Player"); tapeItems.addElement("Recorder"); residentalItems=new DefaultListModel<String>(); residentalItems.addElement("House"); residentalItems.addElement("Lights"); residentalItems.addElement("Microwave"); residentalItems.addElement("Occupants"); residentalItems.addElement("Plugs"); residentalItems.addElement("Refrigerator"); residentalItems.addElement("Waterheat"); residentalItems.addElement("Clotheswasher"); residentalItems.addElement("Dishwasher"); powerflowItems=new DefaultListModel<String>(); powerflowItems.addElement("Capacitor"); powerflowItems.addElement("Fuse"); powerflowItems.addElement("LineConfiguration"); powerflowItems.addElement("LineSpacing"); powerflowItems.addElement("Load"); powerflowItems.addElement("Meter"); powerflowItems.addElement("Node"); powerflowItems.addElement("OverheadLine"); powerflowItems.addElement("OverheadLineConductor"); powerflowItems.addElement("Regulator"); powerflowItems.addElement("RegulatorConfiguration"); powerflowItems.addElement("Switch"); powerflowItems.addElement("Transformer"); powerflowItems.addElement("TransformerConfiguration"); powerflowItems.addElement("TriplexLine"); powerflowItems.addElement("TriplexLineConductor"); powerflowItems.addElement("TriplexLineConfiguration"); powerflowItems.addElement("TriplexMeter"); powerflowItems.addElement("TriplexNode"); powerflowItems.addElement("UndergroundLine"); powerflowItems.addElement("UnderGroundLineConductor"); generatorItems=new DefaultListModel<String>(); generatorItems.addElement("Inverter"); generatorItems.addElement("Solar"); } private void loadListers(){ connectButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { String[] nodes = showGUIForConnectionsMaking("parent-child","connect child with parent"); if (nodes == null || nodes[0]==null || nodes.length == 0 ) {} else if 
(!nodes[0].equals(nodes[1])) { String split[] = nodes[0].split(","); Point p1 = new Point(Integer.valueOf(split[0]),Integer.valueOf(split[1])); split = nodes[1].split(","); Point p2 = new Point(Integer.valueOf(split[0]),Integer.valueOf(split[1])); JLabel labelParent = (JLabel)drag_drop.getComponentAt(p1); String parentName=labelParent.getName(); JLabel labelChild = (JLabel)drag_drop.getComponentAt(p2); String childName=labelChild.getName(); ParentChild pair = new ParentChild(labelParent,labelChild); //make parent child connection if(hashChildParent.containsKey(childName) ){ consoleOutput.setText("A child object can have only one parent\n Overriding old parent"); //hashChildParent.remove(childName); for(int i=0;i<listOfConn.size();i++){ if(listOfConn.get(i).getParentJLabel().getName().equals(hashChildParent.get(childName))){ listOfConn.remove(listOfConn.get(i)); } } hashChildParent.replace(childName,parentName); } hashChildParent.put(childName,parentName); Vector<Property> noweProperty=new Vector<Property>(); for (Property p:objectTable.get(childName).GetProperties()) { if(p.GetName().equals("parent")){ noweProperty.add(new Property("parent", parentName, "")); }else{ noweProperty.add(p); } } objectTable.get(childName).SetProperty(noweProperty); listOfConn.add(pair); drag_drop.repaint(); } else { JOptionPane.showMessageDialog(MainWindow.this,"Nodes can't be the same","Error",JOptionPane.ERROR_MESSAGE); } } }); connectFTButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { String[] nodes = showGUIForConnectionsFromToMaking("From--Connector--To","make From-To connection"); if (nodes == null || nodes[0]==null || nodes.length == 0 ) {} else if (!nodes[0].equals(nodes[2])) { String split[] = nodes[0].split(","); Point pFrom = new Point(Integer.valueOf(split[0]),Integer.valueOf(split[1])); split = nodes[1].split(","); Point pConnector = new Point(Integer.valueOf(split[0]),Integer.valueOf(split[1])); split = nodes[2].split(","); Point pTo = new Point(Integer.valueOf(split[0]),Integer.valueOf(split[1])); JLabel labelFrom = (JLabel)drag_drop.getComponentAt(pFrom); String fromName=labelFrom.getName(); JLabel labelTo = (JLabel)drag_drop.getComponentAt(pTo); String toName=labelTo.getName(); JLabel labelConn= (JLabel)drag_drop.getComponentAt(pConnector); String connName= labelConn.getName(); ParentChild pairTo = new ParentChild(labelConn,labelTo); ParentChild pairFrom =new ParentChild(labelConn,labelFrom); Vector<Property> noweProperty=new Vector<Property>(); for (Property p:objectTable.get(connName).GetProperties()) { if(p.GetName().equals("from")){ noweProperty.add(new Property("from", fromName, "")); }else if(p.GetName().equals("to")){ noweProperty.add(new Property("to", toName, "")); }else { noweProperty.add(p); } } objectTable.get(connName).SetProperty(noweProperty); listOfConn.add(pairTo); listOfConn.add(pairFrom); drag_drop.repaint(); } else { JOptionPane.showMessageDialog(MainWindow.this,"Cannot connect the same objects","Error",JOptionPane.ERROR_MESSAGE); } } }); modulesJList.addMouseListener(new MouseAdapter() { public void mouseClicked(MouseEvent evt) { JList list = (JList)evt.getSource(); if (evt.getClickCount() == 1) { objectsItems.clear(); // single click detected int index = list.locationToIndex(evt.getPoint()); switch (index){ case 0: for(int i=0;i<generatorItems.getSize();i++) { objectsItems.addElement(generatorItems.get(i)); } objectsJList.setSelectedIndex(0); break; case 1: for(int i=0;i<powerflowItems.getSize();i++) {
objectsItems.addElement(powerflowItems.get(i)); } objectsJList.setSelectedIndex(0); break; case 2:for(int i=0;i<residentalItems.getSize();i++) { objectsItems.addElement(residentalItems.get(i)); } objectsJList.setSelectedIndex(0); break; case 3: for(int i=0;i<tapeItems.getSize();i++) { objectsItems.addElement(tapeItems.get(i)); } objectsJList.setSelectedIndex(0); break; } } } }); addedObjectsJList.addMouseListener(new MouseAdapter(){ public void mouseClicked(MouseEvent evt) { JList list = (JList)evt.getSource(); if (evt.getClickCount() == 1) { if(textFieldsGlobal!=null) { int index = currentObject; String key = addedObjectsItems.get(index); ToGLMParser value = objectTable.get(key); int propAmount = value.GetProperties().size(); for(int i =0; i<propAmount;i++){ value.GetProperties().get(i).SetValue(textFieldsGlobal[i].getText()); } } int index = list.locationToIndex(evt.getPoint()); currentObject=index; String key = addedObjectsItems.get(index); ToGLMParser value = objectTable.get(key); stringCurrentObject=key; int propAmount = value.GetProperties().size(); JPanel params = new JPanel(); params.setLayout(new GridBagLayout()); GridBagConstraints gbc = new GridBagConstraints(); params.setMinimumSize(new Dimension(300,50)); params.setMaximumSize(new Dimension(300, 2500)); JLabel labels[] = new JLabel[propAmount]; JTextField textfields[] = new JTextField[propAmount]; for(int i =0; i<propAmount;i++){ labels[i] = new JLabel(value.GetProperties().get(i).GetName()); textfields[i] = new JTextField(value.GetProperties().get(i).GetValue()); gbc.ipadx=100; gbc.gridx = 0; gbc.gridy = i; params.add(labels[i],gbc); gbc.gridx = 1; gbc.gridy = i; params.add(textfields[i],gbc); } params.setPreferredSize(params.getPreferredSize()); //labelsGlobal=labels; textFieldsGlobal=textfields; // JScrollPane scrollPanel = new JScrollPane(params); propertiesPanel.setViewportView(params); // propertiesPanel.add(params); propertiesPanel.revalidate(); propertiesPanel.repaint(); } } }); addButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { objectCount++; String object=new String(); object=objectsItems.get(objectsJList.getSelectedIndex()); addedObjectsItems.addElement(object+" "+objectCount); //String s=objectsItems.get(objectsJList.getSelectedIndex()); switch (object){ case "Player": objectTable.put((object+" "+objectCount),new Player());System.out.print("utworozno klase"); break; case "Recorder": objectTable.put(object+" "+objectCount,new Recorder()); break; case "Inverter": objectTable.put(object+" "+objectCount,new Inverter());break; case "Solar": objectTable.put(object+" "+objectCount,new Solar());break; case "House": objectTable.put(object+" "+objectCount,new House());break; case "Dishwasher": objectTable.put(object+" "+objectCount,new Dishwasher());break; case "Clotheswasher": objectTable.put(object+" "+objectCount,new Clotheswasher());break; case "Lights": objectTable.put(object+" "+objectCount,new Lights());break; case "Microwave": objectTable.put(object+" "+objectCount,new Microwave());break; case "Occupants": objectTable.put(object+" "+objectCount,new Occupants());break; case "Plugs": objectTable.put(object+" "+objectCount,new Plugs());break; case "Refrigerator": objectTable.put(object+" "+objectCount,new Refrigerator());break; case "Waterheat": objectTable.put(object+" "+objectCount,new Waterheat());break; case "Capacitor": objectTable.put(object+" "+objectCount,new Capacitor());break; case "Fuse": objectTable.put(object+" "+objectCount,new Fuse());break; case 
"LineConfiguration": objectTable.put(object+" "+objectCount,new LineConfiguration());break; case "LineSpacing": objectTable.put(object+" "+objectCount,new LineSpacing());break; case "Load": objectTable.put(object+" "+objectCount,new Load());break; case "Meter": objectTable.put(object+" "+objectCount,new Meter());break; case "Node": objectTable.put(object+" "+objectCount,new Node());break; case "OverheadLine": objectTable.put(object+" "+objectCount,new OverheadLine() );break; case "OverheadLineConductor": objectTable.put(object+" "+objectCount,new OverheadLineConductor());break; case "Regulator": objectTable.put(object+" "+objectCount,new Regulator());break; case "RegulatorConfiguration": objectTable.put(object+" "+objectCount,new RegulatorConfiguration());break; case "Switch": objectTable.put(object+" "+objectCount,new Switch());break; case "Transformer": objectTable.put(object+" "+objectCount,new Transformer());break; case "TransformerConfiguration": objectTable.put(object+" "+objectCount,new TransformerConfiguration());break; case "TriplexLine": objectTable.put(object+" "+objectCount,new TriplexLine());break; case "TriplexLineConductor": objectTable.put(object+" "+objectCount,new TriplexLineConductor());break; case "TriplexLineConfiguration": objectTable.put(object+" "+objectCount,new TriplexLineConfiguration());break; case "TriplexMeter": objectTable.put(object+" "+objectCount,new TriplexMeter());break; case "TriplexNode": objectTable.put(object+" "+objectCount,new TriplexNode());break; case "UndergroundLine": objectTable.put(object+" "+objectCount,new UndergroundLine());break; case "UnderGroundLineConductor": objectTable.put(object+" "+objectCount,new UnderGroundLineConductor());break; } System.out.print("Dodano"+objectTable.size()); } }); removeButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { String obj=addedObjectsItems.get(addedObjectsJList.getSelectedIndex()); addedObjectsItems.remove(addedObjectsJList.getSelectedIndex()); objectTable.remove(obj); map.remove(obj); removeImageFromPanel(obj); System.out.print("usunieto "+objectTable.size()); textFieldsGlobal=null; propertiesPanel.setViewportView(null); propertiesPanel.revalidate(); propertiesPanel.repaint(); } }); addClock.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { objectCount++; addedObjectsItems.addElement("Clock " + objectCount); objectTable.put("Clock" + " " + objectCount, new Clock()); } }); } public String checkModules(){ String modules = ""; int checkGuard1 = 0; int checkGuard2 = 0; int checkGuard3 = 0; int checkGuard4 = 0; for (ToGLMParser value : objectTable.values()) { if(value.getModule()==0 && checkGuard1 ==0){ modules = modules + "module generators;\n"; checkGuard1++; } if(value.getModule()==1 && checkGuard2 ==0){ modules = modules + "module powerflow;\n"; checkGuard2++; } if(value.getModule()==2 && checkGuard3 ==0){ modules = modules + "module residential;\n"; checkGuard3++; } if(value.getModule()==3 && checkGuard4 ==0){ modules = modules + "module tape;\n"; checkGuard4++; } } return modules; } public void loadMenu(){ JMenu glmMenu=new JMenu("GLM"); JMenu helpMenu=new JMenu("Help"); JMenu fileMenu=new JMenu("File"); menuBar.add(fileMenu); menuBar.add(glmMenu); menuBar.add(helpMenu); menuBar.add(startSimulationButton); JMenuItem clearItem=new JMenuItem("Clear"); JMenuItem exitItem=new JMenuItem("Exit"); JMenuItem saveItem= new JMenuItem("Save"); JMenuItem loadITem=new JMenuItem("Load"); fileMenu.add(saveItem); fileMenu.add(loadITem); 
fileMenu.addSeparator(); fileMenu.add(clearItem); fileMenu.addSeparator(); fileMenu.add(exitItem); JMenuItem exportItem=new JMenuItem("Export to GLM"); JMenuItem runItem=new JMenuItem("Run Simulation"); glmMenu.add(exportItem); glmMenu.add(runItem); JMenuItem aboutItem=new JMenuItem("About"); JMenuItem wikiItem=new JMenuItem("Gridlab-D wiki"); helpMenu.add(aboutItem); helpMenu.add(wikiItem); //menu listeners exitItem.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { System.exit(0); } }); clearItem.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { clearMemory(); /* objectCount=0; currentObject=0; objectsItems.clear(); objectTable.clear(); imagesTable.clear(); drag_drop.removeAll(); drag_drop.revalidate(); drag_drop.repaint(); addedObjectsItems.clear(); propertiesItems.clear(); textFieldsGlobal=null; propertiesPanel.setViewportView(new JPanel()); propertiesPanel.revalidate(); propertiesPanel.repaint(); consoleOutput.setText(""); listOfConn.clear(); map.clear();*/ } }); //TODO saveItem.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { // int returnVal=fileChooser.showSaveDialog(mainFrame); //fileChooser.setFileFilter(new FileNameExtensionFilter("SaveFile",".ser")); // if(returnVal==JFileChooser.APPROVE_OPTION){ File fileSelected= fileChooser.getSelectedFile(); System.out.println(fileSelected); SaveFileClass save=new SaveFileClass(); save.addedObjectsItems=addedObjectsItems; save.modulesItems=modulesItems; save.powerflowItems=powerflowItems; save.residentalItems=residentalItems; save.tapeItems=tapeItems; save.generatorItems=generatorItems; save.objectsItems=objectsItems; save.stringCurrentObject=stringCurrentObject; save.objectCount=objectCount; save.currentObject=currentObject; save.hashChildParent=hashChildParent; save.imagesTable=imagesTable; save.listOfConn=listOfConn; save.map=map; save.objectTable=objectTable; save.propertiesItems=propertiesItems; String fileName= null; System.out.println("1."); /* try { fileName = fileSelected.getCanonicalPath()+".ser"; } catch (IOException e1) { e1.printStackTrace(); }*/ System.out.println("2."); File file=saveFile(".ser"); try { FileOutputStream fileOut = new FileOutputStream(file); ObjectOutputStream out = new ObjectOutputStream(new BufferedOutputStream(fileOut)); out.writeObject(save); out.close(); fileOut.close(); //System.out.printf("Serialized data is saved in /tmp/employee.ser"); }catch(IOException i) { i.printStackTrace(); } System.out.println("3."); fileChooser.setSelectedFile(new File("")); } }); loadITem.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { // int returnVal=fileChooser.showOpenDialog(mainFrame); //fileChooser.setFileFilter(new FileNameExtensionFilter("SaveFile",".ser")); // if(returnVal==JFileChooser.APPROVE_OPTION){ // File fileSelected= fileChooser.getSelectedFile(); SaveFileClass save = null; Object obj; File file=loadFile(); try { FileInputStream fileIn = new FileInputStream(file); ObjectInputStream in = new ObjectInputStream(new BufferedInputStream(fileIn)); save =(SaveFileClass) (in.readObject()); in.close(); fileIn.close(); }catch(IOException i) { i.printStackTrace(); return; }catch(ClassNotFoundException c) { System.out.println("Employee class not found"); c.printStackTrace(); return; } clearMemory(); //addedObjectsItems= save.addedObjectsItems; for(int i=0;i<save.addedObjectsItems.getSize();i++){ 
addedObjectsItems.addElement(save.addedObjectsItems.get(i)); } modulesItems=save.modulesItems; powerflowItems=save.powerflowItems; residentalItems=save.residentalItems; tapeItems= save.tapeItems; generatorItems= save.generatorItems; objectsItems=save.objectsItems; stringCurrentObject= save.stringCurrentObject; objectCount=save.objectCount; currentObject= save.currentObject; hashChildParent=save.hashChildParent; imagesTable= save.imagesTable; listOfConn= save.listOfConn; map= save.map; propertiesItems = save.propertiesItems; objectTable = save.objectTable; Set<String> keys=imagesTable.keySet(); for (String k:keys) { drag_drop.add(imagesTable.get(k)); map.put(k,imagesTable.get(k).getLocation()); MyMouseAdapter myMouseAdapter=new MyMouseAdapter(); imagesTable.get(k).addMouseListener(myMouseAdapter); imagesTable.get(k).addMouseListener(new MouseAdapterClick()); imagesTable.get(k).addMouseMotionListener(myMouseAdapter); } addedObjectsJList=new JList<String>(addedObjectsItems); // refresh the view; not sure why only the connections between objects work addedObjectsPanel.revalidate(); addedObjectsPanel.repaint(); drag_drop.revalidate(); drag_drop.repaint(); propertiesPanel.revalidate(); propertiesPanel.repaint(); // test - deserialization succeeded, since the values are printed System.out.print(addedObjectsItems); System.out.print(objectCount); System.out.print(objectTable); } }); runItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { String command; command="gridlabd "; // fileName=fileNameJTextArea.getText(); int returnVal=fileChooser.showOpenDialog(mainFrame); if(returnVal==JFileChooser.APPROVE_OPTION) { try { fileName=fileChooser.getSelectedFile().getCanonicalPath(); } catch (IOException e1) { e1.printStackTrace(); } if (fileName != null && !fileName.isEmpty()) { ExecuteShellCommand execCom=new ExecuteShellCommand(); JPanel conPan=new JPanel(); conPan.setPreferredSize(new Dimension(400, 300)); String str=execCom.executeCommand(command+fileName); consoleOutput.setText(str); consoleOutput.updateUI(); System.out.println("Running in console"); //System.out.println(execCom.executeCommand(command+"waterheater_example.glm")); System.out.println("End of console response"); } fileChooser.setSelectedFile(new File("")); } } }); exportItem.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { int returnVal=fileChooser.showSaveDialog(mainFrame); if(returnVal==JFileChooser.APPROVE_OPTION){ File fileSelected= fileChooser.getSelectedFile(); File file=new File(fileSelected+".glm"); // creates the file try { file.createNewFile(); } catch (IOException e1) { e1.printStackTrace(); } FileWriter writer = null; try { writer = new FileWriter(file); } catch (IOException e1) { e1.printStackTrace(); } try { writer.write(checkModules()); } catch (IOException e1) { e1.printStackTrace(); } // creates a FileWriter Object for(int i=0;i<addedObjectsItems.size();i++){ ToGLMParser glm= (ToGLMParser) objectTable.get(addedObjectsItems.get(i)); // System.out.println(glm.ToGLM()); // Writes the content to the file try { writer.write(glm.ToGLM()); } catch (IOException e1) { e1.printStackTrace(); } try { writer.flush(); } catch (IOException e1) { e1.printStackTrace(); } } try { writer.close(); } catch (IOException e1) { e1.printStackTrace(); } fileChooser.setSelectedFile(new File("")); } } }); aboutItem.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { String about="GUI project for the GRIDLAB-D program " + "\n This is a project for the course: Studio Projektowe 2016 " + "\n Authors: " + "\n Aleksandra Pierzchała " + "\n Paweł Ogorzały " + "\n Marcin Jędrzejczyk "+ "\n Supervisor:"+ "\n dr inż. Marek Zachera"; JOptionPane.showMessageDialog(mainFrame,about); } }); wikiItem.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { String about="More information about GRIDLAB-D is available on the official website"; JOptionPane.showMessageDialog(mainFrame,about); } }); startSimulationButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { File file=new File("temp.glm"); // creates the file try { file.createNewFile(); } catch (IOException e1) { e1.printStackTrace(); } FileWriter writer = null; try { writer = new FileWriter(file); } catch (IOException e1) { e1.printStackTrace(); } try { writer.write(checkModules()); } catch (IOException e1) { e1.printStackTrace(); } // creates a FileWriter Object for(int i=0;i<addedObjectsItems.size();i++){ ToGLMParser glm= (ToGLMParser) objectTable.get(addedObjectsItems.get(i)); try { writer.write(glm.ToGLM()); } catch (IOException e1) { e1.printStackTrace(); } try { writer.flush(); } catch (IOException e1) { e1.printStackTrace(); } } try { writer.close(); } catch (IOException e1) { e1.printStackTrace(); } try { fileName=file.getCanonicalPath(); } catch (IOException e1) { e1.printStackTrace(); } ExecuteShellCommand execCom=new ExecuteShellCommand(); JPanel conPan=new JPanel(); conPan.setPreferredSize(new Dimension(400, 300)); String str=execCom.executeCommand("gridlabd "+fileName); consoleOutput.setText(str); consoleOutput.updateUI(); } }); } public void loadToolBox(){ toolBar=new JToolBar(JToolBar.VERTICAL); //toolBar. JButton residentalModule=new JButton(new ImageIcon("Gridlab\\Icons\\residental.png")); JButton generatorModule=new JButton(new ImageIcon("Gridlab\\Icons\\generator.png")); JButton powerflowModule=new JButton(new ImageIcon("Gridlab\\Icons\\powerflow.png")); JButton tapeModule=new JButton(new ImageIcon("Gridlab\\Icons\\Tape2-512.png")); JButton clockModule=new JButton(new ImageIcon("Gridlab\\Icons\\clock-128.png")); toolBar.add(residentalModule); toolBar.add(generatorModule); toolBar.add(powerflowModule); toolBar.add(tapeModule); toolBar.add(clockModule); { popupTape = new JPopupMenu(); for (int i=0;i< tapeItems.size();i++ ) { String str=tapeItems.get(i); popupTape.add(new JMenuItem(new AbstractAction(str) { @Override public void actionPerformed(ActionEvent e) { addObject(str); } })); } } { popupResidental=new JPopupMenu(); for (int i=0;i< residentalItems.size();i++ ) { String str=residentalItems.get(i); popupResidental.add(new JMenuItem(new AbstractAction(str) { @Override public void actionPerformed(ActionEvent e) { addObject(str); } })); } } { popupPowerflow=new JPopupMenu(); for (int i=0;i< powerflowItems.size();i++ ) { String str=powerflowItems.get(i); popupPowerflow.add(new JMenuItem(new AbstractAction(str) { @Override public void actionPerformed(ActionEvent e) { addObject(str); } })); } } { popupGenerator=new JPopupMenu(); for (int i=0;i< generatorItems.size();i++ ) { String str=generatorItems.get(i); popupGenerator.add(new JMenuItem(new AbstractAction(str) { @Override public void actionPerformed(ActionEvent e) { addObject(str); } })); } } //ToolBox listeners residentalModule.addMouseListener(new MouseAdapter() { public void mouseClicked(MouseEvent evt) { popupResidental.show(residentalModule,residentalModule.getWidth()/2,residentalModule.getHeight()/2); } });
powerflowModule.addMouseListener(new MouseAdapter() { public void mouseClicked(MouseEvent evt) { popupPowerflow.show(powerflowModule,powerflowModule.getWidth()/2,powerflowModule.getHeight()/2); } }); generatorModule.addMouseListener(new MouseAdapter() { public void mouseClicked(MouseEvent evt) { popupGenerator.show(generatorModule, generatorModule.getWidth()/2, generatorModule.getHeight()/2); } }); tapeModule.addMouseListener(new MouseAdapter() { public void mouseClicked(MouseEvent evt) { popupTape.show(tapeModule, tapeModule.getWidth()/2, tapeModule.getHeight()/2); } }); clockModule.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { addObject("Clock"); } }); } private void addObject(String object){ objectCount++; addedObjectsItems.addElement(object+" "+objectCount); switch (object){ case "Clock": objectTable.put("Clock" + " " + objectCount, new Clock()); addImageToPanel(object);break; case "Player": objectTable.put((object+" "+objectCount),new Player()); addImageToPanel(object);break; case "Recorder": objectTable.put(object+" "+objectCount,new Recorder()); addImageToPanel(object);break; case "Inverter": objectTable.put(object+" "+objectCount,new Inverter()); addImageToPanel(object);break; case "Solar": objectTable.put(object+" "+objectCount,new Solar()); addImageToPanel(object);break; case "House": objectTable.put(object+" "+objectCount,new House()); addImageToPanel(object);break; case "Dishwasher": objectTable.put(object+" "+objectCount,new Dishwasher()); addImageToPanel(object);break; case "Clotheswasher": objectTable.put(object+" "+objectCount,new Clotheswasher()); addImageToPanel(object);break; case "Lights": objectTable.put(object+" "+objectCount,new Lights()); addImageToPanel(object);break; case "Microwave": objectTable.put(object+" "+objectCount,new Microwave()); addImageToPanel(object);break; case "Occupants": objectTable.put(object+" "+objectCount,new Occupants()); addImageToPanel(object);break; case "Plugs": objectTable.put(object+" "+objectCount,new Plugs()); addImageToPanel(object);break; case "Refrigerator": objectTable.put(object+" "+objectCount,new Refrigerator()); addImageToPanel(object);break; case "Waterheat": objectTable.put(object+" "+objectCount,new Waterheat()); addImageToPanel(object);break; case "Capacitor": objectTable.put(object+" "+objectCount,new Capacitor()); addImageToPanel(object);break; case "Fuse": objectTable.put(object+" "+objectCount,new Fuse()); addImageToPanel(object);break; case "LineConfiguration": objectTable.put(object+" "+objectCount,new LineConfiguration()); addImageToPanel(object);break; case "LineSpacing": objectTable.put(object+" "+objectCount,new LineSpacing()); addImageToPanel(object);break; case "Load": objectTable.put(object+" "+objectCount,new Load()); addImageToPanel(object);break; case "Meter": objectTable.put(object+" "+objectCount,new Meter()); addImageToPanel(object);break; case "Node": objectTable.put(object+" "+objectCount,new Node()); addImageToPanel(object);break; case "OverheadLine": objectTable.put(object+" "+objectCount,new OverheadLine()); addImageToPanel(object);break; case "OverheadLineConductor": objectTable.put(object+" "+objectCount,new OverheadLineConductor()); addImageToPanel(object);break; case "Regulator": objectTable.put(object+" "+objectCount,new Regulator()); addImageToPanel(object);break; case "RegulatorConfiguration": objectTable.put(object+" "+objectCount,new RegulatorConfiguration()); addImageToPanel(object);break; case "Switch": objectTable.put(object+" "+objectCount,new 
Switch()); addImageToPanel(object);break; case "Transformer": objectTable.put(object+" "+objectCount,new Transformer()); addImageToPanel(object);break; case "TransformerConfiguration": objectTable.put(object+" "+objectCount,new TransformerConfiguration()); addImageToPanel(object);break; case "TriplexLine": objectTable.put(object+" "+objectCount,new TriplexLine()); addImageToPanel(object);break; case "TriplexLineConductor": objectTable.put(object+" "+objectCount,new TriplexLineConductor()); addImageToPanel(object);break; case "TriplexLineConfiguration": objectTable.put(object+" "+objectCount,new TriplexLineConfiguration()); addImageToPanel(object);break; case "TriplexMeter": objectTable.put(object+" "+objectCount,new TriplexMeter()); addImageToPanel(object);break; case "TriplexNode": objectTable.put(object+" "+objectCount,new TriplexNode()); addImageToPanel(object);break; case "UndergroundLine": objectTable.put(object+" "+objectCount,new UndergroundLine()); addImageToPanel(object);break; case "UnderGroundLineConductor": objectTable.put(object+" "+objectCount,new UnderGroundLineConductor()); addImageToPanel(object);break; default: System.out.println("not know type: "+object); } Vector<Property> noweProperty=new Vector<Property>(); System.out.println("created"+object+" "+objectCount); for (Property p:objectTable.get(object+" "+objectCount).GetProperties()) { //System.out.println(p.toString()); if(p.GetName().equals("name")){ Property k=new Property("name", object+" "+objectCount, ""); // System.out.println("ustawiam imie:"+k.GetValue()); noweProperty.add(k); }else{ noweProperty.add(p); } } objectTable.get(object+" "+objectCount).SetProperty(noweProperty); } public void addImageToPanel(String obj){ String object = obj; JLabel objectLabel=new JLabel(objectTable.get(object+" "+objectCount).getIcon()); objectLabel.setName(object+" "+objectCount); MyMouseAdapter myMouseAdapter=new MyMouseAdapter(); objectLabel.addMouseListener(myMouseAdapter); objectLabel.addMouseMotionListener(myMouseAdapter); objectLabel.setText(object+" "+objectCount); imagesTable.put(object+" "+objectCount,objectLabel); drag_drop.add(imagesTable.get(object+" "+objectCount)); drag_drop.revalidate(); drag_drop.repaint(); map.put(object+" "+objectCount,objectLabel.getLocation()); /*objectLabel.addMouseListener(new MouseAdapter(){ });*/ objectLabel.addMouseListener(new MouseAdapterClick()); } public void removeImageFromPanel(String obj){ drag_drop.remove(imagesTable.get(obj)); System.out.println("*************"+obj+" liczba conn "+listOfConn.size()); for(int i=0;i<listOfConn.size();i++){ if(listOfConn.get(i).getParentJLabel().getName().equals(obj) || listOfConn.get(i).getChildJLabel().getName().equals(obj)){ listOfConn.remove(listOfConn.get(i)); } } //System.out.println("liczba conn"+listOfConn.size()+" " + listOfConn.get(0).getParentJLabel().getName()); //listOfConn.remove(listOfConn.get(0)); System.out.println("liczba conn "+listOfConn.size()); /* for (ParentChild temp:listOfConn){ if(temp.Conntain(imagesTable.get(obj))){ listOfConn.remove(temp); } }*/ imagesTable.remove(obj); drag_drop.revalidate(); drag_drop.repaint(); } private void clearMemory(){ objectCount=0; currentObject=0; objectsItems.clear(); objectTable.clear(); imagesTable.clear(); drag_drop.removeAll(); drag_drop.revalidate(); drag_drop.repaint(); addedObjectsItems.clear(); propertiesItems.clear(); textFieldsGlobal=null; propertiesPanel.setViewportView(new JPanel()); propertiesPanel.revalidate(); propertiesPanel.repaint(); consoleOutput.setText(""); 
listOfConn.clear(); map.clear(); } /* private String[] showGUIForConnectionsMaking(){ textFieldsGlobal=null; JPanel panel = new JPanel(); panel.setLayout(new GridLayout(map.size()+1,2)); JLabel coto=new JLabel("parent group -child group"); panel.add(coto);panel.add(new JLabel("")); ButtonGroup group1 = new ButtonGroup(); ButtonGroup group2 = new ButtonGroup(); final String nodes[] = new String[2]; Set<String> keySet = map.keySet(); for (String name : keySet) { JRadioButton rButton = new JRadioButton(name); rButton.setActionCommand(map.get(name).x+","+map.get(name).y); rButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent evt) { nodes[0] = ((JRadioButton)evt.getSource()).getActionCommand(); } }); group1.add(rButton); panel.add(rButton); JRadioButton rButton1 = new JRadioButton(name); rButton1.setActionCommand(map.get(name).x+","+map.get(name).y); rButton1.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent evt) { nodes[1] = ((JRadioButton)evt.getSource()).getActionCommand(); } }); group2.add(rButton1); panel.add(rButton1); } JOptionPane.showMessageDialog(MainWindow.this,panel,"Choose parents for childs",JOptionPane.INFORMATION_MESSAGE); return nodes; }*/ private String[] showGUIForConnectionsMaking(String info1,String info2){ textFieldsGlobal=null; JPanel panel = new JPanel(); panel.setLayout(new GridLayout(map.size()+1,2)); JLabel coto=new JLabel(info1); panel.add(coto);panel.add(new JLabel("")); ButtonGroup group1 = new ButtonGroup(); ButtonGroup group2 = new ButtonGroup(); final String nodes[] = new String[2]; Set<String> keySet = map.keySet(); for (String name : keySet) { JRadioButton rButton = new JRadioButton(name); rButton.setActionCommand(map.get(name).x+","+map.get(name).y); rButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent evt) { nodes[0] = ((JRadioButton)evt.getSource()).getActionCommand(); } }); group1.add(rButton); panel.add(rButton); JRadioButton rButton1 = new JRadioButton(name); rButton1.setActionCommand(map.get(name).x+","+map.get(name).y); rButton1.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent evt) { nodes[1] = ((JRadioButton)evt.getSource()).getActionCommand(); } }); group2.add(rButton1); panel.add(rButton1); } JOptionPane.showMessageDialog(MainWindow.this,panel,info2,JOptionPane.INFORMATION_MESSAGE); return nodes; } private String[] showGUIForConnectionsFromToMaking(String info1,String info2){ textFieldsGlobal=null; JPanel panel = new JPanel(); panel.setLayout(new GridLayout(map.size()+1,2)); JLabel coto=new JLabel(info1); panel.add(coto);panel.add(new JLabel(""));panel.add(new JLabel("")); ButtonGroup group1 = new ButtonGroup(); ButtonGroup group2 = new ButtonGroup(); ButtonGroup group3 = new ButtonGroup(); final String nodes[] = new String[3]; Set<String> keySet = map.keySet(); for (String name : keySet) { JRadioButton rButton = new JRadioButton(name); rButton.setActionCommand(map.get(name).x+","+map.get(name).y); rButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent evt) { nodes[0] = ((JRadioButton)evt.getSource()).getActionCommand(); } }); group1.add(rButton); panel.add(rButton); JRadioButton rButton1 = new JRadioButton(name); rButton1.setActionCommand(map.get(name).x+","+map.get(name).y); rButton1.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent evt) { nodes[1] = ((JRadioButton)evt.getSource()).getActionCommand(); } }); group2.add(rButton1); panel.add(rButton1); 
JRadioButton rButton2 = new JRadioButton(name); rButton2.setActionCommand(map.get(name).x+","+map.get(name).y); rButton2.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent evt) { nodes[2] = ((JRadioButton)evt.getSource()).getActionCommand(); } }); group3.add(rButton2); panel.add(rButton2); } JOptionPane.showMessageDialog(MainWindow.this,panel,info2,JOptionPane.INFORMATION_MESSAGE); return nodes; } class MyMouseAdapter extends MouseAdapter { private Point initialLoc; private Point initialLocOnScreen; @Override public void mousePressed(MouseEvent e) { Component comp = (Component) e.getSource(); initialLoc = comp.getLocation(); initialLocOnScreen = e.getLocationOnScreen(); } @Override public void mouseReleased(MouseEvent e) { Component comp = (Component) e.getSource(); Point locOnScreen = e.getLocationOnScreen(); int x = locOnScreen.x - initialLocOnScreen.x + initialLoc.x; int y = locOnScreen.y - initialLocOnScreen.y + initialLoc.y; comp.setLocation(x, y); map.put(((JLabel)comp).getText(),new Point(x,y)); drag_drop.repaint(); } @Override public void mouseDragged(MouseEvent e) { Component comp = (Component) e.getSource(); Point locOnScreen = e.getLocationOnScreen(); int x = locOnScreen.x - initialLocOnScreen.x + initialLoc.x; int y = locOnScreen.y - initialLocOnScreen.y + initialLoc.y; comp.setLocation(x, y); map.put(((JLabel)comp).getText(),new Point(x,y)); drag_drop.repaint(); } } class MouseAdapterClick extends MouseAdapter{ public void mouseClicked(MouseEvent evt) { Component comp = (Component) evt.getSource(); JLabel jb = (JLabel) comp; if (evt.getClickCount() == 1) { if (textFieldsGlobal != null) { ToGLMParser value = objectTable.get(stringCurrentObject); int propAmount = value.GetProperties().size(); for (int i = 0; i < propAmount; i++) { value.GetProperties().get(i).SetValue(textFieldsGlobal[i].getText()); } } stringCurrentObject = jb.getName(); ToGLMParser value = objectTable.get(stringCurrentObject); int propAmount = value.GetProperties().size(); JPanel params = new JPanel(); params.setLayout(new GridBagLayout()); GridBagConstraints gbc = new GridBagConstraints(); params.setMinimumSize(new Dimension(300, 50)); params.setMaximumSize(new Dimension(300, 2500)); JLabel labels[] = new JLabel[propAmount]; JTextField textfields[] = new JTextField[propAmount]; for (int i = 0; i < propAmount; i++) { labels[i] = new JLabel(value.GetProperties().get(i).GetName()); textfields[i] = new JTextField(value.GetProperties().get(i).GetValue()); gbc.ipadx = 100; gbc.gridx = 0; gbc.gridy = i; params.add(labels[i], gbc); gbc.gridx = 1; gbc.gridy = i; params.add(textfields[i], gbc); } params.setPreferredSize(params.getPreferredSize()); //labelsGlobal=labels; textFieldsGlobal = textfields; // JScrollPane scrollPanel = new JScrollPane(params); propertiesPanel.setViewportView(params); // propertiesPanel.add(params); propertiesPanel.revalidate(); propertiesPanel.repaint(); } } } ///do rysowania liniii private class MyJPanel extends JPanel//Creater your own JPanel and override paintComponentMethod. 
{ @Override public void paintComponent(Graphics g) { super.paintComponent(g); for (ParentChild pair : listOfConn ) { JLabel label1 = pair.getParentJLabel(); JLabel label2 = pair.getChildJLabel(); Point point1 = label1.getLocation(); Point point2 = label2.getLocation(); int i = pair.howToDrawLine(); if ( i == 1) { g.drawLine(point1.x , point1.y + label1.getHeight() / 2 , point2.x + label2.getWidth() , point2.y + label2.getHeight() / 2); } else if (i == 2) { g.drawLine(point2.x , point2.y + label2.getHeight() / 2 , point1.x + label1.getWidth() , point1.y + label1.getHeight() / 2); } else if (i == 3) { g.drawLine(point1.x + label1.getWidth() / 2 , point1.y , point2.x + label2.getWidth() / 2, point2.y + label2.getHeight()); } else if (i == 4) { g.drawLine(point2.x + label2.getWidth() / 2 , point2.y , point1.x + label1.getWidth() / 2, point1.y + label1.getHeight()); } } } } private File loadFile() { int returnVal = fileChooser.showOpenDialog(mainFrame); File fileSelected = null; if (returnVal == JFileChooser.APPROVE_OPTION) { fileSelected = fileChooser.getSelectedFile(); } return fileSelected; } private File saveFile(String extension){ int returnVal=fileChooser.showSaveDialog(mainFrame); File fileSelected=null; if(returnVal==JFileChooser.APPROVE_OPTION){ fileSelected= fileChooser.getSelectedFile(); } return new File(fileSelected+extension); } }
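A minimal launcher sketch for the window above; the MainWindow constructor already builds and shows the frame, so the entry point only needs to construct it on the event dispatch thread. The Main class name is an assumption, not necessarily the project's real entry point.

package gridlab.View;

import javax.swing.SwingUtilities;

// Assumed entry point for illustration.
public class Main {
    public static void main(String[] args) {
        // construct on the EDT; the MainWindow constructor calls setVisible(true) itself
        SwingUtilities.invokeLater(() -> new MainWindow());
    }
}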