answer stringlengths 17 10.2M |
|---|
package org.exist.xquery.functions.util;
import org.exist.dom.QName;
import org.exist.security.UUIDGenerator;
import org.exist.xquery.Cardinality;
import org.exist.xquery.Function;
import org.exist.xquery.FunctionSignature;
import org.exist.xquery.XPathException;
import org.exist.xquery.XQueryContext;
import org.exist.xquery.value.Item;
import org.exist.xquery.value.Sequence;
import org.exist.xquery.value.SequenceType;
import org.exist.xquery.value.StringValue;
import org.exist.xquery.value.Type;
/**
*
* @author wessels
*/
public class UUID extends Function {
public final static FunctionSignature signature =
new FunctionSignature(
new QName("uuid", UtilModule.NAMESPACE_URI, UtilModule.PREFIX),
"Generate a Universally Unique Identifier string.",
FunctionSignature.NO_ARGS,
new SequenceType(Type.STRING, Cardinality.EXACTLY_ONE));
public UUID(XQueryContext context) {
super(context, signature);
}
/* (non-Javadoc)
* @see org.exist.xquery.Expression#eval(org.exist.dom.DocumentSet, org.exist.xquery.value.Sequence, org.exist.xquery.value.Item)
*/
public Sequence eval(Sequence contextSequence, Item contextItem)
throws XPathException {
String uuid = UUIDGenerator.getUUID();
if(uuid==null) {
throw new XPathException("Could not create UUID.");
}
return new StringValue(uuid);
}
} |
package org.hfoss.posit.android;
// NOTE: for now the barcode scanner and the base64coder has been commented out at the following lines:
// 37, 206, 207, 216, and 279-281
import java.net.BindException;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.UUID;
import org.hfoss.adhoc.AdhocData;
import org.hfoss.adhoc.AdhocFind;
import org.hfoss.adhoc.AdhocService;
//import org.hfoss.adhoc.Queues;
//import org.hfoss.adhoc.UdpSender;
//import org.hfoss.posit.android.adhoc.RWGService;
import org.hfoss.posit.android.provider.PositDbHelper;
import org.hfoss.posit.android.utilities.ImageAdapter;
import org.hfoss.posit.android.utilities.Utils;
//import org.hfoss.posit.rwg.RwgSender;
import org.json.JSONException;
import org.json.JSONObject;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.ActivityNotFoundException;
import android.content.ComponentName;
import android.content.ContentValues;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.SharedPreferences.Editor;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.graphics.Bitmap;
import android.location.Criteria;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.preference.PreferenceManager;
import android.provider.Settings;
import android.text.Editable;
import android.text.TextWatcher;
import android.util.Log;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.AdapterView;
import android.widget.Button;
import android.widget.EditText;
import android.widget.Gallery;
import android.widget.ImageButton;
import android.widget.TextView;
import android.widget.AdapterView.OnItemClickListener;
/**
* Handles both adding new finds and editing existing finds.
* Includes adding and editing name, description, and barcode_id. Also allows user
* to attach photos to the find, as well as delete everything.
*
*/
public class FindActivity extends Activity
implements OnClickListener, OnItemClickListener, LocationListener {
// Sentinel stored in mProvider when no enabled location provider exists (see setLocationProvider).
private static final String NO_PROVIDER = "No location service";
private Find mFind;        // the Find being edited; null until doEditAction()/doSave() creates it
private long mFindId;      // DB row id of the find; 0 is treated as "new find" (see displayGallery)
private String mFindGuId = null;  // NOTE(review): never assigned in this chunk — possibly vestigial
private int mState;        // STATE_EDIT or STATE_INSERT
private Gallery mGallery;  // thumbnail strip at the bottom of the view
private static boolean NEWFIND=true;  // NOTE(review): unused in this chunk
private String imageBase64String = null;  // NOTE(review): only referenced from commented-out code here
private boolean stopThread;  // set true in onStop() to end MyThreadRunner's loop
//Temporary files representing pictures taken for a find
//but not yet added to the database
private ArrayList<Bitmap> mTempBitmaps = new ArrayList<Bitmap>();
// ContentValues rows for images already associated with this find in the DB
private ArrayList<ContentValues> mImagesData = new ArrayList<ContentValues>();
//Uris of new images and thumbnails being attached to the find
private List<Uri> mNewImageUris = new LinkedList<Uri>();
private List<Uri> mNewImageThumbnailUris = new LinkedList<Uri>();
// Last known coordinates, pushed to the UI via updateHandler
private double mLongitude = 0;
private double mLatitude = 0;
private SharedPreferences sp;  // default shared preferences (PROJECT_ID, gpsAlertDisabled, ...)
private TextView mLatitudeTextView;
private TextView mLongitudeTextView;
private Thread mThread;  // runs MyThreadRunner
private LocationManager mLocationManager;
// Values loaded from the DB by displayContentInView (cached for comparison)
private String valueName;
private String valueDescription;
private String valueId;
private boolean isClean = true;  // persisted across config changes via onSaveInstanceState
// Global dirty flag flipped by textChangedWatcher; checked in onKeyDown before exit
public static boolean SAVE_CHECK=false;
public static int PROJECT_ID;  // read from shared preferences in onCreate
// private static boolean IS_ADHOC = false;
public int INTENT_CHECK=0;// anybody finds more suitable ways please change it
// Activity states and startActivityForResult request codes
public static final int STATE_EDIT = 1;
public static final int STATE_INSERT= 2;
public static final int BARCODE_READER= 3;
public static final int CAMERA_ACTIVITY= 4;
public static final int NEW_FIND_CAMERA_ACTIVITY = 7;
public static final int SYNC_ACTIVITY= 12;
public static final int IMAGE_VIEW = 13;
private static final String TAG = "FindActivity";
// Dialog ids (onCreateDialog) and handler message ids (updateHandler)
private static final int CONFIRM_DELETE_DIALOG = 0;
private static final int UPDATE_LOCATION = 2;
private static final int CONFIRM_EXIT=3;
private static final int CONFIRM_SAVE_EMPTY_FIND=4;
// Passed to LocationManager.getProviders: only return enabled providers
private static final boolean ENABLED_ONLY = true;
private static final int THUMBNAIL_TARGET_SIZE = 320;  // NOTE(review): unused in this chunk
private Context mContext;
/* Listener attached to the editable fields: any text change flips the global
 * SAVE_CHECK dirty flag so onKeyDown can prompt before discarding edits. */
private TextWatcher textChangedWatcher= new TextWatcher(){
    public void afterTextChanged(Editable s){
        // Mark the form dirty on any edit.
        SAVE_CHECK=true;
        Log.i(TAG, "Text Changed called");
    }
    public void beforeTextChanged(CharSequence arg0,
        int arg1, int arg2, int arg3) {
        //needed by the class (no-op)
    }
    public void onTextChanged(CharSequence s, int start,
        int before, int count) {
        //needed by the class (no-op)
    }};
// UI-thread handler: on UPDATE_LOCATION messages it refreshes the lat/long
// TextViews from the mLatitude/mLongitude fields. Messages with any other
// 'what' value fall through to the default handling.
Handler updateHandler = new Handler() {
    /** Gets called on every message that is received */
    public void handleMessage(Message msg) {
        switch (msg.what) {
        case UPDATE_LOCATION: {
            // Leading space matches the layout's label formatting.
            mLatitudeTextView.setText(" " + mLatitude);
            mLongitudeTextView.setText(" " + mLongitude);
            break;
        }
        }
        super.handleMessage(msg);
    }
};
// Location provider chosen by setLocationProvider (GPS, network, or NO_PROVIDER).
private String mProvider;
// True once location updates have been registered (presumably set where
// requestLocationUpdates is called — not visible in this chunk); onStop
// only calls removeUpdates when this is set.
private boolean gettingLocationUpdates= false;
/**
 * Sets up the FindActivity: loads preferences, inflates the layout, hides the
 * currently-unused barcode widgets (IDs are assigned via UUID instead), and
 * dispatches to edit or insert mode based on the launching Intent's action.
 *
 * @param savedInstanceState (not currently used) is to restore state.
 */
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    mContext = this;
    sp = PreferenceManager.getDefaultSharedPreferences(this);
    PROJECT_ID = sp.getInt("PROJECT_ID", 0);
    isClean = true;

    final Intent intent = getIntent();
    String action = intent.getAction();

    setContentView(R.layout.add_find);
    mLatitudeTextView = (TextView) findViewById(R.id.latitudeText);
    mLongitudeTextView = (TextView) findViewById(R.id.longitudeText);
    mGallery = (Gallery) findViewById(R.id.picturesTaken);

    // Barcode scanning is disabled for now (IDs come from a UUID), so all
    // barcode-related widgets are wired up but hidden.
    Button scanButton = (Button) findViewById(R.id.idBarcodeButton);
    scanButton.setOnClickListener(this);
    scanButton.setVisibility(TextView.GONE);
    TextView barcodeError = (TextView) findViewById(R.id.barcodeReaderError);
    Button barcodeDownload = (Button) findViewById(R.id.barcodeDownloadButton);
    TextView barcodeRestart = (TextView) findViewById(R.id.barcodeReaderRestart);
    barcodeDownload.setOnClickListener(this);
    barcodeError.setVisibility(TextView.GONE);
    barcodeDownload.setVisibility(Button.GONE);
    barcodeRestart.setVisibility(TextView.GONE);

    // BUG FIX: Intent.getAction() may return null (e.g. an explicit Intent
    // with no action set); compare constant-first to avoid an NPE.
    if (Intent.ACTION_EDIT.equals(action)) {
        doEditAction();
        INTENT_CHECK = 1;
    } else if (Intent.ACTION_INSERT.equals(action)) {
        doInsertAction();
    }

    // Dedicated save and take-picture buttons make working with finds
    // simpler and more intuitive.
    ImageButton saveButton = (ImageButton) findViewById(R.id.idSaveButton);
    saveButton.setOnClickListener(this);
    ImageButton takePictureButton = (ImageButton) findViewById(R.id.idTakePictureButton);
    takePictureButton.setOnClickListener(this);
} // onCreate()
/**
 * Restores the pending (not-yet-saved) photos and the dirty flag persisted
 * by onSaveInstanceState, then refreshes the gallery for this find.
 */
@Override
protected void onRestoreInstanceState(Bundle savedInstanceState) {
    mTempBitmaps = savedInstanceState.getParcelableArrayList("bitmaps");
    isClean = savedInstanceState.getBoolean("isclean");
    displayGallery(mFindId);
    // displayGallery(mFindGuId==null); // New find
    super.onRestoreInstanceState(savedInstanceState);
}
/**
 * Persists the photos not yet written to the database and the dirty flag so
 * onRestoreInstanceState can rebuild the in-progress find.
 */
@Override
protected void onSaveInstanceState(Bundle outState) {
    outState.putParcelableArrayList("bitmaps", mTempBitmaps);
    outState.putBoolean("isclean", isClean);
    super.onSaveInstanceState(outState);
}
/**
 * Checks whether any installed Activity can handle the given Intent action,
 * so callers can disable UI (e.g. the "Read Barcode" button, which needs the
 * downloadable ZXing scanner) before attempting to start it.
 *
 * @param context any Context, used to reach the PackageManager
 * @param action  the Intent action string to probe
 * @return true iff at least one activity resolves the action
 */
public static boolean isIntentAvailable(Context context, String action) {
    final PackageManager packageManager = context.getPackageManager();
    final List<ResolveInfo> handlers = packageManager.queryIntentActivities(
            new Intent(action), PackageManager.MATCH_DEFAULT_ONLY);
    // isEmpty() states the intent more directly than size() > 0.
    return !handlers.isEmpty();
}
/**
 * Prepares the form for inserting a new Find: stamps the current time,
 * assigns a random UUID as the id (shown abbreviated, full id hidden),
 * clears name/description, and starts GPS tracking.
 */
public void doInsertAction() {
    mState = STATE_INSERT;
    Log.i(TAG, "doInsertAction");

    TextView timeField = (TextView) findViewById(R.id.timeText);
    timeField.setText(getDateText());
    timeField.addTextChangedListener(textChangedWatcher);

    // Instead of reading a bar code, ids are randomly assigned via a UUID.
    EditText idField = (EditText) findViewById(R.id.idText);
    idField.setText(UUID.randomUUID().toString());
    idField.addTextChangedListener(textChangedWatcher);
    // The full id stays hidden from the user.
    idField.setVisibility(EditText.GONE);

    // Display only an 8-character prefix of the id.
    TextView idLabel = (TextView) findViewById(R.id.idNumberText);
    idLabel.setText(idField.getText().toString().substring(0, 8) + " ...");
    idLabel.setVisibility(TextView.VISIBLE);

    TextView nameField = (TextView) findViewById(R.id.nameText);
    nameField.setText("");
    nameField.addTextChangedListener(textChangedWatcher);

    TextView descriptionField = (TextView) findViewById(R.id.descriptionText);
    descriptionField.setText("");
    descriptionField.addTextChangedListener(textChangedWatcher);

    initializeLocationAndStartGpsThread();
}
/**
 * Seeds the Find's location from the last known fix and starts a background
 * thread (MyThreadRunner) that periodically pings the UI handler.
 */
private void initializeLocationAndStartGpsThread() {
    mLocationManager = (LocationManager) getSystemService(LOCATION_SERVICE);
    List<String> providers = mLocationManager.getProviders(ENABLED_ONLY);
    if(Utils.debug)
        Log.i(TAG, "Enabled providers = " + providers.toString());
    // NOTE(review): 'provider' is only used in the debug log line below;
    // the best-provider choice has no other effect here.
    String provider = mLocationManager.getBestProvider(new Criteria(),ENABLED_ONLY);
    if(Utils.debug)
        Log.i(TAG, "Best provider = " + provider);
    // null asks setCurrentGpsLocation to fall back to a stored fix
    // (its implementation continues beyond this chunk).
    setCurrentGpsLocation(null);
    mThread = new Thread(new MyThreadRunner());
    stopThread = false;
    mThread.start();
}
/**
 * Chooses a location provider, preferring GPS over the network provider.
 * Shows a toast and returns false when neither is enabled.
 *
 * @return true iff a usable provider was found (stored in mProvider)
 */
private boolean setLocationProvider() {
    Log.i(TAG, "setLocationProvider...()");
    mLocationManager = (LocationManager) this.getSystemService(Context.LOCATION_SERVICE);
    List<String> enabledProviders = mLocationManager.getProviders(ENABLED_ONLY);

    if (enabledProviders.contains(LocationManager.GPS_PROVIDER)) {
        mProvider = LocationManager.GPS_PROVIDER;
    } else if (enabledProviders.contains(LocationManager.NETWORK_PROVIDER)) {
        mProvider = LocationManager.NETWORK_PROVIDER;
    } else {
        mProvider = NO_PROVIDER;
    }

    if (NO_PROVIDER.equals(mProvider)) {
        Utils.showToast(this, "Aborting Add Find: " +
            NO_PROVIDER + "\nYou must have GPS enabled. ");
        return false;
    }
    Log.i(TAG, "setLocationProvider()= " + mProvider);
    return true;
}
/**
 * Background runnable that periodically nudges the UI handler so the
 * latitude/longitude fields reflect the latest fix. Loops until onStop()
 * sets stopThread.
 */
class MyThreadRunner implements Runnable {
    public void run() {
        while (!stopThread) {
            // BUG FIX: the message previously carried what == 0, which the
            // handler's switch ignores (it only handles UPDATE_LOCATION), so
            // this loop never actually refreshed the UI.
            Message m = Message.obtain();
            m.what = UPDATE_LOCATION;
            FindActivity.this.updateHandler.sendMessage(m);
            try {
                Thread.sleep(5);
            } catch (InterruptedException e) {
                // Preserve the interrupt status so the loop can be ended.
                Thread.currentThread().interrupt();
            }
        }
    }
}
/**
 * Loads an existing Find from the DB into the form for editing. The Find's
 * location and time stamp are not updated here. Falls back to insert mode
 * when no row is found for the id.
 */
private void doEditAction() {
    mState = STATE_EDIT;
    mFindId = getIntent().getLongExtra(PositDbHelper.FINDS_ID, 0);
    Log.i(TAG,"Find id = " + mFindId);
    // Instantiate a find object and retrieve its data from the DB.
    mFind = new Find(this, mFindId);
    ContentValues values = mFind.getContent();
    if (values == null) {
        Utils.showToast(this, "No values found for Find " + mFindId);
        // NOTE(review): switching to insert mode here does not start the GPS
        // thread the way doInsertAction() does — confirm this is intended.
        mState = STATE_INSERT;
    } else {
        mFind.setGuid(values.getAsString(PositDbHelper.FINDS_GUID));
        displayContentInView(values);
    }
    EditText idView = (EditText) findViewById(R.id.idText);
    TextView idText = (TextView) findViewById(R.id.idNumberText);
    // BUG FIX: substring(0, 8) threw StringIndexOutOfBoundsException when the
    // stored guid was shorter than 8 characters (e.g. legacy or empty ids).
    String guid = idView.getText().toString();
    String prefix = guid.length() >= 8 ? guid.substring(0, 8) : guid;
    idText.setText(prefix + " ...");
    idText.setVisibility(TextView.VISIBLE);
    displayGallery(mFindId);
}
/** No resume-specific work; defers entirely to the default Activity behavior. */
@Override
protected void onResume() {
    super.onResume();
}
/** No pause-specific work; the commented call below is kept for reference. */
@Override
protected void onPause(){
    super.onPause();
    //finishActivity(ListFindsActivity.FIND_FROM_LIST);
}
/**
 * Unregisters location updates (if any were registered) and signals the
 * GPS background thread (MyThreadRunner) to exit its loop.
 */
@Override
protected void onStop() {
    super.onStop();
    if (gettingLocationUpdates){
        mLocationManager.removeUpdates(this);
    }
    // Ends MyThreadRunner's while-loop.
    stopThread = true;
    // mDbHelper.close();
}
/* (non-Javadoc)
 * @see android.app.Activity#finish()
 */
// NOTE(review): overrides finish() without adding behavior; kept as-is,
// possibly as a debugging hook.
@Override
public void finish() {
    super.finish();
}
/** No destroy-specific work; the DB-helper close call is left commented out. */
@Override
protected void onDestroy() {
    super.onDestroy();
    // mDbHelper.close();
}
/**
 * Invoked by showDialog() the first time each dialog id is requested. Builds
 * one of three dialogs: confirm-delete, confirm-exit (save / close without
 * saving / cancel), or confirm-save-empty-find.
 *
 * @param id CONFIRM_DELETE_DIALOG, CONFIRM_EXIT, or CONFIRM_SAVE_EMPTY_FIND
 * @return the constructed dialog, or null for an unknown id
 */
@Override
protected Dialog onCreateDialog(int id) {
    switch (id) {
    case CONFIRM_DELETE_DIALOG:
        return new AlertDialog.Builder(this)
        .setIcon(R.drawable.alert_dialog_icon)
        .setTitle(R.string.alert_dialog_2)
        .setPositiveButton(R.string.alert_dialog_ok, new DialogInterface.OnClickListener() {
            public void onClick(DialogInterface dialog, int whichButton) {
                // User confirmed: delete the find and close on success.
                if (mFind.delete()) // Assumes find was instantiated in onCreate
                {
                    Utils.showToast(FindActivity.this, R.string.deleted_from_database);
                    finish();
                } else {
                    Utils.showToast(FindActivity.this, R.string.delete_failed);
                }
            }
        }
        )
        .setNegativeButton(R.string.alert_dialog_cancel, new DialogInterface.OnClickListener() {
            public void onClick(DialogInterface dialog, int whichButton) {
                /* User clicked Cancel so do nothing */
            }
        })
        .create();
    case CONFIRM_EXIT:
        Log.i(TAG, "CONFIRM_EXIT dialog");
        return new AlertDialog.Builder(this)
        .setIcon(R.drawable.alert_dialog_icon)
        .setTitle(R.string.check_saving)
        // Positive: save the current form contents (doSave also finishes).
        .setPositiveButton(R.string.save, new DialogInterface.OnClickListener() {
            public void onClick(DialogInterface dialog, int whichButton) {
                Log.i(TAG, "CONFIRM_EXIT setOK onClick");
                ContentValues contentValues = retrieveContentFromView();
                doSave(contentValues);
            }
        })
        // Neutral: discard changes and close the activity.
        .setNeutralButton(R.string.closing, new DialogInterface.OnClickListener() {
            public void onClick(DialogInterface dialog, int whichButton) {
                Log.i(TAG, "CONFIRM_EXIT setNeutral onClick");
                finish();
            }
        })
        .setNegativeButton(R.string.alert_dialog_cancel, new DialogInterface.OnClickListener() {
            public void onClick(DialogInterface dialog, int whichButton) {
                Log.i(TAG, "CONFIRM_EXIT setCancel onClick");
                /* User clicked Cancel so do nothing */
            }
        })
        .create();
    case CONFIRM_SAVE_EMPTY_FIND:
        return new AlertDialog.Builder(this)
        .setIcon(R.drawable.alert_dialog_icon)
        .setTitle(R.string.alert_dialog_save_empty_find)
        .setPositiveButton(R.string.alert_dialog_ok, new DialogInterface.OnClickListener() {
            public void onClick(DialogInterface dialog, int whichButton) {
                // User clicked OK so save Find even though no name/description
                ContentValues contentValues = retrieveContentFromView();
                doSave(contentValues);
            }
        }
        )
        .setNegativeButton(R.string.alert_dialog_cancel, new DialogInterface.OnClickListener() {
            public void onClick(DialogInterface dialog, int whichButton) {
                /* User clicked Cancel so do nothing */
            }
        })
        .create();
    default:
        return null;
    } // switch
}
/**
 * Saves the Find: inserts a new row plus any pending images when in
 * STATE_INSERT, otherwise updates the existing row. Invoked from the menu
 * and from the confirmation dialogs. Finishes the activity on completion
 * (but returns early, without finishing, when the guid is missing).
 */
private void doSave(ContentValues contentValues) {
    Log.i(TAG, "doSave, values= " + contentValues.toString());
    String guid = contentValues.getAsString(PositDbHelper.FINDS_GUID);
    if (guid == null || guid.length() == 0) {
        Log.i(TAG, "doSave, ERROR: null content values");
        return;
    }
    if (mState == STATE_INSERT) {
        // New find: persist the pending photos together with the find.
        mFind = new Find(FindActivity.this, guid);
        List<ContentValues> imageValues = Utils.saveImagesAndUris(this, mTempBitmaps);
        boolean inserted = mFind.insertToDB(contentValues, imageValues);
        if (inserted) {
            Utils.showToast(FindActivity.this, R.string.saved_to_database);
            // Is this correct, shouldn't we be setting the _id based on the
            // result of the insertion?
            mFind.setGuid(contentValues.getAsString(PositDbHelper.FINDS_GUID));
            Log.i(TAG, "doSave, id= " + mFind.getguId());
        } else {
            Utils.showToast(FindActivity.this, R.string.save_failed);
        }
    } else {
        boolean updated = mFind.updateToDB(contentValues);
        Utils.showToast(FindActivity.this,
            updated ? R.string.saved_to_database : R.string.save_failed);
    }
    finish();
}
/**
 * Intercepts the BACK key: when unsaved changes exist (SAVE_CHECK set by
 * textChangedWatcher), shows the confirm-exit dialog instead of leaving the
 * activity immediately.
 *
 * TODO: this needs to be fixed — it doesn't work properly. The
 * INTENT_CHECK-based edit/insert distinction is still unresolved.
 *
 * @param keyCode is an integer representing which key is pressed
 * @param event is a KeyEvent that is not used here
 * @return true when the event was consumed by the dialog
 */
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
    Log.i(TAG,"onKeyDown " + keyCode + " " + KeyEvent.KEYCODE_BACK);
    boolean backWithPendingChanges = keyCode == KeyEvent.KEYCODE_BACK && SAVE_CHECK;
    if (backWithPendingChanges) {
        showDialog(CONFIRM_EXIT);
        return true;
    }
    return super.onKeyDown(keyCode, event);
}
/**
 * Inflates the find menu; the delete item is removed while inserting a new
 * find (there is nothing in the DB to delete yet).
 */
@Override
public boolean onCreateOptionsMenu(Menu menu) {
    getMenuInflater().inflate(R.menu.add_finds_menu, menu);
    if (mState == STATE_INSERT) {
        menu.removeItem(R.id.delete_find_menu_item);
    }
    return true;
} // onCreateOptionsMenu()
/**
 * Handles the menu item actions: save (optionally broadcasting over the
 * ad-hoc network first), discard changes, launch the camera, and delete.
 *
 * @param featureId is unused
 * @param item is the MenuItem selected by the user
 */
@Override
public boolean onMenuItemSelected(int featureId, MenuItem item) {
    Intent intent;
    switch (item.getItemId()) {
    case R.id.save_find_menu_item:
        ContentValues contentValues = retrieveContentFromView();
        // If the adhoc service is running, send the Find through the adhoc network
        if (AdhocService.adhocInstance != null) {
            Log.d(TAG, "Adhoc service is availabe, sending find peer-to-peer");
            boolean success = sendAdhocFind(contentValues,null); //imageBase64String);
            if (success) {
                Log.i(TAG, "Sent adhoc find");
            } else {
                Log.i(TAG, "Failed to send adhoc find");
            }
        }
        // Saved locally regardless of the adhoc outcome; doSave() also
        // finishes the activity.
        doSave(contentValues);
        break;
    case R.id.discard_changes_menu_item:
        if (mState == STATE_EDIT) {
            // Reload the stored values over the edited ones.
            displayContentInView(mFind.getContent());
        } else {
            // New find: drop pending photos and rebuild the blank form.
            mTempBitmaps.clear();
            onCreate(null);
        }
        break;
    case R.id.camera_menu_item:
        intent = new Intent(android.provider.MediaStore.ACTION_IMAGE_CAPTURE);
        intent.putExtra("rowId", mFindId);
        // mFind == null: find not yet saved, so use the request code whose
        // result handler keeps images in memory instead of the DB.
        if (mFind == null) {
            Log.i(TAG,"New Find " + mFindId);
            startActivityForResult(intent, NEW_FIND_CAMERA_ACTIVITY); //camera for new find
        } else {
            Log.i(TAG, "Existing FInd " + mFindId);
            startActivityForResult(intent, CAMERA_ACTIVITY); //camera for existing find
        }
        break;
    case R.id.delete_find_menu_item:
        showDialog(CONFIRM_DELETE_DIALOG);
        break;
    default:
        return false;
    }
    return true;
} // onMenuItemSelected
/**
 * Used with RWG algorithm to transmit finds peer-to-peer through an ad-hoc
 * network.
 *
 * NOTE(review): the actual queueing call (RwgSender.queueUserMessageFromNode)
 * is commented out, so this currently shows the toast, builds the adhoc data,
 * and returns true without sending anything; the image parameter is unused.
 *
 * @param contentValues the find's data
 * @param image base64-encoded photo (currently unused, callers pass null)
 * @return true unless constructing the adhoc data throws
 */
private boolean sendAdhocFind(ContentValues contentValues, String image) {
    Utils.showToast(this, "Sending ad hoc find");
    AdhocFind adhocFind= new AdhocFind(contentValues);
    AdhocData<AdhocFind>adhocData = new AdhocData<AdhocFind>(this,adhocFind);
    try {
        Log.i("Adhoc", "FindActivity.saveAdhoc: Queuing user data for RWG");
        // RwgSender.queueUserMessageFromNode(adhocData);
        return true;
    } catch (Exception e) {
        // Failure is logged only; the method then reports false.
        Log.e(TAG, "Exception");
        e.printStackTrace();
    }
    return false;
}
/**
 * Collects the form fields into a ContentValues keyed by the PositDbHelper
 * column names, marks the find unsynced so it will be pushed to the server,
 * and attaches the current project id. The timestamp column is intentionally
 * omitted — the DB adds it.
 *
 * @return The ContentValues hash table.
 */
private ContentValues retrieveContentFromView() {
    ContentValues result = new ContentValues();

    EditText nameField = (EditText) findViewById(R.id.nameText);
    result.put(PositDbHelper.FINDS_NAME, nameField.getText().toString());

    EditText descriptionField = (EditText) findViewById(R.id.descriptionText);
    result.put(PositDbHelper.FINDS_DESCRIPTION, descriptionField.getText().toString());

    EditText idField = (EditText) findViewById(R.id.idText);
    result.put(PositDbHelper.FINDS_GUID, idField.getText().toString()); // guid

    TextView longitudeField = (TextView) findViewById(R.id.longitudeText);
    result.put(PositDbHelper.FINDS_LONGITUDE, longitudeField.getText().toString());

    TextView latitudeField = (TextView) findViewById(R.id.latitudeText);
    result.put(PositDbHelper.FINDS_LATITUDE, latitudeField.getText().toString());

    // result.put(PositDbHelper.FINDS_TIME, ...): timestamp added by the DB.

    // Mark the find unsynced and tag it with the active project.
    result.put(PositDbHelper.FINDS_SYNCED, PositDbHelper.FIND_NOT_SYNCED);
    result.put(PositDbHelper.FINDS_PROJECT_ID, PROJECT_ID);
    return result;
}
/**
 * Populates the form from a ContentValues hash and caches the loaded
 * name/description/id in valueName/valueDescription/valueId.
 *
 * @param contentValues stores <key, value> pairs
 */
private void displayContentInView(ContentValues contentValues) {
    EditText nameField = (EditText) findViewById(R.id.nameText);
    nameField.setText(contentValues.getAsString(PositDbHelper.FINDS_NAME));
    valueName = nameField.getText().toString();

    EditText descriptionField = (EditText) findViewById(R.id.descriptionText);
    descriptionField.setText(contentValues.getAsString(PositDbHelper.FINDS_DESCRIPTION));
    valueDescription = descriptionField.getText().toString();

    EditText idField = (EditText) findViewById(R.id.idText);
    idField.setText(contentValues.getAsString(PositDbHelper.FINDS_GUID));
    idField.setFocusable(false); // the guid must not be edited by the user
    valueId = idField.getText().toString();

    TextView view = (TextView) findViewById(R.id.timeText);
    if (mState == STATE_EDIT) {
        // Only existing finds have a stored timestamp to display.
        view.setText(contentValues.getAsString(PositDbHelper.FINDS_TIME));
    }
    view = (TextView) findViewById(R.id.longitudeText);
    view.setText(contentValues.getAsString(PositDbHelper.FINDS_LONGITUDE));
    view = (TextView) findViewById(R.id.latitudeText);
    view.setText(contentValues.getAsString(PositDbHelper.FINDS_LATITUDE));
}
/**
 * Handles button clicks: the (currently hidden) barcode scan/download
 * buttons, the take-picture button, and the save button.
 *
 * @param v is the View where the click occurred.
 */
public void onClick(View v) {
    Intent intent;
    switch (v.getId()) {
    // Barcode buttons are GONE (see onCreate) but remain wired up.
    case R.id.idBarcodeButton:
        intent = new Intent("com.google.zxing.client.android.SCAN");
        try {
            startActivityForResult(intent, BARCODE_READER);
        } catch(ActivityNotFoundException e) {
            // Scanner app not installed; just log it.
            Log.e(TAG, e.toString());
        }
        break;
    case R.id.barcodeDownloadButton:
        // Open the Android Market page for the ZXing barcode scanner.
        intent = new Intent(Intent.ACTION_VIEW);
        intent.setData(Uri.parse("market://search?q=pname:com.google.zxing.client.android"));
        startActivity(intent);
        break;
    // idTakePicture and idSave buttons were added to the main view
    // to make posit's use simpler.
    case R.id.idTakePictureButton:
        intent = new Intent(android.provider.MediaStore.ACTION_IMAGE_CAPTURE);
        intent.putExtra("rowId", mFindId);
        // mFind == null: find not yet saved, images are held in memory.
        if (mFind == null) {
            Log.i(TAG,"New Find " + mFindId);
            startActivityForResult(intent, NEW_FIND_CAMERA_ACTIVITY); //camera for new find
        } else {
            Log.i(TAG, "Existing FInd " + mFindId);
            startActivityForResult(intent, CAMERA_ACTIVITY); //camera for existing find
        }
        break;
    case R.id.idSaveButton:
        // The elapsed-time log lines below look like ad-hoc profiling.
        long start;
        Log.i("start",(start=System.currentTimeMillis())+"");
        ContentValues contentValues = retrieveContentFromView();
        Log.i("after retrive", (System.currentTimeMillis()-start)+"");
        // If the adhoc service is running, send the Find through the adhoc network
        if (AdhocService.adhocInstance != null) {
            Log.d(TAG, "Adhoc service is availabe, sending find peer-to-peer");
            sendAdhocFind(contentValues,null);//imageBase64String);
        }
        Log.i("after adhoc check", (System.currentTimeMillis()-start)+"");
        // Confirm before saving a find with neither name nor description.
        String name = contentValues.getAsString(getString(R.string.nameDB));
        String description = contentValues.getAsString(getString(R.string.descriptionDB));
        if( name.equals("") && description.equals("")) {
            showDialog(CONFIRM_SAVE_EMPTY_FIND);
        } else {
            doSave(contentValues);
        }
        break;
    }
}
/**
 * Invoked when one of the Activities started
 * from FindActivity menu, such as the BARCODE_READER or the CAMERA, finishes.
 * It handles the results of the Activities. RESULT_OK == -1, RESULT_CANCELED = 0
 * @param requestCode is the code that launched the sub-activity
 * @param resultCode specifies whether the sub-activity was successful or not
 * @param data is an Intent storing whatever data is passed back by the sub-activity
 */
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    int rowId; // NOTE(review): assigned in the camera cases but never read
    if (resultCode == RESULT_CANCELED) {
        return;
    }
    switch (requestCode) {
    // right now the barcoder reader is invisible, making this case unnecessary
    case BARCODE_READER:
        String value = data.getStringExtra("SCAN_RESULT");
        EditText eText = (EditText) findViewById(R.id.idText);
        eText.setText(value);
        break;
    case CAMERA_ACTIVITY: //for existing find: saves image to db when user clicks "attach"
        rowId = data.getIntExtra("rowId", -1);
        Bitmap tempImage = (Bitmap) data.getExtras().get("data");
        mTempBitmaps.add(tempImage);
        List<ContentValues> imageValues = Utils.saveImagesAndUris(this, mTempBitmaps);
        if (mFind.insertImagesToDB(imageValues)) {
            Utils.showToast(this, R.string.saved_image_to_db);
        } else {
            Utils.showToast(this, R.string.save_failed);
        }
        Log.i(TAG, "onActivityResult, inserted images to DB");
        displayGallery(mFindId);
        // Images are now in the DB; drop the in-memory copies.
        mTempBitmaps.clear();
        break;
    case NEW_FIND_CAMERA_ACTIVITY: //for new finds: stores temporary images in a list
        rowId = data.getIntExtra("rowId", -1);
        // 'tempImage' reuses the variable declared in the CAMERA_ACTIVITY
        // case above — Java switch cases share one scope.
        tempImage = (Bitmap) data.getExtras().get("data");
        //ByteArrayOutputStream baos = new ByteArrayOutputStream();
        //tempImage.compress(Bitmap.CompressFormat.JPEG, 80, baos);
        //imageBase64String = new String(baos.toByteArray());
        mTempBitmaps.add(tempImage);
        displayGallery(mFindId);
        break;
    case IMAGE_VIEW:
        Log.i(TAG, "onActivityResult mFindId = " + mFindId);
        finish();
        break;
    }
}
/**
 * Queries for images for this Find and shows them in a Gallery at the bottom
 * of the View: DB-backed images for an existing find (id != 0), or the
 * in-memory temporary bitmaps for a find not yet saved.
 *
 * @param id is the rowId of the find (0 means a new, unsaved find)
 */
private void displayGallery(long id) {
    Log.i(TAG, "displayGallery mFindId=" + id);
    boolean isExistingFind = id != 0;
    if (isExistingFind) {
        // Select just those images associated with this find.
        mImagesData = mFind.getImagesContentValuesList();
        if (mImagesData.size() == 0) {
            Utils.showToast(this, "No images to display.");
            return;
        }
        finishActivity(FindActivity.IMAGE_VIEW);
        mGallery.setAdapter(new ImageAdapter(mImagesData, this));
        mGallery.setOnItemClickListener(this);
    } else if (mTempBitmaps.size() > 0) {
        finishActivity(FindActivity.IMAGE_VIEW);
        mGallery.setAdapter(new ImageAdapter(this, mTempBitmaps));
        mGallery.setOnItemClickListener(this);
    }
}
/**
 * Detects a tap on a gallery thumbnail and opens ImageViewActivity for the
 * tapped photo: by URI for an already-saved find, or by passing the
 * in-memory Bitmap for a find not yet saved.
 */
public void onItemClick(AdapterView<?> arg0, View arg1, int position, long arg3) {
    if (mFind != null) {
        Log.i(TAG, "onItemClick starting ImageViewActivity for existing find");
        try {
            String s = mImagesData.get(position).getAsString(PositDbHelper.PHOTOS_IMAGE_URI);
            if (s != null) {
                Uri uri = Uri.parse(s);
                Intent intent = new Intent(Intent.ACTION_VIEW, uri, this, ImageViewActivity.class);
                intent.putExtra("position",position);
                intent.putExtra("findId", mFindId);
                Log.i(TAG, "onItemClick mFindId = " + mFindId);
                setResult(RESULT_OK,intent);
                startActivityForResult(intent, IMAGE_VIEW);
            }
        } catch (Exception e) {
            // Broad catch guards against bad positions/URIs; failure is logged only.
            Log.e(TAG, e.toString());
        }
    } else {
        Log.i(TAG, "onItemClick starting ImageViewActivity for new find");
        Bitmap bm = mTempBitmaps.get(position);
        Intent intent = new Intent(this, ImageViewActivity.class);
        intent.putExtra("position",position);
        intent.putExtra("findId", mFindId);
        intent.putExtra("bitmap", bm);
        startActivity(intent);
    }
}
/**
 * Produces the current time as an SQL Timestamp string.
 *
 * @return a string representing the current time stamp.
 */
private String getDateText() {
    return new Timestamp(System.currentTimeMillis()).toString();
}
/**
 * Invoked by the location service when the phone's location changes;
 * forwards the new fix to setCurrentGpsLocation().
 */
public void onLocationChanged(Location newLocation) {
    setCurrentGpsLocation(newLocation);
}
/**
 * Resets the GPS location whenever the provider is enabled. Passing null
 * presumably makes setCurrentGpsLocation fall back to a stored fix — its
 * implementation continues beyond this chunk.
 */
public void onProviderEnabled(String provider) {
    setCurrentGpsLocation(null);
}
/**
 * Resets the GPS location whenever the provider is disabled; logs the
 * disabled provider when debugging is on.
 */
public void onProviderDisabled(String provider) {
    if(Utils.debug)
        Log.i(TAG, provider + " disabled");
    setCurrentGpsLocation(null);
}
/**
 * Resets the GPS location whenever the provider status changes. We
 * don't care about the status details.
 */
public void onStatusChanged(String provider, int status, Bundle extras) {
    setCurrentGpsLocation(null);
}
/**
 * Tells the user GPS coordinates could not be retrieved and offers to open
 * the location settings; declining leads to a second dialog that can
 * permanently suppress this alert (via disableAlert()).
 */
private void gpsNotEnabled(){
    final AlertDialog.Builder builder = new AlertDialog.Builder(this);
    final AlertDialog.Builder secBuilder = new AlertDialog.Builder(this);
    builder
    .setMessage("GPS coordinates could not be retrieved. Would you like to enable additional GPS options?")
    .setCancelable(false)
    .setPositiveButton("Yes", new DialogInterface.OnClickListener() {
        public void onClick(@SuppressWarnings("unused") final DialogInterface dialog, @SuppressWarnings("unused") final int id) {
            enableGPS();
        }
    })
    .setNegativeButton("No", new DialogInterface.OnClickListener() {
        public void onClick(final DialogInterface dialog, @SuppressWarnings("unused") final int id) {
            // Second-level dialog: offer to suppress this alert in future.
            secBuilder.setMessage("Do you want to disable this alert?")
            .setPositiveButton("Yes", new DialogInterface.OnClickListener() {
                public void onClick(@SuppressWarnings("unused") final DialogInterface dialog, @SuppressWarnings("unused") final int id) {
                    disableAlert();
                }
            })
            .setNegativeButton("No", new DialogInterface.OnClickListener() {
                public void onClick(@SuppressWarnings("unused") final DialogInterface dialog, @SuppressWarnings("unused") final int id) {
                    // Cancels the inner (suppress-alert) dialog.
                    dialog.cancel();
                }
            });
            final AlertDialog secAlert = secBuilder.create();
            secAlert.show();
            // Dismisses the OUTER alert: 'dialog' here is the outer onClick's
            // parameter, shadowed inside the nested listeners above.
            dialog.cancel();
        }
    });
    final AlertDialog alert = builder.create();
    alert.show();
}
// Persists the user's choice to suppress the "GPS not enabled" alert;
// setCurrentGpsLocation() checks this preference before showing it again.
private void disableAlert(){
    sp.edit().putBoolean("gpsAlertDisabled", true).commit();
}
// Opens the system's location-source settings screen so the user can turn
// GPS (or network location) on. Result code 0 is not inspected on return.
private void enableGPS(){
    Intent settingsIntent = new Intent(Settings.ACTION_LOCATION_SOURCE_SETTINGS);
    settingsIntent.addCategory(Intent.CATEGORY_LAUNCHER);
    settingsIntent.setComponent(new ComponentName("com.android.settings","com.android.settings.SecuritySettings"));
    settingsIntent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
    startActivityForResult(settingsIntent, 0);
}
/**
 * Sends a message to the update handler with either the current location or
 * the last known location.
 *
 * @param location either null or the current location; when null a provider
 *        is (re-)selected and the last known location is used instead
 */
private void setCurrentGpsLocation(Location location) {
    mLocationManager = (LocationManager) getSystemService(LOCATION_SERVICE);
    List<String> providers = mLocationManager.getProviders(ENABLED_ONLY);
    if (location == null) {
        if (Utils.debug) {
            Log.i(TAG, "Enabled providers = " + providers.toString());
        }
        setLocationProvider();
        // Request location updates only if there is a usable provider.
        // FIX: compare provider names with equals(); the original used the
        // reference comparison (mProvider != NO_PROVIDER), which is true even
        // when the two strings have identical contents.
        if (mProvider != null && mProvider.length() != 0 && !mProvider.equals(NO_PROVIDER)) {
            // Updates at most every 10000 ms or every 1 m of movement.
            mLocationManager.requestLocationUpdates(mProvider, 10000, 1, this);
            gettingLocationUpdates = true;
            location = mLocationManager.getLastKnownLocation(mProvider);
        }
    }
    if (Utils.debug) {
        Log.i(TAG, "Best provider = |" + mProvider + "|");
    }
    try {
        mLongitude = location.getLongitude();
        mLatitude = location.getLatitude();
        mLatitudeTextView.setText(mLatitude + "");
        mLongitudeTextView.setText(mLongitude + "");
        Log.i(TAG, "Longitude = " + mLongitude + " text " + mLongitudeTextView.getText().toString());
        Message msg = Message.obtain();
        msg.what = UPDATE_LOCATION;
        this.updateHandler.sendMessage(msg);
    } catch (NullPointerException e) {
        // location (or a view) was null: either prompt the user to enable a
        // location provider, or fall back to coordinates (0, 0).
        boolean alertDisabled = sp.getBoolean("gpsAlertDisabled", false);
        if (!alertDisabled && (!providers.contains(LocationManager.GPS_PROVIDER) || !providers.contains(LocationManager.NETWORK_PROVIDER))) {
            gpsNotEnabled();
        } else {
            mLongitude = mLatitude = 0; // In case no network and no GPS
            Log.e(TAG, e.toString());
            e.printStackTrace();
        }
    }
}
} |
package org.jaudiotagger.tag.id3;
import org.jaudiotagger.FileConstants;
import org.jaudiotagger.logging.ErrorMessage;
import org.jaudiotagger.audio.generic.Utils;
import org.jaudiotagger.audio.mp3.MP3File;
import org.jaudiotagger.tag.*;
import org.jaudiotagger.tag.datatype.Lyrics3Line;
import org.jaudiotagger.tag.id3.framebody.*;
import org.jaudiotagger.tag.lyrics3.*;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Iterator;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Represents an ID3v2.4 frame.
*
* @author : Paul Taylor
* @author : Eric Farng
* @version $Id$
*/
public class ID3v24Frame extends AbstractID3v2Frame
{
private static Pattern validFrameIdentifier = Pattern.compile("[A-Z][0-9A-Z]{3}");
protected static final int FRAME_DATA_LENGTH_SIZE = 4;
protected static final int FRAME_ID_SIZE = 4;
protected static final int FRAME_FLAGS_SIZE = 2;
protected static final int FRAME_SIZE_SIZE = 4;
protected static final int FRAME_ENCRYPTION_INDICATOR_SIZE = 1;
protected static final int FRAME_GROUPING_INDICATOR_SIZE = 1;
protected static final int FRAME_HEADER_SIZE = FRAME_ID_SIZE + FRAME_SIZE_SIZE + FRAME_FLAGS_SIZE;
/**
 * Creates an empty ID3v2.4 frame, typically populated afterwards
 * e.g. via {@link #read(ByteBuffer)}.
 */
public ID3v24Frame()
{
}
/**
 * Creates a new ID3v2_4Frame of type identifier. An empty
 * body of the correct type will be automatically created.
 * This constructor should be used when wishing to create a new
 * frame from scratch using user input.
 *
 * @param identifier defines the type of body to be created
 */
public ID3v24Frame(String identifier)
{
    //Super Constructor creates a frame with empty body of type specified
    super(identifier);
    // Fresh frames start with default (all clear) status and encoding flags.
    statusFlags = new StatusFlags();
    encodingFlags = new EncodingFlags();
}
/**
 * Copy Constructor: Creates a new ID3v2_4Frame datatype based on another frame.
 * Both flag objects are deep-copied from the source frame.
 *
 * @param frame the v2.4 frame to copy
 */
public ID3v24Frame(ID3v24Frame frame)
{
    super(frame);
    statusFlags = new StatusFlags(frame.getStatusFlags().getOriginalFlags());
    encodingFlags = new EncodingFlags(frame.getEncodingFlags().getFlags());
}
/**
 * Populates this frame's identifier and body from a v2.3 frame.
 * Branch order matters: unsupported bodies are kept verbatim, then direct
 * identifier conversions, then forced conversions (e.g. TYER -> TDRC),
 * then deprecated frames, and finally unknown frames are held as-is.
 *
 * @param frame the v2.3 frame to convert
 * @throws InvalidFrameException if the conversion fails
 */
private void createV24FrameFromV23Frame(ID3v23Frame frame) throws InvalidFrameException
{
    // Is it a straight conversion e.g TALB - TALB
    identifier = ID3Tags.convertFrameID23To24(frame.getIdentifier());
    // We can't convert unsupported bodies properly, so keep the body and the
    // original identifier unchanged.
    if(frame.getBody() instanceof FrameBodyUnsupported)
    {
        this.frameBody = new FrameBodyUnsupported((FrameBodyUnsupported) frame.getBody());
        this.frameBody.setHeader(this);
        identifier = frame.getIdentifier();
        logger.info("V3:UnsupportedBody:Orig id is:" + frame.getIdentifier() + ":New id is:" + identifier);
        return;
    }
    // Direct conversion succeeded: copy the body across.
    else if (identifier != null)
    {
        logger.info("V3:Orig id is:" + frame.getIdentifier() + ":New id is:" + identifier);
        this.frameBody = (AbstractTagFrameBody) ID3Tags.copyObject(frame.getBody());
        this.frameBody.setHeader(this);
        return;
    }
    // Is it a known v3 frame which needs forcing to v4 frame e.g. TYER - TDRC
    else if (ID3Tags.isID3v23FrameIdentifier(frame.getIdentifier()))
    {
        identifier = ID3Tags.forceFrameID23To24(frame.getIdentifier());
        if (identifier != null)
        {
            logger.info("V3:Orig id is:" + frame.getIdentifier() + ":New id is:" + identifier);
            this.frameBody = this.readBody(identifier, (AbstractID3v2FrameBody) frame.getBody());
            this.frameBody.setHeader(this);
            return;
        }
        // No mechanism exists to convert it to a v24 frame, e.g deprecated frame e.g TSIZ, so hold
        // as a deprecated frame consisting of an array of bytes
        else
        {
            this.frameBody = new FrameBodyDeprecated((AbstractID3v2FrameBody) frame.getBody());
            this.frameBody.setHeader(this);
            identifier = frame.getIdentifier();
            logger.info("V3:Deprecated:Orig id is:" + frame.getIdentifier() + ":New id is:" + identifier);
            return;
        }
    }
    // Unknown Frame e.g NCON or TDRL (because TDRL unknown to V23)
    else
    {
        this.frameBody = new FrameBodyUnsupported((FrameBodyUnsupported) frame.getBody());
        this.frameBody.setHeader(this);
        identifier = frame.getIdentifier();
        logger.info("V3:Unknown:Orig id is:" + frame.getIdentifier() + ":New id is:" + identifier);
        return;
    }
}
/**
 * Creates a new ID3v2_4Frame datatype based on another frame of different version.
 * Converts the framebody to the equivalent v24 framebody or to UnsupportedFrameBody if identifier
 * is unknown.
 *
 * @param frame to construct a new frame from (must not itself be a v2.4 frame)
 * @throws InvalidFrameException if the frame cannot be converted
 */
public ID3v24Frame(AbstractID3v2Frame frame) throws InvalidFrameException
{
    //Should not be called
    if ((frame instanceof ID3v24Frame))
    {
        throw new UnsupportedOperationException("Copy Constructor not called. Please type cast the argument");
    }
    //Flags
    if (frame instanceof ID3v23Frame)
    {
        statusFlags = new StatusFlags((ID3v23Frame.StatusFlags) ((ID3v23Frame) frame).getStatusFlags());
        encodingFlags = new EncodingFlags(((ID3v23Frame) frame).getEncodingFlags().getFlags());
    }
    else
    {
        statusFlags = new StatusFlags();
        encodingFlags = new EncodingFlags();
    }
    // Convert Identifier. If the id was a known id for the original
    // version we should be able to convert it to an v24 frame, although it may mean minor
    // modification to the data. If it was not recognised originally it should remain
    // unknown.
    if (frame instanceof ID3v23Frame)
    {
        createV24FrameFromV23Frame((ID3v23Frame) frame);
    }
    else if (frame instanceof ID3v22Frame)
    {
        // v2.2 frames are converted via an intermediate v2.3 frame.
        ID3v23Frame v23Frame = new ID3v23Frame(frame);
        createV24FrameFromV23Frame(v23Frame);
    }
    // NOTE(review): if frame is neither a v2.3 nor a v2.2 frame, frameBody is
    // still null here and the next line throws NullPointerException - TODO
    // confirm whether other frame versions can reach this constructor.
    this.frameBody.setHeader(this);
}
/**
 * Creates a new ID3v2_4Frame datatype based on a Lyrics3 field.
 * Maps each Lyrics3 field id to the corresponding v2.4 frame body
 * (LYR -> SYLT/USLT, INF -> COMM, AUT -> TCOM, EAL -> TALB,
 * EAR -> TPE1, ETT -> TIT2).
 *
 * @param field the Lyrics3v2 field to convert
 * @throws InvalidTagException if the field (e.g. IND, IMG, or an unknown id)
 *         cannot be represented as an ID3v2.4 frame
 */
public ID3v24Frame(Lyrics3v2Field field) throws InvalidTagException
{
    String id = field.getIdentifier();
    String value;
    if (id.equals("IND"))
    {
        throw new InvalidTagException("Cannot create ID3v2.40 frame from Lyrics3 indications field.");
    }
    else if (id.equals("LYR"))
    {
        FieldFrameBodyLYR lyric = (FieldFrameBodyLYR) field.getBody();
        Lyrics3Line line;
        Iterator<Lyrics3Line> iterator = lyric.iterator();
        FrameBodySYLT sync;
        FrameBodyUSLT unsync;
        boolean hasTimeStamp = lyric.hasTimeStamp();
        // we'll create only one frame here.
        // if there is any timestamp at all, we will create a sync'ed frame.
        sync = new FrameBodySYLT((byte) 0, "ENG", (byte) 2, (byte) 1, "", new byte[0]);
        unsync = new FrameBodyUSLT((byte) 0, "ENG", "", "");
        while (iterator.hasNext())
        {
            line = iterator.next();
            if (hasTimeStamp)
            {
                // NOTE(review): synchronised lyric lines are currently dropped -
                // the call below was disabled, leaving the SYLT body empty.
                // TODO confirm whether sync.addLyric(line) should be restored.
                // sync.addLyric(line);
            }
            else
            {
                unsync.addLyric(line);
            }
        }
        if (hasTimeStamp)
        {
            this.frameBody = sync;
            this.frameBody.setHeader(this);
        }
        else
        {
            this.frameBody = unsync;
            this.frameBody.setHeader(this);
        }
    }
    else if (id.equals("INF"))
    {
        value = ((FieldFrameBodyINF) field.getBody()).getAdditionalInformation();
        this.frameBody = new FrameBodyCOMM((byte) 0, "ENG", "", value);
        this.frameBody.setHeader(this);
    }
    else if (id.equals("AUT"))
    {
        value = ((FieldFrameBodyAUT) field.getBody()).getAuthor();
        this.frameBody = new FrameBodyTCOM((byte) 0, value);
        this.frameBody.setHeader(this);
    }
    else if (id.equals("EAL"))
    {
        value = ((FieldFrameBodyEAL) field.getBody()).getAlbum();
        this.frameBody = new FrameBodyTALB((byte) 0, value);
        this.frameBody.setHeader(this);
    }
    else if (id.equals("EAR"))
    {
        value = ((FieldFrameBodyEAR) field.getBody()).getArtist();
        this.frameBody = new FrameBodyTPE1((byte) 0, value);
        this.frameBody.setHeader(this);
    }
    else if (id.equals("ETT"))
    {
        value = ((FieldFrameBodyETT) field.getBody()).getTitle();
        this.frameBody = new FrameBodyTIT2((byte) 0, value);
        this.frameBody.setHeader(this);
    }
    else if (id.equals("IMG"))
    {
        throw new InvalidTagException("Cannot create ID3v2.40 frame from Lyrics3 image field.");
    }
    else
    {
        // FIX: corrected garbled error message ("Cannot caret" -> "Cannot create")
        throw new InvalidTagException("Cannot create ID3v2.40 frame from " + id + " Lyrics3 field");
    }
}
/**
 * Creates a new ID3v24Frame datatype by reading from byteBuffer.
 *
 * @param byteBuffer to read from
 * @param loggingFilename filename used to prefix log messages
 * @throws InvalidFrameException if no valid frame can be read
 */
public ID3v24Frame(ByteBuffer byteBuffer, String loggingFilename) throws InvalidFrameException
{
    setLoggingFilename(loggingFilename);
    read(byteBuffer);
}
/**
 * Creates a new ID3v24Frame datatype by reading from byteBuffer.
 *
 * @param byteBuffer to read from
 * @throws InvalidFrameException if no valid frame can be read
 * @deprecated use {@link #ID3v24Frame(ByteBuffer,String)} instead
 */
public ID3v24Frame(ByteBuffer byteBuffer) throws InvalidFrameException
{
    this(byteBuffer, "");
}
/**
 * Compares this frame with another object for equality.
 *
 * @param obj object to compare against
 * @return true if obj is an equivalent ID3v24Frame
 */
public boolean equals(Object obj)
{
    // Cheap reflexive fast-path.
    if (this == obj)
    {
        return true;
    }
    // Idiomatic negation instead of "== false"; non-v2.4 frames are never equal.
    if (!(obj instanceof ID3v24Frame))
    {
        return false;
    }
    // Field-level comparison is delegated to the superclass.
    return super.equals(obj);
}
/**
 * Return size of frame: the fixed v2.4 frame header plus the body size.
 *
 * @return int frame size in bytes
 */
public int getSize()
{
    int bodySize = frameBody.getSize();
    return FRAME_HEADER_SIZE + bodySize;
}
/**
 * Read the frame from the specified buffer.
 * Reads the frame header (identifier, sync-safe size, flag bytes, and any
 * extra flag-driven header bytes) then delegates reading of data to the
 * frame body. Includes workarounds for encoders (e.g. iTunes) that write
 * the frame size as a plain integer instead of a sync-safe integer.
 *
 * @param byteBuffer to read the frame from
 * @throws InvalidFrameException if no valid frame could be read at the
 *         current position
 */
public void read(ByteBuffer byteBuffer) throws InvalidFrameException
{
    byte[] buffer = new byte[FRAME_ID_SIZE];
    // Bail out if there is no room left for even a complete frame header.
    if (byteBuffer.position() + FRAME_HEADER_SIZE >= byteBuffer.limit())
    {
        logger.warning(getLoggingFilename() + ":" + "No space to find another frame:");
        throw new InvalidFrameException(getLoggingFilename() + ":" + "No space to find another frame");
    }
    //Read the Frame Identifier
    byteBuffer.get(buffer, 0, FRAME_ID_SIZE);
    identifier = new String(buffer);
    logger.fine(getLoggingFilename() + ":" + "Identifier is" + identifier);
    //Is this a valid identifier?
    if (isValidID3v2FrameIdentifier(identifier) == false)
    {
        //If not valid move file pointer back to one byte after
        //the original check so can try again.
        logger.info(getLoggingFilename() + ":" + "Invalid identifier:" + identifier);
        byteBuffer.position(byteBuffer.position() - (FRAME_ID_SIZE - 1));
        throw new InvalidFrameIdentifierException(identifier + " is not a valid ID3v2.40 frame");
    }
    //Read frame size as syncsafe integer
    frameSize = ID3SyncSafeInteger.bufferToValue(byteBuffer);
    if (frameSize < 0)
    {
        logger.warning(getLoggingFilename() + ":" + "Invalid Frame size:" + identifier);
        throw new InvalidFrameException(identifier + " is invalid frame");
    }
    else if (frameSize == 0)
    {
        logger.warning(getLoggingFilename() + ":" + "Empty Frame:" + identifier);
        //We dont process this frame or add to framemap because contains no useful information
        //Skip the two flag bytes so in correct position for subsequent frames
        byteBuffer.get();
        byteBuffer.get();
        throw new EmptyFrameException(identifier + " is empty frame");
    }
    else if (frameSize > (byteBuffer.remaining() - FRAME_FLAGS_SIZE))
    {
        logger.warning(getLoggingFilename() + ":" + "Invalid Frame size larger than size before mp3 audio:" + identifier);
        throw new InvalidFrameException(identifier + " is invalid frame");
    }
    if (frameSize > ID3SyncSafeInteger.MAX_SAFE_SIZE)
    {
        //Set Just after size field this is where we want to be when we leave this if statement
        int currentPosition = byteBuffer.position();
        //Read as nonsync safe integer
        byteBuffer.position(currentPosition - FRAME_ID_SIZE);
        int nonSyncSafeFrameSize = byteBuffer.getInt();
        //Is the frame size syncsafe, should always be BUT some encoders such as Itunes do not do it properly
        //so do an easy check now.
        byteBuffer.position(currentPosition - FRAME_ID_SIZE);
        boolean isNotSyncSafe = ID3SyncSafeInteger.isBufferNotSyncSafe(byteBuffer);
        //not relative so need to move position
        byteBuffer.position(currentPosition);
        if (isNotSyncSafe)
        {
            logger.warning(getLoggingFilename() + ":" + "Frame size is NOT stored as a sync safe integer:" + identifier);
            //This will return a larger frame size so need to check against buffer size if too large then we are
            //buggered, give up.
            //FIX: the original compared against (remaining() - -FRAME_FLAGS_SIZE);
            //the double negative ADDED the flag size instead of subtracting it,
            //accepting frames up to 4 bytes too large (cf. the identical, correct
            //check further below).
            if (nonSyncSafeFrameSize > (byteBuffer.remaining() - FRAME_FLAGS_SIZE))
            {
                logger.warning(getLoggingFilename() + ":" + "Invalid Frame size larger than size before mp3 audio:" + identifier);
                throw new InvalidFrameException(identifier + " is invalid frame");
            }
            else
            {
                frameSize = nonSyncSafeFrameSize;
            }
        }
        else
        {
            //appears to be sync safe but lets look at the bytes just after the reported end of this
            //frame to see if find a valid frame header
            //Read the Frame Identifier
            byte[] readAheadbuffer = new byte[FRAME_ID_SIZE];
            byteBuffer.position(currentPosition + frameSize + FRAME_FLAGS_SIZE);
            if (byteBuffer.remaining() < FRAME_ID_SIZE)
            {
                //There is no padding or framedata we are at end so assume syncsafe
                //reset position to just after framesize
                byteBuffer.position(currentPosition);
            }
            else
            {
                byteBuffer.get(readAheadbuffer, 0, FRAME_ID_SIZE);
                //reset position to just after framesize
                byteBuffer.position(currentPosition);
                String readAheadIdentifier = new String(readAheadbuffer);
                if (isValidID3v2FrameIdentifier(readAheadIdentifier))
                {
                    //Everything ok, so continue
                }
                else if (ID3SyncSafeInteger.isBufferEmpty(readAheadbuffer))
                {
                    //no data found so assume entered padding in which case assume it is last
                    //frame and we are ok
                }
                //havent found identifier so maybe not syncsafe or maybe there are no more frames, just padding
                else
                {
                    //Ok lets try using a non-syncsafe integer
                    //size returned will be larger so is it valid
                    if (nonSyncSafeFrameSize > byteBuffer.remaining() - FRAME_FLAGS_SIZE)
                    {
                        //invalid so assume syncsafe
                        byteBuffer.position(currentPosition);
                    }
                    else
                    {
                        readAheadbuffer = new byte[FRAME_ID_SIZE];
                        byteBuffer.position(currentPosition + nonSyncSafeFrameSize + FRAME_FLAGS_SIZE);
                        if (byteBuffer.remaining() >= FRAME_ID_SIZE)
                        {
                            byteBuffer.get(readAheadbuffer, 0, FRAME_ID_SIZE);
                            readAheadIdentifier = new String(readAheadbuffer);
                            //reset position to just after framesize
                            byteBuffer.position(currentPosition);
                            //ok found a valid identifier using non-syncsafe so assume non-syncsafe size
                            //and continue
                            if (isValidID3v2FrameIdentifier(readAheadIdentifier))
                            {
                                frameSize = nonSyncSafeFrameSize;
                                logger.warning(getLoggingFilename() + ":" + "Assuming frame size is NOT stored as a sync safe integer:" + identifier);
                            }
                            //no data found so assume entered padding in which case assume it is last
                            //frame and we are ok whereas we didnt hit padding when using syncsafe integer
                            //or we wouldnt have got to this point. So assume syncsafe integer ended within
                            //the frame data whereas this has reached end of frames.
                            else if (ID3SyncSafeInteger.isBufferEmpty(readAheadbuffer))
                            {
                                frameSize = nonSyncSafeFrameSize;
                                logger.warning(getLoggingFilename() + ":" + "Assuming frame size is NOT stored as a sync safe integer:" + identifier);
                            }
                            //invalid so assume syncsafe as that is the standard
                            else
                            {
                                ;
                            }
                        }
                        else
                        {
                            //reset position to just after framesize
                            byteBuffer.position(currentPosition);
                            //If the unsync framesize matches exactly the remaining bytes then assume it has the
                            //correct size for the last frame
                            if (byteBuffer.remaining() == 0)
                            {
                                frameSize = nonSyncSafeFrameSize;
                            }
                            //Inconclusive stick with syncsafe
                            else
                            {
                                ;
                            }
                        }
                    }
                }
            }
        }
    }
    //Read the flag bytes
    statusFlags = new StatusFlags(byteBuffer.get());
    encodingFlags = new EncodingFlags(byteBuffer.get());
    //Read extra bits appended to frame header for various encodings
    //These are not included in header size but are included in frame size but wont be read when we actually
    //try to read the frame body data
    int extraHeaderBytesCount = 0;
    if (((EncodingFlags) encodingFlags).isGrouping())
    {
        //Read the Grouping byte, but do nothing with it
        extraHeaderBytesCount = ID3v24Frame.FRAME_GROUPING_INDICATOR_SIZE;
        byteBuffer.get();
    }
    if (((EncodingFlags) encodingFlags).isEncryption())
    {
        //Read the Encryption byte, but do nothing with it
        extraHeaderBytesCount += ID3v24Frame.FRAME_ENCRYPTION_INDICATOR_SIZE;
        byteBuffer.get();
    }
    if (((EncodingFlags) encodingFlags).isDataLengthIndicator())
    {
        //Read the sync safe size field, but do nothing with it
        int datalengthSize = ID3SyncSafeInteger.bufferToValue(byteBuffer);
        extraHeaderBytesCount += FRAME_DATA_LENGTH_SIZE;
        logger.info(getLoggingFilename() + ":" + "Frame Size Is:" + frameSize + "Data Length Size:" + datalengthSize);
    }
    //Work out the real size of the framebody data
    int realFrameSize = frameSize - extraHeaderBytesCount;
    //Create Buffer that only contains the body of this frame rather than the remainder of tag
    ByteBuffer frameBodyBuffer = byteBuffer.slice();
    frameBodyBuffer.limit(realFrameSize);
    //Do we need to synchronize the frame body
    int syncSize = realFrameSize;
    if (((EncodingFlags) encodingFlags).isUnsynchronised())
    {
        //We only want to synchronize the buffer upto the end of this frame (remember this
        //buffer contains the remainder of this tag not just this frame), and we cant just
        //create a new buffer because when this method returns the position of the buffer is used
        //to look for the next frame, so we need to modify the buffer. The action of synchronizing causes
        //bytes to be dropped so the existing buffer is large enough to hold the modifications
        frameBodyBuffer = ID3Unsynchronization.synchronize(frameBodyBuffer);
        syncSize = frameBodyBuffer.limit();
        logger.info(getLoggingFilename() + ":" + "Frame Size After Syncing is:" + syncSize);
    }
    //Read the body data
    try
    {
        frameBody = readBody(identifier, frameBodyBuffer, syncSize);
        if (!(frameBody instanceof ID3v24FrameBody))
        {
            logger.info(getLoggingFilename() + ":" + "Converted frame body with:" + identifier + " to deprecated framebody");
            frameBody = new FrameBodyDeprecated((AbstractID3v2FrameBody) frameBody);
        }
    }
    finally
    {
        //Update position of main buffer, so no attempt is made to reread these bytes
        byteBuffer.position(byteBuffer.position() + realFrameSize);
    }
}
/**
 * Write the frame. Writes the frame header but writing the data is delegated to the
 * frame body. The body is optionally unsynchronized first (if the tag option
 * allows it and the body needs it), and the header's size field reflects the
 * possibly-unsynchronized body length.
 *
 * @param tagBuffer stream to which the complete frame (header + body) is appended
 */
public void write(ByteArrayOutputStream tagBuffer)
{
    boolean unsynchronization;
    logger.info("Writing frame to file:" + getIdentifier());
    //This is where we will write header, move position to where we can
    //write bodybuffer
    ByteBuffer headerBuffer = ByteBuffer.allocate(FRAME_HEADER_SIZE);
    //Write Frame Body Data to a new stream
    ByteArrayOutputStream bodyOutputStream = new ByteArrayOutputStream();
    ((AbstractID3v2FrameBody) frameBody).write(bodyOutputStream);
    //Does it need unsynchronizing, and are we allowing unsynchronizing
    byte[] bodyBuffer = bodyOutputStream.toByteArray();
    if (TagOptionSingleton.getInstance().isUnsyncTags())
    {
        unsynchronization = ID3Unsynchronization.requiresUnsynchronization(bodyBuffer);
    }
    else
    {
        unsynchronization = false;
    }
    if (unsynchronization)
    {
        bodyBuffer = ID3Unsynchronization.unsynchronize(bodyBuffer);
        logger.info("bodybytebuffer:sizeafterunsynchronisation:" + bodyBuffer.length);
    }
    //Write Frame Header
    //Write Frame ID, the identifier must be 4 bytes long; it may not be
    //because converted from an unknown v2.2 id (only 3 bytes long), so pad with a space
    if (getIdentifier().length() == 3)
    {
        identifier = identifier + ' ';
    }
    headerBuffer.put(Utils.getDefaultBytes(getIdentifier(), "ISO-8859-1"), 0, FRAME_ID_SIZE);
    //Write Frame Size (as sync-safe integer) based on size of body buffer (if it has
    //been unsynced then its size will have increased accordingly)
    int size = bodyBuffer.length;
    logger.fine("Frame Size Is:" + size);
    headerBuffer.put(ID3SyncSafeInteger.valueToBuffer(size));
    //Write the Flags
    //Status Flags: leave as they were when we read
    headerBuffer.put(statusFlags.getWriteFlags());
    //Encoding Flags, first reset
    encodingFlags.resetFlags();
    //Encoding: we only support unsynchronization
    if (unsynchronization)
    {
        ((ID3v24Frame.EncodingFlags) encodingFlags).setUnsynchronised();
    }
    headerBuffer.put(encodingFlags.getFlags());
    try
    {
        //Add header to the Byte Array Output Stream
        tagBuffer.write(headerBuffer.array());
        //Add bodybuffer to the Byte Array Output Stream
        tagBuffer.write(bodyBuffer);
    }
    catch (IOException ioe)
    {
        //This could never happen because we are not writing to a file, so convert to RuntimeException
        throw new RuntimeException(ioe);
    }
}
/**
 * Get Status Flags Object.
 *
 * @return the status flags read from, or to be written to, the frame header
 */
protected AbstractID3v2Frame.StatusFlags getStatusFlags()
{
    return statusFlags;
}
/**
 * Get Encoding Flags Object.
 *
 * @return the encoding flags read from, or to be written to, the frame header
 */
protected AbstractID3v2Frame.EncodingFlags getEncodingFlags()
{
    return encodingFlags;
}
/**
 * Member Class: represents a v2.4 frame header's Status Flags.
 * Makes adjustments if necessary based on frame type and specification.
 * Must be an inner (non-static) class because modifyFlags() reads the
 * enclosing frame's identifier.
 */
class StatusFlags extends AbstractID3v2Frame.StatusFlags
{
    public static final String TYPE_TAGALTERPRESERVATION = "typeTagAlterPreservation";
    public static final String TYPE_FILEALTERPRESERVATION = "typeFileAlterPreservation";
    public static final String TYPE_READONLY = "typeReadOnly";
    /**
     * Discard frame if tag altered
     */
    public static final int MASK_TAG_ALTER_PRESERVATION = FileConstants.BIT6;
    /**
     * Discard frame if audio part of file altered
     */
    public static final int MASK_FILE_ALTER_PRESERVATION = FileConstants.BIT5;
    /**
     * Frame tagged as read only
     */
    public static final int MASK_READ_ONLY = FileConstants.BIT4;
    /**
     * Use this when creating a frame from scratch (all flags clear).
     */
    StatusFlags()
    {
        super();
    }
    /**
     * Use this constructor when reading from file or from another v4 frame.
     *
     * @param flags the raw status flag byte from the frame header
     */
    StatusFlags(byte flags)
    {
        originalFlags = flags;
        writeFlags = flags;
        modifyFlags();
    }
    /**
     * Use this constructor when converting a v23 frame; the v3 flag bits are
     * remapped to their v4 positions.
     */
    StatusFlags(ID3v23Frame.StatusFlags statusFlags)
    {
        originalFlags = convertV3ToV4Flags(statusFlags.getOriginalFlags());
        writeFlags = originalFlags;
        modifyFlags();
    }
    /**
     * Convert V3 Flags to equivalent V4 Flags (the bit positions differ
     * between the two versions).
     */
    private byte convertV3ToV4Flags(byte v3Flag)
    {
        byte v4Flag = (byte) 0;
        if ((v3Flag & ID3v23Frame.StatusFlags.MASK_FILE_ALTER_PRESERVATION) != 0)
        {
            v4Flag |= (byte) MASK_FILE_ALTER_PRESERVATION;
        }
        if ((v3Flag & ID3v23Frame.StatusFlags.MASK_TAG_ALTER_PRESERVATION) != 0)
        {
            v4Flag |= (byte) MASK_TAG_ALTER_PRESERVATION;
        }
        return v4Flag;
    }
    /**
     * Makes modifications to the write flags based on the specification and
     * the enclosing frame's id: frames the spec says to discard when the
     * audio changes get the file-alter bit set, all others get both
     * preservation bits cleared.
     */
    protected void modifyFlags()
    {
        String str = getIdentifier();
        if (ID3v24Frames.getInstanceOf().isDiscardIfFileAltered(str) == true)
        {
            writeFlags |= (byte) MASK_FILE_ALTER_PRESERVATION;
            writeFlags &= (byte) ~MASK_TAG_ALTER_PRESERVATION;
        }
        else
        {
            writeFlags &= (byte) ~MASK_FILE_ALTER_PRESERVATION;
            writeFlags &= (byte) ~MASK_TAG_ALTER_PRESERVATION;
        }
    }
    // Emits the individual flag bits to the structure formatter (debug/report output).
    public void createStructure()
    {
        MP3File.getStructureFormatter().openHeadingElement(TYPE_FLAGS, "");
        MP3File.getStructureFormatter().addElement(TYPE_TAGALTERPRESERVATION, originalFlags & MASK_TAG_ALTER_PRESERVATION);
        MP3File.getStructureFormatter().addElement(TYPE_FILEALTERPRESERVATION, originalFlags & MASK_FILE_ALTER_PRESERVATION);
        MP3File.getStructureFormatter().addElement(TYPE_READONLY, originalFlags & MASK_READ_ONLY);
        MP3File.getStructureFormatter().closeHeadingElement(TYPE_FLAGS);
    }
}
/**
 * This represents a v2.4 frame header's Encoding Flags: compression,
 * encryption, grouping, per-frame unsynchronisation, and the data length
 * indicator.
 */
class EncodingFlags extends AbstractID3v2Frame.EncodingFlags
{
    public static final String TYPE_COMPRESSION = "compression";
    public static final String TYPE_ENCRYPTION = "encryption";
    public static final String TYPE_GROUPIDENTITY = "groupidentity";
    public static final String TYPE_FRAMEUNSYNCHRONIZATION = "frameUnsynchronisation";
    public static final String TYPE_DATALENGTHINDICATOR = "dataLengthIndicator";
    /**
     * Frame is part of a group
     */
    public static final int MASK_GROUPING_IDENTITY = FileConstants.BIT6;
    /**
     * Frame is compressed
     */
    public static final int MASK_COMPRESSION = FileConstants.BIT3;
    /**
     * Frame is encrypted
     */
    public static final int MASK_ENCRYPTION = FileConstants.BIT2;
    /**
     * Unsynchronisation
     */
    public static final int MASK_FRAME_UNSYNCHRONIZATION = FileConstants.BIT1;
    /**
     * Length
     */
    public static final int MASK_DATA_LENGTH_INDICATOR = FileConstants.BIT0;
    /**
     * Use this when creating a frame from scratch (all flags clear).
     */
    EncodingFlags()
    {
        super();
    }
    /**
     * Use this when creating a frame from existing flags in another v4 frame.
     *
     * @param flags the raw encoding flag byte from the frame header
     */
    EncodingFlags(byte flags)
    {
        super(flags);
        logEnabledFlags();
    }
    // Logs every flag that is set; compression/encryption are warnings
    // because their payload handling is not supported here.
    public void logEnabledFlags()
    {
        if (isCompression())
        {
            logger.warning(ErrorMessage.MP3_FRAME_IS_COMPRESSED.getMsg(getLoggingFilename(),identifier));
        }
        if (isEncryption())
        {
            logger.warning(ErrorMessage.MP3_FRAME_IS_ENCRYPTED.getMsg(getLoggingFilename(),identifier));
        }
        if (isGrouping())
        {
            logger.info(ErrorMessage.MP3_FRAME_IS_GROUPED.getMsg(getLoggingFilename(),identifier));
        }
        if (isUnsynchronised())
        {
            logger.info(ErrorMessage.MP3_FRAME_IS_UNSYNCHRONISED.getMsg(getLoggingFilename(),identifier));
        }
        if (isDataLengthIndicator())
        {
            logger.info(ErrorMessage.MP3_FRAME_IS_DATA_LENGTH_INDICATOR.getMsg(getLoggingFilename(),identifier));
        }
    }
    // Raw flag byte as read/written.
    public byte getFlags()
    {
        return flags;
    }
    public boolean isCompression()
    {
        return (flags & MASK_COMPRESSION) > 0;
    }
    public boolean isEncryption()
    {
        return (flags & MASK_ENCRYPTION) > 0;
    }
    public boolean isGrouping()
    {
        return (flags & MASK_GROUPING_IDENTITY) > 0;
    }
    public boolean isUnsynchronised()
    {
        return (flags & MASK_FRAME_UNSYNCHRONIZATION) > 0;
    }
    public boolean isDataLengthIndicator()
    {
        return (flags & MASK_DATA_LENGTH_INDICATOR) > 0;
    }
    // Marks the frame body as unsynchronised; used by write() before emitting flags.
    public void setUnsynchronised()
    {
        flags |= MASK_FRAME_UNSYNCHRONIZATION;
    }
    // Emits the individual flag bits to the structure formatter (debug/report output).
    public void createStructure()
    {
        MP3File.getStructureFormatter().openHeadingElement(TYPE_FLAGS, "");
        MP3File.getStructureFormatter().addElement(TYPE_COMPRESSION, flags & MASK_COMPRESSION);
        MP3File.getStructureFormatter().addElement(TYPE_ENCRYPTION, flags & MASK_ENCRYPTION);
        MP3File.getStructureFormatter().addElement(TYPE_GROUPIDENTITY, flags & MASK_GROUPING_IDENTITY);
        MP3File.getStructureFormatter().addElement(TYPE_FRAMEUNSYNCHRONIZATION, flags & MASK_FRAME_UNSYNCHRONIZATION);
        MP3File.getStructureFormatter().addElement(TYPE_DATALENGTHINDICATOR, flags & MASK_DATA_LENGTH_INDICATOR);
        MP3File.getStructureFormatter().closeHeadingElement(TYPE_FLAGS);
    }
}
/**
 * Does the frame identifier meet the syntax for an ID3v2 frame identifier?
 * It must start with a capital letter and contain only capital letters and
 * numbers (four characters total).
 *
 * @param identifier to be checked
 * @return whether the identifier is valid
 */
public boolean isValidID3v2FrameIdentifier(String identifier)
{
    return validFrameIdentifier.matcher(identifier).matches();
}
/**
 * Emit a structured representation of this frame (id, size, both flag sets,
 * and the body) to the global structure formatter.
 */
public void createStructure()
{
    MP3File.getStructureFormatter().openHeadingElement(TYPE_FRAME, getIdentifier());
    MP3File.getStructureFormatter().addElement(TYPE_FRAME_SIZE, frameSize);
    statusFlags.createStructure();
    encodingFlags.createStructure();
    frameBody.createStructure();
    MP3File.getStructureFormatter().closeHeadingElement(TYPE_FRAME);
}
/**
 * @return true if this frame's id is considered a common frame by the
 *         v2.4 frame registry
 */
public boolean isCommon()
{
    return ID3v24Frames.getInstanceOf().isCommon(getId());
}
/**
 * @return true if this frame's id is considered a binary frame by the
 *         v2.4 frame registry (the original javadoc incorrectly said
 *         "common frame")
 */
public boolean isBinary()
{
    return ID3v24Frames.getInstanceOf().isBinary(getId());
}
} |
// $Id: ConfiguratorFactory.java,v 1.9 2004/08/04 10:32:46 belaban Exp $
package org.jgroups.conf;
import org.w3c.dom.Element;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jgroups.ChannelException;
import org.jgroups.JChannel;
import java.io.IOException;
import java.io.InputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Properties;
/**
* The ConfigurationFactory is a factory that returns a protocol stack configurator.
* The protocol stack configurator is an object that reads a stack configuration and
* parses it so that the ProtocolStack can create a stack.
* <BR>
* Currently the factory returns one of the following objects:<BR>
* 1. XmlConfigurator - parses XML files that are according to the jgroups-protocol.dtd<BR>
* 2. PlainConfigurator - uses the old style strings UDP:FRAG: etc etc<BR>
*
* @author Filip Hanik (<a href="mailto:filip@filip.net">filip@filip.net</a>)
* @version 1.0
*/
public class ConfiguratorFactory {
public static final String JAXP_MISSING_ERROR_MSG=
"JAXP Error: the required XML parsing classes are not available; " +
"make sure that JAXP compatible libraries are in the classpath.";
static final String FORCE_CONFIGURATION="force.properties";
static Log log=LogFactory.getLog(ConfiguratorFactory.class);
static final String propertiesOverride;
// Check for the presence of the system property "force.properties", and
// act appropriately if it is set. We only need to do this once since the
// system properties are highly unlikely to change. When set, every
// getStackConfigurator overload ignores its argument and uses the override.
static {
    Properties properties = System.getProperties();
    propertiesOverride = properties.getProperty(FORCE_CONFIGURATION);
    if (propertiesOverride != null && log.isInfoEnabled()) {
        log.info("using properties override: " + propertiesOverride);
    }
}
/**
 * Protected constructor: this is a static factory, so direct instantiation
 * is not intended (subclassing remains possible).
 */
protected ConfiguratorFactory() {
}
/**
 * Returns a protocol stack configurator based on the XML configuration
 * provided in the specified file.
 *
 * @param file a File containing a JGroups XML configuration (ignored if
 *             the "force.properties" system property override is set)
 *
 * @return a <code>ProtocolStackConfigurator</code> containing the stack
 *         configuration.
 *
 * @throws ChannelException if problems occur during the configuration of
 *                          the protocol stack.
 */
public static ProtocolStackConfigurator getStackConfigurator(File file)
    throws ChannelException {
    ProtocolStackConfigurator returnValue;
    if (propertiesOverride != null) {
        returnValue = getStackConfigurator(propertiesOverride);
    }
    else {
        checkForNullConfiguration(file);
        checkJAXPAvailability();
        try {
            returnValue=
                XmlConfigurator.getInstance(new FileInputStream(file));
        }
        catch (IOException ioe) {
            throw createChannelConfigurationException(ioe);
        }
    }
    return returnValue;
}
/**
 * Returns a protocol stack configurator based on the XML configuration
 * provided at the specified URL.
 *
 * @param url a URL pointing to a JGroups XML configuration (ignored if
 *            the "force.properties" system property override is set)
 *
 * @return a <code>ProtocolStackConfigurator</code> containing the stack
 *         configuration.
 *
 * @throws ChannelException if problems occur during the configuration of
 *                          the protocol stack.
 */
public static ProtocolStackConfigurator getStackConfigurator(URL url)
    throws ChannelException {
    // The system-property override trumps the supplied URL.
    if (propertiesOverride != null) {
        return getStackConfigurator(propertiesOverride);
    }
    checkForNullConfiguration(url);
    checkJAXPAvailability();
    try {
        return XmlConfigurator.getInstance(url);
    }
    catch (IOException ioe) {
        throw createChannelConfigurationException(ioe);
    }
}
/**
* Returns a protocol stack configurator based on the XML configuration
* provided by the specified XML element.
*
* @param element a XML element containing a JGroups XML configuration.
*
* @return a <code>ProtocolStackConfigurator</code> containing the stack
* configuration.
*
* @throws ChannelException if problems occur during the configuration of
* the protocol stack.
*/
public static ProtocolStackConfigurator getStackConfigurator(Element element)
throws ChannelException {
ProtocolStackConfigurator returnValue;
if (propertiesOverride != null) {
returnValue = getStackConfigurator(propertiesOverride);
}
else {
checkForNullConfiguration(element);
// Since Element is a part of the JAXP specification and because an
// Element instance already exists, there is no need to check for
// JAXP availability.
// checkJAXPAvailability();
try {
returnValue=XmlConfigurator.getInstance(element);
}
catch (IOException ioe) {
throw createChannelConfigurationException(ioe);
}
}
return returnValue;
}
/**
* Returns a protocol stack configurator based on the provided properties
* string.
*
* @param properties an old style property string, a string representing a
* system resource containing a JGroups XML configuration,
* a string representing a URL pointing to a JGroups XML
* XML configuration, or a string representing a file name
* that contains a JGroups XML configuration.
*/
public static ProtocolStackConfigurator getStackConfigurator(String properties) throws ChannelException {
if (propertiesOverride != null && propertiesOverride != properties) {
properties = propertiesOverride;
}
// added by bela: for null String props we use the default properties
if(properties == null)
properties=JChannel.DEFAULT_PROTOCOL_STACK;
checkForNullConfiguration(properties);
ProtocolStackConfigurator returnValue;
// Attempt to treat the properties string as a pointer to an XML
// configuration.
XmlConfigurator configurator = null;
try {
configurator=getXmlConfigurator(properties);
}
catch (IOException ioe) {
throw createChannelConfigurationException(ioe);
}
// Did the properties string point to a JGroups XML configuration?
if (configurator != null) {
returnValue=configurator;
}
else {
// Attempt to process the properties string as the old style
// property string.
returnValue=new PlainConfigurator(properties);
}
return returnValue;
}
    /**
     * Returns a protocol stack configurator for a loosely-typed configuration
     * reference: a URL, a String (URL, classpath resource, file name or plain
     * property string), a File, or a DOM Element.
     *
     * NOTE(review): unlike the other overloads this one throws IOException
     * rather than ChannelException, and the final statement casts the argument
     * to String, which would throw ClassCastException for a File argument
     * whose file could not be opened — presumably callers only pass
     * String/Element in that situation; verify against callers.
     *
     * @param properties the configuration reference; null selects
     *                   JChannel.DEFAULT_PROTOCOL_STACK.
     */
    public static ProtocolStackConfigurator getStackConfigurator(Object properties) throws IOException {
        InputStream input=null;

        // The "force.properties" system property override replaces whatever
        // was passed in.
        if (propertiesOverride != null) {
            properties = propertiesOverride;
        }

        // added by bela: for null String props we use the default properties
        if(properties == null)
            properties=JChannel.DEFAULT_PROTOCOL_STACK;

        if(properties instanceof URL) {
            try {
                input=((URL)properties).openStream();
            }
            catch(Throwable t) {
                // best effort: fall through and try the other interpretations
            }
        }

        // if it is a string, then it could be a plain string or a url
        if(input == null && properties instanceof String) {
            try {
                input=new URL((String)properties).openStream();
            }
            catch(Exception ignore) {
                // if we get here this means we don't have a URL
            }

            // another try - maybe it is a resource, e.g. default.xml
            if(input == null && ((String)properties).endsWith("xml")) {
                try {
                    ClassLoader classLoader=Thread.currentThread().getContextClassLoader();
                    input=classLoader.getResourceAsStream((String)properties);
                }
                catch(Throwable ignore) {
                    // best effort: resource lookup failed
                }
            }

            // try a regular file name
            // This code was moved from the parent block (below) because of the
            // possibility of causing a ClassCastException.
            if(input == null) {
                try {
                    input=new FileInputStream((String)properties);
                }
                catch(Throwable t) {
                    // best effort: not a readable file either
                }
            }
        }

        // try a regular file
        if(input == null && properties instanceof File) {
            try {
                input=new FileInputStream((File)properties);
            }
            catch(Throwable t) {
                // best effort: the file could not be opened
            }
        }

        if(input == null)
            log.info("properties are neither a URL nor a file");
        else {
            return XmlConfigurator.getInstance(input);
        }

        if(properties instanceof Element) {
            return XmlConfigurator.getInstance((Element)properties);
        }

        // Fall back to the old style plain property string.  This cast throws
        // ClassCastException if properties is a File (see NOTE above).
        return new PlainConfigurator((String)properties);
    }
/**
* Returns a JGroups XML configuration InputStream based on the provided
* properties string.
*
* @param properties a string representing a system resource containing a
* JGroups XML configuration, a string representing a URL
* pointing to a JGroups ML configuration, or a string
* representing a file name that contains a JGroups XML
* configuration.
*
* @throws IOException if the provided properties string appears to be a
* valid URL but is unreachable.
*/
static InputStream getConfigStream(String properties) throws IOException {
InputStream configStream = null;
// Check to see if the properties string is a URL.
try {
configStream=new URL(properties).openStream();
}
catch (MalformedURLException mre) {
// the properties string is not a URL
}
// Commented so the caller is notified of this condition, but left in
// the code for documentation purposes.
// catch (IOException ioe) {
// the specified URL string was not reachable
// Check to see if the properties string is the name of a resource,
// e.g. default.xml.
if(configStream == null && properties.endsWith("xml")) {
ClassLoader classLoader=Thread.currentThread().getContextClassLoader();
configStream=classLoader.getResourceAsStream(properties);
}
// Check to see if the properties string is the name of a file.
if (configStream == null) {
try {
configStream=new FileInputStream((String)properties);
}
catch(FileNotFoundException fnfe) {
// the properties string is likely not a file
}
}
return configStream;
}
/**
* Returns an XmlConfigurator based on the provided properties string (if
* possible).
*
* @param properties a string representing a system resource containing a
* JGroups XML configuration, a string representing a URL
* pointing to a JGroups ML configuration, or a string
* representing a file name that contains a JGroups XML
* configuration.
*
* @return an XmlConfigurator instance based on the provided properties
* string; <code>null</code> if the provided properties string does
* not point to an XML configuration.
*
* @throws IOException if the provided properties string appears to be a
* valid URL but is unreachable, or if the JGroups XML
* configuration pointed to by the URL can not be
* parsed.
*/
static XmlConfigurator getXmlConfigurator(String properties) throws IOException {
XmlConfigurator returnValue=null;
InputStream configStream=getConfigStream(properties);
if (configStream != null) {
checkJAXPAvailability();
returnValue=XmlConfigurator.getInstance(configStream);
}
return returnValue;
}
    /**
     * Creates a <code>ChannelException</code> instance based upon a
     * configuration problem.
     *
     * @param cause the exceptional configuration condition to be used as the
     *              created <code>ChannelException</code>'s cause.
     *
     * @return a new ChannelException wrapping the supplied cause.
     */
    static ChannelException createChannelConfigurationException(Throwable cause) {
        return new ChannelException("unable to load the protocol stack", cause);
    }
/**
* Check to see if the specified configuration properties are
* <code>null</null> which is not allowed.
*
* @param properties the specified protocol stack configuration.
*
* @throws NullPointerException if the specified configuration properties
* are <code>null</code>.
*/
static void checkForNullConfiguration(Object properties) {
if (properties == null) {
final String msg =
"the specifed protocol stack configuration was null.";
throw new NullPointerException(msg);
}
}
    /**
     * Checks the availability of the JAXP classes on the classpath.
     *
     * @throws NoClassDefFoundError if the required JAXP classes are not
     *                              available on the classpath.
     */
    static void checkJAXPAvailability() {
        try {
            // TODO: Do some real class checking here instead of forcing the
            // load of a JGroups class that happens (by default) to do it
            // for us.
            // NOTE(review): if XmlConfigurator failed to link, the error may
            // already have surfaced when this factory class itself was
            // loaded, in which case this catch never fires — confirm against
            // the deployment's class-loading behaviour.
            XmlConfigurator.class.getName();
        }
        catch (NoClassDefFoundError error) {
            // Re-throw with a friendlier, actionable message.
            throw new NoClassDefFoundError(JAXP_MISSING_ERROR_MSG);
        }
    }
} |
package org.mapyrus.dataset;
import java.awt.geom.Rectangle2D;
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.StringTokenizer;
import org.mapyrus.Argument;
import org.mapyrus.MapyrusException;
import org.mapyrus.MapyrusMessages;
import org.mapyrus.Row;
import org.mapyrus.geom.Geometry;
/**
* Implements reading of geographic datasets from ESRI shape files.
*/
public class ShapefileDataset implements GeographicDataset
{
	/*
	 * Magic number in file header (big-endian, first word of every .shp file).
	 */
	private static final int MAGIC_NUMBER = 9994;

	/*
	 * Sentinel value indicating end of header records in a DBF file.
	 * Value indicating deleted record in DBF file.
	 */
	private static final byte DBF_HEADER_SENTINEL = 0x0D;
	private static final byte DBF_DELETED_RECORD = '*';

	/*
	 * Types of data present in shape file, as defined by the ESRI
	 * shapefile specification.
	 */
	private static final int NULL_SHAPE = 0;
	private static final int POINT = 1;
	private static final int POLYLINE = 3;
	private static final int POLYGON = 5;
	private static final int MULTIPOINT = 8;
	private static final int POINT_Z = 11;
	private static final int POLYLINE_Z = 13;
	private static final int POLYGON_Z = 15;
	private static final int MULTIPOINT_Z = 18;
	private static final int POINT_M = 21;
	private static final int POLYLINE_M = 23;
	private static final int POLYGON_M = 25;
	private static final int MULTIPOINT_M = 28;
	private static final int MULTIPATCH = 31;

	/*
	 * types of fields in DBF database file.
	 */
	private static final byte DBF_CHARACTER = 'C';
	private static final byte DBF_DATE = 'D';
	private static final byte DBF_NUMBER = 'N';
	private static final byte DBF_FLOATING = 'F';
	private static final byte DBF_LOGICAL = 'L';

	/*
	 * Files containing data, their lengths and type.
	 * m_DBFStream is null when no .dbf file accompanies the .shp file.
	 */
	private DataInputStream m_shapeStream;
	private DataInputStream m_DBFStream;
	private String m_filename;
	private int m_shapeFileLength, m_shapeFileType, m_geometryType;
	private int m_DBFRecordLength;
	private String m_projection;

	/*
	 * Flags indicating which fields in DBF file that user wants to fetch
	 * and the total number of fields the user wants to fetch.
	 */
	private ArrayList<Boolean> m_DBFFieldsToFetch;
	private int m_nDBFFieldsToFetch;

	/*
	 * Field names, types, and types and lengths as given in DBF file.
	 */
	private String []m_fieldNames;
	private int []m_fieldTypes;
	private int []m_DBFFieldTypes;
	private int []m_DBFFieldLengths;

	/*
	 * Extents of shape file and extents being queried.
	 */
	private Rectangle2D.Double m_extents;
	private Rectangle2D.Double m_queryExtents;

	/*
	 * Number of bytes already read for query.
	 * A record read from DBF file for query.
	 */
	private int m_BytesRead;
	private byte []m_DBFRecord;

	// Character encoding for DBF string fields, or null for platform default.
	private String m_encoding;
	/**
	 * Open ESRI shape file containing geographic data for querying.
	 * Opens the .shp file, the optional .dbf attribute file and the
	 * optional .prj projection file, then reads both file headers so
	 * that fetch() can start returning records.
	 * @param filename name of shape file to open, with or without shp suffix.
	 * @param extras options specific to text file datasets, given as var=value pairs.
	 * Recognised options: dbffields=name1,name2,...  xmin= ymin= xmax= ymax=  encoding=.
	 */
	public ShapefileDataset(String filename, String extras)
		throws FileNotFoundException, IOException, MapyrusException
	{
		String shapeFilename, dbfFilename, prjFilename;
		StringTokenizer st, st2;
		String token, s;
		HashSet<String> extrasDBFFields;
		double d, xMin, yMin, xMax, yMax;

		/*
		 * Set default options. Then see if user wants to override any of them.
		 * Default query extents are effectively unlimited.
		 */
		extrasDBFFields = null;
		xMin = yMin = -Float.MAX_VALUE;
		xMax = yMax = Float.MAX_VALUE;
		m_encoding = null;

		st = new StringTokenizer(extras);
		while (st.hasMoreTokens())
		{
			token = st.nextToken();
			if (token.startsWith("dbffields="))
			{
				/*
				 * Parse list of comma separated field names that user wants
				 * to fetch.
				 */
				extrasDBFFields = new HashSet<String>();
				st2 = new StringTokenizer(token.substring(10), ",");
				while (st2.hasMoreTokens())
				{
					token = st2.nextToken();
					extrasDBFFields.add(token);
				}
			}
			else if (token.startsWith("xmin=") || token.startsWith("ymin=") ||
				token.startsWith("xmax=") || token.startsWith("ymax="))
			{
				s = token.substring(5);
				try
				{
					d = Double.parseDouble(s);
				}
				catch (NumberFormatException e)
				{
					throw new MapyrusException(MapyrusMessages.get(MapyrusMessages.INVALID_NUMBER) +
						": " + s);
				}
				if (token.startsWith("xmin="))
					xMin = d;
				else if (token.startsWith("ymin="))
					yMin = d;
				else if (token.startsWith("xmax="))
					xMax = d;
				else
					yMax = d;
			}
			else if (token.startsWith("encoding="))
			{
				m_encoding = token.substring(9);
			}
		}

		/*
		 * Reject inverted query ranges before opening any files.
		 */
		if (xMin > xMax)
		{
			throw new MapyrusException(MapyrusMessages.get(MapyrusMessages.INVALID_RANGE) +
				": " + xMin + " - " + xMax);
		}
		if (yMin > yMax)
		{
			throw new MapyrusException(MapyrusMessages.get(MapyrusMessages.INVALID_RANGE) +
				": " + yMin + " - " + yMax);
		}
		m_queryExtents = new Rectangle2D.Double(xMin, yMin, xMax - xMin, yMax - yMin);

		/*
		 * Determine full names of .shp and .dbf files.
		 * An upper case suffix selects upper case companion file names.
		 */
		if (filename.endsWith(".shp") || filename.endsWith(".dbf") ||
			filename.endsWith(".shx"))
		{
			m_filename = filename.substring(0, filename.length() - 4);
			shapeFilename = m_filename + ".shp";
			dbfFilename = m_filename + ".dbf";
			prjFilename = m_filename + ".prj";
		}
		else if (filename.endsWith(".SHP") || filename.endsWith(".DBF") ||
			filename.endsWith(".SHX"))
		{
			m_filename = filename.substring(0, filename.length() - 4);
			shapeFilename = m_filename + ".SHP";
			dbfFilename = m_filename + ".DBF";
			prjFilename = m_filename + ".PRJ";
		}
		else
		{
			m_filename = filename;
			shapeFilename = filename + ".shp";
			dbfFilename = filename + ".dbf";
			prjFilename = filename + ".prj";
		}

		try
		{
			m_shapeStream = new DataInputStream(new BufferedInputStream(new FileInputStream(shapeFilename)));
			try
			{
				m_DBFStream = new DataInputStream(new BufferedInputStream(new FileInputStream(dbfFilename)));
			}
			catch (FileNotFoundException e)
			{
				/*
				 * If .dbf file does not exist then just continue without it.
				 */
				m_DBFStream = null;
			}
		}
		catch (SecurityException e)
		{
			throw new IOException(e.getClass().getName() + ": " + e.getMessage());
		}

		/*
		 * If there is an accompanying .prj file with the projection then read it.
		 * Only the first line of the file is kept.
		 */
		BufferedReader prjReader = null;
		try
		{
			prjReader = new BufferedReader(new FileReader(prjFilename));
			m_projection = prjReader.readLine();
		}
		catch(FileNotFoundException e)
		{
			m_projection = "";
		}
		finally
		{
			if (prjReader != null)
				prjReader.close();
		}

		try
		{
			/*
			 * Read shape header, checking magic number and reading everything with
			 * correct byte order.
			 */
			readShapeHeader();

			/*
			 * Read header from database file to get names and types of other fields.
			 */
			readDBFHeader(extrasDBFFields);
			if (Geometry.overlaps(m_queryExtents, m_extents.getMinX(), m_extents.getMinY(),
				m_extents.getMaxX(), m_extents.getMaxY()))
			{
				m_BytesRead = 0;
				m_DBFRecord = new byte[m_DBFRecordLength];
			}
			else
			{
				/*
				 * Shape file does not overlap current extents. Fetch will return nothing.
				 * Pretending the whole file was already read makes fetch() stop at once.
				 */
				m_BytesRead = m_shapeFileLength;
			}
		}
		catch (IOException e1)
		{
			/*
			 * Close streams on failure so no file handles leak.
			 */
			close();
			throw e1;
		}
		catch (MapyrusException e2)
		{
			close();
			throw e2;
		}
	}
/**
* Reads 8 byte little endian long integer value.
* @param f input stream to read from.
* @return long value.
*/
private long readLittleEndianLong(DataInputStream f) throws IOException
{
long n, n1, n2, n3, n4, n5, n6, n7, n8;
n1 = f.read();
n2 = f.read();
n3 = f.read();
n4 = f.read();
n5 = f.read();
n6 = f.read();
n7 = f.read();
n8 = f.read();
n = ((n8 <<56) + (n7 << 48) + (n6 << 40) + (n5 << 32) +
(n4 << 24) + (n3 << 16) + (n2 << 8) + n1);
return(n);
}
/**
* Reads 4 byte little endian integer value.
* @param f input stream to read from.
* @return int value.
*/
private int readLittleEndianInt(DataInputStream f) throws IOException
{
int n, n1, n2, n3, n4;
n1 = f.read();
n2 = f.read();
n3 = f.read();
n4 = f.read();
n = ((n4 << 24) + (n3 << 16) + (n2 << 8) + n1);
return(n);
}
/**
* Reads 2 byte little endian short integer value.
* @param f input stream to read from.
* @return short value.
*/
private short readLittleEndianShort(DataInputStream f) throws IOException
{
int n1, n2;
n1 = f.read();
n2 = f.read();
return((short)((n2 << 8) + n1));
}
/**
* Reads 8 byte little endian double value.
* @param f input stream to read from.
* @return double value.
*/
private double readLittleEndianDouble(DataInputStream f) throws IOException
{
long l;
double d;
l = readLittleEndianLong(f);
d = Double.longBitsToDouble(l);
return(d);
}
	/*
	 * Read shape file header: magic number, file length, shape type and
	 * bounding box.  Sets m_shapeFileLength, m_shapeFileType, m_extents
	 * and m_geometryType.
	 */
	private void readShapeHeader() throws IOException, MapyrusException
	{
		int magic;
		double xMin, yMin, xMax, yMax;

		/*
		 * First word of file is a big-endian magic number identifying
		 * an ESRI shape file.
		 */
		magic = m_shapeStream.readInt();
		if (magic != MAGIC_NUMBER)
		{
			throw new MapyrusException(m_filename + ": " +
				MapyrusMessages.get(MapyrusMessages.NOT_SHAPE_FILE));
		}

		/*
		 * Skip five unused words, then read file length.  The length is
		 * given in 16 bit words; convert it to the byte count of the
		 * records following the 100 byte header.
		 */
		m_shapeStream.readInt();
		m_shapeStream.readInt();
		m_shapeStream.readInt();
		m_shapeStream.readInt();
		m_shapeStream.readInt();
		m_shapeFileLength = m_shapeStream.readInt() * 2 - 100;

		/*
		 * The remainder of the header is little endian: version, shape
		 * type and bounding box (Z and M ranges are read but unused).
		 */
		readLittleEndianInt(m_shapeStream); /* version */
		m_shapeFileType = readLittleEndianInt(m_shapeStream);
		xMin = readLittleEndianDouble(m_shapeStream);
		yMin = readLittleEndianDouble(m_shapeStream);
		xMax = readLittleEndianDouble(m_shapeStream);
		yMax = readLittleEndianDouble(m_shapeStream);
		readLittleEndianDouble(m_shapeStream); /* zMin */
		readLittleEndianDouble(m_shapeStream); /* zMax */
		readLittleEndianDouble(m_shapeStream); /* mMin */
		readLittleEndianDouble(m_shapeStream); /* mMax */
		m_extents = new Rectangle2D.Double(xMin, yMin, xMax - xMin, yMax - yMin);

		/*
		 * Convert geometry type to the type we use internally.
		 */
		switch (m_shapeFileType)
		{
			case NULL_SHAPE:
				m_geometryType = 0;
				break;
			case POINT:
			case POINT_Z:
			case POINT_M:
				m_geometryType = Argument.GEOMETRY_POINT;
				break;
			case POLYLINE:
			case POLYLINE_Z:
			case POLYLINE_M:
				m_geometryType = Argument.GEOMETRY_MULTILINESTRING;
				break;
			case POLYGON:
			case POLYGON_Z:
			case POLYGON_M:
			case MULTIPATCH:
				m_geometryType = Argument.GEOMETRY_MULTIPOLYGON;
				break;
			case MULTIPOINT:
			case MULTIPOINT_Z:
			case MULTIPOINT_M:
				m_geometryType = Argument.GEOMETRY_MULTIPOINT;
				break;
		}
	}
/**
* Unpack a string from a byte buffer. Trailing whitespace or null bytes are
* removed from string.
* @param buf is buffer to unpack string from.
* @param offset is offset to begin unpacking in buffer
* @param length is number of bytes to add
* @return unpacked string
*/
private String unpackString(byte []buf, int offset, int length)
throws MapyrusException
{
String retval;
int i = offset + length - 1;
while (i >= offset && (buf[i] == 0 || Character.isWhitespace((char)buf[i])))
i
if (i < offset)
retval = "";
else if (m_encoding != null)
{
try
{
retval = new String(buf, offset, i - offset + 1, m_encoding);
}
catch (UnsupportedEncodingException e)
{
throw new MapyrusException(MapyrusMessages.get(MapyrusMessages.INVALID_CHARSET) +
": " + m_encoding + ": " + e.getMessage());
}
}
else
retval = new String(buf, offset, i - offset + 1);
return(retval);
}
	/*
	 * Read header from DBF database file, building the list of attribute
	 * fields and marking which of them the user wants fetched.
	 *
	 * dbfFieldnameList is the set of field names to fetch, or null to
	 * fetch every field.
	 */
	private void readDBFHeader(HashSet<String> dbfFieldnameList)
		throws IOException, MapyrusException
	{
		int headerLength, nTotalFields;
		String fieldName;
		int i;
		int fieldIndex;
		byte dbfField[];
		ArrayList<byte []> dbfFields = new ArrayList<byte []>();
		int nBytesRead;
		boolean fetchStatus;

		nTotalFields = m_nDBFFieldsToFetch = headerLength = nBytesRead = 0;
		m_DBFFieldsToFetch = new ArrayList<Boolean>();
		if (m_DBFStream != null)
		{
			/*
			 * Fixed-size prologue: skip first 4 bytes, read record count,
			 * header length and record length, then skip 20 reserved bytes.
			 */
			m_DBFStream.skipBytes(4);
			readLittleEndianInt(m_DBFStream); /* number of DBF records */
			headerLength = readLittleEndianShort(m_DBFStream);
			m_DBFRecordLength = readLittleEndianShort(m_DBFStream);
			m_DBFStream.skipBytes(20);
			nBytesRead = 32;

			/*
			 * Read record describing each field.  The descriptor list is
			 * terminated by a single sentinel byte.
			 */
			do
			{
				dbfField = new byte[32];
				dbfField[0] = (byte)(m_DBFStream.read());
				nBytesRead++;
				if (dbfField[0] != DBF_HEADER_SENTINEL)
				{
					// NOTE(review): return value of read() is ignored, so a
					// truncated header would be silently accepted.
					m_DBFStream.read(dbfField, 1, dbfField.length - 1);
					fieldName = unpackString(dbfField, 0, 11);

					/*
					 * Build list of flags indicating which fields we'll
					 * be fetching for the user.
					 */
					fetchStatus = (dbfFieldnameList == null ||
						dbfFieldnameList.contains(fieldName));
					m_DBFFieldsToFetch.add(Boolean.valueOf(fetchStatus));
					if (fetchStatus)
						m_nDBFFieldsToFetch++;
					nBytesRead += dbfField.length - 1;

					dbfFields.add(dbfField);
					nTotalFields++;
				}
			}
			while (dbfField[0] != DBF_HEADER_SENTINEL);
		}

		/*
		 * Add one extra field to end of field list for the geometry.
		 */
		m_fieldNames = new String[m_nDBFFieldsToFetch + 1];
		m_fieldTypes = new int[m_nDBFFieldsToFetch + 1];
		m_DBFFieldTypes = new int[nTotalFields];
		m_DBFFieldLengths = new int[nTotalFields];
		m_fieldNames[m_nDBFFieldsToFetch] = "GEOMETRY";
		m_fieldTypes[m_nDBFFieldsToFetch] = Argument.GEOMETRY;

		/*
		 * Read description of each field.
		 */
		for (i = fieldIndex = 0; i < nTotalFields; i++)
		{
			dbfField = (byte [])(dbfFields.get(i));
			m_DBFFieldTypes[i] = dbfField[11];

			/*
			 * Length is unsigned byte value.
			 */
			if (dbfField[16] >= 0)
				m_DBFFieldLengths[i] = dbfField[16];
			else
				m_DBFFieldLengths[i] = 256 + dbfField[16];

			/*
			 * Unpack field information if we are going to be fetching this field.
			 */
			if (((Boolean)m_DBFFieldsToFetch.get(i)).booleanValue())
			{
				/*
				 * Extract null terminated field name.
				 */
				m_fieldNames[fieldIndex] = unpackString(dbfField, 0, 11);

				/*
				 * Convert shape field type to our representation of field types.
				 */
				switch (m_DBFFieldTypes[i])
				{
					case DBF_CHARACTER:
					case DBF_DATE:
						m_fieldTypes[fieldIndex] = Argument.STRING;
						break;
					case DBF_LOGICAL:
					case DBF_NUMBER:
					case DBF_FLOATING:
						m_fieldTypes[fieldIndex] = Argument.NUMERIC;
						break;
				}
				fieldIndex++;
			}
		}

		/*
		 * Leave DBF file at position of first record.
		 */
		int skipBytes = headerLength - nBytesRead;
		if (skipBytes > 0)
			m_DBFStream.skipBytes(skipBytes);
	}
/**
* @see org.mapyrus.dataset.GeographicDataset#getProjection()
*/
public String getProjection()
{
return m_projection;
}
/**
* @see org.mapyrus.dataset.GeographicDataset#getMetadata()
*/
public Hashtable<String, String> getMetadata()
{
return(null);
}
/**
* @see org.mapyrus.dataset.GeographicDataset#getFieldNames()
*/
public String[] getFieldNames()
{
return(m_fieldNames);
}
/**
* @see org.mapyrus.dataset.GeographicDataset#getWorlds()
*/
public Rectangle2D.Double getWorlds()
{
return(m_extents);
}
	/**
	 * Read next shape from shapefile that is inside or crossing the query extents.
	 * Attribute fields requested by the user are unpacked from the matching
	 * DBF record and the geometry is appended as the final field.
	 * @return next matching row, or null when no further shapes match.
	 */
	public Row fetch() throws MapyrusException
	{
		int recordLength;
		double x, y, lastX, lastY, xMin, yMin, xMax, yMax;
		double fieldValue;
		int i, shapeType;
		int nBytes, nParts, nPoints, partIndex, pathIndex;
		boolean shapeInExtents = false;
		Row row;
		double path[] = null;

		try
		{
			/*
			 * Keep reading until we get a shape inside the extents or we reach
			 * the end of the file.
			 */
			row = new Row();
			while (!shapeInExtents && m_BytesRead < m_shapeFileLength)
			{
				/*
				 * Read header for next shape. Convert record length to byte length.
				 */
				m_shapeStream.readInt(); /* record number */
				recordLength = m_shapeStream.readInt() * 2;
				shapeType = readLittleEndianInt(m_shapeStream);
				nBytes = 4;
				if (shapeType == 0)
				{
					/*
					 * A null shape.  Null shapes always match the query.
					 */
					path = Argument.emptyGeometry.getGeometryValue();
					shapeInExtents = true;
				}
				else if (m_shapeFileType == POINT || m_shapeFileType == POINT_Z ||
					m_shapeFileType == POINT_M)
				{
					/*
					 * Read point coordinates, see if they are inside
					 * query extents. Skip Z and Measure values for 3D shapes.
					 */
					path = new double[5];
					path[0] = Argument.GEOMETRY_POINT;
					path[1] = 1;
					path[2] = Argument.MOVETO;
					path[3] = readLittleEndianDouble(m_shapeStream);
					path[4] = readLittleEndianDouble(m_shapeStream);
					nBytes += 16;

					/*
					 * Accept points on query boundary rectangle, reject anything outside.
					 */
					shapeInExtents = (m_queryExtents.outcode(path[3], path[4]) == 0);
				}
				else if (m_shapeFileType == POLYLINE || m_shapeFileType == POLYGON ||
					m_shapeFileType == POLYLINE_Z || m_shapeFileType == POLYGON_Z ||
					m_shapeFileType == POLYLINE_M || m_shapeFileType == POLYGON_M ||
					m_shapeFileType == MULTIPATCH)
				{
					/*
					 * Read bounding box of polyline or polygon.
					 * Find if it intersects with query extents.
					 */
					xMin = readLittleEndianDouble(m_shapeStream);
					yMin = readLittleEndianDouble(m_shapeStream);
					xMax = readLittleEndianDouble(m_shapeStream);
					yMax = readLittleEndianDouble(m_shapeStream);
					nBytes += 4 * 8;
					shapeInExtents = Geometry.overlaps(m_queryExtents, xMin, yMin, xMax, yMax);
					if (shapeInExtents)
					{
						/*
						 * Read polyline or polygon coordinates.
						 * "parts" holds the starting point index of each part.
						 */
						nParts = readLittleEndianInt(m_shapeStream);
						nPoints = readLittleEndianInt(m_shapeStream);
						nBytes += 2 * 4;
						int []parts = new int[nParts];
						for (i = 0; i < nParts; i++)
							parts[i] = readLittleEndianInt(m_shapeStream);
						nBytes += nParts * 4;

						/*
						 * Skip part type information in multi-patch files.
						 */
						if (m_shapeFileType == MULTIPATCH)
						{
							m_shapeStream.skipBytes(nParts * 4);
							nBytes += nParts * 4;
						}

						path = new double[2 + nParts * 2 + nPoints * 3];
						int counter = 0;
						int counterIndex = 0;
						boolean isPolyline = (m_shapeFileType == POLYLINE ||
							m_shapeFileType == POLYLINE_M || m_shapeFileType == POLYLINE_Z);

						/*
						 * Polylines in shape file may be separate LINESTRING geometries.
						 * Always return a MULTILINESTRING for polylines (even if it is
						 * only one segment) so geometry type remains consistent.
						 */
						if (isPolyline)
						{
							path[0] = Argument.GEOMETRY_MULTILINESTRING;
							path[1] = nParts;
						}
						else
						{
							path[0] = Argument.GEOMETRY_POLYGON;
						}
						partIndex = 0;
						pathIndex = 2;
						lastX = lastY = Double.MAX_VALUE;
						for (i = 0; i < nPoints; i++)
						{
							/*
							 * Add next coordinates, as either a moveto or lineto.
							 */
							x = readLittleEndianDouble(m_shapeStream);
							y = readLittleEndianDouble(m_shapeStream);
							nBytes += 2 * 8;
							if (partIndex < nParts && parts[partIndex] == i)
							{
								/*
								 * This point starts a new part.
								 */
								if (isPolyline)
								{
									if (partIndex > 0)
									{
										/*
										 * Set number of points in last part, allowing for duplicate
										 * points that were skipped.
										 */
										path[counterIndex] = counter;
									}
									counter = 0;
									path[pathIndex] = Argument.GEOMETRY_LINESTRING;
									counterIndex = pathIndex + 1;
									pathIndex += 2;
								}
								path[pathIndex] = Argument.MOVETO;
								pathIndex++;
								partIndex++;
							}
							else if (x == lastX && y == lastY)
							{
								/*
								 * Skip duplicate points.
								 */
								continue;
							}
							else
							{
								path[pathIndex] = Argument.LINETO;
								pathIndex++;
							}
							path[pathIndex] = lastX = x;
							path[pathIndex + 1] = lastY = y;
							pathIndex += 2;
							counter++;
						}

						/*
						 * Finally set number of points polygon or polyline, allowing
						 * for duplicate points that were skipped.
						 */
						if (isPolyline)
							path[counterIndex] = counter;
						else
							path[1] = counter;
					}
					else
					{
						/*
						 * Shape is outside query extents, skip it.
						 */
					}
				}
				else if (m_shapeFileType == MULTIPOINT || m_shapeFileType == MULTIPOINT_Z ||
					m_shapeFileType == MULTIPOINT_M)
				{
					/*
					 * Read bounding box of points.
					 * Find if it intersects with query extents.
					 */
					xMin = readLittleEndianDouble(m_shapeStream);
					yMin = readLittleEndianDouble(m_shapeStream);
					xMax = readLittleEndianDouble(m_shapeStream);
					yMax = readLittleEndianDouble(m_shapeStream);
					nBytes += 4 * 8;
					shapeInExtents = Geometry.overlaps(m_queryExtents, xMin, yMin, xMax, yMax);
					if (shapeInExtents)
					{
						nPoints = readLittleEndianInt(m_shapeStream);
						nBytes += 4;

						/*
						 * Read each of the points and add them to the path.
						 */
						path = new double[nPoints * 5 + 2];
						path[0] = Argument.GEOMETRY_MULTIPOINT;
						path[1] = nPoints;
						pathIndex = 2;
						for (i = 0; i < nPoints; i++)
						{
							path[pathIndex] = Argument.GEOMETRY_POINT;
							path[pathIndex + 1] = 1;
							path[pathIndex + 2] = Argument.MOVETO;
							path[pathIndex + 3] = readLittleEndianDouble(m_shapeStream);
							path[pathIndex + 4] = readLittleEndianDouble(m_shapeStream);
							pathIndex += 5;
							nBytes += 16;
						}
					}
				}

				/*
				 * Skip until end of this record in shape file.
				 * The 8 added bytes account for the record header.
				 */
				if (nBytes < recordLength)
					m_shapeStream.skipBytes(recordLength - nBytes);
				m_BytesRead += recordLength + 8;

				/*
				 * If user wants any attribute fields then read them for this shape.
				 * Don't bother unpacking them if we are skipping this shape.
				 * m_nDBFFieldsToFetch is only non-zero when a DBF stream exists.
				 */
				if (m_nDBFFieldsToFetch > 0)
				{
					m_DBFStream.read(m_DBFRecord);
					while (m_DBFRecord[0] == DBF_DELETED_RECORD)
					{
						/*
						 * Skip deleted records.
						 */
						m_DBFStream.read(m_DBFRecord);
					}
				}
				if (shapeInExtents)
				{
					if (m_nDBFFieldsToFetch > 0)
					{
						/*
						 * Offset 0 is the deleted-record flag; fields start at 1.
						 */
						int recordOffset = 1;
						for (i = 0; i < m_DBFFieldTypes.length; i++)
						{
							Argument arg = null;

							/*
							 * Only unpack fields that user asked for.
							 */
							if (((Boolean)m_DBFFieldsToFetch.get(i)).booleanValue())
							{
								if (m_DBFFieldTypes[i] == DBF_CHARACTER ||
									m_DBFFieldTypes[i] == DBF_DATE)
								{
									arg = new Argument(Argument.STRING,
										unpackString(m_DBFRecord, recordOffset,
										m_DBFFieldLengths[i]));
								}
								else if (m_DBFFieldTypes[i] == DBF_NUMBER ||
									m_DBFFieldTypes[i] == DBF_FLOATING)
								{
									String s = unpackString(m_DBFRecord,
										recordOffset, m_DBFFieldLengths[i]);
									try
									{
										fieldValue = Double.parseDouble(s);
									}
									catch (NumberFormatException e)
									{
										/*
										 * Unparseable numbers become zero rather
										 * than aborting the fetch.
										 */
										fieldValue = 0.0;
									}
									arg = new Argument(fieldValue);
								}
								else if (m_DBFFieldTypes[i] == DBF_LOGICAL)
								{
									switch ((char)m_DBFRecord[recordOffset])
									{
										case 'y':
										case 'Y':
										case 'T':
										case 't':
											arg = Argument.numericOne;
											break;
										default:
											arg = Argument.numericZero;
											break;
									}
								}
								row.add(arg);
							}
							recordOffset += m_DBFFieldLengths[i];
						}
					}

					/*
					 * Add geometry as final field.
					 */
					row.add(new Argument(m_geometryType, path));
				}
			}
		}
		catch (IOException e)
		{
			throw new MapyrusException(e.getMessage());
		}

		/*
		 * Return next row, or null if we did not find one.
		 */
		if (shapeInExtents)
			return(row);
		else
			return(null);
	}
	/**
	 * Closes dataset, releasing both underlying file streams.
	 * @throws MapyrusException if either stream fails to close.
	 */
	public void close() throws MapyrusException
	{
		/*
		 * Always close both files being read.
		 */
		try
		{
			m_shapeStream.close();
		}
		catch (IOException e)
		{
			throw new MapyrusException(e.getMessage());
		}
		finally
		{
			/*
			 * NOTE(review): if both streams fail to close, the exception
			 * thrown from this finally block replaces the one thrown above,
			 * so the shape stream failure is lost.
			 */
			try
			{
				if (m_DBFStream != null)
					m_DBFStream.close();
			}
			catch (IOException e)
			{
				throw new MapyrusException(e.getMessage());
			}
		}
	}
} |
package org.ojalgo.optimisation;
import java.io.Serializable;
import java.math.BigDecimal;
import java.math.RoundingMode;
import org.ojalgo.ProgrammingError;
import org.ojalgo.access.Access1D;
import org.ojalgo.array.Array1D;
import org.ojalgo.netio.BasicLogger;
import org.ojalgo.optimisation.integer.IntegerSolver;
import org.ojalgo.type.CalendarDateUnit;
import org.ojalgo.type.TypeUtils;
import org.ojalgo.type.context.NumberContext;
public interface Optimisation {
    /**
     * Constraint: something that has (optional) lower and upper limits.
     *
     * @author apete
     */
    public static interface Constraint extends Optimisation {

        /**
         * May return null
         */
        BigDecimal getLowerLimit();

        /**
         * May return null
         */
        BigDecimal getUpperLimit();

        /**
         * The Constraint has a lower or an upper limit actually set (possibly both) - it actually is
         * constrained.
         */
        boolean isConstraint();

        /**
         * The Constraint has both a lower limit and an upper limit, and they are equal.
         */
        boolean isEqualityConstraint();

        /**
         * The Constraint has a lower limit, and the upper limit (if it exists) is different.
         */
        boolean isLowerConstraint();

        /**
         * The Constraint has an upper limit, and the lower limit (if it exists) is different.
         */
        boolean isUpperConstraint();

    }
    /**
     * Couples a model type with a solver type: an Integration can build a
     * solver for a model instance and translate solution state between the
     * model and solver representations.
     *
     * @param <M> the model type this integration handles
     * @param <S> the solver type this integration produces
     */
    public static interface Integration<M extends Optimisation.Model, S extends Optimisation.Solver> extends Optimisation {

        /**
         * An integration must be able to instantiate a solver that can handle (any) model instance.
         */
        S build(M model);

        /**
         * Extract state from the model and convert it to solver state.
         */
        Optimisation.Result extractSolverState(M model);

        /**
         * @return true if this solver (integration) can handle the input model
         */
        boolean isCapable(M model);

        /**
         * Convert solver state to model state.
         */
        Optimisation.Result toModelState(Optimisation.Result solverState, M model);

        /**
         * Convert model state to solver state.
         */
        Optimisation.Result toSolverState(Optimisation.Result modelState, M model);

    }
    /**
     * An optimisation model that can be maximised or minimised.
     */
    public static interface Model extends Optimisation {

        /**
         * Cleanup when a model instance is no longer needed. The default implementation does nothing.
         */
        default void dispose() {
            ;
        }

        Optimisation.Result maximise();

        Optimisation.Result minimise();

        /**
         * Validate the model structure and the current variable values.
         *
         * @return true if everything is ok; false if the model is structurally ok, but the "value"
         *         breaks constraints - the solution is infeasible.
         */
        boolean validate();

    }
    /**
     * Objective: something that may contribute to the objective function.
     *
     * @author apete
     */
    public static interface Objective extends Optimisation {

        /**
         * May return null
         */
        BigDecimal getContributionWeight();

        /**
         * @return true if this Objective has a non zero contribution weight - it actually is contributing to
         *         the objective function.
         */
        boolean isObjective();

    }
public static final class Options implements Optimisation, Cloneable {
    /**
     * If this is null nothing is printed, if it is not null then debug statements are printed to that
     * {@linkplain BasicLogger.Printer}.
     */
    public BasicLogger.Printer debug_appender = null;
    /**
     * Which {@linkplain Solver} to debug. Null means NO solvers. This setting is only relevant if
     * {@link #debug_appender} has been set.
     */
    public Class<? extends Optimisation.Solver> debug_solver = null;
    /**
     * Used to determine if a variable value is integer or not.
     */
    public NumberContext integer = new NumberContext(12, 7, RoundingMode.HALF_EVEN);
    /**
     * The maximum number of iterations allowed for the solve() command.
     */
    public int iterations_abort = Integer.MAX_VALUE;
    /**
     * Calculations will be terminated after this number of iterations if a feasible solution has been
     * found. If no feasible solution has been found calculations will continue until one is found or
     * {@linkplain #iterations_abort} is reached. This option is, probably, only of interest with the
     * {@linkplain IntegerSolver}.
     */
    public int iterations_suffice = Integer.MAX_VALUE;
    /**
     * The (relative) MIP gap is the difference between the best integer solution found so far and a
     * node's non-integer solution, relative to the optimal value. If the gap is smaller than this value,
     * then the corresponding branch is terminated as it is deemed unlikely or too "expensive" to find
     * better integer solutions there.
     */
    public double mip_gap = 1.0E-4;
    /**
     * Used to compare/check objective function values (incl. temporary, phase 1, objectives). The most
     * important use of this parameter is, with the linear (simplex) solver, to determine if the phase 1
     * objective function value is zero or not. Thus it is used to determine if the problem is feasible or
     * not.
     * <ul>
     * <li>2015-01-30: Changed from 12,7 to 12,8 to be able to handle LinearProblems.testP20150127()</li>
     * </ul>
     */
    public NumberContext objective = new NumberContext(12, 8, RoundingMode.HALF_EVEN);
    /**
     * For display only!
     */
    public NumberContext print = NumberContext.getGeneral(8, 10);
    /**
     * Problem parameters; constraints and objectives The numbers used to state/describe the problem,
     * incl. when/if these are transformed during the solution algorithm.
     * <ul>
     * <li>2014-09-29: Changed from 11,9 to 12,8</li>
     * </ul>
     */
    public NumberContext problem = new NumberContext(12, 8, RoundingMode.HALF_EVEN);
    /**
     * Used to determine if a constraint is violated or not. Essentially this context determines if the
     * various validate(...) methods will return true or false. Calculate the slack - zero if the
     * constraint is "active" - and check the sign.
     * <ul>
     * <li>2015-09-05: Changed from 14,8 to 12,8 (the "8" can/should probably be increased)</li>
     * <li>2015-09-09: Changed from 12,8 to 10,8 (the "8" can only be increased if some test cases are
     * rewritten)</li>
     * </ul>
     */
    public NumberContext slack = new NumberContext(10, 8, RoundingMode.HALF_DOWN);
    /**
     * Used when copying the solver's solution back to the model (converting from double to BigDecimal).
     * Variable values, dual variable values, lagrange multipliers...
     */
    public NumberContext solution = new NumberContext(12, 14, RoundingMode.HALF_DOWN);
    /**
     * The maximum number of millis allowed for the solve() command. Executions will be aborted
     * regardless of if a solution has been found or not.
     */
    public long time_abort = CalendarDateUnit.MILLENIUM.size();
    /**
     * Calculations will be terminated after this amount of time if a feasible solution has been found. If
     * no feasible solution has been found calculations will continue until one is found or
     * {@linkplain #time_abort} is reached. This option is, probably, only of interest with the
     * {@linkplain IntegerSolver}.
     */
    public long time_suffice = CalendarDateUnit.DAY.size();
    /**
     * If true models and solvers will validate data at various points. Validation is turned off by
     * default. Turning it on will significantly slow down execution - even very expensive validation may
     * be performed.
     */
    public boolean validate = false;
    public Options() {
        super();
    }
    /**
     * @return a shallow copy of these options, or null if cloning fails (it should not - this class
     *         implements {@link Cloneable})
     */
    public Options copy() {
        try {
            return (Options) this.clone();
        } catch (final CloneNotSupportedException anException) {
            return null;
        }
    }
    /**
     * Will set {@link #debug_appender} to BasicLogger#DEBUG, {@link #debug_solver} to solver and
     * {@link #validate} to true.
     *
     * @param solver
     */
    public void debug(final Class<? extends Optimisation.Solver> solver) {
        debug_appender = BasicLogger.DEBUG;
        debug_solver = solver;
        validate = true;
    }
    @Override
    protected Object clone() throws CloneNotSupportedException {
        return super.clone();
    }
}
public static final class Result implements Optimisation, Access1D<BigDecimal>, Comparable<Optimisation.Result>, Serializable {

    private final Access1D<?> mySolution;
    private final Optimisation.State myState;
    /** Objective function value; {@link Double#NaN} when the value is unknown/not supplied. */
    private final double myValue;

    public Result(final Optimisation.State state, final Access1D<?> solution) {
        this(state, Double.NaN, solution);
    }

    /**
     * @param state the solver/model state, must not be null
     * @param value the objective function value (may be NaN)
     * @param solution the solution vector, must not be null
     */
    public Result(final Optimisation.State state, final double value, final Access1D<?> solution) {
        super();
        ProgrammingError.throwIfNull(state);
        ProgrammingError.throwIfNull(solution);
        myState = state;
        myValue = value;
        mySolution = solution;
    }

    public Result(final Optimisation.State state, final Optimisation.Result result) {
        this(state, result.getValue(), result);
    }

    /**
     * Orders results by their objective function value only.
     * <p>
     * Uses {@link Double#compare(double, double)} so that NaN (an allowed value, see the two-argument
     * constructor) is ordered consistently. The previous {@code >}/{@code <} comparison made NaN compare
     * "equal" to every other value, which violates the {@link Comparable} contract (transitivity).
     */
    public int compareTo(final Result reference) {
        return Double.compare(myValue, reference.getValue());
    }

    public long count() {
        return mySolution.count();
    }

    public double doubleValue(final long index) {
        return mySolution.doubleValue(index);
    }

    /**
     * Equality is based on state and objective value only - the solution vector is deliberately not
     * part of the comparison (consistent with {@link #hashCode()}).
     */
    @Override
    public boolean equals(final Object obj) {
        if (this == obj) {
            return true;
        }
        if ((obj == null) || (this.getClass() != obj.getClass())) {
            return false;
        }
        final Result other = (Result) obj;
        if (myState != other.myState) {
            return false;
        }
        if (Double.doubleToLongBits(myValue) != Double.doubleToLongBits(other.myValue)) {
            return false;
        }
        return true;
    }

    public BigDecimal get(final long index) {
        return TypeUtils.toBigDecimal(mySolution.get(index));
    }

    public Optimisation.State getState() {
        return myState;
    }

    /**
     * Objective Function Value
     */
    public double getValue() {
        return myValue;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = (prime * result) + ((myState == null) ? 0 : myState.hashCode());
        long temp;
        temp = Double.doubleToLongBits(myValue);
        result = (prime * result) + (int) (temp ^ (temp >>> 32));
        return result;
    }

    public int size() {
        // solutions are assumed to fit in an int-indexed structure
        return (int) this.count();
    }

    @Override
    public String toString() {
        return myState + " " + myValue + " @ " + Array1D.PRIMITIVE.copy(mySolution);
    }
}
/**
 * <p>
 * An {@linkplain Optimisation.Solver} instance implements a specific optimisation algorithm. Typically
 * each algorithm solves problems of (at least) one problem category. {@linkplain Optimisation.Model}
 * represents a problem category.
 * </p>
 * <p>
 * A solver internally works with primitive double.
 * </p>
 *
 * @author apete
 */
public static interface Solver extends Optimisation {
    /**
     * Cleanup when a solver instance is no longer needed. The default implementation does nothing.
     */
    default void dispose() {
        ;
    }
    /**
     * Solve without a kick-starter (suggested starting solution).
     */
    default Optimisation.Result solve() {
        return this.solve(null);
    }
    /**
     * @param kickStarter a suggested starting point/solution for the solver, may be null
     */
    Optimisation.Result solve(Optimisation.Result kickStarter);
}
public static enum State implements Optimisation {

    /**
     * Approximate and/or intermediate solution - an iteration point. Probably infeasible, but still "good".
     */
    APPROXIMATE(8),

    /**
     * Unique (and optimal) solution - no other solution is equal or better.
     */
    DISTINCT(256),

    /**
     * Unexpected failure or exception.
     */
    FAILED(-1),

    /**
     * Solved - a solution that complies with all constraints.
     */
    FEASIBLE(16),

    /**
     * No solution that complies with all constraints exists.
     */
    INFEASIBLE(-8),

    /**
     * The problem/model is infeasible, unbounded or otherwise invalid.
     */
    INVALID(-2),

    /**
     * Optimal solution - there is no better.
     */
    OPTIMAL(64),

    /**
     * There's an infinite number of feasible solutions and no bound on the objective function value.
     */
    UNBOUNDED(-32),

    /**
     * New/changed problem.
     */
    UNEXPLORED(0),

    /**
     * Model entities and solver components (matrices) are valid.
     */
    VALID(4);

    /** Sign encodes success (&gt;0) / failure (&lt;0); magnitude encodes "how good". */
    private final int myValue;

    State(final int value) {
        myValue = value;
    }

    public boolean isApproximate() {
        if (this == APPROXIMATE) {
            return true;
        }
        return this.isFeasible();
    }

    public boolean isDistinct() {
        return this.magnitude() >= DISTINCT.magnitude();
    }

    /**
     * FAILED, INVALID, INFEASIBLE or UNBOUNDED
     */
    public boolean isFailure() {
        return myValue < 0;
    }

    public boolean isFeasible() {
        return this.magnitude() >= FEASIBLE.magnitude();
    }

    public boolean isOptimal() {
        return this.magnitude() >= OPTIMAL.magnitude();
    }

    /**
     * VALID, APPROXIMATE, FEASIBLE, OPTIMAL or DISTINCT
     */
    public boolean isSuccess() {
        return myValue > 0;
    }

    /**
     * UNEXPLORED
     */
    public boolean isUnexplored() {
        return myValue == 0;
    }

    public boolean isValid() {
        return this.magnitude() >= VALID.magnitude();
    }

    /** Quality ranking regardless of the success/failure sign. */
    private int magnitude() {
        return Math.abs(myValue);
    }
}
} |
package org.opencms.jsp;
import org.opencms.file.CmsFile;
import org.opencms.file.collectors.I_CmsResourceCollector;
import org.opencms.flex.CmsFlexController;
import org.opencms.i18n.CmsEncoder;
import org.opencms.i18n.CmsLocaleManager;
import org.opencms.jsp.util.CmsJspContentLoadBean;
import org.opencms.loader.CmsDefaultFileNameGenerator;
import org.opencms.main.CmsException;
import org.opencms.main.CmsIllegalArgumentException;
import org.opencms.main.OpenCms;
import org.opencms.util.CmsMacroResolver;
import org.opencms.util.CmsStringUtil;
import org.opencms.workplace.editors.directedit.CmsDirectEditButtonSelection;
import org.opencms.workplace.editors.directedit.CmsDirectEditMode;
import org.opencms.workplace.editors.directedit.CmsDirectEditParams;
import org.opencms.xml.I_CmsXmlDocument;
import org.opencms.xml.content.CmsXmlContentFactory;
import java.util.Iterator;
import java.util.Locale;
import javax.servlet.jsp.JspException;
import javax.servlet.jsp.PageContext;
import javax.servlet.jsp.tagext.Tag;
/**
 * Implementation of the <code>&lt;cms:contentload/&gt;</code> tag,
 * used to access and display XML content item information from the VFS.<p>
 *
 * Since version 7.0.2 it is also possible to store the results of the content load in the JSP context
 * using a {@link CmsJspContentLoadBean}. Using this bean the loaded XML content objects can be accessed
 * directly using the JSP EL and the JSTL. To use this feature, you need to add the <code>var</code> (and optionally
 * the <code>scope</code>) parameter to the content load tag. For example, if a parameter like
 * <code>var="myVarName"</code> is provided, then the result of the content load is stored in the JSP
 * context variable <code>myVarName</code> with an instance of a {@link CmsJspContentLoadBean}.<p>
 *
 * @since 6.0.0
 */
public class CmsJspTagContentLoad extends CmsJspTagResourceLoad implements I_CmsXmlContentContainer {
    /** Serial version UID required for safe serialization. */
    private static final long serialVersionUID = 981176995635225294L;
    /** Reference to the last loaded content element (transient: not part of the tag's serialized state). */
    private transient I_CmsXmlDocument m_content;
    /**
     * The locale to use for displaying the current content.<p>
     *
     * Initially, this is equal to the locale set using <code>{@link #setLocale(String)}</code>.
     * However, the content locale may change in case a loaded XML content does not have the selected locale available.
     * In this case the next default locale that is available in the content will be used as content locale.<p>
     */
    private Locale m_contentLocale;
    /** The "direct edit" button selection to use for the 2nd to the last element. */
    private CmsDirectEditButtonSelection m_directEditFollowButtons;
    /** The link for creation of a new element, specified by the selected collector. */
    private String m_directEditLinkForNew;
    /** The direct edit mode. */
    private CmsDirectEditMode m_directEditMode;
    /** Indicates if the last element was direct editable (i.e. a direct edit block is still open). */
    private boolean m_directEditOpen;
    /** The edit empty tag attribute. */
    private boolean m_editEmpty;
    /** Indicates if this is the first content iteration loop. */
    private boolean m_isFirstLoop;
    /** Reference to the currently selected locale. */
    private Locale m_locale;
    /** Post-create handler class. */
    private String m_postCreateHandler;
    /**
     * Empty constructor, required for JSP tags.<p>
     */
    public CmsJspTagContentLoad() {
        super();
    }
/**
 * Constructor used when using <code>contentload</code> from scriptlet code.<p>
 *
 * @param container the parent content container (could be a preloader)
 * @param context the JSP page context
 * @param collectorName the collector name to use
 * @param collectorParam the collector param to use
 * @param locale the locale to use
 * @param editable indicates if "direct edit" support is wanted
 *
 * @throws JspException in case something goes wrong
 */
public CmsJspTagContentLoad(
    I_CmsXmlContentContainer container,
    PageContext context,
    String collectorName,
    String collectorParam,
    Locale locale,
    boolean editable)
throws JspException {
    // delegate to the full constructor without paging attributes
    this(container, context, collectorName, collectorParam, null, null, locale, editable);
}
/**
 * Constructor used when using <code>contentload</code> from scriptlet code.<p>
 *
 * @param container the parent content container (could be a preloader)
 * @param context the JSP page context
 * @param collectorName the collector name to use
 * @param collectorParam the collector param to use
 * @param pageIndex the display page index (may contain macros)
 * @param pageSize the display page size (may contain macros)
 * @param locale the locale to use
 * @param editable indicates if "direct edit" support is wanted
 *
 * @throws JspException in case something goes wrong
 */
public CmsJspTagContentLoad(
    I_CmsXmlContentContainer container,
    PageContext context,
    String collectorName,
    String collectorParam,
    String pageIndex,
    String pageSize,
    Locale locale,
    boolean editable)
throws JspException {
    // translate the boolean to a direct edit mode and delegate
    this(
        container,
        context,
        collectorName,
        collectorParam,
        pageIndex,
        pageSize,
        locale,
        CmsDirectEditMode.valueOf(editable));
}
/**
 * Constructor used when using <code>contentload</code> from scriptlet code.<p>
 *
 * @param container the parent content container (could be a preloader)
 * @param context the JSP page context
 * @param collectorName the collector name to use
 * @param collectorParam the collector param to use
 * @param pageIndex the display page index (may contain macros)
 * @param pageSize the display page size (may contain macros)
 * @param locale the locale to use
 * @param editMode indicates which "direct edit" mode is wanted
 *
 * @throws JspException in case something goes wrong
 */
public CmsJspTagContentLoad(
    I_CmsXmlContentContainer container,
    PageContext context,
    String collectorName,
    String collectorParam,
    String pageIndex,
    String pageSize,
    Locale locale,
    CmsDirectEditMode editMode)
throws JspException {
    setCollector(collectorName);
    setParam(collectorParam);
    setPageIndex(pageIndex);
    setPageSize(pageSize);
    m_locale = locale;
    m_contentLocale = locale;
    m_directEditMode = editMode;
    // a tag created from scriptlet code never acts as a preloader
    m_preload = false;
    // the page context must be set before init() accesses the request
    setPageContext(context);
    init(container);
}
/**
 * @see javax.servlet.jsp.tagext.Tag#doStartTag()
 */
@Override
public int doStartTag() throws JspException, CmsIllegalArgumentException {

    // look for an enclosing content container tag - only a preloader is useful here
    I_CmsXmlContentContainer container = null;
    Tag parent = findAncestorWithClass(this, I_CmsXmlContentContainer.class);
    if (parent instanceof I_CmsXmlContentContainer) {
        I_CmsXmlContentContainer candidate = (I_CmsXmlContentContainer)parent;
        if (candidate.isPreloader()) {
            // use the preloaded values from this ancestor container
            container = candidate;
        }
    }
    // initialize the content load tag
    init(container);
    hasMoreResources();
    return isScopeVarSet() ? SKIP_BODY : EVAL_BODY_INCLUDE;
}
/**
 * Returns the editable flag.<p>
 *
 * @return the editable flag (the string form of the direct edit mode, or "" if none is set)
 */
public String getEditable() {
    return m_directEditMode != null ? m_directEditMode.toString() : "";
}
/**
 * Returns the locale.<p>
 *
 * @return the locale as a String, or "" if no locale has been selected
 */
public String getLocale() {
    return (m_locale != null) ? m_locale.toString() : "";
}
/**
 * @see org.opencms.jsp.I_CmsXmlContentContainer#getXmlDocument()
 */
public I_CmsXmlDocument getXmlDocument() {
    // the last content loaded by doLoadNextFile()
    return m_content;
}
/**
 * @see org.opencms.jsp.I_CmsXmlContentContainer#getXmlDocumentElement()
 */
public String getXmlDocumentElement() {
    // value must be set in "loop" or "show" class
    return null;
}
/**
 * @see org.opencms.jsp.I_CmsXmlContentContainer#getXmlDocumentLocale()
 */
public Locale getXmlDocumentLocale() {
    // may differ from m_locale if the content did not have the selected locale
    return m_contentLocale;
}
/**
 * @see org.opencms.jsp.I_CmsXmlContentContainer#hasMoreResources()
 *
 * Advances the iteration: closes a previously opened direct edit block, loads the
 * next content file (unless preloading) and opens the direct edit block for it.
 */
@Override
public boolean hasMoreResources() throws JspException {

    // check if there are more files to iterate
    boolean hasMoreContent = m_collectorResult.size() > 0;
    if (m_isFirstLoop) {
        m_isFirstLoop = false;
        // empty result: optionally insert the "edit empty" element, but only if the
        // collector provided a create link containing a number macro
        if (!hasMoreContent
            && m_editEmpty
            && ((m_directEditLinkForNew != null) && CmsDefaultFileNameGenerator.hasNumberMacro(m_directEditLinkForNew))) {
            try {
                CmsJspTagEditable.insertEditEmpty(pageContext, this, m_directEditMode);
            } catch (CmsException e) {
                throw new JspException(e);
            }
        }
    } else {
        if (m_directEditOpen) {
            // last element was direct editable, close it
            CmsJspTagEditable.endDirectEdit(pageContext);
            m_directEditOpen = false;
        }
    }
    if (isPreloader()) {
        // if in preload mode, no result is required
        return false;
    }
    if (hasMoreContent) {
        // there are more results available...
        try {
            doLoadNextFile();
        } catch (CmsException e) {
            m_controller.setThrowable(e, m_resourceName);
            throw new JspException(e);
        }
        // check "direct edit" support
        if (m_directEditMode.isEnabled() && (m_resourceName != null)) {
            // check options for first element
            CmsDirectEditButtonSelection directEditButtons;
            if (m_directEditFollowButtons == null) {
                // this is the first call, calculate the options
                if ((m_directEditLinkForNew == null)
                    || !CmsDefaultFileNameGenerator.hasNumberMacro(m_directEditLinkForNew)) {
                    // if create link is null, show only "edit" and "delete" button for first element
                    directEditButtons = CmsDirectEditButtonSelection.EDIT_DELETE;
                    m_directEditFollowButtons = directEditButtons;
                } else {
                    // if create link is not null, show "edit", "delete" and "new" buttons
                    directEditButtons = CmsDirectEditButtonSelection.EDIT_DELETE_NEW;
                    m_directEditFollowButtons = CmsDirectEditButtonSelection.EDIT_DELETE_NEW;
                }
            } else {
                // re-use pre calculated options
                directEditButtons = m_directEditFollowButtons;
            }
            CmsDirectEditParams params = new CmsDirectEditParams(
                m_resourceName,
                directEditButtons,
                m_directEditMode,
                CmsEncoder.encode(m_directEditLinkForNew));
            params.setPostCreateHandler(m_postCreateHandler);
            params.setId(m_contentInfoBean.getId());
            m_directEditOpen = CmsJspTagEditable.startDirectEdit(pageContext, params);
        }
    } else {
        // no more results in the collector, reset locale (just to make sure...)
        m_locale = null;
        m_editEmpty = false;
    }
    return hasMoreContent;
}
/**
 * Returns the edit empty attribute.<p>
 *
 * @return the edit empty attribute
 */
public boolean isEditEmpty() {
    return m_editEmpty;
}
/**
 * Resets all tag state so the (pooled) tag instance can be reused.<p>
 *
 * @see javax.servlet.jsp.tagext.Tag#release()
 */
@Override
public void release() {

    // clear the loaded content and its locale information
    m_content = null;
    m_contentLocale = null;
    m_locale = null;
    // clear all direct edit related state
    m_directEditLinkForNew = null;
    m_directEditFollowButtons = null;
    m_directEditMode = null;
    m_directEditOpen = false;
    // reset the iteration flag
    m_isFirstLoop = false;
    super.release();
}
/**
 * Sets the editable mode.<p>
 *
 * @param mode the mode to set
 */
public void setEditable(String mode) {
    m_directEditMode = CmsDirectEditMode.valueOf(mode);
}
/**
 * Sets the edit empty attribute.<p>
 *
 * @param editEmpty the edit empty attribute to set
 */
public void setEditEmpty(boolean editEmpty) {
    m_editEmpty = editEmpty;
}
/**
 * Sets the locale.<p>
 *
 * Also resets the content locale, which may later be adjusted per content
 * in <code>doLoadNextFile()</code>.<p>
 *
 * @param locale the locale to set
 */
public void setLocale(String locale) {
    if (CmsStringUtil.isEmpty(locale)) {
        m_locale = null;
        m_contentLocale = null;
    } else {
        m_locale = CmsLocaleManager.getLocale(locale);
        m_contentLocale = m_locale;
    }
}
/**
 * Sets the post-create handler class name.<p>
 *
 * @param postCreateHandler the post-create handler class name
 */
public void setPostCreateHandler(String postCreateHandler) {
    m_postCreateHandler = postCreateHandler;
}
/**
 * Load the next file name from the initialized list of file names.<p>
 *
 * @throws CmsException if something goes wrong
 */
protected void doLoadNextFile() throws CmsException {

    super.doLoadNextResource();
    if (m_resource == null) {
        // nothing left to load
        return;
    }
    // upgrade the resource to a file
    CmsFile file = m_cms.readFile(m_resource);
    // unmarshal the XML content from the resource, don't use unmarshal(CmsObject, CmsResource)
    // as no support for getting the historic version that has been cached by a CmsHistoryResourceHandler
    // will come from there!
    m_content = CmsXmlContentFactory.unmarshal(m_cms, file, pageContext.getRequest());
    // start with the selected locale, fall back to the first default locale the content has
    m_contentLocale = m_locale;
    if (!m_content.hasLocale(m_contentLocale)) {
        for (Locale defaultLocale : OpenCms.getLocaleManager().getDefaultLocales()) {
            if (m_content.hasLocale(defaultLocale)) {
                // found a matching locale
                m_contentLocale = defaultLocale;
                break;
            }
        }
    }
}
/**
 * Initializes this content load tag.<p>
 *
 * Validates the paging attributes, resolves the collector name/parameter (possibly
 * taking preloaded values from an ancestor container), executes the collector and
 * prepares the "direct edit" related state for the iteration.<p>
 *
 * @param container the parent container (could be a preloader)
 *
 * @throws JspException in case something goes wrong
 */
protected void init(I_CmsXmlContentContainer container) throws JspException {

    // check if the tag contains a pageSize, pageIndex and pageNavLength attribute, or none of them
    int pageAttribCount = 0;
    pageAttribCount += CmsStringUtil.isNotEmpty(m_pageSize) ? 1 : 0;
    pageAttribCount += CmsStringUtil.isNotEmpty(m_pageIndex) ? 1 : 0;
    if ((pageAttribCount > 0) && (pageAttribCount < 2)) {
        // pageSize and pageIndex must be used together (both set or both omitted)
        throw new CmsIllegalArgumentException(Messages.get().container(Messages.ERR_TAG_CONTENTLOAD_INDEX_SIZE_0));
    }
    I_CmsXmlContentContainer usedContainer;
    if (container == null) {
        // no preloading ancestor has been found
        usedContainer = this;
        if (CmsStringUtil.isEmpty(m_collector)) {
            // check if the tag contains a collector attribute
            throw new CmsIllegalArgumentException(Messages.get().container(
                Messages.ERR_TAG_CONTENTLOAD_MISSING_COLLECTOR_0));
        }
        if (CmsStringUtil.isEmpty(m_param)) {
            // check if the tag contains a param attribute
            throw new CmsIllegalArgumentException(Messages.get().container(
                Messages.ERR_TAG_CONTENTLOAD_MISSING_PARAM_0));
        }
    } else {
        // use provided container (preloading ancestor)
        usedContainer = container;
    }
    if (isPreloader()) {
        // always disable direct edit for preload
        m_directEditMode = CmsDirectEditMode.FALSE;
    } else if (m_directEditMode == null) {
        // direct edit mode must not be null
        m_directEditMode = CmsDirectEditMode.FALSE;
    }
    // initialize OpenCms access objects
    m_controller = CmsFlexController.getController(pageContext.getRequest());
    m_cms = m_controller.getCmsObject();
    // get the resource name from the selected container
    String resourcename = getResourceName(m_cms, usedContainer);
    // initialize a string mapper to resolve EL like strings in tag attributes
    CmsMacroResolver resolver = CmsMacroResolver.newInstance().setCmsObject(m_cms).setJspPageContext(pageContext).setResourceName(
        resourcename).setKeepEmptyMacros(true);
    // resolve the collector name
    if (container == null) {
        // no preload parent container, initialize new values
        m_collectorName = resolver.resolveMacros(getCollector());
        // resolve the parameter
        m_collectorParam = resolver.resolveMacros(getParam());
        m_collectorResult = null;
    } else {
        // preload parent content container available, use values from this container
        m_collectorName = usedContainer.getCollectorName();
        m_collectorParam = usedContainer.getCollectorParam();
        m_collectorResult = usedContainer.getCollectorResult();
        if (m_locale == null) {
            // use locale from ancestor if available
            m_locale = usedContainer.getXmlDocumentLocale();
        }
    }
    if (m_locale == null) {
        // no locale set, use locale from users request context
        m_locale = m_cms.getRequestContext().getLocale();
    }
    try {
        // now collect the resources
        I_CmsResourceCollector collector = OpenCms.getResourceManager().getContentCollector(m_collectorName);
        if (collector == null) {
            throw new CmsException(Messages.get().container(Messages.ERR_COLLECTOR_NOT_FOUND_1, m_collectorName));
        }
        // execute the collector if not already done in parent tag
        if (m_collectorResult == null) {
            m_collectorResult = collector.getResults(m_cms, m_collectorName, m_collectorParam);
        }
        m_contentInfoBean = new CmsContentInfoBean();
        m_contentInfoBean.setPageSizeAsString(resolver.resolveMacros(m_pageSize));
        m_contentInfoBean.setPageIndexAsString(resolver.resolveMacros(m_pageIndex));
        m_contentInfoBean.setPageNavLengthAsString(resolver.resolveMacros(m_pageNavLength));
        m_contentInfoBean.setResultSize(m_collectorResult.size());
        m_contentInfoBean.setLocale(m_locale.toString());
        m_contentInfoBean.initResultIndex();
        if (!isPreloader()) {
            // not required when only preloading
            m_collectorResult = CmsJspTagResourceLoad.limitCollectorResult(m_contentInfoBean, m_collectorResult);
            m_contentInfoBean.initPageNavIndexes();
            String createParam = collector.getCreateParam(m_cms, m_collectorName, m_collectorParam);
            if (createParam != null) {
                // use "create link" only if collector supports it
                m_directEditLinkForNew = m_collectorName + "|" + createParam;
            }
        } else if (isScopeVarSet()) {
            // scope variable is set, store content load bean in JSP context
            CmsJspContentLoadBean bean = new CmsJspContentLoadBean(m_cms, m_locale, m_collectorResult);
            storeAttribute(bean);
        }
    } catch (CmsException e) {
        m_controller.setThrowable(e, m_cms.getRequestContext().getUri());
        throw new JspException(e);
    }
    // reset the direct edit options (required because of re-used tags)
    m_directEditOpen = false;
    m_directEditFollowButtons = null;
    // the next loop is the first loop
    m_isFirstLoop = true;
}
} |
package org.openid4java.util;
import org.apache.commons.httpclient.Credentials;
import org.apache.commons.httpclient.NTCredentials;
import org.apache.commons.httpclient.UsernamePasswordCredentials;
/**
* Utility bean for setting transport properties in runtime.
*/
public class ProxyProperties
{
private static final String ANONYMOUS = "anonymous";
protected int proxyPort = -1;
protected String domain;
protected String password;
protected String proxyHostName;
protected String userName;
public ProxyProperties()
{
}
public String getDomain()
{
if (domain == null || domain.length() == 0)
{
return ANONYMOUS;
}
else
{
return domain;
}
}
public void setDomain(String domain)
{
this.domain = domain;
}
public String getPassword()
{
if (password == null || password.length() == 0)
{
return ANONYMOUS;
}
else
{
return password;
}
}
public void setPassword(String password)
{
this.password = password;
}
public String getProxyHostName()
{
return proxyHostName;
}
public void setProxyHostName(String proxyHostName)
{
this.proxyHostName = proxyHostName;
}
public int getProxyPort()
{
return proxyPort;
}
public void setProxyPort(int proxyPort)
{
this.proxyPort = proxyPort;
}
public String getUserName()
{
if (userName == null || userName.length() == 0)
{
return ANONYMOUS;
}
else
{
return userName;
}
}
public void setUserName(String userName)
{
this.userName = userName;
}
/**
* Get the proxy credentials.
*
* @return the proxy credentials
*/
public Credentials getCredentials() {
Credentials credentials = null;
if (this.getDomain().equals(ANONYMOUS))
{
credentials = new UsernamePasswordCredentials(
this.getUserName(),
this.getPassword());
}
else
{
credentials = new NTCredentials(
this.getUserName(),
this.getPassword(),
this.getProxyHostName(),
this.getDomain());
}
return credentials;
}
/**
* {@inheritDoc}
*/
@Override
public String toString() {
return this.getDomain() + "\\" + this.getUserName()
+ ":" + this.getPassword()
+ "@" + this.getProxyHostName() + ":" + this.getProxyPort();
}
} |
package org.conserve;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;
import org.conserve.aggregate.AggregateFunction;
import org.conserve.cache.ObjectRowMap;
import org.conserve.connection.ConnectionWrapper;
import org.conserve.select.Clause;
import org.conserve.select.discriminators.Equal;
import org.conserve.tools.Defaults;
import org.conserve.tools.Tools;
import org.conserve.tools.generators.NameGenerator;
import org.conserve.tools.protection.ProtectionManager;
/**
 * Object database interface. Saves to and retrieves from a persistence
 * database.
 *
 * This is the main programming interface class. Create an instance of this
 * class to interact with the database.
 *
 * @author Erik Berglund
 *
 */
public class PersistenceManager
{
    // the engine that performs all persistence operations; created by setup(...)
    private Persist persist;
/**
 * Load the settings from a file. The file contains properties on the form
 * property=value
 *
 * Database tables are created automatically (equivalent to calling
 * {@link #PersistenceManager(String, boolean)} with createSchema = true).
 *
 * @param filename
 *            the name of the properties file to load settings from.
 * @throws IOException
 * @throws SQLException
 */
public PersistenceManager(String filename) throws IOException, SQLException
{
    this(filename, true);
}
/**
 * Load the settings from a file. The file contains properties on the form
 * property=value
 *
 * The file is first searched for on the classpath, then on the file system.
 *
 * If createSchema is true the database tables will be automatically
 * created.
 *
 * @param filename
 *            the name of the properties file to load settings from.
 * @param createSchema
 *            whether to create tables or not.
 * @throws IOException
 * @throws SQLException
 */
public PersistenceManager(String filename, boolean createSchema) throws IOException, SQLException
{
    Properties p = new Properties();
    // look on the classpath first, fall back to the file system
    InputStream in = ClassLoader.getSystemClassLoader().getResourceAsStream(filename);
    if (in == null)
    {
        in = new FileInputStream(new File(filename));
    }
    try
    {
        p.load(in);
    }
    finally
    {
        // close in a finally block - previously the stream leaked if load(...) threw
        in.close();
    }
    setup(p, createSchema);
}
/**
 * Reads property=value pairs from the inputstream.
 *
 * The stream is not closed by this constructor - the caller retains ownership.
 *
 * @param in
 *            the source of the properties of the connection.
 * @throws IOException
 * @throws SQLException
 */
public PersistenceManager(InputStream in) throws IOException, SQLException
{
    this(in, true);
}
/**
 * Reads property=value pairs from the inputstream. If createSchema is true
 * the database tables will be automatically created.
 *
 * The stream is not closed by this constructor - the caller retains ownership.
 *
 * @param in
 *            the source of the properties of the connection.
 * @param createSchema
 *            whether to create tables or not.
 * @throws IOException
 * @throws SQLException
 */
public PersistenceManager(InputStream in, boolean createSchema) throws IOException, SQLException
{
    Properties prop = new Properties();
    prop.load(in);
    setup(prop, createSchema);
}
/**
 * Create object, read settings from provided properties.
 *
 * Database tables are created automatically.
 *
 * @param prop
 *            contains the driver, connectionstring, username and password
 *            strings.
 * @throws SQLException
 */
public PersistenceManager(Properties prop) throws SQLException
{
    this(prop, true);
}
/**
 * Create object, read settings from provided properties.
 *
 * If createSchema is true the database tables will be automatically
 * created.
 *
 * @param createSchema
 *            whether to create tables or not.
 * @param prop
 *            contains the driver, connectionstring, username and password
 *            strings.
 * @throws SQLException
 */
public PersistenceManager(Properties prop, boolean createSchema) throws SQLException
{
    setup(prop, createSchema);
}
/**
 * Common initialisation: create the underlying Persist engine and
 * initialise it from the given properties.
 *
 * @param prop
 *            contains the driver, connectionstring, username and password strings.
 * @param createSchema
 *            whether to create tables or not.
 * @throws SQLException
 */
private void setup(Properties prop, boolean createSchema) throws SQLException
{
    persist = new Persist();
    persist.setCreateSchema(createSchema);
    persist.initialize(prop);
}
	/**
	 * Connect to a database using an explicitly named JDBC driver. Database
	 * tables are created automatically.
	 * 
	 * @param driver
	 *            the driver name, optionally null if JDBC version is 4 or
	 *            greater.
	 * @param connectionstring
	 *            the connection string to the database.
	 * @param username
	 *            the database username.
	 * @param password
	 *            the database password.
	 * @throws SQLException
	 *             if the database connection can not be established.
	 */
	public PersistenceManager(String driver, String connectionstring, String username, String password)
			throws SQLException
	{
		this(driver, connectionstring, username, password, true);
	}
	/**
	 * Connect to a database using an explicitly named JDBC driver, optionally
	 * creating the database tables.
	 * 
	 * @param driver
	 *            the driver name, optionally null if JDBC version is 4 or
	 *            greater.
	 * @param connectionstring
	 *            the connection string.
	 * @param username
	 *            the database username.
	 * @param password
	 *            the database password.
	 * @param createSchema
	 *            whether to create database tables or not.
	 * @throws SQLException
	 *             if the database connection can not be established.
	 */
	public PersistenceManager(String driver, String connectionstring, String username, String password,
			boolean createSchema) throws SQLException
	{
		persist = new Persist();
		persist.setCreateSchema(createSchema);
		persist.initialize(driver, connectionstring, username, password);
	}
	/**
	 * Constructor that omits driver class name. Only works with JDBC 4.0
	 * compliant drivers. Database tables are created automatically.
	 * 
	 * @param connectionstring
	 *            the connection string to the database.
	 * @param username
	 *            the database username.
	 * @param password
	 *            the database password.
	 * @throws SQLException
	 *             if the database connection can not be established.
	 */
	public PersistenceManager(String connectionstring, String username, String password) throws SQLException
	{
		this(connectionstring, username, password, true);
	}
	/**
	 * Constructor that omits driver class name. Only works with JDBC 4.0
	 * compliant drivers.
	 * 
	 * @param connectionstring
	 *            the connection string to the database.
	 * @param username
	 *            the database username.
	 * @param password
	 *            the database password.
	 * @param createSchema
	 *            whether to create database tables or not.
	 * @throws SQLException
	 *             if the database connection can not be established.
	 */
	public PersistenceManager(String connectionstring, String username, String password, boolean createSchema)
			throws SQLException
	{
		this(null, connectionstring, username, password, createSchema);
	}
/**
* Delete one particular object from the database.
*
* @param toDelete the object that will be deleted
* @return true if the object existed and was deleted, false otherwise.
* @throws SQLExcpetion
*/
public boolean deleteObject(Object toDelete) throws SQLException
{
boolean res = false;
ConnectionWrapper cw = getConnectionWrapper();
try
{
res = deleteObject(cw,toDelete);
cw.commitAndDiscard();
}
catch(Exception e)
{
cw.rollbackAndDiscard();
throw new SQLException(e);
}
return res;
}
/**
* Delete one particular object from the database.
*
* @param toDelete the object that will be deleted
* @param cw the connection wrapper to use for the operation.
* @return true if the object existed and was deleted, false otherwise.
* @throws SQLExcpetion
*/
public boolean deleteObject(ConnectionWrapper cw, Object toDelete) throws SQLException
{
boolean res = false;
Long dbId = persist.getCache().getDatabaseId(toDelete);
if(dbId != null)
{
Integer tableNameId = persist.getTableNameNumberMap().getNumber(cw, toDelete.getClass());
ProtectionManager pm = persist.getProtectionManager();
pm.unprotectObjectExternal(tableNameId, dbId, cw);
if(!pm.isProtected(tableNameId, dbId, cw))
{
res = persist.deleteObject(cw,toDelete.getClass(), dbId);
}
}
return res;
}
/**
* Delete all objects that share properties with pattern. Convenience method
* that does not require the user to supply a ConnectionWrapper.
*
* @param pattern
* @return the number of deleted objects.
*/
public int deleteObjects(Object pattern) throws SQLException
{
int res = 0;
ConnectionWrapper cw = getConnectionWrapper();
try
{
res = deleteObjects(cw,pattern);
cw.commitAndDiscard();
}
catch(Exception e)
{
cw.rollbackAndDiscard();
throw new SQLException(e);
}
return res;
}
/**
* Delete all objects that share properties with pattern.
*
* @param pattern
* @param cw
* the connection wrapper to use for this operation.
* @return the number of deleted objects.
*/
public int deleteObjects(ConnectionWrapper cw, Object pattern) throws SQLException
{
return persist.deleteObjects(cw, pattern.getClass(), new Equal(pattern));
}
/**
* Delete all objects of class clazz (or any of its subclasses) that satisfy
* the where clause. If clazz is an interface, delete all implementing
* classes that satisfy the where clause.
*
* Convenience method that does not require the user to supply a
* ConnectionWrapper.
*
* @param clazz
* @param where
* @return the number of deleted objects.
*/
public <T> int deleteObjects(Class<T> clazz, Clause where) throws SQLException
{
int res = 0;
ConnectionWrapper cw = getConnectionWrapper();
try
{
res = deleteObjects(cw,clazz,where);
cw.commitAndDiscard();
}
catch(Exception e)
{
cw.rollbackAndDiscard();
throw new SQLException(e);
}
return res;
}
	/**
	 * Delete all objects of class clazz (or any of its subclasses) that satisfy
	 * the where clause. If clazz is an interface, delete all implementing
	 * classes that satisfy the where clause.
	 * 
	 * @param cw
	 *            the connection wrapper to use for this operation.
	 * @param clazz
	 *            the class of objects to delete.
	 * @param where
	 *            the clause that must be satisfied for an object to be deleted.
	 * @return the number of deleted objects.
	 * @throws SQLException
	 *             if the delete operation fails.
	 */
	public <T> int deleteObjects(ConnectionWrapper cw, Class<T> clazz, Clause where) throws SQLException
	{
		return persist.deleteObjects(cw, clazz, where);
	}
/**
* Add an object to the database. If the object already exists, it will be
* updated. Convenience method that does not require the user to supply a
* ConnectionWrapper.
*
* @param object
* the object to save.
*
* @throws SQLException
*/
public Long saveObject(Object object) throws SQLException
{
Long res = null;
ConnectionWrapper cw = getConnectionWrapper();
try
{
res = saveObject(cw,object);
cw.commitAndDiscard();
}
catch(Exception e)
{
cw.rollbackAndDiscard();
throw new SQLException(e);
}
return res;
}
	/**
	 * Add an object to the database. If the object already exists, it will be
	 * updated.
	 * 
	 * @param cw
	 *            the connection wrapper to use for this operation.
	 * @param object
	 *            the object to save.
	 * @return the database id of the saved object.
	 * @throws SQLException
	 *             if the save operation fails.
	 */
	public Long saveObject(ConnectionWrapper cw, Object object) throws SQLException
	{
		return persist.saveObject(cw, object, true, null);
	}
/**
* Get all objects that share the non-null properties of pattern. If no
* results are found, an empty ArrayList is returned. Convenience method
* that does not require the user to supply a ConnectionWrapper.
*
* @param pattern
* the example to use for retrieving objects.
* @return a list of objects that match the pattern.
* @throws SQLException
*/
public <T> List<T> getObjects(T pattern) throws SQLException
{
List<T>res = null;
ConnectionWrapper cw = getConnectionWrapper();
try
{
res = getObjects(cw,pattern);
cw.commitAndDiscard();
}
catch(Exception e)
{
cw.rollbackAndDiscard();
throw new SQLException(e);
}
return res;
}
/**
* Return a list of objects of a given class (including subclasses and/or
* implementing classes) that satisfy the given clause. Convenience method
* that does not require the user to supply a ConnectionWrapper.
*
* @param <T>
* the type of objects to return
* @param clazz
* the class of objects to return, subclasses will also be
* returned.
* @param clause
* the clause(s) that all the returned objects must satisfy.
* @return an ArrayList of the desired type.
* @throws SQLException
*/
public <T> List<T> getObjects(Class<T> clazz, Clause... clause) throws SQLException
{
List<T> res = null;
ConnectionWrapper cw = getConnectionWrapper();
try
{
res = getObjects(cw,clazz,clause);
cw.commitAndDiscard();
}
catch(Exception e)
{
cw.rollbackAndDiscard();
throw new SQLException(e);
}
return res;
}
	/**
	 * Get all objects that share the non-null properties of pattern. If no
	 * results are found, an empty ArrayList is returned.
	 * 
	 * @param cw
	 *            the wrapped connection to use for this operation.
	 * @param pattern
	 *            the example to use for retrieving objects.
	 * @return a list of objects that match the pattern.
	 * @throws SQLException
	 *             if the query fails.
	 */
	// cast is safe: getClass() of a T is its runtime Class<T>
	@SuppressWarnings("unchecked")
	public <T> List<T> getObjects(ConnectionWrapper cw, T pattern) throws SQLException
	{
		return getObjects(cw, (Class<T>) pattern.getClass(), pattern);
	}
	/**
	 * Get the objects that match the non-null properties of pattern. The fields
	 * with non-null values in the pattern are matched to database fields.
	 * 
	 * @param cw
	 *            the wrapped connection to use for this operation.
	 * @param clazz
	 *            the class of the objects to look for.
	 * @param pattern
	 *            return only objects that match the pattern object.
	 * @return a list of matching objects.
	 * @throws SQLException
	 *             if the query fails.
	 */
	public <T> List<T> getObjects(ConnectionWrapper cw, Class<T> clazz, Object pattern) throws SQLException
	{
		return persist.getObjects(cw, clazz, new Equal(pattern, clazz));
	}
	/**
	 * Get the objects matching the search class and search clauses. The objects
	 * are passed one by one to the {@link SearchListener#objectFound(Object)}
	 * method of the listener parameter. Convenience method that obtains and
	 * manages a ConnectionWrapper for you.
	 * 
	 * This method conserves memory compared to the other getObjects(...)
	 * methods by only loading one object at a time. This means this method is
	 * slower than the other getObjects(...) methods, as a new database query is
	 * issued for each separate object.
	 * 
	 * The next object in the search won't be loaded until the objectFound(...)
	 * method returns, so if heavy processing needs to be done on each object
	 * it's best to offload it to a separate thread.
	 * 
	 * 
	 * @param listener
	 *            an object that implements the SearchListener interface.
	 * @param clazz
	 *            the class of objects to search for.
	 * @param clauses
	 *            the clause(s) that all returned objects must satisfy.
	 * @throws SQLException
	 *             if the operation fails; the transaction is rolled back.
	 */
	public <T> void getObjects(Class<T> clazz, SearchListener<T> listener, Clause... clauses) throws SQLException
	{
		ConnectionWrapper cw = getConnectionWrapper();
		try
		{
			persist.getObjects(cw,listener, clazz, clauses);
			cw.commitAndDiscard();
		}
		catch(Exception e)
		{
			cw.rollbackAndDiscard();
			throw new SQLException(e);
		}
	}
	/**
	 * Return a list of objects of a given class (including subclasses and/or
	 * implementing classes) that satisfy the given clause.
	 * 
	 * @param <T>
	 *            the type of objects to return
	 * @param cw
	 *            the connection wrapper to use for this operation.
	 * @param clazz
	 *            the class of objects to return, subclasses will also be
	 *            returned.
	 * @param clause
	 *            the clause that all the returned objects must satisfy.
	 * @return an ArrayList of the desired type.
	 * @throws SQLException
	 *             if the query fails.
	 */
	public <T> List<T> getObjects(ConnectionWrapper cw, Class<T> clazz, Clause... clause) throws SQLException
	{
		return persist.getObjects(cw, clazz, clause);
	}
/**
* Get the number of objects that share the non-null properties of pattern.
* If no results are found, zero is returned. Convenience method that does
* not require the user to supply a ConnectionWrapper.
*
* @param pattern
* the example to use for retrieving objects.
* @return the number of objects that match the pattern.
* @throws SQLException
*/
public <T> long getCount(T pattern) throws SQLException
{
long res = 0;
ConnectionWrapper cw = getConnectionWrapper();
try
{
res = getCount(cw,pattern);
cw.commitAndDiscard();
}
catch(Exception e)
{
cw.rollbackAndDiscard();
throw new SQLException(e);
}
return res;
}
	/**
	 * Get the number of objects that share the non-null properties of pattern.
	 * If no results are found, zero is returned.
	 * 
	 * @param cw
	 *            the ConnectionWrapper to use for this transaction.
	 * @param pattern
	 *            the example to use for counting objects.
	 * @return the number of objects that match the pattern.
	 * @throws SQLException
	 *             if the query fails.
	 */
	// cast is safe: getClass() of a T is its runtime Class<T>
	@SuppressWarnings("unchecked")
	public <T> long getCount(ConnectionWrapper cw, T pattern) throws SQLException
	{
		return getCount(cw, (Class<T>) pattern.getClass(), pattern);
	}
	/**
	 * Get the number of objects that match the non-null properties of pattern.
	 * The fields with non-null values in the pattern are matched to database
	 * fields.
	 * 
	 * @param cw
	 *            the connection wrapper to use for this transaction.
	 * @param clazz
	 *            the class of the objects to look for.
	 * @param pattern
	 *            count only objects that match the pattern object.
	 * @return the number of matching objects.
	 * @throws SQLException
	 *             if the query fails.
	 */
	public long getCount(ConnectionWrapper cw, Class<?> clazz, Object pattern) throws SQLException
	{
		return persist.getCount(cw, clazz, new Equal(pattern, clazz));
	}
/**
* Get the number of database objects of class clazz that satisfy the
* clause. Convenience method that does not require the user to supply a
* ConnectionWrapper.
*
* @param <T>
* @param clazz
* the class to look for.
* @param clause
* the clause that must be satisfied.
* @return the number of objects of class clazz and its subclasses that
* satisfy clause.
* @throws SQLException
*/
public <T> long getCount(Class<T> clazz, Clause... clause) throws SQLException
{
long res = 0;
ConnectionWrapper cw = getConnectionWrapper();
try
{
res = getCount(cw,clazz,clause);
cw.commitAndDiscard();
}
catch(Exception e)
{
cw.rollbackAndDiscard();
throw new SQLException(e);
}
return res;
}
	/**
	 * Get the number of database objects of class clazz that satisfy the
	 * clause.
	 * 
	 * @param <T>
	 * @param cw
	 *            the connection wrapper for this operation.
	 * @param clazz
	 *            the class to look for.
	 * @param clause
	 *            the clause that must be satisfied.
	 * @return the number of objects of class clazz and its subclasses that
	 *         satisfy clause.
	 * @throws SQLException
	 *             if the query fails.
	 */
	public <T> long getCount(ConnectionWrapper cw, Class<T> clazz, Clause... clause) throws SQLException
	{
		return persist.getCount(cw, clazz, clause);
	}
	/**
	 * Get the object of class clazz with the given database id. The actual
	 * object returned may be an instance of a subclass.
	 * 
	 * @param cw
	 *            the connection wrapper to use for this operation.
	 * @param clazz
	 *            the class of the object to retrieve.
	 * @param id
	 *            the database id of the object at the level of clazz.
	 * @return the matching object.
	 * 
	 * @throws SQLException
	 *             if the query fails.
	 * @throws ClassNotFoundException
	 *             if a class name stored in the database can not be resolved.
	 */
	public <T> T getObject(ConnectionWrapper cw, Class<T> clazz, Long id) throws SQLException, ClassNotFoundException
	{
		return persist.getObject(cw, clazz, id);
	}
/**
* Get the object of class clazz with the given database id. The actual
* object returned may be an instance of a subclass.
* This is a convenience method that handles the ConnectionWrapper for you.
*
* @param clazz
* the class of the object to retrieve.
* @param id
* the database id of the object at the level of clazz.
*
* @return the matching object.
*
* @throws SQLException
* @throws ClassNotFoundException
*/
public <T> T getObject( Class<T> clazz, Long id) throws SQLException, ClassNotFoundException
{
T res = null;
ConnectionWrapper cw = getConnectionWrapper();
try
{
res = getObject(cw, clazz, id);
cw.commitAndDiscard();
}
catch(Exception e)
{
cw.rollbackAndDiscard();
throw new SQLException(e);
}
return res;
}
	/**
	 * Get a list of all classes persisted in this database. It does not include
	 * classes representing primitives, e.g. java.lang.Integer, or array
	 * classes.
	 * 
	 * @param cw
	 *            the connection wrapper to use for this operation.
	 * @return a list of classes.
	 * @throws SQLException
	 *             if the query fails.
	 */
	public List<Class<?>> getClasses(ConnectionWrapper cw) throws SQLException
	{
		return persist.getClasses(cw);
	}
/**
* Get a list of all classes persisted in this database. It does not include
* classes representing primitives, e.g. java.lang.Integer, or array
* classes.
* This is a convenience method that allocates a ConnectionWrapper for you.
*
* @return a list of classes.
* @throws SQLException
*/
public List<Class<?>> getClasses() throws SQLException
{
List<Class<?>>res=null;
ConnectionWrapper cw = getConnectionWrapper();
try
{
res = getClasses(cw);
cw.commitAndDiscard();
}
catch (Exception e)
{
// cancel the operation
cw.rollbackAndDiscard();
// re-throw the original exception
throw new SQLException(e);
}
return res;
}
public <T> T refresh(T obj) throws IllegalArgumentException, SQLException
{
T res = null;
ConnectionWrapper cw = getConnectionWrapper();
try
{
res = refresh(cw, obj);
cw.commitAndDiscard();
}
catch (Exception e)
{
// cancel the operation
cw.rollbackAndDiscard();
// re-throw the original exception
throw new SQLException(e);
}
return res;
}
	/**
	 * Refresh the given object within an existing transaction.
	 * 
	 * NOTE(review): delegates to Persist.refresh, which presumably re-reads
	 * the object's state from the database - confirm against the Persist
	 * implementation.
	 * 
	 * @param cw
	 *            the connection wrapper to use for this operation.
	 * @param obj
	 *            the object to refresh.
	 * @return the refreshed object, as returned by the underlying engine.
	 * @throws IllegalArgumentException
	 * @throws SQLException
	 *             if the operation fails.
	 */
	public <T> T refresh(ConnectionWrapper cw, T obj) throws IllegalArgumentException, SQLException
	{
		return persist.refresh(cw, obj);
	}
/**
* Check if an object has been changed since it was loaded from the
* database.
* This is a convenience method that handles the ConnectionWrapper for you.
*
* @param o
* the object to check for, will be unchanged.
*
* @return true if the object or any of its properties has changed or been
* deleted, false otherwise.
* @throws SQLException
* @throws ClassNotFoundException
*/
public boolean hasChanged( Object o) throws SQLException, ClassNotFoundException
{
boolean res = false;
ConnectionWrapper cw = getConnectionWrapper();
try
{
res = hasChanged(cw,o);
cw.commitAndDiscard();
}
catch(Exception e)
{
cw.rollbackAndDiscard();
throw new SQLException(e);
}
return res;
}
	/**
	 * Check if an object has been changed since it was loaded from the
	 * database.
	 * 
	 * Works by temporarily saving the in-memory object (bypassing the cache)
	 * and checking whether the freshly loaded database copy still matches it;
	 * the temporary save is undone by rolling back the transaction.
	 * 
	 * NOTE(review): calls cw.rollback(), which discards ALL uncommitted
	 * changes on this connection, not just the temporary save - confirm
	 * callers never invoke this mid-transaction with pending writes.
	 * 
	 * @param cw
	 *            the connection wrapper to use for this operation.
	 * @param o
	 *            the object to check for, will be unchanged.
	 * 
	 * @return true if the object or any of its properties has changed or been
	 *         deleted, false otherwise.
	 * @throws SQLException
	 * @throws ClassNotFoundException
	 */
	private boolean hasChanged(ConnectionWrapper cw, Object o) throws SQLException, ClassNotFoundException
	{
		Long dbId = persist.getCache().getDatabaseId(o);
		if (dbId != null)
		{
			Class<?>clazz = o.getClass();
			//get all classes that are subclasses of clazz, or equal to clazz
			List<Class<?>> allClasses = getClasses(cw);
			Iterator<Class<?>> iter = allClasses.iterator();
			while(iter.hasNext())
			{
				Class<?> tmpClass = iter.next();
				if(!clazz.isAssignableFrom(tmpClass))
				{
					iter.remove();
				}
			}
			// Search using o as example, make sure returned object exists
			// and has same table id number.
			HashMap<Class<?>, List<Long>> res = persist.getObjectDescriptors(cw, clazz, null,allClasses, new Equal(o), null);
			List<Long> ids = res.get(o.getClass());
			if (ids != null && ids.contains(dbId))
			{
				boolean result = true;
				// get the object from the database, via a private cache so the
				// shared cache is not polluted
				ObjectRowMap tmpCache = new ObjectRowMap();
				tmpCache.start();
				Object actual = persist.getObject(cw, Object.class, dbId, tmpCache);
				tmpCache.stop();
				tmpCache = new ObjectRowMap();
				tmpCache.start();
				// temporarily save the old object, bypassing the cache
				long tmpId = persist.saveObject(cw, o, false, null, tmpCache);
				// make sure the new object can be used to find the old object
				res = persist.getObjectDescriptors(cw, clazz, null, allClasses,new Equal(actual), null);
				ids = res.get(o.getClass());
				if (ids != null && ids.contains(tmpId))
				{
					// database copy still matches the in-memory object
					result = false;
				}
				// delete the temporary object by rolling back the transaction
				cw.rollback();
				tmpCache.stop();
				// purge temporary object from cache
				persist.getCache().purge(NameGenerator.getTableName(o, persist.getAdapter()), tmpId);
				return result;
			}
			else
			{
				// no database row matches the in-memory state: changed/deleted
				return true;
			}
		}
		return false;
	}
	/**
	 * Drop all tables that make up class c. Tables will be dropped regardless
	 * if they are empty or not. All subclasses of c will also be dropped. If c
	 * is an interface, all classes that implement c will be dropped, along with
	 * their subclasses. Any interface that extends c will also be dropped,
	 * along with implementing classes and their subclasses.
	 * 
	 * Warning: All classes that references c will also be dropped.
	 * 
	 * This is an extremely powerful method, use caution.
	 * 
	 * This method is a wrapper around
	 * {@link #dropTable(ConnectionWrapper, Class)}, a ConnectionWrapper will be
	 * automatically obtained and released.
	 * 
	 * @param c
	 *            the class of object to drop the table for.
	 * @throws SQLException
	 */
	public void dropTable(Class<?> c) throws SQLException
	{
		ConnectionWrapper cw = getConnectionWrapper();
		try
		{
			dropTable(cw, c);
			cw.commitAndDiscard();
		}
		catch (Exception e)
		{
			// cancel the operation
			cw.rollbackAndDiscard();
			// re-throw the original exception
			throw new SQLException(e);
		}
	}
	/**
	 * Drop all tables that make up class c. Tables will be dropped regardless
	 * if they are empty or not. All subclasses of c will also be dropped. If c
	 * is an interface, all classes that implement c will be dropped, along with
	 * their subclasses. Any interface that extends c will also be dropped,
	 * along with implementing classes and their subclasses.
	 * 
	 * Warning: All classes that references c will also be dropped.
	 * 
	 * This is an extremely powerful method, use caution.
	 * 
	 * 
	 * @param cw
	 *            the connection wrapper to use for the operation.
	 * @param c
	 *            the class of object to drop the table for.
	 * @throws SQLException
	 */
	public void dropTable(ConnectionWrapper cw, Class<?> c) throws SQLException
	{
		persist.getTableManager().dropTableForClass(c, cw);
	}
/**
* Get the amount of the total capacity that has been used. Since Conserve
* stores objects with a unique, database generated identifier there is an
* upper limit to the number of objects that can be stored. This number is
* very, very large but nevertheless finite. As an example, most databases
* allows 64-bit signed integers as auto-generated identifiers. This gives
* 2^63-1 or 9,223,372,036,854,775,807 entries. That's nine quintillion, two
* hundred twenty-three quadrillion, three hundred seventy-two trillion,
* thirty-six billion, eight hundred fifty-four million, seven hundred
* seventy-five thousand, eight hundred seven. If you add a thousand entries
* every second it will take you more than 290 million years to run out.
* Some database engines (notably SQLite and its derivatives) use a smaller
* number here, so you can actually realistically run out.
*
* Returns a number in the range [0,1]. 0 indicates that the database is
* empty, 1 means it's full.
*
* Note that even if you delete objects this number
* won't necessarily decrease, as used identifiers may not be recycled.
* This behaviour is database dependent.
*
* Note that if you delete everything in your database, this method will incorrectly
* return 0, even though some identifiers have been used up.
*
* @return the fraction of total capacity that has been used, normalised to
* the range [0,1].
* @throws SQLException
*/
public double getUsedCapacity(ConnectionWrapper cw) throws SQLException
{
String objectTableName = NameGenerator.getTableName(Object.class, persist.getAdapter());
if (persist.getTableManager().tableExists(objectTableName, cw))
{
String query = "SELECT MAX(" + Defaults.ID_COL + ") FROM " + objectTableName;
PreparedStatement prepareStatement = cw.prepareStatement(query);
Tools.logFine(prepareStatement);
ResultSet rs = prepareStatement.executeQuery();
rs.next();
long count = rs.getLong(1);
return count / (double) persist.getAdapter().getMaximumIdNumber();
}
else
{
//if the table don't exist no capacity has been used
return 0;
}
}
	/**
	 * Duplicate all entries from this PersistenceManager to the other. At the
	 * end of a successful call of this method the contents of the target
	 * database will be an exact duplicate of this database, with the exception
	 * of the id numbers.
	 * 
	 * The behaviour of this call is undefined if this PersistenceManager
	 * contains a code structure that is not compatible with the source
	 * PersistenceManager.
	 * 
	 * Useful for backing up a database, moving it from one server to another,
	 * or merging two databases.
	 * 
	 * After this method completes successfully, the target database will
	 * contain an exact copy of this database. Schema and contents will be
	 * copied, but there is no guarantee that the C__ID fields will be the same.
	 * If this database or the target database is written to during this
	 * operation, the outcome is undefined.
	 * 
	 * If this operation fails the state of the target database is undefined.
	 * 
	 * This operation does not interfere with existing objects in the target
	 * database.
	 * 
	 * This operation does not change the state of this database.
	 * 
	 * @param target
	 *            the database to copy to.
	 * @throws SQLException
	 *             if anything goes wrong.
	 */
	public void duplicate(PersistenceManager target) throws SQLException
	{
		this.persist.duplicate(target.persist);
	}
	/**
	 * Update the database description or schema for the given class and all
	 * classes it depends on. This method is used after you have made changes to
	 * the definition of a class. If this method is called with a class that has
	 * not been changed it has no effect.
	 * 
	 * The following changes ARE supported:
	 * <p/>
	 * 
	 * 
	 * * Add or remove a property.
	 * <p/>
	 * 
	 * * Rename a property.
	 * <p/>
	 * 
	 * * Add or remove an index.
	 * <p/>
	 * 
	 * * Move a class from one superclass to another.
	 * <p/>
	 * 
	 * * Add or remove an interface.
	 * <p/>
	 * 
	 * * Change a property from a primitive to the corresponding reference type,
	 * for example from double to Double. The opposite (Double to double) is
	 * possible, but not encouraged as it is entirely up to your code how any
	 * nulls existing in the database are handled.
	 * <p/>
	 * 
	 * * Change a property from one reference type to another. If the new type
	 * is a supertype or implemented by the original type, either directly or
	 * indirectly, references will be preserved. In other words, properties can
	 * be made more general, not less. For example, it is possible to change
	 * from ArrayList to List, but not the other way around, without losing
	 * data. Any non-compatible references will be dropped. If a property refers
	 * to both List and ArrayList objects and is converted from List to
	 * ArrayList (the 'wrong' way) all List references that are not also
	 * ArrayList references will be deleted.
	 * <p/>
	 * 
	 * * Change a reference type to a primitive, or the other way around, for
	 * example from String to java.util.Date. Please observe that this WILL
	 * result in all old references being null. This has the same effect as
	 * dropping the property and creating a new one. You probably don't want
	 * this, but you can. This does not apply to the reference types that
	 * directly correspond to primitive types (see above).
	 * <p/>
	 * 
	 * 
	 * 
	 * To carry out any of the supported changes, just pass a Class object you
	 * want changed to this method. If you are changing a property from
	 * primitive to reference or the other way, you do not even need to call
	 * this method, just start using the new class.
	 * <p/>
	 * 
	 * After calling this method the PersistenceManager should be closed and a new instance created.
	 * Any other PersistenceManager objects should do the same - the integrity of objects that are loaded
	 * by other PersistenceManagers can not be guaranteed otherwise.
	 * <p/>
	 * 
	 * If you wish to implement any other changes, you have to do this in a
	 * two-step approach:
	 * <p/>
	 * 
	 * 1. Create an intermediary class, and read all of the old objects into it.
	 * Store the objects as intermediary classes.
	 * <p/>
	 * 
	 * 2. Drop the old class, and copy from the intermediary class to the new
	 * class, storing it.
	 * <p/>
	 * 
	 * In this case you do not use the updateSchema method.
	 * 
	 * 
	 * @param klass
	 *            the (changed) class whose database schema should be updated.
	 * @throws SQLException
	 */
	public void updateSchema(Class<?> klass) throws SQLException
	{
		ConnectionWrapper cw = getConnectionWrapper();
		try
		{
			persist.getTableManager().updateTableForClass(klass, cw);
			cw.commitAndDiscard();
		}
		catch (Exception e)
		{
			// cancel the operation
			cw.rollbackAndDiscard();
			// re-throw the original exception
			throw new SQLException(e);
		}
	}
	/**
	 * Close the database connection and release all resources. After calling
	 * this method any further use of this object is undefined. Safe to call
	 * more than once; subsequent calls have no effect.
	 */
	public void close()
	{
		if (persist != null)
		{
			persist.close();
			// null out the reference so repeated close() calls are no-ops
			persist = null;
		}
	}
	/**
	 * Package-level accessor for the managed Persist object.
	 * 
	 * @return the underlying persistence engine used by this manager; null
	 *         after {@link #close()} has been called.
	 */
	Persist getPersist()
	{
		return this.persist;
	}
	/**
	 * Get a wrapped SQL connection to the database used by this
	 * PersistenceManager instance. To use the connection, call the
	 * getConnection() method on the returned object.
	 * 
	 * After you're done with the connection, it is important that you return it
	 * to the connection pool - otherwise your application will soon run out of
	 * connections.
	 * 
	 * There are two ways of doing this:
	 * 
	 * 1. Call rollback(), then discard() on the ConnectionWrapper. This undoes
	 * all your changes.
	 * 
	 * 2. Call commit(), then discard() on the ConnectionWrapper. This makes
	 * your changes permanent.
	 * 
	 * There are convenience methods that combine the two calls;
	 * rollbackAndDiscard() and commitAndDiscard().
	 * 
	 * After calling discard() you should no longer use the ConnectionWrapper or
	 * the associated connection, but instead request a new ConnectionWrapper
	 * using the getConnectionWrapper() method.
	 * 
	 * @return a ready-to-use ConnectionWrapper object.
	 * @throws SQLException
	 *             if no connection can be obtained.
	 */
	public ConnectionWrapper getConnectionWrapper() throws SQLException
	{
		return persist.getConnectionWrapper();
	}
	/**
	 * Returns an array containing the result of each requested aggregate
	 * function, one entry per function, in the same order as the functions
	 * argument.
	 * If the field is an integer type, the corresponding entry is Integer or Long type, whichever is most appropriate.
	 * 
	 * If the field is a floating point type, the corresponding entry will be Float or Double, whichever is most appropriate.
	 * 
	 * 
	 * 
	 * @param cw the database connection to use for the operation.
	 * @param clazz the class of the object to calculate the aggregates for.
	 * @param functions the functions to calculate - each entry will get a corresponding entry in the returned array.
	 * @param where selection clauses that determine what objects will be matched - if empty, all objects are matched.
	 * 
	 * @return one Number per requested function.
	 * @throws SQLException
	 */
	public Number[] calculateAggregate(ConnectionWrapper cw,Class<?>clazz, AggregateFunction [] functions,Clause... where)throws SQLException
	{
		return persist.calculateAggregate(cw,clazz,functions,where);
	}
/**
* Convenience function that calculates the sum of one given field in all matching entries.
*
* @param cw the database connection to use for the operation.
* @param clazz the class of the object to calculate the sum for.
* @param function the function to calculate.
* @param where selection clauses that determine what objects will be matched - if empty, all objects are matched.
*
* @return
* @throws SQLException
*/
public Number calculateAggregate(ConnectionWrapper cw,Class<?>clazz, AggregateFunction function,Clause... where)throws SQLException
{
Number [] tmp = calculateAggregate(cw, clazz,new AggregateFunction []{function},where);
return tmp[0];
}
/**
* Returns an array containing the result of the SQL sum() function for each field.
* If the field is an integer type, the corresponding entry is Long, Integer, Byte, or Short type, whichever is appropriate.
*
* If the field is a floating point type, the corresponding entry will be Double or Float, whichever is appropriate.
*
* If the function is Average, the corresponding entry in the returned array will be Double, no matter what the field type is.
*
* If the function is Sum the corresponding entry in the returned array will be Double or Long, as appropriate.
*
* This function is undefined for non-numeric fields.
*
* @param clazz the class of the object to calculate the sum for.
* @param functions the functions to calculate - each entry will get a corresponding entry in the returned array.
* @param where selection clauses that determine what objects will be matched - if empty, all objects are matched.
*
* @return
* @throws SQLException
*/
public Number[] calculateAggregate(Class<?>clazz, AggregateFunction [] functions,Clause... where) throws SQLException
{
Number[] res=null;
ConnectionWrapper cw = getConnectionWrapper();
try
{
res = calculateAggregate(cw,clazz,functions,where);
cw.commitAndDiscard();
}
catch (Exception e)
{
// cancel the operation
cw.rollbackAndDiscard();
// re-throw the original exception
throw new SQLException(e);
}
return res;
}
/**
* Convenience function that calculates the sum of one given field in all matching entries.
*
* @param clazz the class of the object to calculate the sum for.
* @param function the function to calculate.
* @param where selection clauses that determine what objects will be matched - if empty, all objects are matched.
*
* @return
* @throws SQLException
*/
public Number calculateAggregate(Class<?>clazz, AggregateFunction function,Clause... where) throws SQLException
{
Number [] tmp = calculateAggregate(clazz,new AggregateFunction []{function},where);
return tmp[0];
}
} |
package org.playasophy.wonderdome;
import java.util.ArrayList;
import java.util.List;
import processing.core.*;
import org.playasophy.wonderdome.input.ButtonEvent;
import org.playasophy.wonderdome.input.InputEvent;
import org.playasophy.wonderdome.mode.ColorCycle;
import org.playasophy.wonderdome.mode.Mode;
import org.playasophy.wonderdome.mode.MovementTest;
import org.playasophy.wonderdome.mode.LanternMode;
import org.playasophy.wonderdome.mode.FlickerMode;
public class Wonderdome {
///// TYPES /////
private enum State {
PAUSED,
RUNNING
}
///// CONSTANTS /////
private static final int NUM_STRIPS = 6;
private static final int PIXELS_PER_STRIP = 240;
///// PROPERTIES /////
private final PApplet parent;
private int[][] pixels;
private List<Mode> modes;
private int currentModeIndex;
private State state;
private long lastUpdate;
///// INITIALIZATION /////
public Wonderdome(PApplet parent) {
this.parent = parent;
parent.registerMethod("pre", this);
pixels = new int[NUM_STRIPS][PIXELS_PER_STRIP];
modes = new ArrayList<Mode>();
// List of Modes
modes.add(new ColorCycle(parent)); // Mode 0
modes.add(new MovementTest(parent)); // Mode 1
modes.add(new LanternMode(parent)); // Mode 2
modes.add(new FlickerMode(parent)); // Mode 3
// Initial Mode [Change for ease of use when testing new modes].
switchToMode(3);
state = State.RUNNING;
lastUpdate = System.currentTimeMillis();
}
///// PUBLIC METHODS /////
public void pre() {
if ( state == State.RUNNING ) {
long dt = System.currentTimeMillis() - lastUpdate;
try {
getCurrentMode().update(pixels, dt);
} catch ( Exception e ) {
evictCurrentMode(e);
}
}
lastUpdate = System.currentTimeMillis();
}
public int[][] getPixels() {
return pixels;
}
public void handleEvent(InputEvent event) {
boolean consumed = false;
if ( event instanceof ButtonEvent ) {
ButtonEvent be = (ButtonEvent) event;
if ( be.getId() == ButtonEvent.Id.SELECT ) {
handleSelectButton(be.getType());
consumed = true;
}
}
if ( !consumed ) {
try {
getCurrentMode().handleEvent(event);
} catch ( Exception e ) {
evictCurrentMode(e);
}
}
}
public void pause() {
state = State.PAUSED;
}
public void resume() {
state = State.RUNNING;
}
public void setModeList(List<Mode> modes) {
// TODO: Implement this.
}
///// PRIVATE METHODS /////
private Mode getCurrentMode() {
return modes.get(currentModeIndex);
}
private void evictCurrentMode(final Throwable cause) {
System.err.println(
"Mode '" + getCurrentMode().getClass() +
"' threw exception '" + cause.getMessage() +
"' and is being evicted from the mode cycle."
);
cause.printStackTrace();
modes.remove(currentModeIndex);
cycleModes();
}
private void handleSelectButton(final ButtonEvent.Type type) {
if ( type == ButtonEvent.Type.PRESSED ) {
cycleModes();
}
}
private void switchToMode(int modeIndex)
{
if (modeIndex >= 0 && modeIndex < modes.size())
{
currentModeIndex = modeIndex;
System.out.println("Now in mode " + currentModeIndex + ": " + modes.get(currentModeIndex).getClass());
}
}
private void cycleModes()
{
int newMode = currentModeIndex + 1;
if (newMode >= modes.size())
{
newMode = 0;
}
switchToMode(newMode);
}
} |
package org.usfirst.frc.team1492.robot;
import edu.wpi.first.wpilibj.DigitalInput;
import edu.wpi.first.wpilibj.Joystick;
import edu.wpi.first.wpilibj.Joystick.AxisType;
import edu.wpi.first.wpilibj.SampleRobot;
import edu.wpi.first.wpilibj.Servo;
import edu.wpi.first.wpilibj.Solenoid;
import edu.wpi.first.wpilibj.Talon;
import edu.wpi.first.wpilibj.Timer;
import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;
public class Robot extends SampleRobot {
Talon motorFL;
Talon motorFR;
Talon motorBL;
Talon motorBR;
Talon motorMid;
Talon motorElevator;
Joystick stickLeft;
Joystick stickRight;
Joystick stickThree;
Solenoid testSolenoid;
Servo cameraServo;
double cameraServoValue;
DigitalInput limitSwitchElevatorTop;
DigitalInput limitSwitchElevatorBottom;
DigitalInput limitSwitchElevatorOne;
DigitalInput limitSwitchElevatorTwo;
DigitalInput limitSwitchElevatorThree;
double elevatorSpeed = 0;
double elevatorMaxSpeed = 1;
public Robot() {
motorFL = new Talon(1);
motorFR = new Talon(3);
motorBL = new Talon(0);
motorBR = new Talon(2);
motorMid = new Talon(5);
motorElevator = new Talon(4);
stickLeft = new Joystick(0);
stickRight = new Joystick(1);
stickThree = new Joystick(2);
testSolenoid = new Solenoid(0);
cameraServo = new Servo(7);
limitSwitchElevatorBottom = new DigitalInput(0);
limitSwitchElevatorTop = new DigitalInput(1);
limitSwitchElevatorOne = new DigitalInput(2);
limitSwitchElevatorTwo = new DigitalInput(3);
limitSwitchElevatorThree = new DigitalInput(4);
}
public void autonomous() {
}
public void operatorControl() {
// CameraThread c = new CameraThread();
while (isOperatorControl() && isEnabled()) {
driveControl();
manipulatorControl();
Timer.delay(0.005);
}
// c.finish();
}
public void test() {
}
public void driveControl() {
double leftSide = -stickLeft.getAxis(AxisType.kY);
double rightSide = stickRight.getAxis(AxisType.kY);
double horizontal = (stickLeft.getAxis(AxisType.kX) + stickRight
.getAxis(AxisType.kX)) / 2;
motorFL.set(leftSide);
motorBL.set(leftSide);
motorFR.set(rightSide);
motorBR.set(rightSide);
motorMid.set(horizontal);
}
public void manipulatorControl() {
if (stickRight.getRawButton(1)) {
testSolenoid.set(true);
} else {
testSolenoid.set(false);
}
if (stickRight.getRawButton(4)) {
cameraServoValue -= .05;
if (cameraServoValue < 0) {
cameraServoValue = 0;
}
}
if (stickRight.getRawButton(5)) {
cameraServoValue += .05;
if (cameraServoValue > 1) {
cameraServoValue = 1;
}
}
cameraServo.set(cameraServoValue);
// elevator limit switches not edge ones
elevatorMaxSpeed = (stickThree.getAxis(AxisType.kZ) + 1) / 2;
SmartDashboard.putNumber("elevatorMaxSpeed", elevatorMaxSpeed);
SmartDashboard.putBoolean("!limitSwitchElevatorTop",
!limitSwitchElevatorTop.get());
SmartDashboard.putBoolean("!limitSwitchElevatorBottom",
!limitSwitchElevatorBottom.get());
SmartDashboard.putBoolean("!limitSwitchElevatorOne",
!limitSwitchElevatorOne.get());
if (!limitSwitchElevatorOne.get() /*
* || !limitSwitchElevatorTwo.get() ||
* !limitSwitchElevatorThree.get()
*/) {
elevatorSpeed = 0;
}
if (stickThree.getRawButton(3)) {
elevatorSpeed = -elevatorMaxSpeed;
}
if (stickThree.getRawButton(2)) {
elevatorSpeed = elevatorMaxSpeed;
}
if (!limitSwitchElevatorTop.get() || !limitSwitchElevatorBottom.get()) {
elevatorSpeed = 0;
}
motorElevator.set(elevatorSpeed);
}
} |
/* Open Source Software - may be modified and shared by FRC teams. The code  */
/* must be accompanied by the FIRST BSD license file in the root directory of */
/* the project. */
// FILE NAME: Teleop.java (Team 339 - Kilroy)
// ABSTRACT:
// This file is where almost all code for Kilroy will be
// written. All of these functions are functions that should
// override methods in the base class (IterativeRobot). The
// functions are as follows:
// Init() - Initialization code for teleop mode
// should go here. Will be called each time the robot enters
// teleop mode.
// Periodic() - Periodic code for teleop mode should
// go here. Will be called periodically at a regular rate while
// the robot is in teleop mode.
// Team 339.
package org.usfirst.frc.team339.robot;
import org.usfirst.frc.team339.Hardware.Hardware;
import org.usfirst.frc.team339.Utils.Guidance;
import org.usfirst.frc.team339.Utils.ManipulatorArm.ArmPosition;
import edu.wpi.first.wpilibj.CameraServer;
import edu.wpi.first.wpilibj.DoubleSolenoid;
import edu.wpi.first.wpilibj.Relay.Value;
/**
 * This class contains all of the user code for the Teleop
 * part of the match, namely, the Init and Periodic code
 *
 * @author Nathanial Lydick
 * @written Jan 13, 2015
 */
public class Teleop
{
    /**
     * User Initialization code for teleop mode should go here. Will be
     * called once when the robot enters teleop mode.
     *
     * Resets the drivetrain, transmission, camera, catapult solenoids,
     * dashboard arrows, encoders, and all motors to a known safe state.
     *
     * @author Nathanial Lydick
     * @written Jan 13, 2015
     */
    public static void init ()
    {
        CameraServer.getInstance().setSize(1);
        Hardware.axisCamera
                .writeBrightness(Hardware.NORMAL_AXIS_CAMERA_BRIGHTNESS);
        // set max speed. change by gear?
        Hardware.drive.setMaxSpeed(MAXIMUM_TELEOP_SPEED);
        Hardware.transmission.setGear(1);
        Hardware.transmission
                .setFirstGearPercentage(Robot.FIRST_GEAR_PERCENTAGE);
        Hardware.transmission
                .setSecondGearPercentage(Robot.SECOND_GEAR_PERCENTAGE);
        Hardware.transmission.setJoystickDeadbandRange(.20);
        Hardware.transmission.setJoysticksAreReversed(false);
        Hardware.ringLightRelay.set(Value.kOff);
        // Clear any camera-alignment or firing requests left over from a
        // previous mode.
        isAligningByCamera = false;
        fireRequested = false;
        Hardware.arrowDashboard.setDirection(Guidance.Direction.neutral);
        Hardware.arrowDashboard.update();
        // Retract all three catapult solenoids.
        Hardware.catapultSolenoid0.set(false);
        Hardware.catapultSolenoid1.set(false);
        Hardware.catapultSolenoid2.set(false);
        // armEncoder needs to be set to 0
        Hardware.delayTimer.reset();
        Hardware.rightRearEncoder.reset();
        Hardware.leftRearEncoder.reset();
        // Stop all drive and arm motors.
        Hardware.leftFrontMotor.set(0.0);
        Hardware.leftRearMotor.set(0.0);
        Hardware.rightFrontMotor.set(0.0);
        Hardware.rightRearMotor.set(0.0);
        Hardware.armMotor.set(0.0);
        Hardware.armIntakeMotor.set(0.0);
    } // end Init

    //private char[] reports;
    // NOTE(review): 'done', 'Reverse', and 'Forward' are never referenced
    // within this class — candidates for removal.
    private static boolean done = false;
    //private static boolean done2 = false;
    private static edu.wpi.first.wpilibj.DoubleSolenoid.Value Reverse;
    private static edu.wpi.first.wpilibj.DoubleSolenoid.Value Forward;

    /**
     * User Periodic code for teleop mode should go here. Will be called
     * periodically at a regular rate while the robot is in teleop mode.
     *
     * Handles, in order: manual arm movement, camera raise/lower toggle,
     * camera-based goal alignment, ball intake/eject, catapult firing (with
     * an interlock that first moves the arm clear), driver guidance arrows,
     * debug printing, picture taking, and driving.
     *
     * @author Nathanial Lydick
     * @written Jan 13, 2015
     */
    public static void periodic ()
    {
        // block of code to move the arm
        if (Math.abs(Hardware.rightOperator
                .getY()) >= PICKUP_ARM_CONTROL_DEADZONE)
        {
            // use the formula for the sign (value/abs(value)) to get the direction
            // we want the motor to go in,
            // and round it just in case it isn't exactly 1, then cast to an int to
            // make the compiler happy
            Hardware.pickupArm.moveReasonably(
                    -(int) Math.round(Hardware.rightOperator.getY()
                            / Math.abs(Hardware.rightOperator.getY())),
                    Hardware.rightOperator.getRawButton(2));
            // Hardware.pickupArm
            // .moveFast((int) Math.round(Hardware.rightOperator.getY()
            // / Math.abs(Hardware.rightOperator.getY())),
            // Hardware.rightOperator.getRawButton(2));
            // Hardware.pickupArm.moveFast(1);
        }
        else
        {
            // Stick is inside the deadzone: hold the arm still.
            Hardware.pickupArm.stopArmMotor();
        }

        //Block of code to toggle the camera up or down
        //If the camera is down and we press the button.
        if (Hardware.cameraToggleButton.isOnCheckNow() == false)
        {
            //raise the camera and tell the code that it's up
            Hardware.cameraSolenoid.set(DoubleSolenoid.Value.kForward);
        }
        //If the camera is up and we press the toggle button.
        if (Hardware.cameraToggleButton.isOnCheckNow() == true)
        {
            //Drop the camera and tell the code that it's down
            Hardware.cameraSolenoid.set(DoubleSolenoid.Value.kReverse);
        }
        //end raise/lower camera block

        //Block of code to align us on the goal using the camera
        if (Hardware.rightOperator.getTrigger() == true)
        {
            //Tell the code to align us to the camera
            isAligningByCamera = true;
        }
        //If we want to point at the goal using the camera
        if (isAligningByCamera == true)
        {
            //Keep trying to point at the goal
            if (Hardware.drive.alignByCamera(
                    PERCENT_IMAGE_PROCESSING_DEADBAND,
                    CAMERA_ALIGNMENT_TURNING_SPEED) == true)
            {
                // Once we're in the center, tell the code we no longer care about
                // steering towards the goal
                isAligningByCamera = false;
            }
        }
        //end alignByCameraBlock

        // Block of code to pick up ball or push it out
        //pull in the ball if the pull in button is pressed.
        if (Hardware.rightOperator
                .getRawButton(TAKE_IN_BALL_BUTTON) == true)
        {
            //TODO demystify magic argument
            // NOTE(review): button 3 is passed through as a flag whose
            // meaning isn't visible here — presumably an override; confirm
            // against ManipulatorArm.pullInBall.
            Hardware.pickupArm
                    .pullInBall(Hardware.rightOperator.getRawButton(3));
        }
        //push out the ball if the push out button is pressed
        else if (Hardware.rightOperator
                .getRawButton(PUSH_OUT_BALL_BUTTON) == true)
        {
            Hardware.pickupArm.pushOutBall();
        }
        // If neither the pull in or the push out button are pressed, stop the
        // intake motors
        else
        {
            Hardware.pickupArm.stopIntakeArms();
        }

        // block of code to fire
        if (Hardware.leftOperator.getTrigger() == true)
        {
            //Tell the code to start firing
            fireRequested = true;
        }
        //if we want to fire, but the arm is in the way
        //NOTE: temporarily stores the firing state so that if fireRequested is false the method won't stop working
        if (!Hardware.pickupArm.isClearOfArm()
                && (fireRequested == true || storeFiringState == true))
        {
            // Park the request in storeFiringState while the arm moves clear;
            // restore it once the arm reaches CLEAR_OF_FIRING_ARM.
            storeFiringState = fireRequested;
            fireRequested = false;
            if (Hardware.pickupArm
                    .moveToPosition(ArmPosition.CLEAR_OF_FIRING_ARM))
            {
                fireRequested = true;
                storeFiringState = false;
            }
        }
        // Button 4 fires with the pressure-check override enabled.
        if (Hardware.leftOperator.getRawButton(4) == true
                && fireRequested == true)
        {
            if (fire(3, true) == true)
            {
                fireRequested = false;
            }
        }
        // cancel the fire request
        if (Hardware.leftOperator.getRawButton(FIRE_CANCEL_BUTTON) == true)
        {
            fireRequested = false;
        }
        // if we want to fire
        if (fireRequested == true
                && Hardware.leftOperator.getRawButton(4) != true)
        {
            // fire
            if (fire(3, false) == true)
            {
                // if we're done firing, drop the request
                fireRequested = false;
            }
        }
        //end fire block

        //block of code to tell the drivers where to go
        //TODO finish based on camera input and IR sensors
        //if the rightIR detects HDPE and the left one doesn't
        // if (Hardware.rightIR.isOn() == true
        // && Hardware.leftIR.isOn() == false)
        // //tell the drivers to spin right a little
        // Hardware.arrowDashboard.setDirection(Guidance.Direction.right);
        // //if the right side doesn't detect HDPE but the left one does
        // else if (Hardware.rightIR.isOn() == false
        // && Hardware.leftIR.isOn() == true)
        // //tell the drives to spin left a little
        // Hardware.arrowDashboard.setDirection(Guidance.Direction.left);
        //if one of the IR's detect HDPE
        if (Hardware.rightIR.isOn() == true
                || Hardware.leftIR.isOn() == true)
        {
            //Tell the drivers to stop and hopefully alignByCamera
            Hardware.arrowDashboard
                    .setDirection(Guidance.Direction.linedUp);
        }
        //If neither IR detects anything on the ground
        else
        {
            //trust the camera
            //TODO base these ones on the camera if we have one.
            Hardware.arrowDashboard
                    .setDirection(Guidance.Direction.neutral);
        }
        //put the arrows on the screen
        Hardware.arrowDashboard.update();

        //If the ball is in the robot, update the driver station with that info
        if (Hardware.armIR.get())
        {
            Guidance.updateBallStatus(true);
        }
        else
        {
            Guidance.updateBallStatus(false);
        }
        //End driver direction block

        // Print statements to test Hardware on the Robot
        printStatements();
        // Takes Pictures based on Operator Button stuff.
        takePicture();

        // Driving the Robot
        //TODO delete all conditionals.
        // Left-driver button 8 starts a one-shot 140-inch straight-drive
        // speed test, which suspends normal joystick driving until done.
        if (Hardware.leftDriver.getRawButton(8) == true)
        {
            isSpeedTesting = true;
        }
        if (isSpeedTesting == false)
            driveRobot();
        else
        {
            if (Hardware.drive.driveStraightByInches(140.0, true, -1.0,
                    -1.0) == true)
            {
                isSpeedTesting = false;
            }
        }
        // runCameraSolenoid(Hardware.rightOperator.getRawButton(11),
        // Hardware.rightOperator.getRawButton(10), false, true);
    } // end Periodic

    private static boolean isSpeedTesting = false;

    /**
     * Hand the transmission class the joystick values and motor controllers for
     * four wheel drive.
     *
     */
    public static void driveRobot ()
    {
        //drive the robot with the joysticks
        Hardware.transmission.controls(Hardware.leftDriver.getY(),
                Hardware.rightDriver.getY());
        // If we're pressing the upshift button, shift up.
        if (Hardware.rightDriver
                .getRawButton(GEAR_UPSHIFT_JOYSTICK_BUTTON) == true)
        {
            Hardware.transmission.upshift(1);
        }
        // If we press the downshift button, shift down.
        if (Hardware.rightDriver
                .getRawButton(GEAR_DOWNSHIFT_JOYSTICK_BUTTON) == true)
        {
            Hardware.transmission.downshift(1);
        }
    }

    public static boolean armIsUp = false;

    /**
     * ^^^Bring the boolean armIsUp
     * if method is moved to a different class.^^^
     *
     * @param upState
     * @param downState
     * @param holdState
     * @param toggle
     *
     * When in toggle mode, one boolean raises the arm and one lowers.
     * When not in toggle mode, only use boolean holdState. This will
     * keep the arm up for the duration that the holdState is true.
     *
     * NOTE: if a parameter is not applicable, set it to false.
     *
     *
     *
     * @author Ryan McGee
     * @written 2/13/16
     *
     */
    //public static void runCameraSolenoid (boolean upState,
    // boolean downState, boolean holdState, boolean toggle)
    // if (upState && toggle == true && armIsUp == false)
    // Hardware.cameraSolenoid.set(DoubleSolenoid.Value.kForward);
    // armIsUp = true;
    // else if (downState && toggle == true && armIsUp == true)
    // Hardware.cameraSolenoid.set(DoubleSolenoid.Value.kReverse);
    // armIsUp = false;
    // else if (holdState && toggle == false)
    // Hardware.cameraSolenoid.set(DoubleSolenoid.Value.kForward);
    // else
    // Hardware.cameraSolenoid.set(DoubleSolenoid.Value.kReverse);

    /**
     * Fires the catapult.
     *
     * @param power
     * -Can be 1, 2, or 3; corresponds to the amount of solenoids used to
     * fire.
     * @param override
     * -When true, fire even if the transducer reads below FIRING_MIN_PSI.
     * @return
     * -False if we're not yet done firing, true otherwise.
     */
    public static boolean fire (int power, boolean override)
    {
        if (Hardware.transducer.get() >= FIRING_MIN_PSI || override == true)
        {
            // if (Hardware.pickupArm.moveToPosition(
            // ManipulatorArm.ArmPosition.CLEAR_OF_FIRING_ARM) == true)
            // Start the fire timer exactly once per firing sequence.
            if (firstTimeFireRun == true)
            {
                Hardware.fireTimer.start();
                firstTimeFireRun = false;
            }
            // Energize 1, 2, or 3 solenoids depending on requested power;
            // unknown values fall through to full power.
            switch (power)
            {
                case 1:
                    Hardware.catapultSolenoid0.set(true);
                    break;
                case 2:
                    Hardware.catapultSolenoid1.set(true);
                    Hardware.catapultSolenoid0.set(true);
                    break;
                default:
                case 3:
                    Hardware.catapultSolenoid0.set(true);
                    Hardware.catapultSolenoid1.set(true);
                    Hardware.catapultSolenoid2.set(true);
                    break;
            }
        }
        //TODO reduce time to minimum possible
        // After half a second, retract everything and reset for the next shot.
        if (Hardware.fireTimer.get() >= .5)
        {
            System.out.println("Somehow getting into if");
            Hardware.catapultSolenoid0.set(false);
            Hardware.catapultSolenoid1.set(false);
            Hardware.catapultSolenoid2.set(false);
            Hardware.fireTimer.stop();
            Hardware.fireTimer.reset();
            firstTimeFireRun = true;
            return true;
        }
        return false;
    }

    private static boolean firstTimeFireRun = true;

    /**
     * Takes a picture, processes it and saves it with left operator joystick
     * take unlit picture: 6&7
     * take lit picture: 10&11
     */
    public static void takePicture ()
    {
        // If we click buttons 6+7 on the left operator joystick, we dim the
        // brightness a lot, turn the ringlight on, and then if we haven't
        // already taken an image then we do and set the boolean to true to
        // prevent us taking more images. Otherwise we don't turn on the
        // ringlight and we don't take a picture. We added a timer to delay
        // taking the picture for the brightness to dim and for the ring
        // light to turn on.
        if (Hardware.leftOperator.getRawButton(6) == true
                && Hardware.leftOperator.getRawButton(7) == true)
        {
            if (prepPic == false)
            {
                Hardware.axisCamera.writeBrightness(
                        Hardware.MINIMUM_AXIS_CAMERA_BRIGHTNESS);
                Hardware.ringLightRelay.set(Value.kOn);
                Hardware.delayTimer.start();
                prepPic = true;
                takingLitImage = true;
            }
        }
        // Once the brightness is down and the ring light is on then the
        // picture is taken, the brightness returns to normal, the ringlight
        // is turned off, and the timer is stopped and reset.
        // @TODO Change .25 to a constant, see line 65 under Hardware
        // Replaced '.25' with Hardware.CAMERA_DELAY_TIME' change back if camera
        // fails
        if (Hardware.delayTimer.get() >= Hardware.CAMERA_DELAY_TIME
                && prepPic == true && takingLitImage == true)
        {
            Hardware.axisCamera.saveImagesSafely();
            prepPic = false;
            takingLitImage = false;
        }
        if (takingLitImage == false && Hardware.delayTimer.get() >= 1)
        {
            Hardware.axisCamera.writeBrightness(
                    Hardware.NORMAL_AXIS_CAMERA_BRIGHTNESS);
            Hardware.ringLightRelay.set(Value.kOff);
            Hardware.delayTimer.stop();
            Hardware.delayTimer.reset();
        }
        // If we click buttons 10+11, we take a picture without the
        // ringlight and set the boolean to true so we don't take a bunch of
        // other pictures.
        if (Hardware.leftOperator.getRawButton(10) == true &&
                Hardware.leftOperator.getRawButton(11) == true)
        {
            if (takingUnlitImage == false)
            {
                takingUnlitImage = true;
                Hardware.axisCamera.saveImagesSafely();
            }
        }
        else
            takingUnlitImage = false;
        // if the left operator trigger is pressed, then we check to see if
        // we're taking a processed picture through the boolean. If we are
        // not currently taking a processed picture, then it lets us take a
        // picture and sets the boolean to true so we don't take multiple
        // pictures. If it is true, then it does nothing. If we don't click
        // the trigger, then the boolean resets itself to false to take
        // pictures again.
        // if (Hardware.leftOperator.getTrigger() == true)
        // if (processingImage == true)
        // processImage();
        // processingImage = false;
    } // end Periodic

    static boolean hasBegunTurning = true;

    /**
     *
     * Processes images with the Axis Camera for use in autonomous when
     * trying to score. Will eventually be moved to a Shoot class when
     * one is made.
     *
     * (Currently a stub — the body below is entirely commented out.)
     *
     * @author Marlene McGraw
     * @written 2/6/16
     *
     */
    public static void processImage ()
    {
        // If we took a picture, we set the boolean to true to prevent
        // taking more pictures and create an image processor to process
        // images.
        // processingImage = true;
        // Hardware.imageProcessor.processImage();
        // System.out.println("Length: " +
        // Hardware.imageProcessor.reports.length);
        // System.out.println("Center of Mass Y: ");
    }
    // End processImage

    /**
     * stores print statements for future use in the print "bank", statements are
     * commented out when
     * not in use, when you write a new print statement, "deposit" the statement in
     * the "bank"
     * do not "withdraw" statements, unless directed to
     *
     * @author Ashley Espeland
     * @written 1/28/16
     *
     * Edited by Ryan McGee
     *
     */
    public static void printStatements ()
    {
        //System.out.println("AligningByCamera = " + isAligningByCamera);
        //checks to see if the robot is aligning by camera
        // System.out.println("Left Joystick: " + Hardware.leftDriver.getY());
        // System.out
        // .println("Right Joystick: " + Hardware.rightDriver.getY());
        //System.out.println("Left Operator: " + Hardware.leftOperator.getY());
        // System.out.println("Right Operator: " + Hardware.rightOperator.getY());
        //System.out.println("left IR = " + Hardware.leftIR.isOn());
        //System.out.println("right IR = " + Hardware.rightIR.isOn());
        System.out.println("Has ball IR = " + Hardware.armIR.isOn());
        // System.out.println("delay pot = " + (int) Hardware.delayPot.get());
        // prints the value of the transducer- (range in code is 50)
        //hits psi of 100 accurately
        //System.out.println("transducer = " + Hardware.transducer.get());
        System.out.println("Arm Pot = " + Hardware.armPot.get());
        // prints value of the motors
        // System.out.println("RR Motor T = " + Hardware.rightRearMotor.get());
        // System.out.println("LR Motor T = " + Hardware.leftRearMotor.get());
        // System.out
        // .println("RF Motor T = " + Hardware.rightFrontMotor.get());
        // System.out.println("LF Motor T = " + Hardware.leftFrontMotor.get());
        System.out.println("Arm Motor: " + Hardware.armMotor.get());
        System.out
                .println("Intake Motor: " + Hardware.armIntakeMotor.get());
        // prints the state of the solenoids
        // System.out.println("cameraSolenoid = " + Hardware.cameraSolenoid.get());
        // System.out.println("catapultSolenoid0 = " +
        // Hardware.catapultSolenoid0.get());
        // System.out.println("catapultSolenoid1 = " +
        // Hardware.catapultSolenoid1.get());
        // System.out.println("catapultSolenoid2 = " +
        // Hardware.catapultSolenoid2.get());
        // System.out.println(
        // "RR distance = " + Hardware.rightRearEncoder.getDistance());
        // System.out.println(
        // "LR distance = " + Hardware.leftRearEncoder.getDistance());
        // // System.out.println("Arm Motor = " + Hardware.armMotor.getDistance());
        // System.out.println(
        // "Right Rear Encoder Tics: "
        // + Hardware.rightRearEncoder.get());
        // System.out.println(
        // "Left Rear Encoder Tics: "
        // + Hardware.leftRearEncoder.get());
        // System.out.println(
        // "RR distance = " + Hardware.rightRearEncoder.getDistance());
        // System.out.println(
        // "LR distance = " + Hardware.leftRearEncoder.getDistance());
        // System.out.println("Arm Motor = " + Hardware.armMotor.getDistance());
        // prints state of switches
        // System.out.println("Autonomous Enabled Switch: " +
        // Hardware.autonomousEnabled.isOn());
        // System.out.println("Shoot High Switch: " + Hardware.shootHigh.isOn());
        // System.out.println("Shoot Low Switch: " + Hardware.shootLow.isOn());
        //System.out.println("Position: " +
        //Hardware.startingPositionDial.getPosition());
        //System.out.println("Position: " +
        //Hardware.startingPositionDial.getPosition());
        // System.out.println(Hardware.ringLightRelay.get());
    } // end printStatements

    private static final double MAXIMUM_TELEOP_SPEED = 1.0;

    // right driver 3
    private static final int GEAR_UPSHIFT_JOYSTICK_BUTTON = 3;
    // right driver 2
    private static final int GEAR_DOWNSHIFT_JOYSTICK_BUTTON = 2;
    // left operator 2
    private static final int CAMERA_TOGGLE_BUTTON = 2;
    // Right operator 2
    private static final int FIRE_OVERRIDE_BUTTON = 2;
    // Left operator 3
    private static final int FIRE_CANCEL_BUTTON = 3;
    // Right operator 4
    private static final int TAKE_IN_BALL_BUTTON = 4;
    // right operator 5
    private static final int PUSH_OUT_BALL_BUTTON = 5;
    private static final double PICKUP_ARM_CONTROL_DEADZONE = 0.2;
    private final static double PERCENT_IMAGE_PROCESSING_DEADBAND = .15;
    private final static double CAMERA_ALIGNMENT_TURNING_SPEED = .45;
    //minimum pressure when allowed to fire
    private static final int FIRING_MIN_PSI = 90;

    // TUNEABLES
    private static boolean isAligningByCamera = false;
    // NOTE(review): cameraIsUp, isDrivingByCamera, and processingImage are
    // only referenced from commented-out code above.
    private static boolean cameraIsUp = false;
    private static boolean isDrivingByCamera = false;
    private static boolean fireRequested = false;
    private static boolean processingImage = true;
    // Boolean to check if we're taking a lit picture
    private static boolean takingLitImage = false;
    // Boolean to check if we're taking an unlit picture
    private static boolean takingUnlitImage = false;
    // this is for preparing to take a picture with the timer; changes
    // brightness, turns on ringlight, starts timer
    private static boolean prepPic = false;
    //Stores temporarily whether firingState is true, for use in whether the arm is in the way
    private static boolean storeFiringState;
} // end class
/* Open Source Software - may be modified and shared by FRC teams. The code  */
/* must be accompanied by the FIRST BSD license file in the root directory of */
/* the project. */
// FILE NAME: Teleop.java (Team 339 - Kilroy)
// ABSTRACT:
// This file is where almost all code for Kilroy will be
// written. All of these functions are functions that should
// override methods in the base class (IterativeRobot). The
// functions are as follows:
// Init() - Initialization code for teleop mode
// should go here. Will be called each time the robot enters
// teleop mode.
// Periodic() - Periodic code for teleop mode should
// go here. Will be called periodically at a regular rate while
// the robot is in teleop mode.
// Team 339.
package org.usfirst.frc.team339.robot;
import org.usfirst.frc.team339.Hardware.Hardware;
import edu.wpi.first.wpilibj.CameraServer;
import edu.wpi.first.wpilibj.Relay.Value;
/**
 * This class contains all of the user code for the Teleop
 * part of the match, namely, the Init and Periodic code
 *
 * @author Nathanial Lydick
 * @written Jan 13, 2015
 */
public class Teleop
{
// TUNEABLES
// Boolean to check if we're taking a lit picture
private static boolean takingLitImage = false;
// Boolean to check if we're taking an unlit picture
private static boolean takingUnlitImage = false;
// this is for preparing to take a picture with the timer; changes
// brightness, turns on ringlight, starts timer
private static boolean prepPic = false;
/**
* User Initialization code for teleop mode should go here. Will be
* called once when the robot enters teleop mode.
*
* @author Nathanial Lydick
* @written Jan 13, 2015
*/
public static void init ()
{
// set max speed. change by gear?
Hardware.drive.setMaxSpeed(MAXIMUM_TELEOP_SPEED);
Hardware.transmission.setFirstGearPercentage(FIRST_GEAR_PERCENTAGE);
Hardware.transmission
.setSecondGearPercentage(SECOND_GEAR_PERCENTAGE);
Hardware.transmission.setGear(1);
// stop cam0 in case we have declared them
// in Autonomous. Then declare a new cam0
// and start it going automatically with the
// camera server
Hardware.delayTimer.reset();
CameraServer.getInstance().setSize(1);
// set max speed. change by gear?
Hardware.drive.setMaxSpeed(MAXIMUM_TELEOP_SPEED);
Hardware.transmission.setFirstGearPercentage(FIRST_GEAR_PERCENTAGE);
Hardware.transmission
.setSecondGearPercentage(SECOND_GEAR_PERCENTAGE);
Hardware.transmission.setGear(1);
// stop cam0 in case we have declared them
// in Autonomous. Then declare a new cam0
// and start it going automatically with the
// camera server
Hardware.delayTimer.reset();
CameraServer.getInstance().setSize(1);
Hardware.axisCamera
.writeBrightness(Hardware.NORMAL_AXIS_CAMERA_BRIGHTNESS);
} // end Init
/**
* User Periodic code for teleop mode should go here. Will be called
* periodically at a regular rate while the robot is in teleop mode.
*
* @author Nathanial Lydick
* @written Jan 13, 2015
*/
public static void periodic ()
{
//Print statements to test Hardware on the Robot
printStatements();
// If we click buttons 6+7 on the left operator joystick, we dim the
// brightness a lot, turn the ringlight on, and then if we haven't
// already taken an image then we do and set the boolean to true to
// prevent us taking more images. Otherwise we don't turn on the
// ringlight and we don't take a picture. We added a timer to delay
// taking the picture for the brightness to dim and for the ring
// light to turn on.
if (Hardware.leftOperator.getRawButton(6) == true &&
Hardware.leftOperator.getRawButton(7) == true)
{
if (prepPic == false)
{
Hardware.axisCamera
.writeBrightness(
Hardware.MINIMUM_AXIS_CAMERA_BRIGHTNESS);
Hardware.ringLightRelay.set(Value.kOn);
Hardware.delayTimer.start();
prepPic = true;
takingLitImage = true;
}
}
// Once the brightness is down and the ring light is on then the
// picture is taken, the brightness returns to normal, the ringlight
// is turned off, and the timer is stopped and reset.
if (Hardware.delayTimer.get() >= .25 && prepPic == true
&& takingLitImage == true)
{
Hardware.axisCamera.saveImagesSafely();
prepPic = false;
takingLitImage = false;
}
if (takingLitImage == false && Hardware.delayTimer.get() >= 1)
{
Hardware.axisCamera
.writeBrightness(
Hardware.NORMAL_AXIS_CAMERA_BRIGHTNESS);
Hardware.ringLightRelay.set(Value.kOff);
Hardware.delayTimer.stop();
Hardware.delayTimer.reset();
}
// If we click buttons 10+11, we take a picture without the
// ringlight and set the boolean to true so we don't take a bunch of
// other pictures.
if (Hardware.leftOperator.getRawButton(10) == true &&
Hardware.leftOperator.getRawButton(11) == true)
{
if (takingUnlitImage == false)
{
takingUnlitImage = true;
Hardware.axisCamera.saveImagesSafely();
}
}
else
{
takingUnlitImage = false;
}
//Driving the Robot
//Hand the transmission class the joystick values and motor controllers for four wheel drive.
Hardware.transmission.controls(Hardware.rightDriver.getY(),
Hardware.leftDriver.getY(), Hardware.leftFrontMotor,
Hardware.leftRearMotor, Hardware.rightFrontMotor,
Hardware.rightRearMotor);
//If we're in 1st gear and we're pressing the upshift button, shift up.
if (Hardware.transmission.getGear() == 1 &&
Hardware.rightDriver
.getRawButton(GEAR_UPSHIFT_JOYSTICK_BUTTON) == true)
{
Hardware.transmission.upshift(1);
}
//If we're in 2nd gear and we press the downshift button, shift down.
else if (Hardware.transmission.getGear() == 2 &&
Hardware.rightDriver.getRawButton(
GEAR_DOWNSHIFT_JOYSTICK_BUTTON) == true)
{
Hardware.transmission.downshift(1);
}
} // end Periodic
/**
 * Stores print statements for future use in the print "bank"; statements are
 * commented out when not in use. When you write a new print statement,
 * "deposit" the statement in the "bank". Do not "withdraw" statements
 * unless directed to.
 *
 * The method body is intentionally all comments: uncomment only the lines
 * you need while debugging, then re-comment them before committing.
 *
 * @author Ashley Espeland
 * @written 1/28/16
 *
 *          Edited by Ryan McGee
 */
public static void printStatements ()
{
    // --- Joysticks ---
    // System.out.println("Left Joystick: " + Hardware.leftDriver.getY());
    // System.out.println("Right Joystick: " + Hardware.rightDriver.getY());
    // System.out.println("Left Operator: " + Hardware.leftOperator.getY());
    // System.out.println("Right Operator: " + Hardware.rightOperator.getY());

    // --- IR sensors ---
    // System.out.println("left IR = " + Hardware.leftIR.isOn());
    // System.out.println("right IR = " + Hardware.rightIR.isOn());

    // --- Potentiometers ---
    // System.out.println("delay pot = " + (int) Hardware.delayPot.get());

    // prints the value of the transducer- (range in code is 50)
    // hits psi of 100 accurately
    // System.out.println("transducer = " + Hardware.transducer.get());

    // prints value of the motors
    // System.out.println("RR Motor V = " + Hardware.rightRearMotor.get());
    // System.out.println("LR Motor V = " + Hardware.leftRearMotor.get());
    // System.out.println("RF Motor V = " + Hardware.rightFrontMotor.get());
    // System.out.println("LF Motor V = " + Hardware.leftFrontMotor.get());
    // System.out.println("Arm Motor V = " + Hardware.armMotor.get());

    // prints the state of the solenoids
    // System.out.println("cameraSolenoid = " + Hardware.cameraSolenoid.get());
    // System.out.println("catapultSolenoid0 = " +
    // Hardware.catapultSolenoid0.get());
    // System.out.println("catapultSolenoid1 = " +
    // Hardware.catapultSolenoid1.get());
    // System.out.println("catapultSolenoid2 = " +
    // Hardware.catapultSolenoid2.get());

    // --- Encoders ---
    // System.out.println(
    // "RR distance = " + Hardware.rightRearEncoder.getDistance());
    // System.out.println(
    // "LR distance = " + Hardware.leftRearEncoder.getDistance());
    // System.out.println("RF distance = "
    // + Hardware.rightFrontEncoder.getDistance());
    // System.out.println(
    // "LF distance = " + Hardware.leftFrontEncoder.getDistance());
    // System.out.println("Arm Motor = " + Hardware.armMotor.getDistance());

    // prints state of switches
    // System.out.println("Autonomous Enabled Switch: " + Hardware.autonomousEnabled.isOn());
    // System.out.println("Shoot High Switch: " + Hardware.shootHigh.isOn());
    // System.out.println("Shoot Low Switch: " + Hardware.shootLow.isOn());
    // System.out.println("Position: " + Hardware.startingPositionDial.getPosition());

    // --- Relays ---
    // System.out.println(Hardware.ringLightRelay.get());
} // end printStatements
// Upper bound for the teleop drive output (full scale).
private static final double MAXIMUM_TELEOP_SPEED = 1.0;

// Drive output scale while in first gear (half power).
private static final double FIRST_GEAR_PERCENTAGE = 0.5;

// Second gear runs at the full teleop speed cap.
private static final double SECOND_GEAR_PERCENTAGE =
        MAXIMUM_TELEOP_SPEED;

// Makes the brightness to a visible level so our drivers can see.
private static final int NORMAL_AXIS_CAMERA_BRIGHTNESS = 50;

// Crazy dark brightness for retroreflective pictures
private static final int MINIMUM_AXIS_CAMERA_BRIGHTNESS = 6;

// Right-driver joystick buttons used for shifting between gears.
// TODO change based on driver request
private static final int GEAR_UPSHIFT_JOYSTICK_BUTTON = 3;
private static final int GEAR_DOWNSHIFT_JOYSTICK_BUTTON = 2;
} // end class |
package org.usfirst.frc.team4678.robot;
import edu.wpi.first.wpilibj.IterativeRobot;
import edu.wpi.first.wpilibj.smartdashboard.SendableChooser;
import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;
import edu.wpi.first.wpilibj.*;
import com.ctre.CANTalon;
import com.ctre.CANTalon.FeedbackDevice;
import com.ctre.CANTalon.TalonControlMode;
/**
* The VM is configured to automatically run this class, and to call the
* functions corresponding to each mode, as described in the IterativeRobot
* documentation. If you change the name of this class or the package after
* creating this project, you must also update the manifest file in the resource
* directory.
*/
/**
* TODO
* - Classes
* - Controls for Gear simplified
*
*
*/
public class Robot extends IterativeRobot {

    /// Robot Port Mappings

    // Compressor
    // PCM port the compressor is controlled through.
    public static final int COMPRESSOR = 0;

    // Motors
    // PWM channels for the drive and climber motors.
    public static final int LEFTDRIVEMOTOR = 1;
    public static final int RIGHTDRIVEMOTOR = 0;
    public static final int CLIMBERMOTOR = 2;
    // CAN ID for the claw pivot Talon.
    // NOTE(review): shares the value 2 with CLIMBERMOTOR — presumably fine
    // because they are on different buses (CAN vs PWM); confirm wiring.
    public static final int CLAWPIVOTMOTOR = 2;

    // Pneumatics
    // PCM module ID and solenoid channel assignments.
    public static final int PCM = 0;
    public static final int LOWGEAR = 0;
    public static final int HIGHGEAR = 1;
    public static final int CLAWRETRACT = 2;
    public static final int CLAWEXTEND = 3;

    // When true, smartDashboard() publishes debug values to the dashboard.
    public static final boolean DEBUG = true;

    // Controllers
    // Driver-station joystick port numbers.
    public static final int DRIVERGAMEPAD = 0;
    public static final int OPERATORGAMEPAD = 1;

    // PIDConstants
    // Claw
    // Claw position-loop gains (P-only; I and D are disabled).
    public static final double clawPIDP = 0.7;
    public static final double clawPIDI = 0;
    public static final double clawPIDD = 0;

    // GamePadMapping
    // Driver
    // Axis indices on the driver gamepad.
    public static final int LEFTAXISX = 0;
    public static final int LEFTAXISY = 1;
    public static final int RIGHTAXISX = 2;
    public static final int RIGHTAXISY = 3;
    // Driver gamepad button assignments.
    // NOTE(review): in driverControls() some of these buttons invoke actions
    // that do not match their names (see comments there) — confirm mapping.
    public static final int CLAWUPANDEXTENDBTN = 3;
    public static final int CLAWUPANDRETRACTBTN = 1;
    public static final int CLAWDOWNANDEXTENDBTN = 4;
    public static final int CLAWDOWNANDRETRACTBTN = 2;
    public static final int SHIFTUPBTN = 5;
    public static final int SHIFTDOWNBTN = 6;
    public static final int CLIMBFASTBTN = 7;
    public static final int CLIMBSLOWBTN = 8;

    // Human-readable names for the autonomous modes, indexed by autoMode.
    public static String autoModes[] = {
            "Do Nothing",
            "Mode 1",
            "Mode 2",
            "Mode 3",
            "Mode 4",
            "Mode 5",
            "Mode 6",
            "Mode 7",
            "Mode 8",
            "Mode 9",
            "Mode 10"
    };

    // Operator

    // DriveTrain
    public static VictorSP driveTrainLeftMotor;
    public static VictorSP driveTrainRightMotor;
    public static Compressor driveTrainCompressor;
    public static DoubleSolenoid driveTrainShifter;

    // Controllers
    public static Joystick driverGamePad;
    public static Joystick operatorGamePad;

    // Claw
    public static DoubleSolenoid clawGrabber;
    public static CANTalon clawPivot;

    // Climber
    public static VictorSP climber;

    // State Machine Enums

    // DriveStateMachine
    public static enum DriveStates{
        JOYSTICKDRIVE, AUTO, DISABLED
    }

    // Starts in AUTO; teleopInit() switches this to JOYSTICKDRIVE.
    public static DriveStates currentDriveState = DriveStates.AUTO;

    public static enum ClawStates{
        DOWNANDEXTENDED, DOWNANDRETRACTED, UPANDRETRACTED, UPANDEXTENDED
    }

    public static ClawStates currentClawState = ClawStates.UPANDRETRACTED;

    // Currently selected autonomous mode (index into autoModes).
    public static int autoMode = 0;

    /**
     * This function is run when the robot is first started up and should be
     * used for any initialization code.
     */
    @Override
    public void robotInit() {
        driveTrainInit();
        controllerInit();
        clawInit();
        climberInit();
        smartDashboard();
    }

    @Override
    public void autonomousInit() {
    }

    /**
     * This function is called periodically during autonomous.
     * Operator gamepad buttons select the autonomous mode; no mode is
     * actually executed here.
     */
    @Override
    public void autonomousPeriodic() {
        // NOTE(review): WPILib joystick buttons are 1-indexed, so
        // getRawButton(0) is likely invalid — confirm how autoMode 0
        // ("Do Nothing") is meant to be selected.
        if(operatorGamePad.getRawButton(0)){
            autoMode = 0;
        }
        if(operatorGamePad.getRawButton(1)){
            autoMode = 1;
        }
        if(operatorGamePad.getRawButton(2)){
            autoMode = 2;
        }
        if(operatorGamePad.getRawButton(3)){
            autoMode = 3;
        }
        if(operatorGamePad.getRawButton(4)){
            autoMode = 4;
        }
        if(operatorGamePad.getRawButton(5)){
            autoMode = 5;
        }
        if(operatorGamePad.getRawButton(6)){
            autoMode = 6;
        }
        if(operatorGamePad.getRawButton(7)){
            autoMode = 7;
        }
        if(operatorGamePad.getRawButton(8)){
            autoMode = 8;
        }
        if(operatorGamePad.getRawButton(9)){
            autoMode = 9;
        }
        if(operatorGamePad.getRawButton(10)){
            autoMode = 10;
        }
    }

    // Teleop: enable joystick driving.
    @Override
    public void teleopInit(){
        currentDriveState = DriveStates.JOYSTICKDRIVE;
    }

    /**
     * This function is called periodically during operator control.
     */
    @Override
    public void teleopPeriodic() {
        driveStateMachine(currentDriveState);
        driverControls();
        //clawStateMachine(currentClawState);
        smartDashboard();
    }

    // Keep the dashboard updated while disabled.
    @Override
    public void disabledPeriodic(){
        smartDashboard();
    }

    /**
     * This function is called periodically during test mode.
     */
    @Override
    public void testPeriodic() {
    }

    // Creates drive motors, compressor and the gear shifter solenoid.
    public void driveTrainInit(){
        driveTrainLeftMotor = new VictorSP(LEFTDRIVEMOTOR);
        driveTrainRightMotor = new VictorSP(RIGHTDRIVEMOTOR);
        driveTrainCompressor = new Compressor(COMPRESSOR);
        // Forward channel = HIGHGEAR, reverse channel = LOWGEAR.
        driveTrainShifter = new DoubleSolenoid(PCM, HIGHGEAR, LOWGEAR);
        driveTrainCompressor.setClosedLoopControl(true);
    }

    // Creates the driver and operator gamepads.
    public void controllerInit(){
        driverGamePad = new Joystick(DRIVERGAMEPAD);
        operatorGamePad = new Joystick(OPERATORGAMEPAD);
    }

    // Creates the claw solenoid and configures the pivot Talon's
    // position loop.
    public void clawInit(){
        clawGrabber = new DoubleSolenoid(PCM, CLAWEXTEND, CLAWRETRACT);
        clawPivot = new CANTalon(CLAWPIVOTMOTOR);
        clawPivot.setPID(clawPIDP, clawPIDI, clawPIDD);
        // Limit pivot output to 5 V and allow 200 counts of closed-loop error.
        clawPivot.configMaxOutputVoltage(5);
        clawPivot.setAllowableClosedLoopErr(200);
        //clawPivot.setFeedbackDevice(FeedbackDevice.CtreMagEncoder_Absolute);
        // Seed the quadrature position from the absolute pulse-width position.
        clawPivot.setEncPosition(clawPivot.getPulseWidthPosition());
        clawPivot.reverseOutput(true);
    }

    public void climberInit(){
        climber = new VictorSP(CLIMBERMOTOR);
    }

    // Dispatches on the current drive state; only JOYSTICKDRIVE does work.
    public void driveStateMachine(DriveStates DriveState){
        switch(DriveState){
            case JOYSTICKDRIVE:
                joyStickDrive();
                break;
            case AUTO:
                break;
            case DISABLED:
                break;
        }
    }

    // Drives the claw pivot and grabber to the requested combined state.
    // (Currently unused — the call in teleopPeriodic() is commented out.)
    public void clawStateMachine(ClawStates ClawState){
        switch(ClawState){
            case DOWNANDEXTENDED:
                clawDown();
                clawExtend();
                break;
            case DOWNANDRETRACTED:
                clawDown();
                clawRetract();
                break;
            case UPANDEXTENDED:
                clawUp();
                clawExtend();
                break;
            case UPANDRETRACTED:
                clawUp();
                clawRetract();
                break;
        }
    }

    // NOTE(review): shiftUp energizes the reverse channel (LOWGEAR) and
    // shiftDown the forward channel (HIGHGEAR) — this depends on the
    // pneumatic plumbing; confirm the directions are not inverted.
    public void shiftUp(){
        driveTrainShifter.set(DoubleSolenoid.Value.kReverse);
    }

    public void shiftDown(){
        driveTrainShifter.set(DoubleSolenoid.Value.kForward);
    }

    // Arcade drive: Y axis is throttle, X axis is turn; the left side is
    // negated so both sides drive the same physical direction.
    public void joyStickDrive(){
        double gamePadY, gamePadX, leftPower, rightPower;
        gamePadX = driverGamePad.getRawAxis(0);
        gamePadY = driverGamePad.getRawAxis(1);
        leftPower = gamePadY + gamePadX;
        rightPower = gamePadY - gamePadX;
        driveTrainLeftMotor.set(-leftPower);
        driveTrainRightMotor.set(rightPower);
    }

    // Maps driver buttons to actions.
    // NOTE(review): several bindings do not match the button names:
    // CLAWUPANDEXTENDBTN calls clawDown(), CLIMBFASTBTN calls clawMiddle(),
    // and nothing calls climbFast() — confirm whether the constants or the
    // calls are wrong.
    public void driverControls(){
        if(driverGamePad.getRawButton(SHIFTUPBTN)){
            shiftUp();
        }
        if(driverGamePad.getRawButton(SHIFTDOWNBTN)){
            shiftDown();
        }
        if(driverGamePad.getRawButton(CLAWUPANDEXTENDBTN)){
            clawDown();
        }
        if(driverGamePad.getRawButton(CLAWUPANDRETRACTBTN)){
            clawUp();
        }
        if(driverGamePad.getRawButton(CLIMBFASTBTN)){
            // NOTE(review): stray empty statement after the call (";;").
            clawMiddle();;
        }
        if(driverGamePad.getRawButton(CLIMBSLOWBTN)){
            climbSlow();
        }
        if(driverGamePad.getRawButton(CLAWDOWNANDEXTENDBTN)){
            clawExtend();
        }
        if(driverGamePad.getRawButton(CLAWDOWNANDRETRACTBTN)){
            clawRetract();
        }
    }

    public void operatorControls(){
    }

    public void clawExtend(){
        clawGrabber.set(DoubleSolenoid.Value.kForward);
    }

    public void clawRetract(){
        clawGrabber.set(DoubleSolenoid.Value.kReverse);
    }

    // NOTE(review): the setpoints below (4000/3000/3870) disagree with the
    // encoder positions quoted in the comments (1920/890) — confirm which
    // numbers are current.
    public void clawDown(){
        //Enc PW Pos of 1920
        clawPivot.changeControlMode(TalonControlMode.Position);
        clawPivot.set(4000);
    }

    public void clawUp(){
        //Enc PW Pos of 890
        clawPivot.changeControlMode(TalonControlMode.Position);
        clawPivot.set(3000);
    }

    public void climbSlow(){
        climber.set(0.2);
    }

    // NOTE(review): sets the climber to 0 — "fast" climb does nothing as
    // written; confirm the intended speed.
    public void climbFast(){
        climber.set(0);
    }

    public void clawMiddle(){
        clawPivot.changeControlMode(TalonControlMode.Position);
        clawPivot.set(3870);
    }

    // Publishes debug telemetry when DEBUG is enabled.
    public void smartDashboard(){
        if(DEBUG){
            SmartDashboard.putNumber("Claw Encoder", clawPivot.getPulseWidthPosition());
            SmartDashboard.putNumber("Claw Encoder 2", clawPivot.getEncPosition());
            SmartDashboard.putString("Auto Mode", autoModes[autoMode]);
        }else {
        }
    }
}
package org.usfirst.frc4909.STEAMWORKS;
import edu.wpi.first.wpilibj.CameraServer;
import edu.wpi.first.wpilibj.IterativeRobot;
import edu.wpi.first.wpilibj.command.Scheduler;
import edu.wpi.first.wpilibj.livewindow.LiveWindow;
import org.usfirst.frc4909.STEAMWORKS.config.Config;
import org.usfirst.frc4909.STEAMWORKS.subsystems.*;
public class Robot extends IterativeRobot {
public static OI oi;
public static Drivetrain drivetrain;
public static Climber climber;
public static Intake intake;
public static Feeder feeder;
public static Shooter shooter;
public static Loader loader;
public static Config config;
public void robotInit() {
RobotMap.init();
drivetrain = new Drivetrain();
climber = new Climber();
intake = new Intake();
feeder = new Feeder();
shooter = new Shooter();
loader = new Loader();
oi = new OI();
config = new Config();
CameraServer.getInstance().startAutomaticCapture();
}
public void disabledInit(){}
public void disabledPeriodic() {
Scheduler.getInstance().run();
}
public void autonomousInit() {}
public void autonomousPeriodic() {
Scheduler.getInstance().run();
}
public void teleopInit() {}
public void teleopPeriodic() {
Scheduler.getInstance().run();
}
public void testPeriodic() {
LiveWindow.run();
}
} |
package pl.polidea.imagemanager;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.ref.WeakReference;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.LinkedBlockingQueue;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import android.app.Application;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.BitmapFactory.Options;
import android.graphics.Point;
import android.net.Uri;
import android.util.Log;
/**
* Image manager. Manager provides way to load image resources asynchronously
* with many options like:
* <ul>
* <li>loading from
* <ul>
* <li>file system
* <li>application resources
* </ul>
* <li>caching
* <li>low-quality preview
* <li>sub-sampling
* <li>loading rescaled bitmap
* <li>using strong GC-proof cache
* </ul>
* Provides optional logging on different levels which makes it easy to debug
* your code. Image manager should be interfaced mostly by
* {@link #getImage(ImageManagerRequest)} or using
* {@link pl.polidea.imagemanager.ManagedImageView}
*
*
* @author karooolek
* @see #getImage(ImageManagerRequest)
* @see pl.polidea.imagemanager.ManagedImageView
*/
public final class ImageManager {

    private static final String TAG = ImageManager.class.getSimpleName();

    /**
     * Image load thread helper class. Blocks on {@code loadQueue} and loads
     * full-quality bitmaps, replacing any cached preview.
     *
     * @author karooolek
     */
    private static final class LoadThread extends Thread {

        private LoadThread() {
            // Name the thread after the class tag for easier debugging.
            super(TAG);
        }

        @Override
        public void run() {
            if (logging) {
                Log.d(TAG, "Image loading thread started");
            }
            // loop
            // NOTE(review): 'exit' is final and always false — the loop only
            // ends via the InterruptedException break below.
            final boolean exit = false;
            while (!exit) {
                // get loading request (blocks until one is available)
                ImageManagerRequest req = null;
                try {
                    loadingReqs.add(req = loadQueue.take());
                } catch (final InterruptedException e) {
                    break;
                }
                try {
                    // load bitmap
                    final Bitmap bmp = loadImage(req, false);
                    // remove preview image (recycle it before overwriting the
                    // cache entry with the full-size bitmap)
                    if (isImageLoaded(req)) {
                        final Bitmap prevbmp = getLoadedBitmap(req);
                        if (prevbmp != null && !prevbmp.isRecycled()) {
                            if (logging) {
                                Log.d(TAG, "Unloading preview image " + req);
                            }
                            prevbmp.recycle();
                            if (logging) {
                                Log.d(TAG, "Preview image " + req + " unloaded");
                            }
                        }
                    }
                    // save bitmap
                    loaded.put(req, new LoadedBitmap(bmp, req.strong));
                } catch (final OutOfMemoryError err) {
                    // oh noes! we have no memory for image
                    if (logging) {
                        Log.e(TAG, "Error while loading full image " + req + ". Out of memory.");
                        logImageManagerStatus();
                    }
                    cleanUp();
                }
                loadingReqs.remove(req);
            } // while(!exit)
            if (logging) {
                Log.d(TAG, "Image loading thread ended");
            }
        }
    }

    /**
     * Image download thread helper class. Blocks on {@code downloadQueue} and
     * downloads each URI into the manager's cache directory.
     *
     * @author karooolek
     */
    private static final class DownloadThread extends Thread {

        private DownloadThread() {
            super(TAG);
        }

        @Override
        public void run() {
            if (logging) {
                Log.d(TAG, "Image downloading thread started");
            }
            // loop
            // NOTE(review): same always-false 'exit' pattern as LoadThread;
            // only an interrupt terminates this thread.
            final boolean exit = false;
            while (!exit) {
                // get downloading URI (blocks until one is available)
                Uri uri = null;
                try {
                    downloadingUris.add(uri = downloadQueue.take());
                } catch (final InterruptedException e) {
                    break;
                }
                try {
                    // download
                    downloadImage(uri, getFilenameForUri(uri));
                } catch (final Exception e) {
                    // some problems with downloading officer
                    if (logging) {
                        Log.e(TAG, "Error while downloading image from " + uri);
                    }
                }
                downloadingUris.remove(uri);
            } // while(!exit)
            if (logging) {
                Log.d(TAG, "Image downloading thread ended");
            }
        }
    }

    /**
     * Loaded bitmap helper class. Holds the bitmap either strongly (GC-proof)
     * or through a WeakReference, depending on the request's 'strong' flag.
     *
     * @author karooolek
     */
    private static final class LoadedBitmap {

        private final WeakReference<Bitmap> weakBitmap;
        private final Bitmap bitmap;

        LoadedBitmap(final Bitmap bitmap, final boolean strong) {
            // Exactly one of the two fields is populated.
            this.bitmap = strong ? bitmap : null;
            this.weakBitmap = strong ? null : new WeakReference<Bitmap>(bitmap);
        }

        Bitmap getBitmap() {
            // May return null for weakly-held bitmaps after a GC.
            return weakBitmap == null ? bitmap : weakBitmap.get();
        }
    }

    // Application context; must be set via init() before any other use.
    private static Application application;
    // Start timestamp for uptime reporting (set in the static initializer).
    private static long start;
    private static boolean logging = false;
    // All requests ever seen by getImage() (most recent last).
    private static List<ImageManagerRequest> requests = new ArrayList<ImageManagerRequest>();
    // Requests waiting for a LoadThread.
    private static BlockingQueue<ImageManagerRequest> loadQueue = new LinkedBlockingQueue<ImageManagerRequest>();
    // Requests currently being loaded by a LoadThread.
    private static List<ImageManagerRequest> loadingReqs = new ArrayList<ImageManagerRequest>();
    // Cache of loaded bitmaps; ConcurrentHashMap so worker threads and
    // cleanUp() can access it concurrently.
    private static Map<ImageManagerRequest, LoadedBitmap> loaded = new ConcurrentHashMap<ImageManagerRequest, LoadedBitmap>();
    // URIs waiting for a DownloadThread.
    private static BlockingQueue<Uri> downloadQueue = new LinkedBlockingQueue<Uri>();
    // URIs currently being downloaded.
    private static List<Uri> downloadingUris = new ArrayList<Uri>();

    private ImageManager() {
        // unreachable private constructor
    }

    /**
     * Initialize image manager for application.
     *
     * @param application
     *            application context.
     */
    public static void init(final Application application) {
        ImageManager.application = application;
    }

    // True if the cache has an entry for this request (the bitmap itself may
    // still have been reclaimed if weakly held).
    private static boolean isImageLoaded(final ImageManagerRequest req) {
        return loaded.containsKey(req);
    }

    // True if the request is queued or currently being loaded.
    private static boolean isImageLoading(final ImageManagerRequest req) {
        return loadQueue.contains(req) || loadingReqs.contains(req);
    }

    private static void queueImageLoad(final ImageManagerRequest req) {
        if (logging) {
            Log.d(TAG, "Queuing image " + req + " to load");
        }
        loadQueue.add(req);
    }

    private static Bitmap getLoadedBitmap(final ImageManagerRequest req) {
        return isImageLoaded(req) ? loaded.get(req).getBitmap() : null;
    }

    // Cache-file path for a URI, keyed by the URI string's hash code.
    private static String getFilenameForUri(final Uri uri) {
        return application.getCacheDir() + "/image_manager/" + String.valueOf(uri.toString().hashCode());
    }

    private static boolean isImageDownloaded(final Uri uri) {
        // An in-flight download does not count as downloaded.
        if (isImageDownloading(uri)) {
            return false;
        }
        final File file = new File(getFilenameForUri(uri));
        return file.exists() && !file.isDirectory();
    }

    private static boolean isImageDownloading(final Uri uri) {
        return downloadQueue.contains(uri) || downloadingUris.contains(uri);
    }

    private static void queueImageDownload(final ImageManagerRequest req) {
        if (logging) {
            Log.d(TAG, "Queuing image " + req + " to download");
        }
        downloadQueue.add(req.uri);
    }

    /**
     * Load image request. Loads synchronously image specified by request. Adds
     * loaded image to cache.
     *
     * @param req
     *            image request
     * @param preview
     *            loading preview or not.
     * @return loaded image.
     */
    public static Bitmap loadImage(final ImageManagerRequest req, final boolean preview) {
        // no request
        if (req == null) {
            return null;
        }
        if (logging) {
            if (preview) {
                Log.d(TAG, "Loading preview image " + req);
            } else {
                Log.d(TAG, "Loading full image " + req);
            }
        }
        Bitmap bmp = null;
        // loading options
        final Options opts = new Options();
        // sub-sampling options: previews are always sub-sampled by 8,
        // full loads use the request's sub-sample factor
        opts.inSampleSize = preview ? 8 : req.subsample;
        // load from filename
        if (req.filename != null) {
            final File file = new File(req.filename);
            if (!file.exists() || file.isDirectory()) {
                if (logging) {
                    Log.d(TAG, "Error while loading image " + req + ". File does not exist.");
                }
                return null;
            }
            bmp = BitmapFactory.decodeFile(req.filename, opts);
        }
        // load from resources
        if (req.resId >= 0) {
            bmp = BitmapFactory.decodeResource(application.getResources(), req.resId, opts);
        }
        // load from uri (only if the file has already been downloaded)
        if (req.uri != null) {
            final String filename = getFilenameForUri(req.uri);
            if (!isImageDownloaded(req.uri)) {
                if (logging) {
                    Log.d(TAG, "Error while loading image " + req + ". File was not downloaded.");
                }
                return null;
            }
            bmp = BitmapFactory.decodeFile(filename, opts);
        }
        // scaling options
        // NOTE(review): if decoding failed above, bmp may be null here and
        // createScaledBitmap would throw — confirm decode failures are
        // impossible for requests carrying width/height.
        if (!preview && (req.width > 0 && req.height > 0)) {
            final Bitmap sBmp = Bitmap.createScaledBitmap(bmp, req.width, req.height, true);
            if (sBmp != null) {
                bmp.recycle();
                bmp = sBmp;
            }
        }
        if (logging) {
            if (preview) {
                Log.d(TAG, "Preview image " + req + " loaded");
            } else {
                Log.d(TAG, "Full image " + req + " loaded");
            }
        }
        return bmp;
    }

    /**
     * Unload image specified by image request and remove it from cache.
     *
     * @param req
     *            image request.
     */
    public static void unloadImage(final ImageManagerRequest req) {
        if (logging) {
            Log.d(TAG, "Unloading image " + req);
        }
        final Bitmap bmp = getLoadedBitmap(req);
        if (bmp != null) {
            bmp.recycle();
        }
        loaded.remove(req);
        if (logging) {
            Log.d(TAG, "Image " + req + " unloaded");
        }
    }

    // Copies the whole input stream into the given file.
    // NOTE(review): flushes after every 1 KB chunk, which is redundant given
    // the final flush in the finally block.
    private static void readFile(final File filename, final InputStream inputStream) throws IOException {
        final byte[] buffer = new byte[1024];
        final OutputStream out = new FileOutputStream(filename);
        try {
            int r = inputStream.read(buffer);
            while (r != -1) {
                out.write(buffer, 0, r);
                out.flush();
                r = inputStream.read(buffer);
            }
        } finally {
            try {
                out.flush();
            } finally {
                out.close();
            }
        }
    }

    /**
     * Download image from specified URI to specified file in file system.
     *
     * @param uri
     *            image URI.
     * @param filename
     *            image file name to download.
     * @throws URISyntaxException
     *             thrown when URI is invalid.
     * @throws ClientProtocolException
     *             thrown when there is problem with connecting.
     * @throws IOException
     *             thrown when there is problem with connecting.
     */
    public static void downloadImage(final Uri uri, final String filename) throws URISyntaxException, IOException {
        if (logging) {
            Log.d(TAG, "Downloading image from " + uri + " to " + filename);
        }
        // connect to uri
        final DefaultHttpClient client = new DefaultHttpClient();
        final HttpGet getRequest = new HttpGet(new URI(uri.toString()));
        final HttpResponse response = client.execute(getRequest);
        final int statusCode = response.getStatusLine().getStatusCode();
        // NOTE(review): a non-200 status is only logged — the body (likely an
        // error page) is still written to the cache file below; confirm.
        if (statusCode != HttpStatus.SC_OK) {
            Log.w(TAG, "Error " + statusCode + " while retrieving file from " + uri);
        }
        // create file
        final File file = new File(filename);
        final File parent = new File(file.getParent());
        if (!parent.exists() && !parent.mkdir()) {
            Log.w(TAG, "Parent directory doesn't exist");
        }
        // download
        final HttpEntity entity = response.getEntity();
        // NOTE(review): a null entity is only logged — the very next line
        // dereferences it and would throw a NullPointerException.
        if (entity == null) {
            Log.w(TAG, "Null entity received when downloading " + uri);
        }
        final InputStream inputStream = entity.getContent();
        try {
            readFile(file, new BufferedInputStream(inputStream, 1024));
        } finally {
            inputStream.close();
            entity.consumeContent();
        }
        if (logging) {
            Log.d(TAG, "Image from " + uri + " downloaded to " + filename);
        }
    }

    /**
     * Delete specified file from download cache.
     *
     * @param filename
     *            file name.
     */
    public static void deleteImage(final String filename) {
        if (logging) {
            Log.d(TAG, "Deleting image " + filename);
        }
        final File file = new File(filename);
        if (!file.delete() && logging) {
            Log.w(TAG, "Image " + filename + " couldn't be deleted");
        }
        if (logging) {
            Log.d(TAG, "Image " + filename + " deleted");
        }
    }

    /**
     * Clean up image manager. Unloads all cached images. Deletes all downloaded
     * images.
     */
    public static synchronized void cleanUp() {
        final long t = System.currentTimeMillis();
        if (logging) {
            Log.d(TAG, "Image manager clean up");
        }
        // unload all images
        // (removal while iterating over keySet() is safe here because
        // 'loaded' is a ConcurrentHashMap)
        final Set<ImageManagerRequest> reqs = loaded.keySet();
        for (final ImageManagerRequest req : reqs) {
            unloadImage(req);
        }
        // delete downloaded files
        final File dir = new File(application.getCacheDir() + "/image_manager/");
        if (dir.exists() && dir.isDirectory()) {
            final File[] files = dir.listFiles();
            for (int i = 0; i != files.length; ++i) {
                deleteImage(files[i].getAbsolutePath());
            }
            if (logging) {
                Log.d(TAG, "Deleting directory " + dir.getAbsolutePath());
            }
            if (!dir.delete() && logging) {
                Log.w(TAG, "Directory " + dir.getAbsolutePath() + " couldn't be deleted");
            }
            if (logging) {
                Log.d(TAG, "Directory " + dir.getAbsolutePath() + " deleted");
            }
        }
        final long dt = System.currentTimeMillis() - t;
        if (logging) {
            Log.d(TAG, "Image manager clean up finished, took " + dt + "[msec]");
            logImageManagerStatus();
        }
    }

    /**
     * Check if image manager logging is enabled. By default logging is
     * disabled.
     *
     * @return true if image manager logging is enabled, false otherwise.
     */
    public static boolean isLoggingEnabled() {
        return logging;
    }

    /**
     * Enable/disable image manager logging.
     *
     * @param logging
     *            enable/disable image manager logging.
     */
    public static void setLoggingEnabled(final boolean logging) {
        ImageManager.logging = logging;
    }

    /**
     * Log image manager current status. Logs:
     * <ul>
     * <li>manager uptime in seconds
     * <li>all loaded images details
     * <li>used memory
     * </ul>
     */
    public static void logImageManagerStatus() {
        final float t = 0.001f * (System.currentTimeMillis() - start);
        Log.d(TAG, "Uptime: " + t + "[s]");
        // count loaded images
        final int imgn = loaded.size();
        Log.d(TAG, "Loaded images: " + imgn);
        if (imgn > 0) {
            int totalSize = 0;
            for (final LoadedBitmap limg : loaded.values()) {
                final Bitmap bmp = limg.getBitmap();
                // no bitmap
                if (bmp == null) {
                    continue;
                }
                // get bits per pixel (bytes, actually) from the bitmap config
                int bpp = 0;
                if (bmp.getConfig() != null) {
                    switch (bmp.getConfig()) {
                    case ALPHA_8:
                        bpp = 1;
                        break;
                    case RGB_565:
                    case ARGB_4444:
                        bpp = 2;
                        break;
                    case ARGB_8888:
                    default:
                        bpp = 4;
                        break;
                    }
                }
                // count total size
                totalSize += bmp.getWidth() * bmp.getHeight() * bpp;
            }
            Log.d(TAG, "Estimated loaded images size: " + totalSize / 1024 + "[kB]");
        }
        // count queued images
        Log.d(TAG, "Queued images: " + loadQueue.size());
        // count downloaded files
        final File dir = new File(application.getCacheDir() + "/image_manager/");
        if (dir.isDirectory()) {
            final File[] files = dir.listFiles();
            Log.d(TAG, "Downloaded images: " + files.length);
            if (files.length > 0) {
                int totalSize = 0;
                for (int i = 0; i != files.length; ++i) {
                    totalSize += files[i].length();
                }
                Log.d(TAG, "Estimated downloaded images size: " + totalSize / 1024 + "[kB]");
            }
        } else {
            Log.d(TAG, "Downloaded images: 0");
        }
    }

    /**
     * Get size of image specified by image request.
     *
     * @param req
     *            image request.
     * @return image dimensions.
     */
    public static Point getImageSize(final ImageManagerRequest req) {
        // Decode bounds only — no pixel data is allocated.
        final Options options = new Options();
        options.inJustDecodeBounds = true;
        if (req.filename != null) {
            BitmapFactory.decodeFile(req.filename, options);
        }
        if (req.resId >= 0) {
            BitmapFactory.decodeResource(application.getResources(), req.resId, options);
        }
        if (req.uri != null && isImageDownloaded(req.uri)) {
            BitmapFactory.decodeFile(getFilenameForUri(req.uri), options);
        }
        return new Point(options.outWidth, options.outHeight);
    }

    /**
     * Get image specified by image request. This returns image as currently
     * available in manager, which means:
     * <ul>
     * <li>not loaded at all: NULL - no image
     * <li>loaded preview
     * <li>loaded full
     * </ul>
     * If image is not available in cache, image request is posted to
     * asynchronous loading and will be available soon. All image options are
     * specified in image request.
     *
     * @param req
     *            image request.
     * @return image as currently available in manager (preview/full) or NULL if
     *         it's not available at all.
     * @see pl.polidea.imagemanager.ImageManagerRequest
     */
    public static Bitmap getImage(final ImageManagerRequest req) {
        Bitmap bmp = null;
        // save bitmap request (remove-then-add keeps the most recent
        // occurrence at the end of the list)
        synchronized (requests) {
            requests.remove(req);
            requests.add(req);
        }
        // look for bitmap in already loaded resources
        if (isImageLoaded(req)) {
            bmp = getLoadedBitmap(req);
        }
        // bitmap found
        if (bmp != null) {
            return bmp;
        }
        // wait until image is not downloaded
        if (req.uri != null && !isImageDownloaded(req.uri)) {
            // start download if necessary
            if (!isImageDownloading(req.uri)) {
                queueImageDownload(req);
            }
            return null;
        }
        // load preview image quickly (synchronously, heavily sub-sampled)
        if (req.preview) {
            try {
                bmp = loadImage(req, true);
                if (bmp == null) {
                    return null;
                }
                // save preview image
                loaded.put(req, new LoadedBitmap(bmp, req.strong));
            } catch (final OutOfMemoryError err) {
                // oh noes! we have no memory for image
                if (logging) {
                    Log.e(TAG, "Error while loading preview image " + req + ". Out of memory.");
                    logImageManagerStatus();
                }
            }
        }
        // add image to loading queue
        if (!isImageLoading(req)) {
            queueImageLoad(req);
        }
        return bmp;
    }

    static {
        // save starting time
        start = System.currentTimeMillis();
        // start threads: three loaders and three downloaders run for the
        // lifetime of the process
        new LoadThread().start();
        new LoadThread().start();
        new LoadThread().start();
        new DownloadThread().start();
        new DownloadThread().start();
        new DownloadThread().start();
    }
}
package nl.mpi.kinnate.ui;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.net.URI;
import java.net.URISyntaxException;
import javax.swing.JMenu;
import javax.swing.JMenuItem;
import nl.mpi.arbil.ui.ArbilWindowManager;
import nl.mpi.arbil.ui.GuiHelper;
/**
 * "Samples" menu: each item opens one of the bundled SVG sample diagrams
 * (packaged under /svgsamples on the classpath) in the main frame.
 */
public class SamplesFileMenu extends JMenu implements ActionListener {

    // Owning frame, used to open the selected sample diagram.
    MainFrame mainFrame;

    public SamplesFileMenu(MainFrame mainFrameLocal) {
        mainFrame = mainFrameLocal;
        // Samples currently excluded from the menu are kept here commented
        // out so they can be re-enabled easily.
        addSampleToMenu("Application Overview", "ApplicationOverview.svg");
        addSampleToMenu("Hawaiian Kin Terms", "HawaiianKinTerms.svg");
        // addSampleToMenu("Japanese Kin Terms", "JapaneseKinTerms.svg");
        // addSampleToMenu("Custom Symbols", "CustomSymbols.svg");
        addSampleToMenu("Named Transient Entities", "NamedTransientEntities.svg");
        // addSampleToMenu("Cha'palaa Kin Terms", "ChapalaaKinTerms.svg");
        // addSampleToMenu("Gendered Ego", "GenderedEgo.svg");
        // addSampleToMenu("Olivier Kyburz Examples", "N40.svg");
        // addSampleToMenu("Archive Link Example", "ArchiveLinks.svg");
        addSampleToMenu("Charles II of Spain", "Charles_II_of_Spain.svg");
        addSampleToMenu("Imported Data Query Example (ANTONIO DE PAULA PESSOA DE /FIGUEIREDO/)", "QueryExample.svg");
        // addSampleToMenu("Imported Entities (600)", "600ImportedEntities.svg");
        // addSampleToMenu("R Usage of the Entity Server", "R-ServerUsage.svg");
    }

    // Adds one menu item whose action command is the classpath URL of the
    // sample file.
    // NOTE(review): getResource() returns null if the sample is missing from
    // the jar, which would make toString() throw here — confirm all listed
    // files ship with the application.
    private void addSampleToMenu(String menuText, String sampleFileString) {
        String currentFilePath = SamplesFileMenu.class.getResource("/svgsamples/" + sampleFileString).toString();
        JMenuItem currentMenuItem = new JMenuItem(menuText);
        currentMenuItem.setActionCommand(currentFilePath);
        currentMenuItem.addActionListener(this);
        this.add(currentMenuItem);
    }

    // Opens the sample whose URL was stored as the menu item's action command;
    // the menu item's label is reused as the diagram name.
    public void actionPerformed(ActionEvent e) {
        try {
            final URI sampleFile = new URI(e.getActionCommand());
            if (e.getSource() instanceof JMenuItem) {
                String sampleName = ((JMenuItem) e.getSource()).getText();
                mainFrame.openDiagram(sampleName, sampleFile, false);
            }
        } catch (URISyntaxException exception) {
            GuiHelper.linorgBugCatcher.logError(exception);
            ArbilWindowManager.getSingleInstance().addMessageDialogToQueue("Failed to load sample", "Sample Diagram");
        }
    }

    // private void addLaunchSampleToMenu(String menuText, String sampleFileString) {
    // String currentFilePath = SamplesFileMenu.class.getResource("../../../../svgsamples/" + sampleFileString).getPath();
    // JMenuItem currentMenuItem = new JMenuItem(menuText);
    // currentMenuItem.setActionCommand(currentFilePath);
    // currentMenuItem.addActionListener(new LaunchExternal());
    // this.add(currentMenuItem);
    // class LaunchExternal implements ActionListener {
    // public void actionPerformed(ActionEvent e) {
    // try {
    // GuiHelper.getSingleInstance().openFileInExternalApplication(new URI(e.getActionCommand()));
    // } catch (URISyntaxException exception) {
    // System.err.println(exception.getMessage());
}
package com.precious.calccedo.handlers;
import com.precious.calccedo.Calccedo;import com.precious.calccedo.configuration.Configuration;import java.util.ArrayList;
/** * * @author Ibrahim Abdsaid Hanna * ibrahim.seniore@gmail.com */ public class CalccedoHandler extends Calccedo implements Handler {
// Characters that may start a sub-expression (functions, root, paren) —
// an implicit '*' is inserted before these unless an operator precedes them.
private ArrayList<Character> list;
// Operator characters after which no implicit '*' is needed.
private ArrayList<Character> list2;

public CalccedoHandler() {
    list = new ArrayList<>();
    for (final char opener : new char[] { 'S', 'C', 'T', 'L', '<', '(' }) {
        list.add(opener);
    }
    list2 = new ArrayList<>();
    for (final char operator : new char[] { '/', '%', '+', '-', '*', '^', 'n', 's', 'g', '<', '(' }) {
        list2.add(operator);
    }
    init();
}
/**
 * Finds the innermost parenthesised sub-expression of {@code formula}.
 * Scans from the right, so the first '(' encountered is the innermost one,
 * then pairs it with the next ')' after it. If the three characters before
 * the '(' name a function (Sin/Cos/Tan/Log), that operand is attached to
 * the returned {@link Quote}.
 *
 * Only called while the formula still contains '(' (see calculatePartethis).
 */
private Quote parsePartethis(String formula) {
    int offsetA = 0;
    int offsetZ = 0;
    // Scan right-to-left; the first '(' found is the innermost group.
    for (int i = formula.length() - 1; i >= 0; i--) {
        if (formula.charAt(i) == '(') {
            offsetA = i;
            for (int j = i; j < formula.length(); j++) {
                if (formula.charAt(j) == ')') {
                    offsetZ = j;
                    i = -1; // terminate the outer scan
                    break;
                }
            }
        }
    }
    final String group = formula.substring(offsetA, offsetZ + 1);
    if (offsetA == 0) {
        // '(' at the start — no room for a function prefix.
        return new Quote(group, offsetA, offsetZ + 1);
    }
    try {
        // Reuse the existing helper to recognise function prefixes,
        // keeping the two methods consistent.
        final String operand = obtainQuoteOperand(formula.substring(offsetA - 3, offsetA));
        if (!operand.equals("")) {
            return new Quote(operand, group, offsetA, offsetZ + 1);
        }
        return new Quote(group, offsetA, offsetZ + 1);
    } catch (Exception ex) {
        // Fewer than three characters precede the '(' — plain group.
        return new Quote(group, offsetA, offsetZ + 1);
    }
}

/**
 * Maps a three-character prefix to its function operand, or "" when the
 * prefix is not a recognised function.
 */
private String obtainQuoteOperand(String digits) {
    switch (digits) {
        case "Sin":
        case "Cos":
        case "Tan":
        case "Log":
            return digits;
        default:
            return "";
    }
}

/**
 * Returns true for characters that can be part of a numeric literal:
 * decimal digits and the decimal point.
 * (Replaces the previous exception-driven Integer.parseInt probe.)
 */
@Override
public boolean isNumber(char c) {
    return c == '.' || Character.isDigit(c);
}
@Override public String optimizeFormula(String formula) { String newformula=""; for(int i=0;i<formula.length();i++){ if(list.contains(formula.charAt(i)) && i>0){ if(list2.contains(formula.charAt(i-1)) && i>0){ newformula=newformula+formula.charAt(i); } else{ newformula=newformula+"*"+formula.charAt(i); } } else if(isNumber(formula.charAt(i))){ if(i>0&&(formula.charAt(i-1)==')'||formula.charAt(i-1)=='>')){ newformula=newformula+"*"+formula.charAt(i); } else{ newformula=newformula+formula.charAt(i); } } else{ newformula=newformula+formula.charAt(i); } } if(Configuration.deepTracing) System.out.println("optinmization is >>>>>>>>>>>"+newformula);
return newformula; }
@Override public boolean initValidation(String formula) { if(!formula.contains("(")&&!formula.contains("<")&&!formula.contains(">")&&!formula.contains(")")){ return true; } int openedPartethis=0; int openedRoot=0; for(int i=0;i<formula.length();i++){ if(formula.charAt(i)=='('){ openedPartethis++; } if(formula.charAt(i)==')'){ openedPartethis--; } if(openedPartethis<0){ return false; } if(formula.charAt(i)=='<'){ openedRoot++; } if(formula.charAt(i)=='>'){ openedRoot--; } if(openedRoot<0){ return false; } } return openedPartethis==0 && openedRoot==0 ; } @Override public String calculate(String formula) { // validate formula if(!initValidation(formula)) return "error"; // optimize formula formula=optimizeFormula(formula); // calculate inside partethis String firstProcess= calculatePartethis(formula); if(firstProcess.equals("error")){ return "Error"; } // include final formula inside partetehis to process it // second peocess is the final process in calccedo, just because conatins only +,- String secondProcess= calculatePartethis("("+firstProcess+")"); if(secondProcess.equals("error")){ return "Error"; } return secondProcess;}
private String calculatePartethis(String formula){ Quote quote; SubFormula subFormula; QuoteResult quoteResult; while(formula.contains("(")){ quote=parsePartethis(formula); subFormula=new SubFormula(quote); quoteResult=subFormula.getQuoteResult(); if(quoteResult==null){ return "error"; } else{ formula=formula.substring(0,quoteResult.offsetA)+quoteResult.result+formula.substring(quoteResult.offsetZ,formula.length()); } } if(Configuration.deepTracing) System.out.println("formula after parsing partethis"+formula); return formula; }
@Override public boolean isNumber(String quote) { try{ Double.parseDouble(quote); return true; } catch(Exception ex){ if(Configuration.deepTracing){ // System.err.println(ex+"\n just dummy exception do in behind while validating Numbers\n"); System.out.println("\nCalccedo Info: this is just info to help developers how Calccedo Library work"); System.out.println("Info quote "+quote+", cannot wrapped to Double, so it will complete loop until finishing operations \n"); } return false; } }
} |
package rhomobile.sync;
import java.io.IOException;
import java.io.ByteArrayInputStream;
import javax.microedition.io.HttpConnection;
import rhomobile.URI;
import j2me.util.ArrayList;
import rhomobile.db.PerstLiteAdapter;
import com.xruby.runtime.builtin.ObjectFactory;
import com.xruby.runtime.builtin.RubyArray;
import com.xruby.runtime.builtin.RubyHash;
import com.xruby.runtime.builtin.RubyInteger;
import com.xruby.runtime.builtin.RubyString;
import com.xruby.runtime.lang.RubyConstant;
import com.xruby.runtime.lang.RubyValue;
/**
 * Static helpers for the RhoSync client: database access through the Perst
 * adapter, push/pull of changes against the sync server, and login
 * session/cookie management.
 */
public class SyncUtil {
/** Shared Perst database adapter; assigned by {@link #init()}. */
public static PerstLiteAdapter adapter = null;
// 4 KB scratch buffer. NOTE(review): its users are outside this class
// (presumably the blob transfer code) — confirm before changing the size.
public static byte[] m_byteBuffer = new byte[4096];
/** Allocates the shared adapter and registers the blob-sync DB callback. */
public static void init(){
SyncUtil.adapter = PerstLiteAdapter.alloc(null);
SyncBlob.DBCallback callback = new SyncBlob.DBCallback();
SyncUtil.adapter.setDbCallback(callback);
}
/**
 * Creates a new, empty XRuby array (used throughout this class as the
 * positional query descriptor for {@link PerstLiteAdapter}).
 *
 * @return the ruby array
 */
public static RubyArray createArray() {
return new RubyArray();
}
/**
 * Creates a new, empty XRuby hash (used for where-clauses and column/value
 * maps).
 *
 * @return the ruby hash
 */
public static RubyHash createHash() {
return ObjectFactory.createHash();
}
/**
 * Wraps a Java long in an XRuby integer.
 *
 * @param val
 *            the val
 *
 * @return the ruby integer
 */
public static RubyInteger createInteger(long val) {
return ObjectFactory.createInteger(val);
}
/**
 * Wraps a Java string in an XRuby string.
 *
 * @param val
 *            the val
 *
 * @return the ruby string
 */
public static RubyString createString(String val) {
return ObjectFactory.createString(val);
}
/**
 * Fetches the remote changes for one source and applies them locally.
 * Pages through the server's change stream in p_size batches, inserting and
 * deleting object_values rows as instructed, acknowledging sync tokens on
 * the way, and finally records the run's statistics on the source row.
 *
 * @param source    source to sync (provides url and token state)
 * @param client_id client id appended to every request
 * @param params    optional "ask" question; when non-empty, a single ask
 *                  request is issued and the paging loop does not repeat
 * @return number of received records plus a stop flag
 */
public static SyncFetchResult fetchRemoteChanges(SyncSource source, String client_id, String params) {
    int success = 0, deleted = 0, inserted = 0;
    long start = 0, duration = 0;
    String data = null;
    SyncJSONParser.SyncHeader header = new SyncJSONParser.SyncHeader();
    int nTry = 0, nTotal = -1;
    boolean repeat = true;
    start = System.currentTimeMillis();
    String session = get_session(source);
    do {
        // Build the fetch url; the ask action replaces the plain fetch when
        // a question parameter is supplied.
        String fetch_url = source.get_sourceUrl() +
            ((params != null && params.length() > 0) ? SyncConstants.SYNC_ASK_ACTION : "") +
            SyncConstants.SYNC_FORMAT +
            "&client_id=" + client_id
            + "&p_size=" + SyncConstants.SYNC_PAGE_SIZE;
        if (params != null && params.length() > 0) {
            fetch_url += "&question=" + params;
            // Don't repeat if we're calling ask method
            repeat = false;
        }
        // Acknowledge the token received with the previous page, if any.
        if (header._token.length() > 0)
            fetch_url += "&ack_token=" + header._token;
        // An empty or "0" token marks the start of a fresh sync pass.
        if ( source.get_token().length() == 0 || source.get_token().equals("0") )
            processToken("1", source );
        header = new SyncJSONParser.SyncHeader();
        try {
            data = SyncManager.fetchRemoteData(fetch_url, session, true);
        } catch (IOException e) {
            System.out
                .println("There was an error fetching data from the sync source: "
                    + e.getMessage());
        }
        if (data != null) {
            ArrayList list = SyncJSONParser.parseObjectValues(data, header);
            processToken(header._token, source);
            int count = list.size();
            if ( nTotal < 0 )
                nTotal = 0;
            nTotal += count;
            if (count > 0) {
                // Apply each received record to the local database.
                for (int i = 0; i < count; i++) {
                    SyncObject syncObj = ((SyncObject) list.get(i));
                    String dbOp = syncObj.getDbOperation();
                    if (dbOp != null) {
                        if (dbOp.equalsIgnoreCase("insert")) {
                            // SyncBlob.insertOp(syncObj, client_id, SyncBlob.SYNC_STAGE);
                            syncObj.insertIntoDatabase();
                            inserted++;
                        } else if (dbOp.equalsIgnoreCase("delete")) {
                            syncObj.deleteFromDatabase();
                            deleted++;
                        }
                    }
                }
            }
            success = 1;
        } else {
            // Failed attempt; give up after MAX_SYNC_TRY_COUNT tries.
            nTry++;
        }
    } while (header._count != 0 && nTry < SyncConstants.MAX_SYNC_TRY_COUNT && repeat);
    duration = (System.currentTimeMillis() - start) / 1000L;
    updateSourceSyncStatus(source, inserted, deleted, duration, success);
    // A header count of -1 is the server's "stop syncing" signal.
    return new SyncFetchResult(nTotal,header._count == -1);
}
/**
 * Handles a sync token from the response header.  When the token equals the
 * one the source already holds, the page is a resend and the unconfirmed
 * rows carrying that token are deleted.  Otherwise the new token is stored
 * on the source object and persisted to the sources table.
 *
 * @param token  token from the response header ("1" marks a fresh pass)
 * @param source source whose token state is updated
 */
private static void processToken(String token, SyncSource source) {
    if ( token.length() > 0 && !token.equals("0") && !token.equals("1") &&
        source.get_token().equals(token)) {
        // Delete non-confirmed records
        RubyHash where = createHash();
        where.add(createString("source_id"), createInteger(source
            .get_sourceId()));
        where.add(PerstLiteAdapter.TOKEN, createString(token));
        adapter.deleteFromTable(createString("object_values"), where);
    } else //if (token.length() > 0)
    {
        source.set_token(token);
        RubyHash values = SyncUtil.createHash();
        values.add(PerstLiteAdapter.TOKEN, createString(token));
        RubyHash where = SyncUtil.createHash();
        where.add(PerstLiteAdapter.SOURCE_ID, createInteger(source
            .get_sourceId()));
        adapter.updateIntoTable(createString(SyncConstants.SOURCES_TABLE),
            values, where);
    }
}
/**
 * Records the outcome of one sync run on the source's row in the sources
 * table.
 *
 * @param source   source whose row is updated
 * @param inserted number of records inserted during the run
 * @param deleted  number of records deleted during the run
 * @param duration run time in seconds
 * @param success  1 when the run succeeded, 0 otherwise
 */
private static void updateSourceSyncStatus(SyncSource source, int inserted,
        int deleted, long duration, int success) {
    // Collect the status columns to write.
    RubyHash statusValues = SyncUtil.createHash();
    statusValues.add(PerstLiteAdapter.Table_sources.LAST_UPDATED,
            createInteger(System.currentTimeMillis() / 1000));
    statusValues.add(PerstLiteAdapter.Table_sources.LAST_INSERTED_SIZE,
            createInteger(inserted));
    statusValues.add(PerstLiteAdapter.Table_sources.LAST_DELETED_SIZE,
            createInteger(deleted));
    statusValues.add(PerstLiteAdapter.Table_sources.LAST_SYNC_DURATION,
            createInteger(duration));
    statusValues.add(PerstLiteAdapter.Table_sources.LAST_SYNC_SUCCESS,
            createInteger(success));
    // Address the row by its source id.
    RubyHash rowFilter = SyncUtil.createHash();
    rowFilter.add(PerstLiteAdapter.SOURCE_ID,
            createInteger(source.get_sourceId()));
    adapter.updateIntoTable(createString(SyncConstants.SOURCES_TABLE),
            statusValues, rowFilter);
}
/**
 * Selects every object_values row that belongs to the given source id.
 *
 * @param id source id to filter on
 * @return all matching object_values rows
 */
public static RubyArray getObjectValueList(int id) {
    // The query descriptor is positional: table, attribute list, where-hash.
    RubyHash bySource = createHash();
    bySource.add(createString("source_id"), createInteger(id));
    RubyArray query = createArray();
    query.add(createString("object_values"));
    query.add(createString("*"));
    query.add(bySource);
    return (RubyArray) adapter.selectFromTable(query);
}
/**
 * Loads all pending local records of one update type for a source and wraps
 * each one in a {@link SyncOperation}.
 *
 * @param type
 *            update type stored in object_values: "create", "update" or
 *            "delete"
 * @param source
 *            the source whose pending records are loaded
 *
 * @return ArrayList of {@link SyncOperation}; empty when nothing is pending
 */
public static ArrayList getOpListFromDatabase(String type, SyncSource source) {
    System.out.println("Checking database for " + type + " operations...");
    // Positional query descriptor: [table, attributes, where-hash].
    RubyArray arr = createArray();
    RubyHash where = createHash();
    String operation = null;
    arr.add(createString("object_values"));
    arr.add(createString("*"));
    where.add(createString("update_type"), createString(type));
    where.add(createString("source_id"), createInteger(source
        .get_sourceId()));
    arr.add(where);
    RubyArray rows = (RubyArray) adapter.selectFromTable(arr);
    ArrayList objects = getSyncObjectList(rows);
    System.out.println("Found " + objects.size() + " records for " + type
        + " processing...");
    ArrayList results = new ArrayList();
    // Map the type string onto the wire-level operation constant.
    // NOTE(review): operation stays null for any other type string —
    // presumably callers only ever pass the three values above; confirm
    // before relying on it.
    if (type != null) {
        if (type.equalsIgnoreCase("create")) {
            operation = SyncConstants.UPDATE_TYPE_CREATE;
        } else if (type.equalsIgnoreCase("update")) {
            operation = SyncConstants.UPDATE_TYPE_UPDATE;
        } else if (type.equalsIgnoreCase("delete")) {
            operation = SyncConstants.UPDATE_TYPE_DELETE;
        }
    }
    for (int i = 0; i < objects.size(); i++) {
        SyncObject current = (SyncObject) objects.get(i);
        SyncOperation newOperation = new SyncOperation(operation, current);
        results.add(newOperation);
        System.out
            .println("Adding sync operation (attrib, source_id, object, value, update_type, uri): "
                + current.getAttrib()
                + ", "
                + current.getSourceId()
                + ", "
                + current.getObject()
                + ", "
                + (current.getValue() == null ? "null" : current
                    .getValue())
                + ", "
                + operation
                + ", "
                + source.get_sourceUrl());
    }
    return results;
}
/**
 * Deletes all pending object_values rows of one update type for a source.
 *
 * @param type   update type to purge (e.g. "create", "update", "delete", "ask")
 * @param source source whose rows are purged
 */
public static void removeOpListFromDatabase(String type, SyncSource source) {
    RubyHash filter = createHash();
    filter.add(createString("update_type"), createString(type));
    filter.add(createString("source_id"), createInteger(source.get_sourceId()));
    adapter.deleteFromTable(createString("object_values"), filter);
}
/**
 * Fetches and consumes the pending "ask" parameter stored for a source.
 * The backing row is removed from object_values once it has been read.
 *
 * @param source source whose ask parameter is looked up
 * @return the stored parameter string, or "" when none is pending
 */
public static String getParamsForSource(SyncSource source) {
    String askType = "ask";
    RubyArray query = createArray();
    query.add(createString("object_values"));
    query.add(createString("*"));
    RubyHash filter = createHash();
    filter.add(createString("source_id"), createInteger(source.get_sourceId()));
    filter.add(createString("update_type"), createString(askType));
    query.add(filter);
    RubyArray rows = (RubyArray) adapter.selectFromTable(query);
    if (rows.size() == 0) {
        return "";
    }
    // There should only be one result
    RubyHash row = (RubyHash) rows.at(createInteger(0));
    String params = row.get(createString("value")).asString();
    removeOpListFromDatabase(askType, source);
    return params;
}
/**
 * Returns every row of the sources table ordered by source_id, or an empty
 * array when the adapter has not been initialized yet.
 *
 * @return the source rows
 */
public static RubyArray getSourceList() {
    RubyArray query = createArray();
    if (adapter == null) {
        return query;
    }
    query.add(createString("sources"));
    query.add(createString("*"));
    query.add(RubyConstant.QNIL); // no where clause
    RubyHash ordering = createHash();
    ordering.add(createString("order by"), createString("source_id"));
    query.add(ordering);
    return (RubyArray) adapter.selectFromTable(query);
}
/**
 * Converts raw object_values rows into {@link SyncObject} instances.
 *
 * @param list rows as returned by the adapter
 * @return ArrayList of SyncObject, one per input row
 */
public static ArrayList getSyncObjectList(RubyArray list) {
    ArrayList converted = new ArrayList();
    for (int idx = 0; idx < list.size(); idx++) {
        RubyHash row = (RubyHash) list.at(createInteger(idx));
        String attrib = row.get(createString("attrib")).asString();
        // value may be absent; preserve null rather than substituting "".
        RubyValue rawValue = row.get(createString("value"));
        String value = (rawValue == null) ? null : rawValue.asString();
        String object = row.get(createString("object")).asString();
        String updateType = row.get(createString("update_type")).asString();
        // attrib_type is optional; default to an empty string.
        String type = "";
        RubyValue rawType = row.get(createString("attrib_type"));
        if (rawType != null && rawType != RubyConstant.QNIL) {
            type = rawType.asString();
        }
        int sourceId = row.get(createString("source_id")).toInt();
        converted.add(new SyncObject(attrib, sourceId, object, value,
                updateType, type));
    }
    return converted;
}
/**
 * Debug helper: dumps the value/attrib pair of every row to stdout.
 *
 * @param objects rows to print
 */
public static void printResults(RubyArray objects) {
    for (int i = 0; i < objects.size(); i++) {
        RubyHash row = (RubyHash) objects.at(SyncUtil.createInteger(i));
        String value = row.get(SyncUtil.createString("value")).toString();
        String attrib = row.get(SyncUtil.createString("attrib")).toString();
        System.out.println("value[" + i + "][" + attrib + "]: " + value);
    }
}
/**
 * Finds the first source that still holds an unfinished sync token, so an
 * interrupted run can resume where it stopped.
 *
 * @param sources rows of the sources table
 * @return index of the first source with a non-empty, non-"0" token;
 *         0 when every source is clean
 */
private static int get_start_source( RubyArray sources )
{
    for (int idx = 0; idx < sources.size(); idx++) {
        RubyHash row = (RubyHash) sources.at(SyncUtil.createInteger(idx));
        RubyValue token = row.get(PerstLiteAdapter.TOKEN);
        if (token == null || token == RubyConstant.QNIL) {
            continue;
        }
        String tok = token.toStr();
        if (tok.length() > 0 && !tok.equals("0")) {
            return idx;
        }
    }
    return 0;
}
/** Result of one fetchRemoteChanges run: record count plus a stop flag. */
static class SyncFetchResult {
// Number of records received from the server.
int available = 0;
// True when the server signalled end of sync (header count == -1).
boolean stopSync = false;
SyncFetchResult() {}
SyncFetchResult( int avail, boolean bStop ) {
available = avail;
stopSync = bStop;
}
};
/**
 * Runs a full sync pass: pushes the local create/update/delete operations
 * of every source and, when the push succeeds, pulls the remote changes.
 * Resumes from the first source with an unfinished token and checks the
 * thread's stop flag between steps.
 *
 * @param thread owning sync thread, polled for cancellation
 *
 * @return SyncConstants.SYNC_PROCESS_CHANGES_OK (always)
 */
public static int processLocalChanges(SyncThread thread) {
    RubyArray sources = SyncUtil.getSourceList();
    String client_id = null;
    int nStartSrc = get_start_source(sources);
    SyncFetchResult syncResult = new SyncFetchResult();
    for (int i = nStartSrc; i < sources.size() && !thread.isStop() && !syncResult.stopSync; i++) {
        RubyHash element = (RubyHash) sources.at(SyncUtil.createInteger(i));
        String url = element.get(PerstLiteAdapter.SOURCE_URL).toString();
        int id = element.get(PerstLiteAdapter.SOURCE_ID).toInt();
        RubyValue token = element.get(PerstLiteAdapter.TOKEN);
        SyncSource current = new SyncSource(url, id);
        if (token != null && token != RubyConstant.QNIL)
            current.set_token(token.toStr());
        // The client id is shared by all sources; resolve it only once.
        if (client_id == null)
            client_id = get_client_id(current);
        if (thread.isStop())
            break;
        System.out.println("URL: " + current.get_sourceUrl());
        // Accumulates the per-type push results; any non-zero total means a
        // push failed and the pull for this source is skipped.
        int success = 0;
        success += processOpList(current, "create", client_id);
        if (thread.isStop())
            break;
        success += processOpList(current, "update", client_id);
        if (thread.isStop())
            break;
        success += processOpList(current, "delete", client_id);
        if (thread.isStop())
            break;
        if (success > 0) {
            System.out
                .println("Remote update failed, not continuing with sync...");
        } else {
            // Local changes are on the server; now pull the remote ones.
            String askParams = SyncUtil.getParamsForSource(current);
            syncResult = SyncUtil.fetchRemoteChanges(current, client_id, askParams);
            System.out.println("Successfully processed " + syncResult.available
                + " records...");
            if (SyncConstants.DEBUG) {
                RubyArray objects = SyncUtil.getObjectValueList(current
                    .get_sourceId());
                SyncUtil.printResults(objects);
            }
            if (!thread.isStop())
                SyncEngine.getNotificationImpl().fireNotification(id, syncResult.available);
        }
    }
    return SyncConstants.SYNC_PROCESS_CHANGES_OK;
}
/**
 * Pushes all pending operations of one type for a source: regular values
 * first, then the blob attachments extracted from the list.  The local rows
 * are removed only after both pushes succeed, so failed pushes are retried
 * on the next pass.
 *
 * @param source
 *            the source
 * @param type
 *            update type ("create", "update" or "delete")
 *
 * @return SyncConstants.SYNC_PUSH_CHANGES_OK when everything was accepted,
 *         SYNC_PUSH_CHANGES_ERROR otherwise
 */
private static int processOpList(SyncSource source, String type,
    String clientId) {
    int success = SyncConstants.SYNC_PUSH_CHANGES_OK;
    ArrayList list = getOpListFromDatabase(type, source);
    if (list.size() == 0) {
        return success;
    }
    System.out.println("Found " + list.size()
        + " available records for processing...");
    ArrayList listBlobs = SyncBlob.extractBlobs(list);
    if (pushRemoteChanges(source, list, clientId) != SyncConstants.SYNC_PUSH_CHANGES_OK) {
        success = SyncConstants.SYNC_PUSH_CHANGES_ERROR;
    } else {
        if ( SyncBlob.pushRemoteBlobs(source, listBlobs, clientId) == SyncConstants.SYNC_PUSH_CHANGES_OK )
        {
            // We're done processsing, remove from database so we
            // don't process again
            removeOpListFromDatabase(type, source);
        }
        else
            success = SyncConstants.SYNC_PUSH_CHANGES_ERROR;
    }
    return success;
}
/**
 * Pushes a list of local sync operations to the source's server in a single
 * url-encoded POST.
 *
 * @param source
 *            the source (supplies the base url and session)
 * @param list
 *            list of {@link SyncOperation}; the first entry's operation
 *            selects the url action for the whole batch
 * @param clientId
 *            client id sent as a query parameter
 *
 * @return SyncConstants.SYNC_PUSH_CHANGES_OK or SYNC_PUSH_CHANGES_ERROR
 */
public static int pushRemoteChanges(SyncSource source, ArrayList list,
    String clientId) {
    int success = 0;
    StringBuffer data = new StringBuffer();
    String url = null;
    if (list.size() == 0) {
        // Nothing to push counts as success.
        return SyncConstants.SYNC_PUSH_CHANGES_OK;
    }
    // Concatenate the url-encoded bodies of all operations with '&'.
    for (int i = 0; i < list.size(); i++) {
        data.append(((SyncOperation) list.get(i)).get_postBody());
        if (i != (list.size() - 1)) {
            data.append("&");
        }
    }
    ByteArrayInputStream dataStream = null;
    try {
        // Construct the post url
        url = source.get_sourceUrl() + "/"
            + ((SyncOperation) list.get(0)).get_operation()
            + "?client_id=" + clientId;
        String session = get_session(source);
        dataStream = new ByteArrayInputStream(data.toString().getBytes());
        success = SyncManager.pushRemoteData(url, dataStream, session,true,
            "application/x-www-form-urlencoded");
    } catch (IOException e) {
        System.out.println("There was an error pushing changes: "
            + e.getMessage());
        success = SyncConstants.SYNC_PUSH_CHANGES_ERROR;
    }
    if ( dataStream != null ){
        // Best-effort close; a failure here does not change the result.
        try{dataStream.close();}catch(IOException exc){}
        dataStream = null;
    }
    return success == SyncConstants.SYNC_PUSH_CHANGES_OK ? SyncConstants.SYNC_PUSH_CHANGES_OK
        : SyncConstants.SYNC_PUSH_CHANGES_ERROR;
}
/**
 * Counts the rows of the given table.
 *
 * @param dbName table name, e.g. "object_values" or "client_info"
 * @return row count, or 0 when the adapter returns nothing
 */
public static int getObjectCountFromDatabase(String dbName) {
    // Positional descriptor: table, attributes, where, options.
    RubyArray query = createArray();
    query.add(createString(dbName));
    query.add(createString("*"));
    query.add(RubyConstant.QNIL); // no where clause
    RubyHash options = createHash();
    options.add(createString("count"), RubyConstant.QTRUE);
    query.add(options);
    RubyInteger rowCount = (RubyInteger) adapter.selectFromTable(query);
    return rowCount == null ? 0 : rowCount.toInt();
}
/**
 * Returns the client id, fetching one from the server's /clientcreate
 * endpoint on first use and caching it in the client_info table (which
 * holds at most one row: updated when present, inserted otherwise).
 *
 * @param source source whose url is used for the clientcreate request
 * @return the client id, or "" when none is stored and the fetch failed
 */
public static String get_client_id(SyncSource source) {
    String client_id = get_client_db_info("client_id");
    if (client_id.length() == 0) {
        String data = null;
        try {
            data = SyncManager.fetchRemoteData(source.get_sourceUrl()
                + "/clientcreate" + SyncConstants.SYNC_FORMAT, "",
                false);
            if (data != null)
                client_id = SyncJSONParser.parseClientID(data);
            RubyHash hash = SyncUtil.createHash();
            hash.add(SyncUtil.createString("client_id"),
                createString(client_id));
            // Update the singleton client_info row, or insert the first one.
            if (getObjectCountFromDatabase(SyncConstants.CLIENT_INFO) > 0)
                adapter.updateIntoTable(
                    createString(SyncConstants.CLIENT_INFO), hash,
                    RubyConstant.QNIL);
            else
                adapter.insertIntoTable(
                    createString(SyncConstants.CLIENT_INFO), hash);
        } catch (IOException e) {
            System.out
                .println("There was an error fetching data from the sync source: "
                    + e.getMessage());
        }
    }
    return client_id;
}
/**
 * Reads the stored session string of a source from the sources table.
 *
 * @param source source whose session is looked up
 * @return the session, or "" when the source has no row
 */
public static String get_session(SyncSource source) {
    RubyHash filter = SyncUtil.createHash();
    filter.add(PerstLiteAdapter.SOURCE_ID,
            createInteger(source.get_sourceId()));
    RubyArray query = createArray();
    query.add(createString("sources"));
    query.add(PerstLiteAdapter.SESSION);
    query.add(filter);
    RubyArray rows = (RubyArray) adapter.selectFromTable(query);
    if (rows.size() == 0) {
        return "";
    }
    RubyHash row = (RubyHash) rows.at(SyncUtil.createInteger(0));
    return row.get(PerstLiteAdapter.SESSION).toString();
}
/**
 * Looks up a stored session whose source url is on the same host as the
 * given url, allowing sources that share a server to share a login.
 * Malformed urls are skipped silently (best-effort lookup).
 *
 * @param url url whose host is matched against all configured sources
 * @return the matching session string, or "" when none is found
 */
private static String getSessionByDomain(String url) {
    RubyArray sources = getSourceList();
    try {
        URI uri = new URI(url);
        for (int i = 0; i < sources.size(); i++) {
            try {
                RubyHash element = (RubyHash) sources.at(SyncUtil
                    .createInteger(i));
                String sourceUrl = element.get(PerstLiteAdapter.SOURCE_URL)
                    .toString();
                String session = element.get(PerstLiteAdapter.SESSION)
                    .toString();
                if (sourceUrl == null || sourceUrl.length() == 0)
                    continue;
                URI uriSrc = new URI(sourceUrl);
                if (session != null && session.length() > 0
                    && uri.getHost().equalsIgnoreCase(uriSrc.getHost()))
                    return session;
            } catch (URI.MalformedURIException exc) {
                // Intentionally ignored: a source with a bad url never matches.
            }
        }
    } catch (URI.MalformedURIException exc) {
        // Intentionally ignored: an unparsable input url yields no session.
    }
    return "";
}
/** Holder for the two cookies extracted from the login response. */
static class ParsedCookie {
// "auth_token=..." (plus its path attribute, when one followed it).
String strAuth;
// "rhosync_session=..." (plus its path attribute, when one followed it).
String strSession;
};
/*
 * private static void cutCookieField(ParsedCookie cookie, String strField){
 * int nExp = cookie.strCookie.indexOf(strField); cookie.strFieldValue = "";
 * if ( nExp > 0 ){ int nExpEnd = cookie.strCookie.indexOf(';', nExp); if (
 * nExpEnd > 0 ){ cookie.strFieldValue =
 * cookie.strCookie.substring(nExp+strField.length(), nExpEnd);
 * cookie.strCookie = cookie.strCookie.substring(0, nExp) +
 * cookie.strCookie.substring(nExpEnd+1); }else{ cookie.strFieldValue =
 * cookie.strCookie.substring(nExp+strField.length()); cookie.strCookie =
 * cookie.strCookie.substring(0, nExp); } } }
 */
/**
 * Splits a Set-Cookie header value into its ';'-separated fields and
 * captures the auth_token and rhosync_session cookies into the given
 * {@link ParsedCookie}.  A "path" field is appended to the auth cookie if
 * one was already captured, otherwise to the session cookie.
 *
 * @param value  raw Set-Cookie header value
 * @param cookie out-parameter receiving strAuth / strSession
 */
private static void parseCookie(String value, ParsedCookie cookie) {
    boolean bAuth = false;
    boolean bSession = false;
    Tokenizer stringtokenizer = new Tokenizer(value, ";");
    while (stringtokenizer.hasMoreTokens()) {
        String tok = stringtokenizer.nextToken();
        tok = tok.trim();
        if (tok.length() == 0) {
            continue;
        }
        // Split "name=value"; a bare token becomes a name with empty value.
        int i = tok.indexOf('=');
        String s1;
        String s2;
        if (i > 0) {
            s1 = tok.substring(0, i);
            s2 = tok.substring(i + 1);
        } else {
            s1 = tok;
            s2 = "";
        }
        s1 = s1.trim();
        s2 = s2.trim();
        if (s1.equalsIgnoreCase("auth_token") && s2.length() > 0) {
            cookie.strAuth = s1 + "=" + s2;
            bAuth = true;
        } else if (s1.equalsIgnoreCase("path") && s2.length() > 0) {
            if (bAuth)
                cookie.strAuth += ";" + s1 + "=" + s2;
            else if (bSession)
                cookie.strSession += ";" + s1 + "=" + s2;
        } else if (s1.equalsIgnoreCase("rhosync_session")
            && s2.length() > 0) {
            cookie.strSession = s1 + "=" + s2;
            bSession = true;
        }
    }
}
/**
 * Extracts the substring starting at the first occurrence of toc_name up to
 * (not including) the next ';' — e.g. "rhosync_session=abc" out of a raw
 * cookie header.
 *
 * @param toc_name token to search for
 * @param data     raw header text
 * @return the matched span, or null when the token or the ';' terminator
 *         is missing
 */
private static String extractToc(String toc_name, String data) {
    int from = data.indexOf(toc_name);
    if (from < 0) {
        return null;
    }
    int upto = data.indexOf(';', from);
    if (upto < 0) {
        return null;
    }
    return data.substring(from, upto);
}
/**
 * Walks all response headers and assembles the login cookies from every
 * Set-Cookie header.  Includes a fallback that extracts rhosync_session by
 * plain substring search for devices whose cookie parsing is unreliable.
 *
 * @param connection open connection whose response headers are read
 * @return parsed auth/session cookie pair (fields may remain null)
 * @throws IOException when reading the headers fails
 */
private static ParsedCookie makeCookie(HttpConnection connection)
    throws IOException {
    ParsedCookie cookie = new ParsedCookie();
    // Header indices run until getHeaderFieldKey returns null.
    for (int i = 0;; i++) {
        String strField = connection.getHeaderFieldKey(i);
        if (strField == null)
            break;
        if (strField.equalsIgnoreCase("Set-Cookie")) {
            String header_field = connection.getHeaderField(i);
            System.out.println("Set-Cookie: " + header_field);
            parseCookie(header_field, cookie);
            // Hack to make it work on 4.6 device which doesn't parse
            // cookies correctly
            // if (cookie.strAuth==null) {
            // String auth = extractToc("auth_token", header_field);
            // cookie.strAuth = auth;
            // System.out.println("Extracted auth_token: " + auth);
            if (cookie.strSession == null) {
                String rhosync_session = extractToc("rhosync_session",
                    header_field);
                cookie.strSession = rhosync_session;
                System.out.println("Extracted rhosync_session: "
                    + rhosync_session);
            }
        }
    }
    return cookie;
}
/**
 * Logs the client in against every configured source that does not already
 * have a session for its host: POSTs the credentials to /client_login,
 * builds the session from the auth/session cookies of the response, and
 * stores the result on the source row (an empty string is stored when the
 * login failed).
 *
 * @param strUser login name
 * @param strPwd  password
 * @return true when every attempted login returned HTTP 200
 */
public static boolean fetch_client_login(String strUser, String strPwd) {
    boolean success = true;
    RubyArray sources = getSourceList();
    for (int i = 0; i < sources.size(); i++) {
        String strSession = "";
        // String strExpire="";
        HttpConnection connection = null;
        RubyHash element = (RubyHash) sources.at(SyncUtil.createInteger(i));
        String sourceUrl = element.get(PerstLiteAdapter.SOURCE_URL)
            .toString();
        int id = element.get(PerstLiteAdapter.SOURCE_ID).toInt();
        if (sourceUrl.length() == 0)
            continue;
        // Reuse an existing session from another source on the same host.
        strSession = getSessionByDomain(sourceUrl);
        if (strSession.length() == 0) {
            ByteArrayInputStream dataStream = null;
            try {
                String body = "login=" + strUser + "&password=" + strPwd+ "&remember_me=1";
                dataStream = new ByteArrayInputStream(body.getBytes());
                SyncManager.makePostRequest(sourceUrl + "/client_login", dataStream, "",
                    "application/x-www-form-urlencoded");
                connection = SyncManager.getConnection();
                int code = connection.getResponseCode();
                if (code == HttpConnection.HTTP_OK) {
                    ParsedCookie cookie = makeCookie(connection);
                    strSession = cookie.strAuth + ";" + cookie.strSession
                        + ";";
                } else {
                    System.out.println("Error posting data: " + code);
                    success = false;
                }
            } catch (IOException e) {
                System.out
                    .println("There was an error fetch_client_login: "
                        + e.getMessage());
            } finally {
                // Always release the stream and the shared connection.
                if ( dataStream != null ){
                    try{dataStream.close();}catch(IOException exc){}
                    dataStream = null;
                }
                SyncManager.closeConnection();
                connection = null;
            }
        }
        // Persist the session (possibly "") on the source row.
        RubyHash values = SyncUtil.createHash();
        values.add(PerstLiteAdapter.SESSION, createString(strSession));
        RubyHash where = SyncUtil.createHash();
        where.add(PerstLiteAdapter.SOURCE_ID, createInteger(id));
        adapter.updateIntoTable(createString(SyncConstants.SOURCES_TABLE),
            values, where);
    }
    return success;
}
/**
 * Reads a single attribute from the (at most one-row) client_info table.
 *
 * @param attr column name to read
 * @return the stored value, or "" when the table is empty
 */
public static String get_client_db_info(String attr) {
    RubyArray query = createArray();
    query.add(createString("client_info"));
    query.add(createString(attr));
    query.add(RubyConstant.QNIL); // no where clause
    RubyArray rows = (RubyArray) adapter.selectFromTable(query);
    if (rows.size() == 0) {
        return "";
    }
    RubyHash row = (RubyHash) rows.get(0);
    return row.getValue(createString(attr)).toString();
}
/**
 * Reports whether any configured source currently has a stored session.
 * Returns as soon as the first session is found; the original kept scanning
 * every remaining source, issuing a redundant read-only {@code get_session}
 * select per source with no effect on the result.
 *
 * @return true when at least one source has a non-empty session
 */
public static boolean logged_in() {
    RubyArray sources = SyncUtil.getSourceList();
    for (int i = 0; i < sources.size(); i++) {
        RubyHash element = (RubyHash) sources.at(SyncUtil.createInteger(i));
        String url = element.get(PerstLiteAdapter.SOURCE_URL).toString();
        int id = element.get(PerstLiteAdapter.SOURCE_ID).toInt();
        if (get_session(new SyncSource(url, id)).length() > 0) {
            return true;
        }
    }
    return false;
}
/**
 * Clears the stored session of every configured source, effectively
 * logging the client out of all of them.
 */
public static void logout() {
    RubyArray sources = SyncUtil.getSourceList();
    for (int idx = 0; idx < sources.size(); idx++) {
        RubyHash row = (RubyHash) sources.at(SyncUtil.createInteger(idx));
        int sourceId = row.get(PerstLiteAdapter.SOURCE_ID).toInt();
        // Overwrite the session column with an empty string.
        RubyHash blankSession = SyncUtil.createHash();
        blankSession.add(PerstLiteAdapter.SESSION, SyncUtil.createString(""));
        RubyHash filter = SyncUtil.createHash();
        filter.add(PerstLiteAdapter.SOURCE_ID, createInteger(sourceId));
        adapter.updateIntoTable(createString(SyncConstants.SOURCES_TABLE),
                blankSession, filter);
    }
}
/**
 * Wipes the local sync state: removes the cached client info and every
 * synchronized object.  The sources table itself is left untouched.
 */
public static void resetSyncDb() {
    adapter.deleteAllFromTable(createString(SyncConstants.CLIENT_INFO));
    adapter.deleteAllFromTable(createString(SyncConstants.OBJECTS_TABLE));
}
} |
package io.happie.naturalforms;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.ActivityNotFoundException;
import android.content.pm.ResolveInfo;
import android.os.Environment;
import android.net.Uri;
import org.apache.cordova.CallbackContext;
import org.apache.cordova.CordovaPlugin;
import org.json.JSONArray;
import org.json.JSONException;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.List;
public class NaturalForms extends CordovaPlugin {
public static final String TAG = "NaturalForms";
@Override
public boolean execute(String action, JSONArray args, CallbackContext callback) throws JSONException {
try {
PackageManager manager = cordova.getActivity().getApplicationContext().getPackageManager();
Intent LaunchIntent = manager.getLaunchIntentForPackage("net.expedata.naturalforms");
if (LaunchIntent == null) {
callback.error("naturalForms not installed");
return true;
}
File nfData = new File(cordova.getActivity().getExternalCacheDir() + File.separator + "nf-data" + System.currentTimeMillis() + ".csv");
try {
nfData.createNewFile();
FileOutputStream overWrite = new FileOutputStream(nfData.toString(), false);
overWrite.write(args.getString(0).getBytes());
overWrite.flush();
overWrite.close();
LaunchIntent.setDataAndType(Uri.parse("file://" + nfData.toString()), "text/csv");
ResolveInfo best = getPackageInfo(LaunchIntent, "net.expedata.naturalforms");
LaunchIntent.setClassName(best.activityInfo.packageName, "net.expedata.naturalforms.ui.CsvImportActivity");
cordova.getActivity().startActivity(LaunchIntent);
} catch (IOException ioe) {
callback.error("Could not send naturalForms data");
} catch (Exception e) {
callback.error("Failed to launch naturalForms");
}
callback.success();
} catch (ActivityNotFoundException e) {
callback.error("naturalForms not installed");
}
return true;
}
private ResolveInfo getPackageInfo(final Intent intent, final String packageName) {
final PackageManager pm = cordova.getActivity().getPackageManager();
final List<ResolveInfo> matches = pm.queryIntentActivities(intent, 0);
ResolveInfo best = null;
for (final ResolveInfo info : matches) {
if (info.activityInfo.packageName.contains(packageName)) {
best = info;
}
}
return best;
}
} |
package com.raphxyz.natif;
import android.R;
import android.app.AlertDialog;
import android.app.AlertDialog.Builder;
import android.content.DialogInterface;
import android.media.Ringtone;
import android.media.RingtoneManager;
import android.net.Uri;
import android.text.InputType;
import android.widget.EditText;
import android.widget.TextView;
import org.apache.cordova.CallbackContext;
import org.apache.cordova.CordovaInterface;
import org.apache.cordova.CordovaPlugin;
import org.apache.cordova.CordovaWebView;
import org.apache.cordova.PluginResult;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
public class RaphxyzNatif extends CordovaPlugin {
/**
 * Cordova entry point: dispatches the alert / confirm / prompt /
 * promtPassword / beep actions to the native dialog helpers.  A NO_RESULT
 * result with keepCallback is sent up front so the JS callback stays alive
 * until a dialog click listener delivers the real result.
 *
 * @return true when the action was recognized (also when the activity is
 *         finishing and nothing is shown); false for unknown actions
 */
public boolean execute(String action, JSONArray args, CallbackContext callbackContext) throws JSONException {
    final CallbackContext cbContext = callbackContext;
    PluginResult result = new PluginResult(PluginResult.Status.NO_RESULT);
    result.setKeepCallback(true);
    cbContext.sendPluginResult(result);
    // Never try to show a dialog on an activity that is shutting down.
    if(this.cordova.getActivity().isFinishing()) return true;
    if("alert".equals(action)){
        alert(args.getString(0), args.getString(1), cbContext);
        return true;
    }
    else if("confirm".equals(action)) {
        confirm(args.getString(0), args.getString(1), cbContext);
        return true;
    }
    else if("prompt".equals(action)) {
        // NOTE(review): the JS action is "prompt" but the helper keeps the
        // original "promt" spelling.
        promt(args.getString(0), args.getString(1), cbContext);
        return true;
    }
    else if("promtPassword".equals(action)) {
        promtPassword(args.getString(0), args.getString(1), cbContext);
        return true;
    }
    else if("beep".equals(action)) {
        beep(args.getString(0));
        return true;
    }
    else {
        cbContext.sendPluginResult(new PluginResult(PluginResult.Status.INVALID_ACTION));
        return false;
    }
}
/**
 * Shows a simple message dialog with a single OK button; the callback is
 * resolved with a plain OK result when the button is pressed.
 */
private synchronized void alert(final String title, final String content, final CallbackContext cbContext){
    final AlertDialog.Builder dlg =
            new AlertDialog.Builder(this.cordova.getActivity(), AlertDialog.THEME_DEVICE_DEFAULT_LIGHT);
    dlg.setTitle(title);
    dlg.setMessage(content);
    dlg.setPositiveButton(R.string.ok, new DialogInterface.OnClickListener(){
        public void onClick(DialogInterface dialog, int id){
            dialog.dismiss();
            cbContext.sendPluginResult(new PluginResult(PluginResult.Status.OK));
        }
    });
    dlg.show();
}
/**
 * Shows a yes/no dialog; the callback is resolved with OK(true) when "yes"
 * is pressed and OK(false) when "no" is pressed.
 */
private synchronized void confirm(final String title, final String message, final CallbackContext cbContext) {
    final AlertDialog.Builder dlg =
            new AlertDialog.Builder(this.cordova.getActivity(), AlertDialog.THEME_DEVICE_DEFAULT_LIGHT);
    dlg.setTitle(title);
    dlg.setMessage(message);
    dlg.setPositiveButton(R.string.yes, new DialogInterface.OnClickListener(){
        public void onClick(DialogInterface dialog, int id){
            dialog.dismiss();
            cbContext.sendPluginResult(new PluginResult(PluginResult.Status.OK, true));
        }
    });
    dlg.setNegativeButton(R.string.no, new DialogInterface.OnClickListener(){
        public void onClick(DialogInterface dialog, int id){
            dialog.dismiss();
            cbContext.sendPluginResult(new PluginResult(PluginResult.Status.OK, false));
        }
    });
    dlg.show();
}
/**
 * Shows a text-input dialog.  Resolves the callback with a JSON object:
 * {cancel:false, value:&lt;text&gt;} on OK and {cancel:true, value:null} on
 * cancel.  (The method name keeps the original "promt" spelling, which the
 * dispatcher relies on.)
 */
private synchronized void promt(final String title, final String message, final CallbackContext cbContext) {
    final EditText promptInput = new EditText(cordova.getActivity());
    promptInput.setHint("");
    AlertDialog.Builder alertDialog = new AlertDialog.Builder(this.cordova.getActivity(), AlertDialog.THEME_DEVICE_DEFAULT_LIGHT);
    final JSONObject result = new JSONObject();
    alertDialog.setTitle(title)
        .setMessage(message)
        .setCancelable(true)
        .setView(promptInput)
        .setPositiveButton(R.string.ok, new DialogInterface.OnClickListener(){
            public void onClick(DialogInterface dialog, int id){
                dialog.dismiss();
                try {
                    result.put("cancel",false);
                    // Whitespace-only input is normalized to "".
                    result.put("value", promptInput.getText().toString().trim().length()==0 ? "" : promptInput.getText());
                }
                catch (JSONException e) { }
                cbContext.sendPluginResult(new PluginResult(PluginResult.Status.OK, result));
            }
        })
        .setNegativeButton(R.string.cancel, new DialogInterface.OnClickListener(){
            public void onClick(DialogInterface dialog, int id){
                dialog.dismiss();
                try {
                    result.put("cancel",true);
                    result.put("value", null);
                }
                catch (JSONException e) { }
                cbContext.sendPluginResult(new PluginResult(PluginResult.Status.OK, result));
            }
        })
        .show();
}
/**
 * Shows a modal numeric-password prompt dialog (digits, masked input).
 *
 * <p>Result shape is identical to {@code promt}: a JSON object with
 * "cancel" (boolean) and "value" (the entered text, "" when blank;
 * the key is absent on cancel because org.json drops null values).
 */
private synchronized void promtPassword(final String title, final String message, final CallbackContext cbContext) {
    final EditText promptInput = new EditText(cordova.getActivity());
    // Numeric keypad with password masking.
    promptInput.setInputType(InputType.TYPE_CLASS_NUMBER | InputType.TYPE_NUMBER_VARIATION_PASSWORD);
    promptInput.setHint("");
    AlertDialog.Builder alertDialog = new AlertDialog.Builder(this.cordova.getActivity(), AlertDialog.THEME_DEVICE_DEFAULT_LIGHT);
    final JSONObject result = new JSONObject();
    alertDialog.setTitle(title)
            .setMessage(message)
            .setCancelable(true)
            .setView(promptInput)
            .setPositiveButton(R.string.ok, new DialogInterface.OnClickListener(){
                public void onClick(DialogInterface dialog, int id){
                    dialog.dismiss();
                    try {
                        // FIX: store a plain String instead of the Editable object,
                        // which the old code handed to JSONObject directly.
                        String text = promptInput.getText().toString();
                        result.put("cancel", false);
                        result.put("value", text.trim().length() == 0 ? "" : text);
                    }
                    catch (JSONException ignored) {
                        // Cannot occur for non-null String keys/values.
                    }
                    cbContext.sendPluginResult(new PluginResult(PluginResult.Status.OK, result));
                }
            })
            .setNegativeButton(R.string.cancel, new DialogInterface.OnClickListener(){
                public void onClick(DialogInterface dialog, int id){
                    dialog.dismiss();
                    try {
                        result.put("cancel", true);
                        // NOTE(review): put(key, null) removes the mapping in org.json.
                        result.put("value", null);
                    }
                    catch (JSONException ignored) { }
                    cbContext.sendPluginResult(new PluginResult(PluginResult.Status.OK, result));
                }
            })
            .show();
}
/**
 * Plays the default notification ringtone a number of times on a background
 * thread, waiting (up to 5 seconds each) for one tone to finish before the next.
 *
 * @param c number of repetitions, as a decimal string; a malformed value is
 *          ignored rather than crashing a thread-pool worker
 */
private void beep(final String c) {
    // FIX: parse up front so a bad argument fails here instead of throwing an
    // uncaught NumberFormatException inside the thread pool.
    final int count;
    try {
        count = Integer.parseInt(c);
    }
    catch (NumberFormatException e) {
        return; // nothing sensible to play
    }
    cordova.getThreadPool().execute(new Runnable() {
        public void run() {
            Uri ringtone = RingtoneManager.getDefaultUri(RingtoneManager.TYPE_NOTIFICATION);
            Ringtone notification = RingtoneManager.getRingtone(cordova.getActivity().getBaseContext(), ringtone);
            // getRingtone() returns null when no default tone is available.
            if (notification != null) {
                for (int i = 0; i < count; ++i) {
                    notification.play();
                    // Poll until the tone finishes, capped at 5 seconds.
                    long timeout = 5000;
                    while (notification.isPlaying() && (timeout > 0)) {
                        timeout = timeout - 100;
                        try {
                            Thread.sleep(100);
                        }
                        catch (InterruptedException e) {
                            // FIX: restore the interrupt flag and stop instead of
                            // silently swallowing the interruption.
                            Thread.currentThread().interrupt();
                            return;
                        }
                    }
                }
            }
        }
    });
}
} |
package com.intellij.util.xml;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ReflectionCache;
import org.jetbrains.annotations.Nullable;
import java.lang.annotation.Annotation;
import java.lang.reflect.*;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
/**
* @author peter
*/
public class DomReflectionUtil {
private DomReflectionUtil() {
}
public static Object invokeMethod(final JavaMethodSignature method, final Object object, final Object... args) {
return invokeMethod(method.findMethod(object.getClass()), object, args);
}
public static Object invokeMethod(final Method method, final Object object, final Object... args) {
try {
return method.invoke(object, args);
}
catch (IllegalArgumentException e) {
throw new RuntimeException("Calling method " + method + " on object " + object + " with arguments " + Arrays.asList(args), e);
}
catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
catch (InvocationTargetException e) {
final Throwable cause = e.getCause();
if (cause instanceof ProcessCanceledException) {
throw (ProcessCanceledException)cause;
}
else if (cause instanceof Error) {
throw (Error)cause;
}
else if (cause instanceof RuntimeException) {
throw (RuntimeException) cause;
}
throw new RuntimeException(e);
}
}
public static Type resolveVariable(TypeVariable variable, final Class classType) {
final Class aClass = getRawType(classType);
int index = ContainerUtil.findByEquals(ReflectionCache.getTypeParameters(aClass), variable);
if (index >= 0) {
return variable;
}
final Class[] classes = ReflectionCache.getInterfaces(aClass);
final Type[] genericInterfaces = ReflectionCache.getGenericInterfaces(aClass);
for (int i = 0; i < classes.length; i++) {
Class anInterface = classes[i];
final Type resolved = resolveVariable(variable, anInterface);
if (resolved instanceof Class || resolved instanceof ParameterizedType) {
return resolved;
}
if (resolved instanceof TypeVariable) {
final TypeVariable typeVariable = (TypeVariable)resolved;
index = ContainerUtil.findByEquals(ReflectionCache.getTypeParameters(anInterface), typeVariable);
assert index >= 0 : "Cannot resolve type variable:\n" +
"typeVariable = " + typeVariable + "\n" +
"genericDeclaration = " + declarationToString(typeVariable.getGenericDeclaration()) + "\n" +
"searching in " + declarationToString(anInterface);
final Type type = genericInterfaces[i];
if (type instanceof Class) {
return Object.class;
}
if (type instanceof ParameterizedType) {
return ((ParameterizedType)type).getActualTypeArguments()[index];
}
throw new AssertionError("Invalid type: " + type);
}
}
return null;
}
private static String declarationToString(final GenericDeclaration anInterface) {
return anInterface.toString() + Arrays.asList(anInterface.getTypeParameters()) + " loaded by " + ((Class)anInterface).getClassLoader();
}
public static Class<?> substituteGenericType(final Type genericType, final Type classType) {
if (genericType instanceof TypeVariable) {
final Class<?> aClass = getRawType(classType);
final Type type = resolveVariable((TypeVariable)genericType, aClass);
if (type instanceof Class) {
return (Class)type;
}
if (type instanceof ParameterizedType) {
return (Class<?>)((ParameterizedType)type).getRawType();
}
if (type instanceof TypeVariable && classType instanceof ParameterizedType) {
final int index = ContainerUtil.findByEquals(ReflectionCache.getTypeParameters(aClass), type);
if (index >= 0) {
return getRawType(((ParameterizedType)classType).getActualTypeArguments()[index]);
}
}
} else {
return getRawType(genericType);
}
return null;
}
public static Class<?> getRawType(Type type) {
if (type instanceof Class) {
return (Class)type;
}
if (type instanceof ParameterizedType) {
return getRawType(((ParameterizedType)type).getRawType());
}
assert false : type;
return null;
}
public static <T extends Annotation> T findAnnotationDFS(final Class<?> rawType, final Class<T> annotationType) {
T annotation = rawType.getAnnotation(annotationType);
if (annotation != null) return annotation;
for (Class aClass : rawType.getInterfaces()) {
annotation = findAnnotationDFS(aClass, annotationType);
if (annotation != null) {
return annotation;
}
}
return null;
}
public static <T extends Annotation> T findAnnotationDFS(final Method method, final Class<T> annotationClass) {
return JavaMethodSignature.getSignature(method).findAnnotation(annotationClass, method.getDeclaringClass());
}
@Nullable
public static Type extractCollectionElementType(Type returnType) {
if (returnType instanceof ParameterizedType) {
ParameterizedType parameterizedType = (ParameterizedType)returnType;
final Type rawType = parameterizedType.getRawType();
if (rawType instanceof Class) {
final Class<?> rawClass = (Class<?>)rawType;
if (List.class.equals(rawClass) || Collection.class.equals(rawClass)) {
final Type[] arguments = parameterizedType.getActualTypeArguments();
if (arguments.length == 1) {
final Type argument = arguments[0];
if (argument instanceof WildcardType) {
final Type[] upperBounds = ((WildcardType)argument).getUpperBounds();
if (upperBounds.length == 1) {
return upperBounds[0];
}
}
else if (argument instanceof ParameterizedType) {
if (DomUtil.getGenericValueParameter(argument) != null) {
return argument;
}
}
else if (argument instanceof Class) {
return argument;
}
}
}
}
}
return null;
}
public static boolean canHaveIsPropertyGetterPrefix(final Type type) {
return boolean.class.equals(type) || Boolean.class.equals(type)
|| Boolean.class.equals(DomUtil.getGenericValueParameter(type));
}
public static Method[] getGetterMethods(final String[] path, final Class<? extends DomElement> startClass) {
final Method[] methods = new Method[path.length];
Class aClass = startClass;
for (int i = 0; i < path.length; i++) {
final Method getter = findGetter(aClass, path[i]);
assert getter != null : "Couldn't find getter for property " + path[i] + " in class " + aClass;
methods[i] = getter;
aClass = getter.getReturnType();
if (List.class.isAssignableFrom(aClass)) {
aClass = getRawType(extractCollectionElementType(getter.getGenericReturnType()));
}
}
return methods;
}
@Nullable
public static Method findGetter(Class aClass, String propertyName) {
final String capitalized = StringUtil.capitalize(propertyName);
try {
return aClass.getMethod("get" + capitalized);
}
catch (NoSuchMethodException e) {
final Method method;
try {
method = aClass.getMethod("is" + capitalized);
return canHaveIsPropertyGetterPrefix(method.getGenericReturnType()) ? method : null;
}
catch (NoSuchMethodException e1) {
return null;
}
}
}
} |
package com.frank.gangofsuits.player;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Input;
import com.badlogic.gdx.graphics.Texture;
import com.badlogic.gdx.graphics.g2d.Sprite;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
import com.frank.gangofsuits.GangOfSuits;
import com.frank.gangofsuits.stage.DeathScreen;
import com.frank.gangofsuits.stage.IntroStage;
import com.frank.gangofsuits.utilities.Constants;
// Life-cycle state of the player character; checked each frame in Player.update().
enum PlayerState {
ALIVE, DEAD
}
public class Player {
private Sprite sprite;
private PlayerState state = PlayerState.ALIVE;
private int health = 100;
private GangOfSuits game;
private boolean invincible = false;
public Player(GangOfSuits game) {
this.game = game;
sprite = new Sprite(new Texture(Gdx.files.internal("spritesheets/char_sprite.png")));
sprite.setPosition((Constants.WORLD_WIDTH / 2) - (sprite.getWidth() / 2), (Constants.WORLD_HEIGHT / 2) - (sprite.getHeight() / 2));
sprite.setRotation(0);
}
public void draw(SpriteBatch batch) {
sprite.draw(batch);
}
public void update() {
if (health <= 0 && !invincible) {
state = PlayerState.DEAD;
game.setScreen(new DeathScreen());
}
float playerSpeed = 10.0f;
float velocityX = 0.0f;
float velocityY = 0.0f;
if(Gdx.input.isKeyPressed(Input.Keys.A))
velocityX -= Gdx.graphics.getDeltaTime() * playerSpeed;
if(Gdx.input.isKeyPressed(Input.Keys.D))
velocityX += Gdx.graphics.getDeltaTime() * playerSpeed;
if(Gdx.input.isKeyPressed(Input.Keys.W))
velocityY += Gdx.graphics.getDeltaTime() * playerSpeed;
if(Gdx.input.isKeyPressed(Input.Keys.S))
velocityY -= Gdx.graphics.getDeltaTime() * playerSpeed;
float length = (float) Math.sqrt((Math.pow(velocityX, 2)) + (Math.pow(velocityY, 2)));
if (length > 0.0f) {
velocityX /= length;
velocityY /= length;
}
sprite.translate(velocityX, velocityY);
IntroStage.camera.translate(velocityX, velocityY);
}
public PlayerState getState() {
return state;
}
public void setPlayerState(PlayerState newState) {
this.state = newState;
}
public boolean isInvinsible() {
return invincible;
}
public void setInvincible(boolean isInvincible) {
this.invincible = isInvincible;
}
} |
package org.bitcoinj.core;
import org.slf4j.*;
import static com.google.common.base.Preconditions.*;
// TODO: Finish adding Context c'tors to all the different objects so we can start deprecating the versions that take NetworkParameters.
// TODO: Add a working directory notion to Context and make various subsystems that want to use files default to that directory (eg. Orchid, block stores, wallet, etc).
// TODO: Auto-register the block chain object here, and then use it in the (newly deprecated) TransactionConfidence.getDepthInBlocks() method: the new version should take an AbstractBlockChain specifically.
// Also use the block chain object reference from the context in PeerGroup and remove the other constructors, as it's easy to forget to wire things up.
// TODO: Move Threading.USER_THREAD to here and leave behind just a source code stub. Allow different instantiations of the library to use different user threads.
// TODO: Keep a URI to where library internal data files can be found, to abstract over the lack of JAR files on Android.
// TODO: Stash anything else that resembles global library configuration in here and use it to clean up the rest of the API without breaking people.
// TODO: Move the TorClient into Context, so different parts of the library can read data over Tor without having to request it directly. (or maybe a general socket factory??)
/**
* <p>The Context object holds various objects and pieces of configuration that are scoped to a specific instantiation of
* bitcoinj for a specific network. You can get an instance of this class through calling {@link #get()}.</p>
*
* <p>Context is new in 0.13 and the library is currently in a transitional period: you should create a Context that
* wraps your chosen network parameters before using the rest of the library. However if you don't, things will still
* work as a Context will be created for you and stashed in thread local storage. The context is then propagated between
* library created threads as needed. This automagical propagation and creation is a temporary mechanism: one day it
* will be removed to avoid confusing edge cases that could occur if the developer does not fully understand it e.g.
* in the case where multiple instances of the library are in use simultaneously.</p>
*/
public class Context {
private static final Logger log = LoggerFactory.getLogger(Context.class);
private TxConfidenceTable confidenceTable;
private NetworkParameters params;
private int eventHorizon = 100;
/**
* Creates a new context object. For now, this will be done for you by the framework. Eventually you will be
* expected to do this yourself in the same manner as fetching a NetworkParameters object (at the start of your app).
*
* @param params The network parameters that will be associated with this context.
*/
public Context(NetworkParameters params) {
this.confidenceTable = new TxConfidenceTable();
this.params = params;
lastConstructed = this;
// We may already have a context in our TLS slot. This can happen a lot during unit tests, so just ignore it.
slot.set(this);
}
/**
* Creates a new context object. For now, this will be done for you by the framework. Eventually you will be
* expected to do this yourself in the same manner as fetching a NetworkParameters object (at the start of your app).
*
* @param params The network parameters that will be associated with this context.
* @param eventHorizon Number of blocks after which the library will delete data and be unable to always process reorgs (see {@link #getEventHorizon()}.
*/
public Context(NetworkParameters params, int eventHorizon) {
this(params);
this.eventHorizon = eventHorizon;
}
private static volatile Context lastConstructed;
private static final ThreadLocal<Context> slot = new ThreadLocal<Context>();
public static Context get() {
Context tls = slot.get();
if (tls == null) {
if (lastConstructed == null)
throw new IllegalStateException("You must construct a Context object before using bitcoinj!");
slot.set(lastConstructed);
log.error("Performing thread fixup: you are accessing bitcoinj via a thread that has not had any context set on it.");
log.error("This error has been corrected for, but doing this makes your app less robust.");
log.error("You should use Context.propagate() or a ContextPropagatingThreadFactory.");
log.error("Please refer to the user guide for more information about this.");
// TODO: Actually write the user guide section about this.
// TODO: If the above TODO makes it past the 0.13 release, kick Mike and tell him he sucks.
return lastConstructed;
} else {
return tls;
}
}
// A temporary internal shim designed to help us migrate internally in a way that doesn't wreck source compatibility.
public static Context getOrCreate(NetworkParameters params) {
Context context;
try {
context = get();
} catch (IllegalStateException e) {
log.warn("Implicitly creating context. This is a migration step and this message will eventually go away.");
context = new Context(params);
return context;
}
if (context.getParams() != params)
throw new IllegalStateException("Context does not match implicit network params: " + context.getParams() + " vs " + params);
return context;
}
/**
* Sets the given context as the current thread context. You should use this if you create your own threads that
* want to create core BitcoinJ objects. Generally, if a class can accept a Context in its constructor and might
* be used (even indirectly) by a thread, you will want to call this first. Your task may be simplified by using
* a {@link org.bitcoinj.utils.ContextPropagatingThreadFactory}.
*/
public static void propagate(Context context) {
slot.set(checkNotNull(context));
}
public TxConfidenceTable getConfidenceTable() {
return confidenceTable;
}
/**
* Returns the {@link org.bitcoinj.core.NetworkParameters} specified when this context was (auto) created. The
* network parameters defines various hard coded constants for a specific instance of a Bitcoin network, such as
* main net, testnet, etc.
*/
public NetworkParameters getParams() {
return params;
}
/**
* The event horizon is the number of blocks after which various bits of the library consider a transaction to be
* so confirmed that it's safe to delete data. Re-orgs larger than the event horizon will not be correctly
* processed, so the default value is high (100).
*/
public int getEventHorizon() {
return eventHorizon;
}
} |
package polyglot.ext.jl5.ast;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import polyglot.ast.ClassDecl;
import polyglot.ast.ClassDeclOps;
import polyglot.ast.JL;
import polyglot.ast.Node;
import polyglot.ast.NodeFactory;
import polyglot.ast.Node_c;
import polyglot.ast.TypeNode;
import polyglot.ext.jl5.types.JL5Context;
import polyglot.ext.jl5.types.JL5Flags;
import polyglot.ext.jl5.types.JL5ParsedClassType;
import polyglot.ext.jl5.types.JL5TypeSystem;
import polyglot.ext.jl5.types.TypeVariable;
import polyglot.ext.jl5.visit.JL5Translator;
import polyglot.ext.param.types.MuPClass;
import polyglot.types.ConstructorInstance;
import polyglot.types.Context;
import polyglot.types.Flags;
import polyglot.types.ReferenceType;
import polyglot.types.SemanticException;
import polyglot.types.TypeSystem;
import polyglot.util.CodeWriter;
import polyglot.util.CollectionUtil;
import polyglot.util.InternalCompilerError;
import polyglot.util.SerialVersionUID;
import polyglot.visit.NodeVisitor;
import polyglot.visit.PrettyPrinter;
import polyglot.visit.TypeBuilder;
import polyglot.visit.TypeChecker;
/**
 * Delegate for Java 5 class declarations. Adds handling of type variables
 * (generics), annotations, and enum/annotation-type declarations on top of
 * the base class-declaration behavior, plus JL5-aware pretty printing.
 *
 * <p>The JL5-specific state (annotations, type-parameter nodes) lives in the
 * companion {@code JL5ClassDeclExt} extension object retrieved via
 * {@code JL5Ext.ext(node)}.
 */
public class JL5ClassDeclDel extends JL5AnnotatedElementDel implements
        ClassDeclOps {
    private static final long serialVersionUID = SerialVersionUID.generate();

    /**
     * Builds the class type, wiring up its parameterized-class (PClass) and
     * registering any declared type variables on the class type.
     */
    @Override
    public Node buildTypes(TypeBuilder tb) throws SemanticException {
        ClassDecl n = (ClassDecl) super.buildTypes(tb);
        JL5ClassDeclExt ext = (JL5ClassDeclExt) JL5Ext.ext(n);
        JL5TypeSystem ts = (JL5TypeSystem) tb.typeSystem();
        JL5ParsedClassType ct = (JL5ParsedClassType) n.type();
        // Every JL5 class gets a mutable PClass so it can later be instantiated
        // with type arguments; clazz/pclass reference each other.
        MuPClass<TypeVariable, ReferenceType> pc =
                ts.mutablePClass(ct.position());
        ct.setPClass(pc);
        pc.clazz(ct);
        if (ext.paramTypes() != null && !ext.paramTypes().isEmpty()) {
            List<TypeVariable> typeVars =
                    new ArrayList<TypeVariable>(ext.paramTypes().size());
            for (ParamTypeNode ptn : ext.paramTypes()) {
                TypeVariable tv = (TypeVariable) ptn.type();
                typeVars.add(tv);
                tv.setDeclaringClass(ct);
            }
            ct.setTypeVariables(typeVars);
            // The PClass keeps its own copy of the formals list.
            pc.formals(new ArrayList<TypeVariable>(typeVars));
        }
        return n;
    }

    /**
     * Visits the JL5-specific children (annotations and type-parameter nodes)
     * in addition to the base children, copying the node if anything changed.
     */
    @Override
    public Node visitChildren(NodeVisitor v) {
        JL5ClassDeclExt ext = (JL5ClassDeclExt) JL5Ext.ext(this.node());
        List<AnnotationElem> annots =
                this.node().visitList(ext.annotationElems(), v);
        List<ParamTypeNode> paramTypes =
                this.node().visitList(ext.paramTypes(), v);
        Node newN = super.visitChildren(v);
        JL5ClassDeclExt newext = (JL5ClassDeclExt) JL5Ext.ext(newN);
        if (!CollectionUtil.equals(annots, newext.annotationElems())
                || !CollectionUtil.equals(paramTypes, newext.paramTypes())) {
            // the annotations or param types changed! Let's update the node.
            if (newN == this.node()) {
                // we need to create a copy.
                newN = (Node) newN.copy();
                newext = (JL5ClassDeclExt) JL5Ext.ext(newN);
            }
            else {
                // the call to super.visitChildren(v) already
                // created a copy of the node (and thus of its extension).
            }
            newext.annotations = annots;
            newext.paramTypes = paramTypes;
        }
        return newN;
    }

    /*
     * (non-Javadoc)
     *
     * @see polyglot.ast.NodeOps#enterScope(polyglot.types.Context)
     */
    @Override
    public Context enterChildScope(Node child, Context c) {
        ClassDecl n = (ClassDecl) this.node();
        JL5ClassDeclExt ext = (JL5ClassDeclExt) JL5Ext.ext(n);
        if (child == n.body()) {
            // Entering the class body: push a full class scope.
            TypeSystem ts = c.typeSystem();
            c = c.pushClass(n.type(), ts.staticTarget(n.type()).toClass());
        }
        else {
            // Add this class to the context, but don't push a class scope.
            // This allows us to detect loops in the inheritance
            // hierarchy, but avoids an infinite loop.
            c = ((JL5Context) c).pushExtendsClause(n.type());
            c.addNamed(n.type());
        }
        // Type variables are visible in both the extends clause and the body.
        for (ParamTypeNode tn : ext.paramTypes()) {
            ((JL5Context) c).addTypeVariable((TypeVariable) tn.type());
        }
        return child.del().enterScope(c);
    }

    /**
     * JL5 class-declaration checks: no extending enums, no type variables on
     * Object, no private annotation types, no generic Throwable subclasses,
     * no duplicate type-variable names, and no cyclic type-variable bounds.
     */
    @Override
    public Node typeCheck(TypeChecker tc) throws SemanticException {
        ClassDecl n = (ClassDecl) this.node();
        JL5ClassDeclExt ext = (JL5ClassDeclExt) JL5Ext.ext(n);
        JL5TypeSystem ts = (JL5TypeSystem) tc.typeSystem();
        if (n.type().superType() != null
                && JL5Flags.isEnum(n.type().superType().toClass().flags())) {
            throw new SemanticException("Cannot extend enum type", n.position());
        }
        if (ts.equals(ts.Object(), n.type()) && !ext.paramTypes.isEmpty()) {
            throw new SemanticException("Type: " + n.type()
                    + " cannot declare type variables.", n.position());
        }
        if (JL5Flags.isAnnotation(n.flags()) && n.flags().isPrivate()) {
            throw new SemanticException("Annotation types cannot have explicit private modifier",
                                        n.position());
        }
        ts.checkDuplicateAnnotations(ext.annotations);
        // check not extending java.lang.Throwable (or any of its subclasses)
        // with a generic class
        if (n.type().superType() != null
                && ts.isSubtype(n.type().superType(), ts.Throwable())
                && !ext.paramTypes.isEmpty()) {
            // JLS 3rd ed. 8.1.2
            throw new SemanticException("Cannot subclass java.lang.Throwable or any of its subtypes with a generic class",
                                        n.superClass().position());
        }
        // check duplicate type variable decls
        for (int i = 0; i < ext.paramTypes.size(); i++) {
            TypeNode ti = ext.paramTypes.get(i);
            for (int j = i + 1; j < ext.paramTypes.size(); j++) {
                TypeNode tj = ext.paramTypes.get(j);
                if (ti.name().equals(tj.name())) {
                    throw new SemanticException("Duplicate type variable declaration.",
                                                tj.position());
                }
            }
        }
        // Reject cyclic bounds like <T extends S, S extends T>.
        for (ParamTypeNode paramType : ext.paramTypes)
            ts.checkCycles(paramType.type().toReference());
        return super.typeCheck(tc);
    }

    /**
     * Prints the modifier keywords and the declaration keyword, mapping enum
     * and annotation flag bits to "enum " / "@interface " and clearing the
     * implicit flags that should not be printed for those forms.
     */
    public void prettyPrintModifiers(CodeWriter w, PrettyPrinter tr) {
        ClassDecl n = (ClassDecl) this.node();
        Flags f = n.flags();
        if (f.isInterface()) {
            // "interface" implies abstract; don't print either keyword.
            f = f.clearInterface().clearAbstract();
        }
        if (JL5Flags.isEnum(f)) {
            // Enums are implicitly static and (possibly) abstract.
            f = JL5Flags.clearEnum(f).clearStatic().clearAbstract();
        }
        if (JL5Flags.isAnnotation(f)) {
            f = JL5Flags.clearAnnotation(f);
        }
        w.write(f.translate());
        if (n.flags().isInterface()) {
            if (JL5Flags.isAnnotation(n.flags())) {
                w.write("@interface ");
            }
            else {
                w.write("interface ");
            }
        }
        else if (JL5Flags.isEnum(n.flags())) {
            w.write("enum ");
        }
        else {
            w.write("class ");
        }
    }

    /** Prints the declared class name. */
    public void prettyPrintName(CodeWriter w, PrettyPrinter tr) {
        ClassDecl n = (ClassDecl) this.node();
        w.write(n.id().id());
    }

    /**
     * Prints the extends/implements clauses and the opening brace. Enums and
     * annotation types suppress their implicit superclass/interfaces.
     */
    public void prettyPrintHeaderRest(CodeWriter w, PrettyPrinter tr) {
        ClassDecl n = (ClassDecl) this.node();
        if (n.superClass() != null
                && ((!JL5Flags.isEnum(n.flags()) && !JL5Flags.isAnnotation(n.flags())))) {
            w.write(" extends ");
            ((Node_c) n).print(n.superClass(), w, tr);
        }
        if (!n.interfaces().isEmpty() && !JL5Flags.isAnnotation(n.flags())) {
            if (n.flags().isInterface()) {
                // Interfaces "extend" their super-interfaces.
                w.write(" extends ");
            }
            else {
                w.write(" implements ");
            }
            for (Iterator<TypeNode> i = n.interfaces().iterator(); i.hasNext();) {
                TypeNode tn = i.next();
                ((Node_c) n).print(tn, w, tr);
                if (i.hasNext()) {
                    w.write(", ");
                }
            }
        }
        w.write(" {");
    }

    /**
     * Prints the full class header: annotations, modifiers, name, type
     * variables (omitted when translating Java-5 features away), and the
     * extends/implements clauses.
     */
    @Override
    public void prettyPrintHeader(CodeWriter w, PrettyPrinter tr) {
        ClassDecl n = (ClassDecl) this.node();
        JL5ClassDeclExt ext = (JL5ClassDeclExt) JL5Ext.ext(n);
        w.begin(0);
        for (AnnotationElem ae : ext.annotationElems()) {
            ae.del().prettyPrint(w, tr);
            w.newline();
        }
        w.end();
        prettyPrintModifiers(w, tr);
        prettyPrintName(w, tr);
        // print type variables
        boolean printTypeVars = true;
        if (tr instanceof JL5Translator) {
            JL5Translator jl5tr = (JL5Translator) tr;
            // When erasing generics for a 1.4 target, drop the <...> section.
            printTypeVars = !jl5tr.removeJava5isms();
        }
        if (printTypeVars && !ext.paramTypes().isEmpty()) {
            w.write("<");
            for (Iterator<ParamTypeNode> iter = ext.paramTypes.iterator(); iter.hasNext();) {
                ParamTypeNode ptn = iter.next();
                ptn.del().prettyPrint(w, tr);
                if (iter.hasNext()) {
                    w.write(", ");
                }
            }
            w.write(">");
        }
        prettyPrintHeaderRest(w, tr);
    }

    /** Delegates footer printing to the underlying JL, which must be a ClassDeclOps. */
    @Override
    public void prettyPrintFooter(CodeWriter w, PrettyPrinter tr) {
        JL jl = this.jl();
        if (jl instanceof ClassDeclOps) {
            ((ClassDeclOps) jl).prettyPrintFooter(w, tr);
        }
        else {
            throw new InternalCompilerError("jl() should implement ClassDeclOps");
        }
    }

    /** Delegates default-constructor synthesis to the underlying JL. */
    @Override
    public Node addDefaultConstructor(TypeSystem ts, NodeFactory nf,
            ConstructorInstance defaultConstructorInstance)
            throws SemanticException {
        JL jl = this.jl();
        if (jl instanceof ClassDeclOps) {
            return ((ClassDeclOps) jl).addDefaultConstructor(ts,
                                                            nf,
                                                            defaultConstructorInstance);
        }
        else {
            throw new InternalCompilerError("jl() should implement ClassDeclOps");
        }
    }
}
package postgres.gen;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.stream.Collectors;
import lama.Query;
import lama.QueryAdapter;
import lama.Randomly;
import postgres.PostgresProvider;
import postgres.PostgresSchema.PostgresTable;
public class PostgresVacuumGenerator {
public static Query create(PostgresTable table) {
StringBuilder sb = new StringBuilder("VACUUM ");
if (Randomly.getBoolean()) {
// VACUUM [ ( { FULL | FREEZE | VERBOSE | ANALYZE | DISABLE_PAGE_SKIPPING } [,
// ...] ) ] [ table_name [ (column_name [, ...] ) ] ]
sb.append("(");
for (int i = 0; i < Randomly.smallNumber() + 1; i++) {
ArrayList<String> opts = new ArrayList<String>(
Arrays.asList("FULL", "FREEZE", "ANALYZE", "VERBOSE", "DISABLE_PAGE_SKIPPING"));
if (PostgresProvider.IS_POSTGRES_TWELVE) {
opts.add("SKIP_LOCKED");
opts.add("INDEX_CLEANUP");
opts.add("TRUNCATE");
}
String option = Randomly.fromList(opts);
if (i != 0) {
sb.append(", ");
}
sb.append(option);
if (PostgresProvider.IS_POSTGRES_TWELVE && Randomly.getBoolean()) {
sb.append(" ");
sb.append(Randomly.fromOptions(1, 0));
}
}
sb.append(")");
if (Randomly.getBoolean()) {
addTableAndColumns(table, sb);
}
} else {
String firstOption = Randomly.fromOptions("FULL", "FREEZE", "VERBOSE");
sb.append(firstOption);
if (Randomly.getBoolean()) {
sb.append(" ANALYZE");
addTableAndColumns(table, sb);
} else {
if (Randomly.getBoolean()) {
sb.append(" ");
sb.append(table.getName());
}
}
}
return new QueryAdapter(sb.toString()) {
@Override
public void execute(Connection con) throws SQLException {
try {
super.execute(con);
} catch (SQLException e) {
if (e.getMessage().contains("VACUUM option DISABLE_PAGE_SKIPPING cannot be used with FULL")) {
} else if (e.getMessage()
.contains("ERROR: ANALYZE option must be specified when a column list is provided")) {
} else if (e.getMessage().contains("deadlock")) {
} else if (e.getMessage().contains("VACUUM cannot run inside a transaction block")) {
} else {
throw e;
}
}
}
};
}
private static void addTableAndColumns(PostgresTable table, StringBuilder sb) {
sb.append(" ");
sb.append(table.getName());
if (Randomly.getBoolean()) {
sb.append("(");
sb.append(table.getRandomNonEmptyColumnSubset().stream().map(c -> c.getName())
.collect(Collectors.joining(", ")));
sb.append(")");
}
}
} |
package weave.config;
import java.io.File;
import java.io.FileOutputStream;
import java.net.URL;
import java.rmi.RemoteException;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import weave.utils.FileUtils;
import weave.utils.MapUtils;
import weave.utils.ProgressManager;
import weave.utils.SQLUtils;
import weave.utils.XMLUtils;
/**
* ISQLConfig An interface to retrieve strings from a configuration file.
*
* @author Andy Dufilie
*/
public class ConnectionConfig
{
public static final String XML_FILENAME = "sqlconfig.xml";
public static final String DTD_FILENAME = "sqlconfig.dtd";
public static final URL DTD_EMBEDDED = ConnectionConfig.class.getResource("/weave/config/" + DTD_FILENAME);
/**
 * Creates a config backed by the given XML file. The file is not read here;
 * it is (re)loaded lazily on first access and whenever its mtime changes.
 *
 * @param file the sqlconfig.xml file to read connection settings from
 */
public ConnectionConfig(File file)
{
    _file = file;
}
// Set only inside initializeNewDataConfig() to let DataConfig be constructed
// while a migration is still pending (see allowDataConfigInitialize()).
private boolean _temporaryDataConfigPermission = false;
// True when the loaded XML uses the deprecated format that requires migration.
private boolean _oldVersionDetected = false;
// Last-modified timestamp of _file at the time of the last successful _load().
private long _lastMod = 0L;
// Backing XML config file.
private File _file;
// Parsed /sqlConfig/databaseConfig element.
private DatabaseConfigInfo _databaseConfigInfo;
// Parsed /sqlConfig/connection elements, keyed by connection name.
private Map<String,ConnectionInfo> _connectionInfoMap = new HashMap<String,ConnectionInfo>();
// Cached admin connection; reset whenever the config file is reloaded.
private Connection _adminConnection = null;
/**
 * Returns the config file's last-modified timestamp, reloading the
 * configuration first if the file changed on disk.
 *
 * @throws RemoteException if the config file cannot be (re)loaded
 */
public long getLastModified() throws RemoteException
{
    _load();
    return _lastMod;
}
/**
 * Creates the DataConfig, performing a one-time migration from the deprecated
 * config format first when one is pending. This function must be called
 * before making any modifications to the config.
 *
 * <p>While migrating, {@code _temporaryDataConfigPermission} is raised inside
 * a synchronized block so that the DataConfig constructor (which consults
 * {@link #allowDataConfigInitialize()}) is allowed to run; the flag is always
 * cleared again in the finally block, even if migration fails.
 *
 * @throws RemoteException if loading, migration, or saving fails
 */
@SuppressWarnings("deprecation")
public DataConfig initializeNewDataConfig(ProgressManager progress) throws RemoteException
{
    if (migrationPending())
    {
        try
        {
            DataConfig dataConfig;
            synchronized (this)
            {
                // Temporarily permit DataConfig construction during migration.
                _temporaryDataConfigPermission = true;
                dataConfig = new DataConfig(this);
                _temporaryDataConfigPermission = false;
            }
            DeprecatedConfig.migrate(this, dataConfig, progress);
            // after everything has successfully been migrated, save under new connection config format
            _oldVersionDetected = false;
            _save();
            return dataConfig;
        }
        finally
        {
            // Ensure the permission flag never stays raised on failure.
            _temporaryDataConfigPermission = false;
        }
    }
    else
    {
        return new DataConfig(this);
    }
}
/**
 * Returns true when a DataConfig may be constructed: either the temporary
 * permission raised during migration is active, or no migration is pending.
 */
public boolean allowDataConfigInitialize() throws RemoteException
{
    // Check the cheap flag first; migrationPending() may hit the file system.
    if (_temporaryDataConfigPermission)
        return true;
    return !migrationPending();
}
/**
 * Returns true when the config file uses the deprecated format and must be
 * migrated before use. Reloads the file first if it changed on disk.
 *
 * @throws RemoteException if the config file cannot be (re)loaded
 */
public boolean migrationPending() throws RemoteException
{
    _load();
    return _oldVersionDetected;
}
/**
 * This function gets a connection to the database containing the configuration information. This function will reuse a previously created
 * Connection if it is still valid.
 *
 * @return A Connection to the SQL database.
 * @throws RemoteException if the config is incomplete or the named connection does not exist
 * @throws SQLException if opening the connection fails
 */
public Connection getAdminConnection() throws RemoteException, SQLException
{
    _load();
    // if old version is detected, don't run test query
    boolean isValid = _oldVersionDetected ? _adminConnection != null : SQLUtils.connectionIsValid(_adminConnection);
    // use previous connection if still valid
    if (isValid)
        return _adminConnection;
    DatabaseConfigInfo dbInfo = _databaseConfigInfo;
    if (dbInfo == null)
        throw new RemoteException("databaseConfig has not been specified.");
    if (dbInfo.schema == null || dbInfo.schema.length() == 0)
        throw new RemoteException("databaseConfig schema has not been specified.");
    ConnectionInfo connInfo = getConnectionInfo(dbInfo.connection);
    if (connInfo == null)
        // FIX: corrected typo in the user-facing error message ("doead" -> "does").
        throw new RemoteException(String.format("Connection named \"%s\" does not exist.", dbInfo.connection));
    return _adminConnection = connInfo.getConnection();
}
/**
 * Closes and discards the cached admin connection so the next
 * getAdminConnection() call opens a fresh one. Called after a config reload.
 */
private void resetAdminConnection()
{
    SQLUtils.cleanup(_adminConnection);
    _adminConnection = null;
}
/**
 * Copies each (name, value) pair from the map onto the element as an
 * XML attribute.
 *
 * @param tag the element to receive the attributes.
 * @param attrs attribute names mapped to their values.
 */
private void _setXMLAttributes(Element tag, Map<String,String> attrs)
{
    for (Map.Entry<String,String> attr : attrs.entrySet())
    {
        tag.setAttribute(attr.getKey(), attr.getValue());
    }
}
/**
 * Flattens a node's attribute list into a name-to-value map.
 *
 * @param node the XML node whose attributes are read.
 * @return a new map of attribute name to text content.
 */
private Map<String,String> _getXMLAttributes(Node node)
{
    Map<String, String> result = new HashMap<String, String>();
    NamedNodeMap attrList = node.getAttributes();
    int count = attrList.getLength();
    for (int i = 0; i < count; i++)
    {
        Node attr = attrList.item(i);
        result.put(attr.getNodeName(), attr.getTextContent());
    }
    return result;
}
/**
 * Re-reads the XML config file if its modification time has changed since
 * the last successful load. Parsed values are committed to fields only
 * after the entire file parses, so a malformed file cannot leave this
 * object in a half-updated state.
 *
 * @throws RemoteException if the file cannot be read or parsed.
 */
private void _load() throws RemoteException
{
    long lastMod = _file.lastModified();
    if (_lastMod != lastMod)
    {
        try
        {
            // read file as XML
            Document doc = XMLUtils.getValidatedXMLFromFile(_file);
            XPath xpath = XPathFactory.newInstance().newXPath();
            // read all ConnectionInfo
            Map<String,ConnectionInfo> connectionInfoMap = new HashMap<String,ConnectionInfo>();
            NodeList nodes = (NodeList) xpath.evaluate("/sqlConfig/connection", doc, XPathConstants.NODESET);
            for (int i = 0; i < nodes.getLength(); i++)
            {
                ConnectionInfo info = new ConnectionInfo();
                info.copyFrom(_getXMLAttributes(nodes.item(i)));
                connectionInfoMap.put(info.name, info);
            }
            // read DatabaseConfigInfo
            Node node = (Node) xpath.evaluate("/sqlConfig/databaseConfig", doc, XPathConstants.NODE);
            DatabaseConfigInfo databaseConfigInfo = new DatabaseConfigInfo();
            Map<String,String> attrs = _getXMLAttributes(node);
            databaseConfigInfo.copyFrom(attrs);
            // detect old version: the deprecated format stored a dataConfigTable attribute
            _oldVersionDetected = databaseConfigInfo.dataConfigTable != null && databaseConfigInfo.dataConfigTable.length() != 0;
            // commit values only after everything succeeds
            _connectionInfoMap = connectionInfoMap;
            _databaseConfigInfo = databaseConfigInfo;
            _lastMod = lastMod;
            // reset admin connection when config changes
            resetAdminConnection();
        }
        catch (Exception e)
        {
            throw new RemoteException("Unable to load connection config file", e);
        }
    }
}
/**
 * Serializes the current connection and database configuration back to the
 * XML file, sorted by connection name, and refreshes the DTD beside it.
 *
 * @throws RemoteException if old-format data has not been migrated yet, or
 *         if writing either file fails.
 */
private void _save() throws RemoteException
{
    // we can't save until the old data has been migrated
    if (_oldVersionDetected)
        throw new RemoteException("Unable to save connection config because old data hasn't been migrated yet.");
    try
    {
        // reset admin connection when config changes
        resetAdminConnection();
        Document doc = XMLUtils.getXMLFromString("<sqlConfig/>");
        Node rootNode = doc.getDocumentElement();
        // write DatabaseConfigInfo
        Element element = doc.createElement("databaseConfig");
        _setXMLAttributes(element, _databaseConfigInfo.getPropertyMap());
        rootNode.appendChild(element);
        // write all ConnectionInfo, sorted by name
        List<String> names = new LinkedList<String>(getConnectionInfoNames());
        Collections.sort(names);
        for (String name : names)
        {
            element = doc.createElement("connection");
            _setXMLAttributes(element, _connectionInfoMap.get(name).getPropertyMap());
            rootNode.appendChild(element);
        }
        // get file paths
        String dtdPath = _file.getParentFile().getAbsolutePath() + '/' + DTD_FILENAME;
        String filePath = _file.getAbsolutePath();
        // NOTE(review): this branch is unreachable — _oldVersionDetected was
        // already checked (and thrown on) at the top of this method, so the
        // ".old" backups are never written. Confirm whether the guard above
        // or this backup block reflects the intended behavior.
        if (_oldVersionDetected)
        {
            // save backup of old files
            FileUtils.copy(dtdPath, dtdPath + ".old");
            FileUtils.copy(filePath, filePath + ".old");
            _oldVersionDetected = false;
        }
        // save new files
        FileUtils.copy(DTD_EMBEDDED.openStream(), new FileOutputStream(dtdPath));
        XMLUtils.getStringFromXML(rootNode, DTD_FILENAME, filePath);
    }
    catch (Exception e)
    {
        throw new RemoteException("Unable to save connection config file", e);
    }
}
/**
 * Looks up a connection definition by name.
 *
 * @param name the connection's configured name.
 * @return a defensive copy of the stored ConnectionInfo, or null if no
 *         connection with that name exists.
 * @throws RemoteException if the config file cannot be loaded.
 */
public ConnectionInfo getConnectionInfo(String name) throws RemoteException
{
    _load();
    ConnectionInfo stored = _connectionInfoMap.get(name);
    if (stored == null)
    {
        return null;
    }
    // Hand out a copy so callers cannot mutate our internal state.
    ConnectionInfo result = new ConnectionInfo();
    result.copyFrom(stored);
    return result;
}
/**
 * Adds or replaces a connection definition and persists the config file.
 *
 * @param connectionInfo the connection to store; validated before saving.
 * @throws RemoteException if validation, loading, or saving fails.
 */
public void saveConnectionInfo(ConnectionInfo connectionInfo) throws RemoteException
{
    // Validate first so invalid entries never reach the map or the file.
    connectionInfo.validate();
    _load();
    // Store a copy so later mutation of the caller's object has no effect.
    ConnectionInfo stored = new ConnectionInfo();
    stored.copyFrom(connectionInfo);
    _connectionInfoMap.put(stored.name, stored);
    _save();
}
/**
 * Deletes the named connection definition (a no-op if absent) and
 * persists the config file.
 *
 * @param name the connection's configured name.
 * @throws RemoteException if loading or saving fails.
 */
public void removeConnectionInfo(String name) throws RemoteException
{
    _load();
    _connectionInfoMap.remove(name);
    _save();
}
/**
 * Lists the names of all configured connections.
 *
 * @return an unmodifiable view of the connection names.
 * @throws RemoteException if the config file cannot be loaded.
 */
public Collection<String> getConnectionInfoNames() throws RemoteException
{
    _load();
    // Wrap the key set so callers cannot mutate the internal map through it
    // (removing from the raw keySet() view would silently delete connections).
    return Collections.unmodifiableCollection(_connectionInfoMap.keySet());
}
/**
 * Returns the current database configuration.
 *
 * @return a defensive copy of the DatabaseConfigInfo, or null if none is set.
 * @throws RemoteException if the config file cannot be loaded.
 */
public DatabaseConfigInfo getDatabaseConfigInfo() throws RemoteException
{
    _load();
    DatabaseConfigInfo stored = _databaseConfigInfo;
    if (stored == null)
    {
        return null;
    }
    DatabaseConfigInfo result = new DatabaseConfigInfo();
    result.copyFrom(stored);
    return result;
}
/**
 * Replaces the database configuration and persists the config file.
 *
 * @param info the new database configuration; its connection name must
 *        refer to an existing connection and its schema must be non-empty.
 * @throws RemoteException if validation, loading, or saving fails.
 */
public void setDatabaseConfigInfo(DatabaseConfigInfo info) throws RemoteException
{
    // Refresh from disk BEFORE validating; previously the connection-name
    // check ran against a stale (or not-yet-initialized) map.
    _load();
    if (!_connectionInfoMap.containsKey(info.connection))
        throw new RemoteException(String.format("Connection named \"%s\" does not exist.", info.connection));
    if (info.schema == null || info.schema.length() == 0)
        throw new RemoteException("Schema must be specified.");
    if (_databaseConfigInfo == null)
        _databaseConfigInfo = new DatabaseConfigInfo();
    _databaseConfigInfo.copyFrom(info);
    _save();
}
/**
 * This class contains all the information related to where the
 * configuration should be stored in a database.
 */
static public class DatabaseConfigInfo
{
    /**
     * The name of the connection (in the xml configuration) which allows
     * connection to the database which contains the configurations
     * (columns->SQL queries, and geometry collections).
     */
    public String connection;
    // Schema that holds the configuration tables.
    public String schema;
    // Legacy attributes kept only so old-format files can still be parsed.
    @Deprecated public String geometryConfigTable;
    @Deprecated public String dataConfigTable;

    public DatabaseConfigInfo()
    {
    }

    /** Populates this object from a map of XML attribute values. */
    public void copyFrom(Map<String,String> other)
    {
        connection = other.get("connection");
        schema = other.get("schema");
        geometryConfigTable = other.get("geometryConfigTable");
        dataConfigTable = other.get("dataConfigTable");
    }

    /** Copies every field from another instance. */
    public void copyFrom(DatabaseConfigInfo other)
    {
        connection = other.connection;
        schema = other.schema;
        geometryConfigTable = other.geometryConfigTable;
        dataConfigTable = other.dataConfigTable;
    }

    /**
     * @return the attribute map to persist; the deprecated table fields
     *         are intentionally omitted from the new file format.
     */
    public Map<String,String> getPropertyMap()
    {
        return MapUtils.fromPairs(
                "connection", connection,
                "schema", schema
            );
    }
}
/**
 * This class contains all the information needed to connect to a SQL
 * database.
 */
static public class ConnectionInfo
{
    public ConnectionInfo()
    {
    }

    /** @return true if the string is null or zero-length. */
    private boolean isEmpty(String str) { return str == null || str.length() == 0; }

    /**
     * Verifies that all required fields are filled in.
     *
     * @throws RemoteException naming the first missing field, if any.
     */
    public void validate() throws RemoteException
    {
        String missingField = null;
        if (isEmpty(name))
            missingField = "name";
        else if (isEmpty(dbms))
            missingField = "dbms";
        else if (isEmpty(pass))
            missingField = "password";
        else if (isEmpty(connectString))
            missingField = "connectString";
        if (missingField != null)
            throw new RemoteException(String.format("Connection %s must be specified", missingField));
    }

    /**
     * Populates this object from a map of XML attribute values. Falls back
     * to building the connect string from the legacy ip/port/database/user
     * attributes when no connectString attribute is present.
     */
    public void copyFrom(Map<String,String> other)
    {
        this.name = other.get("name");
        this.dbms = other.get("dbms");
        this.pass = other.get("pass");
        this.folderName = other.get("folderName");
        this.connectString = other.get("connectString");
        // FIX: null-safe comparison — old files without an is_superuser
        // attribute previously caused a NullPointerException here.
        this.is_superuser = "true".equalsIgnoreCase(other.get("is_superuser"));
        // backwards compatibility
        if (connectString == null || connectString.length() == 0)
        {
            String ip = other.get("ip");
            String port = other.get("port");
            String database = other.get("database");
            String user = other.get("user");
            this.connectString = SQLUtils.getConnectString(dbms, ip, port, database, user, pass);
        }
    }

    /** Copies every field from another instance. */
    public void copyFrom(ConnectionInfo other)
    {
        this.name = other.name;
        this.dbms = other.dbms;
        this.pass = other.pass;
        this.folderName = other.folderName;
        this.connectString = other.connectString;
        this.is_superuser = other.is_superuser;
    }

    /** @return the attribute map used when persisting this connection. */
    public Map<String,String> getPropertyMap()
    {
        return MapUtils.fromPairs(
                "name", name,
                "dbms", dbms,
                "pass", pass,
                "folderName", folderName,
                "connectString", connectString,
                "is_superuser", is_superuser ? "true" : "false"
            );
    }

    public String name = "";
    public String dbms = "";
    public String pass = "";
    public String folderName = "";
    public String connectString = "";
    public boolean is_superuser = false;

    /** @return a shared read-only connection for this definition. */
    public Connection getStaticReadOnlyConnection() throws RemoteException
    {
        return SQLUtils.getStaticReadOnlyConnection(SQLUtils.getDriver(dbms), connectString);
    }

    /** @return a new connection for this definition. */
    public Connection getConnection() throws RemoteException
    {
        return SQLUtils.getConnection(SQLUtils.getDriver(dbms), connectString);
    }
}
} |
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package mechanism;
import core.PollingSensor;
import core.Sensor;
import event.BallEvent;
import event.BallTrackingListener;
import event.SwitchEvent;
import event.SwitchListener;
import java.util.Vector;
import sensor.GRTSwitch;
/**
*
* Entity that keeps track of all the balls
* currently controlled by the robot
*
* @author gerberduffy
*/
public class BallTracker extends Sensor implements SwitchListener{
//Ball position enumerations
public static final int IN_LOWER_ROLLERS = 0; //Right after being collected
public static final int IN_HOPPER = 1; //In the hopper
public static final int IN_UPPER_ROLLERS = 2; //In the upper rollers
public static final int IN_SHOOTING_QUEUE = 3; //The ball is queued up for shooting
//KEYS
public static final int KEY_NUM_BALLS = 0;
public static final int KEY_FIRST_BALL_POSITION = 1;
public static final int KEY_SECOND_BALL_POSITION = 2;
public static final int KEY_THIRD_BALL_POSITION = 3;
public static final int NUM_DATA = 4;
//THe limit switches that are responsible for ball tracking.
private GRTSwitch lowerRollersSwitch;
private GRTSwitch upperRollersSwitch;
private GRTSwitch hopperSwitch;
private GRTSwitch ballQueueSwitch;
//Booleans
private boolean ballQueued = false; //True if a ball is waiting to be shot.
//Tracking numbers
private int totalBalls = 0; //Total balls in system. Starts at 0
//Listeners vector
private Vector listeners;
public BallTracker(double pollTime,
GRTSwitch lowerRollers,
GRTSwitch upperRollers,
GRTSwitch hopper,
GRTSwitch ballQueue)
{
super("Ball Tracker");
//Setup our instance variables
this.lowerRollersSwitch = lowerRollers;
this.hopperSwitch = hopper;
this.upperRollersSwitch = upperRollers;
this.ballQueueSwitch = ballQueue;
this.listeners = new Vector();
}
/**
* Add a ball listener
* @param l
*/
public void addListener(BallTrackingListener l){
listeners.addElement(l);
}
/**
* Remove a ball listener
* @param l
*/
public void removeListener(BallTrackingListener l){
listeners.removeElement(l);
}
/**
* Start listening to the count switches
*
*/
protected void startListening() {
lowerRollersSwitch.addSwitchListener(this);
hopperSwitch.addSwitchListener(this);
upperRollersSwitch.addSwitchListener(this);
ballQueueSwitch.addSwitchListener(this);
}
/**
* Stop listening to the switches.
*/
protected void stopListening() {
lowerRollersSwitch.removeSwitchListener(this);
hopperSwitch.removeSwitchListener(this);
upperRollersSwitch.removeSwitchListener(this);
ballQueueSwitch.removeSwitchListener(this);
}
/**
* Gets the total number of balls in the robot.
* @return The number of balls contained in the robot
*/
int getTotalBalls(){
return totalBalls;
}
/**
* On a switch state change, update the ball count as well as
* the current ball positions.
*
*
* @param e
*/
public void switchStateChanged(SwitchEvent e) {
BallEvent be = null; //The event we are sending
if (e.getState() == GRTSwitch.PRESSED){
//If the switch is pressed,
if (e.getSource() == lowerRollersSwitch){
be = new BallEvent(this, ++totalBalls, BallTracker.IN_LOWER_ROLLERS);
} else if (e.getSource() == upperRollersSwitch){
be = new BallEvent(this, ++totalBalls, BallTracker.IN_UPPER_ROLLERS);
} else if (e.getSource() == ballQueueSwitch){
be = new BallEvent(this, ++totalBalls, BallTracker.IN_SHOOTING_QUEUE);
}
}
//Logic for switch release.
else if (e.getState() == GRTSwitch.RELEASED){
//If the ball queue switch has been released after a ball
//has been queued, we know that we have shot a ball, so decrement
//the total ball count.
if (e.getSource() == ballQueueSwitch && ballQueued){
be = new BallEvent(this, --totalBalls, IN_HOPPER);
}
}
for(int i=0; i < listeners.size(); i++){
((BallTrackingListener)listeners.elementAt(i)).ballPositionChanged(be);
}
}
protected void notifyListeners(int id, double oldDatum, double newDatum) {
}
} |
package com.malhartech.stram;
import com.malhartech.api.Context.OperatorContext;
import com.malhartech.api.Operator;
import com.malhartech.api.Sink;
import com.malhartech.bufferserver.Buffer.Message.MessageType;
import com.malhartech.engine.Tuple;
import java.io.*;
import java.util.HashMap;
import java.util.Map.Entry;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.codehaus.jackson.JsonGenerationException;
import org.codehaus.jackson.map.JsonMappingException;
import org.codehaus.jackson.map.ObjectMapper;
import org.slf4j.LoggerFactory;
/**
*
* @author Zhongjian Wang <zhongjian@malhar-inc.com>
*/
/**
 * Records tuples flowing through operator ports to HDFS (or the local
 * filesystem when the base path starts with "file:"). Output consists of a
 * meta file, rolling part files of tuple data framed by begin/end window
 * markers, and an index file describing each closed part file.
 */
public class TupleRecorder implements Operator
{
  public static final String INDEX_FILE = "index.txt";
  public static final String META_FILE = "meta.txt";
  public static final String VERSION = "1.0";
  private transient FileSystem fs;
  // current part file being written
  private transient FSDataOutputStream partOutStr;
  // index file, one line per closed part file
  private transient FSDataOutputStream indexOutStr;
  private transient OutputStream localDataOutput;
  private transient OutputStream localIndexOutput;
  private transient String localBasePath;
  // roll to a new part file once the current one exceeds this many bytes
  private int bytesPerPartFile = 100 * 1024;
  private String basePath = ".";
  private transient String hdfsFile;
  private int fileParts = 0;
  private int partFileTupleCount = 0;
  private int totalTupleCount = 0;
  private HashMap<String, PortInfo> portMap = new HashMap<String, PortInfo>(); // used for output portInfo <name, id> map
  private HashMap<String, PortCount> portCountMap = new HashMap<String, PortCount>(); // used for tupleCount of each port <name, count> map
  private transient long currentWindowId = -1;
  // first window id contained in the current part file
  private transient long partBeginWindowId = -1;
  private String recordingName = "Untitled";
  private final long startTime = System.currentTimeMillis();
  private int nextPortIndex = 0;
  private HashMap<String, Sink<Object>> sinks = new HashMap<String, Sink<Object>>();
  // counts END_WINDOW tuples seen in the current window, across all ports
  private transient long endWindowTuplesProcessed = 0;
  private boolean isLocalMode = false;
  private static final org.slf4j.Logger logger = LoggerFactory.getLogger(TupleRecorder.class);

  /**
   * Creates and registers a recording sink for the given port key.
   *
   * @param key the port key to record.
   * @return the new sink.
   */
  public RecorderSink newSink(String key)
  {
    RecorderSink recorderSink = new RecorderSink(key);
    sinks.put(key, recorderSink);
    return recorderSink;
  }

  /** @return the port-name to PortInfo map. */
  public HashMap<String, PortInfo> getPortInfoMap()
  {
    return portMap;
  }

  /** @return total number of data tuples recorded so far. */
  public int getTotalTupleCount()
  {
    return totalTupleCount;
  }

  /** @return the port-key to sink map. */
  public HashMap<String, Sink<Object>> getSinkMap()
  {
    return sinks;
  }

  /** Forces local-filesystem mode (also auto-detected from a "file:" base path). */
  public void setLocalMode(boolean isLocalMode)
  {
    this.isLocalMode = isLocalMode;
  }

  /* defined for json information */
  public static class PortInfo
  {
    public String name;
    public String streamName;
    public String type;
    public int id;
  }

  /* defined for written tuple count of each port recorded in index file */
  public static class PortCount
  {
    public int id;
    public long count;
  }

  /* header record serialized into the meta file */
  public static class RecordInfo
  {
    public long startTime;
    public String recordingName;
  }

  public String getRecordingName()
  {
    return recordingName;
  }

  public void setRecordingName(String recordingName)
  {
    this.recordingName = recordingName;
  }

  /** Sets the part-file rollover threshold in bytes. */
  public void setBytesPerPartFile(int bytes)
  {
    bytesPerPartFile = bytes;
  }

  /** Sets the output directory; a "file:" prefix selects local mode in setup(). */
  public void setBasePath(String path)
  {
    basePath = path;
  }

  public String getBasePath()
  {
    return basePath;
  }

  public long getStartTime()
  {
    return startTime;
  }

  /**
   * Registers an input port, assigning it the next sequential id and a
   * zeroed tuple counter.
   *
   * @param portName the port's name.
   * @param streamName the stream attached to the port.
   */
  public void addInputPortInfo(String portName, String streamName)
  {
    PortInfo portInfo = new PortInfo();
    portInfo.name = portName;
    portInfo.streamName = streamName;
    portInfo.type = "input";
    portInfo.id = nextPortIndex++;
    portMap.put(portName, portInfo);
    PortCount pc = new PortCount();
    pc.id = portInfo.id;
    pc.count = 0;
    portCountMap.put(portName, pc);
  }

  /**
   * Registers an output port, assigning it the next sequential id and a
   * zeroed tuple counter.
   *
   * @param portName the port's name.
   * @param streamName the stream attached to the port.
   */
  public void addOutputPortInfo(String portName, String streamName)
  {
    PortInfo portInfo = new PortInfo();
    portInfo.name = portName;
    portInfo.streamName = streamName;
    portInfo.type = "output";
    portInfo.id = nextPortIndex++;
    portMap.put(portName, portInfo);
    PortCount pc = new PortCount();
    pc.id = portInfo.id;
    pc.count = 0;
    portCountMap.put(portName, pc);
  }

  /**
   * Flushes and closes the part and index files, writing a final index
   * entry for any part file still open.
   */
  @Override
  public void teardown()
  {
    logger.info("Closing down tuple recorder.");
    try {
      if (partOutStr != null) {
        logger.debug("Closing part file");
        partOutStr.close();
        if (indexOutStr != null) {
          logger.debug("Writing index file for windows {} to {}", partBeginWindowId, currentWindowId);
          writeIndex();
        }
      }
      if (indexOutStr != null) {
        indexOutStr.close();
      }
    }
    catch (IOException ex) {
      logger.error(ex.toString());
    }
  }

  /**
   * Writes the meta file (version, recording info, one JSON line per port)
   * and opens the index file. Part files are opened lazily per window.
   */
  @Override
  public void setup(OperatorContext context)
  {
    try {
      Path pa = new Path(basePath, META_FILE);
      // a "file:" base path switches output to the local filesystem
      if (basePath.startsWith("file:")) {
        isLocalMode = true;
        localBasePath = basePath.substring(5);
        (new File(localBasePath)).mkdirs();
      }
      fs = FileSystem.get(pa.toUri(), new Configuration());
      FSDataOutputStream metaOs = fs.create(pa);
      ObjectMapper mapper = new ObjectMapper();
      ByteArrayOutputStream bos = new ByteArrayOutputStream();
      bos.write((VERSION + "\n").getBytes());
      RecordInfo recordInfo = new RecordInfo();
      recordInfo.startTime = startTime;
      recordInfo.recordingName = recordingName;
      mapper.writeValue(bos, recordInfo);
      bos.write("\n".getBytes());
      for (PortInfo pi: portMap.values()) {
        mapper.writeValue(bos, pi);
        bos.write("\n".getBytes());
      }
      metaOs.write(bos.toByteArray());
      metaOs.hflush();
      metaOs.close();
      pa = new Path(basePath, INDEX_FILE);
      if (isLocalMode) {
        localIndexOutput = new FileOutputStream(localBasePath + "/" + INDEX_FILE);
        indexOutStr = new FSDataOutputStream(localIndexOutput, null);
      }
      else {
        indexOutStr = fs.create(pa);
      }
    }
    catch (IOException ex) {
      logger.error(ex.toString());
    }
  }

  /**
   * Opens the next sequentially numbered part file ("partN.txt") on the
   * configured filesystem.
   *
   * @throws IOException if the file cannot be created.
   */
  protected void openNewPartFile() throws IOException
  {
    hdfsFile = "part" + fileParts + ".txt";
    Path path = new Path(basePath, hdfsFile);
    logger.debug("Opening new part file: {}", hdfsFile);
    if (isLocalMode) {
      localDataOutput = new FileOutputStream(localBasePath + "/" + hdfsFile);
      partOutStr = new FSDataOutputStream(localDataOutput, null);
    }
    else {
      partOutStr = fs.create(path);
    }
    fileParts++;
  }

  /**
   * Marks the start of a window: rolls the part file if needed and writes
   * a "B:<windowId>" marker. Duplicate begin calls for the same window id
   * (one per recorded port) are ignored.
   */
  @Override
  public void beginWindow(long windowId)
  {
    if (this.currentWindowId != windowId) {
      this.currentWindowId = windowId;
      if (partBeginWindowId < 0) {
        partBeginWindowId = windowId;
      }
      endWindowTuplesProcessed = 0;
      try {
        if (partOutStr == null || partOutStr.getPos() > bytesPerPartFile) {
          openNewPartFile();
        }
        logger.debug("Writing begin window (id: {}) to tuple recorder", windowId);
        partOutStr.write(("B:" + windowId + "\n").getBytes());
        //fsOutput.hflush();
      }
      catch (IOException ex) {
        logger.error(ex.toString());
      }
    }
  }

  /**
   * Counts END_WINDOW tuples; once every recorded port has delivered one,
   * writes the "E:<windowId>" marker, flushes, and closes/indexes the part
   * file if it has exceeded the size threshold.
   */
  @Override
  public void endWindow()
  {
    if (++endWindowTuplesProcessed == portMap.size()) {
      try {
        partOutStr.write(("E:" + currentWindowId + "\n").getBytes());
        logger.debug("Got last end window tuple. Flushing...");
        partOutStr.hflush();
        //fsOutput.hsync();
        if (partOutStr.getPos() > bytesPerPartFile) {
          partOutStr.close();
          logger.debug("Writing index file for windows {} to {}", partBeginWindowId, currentWindowId);
          writeIndex();
          partBeginWindowId = -1;
          logger.debug("Closing current part file because it's full");
        }
      }
      catch (JsonGenerationException ex) {
        logger.error(ex.toString());
      }
      catch (JsonMappingException ex) {
        logger.error(ex.toString());
      }
      catch (IOException ex) {
        logger.error(ex.toString());
      }
    }
  }

  /**
   * Serializes a data tuple as JSON and appends it to the part file as
   * "T:<portId>:<length>:<json>\n", updating per-port and total counters.
   *
   * NOTE(review): unlike writeControlTuple(), this method does not open a
   * part file when partOutStr is null — confirm a begin-window marker is
   * always written first, otherwise this throws a NullPointerException.
   *
   * @param obj the tuple payload.
   * @param port the recorded port name (must be registered in portMap).
   */
  public void writeTuple(Object obj, String port)
  {
    try {
      ObjectMapper mapper = new ObjectMapper();
      ByteArrayOutputStream bos = new ByteArrayOutputStream();
      mapper.writeValue(bos, obj);
      bos.write("\n".getBytes());
      PortInfo pi = portMap.get(port);
      String str = "T:" + pi.id + ":" + bos.size() + ":";
      PortCount pc = portCountMap.get(port);
      pc.count++;
      portCountMap.put(port, pc);
      partOutStr.write(str.getBytes());
      partOutStr.write(bos.toByteArray());
      //logger.debug("Writing tuple for port id {}", pi.id);
      //fsOutput.hflush();
      ++partFileTupleCount;
      ++totalTupleCount;
    }
    catch (JsonGenerationException ex) {
      logger.error(ex.toString());
    }
    catch (JsonMappingException ex) {
      logger.error(ex.toString());
    }
    catch (IOException ex) {
      logger.error(ex.toString());
    }
  }

  /**
   * Appends a control-tuple marker "C:<portId>\n" to the part file, opening
   * a new part file first if none is open.
   *
   * @param tuple the control tuple (not yet serialized; see inline comment).
   * @param port the recorded port name.
   */
  public void writeControlTuple(Tuple tuple, String port)
  {
    try {
      PortInfo pi = portMap.get(port);
      String str = "C:" + pi.id; // to be completed when Tuple is externalizable
      if (partOutStr == null) {
        openNewPartFile();
      }
      partOutStr.write(str.getBytes());
      partOutStr.write("\n".getBytes());
    }
    catch (IOException ex) {
      logger.error(ex.toString());
    }
  }

  /**
   * Appends one index line for the just-closed part file:
   * "F:<beginWindow>:<endWindow>:T:<tupleCount>:<len>:{perPortCounts}:<file>",
   * then resets the per-port and per-part counters. No-op if no part file
   * has been started (partBeginWindowId < 0).
   */
  public void writeIndex()
  {
    if (partBeginWindowId < 0) {
      return;
    }
    try {
      indexOutStr.write(("F:" + partBeginWindowId + ":" + currentWindowId + ":T:" + partFileTupleCount + ":").getBytes());
      ByteArrayOutputStream bos = new ByteArrayOutputStream();
      int i = 0;
      String countStr = "{";
      for (String key: portCountMap.keySet()) {
        PortCount pc = portCountMap.get(key);
        if (i != 0) {
          countStr += ",";
        }
        countStr += "\"" + pc.id + "\"" + ":" + pc.count;
        i++;
        // reset the per-port counter for the next part file
        pc.count = 0;
        portCountMap.put(key, pc);
      }
      countStr += "}";
      bos.write(countStr.getBytes());
      partFileTupleCount = 0;
      indexOutStr.write((String.valueOf(bos.size()) + ":").getBytes());
      indexOutStr.write(bos.toByteArray());
      indexOutStr.write((":" + hdfsFile + "\n").getBytes());
      indexOutStr.hflush();
      indexOutStr.hsync();
    }
    catch (IOException ex) {
      logger.error(ex.toString());
    }
  }

  /**
   * Sink that routes control tuples to window/control handling and data
   * tuples to writeTuple(), tagged with this sink's port name.
   */
  public class RecorderSink implements Sink<Object>
  {
    private final String portName;

    public RecorderSink(String portName)
    {
      this.portName = portName;
    }

    @Override
    public void process(Object payload)
    {
      // In other words, if it's a regular tuple emitted by operators (payload), payload
      // is not an instance of Tuple (confusing... I know)
      if (payload instanceof Tuple) {
        Tuple tuple = (Tuple)payload;
        MessageType messageType = tuple.getType();
        if (messageType == MessageType.BEGIN_WINDOW) {
          beginWindow(tuple.getWindowId());
        }
        writeControlTuple(tuple, portName);
        if (messageType == MessageType.END_WINDOW) {
          endWindow();
        }
      }
      else {
        writeTuple(payload, portName);
      }
    }
  }
}
package org.testng.remote;
import com.beust.jcommander.JCommander;
import com.beust.jcommander.ParameterException;
import org.osgi.framework.Version;
import org.testng.CommandLineArgs;
import org.testng.TestNGException;
import org.testng.remote.support.ServiceLoaderHelper;
import java.lang.reflect.Field;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.*;
import java.util.jar.Attributes;
import java.util.jar.Manifest;
/**
 * Entry point used by IDE integrations (notably the Eclipse plug-in) to run
 * TestNG remotely: parses command-line and remote arguments, detects the
 * TestNG version on the classpath, and delegates to a version-specific
 * IRemoteTestNG implementation.
 */
public class RemoteTestNG {
  // The following constants are referenced by the Eclipse plug-in, make sure you
  // modify the plug-in as well if you change any of them.
  public static final String DEBUG_PORT = "12345";
  public static final String DEBUG_SUITE_FILE = "testng-customsuite.xml";
  public static final String DEBUG_SUITE_DIRECTORY = System.getProperty("java.io.tmpdir");
  public static final String PROPERTY_DEBUG = "testng.eclipse.debug";
  public static final String PROPERTY_VERBOSE = "testng.eclipse.verbose";
  // End of Eclipse constants.
  private static boolean m_debug;

  /**
   * Parses arguments, resolves the TestNG version, and runs the remote
   * TestNG instance.
   *
   * @param args command-line arguments (CommandLineArgs + RemoteArgs).
   * @throws ParameterException if the arguments cannot be parsed.
   */
  public static void main(String[] args) throws ParameterException {
    CommandLineArgs cla = new CommandLineArgs();
    RemoteArgs ra = new RemoteArgs();
    new JCommander(Arrays.asList(cla, ra), args);
    Version ver = ra.version;
    if (ver == null) {
      // no version specified on cli, detect ourself
      ver = getTestNGVersion();
    }
    p("detected TestNG version " + ver);
    IRemoteTestNG remoteTestNg = ServiceLoaderHelper.getFirst(ver).createRemoteTestNG();
    remoteTestNg.dontExit(ra.dontExit);
    // the two port options are mutually exclusive
    if (cla.port != null && ra.serPort != null) {
      throw new TestNGException("Can only specify one of " + CommandLineArgs.PORT
          + " and " + RemoteArgs.PORT);
    }
    m_debug = cla.debug;
    remoteTestNg.setDebug(cla.debug);
    remoteTestNg.setAck(ra.ack);
    // NOTE(review): both branches below are currently identical; the
    // commented-out "while (true)" suggests the debug branch was meant to
    // loop so several launches could reuse one RemoteTestNG — confirm.
    if (m_debug) {
      // while (true) {
      initAndRun(remoteTestNg, args, cla, ra);
    }
    else {
      initAndRun(remoteTestNg, args, cla, ra);
    }
  }

  /**
   * Determines the TestNG version available on the classpath.
   *
   * First tries reflection on org.testng.internal.Version (available since
   * 6.6, and read reflectively to bypass compile-time constant inlining);
   * for older versions, falls back to scanning MANIFEST.MF entries.
   *
   * @return the detected version.
   * @throws RuntimeException if no version can be determined.
   */
  private static Version getTestNGVersion() {
    String errMsg = "";
    try {
      // use reflection to read org.testng.internal.Version.VERSION for reason of:
      // 1. bypass the javac compile time constant substitution
      // 2. org.testng.internal.Version is available since version 6.6
      @SuppressWarnings("rawtypes")
      Class clazz = Class.forName("org.testng.internal.Version");
      Field field = clazz.getDeclaredField("VERSION");
      String strVer = (String) field.get(null);
      // trim the version to leave digital number only
      int idx = strVer.indexOf("beta");
      if (idx > 0) {
        strVer = strVer.substring(0, idx);
      }
      return new Version(strVer);
    } catch (Exception e) {
      // testng version < 6.6, ClassNotFound: org.testng.internal.Version
      // parse the MANIFEST.MF of testng jar from classpath
      try {
        Enumeration<URL> resources = ClassLoader.getSystemClassLoader().getResources("META-INF/MANIFEST.MF");
        while (resources.hasMoreElements()) {
          Manifest mf = new Manifest(resources.nextElement().openStream());
          Attributes mainAttrs = mf.getMainAttributes();
          // three manifest shapes, depending on how TestNG was packaged
          if ("org.testng.TestNG".equals(mainAttrs.getValue("Main-Class"))) {
            return new Version(mainAttrs.getValue("Implementation-Version"));
          }
          if ("org.testng".equals(mainAttrs.getValue("Bundle-SymbolicName"))) {
            return new Version(mainAttrs.getValue("Bundle-Version"));
          }
          if ("testng".equals(mainAttrs.getValue("Specification-Title"))) {
            return new Version(mainAttrs.getValue("Specification-Version"));
          }
        }
      } catch (Exception ex) {
        errMsg = ex.getMessage();
        if (isDebug()) {
          ex.printStackTrace();
        }
      }
    }
    p("No TestNG version found on classpath");
    ClassLoader cl = ClassLoader.getSystemClassLoader();
    if (cl instanceof URLClassLoader) {
      p(join(((URLClassLoader) cl).getURLs(), ", "));
    }
    throw new RuntimeException("Can't recognize the TestNG version on classpath. " + errMsg);
  }

  /**
   * Applies parsed arguments to the remote TestNG instance and runs it.
   * In debug mode, the port and suite file are overridden to fixed values
   * known to the Eclipse plug-in.
   */
  private static void initAndRun(IRemoteTestNG remoteTestNg, String[] args, CommandLineArgs cla, RemoteArgs ra) {
    if (m_debug) {
      // In debug mode, override the port and the XML file to a fixed location
      cla.port = Integer.parseInt(DEBUG_PORT);
      ra.serPort = cla.port;
      cla.suiteFiles = Arrays.asList(new String[] {
          DEBUG_SUITE_DIRECTORY + DEBUG_SUITE_FILE
      });
    }
    remoteTestNg.configure(cla);
    remoteTestNg.setHost(cla.host);
    remoteTestNg.setSerPort(ra.serPort);
    remoteTestNg.setProtocol(ra.protocol);
    remoteTestNg.setPort(cla.port);
    if (isVerbose()) {
      StringBuilder sb = new StringBuilder("Invoked with ");
      for (String s : args) {
        sb.append(s).append(" ");
      }
      p(sb.toString());
      // remoteTestNg.setVerbose(1);
      // } else {
      // remoteTestNg.setVerbose(0);
    }
    AbstractRemoteTestNG.validateCommandLineParameters(cla);
    remoteTestNg.run();
    // if (m_debug) {
    // // Run in a loop if in debug mode so it is possible to run several launches
    // // without having to relauch RemoteTestNG.
    // while (true) {
    // remoteTestNg.run();
    // remoteTestNg.configure(cla);
    // } else {
    // remoteTestNg.run();
  }

  /** Prints a message to stdout when verbose mode is on. */
  private static void p(String s) {
    if (isVerbose()) {
      System.out.println("[RemoteTestNG] " + s);
    }
  }

  /** Joins URLs with the given separator (note: trailing separator kept). */
  private static String join(URL[] urls, String sep) {
    StringBuilder sb = new StringBuilder();
    for (URL url : urls) {
      sb.append(url);
      sb.append(sep);
    }
    return sb.toString();
  }

  /** @return true when the verbose system property is set, or in debug mode. */
  public static boolean isVerbose() {
    boolean result = System.getProperty(PROPERTY_VERBOSE) != null || isDebug();
    return result;
  }

  /** @return true when debug was requested on the CLI or via system property. */
  public static boolean isDebug() {
    return m_debug || System.getProperty(PROPERTY_DEBUG) != null;
  }
}
package sokoban.Algorithms;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.PriorityQueue;
import java.util.Set;
import sokoban.BoardPosition;
import sokoban.BoardState;
import sokoban.Path;
import sokoban.types.NodeType;
public class AStar_Path implements ISearchAlgorithmPath{
private Path path;
private PriorityQueue openSet;
private Set closedSet;
private List<List<AStar_Node>> nodeMap;
private HashMap<BoardPosition,AStar_Node> nMap;
@Override
public Path getPath(BoardState state, BoardPosition initialPosition,
BoardPosition destination) {
Set<BoardPosition> position = new HashSet<>();
position.add(destination);
return getPath(state, initialPosition, position);
}
@Override
public Path getPath(BoardState state, BoardPosition initialPosition,
Set<BoardPosition> destination) {
for(BoardPosition goal: destination){
openSet=new PriorityQueue();
nodeMap = new ArrayList<>();
//build nodeMap
for(int row=0; row<state.getRowsCount(); row++){
for(int column=0; column <state.getColumnsCount(row); column++){
nodeMap.add(new ArrayList<AStar_Node>(state.getColumnsCount(row)));
NodeType nodeType=state.getNode(row, column);
if(nodeType== NodeType.SPACE || nodeType == NodeType.GOAL);
//nMap.put(arg0, arg1)
nodeMap.get(row).add(new AStar_Node(Math.abs((row-goal.Row)+(column-goal.Column))));
}
}
while(!openSet.isEmpty()){
AStar_Node current = (AStar_Node) openSet.poll();
if(current.bp == goal){
//TODO
}
closedSet.add(current);
List<BoardPosition> neighbourPositions = state.getNeighbours(current.bp);
for(BoardPosition neighbour : neighbourPositions){
AStar_Node neighbourNode=nodeMap.get(neighbour.Row).get(neighbour.Column);
int tentative_g_score = current.g+1;
if(closedSet.contains(neighbourNode) && (tentative_g_score >= neighbourNode.g)){
//Continue
}if(!closedSet.contains(neighbourNode) || (tentative_g_score < neighbourNode.g)){
neighbourNode.parent=current;
neighbourNode.g=tentative_g_score;
neighbourNode.f=neighbourNode.g+neighbourNode.h;
if(!openSet.contains(neighbourNode)){
openSet.add(neighbourNode);
}
}
}
}
return null;
}
}
public void reconstruct_path(Path came_from, BoardPosition current_node){
if(current_node)
}
private class AStar_Node implements Comparable{
int g=1; //total cost of getting to this node
int h; //estimated time to reach the finish from this node
int f;
AStar_Node parent;
BoardPosition bp;
private AStar_Node(int hCost){
h=hCost;
f = g+h;
}
@Override
public int compareTo(Object o) {
AStar_Node n = (AStar_Node) o;
return this.f- n.f;
}
}
} |
package org.jetel.database;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ParameterMetaData;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Types;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jetel.metadata.DataFieldMetadata;
import org.jetel.metadata.DataRecordMetadata;
/**
* Various utilities for working with Databases
*
* @author dpavlis
* @since September 25, 2002
 * @revision $Revision: 1.10 $
* @created January 24, 2003
*/
public class SQLUtil {
private final static String DEFAULT_DELIMITER = ";";
private final static String END_RECORD_DELIMITER = "\n";
static Log logger = LogFactory.getLog(SQLUtil.class);
/**
 * Creates SQL insert statement based on metadata describing data flow and
 * supplied table name. Produces "insert into TABLE values(?,...,?)" with
 * one placeholder per field.
 *
 * @param metadata Metadata describing data flow from which to feed database
 * @param tableName Table name into which insert data
 * @return string containing SQL insert statement
 * @since October 2, 2002
 */
public static String assembleInsertSQLStatement(DataRecordMetadata metadata, String tableName) {
    StringBuffer sql = new StringBuffer("insert into ");
    sql.append(tableName);
    sql.append(" values(");
    int fieldCount = metadata.getNumFields();
    for (int i = 0; i < fieldCount; i++) {
        if (i > 0) {
            sql.append(",");
        }
        sql.append("?");
    }
    sql.append(")");
    if (logger.isDebugEnabled()) {
        logger.debug(sql.toString());
    }
    return sql.toString();
}
/**
 * Creates an SQL insert statement for the given table using explicit
 * column names: "insert into TABLE (c1, c2) values (?,?)".
 *
 * @param tableName table into which to insert data
 * @param dbFields names of the target database columns
 * @return string containing the SQL insert statement
 */
public static String assembleInsertSQLStatement(String tableName, String[] dbFields) {
    StringBuffer sql = new StringBuffer("insert into ");
    sql.append(tableName).append(" (");
    int last = dbFields.length - 1;
    for (int i = 0; i < dbFields.length; i++) {
        sql.append(dbFields[i]);
        if (i != last) {
            sql.append(", ");
        }
    }
    sql.append(") values (");
    for (int i = 0; i < dbFields.length; i++) {
        if (i > 0) {
            sql.append(",");
        }
        sql.append("?");
    }
    sql.append(")");
    return sql.toString();
}
/**
* Converts SQL metadata into Clover's DataRecordMetadata
*
* @param dbMetadata SQL ResultSet metadata describing which columns are
* returned by query
* @return DataRecordMetadata which correspond to the SQL
* ResultSet
* @exception SQLException Description of the Exception
*/
public static DataRecordMetadata dbMetadata2jetel(ResultSetMetaData dbMetadata) throws SQLException {
DataFieldMetadata fieldMetadata;
DataRecordMetadata jetelMetadata = new DataRecordMetadata(dbMetadata.getTableName(1),
DataRecordMetadata.DELIMITED_RECORD);
for (int i = 1; i <= dbMetadata.getColumnCount(); i++) {
try {
fieldMetadata = new DataFieldMetadata(dbMetadata.getColumnName(i), DEFAULT_DELIMITER);
} catch (Exception ex) {
throw new RuntimeException(ex.getMessage() + " field name " + dbMetadata.getColumnName(i));
}
// set proper data type
fieldMetadata.setType(SQLUtil.sqlType2jetel(dbMetadata.getColumnType(i)));
if (i == dbMetadata.getColumnCount()) {
fieldMetadata.setDelimiter(END_RECORD_DELIMITER);
}
if (dbMetadata.isNullable(i) == ResultSetMetaData.columnNullable) {
fieldMetadata.setNullable(true);
}
/*
* this is not safe - at least Oracle JDBC driver reports NUMBER to be currency
* if (dbMetadata.isCurrency(i)) {
* fieldMetadata.setFormatStr("#.#");
* }
*/
jetelMetadata.addField(fieldMetadata);
}
return jetelMetadata;
}
/**
* For specified table returns names of individual fileds
*
* @param conn database connection
* @param tableName name of DB table
* @return array of field names
*/
public static String[] getColumnNames(Connection conn, String tableName) {
List tmp = new ArrayList();
String[] out = null;
try {
ResultSet rs = conn.getMetaData().getColumns(null, null, tableName, "%");
while (rs.next()) {
// FIELD NAME - 4 column in resultset
// get DATA TYPE - 5 column in result set from Database metadata
//out.add(rs.getString(4).toUpperCase(), new Integer(rs.getInt(5)));
tmp.add(rs.getString(4));
}
out = new String[tmp.size()];
tmp.toArray(out);
} catch (Exception e) {
e.printStackTrace();
}
return out;
}
/**
* Gets the FieldTypes of fields present in specified DB table
*
* @param metadata Description of Parameter
* @param tableName name of the table for which to get metadata (field names, types)
* @return list of JDBC FieldTypes
* @exception SQLException Description of Exception
* @since October 4, 2002
* @see java.sql.DatabaseMetaData
*/
public static List getFieldTypes(DatabaseMetaData metadata, String tableName) throws SQLException {
String[] tableSpec = new String[]{null, tableName.toUpperCase()};
if (tableName.indexOf(".") != -1) {
tableSpec = tableName.toUpperCase().split("\\.", 2);
}
ResultSet rs = metadata.getColumns(null, tableSpec[0], tableSpec[1], "%");//null as last parm
List fieldTypes = new LinkedList();
while (rs.next()) {
// get DATA TYPE - fifth column in result set from Database metadata
fieldTypes.add(new Integer(rs.getInt(5)));
}
if (fieldTypes.size() == 0) {
//throw new RuntimeException("No metadata obtained for table: " + tableName);
//Warn !
logger.warn("No metadata obtained for table: \"" + tableName + "\", using workaround ...");
// WE HAVE SOME PATCH, but ...
ResultSetMetaData fieldsMetadata = getTableFieldsMetadata(metadata.getConnection(), tableName);
for (int i = 0; i < fieldsMetadata.getColumnCount(); i++) {
fieldTypes.add(new Integer(fieldsMetadata.getColumnType(i + 1)));
}
}
return fieldTypes;
}
/**
* Gets the FieldTypes of fields (enumerated in dbFields) present in specified DB table
*
* @param metadata Description of the Parameter
* @param tableName name of the table for which to get metadata (field names, types)
* @param dbFields array of field names
* @return list of JDBC FieldTypes
* @exception SQLException Description of the Exception
*/
public static List getFieldTypes(DatabaseMetaData metadata, String tableName, String[] dbFields) throws SQLException {
String[] tableSpec = new String[]{null, tableName.toUpperCase()};
// if schema defined in table name, extract schema & table name into separate fields
if (tableName.indexOf(".") != -1) {
tableSpec = tableName.toUpperCase().split("\\.", 2);
}
ResultSet rs = metadata.getColumns(null, tableSpec[0], tableSpec[1], "%");//null as last parm
Map dbFieldsMap = new HashMap();
List fieldTypes = new LinkedList();
Integer dataType;
while (rs.next()) {
// FIELD NAME - fourth columnt in resutl set
// get DATA TYPE - fifth column in result set from Database metadata
dbFieldsMap.put(rs.getString(4).toUpperCase(), new Integer(rs.getInt(5)));
}
if (dbFieldsMap.size() == 0) {
//throw new RuntimeException("No metadata obtained for table: " + tableName);
//Warn !
logger.warn("No metadata obtained for table: \"" + tableName + "\", using workaround ...");
// WE HAVE SOME PATCH, but ...
ResultSetMetaData fieldsMetadata = getTableFieldsMetadata(metadata.getConnection(), tableName);
for (int i = 0; i < fieldsMetadata.getColumnCount(); i++) {
dbFieldsMap.put(fieldsMetadata.getColumnName(i + 1).toUpperCase(),
new Integer(fieldsMetadata.getColumnType(i + 1)));
}
}
for (int i = 0; i < dbFields.length; i++) {
dataType = (Integer) dbFieldsMap.get(dbFields[i].toUpperCase());
if (dataType == null) {
throw new SQLException("Field \"" + dbFields[i] + "\" does not exists in table \"" + tableName + "\"");
}
fieldTypes.add(dataType);
}
return fieldTypes;
}
/**
* Gets the fieldTypes attribute of the SQLUtil class
*
* @param metadata Description of the Parameter
* @return The fieldTypes value
* @exception SQLException Description of the Exception
*/
public static List getFieldTypes(ParameterMetaData metadata) throws SQLException {
List fieldTypes = new LinkedList();
for (int i = 1; i <= metadata.getParameterCount(); i++) {
fieldTypes.add(new Integer(metadata.getParameterType(i)));
}
return fieldTypes;
}
/**
* Gets the fieldTypes attribute of the SQLUtil class
*
* @param metadata Description of the Parameter
* @return The fieldTypes value
* @exception SQLException Description of the Exception
*/
public static List getFieldTypes(ResultSetMetaData metadata) throws SQLException {
List fieldTypes = new LinkedList();
for (int i = 1; i <= metadata.getColumnCount(); i++) {
fieldTypes.add(new Integer(metadata.getColumnType(i)));
}
return fieldTypes;
}
/**
* Gets the fieldTypes attribute of the SQLUtil class
*
* @param metadata Description of the Parameter
* @param cloverFields Description of the Parameter
* @return The fieldTypes value
* @exception SQLException Description of the Exception
*/
public static List getFieldTypes(DataRecordMetadata metadata, String[] cloverFields) {
List fieldTypes = new LinkedList();
DataFieldMetadata fieldMeta;
for (int i = 0; i < cloverFields.length; i++) {
if ((fieldMeta = metadata.getField(cloverFields[i])) != null) {
fieldTypes.add(new Integer(jetelType2sql(fieldMeta.getType())));
} else {
throw new RuntimeException("Field name [" + cloverFields[i] + "] not found in " + metadata.getName());
}
}
return fieldTypes;
}
/**
* Gets the fieldTypes attribute of the SQLUtil class
*
* @param metadata Description of the Parameter
* @return The fieldTypes value
* @exception SQLException Description of the Exception
*/
public static List getFieldTypes(DataRecordMetadata metadata) {
List fieldTypes = new LinkedList();
for (int i = 0; i < metadata.getNumFields(); i++) {
fieldTypes.add(new Integer(jetelType2sql(metadata.getField(i).getType())));
}
return fieldTypes;
}
/**
* Gets the tableFieldsMetadata attribute of the SQLUtil class
*
* @param con Description of the Parameter
* @param tableName Description of the Parameter
* @return The tableFieldsMetadata value
* @exception SQLException Description of the Exception
*/
public static ResultSetMetaData getTableFieldsMetadata(Connection con, String tableName) throws SQLException {
String queryStr = "select * from " + tableName + " where 1=0 ";
ResultSet rs = con.createStatement().executeQuery(queryStr);
return rs.getMetaData();
}
/**
* Converts Jetel/Clover datatype into String
*
* @param fieldType Jetel datatype
* @return Corresponding string name
*/
public static String jetelType2Str(char fieldType) {
return DataFieldMetadata.type2Str(fieldType);
}
/**
* Converts Jetel data type into SQL data type
*
* @param jetelType
* @return corresponding Jetel data type
* @since September 25, 2002
*/
public static int jetelType2sql(char jetelType) {
switch (jetelType) {
case DataFieldMetadata.INTEGER_FIELD:
return Types.INTEGER;
case DataFieldMetadata.NUMERIC_FIELD:
return Types.NUMERIC;
case DataFieldMetadata.STRING_FIELD:
return Types.VARCHAR;
case DataFieldMetadata.DATE_FIELD:
return Types.DATE;
case DataFieldMetadata.LONG_FIELD:
return Types.BIGINT;
default:
return -1;
// unknown or not possible to translate
}
}
/**
* Converts SQL data type into Jetel data type
*
* @param sqlType JDBC SQL data type
* @return corresponding Jetel data type
* @since September 25, 2002
*/
public static char sqlType2jetel(int sqlType) {
switch (sqlType) {
case Types.INTEGER:
case Types.SMALLINT:
case Types.TINYINT:
return DataFieldMetadata.INTEGER_FIELD;
case Types.BIGINT:
return DataFieldMetadata.LONG_FIELD;
case Types.DECIMAL:
case Types.DOUBLE:
case Types.FLOAT:
case Types.NUMERIC:
case Types.REAL:
return DataFieldMetadata.NUMERIC_FIELD;
case Types.CHAR:
case Types.LONGVARCHAR:
case Types.VARCHAR:
case Types.OTHER:
return DataFieldMetadata.STRING_FIELD;
case Types.DATE:
case Types.TIME:
case Types.TIMESTAMP:
return DataFieldMetadata.DATE_FIELD;
// proximity assignment
case Types.BOOLEAN:
case Types.BIT:
return DataFieldMetadata.STRING_FIELD;
default:
logger.warn("Unknown SQL type is: " + sqlType);
return (char) -1;
// unknown or not possible to translate
}
}
} |
package rfx.core.util;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLEncoder;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.apache.http.HttpEntity;
import org.apache.http.HttpHost;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.client.HttpClient;
import org.apache.http.client.HttpResponseException;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.conn.routing.HttpRoute;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.util.EntityUtils;
public class HttpClientUtil {
private static final int CONNECT_TIMEOUT = 20 * 1000; // 20 seconds
public static final Charset CHARSET_UTF8 = Charset.forName(StringPool.UTF_8);
static int DEFAULT_TIMEOUT = 10000;// 10 seconds
public static final String USER_AGENT = "Mozilla/5.0 (Windows NT 5.1; rv:9.0) Gecko/20100101 Firefox/9.0";
public static final String MOBILE_USER_AGENT = "Mozilla/5.0 (Linux; U; Android 2.2; en-us; DROID2 GLOBAL Build/S273) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1";
final static int MAX_SIZE = 20;
static ConcurrentMap<Integer, HttpClient> httpClientPool = new ConcurrentHashMap<>(MAX_SIZE);
public static final HttpClient getThreadSafeClient() throws Exception {
int slot = (int) (Math.random() * (MAX_SIZE + 1));
return getThreadSafeClient(slot);
}
public static final HttpClient getThreadSafeClient(int slot) throws Exception {
HttpClient httpClient = httpClientPool.get(slot);
if (httpClient == null) {
PoolingHttpClientConnectionManager cm = new PoolingHttpClientConnectionManager();
// Increase max total connection to 200
cm.setMaxTotal(300);
// Increase default max connection per route to 20
cm.setDefaultMaxPerRoute(30);
// Increase max connections for localhost:80 to 50
HttpHost localhost = new HttpHost("locahost", 80);
cm.setMaxPerRoute(new HttpRoute(localhost), 50);
RequestConfig requestConfig = RequestConfig.custom().setConnectTimeout(CONNECT_TIMEOUT).build();
httpClient = HttpClients.custom().setConnectionManager(cm).setDefaultRequestConfig(requestConfig).build();
httpClientPool.put(slot, httpClient);
}
return httpClient;
}
public static boolean isValidHtml(String html) {
if (html == null) {
return false;
}
if (html.equals("404") || html.isEmpty() || html.equals("500")) {
return false;
}
return true;
}
public static String executePost(String url, Map<String, String> params, String accessTokens) {
try {
HttpClient httpClient = getThreadSafeClient();
HttpPost postRequest = new HttpPost(url);
postRequest.addHeader("Accept-Charset", StringPool.UTF_8);
postRequest.addHeader("User-Agent", USER_AGENT);
postRequest.setHeader("Authorization", "OAuth oauth_token=" + accessTokens);
Set<String> names = params.keySet();
List<NameValuePair> postParameters = new ArrayList<NameValuePair>(names.size());
for (String name : names) {
System.out.println(name + "=" + params.get(name));
postParameters.add(new BasicNameValuePair(name, params.get(name)));
}
postRequest.setEntity(new UrlEncodedFormEntity(postParameters, "UTF-8"));
HttpResponse response = httpClient.execute(postRequest);
HttpEntity entity = response.getEntity();
if (entity != null) {
return EntityUtils.toString(entity, CHARSET_UTF8);
}
} catch (HttpResponseException e) {
System.err.println(e.getMessage());
} catch (Exception e) {
e.printStackTrace();
}
return "";
}
public static String executePost(String url) {
try {
HttpPost httppost = new HttpPost(url);
httppost.setHeader("User-Agent", USER_AGENT);
httppost.setHeader("Accept-Charset", "utf-8");
httppost.setHeader("Cache-Control", "max-age=3, must-revalidate, private");
httppost.setHeader("Authorization",
"OAuth oauth_token=2d62f7b3de642cdd402f62e42fba0b25, oauth_consumer_key=a324957217164fd1d76b4b60d037abec, oauth_version=1.0, oauth_signature_method=HMAC-SHA1, oauth_timestamp=1322049404, oauth_nonce=-5195915877644743836, oauth_signature=wggOr1ia7juVbG%2FZ2ydImmiC%2Ft4%3D");
HttpResponse response = getThreadSafeClient().execute(httppost);
HttpEntity entity = response.getEntity();
if (entity != null) {
return EntityUtils.toString(entity, CHARSET_UTF8);
}
} catch (HttpResponseException e) {
System.err.println(e.getMessage());
} catch (Exception e) {
e.printStackTrace();
}
return "";
}
public static String executeGet(final URL url) {
HttpResponse response = null;
HttpClient httpClient = null;
// System.out.println("executeGet:" + url);
try {
HttpGet httpget = new HttpGet(url.toURI());
httpget.setHeader("User-Agent", USER_AGENT);
httpget.setHeader("Accept-Charset", "utf-8");
httpget.setHeader("Accept", "text/html,application/xhtml+xml");
httpget.setHeader("Cache-Control", "max-age=3, must-revalidate, private");
httpClient = getThreadSafeClient();
response = httpClient.execute(httpget);
int code = response.getStatusLine().getStatusCode();
if (code == 200) {
HttpEntity entity = response.getEntity();
if (entity != null) {
String html = EntityUtils.toString(entity, CHARSET_UTF8);
return html;
}
} else if (code == 404) {
return "404";
} else {
return "500";
}
} catch (Throwable e) {
// e.printStackTrace();
return "444";
} finally {
response = null;
}
return "";
}
public static String executeHttpGet(String urlString) {
HttpResponse response = null;
HttpClient httpClient = null;
String html = StringPool.BLANK;
int slot = (int) (Math.random() * (MAX_SIZE + 1));
HttpGet httpget = null;
try {
URL url = new URL(urlString);
httpget = new HttpGet(url.toURI());
httpget.setHeader("User-Agent", USER_AGENT);
httpget.setHeader("Accept-Charset", "utf-8");
httpget.setHeader("Accept", "text/html,application/xhtml+xml");
httpget.setHeader("Cache-Control", "max-age=3, must-revalidate, private");
httpClient = getThreadSafeClient(slot);
response = httpClient.execute(httpget);
int code = response.getStatusLine().getStatusCode();
if (code == 200) {
HttpEntity entity = response.getEntity();
if (entity != null) {
html = EntityUtils.toString(entity, CHARSET_UTF8);
}
}
} catch (Exception e) {
e.printStackTrace();
httpClientPool.remove(slot);
} finally {
response = null;
}
return html;
}
public static String executeGet(final String url) {
try {
return executeGet(new URL(url));
} catch (MalformedURLException e) {
e.printStackTrace();
}
return "";
}
public static String executeGet(final String url, boolean safeThread, boolean redownload500, int numRetry) {
try {
if (redownload500) {
String html = executeGet(new URL(url));
while (html.equals("500")) {
Thread.sleep(400);
html = executeGet(new URL(url));
numRetry
if (numRetry <= 0) {
break;
}
}
return html;
} else {
return executeGet(new URL(url));
}
} catch (Exception e) {
e.printStackTrace();
}
return "";
}
public static String executeGet(String baseUrl, Map<String, Object> params) {
if (!baseUrl.contains("?")) {
baseUrl += "?";
}
StringBuilder url = new StringBuilder(baseUrl);
Set<String> ps = params.keySet();
int c = 0, s = params.size() - 1;
try {
for (String p : ps) {
String v = URLEncoder.encode(params.get(p).toString().trim(), StringPool.UTF_8);
if (!v.equals(StringPool.BLANK)) {
p = URLEncoder.encode(p, StringPool.UTF_8);
url.append(p).append("=").append(v);
if (c < s) {
url.append("&");
}
c++;
}
}
} catch (UnsupportedEncodingException e) {
}
System.out.println(url.toString());
return executeGet(url.toString());
}
public static void main(String[] args) {
String rs = HttpClientUtil.executeGet("http://vnexpress.net/");
System.out.println(rs);
}
} |
package org.jboss.jca.rhq.test;
import org.jboss.jca.core.api.connectionmanager.pool.Pool;
import org.jboss.jca.core.api.connectionmanager.pool.PoolConfiguration;
import org.jboss.jca.core.api.connectionmanager.pool.PoolStatistics;
import org.jboss.jca.core.api.management.DataSource;
import org.jboss.jca.core.api.management.ManagementRepository;
import org.jboss.jca.rhq.core.ManagementRepositoryManager;
import org.jboss.jca.rhq.embed.core.EmbeddedJcaDiscover;
import java.io.File;
import java.net.URL;
import java.sql.Connection;
import java.util.List;
import javax.naming.InitialContext;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import org.rhq.core.domain.configuration.Configuration;
import org.rhq.core.domain.configuration.PropertySimple;
import org.rhq.core.domain.resource.Resource;
import org.rhq.core.pc.PluginContainer;
import org.rhq.core.pc.PluginContainerConfiguration;
import org.rhq.core.pc.inventory.InventoryManager;
import org.rhq.core.pc.inventory.RuntimeDiscoveryExecutor;
import org.rhq.core.pc.plugin.FileSystemPluginFinder;
import org.rhq.core.pluginapi.configuration.ConfigurationFacet;
import org.rhq.core.pluginapi.configuration.ConfigurationUpdateReport;
import org.rhq.core.pluginapi.operation.OperationFacet;
import org.rhq.core.pluginapi.operation.OperationResult;
import static org.junit.Assert.*;
/**
* RHQ plugin test cases for Datasource
*
* @author <a href="mailto:jeff.zhang@jboss.org">Jeff Zhang</a>
* @author <a href="mailto:lgao@redhat.com">Lin Gao</a>
*/
public class DsTestCase
{
/** RAR resource */
private static Resource rarServiceResource;
/** deployed url */
private static URL deployedUrl;
/** deployed ds */
private static URL ds;
/**
* Basic
* @exception Throwable Thrown if case of an error
*/
@Test
public void testBasic() throws Throwable
{
assertEquals("java:/H2DS", rarServiceResource.getName());
}
/**
* test Datasource LoadResourceConfiguration.
*
* @throws Throwable exception
*/
@Test
public void testDsLoadResourceConfiguration() throws Throwable
{
PluginContainer pc = PluginContainer.getInstance();
InventoryManager im = pc.getInventoryManager();
ConfigurationFacet configFacet = (ConfigurationFacet)im.getResourceComponent(rarServiceResource);
Configuration config = configFacet.loadResourceConfiguration();
assertEquals("java:/H2DS", config.getSimpleValue("jndi-name", null));
assertFalse(Boolean.valueOf(config.getSimpleValue("xa", null)));
assertEquals("H2DS", config.getSimpleValue("pool-name", null));
testLoadPoolConfigurationIntialValue(config);
}
/**
* Tests load PoolConfiguration initial value.
*
* @param config RHQ Configuration of PoolConfiguration
*/
protected static void testLoadPoolConfigurationIntialValue(Configuration config)
{
assertEquals("0", config.getSimpleValue("min-pool-size", null));
assertEquals("20", config.getSimpleValue("max-pool-size", null));
assertEquals("false", config.getSimpleValue("background-validation", null));
assertEquals("0", config.getSimpleValue("background-validation-millis", null));
assertEquals("0", config.getSimpleValue("background-validation-minutes", null));
assertEquals("30000", config.getSimpleValue("blocking-timeout-millis", null));
assertEquals("30", config.getSimpleValue("idle-timeout-minutes", null));
assertEquals("false", config.getSimpleValue("prefill", null));
assertEquals("false", config.getSimpleValue("use-strict-min", null));
assertEquals("false", config.getSimpleValue("use-fast-fail", null));
}
/**
* test Datasource update ResourceConfiguration.
*
* @throws Throwable exception
*/
@Test
public void testDsUpdateResourceConfiguration() throws Throwable
{
PluginContainer pc = PluginContainer.getInstance();
InventoryManager im = pc.getInventoryManager();
ConfigurationFacet configFacet = (ConfigurationFacet)im.getResourceComponent(rarServiceResource);
Configuration config = configFacet.loadResourceConfiguration();
DataSource ds = getDataSource();
PoolConfiguration poolConfig = ds.getPoolConfiguration();
testUpdatePoolConfig(configFacet, config, poolConfig);
}
/**
* Tests updates PoolConfiguration.
*
* @param configFacet ConfigurationFacet
* @param config Configuration
* @param poolConfig PoolConfiguraton
*/
protected static void testUpdatePoolConfig(ConfigurationFacet configFacet, Configuration config,
PoolConfiguration poolConfig)
{
int oldMinPoolSize = poolConfig.getMinSize();
int oldMaxPoolSize = poolConfig.getMaxSize();
boolean oldBackgroundValidation = poolConfig.isBackgroundValidation();
int oldBackgroundValidationTime = poolConfig.getBackgroundValidationMinutes();
long oldBlockingTimeout = poolConfig.getBlockingTimeout();
long oldIdleTimeout = poolConfig.getIdleTimeout();
boolean oldPreFill = poolConfig.isPrefill();
boolean oldUseStrictMin = poolConfig.isStrictMin();
boolean oldUseFastFail = poolConfig.isUseFastFail();
int minPoolSize = 5;
int maxPoolSize = 15;
int backValidationTime = 30;
long blockingTimeout = 10000;
long idleTimeOut = 15;
config.put(new PropertySimple("min-pool-size", minPoolSize));
config.put(new PropertySimple("max-pool-size", maxPoolSize));
config.put(new PropertySimple("background-validation", true));
config.put(new PropertySimple("background-validation-minutes", backValidationTime));
config.put(new PropertySimple("blocking-timeout-millis", blockingTimeout));
config.put(new PropertySimple("idle-timeout-minutes", idleTimeOut));
config.put(new PropertySimple("prefill", false));
config.put(new PropertySimple("use-strict-min", true));
config.put(new PropertySimple("use-fast-fail", true));
ConfigurationUpdateReport updateConfigReport = new ConfigurationUpdateReport(config);
configFacet.updateResourceConfiguration(updateConfigReport);
assertEquals(minPoolSize, poolConfig.getMinSize());
assertEquals(maxPoolSize, poolConfig.getMaxSize());
assertTrue(poolConfig.isBackgroundValidation());
assertEquals(backValidationTime, poolConfig.getBackgroundValidationMinutes());
assertEquals(blockingTimeout, poolConfig.getBlockingTimeout());
assertEquals(idleTimeOut * 60 * 1000L, poolConfig.getIdleTimeout());
assertFalse(poolConfig.isPrefill());
assertTrue(poolConfig.isStrictMin());
assertTrue(poolConfig.isUseFastFail());
poolConfig.setBackgroundValidation(oldBackgroundValidation);
poolConfig.setBackgroundValidationMinutes(oldBackgroundValidationTime);
poolConfig.setBlockingTimeout(oldBlockingTimeout);
poolConfig.setIdleTimeout(oldIdleTimeout);
poolConfig.setMaxSize(oldMaxPoolSize);
poolConfig.setMinSize(oldMinPoolSize);
poolConfig.setPrefill(oldPreFill);
poolConfig.setStrictMin(oldUseStrictMin);
poolConfig.setUseFastFail(oldUseFastFail);
}
/**
* Gets the associated DataSource
* @return datasource
*/
private DataSource getDataSource()
{
ManagementRepository manRepo = ManagementRepositoryManager.getManagementRepository();
List<DataSource> datasources = manRepo.getDataSources();
assertEquals(1, datasources.size());
DataSource ds = datasources.get(0);
return ds;
}
/**
* Tests DsResourceComponent pool testConnection.
*
* @throws Throwable the exception.
*/
@Test
public void testDsPoolOperationTestConnection() throws Throwable
{
testPoolOperationTestConnection(rarServiceResource);
}
/**
* Tests pool testConnection.
*
* @param poolRes Resource
* @throws Throwable the exception.
*/
protected static void testPoolOperationTestConnection(Resource poolRes) throws Throwable
{
PluginContainer pc = PluginContainer.getInstance();
InventoryManager im = pc.getInventoryManager();
OperationFacet cfOpertaionFacet = (OperationFacet)im.getResourceComponent(poolRes);
OperationResult result = cfOpertaionFacet.invokeOperation("testConnection", null);
assertNotNull(result);
Configuration config = result.getComplexResults();
assertEquals("true", config.getSimpleValue("result", null));
}
/**
* test DataSource Pool.flush()
*
* @throws Throwable exception
*/
@Test
public void testDsPoolFlush() throws Throwable
{
DataSource ds = getDataSource();
Pool pool = ds.getPool();
pool.flush();
// just not thrown exception for now.
}
/**
* test DataSource Pool.flush(true)
*
* @throws Throwable exception
*/
@Ignore
@Test
public void testDsPoolFlushKill() throws Throwable
{
DataSource ds = getDataSource();
Pool pool = ds.getPool();
// set prefill to false
ds.getPoolConfiguration().setPrefill(false);
PoolStatistics poolStatistics = pool.getStatistics();
InitialContext context = new InitialContext();
javax.sql.DataSource sqlDS = (javax.sql.DataSource)context.lookup(ds.getJndiName());
Connection conn = sqlDS.getConnection();
assertEquals(1, poolStatistics.getActiveCount());
pool.flush(true); // it flushes all connections from the pool.
assertEquals(0, poolStatistics.getActiveCount());
conn.close();
// just not thrown exception for now.
}
/**
* Lifecycle start, before the suite is executed
* @throws Throwable throwable exception
*/
@BeforeClass
public static void beforeClass() throws Throwable
{
File pluginDir = new File(System.getProperty("archives.dir"));
PluginContainerConfiguration pcConfig = new PluginContainerConfiguration();
pcConfig.setPluginFinder(new FileSystemPluginFinder(pluginDir));
pcConfig.setPluginDirectory(pluginDir);
pcConfig.setInsideAgent(false);
PluginContainer pc = PluginContainer.getInstance();
pc.setConfiguration(pcConfig);
pc.initialize();
EmbeddedJcaDiscover jca = EmbeddedJcaDiscover.getInstance();
deployedUrl = DsTestCase.class.getResource("/jdbc-local.rar");
jca.deploy(deployedUrl);
ds = DsTestCase.class.getResource("/h2-ds.xml");
jca.deploy(ds);
InventoryManager im = pc.getInventoryManager();
im.executeServerScanImmediately();
Resource platformRes = im.getPlatform();
Resource serverRes = platformRes.getChildResources().iterator().next();
RuntimeDiscoveryExecutor discoverExecutor = new RuntimeDiscoveryExecutor(im, pcConfig, serverRes);
discoverExecutor.run();
rarServiceResource = serverRes.getChildResources().iterator().next();
}
/**
* Lifecycle stop, after the suite is executed
* @throws Throwable throwable exception
*/
@AfterClass
public static void afterClass() throws Throwable
{
EmbeddedJcaDiscover jca = EmbeddedJcaDiscover.getInstance();
jca.undeploy(ds);
jca.undeploy(deployedUrl);
PluginContainer pc = PluginContainer.getInstance();
pc.shutdown();
}
} |
package matlabcontrol;
import java.awt.EventQueue;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ConcurrentHashMap;
import com.mathworks.jmi.Matlab;
import com.mathworks.jmi.NativeMatlab;
class JMIWrapper
{
/**
 * Map of unique identifier to stored object.
 *
 * A ConcurrentHashMap is used because entries are written by arbitrary caller threads
 * (via storeObject) while being read and removed by MATLAB's own thread (via the static,
 * unsynchronized retrieveStoredObject) - the original plain HashMap was not safe under
 * that concurrent access.
 */
private static final Map<String, StoredObject> STORED_OBJECTS = new ConcurrentHashMap<String, StoredObject>();
/**
 * The name of this class and package, used to build the MATLAB retrieval expression.
 */
private static final String CLASS_NAME = JMIWrapper.class.getName();
/**
 * Retrieves the stored object registered under the given identifier, or {@code null}
 * when no such object exists. If the object was not stored permanently, its entry is
 * dropped from the map so it can be garbage collected after this call.
 *
 * @param id identifier produced by {@code storeObject}
 * @return the stored object, or {@code null} when nothing is registered under {@code id}
 */
public static Object retrieveStoredObject(String id)
{
    StoredObject stored = STORED_OBJECTS.get(id);
    if(stored == null)
    {
        return null;
    }
    if(!stored.storePermanently)
    {
        STORED_OBJECTS.remove(id);
    }
    return stored.object;
}
/**
 * Registers the object under a freshly generated unique id and returns the MATLAB
 * expression that, when eval'd inside MATLAB, calls back into this class to fetch it.
 *
 * @see MatlabInteractor#storeObject(java.lang.Object, boolean)
 */
// NOTE(review): this method synchronizes on the JMIWrapper *instance*, but STORED_OBJECTS is
// static and is also read by the static retrieveStoredObject(...) without any lock, so the
// instance lock does not actually guard the shared map - verify intended.
synchronized String storeObject(Object obj, boolean storePermanently)
{
    StoredObject stored = new StoredObject(obj, storePermanently);
    STORED_OBJECTS.put(stored.id, stored);
    // Build the MATLAB-side callback expression, e.g. pkg.JMIWrapper.retrieveStoredObject('STORED_OBJECT_...')
    String retrievalString = CLASS_NAME + ".retrieveStoredObject('" + stored.id + "')";
    return retrievalString;
}
/**
 * An object stored by matlabcontrol that can be accessed via {@link #retrieveStoredObject(java.lang.String)}.
 */
private static class StoredObject
{
    // The wrapped user object handed back on retrieval.
    private final Object object;
    // When false, the entry is removed from the map on first retrieval.
    private final boolean storePermanently;
    // Globally unique key; prefixed so it is recognizable inside MATLAB expressions.
    private final String id;
    private StoredObject(Object object, boolean storePermanently)
    {
        this.object = object;
        this.storePermanently = storePermanently;
        this.id = "STORED_OBJECT_" + UUID.randomUUID().toString();
    }
}
/**
 * Assigns {@code value} to the named MATLAB variable by stashing it Java-side and
 * having MATLAB pull it in via the retrieval expression.
 *
 * @see MatlabInteractor#setVariable(java.lang.String, java.lang.Object)
 */
synchronized void setVariable(String variableName, Object value) throws MatlabInvocationException
{
    this.eval(variableName + " = " + this.storeObject(value, false) + ";");
}
/**
 * Reads the named MATLAB variable by evaluating it and requesting a single result.
 *
 * @see MatlabInteractor#getVariable(java.lang.String)
 */
synchronized Object getVariable(String variableName) throws MatlabInvocationException
{
    // equivalent to returningEval(variableName, 1), which delegates to this exact feval
    return this.returningFeval("eval", new Object[]{ variableName }, 1);
}
/**
 * Asks MATLAB to terminate itself. The request is issued from a freshly spawned
 * thread so this method returns promptly even while MATLAB shuts down.
 *
 * @see MatlabInteractor#exit()
 */
synchronized void exit() throws MatlabInvocationException
{
    new Thread()
    {
        @Override
        public void run()
        {
            // Defer until MATLAB's main thread is free to process the command.
            Matlab.whenMatlabReady(new Runnable()
            {
                @Override
                public void run()
                {
                    try
                    {
                        // "exit" with zero requested outputs terminates the MATLAB session.
                        Matlab.mtFevalConsoleOutput("exit", null, 0);
                    }
                    // Intentionally ignored: MATLAB is exiting, so a failure here is moot
                    // and there is no caller left to report it to.
                    catch (Exception e) { }
                }
            });
        }
    }.start();
}
/**
 * Evaluates the command in MATLAB, discarding any output.
 *
 * @see MatlabInteractor#eval(java.lang.String)
 */
synchronized void eval(final String command) throws MatlabInvocationException
{
    // equivalent to returningEval(command, 0), inlined to its feval form
    this.returningFeval("eval", new Object[]{ command }, 0);
}
/**
 * Evaluates the command in MATLAB and returns {@code returnCount} results by
 * delegating to MATLAB's {@code eval} function via feval.
 *
 * @see MatlabInteractor#returningEval(java.lang.String, int)
 */
synchronized Object returningEval(final String command, final int returnCount) throws MatlabInvocationException
{
    final Object[] evalArgs = { command };
    return this.returningFeval("eval", evalArgs, returnCount);
}
/**
 * Invokes the named MATLAB function with the given arguments, discarding all outputs
 * (zero return values requested).
 *
 * @see MatlabInteractor#returningFeval(java.lang.String, java.lang.Object[])
 */
synchronized void feval(final String functionName, final Object[] args) throws MatlabInvocationException
{
    this.returningFeval(functionName, args, 0);
}
/**
 * Invokes the named MATLAB function with the given arguments and returns
 * {@code returnCount} results. Dispatch depends on the calling thread:
 * the AWT event dispatch thread is rejected outright; MATLAB's own thread
 * calls straight through; any other thread queues the call for MATLAB's
 * main thread and blocks until the result (or exception) comes back.
 *
 * @see MatlabInteractor#returningFeval(java.lang.String, java.lang.Object[], int)
 */
synchronized Object returningFeval(final String functionName, final Object[] args,
        final int returnCount) throws MatlabInvocationException
{
    Object result;
    if(EventQueue.isDispatchThread())
    {
        // Blocking the EDT while waiting on MATLAB is disallowed.
        throw new MatlabInvocationException(MatlabInvocationException.EVENT_DISPATCH_THREAD_MSG);
    }
    else if(NativeMatlab.nativeIsMatlabThread())
    {
        // Already on MATLAB's thread: invoke directly, no queueing needed.
        try
        {
            result = Matlab.mtFevalConsoleOutput(functionName, args, returnCount);
        }
        catch(Exception e)
        {
            throw new MatlabInvocationException(MatlabInvocationException.INTERNAL_EXCEPTION_MSG, e);
        }
    }
    else
    {
        // Capacity 1 suffices: exactly one MatlabReturn (value or exception) is ever queued.
        final ArrayBlockingQueue<MatlabReturn> returnQueue = new ArrayBlockingQueue<MatlabReturn>(1);
        Matlab.whenMatlabReady(new Runnable()
        {
            @Override
            public void run()
            {
                try
                {
                    returnQueue.add(new MatlabReturn(Matlab.mtFevalConsoleOutput(functionName, args, returnCount)));
                }
                catch(Exception e)
                {
                    // Ship the exception back to the waiting thread instead of losing it here.
                    returnQueue.add(new MatlabReturn(e));
                }
            }
        });
        try
        {
            //Wait for MATLAB's main thread to finish computation
            MatlabReturn matlabReturn = returnQueue.take();
            //If exception was thrown, rethrow it
            if(matlabReturn.exceptionThrown)
            {
                // Wrap so the MATLAB-side stack trace survives crossing threads.
                Throwable cause = new ThrowableWrapper(matlabReturn.exception);
                throw new MatlabInvocationException(MatlabInvocationException.INTERNAL_EXCEPTION_MSG, cause);
            }
            //Return value computed by MATLAB
            else
            {
                result = matlabReturn.value;
            }
        }
        catch(InterruptedException e)
        {
            throw new MatlabInvocationException(MatlabInvocationException.INTERRUPTED_MSG, e);
        }
    }
    return result;
}
/**
* Data returned from MATLAB.
*/
/**
 * Data returned from MATLAB: either a computed value or the exception that
 * aborted the computation, never both.
 */
private static class MatlabReturn
{
    final boolean exceptionThrown;
    final Object value;
    final Exception exception;

    MatlabReturn(Object value)
    {
        this(false, value, null);
    }

    MatlabReturn(Exception exception)
    {
        this(true, null, exception);
    }

    // Both public constructors funnel through here so the invariant
    // (value XOR exception) is established in a single place.
    private MatlabReturn(boolean exceptionThrown, Object value, Exception exception)
    {
        this.exceptionThrown = exceptionThrown;
        this.value = value;
        this.exception = exception;
    }
}
/**
* @see MatlabInteractor#returningFeval(java.lang.String, java.lang.Object[])
*/
/**
 * Invokes a MATLAB function, first asking MATLAB (via {@code nargout}) how
 * many values the function returns and requesting that many. Falls back to
 * zero return values if the count cannot be determined.
 *
 * @see MatlabInteractor#returningFeval(java.lang.String, java.lang.Object[])
 */
synchronized Object returningFeval(final String functionName, final Object[] args) throws MatlabInvocationException
{
    //Get the number of arguments that will be returned
    Object result = this.returningFeval("nargout", new String[] { functionName }, 1);
    int nargout = 0;
    try
    {
        // nargout comes back as a 1-element double array; the cast chain
        // throws if the result has an unexpected shape.
        nargout = (int) ((double[]) result)[0];
        //If an unlimited number of arguments (represented by -1), choose 1
        if(nargout == -1)
        {
            nargout = 1;
        }
    }
    catch(Exception e) {} // deliberate best-effort: keep nargout == 0 on any failure
    //Send the request
    return this.returningFeval(functionName, args, nargout);
}
/**
* @see MatlabInteractor#setDiagnosticMode(boolean)
*/
/**
 * Enables or disables echoing of evaluated commands. Rejected on the AWT
 * Event Dispatch Thread; applied directly when already on MATLAB's thread,
 * otherwise deferred until MATLAB's main thread is available.
 *
 * @see MatlabInteractor#setDiagnosticMode(boolean)
 */
synchronized void setDiagnosticMode(final boolean enable) throws MatlabInvocationException
{
    if(EventQueue.isDispatchThread())
    {
        throw new MatlabInvocationException(MatlabInvocationException.EVENT_DISPATCH_THREAD_MSG);
    }

    if(NativeMatlab.nativeIsMatlabThread())
    {
        // Already on MATLAB's thread; apply the setting immediately.
        Matlab.setEchoEval(enable);
        return;
    }

    // Otherwise queue the change for MATLAB's main thread.
    Matlab.whenMatlabReady(new Runnable()
    {
        @Override
        public void run()
        {
            Matlab.setEchoEval(enable);
        }
    });
}
} |
package tableviewer;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import java.util.Vector;
/**
* Class that plays the role of the domain model in the TableViewerExample
* In real life, this class would access a persistent store of some kind.
*
*/
public class SolrGUIServerList {
private final int COUNT = 10;
private Vector<SolrGUIServer> tasks = new Vector<SolrGUIServer>(COUNT);
private Set<ISolrGUIServerListViewer> changeListeners = new HashSet<ISolrGUIServerListViewer>();
// Combo box choices
static final String[] OWNERS_ARRAY = { "?", "Nancy", "Larry", "Joe" };
/**
* Constructor
*/
public SolrGUIServerList() {
super();
this.initData();
}
/*
* Initialize the table data.
* Create COUNT tasks and add them them to the
* collection of tasks
*/
private void initData() {
SolrGUIServer task;
for (int i = 0; i < COUNT; i++) {
task = new SolrGUIServer("Task " + i);
task.setOwner(OWNERS_ARRAY[i % 3]);
tasks.add(task);
}
};
/**
* Return the array of owners
*/
public String[] getOwners() {
return OWNERS_ARRAY;
}
/**
* Return the collection of tasks
*/
public Vector<SolrGUIServer> getTasks() {
return tasks;
}
/**
* Add a new task to the collection of tasks
*/
public void addTask() {
SolrGUIServer task = new SolrGUIServer("New task");
tasks.add(tasks.size(), task);
Iterator<ISolrGUIServerListViewer> iterator = changeListeners.iterator();
while (iterator.hasNext())
((ISolrGUIServerListViewer) iterator.next()).addTask(task);
}
/**
* @param task
*/
public void removeTask(SolrGUIServer task) {
tasks.remove(task);
Iterator<ISolrGUIServerListViewer> iterator = changeListeners.iterator();
while (iterator.hasNext())
((ISolrGUIServerListViewer) iterator.next()).removeTask(task);
}
/**
* @param task
*/
public void taskChanged(SolrGUIServer task) {
Iterator<ISolrGUIServerListViewer> iterator = changeListeners.iterator();
while (iterator.hasNext())
((ISolrGUIServerListViewer) iterator.next()).updateTask(task);
}
/**
* @param viewer
*/
public void removeChangeListener(ISolrGUIServerListViewer viewer) {
changeListeners.remove(viewer);
}
/**
* @param viewer
*/
public void addChangeListener(ISolrGUIServerListViewer viewer) {
changeListeners.add(viewer);
}
} |
import de.haw.rnp.messageticker.model.RandomGenerator;
import org.junit.Test;
import java.util.ArrayList;
import static org.junit.Assert.*;
/**
* Tests the RandomGenerator.
*/
public class RandomGeneratorTest {
@Test
public void testGetInstance() throws Exception {
assertEquals(RandomGenerator.getInstance(), RandomGenerator.getInstance());
}
@Test
public void testGenerateRandomMessageType() throws Exception {
ArrayList<String> possibilities = new ArrayList<>();
possibilities.add("INFO");
possibilities.add("WARN");
possibilities.add("CORR");
String testMessageType = RandomGenerator.getInstance().generateRandomMessageType();
assertTrue(possibilities.contains(testMessageType));
}
@Test
public void testGenerateRandomSleepTime() throws Exception {
long r1 = RandomGenerator.getInstance().generateRandomSleepTime();
long r2 = RandomGenerator.getInstance().generateRandomSleepTime();
assertEquals(true, (r1 >= 1000L && r1 <= 5000L));
assertEquals(true, (r2 >= 1000L && r2 <= 5000L));
assertFalse(r1 == r2);
}
@Test
public void testGenerateRandomMessage() throws Exception {
String r1 = RandomGenerator.getInstance().generateRandomMessage();
String r2 = RandomGenerator.getInstance().generateRandomMessage();
String pattern = "[0-9a-zA-Z]+";
assertTrue(r1.matches(pattern));
assertTrue(r2.matches(pattern));
assertFalse(r1.equals(r2));
}
} |
package net.md_5.bungee;
import com.google.common.io.ByteStreams;
import net.md_5.bungee.log.BungeeLogger;
import net.md_5.bungee.reconnect.SQLReconnectHandler;
import net.md_5.bungee.scheduler.BungeeScheduler;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.ning.http.client.AsyncHttpClient;
import com.ning.http.client.AsyncHttpClientConfig;
import com.ning.http.client.providers.netty.NettyAsyncHttpProvider;
import com.ning.http.client.providers.netty.NettyAsyncHttpProviderConfig;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.Channel;
import io.netty.channel.ChannelException;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.MultithreadEventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.nio.NioServerSocketChannel;
import net.md_5.bungee.config.Configuration;
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import java.net.InetSocketAddress;
import java.util.Calendar;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Map;
import java.util.MissingResourceException;
import java.util.ResourceBundle;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.logging.Level;
import java.util.logging.Logger;
import jline.UnsupportedTerminal;
import jline.console.ConsoleReader;
import jline.internal.Log;
import lombok.Getter;
import lombok.Setter;
import lombok.Synchronized;
import net.md_5.bungee.api.ChatColor;
import net.md_5.bungee.api.CommandSender;
import net.md_5.bungee.api.ProxyServer;
import net.md_5.bungee.api.ReconnectHandler;
import net.md_5.bungee.api.config.ConfigurationAdapter;
import net.md_5.bungee.api.config.ListenerInfo;
import net.md_5.bungee.api.config.ServerInfo;
import net.md_5.bungee.api.connection.ProxiedPlayer;
import net.md_5.bungee.api.plugin.Plugin;
import net.md_5.bungee.api.plugin.PluginManager;
import net.md_5.bungee.api.scheduler.TaskScheduler;
import net.md_5.bungee.api.tab.CustomTabList;
import net.md_5.bungee.command.*;
import net.md_5.bungee.config.YamlConfig;
import net.md_5.bungee.log.LoggingOutputStream;
import net.md_5.bungee.netty.PipelineUtils;
import net.md_5.bungee.protocol.packet.DefinedPacket;
import net.md_5.bungee.protocol.packet.Packet3Chat;
import net.md_5.bungee.protocol.packet.PacketFAPluginMessage;
import net.md_5.bungee.protocol.Vanilla;
import net.md_5.bungee.scheduler.BungeeThreadPool;
import net.md_5.bungee.tab.Custom;
import net.md_5.bungee.util.CaseInsensitiveMap;
import org.fusesource.jansi.AnsiConsole;
/**
* Main BungeeCord proxy class.
*/
public class BungeeCord extends ProxyServer
{
/**
* Current operation state.
*/
public volatile boolean isRunning;
/**
* Configuration.
*/
public final Configuration config = new Configuration();
/**
* Localization bundle.
*/
public final ResourceBundle bundle = ResourceBundle.getBundle( "messages_en" );
/**
* Thread pools.
*/
public final ScheduledThreadPoolExecutor executors = new BungeeThreadPool( new ThreadFactoryBuilder().setNameFormat( "Bungee Pool Thread #%1$d" ).build() );
public final MultithreadEventLoopGroup eventLoops = new NioEventLoopGroup( Runtime.getRuntime().availableProcessors(), new ThreadFactoryBuilder().setNameFormat( "Netty IO Thread #%1$d" ).build() );
/**
* locations.yml save thread.
*/
private final Timer saveThread = new Timer( "Reconnect Saver" );
private final Timer metricsThread = new Timer( "Metrics Thread" );
/**
* Server socket listener.
*/
private Collection<Channel> listeners = new HashSet<>();
/**
* Fully qualified connections.
*/
private final Map<String, UserConnection> connections = new CaseInsensitiveMap<>();
private final ReadWriteLock connectionLock = new ReentrantReadWriteLock();
/**
* Plugin manager.
*/
@Getter
public final PluginManager pluginManager = new PluginManager( this );
@Getter
@Setter
private ReconnectHandler reconnectHandler;
@Getter
@Setter
private ConfigurationAdapter configurationAdapter = new YamlConfig();
private final Collection<String> pluginChannels = new HashSet<>();
@Getter
private final File pluginsFolder = new File( "plugins" );
@Getter
private final TaskScheduler scheduler = new BungeeScheduler();
@Getter
private final AsyncHttpClient httpClient = new AsyncHttpClient(
new NettyAsyncHttpProvider(
new AsyncHttpClientConfig.Builder().setAsyncHttpClientProviderConfig(
new NettyAsyncHttpProviderConfig().addProperty( NettyAsyncHttpProviderConfig.BOSS_EXECUTOR_SERVICE, executors ) ).setExecutorService( executors ).build() ) );
@Getter
private ConsoleReader consoleReader;
@Getter
private final Logger logger;
{
// TODO: Proper fallback when we interface the manager
getPluginManager().registerCommand( null, new CommandReload() );
getPluginManager().registerCommand( null, new CommandEnd() );
getPluginManager().registerCommand( null, new CommandList() );
getPluginManager().registerCommand( null, new CommandServer() );
getPluginManager().registerCommand( null, new CommandIP() );
getPluginManager().registerCommand( null, new CommandAlert() );
getPluginManager().registerCommand( null, new CommandBungee() );
getPluginManager().registerCommand( null, new CommandPerms() );
getPluginManager().registerCommand( null, new CommandSend() );
getPluginManager().registerCommand( null, new CommandFind() );
registerChannel( "BungeeCord" );
}
public static BungeeCord getInstance()
{
return (BungeeCord) ProxyServer.getInstance();
}
public BungeeCord() throws IOException
{
Log.setOutput( new PrintStream( ByteStreams.nullOutputStream() ) ); // TODO: Bug JLine
AnsiConsole.systemInstall();
consoleReader = new ConsoleReader();
logger = new BungeeLogger( this );
System.setErr( new PrintStream( new LoggingOutputStream( logger, Level.SEVERE ), true ) );
System.setOut( new PrintStream( new LoggingOutputStream( logger, Level.INFO ), true ) );
if ( consoleReader.getTerminal() instanceof UnsupportedTerminal )
{
logger.info( "Unable to initialize fancy terminal. To fix this on Windows, install the correct Microsoft Visual C++ 2008 Runtime" );
logger.info( "NOTE: This error is non crucial, and BungeeCord will still function correctly! Do not bug the author about it unless you are still unable to get it working" );
}
}
/**
* Starts a new instance of BungeeCord.
*
* @param args command line arguments, currently none are used
* @throws Exception when the server cannot be started
*/
public static void main(String[] args) throws Exception
{
Calendar deadline = Calendar.getInstance();
deadline.set( 2013, 7, 1 ); // year, month, date
if ( Calendar.getInstance().after( deadline ) )
{
System.err.println( "*** Warning, this build is outdated ***" );
System.err.println( "*** Please download a new build from http:
System.err.println( "*** You will get NO support regarding this build ***" );
System.err.println( "*** Server will start in 15 seconds ***" );
Thread.sleep( TimeUnit.SECONDS.toMillis( 15 ) );
}
BungeeCord bungee = new BungeeCord();
ProxyServer.setInstance( bungee );
bungee.getLogger().info( "Enabled BungeeCord version " + bungee.getVersion() );
bungee.start();
while ( bungee.isRunning )
{
String line = bungee.getConsoleReader().readLine( ">" );
if ( line != null )
{
if ( !bungee.getPluginManager().dispatchCommand( ConsoleCommandSender.getInstance(), line ) )
{
bungee.getConsole().sendMessage( ChatColor.RED + "Command not found" );
}
}
}
}
/**
* Start this proxy instance by loading the configuration, plugins and
* starting the connect thread.
*
* @throws Exception
*/
@Override
public void start() throws Exception
{
pluginsFolder.mkdir();
pluginManager.detectPlugins( pluginsFolder );
config.load();
if ( reconnectHandler == null )
{
reconnectHandler = new SQLReconnectHandler();
}
isRunning = true;
pluginManager.loadAndEnablePlugins();
startListeners();
saveThread.scheduleAtFixedRate( new TimerTask()
{
@Override
public void run()
{
getReconnectHandler().save();
}
}, 0, TimeUnit.MINUTES.toMillis( 5 ) );
metricsThread.scheduleAtFixedRate( new Metrics(), 0, TimeUnit.MINUTES.toMillis( Metrics.PING_INTERVAL ) );
}
public void startListeners()
{
for ( final ListenerInfo info : config.getListeners() )
{
ChannelFutureListener listener = new ChannelFutureListener()
{
@Override
public void operationComplete(ChannelFuture future) throws Exception
{
if ( future.isSuccess() )
{
listeners.add( future.channel() );
getLogger().info( "Listening on " + info.getHost() );
} else
{
getLogger().log( Level.WARNING, "Could not bind to host " + info.getHost(), future.cause() );
}
}
};
new ServerBootstrap()
.channel( NioServerSocketChannel.class )
.childAttr( PipelineUtils.LISTENER, info )
.childHandler( PipelineUtils.SERVER_CHILD )
.group( eventLoops )
.localAddress( info.getHost() )
.bind().addListener( listener );
}
}
public void stopListeners()
{
for ( Channel listener : listeners )
{
getLogger().log( Level.INFO, "Closing listener {0}", listener );
try
{
listener.close().syncUninterruptibly();
} catch ( ChannelException ex )
{
getLogger().severe( "Could not close listen thread" );
}
}
listeners.clear();
}
@Override
public void stop()
{
new Thread( "Shutdown Thread" )
{
@Override
public void run()
{
BungeeCord.this.isRunning = false;
httpClient.close();
executors.shutdown();
stopListeners();
getLogger().info( "Closing pending connections" );
connectionLock.readLock().lock();
try
{
getLogger().info( "Disconnecting " + connections.size() + " connections" );
for ( UserConnection user : connections.values() )
{
user.disconnect( getTranslation( "restart" ) );
}
} finally
{
connectionLock.readLock().unlock();
}
getLogger().info( "Closing IO threads" );
eventLoops.shutdown();
try
{
eventLoops.awaitTermination( Long.MAX_VALUE, TimeUnit.NANOSECONDS );
} catch ( InterruptedException ex )
{
}
getLogger().info( "Saving reconnect locations" );
reconnectHandler.save();
reconnectHandler.close();
saveThread.cancel();
metricsThread.cancel();
// TODO: Fix this shit
getLogger().info( "Disabling plugins" );
for ( Plugin plugin : pluginManager.getPlugins() )
{
plugin.onDisable();
getScheduler().cancel( plugin );
}
getLogger().info( "Thankyou and goodbye" );
System.exit( 0 );
}
}.start();
}
/**
* Broadcasts a packet to all clients that is connected to this instance.
*
* @param packet the packet to send
*/
public void broadcast(DefinedPacket packet)
{
connectionLock.readLock().lock();
try
{
for ( UserConnection con : connections.values() )
{
con.unsafe().sendPacket( packet );
}
} finally
{
connectionLock.readLock().unlock();
}
}
@Override
public String getName()
{
return "BungeeCord";
}
@Override
public String getVersion()
{
return ( BungeeCord.class.getPackage().getImplementationVersion() == null ) ? "unknown" : BungeeCord.class.getPackage().getImplementationVersion();
}
@Override
public String getTranslation(String name)
{
String translation = "<translation '" + name + "' missing>";
try
{
translation = bundle.getString( name );
} catch ( MissingResourceException ex )
{
}
return translation;
}
@Override
@SuppressWarnings("unchecked")
public Collection<ProxiedPlayer> getPlayers()
{
connectionLock.readLock().lock();
try
{
return (Collection) new HashSet<>( connections.values() );
} finally
{
connectionLock.readLock().unlock();
}
}
@Override
public int getOnlineCount()
{
return connections.size();
}
@Override
public ProxiedPlayer getPlayer(String name)
{
connectionLock.readLock().lock();
try
{
return connections.get( name );
} finally
{
connectionLock.readLock().unlock();
}
}
@Override
public Map<String, ServerInfo> getServers()
{
return config.getServers();
}
@Override
public ServerInfo getServerInfo(String name)
{
return getServers().get( name );
}
@Override
@Synchronized("pluginChannels")
public void registerChannel(String channel)
{
pluginChannels.add( channel );
}
@Override
@Synchronized("pluginChannels")
public void unregisterChannel(String channel)
{
pluginChannels.remove( channel );
}
@Override
@Synchronized("pluginChannels")
public Collection<String> getChannels()
{
return Collections.unmodifiableCollection( pluginChannels );
}
public PacketFAPluginMessage registerChannels()
{
return new PacketFAPluginMessage( "REGISTER", Util.format( pluginChannels, "\00" ).getBytes() );
}
@Override
public byte getProtocolVersion()
{
return Vanilla.PROTOCOL_VERSION;
}
@Override
public String getGameVersion()
{
return Vanilla.GAME_VERSION;
}
@Override
public ServerInfo constructServerInfo(String name, InetSocketAddress address, boolean restricted)
{
return new BungeeServerInfo( name, address, restricted );
}
@Override
public CommandSender getConsole()
{
return ConsoleCommandSender.getInstance();
}
@Override
public void broadcast(String message)
{
getConsole().sendMessage( message );
broadcast( new Packet3Chat( message ) );
}
public void addConnection(UserConnection con)
{
connectionLock.writeLock().lock();
try
{
connections.put( con.getName(), con );
} finally
{
connectionLock.writeLock().unlock();
}
}
public void removeConnection(UserConnection con)
{
connectionLock.writeLock().lock();
try
{
connections.remove( con.getName() );
} finally
{
connectionLock.writeLock().unlock();
}
}
@Override
public CustomTabList customTabList(ProxiedPlayer player)
{
return new Custom( player );
}
} |
import java.util.HashMap;
import java.util.Map;
import com.amazonaws.AmazonClientException;
import com.amazonaws.AmazonServiceException;
import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.profile.ProfileCredentialsProvider;
import com.amazonaws.regions.Region;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClient;
import com.amazonaws.services.dynamodbv2.document.DynamoDB;
import com.amazonaws.services.dynamodbv2.document.Table;
import com.amazonaws.services.dynamodbv2.model.AttributeDefinition;
import com.amazonaws.services.dynamodbv2.model.AttributeValue;
import com.amazonaws.services.dynamodbv2.model.ComparisonOperator;
import com.amazonaws.services.dynamodbv2.model.Condition;
import com.amazonaws.services.dynamodbv2.model.CreateTableRequest;
import com.amazonaws.services.dynamodbv2.model.DescribeTableRequest;
import com.amazonaws.services.dynamodbv2.model.KeySchemaElement;
import com.amazonaws.services.dynamodbv2.model.KeyType;
import com.amazonaws.services.dynamodbv2.model.ProvisionedThroughput;
import com.amazonaws.services.dynamodbv2.model.PutItemRequest;
import com.amazonaws.services.dynamodbv2.model.PutItemResult;
import com.amazonaws.services.dynamodbv2.model.ScalarAttributeType;
import com.amazonaws.services.dynamodbv2.model.ScanRequest;
import com.amazonaws.services.dynamodbv2.model.ScanResult;
import com.amazonaws.services.dynamodbv2.model.TableDescription;
import com.amazonaws.services.dynamodbv2.util.TableUtils;
public class Register {
private static Map<String, AttributeValue> addUser(String username, String password, String email, String hash) {
Map<String, AttributeValue> user = new HashMap<String, AttributeValue>();
user.put("Username", new AttributeValue(username));
user.put("Password", new AttributeValue(password));
user.put("Email", new AttributeValue(email));
user.put("Hash", new AttributeValue(hash));
return user;
}
public static void main(String[] args) throws Exception {
DynamoDB dynamoDB = new DynamoDB(new AmazonDynamoDBClient(
new ProfileCredentialsProvider()));
Table table = dynamoDB.getTable("UserInfo");
}
} |
package integration;
import ch.obermuhlner.math.big.BigFloat;
import com.cluttered.cryptocurrency.ann.Layer;
import com.cluttered.cryptocurrency.ann.NeuralNetwork;
import com.cluttered.cryptocurrency.ann.neuron.Neuron;
import org.junit.Test;
import java.math.MathContext;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import static com.cluttered.cryptocurrency.ann.MathConstants.ONE;
import static com.cluttered.cryptocurrency.ann.MathConstants.ZERO;
import static java.math.RoundingMode.HALF_UP;
import static org.assertj.core.api.Assertions.assertThat;
/**
* @author cluttered.code@gmail.com
*/
public class XorTest {
private static final MathContext MATH_CONTEXT_100_HALF_UP = new MathContext(100, HALF_UP);
private static final BigFloat.Context BIG_FLOAT_CONTEXT_100_HALF_UP = BigFloat.context(MATH_CONTEXT_100_HALF_UP);
private static BigFloat bigFloat(final String value) {
return BIG_FLOAT_CONTEXT_100_HALF_UP.valueOf(value);
}
@Test
public void xorTest() {
// Hidden Layer
final Neuron hiddenNeuron1 = Neuron.builder()
.sigmoid()
.weights(bigFloat("60"), bigFloat("60"))
.bias(bigFloat("-90"))
.build();
final Neuron hiddenNeuron2 = Neuron.builder()
.sigmoid()
.weights(bigFloat("80"), bigFloat("80"))
.bias(bigFloat("-40"))
.build();
final Layer hiddenLayer = new Layer(Arrays.asList(hiddenNeuron1, hiddenNeuron2));
// Output Layer
final Neuron outputNeuron = Neuron.builder()
.sigmoid()
.weights(bigFloat("-60"), bigFloat("60"))
.bias(bigFloat("-30"))
.build();
final Layer outputLayer = new Layer(Collections.singletonList(outputNeuron));
final NeuralNetwork neuralNetwork = new NeuralNetwork(2L, Arrays.asList(hiddenLayer, outputLayer));
// [<INPUTS>] -> [<OUTPUTS>]
// [0, 0] -> [0]
final List<BigFloat> inputs1 = Arrays.asList(ZERO, ZERO);
final BigFloat result1 = neuralNetwork.fire(inputs1).get(0);
assertThat(result1.isLessThan(bigFloat("1E-10"))).isTrue();
// [1, 0] -> [1]
final List<BigFloat> inputs2 = Arrays.asList(ONE, ZERO);
final BigFloat result2 = neuralNetwork.fire(inputs2).get(0);
assertThat(result2.isGreaterThan(bigFloat("0.9999999999")));
// [0, 1] -> [1]
final List<BigFloat> inputs3 = Arrays.asList(ZERO, ONE);
final BigFloat result3 = neuralNetwork.fire(inputs3).get(0);
assertThat(result3.isGreaterThan(bigFloat("0.9999999999")));
// [1, 1] -> [0]
final List<BigFloat> inputs4 = Arrays.asList(ONE, ONE);
final BigFloat result4 = neuralNetwork.fire(inputs4).get(0);
assertThat(result4.isLessThan(bigFloat("1E-10"))).isTrue();
}
} |
package net.minecraftforge.oredict;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import net.minecraft.src.*;
import net.minecraftforge.common.MinecraftForge;
import net.minecraftforge.event.Event;
/**
 * Static registry mapping ore names (e.g. "oreIron") to integer IDs and to
 * the lists of ItemStacks registered under each name. Also rewrites vanilla
 * crafting recipes so that registered ore items become interchangeable.
 * All state is static; registration fires an OreRegisterEvent on the Forge
 * event bus.
 */
public class OreDictionary
{
    private static boolean hasInit = false;
    // Next ore ID to hand out; IDs are assigned on first lookup of a name.
    private static int maxID = 0;
    private static HashMap<String, Integer> oreIDs = new HashMap<String, Integer>();
    private static HashMap<Integer, ArrayList<ItemStack>> oreStacks = new HashMap<Integer, ArrayList<ItemStack>>();

    static {
        initVanillaEntries();
    }

    public static void initVanillaEntries()
    {
        // Vanilla registrations happen only once; the recipe-replacement pass
        // below runs on every call so late-added recipes also get rewritten.
        if (!hasInit)
        {
            // damage -1 acts as a wildcard matching any metadata value
            registerOre("logWood",     new ItemStack(Block.wood, 1, -1));
            registerOre("plankWood",   new ItemStack(Block.planks, 1, -1));
            registerOre("slabWood",    new ItemStack(Block.woodSingleSlab, 1, -1));
            registerOre("stairWood",   Block.stairCompactPlanks);
            registerOre("stairWood",   Block.stairsWoodBirch);
            registerOre("stairWood",   Block.stairsWoodJungle);
            registerOre("stairWood",   Block.stairsWoodSpruce);
            registerOre("stickWood",   Item.stick);
            registerOre("treeSapling", new ItemStack(Block.sapling, 1, -1));
            registerOre("treeLeaves",  new ItemStack(Block.leaves, 1, -1));
        }

        // Build our list of items to replace with ore tags
        Map<ItemStack, String> replacements = new HashMap<ItemStack, String>();
        replacements.put(new ItemStack(Block.planks, 1, -1), "plankWood");
        replacements.put(new ItemStack(Item.stick), "stickWood");

        // Register dyes
        String[] dyes =
        {
            "dyeBlack",
            "dyeRed",
            "dyeGreen",
            "dyeBrown",
            "dyeBlue",
            "dyePurple",
            "dyeCyan",
            "dyeLightGray",
            "dyeGray",
            "dyePink",
            "dyeLime",
            "dyeYellow",
            "dyeLightBlue",
            "dyeMagenta",
            "dyeOrange",
            "dyeWhite"
        };

        for(int i = 0; i < 16; i++)
        {
            ItemStack dye = new ItemStack(Item.dyePowder, 1, i);
            if (!hasInit)
            {
                registerOre(dyes[i], dye);
            }
            replacements.put(dye, dyes[i]);
        }
        hasInit = true;

        ItemStack[] replaceStacks = replacements.keySet().toArray(new ItemStack[0]);

        // Ignore recipes for the following items
        ItemStack[] exclusions = new ItemStack[]
        {
            new ItemStack(Block.blockLapis),
            new ItemStack(Item.cookie),
        };

        // Raw type: the vanilla CraftingManager API exposes a raw List.
        List recipes = CraftingManager.getInstance().getRecipeList();
        List<IRecipe> recipesToRemove = new ArrayList<IRecipe>();
        List<IRecipe> recipesToAdd = new ArrayList<IRecipe>();

        // Search vanilla recipes for recipes to replace
        for(Object obj : recipes)
        {
            if(obj instanceof ShapedRecipes)
            {
                ShapedRecipes recipe = (ShapedRecipes)obj;
                ItemStack output = recipe.getRecipeOutput();
                if (output != null && containsMatch(false, exclusions, output))
                {
                    continue;
                }

                if(containsMatch(true, recipe.recipeItems, replaceStacks))
                {
                    recipesToRemove.add(recipe);
                    recipesToAdd.add(new ShapedOreRecipe(recipe, replacements));
                }
            }
            else if(obj instanceof ShapelessRecipes)
            {
                ShapelessRecipes recipe = (ShapelessRecipes)obj;
                ItemStack output = recipe.getRecipeOutput();
                if (output != null && containsMatch(false, exclusions, output))
                {
                    continue;
                }

                if(containsMatch(true, (ItemStack[])recipe.recipeItems.toArray(new ItemStack[0]), replaceStacks))
                {
                    recipesToRemove.add((IRecipe)obj);
                    IRecipe newRecipe = new ShapelessOreRecipe(recipe, replacements);
                    recipesToAdd.add(newRecipe);
                }
            }
        }

        recipes.removeAll(recipesToRemove);
        recipes.addAll(recipesToAdd);
        if (recipesToRemove.size() > 0)
        {
            System.out.println("Replaced " + recipesToRemove.size() + " ore recipies");
        }
    }

    /**
     * Gets the integer ID for the specified ore name.
     * If the name does not have a ID it assigns it a new one.
     *
     * @param name The unique name for this ore 'oreIron', 'ingotIron', etc..
     * @return A number representing the ID for this ore type
     */
    public static int getOreID(String name)
    {
        Integer val = oreIDs.get(name);
        if (val == null)
        {
            val = maxID++;
            oreIDs.put(name, val);
            oreStacks.put(val, new ArrayList<ItemStack>());
        }
        return val;
    }

    /**
     * Reverse of getOreID, will not create new entries.
     *
     * @param id The ID to translate to a string
     * @return The String name, or "Unknown" if not found.
     */
    public static String getOreName(int id)
    {
        // Linear scan over the name->ID map; acceptable for the small number
        // of ore names in practice.
        for (Map.Entry<String, Integer> entry : oreIDs.entrySet())
        {
            if (id == entry.getValue())
            {
                return entry.getKey();
            }
        }
        return "Unknown";
    }

    /**
     * Gets the integer ID for the specified item stack.
     * If the item stack is not linked to any ore, this will return -1 and no new entry will be created.
     *
     * @param itemStack The item stack of the ore.
     * @return A number representing the ID for this ore type, or -1 if couldn't find it.
     */
    public static int getOreID(ItemStack itemStack)
    {
        if( itemStack == null )
            return -1;

        // Linear scan over every registered stack; a target damage of -1
        // matches any damage value (wildcard).
        for(int oreID : oreStacks.keySet())
        {
            for(ItemStack target : oreStacks.get(oreID))
            {
                if(itemStack.itemID == target.itemID && (target.getItemDamage() == -1 || itemStack.getItemDamage() == target.getItemDamage()))
                    return oreID;
            }
        }
        return -1; // didn't find it.
    }

    /**
     * Retrieves the ArrayList of items that are registered to this ore type.
     * Creates the list as empty if it did not exist.
     *
     * @param name The ore name, directly calls getOreID
     * @return An arrayList containing ItemStacks registered for this ore
     */
    public static ArrayList<ItemStack> getOres(String name)
    {
        return getOres(getOreID(name));
    }

    /**
     * Retrieves a list of all unique ore names that are already registered.
     *
     * @return All unique ore names that are currently registered.
     */
    public static String[] getOreNames()
    {
        return oreIDs.keySet().toArray(new String[0]);
    }

    /**
     * Retrieves the ArrayList of items that are registered to this ore type.
     * Creates the list as empty if it did not exist.
     *
     * NOTE: returns the live internal list, so callers can (and some mods do)
     * observe later registrations through it.
     *
     * @param id The ore ID, see getOreID
     * @return An arrayList containing ItemStacks registered for this ore
     */
    public static ArrayList<ItemStack> getOres(Integer id)
    {
        ArrayList<ItemStack> val = oreStacks.get(id);
        if (val == null)
        {
            val = new ArrayList<ItemStack>();
            oreStacks.put(id, val);
        }
        return val;
    }

    // Returns true if any input stack matches any target stack under
    // itemMatches with the given strictness.
    private static boolean containsMatch(boolean strict, ItemStack[] inputs, ItemStack... targets)
    {
        for (ItemStack input : inputs)
        {
            for (ItemStack target : targets)
            {
                if (itemMatches(target, input, strict))
                {
                    return true;
                }
            }
        }
        return false;
    }

    /**
     * Compares two stacks by item ID and damage. In non-strict mode a target
     * damage of -1 acts as a wildcard. Two nulls compare equal; exactly one
     * null does not.
     */
    public static boolean itemMatches(ItemStack target, ItemStack input, boolean strict)
    {
        if (input == null && target != null || input != null && target == null)
        {
            return false;
        }
        return (target.itemID == input.itemID && ((target.getItemDamage() == -1 && !strict) || target.getItemDamage() == input.getItemDamage()));
    }

    //Convenience functions that make for cleaner code mod side. They all drill down to registerOre(String, int, ItemStack)
    public static void registerOre(String name, Item ore){ registerOre(name, new ItemStack(ore)); }
    public static void registerOre(String name, Block ore){ registerOre(name, new ItemStack(ore)); }
    public static void registerOre(String name, ItemStack ore){ registerOre(name, getOreID(name), ore); }
    public static void registerOre(int id, Item ore){ registerOre(id, new ItemStack(ore)); }
    public static void registerOre(int id, Block ore){ registerOre(id, new ItemStack(ore)); }
    public static void registerOre(int id, ItemStack ore){ registerOre(getOreName(id), id, ore); }

    /**
     * Registers a ore item into the dictionary.
     * Raises the registerOre function in all registered handlers.
     *
     * @param name The name of the ore
     * @param id The ID of the ore
     * @param ore The ore's ItemStack
     */
    private static void registerOre(String name, int id, ItemStack ore)
    {
        ArrayList<ItemStack> ores = getOres(id);
        // Defensive copy so later mutation of the caller's stack cannot
        // corrupt the registry.
        ore = ore.copy();
        ores.add(ore);
        MinecraftForge.EVENT_BUS.post(new OreRegisterEvent(name, ore));
    }

    /**
     * Event fired on the Forge event bus for every ore registration.
     */
    public static class OreRegisterEvent extends Event
    {
        public final String Name;
        public final ItemStack Ore;

        public OreRegisterEvent(String name, ItemStack ore)
        {
            this.Name = name;
            this.Ore = ore;
        }
    }
}
package heufybot.modules;
import heufybot.utils.FileUtils;
import heufybot.utils.URLUtils;
import java.util.List;
import org.json.simple.parser.ParseException;
public class TimeDB extends Module
{
/**
 * Creates the time lookup module: usable by anyone, triggered by
 * "<prefix>time" optionally followed by a location argument.
 */
public TimeDB()
{
    this.authType = Module.AuthType.Anyone;
    // Matches "time" alone or "time <anything>" after the command prefix.
    this.trigger = "^" + commandPrefix + "(time)($| .*)";
}
/**
 * Handles a "time" command: resolves the requested location — tried in
 * order as a lat/lon pair, an IRC nick on the chatmap, then a free-form
 * place name — and replies with the local time there.
 *
 * @param source      channel or nick the reply is sent to
 * @param message     the raw command message
 * @param triggerUser nick of the user who triggered the command
 * @param params      tokenized command; params.get(0) is the command itself
 */
@Override
public void processEvent(String source, String message, String triggerUser, List<String> params)
{
    // The underlying time lookup requires a WorldWeatherOnline API key.
    if(FileUtils.readFile("data/worldweatheronlineapikey.txt").equals(""))
    {
        bot.getIRC().cmdPRIVMSG(source, "No WorldWeatherOnline API key found");
        return;
    }
    // Bare "time" command: fall back to the calling user's chatmap location.
    if (params.size() == 1)
    {
        if(URLUtils.grab("http://tsukiakariusagi.net/chatmaplookup.php?nick=" + triggerUser).equals(", "))
        {
            bot.getIRC().cmdPRIVMSG(source, "You are not registered on the chatmap.");
            return;
        }
        params.add(triggerUser);
    }
    // Drop the command token itself, leaving only the location arguments.
    params.remove(0);
    GeocodingInterface geo = new GeocodingInterface();

    // First try latitude and longitude. If these are not in fact lat/lon this will fail before any network stuff is done
    try
    {
        float latitude = Float.parseFloat(params.get(0));
        float longitude = Float.parseFloat(params.get(1));
        try
        {
            Geolocation location = geo.getGeolocationForLatLng(latitude, longitude);
            String time = getTimeFromGeolocation(location);
            String prefix = location.success ? "Location: " + location.locality : "City: " + latitude + "," + longitude;
            bot.getIRC().cmdPRIVMSG(source, String.format("%s | %s", prefix, time));
            return;
        }
        catch (ParseException e)
        {
            bot.getIRC().cmdPRIVMSG(source, "I don't think that's even a location in this multiverse...");
            return;
        }
    }
    catch (NumberFormatException e)
    {
        // Nothing to see here, just not latitude/longitude, continuing.
    }
    catch (IndexOutOfBoundsException e)
    {
        // Either this is fuzzing or invalid input. Either way we don't care, and should check the next two cases.
    }

    // Second: treat the argument as an IRC nick registered on the chatmap.
    try
    {
        Geolocation location = geo.getGeolocationForIRCUser(params.get(0));
        if (location != null)
        {
            // Local renamed from "weather" to "time": this module reports
            // time, and the old name was a copy-paste leftover.
            String time = getTimeFromGeolocation(location);
            bot.getIRC().cmdPRIVMSG(source, String.format("Location: %s | %s", location.locality, time));
            return;
        }
    }
    catch (ParseException e)
    {
        bot.getIRC().cmdPRIVMSG(source, "I don't think that's even a user in this multiverse...");
        return;
    }

    // Finally: treat the remainder of the message as a free-form place name.
    try
    {
        Geolocation location = geo.getGeolocationForPlace(message.substring(message.indexOf(' ') + 1));
        if (!location.success)
        {
            bot.getIRC().cmdPRIVMSG(source, "I don't think that's even a location in this multiverse...");
            return;
        }
        String time = getTimeFromGeolocation(location);
        bot.getIRC().cmdPRIVMSG(source, String.format("Location: %s | %s", location.locality, time));
        return;
    }
    catch (ParseException e)
    {
        bot.getIRC().cmdPRIVMSG(source, "I don't think that's even a location in this multiverse...");
        return;
    }
}
private String getTimeFromGeolocation(Geolocation location) throws ParseException
{
TimeInterface weatherInterface = new TimeInterface();
String weather = weatherInterface.getTime(location.latitude, location.longitude);
return weather;
}
@Override
public String getHelp(String message)
{
return "Commands: " + commandPrefix + "time (<place>/<latitude longitude>/<ircuser>) | Makes the bot get the current time at the location specified or at the location of the ircuser.";
}
@Override
public void onLoad()
{
FileUtils.touchFile("data/worldweatheronlineapikey.txt");
}
@Override
public void onUnload()
{
}
} |
package helper;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.util.Enumeration;
import java.util.Properties;
import javax.net.ssl.HttpsURLConnection;
public class Connector {
private static URLConnection urlConn = null;
private URL url = null;
private static final int HTTP = 0;
private static final int HTTPS = 1;
private static int protocol = -1;
private static int httpStatus = -1;
private static String redirectLocation = null;
private static Properties reqProp = new Properties();
private static InputStream inStream = null;
private Connector() {
}
public static String getRedirectLocation() {
return redirectLocation;
}
/**
* Initiate URLConnection as https connection if protocol in URL is https
*
* @param url
* @return HttpURLConnection
*/
private static HttpURLConnection getHttpConn(URL url) {
HttpURLConnection httpConn = null;
try {
httpConn = (HttpURLConnection) url.openConnection();
addProperties(httpConn);
httpStatus = httpConn.getResponseCode();
redirectLocation = httpConn.getHeaderField("Location");
inStream = httpConn.getInputStream();
play.Logger.debug("http Status Code: " + httpStatus + "Location Header: " + redirectLocation);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return httpConn;
}
/**
* Initiate URLConnection as https connection if protocol in URL is https
*
* @param url
* @return HttpsURLConnection
*/
private static HttpsURLConnection getHttpsConn(URL url) {
HttpsURLConnection httpsConn = null;
try {
httpsConn = (HttpsURLConnection) url.openConnection();
addProperties(httpsConn);
httpStatus = httpsConn.getResponseCode();
redirectLocation = httpsConn.getHeaderField("Location");
inStream = httpsConn.getInputStream();
play.Logger.debug("http Status Code: " + httpStatus + "Location Header: " + redirectLocation);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return httpsConn;
}
private static void addProperties(URLConnection connect) {
Enumeration<Object> propEnum = reqProp.keys();
while (propEnum.hasMoreElements()) {
String key = propEnum.nextElement().toString();
connect.setRequestProperty((String) key, (String) reqProp.get(key));
}
};
/**
* Generates and returns URL Instance from String
*
* @param urlString
* @return
*/
private static URL createUrl(String urlString) {
URL url = null;
try {
url = new URL(urlString);
} catch (MalformedURLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return url;
}
/**
* Checks if http Status determines redirect and if redirect location is
* associated with protocol change from http to https. If both is true,
* initiate new HttpsURLConnection
*
*/
private static void performProtocolChange() {
if ((299 < getStatusCode() && getStatusCode() < 400) && getRedirectLocation().startsWith("https")) {
play.Logger.debug("found https-protocol and redirect-location: " + getRedirectLocation());
urlConn = getHttpsConn(createUrl(getRedirectLocation()));
}
}
public URLConnection getConnection() {
return urlConn;
}
public static int getStatusCode() {
return httpStatus;
}
public InputStream getInputStream() {
return inStream;
}
public void setConnectorProperty(String key, String value) {
reqProp.put(key, value);
}
public void connect() {
switch (protocol) {
case 0:
urlConn = getHttpConn(url);
break;
case 1:
urlConn = getHttpsConn(url);
break;
}
performProtocolChange();
}
public static class Factory {
private static Connector conn = new Connector();
/**
* Provide Connector Instance that automatically returns the appropriate
* URLConnector. This is either HttpURLConnector or HttpsURLConnector.
*
* @param url
* @return
*/
public static Connector getInstance(URL url) {
if (url.getProtocol().equals("http")) {
protocol = HTTP;
} else {
protocol = HTTPS;
}
// performProtocolChange(conn);
conn.url = url;
return conn;
}
}
} |
package ethanjones.cubes.core.platform;
import ethanjones.cubes.core.logging.Log;
import ethanjones.cubes.core.system.Debug;
import ethanjones.cubes.core.system.Executor;
import ethanjones.cubes.graphics.menu.Menu;
import ethanjones.cubes.graphics.menus.MainMenu;
import ethanjones.cubes.graphics.menus.RunnableMenu;
import ethanjones.cubes.input.InputChain;
import ethanjones.cubes.side.client.CubesClient;
import ethanjones.cubes.side.common.Side;
import ethanjones.cubes.side.server.CubesServer;
import ethanjones.cubes.world.thread.WorldTasks;
import com.badlogic.gdx.Gdx;
public class Adapter {
private static final int JOIN_TIMEOUT = 60000;
private static AdapterInterface adapter;
public static void setClient(CubesClient cubesClient) throws UnsupportedOperationException {
adapter.setClient(cubesClient);
}
public static void setServer(CubesServer cubesServer) throws UnsupportedOperationException {
adapter.setServer(cubesServer);
}
public static Menu getMenu() {
return adapter.getMenu();
}
public static void setMenu(Menu menu) throws UnsupportedOperationException {
adapter.setMenu(menu);
}
public static void dispose() {
Log.debug("Disposing adapter");
Gdx.app.postRunnable(new Runnable() {
@Override
public void run() {
Gdx.app.exit();
}
});
final Menu menu = adapter.getMenu();
try {
if (menu != null) {
menu.save();
InputChain.hideMenu(menu);
}
} catch (Exception e) {
Debug.crash(e);
}
stop();
stopBackground();
}
private static void stop() {
final CubesClient cubesClient = adapter.getClient();
final CubesServer cubesServer = adapter.getServer();
final Thread currentThread = Thread.currentThread();
if (!isDedicatedServer()) {
if (Adapter.getInterface().getThread() == currentThread) {
stopFromClientThread(cubesClient, cubesServer);
} else if (cubesServer != null && cubesServer.getThread() == currentThread) {
stopFromServerThread(cubesClient, cubesServer);
} else {
stopFromOtherThread(cubesClient, cubesServer);
}
throw new StopLoopException();
} else {
if (Adapter.getInterface().getThread() == currentThread) {
stopFromServerThread(cubesClient, cubesServer);
} else {
stopFromOtherThread(cubesClient, cubesServer);
}
}
}
public static boolean isDedicatedServer() {
return adapter.getSide() == Side.Server;
}
public static AdapterInterface getInterface() {
return adapter;
}
public static void setInterface(AdapterInterface adapterInterface) {
if (adapter == null && adapterInterface != null) adapter = adapterInterface;
}
private static void stopFromClientThread(final CubesClient cubesClient, final CubesServer cubesServer) {
try {
if (cubesServer != null) {
cubesServer.dispose();
}
if (cubesClient != null) {
cubesClient.dispose();
}
if (cubesServer != null) {
try {
cubesServer.getThread().join(JOIN_TIMEOUT);
} catch (InterruptedException e) {
}
if (cubesServer.getThread().isAlive()) {
failedToStopThread(cubesServer.getThread());
}
}
} catch (Exception e) {
Debug.crash(e);
}
}
private static void stopFromServerThread(final CubesClient cubesClient, final CubesServer cubesServer) {
try {
if (cubesClient != null) {
cubesClient.dispose();
}
if (cubesServer != null) {
cubesServer.dispose();
}
// if (cubesClient != null) {
// try {
// cubesClient.getThread().join(JOIN_TIMEOUT);
// } catch (InterruptedException e) {
// if (cubesClient.getThread().isAlive()) {
// failedToStopThread(cubesClient.getThread());
} catch (Exception e) {
Debug.crash(e);
}
}
private static void stopFromOtherThread(final CubesClient cubesClient, final CubesServer cubesServer) {
try {
if (cubesClient != null) {
cubesClient.dispose();
}
if (cubesServer != null) {
cubesServer.dispose();
}
// if (cubesClient != null) {
// try {
// cubesClient.getThread().join(JOIN_TIMEOUT);
// } catch (InterruptedException e) {
// if (cubesClient.getThread().isAlive()) {
// failedToStopThread(cubesClient.getThread());
// if (cubesServer != null) {
// try {
// cubesServer.getThread().join(JOIN_TIMEOUT);
// } catch (InterruptedException e) {
// if (cubesServer.getThread().isAlive()) {
// failedToStopThread(cubesServer.getThread());
} catch (Exception e) {
Debug.crash(e);
}
}
private static void stopBackground() {
try {
Executor.stop();
WorldTasks.dispose();
} catch (Exception ignored) {
}
}
private static synchronized void failedToStopThread(Thread thread) {
StackTraceElement[] stackTrace = thread.getStackTrace();
Log.error("Failed to stop " + thread.getName() + " thread");
for (StackTraceElement stackTraceElement : stackTrace) {
Log.error(" " + stackTraceElement.toString());
}
}
public static void gotoMainMenu() {
gotoMenu(new MainMenu());
}
/**
* Will exit if server
*/
public static void gotoMenu(final Menu menu) {
if (isDedicatedServer()) quit();
if (menu == null || adapter.getMenu() instanceof RunnableMenu || menu.getClass().isInstance(adapter.getMenu())) return;
adapter.setMenu(new RunnableMenu(new Runnable() {
@Override
public void run() {
if (adapter.getClient() == null && adapter.getServer() == null) {
adapter.setMenu(menu);
}
}
}));
stop();
}
public static void quit() {
System.exit(0); // Force android to exit vm
//Gdx.app.exit();
}
} |
package org.bouncycastle.math;
import java.math.BigInteger;
import java.security.SecureRandom;
import org.bouncycastle.crypto.Digest;
import org.bouncycastle.util.Arrays;
import org.bouncycastle.util.BigIntegers;
/**
 * Utility methods for generating primes and testing for primality.
 *
 * Implements the prime generation and primality testing routines from
 * FIPS 186-4 (Shawe-Taylor provable primes, Miller-Rabin probable primes).
 */
public abstract class Primes
{
    // Small constants used throughout; cached to avoid repeated valueOf calls.
    private static final BigInteger ONE = BigInteger.valueOf(1);
    private static final BigInteger TWO = BigInteger.valueOf(2);
    private static final BigInteger THREE = BigInteger.valueOf(3);
    /**
     * Used to return the output from the
     * {@linkplain Primes#enhancedMRProbablePrimeTest(BigInteger, SecureRandom, int) Enhanced
     * Miller-Rabin Probabilistic Primality Test}
     */
    public static class MROutput
    {
        // The candidate passed every iteration: not proven composite.
        private static MROutput probablyPrime()
        {
            return new MROutput(false, null);
        }
        // The candidate is composite and a non-trivial factor was found.
        private static MROutput provablyCompositeWithFactor(BigInteger factor)
        {
            return new MROutput(true, factor);
        }
        // The candidate is composite; no factor was found, so it is not a prime power.
        private static MROutput provablyCompositeNotPrimePower()
        {
            return new MROutput(true, null);
        }
        private boolean provablyComposite;
        private BigInteger factor;
        private MROutput(boolean provablyComposite, BigInteger factor)
        {
            this.provablyComposite = provablyComposite;
            this.factor = factor;
        }
        /** Returns the discovered non-trivial factor, or null if none was found. */
        public BigInteger getFactor()
        {
            return factor;
        }
        /** Returns true if the test proved the candidate composite. */
        public boolean isProvablyComposite()
        {
            return provablyComposite;
        }
        /** True when the candidate is composite but no factor was found (thus not a prime power). */
        public boolean isNotPrimePower()
        {
            return provablyComposite && factor == null;
        }
    }
    /**
     * Used to return the output from the
     * {@linkplain Primes#generateSTRandomPrime(Digest, int, byte[]) Shawe-Taylor Random_Prime
     * Routine}
     */
    public static class STOutput
    {
        private BigInteger prime;       // the generated provable prime
        private byte[] primeSeed;       // the final seed value after generation
        private int primeGenCounter;    // the number of candidates examined
        private STOutput(BigInteger prime, byte[] primeSeed, int primeGenCounter)
        {
            this.prime = prime;
            this.primeSeed = primeSeed;
            this.primeGenCounter = primeGenCounter;
        }
        public BigInteger getPrime()
        {
            return prime;
        }
        public byte[] getPrimeSeed()
        {
            return primeSeed;
        }
        public int getPrimeGenCounter()
        {
            return primeGenCounter;
        }
    }
    /**
     * FIPS 186-4 C.6 Shawe-Taylor Random_Prime Routine
     *
     * Construct a provable prime number using a hash function.
     *
     * @param hash
     *            the {@link Digest} instance to use (as "Hash()"). Cannot be null.
     * @param length
     *            the length (in bits) of the prime to be generated. Must be at least 2.
     * @param inputSeed
     *            the seed to be used for the generation of the requested prime. Cannot be null or
     *            empty.
     * @return an {@link STOutput} instance containing the requested prime.
     * @throws IllegalArgumentException if any argument is invalid.
     */
    public static STOutput generateSTRandomPrime(Digest hash, int length, byte[] inputSeed)
    {
        if (hash == null)
        {
            throw new IllegalArgumentException("'hash' cannot be null");
        }
        if (length < 2)
        {
            throw new IllegalArgumentException("'length' must be >= 2");
        }
        if (inputSeed == null || inputSeed.length == 0)
        {
            throw new IllegalArgumentException("'inputSeed' cannot be null or empty");
        }
        // Clone the seed so the caller's array is not mutated by the increments below.
        return implSTRandomPrime(hash, length, Arrays.clone(inputSeed));
    }
    /**
     * FIPS 186-4 C.3.2 Enhanced Miller-Rabin Probabilistic Primality Test
     *
     * Run several iterations of the Miller-Rabin algorithm with randomly-chosen bases. This is an
     * alternative to {@link #isMRProbablePrime(BigInteger, SecureRandom, int)} that provides more
     * information about a composite candidate, which may be useful when generating or validating
     * RSA moduli.
     *
     * @param candidate
     *            the {@link BigInteger} instance to test for primality.
     * @param random
     *            the source of randomness to use to choose bases.
     * @param iterations
     *            the number of randomly-chosen bases to perform the test for.
     * @return an {@link MROutput} instance that can be further queried for details.
     */
    public static MROutput enhancedMRProbablePrimeTest(BigInteger candidate, SecureRandom random, int iterations)
    {
        checkCandidate(candidate, "candidate");
        if (random == null)
        {
            throw new IllegalArgumentException("'random' cannot be null");
        }
        if (iterations < 1)
        {
            throw new IllegalArgumentException("'iterations' must be > 0");
        }
        // bitLength == 2 means the candidate is 2 or 3, both prime.
        if (candidate.bitLength() == 2)
        {
            return MROutput.probablyPrime();
        }
        // Even candidates (> 2) are trivially composite with factor 2.
        if (!candidate.testBit(0))
        {
            return MROutput.provablyCompositeWithFactor(TWO);
        }
        BigInteger w = candidate;
        BigInteger wSubOne = candidate.subtract(ONE);
        BigInteger wSubTwo = candidate.subtract(TWO);
        // Decompose w - 1 = 2^a * m with m odd.
        int a = wSubOne.getLowestSetBit();
        BigInteger m = wSubOne.shiftRight(a);
        for (int i = 0; i < iterations; ++i)
        {
            BigInteger b = BigIntegers.createRandomInRange(TWO, wSubTwo, random);
            // A non-trivial gcd with the base is itself a factor of w.
            BigInteger g = b.gcd(w);
            if (g.compareTo(ONE) > 0)
            {
                return MROutput.provablyCompositeWithFactor(g);
            }
            BigInteger z = b.modPow(m, w);
            if (z.equals(ONE) || z.equals(wSubOne))
            {
                continue;
            }
            boolean primeToBase = false;
            // 'x' tracks the value before the squaring that produced 'z'; it is
            // used below to attempt factor extraction when the base is a witness.
            BigInteger x = z;
            for (int j = 1; j < a; ++j)
            {
                z = z.modPow(TWO, w);
                if (z.equals(wSubOne))
                {
                    primeToBase = true;
                    break;
                }
                if (z.equals(ONE))
                {
                    break;
                }
                x = z;
            }
            if (!primeToBase)
            {
                // Per FIPS 186-4 C.3.2: try to extract a factor of w from the
                // non-trivial square root of 1 found via gcd(x - 1, w).
                if (!z.equals(ONE))
                {
                    x = z;
                    z = z.modPow(TWO, w);
                    if (!z.equals(ONE))
                    {
                        x = z;
                    }
                }
                g = x.subtract(ONE).gcd(w);
                if (g.compareTo(ONE) > 0)
                {
                    return MROutput.provablyCompositeWithFactor(g);
                }
                return MROutput.provablyCompositeNotPrimePower();
            }
        }
        return MROutput.probablyPrime();
    }
    /**
     * A fast check for small divisors, up to some implementation-specific limit.
     *
     * @param candidate
     *            the {@link BigInteger} instance to test for division by small factors.
     *
     * @return <code>true</code> if the candidate is found to have any small factors,
     *         <code>false</code> otherwise.
     */
    public static boolean hasAnySmallFactors(BigInteger candidate)
    {
        checkCandidate(candidate, "candidate");
        return implHasAnySmallFactors(candidate);
    }
    /**
     * FIPS 186-4 C.3.1 Miller-Rabin Probabilistic Primality Test
     *
     * Run several iterations of the Miller-Rabin algorithm with randomly-chosen bases.
     *
     * @param candidate
     *            the {@link BigInteger} instance to test for primality.
     * @param random
     *            the source of randomness to use to choose bases.
     * @param iterations
     *            the number of randomly-chosen bases to perform the test for.
     * @return <code>false</code> if any witness to compositeness is found amongst the chosen bases
     *         (so <code>candidate</code> is definitely NOT prime), or else <code>true</code>
     *         (indicating primality with some probability dependent on the number of iterations
     *         that were performed).
     */
    public static boolean isMRProbablePrime(BigInteger candidate, SecureRandom random, int iterations)
    {
        checkCandidate(candidate, "candidate");
        if (random == null)
        {
            throw new IllegalArgumentException("'random' cannot be null");
        }
        if (iterations < 1)
        {
            throw new IllegalArgumentException("'iterations' must be > 0");
        }
        // bitLength == 2 means the candidate is 2 or 3, both prime.
        if (candidate.bitLength() == 2)
        {
            return true;
        }
        // Even candidates (> 2) are composite.
        if (!candidate.testBit(0))
        {
            return false;
        }
        BigInteger w = candidate;
        BigInteger wSubOne = candidate.subtract(ONE);
        BigInteger wSubTwo = candidate.subtract(TWO);
        // Decompose w - 1 = 2^a * m with m odd.
        int a = wSubOne.getLowestSetBit();
        BigInteger m = wSubOne.shiftRight(a);
        for (int i = 0; i < iterations; ++i)
        {
            BigInteger b = BigIntegers.createRandomInRange(TWO, wSubTwo, random);
            if (!implMRProbablePrimeToBase(w, wSubOne, m, a, b))
            {
                return false;
            }
        }
        return true;
    }
    /**
     * FIPS 186-4 C.3.1 Miller-Rabin Probabilistic Primality Test (to a fixed base).
     *
     * Run a single iteration of the Miller-Rabin algorithm against the specified base.
     *
     * @param candidate
     *            the {@link BigInteger} instance to test for primality.
     * @param base
     *            the base value to use for this iteration.
     * @return <code>false</code> if the specified base is a witness to compositeness (so
     *         <code>candidate</code> is definitely NOT prime), or else <code>true</code>.
     */
    public static boolean isMRProbablePrimeToBase(BigInteger candidate, BigInteger base)
    {
        checkCandidate(candidate, "candidate");
        checkCandidate(base, "base");
        if (base.compareTo(candidate.subtract(ONE)) >= 0)
        {
            throw new IllegalArgumentException("'base' must be < ('candidate' - 1)");
        }
        // bitLength == 2 means the candidate is 2 or 3, both prime.
        if (candidate.bitLength() == 2)
        {
            return true;
        }
        BigInteger w = candidate;
        BigInteger wSubOne = candidate.subtract(ONE);
        // Decompose w - 1 = 2^a * m with m odd.
        int a = wSubOne.getLowestSetBit();
        BigInteger m = wSubOne.shiftRight(a);
        return implMRProbablePrimeToBase(w, wSubOne, m, a, base);
    }
    // Primality arguments must be non-null integers >= 2 (i.e. bitLength >= 2).
    private static void checkCandidate(BigInteger n, String name)
    {
        if (n == null || n.signum() < 1 || n.bitLength() < 2)
        {
            throw new IllegalArgumentException("'" + name + "' must be non-null and >= 2");
        }
    }
    private static boolean implHasAnySmallFactors(BigInteger x)
    {
        /*
         * Bundle trial divisors into ~32-bit moduli then use fast tests on the ~32-bit remainders.
         * Each modulus is a product of distinct small primes that still fits in an int, so only
         * one (expensive) BigInteger mod is needed per group of primes.
         */
        int m = 2 * 3 * 5 * 7 * 11 * 13 * 17 * 19 * 23;
        int r = x.mod(BigInteger.valueOf(m)).intValue();
        if ((r & 1) != 0 && (r % 3) != 0 && (r % 5) != 0 && (r % 7) != 0 && (r % 11) != 0
            && (r % 13) != 0 && (r % 17) != 0 && (r % 19) != 0 && (r % 23) != 0)
        {
            m = 29 * 31 * 37 * 41 * 43;
            r = x.mod(BigInteger.valueOf(m)).intValue();
            if ((r % 29) != 0 && (r % 31) != 0 && (r % 37) != 0 && (r % 41) != 0 && (r % 43) != 0)
            {
                m = 47 * 53 * 59 * 61 * 67;
                r = x.mod(BigInteger.valueOf(m)).intValue();
                if ((r % 47) != 0 && (r % 53) != 0 && (r % 59) != 0 && (r % 61) != 0 && (r % 67) != 0)
                {
                    m = 71 * 73 * 79 * 83;
                    r = x.mod(BigInteger.valueOf(m)).intValue();
                    if ((r % 71) != 0 && (r % 73) != 0 && (r % 79) != 0 && (r % 83) != 0)
                    {
                        m = 89 * 97 * 101 * 103;
                        r = x.mod(BigInteger.valueOf(m)).intValue();
                        if ((r % 89) != 0 && (r % 97) != 0 && (r % 101) != 0 && (r % 103) != 0)
                        {
                            m = 107 * 109 * 113 * 127;
                            r = x.mod(BigInteger.valueOf(m)).intValue();
                            if ((r % 107) != 0 && (r % 109) != 0 && (r % 113) != 0 && (r % 127) != 0)
                            {
                                // No prime divisor up to 127 found.
                                return false;
                            }
                        }
                    }
                }
            }
        }
        return true;
    }
    /**
     * One Miller-Rabin round: returns true if w passes to base b, where
     * w - 1 = 2^a * m with m odd (as precomputed by the callers).
     */
    private static boolean implMRProbablePrimeToBase(BigInteger w, BigInteger wSubOne, BigInteger m, int a, BigInteger b)
    {
        BigInteger z = b.modPow(m, w);
        if (z.equals(ONE) || z.equals(wSubOne))
        {
            return true;
        }
        boolean result = false;
        for (int j = 1; j < a; ++j)
        {
            z = z.modPow(TWO, w);
            if (z.equals(wSubOne))
            {
                result = true;
                break;
            }
            if (z.equals(ONE))
            {
                // A non-trivial square root of 1 was found: w is composite.
                return false;
            }
        }
        return result;
    }
    private static STOutput implSTRandomPrime(Digest d, int length, byte[] primeSeed)
    {
        int dLen = d.getDigestSize();
        // Base case of the recursion: primes below 33 bits are found by direct
        // trial division on hash-derived candidates.
        if (length < 33)
        {
            int primeGenCounter = 0;
            byte[] c0 = new byte[dLen];
            byte[] c1 = new byte[dLen];
            for (;;)
            {
                hash(d, primeSeed, c0, 0);
                inc(primeSeed, 1);
                hash(d, primeSeed, c1, 0);
                inc(primeSeed, 1);
                // Build a candidate from two hash outputs, mask to 'length' bits,
                // then force the top bit (exact length) and the low bit (odd).
                int c = extract32(c0) ^ extract32(c1);
                c &= (-1 >>> (32 - length));
                c |= (1 << (length - 1)) | 1;
                ++primeGenCounter;
                long c64 = c & 0xFFFFFFFFL;
                if (isPrime32(c64))
                {
                    return new STOutput(BigInteger.valueOf(c64), primeSeed, primeGenCounter);
                }
                if (primeGenCounter > (4 * length))
                {
                    throw new IllegalStateException("Too many iterations in Shawe-Taylor Random_Prime Routine");
                }
            }
        }
        // Recursive case: first construct a provable prime c0 of roughly half the
        // requested length, then search for a prime of the form c = 2*t*c0 + 1.
        STOutput rec = implSTRandomPrime(d, (length + 3)/2, primeSeed);
        BigInteger c0 = rec.getPrime();
        primeSeed = rec.getPrimeSeed();
        int primeGenCounter = rec.getPrimeGenCounter();
        int outlen = 8 * dLen;
        int iterations = (length - 1)/outlen;
        int oldCounter = primeGenCounter;
        // x: a hash-derived random starting point with the top bit of 'length' set.
        BigInteger x = hashGen(d, primeSeed, iterations + 1);
        x = x.mod(ONE.shiftLeft(length - 1)).setBit(length - 1);
        BigInteger c0x2 = c0.shiftLeft(1);
        BigInteger tx2 = x.subtract(ONE).divide(c0x2).add(ONE).shiftLeft(1);
        int dt = 0;
        BigInteger c = tx2.multiply(c0).add(ONE);
        /*
         * TODO Since the candidate primes are generated by constant steps ('c0x2'),
         * sieving could be used here in place of the 'hasAnySmallFactors' approach.
         */
        for (;;)
        {
            // If the candidate outgrew the requested length, wrap back to the
            // smallest admissible value of the same form.
            if (c.bitLength() > length)
            {
                tx2 = ONE.shiftLeft(length - 1).subtract(ONE).divide(c0x2).add(ONE).shiftLeft(1);
                c = tx2.multiply(c0).add(ONE);
            }
            ++primeGenCounter;
            /*
             * This is an optimization of the original algorithm, using trial division to screen out
             * many non-primes quickly.
             *
             * NOTE: 'primeSeed' is still incremented as if we performed the full check!
             */
            if (!implHasAnySmallFactors(c))
            {
                BigInteger a = hashGen(d, primeSeed, iterations + 1);
                a = a.mod(c.subtract(THREE)).add(TWO);
                tx2 = tx2.add(BigInteger.valueOf(dt));
                dt = 0;
                BigInteger z = a.modPow(tx2, c);
                // Pocklington-style provability check from FIPS 186-4 C.6:
                // gcd(z - 1, c) == 1 and z^c0 == 1 (mod c) prove c prime given c0 prime.
                if (c.gcd(z.subtract(ONE)).equals(ONE) && z.modPow(c0, c).equals(ONE))
                {
                    return new STOutput(c, primeSeed, primeGenCounter);
                }
            }
            else
            {
                inc(primeSeed, iterations + 1);
            }
            if (primeGenCounter >= ((4 * length) + oldCounter))
            {
                throw new IllegalStateException("Too many iterations in Shawe-Taylor Random_Prime Routine");
            }
            dt += 2;
            c = c.add(c0x2);
        }
    }
    // Interprets the last (up to) 4 bytes of bs as a big-endian unsigned int.
    private static int extract32(byte[] bs)
    {
        int result = 0;
        int count = Math.min(4, bs.length);
        for (int i = 0; i < count; ++i)
        {
            int b = bs[bs.length - (i + 1)] & 0xFF;
            result |= (b << (8 * i));
        }
        return result;
    }
    // Writes Hash(input) into output at outPos.
    private static void hash(Digest d, byte[] input, byte[] output, int outPos)
    {
        d.update(input, 0, input.length);
        d.doFinal(output, outPos);
    }
    /**
     * Computes sum(i = 0..count-1) Hash(seed + i) * 2^(i * outlen), incrementing
     * 'seed' once per hash (the i-th hash occupies the i-th lowest-order block).
     */
    private static BigInteger hashGen(Digest d, byte[] seed, int count)
    {
        int dLen = d.getDigestSize();
        int pos = count * dLen;
        byte[] buf = new byte[pos];
        for (int i = 0; i < count; ++i)
        {
            pos -= dLen;
            hash(d, seed, buf, pos);
            inc(seed, 1);
        }
        return new BigInteger(1, buf);
    }
    // Adds c to the big-endian integer stored in 'seed', modulo 2^(8 * seed.length).
    private static void inc(byte[] seed, int c)
    {
        int pos = seed.length;
        while (c > 0 && --pos >= 0)
        {
            c += (seed[pos] & 0xFF);
            seed[pos] = (byte)c;
            c >>>= 8;
        }
    }
    // Deterministic primality test for values that fit in 32 unsigned bits.
    private static boolean isPrime32(long x)
    {
        if (x >>> 32 != 0L)
        {
            throw new IllegalArgumentException("Size limit exceeded");
        }
        /*
         * Use wheel factorization with 2, 3, 5 to select trial divisors.
         */
        if (x <= 5L)
        {
            return x == 2L || x == 3L || x == 5L;
        }
        if ((x & 1L) == 0L || (x % 3L) == 0L || (x % 5L) == 0L)
        {
            return false;
        }
        long[] ds = new long[]{ 1L, 7L, 11L, 13L, 17L, 19L, 23L, 29L };
        long base = 0L;
        // 'pos' starts at 1 on the first pass so the trivial divisor 1 is skipped.
        for (int pos = 1; ; pos = 0)
        {
            /*
             * Trial division by wheel-selected divisors
             */
            while (pos < ds.length)
            {
                long d = base + ds[pos];
                if (x % d == 0L)
                {
                    // A wheel divisor divides x: x is prime only if x is that
                    // divisor itself, which can only happen below 30.
                    return x < 30L;
                }
                ++pos;
            }
            base += 30L;
            if (base * base >= x)
            {
                return true;
            }
        }
    }
}
/**
* Autogenerated by Thrift Compiler (0.9.1)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package tachyon.thrift;
import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;
import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import org.apache.thrift.protocol.TProtocolException;
import org.apache.thrift.EncodingUtils;
import org.apache.thrift.TException;
import org.apache.thrift.async.AsyncMethodCallback;
import org.apache.thrift.server.AbstractNonblockingServer.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class WorkerService {
public interface Iface {
public void accessBlock(long blockId) throws org.apache.thrift.TException;
public void addCheckpoint(long userId, int fileId) throws FileDoesNotExistException, SuspectedFileSizeException, FailedToCheckpointException, BlockInfoException, org.apache.thrift.TException;
public boolean asyncCheckpoint(int fileId) throws TachyonException, org.apache.thrift.TException;
public void cacheBlock(long userId, long blockId) throws FileDoesNotExistException, SuspectedFileSizeException, BlockInfoException, org.apache.thrift.TException;
public String getUserUfsTempFolder(long userId) throws org.apache.thrift.TException;
public String lockBlock(long blockId, long userId) throws FileDoesNotExistException, org.apache.thrift.TException;
public boolean promoteBlock(long userId, long blockId) throws org.apache.thrift.TException;
public void cancelBlock(long userId, long blockId) throws org.apache.thrift.TException;
public String getBlockLocation(long userId, long blockId, long initialBytes) throws OutOfSpaceException, FileAlreadyExistException, org.apache.thrift.TException;
public boolean requestSpace(long userId, long blockId, long requestBytes) throws FileDoesNotExistException, org.apache.thrift.TException;
public boolean unlockBlock(long blockId, long userId) throws org.apache.thrift.TException;
public void userHeartbeat(long userId) throws org.apache.thrift.TException;
}
public interface AsyncIface {
public void accessBlock(long blockId, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
public void addCheckpoint(long userId, int fileId, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
public void asyncCheckpoint(int fileId, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
public void cacheBlock(long userId, long blockId, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
public void getUserUfsTempFolder(long userId, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
public void lockBlock(long blockId, long userId, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
public void promoteBlock(long userId, long blockId, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
public void cancelBlock(long userId, long blockId, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
public void getBlockLocation(long userId, long blockId, long initialBytes, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
public void requestSpace(long userId, long blockId, long requestBytes, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
public void unlockBlock(long blockId, long userId, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
public void userHeartbeat(long userId, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
}
public static class Client extends org.apache.thrift.TServiceClient implements Iface {
public static class Factory implements org.apache.thrift.TServiceClientFactory<Client> {
public Factory() {}
public Client getClient(org.apache.thrift.protocol.TProtocol prot) {
return new Client(prot);
}
public Client getClient(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TProtocol oprot) {
return new Client(iprot, oprot);
}
}
public Client(org.apache.thrift.protocol.TProtocol prot)
{
super(prot, prot);
}
public Client(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TProtocol oprot) {
super(iprot, oprot);
}
public void accessBlock(long blockId) throws org.apache.thrift.TException
{
send_accessBlock(blockId);
recv_accessBlock();
}
public void send_accessBlock(long blockId) throws org.apache.thrift.TException
{
accessBlock_args args = new accessBlock_args();
args.setBlockId(blockId);
sendBase("accessBlock", args);
}
public void recv_accessBlock() throws org.apache.thrift.TException
{
accessBlock_result result = new accessBlock_result();
receiveBase(result, "accessBlock");
return;
}
public void addCheckpoint(long userId, int fileId) throws FileDoesNotExistException, SuspectedFileSizeException, FailedToCheckpointException, BlockInfoException, org.apache.thrift.TException
{
send_addCheckpoint(userId, fileId);
recv_addCheckpoint();
}
public void send_addCheckpoint(long userId, int fileId) throws org.apache.thrift.TException
{
addCheckpoint_args args = new addCheckpoint_args();
args.setUserId(userId);
args.setFileId(fileId);
sendBase("addCheckpoint", args);
}
public void recv_addCheckpoint() throws FileDoesNotExistException, SuspectedFileSizeException, FailedToCheckpointException, BlockInfoException, org.apache.thrift.TException
{
addCheckpoint_result result = new addCheckpoint_result();
receiveBase(result, "addCheckpoint");
if (result.eP != null) {
throw result.eP;
}
if (result.eS != null) {
throw result.eS;
}
if (result.eF != null) {
throw result.eF;
}
if (result.eB != null) {
throw result.eB;
}
return;
}
public boolean asyncCheckpoint(int fileId) throws TachyonException, org.apache.thrift.TException
{
send_asyncCheckpoint(fileId);
return recv_asyncCheckpoint();
}
public void send_asyncCheckpoint(int fileId) throws org.apache.thrift.TException
{
asyncCheckpoint_args args = new asyncCheckpoint_args();
args.setFileId(fileId);
sendBase("asyncCheckpoint", args);
}
public boolean recv_asyncCheckpoint() throws TachyonException, org.apache.thrift.TException
{
asyncCheckpoint_result result = new asyncCheckpoint_result();
receiveBase(result, "asyncCheckpoint");
if (result.isSetSuccess()) {
return result.success;
}
if (result.e != null) {
throw result.e;
}
throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "asyncCheckpoint failed: unknown result");
}
// Synchronous RPC wrapper: notifies the worker that userId cached blockId.
public void cacheBlock(long userId, long blockId) throws FileDoesNotExistException, SuspectedFileSizeException, BlockInfoException, org.apache.thrift.TException
{
send_cacheBlock(userId, blockId);
recv_cacheBlock();
}
// Serializes the cacheBlock arguments and writes the request frame (no read).
public void send_cacheBlock(long userId, long blockId) throws org.apache.thrift.TException
{
cacheBlock_args args = new cacheBlock_args();
args.setUserId(userId);
args.setBlockId(blockId);
sendBase("cacheBlock", args);
}
// Reads the cacheBlock response and rethrows any declared service exception set by the server.
public void recv_cacheBlock() throws FileDoesNotExistException, SuspectedFileSizeException, BlockInfoException, org.apache.thrift.TException
{
cacheBlock_result result = new cacheBlock_result();
receiveBase(result, "cacheBlock");
if (result.eP != null) {
throw result.eP;
}
if (result.eS != null) {
throw result.eS;
}
if (result.eB != null) {
throw result.eB;
}
return;
}
// Synchronous RPC wrapper: returns the under-filesystem temp folder path for userId.
public String getUserUfsTempFolder(long userId) throws org.apache.thrift.TException
{
send_getUserUfsTempFolder(userId);
return recv_getUserUfsTempFolder();
}
// Serializes the getUserUfsTempFolder arguments and writes the request frame (no read).
public void send_getUserUfsTempFolder(long userId) throws org.apache.thrift.TException
{
getUserUfsTempFolder_args args = new getUserUfsTempFolder_args();
args.setUserId(userId);
sendBase("getUserUfsTempFolder", args);
}
// Reads the response; a missing success field is a protocol error (MISSING_RESULT).
public String recv_getUserUfsTempFolder() throws org.apache.thrift.TException
{
getUserUfsTempFolder_result result = new getUserUfsTempFolder_result();
receiveBase(result, "getUserUfsTempFolder");
if (result.isSetSuccess()) {
return result.success;
}
throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "getUserUfsTempFolder failed: unknown result");
}
// Synchronous RPC wrapper: locks blockId on behalf of userId and returns the
// server-provided string result (presumably the local block path — confirm in IDL).
public String lockBlock(long blockId, long userId) throws FileDoesNotExistException, org.apache.thrift.TException
{
send_lockBlock(blockId, userId);
return recv_lockBlock();
}
// Serializes the lockBlock arguments and writes the request frame (no read).
public void send_lockBlock(long blockId, long userId) throws org.apache.thrift.TException
{
lockBlock_args args = new lockBlock_args();
args.setBlockId(blockId);
args.setUserId(userId);
sendBase("lockBlock", args);
}
// Reads the lockBlock response: success value, declared exception, or MISSING_RESULT.
public String recv_lockBlock() throws FileDoesNotExistException, org.apache.thrift.TException
{
lockBlock_result result = new lockBlock_result();
receiveBase(result, "lockBlock");
if (result.isSetSuccess()) {
return result.success;
}
if (result.eP != null) {
throw result.eP;
}
throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "lockBlock failed: unknown result");
}
// Synchronous RPC wrapper: asks the worker to promote blockId; returns the server's boolean.
public boolean promoteBlock(long userId, long blockId) throws org.apache.thrift.TException
{
send_promoteBlock(userId, blockId);
return recv_promoteBlock();
}
// Serializes the promoteBlock arguments and writes the request frame (no read).
public void send_promoteBlock(long userId, long blockId) throws org.apache.thrift.TException
{
promoteBlock_args args = new promoteBlock_args();
args.setUserId(userId);
args.setBlockId(blockId);
sendBase("promoteBlock", args);
}
// Reads the response; a missing success field is a protocol error (MISSING_RESULT).
public boolean recv_promoteBlock() throws org.apache.thrift.TException
{
promoteBlock_result result = new promoteBlock_result();
receiveBase(result, "promoteBlock");
if (result.isSetSuccess()) {
return result.success;
}
throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "promoteBlock failed: unknown result");
}
// Synchronous RPC wrapper: cancels userId's in-progress write of blockId.
public void cancelBlock(long userId, long blockId) throws org.apache.thrift.TException
{
send_cancelBlock(userId, blockId);
recv_cancelBlock();
}
// Serializes the cancelBlock arguments and writes the request frame (no read).
public void send_cancelBlock(long userId, long blockId) throws org.apache.thrift.TException
{
cancelBlock_args args = new cancelBlock_args();
args.setUserId(userId);
args.setBlockId(blockId);
sendBase("cancelBlock", args);
}
// Reads the (void) cancelBlock response; no service exceptions are declared.
public void recv_cancelBlock() throws org.apache.thrift.TException
{
cancelBlock_result result = new cancelBlock_result();
receiveBase(result, "cancelBlock");
return;
}
// Synchronous RPC wrapper: reserves initialBytes for a new block and returns the
// server-provided location string for writing it.
public String getBlockLocation(long userId, long blockId, long initialBytes) throws OutOfSpaceException, FileAlreadyExistException, org.apache.thrift.TException
{
send_getBlockLocation(userId, blockId, initialBytes);
return recv_getBlockLocation();
}
// Serializes the getBlockLocation arguments and writes the request frame (no read).
public void send_getBlockLocation(long userId, long blockId, long initialBytes) throws org.apache.thrift.TException
{
getBlockLocation_args args = new getBlockLocation_args();
args.setUserId(userId);
args.setBlockId(blockId);
args.setInitialBytes(initialBytes);
sendBase("getBlockLocation", args);
}
// Reads the response: success value, one of the declared exceptions, or MISSING_RESULT.
public String recv_getBlockLocation() throws OutOfSpaceException, FileAlreadyExistException, org.apache.thrift.TException
{
getBlockLocation_result result = new getBlockLocation_result();
receiveBase(result, "getBlockLocation");
if (result.isSetSuccess()) {
return result.success;
}
if (result.eP != null) {
throw result.eP;
}
if (result.eS != null) {
throw result.eS;
}
throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "getBlockLocation failed: unknown result");
}
// Synchronous RPC wrapper: requests requestBytes of additional space for blockId;
// returns true if the worker granted the space.
public boolean requestSpace(long userId, long blockId, long requestBytes) throws FileDoesNotExistException, org.apache.thrift.TException
{
send_requestSpace(userId, blockId, requestBytes);
return recv_requestSpace();
}
// Serializes the requestSpace arguments and writes the request frame (no read).
public void send_requestSpace(long userId, long blockId, long requestBytes) throws org.apache.thrift.TException
{
requestSpace_args args = new requestSpace_args();
args.setUserId(userId);
args.setBlockId(blockId);
args.setRequestBytes(requestBytes);
sendBase("requestSpace", args);
}
// Reads the response: success value, declared exception, or MISSING_RESULT.
public boolean recv_requestSpace() throws FileDoesNotExistException, org.apache.thrift.TException
{
requestSpace_result result = new requestSpace_result();
receiveBase(result, "requestSpace");
if (result.isSetSuccess()) {
return result.success;
}
if (result.eP != null) {
throw result.eP;
}
throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "requestSpace failed: unknown result");
}
// Synchronous RPC wrapper: releases userId's lock on blockId; returns the server's boolean.
public boolean unlockBlock(long blockId, long userId) throws org.apache.thrift.TException
{
send_unlockBlock(blockId, userId);
return recv_unlockBlock();
}
// Serializes the unlockBlock arguments and writes the request frame (no read).
public void send_unlockBlock(long blockId, long userId) throws org.apache.thrift.TException
{
unlockBlock_args args = new unlockBlock_args();
args.setBlockId(blockId);
args.setUserId(userId);
sendBase("unlockBlock", args);
}
// Reads the response; a missing success field is a protocol error (MISSING_RESULT).
public boolean recv_unlockBlock() throws org.apache.thrift.TException
{
unlockBlock_result result = new unlockBlock_result();
receiveBase(result, "unlockBlock");
if (result.isSetSuccess()) {
return result.success;
}
throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "unlockBlock failed: unknown result");
}
// Synchronous RPC wrapper: keep-alive heartbeat for userId's session on this worker.
public void userHeartbeat(long userId) throws org.apache.thrift.TException
{
send_userHeartbeat(userId);
recv_userHeartbeat();
}
// Serializes the userHeartbeat arguments and writes the request frame (no read).
public void send_userHeartbeat(long userId) throws org.apache.thrift.TException
{
userHeartbeat_args args = new userHeartbeat_args();
args.setUserId(userId);
sendBase("userHeartbeat", args);
}
// Reads the (void) userHeartbeat response; no service exceptions are declared.
public void recv_userHeartbeat() throws org.apache.thrift.TException
{
userHeartbeat_result result = new userHeartbeat_result();
receiveBase(result, "userHeartbeat");
return;
}
}
/**
 * Non-blocking client stub for this Thrift service (generated code — do not hand-edit;
 * regenerate from the .thrift IDL).
 *
 * Every service method follows the same generated pattern: the public method registers a
 * {@code <method>_call} with the client's TAsyncClientManager; the call object's
 * {@code write_args} serializes the request, and {@code getResult} deserializes the
 * response by delegating to the synchronous Client's matching {@code recv_*} method.
 */
public static class AsyncClient extends org.apache.thrift.async.TAsyncClient implements AsyncIface {
// Factory used by thrift to build AsyncClient instances over non-blocking transports.
public static class Factory implements org.apache.thrift.async.TAsyncClientFactory<AsyncClient> {
private org.apache.thrift.async.TAsyncClientManager clientManager;
private org.apache.thrift.protocol.TProtocolFactory protocolFactory;
public Factory(org.apache.thrift.async.TAsyncClientManager clientManager, org.apache.thrift.protocol.TProtocolFactory protocolFactory) {
this.clientManager = clientManager;
this.protocolFactory = protocolFactory;
}
public AsyncClient getAsyncClient(org.apache.thrift.transport.TNonblockingTransport transport) {
return new AsyncClient(protocolFactory, clientManager, transport);
}
}
public AsyncClient(org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.async.TAsyncClientManager clientManager, org.apache.thrift.transport.TNonblockingTransport transport) {
super(protocolFactory, clientManager, transport);
}
// Asynchronously invokes accessBlock; completion is delivered via resultHandler.
public void accessBlock(long blockId, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
checkReady();
accessBlock_call method_call = new accessBlock_call(blockId, resultHandler, this, ___protocolFactory, ___transport);
this.___currentMethod = method_call;
___manager.call(method_call);
}
// Async call state for accessBlock: holds args until write, decodes the response frame on read.
public static class accessBlock_call extends org.apache.thrift.async.TAsyncMethodCall {
private long blockId;
public accessBlock_call(long blockId, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
super(client, protocolFactory, transport, resultHandler, false);
this.blockId = blockId;
}
public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("accessBlock", org.apache.thrift.protocol.TMessageType.CALL, 0));
accessBlock_args args = new accessBlock_args();
args.setBlockId(blockId);
args.write(prot);
prot.writeMessageEnd();
}
public void getResult() throws org.apache.thrift.TException {
if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
throw new IllegalStateException("Method call not finished!");
}
// Replay the buffered response frame through a synchronous Client to decode it.
org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
(new Client(prot)).recv_accessBlock();
}
}
// The remaining methods below are identical instantiations of the pattern documented above,
// one public method plus one *_call class per service method.
public void addCheckpoint(long userId, int fileId, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
checkReady();
addCheckpoint_call method_call = new addCheckpoint_call(userId, fileId, resultHandler, this, ___protocolFactory, ___transport);
this.___currentMethod = method_call;
___manager.call(method_call);
}
public static class addCheckpoint_call extends org.apache.thrift.async.TAsyncMethodCall {
private long userId;
private int fileId;
public addCheckpoint_call(long userId, int fileId, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
super(client, protocolFactory, transport, resultHandler, false);
this.userId = userId;
this.fileId = fileId;
}
public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("addCheckpoint", org.apache.thrift.protocol.TMessageType.CALL, 0));
addCheckpoint_args args = new addCheckpoint_args();
args.setUserId(userId);
args.setFileId(fileId);
args.write(prot);
prot.writeMessageEnd();
}
public void getResult() throws FileDoesNotExistException, SuspectedFileSizeException, FailedToCheckpointException, BlockInfoException, org.apache.thrift.TException {
if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
throw new IllegalStateException("Method call not finished!");
}
org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
(new Client(prot)).recv_addCheckpoint();
}
}
public void asyncCheckpoint(int fileId, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
checkReady();
asyncCheckpoint_call method_call = new asyncCheckpoint_call(fileId, resultHandler, this, ___protocolFactory, ___transport);
this.___currentMethod = method_call;
___manager.call(method_call);
}
public static class asyncCheckpoint_call extends org.apache.thrift.async.TAsyncMethodCall {
private int fileId;
public asyncCheckpoint_call(int fileId, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
super(client, protocolFactory, transport, resultHandler, false);
this.fileId = fileId;
}
public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("asyncCheckpoint", org.apache.thrift.protocol.TMessageType.CALL, 0));
asyncCheckpoint_args args = new asyncCheckpoint_args();
args.setFileId(fileId);
args.write(prot);
prot.writeMessageEnd();
}
public boolean getResult() throws TachyonException, org.apache.thrift.TException {
if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
throw new IllegalStateException("Method call not finished!");
}
org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
return (new Client(prot)).recv_asyncCheckpoint();
}
}
public void cacheBlock(long userId, long blockId, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
checkReady();
cacheBlock_call method_call = new cacheBlock_call(userId, blockId, resultHandler, this, ___protocolFactory, ___transport);
this.___currentMethod = method_call;
___manager.call(method_call);
}
public static class cacheBlock_call extends org.apache.thrift.async.TAsyncMethodCall {
private long userId;
private long blockId;
public cacheBlock_call(long userId, long blockId, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
super(client, protocolFactory, transport, resultHandler, false);
this.userId = userId;
this.blockId = blockId;
}
public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("cacheBlock", org.apache.thrift.protocol.TMessageType.CALL, 0));
cacheBlock_args args = new cacheBlock_args();
args.setUserId(userId);
args.setBlockId(blockId);
args.write(prot);
prot.writeMessageEnd();
}
public void getResult() throws FileDoesNotExistException, SuspectedFileSizeException, BlockInfoException, org.apache.thrift.TException {
if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
throw new IllegalStateException("Method call not finished!");
}
org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
(new Client(prot)).recv_cacheBlock();
}
}
public void getUserUfsTempFolder(long userId, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
checkReady();
getUserUfsTempFolder_call method_call = new getUserUfsTempFolder_call(userId, resultHandler, this, ___protocolFactory, ___transport);
this.___currentMethod = method_call;
___manager.call(method_call);
}
public static class getUserUfsTempFolder_call extends org.apache.thrift.async.TAsyncMethodCall {
private long userId;
public getUserUfsTempFolder_call(long userId, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
super(client, protocolFactory, transport, resultHandler, false);
this.userId = userId;
}
public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("getUserUfsTempFolder", org.apache.thrift.protocol.TMessageType.CALL, 0));
getUserUfsTempFolder_args args = new getUserUfsTempFolder_args();
args.setUserId(userId);
args.write(prot);
prot.writeMessageEnd();
}
public String getResult() throws org.apache.thrift.TException {
if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
throw new IllegalStateException("Method call not finished!");
}
org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
return (new Client(prot)).recv_getUserUfsTempFolder();
}
}
public void lockBlock(long blockId, long userId, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
checkReady();
lockBlock_call method_call = new lockBlock_call(blockId, userId, resultHandler, this, ___protocolFactory, ___transport);
this.___currentMethod = method_call;
___manager.call(method_call);
}
public static class lockBlock_call extends org.apache.thrift.async.TAsyncMethodCall {
private long blockId;
private long userId;
public lockBlock_call(long blockId, long userId, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
super(client, protocolFactory, transport, resultHandler, false);
this.blockId = blockId;
this.userId = userId;
}
public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("lockBlock", org.apache.thrift.protocol.TMessageType.CALL, 0));
lockBlock_args args = new lockBlock_args();
args.setBlockId(blockId);
args.setUserId(userId);
args.write(prot);
prot.writeMessageEnd();
}
public String getResult() throws FileDoesNotExistException, org.apache.thrift.TException {
if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
throw new IllegalStateException("Method call not finished!");
}
org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
return (new Client(prot)).recv_lockBlock();
}
}
public void promoteBlock(long userId, long blockId, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
checkReady();
promoteBlock_call method_call = new promoteBlock_call(userId, blockId, resultHandler, this, ___protocolFactory, ___transport);
this.___currentMethod = method_call;
___manager.call(method_call);
}
public static class promoteBlock_call extends org.apache.thrift.async.TAsyncMethodCall {
private long userId;
private long blockId;
public promoteBlock_call(long userId, long blockId, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
super(client, protocolFactory, transport, resultHandler, false);
this.userId = userId;
this.blockId = blockId;
}
public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("promoteBlock", org.apache.thrift.protocol.TMessageType.CALL, 0));
promoteBlock_args args = new promoteBlock_args();
args.setUserId(userId);
args.setBlockId(blockId);
args.write(prot);
prot.writeMessageEnd();
}
public boolean getResult() throws org.apache.thrift.TException {
if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
throw new IllegalStateException("Method call not finished!");
}
org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
return (new Client(prot)).recv_promoteBlock();
}
}
public void cancelBlock(long userId, long blockId, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
checkReady();
cancelBlock_call method_call = new cancelBlock_call(userId, blockId, resultHandler, this, ___protocolFactory, ___transport);
this.___currentMethod = method_call;
___manager.call(method_call);
}
public static class cancelBlock_call extends org.apache.thrift.async.TAsyncMethodCall {
private long userId;
private long blockId;
public cancelBlock_call(long userId, long blockId, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
super(client, protocolFactory, transport, resultHandler, false);
this.userId = userId;
this.blockId = blockId;
}
public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("cancelBlock", org.apache.thrift.protocol.TMessageType.CALL, 0));
cancelBlock_args args = new cancelBlock_args();
args.setUserId(userId);
args.setBlockId(blockId);
args.write(prot);
prot.writeMessageEnd();
}
public void getResult() throws org.apache.thrift.TException {
if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
throw new IllegalStateException("Method call not finished!");
}
org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
(new Client(prot)).recv_cancelBlock();
}
}
public void getBlockLocation(long userId, long blockId, long initialBytes, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
checkReady();
getBlockLocation_call method_call = new getBlockLocation_call(userId, blockId, initialBytes, resultHandler, this, ___protocolFactory, ___transport);
this.___currentMethod = method_call;
___manager.call(method_call);
}
public static class getBlockLocation_call extends org.apache.thrift.async.TAsyncMethodCall {
private long userId;
private long blockId;
private long initialBytes;
public getBlockLocation_call(long userId, long blockId, long initialBytes, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
super(client, protocolFactory, transport, resultHandler, false);
this.userId = userId;
this.blockId = blockId;
this.initialBytes = initialBytes;
}
public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("getBlockLocation", org.apache.thrift.protocol.TMessageType.CALL, 0));
getBlockLocation_args args = new getBlockLocation_args();
args.setUserId(userId);
args.setBlockId(blockId);
args.setInitialBytes(initialBytes);
args.write(prot);
prot.writeMessageEnd();
}
public String getResult() throws OutOfSpaceException, FileAlreadyExistException, org.apache.thrift.TException {
if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
throw new IllegalStateException("Method call not finished!");
}
org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
return (new Client(prot)).recv_getBlockLocation();
}
}
public void requestSpace(long userId, long blockId, long requestBytes, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
checkReady();
requestSpace_call method_call = new requestSpace_call(userId, blockId, requestBytes, resultHandler, this, ___protocolFactory, ___transport);
this.___currentMethod = method_call;
___manager.call(method_call);
}
public static class requestSpace_call extends org.apache.thrift.async.TAsyncMethodCall {
private long userId;
private long blockId;
private long requestBytes;
public requestSpace_call(long userId, long blockId, long requestBytes, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
super(client, protocolFactory, transport, resultHandler, false);
this.userId = userId;
this.blockId = blockId;
this.requestBytes = requestBytes;
}
public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("requestSpace", org.apache.thrift.protocol.TMessageType.CALL, 0));
requestSpace_args args = new requestSpace_args();
args.setUserId(userId);
args.setBlockId(blockId);
args.setRequestBytes(requestBytes);
args.write(prot);
prot.writeMessageEnd();
}
public boolean getResult() throws FileDoesNotExistException, org.apache.thrift.TException {
if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
throw new IllegalStateException("Method call not finished!");
}
org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
return (new Client(prot)).recv_requestSpace();
}
}
public void unlockBlock(long blockId, long userId, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
checkReady();
unlockBlock_call method_call = new unlockBlock_call(blockId, userId, resultHandler, this, ___protocolFactory, ___transport);
this.___currentMethod = method_call;
___manager.call(method_call);
}
public static class unlockBlock_call extends org.apache.thrift.async.TAsyncMethodCall {
private long blockId;
private long userId;
public unlockBlock_call(long blockId, long userId, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
super(client, protocolFactory, transport, resultHandler, false);
this.blockId = blockId;
this.userId = userId;
}
public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("unlockBlock", org.apache.thrift.protocol.TMessageType.CALL, 0));
unlockBlock_args args = new unlockBlock_args();
args.setBlockId(blockId);
args.setUserId(userId);
args.write(prot);
prot.writeMessageEnd();
}
public boolean getResult() throws org.apache.thrift.TException {
if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
throw new IllegalStateException("Method call not finished!");
}
org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
return (new Client(prot)).recv_unlockBlock();
}
}
public void userHeartbeat(long userId, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
checkReady();
userHeartbeat_call method_call = new userHeartbeat_call(userId, resultHandler, this, ___protocolFactory, ___transport);
this.___currentMethod = method_call;
___manager.call(method_call);
}
public static class userHeartbeat_call extends org.apache.thrift.async.TAsyncMethodCall {
private long userId;
public userHeartbeat_call(long userId, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
super(client, protocolFactory, transport, resultHandler, false);
this.userId = userId;
}
public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("userHeartbeat", org.apache.thrift.protocol.TMessageType.CALL, 0));
userHeartbeat_args args = new userHeartbeat_args();
args.setUserId(userId);
args.write(prot);
prot.writeMessageEnd();
}
public void getResult() throws org.apache.thrift.TException {
if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
throw new IllegalStateException("Method call not finished!");
}
org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
(new Client(prot)).recv_userHeartbeat();
}
}
}
public static class Processor<I extends Iface> extends org.apache.thrift.TBaseProcessor<I> implements org.apache.thrift.TProcessor {
private static final Logger LOGGER = LoggerFactory.getLogger(Processor.class.getName());
// Builds a Processor over the given service handler with the default process map.
public Processor(I iface) {
super(iface, getProcessMap(new HashMap<String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>>()));
}
// Extension hook: subclasses may pre-populate the process map before registration.
protected Processor(I iface, Map<String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>> processMap) {
super(iface, getProcessMap(processMap));
}
// Registers one ProcessFunction per service method, keyed by its wire-level method name.
private static <I extends Iface> Map<String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>> getProcessMap(Map<String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>> processMap) {
processMap.put("accessBlock", new accessBlock());
processMap.put("addCheckpoint", new addCheckpoint());
processMap.put("asyncCheckpoint", new asyncCheckpoint());
processMap.put("cacheBlock", new cacheBlock());
processMap.put("getUserUfsTempFolder", new getUserUfsTempFolder());
processMap.put("lockBlock", new lockBlock());
processMap.put("promoteBlock", new promoteBlock());
processMap.put("cancelBlock", new cancelBlock());
processMap.put("getBlockLocation", new getBlockLocation());
processMap.put("requestSpace", new requestSpace());
processMap.put("unlockBlock", new unlockBlock());
processMap.put("userHeartbeat", new userHeartbeat());
return processMap;
}
// Server-side dispatch for accessBlock: deserializes args, invokes the handler,
// and returns an empty result struct (void method, no declared exceptions).
public static class accessBlock<I extends Iface> extends org.apache.thrift.ProcessFunction<I, accessBlock_args> {
public accessBlock() {
super("accessBlock");
}
public accessBlock_args getEmptyArgsInstance() {
return new accessBlock_args();
}
protected boolean isOneway() {
return false;
}
public accessBlock_result getResult(I iface, accessBlock_args args) throws org.apache.thrift.TException {
accessBlock_result result = new accessBlock_result();
iface.accessBlock(args.blockId);
return result;
}
}
// Server-side dispatch for addCheckpoint: invokes the handler and captures each
// declared service exception into the matching result field for the wire response.
public static class addCheckpoint<I extends Iface> extends org.apache.thrift.ProcessFunction<I, addCheckpoint_args> {
public addCheckpoint() {
super("addCheckpoint");
}
public addCheckpoint_args getEmptyArgsInstance() {
return new addCheckpoint_args();
}
protected boolean isOneway() {
return false;
}
public addCheckpoint_result getResult(I iface, addCheckpoint_args args) throws org.apache.thrift.TException {
addCheckpoint_result result = new addCheckpoint_result();
try {
iface.addCheckpoint(args.userId, args.fileId);
} catch (FileDoesNotExistException eP) {
result.eP = eP;
} catch (SuspectedFileSizeException eS) {
result.eS = eS;
} catch (FailedToCheckpointException eF) {
result.eF = eF;
} catch (BlockInfoException eB) {
result.eB = eB;
}
return result;
}
}
public static class asyncCheckpoint<I extends Iface> extends org.apache.thrift.ProcessFunction<I, asyncCheckpoint_args> {
public asyncCheckpoint() {
super("asyncCheckpoint");
}
public asyncCheckpoint_args getEmptyArgsInstance() {
return new asyncCheckpoint_args();
}
protected boolean isOneway() {
return false;
}
public asyncCheckpoint_result getResult(I iface, asyncCheckpoint_args args) throws org.apache.thrift.TException {
asyncCheckpoint_result result = new asyncCheckpoint_result();
try {
result.success = iface.asyncCheckpoint(args.fileId);
result.setSuccessIsSet(true);
} catch (TachyonException e) {
result.e = e;
}
return result;
}
}
public static class cacheBlock<I extends Iface> extends org.apache.thrift.ProcessFunction<I, cacheBlock_args> {
public cacheBlock() {
super("cacheBlock");
}
public cacheBlock_args getEmptyArgsInstance() {
return new cacheBlock_args();
}
protected boolean isOneway() {
return false;
}
public cacheBlock_result getResult(I iface, cacheBlock_args args) throws org.apache.thrift.TException {
cacheBlock_result result = new cacheBlock_result();
try {
iface.cacheBlock(args.userId, args.blockId);
} catch (FileDoesNotExistException eP) {
result.eP = eP;
} catch (SuspectedFileSizeException eS) {
result.eS = eS;
} catch (BlockInfoException eB) {
result.eB = eB;
}
return result;
}
}
public static class getUserUfsTempFolder<I extends Iface> extends org.apache.thrift.ProcessFunction<I, getUserUfsTempFolder_args> {
public getUserUfsTempFolder() {
super("getUserUfsTempFolder");
}
public getUserUfsTempFolder_args getEmptyArgsInstance() {
return new getUserUfsTempFolder_args();
}
protected boolean isOneway() {
return false;
}
public getUserUfsTempFolder_result getResult(I iface, getUserUfsTempFolder_args args) throws org.apache.thrift.TException {
getUserUfsTempFolder_result result = new getUserUfsTempFolder_result();
result.success = iface.getUserUfsTempFolder(args.userId);
return result;
}
}
public static class lockBlock<I extends Iface> extends org.apache.thrift.ProcessFunction<I, lockBlock_args> {
public lockBlock() {
super("lockBlock");
}
public lockBlock_args getEmptyArgsInstance() {
return new lockBlock_args();
}
protected boolean isOneway() {
return false;
}
public lockBlock_result getResult(I iface, lockBlock_args args) throws org.apache.thrift.TException {
lockBlock_result result = new lockBlock_result();
try {
result.success = iface.lockBlock(args.blockId, args.userId);
} catch (FileDoesNotExistException eP) {
result.eP = eP;
}
return result;
}
}
public static class promoteBlock<I extends Iface> extends org.apache.thrift.ProcessFunction<I, promoteBlock_args> {
public promoteBlock() {
super("promoteBlock");
}
public promoteBlock_args getEmptyArgsInstance() {
return new promoteBlock_args();
}
protected boolean isOneway() {
return false;
}
public promoteBlock_result getResult(I iface, promoteBlock_args args) throws org.apache.thrift.TException {
promoteBlock_result result = new promoteBlock_result();
result.success = iface.promoteBlock(args.userId, args.blockId);
result.setSuccessIsSet(true);
return result;
}
}
public static class cancelBlock<I extends Iface> extends org.apache.thrift.ProcessFunction<I, cancelBlock_args> {
public cancelBlock() {
super("cancelBlock");
}
public cancelBlock_args getEmptyArgsInstance() {
return new cancelBlock_args();
}
protected boolean isOneway() {
return false;
}
public cancelBlock_result getResult(I iface, cancelBlock_args args) throws org.apache.thrift.TException {
cancelBlock_result result = new cancelBlock_result();
iface.cancelBlock(args.userId, args.blockId);
return result;
}
}
public static class getBlockLocation<I extends Iface> extends org.apache.thrift.ProcessFunction<I, getBlockLocation_args> {
public getBlockLocation() {
super("getBlockLocation");
}
public getBlockLocation_args getEmptyArgsInstance() {
return new getBlockLocation_args();
}
protected boolean isOneway() {
return false;
}
public getBlockLocation_result getResult(I iface, getBlockLocation_args args) throws org.apache.thrift.TException {
getBlockLocation_result result = new getBlockLocation_result();
try {
result.success = iface.getBlockLocation(args.userId, args.blockId, args.initialBytes);
} catch (OutOfSpaceException eP) {
result.eP = eP;
} catch (FileAlreadyExistException eS) {
result.eS = eS;
}
return result;
}
}
public static class requestSpace<I extends Iface> extends org.apache.thrift.ProcessFunction<I, requestSpace_args> {
public requestSpace() {
super("requestSpace");
}
public requestSpace_args getEmptyArgsInstance() {
return new requestSpace_args();
}
protected boolean isOneway() {
return false;
}
public requestSpace_result getResult(I iface, requestSpace_args args) throws org.apache.thrift.TException {
requestSpace_result result = new requestSpace_result();
try {
result.success = iface.requestSpace(args.userId, args.blockId, args.requestBytes);
result.setSuccessIsSet(true);
} catch (FileDoesNotExistException eP) {
result.eP = eP;
}
return result;
}
}
public static class unlockBlock<I extends Iface> extends org.apache.thrift.ProcessFunction<I, unlockBlock_args> {
public unlockBlock() {
super("unlockBlock");
}
public unlockBlock_args getEmptyArgsInstance() {
return new unlockBlock_args();
}
protected boolean isOneway() {
return false;
}
public unlockBlock_result getResult(I iface, unlockBlock_args args) throws org.apache.thrift.TException {
unlockBlock_result result = new unlockBlock_result();
result.success = iface.unlockBlock(args.blockId, args.userId);
result.setSuccessIsSet(true);
return result;
}
}
public static class userHeartbeat<I extends Iface> extends org.apache.thrift.ProcessFunction<I, userHeartbeat_args> {
public userHeartbeat() {
super("userHeartbeat");
}
public userHeartbeat_args getEmptyArgsInstance() {
return new userHeartbeat_args();
}
protected boolean isOneway() {
return false;
}
public userHeartbeat_result getResult(I iface, userHeartbeat_args args) throws org.apache.thrift.TException {
userHeartbeat_result result = new userHeartbeat_result();
iface.userHeartbeat(args.userId);
return result;
}
}
}
/**
 * Asynchronous Thrift processor: mirrors {@code Processor} but each RPC is
 * started on the {@link AsyncIface} with a callback, and the response (or the
 * mapped exception) is written back to the frame buffer when the callback
 * fires. On any failure writing the response the frame buffer is closed.
 *
 * NOTE(review): generated-code pattern — regenerate from the IDL rather than
 * editing by hand.
 */
public static class AsyncProcessor<I extends AsyncIface> extends org.apache.thrift.TBaseAsyncProcessor<I> {
private static final Logger LOGGER = LoggerFactory.getLogger(AsyncProcessor.class.getName());
public AsyncProcessor(I iface) {
super(iface, getProcessMap(new HashMap<String, org.apache.thrift.AsyncProcessFunction<I, ? extends org.apache.thrift.TBase, ?>>()));
}
protected AsyncProcessor(I iface, Map<String, org.apache.thrift.AsyncProcessFunction<I, ? extends org.apache.thrift.TBase, ?>> processMap) {
super(iface, getProcessMap(processMap));
}
// Registers one AsyncProcessFunction per RPC; keys match wire-level method names.
private static <I extends AsyncIface> Map<String, org.apache.thrift.AsyncProcessFunction<I, ? extends org.apache.thrift.TBase,?>> getProcessMap(Map<String, org.apache.thrift.AsyncProcessFunction<I, ? extends org.apache.thrift.TBase, ?>> processMap) {
processMap.put("accessBlock", new accessBlock());
processMap.put("addCheckpoint", new addCheckpoint());
processMap.put("asyncCheckpoint", new asyncCheckpoint());
processMap.put("cacheBlock", new cacheBlock());
processMap.put("getUserUfsTempFolder", new getUserUfsTempFolder());
processMap.put("lockBlock", new lockBlock());
processMap.put("promoteBlock", new promoteBlock());
processMap.put("cancelBlock", new cancelBlock());
processMap.put("getBlockLocation", new getBlockLocation());
processMap.put("requestSpace", new requestSpace());
processMap.put("unlockBlock", new unlockBlock());
processMap.put("userHeartbeat", new userHeartbeat());
return processMap;
}
// Async "accessBlock": void result; every error becomes a TApplicationException
// (the method declares no typed exceptions).
public static class accessBlock<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, accessBlock_args, Void> {
public accessBlock() {
super("accessBlock");
}
public accessBlock_args getEmptyArgsInstance() {
return new accessBlock_args();
}
public AsyncMethodCallback<Void> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
final org.apache.thrift.AsyncProcessFunction fcall = this;
return new AsyncMethodCallback<Void>() {
public void onComplete(Void o) {
accessBlock_result result = new accessBlock_result();
try {
fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
return;
} catch (Exception e) {
LOGGER.error("Exception writing to internal frame buffer", e);
}
fb.close();
}
public void onError(Exception e) {
byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
org.apache.thrift.TBase msg;
accessBlock_result result = new accessBlock_result();
{
msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
}
try {
fcall.sendResponse(fb,msg,msgType,seqid);
return;
} catch (Exception ex) {
LOGGER.error("Exception writing to internal frame buffer", ex);
}
fb.close();
}
};
}
protected boolean isOneway() {
return false;
}
public void start(I iface, accessBlock_args args, org.apache.thrift.async.AsyncMethodCallback<Void> resultHandler) throws TException {
iface.accessBlock(args.blockId,resultHandler);
}
}
// Async "addCheckpoint": declared exceptions are packed into the result struct
// as a normal REPLY; anything else becomes a TApplicationException.
public static class addCheckpoint<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, addCheckpoint_args, Void> {
public addCheckpoint() {
super("addCheckpoint");
}
public addCheckpoint_args getEmptyArgsInstance() {
return new addCheckpoint_args();
}
public AsyncMethodCallback<Void> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
final org.apache.thrift.AsyncProcessFunction fcall = this;
return new AsyncMethodCallback<Void>() {
public void onComplete(Void o) {
addCheckpoint_result result = new addCheckpoint_result();
try {
fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
return;
} catch (Exception e) {
LOGGER.error("Exception writing to internal frame buffer", e);
}
fb.close();
}
public void onError(Exception e) {
byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
org.apache.thrift.TBase msg;
addCheckpoint_result result = new addCheckpoint_result();
if (e instanceof FileDoesNotExistException) {
result.eP = (FileDoesNotExistException) e;
result.setEPIsSet(true);
msg = result;
}
else if (e instanceof SuspectedFileSizeException) {
result.eS = (SuspectedFileSizeException) e;
result.setESIsSet(true);
msg = result;
}
else if (e instanceof FailedToCheckpointException) {
result.eF = (FailedToCheckpointException) e;
result.setEFIsSet(true);
msg = result;
}
else if (e instanceof BlockInfoException) {
result.eB = (BlockInfoException) e;
result.setEBIsSet(true);
msg = result;
}
else
{
msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
}
try {
fcall.sendResponse(fb,msg,msgType,seqid);
return;
} catch (Exception ex) {
LOGGER.error("Exception writing to internal frame buffer", ex);
}
fb.close();
}
};
}
protected boolean isOneway() {
return false;
}
public void start(I iface, addCheckpoint_args args, org.apache.thrift.async.AsyncMethodCallback<Void> resultHandler) throws TException {
iface.addCheckpoint(args.userId, args.fileId,resultHandler);
}
}
// Async "asyncCheckpoint": boolean result; TachyonException relayed as typed error.
public static class asyncCheckpoint<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, asyncCheckpoint_args, Boolean> {
public asyncCheckpoint() {
super("asyncCheckpoint");
}
public asyncCheckpoint_args getEmptyArgsInstance() {
return new asyncCheckpoint_args();
}
public AsyncMethodCallback<Boolean> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
final org.apache.thrift.AsyncProcessFunction fcall = this;
return new AsyncMethodCallback<Boolean>() {
public void onComplete(Boolean o) {
asyncCheckpoint_result result = new asyncCheckpoint_result();
result.success = o;
result.setSuccessIsSet(true);
try {
fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
return;
} catch (Exception e) {
LOGGER.error("Exception writing to internal frame buffer", e);
}
fb.close();
}
public void onError(Exception e) {
byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
org.apache.thrift.TBase msg;
asyncCheckpoint_result result = new asyncCheckpoint_result();
if (e instanceof TachyonException) {
result.e = (TachyonException) e;
result.setEIsSet(true);
msg = result;
}
else
{
msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
}
try {
fcall.sendResponse(fb,msg,msgType,seqid);
return;
} catch (Exception ex) {
LOGGER.error("Exception writing to internal frame buffer", ex);
}
fb.close();
}
};
}
protected boolean isOneway() {
return false;
}
public void start(I iface, asyncCheckpoint_args args, org.apache.thrift.async.AsyncMethodCallback<Boolean> resultHandler) throws TException {
iface.asyncCheckpoint(args.fileId,resultHandler);
}
}
// Async "cacheBlock": void result; three declared exception types relayed.
public static class cacheBlock<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, cacheBlock_args, Void> {
public cacheBlock() {
super("cacheBlock");
}
public cacheBlock_args getEmptyArgsInstance() {
return new cacheBlock_args();
}
public AsyncMethodCallback<Void> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
final org.apache.thrift.AsyncProcessFunction fcall = this;
return new AsyncMethodCallback<Void>() {
public void onComplete(Void o) {
cacheBlock_result result = new cacheBlock_result();
try {
fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
return;
} catch (Exception e) {
LOGGER.error("Exception writing to internal frame buffer", e);
}
fb.close();
}
public void onError(Exception e) {
byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
org.apache.thrift.TBase msg;
cacheBlock_result result = new cacheBlock_result();
if (e instanceof FileDoesNotExistException) {
result.eP = (FileDoesNotExistException) e;
result.setEPIsSet(true);
msg = result;
}
else if (e instanceof SuspectedFileSizeException) {
result.eS = (SuspectedFileSizeException) e;
result.setESIsSet(true);
msg = result;
}
else if (e instanceof BlockInfoException) {
result.eB = (BlockInfoException) e;
result.setEBIsSet(true);
msg = result;
}
else
{
msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
}
try {
fcall.sendResponse(fb,msg,msgType,seqid);
return;
} catch (Exception ex) {
LOGGER.error("Exception writing to internal frame buffer", ex);
}
fb.close();
}
};
}
protected boolean isOneway() {
return false;
}
public void start(I iface, cacheBlock_args args, org.apache.thrift.async.AsyncMethodCallback<Void> resultHandler) throws TException {
iface.cacheBlock(args.userId, args.blockId,resultHandler);
}
}
// Async "getUserUfsTempFolder": String result, no declared exceptions.
public static class getUserUfsTempFolder<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, getUserUfsTempFolder_args, String> {
public getUserUfsTempFolder() {
super("getUserUfsTempFolder");
}
public getUserUfsTempFolder_args getEmptyArgsInstance() {
return new getUserUfsTempFolder_args();
}
public AsyncMethodCallback<String> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
final org.apache.thrift.AsyncProcessFunction fcall = this;
return new AsyncMethodCallback<String>() {
public void onComplete(String o) {
getUserUfsTempFolder_result result = new getUserUfsTempFolder_result();
result.success = o;
try {
fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
return;
} catch (Exception e) {
LOGGER.error("Exception writing to internal frame buffer", e);
}
fb.close();
}
public void onError(Exception e) {
byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
org.apache.thrift.TBase msg;
getUserUfsTempFolder_result result = new getUserUfsTempFolder_result();
{
msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
}
try {
fcall.sendResponse(fb,msg,msgType,seqid);
return;
} catch (Exception ex) {
LOGGER.error("Exception writing to internal frame buffer", ex);
}
fb.close();
}
};
}
protected boolean isOneway() {
return false;
}
public void start(I iface, getUserUfsTempFolder_args args, org.apache.thrift.async.AsyncMethodCallback<String> resultHandler) throws TException {
iface.getUserUfsTempFolder(args.userId,resultHandler);
}
}
// Async "lockBlock": String result; FileDoesNotExistException relayed.
public static class lockBlock<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, lockBlock_args, String> {
public lockBlock() {
super("lockBlock");
}
public lockBlock_args getEmptyArgsInstance() {
return new lockBlock_args();
}
public AsyncMethodCallback<String> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
final org.apache.thrift.AsyncProcessFunction fcall = this;
return new AsyncMethodCallback<String>() {
public void onComplete(String o) {
lockBlock_result result = new lockBlock_result();
result.success = o;
try {
fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
return;
} catch (Exception e) {
LOGGER.error("Exception writing to internal frame buffer", e);
}
fb.close();
}
public void onError(Exception e) {
byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
org.apache.thrift.TBase msg;
lockBlock_result result = new lockBlock_result();
if (e instanceof FileDoesNotExistException) {
result.eP = (FileDoesNotExistException) e;
result.setEPIsSet(true);
msg = result;
}
else
{
msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
}
try {
fcall.sendResponse(fb,msg,msgType,seqid);
return;
} catch (Exception ex) {
LOGGER.error("Exception writing to internal frame buffer", ex);
}
fb.close();
}
};
}
protected boolean isOneway() {
return false;
}
public void start(I iface, lockBlock_args args, org.apache.thrift.async.AsyncMethodCallback<String> resultHandler) throws TException {
iface.lockBlock(args.blockId, args.userId,resultHandler);
}
}
// Async "promoteBlock": boolean result, no declared exceptions.
public static class promoteBlock<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, promoteBlock_args, Boolean> {
public promoteBlock() {
super("promoteBlock");
}
public promoteBlock_args getEmptyArgsInstance() {
return new promoteBlock_args();
}
public AsyncMethodCallback<Boolean> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
final org.apache.thrift.AsyncProcessFunction fcall = this;
return new AsyncMethodCallback<Boolean>() {
public void onComplete(Boolean o) {
promoteBlock_result result = new promoteBlock_result();
result.success = o;
result.setSuccessIsSet(true);
try {
fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
return;
} catch (Exception e) {
LOGGER.error("Exception writing to internal frame buffer", e);
}
fb.close();
}
public void onError(Exception e) {
byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
org.apache.thrift.TBase msg;
promoteBlock_result result = new promoteBlock_result();
{
msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
}
try {
fcall.sendResponse(fb,msg,msgType,seqid);
return;
} catch (Exception ex) {
LOGGER.error("Exception writing to internal frame buffer", ex);
}
fb.close();
}
};
}
protected boolean isOneway() {
return false;
}
public void start(I iface, promoteBlock_args args, org.apache.thrift.async.AsyncMethodCallback<Boolean> resultHandler) throws TException {
iface.promoteBlock(args.userId, args.blockId,resultHandler);
}
}
// Async "cancelBlock": void result, no declared exceptions.
public static class cancelBlock<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, cancelBlock_args, Void> {
public cancelBlock() {
super("cancelBlock");
}
public cancelBlock_args getEmptyArgsInstance() {
return new cancelBlock_args();
}
public AsyncMethodCallback<Void> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
final org.apache.thrift.AsyncProcessFunction fcall = this;
return new AsyncMethodCallback<Void>() {
public void onComplete(Void o) {
cancelBlock_result result = new cancelBlock_result();
try {
fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
return;
} catch (Exception e) {
LOGGER.error("Exception writing to internal frame buffer", e);
}
fb.close();
}
public void onError(Exception e) {
byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
org.apache.thrift.TBase msg;
cancelBlock_result result = new cancelBlock_result();
{
msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
}
try {
fcall.sendResponse(fb,msg,msgType,seqid);
return;
} catch (Exception ex) {
LOGGER.error("Exception writing to internal frame buffer", ex);
}
fb.close();
}
};
}
protected boolean isOneway() {
return false;
}
public void start(I iface, cancelBlock_args args, org.apache.thrift.async.AsyncMethodCallback<Void> resultHandler) throws TException {
iface.cancelBlock(args.userId, args.blockId,resultHandler);
}
}
// Async "getBlockLocation": String result; OutOfSpaceException / FileAlreadyExistException relayed.
public static class getBlockLocation<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, getBlockLocation_args, String> {
public getBlockLocation() {
super("getBlockLocation");
}
public getBlockLocation_args getEmptyArgsInstance() {
return new getBlockLocation_args();
}
public AsyncMethodCallback<String> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
final org.apache.thrift.AsyncProcessFunction fcall = this;
return new AsyncMethodCallback<String>() {
public void onComplete(String o) {
getBlockLocation_result result = new getBlockLocation_result();
result.success = o;
try {
fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
return;
} catch (Exception e) {
LOGGER.error("Exception writing to internal frame buffer", e);
}
fb.close();
}
public void onError(Exception e) {
byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
org.apache.thrift.TBase msg;
getBlockLocation_result result = new getBlockLocation_result();
if (e instanceof OutOfSpaceException) {
result.eP = (OutOfSpaceException) e;
result.setEPIsSet(true);
msg = result;
}
else if (e instanceof FileAlreadyExistException) {
result.eS = (FileAlreadyExistException) e;
result.setESIsSet(true);
msg = result;
}
else
{
msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
}
try {
fcall.sendResponse(fb,msg,msgType,seqid);
return;
} catch (Exception ex) {
LOGGER.error("Exception writing to internal frame buffer", ex);
}
fb.close();
}
};
}
protected boolean isOneway() {
return false;
}
public void start(I iface, getBlockLocation_args args, org.apache.thrift.async.AsyncMethodCallback<String> resultHandler) throws TException {
iface.getBlockLocation(args.userId, args.blockId, args.initialBytes,resultHandler);
}
}
// Async "requestSpace": boolean result; FileDoesNotExistException relayed.
public static class requestSpace<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, requestSpace_args, Boolean> {
public requestSpace() {
super("requestSpace");
}
public requestSpace_args getEmptyArgsInstance() {
return new requestSpace_args();
}
public AsyncMethodCallback<Boolean> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
final org.apache.thrift.AsyncProcessFunction fcall = this;
return new AsyncMethodCallback<Boolean>() {
public void onComplete(Boolean o) {
requestSpace_result result = new requestSpace_result();
result.success = o;
result.setSuccessIsSet(true);
try {
fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
return;
} catch (Exception e) {
LOGGER.error("Exception writing to internal frame buffer", e);
}
fb.close();
}
public void onError(Exception e) {
byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
org.apache.thrift.TBase msg;
requestSpace_result result = new requestSpace_result();
if (e instanceof FileDoesNotExistException) {
result.eP = (FileDoesNotExistException) e;
result.setEPIsSet(true);
msg = result;
}
else
{
msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
}
try {
fcall.sendResponse(fb,msg,msgType,seqid);
return;
} catch (Exception ex) {
LOGGER.error("Exception writing to internal frame buffer", ex);
}
fb.close();
}
};
}
protected boolean isOneway() {
return false;
}
public void start(I iface, requestSpace_args args, org.apache.thrift.async.AsyncMethodCallback<Boolean> resultHandler) throws TException {
iface.requestSpace(args.userId, args.blockId, args.requestBytes,resultHandler);
}
}
// Async "unlockBlock": boolean result, no declared exceptions.
public static class unlockBlock<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, unlockBlock_args, Boolean> {
public unlockBlock() {
super("unlockBlock");
}
public unlockBlock_args getEmptyArgsInstance() {
return new unlockBlock_args();
}
public AsyncMethodCallback<Boolean> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
final org.apache.thrift.AsyncProcessFunction fcall = this;
return new AsyncMethodCallback<Boolean>() {
public void onComplete(Boolean o) {
unlockBlock_result result = new unlockBlock_result();
result.success = o;
result.setSuccessIsSet(true);
try {
fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
return;
} catch (Exception e) {
LOGGER.error("Exception writing to internal frame buffer", e);
}
fb.close();
}
public void onError(Exception e) {
byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
org.apache.thrift.TBase msg;
unlockBlock_result result = new unlockBlock_result();
{
msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
}
try {
fcall.sendResponse(fb,msg,msgType,seqid);
return;
} catch (Exception ex) {
LOGGER.error("Exception writing to internal frame buffer", ex);
}
fb.close();
}
};
}
protected boolean isOneway() {
return false;
}
public void start(I iface, unlockBlock_args args, org.apache.thrift.async.AsyncMethodCallback<Boolean> resultHandler) throws TException {
iface.unlockBlock(args.blockId, args.userId,resultHandler);
}
}
// Async "userHeartbeat": void result, no declared exceptions.
public static class userHeartbeat<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, userHeartbeat_args, Void> {
public userHeartbeat() {
super("userHeartbeat");
}
public userHeartbeat_args getEmptyArgsInstance() {
return new userHeartbeat_args();
}
public AsyncMethodCallback<Void> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
final org.apache.thrift.AsyncProcessFunction fcall = this;
return new AsyncMethodCallback<Void>() {
public void onComplete(Void o) {
userHeartbeat_result result = new userHeartbeat_result();
try {
fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
return;
} catch (Exception e) {
LOGGER.error("Exception writing to internal frame buffer", e);
}
fb.close();
}
public void onError(Exception e) {
byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
org.apache.thrift.TBase msg;
userHeartbeat_result result = new userHeartbeat_result();
{
msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
}
try {
fcall.sendResponse(fb,msg,msgType,seqid);
return;
} catch (Exception ex) {
LOGGER.error("Exception writing to internal frame buffer", ex);
}
fb.close();
}
};
}
protected boolean isOneway() {
return false;
}
public void start(I iface, userHeartbeat_args args, org.apache.thrift.async.AsyncMethodCallback<Void> resultHandler) throws TException {
iface.userHeartbeat(args.userId,resultHandler);
}
}
}
/**
 * Thrift argument struct for the "accessBlock" RPC. Carries the single
 * required field {@code blockId} (i64, field id 1). Serialization is handled
 * by the standard/tuple scheme factories registered below.
 */
public static class accessBlock_args implements org.apache.thrift.TBase<accessBlock_args, accessBlock_args._Fields>, java.io.Serializable, Cloneable, Comparable<accessBlock_args> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("accessBlock_args");
private static final org.apache.thrift.protocol.TField BLOCK_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("blockId", org.apache.thrift.protocol.TType.I64, (short)1);
// Maps each serialization scheme to its factory; the protocol picks one at runtime.
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
static {
schemes.put(StandardScheme.class, new accessBlock_argsStandardSchemeFactory());
schemes.put(TupleScheme.class, new accessBlock_argsTupleSchemeFactory());
}
public long blockId; // required
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
BLOCK_ID((short)1, "blockId");
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
static {
for (_Fields field : EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
 * Find the _Fields constant that matches fieldId, or null if its not found.
 */
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
case 1: // BLOCK_ID
return BLOCK_ID;
default:
return null;
}
}
/**
 * Find the _Fields constant that matches fieldId, throwing an exception
 * if it is not found.
 */
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
 * Find the _Fields constant that matches name, or null if its not found.
 */
public static _Fields findByName(String name) {
return byName.get(name);
}
private final short _thriftId;
private final String _fieldName;
_Fields(short thriftId, String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public String getFieldName() {
return _fieldName;
}
}
// isset id assignments
// Bit position in __isset_bitfield that records whether blockId was assigned
// (needed because a primitive long cannot be null).
private static final int __BLOCKID_ISSET_ID = 0;
private byte __isset_bitfield = 0;
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.BLOCK_ID, new org.apache.thrift.meta_data.FieldMetaData("blockId", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
metaDataMap = Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(accessBlock_args.class, metaDataMap);
}
// No-arg constructor: leaves blockId at 0 with its isset bit clear.
public accessBlock_args() {
}
// Convenience constructor: sets blockId and marks it as assigned.
public accessBlock_args(
long blockId)
{
this();
this.blockId = blockId;
setBlockIdIsSet(true);
}
/**
 * Performs a deep copy on <i>other</i>.
 */
public accessBlock_args(accessBlock_args other) {
__isset_bitfield = other.__isset_bitfield;
this.blockId = other.blockId;
}
public accessBlock_args deepCopy() {
return new accessBlock_args(this);
}
// Resets the struct to its freshly-constructed state.
@Override
public void clear() {
setBlockIdIsSet(false);
this.blockId = 0;
}
public long getBlockId() {
return this.blockId;
}
// Fluent setter: also records that the field has been assigned.
public accessBlock_args setBlockId(long blockId) {
this.blockId = blockId;
setBlockIdIsSet(true);
return this;
}
public void unsetBlockId() {
__isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __BLOCKID_ISSET_ID);
}
/** Returns true if field blockId is set (has been assigned a value) and false otherwise */
public boolean isSetBlockId() {
return EncodingUtils.testBit(__isset_bitfield, __BLOCKID_ISSET_ID);
}
public void setBlockIdIsSet(boolean value) {
__isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __BLOCKID_ISSET_ID, value);
}
    /** Generic, reflective setter used by framework code; a null value unsets the field. */
    public void setFieldValue(_Fields field, Object value) {
      switch (field) {
      case BLOCK_ID:
        if (value == null) {
          unsetBlockId();
        } else {
          setBlockId((Long)value);
        }
        break;
      }
    }
    /** Generic, reflective getter; primitives are returned boxed. */
    public Object getFieldValue(_Fields field) {
      switch (field) {
      case BLOCK_ID:
        return Long.valueOf(getBlockId());
      }
      // Unreachable for valid enum constants; guards against future fields.
      throw new IllegalStateException();
    }
    /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
    public boolean isSet(_Fields field) {
      if (field == null) {
        throw new IllegalArgumentException();
      }
      switch (field) {
      case BLOCK_ID:
        return isSetBlockId();
      }
      throw new IllegalStateException();
    }
@Override
public boolean equals(Object that) {
if (that == null)
return false;
if (that instanceof accessBlock_args)
return this.equals((accessBlock_args)that);
return false;
}
public boolean equals(accessBlock_args that) {
if (that == null)
return false;
boolean this_present_blockId = true;
boolean that_present_blockId = true;
if (this_present_blockId || that_present_blockId) {
if (!(this_present_blockId && that_present_blockId))
return false;
if (this.blockId != that.blockId)
return false;
}
return true;
}
@Override
public int hashCode() {
return 0;
}
    @Override
    public int compareTo(accessBlock_args other) {
      // Order by concrete class name first (stable ordering across struct
      // types), then by whether blockId is set, then by blockId value.
      if (!getClass().equals(other.getClass())) {
        return getClass().getName().compareTo(other.getClass().getName());
      }
      int lastComparison = 0;
      lastComparison = Boolean.valueOf(isSetBlockId()).compareTo(other.isSetBlockId());
      if (lastComparison != 0) {
        return lastComparison;
      }
      if (isSetBlockId()) {
        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.blockId, other.blockId);
        if (lastComparison != 0) {
          return lastComparison;
        }
      }
      return 0;
    }
    public _Fields fieldForId(int fieldId) {
      return _Fields.findByThriftId(fieldId);
    }
    /** Deserializes this struct from the protocol via the registered scheme. */
    public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
      schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
    }
    /** Serializes this struct to the protocol via the registered scheme. */
    public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
      schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
    }
@Override
public String toString() {
StringBuilder sb = new StringBuilder("accessBlock_args(");
boolean first = true;
sb.append("blockId:");
sb.append(this.blockId);
first = false;
sb.append(")");
return sb.toString();
}
public void validate() throws org.apache.thrift.TException {
// check for required fields
// check for sub-struct validity
}
    // Java serialization hooks: delegate to the compact Thrift protocol so
    // the serialized form matches the Thrift wire encoding.
    private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
      try {
        write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
      } catch (org.apache.thrift.TException te) {
        throw new java.io.IOException(te);
      }
    }
    private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
      try {
        // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
        __isset_bitfield = 0;
        read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
      } catch (org.apache.thrift.TException te) {
        throw new java.io.IOException(te);
      }
    }
    private static class accessBlock_argsStandardSchemeFactory implements SchemeFactory {
      public accessBlock_argsStandardScheme getScheme() {
        return new accessBlock_argsStandardScheme();
      }
    }
    // Standard (field-tagged) encoding: each field is written with an id/type
    // header, so readers tolerate unknown or reordered fields by skipping them.
    private static class accessBlock_argsStandardScheme extends StandardScheme<accessBlock_args> {
      public void read(org.apache.thrift.protocol.TProtocol iprot, accessBlock_args struct) throws org.apache.thrift.TException {
        org.apache.thrift.protocol.TField schemeField;
        iprot.readStructBegin();
        while (true)
        {
          schemeField = iprot.readFieldBegin();
          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
            break;
          }
          switch (schemeField.id) {
            case 1: // BLOCK_ID
              if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
                struct.blockId = iprot.readI64();
                struct.setBlockIdIsSet(true);
              } else {
                // Type mismatch: skip the value rather than failing the read.
                org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
              }
              break;
            default:
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          iprot.readFieldEnd();
        }
        iprot.readStructEnd();
        // check for required fields of primitive type, which can't be checked in the validate method
        struct.validate();
      }
      public void write(org.apache.thrift.protocol.TProtocol oprot, accessBlock_args struct) throws org.apache.thrift.TException {
        struct.validate();
        oprot.writeStructBegin(STRUCT_DESC);
        oprot.writeFieldBegin(BLOCK_ID_FIELD_DESC);
        oprot.writeI64(struct.blockId);
        oprot.writeFieldEnd();
        oprot.writeFieldStop();
        oprot.writeStructEnd();
      }
    }
    private static class accessBlock_argsTupleSchemeFactory implements SchemeFactory {
      public accessBlock_argsTupleScheme getScheme() {
        return new accessBlock_argsTupleScheme();
      }
    }
    // Tuple (compact) encoding: a leading bitset says which optional fields
    // follow; field values are then written back-to-back with no per-field tags.
    private static class accessBlock_argsTupleScheme extends TupleScheme<accessBlock_args> {
      @Override
      public void write(org.apache.thrift.protocol.TProtocol prot, accessBlock_args struct) throws org.apache.thrift.TException {
        TTupleProtocol oprot = (TTupleProtocol) prot;
        BitSet optionals = new BitSet();
        if (struct.isSetBlockId()) {
          optionals.set(0);
        }
        oprot.writeBitSet(optionals, 1);
        if (struct.isSetBlockId()) {
          oprot.writeI64(struct.blockId);
        }
      }
      @Override
      public void read(org.apache.thrift.protocol.TProtocol prot, accessBlock_args struct) throws org.apache.thrift.TException {
        TTupleProtocol iprot = (TTupleProtocol) prot;
        BitSet incoming = iprot.readBitSet(1);
        if (incoming.get(0)) {
          struct.blockId = iprot.readI64();
          struct.setBlockIdIsSet(true);
        }
      }
    }
}
  /**
   * Result wrapper for the accessBlock RPC. It carries no fields, which is
   * consistent with a void return and no declared exceptions; the struct
   * exists so the RPC framework has a uniform read/write envelope.
   */
  public static class accessBlock_result implements org.apache.thrift.TBase<accessBlock_result, accessBlock_result._Fields>, java.io.Serializable, Cloneable, Comparable<accessBlock_result>   {
    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("accessBlock_result");
    private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
    static {
      schemes.put(StandardScheme.class, new accessBlock_resultStandardSchemeFactory());
      schemes.put(TupleScheme.class, new accessBlock_resultTupleSchemeFactory());
    }
    /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
    public enum _Fields implements org.apache.thrift.TFieldIdEnum {
      // Intentionally empty: this result struct has no fields.
;
      private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
      static {
        for (_Fields field : EnumSet.allOf(_Fields.class)) {
          byName.put(field.getFieldName(), field);
        }
      }
      /**
       * Find the _Fields constant that matches fieldId, or null if its not found.
       */
      public static _Fields findByThriftId(int fieldId) {
        switch(fieldId) {
          default:
            return null;
        }
      }
      /**
       * Find the _Fields constant that matches fieldId, throwing an exception
       * if it is not found.
       */
      public static _Fields findByThriftIdOrThrow(int fieldId) {
        _Fields fields = findByThriftId(fieldId);
        if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
        return fields;
      }
      /**
       * Find the _Fields constant that matches name, or null if its not found.
       */
      public static _Fields findByName(String name) {
        return byName.get(name);
      }
      private final short _thriftId;
      private final String _fieldName;
      _Fields(short thriftId, String fieldName) {
        _thriftId = thriftId;
        _fieldName = fieldName;
      }
      public short getThriftFieldId() {
        return _thriftId;
      }
      public String getFieldName() {
        return _fieldName;
      }
    }
    public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
    static {
      // No fields, but the (empty) metadata map is still registered for
      // uniform runtime introspection.
      Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
      metaDataMap = Collections.unmodifiableMap(tmpMap);
      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(accessBlock_result.class, metaDataMap);
    }
    public accessBlock_result() {
    }
    /**
     * Performs a deep copy on <i>other</i>.
     */
    public accessBlock_result(accessBlock_result other) {
    }
    public accessBlock_result deepCopy() {
      return new accessBlock_result(this);
    }
    @Override
    public void clear() {
    }
    public void setFieldValue(_Fields field, Object value) {
      switch (field) {
      }
    }
    public Object getFieldValue(_Fields field) {
      switch (field) {
      }
      throw new IllegalStateException();
    }
    /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
    public boolean isSet(_Fields field) {
      if (field == null) {
        throw new IllegalArgumentException();
      }
      switch (field) {
      }
      throw new IllegalStateException();
    }
    @Override
    public boolean equals(Object that) {
      if (that == null)
        return false;
      if (that instanceof accessBlock_result)
        return this.equals((accessBlock_result)that);
      return false;
    }
    public boolean equals(accessBlock_result that) {
      // All non-null instances are equal: there is no field state to compare.
      if (that == null)
        return false;
      return true;
    }
    @Override
    public int hashCode() {
      // Constant hash is correct here: equals() treats all instances as equal,
      // so they must all share one hash value.
      return 0;
    }
    @Override
    public int compareTo(accessBlock_result other) {
      if (!getClass().equals(other.getClass())) {
        return getClass().getName().compareTo(other.getClass().getName());
      }
      int lastComparison = 0;
      return 0;
    }
    public _Fields fieldForId(int fieldId) {
      return _Fields.findByThriftId(fieldId);
    }
    public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
      schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
    }
    public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
      schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
    }
    @Override
    public String toString() {
      StringBuilder sb = new StringBuilder("accessBlock_result(");
      boolean first = true;
      sb.append(")");
      return sb.toString();
    }
    public void validate() throws org.apache.thrift.TException {
      // check for required fields
      // check for sub-struct validity
    }
    private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
      try {
        write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
      } catch (org.apache.thrift.TException te) {
        throw new java.io.IOException(te);
      }
    }
    private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
      try {
        read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
      } catch (org.apache.thrift.TException te) {
        throw new java.io.IOException(te);
      }
    }
    private static class accessBlock_resultStandardSchemeFactory implements SchemeFactory {
      public accessBlock_resultStandardScheme getScheme() {
        return new accessBlock_resultStandardScheme();
      }
    }
    // Standard encoding of the empty result: read skips every field until
    // STOP; write emits just the struct envelope.
    private static class accessBlock_resultStandardScheme extends StandardScheme<accessBlock_result> {
      public void read(org.apache.thrift.protocol.TProtocol iprot, accessBlock_result struct) throws org.apache.thrift.TException {
        org.apache.thrift.protocol.TField schemeField;
        iprot.readStructBegin();
        while (true)
        {
          schemeField = iprot.readFieldBegin();
          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
            break;
          }
          switch (schemeField.id) {
            default:
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          iprot.readFieldEnd();
        }
        iprot.readStructEnd();
        // check for required fields of primitive type, which can't be checked in the validate method
        struct.validate();
      }
      public void write(org.apache.thrift.protocol.TProtocol oprot, accessBlock_result struct) throws org.apache.thrift.TException {
        struct.validate();
        oprot.writeStructBegin(STRUCT_DESC);
        oprot.writeFieldStop();
        oprot.writeStructEnd();
      }
    }
    private static class accessBlock_resultTupleSchemeFactory implements SchemeFactory {
      public accessBlock_resultTupleScheme getScheme() {
        return new accessBlock_resultTupleScheme();
      }
    }
    // Tuple encoding of the empty result is a no-op in both directions.
    private static class accessBlock_resultTupleScheme extends TupleScheme<accessBlock_result> {
      @Override
      public void write(org.apache.thrift.protocol.TProtocol prot, accessBlock_result struct) throws org.apache.thrift.TException {
        TTupleProtocol oprot = (TTupleProtocol) prot;
      }
      @Override
      public void read(org.apache.thrift.protocol.TProtocol prot, accessBlock_result struct) throws org.apache.thrift.TException {
        TTupleProtocol iprot = (TTupleProtocol) prot;
      }
    }
  }
public static class addCheckpoint_args implements org.apache.thrift.TBase<addCheckpoint_args, addCheckpoint_args._Fields>, java.io.Serializable, Cloneable, Comparable<addCheckpoint_args> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("addCheckpoint_args");
private static final org.apache.thrift.protocol.TField USER_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("userId", org.apache.thrift.protocol.TType.I64, (short)1);
private static final org.apache.thrift.protocol.TField FILE_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("fileId", org.apache.thrift.protocol.TType.I32, (short)2);
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
static {
schemes.put(StandardScheme.class, new addCheckpoint_argsStandardSchemeFactory());
schemes.put(TupleScheme.class, new addCheckpoint_argsTupleSchemeFactory());
}
public long userId; // required
public int fileId; // required
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
USER_ID((short)1, "userId"),
FILE_ID((short)2, "fileId");
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
static {
for (_Fields field : EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
* Find the _Fields constant that matches fieldId, or null if its not found.
*/
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
case 1: // USER_ID
return USER_ID;
case 2: // FILE_ID
return FILE_ID;
default:
return null;
}
}
/**
* Find the _Fields constant that matches fieldId, throwing an exception
* if it is not found.
*/
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
* Find the _Fields constant that matches name, or null if its not found.
*/
public static _Fields findByName(String name) {
return byName.get(name);
}
private final short _thriftId;
private final String _fieldName;
_Fields(short thriftId, String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public String getFieldName() {
return _fieldName;
}
}
// isset id assignments
private static final int __USERID_ISSET_ID = 0;
private static final int __FILEID_ISSET_ID = 1;
private byte __isset_bitfield = 0;
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.USER_ID, new org.apache.thrift.meta_data.FieldMetaData("userId", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
tmpMap.put(_Fields.FILE_ID, new org.apache.thrift.meta_data.FieldMetaData("fileId", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
metaDataMap = Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(addCheckpoint_args.class, metaDataMap);
}
public addCheckpoint_args() {
}
public addCheckpoint_args(
long userId,
int fileId)
{
this();
this.userId = userId;
setUserIdIsSet(true);
this.fileId = fileId;
setFileIdIsSet(true);
}
/**
* Performs a deep copy on <i>other</i>.
*/
public addCheckpoint_args(addCheckpoint_args other) {
__isset_bitfield = other.__isset_bitfield;
this.userId = other.userId;
this.fileId = other.fileId;
}
public addCheckpoint_args deepCopy() {
return new addCheckpoint_args(this);
}
@Override
public void clear() {
setUserIdIsSet(false);
this.userId = 0;
setFileIdIsSet(false);
this.fileId = 0;
}
public long getUserId() {
return this.userId;
}
public addCheckpoint_args setUserId(long userId) {
this.userId = userId;
setUserIdIsSet(true);
return this;
}
public void unsetUserId() {
__isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __USERID_ISSET_ID);
}
/** Returns true if field userId is set (has been assigned a value) and false otherwise */
public boolean isSetUserId() {
return EncodingUtils.testBit(__isset_bitfield, __USERID_ISSET_ID);
}
public void setUserIdIsSet(boolean value) {
__isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __USERID_ISSET_ID, value);
}
public int getFileId() {
return this.fileId;
}
public addCheckpoint_args setFileId(int fileId) {
this.fileId = fileId;
setFileIdIsSet(true);
return this;
}
public void unsetFileId() {
__isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __FILEID_ISSET_ID);
}
/** Returns true if field fileId is set (has been assigned a value) and false otherwise */
public boolean isSetFileId() {
return EncodingUtils.testBit(__isset_bitfield, __FILEID_ISSET_ID);
}
public void setFileIdIsSet(boolean value) {
__isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __FILEID_ISSET_ID, value);
}
public void setFieldValue(_Fields field, Object value) {
switch (field) {
case USER_ID:
if (value == null) {
unsetUserId();
} else {
setUserId((Long)value);
}
break;
case FILE_ID:
if (value == null) {
unsetFileId();
} else {
setFileId((Integer)value);
}
break;
}
}
public Object getFieldValue(_Fields field) {
switch (field) {
case USER_ID:
return Long.valueOf(getUserId());
case FILE_ID:
return Integer.valueOf(getFileId());
}
throw new IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new IllegalArgumentException();
}
switch (field) {
case USER_ID:
return isSetUserId();
case FILE_ID:
return isSetFileId();
}
throw new IllegalStateException();
}
@Override
public boolean equals(Object that) {
if (that == null)
return false;
if (that instanceof addCheckpoint_args)
return this.equals((addCheckpoint_args)that);
return false;
}
public boolean equals(addCheckpoint_args that) {
if (that == null)
return false;
boolean this_present_userId = true;
boolean that_present_userId = true;
if (this_present_userId || that_present_userId) {
if (!(this_present_userId && that_present_userId))
return false;
if (this.userId != that.userId)
return false;
}
boolean this_present_fileId = true;
boolean that_present_fileId = true;
if (this_present_fileId || that_present_fileId) {
if (!(this_present_fileId && that_present_fileId))
return false;
if (this.fileId != that.fileId)
return false;
}
return true;
}
@Override
public int hashCode() {
return 0;
}
@Override
public int compareTo(addCheckpoint_args other) {
if (!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
lastComparison = Boolean.valueOf(isSetUserId()).compareTo(other.isSetUserId());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetUserId()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.userId, other.userId);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetFileId()).compareTo(other.isSetFileId());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetFileId()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.fileId, other.fileId);
if (lastComparison != 0) {
return lastComparison;
}
}
return 0;
}
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
}
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder("addCheckpoint_args(");
boolean first = true;
sb.append("userId:");
sb.append(this.userId);
first = false;
if (!first) sb.append(", ");
sb.append("fileId:");
sb.append(this.fileId);
first = false;
sb.append(")");
return sb.toString();
}
public void validate() throws org.apache.thrift.TException {
// check for required fields
// check for sub-struct validity
}
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
try {
// it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
__isset_bitfield = 0;
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private static class addCheckpoint_argsStandardSchemeFactory implements SchemeFactory {
public addCheckpoint_argsStandardScheme getScheme() {
return new addCheckpoint_argsStandardScheme();
}
}
private static class addCheckpoint_argsStandardScheme extends StandardScheme<addCheckpoint_args> {
public void read(org.apache.thrift.protocol.TProtocol iprot, addCheckpoint_args struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true)
{
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
case 1: // USER_ID
if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
struct.userId = iprot.readI64();
struct.setUserIdIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 2: // FILE_ID
if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
struct.fileId = iprot.readI32();
struct.setFileIdIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
// check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
public void write(org.apache.thrift.protocol.TProtocol oprot, addCheckpoint_args struct) throws org.apache.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC);
oprot.writeFieldBegin(USER_ID_FIELD_DESC);
oprot.writeI64(struct.userId);
oprot.writeFieldEnd();
oprot.writeFieldBegin(FILE_ID_FIELD_DESC);
oprot.writeI32(struct.fileId);
oprot.writeFieldEnd();
oprot.writeFieldStop();
oprot.writeStructEnd();
}
}
private static class addCheckpoint_argsTupleSchemeFactory implements SchemeFactory {
public addCheckpoint_argsTupleScheme getScheme() {
return new addCheckpoint_argsTupleScheme();
}
}
private static class addCheckpoint_argsTupleScheme extends TupleScheme<addCheckpoint_args> {
@Override
public void write(org.apache.thrift.protocol.TProtocol prot, addCheckpoint_args struct) throws org.apache.thrift.TException {
TTupleProtocol oprot = (TTupleProtocol) prot;
BitSet optionals = new BitSet();
if (struct.isSetUserId()) {
optionals.set(0);
}
if (struct.isSetFileId()) {
optionals.set(1);
}
oprot.writeBitSet(optionals, 2);
if (struct.isSetUserId()) {
oprot.writeI64(struct.userId);
}
if (struct.isSetFileId()) {
oprot.writeI32(struct.fileId);
}
}
@Override
public void read(org.apache.thrift.protocol.TProtocol prot, addCheckpoint_args struct) throws org.apache.thrift.TException {
TTupleProtocol iprot = (TTupleProtocol) prot;
BitSet incoming = iprot.readBitSet(2);
if (incoming.get(0)) {
struct.userId = iprot.readI64();
struct.setUserIdIsSet(true);
}
if (incoming.get(1)) {
struct.fileId = iprot.readI32();
struct.setFileIdIsSet(true);
}
}
}
}
public static class addCheckpoint_result implements org.apache.thrift.TBase<addCheckpoint_result, addCheckpoint_result._Fields>, java.io.Serializable, Cloneable, Comparable<addCheckpoint_result> {
    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("addCheckpoint_result");
    // Wire descriptors for the four exception slots this result can carry.
    private static final org.apache.thrift.protocol.TField E_P_FIELD_DESC = new org.apache.thrift.protocol.TField("eP", org.apache.thrift.protocol.TType.STRUCT, (short)1);
    private static final org.apache.thrift.protocol.TField E_S_FIELD_DESC = new org.apache.thrift.protocol.TField("eS", org.apache.thrift.protocol.TType.STRUCT, (short)2);
    private static final org.apache.thrift.protocol.TField E_F_FIELD_DESC = new org.apache.thrift.protocol.TField("eF", org.apache.thrift.protocol.TType.STRUCT, (short)3);
    private static final org.apache.thrift.protocol.TField E_B_FIELD_DESC = new org.apache.thrift.protocol.TField("eB", org.apache.thrift.protocol.TType.STRUCT, (short)4);
    private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
    static {
      schemes.put(StandardScheme.class, new addCheckpoint_resultStandardSchemeFactory());
      schemes.put(TupleScheme.class, new addCheckpoint_resultTupleSchemeFactory());
    }
    // At most one of these is non-null on a returned result; null means the
    // corresponding exception was not raised.
    public FileDoesNotExistException eP; // required
    public SuspectedFileSizeException eS; // required
    public FailedToCheckpointException eF; // required
    public BlockInfoException eB; // required
    /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
    public enum _Fields implements org.apache.thrift.TFieldIdEnum {
      E_P((short)1, "eP"),
      E_S((short)2, "eS"),
      E_F((short)3, "eF"),
      E_B((short)4, "eB");
      // Name -> constant lookup, built once at class-load time.
      private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
      static {
        for (_Fields field : EnumSet.allOf(_Fields.class)) {
          byName.put(field.getFieldName(), field);
        }
      }
      /**
       * Find the _Fields constant that matches fieldId, or null if its not found.
       */
      public static _Fields findByThriftId(int fieldId) {
        switch(fieldId) {
          case 1:
            return E_P;
          case 2:
            return E_S;
          case 3:
            return E_F;
          case 4:
            return E_B;
          default:
            return null;
        }
      }
      /**
       * Find the _Fields constant that matches fieldId, throwing an exception
       * if it is not found.
       */
      public static _Fields findByThriftIdOrThrow(int fieldId) {
        _Fields fields = findByThriftId(fieldId);
        if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
        return fields;
      }
      /**
       * Find the _Fields constant that matches name, or null if its not found.
       */
      public static _Fields findByName(String name) {
        return byName.get(name);
      }
      private final short _thriftId;
      private final String _fieldName;
      _Fields(short thriftId, String fieldName) {
        _thriftId = thriftId;
        _fieldName = fieldName;
      }
      public short getThriftFieldId() {
        return _thriftId;
      }
      public String getFieldName() {
        return _fieldName;
      }
    }
// isset id assignments
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.E_P, new org.apache.thrift.meta_data.FieldMetaData("eP", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT)));
tmpMap.put(_Fields.E_S, new org.apache.thrift.meta_data.FieldMetaData("eS", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT)));
tmpMap.put(_Fields.E_F, new org.apache.thrift.meta_data.FieldMetaData("eF", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT)));
tmpMap.put(_Fields.E_B, new org.apache.thrift.meta_data.FieldMetaData("eB", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT)));
metaDataMap = Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(addCheckpoint_result.class, metaDataMap);
}
public addCheckpoint_result() {
}
public addCheckpoint_result(
FileDoesNotExistException eP,
SuspectedFileSizeException eS,
FailedToCheckpointException eF,
BlockInfoException eB)
{
this();
this.eP = eP;
this.eS = eS;
this.eF = eF;
this.eB = eB;
}
/**
* Performs a deep copy on <i>other</i>.
*/
public addCheckpoint_result(addCheckpoint_result other) {
if (other.isSetEP()) {
this.eP = new FileDoesNotExistException(other.eP);
}
if (other.isSetES()) {
this.eS = new SuspectedFileSizeException(other.eS);
}
if (other.isSetEF()) {
this.eF = new FailedToCheckpointException(other.eF);
}
if (other.isSetEB()) {
this.eB = new BlockInfoException(other.eB);
}
}
public addCheckpoint_result deepCopy() {
return new addCheckpoint_result(this);
}
@Override
public void clear() {
this.eP = null;
this.eS = null;
this.eF = null;
this.eB = null;
}
public FileDoesNotExistException getEP() {
return this.eP;
}
public addCheckpoint_result setEP(FileDoesNotExistException eP) {
this.eP = eP;
return this;
}
public void unsetEP() {
this.eP = null;
}
/** Returns true if field eP is set (has been assigned a value) and false otherwise */
public boolean isSetEP() {
return this.eP != null;
}
public void setEPIsSet(boolean value) {
if (!value) {
this.eP = null;
}
}
public SuspectedFileSizeException getES() {
return this.eS;
}
public addCheckpoint_result setES(SuspectedFileSizeException eS) {
this.eS = eS;
return this;
}
public void unsetES() {
this.eS = null;
}
/** Returns true if field eS is set (has been assigned a value) and false otherwise */
public boolean isSetES() {
return this.eS != null;
}
public void setESIsSet(boolean value) {
if (!value) {
this.eS = null;
}
}
public FailedToCheckpointException getEF() {
return this.eF;
}
public addCheckpoint_result setEF(FailedToCheckpointException eF) {
this.eF = eF;
return this;
}
public void unsetEF() {
this.eF = null;
}
/** Returns true if field eF is set (has been assigned a value) and false otherwise */
public boolean isSetEF() {
return this.eF != null;
}
public void setEFIsSet(boolean value) {
if (!value) {
this.eF = null;
}
}
public BlockInfoException getEB() {
return this.eB;
}
public addCheckpoint_result setEB(BlockInfoException eB) {
this.eB = eB;
return this;
}
public void unsetEB() {
this.eB = null;
}
/** Returns true if field eB is set (has been assigned a value) and false otherwise */
public boolean isSetEB() {
return this.eB != null;
}
public void setEBIsSet(boolean value) {
if (!value) {
this.eB = null;
}
}
/**
 * Reflective setter used by Thrift's metadata-driven machinery: assigns the
 * field identified by {@code field}, or clears it when {@code value} is null.
 */
public void setFieldValue(_Fields field, Object value) {
switch (field) {
case E_P:
if (value == null) {
unsetEP();
} else {
setEP((FileDoesNotExistException)value);
}
break;
case E_S:
if (value == null) {
unsetES();
} else {
setES((SuspectedFileSizeException)value);
}
break;
case E_F:
if (value == null) {
unsetEF();
} else {
setEF((FailedToCheckpointException)value);
}
break;
case E_B:
if (value == null) {
unsetEB();
} else {
setEB((BlockInfoException)value);
}
break;
}
}
/** Reflective getter: returns the value of the field identified by {@code field}. */
public Object getFieldValue(_Fields field) {
switch (field) {
case E_P:
return getEP();
case E_S:
return getES();
case E_F:
return getEF();
case E_B:
return getEB();
}
throw new IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new IllegalArgumentException();
}
switch (field) {
case E_P:
return isSetEP();
case E_S:
return isSetES();
case E_F:
return isSetEF();
case E_B:
return isSetEB();
}
throw new IllegalStateException();
}
@Override
public boolean equals(Object that) {
  // Delegate to the typed equals; instanceof is false for null, so no
  // separate null check is needed.
  return (that instanceof addCheckpoint_result) && this.equals((addCheckpoint_result) that);
}
/**
 * Field-by-field equality: for each optional exception field, both sides
 * must agree on set-ness and, when set, on value.  The {@code true &&}
 * present-flag pattern is the generated-code idiom for optional fields.
 */
public boolean equals(addCheckpoint_result that) {
if (that == null)
return false;
boolean this_present_eP = true && this.isSetEP();
boolean that_present_eP = true && that.isSetEP();
if (this_present_eP || that_present_eP) {
if (!(this_present_eP && that_present_eP))
return false;
if (!this.eP.equals(that.eP))
return false;
}
boolean this_present_eS = true && this.isSetES();
boolean that_present_eS = true && that.isSetES();
if (this_present_eS || that_present_eS) {
if (!(this_present_eS && that_present_eS))
return false;
if (!this.eS.equals(that.eS))
return false;
}
boolean this_present_eF = true && this.isSetEF();
boolean that_present_eF = true && that.isSetEF();
if (this_present_eF || that_present_eF) {
if (!(this_present_eF && that_present_eF))
return false;
if (!this.eF.equals(that.eF))
return false;
}
boolean this_present_eB = true && this.isSetEB();
boolean that_present_eB = true && that.isSetEB();
if (this_present_eB || that_present_eB) {
if (!(this_present_eB && that_present_eB))
return false;
if (!this.eB.equals(that.eB))
return false;
}
return true;
}
@Override
public int hashCode() {
  // FIX: the generated stub returned a constant 0, which keeps the
  // equals/hashCode contract but collapses every hash-based collection
  // holding these structs into a linear scan.  Hash exactly the fields
  // equals() compares (the four optional exception slots) so equal
  // instances still hash equally.
  int hashCode = 1;
  hashCode = hashCode * 8191 + ((isSetEP()) ? 131071 : 524287);
  if (isSetEP())
    hashCode = hashCode * 8191 + eP.hashCode();
  hashCode = hashCode * 8191 + ((isSetES()) ? 131071 : 524287);
  if (isSetES())
    hashCode = hashCode * 8191 + eS.hashCode();
  hashCode = hashCode * 8191 + ((isSetEF()) ? 131071 : 524287);
  if (isSetEF())
    hashCode = hashCode * 8191 + eF.hashCode();
  hashCode = hashCode * 8191 + ((isSetEB()) ? 131071 : 524287);
  if (isSetEB())
    hashCode = hashCode * 8191 + eB.hashCode();
  return hashCode;
}
/**
 * Total ordering over result structs: for each field in declaration order,
 * first compare set-ness (unset sorts before set), then the value itself.
 */
@Override
public int compareTo(addCheckpoint_result other) {
if (!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
lastComparison = Boolean.valueOf(isSetEP()).compareTo(other.isSetEP());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetEP()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.eP, other.eP);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetES()).compareTo(other.isSetES());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetES()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.eS, other.eS);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetEF()).compareTo(other.isSetEF());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetEF()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.eF, other.eF);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetEB()).compareTo(other.isSetEB());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetEB()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.eB, other.eB);
if (lastComparison != 0) {
return lastComparison;
}
}
return 0;
}
/** Maps a wire field id to its {@code _Fields} constant (null if unknown). */
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
// (De)serialization is delegated to the scheme registered for the protocol
// in use (standard field-tagged encoding vs. compact tuple encoding).
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
}
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
}
/**
 * Debug rendering of all four exception slots, e.g.
 * {@code addCheckpoint_result(eP:null, eS:..., eF:null, eB:null)}.
 * StringBuilder.append(Object) prints null references as "null", matching
 * the explicit null branches of the generated original.
 */
@Override
public String toString() {
  StringBuilder sb = new StringBuilder("addCheckpoint_result(");
  sb.append("eP:").append(this.eP);
  sb.append(", eS:").append(this.eS);
  sb.append(", eF:").append(this.eF);
  sb.append(", eB:").append(this.eB);
  sb.append(")");
  return sb.toString();
}
/** No required fields and no sub-struct checks for this struct, so a no-op. */
public void validate() throws org.apache.thrift.TException {
// check for required fields
// check for sub-struct validity
}
// Java serialization is bridged onto Thrift's compact protocol so the wire
// format stays consistent with RPC serialization.
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
try {
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private static class addCheckpoint_resultStandardSchemeFactory implements SchemeFactory {
public addCheckpoint_resultStandardScheme getScheme() {
return new addCheckpoint_resultStandardScheme();
}
}
/**
 * Standard (field-tagged) protocol encoding: fields are written with id+type
 * headers, so readers can skip unknown or mistyped fields for forward
 * compatibility.  Field ids: 1=eP, 2=eS, 3=eF, 4=eB.
 */
private static class addCheckpoint_resultStandardScheme extends StandardScheme<addCheckpoint_result> {
public void read(org.apache.thrift.protocol.TProtocol iprot, addCheckpoint_result struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true)
{
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
case 1:
if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
struct.eP = new FileDoesNotExistException();
struct.eP.read(iprot);
struct.setEPIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 2:
if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
struct.eS = new SuspectedFileSizeException();
struct.eS.read(iprot);
struct.setESIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 3:
if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
struct.eF = new FailedToCheckpointException();
struct.eF.read(iprot);
struct.setEFIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 4:
if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
struct.eB = new BlockInfoException();
struct.eB.read(iprot);
struct.setEBIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
// check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
public void write(org.apache.thrift.protocol.TProtocol oprot, addCheckpoint_result struct) throws org.apache.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC);
// Only non-null exception fields are emitted on the wire.
if (struct.eP != null) {
oprot.writeFieldBegin(E_P_FIELD_DESC);
struct.eP.write(oprot);
oprot.writeFieldEnd();
}
if (struct.eS != null) {
oprot.writeFieldBegin(E_S_FIELD_DESC);
struct.eS.write(oprot);
oprot.writeFieldEnd();
}
if (struct.eF != null) {
oprot.writeFieldBegin(E_F_FIELD_DESC);
struct.eF.write(oprot);
oprot.writeFieldEnd();
}
if (struct.eB != null) {
oprot.writeFieldBegin(E_B_FIELD_DESC);
struct.eB.write(oprot);
oprot.writeFieldEnd();
}
oprot.writeFieldStop();
oprot.writeStructEnd();
}
}
private static class addCheckpoint_resultTupleSchemeFactory implements SchemeFactory {
public addCheckpoint_resultTupleScheme getScheme() {
return new addCheckpoint_resultTupleScheme();
}
}
/**
 * Tuple protocol encoding: a leading 4-bit presence bitset (bit 0=eP, 1=eS,
 * 2=eF, 3=eB) followed by the set fields in declaration order, with no
 * per-field headers.  Reader and writer must agree exactly on this order.
 */
private static class addCheckpoint_resultTupleScheme extends TupleScheme<addCheckpoint_result> {
@Override
public void write(org.apache.thrift.protocol.TProtocol prot, addCheckpoint_result struct) throws org.apache.thrift.TException {
TTupleProtocol oprot = (TTupleProtocol) prot;
BitSet optionals = new BitSet();
if (struct.isSetEP()) {
optionals.set(0);
}
if (struct.isSetES()) {
optionals.set(1);
}
if (struct.isSetEF()) {
optionals.set(2);
}
if (struct.isSetEB()) {
optionals.set(3);
}
oprot.writeBitSet(optionals, 4);
if (struct.isSetEP()) {
struct.eP.write(oprot);
}
if (struct.isSetES()) {
struct.eS.write(oprot);
}
if (struct.isSetEF()) {
struct.eF.write(oprot);
}
if (struct.isSetEB()) {
struct.eB.write(oprot);
}
}
@Override
public void read(org.apache.thrift.protocol.TProtocol prot, addCheckpoint_result struct) throws org.apache.thrift.TException {
TTupleProtocol iprot = (TTupleProtocol) prot;
BitSet incoming = iprot.readBitSet(4);
if (incoming.get(0)) {
struct.eP = new FileDoesNotExistException();
struct.eP.read(iprot);
struct.setEPIsSet(true);
}
if (incoming.get(1)) {
struct.eS = new SuspectedFileSizeException();
struct.eS.read(iprot);
struct.setESIsSet(true);
}
if (incoming.get(2)) {
struct.eF = new FailedToCheckpointException();
struct.eF.read(iprot);
struct.setEFIsSet(true);
}
if (incoming.get(3)) {
struct.eB = new BlockInfoException();
struct.eB.read(iprot);
struct.setEBIsSet(true);
}
}
}
}
/**
 * Thrift-generated argument struct for the asyncCheckpoint RPC.  Wraps the
 * single required i32 parameter {@code fileId}.  Equality, ordering,
 * serialization, and both protocol schemes all operate on that one field.
 * NOTE(review): hand-edited only to replace the degenerate hashCode(); keep
 * that change if this file is ever regenerated.
 */
public static class asyncCheckpoint_args implements org.apache.thrift.TBase<asyncCheckpoint_args, asyncCheckpoint_args._Fields>, java.io.Serializable, Cloneable, Comparable<asyncCheckpoint_args> {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("asyncCheckpoint_args");
  private static final org.apache.thrift.protocol.TField FILE_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("fileId", org.apache.thrift.protocol.TType.I32, (short)1);
  // Protocol-scheme registry: standard (field-tagged) vs. tuple (bitset) encodings.
  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new asyncCheckpoint_argsStandardSchemeFactory());
    schemes.put(TupleScheme.class, new asyncCheckpoint_argsTupleSchemeFactory());
  }
  public int fileId; // required
  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    FILE_ID((short)1, "fileId");
    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
    static {
      for (_Fields field : EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 1: // FILE_ID
          return FILE_ID;
        default:
          return null;
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }
    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    public static _Fields findByName(String name) {
      return byName.get(name);
    }
    private final short _thriftId;
    private final String _fieldName;
    _Fields(short thriftId, String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }
    public short getThriftFieldId() {
      return _thriftId;
    }
    public String getFieldName() {
      return _fieldName;
    }
  }
  // isset id assignments: primitive fields track set-ness via a bitfield.
  private static final int __FILEID_ISSET_ID = 0;
  private byte __isset_bitfield = 0;
  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.FILE_ID, new org.apache.thrift.meta_data.FieldMetaData("fileId", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
    metaDataMap = Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(asyncCheckpoint_args.class, metaDataMap);
  }
  public asyncCheckpoint_args() {
  }
  public asyncCheckpoint_args(
    int fileId)
  {
    this();
    this.fileId = fileId;
    setFileIdIsSet(true);
  }
  /**
   * Performs a deep copy on <i>other</i>.
   */
  public asyncCheckpoint_args(asyncCheckpoint_args other) {
    __isset_bitfield = other.__isset_bitfield;
    this.fileId = other.fileId;
  }
  public asyncCheckpoint_args deepCopy() {
    return new asyncCheckpoint_args(this);
  }
  @Override
  public void clear() {
    setFileIdIsSet(false);
    this.fileId = 0;
  }
  public int getFileId() {
    return this.fileId;
  }
  /** Sets fileId, marks it set, and returns this for call chaining. */
  public asyncCheckpoint_args setFileId(int fileId) {
    this.fileId = fileId;
    setFileIdIsSet(true);
    return this;
  }
  public void unsetFileId() {
    __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __FILEID_ISSET_ID);
  }
  /** Returns true if field fileId is set (has been assigned a value) and false otherwise */
  public boolean isSetFileId() {
    return EncodingUtils.testBit(__isset_bitfield, __FILEID_ISSET_ID);
  }
  public void setFileIdIsSet(boolean value) {
    __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __FILEID_ISSET_ID, value);
  }
  /** Reflective setter: assigns (or, for null, clears) the identified field. */
  public void setFieldValue(_Fields field, Object value) {
    switch (field) {
    case FILE_ID:
      if (value == null) {
        unsetFileId();
      } else {
        setFileId((Integer)value);
      }
      break;
    }
  }
  public Object getFieldValue(_Fields field) {
    switch (field) {
    case FILE_ID:
      return Integer.valueOf(getFileId());
    }
    throw new IllegalStateException();
  }
  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new IllegalArgumentException();
    }
    switch (field) {
    case FILE_ID:
      return isSetFileId();
    }
    throw new IllegalStateException();
  }
  @Override
  public boolean equals(Object that) {
    if (that == null)
      return false;
    if (that instanceof asyncCheckpoint_args)
      return this.equals((asyncCheckpoint_args)that);
    return false;
  }
  public boolean equals(asyncCheckpoint_args that) {
    if (that == null)
      return false;
    boolean this_present_fileId = true;
    boolean that_present_fileId = true;
    if (this_present_fileId || that_present_fileId) {
      if (!(this_present_fileId && that_present_fileId))
        return false;
      if (this.fileId != that.fileId)
        return false;
    }
    return true;
  }
  @Override
  public int hashCode() {
    // FIX: the generated stub returned a constant 0, collapsing hash-based
    // collections to linear scans.  Hash the one field equals() compares so
    // equal instances still hash equally.
    int hashCode = 1;
    hashCode = hashCode * 8191 + fileId;
    return hashCode;
  }
  /** Orders by fileId set-flag first, then by fileId value. */
  @Override
  public int compareTo(asyncCheckpoint_args other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }
    int lastComparison = 0;
    lastComparison = Boolean.valueOf(isSetFileId()).compareTo(other.isSetFileId());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetFileId()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.fileId, other.fileId);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }
  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }
  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
  }
  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
  }
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("asyncCheckpoint_args(");
    boolean first = true;
    sb.append("fileId:");
    sb.append(this.fileId);
    first = false;
    sb.append(")");
    return sb.toString();
  }
  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    // check for sub-struct validity
  }
  // Java serialization is bridged onto Thrift's compact protocol.
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
    try {
      // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
      __isset_bitfield = 0;
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  private static class asyncCheckpoint_argsStandardSchemeFactory implements SchemeFactory {
    public asyncCheckpoint_argsStandardScheme getScheme() {
      return new asyncCheckpoint_argsStandardScheme();
    }
  }
  /** Standard field-tagged encoding; unknown fields are skipped on read. */
  private static class asyncCheckpoint_argsStandardScheme extends StandardScheme<asyncCheckpoint_args> {
    public void read(org.apache.thrift.protocol.TProtocol iprot, asyncCheckpoint_args struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
          case 1: // FILE_ID
            if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
              struct.fileId = iprot.readI32();
              struct.setFileIdIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();
      // check for required fields of primitive type, which can't be checked in the validate method
      struct.validate();
    }
    public void write(org.apache.thrift.protocol.TProtocol oprot, asyncCheckpoint_args struct) throws org.apache.thrift.TException {
      struct.validate();
      oprot.writeStructBegin(STRUCT_DESC);
      oprot.writeFieldBegin(FILE_ID_FIELD_DESC);
      oprot.writeI32(struct.fileId);
      oprot.writeFieldEnd();
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }
  }
  private static class asyncCheckpoint_argsTupleSchemeFactory implements SchemeFactory {
    public asyncCheckpoint_argsTupleScheme getScheme() {
      return new asyncCheckpoint_argsTupleScheme();
    }
  }
  /** Tuple encoding: a 1-bit presence bitset (bit 0 = fileId) then the value. */
  private static class asyncCheckpoint_argsTupleScheme extends TupleScheme<asyncCheckpoint_args> {
    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, asyncCheckpoint_args struct) throws org.apache.thrift.TException {
      TTupleProtocol oprot = (TTupleProtocol) prot;
      BitSet optionals = new BitSet();
      if (struct.isSetFileId()) {
        optionals.set(0);
      }
      oprot.writeBitSet(optionals, 1);
      if (struct.isSetFileId()) {
        oprot.writeI32(struct.fileId);
      }
    }
    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, asyncCheckpoint_args struct) throws org.apache.thrift.TException {
      TTupleProtocol iprot = (TTupleProtocol) prot;
      BitSet incoming = iprot.readBitSet(1);
      if (incoming.get(0)) {
        struct.fileId = iprot.readI32();
        struct.setFileIdIsSet(true);
      }
    }
  }
}
/**
 * Thrift-generated result struct for the asyncCheckpoint RPC: a required
 * bool {@code success} (field id 0) plus an optional {@code TachyonException e}
 * (field id 1) raised by the server.
 * NOTE(review): hand-edited only to replace the degenerate hashCode(); keep
 * that change if this file is ever regenerated.
 */
public static class asyncCheckpoint_result implements org.apache.thrift.TBase<asyncCheckpoint_result, asyncCheckpoint_result._Fields>, java.io.Serializable, Cloneable, Comparable<asyncCheckpoint_result> {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("asyncCheckpoint_result");
  private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.BOOL, (short)0);
  private static final org.apache.thrift.protocol.TField E_FIELD_DESC = new org.apache.thrift.protocol.TField("e", org.apache.thrift.protocol.TType.STRUCT, (short)1);
  // Protocol-scheme registry: standard (field-tagged) vs. tuple (bitset) encodings.
  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new asyncCheckpoint_resultStandardSchemeFactory());
    schemes.put(TupleScheme.class, new asyncCheckpoint_resultTupleSchemeFactory());
  }
  public boolean success; // required
  public TachyonException e; // required
  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    SUCCESS((short)0, "success"),
    E((short)1, "e");
    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
    static {
      for (_Fields field : EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 0: // SUCCESS
          return SUCCESS;
        case 1:
          return E;
        default:
          return null;
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }
    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    public static _Fields findByName(String name) {
      return byName.get(name);
    }
    private final short _thriftId;
    private final String _fieldName;
    _Fields(short thriftId, String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }
    public short getThriftFieldId() {
      return _thriftId;
    }
    public String getFieldName() {
      return _fieldName;
    }
  }
  // isset id assignments: the primitive success flag tracks set-ness via a bitfield.
  private static final int __SUCCESS_ISSET_ID = 0;
  private byte __isset_bitfield = 0;
  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL)));
    tmpMap.put(_Fields.E, new org.apache.thrift.meta_data.FieldMetaData("e", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT)));
    metaDataMap = Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(asyncCheckpoint_result.class, metaDataMap);
  }
  public asyncCheckpoint_result() {
  }
  public asyncCheckpoint_result(
    boolean success,
    TachyonException e)
  {
    this();
    this.success = success;
    setSuccessIsSet(true);
    this.e = e;
  }
  /**
   * Performs a deep copy on <i>other</i>.
   */
  public asyncCheckpoint_result(asyncCheckpoint_result other) {
    __isset_bitfield = other.__isset_bitfield;
    this.success = other.success;
    if (other.isSetE()) {
      this.e = new TachyonException(other.e);
    }
  }
  public asyncCheckpoint_result deepCopy() {
    return new asyncCheckpoint_result(this);
  }
  @Override
  public void clear() {
    setSuccessIsSet(false);
    this.success = false;
    this.e = null;
  }
  public boolean isSuccess() {
    return this.success;
  }
  /** Sets success, marks it set, and returns this for call chaining. */
  public asyncCheckpoint_result setSuccess(boolean success) {
    this.success = success;
    setSuccessIsSet(true);
    return this;
  }
  public void unsetSuccess() {
    __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __SUCCESS_ISSET_ID);
  }
  /** Returns true if field success is set (has been assigned a value) and false otherwise */
  public boolean isSetSuccess() {
    return EncodingUtils.testBit(__isset_bitfield, __SUCCESS_ISSET_ID);
  }
  public void setSuccessIsSet(boolean value) {
    __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __SUCCESS_ISSET_ID, value);
  }
  public TachyonException getE() {
    return this.e;
  }
  public asyncCheckpoint_result setE(TachyonException e) {
    this.e = e;
    return this;
  }
  public void unsetE() {
    this.e = null;
  }
  /** Returns true if field e is set (has been assigned a value) and false otherwise */
  public boolean isSetE() {
    return this.e != null;
  }
  // For the object field "set" is tracked by non-null-ness, so only clearing has effect.
  public void setEIsSet(boolean value) {
    if (!value) {
      this.e = null;
    }
  }
  /** Reflective setter: assigns (or, for null, clears) the identified field. */
  public void setFieldValue(_Fields field, Object value) {
    switch (field) {
    case SUCCESS:
      if (value == null) {
        unsetSuccess();
      } else {
        setSuccess((Boolean)value);
      }
      break;
    case E:
      if (value == null) {
        unsetE();
      } else {
        setE((TachyonException)value);
      }
      break;
    }
  }
  public Object getFieldValue(_Fields field) {
    switch (field) {
    case SUCCESS:
      return Boolean.valueOf(isSuccess());
    case E:
      return getE();
    }
    throw new IllegalStateException();
  }
  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new IllegalArgumentException();
    }
    switch (field) {
    case SUCCESS:
      return isSetSuccess();
    case E:
      return isSetE();
    }
    throw new IllegalStateException();
  }
  @Override
  public boolean equals(Object that) {
    if (that == null)
      return false;
    if (that instanceof asyncCheckpoint_result)
      return this.equals((asyncCheckpoint_result)that);
    return false;
  }
  public boolean equals(asyncCheckpoint_result that) {
    if (that == null)
      return false;
    boolean this_present_success = true;
    boolean that_present_success = true;
    if (this_present_success || that_present_success) {
      if (!(this_present_success && that_present_success))
        return false;
      if (this.success != that.success)
        return false;
    }
    boolean this_present_e = true && this.isSetE();
    boolean that_present_e = true && that.isSetE();
    if (this_present_e || that_present_e) {
      if (!(this_present_e && that_present_e))
        return false;
      if (!this.e.equals(that.e))
        return false;
    }
    return true;
  }
  @Override
  public int hashCode() {
    // FIX: the generated stub returned a constant 0, collapsing hash-based
    // collections to linear scans.  Mirror equals(): success always
    // participates; e participates only when set.
    int hashCode = 1;
    hashCode = hashCode * 8191 + ((success) ? 131071 : 524287);
    hashCode = hashCode * 8191 + ((isSetE()) ? 131071 : 524287);
    if (isSetE())
      hashCode = hashCode * 8191 + e.hashCode();
    return hashCode;
  }
  /** Orders by success (set-flag then value), then by e (set-flag then value). */
  @Override
  public int compareTo(asyncCheckpoint_result other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }
    int lastComparison = 0;
    lastComparison = Boolean.valueOf(isSetSuccess()).compareTo(other.isSetSuccess());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetSuccess()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.success, other.success);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = Boolean.valueOf(isSetE()).compareTo(other.isSetE());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetE()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.e, other.e);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }
  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }
  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
  }
  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
  }
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("asyncCheckpoint_result(");
    boolean first = true;
    sb.append("success:");
    sb.append(this.success);
    first = false;
    if (!first) sb.append(", ");
    sb.append("e:");
    if (this.e == null) {
      sb.append("null");
    } else {
      sb.append(this.e);
    }
    first = false;
    sb.append(")");
    return sb.toString();
  }
  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    // check for sub-struct validity
  }
  // Java serialization is bridged onto Thrift's compact protocol.
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
    try {
      // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
      __isset_bitfield = 0;
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  private static class asyncCheckpoint_resultStandardSchemeFactory implements SchemeFactory {
    public asyncCheckpoint_resultStandardScheme getScheme() {
      return new asyncCheckpoint_resultStandardScheme();
    }
  }
  /** Standard field-tagged encoding; unknown fields are skipped on read. */
  private static class asyncCheckpoint_resultStandardScheme extends StandardScheme<asyncCheckpoint_result> {
    public void read(org.apache.thrift.protocol.TProtocol iprot, asyncCheckpoint_result struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
          case 0: // SUCCESS
            if (schemeField.type == org.apache.thrift.protocol.TType.BOOL) {
              struct.success = iprot.readBool();
              struct.setSuccessIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 1:
            if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
              struct.e = new TachyonException();
              struct.e.read(iprot);
              struct.setEIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();
      // check for required fields of primitive type, which can't be checked in the validate method
      struct.validate();
    }
    public void write(org.apache.thrift.protocol.TProtocol oprot, asyncCheckpoint_result struct) throws org.apache.thrift.TException {
      struct.validate();
      oprot.writeStructBegin(STRUCT_DESC);
      if (struct.isSetSuccess()) {
        oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
        oprot.writeBool(struct.success);
        oprot.writeFieldEnd();
      }
      if (struct.e != null) {
        oprot.writeFieldBegin(E_FIELD_DESC);
        struct.e.write(oprot);
        oprot.writeFieldEnd();
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }
  }
  private static class asyncCheckpoint_resultTupleSchemeFactory implements SchemeFactory {
    public asyncCheckpoint_resultTupleScheme getScheme() {
      return new asyncCheckpoint_resultTupleScheme();
    }
  }
  /** Tuple encoding: a 2-bit presence bitset (bit 0=success, 1=e) then set fields in order. */
  private static class asyncCheckpoint_resultTupleScheme extends TupleScheme<asyncCheckpoint_result> {
    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, asyncCheckpoint_result struct) throws org.apache.thrift.TException {
      TTupleProtocol oprot = (TTupleProtocol) prot;
      BitSet optionals = new BitSet();
      if (struct.isSetSuccess()) {
        optionals.set(0);
      }
      if (struct.isSetE()) {
        optionals.set(1);
      }
      oprot.writeBitSet(optionals, 2);
      if (struct.isSetSuccess()) {
        oprot.writeBool(struct.success);
      }
      if (struct.isSetE()) {
        struct.e.write(oprot);
      }
    }
    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, asyncCheckpoint_result struct) throws org.apache.thrift.TException {
      TTupleProtocol iprot = (TTupleProtocol) prot;
      BitSet incoming = iprot.readBitSet(2);
      if (incoming.get(0)) {
        struct.success = iprot.readBool();
        struct.setSuccessIsSet(true);
      }
      if (incoming.get(1)) {
        struct.e = new TachyonException();
        struct.e.read(iprot);
        struct.setEIsSet(true);
      }
    }
  }
}
/**
 * Thrift-generated argument struct for the cacheBlock RPC: the calling user
 * (userId) and the block being cached (blockId), both required i64 fields.
 *
 * NOTE(review): this class is Thrift-generated boilerplate; regenerating from
 * the IDL will overwrite hand edits. The only behavioral change below is
 * hashCode(), which the generator emitted as a constant 0 — legal, but it
 * collapses every instance into a single hash bucket. It now hashes the same
 * fields equals() compares, so the equals/hashCode contract still holds.
 */
public static class cacheBlock_args implements org.apache.thrift.TBase<cacheBlock_args, cacheBlock_args._Fields>, java.io.Serializable, Cloneable, Comparable<cacheBlock_args> {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("cacheBlock_args");
  private static final org.apache.thrift.protocol.TField USER_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("userId", org.apache.thrift.protocol.TType.I64, (short)1);
  private static final org.apache.thrift.protocol.TField BLOCK_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("blockId", org.apache.thrift.protocol.TType.I64, (short)2);
  // Pluggable wire encodings: standard (field/stop framed) and tuple (compact).
  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new cacheBlock_argsStandardSchemeFactory());
    schemes.put(TupleScheme.class, new cacheBlock_argsTupleSchemeFactory());
  }
  public long userId; // required
  public long blockId; // required
  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    USER_ID((short)1, "userId"),
    BLOCK_ID((short)2, "blockId");
    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
    static {
      for (_Fields field : EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 1: // USER_ID
          return USER_ID;
        case 2: // BLOCK_ID
          return BLOCK_ID;
        default:
          return null;
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }
    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    public static _Fields findByName(String name) {
      return byName.get(name);
    }
    private final short _thriftId;
    private final String _fieldName;
    _Fields(short thriftId, String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }
    public short getThriftFieldId() {
      return _thriftId;
    }
    public String getFieldName() {
      return _fieldName;
    }
  }
  // isset id assignments: bit positions in __isset_bitfield that record whether
  // each primitive field has been explicitly assigned (primitives can't be null).
  private static final int __USERID_ISSET_ID = 0;
  private static final int __BLOCKID_ISSET_ID = 1;
  private byte __isset_bitfield = 0;
  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.USER_ID, new org.apache.thrift.meta_data.FieldMetaData("userId", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
    tmpMap.put(_Fields.BLOCK_ID, new org.apache.thrift.meta_data.FieldMetaData("blockId", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
    metaDataMap = Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(cacheBlock_args.class, metaDataMap);
  }
  public cacheBlock_args() {
  }
  public cacheBlock_args(
    long userId,
    long blockId)
  {
    this();
    this.userId = userId;
    setUserIdIsSet(true);
    this.blockId = blockId;
    setBlockIdIsSet(true);
  }
  /**
   * Performs a deep copy on <i>other</i>.
   */
  public cacheBlock_args(cacheBlock_args other) {
    __isset_bitfield = other.__isset_bitfield;
    this.userId = other.userId;
    this.blockId = other.blockId;
  }
  public cacheBlock_args deepCopy() {
    return new cacheBlock_args(this);
  }
  @Override
  public void clear() {
    setUserIdIsSet(false);
    this.userId = 0;
    setBlockIdIsSet(false);
    this.blockId = 0;
  }
  public long getUserId() {
    return this.userId;
  }
  public cacheBlock_args setUserId(long userId) {
    this.userId = userId;
    setUserIdIsSet(true);
    return this;
  }
  public void unsetUserId() {
    __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __USERID_ISSET_ID);
  }
  /** Returns true if field userId is set (has been assigned a value) and false otherwise */
  public boolean isSetUserId() {
    return EncodingUtils.testBit(__isset_bitfield, __USERID_ISSET_ID);
  }
  public void setUserIdIsSet(boolean value) {
    __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __USERID_ISSET_ID, value);
  }
  public long getBlockId() {
    return this.blockId;
  }
  public cacheBlock_args setBlockId(long blockId) {
    this.blockId = blockId;
    setBlockIdIsSet(true);
    return this;
  }
  public void unsetBlockId() {
    __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __BLOCKID_ISSET_ID);
  }
  /** Returns true if field blockId is set (has been assigned a value) and false otherwise */
  public boolean isSetBlockId() {
    return EncodingUtils.testBit(__isset_bitfield, __BLOCKID_ISSET_ID);
  }
  public void setBlockIdIsSet(boolean value) {
    __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __BLOCKID_ISSET_ID, value);
  }
  public void setFieldValue(_Fields field, Object value) {
    switch (field) {
    case USER_ID:
      if (value == null) {
        unsetUserId();
      } else {
        setUserId((Long)value);
      }
      break;
    case BLOCK_ID:
      if (value == null) {
        unsetBlockId();
      } else {
        setBlockId((Long)value);
      }
      break;
    }
  }
  public Object getFieldValue(_Fields field) {
    switch (field) {
    case USER_ID:
      return Long.valueOf(getUserId());
    case BLOCK_ID:
      return Long.valueOf(getBlockId());
    }
    throw new IllegalStateException();
  }
  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new IllegalArgumentException();
    }
    switch (field) {
    case USER_ID:
      return isSetUserId();
    case BLOCK_ID:
      return isSetBlockId();
    }
    throw new IllegalStateException();
  }
  @Override
  public boolean equals(Object that) {
    if (that == null)
      return false;
    if (that instanceof cacheBlock_args)
      return this.equals((cacheBlock_args)that);
    return false;
  }
  public boolean equals(cacheBlock_args that) {
    if (that == null)
      return false;
    // Generated pattern: "present" flags are constants for primitives, so this
    // reduces to plain value comparison of the two required i64 fields.
    boolean this_present_userId = true;
    boolean that_present_userId = true;
    if (this_present_userId || that_present_userId) {
      if (!(this_present_userId && that_present_userId))
        return false;
      if (this.userId != that.userId)
        return false;
    }
    boolean this_present_blockId = true;
    boolean that_present_blockId = true;
    if (this_present_blockId || that_present_blockId) {
      if (!(this_present_blockId && that_present_blockId))
        return false;
      if (this.blockId != that.blockId)
        return false;
    }
    return true;
  }
  @Override
  public int hashCode() {
    // FIX: the generator emitted "return 0;" here. A constant hash is legal but
    // destroys hash-table performance. Hash exactly the fields equals() compares.
    int hashCode = 1;
    hashCode = 31 * hashCode + (int) (userId ^ (userId >>> 32));
    hashCode = 31 * hashCode + (int) (blockId ^ (blockId >>> 32));
    return hashCode;
  }
  @Override
  public int compareTo(cacheBlock_args other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }
    int lastComparison = 0;
    // Order by isset flag first, then by value, field id order.
    lastComparison = Boolean.valueOf(isSetUserId()).compareTo(other.isSetUserId());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetUserId()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.userId, other.userId);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = Boolean.valueOf(isSetBlockId()).compareTo(other.isSetBlockId());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetBlockId()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.blockId, other.blockId);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }
  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }
  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
  }
  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
  }
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("cacheBlock_args(");
    boolean first = true;
    sb.append("userId:");
    sb.append(this.userId);
    first = false;
    if (!first) sb.append(", ");
    sb.append("blockId:");
    sb.append(this.blockId);
    first = false;
    sb.append(")");
    return sb.toString();
  }
  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    // check for sub-struct validity
  }
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
    try {
      // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
      __isset_bitfield = 0;
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  private static class cacheBlock_argsStandardSchemeFactory implements SchemeFactory {
    public cacheBlock_argsStandardScheme getScheme() {
      return new cacheBlock_argsStandardScheme();
    }
  }
  /** Standard (field-header framed) wire encoding; tolerant of unknown fields. */
  private static class cacheBlock_argsStandardScheme extends StandardScheme<cacheBlock_args> {
    public void read(org.apache.thrift.protocol.TProtocol iprot, cacheBlock_args struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
          case 1: // USER_ID
            if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
              struct.userId = iprot.readI64();
              struct.setUserIdIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 2: // BLOCK_ID
            if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
              struct.blockId = iprot.readI64();
              struct.setBlockIdIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();
      // check for required fields of primitive type, which can't be checked in the validate method
      struct.validate();
    }
    public void write(org.apache.thrift.protocol.TProtocol oprot, cacheBlock_args struct) throws org.apache.thrift.TException {
      struct.validate();
      oprot.writeStructBegin(STRUCT_DESC);
      oprot.writeFieldBegin(USER_ID_FIELD_DESC);
      oprot.writeI64(struct.userId);
      oprot.writeFieldEnd();
      oprot.writeFieldBegin(BLOCK_ID_FIELD_DESC);
      oprot.writeI64(struct.blockId);
      oprot.writeFieldEnd();
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }
  }
  private static class cacheBlock_argsTupleSchemeFactory implements SchemeFactory {
    public cacheBlock_argsTupleScheme getScheme() {
      return new cacheBlock_argsTupleScheme();
    }
  }
  /** Tuple (compact) wire encoding: bit-set header, then present fields in id order. */
  private static class cacheBlock_argsTupleScheme extends TupleScheme<cacheBlock_args> {
    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, cacheBlock_args struct) throws org.apache.thrift.TException {
      TTupleProtocol oprot = (TTupleProtocol) prot;
      BitSet optionals = new BitSet();
      if (struct.isSetUserId()) {
        optionals.set(0);
      }
      if (struct.isSetBlockId()) {
        optionals.set(1);
      }
      oprot.writeBitSet(optionals, 2);
      if (struct.isSetUserId()) {
        oprot.writeI64(struct.userId);
      }
      if (struct.isSetBlockId()) {
        oprot.writeI64(struct.blockId);
      }
    }
    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, cacheBlock_args struct) throws org.apache.thrift.TException {
      TTupleProtocol iprot = (TTupleProtocol) prot;
      BitSet incoming = iprot.readBitSet(2);
      if (incoming.get(0)) {
        struct.userId = iprot.readI64();
        struct.setUserIdIsSet(true);
      }
      if (incoming.get(1)) {
        struct.blockId = iprot.readI64();
        struct.setBlockIdIsSet(true);
      }
    }
  }
}
/**
 * Thrift-generated result struct for the cacheBlock RPC. The call returns void,
 * so the struct carries only the three declared exceptions (eP, eS, eB); at
 * most one is expected to be set on the wire for a failed call.
 *
 * NOTE(review): Thrift-generated boilerplate; regeneration overwrites hand
 * edits. The only behavioral change below is hashCode(), which the generator
 * emitted as a constant 0; it now hashes the same presence/value pairs that
 * equals() compares, keeping the equals/hashCode contract intact.
 */
public static class cacheBlock_result implements org.apache.thrift.TBase<cacheBlock_result, cacheBlock_result._Fields>, java.io.Serializable, Cloneable, Comparable<cacheBlock_result> {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("cacheBlock_result");
  private static final org.apache.thrift.protocol.TField E_P_FIELD_DESC = new org.apache.thrift.protocol.TField("eP", org.apache.thrift.protocol.TType.STRUCT, (short)1);
  private static final org.apache.thrift.protocol.TField E_S_FIELD_DESC = new org.apache.thrift.protocol.TField("eS", org.apache.thrift.protocol.TType.STRUCT, (short)2);
  private static final org.apache.thrift.protocol.TField E_B_FIELD_DESC = new org.apache.thrift.protocol.TField("eB", org.apache.thrift.protocol.TType.STRUCT, (short)3);
  // Pluggable wire encodings: standard (field/stop framed) and tuple (compact).
  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new cacheBlock_resultStandardSchemeFactory());
    schemes.put(TupleScheme.class, new cacheBlock_resultTupleSchemeFactory());
  }
  public FileDoesNotExistException eP; // required
  public SuspectedFileSizeException eS; // required
  public BlockInfoException eB; // required
  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    E_P((short)1, "eP"),
    E_S((short)2, "eS"),
    E_B((short)3, "eB");
    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
    static {
      for (_Fields field : EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 1:
          return E_P;
        case 2:
          return E_S;
        case 3:
          return E_B;
        default:
          return null;
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }
    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    public static _Fields findByName(String name) {
      return byName.get(name);
    }
    private final short _thriftId;
    private final String _fieldName;
    _Fields(short thriftId, String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }
    public short getThriftFieldId() {
      return _thriftId;
    }
    public String getFieldName() {
      return _fieldName;
    }
  }
  // isset id assignments (none needed: all fields are object references, so
  // "set" is simply non-null)
  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.E_P, new org.apache.thrift.meta_data.FieldMetaData("eP", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT)));
    tmpMap.put(_Fields.E_S, new org.apache.thrift.meta_data.FieldMetaData("eS", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT)));
    tmpMap.put(_Fields.E_B, new org.apache.thrift.meta_data.FieldMetaData("eB", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT)));
    metaDataMap = Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(cacheBlock_result.class, metaDataMap);
  }
  public cacheBlock_result() {
  }
  public cacheBlock_result(
    FileDoesNotExistException eP,
    SuspectedFileSizeException eS,
    BlockInfoException eB)
  {
    this();
    this.eP = eP;
    this.eS = eS;
    this.eB = eB;
  }
  /**
   * Performs a deep copy on <i>other</i>.
   */
  public cacheBlock_result(cacheBlock_result other) {
    if (other.isSetEP()) {
      this.eP = new FileDoesNotExistException(other.eP);
    }
    if (other.isSetES()) {
      this.eS = new SuspectedFileSizeException(other.eS);
    }
    if (other.isSetEB()) {
      this.eB = new BlockInfoException(other.eB);
    }
  }
  public cacheBlock_result deepCopy() {
    return new cacheBlock_result(this);
  }
  @Override
  public void clear() {
    this.eP = null;
    this.eS = null;
    this.eB = null;
  }
  public FileDoesNotExistException getEP() {
    return this.eP;
  }
  public cacheBlock_result setEP(FileDoesNotExistException eP) {
    this.eP = eP;
    return this;
  }
  public void unsetEP() {
    this.eP = null;
  }
  /** Returns true if field eP is set (has been assigned a value) and false otherwise */
  public boolean isSetEP() {
    return this.eP != null;
  }
  public void setEPIsSet(boolean value) {
    if (!value) {
      this.eP = null;
    }
  }
  public SuspectedFileSizeException getES() {
    return this.eS;
  }
  public cacheBlock_result setES(SuspectedFileSizeException eS) {
    this.eS = eS;
    return this;
  }
  public void unsetES() {
    this.eS = null;
  }
  /** Returns true if field eS is set (has been assigned a value) and false otherwise */
  public boolean isSetES() {
    return this.eS != null;
  }
  public void setESIsSet(boolean value) {
    if (!value) {
      this.eS = null;
    }
  }
  public BlockInfoException getEB() {
    return this.eB;
  }
  public cacheBlock_result setEB(BlockInfoException eB) {
    this.eB = eB;
    return this;
  }
  public void unsetEB() {
    this.eB = null;
  }
  /** Returns true if field eB is set (has been assigned a value) and false otherwise */
  public boolean isSetEB() {
    return this.eB != null;
  }
  public void setEBIsSet(boolean value) {
    if (!value) {
      this.eB = null;
    }
  }
  public void setFieldValue(_Fields field, Object value) {
    switch (field) {
    case E_P:
      if (value == null) {
        unsetEP();
      } else {
        setEP((FileDoesNotExistException)value);
      }
      break;
    case E_S:
      if (value == null) {
        unsetES();
      } else {
        setES((SuspectedFileSizeException)value);
      }
      break;
    case E_B:
      if (value == null) {
        unsetEB();
      } else {
        setEB((BlockInfoException)value);
      }
      break;
    }
  }
  public Object getFieldValue(_Fields field) {
    switch (field) {
    case E_P:
      return getEP();
    case E_S:
      return getES();
    case E_B:
      return getEB();
    }
    throw new IllegalStateException();
  }
  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new IllegalArgumentException();
    }
    switch (field) {
    case E_P:
      return isSetEP();
    case E_S:
      return isSetES();
    case E_B:
      return isSetEB();
    }
    throw new IllegalStateException();
  }
  @Override
  public boolean equals(Object that) {
    if (that == null)
      return false;
    if (that instanceof cacheBlock_result)
      return this.equals((cacheBlock_result)that);
    return false;
  }
  public boolean equals(cacheBlock_result that) {
    if (that == null)
      return false;
    // Two structs are equal iff each field has matching presence, and present
    // fields compare equal.
    boolean this_present_eP = true && this.isSetEP();
    boolean that_present_eP = true && that.isSetEP();
    if (this_present_eP || that_present_eP) {
      if (!(this_present_eP && that_present_eP))
        return false;
      if (!this.eP.equals(that.eP))
        return false;
    }
    boolean this_present_eS = true && this.isSetES();
    boolean that_present_eS = true && that.isSetES();
    if (this_present_eS || that_present_eS) {
      if (!(this_present_eS && that_present_eS))
        return false;
      if (!this.eS.equals(that.eS))
        return false;
    }
    boolean this_present_eB = true && this.isSetEB();
    boolean that_present_eB = true && that.isSetEB();
    if (this_present_eB || that_present_eB) {
      if (!(this_present_eB && that_present_eB))
        return false;
      if (!this.eB.equals(that.eB))
        return false;
    }
    return true;
  }
  @Override
  public int hashCode() {
    // FIX: the generator emitted "return 0;" here. Hash the same
    // presence/value pairs equals() compares (null-safe via isSet*()).
    int hashCode = 1;
    hashCode = 31 * hashCode + (isSetEP() ? eP.hashCode() : 0);
    hashCode = 31 * hashCode + (isSetES() ? eS.hashCode() : 0);
    hashCode = 31 * hashCode + (isSetEB() ? eB.hashCode() : 0);
    return hashCode;
  }
  @Override
  public int compareTo(cacheBlock_result other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }
    int lastComparison = 0;
    // Order by isset flag first, then by value, field id order.
    lastComparison = Boolean.valueOf(isSetEP()).compareTo(other.isSetEP());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetEP()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.eP, other.eP);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = Boolean.valueOf(isSetES()).compareTo(other.isSetES());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetES()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.eS, other.eS);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = Boolean.valueOf(isSetEB()).compareTo(other.isSetEB());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetEB()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.eB, other.eB);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }
  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }
  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
  }
  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
  }
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("cacheBlock_result(");
    boolean first = true;
    sb.append("eP:");
    if (this.eP == null) {
      sb.append("null");
    } else {
      sb.append(this.eP);
    }
    first = false;
    if (!first) sb.append(", ");
    sb.append("eS:");
    if (this.eS == null) {
      sb.append("null");
    } else {
      sb.append(this.eS);
    }
    first = false;
    if (!first) sb.append(", ");
    sb.append("eB:");
    if (this.eB == null) {
      sb.append("null");
    } else {
      sb.append(this.eB);
    }
    first = false;
    sb.append(")");
    return sb.toString();
  }
  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    // check for sub-struct validity
  }
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
    try {
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  private static class cacheBlock_resultStandardSchemeFactory implements SchemeFactory {
    public cacheBlock_resultStandardScheme getScheme() {
      return new cacheBlock_resultStandardScheme();
    }
  }
  /** Standard (field-header framed) wire encoding; tolerant of unknown fields. */
  private static class cacheBlock_resultStandardScheme extends StandardScheme<cacheBlock_result> {
    public void read(org.apache.thrift.protocol.TProtocol iprot, cacheBlock_result struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
          case 1:
            if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
              struct.eP = new FileDoesNotExistException();
              struct.eP.read(iprot);
              struct.setEPIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 2:
            if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
              struct.eS = new SuspectedFileSizeException();
              struct.eS.read(iprot);
              struct.setESIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 3:
            if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
              struct.eB = new BlockInfoException();
              struct.eB.read(iprot);
              struct.setEBIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();
      // check for required fields of primitive type, which can't be checked in the validate method
      struct.validate();
    }
    public void write(org.apache.thrift.protocol.TProtocol oprot, cacheBlock_result struct) throws org.apache.thrift.TException {
      struct.validate();
      oprot.writeStructBegin(STRUCT_DESC);
      if (struct.eP != null) {
        oprot.writeFieldBegin(E_P_FIELD_DESC);
        struct.eP.write(oprot);
        oprot.writeFieldEnd();
      }
      if (struct.eS != null) {
        oprot.writeFieldBegin(E_S_FIELD_DESC);
        struct.eS.write(oprot);
        oprot.writeFieldEnd();
      }
      if (struct.eB != null) {
        oprot.writeFieldBegin(E_B_FIELD_DESC);
        struct.eB.write(oprot);
        oprot.writeFieldEnd();
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }
  }
  private static class cacheBlock_resultTupleSchemeFactory implements SchemeFactory {
    public cacheBlock_resultTupleScheme getScheme() {
      return new cacheBlock_resultTupleScheme();
    }
  }
  /** Tuple (compact) wire encoding: bit-set header, then present fields in id order. */
  private static class cacheBlock_resultTupleScheme extends TupleScheme<cacheBlock_result> {
    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, cacheBlock_result struct) throws org.apache.thrift.TException {
      TTupleProtocol oprot = (TTupleProtocol) prot;
      BitSet optionals = new BitSet();
      if (struct.isSetEP()) {
        optionals.set(0);
      }
      if (struct.isSetES()) {
        optionals.set(1);
      }
      if (struct.isSetEB()) {
        optionals.set(2);
      }
      oprot.writeBitSet(optionals, 3);
      if (struct.isSetEP()) {
        struct.eP.write(oprot);
      }
      if (struct.isSetES()) {
        struct.eS.write(oprot);
      }
      if (struct.isSetEB()) {
        struct.eB.write(oprot);
      }
    }
    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, cacheBlock_result struct) throws org.apache.thrift.TException {
      TTupleProtocol iprot = (TTupleProtocol) prot;
      BitSet incoming = iprot.readBitSet(3);
      if (incoming.get(0)) {
        struct.eP = new FileDoesNotExistException();
        struct.eP.read(iprot);
        struct.setEPIsSet(true);
      }
      if (incoming.get(1)) {
        struct.eS = new SuspectedFileSizeException();
        struct.eS.read(iprot);
        struct.setESIsSet(true);
      }
      if (incoming.get(2)) {
        struct.eB = new BlockInfoException();
        struct.eB.read(iprot);
        struct.setEBIsSet(true);
      }
    }
  }
}
public static class getUserUfsTempFolder_args implements org.apache.thrift.TBase<getUserUfsTempFolder_args, getUserUfsTempFolder_args._Fields>, java.io.Serializable, Cloneable, Comparable<getUserUfsTempFolder_args> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("getUserUfsTempFolder_args");
private static final org.apache.thrift.protocol.TField USER_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("userId", org.apache.thrift.protocol.TType.I64, (short)1);
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
static {
schemes.put(StandardScheme.class, new getUserUfsTempFolder_argsStandardSchemeFactory());
schemes.put(TupleScheme.class, new getUserUfsTempFolder_argsTupleSchemeFactory());
}
public long userId; // required
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
  USER_ID((short)1, "userId");
  // Reverse lookup: Thrift field name -> enum constant, built once at class load.
  private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
  static {
    for (_Fields field : EnumSet.allOf(_Fields.class)) {
      byName.put(field.getFieldName(), field);
    }
  }
  /**
   * Find the _Fields constant that matches fieldId, or null if its not found.
   */
  public static _Fields findByThriftId(int fieldId) {
    switch(fieldId) {
      case 1: // USER_ID
        return USER_ID;
      default:
        return null;
    }
  }
  /**
   * Find the _Fields constant that matches fieldId, throwing an exception
   * if it is not found.
   */
  public static _Fields findByThriftIdOrThrow(int fieldId) {
    _Fields fields = findByThriftId(fieldId);
    if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
    return fields;
  }
  /**
   * Find the _Fields constant that matches name, or null if its not found.
   */
  public static _Fields findByName(String name) {
    return byName.get(name);
  }
  private final short _thriftId;
  private final String _fieldName;
  _Fields(short thriftId, String fieldName) {
    _thriftId = thriftId;
    _fieldName = fieldName;
  }
  public short getThriftFieldId() {
    return _thriftId;
  }
  public String getFieldName() {
    return _fieldName;
  }
}
// isset id assignments: bit position in __isset_bitfield recording whether the
// primitive userId field has been explicitly assigned.
private static final int __USERID_ISSET_ID = 0;
private byte __isset_bitfield = 0;
// Field metadata registered with the Thrift runtime for reflection/introspection.
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
  Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
  tmpMap.put(_Fields.USER_ID, new org.apache.thrift.meta_data.FieldMetaData("userId", org.apache.thrift.TFieldRequirementType.DEFAULT,
      new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
  metaDataMap = Collections.unmodifiableMap(tmpMap);
  org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(getUserUfsTempFolder_args.class, metaDataMap);
}
public getUserUfsTempFolder_args() {
}
public getUserUfsTempFolder_args(
  long userId)
{
  this();
  this.userId = userId;
  setUserIdIsSet(true);
}
/**
 * Performs a deep copy on <i>other</i>.
 */
public getUserUfsTempFolder_args(getUserUfsTempFolder_args other) {
  __isset_bitfield = other.__isset_bitfield;
  this.userId = other.userId;
}
public getUserUfsTempFolder_args deepCopy() {
  return new getUserUfsTempFolder_args(this);
}
@Override
public void clear() {
  // Reset to the Thrift default state: value 0, isset flag cleared.
  setUserIdIsSet(false);
  this.userId = 0;
}
public long getUserId() {
  return this.userId;
}
public getUserUfsTempFolder_args setUserId(long userId) {
  this.userId = userId;
  setUserIdIsSet(true);
  return this;
}
public void unsetUserId() {
  __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __USERID_ISSET_ID);
}
/** Returns true if field userId is set (has been assigned a value) and false otherwise */
public boolean isSetUserId() {
  return EncodingUtils.testBit(__isset_bitfield, __USERID_ISSET_ID);
}
public void setUserIdIsSet(boolean value) {
  __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __USERID_ISSET_ID, value);
}
// Generic field accessors used by the Thrift runtime (reflection-style access).
public void setFieldValue(_Fields field, Object value) {
  switch (field) {
  case USER_ID:
    if (value == null) {
      unsetUserId();
    } else {
      setUserId((Long)value);
    }
    break;
  }
}
public Object getFieldValue(_Fields field) {
  switch (field) {
  case USER_ID:
    return Long.valueOf(getUserId());
  }
  throw new IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
  if (field == null) {
    throw new IllegalArgumentException();
  }
  switch (field) {
  case USER_ID:
    return isSetUserId();
  }
  throw new IllegalStateException();
}
@Override
public boolean equals(Object that) {
  if (that == null)
    return false;
  if (that instanceof getUserUfsTempFolder_args)
    return this.equals((getUserUfsTempFolder_args)that);
  return false;
}
public boolean equals(getUserUfsTempFolder_args that) {
  if (that == null)
    return false;
  // Generated pattern: "present" flags are constants for primitives, so this
  // reduces to a plain value comparison of userId.
  boolean this_present_userId = true;
  boolean that_present_userId = true;
  if (this_present_userId || that_present_userId) {
    if (!(this_present_userId && that_present_userId))
      return false;
    if (this.userId != that.userId)
      return false;
  }
  return true;
}
@Override
public int hashCode() {
  // FIX: the generator emitted "return 0;" here, which collapses every
  // instance into one hash bucket. Hash the single field equals() compares;
  // still consistent with equals(), so the Object contract holds.
  return 31 + (int) (userId ^ (userId >>> 32));
}
@Override
public int compareTo(getUserUfsTempFolder_args other) {
  if (!getClass().equals(other.getClass())) {
    return getClass().getName().compareTo(other.getClass().getName());
  }
  int lastComparison = 0;
  // Order by isset flag first, then by value.
  lastComparison = Boolean.valueOf(isSetUserId()).compareTo(other.isSetUserId());
  if (lastComparison != 0) {
    return lastComparison;
  }
  if (isSetUserId()) {
    lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.userId, other.userId);
    if (lastComparison != 0) {
      return lastComparison;
    }
  }
  return 0;
}
public _Fields fieldForId(int fieldId) {
  return _Fields.findByThriftId(fieldId);
}
// De/serialization dispatches to the scheme matching the protocol in use
// (standard or tuple).
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
  schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
}
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
  schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
}
@Override
public String toString() {
  StringBuilder sb = new StringBuilder("getUserUfsTempFolder_args(");
  boolean first = true;
  sb.append("userId:");
  sb.append(this.userId);
  first = false;
  sb.append(")");
  return sb.toString();
}
public void validate() throws org.apache.thrift.TException {
  // check for required fields
  // check for sub-struct validity
}
// Java serialization is bridged through Thrift's compact protocol.
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
  try {
    write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
  } catch (org.apache.thrift.TException te) {
    throw new java.io.IOException(te);
  }
}
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
  try {
    // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
    __isset_bitfield = 0;
    read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
  } catch (org.apache.thrift.TException te) {
    throw new java.io.IOException(te);
  }
}
private static class getUserUfsTempFolder_argsStandardSchemeFactory implements SchemeFactory {
  public getUserUfsTempFolder_argsStandardScheme getScheme() {
    return new getUserUfsTempFolder_argsStandardScheme();
  }
}
/** Standard (field-header framed) wire encoding; unknown fields are skipped. */
private static class getUserUfsTempFolder_argsStandardScheme extends StandardScheme<getUserUfsTempFolder_args> {
  public void read(org.apache.thrift.protocol.TProtocol iprot, getUserUfsTempFolder_args struct) throws org.apache.thrift.TException {
    org.apache.thrift.protocol.TField schemeField;
    iprot.readStructBegin();
    while (true)
    {
      schemeField = iprot.readFieldBegin();
      if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
        break;
      }
      switch (schemeField.id) {
        case 1: // USER_ID
          if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
            struct.userId = iprot.readI64();
            struct.setUserIdIsSet(true);
          } else {
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          break;
        default:
          org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
      }
      iprot.readFieldEnd();
    }
    iprot.readStructEnd();
    // check for required fields of primitive type, which can't be checked in the validate method
    struct.validate();
  }
  public void write(org.apache.thrift.protocol.TProtocol oprot, getUserUfsTempFolder_args struct) throws org.apache.thrift.TException {
    struct.validate();
    oprot.writeStructBegin(STRUCT_DESC);
    oprot.writeFieldBegin(USER_ID_FIELD_DESC);
    oprot.writeI64(struct.userId);
    oprot.writeFieldEnd();
    oprot.writeFieldStop();
    oprot.writeStructEnd();
  }
}
private static class getUserUfsTempFolder_argsTupleSchemeFactory implements SchemeFactory {
public getUserUfsTempFolder_argsTupleScheme getScheme() {
return new getUserUfsTempFolder_argsTupleScheme();
}
}
private static class getUserUfsTempFolder_argsTupleScheme extends TupleScheme<getUserUfsTempFolder_args> {
@Override
public void write(org.apache.thrift.protocol.TProtocol prot, getUserUfsTempFolder_args struct) throws org.apache.thrift.TException {
TTupleProtocol oprot = (TTupleProtocol) prot;
BitSet optionals = new BitSet();
if (struct.isSetUserId()) {
optionals.set(0);
}
oprot.writeBitSet(optionals, 1);
if (struct.isSetUserId()) {
oprot.writeI64(struct.userId);
}
}
@Override
public void read(org.apache.thrift.protocol.TProtocol prot, getUserUfsTempFolder_args struct) throws org.apache.thrift.TException {
TTupleProtocol iprot = (TTupleProtocol) prot;
BitSet incoming = iprot.readBitSet(1);
if (incoming.get(0)) {
struct.userId = iprot.readI64();
struct.setUserIdIsSet(true);
}
}
}
}
/**
 * Thrift RPC result envelope for getUserUfsTempFolder. The returned
 * under-filesystem temp-folder path travels in {@code success} (field id 0).
 * Generated-style code: field ids and wire types are part of the protocol
 * and must not change.
 *
 * <p>Only change vs. the generated original: {@link #hashCode()} no longer
 * returns a constant 0 (see that method).
 */
public static class getUserUfsTempFolder_result implements org.apache.thrift.TBase<getUserUfsTempFolder_result, getUserUfsTempFolder_result._Fields>, java.io.Serializable, Cloneable, Comparable<getUserUfsTempFolder_result> {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("getUserUfsTempFolder_result");
  private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.STRING, (short)0);
  // Serialization strategy registry: standard (field-by-field) and tuple (compact).
  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new getUserUfsTempFolder_resultStandardSchemeFactory());
    schemes.put(TupleScheme.class, new getUserUfsTempFolder_resultTupleSchemeFactory());
  }
  public String success; // required
  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    SUCCESS((short)0, "success");
    // Reverse index from field name to constant, built once at class load.
    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
    static {
      for (_Fields field : EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 0: // SUCCESS
          return SUCCESS;
        default:
          return null;
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }
    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    public static _Fields findByName(String name) {
      return byName.get(name);
    }
    private final short _thriftId;
    private final String _fieldName;
    _Fields(short thriftId, String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }
    public short getThriftFieldId() {
      return _thriftId;
    }
    public String getFieldName() {
      return _fieldName;
    }
  }
  // isset id assignments
  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    metaDataMap = Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(getUserUfsTempFolder_result.class, metaDataMap);
  }
  public getUserUfsTempFolder_result() {
  }
  public getUserUfsTempFolder_result(
    String success)
  {
    this();
    this.success = success;
  }
  /**
   * Performs a deep copy on <i>other</i>.
   */
  public getUserUfsTempFolder_result(getUserUfsTempFolder_result other) {
    if (other.isSetSuccess()) {
      this.success = other.success;
    }
  }
  public getUserUfsTempFolder_result deepCopy() {
    return new getUserUfsTempFolder_result(this);
  }
  @Override
  public void clear() {
    this.success = null;
  }
  public String getSuccess() {
    return this.success;
  }
  // Fluent setter, returns this for chaining.
  public getUserUfsTempFolder_result setSuccess(String success) {
    this.success = success;
    return this;
  }
  public void unsetSuccess() {
    this.success = null;
  }
  /** Returns true if field success is set (has been assigned a value) and false otherwise */
  public boolean isSetSuccess() {
    return this.success != null;
  }
  public void setSuccessIsSet(boolean value) {
    if (!value) {
      this.success = null;
    }
  }
  public void setFieldValue(_Fields field, Object value) {
    switch (field) {
      case SUCCESS:
        if (value == null) {
          unsetSuccess();
        } else {
          setSuccess((String)value);
        }
        break;
    }
  }
  public Object getFieldValue(_Fields field) {
    switch (field) {
      case SUCCESS:
        return getSuccess();
    }
    throw new IllegalStateException();
  }
  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new IllegalArgumentException();
    }
    switch (field) {
      case SUCCESS:
        return isSetSuccess();
    }
    throw new IllegalStateException();
  }
  @Override
  public boolean equals(Object that) {
    if (that == null)
      return false;
    if (that instanceof getUserUfsTempFolder_result)
      return this.equals((getUserUfsTempFolder_result)that);
    return false;
  }
  public boolean equals(getUserUfsTempFolder_result that) {
    if (that == null)
      return false;
    boolean this_present_success = true && this.isSetSuccess();
    boolean that_present_success = true && that.isSetSuccess();
    if (this_present_success || that_present_success) {
      if (!(this_present_success && that_present_success))
        return false;
      if (!this.success.equals(that.success))
        return false;
    }
    return true;
  }
  @Override
  public int hashCode() {
    // FIX: was a constant 0 — legal alongside equals(), but it puts every
    // instance in one hash bucket. Hash exactly what equals() compares.
    return 31 * 17 + (isSetSuccess() ? success.hashCode() : 0);
  }
  @Override
  public int compareTo(getUserUfsTempFolder_result other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }
    int lastComparison = 0;
    lastComparison = Boolean.valueOf(isSetSuccess()).compareTo(other.isSetSuccess());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetSuccess()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.success, other.success);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }
  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }
  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
  }
  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
  }
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("getUserUfsTempFolder_result(");
    boolean first = true;
    sb.append("success:");
    if (this.success == null) {
      sb.append("null");
    } else {
      sb.append(this.success);
    }
    first = false;
    sb.append(")");
    return sb.toString();
  }
  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    // check for sub-struct validity
  }
  // Java serialization is routed through the Thrift compact protocol.
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
    try {
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  private static class getUserUfsTempFolder_resultStandardSchemeFactory implements SchemeFactory {
    public getUserUfsTempFolder_resultStandardScheme getScheme() {
      return new getUserUfsTempFolder_resultStandardScheme();
    }
  }
  // Field-by-field wire codec; field ids/types are the protocol contract.
  private static class getUserUfsTempFolder_resultStandardScheme extends StandardScheme<getUserUfsTempFolder_result> {
    public void read(org.apache.thrift.protocol.TProtocol iprot, getUserUfsTempFolder_result struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
          case 0: // SUCCESS
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.success = iprot.readString();
              struct.setSuccessIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();
      // check for required fields of primitive type, which can't be checked in the validate method
      struct.validate();
    }
    public void write(org.apache.thrift.protocol.TProtocol oprot, getUserUfsTempFolder_result struct) throws org.apache.thrift.TException {
      struct.validate();
      oprot.writeStructBegin(STRUCT_DESC);
      if (struct.success != null) {
        oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
        oprot.writeString(struct.success);
        oprot.writeFieldEnd();
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }
  }
  private static class getUserUfsTempFolder_resultTupleSchemeFactory implements SchemeFactory {
    public getUserUfsTempFolder_resultTupleScheme getScheme() {
      return new getUserUfsTempFolder_resultTupleScheme();
    }
  }
  // Compact codec: leading bitset records which fields follow.
  private static class getUserUfsTempFolder_resultTupleScheme extends TupleScheme<getUserUfsTempFolder_result> {
    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, getUserUfsTempFolder_result struct) throws org.apache.thrift.TException {
      TTupleProtocol oprot = (TTupleProtocol) prot;
      BitSet optionals = new BitSet();
      if (struct.isSetSuccess()) {
        optionals.set(0);
      }
      oprot.writeBitSet(optionals, 1);
      if (struct.isSetSuccess()) {
        oprot.writeString(struct.success);
      }
    }
    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, getUserUfsTempFolder_result struct) throws org.apache.thrift.TException {
      TTupleProtocol iprot = (TTupleProtocol) prot;
      BitSet incoming = iprot.readBitSet(1);
      if (incoming.get(0)) {
        struct.success = iprot.readString();
        struct.setSuccessIsSet(true);
      }
    }
  }
}
/**
 * Thrift RPC argument struct for lockBlock: identifies the block to lock
 * ({@code blockId}, field id 1) and the requesting user ({@code userId},
 * field id 2). Generated-style code: field ids and wire types are part of
 * the protocol and must not change.
 *
 * <p>Changes vs. the generated original: {@link #hashCode()} no longer
 * returns a constant 0, and the dead constant-true presence flags were
 * removed from the typed {@code equals}.
 */
public static class lockBlock_args implements org.apache.thrift.TBase<lockBlock_args, lockBlock_args._Fields>, java.io.Serializable, Cloneable, Comparable<lockBlock_args> {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("lockBlock_args");
  private static final org.apache.thrift.protocol.TField BLOCK_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("blockId", org.apache.thrift.protocol.TType.I64, (short)1);
  private static final org.apache.thrift.protocol.TField USER_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("userId", org.apache.thrift.protocol.TType.I64, (short)2);
  // Serialization strategy registry: standard (field-by-field) and tuple (compact).
  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new lockBlock_argsStandardSchemeFactory());
    schemes.put(TupleScheme.class, new lockBlock_argsTupleSchemeFactory());
  }
  public long blockId; // required
  public long userId; // required
  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    BLOCK_ID((short)1, "blockId"),
    USER_ID((short)2, "userId");
    // Reverse index from field name to constant, built once at class load.
    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
    static {
      for (_Fields field : EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 1: // BLOCK_ID
          return BLOCK_ID;
        case 2: // USER_ID
          return USER_ID;
        default:
          return null;
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }
    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    public static _Fields findByName(String name) {
      return byName.get(name);
    }
    private final short _thriftId;
    private final String _fieldName;
    _Fields(short thriftId, String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }
    public short getThriftFieldId() {
      return _thriftId;
    }
    public String getFieldName() {
      return _fieldName;
    }
  }
  // isset id assignments: bit positions in __isset_bitfield for the two
  // primitive fields (Java primitives cannot be null, so presence is tracked here).
  private static final int __BLOCKID_ISSET_ID = 0;
  private static final int __USERID_ISSET_ID = 1;
  private byte __isset_bitfield = 0;
  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.BLOCK_ID, new org.apache.thrift.meta_data.FieldMetaData("blockId", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
    tmpMap.put(_Fields.USER_ID, new org.apache.thrift.meta_data.FieldMetaData("userId", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
    metaDataMap = Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(lockBlock_args.class, metaDataMap);
  }
  public lockBlock_args() {
  }
  public lockBlock_args(
    long blockId,
    long userId)
  {
    this();
    this.blockId = blockId;
    setBlockIdIsSet(true);
    this.userId = userId;
    setUserIdIsSet(true);
  }
  /**
   * Performs a deep copy on <i>other</i>.
   */
  public lockBlock_args(lockBlock_args other) {
    __isset_bitfield = other.__isset_bitfield;
    this.blockId = other.blockId;
    this.userId = other.userId;
  }
  public lockBlock_args deepCopy() {
    return new lockBlock_args(this);
  }
  @Override
  public void clear() {
    setBlockIdIsSet(false);
    this.blockId = 0;
    setUserIdIsSet(false);
    this.userId = 0;
  }
  public long getBlockId() {
    return this.blockId;
  }
  // Fluent setter, returns this for chaining.
  public lockBlock_args setBlockId(long blockId) {
    this.blockId = blockId;
    setBlockIdIsSet(true);
    return this;
  }
  public void unsetBlockId() {
    __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __BLOCKID_ISSET_ID);
  }
  /** Returns true if field blockId is set (has been assigned a value) and false otherwise */
  public boolean isSetBlockId() {
    return EncodingUtils.testBit(__isset_bitfield, __BLOCKID_ISSET_ID);
  }
  public void setBlockIdIsSet(boolean value) {
    __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __BLOCKID_ISSET_ID, value);
  }
  public long getUserId() {
    return this.userId;
  }
  // Fluent setter, returns this for chaining.
  public lockBlock_args setUserId(long userId) {
    this.userId = userId;
    setUserIdIsSet(true);
    return this;
  }
  public void unsetUserId() {
    __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __USERID_ISSET_ID);
  }
  /** Returns true if field userId is set (has been assigned a value) and false otherwise */
  public boolean isSetUserId() {
    return EncodingUtils.testBit(__isset_bitfield, __USERID_ISSET_ID);
  }
  public void setUserIdIsSet(boolean value) {
    __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __USERID_ISSET_ID, value);
  }
  public void setFieldValue(_Fields field, Object value) {
    switch (field) {
      case BLOCK_ID:
        if (value == null) {
          unsetBlockId();
        } else {
          setBlockId((Long)value);
        }
        break;
      case USER_ID:
        if (value == null) {
          unsetUserId();
        } else {
          setUserId((Long)value);
        }
        break;
    }
  }
  public Object getFieldValue(_Fields field) {
    switch (field) {
      case BLOCK_ID:
        return Long.valueOf(getBlockId());
      case USER_ID:
        return Long.valueOf(getUserId());
    }
    throw new IllegalStateException();
  }
  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new IllegalArgumentException();
    }
    switch (field) {
      case BLOCK_ID:
        return isSetBlockId();
      case USER_ID:
        return isSetUserId();
    }
    throw new IllegalStateException();
  }
  @Override
  public boolean equals(Object that) {
    if (that == null)
      return false;
    if (that instanceof lockBlock_args)
      return this.equals((lockBlock_args)that);
    return false;
  }
  public boolean equals(lockBlock_args that) {
    // blockId and userId are primitives and therefore always "present";
    // the generated constant-true flags were dead code and are removed.
    if (that == null) {
      return false;
    }
    return this.blockId == that.blockId && this.userId == that.userId;
  }
  @Override
  public int hashCode() {
    // FIX: was a constant 0 — legal alongside equals(), but it puts every
    // instance in one hash bucket. Hash exactly what equals() compares.
    int result = 17;
    result = 31 * result + (int) (blockId ^ (blockId >>> 32));
    result = 31 * result + (int) (userId ^ (userId >>> 32));
    return result;
  }
  @Override
  public int compareTo(lockBlock_args other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }
    int lastComparison = 0;
    lastComparison = Boolean.valueOf(isSetBlockId()).compareTo(other.isSetBlockId());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetBlockId()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.blockId, other.blockId);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = Boolean.valueOf(isSetUserId()).compareTo(other.isSetUserId());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetUserId()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.userId, other.userId);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }
  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }
  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
  }
  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
  }
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("lockBlock_args(");
    boolean first = true;
    sb.append("blockId:");
    sb.append(this.blockId);
    first = false;
    if (!first) sb.append(", ");
    sb.append("userId:");
    sb.append(this.userId);
    first = false;
    sb.append(")");
    return sb.toString();
  }
  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    // check for sub-struct validity
  }
  // Java serialization is routed through the Thrift compact protocol.
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
    try {
      // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
      __isset_bitfield = 0;
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  private static class lockBlock_argsStandardSchemeFactory implements SchemeFactory {
    public lockBlock_argsStandardScheme getScheme() {
      return new lockBlock_argsStandardScheme();
    }
  }
  // Field-by-field wire codec; field ids/types are the protocol contract.
  private static class lockBlock_argsStandardScheme extends StandardScheme<lockBlock_args> {
    public void read(org.apache.thrift.protocol.TProtocol iprot, lockBlock_args struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
          case 1: // BLOCK_ID
            if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
              struct.blockId = iprot.readI64();
              struct.setBlockIdIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 2: // USER_ID
            if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
              struct.userId = iprot.readI64();
              struct.setUserIdIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();
      // check for required fields of primitive type, which can't be checked in the validate method
      struct.validate();
    }
    public void write(org.apache.thrift.protocol.TProtocol oprot, lockBlock_args struct) throws org.apache.thrift.TException {
      struct.validate();
      oprot.writeStructBegin(STRUCT_DESC);
      oprot.writeFieldBegin(BLOCK_ID_FIELD_DESC);
      oprot.writeI64(struct.blockId);
      oprot.writeFieldEnd();
      oprot.writeFieldBegin(USER_ID_FIELD_DESC);
      oprot.writeI64(struct.userId);
      oprot.writeFieldEnd();
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }
  }
  private static class lockBlock_argsTupleSchemeFactory implements SchemeFactory {
    public lockBlock_argsTupleScheme getScheme() {
      return new lockBlock_argsTupleScheme();
    }
  }
  // Compact codec: leading bitset records which fields follow.
  private static class lockBlock_argsTupleScheme extends TupleScheme<lockBlock_args> {
    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, lockBlock_args struct) throws org.apache.thrift.TException {
      TTupleProtocol oprot = (TTupleProtocol) prot;
      BitSet optionals = new BitSet();
      if (struct.isSetBlockId()) {
        optionals.set(0);
      }
      if (struct.isSetUserId()) {
        optionals.set(1);
      }
      oprot.writeBitSet(optionals, 2);
      if (struct.isSetBlockId()) {
        oprot.writeI64(struct.blockId);
      }
      if (struct.isSetUserId()) {
        oprot.writeI64(struct.userId);
      }
    }
    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, lockBlock_args struct) throws org.apache.thrift.TException {
      TTupleProtocol iprot = (TTupleProtocol) prot;
      BitSet incoming = iprot.readBitSet(2);
      if (incoming.get(0)) {
        struct.blockId = iprot.readI64();
        struct.setBlockIdIsSet(true);
      }
      if (incoming.get(1)) {
        struct.userId = iprot.readI64();
        struct.setUserIdIsSet(true);
      }
    }
  }
}
public static class lockBlock_result implements org.apache.thrift.TBase<lockBlock_result, lockBlock_result._Fields>, java.io.Serializable, Cloneable, Comparable<lockBlock_result> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("lockBlock_result");
private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.STRING, (short)0);
private static final org.apache.thrift.protocol.TField E_P_FIELD_DESC = new org.apache.thrift.protocol.TField("eP", org.apache.thrift.protocol.TType.STRUCT, (short)1);
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
static {
schemes.put(StandardScheme.class, new lockBlock_resultStandardSchemeFactory());
schemes.put(TupleScheme.class, new lockBlock_resultTupleSchemeFactory());
}
public String success; // required
public FileDoesNotExistException eP; // required
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
  SUCCESS((short)0, "success"),
  E_P((short)1, "eP");
  // Reverse index from field name to constant, built once at class load.
  private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
  static {
    for (_Fields field : EnumSet.allOf(_Fields.class)) {
      byName.put(field.getFieldName(), field);
    }
  }
  /**
   * Find the _Fields constant that matches fieldId, or null if its not found.
   */
  public static _Fields findByThriftId(int fieldId) {
    switch(fieldId) {
      case 0: // SUCCESS
        return SUCCESS;
      case 1: // E_P
        return E_P;
      default:
        return null;
    }
  }
  /**
   * Find the _Fields constant that matches fieldId, throwing an exception
   * if it is not found.
   */
  public static _Fields findByThriftIdOrThrow(int fieldId) {
    _Fields fields = findByThriftId(fieldId);
    if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
    return fields;
  }
  /**
   * Find the _Fields constant that matches name, or null if its not found.
   */
  public static _Fields findByName(String name) {
    return byName.get(name);
  }
  private final short _thriftId;
  private final String _fieldName;
  _Fields(short thriftId, String fieldName) {
    _thriftId = thriftId;
    _fieldName = fieldName;
  }
  public short getThriftFieldId() {
    return _thriftId;
  }
  public String getFieldName() {
    return _fieldName;
  }
}
// isset id assignments
// Field metadata for reflective access via the Thrift meta_data API.
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
  Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
  tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT,
      new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
  tmpMap.put(_Fields.E_P, new org.apache.thrift.meta_data.FieldMetaData("eP", org.apache.thrift.TFieldRequirementType.DEFAULT,
      new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT)));
  metaDataMap = Collections.unmodifiableMap(tmpMap);
  org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(lockBlock_result.class, metaDataMap);
}
public lockBlock_result() {
}
public lockBlock_result(
  String success,
  FileDoesNotExistException eP)
{
  this();
  this.success = success;
  this.eP = eP;
}
/**
 * Performs a deep copy on <i>other</i>.
 */
public lockBlock_result(lockBlock_result other) {
  if (other.isSetSuccess()) {
    this.success = other.success;
  }
  if (other.isSetEP()) {
    this.eP = new FileDoesNotExistException(other.eP);
  }
}
public lockBlock_result deepCopy() {
  return new lockBlock_result(this);
}
@Override
public void clear() {
  this.success = null;
  this.eP = null;
}
public String getSuccess() {
  return this.success;
}
// Fluent setter, returns this for chaining.
public lockBlock_result setSuccess(String success) {
  this.success = success;
  return this;
}
public void unsetSuccess() {
  this.success = null;
}
/** Returns true if field success is set (has been assigned a value) and false otherwise */
public boolean isSetSuccess() {
  return this.success != null;
}
public void setSuccessIsSet(boolean value) {
  if (!value) {
    this.success = null;
  }
}
public FileDoesNotExistException getEP() {
  return this.eP;
}
// Fluent setter, returns this for chaining.
public lockBlock_result setEP(FileDoesNotExistException eP) {
  this.eP = eP;
  return this;
}
public void unsetEP() {
  this.eP = null;
}
/** Returns true if field eP is set (has been assigned a value) and false otherwise */
public boolean isSetEP() {
  return this.eP != null;
}
public void setEPIsSet(boolean value) {
  if (!value) {
    this.eP = null;
  }
}
public void setFieldValue(_Fields field, Object value) {
  switch (field) {
    case SUCCESS:
      if (value == null) {
        unsetSuccess();
      } else {
        setSuccess((String)value);
      }
      break;
    case E_P:
      if (value == null) {
        unsetEP();
      } else {
        setEP((FileDoesNotExistException)value);
      }
      break;
  }
}
public Object getFieldValue(_Fields field) {
  switch (field) {
    case SUCCESS:
      return getSuccess();
    case E_P:
      return getEP();
  }
  throw new IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
  if (field == null) {
    throw new IllegalArgumentException();
  }
  switch (field) {
    case SUCCESS:
      return isSetSuccess();
    case E_P:
      return isSetEP();
  }
  throw new IllegalStateException();
}
/**
 * Object-level equality: delegates to the type-specific overload.
 * {@code instanceof} already rejects null, matching the original's
 * explicit null check.
 */
@Override
public boolean equals(Object that) {
  return (that instanceof lockBlock_result) && this.equals((lockBlock_result) that);
}
/**
 * Type-specific equality: each field matches when both sides are unset,
 * or both are set and the values compare equal.
 */
public boolean equals(lockBlock_result that) {
  if (that == null) {
    return false;
  }
  // success: presence must agree, then values must agree.
  if (this.isSetSuccess() != that.isSetSuccess()) {
    return false;
  }
  if (this.isSetSuccess() && !this.success.equals(that.success)) {
    return false;
  }
  // eP: same rule as success.
  if (this.isSetEP() != that.isSetEP()) {
    return false;
  }
  if (this.isSetEP() && !this.eP.equals(that.eP)) {
    return false;
  }
  return true;
}
@Override
public int hashCode() {
  // FIX: previously returned a constant 0 — contract-legal alongside
  // equals(), but it collapses every instance into one hash bucket.
  // Hash exactly the fields equals() compares, skipping unset ones.
  int result = 17;
  result = 31 * result + (isSetSuccess() ? success.hashCode() : 0);
  result = 31 * result + (isSetEP() ? eP.hashCode() : 0);
  return result;
}
/**
 * Natural ordering: instances of a different runtime class order by class
 * name; otherwise compare field by field (isSet flag first, then value),
 * success before eP.
 */
@Override
public int compareTo(lockBlock_result other) {
  if (!getClass().equals(other.getClass())) {
    return getClass().getName().compareTo(other.getClass().getName());
  }
  int cmp = Boolean.valueOf(isSetSuccess()).compareTo(other.isSetSuccess());
  if (cmp != 0) {
    return cmp;
  }
  if (isSetSuccess()) {
    cmp = org.apache.thrift.TBaseHelper.compareTo(this.success, other.success);
    if (cmp != 0) {
      return cmp;
    }
  }
  cmp = Boolean.valueOf(isSetEP()).compareTo(other.isSetEP());
  if (cmp != 0) {
    return cmp;
  }
  if (isSetEP()) {
    cmp = org.apache.thrift.TBaseHelper.compareTo(this.eP, other.eP);
    if (cmp != 0) {
      return cmp;
    }
  }
  return 0;
}
// Resolves a wire-level thrift field id to its _Fields constant (null if unknown).
public _Fields fieldForId(int fieldId) {
  return _Fields.findByThriftId(fieldId);
}
// Deserialization entry point: dispatches to the scheme registered for the
// protocol's scheme class (standard or tuple).
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
  schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
}
// Serialization entry point: same scheme dispatch as read().
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
  schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
}
/** Renders as {@code lockBlock_result(success:<v>, eP:<v>)}, printing "null" for unset object fields. */
@Override
public String toString() {
  StringBuilder sb = new StringBuilder("lockBlock_result(");
  sb.append("success:");
  sb.append(this.success == null ? "null" : this.success);
  sb.append(", ");
  sb.append("eP:");
  sb.append(this.eP == null ? "null" : this.eP);
  sb.append(")");
  return sb.toString();
}
/** Validates required fields and sub-structs; this struct has none, so it is a no-op. */
public void validate() throws org.apache.thrift.TException {
  // check for required fields
  // check for sub-struct validity
}
// Java-serialization hook: encodes the struct with the compact Thrift protocol,
// translating TException into IOException as ObjectOutputStream requires.
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
  try {
    write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
  } catch (org.apache.thrift.TException te) {
    throw new java.io.IOException(te);
  }
}
// Java-serialization hook: decodes the struct with the compact Thrift protocol,
// translating TException into IOException as ObjectInputStream requires.
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
  try {
    read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
  } catch (org.apache.thrift.TException te) {
    throw new java.io.IOException(te);
  }
}
/** Factory registered under StandardScheme; yields a fresh standard (de)serializer per call. */
private static class lockBlock_resultStandardSchemeFactory implements SchemeFactory {
  public lockBlock_resultStandardScheme getScheme() {
    return new lockBlock_resultStandardScheme();
  }
}
/**
 * Standard-protocol codec: reads field headers in a loop until STOP, matching
 * on field id; writes each non-null field framed by begin/end markers.
 */
private static class lockBlock_resultStandardScheme extends StandardScheme<lockBlock_result> {
  public void read(org.apache.thrift.protocol.TProtocol iprot, lockBlock_result struct) throws org.apache.thrift.TException {
    org.apache.thrift.protocol.TField schemeField;
    iprot.readStructBegin();
    while (true)
    {
      schemeField = iprot.readFieldBegin();
      if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
        break;
      }
      switch (schemeField.id) {
        case 0: // SUCCESS
          if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
            struct.success = iprot.readString();
            struct.setSuccessIsSet(true);
          } else {
            // Unexpected wire type for this id: skip the value for forward compatibility.
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          break;
        case 1: // E_P (FileDoesNotExistException)
          if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
            struct.eP = new FileDoesNotExistException();
            struct.eP.read(iprot);
            struct.setEPIsSet(true);
          } else {
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          break;
        default:
          // Unknown field id: skip so newer peers can add fields safely.
          org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
      }
      iprot.readFieldEnd();
    }
    iprot.readStructEnd();
    // check for required fields of primitive type, which can't be checked in the validate method
    struct.validate();
  }
  public void write(org.apache.thrift.protocol.TProtocol oprot, lockBlock_result struct) throws org.apache.thrift.TException {
    struct.validate();
    oprot.writeStructBegin(STRUCT_DESC);
    if (struct.success != null) {
      oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
      oprot.writeString(struct.success);
      oprot.writeFieldEnd();
    }
    if (struct.eP != null) {
      oprot.writeFieldBegin(E_P_FIELD_DESC);
      struct.eP.write(oprot);
      oprot.writeFieldEnd();
    }
    oprot.writeFieldStop();
    oprot.writeStructEnd();
  }
}
/** Factory registered under TupleScheme; yields a fresh tuple (de)serializer per call. */
private static class lockBlock_resultTupleSchemeFactory implements SchemeFactory {
  public lockBlock_resultTupleScheme getScheme() {
    return new lockBlock_resultTupleScheme();
  }
}
/**
 * Tuple-protocol codec: a 2-bit presence bitset (bit 0 = success, bit 1 = eP)
 * precedes the set fields, which are written in declaration order with no
 * per-field framing. Reader and writer must agree on this layout exactly.
 */
private static class lockBlock_resultTupleScheme extends TupleScheme<lockBlock_result> {
  @Override
  public void write(org.apache.thrift.protocol.TProtocol prot, lockBlock_result struct) throws org.apache.thrift.TException {
    TTupleProtocol oprot = (TTupleProtocol) prot;
    BitSet optionals = new BitSet();
    if (struct.isSetSuccess()) {
      optionals.set(0);
    }
    if (struct.isSetEP()) {
      optionals.set(1);
    }
    oprot.writeBitSet(optionals, 2);
    if (struct.isSetSuccess()) {
      oprot.writeString(struct.success);
    }
    if (struct.isSetEP()) {
      struct.eP.write(oprot);
    }
  }
  @Override
  public void read(org.apache.thrift.protocol.TProtocol prot, lockBlock_result struct) throws org.apache.thrift.TException {
    TTupleProtocol iprot = (TTupleProtocol) prot;
    BitSet incoming = iprot.readBitSet(2);
    if (incoming.get(0)) {
      struct.success = iprot.readString();
      struct.setSuccessIsSet(true);
    }
    if (incoming.get(1)) {
      struct.eP = new FileDoesNotExistException();
      struct.eP.read(iprot);
      struct.setEPIsSet(true);
    }
  }
}
}
/**
 * Thrift-generated argument struct for the promoteBlock RPC: carries the
 * calling user's id and the id of the block to promote (both i64).
 * Only change from the generated form: {@link #hashCode()} previously
 * returned the constant 0; it now hashes both fields consistently with
 * {@link #equals(promoteBlock_args)}.
 */
public static class promoteBlock_args implements org.apache.thrift.TBase<promoteBlock_args, promoteBlock_args._Fields>, java.io.Serializable, Cloneable, Comparable<promoteBlock_args> {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("promoteBlock_args");
  private static final org.apache.thrift.protocol.TField USER_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("userId", org.apache.thrift.protocol.TType.I64, (short)1);
  private static final org.apache.thrift.protocol.TField BLOCK_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("blockId", org.apache.thrift.protocol.TType.I64, (short)2);

  // Serialization strategies: standard (field/STOP framed) and tuple (bitset-prefixed).
  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new promoteBlock_argsStandardSchemeFactory());
    schemes.put(TupleScheme.class, new promoteBlock_argsTupleSchemeFactory());
  }

  public long userId; // required
  public long blockId; // required

  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    USER_ID((short)1, "userId"),
    BLOCK_ID((short)2, "blockId");

    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();

    static {
      for (_Fields field : EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 1: // USER_ID
          return USER_ID;
        case 2: // BLOCK_ID
          return BLOCK_ID;
        default:
          return null;
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }

    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    public static _Fields findByName(String name) {
      return byName.get(name);
    }

    private final short _thriftId;
    private final String _fieldName;

    _Fields(short thriftId, String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }

    public short getThriftFieldId() {
      return _thriftId;
    }

    public String getFieldName() {
      return _fieldName;
    }
  }

  // isset id assignments: bit positions in __isset_bitfield tracking which
  // primitive fields have been explicitly assigned.
  private static final int __USERID_ISSET_ID = 0;
  private static final int __BLOCKID_ISSET_ID = 1;
  private byte __isset_bitfield = 0;

  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.USER_ID, new org.apache.thrift.meta_data.FieldMetaData("userId", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
    tmpMap.put(_Fields.BLOCK_ID, new org.apache.thrift.meta_data.FieldMetaData("blockId", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
    metaDataMap = Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(promoteBlock_args.class, metaDataMap);
  }

  public promoteBlock_args() {
  }

  public promoteBlock_args(
    long userId,
    long blockId)
  {
    this();
    this.userId = userId;
    setUserIdIsSet(true);
    this.blockId = blockId;
    setBlockIdIsSet(true);
  }

  /**
   * Performs a deep copy on <i>other</i>.
   */
  public promoteBlock_args(promoteBlock_args other) {
    __isset_bitfield = other.__isset_bitfield;
    this.userId = other.userId;
    this.blockId = other.blockId;
  }

  public promoteBlock_args deepCopy() {
    return new promoteBlock_args(this);
  }

  @Override
  public void clear() {
    setUserIdIsSet(false);
    this.userId = 0;
    setBlockIdIsSet(false);
    this.blockId = 0;
  }

  public long getUserId() {
    return this.userId;
  }

  public promoteBlock_args setUserId(long userId) {
    this.userId = userId;
    setUserIdIsSet(true);
    return this;
  }

  public void unsetUserId() {
    __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __USERID_ISSET_ID);
  }

  /** Returns true if field userId is set (has been assigned a value) and false otherwise */
  public boolean isSetUserId() {
    return EncodingUtils.testBit(__isset_bitfield, __USERID_ISSET_ID);
  }

  public void setUserIdIsSet(boolean value) {
    __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __USERID_ISSET_ID, value);
  }

  public long getBlockId() {
    return this.blockId;
  }

  public promoteBlock_args setBlockId(long blockId) {
    this.blockId = blockId;
    setBlockIdIsSet(true);
    return this;
  }

  public void unsetBlockId() {
    __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __BLOCKID_ISSET_ID);
  }

  /** Returns true if field blockId is set (has been assigned a value) and false otherwise */
  public boolean isSetBlockId() {
    return EncodingUtils.testBit(__isset_bitfield, __BLOCKID_ISSET_ID);
  }

  public void setBlockIdIsSet(boolean value) {
    __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __BLOCKID_ISSET_ID, value);
  }

  public void setFieldValue(_Fields field, Object value) {
    switch (field) {
    case USER_ID:
      if (value == null) {
        unsetUserId();
      } else {
        setUserId((Long)value);
      }
      break;
    case BLOCK_ID:
      if (value == null) {
        unsetBlockId();
      } else {
        setBlockId((Long)value);
      }
      break;
    }
  }

  public Object getFieldValue(_Fields field) {
    switch (field) {
    case USER_ID:
      return Long.valueOf(getUserId());
    case BLOCK_ID:
      return Long.valueOf(getBlockId());
    }
    throw new IllegalStateException();
  }

  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new IllegalArgumentException();
    }
    switch (field) {
    case USER_ID:
      return isSetUserId();
    case BLOCK_ID:
      return isSetBlockId();
    }
    throw new IllegalStateException();
  }

  @Override
  public boolean equals(Object that) {
    if (that == null)
      return false;
    if (that instanceof promoteBlock_args)
      return this.equals((promoteBlock_args)that);
    return false;
  }

  public boolean equals(promoteBlock_args that) {
    if (that == null)
      return false;
    // Both fields are primitives and always "present", so this reduces to a
    // plain value comparison of userId and blockId.
    boolean this_present_userId = true;
    boolean that_present_userId = true;
    if (this_present_userId || that_present_userId) {
      if (!(this_present_userId && that_present_userId))
        return false;
      if (this.userId != that.userId)
        return false;
    }
    boolean this_present_blockId = true;
    boolean that_present_blockId = true;
    if (this_present_blockId || that_present_blockId) {
      if (!(this_present_blockId && that_present_blockId))
        return false;
      if (this.blockId != that.blockId)
        return false;
    }
    return true;
  }

  @Override
  public int hashCode() {
    // FIX: the generated stub returned the constant 0, collapsing every
    // instance into one bucket of any hash-based collection. Hash exactly
    // the fields equals() compares so equal objects hash equally.
    int result = 17;
    result = 31 * result + (int) (userId ^ (userId >>> 32));
    result = 31 * result + (int) (blockId ^ (blockId >>> 32));
    return result;
  }

  @Override
  public int compareTo(promoteBlock_args other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }

    int lastComparison = 0;

    lastComparison = Boolean.valueOf(isSetUserId()).compareTo(other.isSetUserId());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetUserId()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.userId, other.userId);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = Boolean.valueOf(isSetBlockId()).compareTo(other.isSetBlockId());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetBlockId()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.blockId, other.blockId);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }

  /** Maps a wire-level field id to its {@code _Fields} constant, or null if unknown. */
  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }

  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
  }

  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("promoteBlock_args(");
    boolean first = true;

    sb.append("userId:");
    sb.append(this.userId);
    first = false;
    if (!first) sb.append(", ");
    sb.append("blockId:");
    sb.append(this.blockId);
    first = false;
    sb.append(")");
    return sb.toString();
  }

  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    // check for sub-struct validity
  }

  // Java-serialization hooks bridge to the compact Thrift protocol.
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
    try {
      // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
      __isset_bitfield = 0;
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private static class promoteBlock_argsStandardSchemeFactory implements SchemeFactory {
    public promoteBlock_argsStandardScheme getScheme() {
      return new promoteBlock_argsStandardScheme();
    }
  }

  /** Standard-protocol codec: field/STOP framed, skips unknown ids for compatibility. */
  private static class promoteBlock_argsStandardScheme extends StandardScheme<promoteBlock_args> {

    public void read(org.apache.thrift.protocol.TProtocol iprot, promoteBlock_args struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
          case 1: // USER_ID
            if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
              struct.userId = iprot.readI64();
              struct.setUserIdIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 2: // BLOCK_ID
            if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
              struct.blockId = iprot.readI64();
              struct.setBlockIdIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();

      // check for required fields of primitive type, which can't be checked in the validate method
      struct.validate();
    }

    public void write(org.apache.thrift.protocol.TProtocol oprot, promoteBlock_args struct) throws org.apache.thrift.TException {
      struct.validate();

      oprot.writeStructBegin(STRUCT_DESC);
      oprot.writeFieldBegin(USER_ID_FIELD_DESC);
      oprot.writeI64(struct.userId);
      oprot.writeFieldEnd();
      oprot.writeFieldBegin(BLOCK_ID_FIELD_DESC);
      oprot.writeI64(struct.blockId);
      oprot.writeFieldEnd();
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }
  }

  private static class promoteBlock_argsTupleSchemeFactory implements SchemeFactory {
    public promoteBlock_argsTupleScheme getScheme() {
      return new promoteBlock_argsTupleScheme();
    }
  }

  /** Tuple-protocol codec: 2-bit presence bitset (bit 0 = userId, bit 1 = blockId), then set fields in order. */
  private static class promoteBlock_argsTupleScheme extends TupleScheme<promoteBlock_args> {

    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, promoteBlock_args struct) throws org.apache.thrift.TException {
      TTupleProtocol oprot = (TTupleProtocol) prot;
      BitSet optionals = new BitSet();
      if (struct.isSetUserId()) {
        optionals.set(0);
      }
      if (struct.isSetBlockId()) {
        optionals.set(1);
      }
      oprot.writeBitSet(optionals, 2);
      if (struct.isSetUserId()) {
        oprot.writeI64(struct.userId);
      }
      if (struct.isSetBlockId()) {
        oprot.writeI64(struct.blockId);
      }
    }

    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, promoteBlock_args struct) throws org.apache.thrift.TException {
      TTupleProtocol iprot = (TTupleProtocol) prot;
      BitSet incoming = iprot.readBitSet(2);
      if (incoming.get(0)) {
        struct.userId = iprot.readI64();
        struct.setUserIdIsSet(true);
      }
      if (incoming.get(1)) {
        struct.blockId = iprot.readI64();
        struct.setBlockIdIsSet(true);
      }
    }
  }

}
/**
 * Thrift-generated result struct for the promoteBlock RPC: a single boolean
 * success flag at field id 0. Only change from the generated form:
 * {@link #hashCode()} previously returned the constant 0; it now hashes the
 * success flag consistently with {@link #equals(promoteBlock_result)}.
 */
public static class promoteBlock_result implements org.apache.thrift.TBase<promoteBlock_result, promoteBlock_result._Fields>, java.io.Serializable, Cloneable, Comparable<promoteBlock_result> {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("promoteBlock_result");
  private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.BOOL, (short)0);

  // Serialization strategies: standard (field/STOP framed) and tuple (bitset-prefixed).
  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new promoteBlock_resultStandardSchemeFactory());
    schemes.put(TupleScheme.class, new promoteBlock_resultTupleSchemeFactory());
  }

  public boolean success; // required

  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    SUCCESS((short)0, "success");

    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();

    static {
      for (_Fields field : EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 0: // SUCCESS
          return SUCCESS;
        default:
          return null;
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }

    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    public static _Fields findByName(String name) {
      return byName.get(name);
    }

    private final short _thriftId;
    private final String _fieldName;

    _Fields(short thriftId, String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }

    public short getThriftFieldId() {
      return _thriftId;
    }

    public String getFieldName() {
      return _fieldName;
    }
  }

  // isset id assignments: tracks whether the primitive success flag was assigned.
  private static final int __SUCCESS_ISSET_ID = 0;
  private byte __isset_bitfield = 0;

  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL)));
    metaDataMap = Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(promoteBlock_result.class, metaDataMap);
  }

  public promoteBlock_result() {
  }

  public promoteBlock_result(
    boolean success)
  {
    this();
    this.success = success;
    setSuccessIsSet(true);
  }

  /**
   * Performs a deep copy on <i>other</i>.
   */
  public promoteBlock_result(promoteBlock_result other) {
    __isset_bitfield = other.__isset_bitfield;
    this.success = other.success;
  }

  public promoteBlock_result deepCopy() {
    return new promoteBlock_result(this);
  }

  @Override
  public void clear() {
    setSuccessIsSet(false);
    this.success = false;
  }

  public boolean isSuccess() {
    return this.success;
  }

  public promoteBlock_result setSuccess(boolean success) {
    this.success = success;
    setSuccessIsSet(true);
    return this;
  }

  public void unsetSuccess() {
    __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __SUCCESS_ISSET_ID);
  }

  /** Returns true if field success is set (has been assigned a value) and false otherwise */
  public boolean isSetSuccess() {
    return EncodingUtils.testBit(__isset_bitfield, __SUCCESS_ISSET_ID);
  }

  public void setSuccessIsSet(boolean value) {
    __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __SUCCESS_ISSET_ID, value);
  }

  public void setFieldValue(_Fields field, Object value) {
    switch (field) {
    case SUCCESS:
      if (value == null) {
        unsetSuccess();
      } else {
        setSuccess((Boolean)value);
      }
      break;
    }
  }

  public Object getFieldValue(_Fields field) {
    switch (field) {
    case SUCCESS:
      return Boolean.valueOf(isSuccess());
    }
    throw new IllegalStateException();
  }

  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new IllegalArgumentException();
    }
    switch (field) {
    case SUCCESS:
      return isSetSuccess();
    }
    throw new IllegalStateException();
  }

  @Override
  public boolean equals(Object that) {
    if (that == null)
      return false;
    if (that instanceof promoteBlock_result)
      return this.equals((promoteBlock_result)that);
    return false;
  }

  public boolean equals(promoteBlock_result that) {
    if (that == null)
      return false;
    // success is a primitive, always "present": equality is a plain value compare.
    boolean this_present_success = true;
    boolean that_present_success = true;
    if (this_present_success || that_present_success) {
      if (!(this_present_success && that_present_success))
        return false;
      if (this.success != that.success)
        return false;
    }
    return true;
  }

  @Override
  public int hashCode() {
    // FIX: the generated stub returned the constant 0. Hash the success flag,
    // the only field equals() compares (1231/1237 are Boolean.hashCode values).
    int result = 17;
    result = 31 * result + (success ? 1231 : 1237);
    return result;
  }

  @Override
  public int compareTo(promoteBlock_result other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }

    int lastComparison = 0;

    lastComparison = Boolean.valueOf(isSetSuccess()).compareTo(other.isSetSuccess());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetSuccess()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.success, other.success);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }

  /** Maps a wire-level field id to its {@code _Fields} constant, or null if unknown. */
  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }

  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
  }

  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("promoteBlock_result(");
    boolean first = true;

    sb.append("success:");
    sb.append(this.success);
    first = false;
    sb.append(")");
    return sb.toString();
  }

  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    // check for sub-struct validity
  }

  // Java-serialization hooks bridge to the compact Thrift protocol.
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
    try {
      // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
      __isset_bitfield = 0;
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private static class promoteBlock_resultStandardSchemeFactory implements SchemeFactory {
    public promoteBlock_resultStandardScheme getScheme() {
      return new promoteBlock_resultStandardScheme();
    }
  }

  /** Standard-protocol codec: field/STOP framed, skips unknown ids for compatibility. */
  private static class promoteBlock_resultStandardScheme extends StandardScheme<promoteBlock_result> {

    public void read(org.apache.thrift.protocol.TProtocol iprot, promoteBlock_result struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
          case 0: // SUCCESS
            if (schemeField.type == org.apache.thrift.protocol.TType.BOOL) {
              struct.success = iprot.readBool();
              struct.setSuccessIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();

      // check for required fields of primitive type, which can't be checked in the validate method
      struct.validate();
    }

    public void write(org.apache.thrift.protocol.TProtocol oprot, promoteBlock_result struct) throws org.apache.thrift.TException {
      struct.validate();

      oprot.writeStructBegin(STRUCT_DESC);
      if (struct.isSetSuccess()) {
        oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
        oprot.writeBool(struct.success);
        oprot.writeFieldEnd();
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }
  }

  private static class promoteBlock_resultTupleSchemeFactory implements SchemeFactory {
    public promoteBlock_resultTupleScheme getScheme() {
      return new promoteBlock_resultTupleScheme();
    }
  }

  /** Tuple-protocol codec: 1-bit presence bitset (bit 0 = success), then the flag if set. */
  private static class promoteBlock_resultTupleScheme extends TupleScheme<promoteBlock_result> {

    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, promoteBlock_result struct) throws org.apache.thrift.TException {
      TTupleProtocol oprot = (TTupleProtocol) prot;
      BitSet optionals = new BitSet();
      if (struct.isSetSuccess()) {
        optionals.set(0);
      }
      oprot.writeBitSet(optionals, 1);
      if (struct.isSetSuccess()) {
        oprot.writeBool(struct.success);
      }
    }

    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, promoteBlock_result struct) throws org.apache.thrift.TException {
      TTupleProtocol iprot = (TTupleProtocol) prot;
      BitSet incoming = iprot.readBitSet(1);
      if (incoming.get(0)) {
        struct.success = iprot.readBool();
        struct.setSuccessIsSet(true);
      }
    }
  }

}
/**
 * Thrift-generated argument struct for the cancelBlock RPC: the calling
 * user's id and the block to cancel (both i64).
 */
public static class cancelBlock_args implements org.apache.thrift.TBase<cancelBlock_args, cancelBlock_args._Fields>, java.io.Serializable, Cloneable, Comparable<cancelBlock_args> {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("cancelBlock_args");
  private static final org.apache.thrift.protocol.TField USER_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("userId", org.apache.thrift.protocol.TType.I64, (short)1);
  private static final org.apache.thrift.protocol.TField BLOCK_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("blockId", org.apache.thrift.protocol.TType.I64, (short)2);

  // Serialization strategies: standard (field/STOP framed) and tuple (bitset-prefixed).
  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new cancelBlock_argsStandardSchemeFactory());
    schemes.put(TupleScheme.class, new cancelBlock_argsTupleSchemeFactory());
  }

  public long userId; // required
  public long blockId; // required
  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    USER_ID((short)1, "userId"),
    BLOCK_ID((short)2, "blockId");

    // Lookup table from the Thrift field name to its enum constant.
    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();

    static {
      for (_Fields field : EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 1: // USER_ID
          return USER_ID;
        case 2: // BLOCK_ID
          return BLOCK_ID;
        default:
          return null;
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }

    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    public static _Fields findByName(String name) {
      return byName.get(name);
    }

    private final short _thriftId;
    private final String _fieldName;

    _Fields(short thriftId, String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }

    public short getThriftFieldId() {
      return _thriftId;
    }

    public String getFieldName() {
      return _fieldName;
    }
  }
  // isset id assignments: bit positions in __isset_bitfield tracking which
  // primitive fields have been explicitly assigned.
  private static final int __USERID_ISSET_ID = 0;
  private static final int __BLOCKID_ISSET_ID = 1;
  private byte __isset_bitfield = 0;

  // Runtime field metadata registered with the Thrift metadata registry.
  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.USER_ID, new org.apache.thrift.meta_data.FieldMetaData("userId", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
    tmpMap.put(_Fields.BLOCK_ID, new org.apache.thrift.meta_data.FieldMetaData("blockId", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
    metaDataMap = Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(cancelBlock_args.class, metaDataMap);
  }
  /** No-arg constructor: fields default to 0 and are marked unset. */
  public cancelBlock_args() {
  }

  /** All-fields constructor: assigns both ids and marks them set. */
  public cancelBlock_args(
    long userId,
    long blockId)
  {
    this();
    this.userId = userId;
    setUserIdIsSet(true);
    this.blockId = blockId;
    setBlockIdIsSet(true);
  }

  /**
   * Performs a deep copy on <i>other</i>.
   */
  public cancelBlock_args(cancelBlock_args other) {
    __isset_bitfield = other.__isset_bitfield;
    this.userId = other.userId;
    this.blockId = other.blockId;
  }

  public cancelBlock_args deepCopy() {
    return new cancelBlock_args(this);
  }
  /** Resets both fields to 0 and clears their isset bits. */
  @Override
  public void clear() {
    setUserIdIsSet(false);
    this.userId = 0;
    setBlockIdIsSet(false);
    this.blockId = 0;
  }

  public long getUserId() {
    return this.userId;
  }

  /** Fluent setter: assigns userId, marks it set, returns {@code this}. */
  public cancelBlock_args setUserId(long userId) {
    this.userId = userId;
    setUserIdIsSet(true);
    return this;
  }

  public void unsetUserId() {
    __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __USERID_ISSET_ID);
  }

  /** Returns true if field userId is set (has been assigned a value) and false otherwise */
  public boolean isSetUserId() {
    return EncodingUtils.testBit(__isset_bitfield, __USERID_ISSET_ID);
  }

  public void setUserIdIsSet(boolean value) {
    __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __USERID_ISSET_ID, value);
  }

  public long getBlockId() {
    return this.blockId;
  }

  /** Fluent setter: assigns blockId, marks it set, returns {@code this}. */
  public cancelBlock_args setBlockId(long blockId) {
    this.blockId = blockId;
    setBlockIdIsSet(true);
    return this;
  }

  public void unsetBlockId() {
    __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __BLOCKID_ISSET_ID);
  }

  /** Returns true if field blockId is set (has been assigned a value) and false otherwise */
  public boolean isSetBlockId() {
    return EncodingUtils.testBit(__isset_bitfield, __BLOCKID_ISSET_ID);
  }

  public void setBlockIdIsSet(boolean value) {
    __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __BLOCKID_ISSET_ID, value);
  }
  /** Generic setter by field enum; a null value unsets the field. */
  public void setFieldValue(_Fields field, Object value) {
    switch (field) {
    case USER_ID:
      if (value == null) {
        unsetUserId();
      } else {
        setUserId((Long)value);
      }
      break;
    case BLOCK_ID:
      if (value == null) {
        unsetBlockId();
      } else {
        setBlockId((Long)value);
      }
      break;
    }
  }

  /** Generic getter by field enum; primitives are returned boxed. */
  public Object getFieldValue(_Fields field) {
    switch (field) {
    case USER_ID:
      return Long.valueOf(getUserId());
    case BLOCK_ID:
      return Long.valueOf(getBlockId());
    }
    throw new IllegalStateException();
  }

  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new IllegalArgumentException();
    }
    switch (field) {
    case USER_ID:
      return isSetUserId();
    case BLOCK_ID:
      return isSetBlockId();
    }
    throw new IllegalStateException();
  }
/** Type-checked equality: null and foreign types are unequal; otherwise delegate to the typed overload. */
@Override
public boolean equals(Object that) {
  return (that instanceof cancelBlock_args) && this.equals((cancelBlock_args) that);
}
/**
 * Field-wise equality for two cancelBlock_args structs.
 * Both fields are required primitives (always "present"), so equality
 * reduces to a direct comparison of userId and blockId.
 */
public boolean equals(cancelBlock_args that) {
  if (that == null) {
    return false;
  }
  return this.userId == that.userId
      && this.blockId == that.blockId;
}
/**
 * Hash code consistent with {@link #equals(cancelBlock_args)}.
 *
 * The generated stub returned a constant 0, which is legal but forces every
 * instance into the same bucket of any hash-based collection. Deriving the
 * hash from the two fields that equals() compares preserves the
 * equal-objects-equal-hash contract while restoring O(1) hash lookups.
 */
@Override
public int hashCode() {
  int result = 1;
  // Java-6-compatible long hashing (same mixing as Long.hashCode on later JDKs).
  result = 31 * result + (int) (userId ^ (userId >>> 32));
  result = 31 * result + (int) (blockId ^ (blockId >>> 32));
  return result;
}
/**
 * Orders structs first by class name (for mixed-type comparisons), then by
 * each field's "is set" flag, then by the field value itself.
 */
@Override
public int compareTo(cancelBlock_args other) {
  if (!getClass().equals(other.getClass())) {
    return getClass().getName().compareTo(other.getClass().getName());
  }
  int cmp = Boolean.valueOf(isSetUserId()).compareTo(other.isSetUserId());
  if (cmp == 0 && isSetUserId()) {
    cmp = org.apache.thrift.TBaseHelper.compareTo(this.userId, other.userId);
  }
  if (cmp != 0) {
    return cmp;
  }
  cmp = Boolean.valueOf(isSetBlockId()).compareTo(other.isSetBlockId());
  if (cmp == 0 && isSetBlockId()) {
    cmp = org.apache.thrift.TBaseHelper.compareTo(this.blockId, other.blockId);
  }
  return cmp;
}
/** Maps a wire-level Thrift field id to its {@code _Fields} constant, or null if unknown. */
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
/** Deserializes this struct from {@code iprot} using the scheme registered for the protocol's type. */
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
}
/** Serializes this struct to {@code oprot} using the scheme registered for the protocol's type. */
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
}
/**
 * Debug representation: {@code cancelBlock_args(userId:<v>, blockId:<v>)}.
 * The generated version tracked a "first" flag that was always false after
 * the first field; this emits the identical string without the dead flag.
 */
@Override
public String toString() {
  StringBuilder out = new StringBuilder("cancelBlock_args(");
  out.append("userId:").append(this.userId);
  out.append(", ").append("blockId:").append(this.blockId);
  out.append(")");
  return out.toString();
}
/**
 * Pre-serialization validation hook. This struct declares no required
 * reference fields and no sub-structs, so there is nothing to check.
 */
public void validate() throws org.apache.thrift.TException {
// check for required fields
// check for sub-struct validity
}
/** Java-serialization hook: encodes the struct with the Thrift compact protocol. */
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
// Re-wrap so the method honors ObjectOutputStream's IOException-only contract.
throw new java.io.IOException(te);
}
}
/** Java-serialization hook: decodes the struct with the Thrift compact protocol. */
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
try {
// it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
__isset_bitfield = 0;
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
// Re-wrap so the method honors ObjectInputStream's IOException-only contract.
throw new java.io.IOException(te);
}
}
/** Factory producing the standard (field-tagged) serialization scheme for cancelBlock_args. */
private static class cancelBlock_argsStandardSchemeFactory implements SchemeFactory {
public cancelBlock_argsStandardScheme getScheme() {
return new cancelBlock_argsStandardScheme();
}
}
/**
 * Standard protocol scheme: every field on the wire carries an explicit
 * (id, type) header, so fields may arrive in any order and unknown fields
 * can be skipped for forward/backward compatibility.
 */
private static class cancelBlock_argsStandardScheme extends StandardScheme<cancelBlock_args> {
/** Reads fields until the STOP marker, skipping unknown ids or mismatched types. */
public void read(org.apache.thrift.protocol.TProtocol iprot, cancelBlock_args struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true)
{
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
case 1: // USER_ID
if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
struct.userId = iprot.readI64();
struct.setUserIdIsSet(true);
} else {
// Type mismatch: consume and discard the payload to stay in sync.
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 2: // BLOCK_ID
if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
struct.blockId = iprot.readI64();
struct.setBlockIdIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
// check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
/** Writes both i64 fields with their headers, then the field-stop marker. */
public void write(org.apache.thrift.protocol.TProtocol oprot, cancelBlock_args struct) throws org.apache.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC);
oprot.writeFieldBegin(USER_ID_FIELD_DESC);
oprot.writeI64(struct.userId);
oprot.writeFieldEnd();
oprot.writeFieldBegin(BLOCK_ID_FIELD_DESC);
oprot.writeI64(struct.blockId);
oprot.writeFieldEnd();
oprot.writeFieldStop();
oprot.writeStructEnd();
}
}
/** Factory producing the compact tuple serialization scheme for cancelBlock_args. */
private static class cancelBlock_argsTupleSchemeFactory implements SchemeFactory {
public cancelBlock_argsTupleScheme getScheme() {
return new cancelBlock_argsTupleScheme();
}
}
/**
 * Tuple scheme: a leading bitset declares which optional fields follow,
 * then values are written positionally with no per-field headers. Reader
 * and writer must agree on field order — do not reorder these statements.
 */
private static class cancelBlock_argsTupleScheme extends TupleScheme<cancelBlock_args> {
@Override
public void write(org.apache.thrift.protocol.TProtocol prot, cancelBlock_args struct) throws org.apache.thrift.TException {
TTupleProtocol oprot = (TTupleProtocol) prot;
// Bit 0 = userId present, bit 1 = blockId present.
BitSet optionals = new BitSet();
if (struct.isSetUserId()) {
optionals.set(0);
}
if (struct.isSetBlockId()) {
optionals.set(1);
}
oprot.writeBitSet(optionals, 2);
if (struct.isSetUserId()) {
oprot.writeI64(struct.userId);
}
if (struct.isSetBlockId()) {
oprot.writeI64(struct.blockId);
}
}
@Override
public void read(org.apache.thrift.protocol.TProtocol prot, cancelBlock_args struct) throws org.apache.thrift.TException {
TTupleProtocol iprot = (TTupleProtocol) prot;
// Mirror image of write(): consume the presence bitset, then each flagged value in order.
BitSet incoming = iprot.readBitSet(2);
if (incoming.get(0)) {
struct.userId = iprot.readI64();
struct.setUserIdIsSet(true);
}
if (incoming.get(1)) {
struct.blockId = iprot.readI64();
struct.setBlockIdIsSet(true);
}
}
}
}
/**
 * Thrift result wrapper for the void {@code cancelBlock} RPC.
 * The IDL declares no return value and no exceptions, so this struct has no
 * fields: serializing it only signals call completion. Because every instance
 * is equal to every other, the constant hashCode of 0 is correct here.
 */
public static class cancelBlock_result implements org.apache.thrift.TBase<cancelBlock_result, cancelBlock_result._Fields>, java.io.Serializable, Cloneable, Comparable<cancelBlock_result> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("cancelBlock_result");
// Maps each protocol flavor to the scheme used to (de)serialize this struct.
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
static {
schemes.put(StandardScheme.class, new cancelBlock_resultStandardSchemeFactory());
schemes.put(TupleScheme.class, new cancelBlock_resultTupleSchemeFactory());
}
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
;
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
static {
for (_Fields field : EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
* Find the _Fields constant that matches fieldId, or null if its not found.
*/
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
default:
return null;
}
}
/**
* Find the _Fields constant that matches fieldId, throwing an exception
* if it is not found.
*/
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
* Find the _Fields constant that matches name, or null if its not found.
*/
public static _Fields findByName(String name) {
return byName.get(name);
}
private final short _thriftId;
private final String _fieldName;
_Fields(short thriftId, String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public String getFieldName() {
return _fieldName;
}
}
// Reflection metadata registered with the Thrift runtime; empty for this field-less struct.
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
metaDataMap = Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(cancelBlock_result.class, metaDataMap);
}
public cancelBlock_result() {
}
/**
* Performs a deep copy on <i>other</i>.
*/
public cancelBlock_result(cancelBlock_result other) {
}
public cancelBlock_result deepCopy() {
return new cancelBlock_result(this);
}
@Override
public void clear() {
}
// Reflective accessors; no fields exist, so setFieldValue is a no-op and
// getFieldValue/isSet can only be reached with an invalid argument.
public void setFieldValue(_Fields field, Object value) {
switch (field) {
}
}
public Object getFieldValue(_Fields field) {
switch (field) {
}
throw new IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new IllegalArgumentException();
}
switch (field) {
}
throw new IllegalStateException();
}
@Override
public boolean equals(Object that) {
if (that == null)
return false;
if (that instanceof cancelBlock_result)
return this.equals((cancelBlock_result)that);
return false;
}
// With no fields, any two non-null instances compare equal.
public boolean equals(cancelBlock_result that) {
if (that == null)
return false;
return true;
}
@Override
public int hashCode() {
return 0;
}
@Override
public int compareTo(cancelBlock_result other) {
if (!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
return 0;
}
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
}
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder("cancelBlock_result(");
boolean first = true;
sb.append(")");
return sb.toString();
}
public void validate() throws org.apache.thrift.TException {
// check for required fields
// check for sub-struct validity
}
/** Java-serialization hook: encodes via the Thrift compact protocol. */
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
/** Java-serialization hook: decodes via the Thrift compact protocol. */
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
try {
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private static class cancelBlock_resultStandardSchemeFactory implements SchemeFactory {
public cancelBlock_resultStandardScheme getScheme() {
return new cancelBlock_resultStandardScheme();
}
}
/** Standard scheme: skips any unexpected fields and writes only the stop marker. */
private static class cancelBlock_resultStandardScheme extends StandardScheme<cancelBlock_result> {
public void read(org.apache.thrift.protocol.TProtocol iprot, cancelBlock_result struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true)
{
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
// check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
public void write(org.apache.thrift.protocol.TProtocol oprot, cancelBlock_result struct) throws org.apache.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC);
oprot.writeFieldStop();
oprot.writeStructEnd();
}
}
private static class cancelBlock_resultTupleSchemeFactory implements SchemeFactory {
public cancelBlock_resultTupleScheme getScheme() {
return new cancelBlock_resultTupleScheme();
}
}
/** Tuple scheme: nothing on the wire for a field-less struct. */
private static class cancelBlock_resultTupleScheme extends TupleScheme<cancelBlock_result> {
@Override
public void write(org.apache.thrift.protocol.TProtocol prot, cancelBlock_result struct) throws org.apache.thrift.TException {
TTupleProtocol oprot = (TTupleProtocol) prot;
}
@Override
public void read(org.apache.thrift.protocol.TProtocol prot, cancelBlock_result struct) throws org.apache.thrift.TException {
TTupleProtocol iprot = (TTupleProtocol) prot;
}
}
}
/**
 * Thrift argument wrapper for the {@code getBlockLocation} RPC.
 * Carries three required i64 fields: userId, blockId and initialBytes.
 * Presence of each primitive is tracked via bits in {@code __isset_bitfield}.
 */
public static class getBlockLocation_args implements org.apache.thrift.TBase<getBlockLocation_args, getBlockLocation_args._Fields>, java.io.Serializable, Cloneable, Comparable<getBlockLocation_args> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("getBlockLocation_args");
// Wire descriptors: field ids 1..3, all 64-bit integers.
private static final org.apache.thrift.protocol.TField USER_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("userId", org.apache.thrift.protocol.TType.I64, (short)1);
private static final org.apache.thrift.protocol.TField BLOCK_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("blockId", org.apache.thrift.protocol.TType.I64, (short)2);
private static final org.apache.thrift.protocol.TField INITIAL_BYTES_FIELD_DESC = new org.apache.thrift.protocol.TField("initialBytes", org.apache.thrift.protocol.TType.I64, (short)3);
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
static {
schemes.put(StandardScheme.class, new getBlockLocation_argsStandardSchemeFactory());
schemes.put(TupleScheme.class, new getBlockLocation_argsTupleSchemeFactory());
}
public long userId; // required
public long blockId; // required
public long initialBytes; // required
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
USER_ID((short)1, "userId"),
BLOCK_ID((short)2, "blockId"),
INITIAL_BYTES((short)3, "initialBytes");
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
static {
for (_Fields field : EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
* Find the _Fields constant that matches fieldId, or null if its not found.
*/
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
case 1: // USER_ID
return USER_ID;
case 2: // BLOCK_ID
return BLOCK_ID;
case 3: // INITIAL_BYTES
return INITIAL_BYTES;
default:
return null;
}
}
/**
* Find the _Fields constant that matches fieldId, throwing an exception
* if it is not found.
*/
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
* Find the _Fields constant that matches name, or null if its not found.
*/
public static _Fields findByName(String name) {
return byName.get(name);
}
private final short _thriftId;
private final String _fieldName;
_Fields(short thriftId, String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public String getFieldName() {
return _fieldName;
}
}
// isset id assignments
private static final int __USERID_ISSET_ID = 0;
private static final int __BLOCKID_ISSET_ID = 1;
private static final int __INITIALBYTES_ISSET_ID = 2;
private byte __isset_bitfield = 0;
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.USER_ID, new org.apache.thrift.meta_data.FieldMetaData("userId", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
tmpMap.put(_Fields.BLOCK_ID, new org.apache.thrift.meta_data.FieldMetaData("blockId", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
tmpMap.put(_Fields.INITIAL_BYTES, new org.apache.thrift.meta_data.FieldMetaData("initialBytes", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
metaDataMap = Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(getBlockLocation_args.class, metaDataMap);
}
public getBlockLocation_args() {
}
/** All-fields constructor; marks every field as set. */
public getBlockLocation_args(
long userId,
long blockId,
long initialBytes)
{
this();
this.userId = userId;
setUserIdIsSet(true);
this.blockId = blockId;
setBlockIdIsSet(true);
this.initialBytes = initialBytes;
setInitialBytesIsSet(true);
}
/**
* Performs a deep copy on <i>other</i>.
*/
public getBlockLocation_args(getBlockLocation_args other) {
__isset_bitfield = other.__isset_bitfield;
this.userId = other.userId;
this.blockId = other.blockId;
this.initialBytes = other.initialBytes;
}
public getBlockLocation_args deepCopy() {
return new getBlockLocation_args(this);
}
@Override
public void clear() {
setUserIdIsSet(false);
this.userId = 0;
setBlockIdIsSet(false);
this.blockId = 0;
setInitialBytesIsSet(false);
this.initialBytes = 0;
}
/** Returns the current value of the {@code userId} field. */
public long getUserId() {
return this.userId;
}
/** Sets {@code userId}, marks it as set, and returns this for chaining. */
public getBlockLocation_args setUserId(long userId) {
this.userId = userId;
setUserIdIsSet(true);
return this;
}
public void unsetUserId() {
__isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __USERID_ISSET_ID);
}
/** Returns true if field userId is set (has been assigned a value) and false otherwise */
public boolean isSetUserId() {
return EncodingUtils.testBit(__isset_bitfield, __USERID_ISSET_ID);
}
public void setUserIdIsSet(boolean value) {
__isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __USERID_ISSET_ID, value);
}
/** Returns the current value of the {@code blockId} field. */
public long getBlockId() {
return this.blockId;
}
/** Sets {@code blockId}, marks it as set, and returns this for chaining. */
public getBlockLocation_args setBlockId(long blockId) {
this.blockId = blockId;
setBlockIdIsSet(true);
return this;
}
public void unsetBlockId() {
__isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __BLOCKID_ISSET_ID);
}
/** Returns true if field blockId is set (has been assigned a value) and false otherwise */
public boolean isSetBlockId() {
return EncodingUtils.testBit(__isset_bitfield, __BLOCKID_ISSET_ID);
}
public void setBlockIdIsSet(boolean value) {
__isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __BLOCKID_ISSET_ID, value);
}
/** Returns the current value of the {@code initialBytes} field. */
public long getInitialBytes() {
return this.initialBytes;
}
/** Sets {@code initialBytes}, marks it as set, and returns this for chaining. */
public getBlockLocation_args setInitialBytes(long initialBytes) {
this.initialBytes = initialBytes;
setInitialBytesIsSet(true);
return this;
}
public void unsetInitialBytes() {
__isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __INITIALBYTES_ISSET_ID);
}
/** Returns true if field initialBytes is set (has been assigned a value) and false otherwise */
public boolean isSetInitialBytes() {
return EncodingUtils.testBit(__isset_bitfield, __INITIALBYTES_ISSET_ID);
}
public void setInitialBytesIsSet(boolean value) {
__isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __INITIALBYTES_ISSET_ID, value);
}
/** Reflective setter: non-null values are unboxed and assigned; null unsets the field. */
public void setFieldValue(_Fields field, Object value) {
switch (field) {
case USER_ID:
if (value == null) {
unsetUserId();
} else {
setUserId((Long)value);
}
break;
case BLOCK_ID:
if (value == null) {
unsetBlockId();
} else {
setBlockId((Long)value);
}
break;
case INITIAL_BYTES:
if (value == null) {
unsetInitialBytes();
} else {
setInitialBytes((Long)value);
}
break;
}
}
/** Reflective getter: returns the boxed value for the requested field. */
public Object getFieldValue(_Fields field) {
switch (field) {
case USER_ID:
return Long.valueOf(getUserId());
case BLOCK_ID:
return Long.valueOf(getBlockId());
case INITIAL_BYTES:
return Long.valueOf(getInitialBytes());
}
throw new IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new IllegalArgumentException();
}
switch (field) {
case USER_ID:
return isSetUserId();
case BLOCK_ID:
return isSetBlockId();
case INITIAL_BYTES:
return isSetInitialBytes();
}
throw new IllegalStateException();
}
@Override
public boolean equals(Object that) {
if (that == null)
return false;
if (that instanceof getBlockLocation_args)
return this.equals((getBlockLocation_args)that);
return false;
}
// Field-wise equality; the present_* flags are constant true because all
// three fields are required primitives.
public boolean equals(getBlockLocation_args that) {
if (that == null)
return false;
boolean this_present_userId = true;
boolean that_present_userId = true;
if (this_present_userId || that_present_userId) {
if (!(this_present_userId && that_present_userId))
return false;
if (this.userId != that.userId)
return false;
}
boolean this_present_blockId = true;
boolean that_present_blockId = true;
if (this_present_blockId || that_present_blockId) {
if (!(this_present_blockId && that_present_blockId))
return false;
if (this.blockId != that.blockId)
return false;
}
boolean this_present_initialBytes = true;
boolean that_present_initialBytes = true;
if (this_present_initialBytes || that_present_initialBytes) {
if (!(this_present_initialBytes && that_present_initialBytes))
return false;
if (this.initialBytes != that.initialBytes)
return false;
}
return true;
}
// NOTE(review): constant 0 satisfies the equals/hashCode contract but puts
// every instance in one hash bucket — consider a field-based hash if these
// structs are ever used as HashMap/HashSet keys.
@Override
public int hashCode() {
return 0;
}
/** Orders by class name, then by each field's isset flag, then by value. */
@Override
public int compareTo(getBlockLocation_args other) {
if (!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
lastComparison = Boolean.valueOf(isSetUserId()).compareTo(other.isSetUserId());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetUserId()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.userId, other.userId);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetBlockId()).compareTo(other.isSetBlockId());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetBlockId()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.blockId, other.blockId);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetInitialBytes()).compareTo(other.isSetInitialBytes());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetInitialBytes()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.initialBytes, other.initialBytes);
if (lastComparison != 0) {
return lastComparison;
}
}
return 0;
}
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
}
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder("getBlockLocation_args(");
boolean first = true;
sb.append("userId:");
sb.append(this.userId);
first = false;
if (!first) sb.append(", ");
sb.append("blockId:");
sb.append(this.blockId);
first = false;
if (!first) sb.append(", ");
sb.append("initialBytes:");
sb.append(this.initialBytes);
first = false;
sb.append(")");
return sb.toString();
}
public void validate() throws org.apache.thrift.TException {
// check for required fields
// check for sub-struct validity
}
/** Java-serialization hook: encodes via the Thrift compact protocol. */
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
/** Java-serialization hook: decodes via the Thrift compact protocol. */
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
try {
// it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
__isset_bitfield = 0;
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private static class getBlockLocation_argsStandardSchemeFactory implements SchemeFactory {
public getBlockLocation_argsStandardScheme getScheme() {
return new getBlockLocation_argsStandardScheme();
}
}
/** Standard scheme: field-tagged wire format; unknown/mistyped fields are skipped. */
private static class getBlockLocation_argsStandardScheme extends StandardScheme<getBlockLocation_args> {
public void read(org.apache.thrift.protocol.TProtocol iprot, getBlockLocation_args struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true)
{
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
case 1: // USER_ID
if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
struct.userId = iprot.readI64();
struct.setUserIdIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 2: // BLOCK_ID
if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
struct.blockId = iprot.readI64();
struct.setBlockIdIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 3: // INITIAL_BYTES
if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
struct.initialBytes = iprot.readI64();
struct.setInitialBytesIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
// check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
public void write(org.apache.thrift.protocol.TProtocol oprot, getBlockLocation_args struct) throws org.apache.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC);
oprot.writeFieldBegin(USER_ID_FIELD_DESC);
oprot.writeI64(struct.userId);
oprot.writeFieldEnd();
oprot.writeFieldBegin(BLOCK_ID_FIELD_DESC);
oprot.writeI64(struct.blockId);
oprot.writeFieldEnd();
oprot.writeFieldBegin(INITIAL_BYTES_FIELD_DESC);
oprot.writeI64(struct.initialBytes);
oprot.writeFieldEnd();
oprot.writeFieldStop();
oprot.writeStructEnd();
}
}
private static class getBlockLocation_argsTupleSchemeFactory implements SchemeFactory {
public getBlockLocation_argsTupleScheme getScheme() {
return new getBlockLocation_argsTupleScheme();
}
}
/** Tuple scheme: presence bitset (bits 0..2) followed by the flagged i64 values in order. */
private static class getBlockLocation_argsTupleScheme extends TupleScheme<getBlockLocation_args> {
@Override
public void write(org.apache.thrift.protocol.TProtocol prot, getBlockLocation_args struct) throws org.apache.thrift.TException {
TTupleProtocol oprot = (TTupleProtocol) prot;
BitSet optionals = new BitSet();
if (struct.isSetUserId()) {
optionals.set(0);
}
if (struct.isSetBlockId()) {
optionals.set(1);
}
if (struct.isSetInitialBytes()) {
optionals.set(2);
}
oprot.writeBitSet(optionals, 3);
if (struct.isSetUserId()) {
oprot.writeI64(struct.userId);
}
if (struct.isSetBlockId()) {
oprot.writeI64(struct.blockId);
}
if (struct.isSetInitialBytes()) {
oprot.writeI64(struct.initialBytes);
}
}
@Override
public void read(org.apache.thrift.protocol.TProtocol prot, getBlockLocation_args struct) throws org.apache.thrift.TException {
TTupleProtocol iprot = (TTupleProtocol) prot;
// Mirror image of write(): read the bitset, then each flagged value in order.
BitSet incoming = iprot.readBitSet(3);
if (incoming.get(0)) {
struct.userId = iprot.readI64();
struct.setUserIdIsSet(true);
}
if (incoming.get(1)) {
struct.blockId = iprot.readI64();
struct.setBlockIdIsSet(true);
}
if (incoming.get(2)) {
struct.initialBytes = iprot.readI64();
struct.setInitialBytesIsSet(true);
}
}
}
}
public static class getBlockLocation_result implements org.apache.thrift.TBase<getBlockLocation_result, getBlockLocation_result._Fields>, java.io.Serializable, Cloneable, Comparable<getBlockLocation_result> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("getBlockLocation_result");
private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.STRING, (short)0);
private static final org.apache.thrift.protocol.TField E_P_FIELD_DESC = new org.apache.thrift.protocol.TField("eP", org.apache.thrift.protocol.TType.STRUCT, (short)1);
private static final org.apache.thrift.protocol.TField E_S_FIELD_DESC = new org.apache.thrift.protocol.TField("eS", org.apache.thrift.protocol.TType.STRUCT, (short)2);
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
static {
schemes.put(StandardScheme.class, new getBlockLocation_resultStandardSchemeFactory());
schemes.put(TupleScheme.class, new getBlockLocation_resultTupleSchemeFactory());
}
public String success; // required
public OutOfSpaceException eP; // required
public FileAlreadyExistException eS; // required
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
SUCCESS((short)0, "success"),
E_P((short)1, "eP"),
E_S((short)2, "eS");
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
static {
for (_Fields field : EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
* Find the _Fields constant that matches fieldId, or null if its not found.
*/
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
case 0: // SUCCESS
return SUCCESS;
case 1:
return E_P;
case 2:
return E_S;
default:
return null;
}
}
/**
* Find the _Fields constant that matches fieldId, throwing an exception
* if it is not found.
*/
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
* Find the _Fields constant that matches name, or null if its not found.
*/
public static _Fields findByName(String name) {
return byName.get(name);
}
private final short _thriftId;
private final String _fieldName;
_Fields(short thriftId, String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public String getFieldName() {
return _fieldName;
}
}
// isset id assignments
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
tmpMap.put(_Fields.E_P, new org.apache.thrift.meta_data.FieldMetaData("eP", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT)));
tmpMap.put(_Fields.E_S, new org.apache.thrift.meta_data.FieldMetaData("eS", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT)));
metaDataMap = Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(getBlockLocation_result.class, metaDataMap);
}
public getBlockLocation_result() {
}
public getBlockLocation_result(
String success,
OutOfSpaceException eP,
FileAlreadyExistException eS)
{
this();
this.success = success;
this.eP = eP;
this.eS = eS;
}
/**
* Performs a deep copy on <i>other</i>.
*/
public getBlockLocation_result(getBlockLocation_result other) {
if (other.isSetSuccess()) {
this.success = other.success;
}
if (other.isSetEP()) {
this.eP = new OutOfSpaceException(other.eP);
}
if (other.isSetES()) {
this.eS = new FileAlreadyExistException(other.eS);
}
}
public getBlockLocation_result deepCopy() {
return new getBlockLocation_result(this);
}
@Override
// Resets all three result fields to their unset state; for reference-typed
// Thrift fields "unset" and null are the same thing (see isSetSuccess etc.).
public void clear() {
this.success = null;
this.eP = null;
this.eS = null;
}
public String getSuccess() {
return this.success;
}
public getBlockLocation_result setSuccess(String success) {
this.success = success;
return this;
}
public void unsetSuccess() {
this.success = null;
}
/** Returns true if field success is set (has been assigned a value) and false otherwise */
public boolean isSetSuccess() {
return this.success != null;
}
public void setSuccessIsSet(boolean value) {
if (!value) {
this.success = null;
}
}
public OutOfSpaceException getEP() {
return this.eP;
}
public getBlockLocation_result setEP(OutOfSpaceException eP) {
this.eP = eP;
return this;
}
public void unsetEP() {
this.eP = null;
}
/** Returns true if field eP is set (has been assigned a value) and false otherwise */
public boolean isSetEP() {
return this.eP != null;
}
public void setEPIsSet(boolean value) {
if (!value) {
this.eP = null;
}
}
public FileAlreadyExistException getES() {
return this.eS;
}
public getBlockLocation_result setES(FileAlreadyExistException eS) {
this.eS = eS;
return this;
}
public void unsetES() {
this.eS = null;
}
/** Returns true if field eS is set (has been assigned a value) and false otherwise */
public boolean isSetES() {
return this.eS != null;
}
public void setESIsSet(boolean value) {
if (!value) {
this.eS = null;
}
}
public void setFieldValue(_Fields field, Object value) {
switch (field) {
case SUCCESS:
if (value == null) {
unsetSuccess();
} else {
setSuccess((String)value);
}
break;
case E_P:
if (value == null) {
unsetEP();
} else {
setEP((OutOfSpaceException)value);
}
break;
case E_S:
if (value == null) {
unsetES();
} else {
setES((FileAlreadyExistException)value);
}
break;
}
}
public Object getFieldValue(_Fields field) {
switch (field) {
case SUCCESS:
return getSuccess();
case E_P:
return getEP();
case E_S:
return getES();
}
throw new IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new IllegalArgumentException();
}
switch (field) {
case SUCCESS:
return isSetSuccess();
case E_P:
return isSetEP();
case E_S:
return isSetES();
}
throw new IllegalStateException();
}
@Override
public boolean equals(Object that) {
if (that == null)
return false;
if (that instanceof getBlockLocation_result)
return this.equals((getBlockLocation_result)that);
return false;
}
public boolean equals(getBlockLocation_result that) {
if (that == null)
return false;
boolean this_present_success = true && this.isSetSuccess();
boolean that_present_success = true && that.isSetSuccess();
if (this_present_success || that_present_success) {
if (!(this_present_success && that_present_success))
return false;
if (!this.success.equals(that.success))
return false;
}
boolean this_present_eP = true && this.isSetEP();
boolean that_present_eP = true && that.isSetEP();
if (this_present_eP || that_present_eP) {
if (!(this_present_eP && that_present_eP))
return false;
if (!this.eP.equals(that.eP))
return false;
}
boolean this_present_eS = true && this.isSetES();
boolean that_present_eS = true && that.isSetES();
if (this_present_eS || that_present_eS) {
if (!(this_present_eS && that_present_eS))
return false;
if (!this.eS.equals(that.eS))
return false;
}
return true;
}
@Override
public int hashCode() {
  // Was a constant 0, which is contract-legal but collapses every HashMap/
  // HashSet bucket holding these results into a linked list. Combine exactly
  // the fields equals() inspects, guarded by the same isSet checks, so the
  // equals/hashCode contract still holds.
  // NOTE(review): assumes OutOfSpaceException/FileAlreadyExistException
  // hashCode is consistent with their equals — true for Thrift-generated
  // types; confirm if they are hand-written.
  int result = 17;
  result = 31 * result + (isSetSuccess() ? success.hashCode() : 0);
  result = 31 * result + (isSetEP() ? eP.hashCode() : 0);
  result = 31 * result + (isSetES() ? eS.hashCode() : 0);
  return result;
}
@Override
public int compareTo(getBlockLocation_result other) {
if (!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
lastComparison = Boolean.valueOf(isSetSuccess()).compareTo(other.isSetSuccess());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetSuccess()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.success, other.success);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetEP()).compareTo(other.isSetEP());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetEP()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.eP, other.eP);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetES()).compareTo(other.isSetES());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetES()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.eS, other.eS);
if (lastComparison != 0) {
return lastComparison;
}
}
return 0;
}
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
}
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
}
@Override
public String toString() {
  // Renders "getBlockLocation_result(success:…, eP:…, eS:…)", printing the
  // literal text "null" for unset fields — same output as before; the dead
  // always-false "first" flag from the generator is simply gone.
  StringBuilder sb = new StringBuilder("getBlockLocation_result(");
  sb.append("success:");
  sb.append(this.success == null ? "null" : this.success);
  sb.append(", ");
  sb.append("eP:");
  sb.append(this.eP == null ? "null" : String.valueOf(this.eP));
  sb.append(", ");
  sb.append("eS:");
  sb.append(this.eS == null ? "null" : String.valueOf(this.eS));
  sb.append(")");
  return sb.toString();
}
public void validate() throws org.apache.thrift.TException {
// check for required fields
// check for sub-struct validity
}
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
try {
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private static class getBlockLocation_resultStandardSchemeFactory implements SchemeFactory {
public getBlockLocation_resultStandardScheme getScheme() {
return new getBlockLocation_resultStandardScheme();
}
}
/**
 * Standard (field-id tagged) wire format for getBlockLocation_result.
 * Field ids: 0 = success (STRING), 1 = eP (STRUCT), 2 = eS (STRUCT).
 */
private static class getBlockLocation_resultStandardScheme extends StandardScheme<getBlockLocation_result> {
// Reads fields in any order until the STOP marker; fields with an unknown id
// or an unexpected wire type are skipped rather than failing the whole read.
public void read(org.apache.thrift.protocol.TProtocol iprot, getBlockLocation_result struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true)
{
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
case 0: // SUCCESS
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.success = iprot.readString();
struct.setSuccessIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 1: // E_P
if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
struct.eP = new OutOfSpaceException();
struct.eP.read(iprot);
struct.setEPIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 2: // E_S
if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
struct.eS = new FileAlreadyExistException();
struct.eS.read(iprot);
struct.setESIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
// check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
// Writes only the fields that are set (non-null); unset fields are omitted
// entirely rather than written as empty values.
public void write(org.apache.thrift.protocol.TProtocol oprot, getBlockLocation_result struct) throws org.apache.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC);
if (struct.success != null) {
oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
oprot.writeString(struct.success);
oprot.writeFieldEnd();
}
if (struct.eP != null) {
oprot.writeFieldBegin(E_P_FIELD_DESC);
struct.eP.write(oprot);
oprot.writeFieldEnd();
}
if (struct.eS != null) {
oprot.writeFieldBegin(E_S_FIELD_DESC);
struct.eS.write(oprot);
oprot.writeFieldEnd();
}
oprot.writeFieldStop();
oprot.writeStructEnd();
}
}
private static class getBlockLocation_resultTupleSchemeFactory implements SchemeFactory {
public getBlockLocation_resultTupleScheme getScheme() {
return new getBlockLocation_resultTupleScheme();
}
}
private static class getBlockLocation_resultTupleScheme extends TupleScheme<getBlockLocation_result> {
@Override
public void write(org.apache.thrift.protocol.TProtocol prot, getBlockLocation_result struct) throws org.apache.thrift.TException {
TTupleProtocol oprot = (TTupleProtocol) prot;
BitSet optionals = new BitSet();
if (struct.isSetSuccess()) {
optionals.set(0);
}
if (struct.isSetEP()) {
optionals.set(1);
}
if (struct.isSetES()) {
optionals.set(2);
}
oprot.writeBitSet(optionals, 3);
if (struct.isSetSuccess()) {
oprot.writeString(struct.success);
}
if (struct.isSetEP()) {
struct.eP.write(oprot);
}
if (struct.isSetES()) {
struct.eS.write(oprot);
}
}
@Override
public void read(org.apache.thrift.protocol.TProtocol prot, getBlockLocation_result struct) throws org.apache.thrift.TException {
TTupleProtocol iprot = (TTupleProtocol) prot;
BitSet incoming = iprot.readBitSet(3);
if (incoming.get(0)) {
struct.success = iprot.readString();
struct.setSuccessIsSet(true);
}
if (incoming.get(1)) {
struct.eP = new OutOfSpaceException();
struct.eP.read(iprot);
struct.setEPIsSet(true);
}
if (incoming.get(2)) {
struct.eS = new FileAlreadyExistException();
struct.eS.read(iprot);
struct.setESIsSet(true);
}
}
}
}
public static class requestSpace_args implements org.apache.thrift.TBase<requestSpace_args, requestSpace_args._Fields>, java.io.Serializable, Cloneable, Comparable<requestSpace_args> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("requestSpace_args");
private static final org.apache.thrift.protocol.TField USER_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("userId", org.apache.thrift.protocol.TType.I64, (short)1);
private static final org.apache.thrift.protocol.TField BLOCK_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("blockId", org.apache.thrift.protocol.TType.I64, (short)2);
private static final org.apache.thrift.protocol.TField REQUEST_BYTES_FIELD_DESC = new org.apache.thrift.protocol.TField("requestBytes", org.apache.thrift.protocol.TType.I64, (short)3);
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
static {
schemes.put(StandardScheme.class, new requestSpace_argsStandardSchemeFactory());
schemes.put(TupleScheme.class, new requestSpace_argsTupleSchemeFactory());
}
public long userId; // required
public long blockId; // required
public long requestBytes; // required
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
USER_ID((short)1, "userId"),
BLOCK_ID((short)2, "blockId"),
REQUEST_BYTES((short)3, "requestBytes");
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
static {
for (_Fields field : EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
* Find the _Fields constant that matches fieldId, or null if it is not found.
*/
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
case 1: // USER_ID
return USER_ID;
case 2: // BLOCK_ID
return BLOCK_ID;
case 3: // REQUEST_BYTES
return REQUEST_BYTES;
default:
return null;
}
}
/**
* Find the _Fields constant that matches fieldId, throwing an exception
* if it is not found.
*/
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
* Find the _Fields constant that matches name, or null if it is not found.
*/
public static _Fields findByName(String name) {
return byName.get(name);
}
private final short _thriftId;
private final String _fieldName;
_Fields(short thriftId, String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public String getFieldName() {
return _fieldName;
}
}
// isset id assignments
private static final int __USERID_ISSET_ID = 0;
private static final int __BLOCKID_ISSET_ID = 1;
private static final int __REQUESTBYTES_ISSET_ID = 2;
private byte __isset_bitfield = 0;
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.USER_ID, new org.apache.thrift.meta_data.FieldMetaData("userId", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
tmpMap.put(_Fields.BLOCK_ID, new org.apache.thrift.meta_data.FieldMetaData("blockId", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
tmpMap.put(_Fields.REQUEST_BYTES, new org.apache.thrift.meta_data.FieldMetaData("requestBytes", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
metaDataMap = Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(requestSpace_args.class, metaDataMap);
}
public requestSpace_args() {
}
public requestSpace_args(
long userId,
long blockId,
long requestBytes)
{
this();
this.userId = userId;
setUserIdIsSet(true);
this.blockId = blockId;
setBlockIdIsSet(true);
this.requestBytes = requestBytes;
setRequestBytesIsSet(true);
}
/**
* Performs a deep copy on <i>other</i>.
*/
public requestSpace_args(requestSpace_args other) {
__isset_bitfield = other.__isset_bitfield;
this.userId = other.userId;
this.blockId = other.blockId;
this.requestBytes = other.requestBytes;
}
public requestSpace_args deepCopy() {
return new requestSpace_args(this);
}
@Override
public void clear() {
setUserIdIsSet(false);
this.userId = 0;
setBlockIdIsSet(false);
this.blockId = 0;
setRequestBytesIsSet(false);
this.requestBytes = 0;
}
public long getUserId() {
return this.userId;
}
public requestSpace_args setUserId(long userId) {
this.userId = userId;
setUserIdIsSet(true);
return this;
}
public void unsetUserId() {
__isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __USERID_ISSET_ID);
}
/** Returns true if field userId is set (has been assigned a value) and false otherwise */
public boolean isSetUserId() {
return EncodingUtils.testBit(__isset_bitfield, __USERID_ISSET_ID);
}
public void setUserIdIsSet(boolean value) {
__isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __USERID_ISSET_ID, value);
}
public long getBlockId() {
return this.blockId;
}
public requestSpace_args setBlockId(long blockId) {
this.blockId = blockId;
setBlockIdIsSet(true);
return this;
}
public void unsetBlockId() {
__isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __BLOCKID_ISSET_ID);
}
/** Returns true if field blockId is set (has been assigned a value) and false otherwise */
public boolean isSetBlockId() {
return EncodingUtils.testBit(__isset_bitfield, __BLOCKID_ISSET_ID);
}
public void setBlockIdIsSet(boolean value) {
__isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __BLOCKID_ISSET_ID, value);
}
public long getRequestBytes() {
return this.requestBytes;
}
public requestSpace_args setRequestBytes(long requestBytes) {
this.requestBytes = requestBytes;
setRequestBytesIsSet(true);
return this;
}
public void unsetRequestBytes() {
__isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __REQUESTBYTES_ISSET_ID);
}
/** Returns true if field requestBytes is set (has been assigned a value) and false otherwise */
public boolean isSetRequestBytes() {
return EncodingUtils.testBit(__isset_bitfield, __REQUESTBYTES_ISSET_ID);
}
public void setRequestBytesIsSet(boolean value) {
__isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __REQUESTBYTES_ISSET_ID, value);
}
public void setFieldValue(_Fields field, Object value) {
switch (field) {
case USER_ID:
if (value == null) {
unsetUserId();
} else {
setUserId((Long)value);
}
break;
case BLOCK_ID:
if (value == null) {
unsetBlockId();
} else {
setBlockId((Long)value);
}
break;
case REQUEST_BYTES:
if (value == null) {
unsetRequestBytes();
} else {
setRequestBytes((Long)value);
}
break;
}
}
public Object getFieldValue(_Fields field) {
switch (field) {
case USER_ID:
return Long.valueOf(getUserId());
case BLOCK_ID:
return Long.valueOf(getBlockId());
case REQUEST_BYTES:
return Long.valueOf(getRequestBytes());
}
throw new IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new IllegalArgumentException();
}
switch (field) {
case USER_ID:
return isSetUserId();
case BLOCK_ID:
return isSetBlockId();
case REQUEST_BYTES:
return isSetRequestBytes();
}
throw new IllegalStateException();
}
@Override
public boolean equals(Object that) {
if (that == null)
return false;
if (that instanceof requestSpace_args)
return this.equals((requestSpace_args)that);
return false;
}
public boolean equals(requestSpace_args that) {
  // Two argument structs are equal iff all three primitive fields match.
  // (The generated this_present_*/that_present_* flags were the constant
  // true for primitive fields, so this direct comparison is trace-identical.)
  if (that == null) {
    return false;
  }
  return this.userId == that.userId
      && this.blockId == that.blockId
      && this.requestBytes == that.requestBytes;
}
@Override
public int hashCode() {
  // Was a constant 0 — contract-legal, but every instance collides in hash
  // containers. equals() compares the raw userId/blockId/requestBytes values
  // unconditionally, so hashing those same values keeps equal objects on
  // equal hashes. Longs are folded to int via the standard xor-shift.
  int result = 17;
  result = 31 * result + (int) (userId ^ (userId >>> 32));
  result = 31 * result + (int) (blockId ^ (blockId >>> 32));
  result = 31 * result + (int) (requestBytes ^ (requestBytes >>> 32));
  return result;
}
@Override
public int compareTo(requestSpace_args other) {
if (!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
lastComparison = Boolean.valueOf(isSetUserId()).compareTo(other.isSetUserId());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetUserId()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.userId, other.userId);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetBlockId()).compareTo(other.isSetBlockId());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetBlockId()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.blockId, other.blockId);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetRequestBytes()).compareTo(other.isSetRequestBytes());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetRequestBytes()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.requestBytes, other.requestBytes);
if (lastComparison != 0) {
return lastComparison;
}
}
return 0;
}
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
}
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder("requestSpace_args(");
boolean first = true;
sb.append("userId:");
sb.append(this.userId);
first = false;
if (!first) sb.append(", ");
sb.append("blockId:");
sb.append(this.blockId);
first = false;
if (!first) sb.append(", ");
sb.append("requestBytes:");
sb.append(this.requestBytes);
first = false;
sb.append(")");
return sb.toString();
}
public void validate() throws org.apache.thrift.TException {
// check for required fields
// check for sub-struct validity
}
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
try {
// it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
__isset_bitfield = 0;
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private static class requestSpace_argsStandardSchemeFactory implements SchemeFactory {
public requestSpace_argsStandardScheme getScheme() {
return new requestSpace_argsStandardScheme();
}
}
private static class requestSpace_argsStandardScheme extends StandardScheme<requestSpace_args> {
public void read(org.apache.thrift.protocol.TProtocol iprot, requestSpace_args struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true)
{
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
case 1: // USER_ID
if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
struct.userId = iprot.readI64();
struct.setUserIdIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 2: // BLOCK_ID
if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
struct.blockId = iprot.readI64();
struct.setBlockIdIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 3: // REQUEST_BYTES
if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
struct.requestBytes = iprot.readI64();
struct.setRequestBytesIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
// check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
public void write(org.apache.thrift.protocol.TProtocol oprot, requestSpace_args struct) throws org.apache.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC);
oprot.writeFieldBegin(USER_ID_FIELD_DESC);
oprot.writeI64(struct.userId);
oprot.writeFieldEnd();
oprot.writeFieldBegin(BLOCK_ID_FIELD_DESC);
oprot.writeI64(struct.blockId);
oprot.writeFieldEnd();
oprot.writeFieldBegin(REQUEST_BYTES_FIELD_DESC);
oprot.writeI64(struct.requestBytes);
oprot.writeFieldEnd();
oprot.writeFieldStop();
oprot.writeStructEnd();
}
}
private static class requestSpace_argsTupleSchemeFactory implements SchemeFactory {
public requestSpace_argsTupleScheme getScheme() {
return new requestSpace_argsTupleScheme();
}
}
private static class requestSpace_argsTupleScheme extends TupleScheme<requestSpace_args> {
@Override
public void write(org.apache.thrift.protocol.TProtocol prot, requestSpace_args struct) throws org.apache.thrift.TException {
TTupleProtocol oprot = (TTupleProtocol) prot;
BitSet optionals = new BitSet();
if (struct.isSetUserId()) {
optionals.set(0);
}
if (struct.isSetBlockId()) {
optionals.set(1);
}
if (struct.isSetRequestBytes()) {
optionals.set(2);
}
oprot.writeBitSet(optionals, 3);
if (struct.isSetUserId()) {
oprot.writeI64(struct.userId);
}
if (struct.isSetBlockId()) {
oprot.writeI64(struct.blockId);
}
if (struct.isSetRequestBytes()) {
oprot.writeI64(struct.requestBytes);
}
}
@Override
public void read(org.apache.thrift.protocol.TProtocol prot, requestSpace_args struct) throws org.apache.thrift.TException {
TTupleProtocol iprot = (TTupleProtocol) prot;
BitSet incoming = iprot.readBitSet(3);
if (incoming.get(0)) {
struct.userId = iprot.readI64();
struct.setUserIdIsSet(true);
}
if (incoming.get(1)) {
struct.blockId = iprot.readI64();
struct.setBlockIdIsSet(true);
}
if (incoming.get(2)) {
struct.requestBytes = iprot.readI64();
struct.setRequestBytesIsSet(true);
}
}
}
}
public static class requestSpace_result implements org.apache.thrift.TBase<requestSpace_result, requestSpace_result._Fields>, java.io.Serializable, Cloneable, Comparable<requestSpace_result> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("requestSpace_result");
private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.BOOL, (short)0);
private static final org.apache.thrift.protocol.TField E_P_FIELD_DESC = new org.apache.thrift.protocol.TField("eP", org.apache.thrift.protocol.TType.STRUCT, (short)1);
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
static {
schemes.put(StandardScheme.class, new requestSpace_resultStandardSchemeFactory());
schemes.put(TupleScheme.class, new requestSpace_resultTupleSchemeFactory());
}
public boolean success; // required
public FileDoesNotExistException eP; // required
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
SUCCESS((short)0, "success"),
E_P((short)1, "eP");
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
static {
for (_Fields field : EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
* Find the _Fields constant that matches fieldId, or null if its not found.
*/
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
case 0: // SUCCESS
return SUCCESS;
case 1:
return E_P;
default:
return null;
}
}
/**
* Find the _Fields constant that matches fieldId, throwing an exception
* if it is not found.
*/
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
* Find the _Fields constant that matches name, or null if its not found.
*/
public static _Fields findByName(String name) {
return byName.get(name);
}
private final short _thriftId;
private final String _fieldName;
_Fields(short thriftId, String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public String getFieldName() {
return _fieldName;
}
}
// isset id assignments
private static final int __SUCCESS_ISSET_ID = 0;
private byte __isset_bitfield = 0;
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL)));
tmpMap.put(_Fields.E_P, new org.apache.thrift.meta_data.FieldMetaData("eP", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT)));
metaDataMap = Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(requestSpace_result.class, metaDataMap);
}
public requestSpace_result() {
}
public requestSpace_result(
boolean success,
FileDoesNotExistException eP)
{
this();
this.success = success;
setSuccessIsSet(true);
this.eP = eP;
}
/**
* Performs a deep copy on <i>other</i>.
*/
public requestSpace_result(requestSpace_result other) {
__isset_bitfield = other.__isset_bitfield;
this.success = other.success;
if (other.isSetEP()) {
this.eP = new FileDoesNotExistException(other.eP);
}
}
public requestSpace_result deepCopy() {
return new requestSpace_result(this);
}
@Override
public void clear() {
setSuccessIsSet(false);
this.success = false;
this.eP = null;
}
public boolean isSuccess() {
return this.success;
}
public requestSpace_result setSuccess(boolean success) {
this.success = success;
setSuccessIsSet(true);
return this;
}
public void unsetSuccess() {
__isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __SUCCESS_ISSET_ID);
}
/** Returns true if field success is set (has been assigned a value) and false otherwise */
public boolean isSetSuccess() {
return EncodingUtils.testBit(__isset_bitfield, __SUCCESS_ISSET_ID);
}
public void setSuccessIsSet(boolean value) {
__isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __SUCCESS_ISSET_ID, value);
}
public FileDoesNotExistException getEP() {
return this.eP;
}
public requestSpace_result setEP(FileDoesNotExistException eP) {
this.eP = eP;
return this;
}
public void unsetEP() {
this.eP = null;
}
/** Returns true if field eP is set (has been assigned a value) and false otherwise */
public boolean isSetEP() {
return this.eP != null;
}
public void setEPIsSet(boolean value) {
if (!value) {
this.eP = null;
}
}
public void setFieldValue(_Fields field, Object value) {
switch (field) {
case SUCCESS:
if (value == null) {
unsetSuccess();
} else {
setSuccess((Boolean)value);
}
break;
case E_P:
if (value == null) {
unsetEP();
} else {
setEP((FileDoesNotExistException)value);
}
break;
}
}
public Object getFieldValue(_Fields field) {
switch (field) {
case SUCCESS:
return Boolean.valueOf(isSuccess());
case E_P:
return getEP();
}
throw new IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new IllegalArgumentException();
}
switch (field) {
case SUCCESS:
return isSetSuccess();
case E_P:
return isSetEP();
}
throw new IllegalStateException();
}
@Override
public boolean equals(Object that) {
if (that == null)
return false;
if (that instanceof requestSpace_result)
return this.equals((requestSpace_result)that);
return false;
}
public boolean equals(requestSpace_result that) {
if (that == null)
return false;
boolean this_present_success = true;
boolean that_present_success = true;
if (this_present_success || that_present_success) {
if (!(this_present_success && that_present_success))
return false;
if (this.success != that.success)
return false;
}
boolean this_present_eP = true && this.isSetEP();
boolean that_present_eP = true && that.isSetEP();
if (this_present_eP || that_present_eP) {
if (!(this_present_eP && that_present_eP))
return false;
if (!this.eP.equals(that.eP))
return false;
}
return true;
}
@Override
public int hashCode() {
  // Was a constant 0, collapsing hash-container performance. Combine the two
  // fields equals() compares: the raw boolean success and, when set, eP.
  // NOTE(review): assumes FileDoesNotExistException's hashCode is consistent
  // with its equals — true for Thrift-generated exceptions; confirm if
  // hand-written.
  int result = 17;
  result = 31 * result + (success ? 1231 : 1237);
  result = 31 * result + (isSetEP() ? eP.hashCode() : 0);
  return result;
}
@Override
public int compareTo(requestSpace_result other) {
// Total order used by the Thrift runtime: first by class name, then for each
// field by its "is set" flag (unset < set), then by the field's value.
if (!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
lastComparison = Boolean.valueOf(isSetSuccess()).compareTo(other.isSetSuccess());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetSuccess()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.success, other.success);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetEP()).compareTo(other.isSetEP());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetEP()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.eP, other.eP);
if (lastComparison != 0) {
return lastComparison;
}
}
return 0;
}
/** Maps a wire-level thrift field id to its _Fields constant, or null if unknown. */
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
/** Deserializes this struct from the protocol, dispatching to the scheme matching the protocol type. */
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
}
/** Serializes this struct to the protocol, dispatching to the scheme matching the protocol type. */
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder("requestSpace_result(");
// The "first" flag is generator boilerplate for comma placement between fields.
boolean first = true;
sb.append("success:");
sb.append(this.success);
first = false;
if (!first) sb.append(", ");
sb.append("eP:");
if (this.eP == null) {
sb.append("null");
} else {
sb.append(this.eP);
}
first = false;
sb.append(")");
return sb.toString();
}
/** No required fields in this struct, so validation is a no-op. */
public void validate() throws org.apache.thrift.TException {
// check for required fields
// check for sub-struct validity
}
/** Java serialization hook: delegates to Thrift's compact binary encoding. */
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
/** Java deserialization hook: resets isset state, then reads via Thrift compact encoding. */
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
try {
// it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
__isset_bitfield = 0;
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
/** Factory producing the field-tagged (standard) serialization scheme for requestSpace_result. */
private static class requestSpace_resultStandardSchemeFactory implements SchemeFactory {
public requestSpace_resultStandardScheme getScheme() {
return new requestSpace_resultStandardScheme();
}
}
/** Standard scheme: reads/writes requestSpace_result with explicit per-field type/id headers. */
private static class requestSpace_resultStandardScheme extends StandardScheme<requestSpace_result> {
public void read(org.apache.thrift.protocol.TProtocol iprot, requestSpace_result struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true)
{
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
case 0: // SUCCESS
if (schemeField.type == org.apache.thrift.protocol.TType.BOOL) {
struct.success = iprot.readBool();
struct.setSuccessIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 1: // E_P
if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
struct.eP = new FileDoesNotExistException();
struct.eP.read(iprot);
struct.setEPIsSet(true);
} else {
// Type mismatch: skip the field rather than failing, for forward compatibility.
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
// check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
public void write(org.apache.thrift.protocol.TProtocol oprot, requestSpace_result struct) throws org.apache.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC);
if (struct.isSetSuccess()) {
oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
oprot.writeBool(struct.success);
oprot.writeFieldEnd();
}
if (struct.eP != null) {
oprot.writeFieldBegin(E_P_FIELD_DESC);
struct.eP.write(oprot);
oprot.writeFieldEnd();
}
oprot.writeFieldStop();
oprot.writeStructEnd();
}
}
/** Factory producing the compact tuple serialization scheme for requestSpace_result. */
private static class requestSpace_resultTupleSchemeFactory implements SchemeFactory {
public requestSpace_resultTupleScheme getScheme() {
return new requestSpace_resultTupleScheme();
}
}
/** Tuple scheme: a leading bitset records which optional fields follow, then values in field order. */
private static class requestSpace_resultTupleScheme extends TupleScheme<requestSpace_result> {
@Override
public void write(org.apache.thrift.protocol.TProtocol prot, requestSpace_result struct) throws org.apache.thrift.TException {
TTupleProtocol oprot = (TTupleProtocol) prot;
BitSet optionals = new BitSet();
if (struct.isSetSuccess()) {
optionals.set(0);
}
if (struct.isSetEP()) {
optionals.set(1);
}
oprot.writeBitSet(optionals, 2);
if (struct.isSetSuccess()) {
oprot.writeBool(struct.success);
}
if (struct.isSetEP()) {
struct.eP.write(oprot);
}
}
@Override
public void read(org.apache.thrift.protocol.TProtocol prot, requestSpace_result struct) throws org.apache.thrift.TException {
TTupleProtocol iprot = (TTupleProtocol) prot;
BitSet incoming = iprot.readBitSet(2);
if (incoming.get(0)) {
struct.success = iprot.readBool();
struct.setSuccessIsSet(true);
}
if (incoming.get(1)) {
struct.eP = new FileDoesNotExistException();
struct.eP.read(iprot);
struct.setEPIsSet(true);
}
}
}
}
/**
 * Thrift-generated argument struct for the unlockBlock RPC.
 * Carries blockId (i64, field 1) and userId (i64, field 2). Primitive fields
 * track their "set" state in __isset_bitfield. Only change from the generated
 * stub: hashCode() now hashes the fields equals() compares instead of
 * returning the constant 0.
 */
public static class unlockBlock_args implements org.apache.thrift.TBase<unlockBlock_args, unlockBlock_args._Fields>, java.io.Serializable, Cloneable, Comparable<unlockBlock_args> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("unlockBlock_args");
private static final org.apache.thrift.protocol.TField BLOCK_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("blockId", org.apache.thrift.protocol.TType.I64, (short)1);
private static final org.apache.thrift.protocol.TField USER_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("userId", org.apache.thrift.protocol.TType.I64, (short)2);
// Serialization scheme registry: standard (field-tagged) and tuple (bitset-prefixed).
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
static {
schemes.put(StandardScheme.class, new unlockBlock_argsStandardSchemeFactory());
schemes.put(TupleScheme.class, new unlockBlock_argsTupleSchemeFactory());
}
public long blockId; // required
public long userId; // required
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
BLOCK_ID((short)1, "blockId"),
USER_ID((short)2, "userId");
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
static {
for (_Fields field : EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
 * Find the _Fields constant that matches fieldId, or null if its not found.
 */
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
case 1: // BLOCK_ID
return BLOCK_ID;
case 2: // USER_ID
return USER_ID;
default:
return null;
}
}
/**
 * Find the _Fields constant that matches fieldId, throwing an exception
 * if it is not found.
 */
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
 * Find the _Fields constant that matches name, or null if its not found.
 */
public static _Fields findByName(String name) {
return byName.get(name);
}
private final short _thriftId;
private final String _fieldName;
_Fields(short thriftId, String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public String getFieldName() {
return _fieldName;
}
}
// isset id assignments
private static final int __BLOCKID_ISSET_ID = 0;
private static final int __USERID_ISSET_ID = 1;
private byte __isset_bitfield = 0;
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.BLOCK_ID, new org.apache.thrift.meta_data.FieldMetaData("blockId", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
tmpMap.put(_Fields.USER_ID, new org.apache.thrift.meta_data.FieldMetaData("userId", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
metaDataMap = Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(unlockBlock_args.class, metaDataMap);
}
public unlockBlock_args() {
}
public unlockBlock_args(
long blockId,
long userId)
{
this();
this.blockId = blockId;
setBlockIdIsSet(true);
this.userId = userId;
setUserIdIsSet(true);
}
/**
 * Performs a deep copy on <i>other</i>.
 */
public unlockBlock_args(unlockBlock_args other) {
__isset_bitfield = other.__isset_bitfield;
this.blockId = other.blockId;
this.userId = other.userId;
}
public unlockBlock_args deepCopy() {
return new unlockBlock_args(this);
}
@Override
public void clear() {
setBlockIdIsSet(false);
this.blockId = 0;
setUserIdIsSet(false);
this.userId = 0;
}
public long getBlockId() {
return this.blockId;
}
public unlockBlock_args setBlockId(long blockId) {
this.blockId = blockId;
setBlockIdIsSet(true);
return this;
}
public void unsetBlockId() {
__isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __BLOCKID_ISSET_ID);
}
/** Returns true if field blockId is set (has been assigned a value) and false otherwise */
public boolean isSetBlockId() {
return EncodingUtils.testBit(__isset_bitfield, __BLOCKID_ISSET_ID);
}
public void setBlockIdIsSet(boolean value) {
__isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __BLOCKID_ISSET_ID, value);
}
public long getUserId() {
return this.userId;
}
public unlockBlock_args setUserId(long userId) {
this.userId = userId;
setUserIdIsSet(true);
return this;
}
public void unsetUserId() {
__isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __USERID_ISSET_ID);
}
/** Returns true if field userId is set (has been assigned a value) and false otherwise */
public boolean isSetUserId() {
return EncodingUtils.testBit(__isset_bitfield, __USERID_ISSET_ID);
}
public void setUserIdIsSet(boolean value) {
__isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __USERID_ISSET_ID, value);
}
/** Generic setter used by the Thrift runtime; a null value unsets the field. */
public void setFieldValue(_Fields field, Object value) {
switch (field) {
case BLOCK_ID:
if (value == null) {
unsetBlockId();
} else {
setBlockId((Long)value);
}
break;
case USER_ID:
if (value == null) {
unsetUserId();
} else {
setUserId((Long)value);
}
break;
}
}
/** Generic getter; primitives are boxed. */
public Object getFieldValue(_Fields field) {
switch (field) {
case BLOCK_ID:
return Long.valueOf(getBlockId());
case USER_ID:
return Long.valueOf(getUserId());
}
throw new IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new IllegalArgumentException();
}
switch (field) {
case BLOCK_ID:
return isSetBlockId();
case USER_ID:
return isSetUserId();
}
throw new IllegalStateException();
}
@Override
public boolean equals(Object that) {
if (that == null)
return false;
if (that instanceof unlockBlock_args)
return this.equals((unlockBlock_args)that);
return false;
}
public boolean equals(unlockBlock_args that) {
if (that == null)
return false;
boolean this_present_blockId = true;
boolean that_present_blockId = true;
if (this_present_blockId || that_present_blockId) {
if (!(this_present_blockId && that_present_blockId))
return false;
if (this.blockId != that.blockId)
return false;
}
boolean this_present_userId = true;
boolean that_present_userId = true;
if (this_present_userId || that_present_userId) {
if (!(this_present_userId && that_present_userId))
return false;
if (this.userId != that.userId)
return false;
}
return true;
}
@Override
public int hashCode() {
// FIX: the generated stub returned 0, which is contract-legal but collapses
// every instance into one hash bucket. Hash both fields equals() compares.
int result = 17;
result = 31 * result + (int) (blockId ^ (blockId >>> 32));
result = 31 * result + (int) (userId ^ (userId >>> 32));
return result;
}
@Override
public int compareTo(unlockBlock_args other) {
if (!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
lastComparison = Boolean.valueOf(isSetBlockId()).compareTo(other.isSetBlockId());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetBlockId()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.blockId, other.blockId);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetUserId()).compareTo(other.isSetUserId());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetUserId()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.userId, other.userId);
if (lastComparison != 0) {
return lastComparison;
}
}
return 0;
}
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
}
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder("unlockBlock_args(");
boolean first = true;
sb.append("blockId:");
sb.append(this.blockId);
first = false;
if (!first) sb.append(", ");
sb.append("userId:");
sb.append(this.userId);
first = false;
sb.append(")");
return sb.toString();
}
/** No required fields, so validation is a no-op. */
public void validate() throws org.apache.thrift.TException {
// check for required fields
// check for sub-struct validity
}
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
try {
// it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
__isset_bitfield = 0;
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private static class unlockBlock_argsStandardSchemeFactory implements SchemeFactory {
public unlockBlock_argsStandardScheme getScheme() {
return new unlockBlock_argsStandardScheme();
}
}
/** Standard scheme: explicit per-field type/id headers on the wire. */
private static class unlockBlock_argsStandardScheme extends StandardScheme<unlockBlock_args> {
public void read(org.apache.thrift.protocol.TProtocol iprot, unlockBlock_args struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true)
{
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
case 1: // BLOCK_ID
if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
struct.blockId = iprot.readI64();
struct.setBlockIdIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 2: // USER_ID
if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
struct.userId = iprot.readI64();
struct.setUserIdIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
// check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
public void write(org.apache.thrift.protocol.TProtocol oprot, unlockBlock_args struct) throws org.apache.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC);
oprot.writeFieldBegin(BLOCK_ID_FIELD_DESC);
oprot.writeI64(struct.blockId);
oprot.writeFieldEnd();
oprot.writeFieldBegin(USER_ID_FIELD_DESC);
oprot.writeI64(struct.userId);
oprot.writeFieldEnd();
oprot.writeFieldStop();
oprot.writeStructEnd();
}
}
private static class unlockBlock_argsTupleSchemeFactory implements SchemeFactory {
public unlockBlock_argsTupleScheme getScheme() {
return new unlockBlock_argsTupleScheme();
}
}
/** Tuple scheme: a leading bitset says which fields follow, then values in order. */
private static class unlockBlock_argsTupleScheme extends TupleScheme<unlockBlock_args> {
@Override
public void write(org.apache.thrift.protocol.TProtocol prot, unlockBlock_args struct) throws org.apache.thrift.TException {
TTupleProtocol oprot = (TTupleProtocol) prot;
BitSet optionals = new BitSet();
if (struct.isSetBlockId()) {
optionals.set(0);
}
if (struct.isSetUserId()) {
optionals.set(1);
}
oprot.writeBitSet(optionals, 2);
if (struct.isSetBlockId()) {
oprot.writeI64(struct.blockId);
}
if (struct.isSetUserId()) {
oprot.writeI64(struct.userId);
}
}
@Override
public void read(org.apache.thrift.protocol.TProtocol prot, unlockBlock_args struct) throws org.apache.thrift.TException {
TTupleProtocol iprot = (TTupleProtocol) prot;
BitSet incoming = iprot.readBitSet(2);
if (incoming.get(0)) {
struct.blockId = iprot.readI64();
struct.setBlockIdIsSet(true);
}
if (incoming.get(1)) {
struct.userId = iprot.readI64();
struct.setUserIdIsSet(true);
}
}
}
}
/**
 * Thrift-generated result struct for the unlockBlock RPC: a single boolean
 * success flag (field 0). Only change from the generated stub: hashCode()
 * now hashes the success flag instead of returning the constant 0.
 */
public static class unlockBlock_result implements org.apache.thrift.TBase<unlockBlock_result, unlockBlock_result._Fields>, java.io.Serializable, Cloneable, Comparable<unlockBlock_result> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("unlockBlock_result");
private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.BOOL, (short)0);
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
static {
schemes.put(StandardScheme.class, new unlockBlock_resultStandardSchemeFactory());
schemes.put(TupleScheme.class, new unlockBlock_resultTupleSchemeFactory());
}
public boolean success; // required
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
SUCCESS((short)0, "success");
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
static {
for (_Fields field : EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
 * Find the _Fields constant that matches fieldId, or null if its not found.
 */
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
case 0: // SUCCESS
return SUCCESS;
default:
return null;
}
}
/**
 * Find the _Fields constant that matches fieldId, throwing an exception
 * if it is not found.
 */
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
 * Find the _Fields constant that matches name, or null if its not found.
 */
public static _Fields findByName(String name) {
return byName.get(name);
}
private final short _thriftId;
private final String _fieldName;
_Fields(short thriftId, String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public String getFieldName() {
return _fieldName;
}
}
// isset id assignments
private static final int __SUCCESS_ISSET_ID = 0;
private byte __isset_bitfield = 0;
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL)));
metaDataMap = Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(unlockBlock_result.class, metaDataMap);
}
public unlockBlock_result() {
}
public unlockBlock_result(
boolean success)
{
this();
this.success = success;
setSuccessIsSet(true);
}
/**
 * Performs a deep copy on <i>other</i>.
 */
public unlockBlock_result(unlockBlock_result other) {
__isset_bitfield = other.__isset_bitfield;
this.success = other.success;
}
public unlockBlock_result deepCopy() {
return new unlockBlock_result(this);
}
@Override
public void clear() {
setSuccessIsSet(false);
this.success = false;
}
public boolean isSuccess() {
return this.success;
}
public unlockBlock_result setSuccess(boolean success) {
this.success = success;
setSuccessIsSet(true);
return this;
}
public void unsetSuccess() {
__isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __SUCCESS_ISSET_ID);
}
/** Returns true if field success is set (has been assigned a value) and false otherwise */
public boolean isSetSuccess() {
return EncodingUtils.testBit(__isset_bitfield, __SUCCESS_ISSET_ID);
}
public void setSuccessIsSet(boolean value) {
__isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __SUCCESS_ISSET_ID, value);
}
/** Generic setter used by the Thrift runtime; a null value unsets the field. */
public void setFieldValue(_Fields field, Object value) {
switch (field) {
case SUCCESS:
if (value == null) {
unsetSuccess();
} else {
setSuccess((Boolean)value);
}
break;
}
}
/** Generic getter; the primitive success flag is boxed. */
public Object getFieldValue(_Fields field) {
switch (field) {
case SUCCESS:
return Boolean.valueOf(isSuccess());
}
throw new IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new IllegalArgumentException();
}
switch (field) {
case SUCCESS:
return isSetSuccess();
}
throw new IllegalStateException();
}
@Override
public boolean equals(Object that) {
if (that == null)
return false;
if (that instanceof unlockBlock_result)
return this.equals((unlockBlock_result)that);
return false;
}
public boolean equals(unlockBlock_result that) {
if (that == null)
return false;
boolean this_present_success = true;
boolean that_present_success = true;
if (this_present_success || that_present_success) {
if (!(this_present_success && that_present_success))
return false;
if (this.success != that.success)
return false;
}
return true;
}
@Override
public int hashCode() {
// FIX: the generated stub returned 0; hash the one field equals() compares.
return 31 * 17 + (success ? 1231 : 1237);
}
@Override
public int compareTo(unlockBlock_result other) {
if (!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
lastComparison = Boolean.valueOf(isSetSuccess()).compareTo(other.isSetSuccess());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetSuccess()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.success, other.success);
if (lastComparison != 0) {
return lastComparison;
}
}
return 0;
}
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
}
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder("unlockBlock_result(");
boolean first = true;
sb.append("success:");
sb.append(this.success);
first = false;
sb.append(")");
return sb.toString();
}
/** No required fields, so validation is a no-op. */
public void validate() throws org.apache.thrift.TException {
// check for required fields
// check for sub-struct validity
}
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
try {
// it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
__isset_bitfield = 0;
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private static class unlockBlock_resultStandardSchemeFactory implements SchemeFactory {
public unlockBlock_resultStandardScheme getScheme() {
return new unlockBlock_resultStandardScheme();
}
}
/** Standard scheme: explicit per-field type/id headers on the wire. */
private static class unlockBlock_resultStandardScheme extends StandardScheme<unlockBlock_result> {
public void read(org.apache.thrift.protocol.TProtocol iprot, unlockBlock_result struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true)
{
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
case 0: // SUCCESS
if (schemeField.type == org.apache.thrift.protocol.TType.BOOL) {
struct.success = iprot.readBool();
struct.setSuccessIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
// check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
public void write(org.apache.thrift.protocol.TProtocol oprot, unlockBlock_result struct) throws org.apache.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC);
if (struct.isSetSuccess()) {
oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
oprot.writeBool(struct.success);
oprot.writeFieldEnd();
}
oprot.writeFieldStop();
oprot.writeStructEnd();
}
}
private static class unlockBlock_resultTupleSchemeFactory implements SchemeFactory {
public unlockBlock_resultTupleScheme getScheme() {
return new unlockBlock_resultTupleScheme();
}
}
/** Tuple scheme: a leading bitset says which fields follow, then values in order. */
private static class unlockBlock_resultTupleScheme extends TupleScheme<unlockBlock_result> {
@Override
public void write(org.apache.thrift.protocol.TProtocol prot, unlockBlock_result struct) throws org.apache.thrift.TException {
TTupleProtocol oprot = (TTupleProtocol) prot;
BitSet optionals = new BitSet();
if (struct.isSetSuccess()) {
optionals.set(0);
}
oprot.writeBitSet(optionals, 1);
if (struct.isSetSuccess()) {
oprot.writeBool(struct.success);
}
}
@Override
public void read(org.apache.thrift.protocol.TProtocol prot, unlockBlock_result struct) throws org.apache.thrift.TException {
TTupleProtocol iprot = (TTupleProtocol) prot;
BitSet incoming = iprot.readBitSet(1);
if (incoming.get(0)) {
struct.success = iprot.readBool();
struct.setSuccessIsSet(true);
}
}
}
}
public static class userHeartbeat_args implements org.apache.thrift.TBase<userHeartbeat_args, userHeartbeat_args._Fields>, java.io.Serializable, Cloneable, Comparable<userHeartbeat_args> {
// Wire-level descriptors for the userHeartbeat_args struct: struct name and
// the single i64 userId field (thrift field id 1).
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("userHeartbeat_args");
private static final org.apache.thrift.protocol.TField USER_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("userId", org.apache.thrift.protocol.TType.I64, (short)1);
// Serialization scheme registry: standard (field-tagged) and tuple (bitset-prefixed).
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
static {
schemes.put(StandardScheme.class, new userHeartbeat_argsStandardSchemeFactory());
schemes.put(TupleScheme.class, new userHeartbeat_argsTupleSchemeFactory());
}
public long userId; // required
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
USER_ID((short)1, "userId");
// Lookup table from field name to constant, built once at class load.
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
static {
for (_Fields field : EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
 * Find the _Fields constant that matches fieldId, or null if its not found.
 */
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
case 1: // USER_ID
return USER_ID;
default:
return null;
}
}
/**
 * Find the _Fields constant that matches fieldId, throwing an exception
 * if it is not found.
 */
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
 * Find the _Fields constant that matches name, or null if its not found.
 */
public static _Fields findByName(String name) {
return byName.get(name);
}
private final short _thriftId;
private final String _fieldName;
_Fields(short thriftId, String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public String getFieldName() {
return _fieldName;
}
}
// isset id assignments
// Primitive fields cannot be null, so a bitfield tracks whether each one was assigned.
private static final int __USERID_ISSET_ID = 0;
private byte __isset_bitfield = 0;
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.USER_ID, new org.apache.thrift.meta_data.FieldMetaData("userId", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
metaDataMap = Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(userHeartbeat_args.class, metaDataMap);
}
public userHeartbeat_args() {
}
/** Convenience constructor that assigns userId and marks it set. */
public userHeartbeat_args(
long userId)
{
this();
this.userId = userId;
setUserIdIsSet(true);
}
/**
 * Performs a deep copy on <i>other</i>.
 */
public userHeartbeat_args(userHeartbeat_args other) {
__isset_bitfield = other.__isset_bitfield;
this.userId = other.userId;
}
public userHeartbeat_args deepCopy() {
return new userHeartbeat_args(this);
}
@Override
public void clear() {
setUserIdIsSet(false);
this.userId = 0;
}
public long getUserId() {
return this.userId;
}
/** Fluent setter: assigns userId, marks it set, and returns this. */
public userHeartbeat_args setUserId(long userId) {
this.userId = userId;
setUserIdIsSet(true);
return this;
}
public void unsetUserId() {
__isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __USERID_ISSET_ID);
}
/** Returns true if field userId is set (has been assigned a value) and false otherwise */
public boolean isSetUserId() {
return EncodingUtils.testBit(__isset_bitfield, __USERID_ISSET_ID);
}
public void setUserIdIsSet(boolean value) {
__isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __USERID_ISSET_ID, value);
}
/** Generic setter used by the Thrift runtime; a null value unsets the field. */
public void setFieldValue(_Fields field, Object value) {
switch (field) {
case USER_ID:
if (value == null) {
unsetUserId();
} else {
setUserId((Long)value);
}
break;
}
}
/** Generic getter; the primitive userId is boxed. */
public Object getFieldValue(_Fields field) {
switch (field) {
case USER_ID:
return Long.valueOf(getUserId());
}
throw new IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new IllegalArgumentException();
}
switch (field) {
case USER_ID:
return isSetUserId();
}
throw new IllegalStateException();
}
@Override
public boolean equals(Object that) {
// instanceof is false for null, so a separate null check is unnecessary.
return (that instanceof userHeartbeat_args) && this.equals((userHeartbeat_args) that);
}
/**
 * Typed equality: the struct's only field is the primitive userId, so
 * equality reduces to a single comparison (after the null guard).
 */
public boolean equals(userHeartbeat_args that) {
return that != null && this.userId == that.userId;
}
@Override
public int hashCode() {
  // NOTE(review): the generator emitted "return 0", which is contract-legal
  // (equal objects still hash equally) but collapses every HashMap/HashSet
  // bucket into one linked list. Fold the only field compared by equals()
  // -- userId -- into the hash so distinct ids distribute across buckets.
  // If this file is regenerated from the IDL, prefer enabling the
  // generator's hashcode support rather than re-applying this by hand.
  return 31 + (int) (userId ^ (userId >>> 32));
}
@Override
public int compareTo(userHeartbeat_args other) {
// Different generated types order by class name; same type orders first by the
// isset flag, then by the userId value itself.
if (!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
lastComparison = Boolean.valueOf(isSetUserId()).compareTo(other.isSetUserId());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetUserId()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.userId, other.userId);
if (lastComparison != 0) {
return lastComparison;
}
}
return 0;
}
/** Maps a wire-level thrift field id to its _Fields constant, or null if unknown. */
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
/** Deserializes this struct from the protocol, delegating to the scheme matching the protocol type. */
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
}
/** Serializes this struct to the protocol, delegating to the scheme matching the protocol type. */
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
}
@Override
public String toString() {
// Debug representation: "userHeartbeat_args(userId:<value>)". The "first" flag is
// vestigial generator scaffolding for comma separation with multiple fields.
StringBuilder sb = new StringBuilder("userHeartbeat_args(");
boolean first = true;
sb.append("userId:");
sb.append(this.userId);
first = false;
sb.append(")");
return sb.toString();
}
/** Validates required fields and nested structs before serialization; nothing to check here
 * because userId is a primitive (always present on the wire). */
public void validate() throws org.apache.thrift.TException {
// check for required fields
// check for sub-struct validity
}
/** Java serialization hook: encodes the struct with TCompactProtocol over the object stream. */
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
/** Java serialization hook: decodes the struct with TCompactProtocol. The bitfield must be
 * reset manually because deserialization bypasses the default constructor. */
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
try {
// it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
__isset_bitfield = 0;
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
/** Factory registered in the schemes map for binary/standard protocols. */
private static class userHeartbeat_argsStandardSchemeFactory implements SchemeFactory {
public userHeartbeat_argsStandardScheme getScheme() {
return new userHeartbeat_argsStandardScheme();
}
}
/** Standard (field-tagged) wire encoding: reads fields until STOP, skipping unknown or
 * wrongly-typed fields for forward compatibility; writes userId as field 1 (I64). */
private static class userHeartbeat_argsStandardScheme extends StandardScheme<userHeartbeat_args> {
public void read(org.apache.thrift.protocol.TProtocol iprot, userHeartbeat_args struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true)
{
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
case 1: // USER_ID
if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
struct.userId = iprot.readI64();
struct.setUserIdIsSet(true);
} else {
// Type mismatch: skip the payload rather than fail, per thrift convention.
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
// check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
public void write(org.apache.thrift.protocol.TProtocol oprot, userHeartbeat_args struct) throws org.apache.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC);
oprot.writeFieldBegin(USER_ID_FIELD_DESC);
oprot.writeI64(struct.userId);
oprot.writeFieldEnd();
oprot.writeFieldStop();
oprot.writeStructEnd();
}
}
/** Factory registered in the schemes map for the compact tuple protocol. */
private static class userHeartbeat_argsTupleSchemeFactory implements SchemeFactory {
public userHeartbeat_argsTupleScheme getScheme() {
return new userHeartbeat_argsTupleScheme();
}
}
/** Tuple encoding: a 1-bit presence bitset followed by the values of the set fields,
 * in field-id order. More compact than the standard scheme but not self-describing. */
private static class userHeartbeat_argsTupleScheme extends TupleScheme<userHeartbeat_args> {
@Override
public void write(org.apache.thrift.protocol.TProtocol prot, userHeartbeat_args struct) throws org.apache.thrift.TException {
TTupleProtocol oprot = (TTupleProtocol) prot;
BitSet optionals = new BitSet();
if (struct.isSetUserId()) {
optionals.set(0);
}
oprot.writeBitSet(optionals, 1);
if (struct.isSetUserId()) {
oprot.writeI64(struct.userId);
}
}
@Override
public void read(org.apache.thrift.protocol.TProtocol prot, userHeartbeat_args struct) throws org.apache.thrift.TException {
TTupleProtocol iprot = (TTupleProtocol) prot;
BitSet incoming = iprot.readBitSet(1);
if (incoming.get(0)) {
struct.userId = iprot.readI64();
struct.setUserIdIsSet(true);
}
}
}
}
/**
 * Thrift-generated result struct for the userHeartbeat RPC. The call returns void,
 * so this struct has no fields; it exists only so the server can signal normal
 * completion (or, in other structs, carry declared exceptions) on the wire.
 * NOTE: generated code -- regenerate from the IDL rather than hand-editing logic.
 */
public static class userHeartbeat_result implements org.apache.thrift.TBase<userHeartbeat_result, userHeartbeat_result._Fields>, java.io.Serializable, Cloneable, Comparable<userHeartbeat_result> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("userHeartbeat_result");
// Maps serialization scheme class (standard vs. tuple) to its factory.
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
static {
schemes.put(StandardScheme.class, new userHeartbeat_resultStandardSchemeFactory());
schemes.put(TupleScheme.class, new userHeartbeat_resultTupleSchemeFactory());
}
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
// Intentionally empty: a void result declares no fields.
;
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
static {
for (_Fields field : EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
 * Find the _Fields constant that matches fieldId, or null if its not found.
 */
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
default:
return null;
}
}
/**
 * Find the _Fields constant that matches fieldId, throwing an exception
 * if it is not found.
 */
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
 * Find the _Fields constant that matches name, or null if its not found.
 */
public static _Fields findByName(String name) {
return byName.get(name);
}
private final short _thriftId;
private final String _fieldName;
_Fields(short thriftId, String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public String getFieldName() {
return _fieldName;
}
}
// Empty (but non-null) field metadata map, registered with the global thrift metadata registry.
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
metaDataMap = Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(userHeartbeat_result.class, metaDataMap);
}
public userHeartbeat_result() {
}
/**
 * Performs a deep copy on <i>other</i>.
 */
public userHeartbeat_result(userHeartbeat_result other) {
}
public userHeartbeat_result deepCopy() {
return new userHeartbeat_result(this);
}
@Override
public void clear() {
}
public void setFieldValue(_Fields field, Object value) {
switch (field) {
}
}
public Object getFieldValue(_Fields field) {
switch (field) {
}
throw new IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new IllegalArgumentException();
}
switch (field) {
}
throw new IllegalStateException();
}
@Override
public boolean equals(Object that) {
if (that == null)
return false;
if (that instanceof userHeartbeat_result)
return this.equals((userHeartbeat_result)that);
return false;
}
public boolean equals(userHeartbeat_result that) {
// All non-null instances are equal: the struct carries no state.
if (that == null)
return false;
return true;
}
@Override
public int hashCode() {
// Constant hash is correct here: every instance is equal to every other.
return 0;
}
@Override
public int compareTo(userHeartbeat_result other) {
if (!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
return 0;
}
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
}
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder("userHeartbeat_result(");
boolean first = true;
sb.append(")");
return sb.toString();
}
public void validate() throws org.apache.thrift.TException {
// check for required fields
// check for sub-struct validity
}
/** Java serialization hook: encodes the struct with TCompactProtocol. */
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
/** Java serialization hook: decodes the struct with TCompactProtocol (no bitfield to reset). */
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
try {
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private static class userHeartbeat_resultStandardSchemeFactory implements SchemeFactory {
public userHeartbeat_resultStandardScheme getScheme() {
return new userHeartbeat_resultStandardScheme();
}
}
/** Standard encoding: no fields to read/write; unknown incoming fields are skipped. */
private static class userHeartbeat_resultStandardScheme extends StandardScheme<userHeartbeat_result> {
public void read(org.apache.thrift.protocol.TProtocol iprot, userHeartbeat_result struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true)
{
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
// check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
public void write(org.apache.thrift.protocol.TProtocol oprot, userHeartbeat_result struct) throws org.apache.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC);
oprot.writeFieldStop();
oprot.writeStructEnd();
}
}
private static class userHeartbeat_resultTupleSchemeFactory implements SchemeFactory {
public userHeartbeat_resultTupleScheme getScheme() {
return new userHeartbeat_resultTupleScheme();
}
}
/** Tuple encoding: nothing on the wire at all for a field-less struct. */
private static class userHeartbeat_resultTupleScheme extends TupleScheme<userHeartbeat_result> {
@Override
public void write(org.apache.thrift.protocol.TProtocol prot, userHeartbeat_result struct) throws org.apache.thrift.TException {
TTupleProtocol oprot = (TTupleProtocol) prot;
}
@Override
public void read(org.apache.thrift.protocol.TProtocol prot, userHeartbeat_result struct) throws org.apache.thrift.TException {
TTupleProtocol iprot = (TTupleProtocol) prot;
}
}
}
} |
/*
* $Id$
* $URL$
*/
package org.subethamail.core.admin;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.Timer;
import java.util.TimerTask;
import javax.annotation.security.RunAs;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.naming.NamingException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jboss.annotation.ejb.Service;
import org.jboss.annotation.security.SecurityDomain;
import org.subethamail.core.util.EntityManipulatorBean;
import org.subethamail.entity.Mail;
import org.subethamail.entity.SubscriptionHold;
/**
* Service which wakes up once a night and performs cleanup operations.
* Old held messages and held subscriptions are pruned.
*
* @author Jeff Schnitzer
*/
@Service(name="Cleanup", objectName="subetha:service=Cleanup")
@SecurityDomain("subetha")
@RunAs("siteAdmin")
public class CleanupBean extends EntityManipulatorBean implements CleanupManagement
{
private static Log log = LogFactory.getLog(CleanupBean.class);
/** Keep held subscriptions around for 30 days */
public static final long MAX_HELD_SUB_AGE_MILLIS = 1000 * 60 * 60 * 24 * 30;
/** Keep held messages around for 7 days */
public static final long MAX_HELD_MSG_AGE_MILLIS = 1000 * 60 * 60 * 24 * 7;
class CleanupTask extends TimerTask
{
@Override
public void run()
{
try
{
Context ctx = new InitialContext();
CleanupManagement cleaner = (CleanupManagement)ctx.lookup(CleanupManagement.JNDI_NAME);
cleaner.cleanup();
}
catch (NamingException ex) { throw new RuntimeException(ex); }
}
}
/**
* Timer used to schedule the service event.
*/
Timer timer = new Timer("Cleanup", false);
/* (non-Javadoc)
* @see org.subethamail.core.admin.CleanupManagement#start()
*/
public void start() throws Exception
{
log.info("Starting cleanup service");
// Schedule rebuilds to occur nightly at 4am, plus some random slop time.
// The slop time makes this play nicer in a clustered environment. Really
// this should be a HA singleton service, but that would require running
// JBoss in a clustered configuration even on a single box.
Calendar next = Calendar.getInstance();
next.set(Calendar.HOUR_OF_DAY, 4);
next.set(Calendar.MINUTE, (int)(60 * Math.random()));
next.set(Calendar.SECOND, (int)(60 * Math.random()));
next.set(Calendar.MILLISECOND, (int)(1000 * Math.random()));
Calendar now = Calendar.getInstance();
if (now.after(next))
next.add(Calendar.DAY_OF_YEAR, 1);
final long millisInDay = 1000 * 60 * 60 * 24;
this.timer.scheduleAtFixedRate(new CleanupTask(), next.getTime(), millisInDay);
log.info("Cleanup will occur daily starting at " + next.getTime());
}
/* (non-Javadoc)
* @see org.subethamail.core.admin.CleanupManagement#stop()
*/
public void stop() throws Exception
{
log.info("Stopping cleanup service");
this.timer.cancel();
}
/*
* (non-Javadoc)
* @see org.subethamail.core.admin.CleanupManagement#cleanup()
*/
public void cleanup()
{
this.cleanupHeldSubscriptions();
this.cleanupHeldMail();
}
/**
* Purges old subscription holds.
*/
protected void cleanupHeldSubscriptions()
{
Date cutoff = new Date(System.currentTimeMillis() - MAX_HELD_SUB_AGE_MILLIS);
if (log.isDebugEnabled())
log.debug("Purging held subscriptions older than " + cutoff);
int count = 0;
List<SubscriptionHold> holds = this.em.findHeldSubscriptionsOlderThan(cutoff);
for (SubscriptionHold hold: holds)
{
if (log.isDebugEnabled())
log.debug("Deleting obsolete hold: " + hold);
this.em.remove(hold);
count++;
}
if (count > 0)
if (log.isInfoEnabled())
log.info(count + " obsolete subscription holds removed with cutoff: " + cutoff);
}
/**
* Purges old held messages.
*/
protected void cleanupHeldMail()
{
Date cutoff = new Date(System.currentTimeMillis() - MAX_HELD_MSG_AGE_MILLIS);
if (log.isDebugEnabled())
log.debug("Purging held mail older than " + cutoff);
int count = 0;
List<Mail> holds = this.em.findHeldMailOlderThan(cutoff);
for (Mail hold: holds)
{
if (log.isDebugEnabled())
log.debug("Deleting obsolete hold: " + hold);
this.em.remove(hold);
count++;
}
if (count > 0)
if (log.isInfoEnabled())
log.info(count + " obsolete message holds removed");
}
} |
package cucumber.runtime;
import cucumber.api.HookType;
import cucumber.api.Plugin;
import cucumber.api.Result;
import cucumber.api.Scenario;
import cucumber.api.StepDefinitionReporter;
import cucumber.api.TestCase;
import cucumber.api.event.ConcurrentEventListener;
import cucumber.api.event.EventHandler;
import cucumber.api.event.EventListener;
import cucumber.api.event.EventPublisher;
import cucumber.api.event.StepDefinedEvent;
import cucumber.api.event.TestCaseFinished;
import cucumber.api.event.TestStepFinished;
import cucumber.runner.EventBus;
import cucumber.runner.TestBackendSupplier;
import cucumber.runner.TestHelper;
import cucumber.runner.TimeService;
import cucumber.runner.TimeServiceEventBus;
import cucumber.runner.TimeServiceStub;
import cucumber.runtime.formatter.FormatterBuilder;
import cucumber.runtime.formatter.FormatterSpy;
import cucumber.runtime.io.ClasspathResourceLoader;
import cucumber.runtime.io.Resource;
import cucumber.runtime.io.ResourceLoader;
import cucumber.runtime.model.CucumberFeature;
import gherkin.ast.ScenarioDefinition;
import gherkin.ast.Step;
import gherkin.pickles.PickleStep;
import gherkin.pickles.PickleTag;
import io.cucumber.core.options.CommandlineOptionsParser;
import io.cucumber.stepexpression.Argument;
import io.cucumber.stepexpression.TypeRegistry;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.mockito.ArgumentCaptor;
import org.mockito.ArgumentMatchers;
import java.net.URI;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;
import static cucumber.runner.TestHelper.feature;
import static cucumber.runner.TestHelper.result;
import static java.util.Collections.singletonList;
import static java.util.concurrent.TimeUnit.HOURS;
import static java.util.concurrent.TimeUnit.MINUTES;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static uk.co.datumedge.hamcrest.json.SameJSONAs.sameJSONAs;
public class RuntimeTest {
// Fixed timestamp constant; presumably used by tests further down this file -- TODO confirm.
private final static long ANY_TIMESTAMP = 1234567890;
// Default parameter-type registry for step definitions created in these tests.
private final TypeRegistry TYPE_REGISTRY = new TypeRegistry(Locale.ENGLISH);
// Shared event bus backed by the real clock; exit-status tests post TestCaseFinished events here.
private final EventBus bus = new TimeServiceEventBus(TimeService.SYSTEM);
@Rule
public ExpectedException expectedException = ExpectedException.none();
@Test
public void runs_feature_with_json_formatter() {
// Runs one feature (background + scenario, both undefined steps) through the JSON
// formatter with a stubbed clock at epoch 0, and compares the full JSON report.
final CucumberFeature feature = feature("test.feature", "" +
"Feature: feature name\n" +
" Background: background name\n" +
" Given b\n" +
" Scenario: scenario name\n" +
" When s\n");
StringBuilder out = new StringBuilder();
Plugin jsonFormatter = FormatterBuilder.jsonFormatter(out);
ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
// Backend that registers no glue at all, so every step reports "undefined".
BackendSupplier backendSupplier = new TestBackendSupplier() {
@Override
public void loadGlue(Glue glue, List<URI> gluePaths) {
}
};
FeatureSupplier featureSupplier = new FeatureSupplier() {
@Override
public List<CucumberFeature> get() {
return singletonList(feature);
}
};
Runtime.builder()
.withBackendSupplier(backendSupplier)
.withAdditionalPlugins(jsonFormatter)
.withResourceLoader(new ClasspathResourceLoader(classLoader))
.withEventBus(new TimeServiceEventBus(new TimeServiceStub(0)))
.withFeatureSupplier(featureSupplier)
.build()
.run();
String expected = "" +
"[\n" +
" {\n" +
" \"line\": 1,\n" +
" \"elements\": [\n" +
" {\n" +
" \"line\": 2,\n" +
" \"name\": \"background name\",\n" +
" \"description\": \"\",\n" +
" \"type\": \"background\",\n" +
" \"keyword\": \"Background\",\n" +
" \"steps\": [\n" +
" {\n" +
" \"result\": {\n" +
" \"status\": \"undefined\"\n" +
" },\n" +
" \"line\": 3,\n" +
" \"name\": \"b\",\n" +
" \"match\": {},\n" +
" \"keyword\": \"Given \"\n" +
" }\n" +
" ]\n" +
" },\n" +
" {\n" +
" \"line\": 4,\n" +
" \"name\": \"scenario name\",\n" +
" \"description\": \"\",\n" +
" \"id\": \"feature-name;scenario-name\",\n" +
" \"start_timestamp\": \"1970-01-01T00:00:00.000Z\",\n" +
" \"type\": \"scenario\",\n" +
" \"keyword\": \"Scenario\",\n" +
" \"steps\": [\n" +
" {\n" +
" \"result\": {\n" +
" \"status\": \"undefined\"\n" +
" },\n" +
" \"line\": 5,\n" +
" \"name\": \"s\",\n" +
" \"match\": {},\n" +
" \"keyword\": \"When \"\n" +
" }\n" +
" ]\n" +
" }\n" +
" ],\n" +
" \"name\": \"feature name\",\n" +
" \"description\": \"\",\n" +
" \"id\": \"feature-name\",\n" +
" \"keyword\": \"Feature\",\n" +
" \"uri\": \"file:test.feature\",\n" +
" \"tags\": []\n" +
" }\n" +
"]";
// sameJSONAs ignores formatting/ordering differences, comparing JSON structure.
assertThat(out.toString(), sameJSONAs(expected));
}
@Test
public void strict_with_passed_scenarios() {
// Passed scenarios exit 0 regardless of strictness.
Runtime runtime = createStrictRuntime();
bus.send(testCaseFinishedWithStatus(Result.Type.PASSED));
assertEquals(0x0, runtime.exitStatus());
}
@Test
public void non_strict_with_passed_scenarios() {
// Passed scenarios exit 0 regardless of strictness.
Runtime runtime = createNonStrictRuntime();
bus.send(testCaseFinishedWithStatus(Result.Type.PASSED));
assertEquals(0x0, runtime.exitStatus());
}
@Test
public void non_strict_with_undefined_scenarios() {
// Undefined steps are tolerated (exit 0) when strict mode is off.
Runtime runtime = createNonStrictRuntime();
bus.send(testCaseFinishedWithStatus(Result.Type.UNDEFINED));
assertEquals(0x0, runtime.exitStatus());
}
@Test
public void strict_with_undefined_scenarios() {
// Strict mode turns undefined steps into a non-zero exit status.
Runtime runtime = createStrictRuntime();
bus.send(testCaseFinishedWithStatus(Result.Type.UNDEFINED));
assertEquals(0x1, runtime.exitStatus());
}
@Test
public void strict_with_pending_scenarios() {
// Strict mode turns pending steps into a non-zero exit status.
Runtime runtime = createStrictRuntime();
bus.send(testCaseFinishedWithStatus(Result.Type.PENDING));
assertEquals(0x1, runtime.exitStatus());
}
@Test
public void non_strict_with_pending_scenarios() {
// Pending steps are tolerated (exit 0) when strict mode is off.
Runtime runtime = createNonStrictRuntime();
bus.send(testCaseFinishedWithStatus(Result.Type.PENDING));
assertEquals(0x0, runtime.exitStatus());
}
@Test
public void non_strict_with_skipped_scenarios() {
// Skipped scenarios are never failures.
Runtime runtime = createNonStrictRuntime();
bus.send(testCaseFinishedWithStatus(Result.Type.SKIPPED));
assertEquals(0x0, runtime.exitStatus());
}
@Test
public void strict_with_skipped_scenarios() {
// FIX: this test claims to cover strict mode but previously constructed a
// NON-strict runtime (copy-paste from non_strict_with_skipped_scenarios above),
// so the strict code path was never exercised. Skipped scenarios are not
// failures even in strict mode, so the expected exit status remains 0.
Runtime runtime = createStrictRuntime();
bus.send(testCaseFinishedWithStatus(Result.Type.SKIPPED));
assertEquals(0x0, runtime.exitStatus());
}
@Test
public void non_strict_with_failed_scenarios() {
// Failures produce a non-zero exit status regardless of strictness.
Runtime runtime = createNonStrictRuntime();
bus.send(testCaseFinishedWithStatus(Result.Type.FAILED));
assertEquals(0x1, runtime.exitStatus());
}
@Test
public void strict_with_failed_scenarios() {
// Failures produce a non-zero exit status regardless of strictness.
Runtime runtime = createStrictRuntime();
bus.send(testCaseFinishedWithStatus(Result.Type.FAILED));
assertEquals(0x1, runtime.exitStatus());
}
@Test
public void non_strict_with_ambiguous_scenarios() {
// Ambiguous step matches are always failures, even when strict mode is off.
Runtime runtime = createNonStrictRuntime();
bus.send(testCaseFinishedWithStatus(Result.Type.AMBIGUOUS));
assertEquals(0x1, runtime.exitStatus());
}
@Test
public void strict_with_ambiguous_scenarios() {
// Ambiguous step matches are always failures.
Runtime runtime = createStrictRuntime();
bus.send(testCaseFinishedWithStatus(Result.Type.AMBIGUOUS));
assertEquals(0x1, runtime.exitStatus());
}
@Test
public void should_pass_if_no_features_are_found() {
// An empty feature set is not an error: the run completes with exit status 0.
ResourceLoader resourceLoader = createResourceLoaderThatFindsNoFeatures();
Runtime runtime = createStrictRuntime(resourceLoader);
runtime.run();
assertEquals(0x0, runtime.exitStatus());
}
@Test
public void reports_step_definitions_to_plugin() {
// Registers one stub step definition and a StepDefinitionReporter plugin (loaded by
// class name via --plugin), then verifies the runtime reported that exact instance.
ResourceLoader resourceLoader = mock(ResourceLoader.class);
final StubStepDefinition stepDefinition = new StubStepDefinition("some pattern", new TypeRegistry(Locale.ENGLISH));
TestBackendSupplier testBackendSupplier = new TestBackendSupplier() {
@Override
public void loadGlue(Glue glue, List<URI> gluePaths) {
glue.addStepDefinition(stepDefinition);
}
};
Runtime.builder()
.withResourceLoader(resourceLoader)
.withRuntimeOptions(
new CommandlineOptionsParser()
.parse("--plugin", "cucumber.runtime.RuntimeTest$StepdefsPrinter")
.build()
)
.withBackendSupplier(testBackendSupplier)
.build()
.run();
// The plugin stashes itself in a static field so the test can reach the instance
// the runtime created reflectively.
assertSame(stepDefinition, StepdefsPrinter.instance.stepDefinition);
}
/** Plugin fixture instantiated reflectively by the runtime (see --plugin above); records
 * itself in a static field and captures the last reported step definition. */
public static class StepdefsPrinter implements StepDefinitionReporter {
static StepdefsPrinter instance;
StepDefinition stepDefinition;
public StepdefsPrinter() {
instance = this;
}
@Override
public void stepDefinition(StepDefinition stepDefinition) {
this.stepDefinition = stepDefinition;
}
}
@Test
public void should_make_scenario_name_available_to_hooks() throws Throwable {
// A mocked Before hook must receive a Scenario object carrying the scenario's name.
CucumberFeature feature = TestHelper.feature("path/test.feature",
"Feature: feature name\n" +
" Scenario: scenario name\n" +
" Given first step\n" +
" When second step\n" +
" Then third step\n");
HookDefinition beforeHook = mock(HookDefinition.class);
when(beforeHook.matches(ArgumentMatchers.<PickleTag>anyCollection())).thenReturn(true);
Runtime runtime = createRuntimeWithMockedGlue(beforeHook, HookType.Before, feature);
runtime.run();
ArgumentCaptor<Scenario> capturedScenario = ArgumentCaptor.forClass(Scenario.class);
verify(beforeHook).execute(capturedScenario.capture());
assertEquals("scenario name", capturedScenario.getValue().getName());
}
@Test
public void should_call_formatter_for_two_scenarios_with_background() {
// The background step is replayed before each scenario, so the first test case has
// 3 steps (background + 2) and the second has 2 (background + 1).
CucumberFeature feature = TestHelper.feature("path/test.feature", "" +
"Feature: feature name\n" +
" Background: background\n" +
" Given first step\n" +
" Scenario: scenario_1 name\n" +
" When second step\n" +
" Then third step\n" +
" Scenario: scenario_2 name\n" +
" Then second step\n");
Map<String, Result> stepsToResult = new HashMap<>();
stepsToResult.put("first step", result("passed"));
stepsToResult.put("second step", result("passed"));
stepsToResult.put("third step", result("passed"));
String formatterOutput = runFeatureWithFormatterSpy(feature, stepsToResult);
assertEquals("" +
"TestCase started\n" +
" TestStep started\n" +
" TestStep finished\n" +
" TestStep started\n" +
" TestStep finished\n" +
" TestStep started\n" +
" TestStep finished\n" +
"TestCase finished\n" +
"TestCase started\n" +
" TestStep started\n" +
" TestStep finished\n" +
" TestStep started\n" +
" TestStep finished\n" +
"TestCase finished\n" +
"TestRun finished\n", formatterOutput);
}
@Test
public void should_call_formatter_for_scenario_outline_with_two_examples_table_and_background() {
// Three example rows across two tables each expand to a test case of
// 3 steps (background + outline's When/Then).
CucumberFeature feature = TestHelper.feature("path/test.feature", "" +
"Feature: feature name\n" +
" Background: background\n" +
" Given first step\n" +
" Scenario Outline: scenario outline name\n" +
" When <x> step\n" +
" Then <y> step\n" +
" Examples: examples 1 name\n" +
" | x | y |\n" +
" | second | third |\n" +
" | second | third |\n" +
" Examples: examples 2 name\n" +
" | x | y |\n" +
" | second | third |\n");
Map<String, Result> stepsToResult = new HashMap<>();
stepsToResult.put("first step", result("passed"));
stepsToResult.put("second step", result("passed"));
stepsToResult.put("third step", result("passed"));
String formatterOutput = runFeatureWithFormatterSpy(feature, stepsToResult);
assertEquals("" +
"TestCase started\n" +
" TestStep started\n" +
" TestStep finished\n" +
" TestStep started\n" +
" TestStep finished\n" +
" TestStep started\n" +
" TestStep finished\n" +
"TestCase finished\n" +
"TestCase started\n" +
" TestStep started\n" +
" TestStep finished\n" +
" TestStep started\n" +
" TestStep finished\n" +
" TestStep started\n" +
" TestStep finished\n" +
"TestCase finished\n" +
"TestCase started\n" +
" TestStep started\n" +
" TestStep finished\n" +
" TestStep started\n" +
" TestStep finished\n" +
" TestStep started\n" +
" TestStep finished\n" +
"TestCase finished\n" +
"TestRun finished\n", formatterOutput);
}
@Test
public void should_call_formatter_with_correct_sequence_of_events_when_running_in_parallel() {
// Even with one thread per feature, the formatter must observe complete,
// non-interleaved TestCase started/finished sequences (events are synchronized
// per test case before reaching plugins).
CucumberFeature feature1 = TestHelper.feature("path/test.feature", "" +
"Feature: feature name 1\n" +
" Scenario: scenario_1 name\n" +
" Given first step\n" +
" Scenario: scenario_2 name\n" +
" Given first step\n");
CucumberFeature feature2 = TestHelper.feature("path/test2.feature", "" +
"Feature: feature name 2\n" +
" Scenario: scenario_2 name\n" +
" Given first step\n");
CucumberFeature feature3 = TestHelper.feature("path/test3.feature", "" +
"Feature: feature name 3\n" +
" Scenario: scenario_3 name\n" +
" Given first step\n");
Map<String, Result> stepsToResult = new HashMap<>();
stepsToResult.put("first step", result("passed"));
FormatterSpy formatterSpy = new FormatterSpy();
final List<CucumberFeature> features = Arrays.asList(feature1, feature2, feature3);
TestHelper.builder()
.withFeatures(features)
.withStepsToResult(stepsToResult)
.withFormatterUnderTest(formatterSpy)
.withTimeServiceType(TestHelper.TimeServiceType.REAL_TIME)
.withRuntimeArgs("--threads", String.valueOf(features.size()))
.build()
.run();
String formatterOutput = formatterSpy.toString();
assertEquals("" +
"TestCase started\n" +
" TestStep started\n" +
" TestStep finished\n" +
"TestCase finished\n" +
"TestCase started\n" +
" TestStep started\n" +
" TestStep finished\n" +
"TestCase finished\n" +
"TestCase started\n" +
" TestStep started\n" +
" TestStep finished\n" +
"TestCase finished\n" +
"TestCase started\n" +
" TestStep started\n" +
" TestStep finished\n" +
"TestCase finished\n" +
"TestRun finished\n", formatterOutput);
}
@Test
public void should_fail_on_event_listener_exception_when_running_in_parallel() {
// A listener that throws on every TestStepFinished (3 steps across 2 features)
// must surface all exceptions as one CompositeCucumberException, not be swallowed.
CucumberFeature feature1 = TestHelper.feature("path/test.feature", "" +
"Feature: feature name 1\n" +
" Scenario: scenario_1 name\n" +
" Given first step\n" +
" Scenario: scenario_2 name\n" +
" Given first step\n");
CucumberFeature feature2 = TestHelper.feature("path/test2.feature", "" +
"Feature: feature name 2\n" +
" Scenario: scenario_2 name\n" +
" Given first step\n");
ConcurrentEventListener brokenEventListener = new ConcurrentEventListener() {
@Override
public void setEventPublisher(EventPublisher publisher) {
publisher.registerHandlerFor(TestStepFinished.class, new EventHandler<TestStepFinished>() {
@Override
public void receive(TestStepFinished event) {
throw new RuntimeException("boom");
}
});
}
};
expectedException.expect(CompositeCucumberException.class);
expectedException.expectMessage("There were 3 exceptions");
TestHelper.builder()
.withFeatures(Arrays.asList(feature1, feature2))
.withFormatterUnderTest(brokenEventListener)
.withTimeServiceType(TestHelper.TimeServiceType.REAL_TIME)
.withRuntimeArgs("--threads", "2")
.build()
.run();
}
@Test
public void should_interrupt_waiting_plugins() throws InterruptedException {
// A plugin that blocks (sleeps 1h) must be interruptible: interrupting the runner
// thread propagates to the handler, observed via the interruptHit latch.
final CucumberFeature feature1 = TestHelper.feature("path/test.feature", "" +
"Feature: feature name 1\n" +
" Scenario: scenario_1 name\n" +
" Given first step\n" +
" Scenario: scenario_2 name\n" +
" Given first step\n");
final CucumberFeature feature2 = TestHelper.feature("path/test2.feature", "" +
"Feature: feature name 2\n" +
" Scenario: scenario_2 name\n" +
" Given first step\n");
final CountDownLatch threadBlocked = new CountDownLatch(1);
final CountDownLatch interruptHit = new CountDownLatch(1);
final ConcurrentEventListener brokenEventListener = new ConcurrentEventListener() {
@Override
public void setEventPublisher(EventPublisher publisher) {
publisher.registerHandlerFor(TestStepFinished.class, new EventHandler<TestStepFinished>() {
@Override
public void receive(TestStepFinished event) {
try {
threadBlocked.countDown();
HOURS.sleep(1);
} catch (InterruptedException ignored) {
// Expected path: the interrupt below lands here.
interruptHit.countDown();
}
}
});
}
};
Thread thread = new Thread(new Runnable() {
@Override
public void run() {
TestHelper.builder()
.withFeatures(Arrays.asList(feature1, feature2))
.withFormatterUnderTest(brokenEventListener)
.withTimeServiceType(TestHelper.TimeServiceType.REAL_TIME)
.withRuntimeArgs("--threads", "2")
.build()
.run();
}
});
thread.start();
// NOTE(review): both await() return values are ignored; on a very slow machine the
// 1s timeouts could elapse before the latch trips and the interrupt would race --
// consider asserting the boolean results.
threadBlocked.await(1, SECONDS);
thread.interrupt();
interruptHit.await(1, SECONDS);
assertEquals(0, interruptHit.getCount());
}
@Test
public void generates_events_for_glue_and_scenario_scoped_glue() {
// Both globally-registered glue (loadGlue) and per-scenario glue (buildWorld) must
// emit StepDefinedEvent, and scenario-scoped definitions must be disposed afterwards.
final CucumberFeature feature = feature("test.feature", "" +
"Feature: feature name\n" +
" Scenario: Run a scenario once\n" +
" Given global scoped\n" +
" And scenario scoped\n" +
" Scenario: Then do it again\n" +
" Given global scoped\n" +
" And scenario scoped\n" +
"");
final List<StepDefinition> stepDefinedEvents = new ArrayList<>();
Plugin eventListener = new EventListener() {
@Override
public void setEventPublisher(EventPublisher publisher) {
publisher.registerHandlerFor(StepDefinedEvent.class, new EventHandler<StepDefinedEvent>() {
@Override
public void receive(StepDefinedEvent event) {
stepDefinedEvents.add(event.stepDefinition);
}
});
}
};
final List<StepDefinition> definedStepDefinitions = new ArrayList<>();
BackendSupplier backendSupplier = new TestBackendSupplier() {
private Glue glue;
@Override
public void loadGlue(Glue glue, List<URI> gluePaths) {
// Global glue: registered once for the whole run.
this.glue = glue;
final StepDefinition mockedStepDefinition = new MockedStepDefinition();
definedStepDefinitions.add(mockedStepDefinition);
glue.addStepDefinition(mockedStepDefinition);
}
@Override
public void buildWorld() {
// Scenario-scoped glue: re-registered before every scenario.
final StepDefinition mockedScenarioScopedStepDefinition = new MockedScenarioScopedStepDefinition();
definedStepDefinitions.add(mockedScenarioScopedStepDefinition);
glue.addStepDefinition(mockedScenarioScopedStepDefinition);
}
};
ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
FeatureSupplier featureSupplier = new FeatureSupplier() {
@Override
public List<CucumberFeature> get() {
return singletonList(feature);
}
};
Runtime.builder()
.withBackendSupplier(backendSupplier)
.withAdditionalPlugins(eventListener)
.withResourceLoader(new ClasspathResourceLoader(classLoader))
.withEventBus(new TimeServiceEventBus(new TimeServiceStub(0)))
.withFeatureSupplier(featureSupplier)
.build()
.run();
assertThat(stepDefinedEvents, equalTo(definedStepDefinitions));
for (StepDefinition stepDefinedEvent : stepDefinedEvents) {
if (stepDefinedEvent instanceof MockedScenarioScopedStepDefinition) {
MockedScenarioScopedStepDefinition mocked = (MockedScenarioScopedStepDefinition) stepDefinedEvent;
assertTrue("Scenario scoped step definition should be disposed of", mocked.disposed);
}
}
}
/**
 * Runs the given feature with the supplied per-step results against a
 * {@link FormatterSpy} and returns the spy's recorded output.
 */
private String runFeatureWithFormatterSpy(CucumberFeature feature, Map<String, Result> stepsToResult) {
    final FormatterSpy spy = new FormatterSpy();
    TestHelper.builder()
        .withFeatures(feature)
        .withStepsToResult(stepsToResult)
        .withFormatterUnderTest(spy)
        .withTimeServiceType(TestHelper.TimeServiceType.REAL_TIME)
        .build()
        .run();
    return spy.toString();
}
/** Builds a mock {@link ResourceLoader} that yields no feature resources for any URI. */
private ResourceLoader createResourceLoaderThatFindsNoFeatures() {
    final ResourceLoader loader = mock(ResourceLoader.class);
    when(loader.resources(any(URI.class), eq(".feature")))
        .thenReturn(Collections.<Resource>emptyList());
    return loader;
}
/** Runtime with empty glue, running in strict mode ({@code --strict}). */
private Runtime createStrictRuntime() {
    return createRuntime("-g", "anything", "--strict");
}
/** Runtime with empty glue, running in default (non-strict) mode. */
private Runtime createNonStrictRuntime() {
    return createRuntime("-g", "anything");
}
/** Strict-mode runtime that uses the supplied resource loader instead of a mock. */
private Runtime createStrictRuntime(ResourceLoader resourceLoader) {
    return createRuntime(resourceLoader, Thread.currentThread().getContextClassLoader(), "-g", "anything", "--strict");
}
/** Builds a runtime from CLI args, with a fresh mock loader and the context class loader. */
private Runtime createRuntime(String... runtimeArgs) {
    final ResourceLoader mockLoader = mock(ResourceLoader.class);
    final ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
    return createRuntime(mockLoader, contextClassLoader, runtimeArgs);
}
/**
 * Builds a runtime from CLI args with a backend that contributes no glue;
 * these tests only exercise the runtime wiring itself.
 */
private Runtime createRuntime(ResourceLoader resourceLoader, ClassLoader classLoader, String... runtimeArgs) {
    final BackendSupplier emptyGlueBackend = new TestBackendSupplier() {
        @Override
        public void loadGlue(Glue glue, List<URI> gluePaths) {
            // intentionally empty: no step definitions or hooks
        }
    };
    return Runtime.builder()
        .withRuntimeOptions(new CommandlineOptionsParser().parse(runtimeArgs).build())
        .withClassLoader(classLoader)
        .withResourceLoader(resourceLoader)
        .withBackendSupplier(emptyGlueBackend)
        .withEventBus(bus)
        .build();
}
/**
 * Builds a runtime whose glue contains a stub definition for every step of the
 * given feature, plus the supplied hook registered under the given hook type.
 */
private Runtime createRuntimeWithMockedGlue(final HookDefinition hook,
                                            final HookType hookType,
                                            final CucumberFeature feature,
                                            String... runtimeArgs) {
    TestBackendSupplier testBackendSupplier = new TestBackendSupplier() {
        @Override
        public void loadGlue(Glue glue, List<URI> gluePaths) {
            // Stub every step text of every scenario so all steps match.
            for (ScenarioDefinition child : feature.getGherkinFeature().getFeature().getChildren()) {
                for (Step step : child.getSteps()) {
                    mockMatch(glue, step.getText());
                }
            }
            mockHook(glue, hook, hookType);
        }
    };
    FeatureSupplier featureSupplier = new FeatureSupplier() {
        @Override
        public List<CucumberFeature> get() {
            return singletonList(feature);
        }
    };
    return Runtime.builder()
        .withRuntimeOptions(
            new CommandlineOptionsParser()
                .parse(runtimeArgs)
                .build()
        )
        .withBackendSupplier(testBackendSupplier)
        .withFeatureSupplier(featureSupplier)
        .build();
}
/** Registers a stub step definition that matches the given step text. */
private void mockMatch(Glue glue, String text) {
    glue.addStepDefinition(new StubStepDefinition(text, TYPE_REGISTRY));
}
/**
 * Registers {@code hook} with the glue under the given hook type.
 *
 * @throws IllegalArgumentException for hook types this helper does not support
 */
private void mockHook(Glue glue, HookDefinition hook, HookType hookType) {
    switch (hookType) {
        case Before:
            glue.addBeforeHook(hook);
            return;
        case After:
            glue.addAfterHook(hook);
            return;
        case AfterStep:
            // Fixed: previously registered the AfterStep hook via addBeforeHook(),
            // so tests asking for an AfterStep hook actually got a Before hook.
            glue.addAfterStepHook(hook);
            return;
        default:
            throw new IllegalArgumentException(hookType.name());
    }
}
/** Builds a TestCaseFinished event carrying a result of the given status. */
private TestCaseFinished testCaseFinishedWithStatus(Result.Type resultStatus) {
    final Result result = new Result(resultStatus, 0L, null);
    return new TestCaseFinished(ANY_TIMESTAMP, ANY_TIMESTAMP, mock(TestCase.class), result);
}
/**
 * Minimal step definition registered as global glue; matches the literal
 * step text "global scoped" and executes as a no-op.
 */
private static final class MockedStepDefinition implements StepDefinition {

    @Override
    public List<Argument> matchedArguments(PickleStep step) {
        // Exact-text match only; null signals "no match" to the glue.
        return step.getText().equals(getPattern()) ? new ArrayList<Argument>() : null;
    }

    @Override
    public String getLocation(boolean detail) {
        return "mocked step definition";
    }

    @Override
    public Integer getParameterCount() {
        return 0;
    }

    @Override
    public void execute(Object[] args) throws Throwable {
        // no-op
    }

    @Override
    public boolean isDefinedAt(StackTraceElement stackTraceElement) {
        return false;
    }

    @Override
    public String getPattern() {
        return "global scoped";
    }

    @Override
    public boolean isScenarioScoped() {
        // NOTE(review): returns true although this mock represents the
        // globally scoped definition and does not implement ScenarioScoped.
        // Looks like a copy-paste from MockedScenarioScopedStepDefinition —
        // verify against how the glue consumes this flag before changing.
        return true;
    }
}
/**
 * Scenario scoped counterpart of MockedStepDefinition: matches the literal
 * step text "scenario scoped" and records via {@link #disposed} whether the
 * glue disposed it at the end of its scenario.
 */
private static final class MockedScenarioScopedStepDefinition implements StepDefinition, ScenarioScoped {

    // Set to true when the glue disposes this definition.
    boolean disposed;

    @Override
    public void disposeScenarioScope() {
        this.disposed = true;
    }

    @Override
    public List<Argument> matchedArguments(PickleStep step) {
        // Exact-text match only; null signals "no match" to the glue.
        return step.getText().equals(getPattern()) ? new ArrayList<Argument>() : null;
    }

    @Override
    public String getLocation(boolean detail) {
        return "mocked scenario scoped step definition";
    }

    @Override
    public Integer getParameterCount() {
        return 0;
    }

    @Override
    public void execute(Object[] args) {
        // no-op
    }

    @Override
    public boolean isDefinedAt(StackTraceElement stackTraceElement) {
        return false;
    }

    @Override
    public String getPattern() {
        return "scenario scoped";
    }

    @Override
    public boolean isScenarioScoped() {
        return true;
    }
}
} |
package com.exedio.cope.util;
import java.io.IOException;
import java.io.PrintStream;
import java.io.Serializable;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import com.exedio.cope.Cope;
import com.exedio.cope.Item;
import com.exedio.cope.Model;
import com.exedio.cope.NoSuchIDException;
import com.exedio.cope.StringField;
import com.exedio.cope.pattern.MapField;
import com.exedio.cope.util.ConnectToken;
import com.exedio.cope.util.ServletUtil;
import com.exedio.cops.Cop;
import com.exedio.cops.CopsServlet;
/**
 * A servlet {@link Filter} implementing an in-place content editor:
 * it manages login sessions, exposes per-request editing state via a
 * thread local to the static {@code edit(...)} helpers, and saves edited
 * {@link StringField} values back to the {@link Model} within a transaction.
 */
public abstract class Editor implements Filter
{
    private final Model model;

    /**
     * Subclasses must define a public no-args constructor
     * providing the model.
     */
    protected Editor(final Model model)
    {
        if(model==null)
            throw new NullPointerException("model was null in " + getClass().getName());

        this.model = model;
    }

    // Handle for the model connection; acquired in init(), returned in destroy().
    private ConnectToken connectToken = null;

    public final void init(final FilterConfig config)
    {
        connectToken = ServletUtil.connect(model, config.getServletContext(), getClass().getName());
    }

    public final void destroy()
    {
        connectToken.returnIt();
        connectToken = null;
    }

    /**
     * If you want persistent sessions,
     * then make implementors of this interface serializable.
     */
    public interface Login
    {
        String getName();
    }

    /**
     * Authenticates the given credentials; a null result means the login
     * failed. Invoked inside a model transaction (see doLogin).
     */
    protected abstract Login login(String user, String password);

    public final void doFilter(
            final ServletRequest servletRequest,
            final ServletResponse response,
            final FilterChain chain) throws IOException, ServletException
    {
        // Non-HTTP requests pass through untouched.
        if(!(servletRequest instanceof HttpServletRequest))
        {
            chain.doFilter(servletRequest, response);
            return;
        }

        final HttpServletRequest request = (HttpServletRequest)servletRequest;
        servletRequest.setCharacterEncoding(CopsServlet.ENCODING);

        // The login page is handled by this filter itself.
        if(LOGIN_URL_PATH_INFO.equals(request.getPathInfo()))
        {
            doLogin(request, (HttpServletResponse)response);
            return;
        }

        final HttpSession httpSession = request.getSession(false);
        if(httpSession!=null)
        {
            final Object session = httpSession.getAttribute(SESSION);
            if(session!=null)
            {
                // Logged in: expose request/response/session to the static
                // edit helpers via the thread local for the duration of the chain.
                try
                {
                    tls.set(new TL(request, (HttpServletResponse)response, (Session)session));
                    chain.doFilter(request, response);
                }
                finally
                {
                    tls.remove();
                }
            }
            else
                chain.doFilter(request, response);
        }
        else
        {
            chain.doFilter(request, response);
        }
    }

    // Request parameter names; prefixed to avoid collisions with application parameters.
    static final String AVOID_COLLISION = "contentEditorBar823658617";
    static final String REFERER = "referer";
    static final String TOGGLE_BORDERS = "borders";
    static final String SAVE_FEATURE = "feature";
    static final String SAVE_ITEM = "item";
    static final String SAVE_KIND = "kind";
    static final String SAVE_LINE = "line";
    static final String SAVE_AREA = "area";
    static final String SAVE_KIND_LINE = "kindLine";
    static final String SAVE_KIND_AREA = "kindArea";

    /**
     * Handles postbacks of the editor bar: toggles edit borders and
     * saves a posted field value within a model transaction.
     */
    private final void doBar(
            final HttpServletRequest request,
            final Session session)
    {
        if(Cop.isPost(request) && request.getParameter(AVOID_COLLISION)!=null)
        {
            if(request.getParameter(TOGGLE_BORDERS)!=null)
            {
                session.borders = !session.borders;
            }
            final String featureID = request.getParameter(SAVE_FEATURE);
            if(featureID!=null)
            {
                final String itemID = request.getParameter(SAVE_ITEM);
                final String kind = request.getParameter(SAVE_KIND);

                // A value is posted either as a single line or as a text area.
                final String value;
                if(SAVE_KIND_LINE.equals(kind))
                    value = request.getParameter(SAVE_LINE);
                else if(SAVE_KIND_AREA.equals(kind))
                    value = request.getParameter(SAVE_AREA);
                else
                    throw new RuntimeException(kind);

                try
                {
                    model.startTransaction(getClass().getName() + "#save");

                    final StringField feature = (StringField)model.findFeatureByID(featureID);
                    assert feature!=null : featureID;

                    final Item item = model.findByID(itemID);

                    // An empty string is stored as null.
                    String v = value;
                    if("".equals(v))
                        v = null;
                    feature.set(item, v);

                    model.commit();
                }
                catch(NoSuchIDException e)
                {
                    throw new RuntimeException(e);
                }
                finally
                {
                    // No-op if the commit above succeeded.
                    model.rollbackIfNotCommitted();
                }
            }
        }
    }

    static final String LOGIN_URL = "contentEditorLogin.html";
    private static final String LOGIN_URL_PATH_INFO = '/' + LOGIN_URL;
    static final String LOGIN = "login";
    static final String LOGIN_USER = "user";
    static final String LOGIN_PASSWORD = "password";

    /**
     * Renders the login page, processes login postbacks, and once a session
     * exists, dispatches editor-bar postbacks and redirects back to the referer.
     */
    private final void doLogin(
            final HttpServletRequest request,
            final HttpServletResponse response)
    throws IOException
    {
        response.setContentType("text/html; charset="+CopsServlet.ENCODING);
        final HttpSession httpSession = request.getSession(true);
        final Session session = (Session)httpSession.getAttribute(SESSION);
        if(session==null)
        {
            PrintStream out = null;
            try
            {
                if(Cop.isPost(request) && request.getParameter(LOGIN)!=null)
                {
                    final String user = request.getParameter(LOGIN_USER);
                    final String password = request.getParameter(LOGIN_PASSWORD);
                    try
                    {
                        model.startTransaction(getClass().getName() + "#login");
                        final Login login = login(user, password);
                        if(login!=null)
                        {
                            // Successful login: create the session and redirect
                            // to the application root.
                            final String name = login.getName();
                            httpSession.setAttribute(Editor.SESSION, new Session(login, name));
                            response.sendRedirect(response.encodeRedirectURL(request.getContextPath() + request.getServletPath() + '/'));
                        }
                        else
                        {
                            // Failed login: re-render the login page with the
                            // user name prefilled.
                            out = new PrintStream(response.getOutputStream(), false, CopsServlet.ENCODING);
                            Editor_Jspm.writeLogin(out, response, user);
                        }
                        model.commit();
                    }
                    finally
                    {
                        model.rollbackIfNotCommitted();
                    }
                }
                else
                {
                    // Initial GET: render an empty login page.
                    out = new PrintStream(response.getOutputStream(), false, CopsServlet.ENCODING);
                    Editor_Jspm.writeLogin(out, response, null);
                }
            }
            finally
            {
                if(out!=null)
                    out.close();
            }
        }
        else
        {
            // Already logged in: process editor-bar actions and bounce back
            // to the page the post came from.
            doBar(request, session);

            if(Cop.isPost(request))
            {
                final String referer = request.getParameter(REFERER);
                if(referer!=null)
                    response.sendRedirect(response.encodeRedirectURL(request.getContextPath() + request.getServletPath() + referer));
            }
        }
    }

    private static final String SESSION = Session.class.getCanonicalName();

    static final class Session implements Serializable // for session persistence
    {
        private static final long serialVersionUID = 1l;

        final Login login;
        final String loginName;
        // Whether editable fields are currently decorated with edit markup.
        boolean borders = false;

        Session(final Login login, final String loginName)
        {
            this.login = login;
            this.loginName = loginName;
            assert login!=null;
        }

        @Override
        public String toString()
        {
            // must not call login#getName() here,
            // because this may require a transaction,
            // which may not be present,
            // especially when this method is called by lambda probe.
            return
                (loginName!=null ? ('"' + loginName + '"') : login.getClass().getName()) +
                " borders=" + (borders ? "on" : "off");
        }
    }

    // Per-request state made available to the static edit helpers below.
    private static final class TL
    {
        final HttpServletRequest request;
        final HttpServletResponse response;
        final Session session;

        TL(final HttpServletRequest request, final HttpServletResponse response, final Session session)
        {
            this.request = request;
            this.response = response;
            this.session = session;
            assert request!=null;
            assert response!=null;
            assert session!=null;
        }
    }

    private static final ThreadLocal<TL> tls = new ThreadLocal<TL>();

    /**
     * Returns whether the current thread is processing a request
     * of a logged-in editor session.
     */
    public static final boolean isActive()
    {
        return tls.get()!=null;
    }

    @SuppressWarnings("cast") // OK: for eclipse because of the javac bug
    private static final <K> Item getItem(final MapField<K, String> map, final K key, final Item item)
    {
        return
            (Item)map.getRelationType().searchSingletonStrict( // cast is needed because of a bug in javac
                map.getKey().equal(key).and(
                Cope.equalAndCast(map.getParent(item.getCopeType().getJavaClass()), item)));
    }

    /** Wraps content in an inline (span) edit link for a map-valued field. */
    public static final <K> String edit(final String content, final MapField<K, String> feature, final Item item, final K key)
    {
        final TL tl = tls.get();
        if(tl==null || !tl.session.borders)
            return content;

        return edit(
                tl, false,
                content,
                (StringField)feature.getValue(),
                getItem(feature, key, item));
    }

    /** Wraps content in a block (div) edit link for a map-valued field. */
    public static final <K> String editBlock(final String content, final MapField<K, String> feature, final Item item, final K key)
    {
        final TL tl = tls.get();
        if(tl==null || !tl.session.borders)
            return content;

        return edit(
                tl, true,
                content,
                (StringField)feature.getValue(),
                getItem(feature, key, item));
    }

    /** Wraps content in an inline (span) edit link for a plain string field. */
    public static final String edit(final String content, final StringField feature, final Item item)
    {
        final TL tl = tls.get();
        if(tl==null || !tl.session.borders)
            return content;

        return edit(tl, false, content, feature, item);
    }

    static final String EDIT_METHOD = AVOID_COLLISION + "edit";

    /**
     * Builds the HTML wrapper that makes content editable in the browser:
     * a span/div with an onclick handler carrying the feature id, the item id
     * and the current (XML-encoded) value.
     */
    private static final String edit(final TL tl, final boolean block, final String content, final StringField feature, final Item item)
    {
        assert tl.session.borders;
        assert feature!=null;
        assert item!=null;
        assert !feature.isFinal();
        assert feature.getType().isAssignableFrom(item.getCopeType()) : item.getCopeID()+'-'+feature.getID();

        final String tag = block ? "div" : "span";
        final StringBuilder bf = new StringBuilder();
        bf.append('<').
            append(tag).
            append(
                    " class=\"contentEditorLink\"" +
                    " onclick=\"" +
                    EDIT_METHOD + "(this,'").
            append(feature.getID()).
            append("','").
            append(item.getCopeID()).
            append("','").
            // for block values, line breaks must survive the javascript string literal
            append(block ? Cop.encodeXml(feature.get(item)).replaceAll("\n", "\\\\n").replaceAll("\r", "\\\\r") : Cop.encodeXml(feature.get(item))).
            append("');return false;\"").
            append('>').
            append(content).
            append("</").
            append(tag).
            append('>');

        return bf.toString();
    }

    /**
     * Writes the editor bar markup for the current request,
     * or nothing if no editor session is active on this thread.
     */
    public static final void writeBar(final PrintStream out)
    {
        final TL tl = tls.get();
        if(tl==null)
            return;

        final HttpServletRequest request = tl.request;
        Editor_Jspm.writeBar(out,
                tl.response.encodeURL(request.getContextPath() + request.getServletPath() + LOGIN_URL_PATH_INFO),
                request.getPathInfo(),
                tl.session.borders,
                tl.session.login.getName());
    }
}
package radlab.rain.util;
import java.util.Random;
/**
 * Generates exponentially distributed random numbers with a configurable mean
 * via inverse-transform sampling. No truncation/capping is applied; callers
 * may cap the returned values themselves.
 */
public class NegativeExponential
{
    private Random _random;
    private double _mean = 0.0;

    /** Creates a generator with the given mean and a fresh RNG. */
    public NegativeExponential( double mean )
    {
        this._mean = mean;
        this._random = new Random();
    }

    /** Creates a generator with the given mean and a caller-supplied RNG (e.g. seeded for tests). */
    public NegativeExponential( double mean, Random rng )
    {
        this._mean = mean;
        this._random = rng;
    }

    // Note: we don't do any truncation e.g. to cap random numbers generated to n*mean or anything
    // clients/consumers can do any capping.
    /**
     * Returns the next exponentially distributed sample; always finite and
     * non-negative. A mean of zero yields 0.0.
     */
    public double nextDouble()
    {
        if( this._mean == 0 )
            return 0.0;

        double rndValU = this._random.nextDouble();
        // Random.nextDouble() is in [0, 1). Use log(1 - u) so the argument is
        // in (0, 1] and we never hit log(0) == -infinity (the previous
        // log(u) form returned +infinity whenever u was exactly 0).
        double next = -1 * this._mean * Math.log( 1.0 - rndValU );
        return next;
    }

    public double getMean() { return this._mean; }
    public void setMean( double val ) { this._mean = val; }

    /** Manual sanity check: prints avg/min/max of a small sample. */
    public static void main( String[] args )
    {
        double total = 0.0;
        double min = Double.MAX_VALUE;
        double max = Double.MIN_VALUE;

        int iterations = 100;//00000;
        NegativeExponential nexp = new NegativeExponential( 1/5.0 );
        for( int i = 0; i < iterations; i ++ )
        {
            double val = nexp.nextDouble();
            if( val < min )
                min = val;
            if( val > max )
                max = val;
            total += val;
            //System.out.println( val );
        }
        System.out.println( "Avg: " + (total/(double)iterations) );
        System.out.println( "Min: " + min );
        System.out.println( "Max: " + max );
    }
}
package de.kleppmann.maniation.maths;
import java.text.DecimalFormat;
public class Quaternion {
private double w, x, y, z, mag;
private Quaternion inverse;
private Quaternion(boolean nonsense) {}
public Quaternion() {
this.w = 1.0; this.x = 0.0; this.y = 0.0; this.z = 0.0;
this.inverse = new Quaternion(true);
inverse.w = 1.0; inverse.x = 0.0; inverse.y = 0.0; inverse.z = 0.0;
inverse.inverse = this;
}
public Quaternion(double w, double x, double y, double z) {
this.w = w;
this.x = x;
this.y = y;
this.z = z;
double m = w*w + x*x + y*y + z*z;
this.mag = Math.sqrt(m);
this.inverse = new Quaternion(true);
this.inverse.w = w/m;
this.inverse.x = -x/m;
this.inverse.y = -y/m;
this.inverse.z = -z/m;
this.inverse.mag = 1.0/this.mag;
this.inverse.inverse = this;
}
public Quaternion(Vector3D v) {
this.w = 0.0;
this.x = v.getComponent(0);
this.y = v.getComponent(1);
this.z = v.getComponent(2);
this.inverse = null;
}
public double getW() {
return w;
}
public double getX() {
return x;
}
public double getY() {
return y;
}
public double getZ() {
return z;
}
public String toString() {
DecimalFormat format = new DecimalFormat("
return "Quaternion(w: " + format.format(w) +
", x: " + format.format(x) +
", y: " + format.format(y) +
", z: " + format.format(z) + ")";
}
public Quaternion mult(Quaternion other) {
return new Quaternion(
this.w*other.w - this.x*other.x - this.y*other.y - this.z*other.z,
this.w*other.x + this.x*other.w + this.y*other.z - this.z*other.y,
this.w*other.y + this.y*other.w + this.z*other.x - this.x*other.z,
this.w*other.z + this.z*other.w + this.x*other.y - this.y*other.x
);
}
public Quaternion add(Quaternion other) {
return new Quaternion(this.w+other.w, this.x+other.x,
this.y+other.y, this.z+other.z);
}
public Quaternion subtract(Quaternion other) {
return new Quaternion(this.w-other.w, this.x-other.x,
this.y-other.y, this.z-other.z);
}
public Quaternion quergs(Quaternion delta) {
double mag = delta.getMagnitude();
if (mag < 1e-20) return this;
long n = Math.round(mag/Math.PI - 0.5);
double d = mag - Math.PI*(n + 0.5);
if ((d < 1e-6) && (d > -1e-6)) return new Quaternion(delta.w/mag,
delta.x/mag, delta.y/mag, delta.z/mag);
double t = Math.tan(mag)/mag;
double wn = this.w + t*delta.w;
double xn = this.x + t*delta.x;
double yn = this.y + t*delta.y;
double zn = this.z + t*delta.z;
mag = Math.sqrt(wn*wn + xn*xn + yn*yn + zn*zn);
return new Quaternion(wn/mag, xn/mag, yn/mag, zn/mag);
}
public Quaternion getInverse() {
return inverse;
}
public Vector3D transform(Vector3D v) {
if (v.getDimension() != 3) throw new IllegalArgumentException();
Quaternion t = mult(new Quaternion(v)).mult(inverse);
return new Vector3D(t.x, t.y, t.z);
}
public double getMagnitude() {
return mag;
}
public Matrix33 toMatrix() {
return new Matrix33(
1.0 - 2.0*(y*y + z*z), 2.0*(x*y - w*z), 2.0*(x*z + w*y),
2.0*(x*y + w*z), 1.0 - 2.0*(x*x + z*z), 2.0*(y*z - w*x),
2.0*(x*z - w*y), 2.0*(y*z + w*x), 1.0 - 2.0*(x*x + y*y));
}
public Quaternion interpolateTo(Quaternion dest, double amount) {
double theta = Math.acos(this.x*dest.x + this.y*dest.y + this.z*dest.z + this.w*dest.w);
double sinTheta = Math.sin(theta);
double v1 = Math.sin((1.0 - amount)*theta) / sinTheta;
double v2 = Math.sin(amount*theta) / sinTheta;
return new Quaternion(
v1*this.w + v2*dest.w,
v1*this.x + v2*dest.x,
v1*this.y + v2*dest.y,
v1*this.z + v2*dest.z);
}
public EulerAngles toEuler() {
double sy = 2.0*(w*y - x*z);
double cy = Math.sqrt(1 - sy*sy);
double sx, cx, sz, cz;
if (Math.abs(cy) < 1e-6) {
sx = 2.0*(w*x - y*z);
cx = 1.0 - 2.0*(x*x + z*z);
sz = 0.0;
cz = 1.0;
} else {
sx = 2.0*(y*z + w*x)/cy;
cx = (1.0 - 2.0*(x*x + y*y))/cy;
sz = 2.0*(x*y + w*z)/cy;
cz = (1.0 - 2.0*(y*y + z*z))/cy;
}
double rotX = Math.acos(cx); if (sx < 0) rotX = 2*Math.PI - rotX;
double rotY = Math.acos(cy); if (sy < 0) rotY = 2*Math.PI - rotY;
double rotZ = Math.acos(cz); if (sz < 0) rotZ = 2*Math.PI - rotZ;
return new EulerAngles(EulerAngles.Convention.ROLL_PITCH_YAW, rotX, rotY, rotZ);
}
public static Quaternion fromXRotation(double angle) {
return new Quaternion(Math.cos(angle/2.0), Math.sin(angle/2.0), 0.0, 0.0);
}
public static Quaternion fromYRotation(double angle) {
return new Quaternion(Math.cos(angle/2.0), 0.0, Math.sin(angle/2.0), 0.0);
}
public static Quaternion fromZRotation(double angle) {
return new Quaternion(Math.cos(angle/2.0), 0.0, 0.0, Math.sin(angle/2.0));
}
public static Quaternion fromAxisRotation(Vector3D axis, double angle) {
axis = axis.normalize();
double s = Math.sin(angle/2.0);
return new Quaternion(Math.cos(angle/2.0), s*axis.getComponent(0),
s*axis.getComponent(1), s*axis.getComponent(2));
}
public static Quaternion fromDirectionRoll(Vector3D original, Vector3D transformed, double roll) {
Quaternion rot;
original = original.normalize();
transformed = transformed.normalize();
double dotprod = original.mult(transformed);
double angle = Math.acos(dotprod);
Vector3D axis = original.cross(transformed);
if (axis.magnitude() > 1e-10) {
// Usual case: rotate about axis perpendicular to the two vectors
rot = fromAxisRotation(axis, angle);
} else {
if (dotprod < 0) {
// `original' and `transformed' point in opposite directions.
// Find an arbitrary vector orthogonal to `original'
do {
axis = original.cross(new Vector3D(Math.random(), Math.random(), Math.random()));
} while (axis.magnitude() < 1e-6);
rot = fromAxisRotation(axis, Math.PI);
} else {
// No rotation
rot = new Quaternion();
}
}
return rot.mult(fromAxisRotation(original, roll));
}
} |
package com.rultor.drain;
import com.jcabi.aspects.Immutable;
import com.jcabi.aspects.Loggable;
import com.rexsl.test.RestTester;
import com.rultor.spi.Drain;
import com.rultor.spi.Pageable;
import com.rultor.spi.Stand;
import com.rultor.spi.Work;
import com.rultor.tools.Time;
import java.io.IOException;
import java.io.InputStream;
import java.io.SequenceInputStream;
import java.io.StringWriter;
import java.net.HttpURLConnection;
import java.net.URLEncoder;
import javax.json.Json;
import javax.validation.constraints.NotNull;
import javax.ws.rs.core.MediaType;
import lombok.EqualsAndHashCode;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.CharEncoding;
import org.apache.http.HttpHeaders;
/**
 * Mirrored to a web {@link Stand}.
 *
 * @author Yegor Bugayenko (yegor@tpc2.com)
 * @version $Id$
 * @since 1.0
 * @checkstyle ClassDataAbstractionCoupling (500 lines)
 */
@Immutable
@EqualsAndHashCode(of = "origin")
@Loggable(Loggable.DEBUG)
public final class Standed implements Drain {

    /**
     * Work we're in.
     */
    private final transient Work work;

    /**
     * Original drain.
     */
    private final transient Drain origin;

    /**
     * Name of stand.
     */
    private final transient String stand;

    /**
     * Secret key of it.
     */
    private final transient String key;

    /**
     * Public ctor.
     * @param wrk Work we're in
     * @param name Name of stand
     * @param secret Secret key of the stand
     * @param drain Main drain
     * @checkstyle ParameterNumber (8 lines)
     */
    public Standed(
        @NotNull(message = "work can't be NULL") final Work wrk,
        @NotNull(message = "name of stand can't be NULL") final String name,
        @NotNull(message = "key can't be NULL") final String secret,
        @NotNull(message = "drain can't be NULL") final Drain drain) {
        this.work = wrk;
        this.stand = name;
        this.key = secret;
        this.origin = drain;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String toString() {
        return String.format(
            "%s standed at `%s`",
            this.origin, this.stand
        );
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Pageable<Time> pulses() throws IOException {
        return this.origin.pulses();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void append(final Iterable<String> lines) throws IOException {
        // Mirror every line to the stand first, then persist in the
        // underlying drain.
        // NOTE(review): if send() fails midway, earlier lines are already
        // published to the stand while nothing reaches the origin — confirm
        // this partial-publish behavior is intended.
        for (String line : lines) {
            this.send(line);
        }
        this.origin.append(lines);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public InputStream read() throws IOException {
        // Prepend a one-line header identifying the stand, then stream the
        // original drain's content.
        return new SequenceInputStream(
            IOUtils.toInputStream(
                String.format(
                    "Standed: stand='%s'\n",
                    this.stand
                )
            ),
            this.origin.read()
        );
    }

    /**
     * Send line to stand.
     * @param line The line
     * @throws IOException If fails
     */
    private void send(final String line) throws IOException {
        // Build the JSON message carrying the xembly line plus the stand
        // credentials and work coordinates.
        final StringWriter writer = new StringWriter();
        Json.createGenerator(writer)
            .writeStartObject()
            .write("stand", this.stand)
            .write("key", this.key)
            .write("xembly", line)
            .writeStartObject("work")
            .write("owner", this.work.owner().toString())
            .write("unit", this.work.unit())
            .write("started", this.work.started().toString())
            .writeEnd()
            .writeEnd()
            .close();
        // Form-encoded SendMessage call, as required by the SQS query API.
        final String body = String.format(
            "Action=SendMessage&Version=2011-10-01&MessageBody=%s",
            URLEncoder.encode(writer.toString(), CharEncoding.UTF_8)
        );
        try {
            RestTester
                .start(Stand.QUEUE)
                .header(HttpHeaders.CONTENT_ENCODING, CharEncoding.UTF_8)
                .header(
                    HttpHeaders.CONTENT_LENGTH,
                    body.getBytes(CharEncoding.UTF_8).length
                )
                .header(
                    HttpHeaders.CONTENT_TYPE,
                    MediaType.APPLICATION_FORM_URLENCODED
                )
                .post("sending one line to stand SQS queue", body)
                .assertStatus(HttpURLConnection.HTTP_OK);
        } catch (AssertionError ex) {
            // RestTester reports HTTP failures as AssertionError; convert to
            // the checked IOException declared by Drain.
            throw new IOException(ex);
        }
    }
}
package com.exedio.cope.pattern;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import com.exedio.cope.Item;
import com.exedio.cope.LengthViolationException;
import com.exedio.cope.MandatoryViolationException;
import com.exedio.cope.ObjectAttribute;
import com.exedio.cope.Pattern;
import com.exedio.cope.ReadOnlyViolationException;
import com.exedio.cope.UniqueViolationException;
/**
 * A pattern exposing a computed view of a single storage attribute.
 * Values are read/written through {@code get<Name>}/{@code set<Name>} methods
 * declared on the type's java class, which are resolved reflectively in
 * {@link #initialize()}.
 */
public final class CustomAttribute extends Pattern
{
    // Underlying persistent attribute backing this pattern.
    final ObjectAttribute storage;
    // Resolved in initialize(); null before that.
    Method getter;
    Method setter;
    Class valueType;

    public CustomAttribute(final ObjectAttribute storage)
    {
        this.storage = storage;

        if(storage==null)
            throw new NullPointerException("storage must not be null");

        registerSource(storage);
    }

    public ObjectAttribute getStorage()
    {
        return storage;
    }

    /**
     * Resolves the getter/setter methods and the value type from the
     * declaring java class; fails fast with a RuntimeException if either
     * method is missing.
     */
    public void initialize()
    {
        final String name = getName();

        if(!storage.isInitialized())
            initialize(storage, name+"Storage");

        // Capitalize the first character to derive the getXxx/setXxx names.
        final String nameUpper =
            name.length()==1
            ? Character.toString(Character.toUpperCase(name.charAt(0)))
            : (Character.toUpperCase(name.charAt(0)) + name.substring(1));
        final String nameGetter = "get" + nameUpper;
        final String nameSetter = "set" + nameUpper;
        final Class javaClass = getType().getJavaClass();

        final Method getter;
        try
        {
            getter = javaClass.getDeclaredMethod(nameGetter, (Class[])null);
        }
        catch(NoSuchMethodException e)
        {
            throw new RuntimeException("no suitable getter method found for custom attribute "+name, e);
        }
        // The setter must take exactly one parameter of the getter's return type.
        final Class valueType = getter.getReturnType();

        final Method setter;
        try
        {
            setter = javaClass.getDeclaredMethod(nameSetter, new Class[]{valueType});
        }
        catch(NoSuchMethodException e)
        {
            throw new RuntimeException("no suitable setter method found for custom attribute "+name, e);
        }

        this.getter = getter;
        this.setter = setter;
        this.valueType = valueType;
    }

    final Class getValueType()
    {
        assert valueType!=null;
        return valueType;
    }

    /**
     * Reads the computed value by invoking the resolved getter on the item.
     * Reflection failures are wrapped in RuntimeException.
     */
    public final Object get(final Item item)
    {
        try
        {
            return getter.invoke(item, (Object[])null);
        }
        catch(IllegalArgumentException e)
        {
            throw new RuntimeException(e);
        }
        catch(IllegalAccessException e)
        {
            throw new RuntimeException(e);
        }
        catch(InvocationTargetException e)
        {
            // NOTE(review): the exception thrown by the getter is wrapped,
            // not unwrapped — callers see RuntimeException(InvocationTargetException).
            throw new RuntimeException(e);
        }
    }

    /**
     * Writes the computed value by invoking the resolved setter on the item.
     * Reflection failures are wrapped in RuntimeException.
     */
    public final void set(final Item item, final Object value)
    throws
        UniqueViolationException,
        MandatoryViolationException,
        LengthViolationException,
        ReadOnlyViolationException
    {
        try
        {
            setter.invoke(item, new Object[]{value});
        }
        catch(IllegalArgumentException e)
        {
            throw new RuntimeException(e);
        }
        catch(IllegalAccessException e)
        {
            throw new RuntimeException(e);
        }
        catch(InvocationTargetException e)
        {
            throw new RuntimeException(e);
        }
    }
}
package net.kikuchy.genum;
import net.kikuchy.genum.entity.EnumeratorMetaData;
import net.kikuchy.genum.entity.EnumeratorValue;
import net.kikuchy.genum.internal.StringUtil;
import org.apache.commons.lang3.tuple.Pair;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.List;
import java.util.zip.DataFormatException;
/**
* Generate Java Enum class file from arrayed data source.
* Genum needs package name, enum's class name, data source and java file's destination.
* <p>
* Genum can use custom {@link SourceLoader}, so you can use JSON or other format if you make the custom loader.
* </p>
*/
public final class Genum {
private String packageName;
private String className;
private GenumOption option;
private SourceLoader sourceLoader;
/**
* Execute generation of java enum source code.
*
* @param sourceArrayStream {@link InputStream} of arrayed data source.
* @param generatedCodeDestination {@link OutputStream} that generated java enum source code will be written.
* @throws DataFormatException Thrown if data source format is not supported.
* @throws IOException If streams not worked well.
*/
public void generate(InputStream sourceArrayStream, OutputStream generatedCodeDestination)
throws DataFormatException, IOException {
List<EnumeratorValue> values = sourceLoader.parse(sourceArrayStream, "");
EnumeratorMetaData metaData = new EnumeratorMetaData(packageName, className, values);
SourceGenerator adapter = new SourceGenerator(metaData);
adapter.adapt(generatedCodeDestination);
}
static class Builder {
private String packageName;
private String className;
private GenumOption option;
private SourceLoader sourceLoader;
public Builder(String packageName, String className, SourceLoader sourceLoader) {
this.className = className;
this.packageName = packageName;
this.sourceLoader = sourceLoader;
}
public Builder(String canonicalClassName, SourceLoader sourceLoader) {
Pair<String, String> packClass = StringUtil.splitPackageNameAndClassName(canonicalClassName);
this.className = packClass.getRight();
this.packageName = packClass.getLeft();
this.sourceLoader = sourceLoader;
}
public Builder setOption(GenumOption option) {
this.option = option;
return this;
}
public Genum build() {
Genum genum = new Genum();
genum.sourceLoader = sourceLoader;
genum.packageName = packageName;
genum.className = className;
genum.option = option;
return genum;
}
}
} |
import integercompression.*;
import java.util.*;
public class example {
/** Entry point: runs the single-call demo, then the chunked demo. */
public static void main(String[] args) {
    basicExample();
    advancedExample();
}
/**
 * Compresses a full sorted int array in one call and verifies that
 * decompression recovers it exactly (throws RuntimeException otherwise).
 */
public static void basicExample() {
    int[] data = new int[2342351];
    System.out.println("Compressing "+data.length+" integers in one go");
    // data should be sorted for best
    //results
    for(int k = 0; k < data.length; ++k)
        data[k] = k;
    // Very important: the data is in sorted order!!! If not, you
    // will get very poor compression with IntegratedBinaryPacking,
    // you should use another CODEC.

    // next we compose a CODEC. Most of the processing
    // will be done with binary packing, and leftovers will
    // be processed using variable byte
    IntegratedIntegerCODEC codec = new
        IntegratedComposition(
            new IntegratedBinaryPacking(),
            new IntegratedVariableByte());
    // output vector should be large enough...
    int [] compressed = new int[data.length];
    // compressed might not be large enough in some cases
    // if you get java.lang.ArrayIndexOutOfBoundsException, try
    // allocating more memory

    /**
     *
     * compressing
     *
     */
    IntWrapper inputoffset = new IntWrapper(0);
    IntWrapper outputoffset = new IntWrapper(0);
    codec.compress(data,inputoffset,data.length,compressed,outputoffset);
    // got it!
    // inputoffset should be at data.length but outputoffset tells
    // us where we are...
    System.out.println("compressed from "+data.length*4/1024+"KB to "+outputoffset.intValue()*4/1024+"KB");
    // we can repack the data: (optional)
    compressed = Arrays.copyOf(compressed,outputoffset.intValue());

    /**
     *
     * now uncompressing
     *
     */
    int[] recovered = new int[data.length];
    IntWrapper recoffset = new IntWrapper(0);
    codec.uncompress(compressed,new IntWrapper(0),compressed.length,recovered,recoffset);
    if(Arrays.equals(data,recovered))
        System.out.println("data is recovered without loss");
    else
        throw new RuntimeException("bug"); // could use assert
    System.out.println();
}
/**
* This is like the basic example, but we
* show how to process larger arrays in chunks.
*
* Some of this code was written by Pavel Klinov.
*/
public static void advancedExample() {
int TotalSize = 2342351; // some arbitrary number
int ChunkSize = 16384; // size of each chunk, choose a multiple of 128
System.out.println("Compressing "+TotalSize+" integers using chunks of "+ChunkSize+" integers ("+ChunkSize*4/1024+"KB)");
System.out.println("(It is often better for applications to work in chunks fitting in CPU cache.)");
int[] data = new int[TotalSize];
// data should be sorted for best
//results
for(int k = 0; k < data.length; ++k)
data[k] = k;
// next we compose a CODEC. Most of the processing
// will be done with binary packing, and leftovers will
// be processed using variable byte, using variable byte
// only for the last chunk!
IntegratedIntegerCODEC regularcodec = new
IntegratedBinaryPacking();
IntegratedVariableByte ivb = new IntegratedVariableByte();
IntegratedIntegerCODEC lastcodec = new
IntegratedComposition(regularcodec,ivb);
// output vector should be large enough...
int [] compressed = new int[TotalSize];
/**
*
* compressing
*
*/
IntWrapper inputoffset = new IntWrapper(0);
IntWrapper outputoffset = new IntWrapper(0);
for(int k = 0; k < TotalSize / ChunkSize; ++k)
regularcodec.compress(data,inputoffset,ChunkSize,compressed,outputoffset);
lastcodec.compress(data, inputoffset, TotalSize % ChunkSize, compressed, outputoffset);
// got it!
// inputoffset should be at data.length but outputoffset tells
// us where we are...
System.out.println("compressed from "+data.length*4/1024+"KB to "+outputoffset.intValue()*4/1024+"KB");
// we can repack the data:
compressed = Arrays.copyOf(compressed,outputoffset.intValue());
/**
*
* now uncompressing
*
* We are *not* assuming that the original array length is known, however
* we assume that the chunk size (ChunkSize) is known.
*
*/
int[] recovered = new int[ChunkSize];
IntWrapper compoff = new IntWrapper(0);
IntWrapper recoffset;
int currentpos = 0;
while(compoff.get()<compressed.length) {
recoffset = new IntWrapper(0);
regularcodec.uncompress(compressed,compoff,compressed.length - compoff.get(),recovered,recoffset);
if(recoffset.get() < ChunkSize) {// last chunk detected
ivb.uncompress(compressed,compoff,compressed.length - compoff.get(),recovered,recoffset);
}
for(int i = 0; i < recoffset.get(); ++i) {
if(data[currentpos+i] != recovered[i]) throw new RuntimeException("bug"); // could use assert
}
currentpos += recoffset.get();
}
System.out.println("data is recovered without loss");
System.out.println();
}
} |
package kawa.lang;
import codegen.*;
/**
 * Class used to implement "let" syntax (and variants) for Scheme.
 * Holds the initializer expressions and the body; the bound variables
 * themselves live in the inherited ScopeExp scope.
 * @author	Per Bothner
 */
public class LetExp extends ScopeExp
{
// One initializer per let-bound (non-artificial) variable, in source order.
Expression[] inits;
public Expression body;
public LetExp (Expression[] i) { inits = i; }
// LetExp is compile-only; it is never evaluated directly by the interpreter.
public Object eval (Environment env)
throws UnboundSymbol, WrongArguments, WrongType, GenericError
{
throw new GenericError ("internal error - LetExp.eval called");
}
/* Recursive helper routine, to store the values on the stack
* into the variables in vars, in reverse order. */
// Recurses to the end of the variable list first, so stores happen
// last-variable-first — matching the stack, where the last-pushed
// initializer value is on top.
private final void store_rest (Compilation comp, Variable vars)
{
if (vars != null)
{
store_rest (comp, vars.nextVar ());
SetExp.compile_store ((Declaration) vars, comp);
}
}
/**
 * Compiles the let form: evaluates all initializers (values accumulate
 * on the JVM stack), enters the scope, pops the values into their
 * variables in reverse order, then compiles the body.
 */
public void compile (Compilation comp, int flags)
{
/* Compile all the initializations, leaving the results
on the stack (in reverse order). */
for (int i = 0; i < inits.length; i++)
inits[i].compile (comp, 0);
comp.method.enterScope (scope);
/* Assign the initial values to the proper variables, in reverse order. */
store_rest (comp, firstVar ());
body.compile_with_linenumber (comp, flags);
comp.method.pop_scope ();
}
/** Prints a debug representation: (#%let ((name init) ...) body). */
public void print (java.io.PrintStream ps)
{
ps.print("(#%let (");
Variable var = firstVar ();
// i indexes inits[] and only advances for non-artificial variables,
// since artificial (compiler-introduced) variables have no initializer.
int i = 0;
for (; var != null; var = var.nextVar ())
{
if (i > 0)
ps.print(" ");
ps.print("(");
ps.print(((Declaration) var).string_name());
ps.print(" ");
if (var.isArtificial ())
ps.print ("<artificial>");
else
{
if (inits[i] == null)
ps.print ("<null>");
else
inits[i].print (ps);
i++;
}
ps.print(")");
}
ps.print(") ");
body.print (ps);
ps.print(")");
}
}
package ly.count.android.sdk;
import android.app.Activity;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.content.pm.PackageManager;
import android.util.Log;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Calendar;
import java.util.List;
import java.util.Map;
import java.util.HashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
/**
 * Main entry point of the Count.ly Android SDK.
 * Singleton (via {@link #sharedInstance()}) that queues events, tracks
 * sessions per Activity start/stop, reports views and crashes, and flushes
 * data to the server through a ConnectionQueue.
 */
public class Countly {
/**
* Current version of the Count.ly Android SDK as a displayable string.
*/
public static final String COUNTLY_SDK_VERSION_STRING = "15.08.01";
/**
* Default string used in the begin session metrics if the
* app version cannot be found.
*/
public static final String DEFAULT_APP_VERSION = "1.0";
/**
* Tag used in all logging in the Count.ly SDK.
*/
public static final String TAG = "Countly";
/**
* Determines how many custom events can be queued locally before
* an attempt is made to submit them to a Count.ly server.
*/
private static final int EVENT_QUEUE_SIZE_THRESHOLD = 10;
/**
* How often onTimer() is called.
*/
private static final long TIMER_DELAY_IN_SECONDS = 60;
// Certificates used for public key pinning; shared by all connections.
protected static List<String> publicKeyPinCertificates;
/**
* Enum used in Countly.initMessaging() method which controls what kind of
* app installation it is. Later (in Countly Dashboard or when calling Countly API method),
* you'll be able to choose whether you want to send a message to test devices,
* or to production ones.
*/
public static enum CountlyMessagingMode {
TEST,
PRODUCTION,
}
// Lazy, thread-safe singleton holder (initialization-on-demand-holder idiom).
private static class SingletonHolder {
static final Countly instance = new Countly();
}
private ConnectionQueue connectionQueue_;
@SuppressWarnings("FieldCanBeLocal")
private ScheduledExecutorService timerService_;
private EventQueue eventQueue_;
// System.nanoTime() at which the not-yet-reported session slice began.
private long prevSessionDurationStartTime_;
// Count of started-but-not-stopped Activities; a session is active while > 0.
private int activityCount_;
private boolean disableUpdateSessionRequests_;
private boolean enableLogging_;
private Countly.CountlyMessagingMode messagingMode_;
private Context context_;
//user data access
public static UserData userData;
//track views
private String lastView = null;
private int lastViewStart = 0;
private boolean firstView = true;
private boolean autoViewTracker = false;
/**
* Returns the Countly singleton.
*/
public static Countly sharedInstance() {
return SingletonHolder.instance;
}
/**
* Constructs a Countly object.
* Creates a new ConnectionQueue and initializes the session timer.
*/
Countly() {
connectionQueue_ = new ConnectionQueue();
Countly.userData = new UserData(connectionQueue_);
timerService_ = Executors.newSingleThreadScheduledExecutor();
timerService_.scheduleWithFixedDelay(new Runnable() {
@Override
public void run() {
onTimer();
}
}, TIMER_DELAY_IN_SECONDS, TIMER_DELAY_IN_SECONDS, TimeUnit.SECONDS);
}
/**
* Initializes the SDK with an automatically chosen device ID
* (OpenUDID if available, otherwise the Google Advertising ID).
*/
public Countly init(final Context context, final String serverURL, final String appKey) {
return init(context, serverURL, appKey, null, OpenUDIDAdapter.isOpenUDIDAvailable() ? DeviceId.Type.OPEN_UDID : DeviceId.Type.ADVERTISING_ID);
}
/**
* Initializes the SDK with a developer-supplied device ID.
*/
public Countly init(final Context context, final String serverURL, final String appKey, final String deviceID) {
return init(context, serverURL, appKey, deviceID, null);
}
/**
* Initializes the SDK. Validates all arguments, resolves the device ID
* strategy, and wires up the connection and event queues. Calling init
* again with the same values is a no-op; calling it with different values
* throws IllegalStateException. Returns this instance for chaining.
*/
public synchronized Countly init(final Context context, final String serverURL, final String appKey, final String deviceID, DeviceId.Type idMode) {
if (context == null) {
throw new IllegalArgumentException("valid context is required");
}
if (!isValidURL(serverURL)) {
throw new IllegalArgumentException("valid serverURL is required");
}
if (appKey == null || appKey.length() == 0) {
throw new IllegalArgumentException("valid appKey is required");
}
if (deviceID != null && deviceID.length() == 0) {
throw new IllegalArgumentException("valid deviceID is required");
}
if (deviceID == null && idMode == null) {
if (OpenUDIDAdapter.isOpenUDIDAvailable()) idMode = DeviceId.Type.OPEN_UDID;
else if (AdvertisingIdAdapter.isAdvertisingIdAvailable()) idMode = DeviceId.Type.ADVERTISING_ID;
}
if (deviceID == null && idMode == DeviceId.Type.OPEN_UDID && !OpenUDIDAdapter.isOpenUDIDAvailable()) {
throw new IllegalArgumentException("valid deviceID is required because OpenUDID is not available");
}
if (deviceID == null && idMode == DeviceId.Type.ADVERTISING_ID && !AdvertisingIdAdapter.isAdvertisingIdAvailable()) {
throw new IllegalArgumentException("valid deviceID is required because Advertising ID is not available (you need to include Google Play services 4.0+ into your project)");
}
if (eventQueue_ != null && (!connectionQueue_.getServerURL().equals(serverURL) ||
!connectionQueue_.getAppKey().equals(appKey) ||
!DeviceId.deviceIDEqualsNullSafe(deviceID, idMode, connectionQueue_.getDeviceId()) )) {
throw new IllegalStateException("Countly cannot be reinitialized with different values");
}
// In some cases CountlyMessaging does some background processing, so it needs a way
// to start Countly on itself
if (MessagingAdapter.isMessagingAvailable()) {
MessagingAdapter.storeConfiguration(context, serverURL, appKey, deviceID, idMode);
}
// if we get here and eventQueue_ != null, init is being called again with the same values,
// so there is nothing to do, because we are already initialized with those values
if (eventQueue_ == null) {
DeviceId deviceIdInstance;
if (deviceID != null) {
deviceIdInstance = new DeviceId(deviceID);
} else {
deviceIdInstance = new DeviceId(idMode);
}
final CountlyStore countlyStore = new CountlyStore(context);
deviceIdInstance.init(context, countlyStore, true);
connectionQueue_.setServerURL(serverURL);
connectionQueue_.setAppKey(appKey);
connectionQueue_.setCountlyStore(countlyStore);
connectionQueue_.setDeviceId(deviceIdInstance);
eventQueue_ = new EventQueue(countlyStore);
}
context_ = context;
// context is allowed to be changed on the second init call
connectionQueue_.setContext(context);
return this;
}
/**
* Checks whether Countly.init has been already called.
* @return true if Countly is ready to use
*/
public synchronized boolean isInitialized() {
return eventQueue_ != null;
}
/**
* Initializes Countly Messaging without custom button names.
* See the five-argument overload for details.
*/
public Countly initMessaging(Activity activity, Class<? extends Activity> activityClass, String projectID, Countly.CountlyMessagingMode mode) {
return initMessaging(activity, activityClass, projectID, null, mode);
}
/**
* Initializes Countly Messaging (push). Requires the messaging variant of
* the SDK library; throws IllegalStateException otherwise or if the
* MessagingAdapter fails to initialize. Returns this instance for chaining.
*/
public synchronized Countly initMessaging(Activity activity, Class<? extends Activity> activityClass, String projectID, String[] buttonNames, Countly.CountlyMessagingMode mode) {
if (mode != null && !MessagingAdapter.isMessagingAvailable()) {
throw new IllegalStateException("you need to include countly-messaging-sdk-android library instead of countly-sdk-android if you want to use Countly Messaging");
} else {
messagingMode_ = mode;
if (!MessagingAdapter.init(activity, activityClass, projectID, buttonNames)) {
throw new IllegalStateException("couldn't initialize Countly Messaging");
}
}
if (MessagingAdapter.isMessagingAvailable()) {
MessagingAdapter.storeConfiguration(connectionQueue_.getContext(), connectionQueue_.getServerURL(), connectionQueue_.getAppKey(), connectionQueue_.getDeviceId().getId(), connectionQueue_.getDeviceId().getType());
}
return this;
}
/**
* Shuts the SDK down: clears the event queue, the local store, and all
* connection configuration, and resets session state. Mainly for testing.
*/
public synchronized void halt() {
eventQueue_ = null;
final CountlyStore countlyStore = connectionQueue_.getCountlyStore();
if (countlyStore != null) {
countlyStore.clear();
}
connectionQueue_.setContext(null);
connectionQueue_.setServerURL(null);
connectionQueue_.setAppKey(null);
connectionQueue_.setCountlyStore(null);
prevSessionDurationStartTime_ = 0;
activityCount_ = 0;
}
/**
* Must be called from each Activity's onStart(). Begins a session when the
* first Activity starts, forwards any install-referrer data, and records
* the Activity as a view when automatic view tracking is enabled.
* @throws IllegalStateException if init has not been called
*/
public synchronized void onStart(Activity activity) {
if (eventQueue_ == null) {
throw new IllegalStateException("init must be called before onStart");
}
++activityCount_;
if (activityCount_ == 1) {
onStartHelper();
}
//check if there is an install referrer data
String referrer = ReferrerReceiver.getReferrer(context_);
if (Countly.sharedInstance().isLoggingEnabled()) {
Log.d(Countly.TAG, "Checking referrer: " + referrer);
}
if(referrer != null){
connectionQueue_.sendReferrerData(referrer);
ReferrerReceiver.deleteReferrer(context_);
}
CrashDetails.inForeground();
if(autoViewTracker){
recordView(activity.getClass().getName());
}
}
/**
* Called when the first Activity is started. Sends a begin session event to the server
* and initializes application session tracking.
*/
void onStartHelper() {
prevSessionDurationStartTime_ = System.nanoTime();
connectionQueue_.beginSession();
}
/**
* Must be called from each Activity's onStop(). Ends the session when the
* last Activity stops and reports the current view's duration.
* @throws IllegalStateException if init was not called or onStart/onStop
* calls are unbalanced
*/
public synchronized void onStop() {
if (eventQueue_ == null) {
throw new IllegalStateException("init must be called before onStop");
}
if (activityCount_ == 0) {
throw new IllegalStateException("must call onStart before onStop");
}
--activityCount_;
if (activityCount_ == 0) {
onStopHelper();
}
CrashDetails.inBackground();
//report current view duration
reportViewDuration();
}
/**
* Called when final Activity is stopped. Sends an end session event to the server,
* also sends any unsent custom events.
*/
void onStopHelper() {
connectionQueue_.endSession(roundedSecondsSinceLastSessionDurationUpdate());
prevSessionDurationStartTime_ = 0;
if (eventQueue_.size() > 0) {
connectionQueue_.recordEvents(eventQueue_.events());
}
}
/**
* Called when GCM Registration ID is received. Sends a token session event to the server.
*/
public void onRegistrationId(String registrationId) {
connectionQueue_.tokenSession(registrationId, messagingMode_);
}
/** Records a custom event with count 1 and no segmentation or sum. */
public void recordEvent(final String key) {
recordEvent(key, null, 1, 0);
}
/** Records a custom event with the given count and no segmentation or sum. */
public void recordEvent(final String key, final int count) {
recordEvent(key, null, count, 0);
}
/** Records a custom event with the given count and sum, no segmentation. */
public void recordEvent(final String key, final int count, final double sum) {
recordEvent(key, null, count, sum);
}
/** Records a segmented custom event with the given count and no sum. */
public void recordEvent(final String key, final Map<String, String> segmentation, final int count) {
recordEvent(key, segmentation, count, 0);
}
/**
* Records a custom event. Validates the key, count (must be >= 1) and every
* segmentation key/value (must be non-empty), queues the event locally, and
* submits queued events if the threshold is reached.
* @throws IllegalStateException if init has not been called
* @throws IllegalArgumentException on invalid key, count or segmentation
*/
public synchronized void recordEvent(final String key, final Map<String, String> segmentation, final int count, final double sum) {
if (!isInitialized()) {
throw new IllegalStateException("Countly.sharedInstance().init must be called before recordEvent");
}
if (key == null || key.length() == 0) {
throw new IllegalArgumentException("Valid Countly event key is required");
}
if (count < 1) {
throw new IllegalArgumentException("Countly event count should be greater than zero");
}
if (segmentation != null) {
for (String k : segmentation.keySet()) {
if (k == null || k.length() == 0) {
throw new IllegalArgumentException("Countly event segmentation key cannot be null or empty");
}
if (segmentation.get(k) == null || segmentation.get(k).length() == 0) {
throw new IllegalArgumentException("Countly event segmentation value cannot be null or empty");
}
}
}
eventQueue_.recordEvent(key, segmentation, count, sum);
sendEventsIfNeeded();
}
/**
* Enable or disable automatic view tracking
* @param enable boolean for the state of automatic view tracking
*/
public synchronized Countly setViewTracking(boolean enable){
autoViewTracker = enable;
return this;
}
/**
* Check state of automatic view tracking
* @return boolean - true if enabled, false if disabled
*/
public synchronized boolean isViewTrackingEnabled(){
return autoViewTracker;
}
/* Record a view manually, without automatic tracking
* or track view that is not automatically tracked
* like fragment, Message box or transparent Activity
* @param boolean - true if enabled, false if disabled
*/
public synchronized Countly recordView(String viewName){
// Close out the previous view first, then start timing this one.
reportViewDuration();
lastView = viewName;
lastViewStart = Countly.currentTimestamp();
HashMap<String, String> segments = new HashMap<String, String>();
segments.put("name", viewName);
segments.put("visit", "1");
segments.put("segment", "Android");
if(firstView) {
firstView = false;
segments.put("start", "1");
}
recordEvent("[CLY]_view", segments, 1);
return this;
}
/**
* Sets information about user. Possible keys are:
* <ul>
* <li>
* name - (String) providing user's full name
* </li>
* <li>
* username - (String) providing user's nickname
* </li>
* <li>
* email - (String) providing user's email address
* </li>
* <li>
* organization - (String) providing user's organization's name where user works
* </li>
* <li>
* phone - (String) providing user's phone number
* </li>
* <li>
* picture - (String) providing WWW URL to user's avatar or profile picture
* </li>
* <li>
* picturePath - (String) providing local path to user's avatar or profile picture
* </li>
* <li>
* gender - (String) providing user's gender as M for male and F for female
* </li>
* <li>
* byear - (int) providing user's year of birth as integer
* </li>
* </ul>
* @param data Map<String, String> with user data
* @deprecated use {@link #Countly().sharedInstance().userData.setUserData(Map<String, String>)} to set data and {@link #Countly().sharedInstance().userData.save()} to send it to server.
*/
public synchronized Countly setUserData(Map<String, String> data) {
return setUserData(data, null);
}
/**
* Sets information about user with custom properties.
* In custom properties you can provide any string key values to be stored with user
* Possible keys are:
* <ul>
* <li>
* name - (String) providing user's full name
* </li>
* <li>
* username - (String) providing user's nickname
* </li>
* <li>
* email - (String) providing user's email address
* </li>
* <li>
* organization - (String) providing user's organization's name where user works
* </li>
* <li>
* phone - (String) providing user's phone number
* </li>
* <li>
* picture - (String) providing WWW URL to user's avatar or profile picture
* </li>
* <li>
* picturePath - (String) providing local path to user's avatar or profile picture
* </li>
* <li>
* gender - (String) providing user's gender as M for male and F for female
* </li>
* <li>
* byear - (int) providing user's year of birth as integer
* </li>
* </ul>
* @param data Map<String, String> with user data
* @param customdata Map<String, String> with custom key values for this user
* @deprecated use {@link #Countly().sharedInstance().userData.setUserData(Map<String, String>, Map<String, String>)} to set data and {@link #Countly().sharedInstance().userData.save()} to send it to server.
*/
public synchronized Countly setUserData(Map<String, String> data, Map<String, String> customdata) {
UserData.setData(data);
if(customdata != null)
UserData.setCustomData(customdata);
connectionQueue_.sendUserData();
UserData.clear();
return this;
}
/**
* Sets custom properties.
* In custom properties you can provide any string key values to be stored with user
* @param customdata Map<String, String> with custom key values for this user
* @deprecated use {@link #Countly().sharedInstance().userData.setCustomUserData(Map<String, String>)} to set data and {@link #Countly().sharedInstance().userData.save()} to send it to server.
*/
public synchronized Countly setCustomUserData(Map<String, String> customdata) {
if(customdata != null)
UserData.setCustomData(customdata);
connectionQueue_.sendUserData();
UserData.clear();
return this;
}
/**
* Set user location.
*
* Countly detects user location based on IP address. But for geolocation-enabled apps,
* it's better to supply exact location of user.
* Allows sending messages to a custom segment of users located in a particular area.
*
* @param lat Latitude
* @param lon Longitude
*/
public synchronized Countly setLocation(double lat, double lon) {
connectionQueue_.getCountlyStore().setLocation(lat, lon);
// NOTE(review): a session update is sent here only when periodic session
// updates are DISABLED — presumably to push the location promptly in that
// mode; confirm the condition is not accidentally inverted.
if (disableUpdateSessionRequests_) {
connectionQueue_.updateSession(roundedSecondsSinceLastSessionDurationUpdate());
}
return this;
}
/**
* Sets custom segments to be reported with crash reports
* In custom segments you can provide any string key values to segments crashes by
* @param segments Map<String, String> key segments and their values
*/
public synchronized Countly setCustomCrashSegments(Map<String, String> segments) {
if(segments != null)
CrashDetails.setCustomSegments(segments);
return this;
}
/**
* Add crash breadcrumb like log record to the log that will be send together with crash report
* @param record String a bread crumb for the crash report
*/
public synchronized Countly addCrashLog(String record) {
CrashDetails.addLog(record);
return this;
}
/**
* Log handled exception to report it to server as non fatal crash
* @param exception Exception to log
*/
public synchronized Countly logException(Exception exception) {
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
exception.printStackTrace(pw);
connectionQueue_.sendCrashReport(sw.toString(), true);
return this;
}
/**
* Enable crash reporting to send unhandled crash reports to server
*/
public synchronized Countly enableCrashReporting() {
//get default handler
final Thread.UncaughtExceptionHandler oldHandler = Thread.getDefaultUncaughtExceptionHandler();
Thread.UncaughtExceptionHandler handler = new Thread.UncaughtExceptionHandler() {
@Override
public void uncaughtException(Thread t, Throwable e) {
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
e.printStackTrace(pw);
Countly.sharedInstance().connectionQueue_.sendCrashReport(sw.toString(), false);
//if there was another handler before
if(oldHandler != null){
//notify it also
oldHandler.uncaughtException(t,e);
}
}
};
Thread.setDefaultUncaughtExceptionHandler(handler);
return this;
}
/**
* Disable periodic session time updates.
* By default, Countly will send a request to the server each 30 seconds with a small update
* containing session duration time. This method allows you to disable such behavior.
* Note that event updates will still be sent every 10 events or 30 seconds after event recording.
* @param disable whether or not to disable session time updates
* @return Countly instance for easy method chaining
*/
public synchronized Countly setDisableUpdateSessionRequests(final boolean disable) {
disableUpdateSessionRequests_ = disable;
return this;
}
/**
* Sets whether debug logging is turned on or off. Logging is disabled by default.
* @param enableLogging true to enable logging, false to disable logging
* @return Countly instance for easy method chaining
*/
public synchronized Countly setLoggingEnabled(final boolean enableLogging) {
enableLogging_ = enableLogging;
return this;
}
/** @return true when debug logging is enabled */
public synchronized boolean isLoggingEnabled() {
return enableLogging_;
}
/**
* Reports duration of last view
*/
void reportViewDuration(){
if(lastView != null){
HashMap<String, String> segments = new HashMap<String, String>();
segments.put("name", lastView);
/*
TODO:
report event with duration
*/
//recordEvent("[CLY]_view", Countly.currentTimestamp()-lastViewStart, segments);
lastView = null;
lastViewStart = 0;
}
}
/**
* Submits all of the locally queued events to the server if there are more than 10 of them.
*/
void sendEventsIfNeeded() {
if (eventQueue_.size() >= EVENT_QUEUE_SIZE_THRESHOLD) {
connectionQueue_.recordEvents(eventQueue_.events());
}
}
/**
* Called every 60 seconds to send a session heartbeat to the server. Does nothing if there
* is not an active application session.
*/
synchronized void onTimer() {
final boolean hasActiveSession = activityCount_ > 0;
if (hasActiveSession) {
if (!disableUpdateSessionRequests_) {
connectionQueue_.updateSession(roundedSecondsSinceLastSessionDurationUpdate());
}
if (eventQueue_.size() > 0) {
connectionQueue_.recordEvents(eventQueue_.events());
}
}
}
/**
* Calculates the unsent session duration in seconds, rounded to the nearest int.
*/
int roundedSecondsSinceLastSessionDurationUpdate() {
final long currentTimestampInNanoseconds = System.nanoTime();
final long unsentSessionLengthInNanoseconds = currentTimestampInNanoseconds - prevSessionDurationStartTime_;
prevSessionDurationStartTime_ = currentTimestampInNanoseconds;
return (int) Math.round(unsentSessionLengthInNanoseconds / 1000000000.0d);
}
/**
* Utility method to return a current timestamp that can be used in the Count.ly API.
*/
static int currentTimestamp() {
return ((int)(System.currentTimeMillis() / 1000l));
}
/**
* Utility method to return a current hour of the day that can be used in the Count.ly API.
*/
static int currentHour(){return Calendar.getInstance().get(Calendar.HOUR_OF_DAY); }
/**
* Utility method to return a current day of the week that can be used in the Count.ly API.
* Maps Monday..Saturday to 1..6 and everything else (Sunday) to 0.
*/
static int currentDayOfWeek(){
int day = Calendar.getInstance().get(Calendar.DAY_OF_WEEK);
switch (day) {
case Calendar.MONDAY:
return 1;
case Calendar.TUESDAY:
return 2;
case Calendar.WEDNESDAY:
return 3;
case Calendar.THURSDAY:
return 4;
case Calendar.FRIDAY:
return 5;
case Calendar.SATURDAY:
return 6;
}
return 0;
}
/**
* Utility method for testing validity of a URL.
*/
static boolean isValidURL(final String urlStr) {
boolean validURL = false;
if (urlStr != null && urlStr.length() > 0) {
try {
new URL(urlStr);
validURL = true;
}
catch (MalformedURLException e) {
validURL = false;
}
}
return validURL;
}
/**
* Installs the certificate list used for public key pinning on all future
* server connections. Returns the shared instance for chaining.
*/
public static Countly enablePublicKeyPinning(List<String> certificates) {
publicKeyPinCertificates = certificates;
return Countly.sharedInstance();
}
// for unit testing
ConnectionQueue getConnectionQueue() { return connectionQueue_; }
void setConnectionQueue(final ConnectionQueue connectionQueue) { connectionQueue_ = connectionQueue; }
ExecutorService getTimerService() { return timerService_; }
EventQueue getEventQueue() { return eventQueue_; }
void setEventQueue(final EventQueue eventQueue) { eventQueue_ = eventQueue; }
long getPrevSessionDurationStartTime() { return prevSessionDurationStartTime_; }
void setPrevSessionDurationStartTime(final long prevSessionDurationStartTime) { prevSessionDurationStartTime_ = prevSessionDurationStartTime; }
int getActivityCount() { return activityCount_; }
synchronized boolean getDisableUpdateSessionRequests() { return disableUpdateSessionRequests_; }
// Deliberate infinite recursion used by crashTest(1) to trigger a
// StackOverflowError. Do not "fix".
public void stackOverflow() {
this.stackOverflow();
}
/**
* Deliberately crashes the app in one of five ways to test crash reporting:
* 1 = stack overflow, 2 = divide by zero, 3 = OutOfMemoryError,
* 4 = RuntimeException, anything else = NullPointerException.
*/
public synchronized Countly crashTest(int crashNumber) {
if (crashNumber == 1){
if (Countly.sharedInstance().isLoggingEnabled()) {
Log.d(Countly.TAG, "Running crashTest 1");
}
stackOverflow();
}else if (crashNumber == 2){
if (Countly.sharedInstance().isLoggingEnabled()) {
Log.d(Countly.TAG, "Running crashTest 2");
}
int test = 10/0;
}else if (crashNumber == 3){
if (Countly.sharedInstance().isLoggingEnabled()) {
Log.d(Countly.TAG, "Running crashTest 3");
}
Object[] o = null;
while (true) { o = new Object[] { o }; }
}else if (crashNumber == 4){
if (Countly.sharedInstance().isLoggingEnabled()) {
Log.d(Countly.TAG, "Running crashTest 4");
}
throw new RuntimeException("This is a crash");
}
else{
if (Countly.sharedInstance().isLoggingEnabled()) {
Log.d(Countly.TAG, "Running crashTest 5");
}
String test = null;
test.charAt(1);
}
return Countly.sharedInstance();
}
}
package eggdropsoap.spreadinglilypads;
//import java.lang.reflect.*;
import net.minecraft.block.Block;
import net.minecraft.block.BlockLilyPad;
import net.minecraft.creativetab.CreativeTabs;
import net.minecraftforge.common.Configuration;
import cpw.mods.fml.common.Mod;
import cpw.mods.fml.common.Mod.EventHandler; // used in 1.6.2
//import cpw.mods.fml.common.Mod.PreInit; // used in 1.5.2
//import cpw.mods.fml.common.Mod.Init; // used in 1.5.2
//import cpw.mods.fml.common.Mod.PostInit; // used in 1.5.2
import cpw.mods.fml.common.Mod.Instance;
import cpw.mods.fml.common.SidedProxy;
import cpw.mods.fml.common.event.FMLInitializationEvent;
import cpw.mods.fml.common.event.FMLPostInitializationEvent;
import cpw.mods.fml.common.event.FMLPreInitializationEvent;
import cpw.mods.fml.common.network.NetworkMod;
import cpw.mods.fml.common.registry.LanguageRegistry;
@Mod(modid="SpreadingLilypads", name="Spreading Lilypads", version="0.1.0")
@NetworkMod(clientSideRequired=true, serverSideRequired=false)
/**
 * Forge mod entry point. Registers a custom lily pad block and swaps it in
 * for the vanilla waterlily so worldgen pads become spreading ones.
 */
public class SpreadingLilypads {
// The custom block instance and its configurable block ID.
public Block spreadingLilyPad;
public int spreadingLilyPadID;
// The instance of your mod that Forge uses.
@Instance("SpreadingLilypads")
public static SpreadingLilypads instance;
// Says where the client and server 'proxy' code is loaded.
@SidedProxy(clientSide="eggdropsoap.spreadinglilypads.client.ClientProxy", serverSide="eggdropsoap.spreadinglilypads.CommonProxy")
public static CommonProxy proxy;
/** Pre-init: reads the block ID from config and constructs the block. */
@EventHandler // used in 1.6.2
//@PreInit // used in 1.5.2
public void preInit(FMLPreInitializationEvent event)
{
// get configuration
Configuration config = new Configuration(event.getSuggestedConfigurationFile());
config.load();
spreadingLilyPadID = config.getBlock("spreadingLilyPad", 1700).getInt();
config.save();
// initialise spreading block
// func_111022_d is an obfuscated (SRG) name — assumed to set the block's
// texture name to the vanilla "waterlily" texture; confirm against the
// mappings for this Minecraft version.
spreadingLilyPad = (new BlockSpreadingLilyPad(spreadingLilyPadID))
.setHardness(0.0F).setStepSound(Block.soundGrassFootstep)
.setUnlocalizedName("spreadinglily")
.func_111022_d("waterlily")
.setCreativeTab(CreativeTabs.tabDecorations);
}
/** Init: registers renderers and overwrites the vanilla waterlily entry. */
@EventHandler // used in 1.6.2
//@Init // used in 1.5.2
public void load(FMLInitializationEvent event) {
proxy.registerRenderers();
// replace worldgen lilypads with spreading lilies,
// but keeping original blockID for save compatibility
// NOTE: writing directly into Block.blocksList is a deliberate registry
// overwrite so existing saves keep working with the vanilla block ID.
Block.blocksList[Block.waterlily.blockID] = spreadingLilyPad;
}
/** Post-init: nothing to do for this mod. */
@EventHandler // used in 1.6.2
//@PostInit // used in 1.5.2
public void postInit(FMLPostInitializationEvent event) {
// Stub Method
}
}
package se.umu.cs.ads.fildil;
import com.google.protobuf.ByteString;
import se.umu.cs.ads.fildil.messages.Chunk;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
public class VideoProperties {
/**
* Split video into byteArray streams by given time
* @param src video to split to chunks
* @param chunksize size of each chunk
* @return split parts of the video.
* @throws Exception
*/
public static ArrayList<Chunk> toChunks(String src, int chunksize)
throws Exception {
//TODO: maybe we should just return the inputstream? If we are to build a complete byte array in memory, stuff may get ugly...
ArrayList<Chunk> chunks = new ArrayList<Chunk>();
List<String> command = new ArrayList<String>();
command.add("ffmpeg");
command.add("-i");
command.add(src);
command.add("-f");
command.add("asf");
// command.add("-vcodec");
// command.add("mpeg2video");
// command.add("-acodec");
// command.add("mp2");
// command.add("-b:v");
// command.add("3M");
// command.add("-b:a");
// command.add("192k");
// command.add("-muxrate");
// command.add("10M");
// command.add("-f");
// command.add("asf"); //- video format (TEMP!)
command.add("-");
//-segment_format mpegts -segment_list_flags +live
Process p = new ProcessBuilder(command).start();
InputStream in = p.getInputStream();
int n;
byte[] buf = new byte[chunksize];
try {
for(int cnt = 0; (n=in.read(buf)) > -1;cnt++) {
buf = Arrays.copyOfRange(buf,0,n);
ByteString bytes = ByteString.copyFrom(buf);
Chunk chunk = Chunk.newBuilder()
.setBuf(bytes)
.setId(cnt).build();
chunks.add(chunk);
}
} catch (IOException e) {
e.printStackTrace();
}
in = p.getErrorStream();
while((n=in.read(buf)) > -1) {
buf = Arrays.copyOfRange(buf,0,n);
String str = new String(buf, "UTF-8");
System.out.println(str);
}
return chunks;
}
} |
package com.monolith.engine;
import com.monolith.api.Application;
import com.monolith.api.Component;
import com.monolith.api.DebugSettings;
import com.monolith.api.DebugUtility;
import com.monolith.api.GameObject;
import com.monolith.api.Messenger;
import com.monolith.api.Renderer;
import com.monolith.api.Time;
import com.monolith.api.TouchInput;
import com.monolith.api.components.Camera;
import com.monolith.api.external.InputMessenger;
import com.monolith.engine.config.SceneCreator;
import com.monolith.engine.config.model.debug.DebugSettingsModel;
import com.monolith.engine.config.model.initial_scene_state.ISScene;
import com.monolith.engine.config.model.scenes_config.SCScene;
import com.monolith.engine.config.model.scenes_config.SCScenes;
import com.monolith.engine.messaging.InputMessengerInternal;
import com.monolith.engine.messaging.MessengerInternal;
import com.monolith.platform.Platform;
import com.monolith.platform.TouchInputInternal;
import org.simpleframework.xml.Serializer;
import org.simpleframework.xml.core.Persister;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* Represents one engine instance. Engine holds everything together.
*/
public class Engine {
private Platform mPlatform;
private Application mApplication;
private InputMessengerInternal mInputMessengerInternal;
// Application objects
private FullRenderer mRenderer;
private TouchInputInternal mTouchInput;
private MeshManager mMeshManager;
private MessengerInternal mMessenger;
private TimeInternal mTime;
private List<ISystem> mInternalSystems = new ArrayList<>();
private SCScenes mScenesConfig;
// This is only used for searching in scenes List
private SCScene mDummyScene = new SCScene();
private String mCurrentSceneName;
private Scene mCurrentScene;
/**
* Constructs new Engine instance.
*
* @param startSceneName Name of the scene that this engine should show first.
* If null, engine will use the default scene as specified
* in scenes configuration file.
* @param platform {@link com.monolith.platform.Platform} instance provided by the specific platform.
* @param touchInput {@link com.monolith.platform.TouchInputInternal} instance provided by the specific platform.
*/
public Engine(String startSceneName, Platform platform, FullRenderer renderer, TouchInputInternal touchInput) {
mCurrentSceneName = startSceneName;
this.mPlatform = platform;
this.mRenderer = renderer;
this.mTouchInput = touchInput;
mInputMessengerInternal = new InputMessengerInternal();
mMessenger = new MessengerInternal(mInputMessengerInternal);
mTime = new TimeInternal();
mInternalSystems.add(mTime);
mInternalSystems.add(mMessenger);
mInternalSystems.add(mTouchInput);
mApplication = new ApplicationImpl(parseDebugSettingsFile());
mRenderer.setApplication(mApplication);
}
private DebugSettings parseDebugSettingsFile() {
InputStream debugFileInputStream = mPlatform.getAssetFileInputStream(Config.DEBUG_FILE);
if (debugFileInputStream == null) {
return new DebugSettings();
} else {
Serializer serializer = new Persister();
DebugSettingsModel parsedDebugSetings;
try {
parsedDebugSetings = serializer.read(DebugSettingsModel.class, debugFileInputStream);
if (parsedDebugSetings == null) {
throw new IllegalStateException();
}
} catch (Exception e) {
throw new IllegalStateException("Error during retrieval of debug config file.", e);
}
return new DebugSettings(parsedDebugSetings);
}
}
public void swapProvidedObjects(Platform platform, FullRenderer renderer, TouchInputInternal touchInput) {
Camera camera = mRenderer.getCamera();
this.mPlatform = platform;
this.mRenderer = renderer;
mInternalSystems.remove(mTouchInput);
mInternalSystems.add(touchInput);
this.mTouchInput = touchInput;
mRenderer.setApplication(mApplication);
mRenderer.setCamera(camera);
}
// This flag ensures that engine is initialized only once (onStart method)
private boolean mInitialized = false;
// TODO validate all xml files
/**
* Must be called by platform. Initializes the engine.
* It is possible to call this method for the second time on the same object
* however second initialization is not performed.
*/
public void onStart() {
if (mInitialized) {
return;
}
loadScenesConfig();
if (mCurrentSceneName == null) {
mCurrentSceneName = mScenesConfig.defaultSceneName;
}
mApplication.changeScene(mCurrentSceneName);
mInitialized = true;
}
/**
* Loads scenes configuration file. This file contains names of all scenes together with
* paths to the files defining initial state for every scene.
*/
private void loadScenesConfig() {
Serializer serializer = new Persister();
try {
mScenesConfig = serializer.read(SCScenes.class,
mPlatform.getAssetFileInputStream(Config.SCENES_FILE));
// Sort scenes for quicker lookup of scenes during scene loading
Collections.sort(mScenesConfig.scenes);
if (mScenesConfig == null) {
throw new IllegalStateException("Error during retrieval of scenes config file.");
}
} catch (Exception e) {
throw new IllegalStateException("Error during retrieval of scenes config file.", e);
}
}
/**
* Loads scene's initial configuration and constructs {@link com.monolith.engine.Scene} object.
*
* @param sceneName Name of the scene to construct.
* @return Fully constructed {@link com.monolith.engine.Scene} object.
*/
private SceneCreator getScene(String sceneName) {
// Find the scene object in the scenes definition structure
mDummyScene.name = sceneName;
int sceneIndex = Collections.binarySearch(mScenesConfig.scenes, mDummyScene);
if (sceneIndex < 0) {
throw new IllegalStateException("Scene that was requested could not be found.");
}
String configFilePath = mScenesConfig.scenes.get(sceneIndex).sceneFilePath;
ISScene scene;
try {
scene = new Persister().read(ISScene.class,
mPlatform.getAssetFileInputStream(configFilePath));
} catch (Exception e) {
e.printStackTrace();
throw new IllegalStateException("Error during retrieval of scene config file " + configFilePath);
}
SceneCreator sceneCreator = new SceneCreator(mApplication);
sceneCreator.create(scene);
return sceneCreator;
}
/**
* Must be called by platform every frame.
* This call is dispatched to all components which results in scene state update and rendering.
*/
public void onUpdate() {
for (int i = 0; i < mInternalSystems.size(); ++i) {
mInternalSystems.get(i).update();
}
update(mCurrentScene.gameObjects);
mRenderer.onStartRenderingFrame();
postUpdate(mCurrentScene.gameObjects);
}
/**
* Helper recursive method to call {@link Component#update()} on all
* {@link com.monolith.api.Component Components}.
*
* @param gameObjects {@link java.util.List} of top level scene objects.
*/
private void update(List<GameObject> gameObjects) {
for (int i = 0; i < gameObjects.size(); ++i) {
GameObject gameObject = gameObjects.get(i);
for (int j = 0; j < gameObject.components.size(); ++j) {
gameObject.components.get(j).update();
}
update(gameObject.children);
}
}
/**
* Helper recursive method to call {@link Component#postUpdate()} on all
* {@link com.monolith.api.Component Components}.
*
* @param gameObjects {@link java.util.List} of top level scene objects.
*/
private void postUpdate(List<GameObject> gameObjects) {
for (int i = 0; i < gameObjects.size(); ++i) {
GameObject gameObject = gameObjects.get(i);
for (int j = 0; j < gameObject.components.size(); ++j) {
gameObject.components.get(j).postUpdate();
}
postUpdate(gameObject.children);
}
}
/**
* Must be called by platform when this engine instance finishes.
* This call is dispatched to all components.
*/
public void onFinish() {
finish(mCurrentScene.gameObjects);
}
/**
* Helper recursive method to call {@link Component#finish()} on all
* {@link com.monolith.api.Component Components}.
*
* @param gameObjects {@link java.util.List} of top level scene objects.
*/
private void finish(List<GameObject> gameObjects) {
for (int i = 0; i < gameObjects.size(); ++i) {
GameObject gameObject = gameObjects.get(i);
for (int j = 0; j < gameObject.components.size(); ++j) {
gameObject.components.get(j).finish();
}
finish(gameObject.children);
}
}
public InputMessenger getInputMessenger() {
return mInputMessengerInternal.getInputMessenger();
}
private class ApplicationImpl extends Application {
public ApplicationImpl(DebugSettings debugSettings) {
super(debugSettings);
}
@Override
public Renderer getRenderer() {
return mRenderer;
}
@Override
public TouchInput getTouchInput() {
return mTouchInput;
}
@Override
public MeshManager getModelManager() {
return mMeshManager;
}
@Override
public Messenger getMessenger() {
return mMessenger;
}
@Override
public Time getTime() {
return mTime;
}
private DebugUtility mDebugUtility;
@Override
public DebugUtility getDebugUtility() {
if (mDebugUtility == null) {
mDebugUtility = new DebugUtility() {
@Override
public void log(String message) {
mPlatform.log(message);
}
};
}
return mDebugUtility;
}
@Override
public void changeScene(String newSceneName) {
mMeshManager = new MeshManager(mApplication, mPlatform);
SceneCreator newSceneCreator = getScene(newSceneName);
mCurrentScene = newSceneCreator.scene;
mRenderer.setCamera(newSceneCreator.camera);
mCurrentSceneName = newSceneName;
}
@Override
public String getCurrentSceneName() {
return mCurrentSceneName;
}
}
} |
package som.interpreter.nodes;
import som.compiler.ClassDefinition;
import som.interpreter.Invokable;
import som.interpreter.SArguments;
import som.interpreter.objectstorage.FieldAccessorNode.AbstractReadFieldNode;
import som.interpreter.objectstorage.FieldAccessorNode.AbstractWriteFieldNode;
import som.vm.constants.Nil;
import som.vmobjects.SClass;
import som.vmobjects.SObject;
import com.oracle.truffle.api.CompilerAsserts;
import com.oracle.truffle.api.CompilerDirectives;
import com.oracle.truffle.api.Truffle;
import com.oracle.truffle.api.frame.VirtualFrame;
import com.oracle.truffle.api.nodes.DirectCallNode;
import com.oracle.truffle.api.utilities.ValueProfile;
/**
 * Truffle expression nodes implementing object slot (field) access.
 * Structure is partial-evaluation-sensitive ({@code @Child} fields, value
 * profiles, deopt points), so keep the node shapes as-is.
 */
public abstract class SlotAccessNode extends ExpressionNode {

    public SlotAccessNode() { super(null); }

    /** Reads this node's slot from the given receiver object. */
    public abstract Object doRead(VirtualFrame frame, SObject rcvr);

    /** Slot read delegating to a specialized field-read node. */
    public static final class SlotReadNode extends SlotAccessNode {
        // TODO: may be, we can get rid of this completely?? could directly use AbstractReadFieldNode
        // TODO: we only got read support at the moment
        @Child protected AbstractReadFieldNode read;
        // Profiles the receiver's concrete class so the compiler can specialize.
        private final ValueProfile rcvrClass = ValueProfile.createClassProfile();

        public SlotReadNode(final AbstractReadFieldNode read) {
            this.read = read;
        }

        @Override
        public Object doRead(final VirtualFrame frame, final SObject rcvr) {
            return read.read(rcvr);
        }

        @Override
        public Object executeGeneric(final VirtualFrame frame) {
            // Receiver comes from the frame's arguments (SArguments.rcvr).
            return read.read((SObject) rcvrClass.profile(SArguments.rcvr(frame)));
        }
    }

    // TODO: try to remove, should only be used in getCallTarget version of mutator slots
    /** Slot write: stores argument 1 into the receiver's field. */
    public static final class SlotWriteNode extends ExpressionNode {
        private final ValueProfile rcvrClass = ValueProfile.createClassProfile();
        @Child protected AbstractWriteFieldNode write;

        public SlotWriteNode(final AbstractWriteFieldNode write) {
            super(null);
            this.write = write;
        }

        @Override
        public Object executeGeneric(final VirtualFrame frame) {
            // Writes the first argument into the receiver's slot and returns it.
            return write.write((SObject) rcvrClass.profile(SArguments.rcvr(frame)), SArguments.arg(frame, 1));
        }
    }

    /**
     * Access node for class slots: the class object is created lazily on the
     * first read (the slot is initialized to nil) and cached back into the
     * slot via the write node.
     */
    public static final class ClassSlotAccessNode extends SlotAccessNode {
        private final ClassDefinition classDefinition;
        // Call node for running the superclass-resolution invokable; created
        // lazily on the slow path (see instantiateClassObject).
        @Child protected DirectCallNode classObjectInstantiation;
        @Child protected AbstractReadFieldNode read;
        @Child protected AbstractWriteFieldNode write;

        public ClassSlotAccessNode(final ClassDefinition classDefinition,
            final AbstractReadFieldNode read, final AbstractWriteFieldNode write) {
            this.read = read;
            this.write = write;
            this.classDefinition = classDefinition;
        }

        @Override
        public SClass doRead(final VirtualFrame frame, final SObject rcvr) {
            Object cacheValue = read.read(rcvr);
            // check whether cache is initialized with class object
            if (cacheValue == Nil.nilObject) {
                SClass classObject = instantiateClassObject(frame, rcvr);
                write.write(rcvr, classObject);
                return classObject;
            } else {
                assert cacheValue instanceof SClass;
                return (SClass) cacheValue;
            }
        }

        // Slow path: builds the direct call node for class instantiation.
        private void createClassInstantiationCallTarget() {
            CompilerAsserts.neverPartOfCompilation();
            Invokable invokable = classDefinition.getSuperclassResolutionInvokable();
            classObjectInstantiation = insert(Truffle.getRuntime().createDirectCallNode(
                invokable.createCallTarget()));
        }

        private SClass instantiateClassObject(final VirtualFrame frame,
            final SObject rcvr) {
            if (classObjectInstantiation == null) {
                // Deoptimize to create the call node, then continue.
                CompilerDirectives.transferToInterpreterAndInvalidate();
                createClassInstantiationCallTarget();
            }
            SClass superClass = (SClass) classObjectInstantiation.call(frame,
                new Object[] {rcvr});
            SClass classObject = classDefinition.instantiateClass(rcvr, superClass);
            return classObject;
        }

        @Override
        public Object executeGeneric(final VirtualFrame frame) {
            return doRead(frame, (SObject) SArguments.rcvr(frame));
        }
    }
}
package org.terasology.nui.util;
import org.joml.Rectanglef;
import org.joml.Rectanglei;
import org.joml.Vector2f;
import org.joml.Vector2i;
public final class RectUtility {
private RectUtility() {
}
/**
* Create a 2D axis-aligned rectangle at bottom-left anchor position with given size.
*
* The result is guaranteed to be valid. If either width or height are negative an empty rectangle is returned.
* If creating a rectangle of requested size would exceed the integer range the maximal rectangle that still fits
* into the range is returned.
*
* @param minX the x-coordinate of the bottom-left corner
* @param minY the y-coordinate of the bottom-left corner
* @param width the width (x-direction)
* @param height the height (y-direction)
*
* @return a 2D axis-aligned rectangle as specified, or an empty rectangle if either width or height are negative
*/
public static Rectanglei createFromMinAndSize(int minX, int minY, int width, int height) {
final int maxX = NUIMathUtil.addClampAtMax(minX, width);
final int maxY = NUIMathUtil.addClampAtMax(minY, height);
final Rectanglei rect = new Rectanglei(minX, minY, maxX, maxY);
return rect.isValid() ? rect : new Rectanglei();
}
/**
* Create a 2D axis-aligned rectangle at bottom-left anchor position with given size.
*
* The result is guaranteed to be valid. If either width or height are negative an empty rectangle is returned.
* If creating a rectangle of requested size would exceed the integer range the maximal rectangle that still fits
* into the range is returned.
*
* @param min the coordinates of the bottom-left corner
* @param size the size of the rectangle
* @return a 2D axis-aligned rectangle as specified, or an empty rectangle if either width or height are negative
*/
public static Rectanglei createFromMinAndSize(Vector2i min, Vector2i size) {
return createFromMinAndSize(min.x, min.y, size.x, size.y);
}
/**
* Create a 2D axis-aligned rectangle at bottom-left anchor position with given size.
*
* The result is guaranteed to be valid. If either width or height are negative an empty rectangle is returned.
* If creating a rectangle of requested size would exceed the integer range the maximal rectangle that still fits
* into the range is returned.
*
* @param minX the x-coordinate of the bottom-left corner
* @param minY the y-coordinate of the bottom-left corner
* @param width the width (x-direction)
* @param height the height (y-direction)
*
* @return a 2D axis-aligned rectangle as specified, or an empty rectangle if either width or height are negative
*/
public static Rectanglef createFromMinAndSize(float minX, float minY, float width, float height) {
final float maxX = NUIMathUtil.addClampAtMax(minX, width);
final float maxY = NUIMathUtil.addClampAtMax(minY, height);
final Rectanglef rect = new Rectanglef(minX, minY, maxX, maxY);
return rect.isValid() ? rect : new Rectanglef();
}
/**
* Create a 2D axis-aligned rectangle at bottom-left anchor position with given size.
*
* The result is guaranteed to be valid. If either width or height are negative an empty rectangle is returned.
* If creating a rectangle of requested size would exceed the integer range the maximal rectangle that still fits
* into the range is returned.
*
* @param min the coordinates of the bottom-left corner
* @param size the size of the rectangle
* @return a 2D axis-aligned rectangle as specified, or an empty rectangle if either width or height are negative
*/
public static Rectanglef createFromMinAndSize(Vector2f min, Vector2f size) {
return createFromMinAndSize(min.x, min.y, size.x, size.y);
}
public static Rectanglef createFromCenterAndSize(Vector2f center, Vector2f size) {
return createFromCenterAndSize(center.x, center.y, size.x, size.y);
}
public static Rectanglef createFromCenterAndSize(float centerX, float centerY, float width, float height) {
return createFromMinAndSize(centerX - width * 0.5f, centerY - height * 0.5f, width, height);
}
public static boolean isEmpty(Rectanglei rect) {
return rect.lengthX() == 0 || rect.lengthY() == 0;
}
public static boolean isEmpty(Rectanglef rect) {
return rect.lengthX() == 0 || rect.lengthY() == 0;
}
public static boolean contains(Rectanglei rect, Vector2i point) {
return point.x >= rect.minX && point.x < rect.maxX && point.y >= rect.minY && point.y < rect.maxY;
}
public static Rectanglei expand(Rectanglei rect, Vector2i amount) {
return expand(rect, amount.x, amount.y);
}
public static Rectanglei expand(Rectanglei rect, int dx, int dy) {
int minX = rect.minX - dx;
int minY = rect.minY - dy;
int maxX = rect.maxX + dx;
int maxY = rect.maxY + dy;
return new Rectanglei(minX, minY, maxX, maxY);
}
public static Rectanglef expand(Rectanglef rect, Vector2f amount) {
return expand(rect, amount.x, amount.y);
}
public static Rectanglef expand(Rectanglef rect, float dx, float dy) {
float minX = rect.minX - dx;
float minY = rect.minY - dy;
float maxX = rect.maxX + dx;
float maxY = rect.maxY + dy;
return new Rectanglef(minX, minY, maxX, maxY);
}
} |
package org.ocelotds.test;
import org.ocelotds.objects.Result;
import org.ocelotds.test.dataservices.EJBDataService;
import org.ocelotds.test.dataservices.CDIDataService;
import org.ocelotds.test.dataservices.SingletonCDIDataService;
import org.ocelotds.test.dataservices.PojoDataService;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.ocelotds.Constants;
import org.ocelotds.OcelotServices;
import org.ocelotds.messaging.Fault;
import org.ocelotds.messaging.MessageFromClient;
import org.ocelotds.messaging.MessageToClient;
import org.ocelotds.messaging.MessageEvent;
import org.ocelotds.messaging.MessageType;
import org.ocelotds.resolvers.CdiResolver;
import org.ocelotds.spi.DataServiceException;
import org.ocelotds.spi.IDataServiceResolver;
import org.ocelotds.resolvers.DataServiceResolverIdLitteral;
import org.ocelotds.resolvers.EJBResolver;
import org.ocelotds.resolvers.PojoResolver;
import org.ocelotds.test.dataservices.GetValue;
import org.ocelotds.test.dataservices.SessionCDIDataService;
import org.ocelotds.test.dataservices.SessionEJBDataService;
import org.ocelotds.test.dataservices.SingletonEJBDataService;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import javax.enterprise.event.Event;
import javax.enterprise.inject.Any;
import javax.enterprise.inject.Instance;
import javax.inject.Inject;
import javax.websocket.ContainerProvider;
import javax.websocket.DeploymentException;
import javax.websocket.MessageHandler;
import javax.websocket.Session;
import javax.websocket.WebSocketContainer;
import static org.assertj.core.api.Assertions.*;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.api.asset.EmptyAsset;
import org.jboss.shrinkwrap.api.asset.FileAsset;
import org.jboss.shrinkwrap.api.spec.WebArchive;
import org.jboss.shrinkwrap.resolver.api.maven.Maven;
import org.jboss.weld.exceptions.UnsatisfiedResolutionException;
import org.junit.AfterClass;
import org.junit.Assert;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.ocelotds.ArquillianTestCase;
import org.ocelotds.objects.FakeCDI;
import org.ocelotds.objects.IServiceProviderImpl;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*
* @author hhfrancois
*/
//@RunWith(Arquillian.class)
public class OcelotTest extends ArquillianTestCase {
    private final static Logger logger = LoggerFactory.getLogger(OcelotTest.class);
    // Maximum time (ms) to wait for an asynchronous websocket response.
    private final static long TIMEOUT = 1000;
    // Port and context path the deployed test war is served on.
    private final static String PORT = "8282";
    private final static String ctxpath = "ocelot-test";
    // CDI event used to push MessageToClient instances to websocket sessions.
    @Inject
    @MessageEvent
    Event<MessageToClient> wsEvent;
    // All available resolver implementations (CDI, EJB, POJO); selected by id.
    @Inject
    @Any
    private Instance<IDataServiceResolver> resolvers;
    @Inject
    TestTopicAccessControler accessControl;
private IDataServiceResolver getResolver(String type) {
return resolvers.select(new DataServiceResolverIdLitteral(type)).get();
}
private final PojoDataService destination = new PojoDataService();
    /**
     * Arquillian deployment entry point; delegates to {@link #createWarArchive()}.
     *
     * @return the test war deployed into the container
     */
    @Deployment
    public static WebArchive createWarGlassfishArchive() {
        return createWarArchive();
    }
    /**
     * Builds the test war: ocelot-core (with transitive deps) and the ocelot
     * web jar as libraries, the test packages, logging configuration and
     * locale bundles, plus an empty beans.xml to enable CDI.
     *
     * @return the assembled web archive
     */
    public static WebArchive createWarArchive() {
        // Resolve the ocelot-core artifact from the maven repository.
        File[] core = Maven.resolver().resolve("org.ocelotds:ocelot-core:2.3.1-SNAPSHOT").withTransitivity().asFile();
        File logback = new File("src/test/resources/logback.xml");
        File localeFr = new File("src/test/resources/test_fr_FR.properties");
        File localeUs = new File("src/test/resources/test_en_US.properties");
        WebArchive webArchive = ShrinkWrap.create(WebArchive.class, ctxpath + ".war")
                .addAsLibraries(core)
                .addAsLibraries(createOcelotWebJar())
                .addPackages(true, OcelotTest.class.getPackage())
                .addPackages(true, "org.ocelotds.objects")
                // NOTE(review): these two classes are excluded from the archive,
                // presumably to avoid clashing with in-container implementations
                // - confirm against the deployment setup.
                .deleteClass(IServiceProviderImpl.class)
                .deleteClass(FakeCDI.class)
                .addAsResource(new FileAsset(logback), "logback.xml")
                .addAsResource(new FileAsset(localeUs), "test_en_US.properties")
                .addAsResource(new FileAsset(localeFr), "test_fr_FR.properties")
                .addAsWebInfResource(EmptyAsset.INSTANCE, "beans.xml");
        addJSAndProvider("target/test-classes", webArchive, webArchive);
        return webArchive;
    }
    /** Prints a visual separator in the log before the test class runs. */
    @BeforeClass
    public static void setUpClass() {
        System.out.println("===============================================================================================================");
    }
    /** Prints a visual separator in the log after the test class finishes. */
    @AfterClass
    public static void tearDownClass() {
        System.out.println("===============================================================================================================");
    }
public static Session createAndGetSession() {
WebSocketContainer container = ContainerProvider.getWebSocketContainer();
try {
StringBuilder sb = new StringBuilder("ws://localhost:");
sb.append(PORT).append(Constants.SLASH).append(ctxpath).append(Constants.SLASH).append("ocelot-endpoint");
URI uri = new URI(sb.toString());
return container.connectToServer(OcelotClientEnpoint.class, uri);
} catch (URISyntaxException | DeploymentException | IOException ex) {
fail("CONNEXION FAILED " + ex.getMessage());
}
return null;
}
private MessageFromClient getMessageFromClient(String classname, String operation, String... params) {
MessageFromClient messageFromClient = new MessageFromClient();
messageFromClient.setId(UUID.randomUUID().toString());
messageFromClient.setDataService(classname);
messageFromClient.setOperation(operation);
if (params != null) {
messageFromClient.getParameters().addAll(Arrays.asList(params));
}
return messageFromClient;
}
private MessageFromClient getMessageFromClient(Class cls, String operation, String paramNames, String... params) {
MessageFromClient messageFromClient = getMessageFromClient(cls.getName(), operation, params);
messageFromClient.setParameterNames(Arrays.asList(paramNames.split(",")));
return messageFromClient;
}
/**
* Transforme un objet en json, attention aux string
*
* @param obj
* @return
*/
private String getJson(Object obj) {
try {
if (String.class.isInstance(obj)) {
return "\"" + obj + "\"";
}
ObjectMapper mapper = new ObjectMapper();
return mapper.writeValueAsString(obj);
} catch (IOException ex) {
return null;
}
}
private static class CountDownMessageHandler implements MessageHandler.Whole<String> {
private final CountDownLatch lock;
private MessageToClient messageToClient = null;
private String id = null;
CountDownMessageHandler(String id, CountDownLatch lock) {
this.lock = lock;
this.id = id;
}
CountDownMessageHandler(CountDownLatch lock) {
this.lock = lock;
}
@Override
public void onMessage(String message) {
logger.debug("RECEIVE RESPONSE FROM SERVER = {}", message);
MessageToClient messageToClientIn = MessageToClient.createFromJson(message);
if (id == null || id.equals(messageToClientIn.getId())) {
messageToClient = messageToClientIn;
lock.countDown();
}
}
public MessageToClient getMessageToClient() {
return messageToClient;
}
}
private Object getResultAfterSendInSession(Session wsSession, Class clazz, String operation, String... params) {
return getMessageToClientAfterSendInSession(wsSession, clazz.getName(), operation, params).getResponse();
}
private void checkMessageAfterSendInSession(Session session, String className, String operation, String... params) {
// contruction de l'objet command
MessageFromClient messageFromClient = getMessageFromClient(className, operation, params);
// send
session.getAsyncRemote().sendText(messageFromClient.toJson());
}
private MessageToClient getMessageToClientAfterSendInSession(Session session, String classname, String operation, String... params) {
MessageToClient result = null;
try {
long t0 = System.currentTimeMillis();
// construction de la commande
MessageFromClient messageFromClient = getMessageFromClient(classname, operation, params);
// on pose un locker
CountDownLatch lock = new CountDownLatch(1);
CountDownMessageHandler messageHandler = new CountDownMessageHandler(messageFromClient.getId(), lock);
session.addMessageHandler(messageHandler);
// send
session.getAsyncRemote().sendText(messageFromClient.toJson());
// wait le delock ou timeout
boolean await = lock.await(TIMEOUT, TimeUnit.MILLISECONDS);
long t1 = System.currentTimeMillis();
assertTrue("Timeout. waiting " + (t1 - t0) + " ms. Remain " + lock.getCount() + "/1 msgs", await);
// lecture du resultat dans le handler
result = messageHandler.getMessageToClient();
assertNotNull(result);
session.removeMessageHandler(messageHandler);
} catch (InterruptedException ex) {
fail("Bean not reached");
}
return result;
}
    /**
     * Verifies that resolving the same bean class from two different threads
     * yields distinct instances: a marker value (500) set on the worker
     * thread's instance must not be visible on the instance resolved here.
     *
     * NOTE(review): there is no synchronization between the worker setting
     * the value and this thread reading it - the check relies on this thread
     * resolving its own, separate instance; confirm the intended race window.
     */
    private void testDifferentInstancesInDifferentThreads(final Class<? extends GetValue> clazz, String resolverId) {
        final IDataServiceResolver resolver = getResolver(resolverId);
        try {
            ExecutorService executorService = Executors.newSingleThreadExecutor();
            executorService.execute(new CallRunnable(clazz, resolver));
            executorService.shutdown();
            GetValue bean2 = resolver.resolveDataService(clazz);
            assertNotNull(bean2);
            Assert.assertNotEquals("two instances of session bean should be differents", bean2.getValue(), 500);
        } catch (DataServiceException ex) {
            fail(resolverId + " bean not reached");
        }
    }
private static class CallRunnable implements Runnable {
private final Class<? extends GetValue> clazz;
private final IDataServiceResolver resolver;
public CallRunnable(Class<? extends GetValue> clazz, IDataServiceResolver resolver) {
this.clazz = clazz;
this.resolver = resolver;
}
@Override
public void run() {
try {
GetValue bean1 = resolver.resolveDataService(clazz);
bean1.setValue(500);
Thread.sleep(1000);
assertNotNull(bean1);
assertTrue(clazz.isInstance(bean1));
} catch (DataServiceException | InterruptedException ex) {
}
}
}
private void testInstanceRequestScope(Class clazz, String resolverId) {
IDataServiceResolver resolver = getResolver(resolverId);
try {
Object bean1 = resolver.resolveDataService(clazz);
assertNotNull(bean1);
assertTrue(clazz.isInstance(bean1));
Object bean2 = resolver.resolveDataService(clazz);
assertNotNull(bean2);
assertFalse("two instances of request bean should be differents", bean1.equals(bean2));
} catch (DataServiceException ex) {
fail(resolverId + " bean not reached");
}
}
public void testResultRequestScope(Class clazz) {
// premiere requete
Object firstResult = null;
try (Session wssession = createAndGetSession()) {
firstResult = getResultAfterSendInSession(wssession, clazz, "getValue");
} catch (IOException exception) {
}
// deuxieme requetesur une autre session
Object secondResult = null;
try (Session wssession = createAndGetSession()) {
secondResult = getResultAfterSendInSession(wssession, clazz, "getValue");
} catch (IOException exception) {
}
// controle
Assert.assertNotEquals("two instances of request bean should be differents", firstResult, secondResult); // doit etre different
}
private void testInstanceSingletonScope(Class clazz, String resolverId) {
IDataServiceResolver resolver = getResolver(resolverId);
try {
Object singleton1 = resolver.resolveDataService(clazz);
assertNotNull(singleton1);
Object singleton2 = resolver.resolveDataService(clazz);
assertNotNull(singleton2);
assertEquals(singleton1, singleton2);
} catch (DataServiceException ex) {
fail(resolverId + " bean not reached");
}
}
public void testResultSingletonScope(Class clazz) {
// premiere requete
Object firstResult = null;
try (Session wssession = createAndGetSession()) {
firstResult = getResultAfterSendInSession(wssession, clazz, "getValue");
} catch (IOException exception) {
}
// deuxieme requete sur autre session
Object secondResult = null;
try (Session wssession = createAndGetSession()) {
secondResult = getResultAfterSendInSession(wssession, clazz, "getValue");
} catch (IOException exception) {
}
// controle, doit etre identique
assertEquals(firstResult, secondResult);
}
private void testInstanceSessionScope(Class clazz, String resolverId) {
IDataServiceResolver resolver = getResolver(resolverId);
try {
Object bean1 = resolver.resolveDataService(clazz);
assertNotNull(bean1);
assertTrue(clazz.isInstance(bean1));
Object bean2 = resolver.resolveDataService(clazz);
assertNotNull(bean2);
assertFalse("two instances of session bean should be differents", bean1.equals(bean2));
} catch (DataServiceException ex) {
fail(resolverId + " bean not reached");
}
}
private void testResultSessionScope(Class clazz) {
// premiere requete
Object firstResult = null;
Object secondResult = null;
try (Session wssession = createAndGetSession()) {
firstResult = getResultAfterSendInSession(wssession, clazz, "getValue");
// deuxieme requete
secondResult = getResultAfterSendInSession(wssession, clazz, "getValue");
} catch (IOException exception) {
}
// controle : sur la meme session cela doit se comporter comme un singleton, donc meme resultat
assertEquals(secondResult, firstResult);
// troisiement appel sur une session differente
Object thirdResult = null;
try (Session wssession = createAndGetSession()) {
thirdResult = getResultAfterSendInSession(wssession, clazz, "getValue");
} catch (IOException exception) {
}
// controle : sur != session cela doit etre different
Assert.assertNotEquals(secondResult, thirdResult);
}
/**
 * Opens an HTTP connection to the given web resource on localhost, dumps a
 * few response headers to stdout and asserts the resource answers 200 OK.
 *
 * @param resource path of the resource, relative to the context root
 * @param min when false, appends the minify=false query parameter
 * @return the opened connection, response code already verified
 * @throws MalformedURLException if the built URL is invalid
 * @throws IOException on connection failure
 */
public HttpURLConnection getConnectionForResource(String resource, boolean min) throws MalformedURLException, IOException {
    StringBuilder target = new StringBuilder("http://localhost:");
    target.append(PORT).append(Constants.SLASH).append(ctxpath).append(Constants.SLASH).append(resource);
    if (!min) {
        // Explicitly ask the server not to minify this resource.
        target.append("?").append(Constants.MINIFY_PARAMETER).append("=false");
    }
    HttpURLConnection connection = (HttpURLConnection) new URL(target.toString()).openConnection();
    System.out.println("Content-type: " + connection.getContentType());
    System.out.println("Content-encoding: " + connection.getContentEncoding());
    System.out.println("Date: " + new Date(connection.getDate()));
    System.out.println("Last modified: " + new Date(connection.getLastModified()));
    System.out.println("Expiration date: " + new Date(connection.getExpiration()));
    System.out.println("Content-length: " + connection.getContentLength());
    assertEquals("'" + target.toString() + "' is unreachable", 200, connection.getResponseCode());
    return connection;
}
/**
 * Checks that requesting the minified ocelot.js yields a strictly smaller
 * payload than the unminified variant.
 */
//@Test
public void testJavascriptCoreMinification() {
    System.out.println("testJavascriptCoreMinification");
    String resource = Constants.OCELOT + Constants.JS;
    HttpURLConnection connection1 = null;
    HttpURLConnection connection2 = null;
    try {
        connection1 = getConnectionForResource(resource, true);
        // NOTE: the original used InputStream.available(), which only reports
        // bytes readable without blocking, not the total content size. Read
        // each stream to exhaustion to obtain reliable sizes.
        int minlength = countBytes(connection1.getInputStream());
        connection2 = getConnectionForResource(resource, false);
        int length = countBytes(connection2.getInputStream());
        assertTrue("Minification of " + resource + " didn't work, same size of file magnifier : " + length + " / minifer : " + minlength, minlength < length);
    } catch (IOException e) {
        fail(e.getMessage());
    } finally {
        if (connection1 != null) {
            connection1.disconnect();
        }
        if (connection2 != null) {
            connection2.disconnect();
        }
    }
}

/**
 * Reads a stream to exhaustion and returns the total number of bytes.
 *
 * @param in the stream to drain (not closed here)
 * @return total byte count
 * @throws IOException on read failure
 */
private static int countBytes(InputStream in) throws IOException {
    int total = 0;
    byte[] buffer = new byte[4096];
    int read;
    while ((read = in.read(buffer)) != -1) {
        total += read;
    }
    return total;
}
/**
 * Debug helper: copies the given stream to stdout line by line (UTF-8).
 * Best effort: an I/O error is reported on stderr instead of propagating.
 *
 * @param input the stream to dump; closed on return via the wrapping reader
 */
private void traceFile(InputStream input) {
    try (BufferedReader in = new BufferedReader(new InputStreamReader(input, Constants.UTF_8))) {
        String inputLine;
        while ((inputLine = in.readLine()) != null) {
            System.out.write(inputLine.getBytes(Constants.UTF_8));
            System.out.write(Constants.BACKSLASH_N.getBytes(Constants.UTF_8));
        }
    } catch (IOException e) {
        // Previously swallowed silently; surface the failure for debugging.
        System.err.println("traceFile failed: " + e.getMessage());
    }
}
/**
 * Ensures the generated ocelot.js had its context-path placeholder replaced:
 * no line may still contain the raw placeholder, and at least one line must
 * contain the actual context path.
 */
//@Test
public void testJavascriptGeneration() {
    System.out.println("testJavascriptCoreGeneration");
    try {
        HttpURLConnection connection = getConnectionForResource(Constants.OCELOT + Constants.JS, false);
        boolean replaced = false;
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(connection.getInputStream(), Constants.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                assertFalse("Dynamic replacement of " + Constants.CTXPATH + " doen't work", line.contains(Constants.CTXPATH));
                if (line.contains(ctxpath)) {
                    replaced = true;
                }
            }
        }
        assertTrue("Dynamic replacement of context doen't work", replaced);
    } catch (Exception e) {
        fail(e.getMessage());
    }
}
/** Asking for an unknown resolver id must raise UnsatisfiedResolutionException. */
//@Test(expected = UnsatisfiedResolutionException.class)
public void testDataServiceExceptionOnUnknownResolver() {
    System.out.println("failResolveDataService");
    getResolver("foo");
}

/** The EJB resolver id must yield an EJBResolver instance. */
//@Test
public void testGetResolverEjb() {
    System.out.println("getResolverEjb");
    IDataServiceResolver ejbResolver = getResolver(Constants.Resolver.EJB);
    assertNotNull(ejbResolver);
    assertTrue(EJBResolver.class.isInstance(ejbResolver));
}

/** EJB request scope: distinct threads must receive distinct instances. */
//@Test
public void testGetEjbs() {
    System.out.println("getEjbs");
    testDifferentInstancesInDifferentThreads(EJBDataService.class, Constants.Resolver.EJB);
}
/** Stateful (session-scoped) EJB: each resolution yields a new instance. */
//@Test
public void testGetEJBStatefull() {
    System.out.println("getEJBSession");
    // The unused local "resolverId" and the commented-out call were removed.
    testInstanceSessionScope(SessionEJBDataService.class, Constants.Resolver.EJB);
}
/** Session-scoped EJB result: same value within a session, different across sessions. */
//@Test
public void testGetResultEJBSession() {
    System.out.println("getResultEJBSession");
    testResultSessionScope(SessionEJBDataService.class);
}

/** Singleton EJB: every resolution returns the same instance. */
//@Test
public void testGetEJBSingleton() {
    System.out.println("getEJBSingleton");
    testInstanceSingletonScope(SingletonEJBDataService.class, Constants.Resolver.EJB);
}

/** Singleton EJB result: identical value across sessions. */
//@Test
public void testGetResultEjbSingleton() {
    System.out.println("getResultEjbSingleton");
    testResultSingletonScope(SingletonEJBDataService.class);
}
/** The POJO resolver id must yield a PojoResolver instance. */
//@Test
public void testGetResolverPojo() {
    System.out.println("getResolverPojo");
    IDataServiceResolver pojoResolver = getResolver(Constants.Resolver.POJO);
    assertNotNull(pojoResolver);
    assertTrue(PojoResolver.class.isInstance(pojoResolver));
}

/** The POJO resolver must materialize a plain PojoDataService instance. */
//@Test
public void testGetPojo() {
    System.out.println("getPojo");
    IDataServiceResolver pojoResolver = getResolver(Constants.Resolver.POJO);
    try {
        PojoDataService service = pojoResolver.resolveDataService(PojoDataService.class);
        assertNotNull(service);
        assertEquals(PojoDataService.class, service.getClass());
    } catch (DataServiceException ex) {
        fail("Pojo not reached");
    }
}

/** The CDI resolver id must yield a CdiResolver instance. */
//@Test
public void testGetResolverCdi() {
    System.out.println("getResolverCdi");
    IDataServiceResolver cdiResolver = getResolver(Constants.Resolver.CDI);
    assertNotNull(cdiResolver);
    assertTrue(CdiResolver.class.isInstance(cdiResolver));
}
/** CDI request scope: distinct requests must receive distinct instances. */
//@Test
public void testGetCdiBeans() {
    System.out.println("getCdiBeans");
    testInstanceRequestScope(CDIDataService.class, Constants.Resolver.CDI);
}

/** CDI request-scoped result semantics. */
//@Test
public void testGetResultCdiBeans() {
    System.out.println("getResultCdiBeans");
    testResultRequestScope(CDIDataService.class);
}

/** A resolved CDI bean must be container-managed (its BeanManager is injected). */
//@Test
public void testGetCdiBeanIsManaged() {
    System.out.println("getCdiBeanIsManaged");
    IDataServiceResolver cdiResolver = getResolver(Constants.Resolver.CDI);
    try {
        CDIDataService cdiBean = cdiResolver.resolveDataService(CDIDataService.class);
        assertNotNull(cdiBean);
        assertEquals(CDIDataService.class, cdiBean.getClass());
        // Managed beans get a BeanManager injected by the container.
        assertNotNull(cdiBean.getBeanManager());
    } catch (DataServiceException ex) {
        fail("Cdi bean not reached");
    }
}

/** CDI session scope: a fresh instance per resolution. */
//@Test
public void testGetCdiBeanSession() {
    System.out.println("getCdiBeanSession");
    testInstanceSessionScope(SessionCDIDataService.class, Constants.Resolver.CDI);
}

/** CDI session-scoped result semantics. */
//@Test
public void testGetResultCdiBeanSession() {
    System.out.println("getResultCdiBeanSession");
    testResultSessionScope(SessionCDIDataService.class);
}

/** CDI application/singleton scope: same instance every time. */
//@Test
public void testGetCdiBeanSingleton() {
    System.out.println("getCdiBeanSingleton");
    testInstanceSingletonScope(SingletonCDIDataService.class, Constants.Resolver.CDI);
}

/** CDI singleton result semantics. */
//@Test
public void testGetResultCdiBeanSingleton() {
    System.out.println("getResultCdiBeanSingleton");
    testResultSingletonScope(SingletonCDIDataService.class);
}
/** Resolving a POJO data service directly must return the exact class. */
//@Test
public void testResolvePojoDataService() {
    System.out.println("resolveDataService");
    try {
        IDataServiceResolver pojoResolver = getResolver(Constants.Resolver.POJO);
        Object service = pojoResolver.resolveDataService(PojoDataService.class);
        assertNotNull(service);
        assertEquals(PojoDataService.class, service.getClass());
    } catch (DataServiceException ex) {
        fail(ex.getMessage());
    }
}
/**
 * MessageToClient.createFromJson must parse a RESULT message carrying an
 * integer response: type, id, deadline and response are all recovered.
 */
//@Test
public void testMessageIntResultToClientCreator() {
    System.out.println("MessageToClient.createFromJson");
    String uuid = UUID.randomUUID().toString();
    Object expectedResult = 1;
    String json = String.format("{\"%s\":\"%s\",\"%s\":\"%s\",\"%s\":%s,\"%s\":%s}",
            Constants.Message.TYPE, MessageType.RESULT, Constants.Message.ID, uuid, Constants.Message.DEADLINE, 5, Constants.Message.RESPONSE, expectedResult);
    MessageToClient result = MessageToClient.createFromJson(json);
    // The type assertion was duplicated in the original; asserted once here.
    assertEquals(MessageType.RESULT, result.getType());
    assertEquals(uuid, result.getId());
    assertEquals(5, result.getDeadline());
    assertEquals("" + expectedResult, result.getResponse());
}
/**
 * MessageToClient.createFromJson must parse a MESSAGE (topic) payload:
 * type, id, deadline and response are all recovered.
 */
//@Test
public void testMessageToTopicCreator() {
    System.out.println("MessageToTopic.createFromJson");
    String messageId = UUID.randomUUID().toString();
    Object payload = 1;
    String json = String.format("{\"%s\":\"%s\",\"%s\":\"%s\",\"%s\":%s,\"%s\":%s}",
            Constants.Message.TYPE, MessageType.MESSAGE, Constants.Message.ID, messageId, Constants.Message.DEADLINE, 5, Constants.Message.RESPONSE, payload);
    MessageToClient parsed = MessageToClient.createFromJson(json);
    assertEquals(MessageType.MESSAGE, parsed.getType());
    assertEquals(messageId, parsed.getId());
    assertEquals(5, parsed.getDeadline());
    assertEquals("" + payload, parsed.getResponse());
}
/**
 * MessageToClient.createFromJson must parse a RESULT message carrying a
 * JSON string response without altering the raw JSON value.
 */
//@Test
public void testMessageStringResultToClientCreator() {
    System.out.println("MessageToClient.createFromJson");
    String uuid = UUID.randomUUID().toString();
    String expectedResultJS = "\"foo\"";
    String json = String.format("{\"%s\":\"%s\",\"%s\":\"%s\",\"%s\":%s,\"%s\":%s}",
            Constants.Message.TYPE, MessageType.RESULT, Constants.Message.ID, uuid, Constants.Message.DEADLINE, 10, Constants.Message.RESPONSE, expectedResultJS);
    MessageToClient result = MessageToClient.createFromJson(json);
    // The type assertion was duplicated in the original; asserted once here.
    assertEquals(MessageType.RESULT, result.getType());
    assertEquals(uuid, result.getId());
    assertEquals(10, result.getDeadline());
    assertEquals(expectedResultJS, result.getResponse());
}
/**
 * MessageToClient.createFromJson must parse a RESULT message carrying a
 * JSON object response, preserving the raw object text.
 */
//@Test
public void testMessageObjectResultToClientCreator() {
    System.out.println("MessageToClient.createFromJson");
    String uuid = UUID.randomUUID().toString();
    Object expectedResult = "{\"integer\":5,\"foo\":\"foo\"}";
    String json = String.format("{\"%s\":\"%s\",\"%s\":\"%s\",\"%s\":%s,\"%s\":%s}",
            Constants.Message.TYPE, MessageType.RESULT, Constants.Message.ID, uuid, Constants.Message.DEADLINE, 20, Constants.Message.RESPONSE, expectedResult);
    MessageToClient result = MessageToClient.createFromJson(json);
    // The type assertion was duplicated in the original; asserted once here.
    assertEquals(MessageType.RESULT, result.getType());
    assertEquals(uuid, result.getId());
    assertEquals(20, result.getDeadline());
    assertEquals(expectedResult, result.getResponse());
}
/**
 * MessageToClient.createFromJson must parse a FAULT message: the response
 * deserializes back to a Fault carrying the original exception class name.
 */
//@Test
public void testMessageFaultToClientCreator() {
    System.out.println("MessageToClient.createFromJson");
    String uuid = UUID.randomUUID().toString();
    Fault f = new Fault(new NullPointerException("Message d'erreur"), 0);
    String json = String.format("{\"%s\":\"%s\",\"%s\":\"%s\",\"%s\":%s,\"%s\":%s}",
            Constants.Message.TYPE, MessageType.FAULT, Constants.Message.ID, uuid, Constants.Message.DEADLINE, 0, Constants.Message.RESPONSE, f.toJson());
    MessageToClient result = MessageToClient.createFromJson(json);
    // The type assertion was duplicated in the original; asserted once here.
    assertEquals(MessageType.FAULT, result.getType());
    assertEquals(uuid, result.getId());
    assertEquals(0, result.getDeadline());
    assertEquals(f.getClassname(), ((Fault) result.getResponse()).getClassname());
}
/**
 * MessageFromClient.createFromJson must parse a client call message: id,
 * target data service, operation name and the raw JSON of each argument.
 */
//@Test
public void testMessageFromClientCreator() {
System.out.println("MessageFromClient.createFromJson");
String uuid = UUID.randomUUID().toString();
// Two sample arguments serialized to JSON: a Result and a Map of Results.
String resultJS = getJson(new Result(6));
String mapResultJS = getJson(destination.getMapResult());
String operation = "methodWithResult";
// Build the full call envelope: id, dataservice, operation, argument
// names ("r", "m") and argument values as raw JSON.
String json = String.format("{\"%s\":\"%s\",\"%s\":\"%s\",\"%s\":\"%s\",\"%s\":[\"%s\",\"%s\"],\"%s\":[%s,%s]}",
Constants.Message.ID, uuid, Constants.Message.DATASERVICE, PojoDataService.class.getName(), Constants.Message.OPERATION, operation,
Constants.Message.ARGUMENTNAMES, "r", "m", Constants.Message.ARGUMENTS, resultJS, mapResultJS);
MessageFromClient result = MessageFromClient.createFromJson(json);
assertEquals(uuid, result.getId());
assertEquals(PojoDataService.class.getName(), result.getDataService());
assertEquals(operation, result.getOperation());
// Parameters come back as raw JSON strings, in declaration order.
List<String> parameters = result.getParameters();
assertEquals(resultJS, parameters.get(0));
assertEquals(mapResultJS, parameters.get(1));
}
/**
 * Checks locale handling through OcelotServices: the default locale is
 * en_US, switching to fr_FR affects subsequent calls on the same session,
 * and a brand new session falls back to the default locale.
 */
//@Test
public void testLocale() {
    Class clazz = OcelotServices.class;
    try (Session wssession = createAndGetSession()) {
        // By default the locale is US.
        String methodName = "getLocale";
        System.out.println(methodName);
        MessageToClient messageToClient = getMessageToClientAfterSendInSession(wssession, clazz.getName(), methodName);
        assertThat(messageToClient.getType()).isEqualTo(MessageType.RESULT);
        Object result = messageToClient.getResponse();
        assertThat(result).isEqualTo("{\"country\":\"US\",\"language\":\"en\"}");
        methodName = "getLocaleHello";
        System.out.println(methodName);
        messageToClient = getMessageToClientAfterSendInSession(wssession, EJBDataService.class.getName(), methodName, getJson("hhfrancois"));
        assertThat(messageToClient.getType()).isEqualTo(MessageType.RESULT);
        result = messageToClient.getResponse();
        assertThat(result).isEqualTo("\"Hello hhfrancois\"");
        // Switch the session to French.
        methodName = "setLocale";
        System.out.println(methodName);
        messageToClient = getMessageToClientAfterSendInSession(wssession, clazz.getName(), methodName, "{\"country\":\"FR\",\"language\":\"fr\"}");
        assertThat(messageToClient.getType()).isEqualTo(MessageType.RESULT);
        methodName = "getLocale";
        System.out.println(methodName);
        messageToClient = getMessageToClientAfterSendInSession(wssession, clazz.getName(), methodName);
        assertThat(messageToClient.getType()).isEqualTo(MessageType.RESULT);
        result = messageToClient.getResponse();
        assertThat(result).isEqualTo("{\"country\":\"FR\",\"language\":\"fr\"}");
        methodName = "getLocaleHello";
        System.out.println(methodName);
        messageToClient = getMessageToClientAfterSendInSession(wssession, EJBDataService.class.getName(), methodName, getJson("hhfrancois"));
        assertThat(messageToClient.getType()).isEqualTo(MessageType.RESULT);
        result = messageToClient.getResponse();
        assertThat(result).isEqualTo("\"Bonjour hhfrancois\"");
    } catch (IOException exception) {
        // Do not swallow: a transport failure must fail the test, not skip it.
        fail(exception.getMessage());
    }
    // A brand new session must be back on the default locale.
    try (Session wssession = createAndGetSession()) {
        String methodName = "getLocale";
        System.out.println(methodName);
        MessageToClient messageToClient = getMessageToClientAfterSendInSession(wssession, clazz.getName(), methodName);
        assertThat(messageToClient.getType()).isEqualTo(MessageType.RESULT);
        Object result = messageToClient.getResponse();
        assertThat(result).isEqualTo("{\"country\":\"US\",\"language\":\"en\"}");
    } catch (IOException exception) {
        fail(exception.getMessage());
    }
}
/** Calling an unknown operation must come back as a FAULT message. */
//@Test
public void testMethodUnknow() {
    Class clazz = PojoDataService.class;
    String methodName = "getUnknownMethod";
    System.out.println(methodName);
    try (Session wssession = createAndGetSession()) {
        MessageToClient messageToClient = getMessageToClientAfterSendInSession(wssession, clazz.getName(), methodName);
        assertEquals(MessageType.FAULT, messageToClient.getType());
        Object fault = messageToClient.getResponse();
        assertNotNull(fault);
    } catch (IOException exception) {
        // Do not swallow: a transport failure must fail the test, not skip it.
        fail(exception.getMessage());
    }
}

/** A void operation must yield a RESULT message with a "null" response. */
//@Test
public void testMethodNoResult() {
    Class clazz = PojoDataService.class;
    String methodName = "getVoid";
    System.out.println(methodName);
    try (Session wssession = createAndGetSession()) {
        MessageToClient messageToClient = getMessageToClientAfterSendInSession(wssession, clazz.getName(), methodName);
        assertEquals(MessageType.RESULT, messageToClient.getType());
        assertEquals("null", messageToClient.getResponse());
    } catch (IOException exception) {
        fail(exception.getMessage());
    }
}
/** Round-trip of a String result: remote call matches local call serialized to JSON. */
//@Test
public void testGetString() {
    Class clazz = PojoDataService.class;
    String methodName = "getString";
    System.out.println(methodName);
    try (Session wssession = createAndGetSession()) {
        MessageToClient messageToClient = getMessageToClientAfterSendInSession(wssession, clazz.getName(), methodName);
        assertEquals(MessageType.RESULT, messageToClient.getType());
        Object result = messageToClient.getResponse();
        assertEquals(getJson(destination.getString()), result);
    } catch (IOException exception) {
        // Do not swallow: a transport failure must fail the test, not skip it.
        fail(exception.getMessage());
    }
}

/** Round-trip of a primitive int result. */
//@Test
public void testGetNum() {
    Class clazz = PojoDataService.class;
    String methodName = "getNum";
    System.out.println(methodName);
    try (Session wssession = createAndGetSession()) {
        MessageToClient messageToClient = getMessageToClientAfterSendInSession(wssession, clazz.getName(), methodName);
        assertEquals(MessageType.RESULT, messageToClient.getType());
        Object result = messageToClient.getResponse();
        assertEquals(getJson(destination.getNum()), result);
    } catch (IOException exception) {
        fail(exception.getMessage());
    }
}

/** Round-trip of a boxed Number result. */
//@Test
public void testGetNumber() {
    Class clazz = PojoDataService.class;
    String methodName = "getNumber";
    System.out.println(methodName);
    try (Session wssession = createAndGetSession()) {
        MessageToClient messageToClient = getMessageToClientAfterSendInSession(wssession, clazz.getName(), methodName);
        assertEquals(MessageType.RESULT, messageToClient.getType());
        Object result = messageToClient.getResponse();
        assertEquals(getJson(destination.getNumber()), result);
    } catch (IOException exception) {
        fail(exception.getMessage());
    }
}

/** Round-trip of a primitive boolean result. */
//@Test
public void testGetBool() {
    Class clazz = PojoDataService.class;
    String methodName = "getBool";
    System.out.println(methodName);
    try (Session wssession = createAndGetSession()) {
        MessageToClient messageToClient = getMessageToClientAfterSendInSession(wssession, clazz.getName(), methodName);
        assertEquals(MessageType.RESULT, messageToClient.getType());
        Object result = messageToClient.getResponse();
        assertEquals(getJson(destination.getBool()), result);
    } catch (IOException exception) {
        fail(exception.getMessage());
    }
}

/** Round-trip of a boxed Boolean result. */
//@Test
public void testGetBoolean() {
    Class clazz = PojoDataService.class;
    String methodName = "getBoolean";
    System.out.println(methodName);
    try (Session wssession = createAndGetSession()) {
        MessageToClient messageToClient = getMessageToClientAfterSendInSession(wssession, clazz.getName(), methodName);
        assertEquals(MessageType.RESULT, messageToClient.getType());
        Object result = messageToClient.getResponse();
        assertEquals(getJson(destination.getBoolean()), result);
    } catch (IOException exception) {
        fail(exception.getMessage());
    }
}
/**
 * The remote getDate must return a timestamp strictly between a "before"
 * mark and an "after" mark taken locally (1s sleeps guarantee separation).
 */
//@Test
public void testGetDate() {
    System.out.println("getDate");
    final Date before = new Date();
    System.out.println("BEFORE = " + before.getTime());
    try (Session wssession = createAndGetSession()) {
        Thread.sleep(1000);
        MessageToClient messageToClient = getMessageToClientAfterSendInSession(wssession, PojoDataService.class.getName(), "getDate");
        assertEquals(MessageType.RESULT, messageToClient.getType());
        Object result = messageToClient.getResponse();
        assertNotNull(result);
        Date res = new Date(Long.parseLong(result.toString()));
        System.out.println("RES = " + res.getTime());
        assertTrue(before.before(res));
        Thread.sleep(1000);
        Date after = new Date();
        System.out.println("AFTER = " + after.getTime());
        assertTrue(after.after(res));
    } catch (IOException exception) {
        // Do not swallow: a transport failure must fail the test, not skip it.
        fail(exception.getMessage());
    } catch (InterruptedException ex) {
        fail(ex.getMessage());
    }
}
/** Round-trip of a custom Result object. */
//@Test
public void testGetResult() {
    Class clazz = PojoDataService.class;
    String methodName = "getResult";
    System.out.println(methodName);
    try (Session wssession = createAndGetSession()) {
        MessageToClient messageToClient = getMessageToClientAfterSendInSession(wssession, clazz.getName(), methodName);
        assertEquals(MessageType.RESULT, messageToClient.getType());
        Object result = messageToClient.getResponse();
        assertEquals(getJson(destination.getResult()), result);
    } catch (IOException exception) {
        // Do not swallow: a transport failure must fail the test, not skip it.
        fail(exception.getMessage());
    }
}

/** Round-trip of a Collection&lt;Integer&gt;. */
//@Test
public void testGetCollectionInteger() {
    Class clazz = PojoDataService.class;
    String methodName = "getCollectionInteger";
    System.out.println(methodName);
    try (Session wssession = createAndGetSession()) {
        MessageToClient messageToClient = getMessageToClientAfterSendInSession(wssession, clazz.getName(), methodName);
        assertEquals(MessageType.RESULT, messageToClient.getType());
        Object result = messageToClient.getResponse();
        assertEquals(getJson(destination.getCollectionInteger()), result);
    } catch (IOException exception) {
        fail(exception.getMessage());
    }
}

/** Round-trip of a Collection&lt;Result&gt;. */
//@Test
public void testGetCollectionResult() {
    Class clazz = PojoDataService.class;
    String methodName = "getCollectionResult";
    System.out.println(methodName);
    try (Session wssession = createAndGetSession()) {
        MessageToClient messageToClient = getMessageToClientAfterSendInSession(wssession, clazz.getName(), methodName);
        assertEquals(MessageType.RESULT, messageToClient.getType());
        Object result = messageToClient.getResponse();
        assertEquals(getJson(destination.getCollectionResult()), result);
    } catch (IOException exception) {
        fail(exception.getMessage());
    }
}

/** Round-trip of a nested Collection&lt;Collection&lt;Result&gt;&gt;. */
//@Test
public void testGetCollectionOfCollectionResult() {
    Class clazz = PojoDataService.class;
    String methodName = "getCollectionOfCollectionResult";
    System.out.println(methodName);
    try (Session wssession = createAndGetSession()) {
        MessageToClient messageToClient = getMessageToClientAfterSendInSession(wssession, clazz.getName(), methodName);
        assertEquals(MessageType.RESULT, messageToClient.getType());
        Object result = messageToClient.getResponse();
        assertEquals(getJson(destination.getCollectionOfCollectionResult()), result);
    } catch (IOException exception) {
        fail(exception.getMessage());
    }
}

/** Round-trip of a Map&lt;String, Result&gt;. */
//@Test
public void testGetMapResult() {
    Class clazz = PojoDataService.class;
    String methodName = "getMapResult";
    System.out.println(methodName);
    try (Session wssession = createAndGetSession()) {
        MessageToClient messageToClient = getMessageToClientAfterSendInSession(wssession, clazz.getName(), methodName);
        assertEquals(MessageType.RESULT, messageToClient.getType());
        Object result = messageToClient.getResponse();
        assertEquals(getJson(destination.getMapResult()), result);
    } catch (IOException exception) {
        fail(exception.getMessage());
    }
}
/** Remote call with a primitive int argument matches the local invocation. */
//@Test
public void testMethodWithNum() {
    Class clazz = PojoDataService.class;
    String methodName = "methodWithNum";
    System.out.println(methodName);
    try (Session wssession = createAndGetSession()) {
        MessageToClient messageToClient = getMessageToClientAfterSendInSession(wssession, clazz.getName(), methodName, getJson(1));
        assertEquals(MessageType.RESULT, messageToClient.getType());
        Object result = messageToClient.getResponse();
        assertEquals(getJson(destination.methodWithNum(1)), result);
    } catch (IOException exception) {
        // Do not swallow: a transport failure must fail the test, not skip it.
        fail(exception.getMessage());
    }
}

/** Remote call with a boxed Number argument. */
//@Test
public void testMethodWithNumber() {
    Class clazz = PojoDataService.class;
    String methodName = "methodWithNumber";
    System.out.println(methodName);
    try (Session wssession = createAndGetSession()) {
        MessageToClient messageToClient = getMessageToClientAfterSendInSession(wssession, clazz.getName(), methodName, getJson(2));
        assertEquals(MessageType.RESULT, messageToClient.getType());
        Object result = messageToClient.getResponse();
        assertEquals(getJson(destination.methodWithNumber(2)), result);
    } catch (IOException exception) {
        fail(exception.getMessage());
    }
}

/** Remote call with a primitive boolean argument. */
//@Test
public void testMethodWithBool() {
    Class clazz = PojoDataService.class;
    String methodName = "methodWithBool";
    System.out.println(methodName);
    try (Session wssession = createAndGetSession()) {
        MessageToClient messageToClient = getMessageToClientAfterSendInSession(wssession, clazz.getName(), methodName, getJson(true));
        assertEquals(MessageType.RESULT, messageToClient.getType());
        Object result = messageToClient.getResponse();
        assertEquals(getJson(destination.methodWithBool(true)), result);
    } catch (IOException exception) {
        fail(exception.getMessage());
    }
}

/** Remote call with a boxed Boolean argument. */
//@Test
public void testMethodWithBoolean() {
    Class clazz = PojoDataService.class;
    String methodName = "methodWithBoolean";
    System.out.println(methodName);
    try (Session wssession = createAndGetSession()) {
        MessageToClient messageToClient = getMessageToClientAfterSendInSession(wssession, clazz.getName(), methodName, getJson(false));
        assertEquals(MessageType.RESULT, messageToClient.getType());
        Object result = messageToClient.getResponse();
        assertEquals(getJson(destination.methodWithBoolean(false)), result);
    } catch (IOException exception) {
        fail(exception.getMessage());
    }
}

/** Remote call with a Date argument. */
//@Test
public void testMethodWithDate() {
    Class clazz = PojoDataService.class;
    String methodName = "methodWithDate";
    System.out.println(methodName);
    Object arg = new Date();
    try (Session wssession = createAndGetSession()) {
        MessageToClient messageToClient = getMessageToClientAfterSendInSession(wssession, clazz.getName(), methodName, getJson(arg));
        assertEquals(MessageType.RESULT, messageToClient.getType());
        Object result = messageToClient.getResponse();
        assertEquals(getJson(destination.methodWithDate((Date) arg)), result);
    } catch (IOException exception) {
        fail(exception.getMessage());
    }
}
/** Remote call with a custom Result argument. */
//@Test
public void testMethodWithResult() {
    Class clazz = PojoDataService.class;
    String methodName = "methodWithResult";
    System.out.println(methodName);
    Object arg = new Result(6);
    try (Session wssession = createAndGetSession()) {
        MessageToClient messageToClient = getMessageToClientAfterSendInSession(wssession, clazz.getName(), methodName, getJson(arg));
        assertEquals(MessageType.RESULT, messageToClient.getType());
        Object result = messageToClient.getResponse();
        assertEquals(getJson(destination.methodWithResult((Result) arg)), result);
    } catch (IOException exception) {
        // Do not swallow: a transport failure must fail the test, not skip it.
        fail(exception.getMessage());
    }
}

/** Remote call with an Integer[] argument. */
//@Test
public void testMethodWithArrayInteger() {
    Class clazz = PojoDataService.class;
    String methodName = "methodWithArrayInteger";
    System.out.println(methodName);
    Object arg = new Integer[]{1, 2};
    try (Session wssession = createAndGetSession()) {
        MessageToClient messageToClient = getMessageToClientAfterSendInSession(wssession, clazz.getName(), methodName, getJson(arg));
        assertEquals(MessageType.RESULT, messageToClient.getType());
        Object result = messageToClient.getResponse();
        assertEquals(getJson(destination.methodWithArrayInteger((Integer[]) arg)), result);
    } catch (IOException exception) {
        fail(exception.getMessage());
    }
}

/** Remote call with a Collection&lt;Integer&gt; argument. */
//@Test
public void testMethodWithCollectionInteger() {
    Class clazz = PojoDataService.class;
    String methodName = "methodWithCollectionInteger";
    System.out.println(methodName);
    Object arg = destination.getCollectionInteger();
    try (Session wssession = createAndGetSession()) {
        MessageToClient messageToClient = getMessageToClientAfterSendInSession(wssession, clazz.getName(), methodName, getJson(arg));
        assertEquals(MessageType.RESULT, messageToClient.getType());
        Object result = messageToClient.getResponse();
        assertEquals(getJson(destination.methodWithCollectionInteger((Collection<Integer>) arg)), result);
    } catch (IOException exception) {
        fail(exception.getMessage());
    }
}

/** Remote call with a Result[] argument. */
//@Test
public void testMethodWithArrayResult() {
    Class clazz = PojoDataService.class;
    String methodName = "methodWithArrayResult";
    System.out.println(methodName);
    Object arg = new Result[]{new Result(1), new Result(2)};
    try (Session wssession = createAndGetSession()) {
        MessageToClient messageToClient = getMessageToClientAfterSendInSession(wssession, clazz.getName(), methodName, getJson(arg));
        assertEquals(MessageType.RESULT, messageToClient.getType());
        Object result = messageToClient.getResponse();
        assertEquals(getJson(destination.methodWithArrayResult((Result[]) arg)), result);
    } catch (IOException exception) {
        fail(exception.getMessage());
    }
}

/** Remote call with a Collection&lt;Result&gt; argument. */
//@Test
public void testMethodWithCollectionResult() {
    Class clazz = PojoDataService.class;
    String methodName = "methodWithCollectionResult";
    System.out.println(methodName);
    Object arg = destination.getCollectionResult();
    try (Session wssession = createAndGetSession()) {
        MessageToClient messageToClient = getMessageToClientAfterSendInSession(wssession, clazz.getName(), methodName, getJson(arg));
        assertEquals(MessageType.RESULT, messageToClient.getType());
        Object result = messageToClient.getResponse();
        assertEquals(getJson(destination.methodWithCollectionResult((Collection<Result>) arg)), result);
    } catch (IOException exception) {
        fail(exception.getMessage());
    }
}

/** Remote call with a Map&lt;String, Result&gt; argument. */
//@Test
public void testMethodWithMapResult() {
    Class clazz = PojoDataService.class;
    String methodName = "methodWithMapResult";
    System.out.println(methodName);
    Object arg = destination.getMapResult();
    try (Session wssession = createAndGetSession()) {
        MessageToClient messageToClient = getMessageToClientAfterSendInSession(wssession, clazz.getName(), methodName, getJson(arg));
        assertEquals(MessageType.RESULT, messageToClient.getType());
        Object result = messageToClient.getResponse();
        assertEquals(getJson(destination.methodWithMapResult((Map<String, Result>) arg)), result);
    } catch (IOException exception) {
        fail(exception.getMessage());
    }
}

/** Remote call with a nested Collection&lt;Collection&lt;Result&gt;&gt; argument. */
//@Test
public void testMethodWithCollectionOfCollectionResult() {
    Class clazz = PojoDataService.class;
    String methodName = "methodWithCollectionOfCollectionResult";
    System.out.println(methodName);
    Object arg = destination.getCollectionOfCollectionResult();
    try (Session wssession = createAndGetSession()) {
        MessageToClient messageToClient = getMessageToClientAfterSendInSession(wssession, clazz.getName(), methodName, getJson(arg));
        assertEquals(MessageType.RESULT, messageToClient.getType());
        Object result = messageToClient.getResponse();
        assertEquals(getJson(destination.methodWithCollectionOfCollectionResult((Collection<Collection<Result>>) arg)), result);
    } catch (IOException exception) {
        fail(exception.getMessage());
    }
}
/** Remote call carrying several heterogeneous arguments at once. */
//@Test
public void testMethodWithManyParameters() {
    Class clazz = PojoDataService.class;
    String methodName = "methodWithManyParameters";
    System.out.println(methodName);
    Collection<String> cl = new ArrayList<>();
    cl.add("foo");
    cl.add("foo");
    try (Session wssession = createAndGetSession()) {
        MessageToClient messageToClient = getMessageToClientAfterSendInSession(wssession, clazz.getName(), methodName, getJson("foo"), getJson(5), getJson(new Result(3)), getJson(cl));
        assertEquals(MessageType.RESULT, messageToClient.getType());
        Object result = messageToClient.getResponse();
        assertEquals(getJson(destination.methodWithManyParameters("foo", 5, new Result(3), cl)), result);
    } catch (IOException exception) {
        // Do not swallow: a transport failure must fail the test, not skip it.
        fail(exception.getMessage());
    }
}

/** A service method that throws must come back as a FAULT carrying the exception class. */
//@Test
public void testMethodThatThrowException() {
    Class clazz = PojoDataService.class;
    String methodName = "methodThatThrowException";
    System.out.println(methodName);
    try (Session wssession = createAndGetSession()) {
        MessageToClient messageToClient = getMessageToClientAfterSendInSession(wssession, clazz.getName(), methodName);
        assertEquals(MessageType.FAULT, messageToClient.getType());
        Fault fault = (Fault) messageToClient.getResponse();
        assertEquals(MethodException.class.getName(), fault.getClassname());
    } catch (IOException exception) {
        fail(exception.getMessage());
    }
}
/** Overload resolution: the int variant of the overloaded method is selected. */
//@Test
public void testMethodWithAlmostSameSignature1() {
    Class clazz = PojoDataService.class;
    String methodName = "methodWithAlmostSameSignature";
    System.out.println(methodName + "(int)");
    try (Session wssession = createAndGetSession()) {
        MessageToClient messageToClient = getMessageToClientAfterSendInSession(wssession, clazz.getName(), methodName, getJson(5));
        assertEquals(MessageType.RESULT, messageToClient.getType());
        Object result = messageToClient.getResponse();
        assertEquals(getJson(destination.methodWithAlmostSameSignature(5)), result);
    } catch (IOException exception) {
        // Do not swallow: a transport failure must fail the test, not skip it.
        fail(exception.getMessage());
    }
}

/** Overload resolution: the String variant of the overloaded method is selected. */
//@Test
public void testMethodWithAlmostSameSignature2() {
    Class clazz = PojoDataService.class;
    String methodName = "methodWithAlmostSameSignature";
    System.out.println(methodName + "(string)");
    try (Session wssession = createAndGetSession()) {
        MessageToClient messageToClient = getMessageToClientAfterSendInSession(wssession, clazz.getName(), methodName, getJson("foo"));
        assertEquals(MessageType.RESULT, messageToClient.getType());
        Object result = messageToClient.getResponse();
        assertEquals(getJson(destination.methodWithAlmostSameSignature("foo")), result);
    } catch (IOException exception) {
        fail(exception.getMessage());
    }
}
// Number of concurrent calls fired by the load tests below.
final int NB_SIMUL_METHODS = 500;
/**
 * Load test: fires NB_SIMUL_METHODS calls, each on its own websocket
 * session, then waits (timeout proportional to the call count) until every
 * response has been counted down by the shared latch.
 */
//@Test
public void testCallMultiMethodsMultiSessions() {
int nb = NB_SIMUL_METHODS;
System.out.println("call" + nb + "MethodsMultiSession");
// One worker thread per call; sessions tracked for cleanup in finally.
ExecutorService executorService = Executors.newFixedThreadPool(nb);
final List<Session> sessions = new ArrayList<>();
try {
final Class clazz = EJBDataService.class;
final String methodName = "getValue";
long t0 = System.currentTimeMillis();
// Each received response decrements the latch by one.
final CountDownLatch lock = new CountDownLatch(nb);
for (int i = 0; i < nb; i++) {
Session session = OcelotTest.createAndGetSession();
sessions.add(session);
session.addMessageHandler(new CountDownMessageHandler(lock));
executorService.execute(new TestThread(clazz, methodName, session));
}
// Allow 30 ms per call before declaring a timeout.
boolean await = lock.await(30L * nb, TimeUnit.MILLISECONDS);
long t1 = System.currentTimeMillis();
assertTrue("Timeout. waiting " + (t1 - t0) + " ms. Remain " + lock.getCount() + "/" + nb + " msgs", await);
} catch (InterruptedException ex) {
fail(ex.getMessage());
} finally {
// Best-effort cleanup: close every session, ignore close failures.
for (Session session : sessions) {
try {
session.close();
} catch (IOException ex) {
}
}
executorService.shutdown();
}
}
/**
 * Load test: fires NB_SIMUL_METHODS concurrent calls over a SINGLE shared
 * websocket session and waits for every response via a countdown latch.
 */
//@Test
public void testCallMultiMethodsMonoSessions() {
int nb = NB_SIMUL_METHODS;
System.out.println("call" + nb + "MethodsMonoSession");
ExecutorService executorService = Executors.newFixedThreadPool(nb);
try (Session session = OcelotTest.createAndGetSession()) {
// Each received response decrements the latch by one.
final CountDownLatch lock = new CountDownLatch(nb);
CountDownMessageHandler messageHandler = new CountDownMessageHandler(lock);
session.addMessageHandler(messageHandler);
final Class clazz = EJBDataService.class;
final String methodName = "getValue";
long t0 = System.currentTimeMillis();
for (int i = 0; i < nb; i++) {
executorService.execute(new TestThread(clazz, methodName, session));
}
// Allow 10 ms per call before declaring a timeout.
boolean await = lock.await(10L * nb, TimeUnit.MILLISECONDS);
long t1 = System.currentTimeMillis();
assertTrue("Timeout. waiting " + (t1 - t0) + " ms. Remain " + lock.getCount() + " msgs", await);
} catch (IOException | InterruptedException ex) {
fail(ex.getMessage());
} finally {
executorService.shutdown();
}
}
/**
 * Worker used by the load tests: invokes {@code methodName} on
 * {@code clazz} through the given websocket session and verifies the
 * reply via the blocking helper checkMessageAfterSendInSession.
 */
private class TestThread implements Runnable {
private final Class clazz;
private final String methodName;
private final Session wsSession;
public TestThread(Class clazz, String methodName, Session wsSession) {
this.clazz = clazz;
this.methodName = methodName;
this.wsSession = wsSession;
}
@Override
public void run() {
checkMessageAfterSendInSession(wsSession, clazz.getName(), methodName);
}
}
/**
 * Sends a message that triggers a cache-removal message: after
 * subscribing to the "ocelot-cleancache" topic, calling
 * generateCleanCacheMessage must produce two messages (the call result
 * plus the clean-cache notification), counted down on the latch.
 */
//@Test
public void testSendRemoveCacheMessage() {
System.out.println("sendRemoveCacheMessage");
final String topic = "ocelot-cleancache";
System.out.println("Enregistrement au Topic '" + topic + "'");
Class clazz = OcelotServices.class;
String methodName = "subscribe";
System.out.println(methodName);
try (Session wssession = createAndGetSession()) {
MessageToClient messageToClient = getMessageToClientAfterSendInSession(wssession, clazz.getName(), methodName, getJson(topic));
assertEquals(MessageType.RESULT, messageToClient.getType());
long t0 = System.currentTimeMillis();
MessageFromClient messageFromClient = getMessageFromClient(EJBDataService.class, "generateCleanCacheMessage", "\"a\",\"r\"", getJson(""), getJson(new Result(5)));
// Expect two messages: the method result and the clean-cache broadcast.
CountDownLatch lock = new CountDownLatch(2);
CountDownMessageHandler messageHandler = new CountDownMessageHandler(lock);
wssession.addMessageHandler(messageHandler);
// send
wssession.getAsyncRemote().sendText(messageFromClient.toJson());
// wait for the latch to be released, or time out
boolean await = lock.await(TIMEOUT, TimeUnit.MILLISECONDS);
long t1 = System.currentTimeMillis();
assertTrue("Timeout. waiting " + (t1 - t0) + " ms. Remain " + lock.getCount() + "/2 msgs", await);
wssession.removeMessageHandler(messageHandler);
} catch (InterruptedException | IOException ex) {
fail(ex.getMessage());
}
}
/**
 * Subscribes to a topic, fires nbMsg MessageToClient events at it via
 * the CDI event bus, and asserts every broadcast is received before the
 * timeout (latch counts one per expected topic message).
 */
//@Test
public void testSendMessageToTopic() {
System.out.println("sendMessageToTopic");
final String topic = "mytopic";
System.out.println("Enregistrement au Topic '" + topic + "'");
Class clazz = OcelotServices.class;
String methodName = "subscribe";
System.out.println(methodName);
try (Session wssession = createAndGetSession()) {
MessageToClient messageToClient = getMessageToClientAfterSendInSession(wssession, clazz.getName(), methodName, getJson(topic));
assertEquals(MessageType.RESULT, messageToClient.getType());
long t0 = System.currentTimeMillis();
// Thread.sleep(TIMEOUT);
int nbMsg = 10;
CountDownLatch lock = new CountDownLatch(nbMsg);
CountDownMessageHandler messageHandler = new CountDownMessageHandler(topic, lock);
wssession.addMessageHandler(messageHandler);
MessageToClient toTopic = new MessageToClient();
// The message id names the target topic.
toTopic.setId(topic);
for (int i = 0; i < nbMsg; i++) {
System.out.println("Envois d'un message au Topic '" + topic + "'");
toTopic.setResponse(new Result(i));
wsEvent.fire(toTopic);
}
boolean await = lock.await(TIMEOUT, TimeUnit.MILLISECONDS);
long t1 = System.currentTimeMillis();
assertTrue("Timeout. waiting " + (t1 - t0) + " ms. Remain " + lock.getCount() + "/" + nbMsg + " msgs", await);
wssession.removeMessageHandler(messageHandler);
} catch (InterruptedException | IOException ex) {
fail(ex.getMessage());
}
}
} |
package model.states.gamestates;
import gameactions.GameActionRiverPush;
import gameactions.GameActionStatePush;
import gameactions.GameActionTeleport;
import java.awt.Color;
import java.util.Collection;
import model.KeyPreferences;
import model.area.Area;
import model.area.LinearArea;
import model.area.RadialArea;
import model.area.RealCoordinate;
import model.area.TileCoordinate;
import model.entity.Avatar;
import model.entity.EntityManager;
import model.entity.EntityMovementAssocation;
import model.entity.Mount;
import model.entity.NPC;
import model.event.EventManager;
import model.event.ExperienceModifierEvent;
import model.event.RiverPushEvent;
import model.event.TeleportEvent;
import model.item.Boots;
import model.item.Door;
import model.item.Gloves;
import model.item.Helmet;
import model.item.ObstacleItem;
import model.item.OneShotItem;
import model.item.TakeableItem;
import model.item.Trap;
import model.light.LightManager;
import model.map.GameTerrain;
import model.map.ItemMap;
import model.map.tile.AirPassableTile;
import model.map.tile.ImpassableTile;
import model.map.tile.PassableTile;
import model.projectile.Projectile;
import model.projectile.ProjectileManager;
import model.statistics.EntityStatistics;
import model.statistics.Statistics;
import model.trigger.PermanentTrigger;
import model.trigger.RateLimitedTrigger;
import model.trigger.SingleUseTrigger;
import model.trigger.Trigger;
import model.trigger.TriggerManager;
import utilities.Angle;
import view.EntitySpriteFactory;
import view.EntityView;
import view.item.BasicItemView;
import view.item.ItemView;
import view.layout.GameplayLayout;
import view.map.BasicTileView;
import view.map.TileView;
import controller.GameplayController;
import controller.listener.Listener;
import controller.listener.SingleUseListener;
/**
 * Active gameplay state: owns the terrain, the item map and the avatar,
 * wires input listeners, and drives the per-tick update of the trigger,
 * event, entity and projectile managers. The add*Test() methods populate
 * a hard-coded demo world on entry.
 */
public class GameplayState extends GameState {
private GameplayController controller;
private GameplayLayout layout;
private GameTerrain gameMap;
private ItemMap itemMap;
private Avatar avatar;
public GameplayState(Avatar avatar) {
layout = new GameplayLayout();
gameMap = new GameTerrain();
itemMap = new ItemMap();
this.avatar = avatar;
}
// Advances all world managers by deltaTime and drains the projectile
// queue, registering each new projectile's view with the layout.
public void update(double deltaTime) {
TriggerManager.getSingleton().update(deltaTime);
EventManager.getSingleton().update(deltaTime);
// Alternative to passing an itemMap is to use traps as triggers
EntityManager.getSingleton().update(itemMap, deltaTime);
ProjectileManager.getSingleton().update(deltaTime);
/* Run through projectile queue */
while (!ProjectileManager.getSingleton().isQueueEmpty()) {
Projectile poll = ProjectileManager.getSingleton()
.dequeueProjectile();
poll.projView.registerWithGameProjectileView(layout
.getGameProjectileView());
}
}
@Override
public void onEnter() {
// Entity test must run before item test, which must be run before
// setListeners.
// The reason for this is the avatar must be made prior to items, to
// make the itemEntityAssocation,
// Which is needed for other stuff.
super.onEnter();
controller = new GameplayController(this);
addTilesTest();
addEntityTest();
addItemsTest();
addTriggersTest();
controller.spawnUpdateThread();
// The camera follows the avatar.
avatar.subscribe(layout.getCamera());
}
@Override
public void onResume() {
super.onResume();
// Re-install key bindings from the saved preferences.
setListeners(getContext().getPreferences());
}
@Override
public void onPause() {
super.onPause();
controller.removeListeners();
layout.clearBindings();
}
@Override
public void onExit() {
// Stop the update thread and reset the singleton managers so a new
// GameplayState starts from a clean slate.
controller.terminateUpdateThread();
EntityManager.getSingleton().clear();
TriggerManager.getSingleton().clear();
LightManager.getSingleton().clear();
super.onExit();
}
// Demo fixture: places the avatar (with a test helmet equipped), one
// party NPC and one mount, then installs default key preferences.
public void addEntityTest() {
TileCoordinate loc = new TileCoordinate(3, 3);
EntityView eView = avatar.getEntityView();
avatar.setLocation(loc);
//testing this for equipped Items
avatar.equip(new Helmet(new BasicItemView(),new Statistics()));
EntityManager.getSingleton().setAvatar(avatar);
getController().registerAvatar(avatar);
eView.registerWithGameMapView(layout.getGameEntityView(), TileCoordinate.convertToRealCoordinate(loc),Angle.UP);
TileCoordinate npcLocation = new TileCoordinate(7, 7);
EntityView npcView = new EntityView(EntitySpriteFactory.getLadySpriteHolder());
NPC npc = new NPC("DaveTheBargainer", npcView, npcLocation);
npcView.registerWithGameMapView(layout.getGameEntityView(), TileCoordinate.convertToRealCoordinate(npcLocation),Angle.UP);
EntityManager.getSingleton().addPartyNpc(npc);
TileCoordinate mountLocation = new TileCoordinate(7, 2);
EntityView mountView = new EntityView(EntitySpriteFactory.getUnderlingSpriteHolder());
Mount mount = new Mount("My first mount", mountView, mountLocation);
mountView.registerWithGameMapView(layout.getGameEntityView(), TileCoordinate.convertToRealCoordinate(mountLocation), Angle.UP);
EntityManager.getSingleton().addNonPartyNpc(mount);
KeyPreferences preferences = new KeyPreferences();
getContext().setPreferences(preferences);
setListeners(preferences);
}
// Rebuilds all key bindings from scratch: pause/inventory/skills menu
// keys plus the movement listeners derived from the current unit.
private void setListeners(KeyPreferences preferences) {
controller.removeListeners();
getLayout().clearBindings();
Listener escapeListener = new SingleUseListener(preferences.getPauseKey(), new GameActionStatePush(
getContext(), new PauseMenuState()));
escapeListener.addAsBinding(getLayout());
Listener inventoryListener = new SingleUseListener(preferences.getInventoryKey(), new GameActionStatePush(
getContext(), new InventoryMenuState(avatar)));
inventoryListener.addAsBinding(getLayout());
Listener skillsListener = new SingleUseListener(preferences.getSkillsKey(), new GameActionStatePush(
getContext(), new SkillsMenuState()));
skillsListener.addAsBinding(getLayout());
Collection<Listener> listeners = new EntityMovementAssocation(getContext().getCurrentUnit(), gameMap,
this.getItemMap()).getListeners(getContext());
for (Listener listener : listeners) {
listener.addAsBinding(getLayout());
controller.addEntityListener(listener);
}
}
private ItemMap getItemMap() {
return itemMap;
}
// Demo fixture: scatters takeable items, a door keyed to the gloves, an
// obstacle, a one-shot item, a river marker and a trap on the item map.
private void addItemsTest() {
ItemView takeableItemView = new BasicItemView(new Color(100, 60, 100), Color.GREEN);
TileCoordinate takeableItemViewPosition = new TileCoordinate(5, 5);
takeableItemView.registerWithGameItemView(layout.getGameItemView(), TileCoordinate.convertToRealCoordinate(takeableItemViewPosition));
this.getItemMap().addItem(new Boots(takeableItemView),
takeableItemViewPosition);
ItemView takeableItemViewTwo = new BasicItemView(new Color(100, 60, 100), Color.DARK_GRAY);
TileCoordinate takeableItemViewPositionTwo = new TileCoordinate(5, 6);
takeableItemViewTwo.registerWithGameItemView(layout.getGameItemView(), TileCoordinate.convertToRealCoordinate(takeableItemViewPositionTwo));
TakeableItem takeableItemTwo = new Gloves(takeableItemViewTwo);
this.getItemMap().addItem(takeableItemTwo, takeableItemViewPositionTwo);
ItemView doorItemView = new BasicItemView(Color.RED, Color.MAGENTA);
TileCoordinate doorItemViewPosition = new TileCoordinate(15, 14);
doorItemView.registerWithGameItemView(layout.getGameItemView(), TileCoordinate.convertToRealCoordinate(doorItemViewPosition));
// The door is opened by the gloves item created above.
Door doorItem = new Door(doorItemView, takeableItemTwo);
this.getItemMap().addItem(doorItem, doorItemViewPosition);
ItemView obstacleItemView = new BasicItemView(Color.GRAY, Color.BLACK);
TileCoordinate obstacleItemPosition = new TileCoordinate(9, 7);
obstacleItemView.registerWithGameItemView(layout.getGameItemView(),TileCoordinate.convertToRealCoordinate(obstacleItemPosition));
this.getItemMap().addItem(new ObstacleItem(obstacleItemView), obstacleItemPosition);
ItemView oneshotItemView = new BasicItemView(Color.WHITE, Color.GREEN);
TileCoordinate oneshotItemPosition = new TileCoordinate(13, 9);
oneshotItemView.registerWithGameItemView(layout.getGameItemView(), TileCoordinate.convertToRealCoordinate(oneshotItemPosition));
this.getItemMap().addItem(new OneShotItem(oneshotItemView, new EntityStatistics()), oneshotItemPosition);
ItemView riverMarker = new BasicItemView(Color.GRAY, Color.BLACK);
TileCoordinate riverMarkerSpot = new TileCoordinate(13, 0);
riverMarker.registerWithGameItemView(layout.getGameItemView(), TileCoordinate.convertToRealCoordinate(riverMarkerSpot));
this.getItemMap().addItem(new ObstacleItem(riverMarker), riverMarkerSpot);
ItemView trapView = new BasicItemView(Color.RED, Color.BLACK);
TileCoordinate trapSpot = new TileCoordinate(15, 12);
trapView.registerWithGameItemView(layout.getGameItemView(), TileCoordinate.convertToRealCoordinate(trapSpot));
this.getItemMap().addItem(new Trap(trapView), trapSpot);
}
// Demo fixture: an XP pickup, a teleporter, and a rate-limited river
// that pushes the avatar downstream.
private void addTriggersTest() {
TriggerManager triggerManager = TriggerManager.getSingleton();
// This may need a ViewableTriggerDecorator to display the Decal for the
// AreaEffect
/* TileCoordinate locOne = new TileCoordinate(2, 6);
Area areaOne = new RadialArea(20, locOne);
Trigger triggerOne = new SingleUseTrigger(areaOne, new HealthModifierEvent(2, -1));*/
TileCoordinate locTwo = new TileCoordinate(2, 7);
Area areaTwo = new RadialArea(1, locTwo);
Trigger triggerTwo = new SingleUseTrigger(areaTwo, new ExperienceModifierEvent(0, 750));
TileCoordinate locThree = new TileCoordinate(2, 8);
Area areaThree = new RadialArea(0, locThree);
Trigger triggerThree = new PermanentTrigger(areaThree, new TeleportEvent(new TileCoordinate(2, 0),
new GameActionTeleport(avatar, gameMap, this.getItemMap(), Angle.DOWN)));
TileCoordinate locFour = new TileCoordinate(13, 0);
Area areaFour = new LinearArea(20, locFour, Angle.DOWN);
Trigger triggerFour = new RateLimitedTrigger(areaFour, new RiverPushEvent(
new GameActionRiverPush(avatar, gameMap, this.getItemMap(), Angle.DOWN)),1000);
// triggerManager.addNonPartyTrigger(triggerOne);
triggerManager.addNonPartyTrigger(triggerTwo);
triggerManager.addNonPartyTrigger(triggerThree);
triggerManager.addNonPartyTrigger(triggerFour);
}
// Demo fixture: fills a 100x100 grid with passable tiles, except
// (10,10) which is impassable and (13,13) which is air-passable only.
public void addTilesTest() {
for (int x = 0; x < 100; ++x) {
for (int y = 0; y < 100; ++y) {// Hardcoded for as long as the area
TileCoordinate p = new TileCoordinate(x, y);
if ((x != 10 || y != 10) && (x!=13 || y!=13)) {
TileView view = new BasicTileView(new Color(0, 200, 200), Color.WHITE);
view.registerWithGameMapView(layout.getGameTerrainView(), new RealCoordinate(x, y));
gameMap.add(new PassableTile(view), p);
} else if(x!=13 || y!=13){
// Reached only for (10, 10).
TileView view = new BasicTileView(new Color(200, 0, 200), Color.WHITE);
view.registerWithGameMapView(layout.getGameTerrainView(), new RealCoordinate(x, y));
gameMap.add(new ImpassableTile(view), p);
}
else{
// Reached only for (13, 13).
TileView view = new BasicTileView(new Color(100, 0, 200), Color.BLACK);
view.registerWithGameMapView(layout.getGameTerrainView(), new RealCoordinate(x, y));
gameMap.add(new AirPassableTile(view), p);
}
}
}
}
@Override
public GameplayLayout getLayout() {
return layout;
}
@Override
public GameplayController getController() {
return controller;
}
}
package clients;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.sql.Array;
import java.util.List;
import java.util.regex.Pattern;
import java.util.concurrent.TimeUnit;
import org.testng.*;
import org.testng.annotations.*;
import static org.junit.Assert.*;
import static org.hamcrest.CoreMatchers.*;
import org.apache.http.*;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.openqa.selenium.*;
import org.openqa.selenium.chrome.ChromeDriver;
import org.openqa.selenium.firefox.FirefoxDriver;
import org.openqa.selenium.interactions.Actions;
import org.openqa.selenium.logging.LogEntry;
import org.openqa.selenium.support.ui.Select;
public class Link_visit_website extends turnkeye2.pages.TestBase{
private boolean acceptNextAlert = true;
private StringBuffer verificationErrors = new StringBuffer();
private static int statusCode;
@Test
public void testUntitled8() throws Exception {
boolean ok = true;
driver.manage().window().maximize();
Actions actions = new Actions(driver);
driver.get(baseUrl + "clients");
List<WebElement> links = driver.findElements(By.cssSelector("a.link-visit"));
for(WebElement e : links) {
System.out.println(e.getAttribute("href"));
}
// for(int i = 0; i < links.size(); i++){
// if(!(links.get(i).getAttribute("href") == null) && !(links.get(i).getAttribute("href").equals(""))){
// if(links.get(i).getAttribute("href").contains("http")){
// statusCode= getResponseCode(links.get(i).getAttribute("href").trim());
// if(statusCode != 200 && statusCode != 301 && statusCode != 302){
// System.out.println("HTTP Error " + statusCode + " # " + i + " " + links.get(i).getAttribute("href"));
// ok = false;
assertEquals(true,ok);
}
public static int getResponseCode(String urlString) throws MalformedURLException, IOException{
try {
URL url = new URL(urlString);
HttpURLConnection huc = (HttpURLConnection)url.openConnection();
huc.setRequestMethod("GET");
huc.addRequestProperty("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.99 Safari/537.36");
huc.connect();
return huc.getResponseCode();
} catch (Exception e) {
}
return 500;
}
private boolean isElementPresent(By by) {
try {
driver.findElement(by);
return true;
} catch (NoSuchElementException e) {
return false;
}
}
private String closeAlertAndGetItsText() {
try {
Alert alert = driver.switchTo().alert();
String alertText = alert.getText();
if (acceptNextAlert) {
alert.accept();
} else {
alert.dismiss();
}
return alertText;
} finally {
acceptNextAlert = true;
}
}
} |
package com.akiban.sql.test;
import com.akiban.sql.StandardException;
import com.akiban.sql.compiler.BooleanNormalizer;
import com.akiban.sql.optimizer.AISBinder;
import com.akiban.sql.optimizer.AISTypeComputer;
import com.akiban.sql.optimizer.BindingNodeFactory;
import com.akiban.sql.optimizer.BoundNodeToString;
import com.akiban.sql.optimizer.OperatorCompiler;
import com.akiban.sql.optimizer.OperatorCompilerTest;
import com.akiban.sql.optimizer.SubqueryFlattener;
import com.akiban.sql.optimizer.plan.AST;
import com.akiban.sql.optimizer.plan.PlanContext;
import com.akiban.sql.optimizer.plan.PlanToString;
import com.akiban.sql.optimizer.rule.BaseRule;
import static com.akiban.sql.optimizer.rule.DefaultRules.*;
import com.akiban.sql.optimizer.rule.RulesContext;
import com.akiban.sql.optimizer.rule.RulesTestContext;
import com.akiban.sql.optimizer.rule.RulesTestHelper;
import com.akiban.sql.parser.CursorNode;
import com.akiban.sql.parser.DMLStatementNode;
import com.akiban.sql.parser.DeleteNode;
import com.akiban.sql.parser.InsertNode;
import com.akiban.sql.parser.NodeTypes;
import com.akiban.sql.parser.SQLParser;
import com.akiban.sql.parser.StatementNode;
import com.akiban.sql.parser.UpdateNode;
import com.akiban.sql.parser.ValueNode;
import com.akiban.sql.pg.PostgresSessionTracer;
import com.akiban.sql.views.ViewDefinition;
import com.akiban.ais.ddl.SchemaDef;
import com.akiban.ais.ddl.SchemaDefToAis;
import com.akiban.ais.model.AkibanInformationSchema;
import com.akiban.ais.model.Index;
import com.akiban.ais.model.GroupIndex;
import com.akiban.server.service.functions.FunctionsRegistryImpl;
import com.akiban.server.store.statistics.IndexStatisticsYamlLoader;
import com.akiban.server.util.GroupIndexCreator;
import java.util.*;
import java.io.*;
/** Standalone testing. */
public class Tester
{
enum Action {
ECHO, PARSE, CLONE,
PRINT_TREE, PRINT_SQL, PRINT_BOUND_SQL,
BIND, COMPUTE_TYPES,
BOOLEAN_NORMALIZE, FLATTEN_SUBQUERIES,
PLAN, OPERATORS
}
List<Action> actions;
SQLParser parser;
BoundNodeToString unparser;
AkibanInformationSchema ais;
AISBinder binder;
AISTypeComputer typeComputer;
BooleanNormalizer booleanNormalizer;
SubqueryFlattener subqueryFlattener;
OperatorCompiler operatorCompiler;
List<BaseRule> planRules;
RulesContext rulesContext;
int repeat;
public Tester() {
actions = new ArrayList<Action>();
parser = new SQLParser();
parser.setNodeFactory(new BindingNodeFactory(parser.getNodeFactory()));
unparser = new BoundNodeToString();
typeComputer = new AISTypeComputer();
booleanNormalizer = new BooleanNormalizer(parser);
subqueryFlattener = new SubqueryFlattener(parser);
}
public void addAction(Action action) {
actions.add(action);
}
public int getRepeat() {
return repeat;
}
public void setRepeat(int repeat) {
this.repeat = repeat;
}
public void process(String sql) throws Exception {
process(sql, false);
if (repeat > 0) {
long start = System.currentTimeMillis();
for (int i = 0; i < repeat; i++) {
process(sql, true);
}
long end = System.currentTimeMillis();
System.out.println((end - start) + " ms.");
}
}
public void process(String sql, boolean silent) throws Exception {
StatementNode stmt = null;
for (Action action : actions) {
switch (action) {
case ECHO:
if (!silent) {
System.out.println("=====");
System.out.println(sql);
}
break;
case PARSE:
stmt = parser.parseStatement(sql);
break;
case CLONE:
stmt = (StatementNode)parser.getNodeFactory().copyNode(stmt, parser);
break;
case PRINT_TREE:
stmt.treePrint();
break;
case PRINT_SQL:
case PRINT_BOUND_SQL:
{
unparser.setUseBindings(action == Action.PRINT_BOUND_SQL);
String usql = unparser.toString(stmt);
if (!silent)
System.out.println(usql);
}
break;
case BIND:
binder.bind(stmt);
break;
case COMPUTE_TYPES:
typeComputer.compute(stmt);
break;
case BOOLEAN_NORMALIZE:
stmt = booleanNormalizer.normalize(stmt);
break;
case FLATTEN_SUBQUERIES:
stmt = subqueryFlattener.flatten((DMLStatementNode)stmt);
break;
case PLAN:
{
PlanContext plan =
new PlanContext(rulesContext,
new AST((DMLStatementNode)stmt,
parser.getParameterList()));
rulesContext.applyRules(plan);
System.out.println(PlanToString.of(plan.getPlan()));
}
break;
case OPERATORS:
{
Object compiled = operatorCompiler.compile((DMLStatementNode)stmt,
parser.getParameterList());
if (!silent)
System.out.println(compiled);
}
break;
}
}
}
static final String DEFAULT_SCHEMA = "user";
public void setSchema(String sql) throws Exception {
SchemaDef schemaDef = SchemaDef.parseSchema("use " + DEFAULT_SCHEMA + "; " +
sql);
SchemaDefToAis toAis = new SchemaDefToAis(schemaDef, false);
ais = toAis.getAis();
if (actions.contains(Action.BIND))
binder = new AISBinder(ais, DEFAULT_SCHEMA);
if (actions.contains(Action.OPERATORS))
operatorCompiler = OperatorCompilerTest.TestOperatorCompiler.create(parser, ais, DEFAULT_SCHEMA, new FunctionsRegistryImpl());
if (actions.contains(Action.PLAN))
rulesContext = new RulesTestContext(ais, planRules);
}
public void addGroupIndex(String cols) throws Exception {
BufferedReader brdr = new BufferedReader(new StringReader(cols));
while (true) {
String line = brdr.readLine();
if (line == null) break;
String defn[] = line.split("\t");
Index.JoinType joinType = Index.JoinType.LEFT;
if (defn.length > 3)
joinType = Index.JoinType.valueOf(defn[3]);
GroupIndex index = GroupIndexCreator.createIndex(ais,
defn[0],
defn[1],
defn[2],
joinType);
index.getGroup().addIndex(index);
}
}
public void loadIndexStatistics(File file) throws Exception {
System.out.println(new IndexStatisticsYamlLoader(ais, DEFAULT_SCHEMA).load(file));
}
public void addView(String sql) throws Exception {
ViewDefinition view = new ViewDefinition(sql, parser);
if (binder != null)
binder.addView(view);
if (operatorCompiler != null)
operatorCompiler.addView(view);
}
public void defaultPlanRules() throws Exception {
planRules = DEFAULT_RULES;
}
public void loadPlanRules(File file) throws Exception {
planRules = RulesTestHelper.loadRules(file);
}
public void parsePlanRules(String rules) throws Exception {
planRules = RulesTestHelper.parseRules(rules);
}
public static String maybeFile(String sql) throws Exception {
if (!sql.startsWith("@"))
return sql;
FileReader reader = null;
try {
reader = new FileReader(sql.substring(1));
StringBuilder str = new StringBuilder();
char[] buf = new char[128];
while (true) {
int nc = reader.read(buf);
if (nc < 0) break;
str.append(buf, 0, nc);
}
return str.toString();
}
finally {
if (reader != null) {
reader.close();
}
}
}
public static void main(String[] args) throws Exception {
if (args.length == 0) {
System.out.println("Usage: Tester " +
"[-clone] [-bind] [-types] [-boolean] [-flatten] [-plan @planfile] [-operators]" +
"[-tree] [-print] [-print-bound]" +
"[-schema ddl] [-index-stats yamlfile] [-view ddl]..." +
"sql...");
System.out.println("Examples:");
System.out.println("-tree 'SELECT t1.x+2 FROM t1'");
System.out.println("-bind -print -tree -schema 'CREATE TABLE t1(x INT NOT NULL, y VARCHAR(7), z DECIMAL); CREATE table t2(w CHAR(1) NOT NULL);' -view 'CREATE VIEW v1(x,y) AS SELECT y,z FROM t1 WHERE y IS NOT NULL' \"SELECT x FROM v1 WHERE y > 'foo'\"");
System.out.println("-operators -schema 'CREATE TABLE parent(id INT, PRIMARY KEY(id), name VARCHAR(256) NOT NULL, UNIQUE(name), state CHAR(2)); CREATE TABLE child(id INT, PRIMARY KEY(id), pid INT, CONSTRAINT `__akiban_fk0` FOREIGN KEY akibanfk(pid) REFERENCES parent(id), name VARCHAR(256) NOT NULL);' \"SELECT parent.name,child.name FROM parent,child WHERE child.pid = parent.id AND parent.state = 'MA'\"");
}
Tester tester = new Tester();
tester.addAction(Action.ECHO);
tester.addAction(Action.PARSE);
int i = 0;
while (i < args.length) {
String arg = args[i++];
if (arg.startsWith("-")) {
if ("-tree".equals(arg))
tester.addAction(Action.PRINT_TREE);
else if ("-print".equals(arg))
tester.addAction(Action.PRINT_SQL);
else if ("-print-bound".equals(arg))
tester.addAction(Action.PRINT_BOUND_SQL);
else if ("-clone".equals(arg))
tester.addAction(Action.CLONE);
else if ("-bind".equals(arg))
tester.addAction(Action.BIND);
else if ("-schema".equals(arg))
tester.setSchema(maybeFile(args[i++]));
else if ("-group-index".equals(arg))
tester.addGroupIndex(maybeFile(args[i++]));
else if ("-index-stats".equals(arg))
tester.loadIndexStatistics(new File(args[i++]));
else if ("-view".equals(arg))
tester.addView(maybeFile(args[i++]));
else if ("-types".equals(arg))
tester.addAction(Action.COMPUTE_TYPES);
else if ("-boolean".equals(arg))
tester.addAction(Action.BOOLEAN_NORMALIZE);
else if ("-flatten".equals(arg))
tester.addAction(Action.FLATTEN_SUBQUERIES);
else if ("-plan".equals(arg)) {
String rules = args[i++];
if (rules.startsWith("@"))
tester.loadPlanRules(new File(rules.substring(1)));
else if (rules.equals("default"))
tester.defaultPlanRules();
else
tester.parsePlanRules(rules);
tester.addAction(Action.PLAN);
}
else if ("-operators".equals(arg))
tester.addAction(Action.OPERATORS);
else if ("-repeat".equals(arg))
tester.setRepeat(Integer.parseInt(args[i++]));
else
throw new Exception("Unknown switch: " + arg);
}
else {
try {
tester.process(maybeFile(arg));
}
catch (StandardException ex) {
System.out.flush();
ex.printStackTrace();
}
}
}
}
} |
package guitests;
import org.junit.Test;
import seedu.cmdo.testutil.TestTask;
import seedu.cmdo.testutil.TestUtil;
import static seedu.cmdo.logic.commands.DeleteCommand.MESSAGE_DELETE_TASK_SUCCESS;
import static org.junit.Assert.assertTrue;
//@@author A0141128R tested and passed
public class DeleteCommandTest extends ToDoListGuiTest {
@Test
public void delete() {
//delete the first in the list
TestTask[] currentList = td.getTypicalTasks();
int targetIndex = 1;
assertDeleteSuccess(targetIndex, currentList);
currentList = updateList(targetIndex,currentList);
//delete the last in the list
targetIndex = currentList.length;
assertDeleteSuccess(targetIndex, currentList);
currentList = updateList(targetIndex,currentList);
//delete from the middle of the list
targetIndex = currentList.length/2;
assertDeleteSuccess(targetIndex, currentList);
currentList = updateList(targetIndex,currentList);
//invalid index
invalidCommand("delete " + currentList.length + 1);
//delete something from an empty list
commandBox.runCommand("clear");
targetIndex = 1;
invalidCommand("delete " + targetIndex);
}
//check if invalid command throws the right error
private void invalidCommand(String input){
commandBox.runCommand(input);
assertResultMessage("The task index provided is invalid");
}
private TestTask[] updateList(int targetIndex, TestTask... currentList){
return TestUtil.removeTaskFromList(currentList, targetIndex);
}
private void runDeleteCommand(int targetIndex){
commandBox.runCommand("delete " + targetIndex);
}
//confirm the list now contains all previous tasks except the deleted task
private void compareList(TestTask[] expectedRemainder){
assertTrue(taskListPanel.isListMatching(expectedRemainder));
}
/**
* Runs the delete command to delete the task at specified index and confirms the result is correct.
* @param targetIndexOneIndexed e.g. to delete the first task in the list, 1 should be given as the target index.
* @param currentList A copy of the current list of tasks (before deletion).
*/
private void assertDeleteSuccess(int targetIndexOneIndexed, final TestTask[] currentList) {
TestTask taskToDelete = currentList[targetIndexOneIndexed-1]; //-1 because array uses zero indexing
runDeleteCommand(targetIndexOneIndexed);
TestTask[] expectedRemainder = updateList(targetIndexOneIndexed,currentList);
//confirm the list now contains all previous tasks except the deleted task
compareList(expectedRemainder);
//confirm the result message is correct
assertResultMessage(String.format(MESSAGE_DELETE_TASK_SUCCESS,taskToDelete));
}
} |
package org.mapdb;
import org.junit.Test;
import java.io.File;
import java.util.*;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
 * Tests for StoreArchive: pump-building a BTreeMap into an archive file,
 * in-place updates, reserved-recid handling across reopen, a ~100 MB
 * record round-trip, and copying a named tree map into an archive DB.
 */
public class StoreArchiveTest {
// Builds a 10k-entry tree via Pump into a file-backed archive store and
// checks a valid root recid comes back.
@Test
public void pump(){
File f = TT.tempDbFile();
StoreArchive e = new StoreArchive(
f.getPath(),
Volume.RandomAccessFileVol.FACTORY,
false);
e.init();
List a = new ArrayList();
for(int i=0;i<10000;i++){
a.add(i);
}
// Pump.buildTreeMap expects its input in reverse order.
Collections.reverse(a);
long recid = Pump.buildTreeMap(
a.iterator(),
e,
Fun.extractNoTransform(),
Fun.extractNoTransform(),
false,
32,
false,
0,
BTreeKeySerializer.INTEGER,
(Serializer)Serializer.INTEGER,
null
);
e.commit();
assertTrue(recid>0);
e.close();
f.delete();
}
// Repeatedly overwrites records with values of the same serialized size
// and verifies the previous value is read back before each update.
@Test public void update_same_size(){
if(TT.shortTest())
return;
StoreArchive e = new StoreArchive(
null,
Volume.ByteArrayVol.FACTORY,
false);
e.init();
assertTrue(!e.readonly);
long max = 100000;
List<Long> recids = new ArrayList<Long>();
for(long i=0;i<max;i++){
recids.add(e.put(i,Serializer.LONG));
}
for(long i=max;i<max*100;i++){
long recid = recids.get((int) (i%max));
assertTrue(i-max == e.get(recid,Serializer.LONG));
e.update(recid, i, Serializer.LONG);
}
}
// Reserved recids start out null, accept updates, and can be cleared
// back to null.
@Test public void reserved_recid_update(){
StoreArchive e = new StoreArchive(
null,
Volume.ByteArrayVol.FACTORY,
false);
e.init();
for(long recid=1; recid<Engine.RECID_LAST_RESERVED;recid++){
assertEquals(null, e.get(recid, Serializer.BYTE_ARRAY_NOSIZE));
byte[] b = TT.randomByteArray(1000);
e.update(recid,b,Serializer.BYTE_ARRAY_NOSIZE);
assertTrue(Arrays.equals(b, e.get(recid, Serializer.BYTE_ARRAY_NOSIZE)));
e.update(recid, null, Serializer.BYTE_ARRAY_NOSIZE);
assertEquals(null, e.get(recid, Serializer.BYTE_ARRAY_NOSIZE));
}
}
// Data written to a reserved recid survives close and reopen of the
// file-backed store.
@Test public void reserved_recid_update_reopen(){
File f = TT.tempDbFile();
StoreArchive e = new StoreArchive(
f.getPath(),
Volume.RandomAccessFileVol.FACTORY,
false);
e.init();
byte[] b = TT.randomByteArray(10000);
e.update(Engine.RECID_NAME_CATALOG, b, Serializer.BYTE_ARRAY_NOSIZE);
e.close();
e = new StoreArchive(
f.getPath(),
Volume.RandomAccessFileVol.FACTORY,
false);
e.init();
assertTrue(Arrays.equals(b, e.get(Engine.RECID_NAME_CATALOG, Serializer.BYTE_ARRAY_NOSIZE)));
e.close();
f.delete();
}
// Round-trips a ~100 MB byte array through put/get.
@Test public void large_record(){
StoreArchive e = new StoreArchive(
null,
Volume.ByteArrayVol.FACTORY,
false);
e.init();
assertTrue(!e.readonly);
byte[] b = TT.randomByteArray((int) 1e8);
long recid = e.put(b,Serializer.BYTE_ARRAY_NOSIZE);
assertTrue(Arrays.equals(b, e.get(recid,Serializer.BYTE_ARRAY_NOSIZE)));
}
// Archives a named tree map to a file and verifies the reopened DB
// contains exactly the same entry set.
@Test public void pump_copy_named_btree(){
File f = TT.tempDbFile();
NavigableMap source = new TreeMap();
for(int i=0;i<10000;i++){
source.put(i,""+i);
}
Pump.archiveTreeMap(
source,
f,
new DB.BTreeMapMaker("name")
.keySerializer(Serializer.INTEGER)
.valueSerializer(Serializer.STRING)
);
DB db = DBMaker.archiveFileDB(f).make();
Map m = db.treeMap("name");
assertTrue(source.entrySet().containsAll(m.entrySet()));
assertTrue(m.entrySet().containsAll(source.entrySet()));
db.close();
f.delete();
}
}
package uk.ac.ebi.subs.api;
import uk.ac.ebi.subs.data.client.Study;
import uk.ac.ebi.subs.data.component.*;
import uk.ac.ebi.subs.data.status.ProcessingStatusEnum;
import uk.ac.ebi.subs.data.status.SubmissionStatusEnum;
import uk.ac.ebi.subs.repository.model.ProcessingStatus;
import uk.ac.ebi.subs.repository.model.Sample;
import uk.ac.ebi.subs.repository.model.Submission;
import uk.ac.ebi.subs.repository.model.SubmissionStatus;
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.UUID;
public class Helpers {
public static Submission generateSubmission() {
Submission s = new Submission();
s.setTeam(generateTestTeam());
s.setSubmitter(generateTestSubmitter());
return s;
}
private static Submitter generateTestSubmitter() {
Submitter u = new Submitter();
u.setEmail("test@test.org");
return u;
}
public static List<Sample> generateTestSamples() {
return generateTestSamples(2);
}
public static List<uk.ac.ebi.subs.data.client.Sample> generateTestClientSamples(int numberOfSamplesRequired) {
List<uk.ac.ebi.subs.data.client.Sample> samples = new ArrayList<>(numberOfSamplesRequired);
for (int i = 1; i <= numberOfSamplesRequired; i++) {
uk.ac.ebi.subs.data.client.Sample s = new uk.ac.ebi.subs.data.client.Sample();
samples.add(s);
s.setAlias("D" + i);
s.setTitle("Donor " + i);
s.setDescription("Human sample donor");
s.setTaxon("Homo sapiens");
s.setTaxonId(9606L);
s.setArchive(Archive.BioSamples);
}
return samples;
}
public static List<uk.ac.ebi.subs.data.client.Study> generateTestClientStudies(int numberOfStudiesRequired) {
List<uk.ac.ebi.subs.data.client.Study> studies= new ArrayList<>(numberOfStudiesRequired);
for (int i = 1; i <= numberOfStudiesRequired; i++) {
uk.ac.ebi.subs.data.client.Study s = new uk.ac.ebi.subs.data.client.Study();
studies.add(s);
Attribute studyType = new Attribute();
studyType.setName("study_type");
studyType.setValue("Whole Genome Sequencing");
Term studyFactorTerm = new Term();
studyFactorTerm.setUrl("http:
studyType.getTerms().add(studyFactorTerm);
s.setAlias("Study" + i);
s.setTitle("My Sequencing Study " + i);
s.setDescription("We sequenced some humans to discover variants linked with a disease");
s.setArchive(Archive.Ena);
Attribute studyAbstract = new Attribute();
studyType.setName("study_abstract");
studyType.setValue(s.getDescription());
s.getAttributes().add(studyType);
s.getAttributes().add(studyAbstract);
LocalDate releaseDate = LocalDate.parse("2020-12-25");
s.setReleaseDate(java.sql.Date.valueOf(releaseDate));
}
return studies;
}
public static List<Sample> generateTestSamples(int numberOfSamplesRequired) {
List<Sample> samples = new ArrayList<>(numberOfSamplesRequired);
for (int i = 1; i <= numberOfSamplesRequired; i++) {
Sample s = new Sample();
samples.add(s);
s.setId(createId());
s.setAlias("D" + i);
s.setTitle("Donor " + i);
s.setDescription("Human sample donor");
s.setTaxon("Homo sapiens");
s.setTaxonId(9606L);
s.setProcessingStatus(new ProcessingStatus(ProcessingStatusEnum.Draft));
}
return samples;
}
public static Team generateTestTeam() {
Team d = new Team();
d.setName("my-team");
return d;
}
public static Submission generateTestSubmission() {
Submission sub = new Submission();
Team d = new Team();
sub.setId(createId());
sub.setTeam(generateTestTeam());
sub.setSubmissionStatus(new SubmissionStatus(SubmissionStatusEnum.Draft));
return sub;
}
private static String createId() {
return UUID.randomUUID().toString();
}
} |
package org.apache.solr;
import org.apache.solr.client.solrj.SolrServerException;
import org.junit.BeforeClass;
import org.junit.Test;
/**
* <p> Test disabling components</p>
*
* @version $Id$
* @since solr 1.4
*/
public class TestPluginEnable extends SolrTestCaseJ4 {
@BeforeClass
public static void beforeClass() throws Exception {
initCore("solrconfig-enableplugin.xml", "schema-replication1.xml");
}
@Test
public void testSimple() throws SolrServerException {
assertNull(h.getCore().getRequestHandler("disabled"));
assertNotNull(h.getCore().getRequestHandler("enabled"));
}
} |
package net.dlogic.kryonet.common.entity;
import java.util.HashMap;
import java.util.Map;
public class Room {
public final Map<Integer, User> users = new HashMap<Integer, User>();
public String name;
public int maxUsers;
public boolean isFull() {
return maxUsers == users.size();
}
} |
package net.mcft.copy.core.api;
import net.minecraft.init.Items;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
/**
 * Immutable identity key for an (item, damage, NBT data) triple, suitable
 * for use as a map key: equals() and hashCode() are derived from all three
 * parts. NBT data is defensively copied on construction from a stack.
 */
public class ItemIdentifier {
private final Item item;
private final int damage;
private final NBTTagCompound data;
// hashCode is computed lazily on first use and then cached.
private int hashCode;
private boolean calculatedHashCode = false;
// Since, in my opinion, null data and an empty compound are
// equivalent, ItemIdentifiers should not be created with an
// empty compound. Otherwise, hashcodes might not match even
// though equals returns true.
public ItemIdentifier(Item item, int damage, NBTTagCompound data) {
this.item = item;
this.damage = damage;
this.data = data;
}
// NOTE(review): Items.diamond.getDamage(stack) invokes getDamage via an
// unrelated item instance, presumably to read the stack's raw damage
// value while bypassing any per-item getDamage override — confirm this
// is intentional for the target Minecraft version.
public ItemIdentifier(ItemStack stack) {
this(stack.getItem(), Items.diamond.getDamage(stack),
((stack.hasTagCompound() && !stack.getTagCompound().hasNoTags())
? (NBTTagCompound)stack.getTagCompound().copy() : null));
}
/** Creates an item stack from the identifier with the specified stack size. */
public ItemStack createStack(int size) {
ItemStack stack = new ItemStack(item, size, damage);
// Copy so later mutation of the stack's tag cannot affect this identifier.
if (data != null)
stack.stackTagCompound = (NBTTagCompound)data.copy();
return stack;
}
/** Returns if the identifier matches this item, damage and NBT data.
 *  Null data and an empty tag compound are treated as equivalent. */
public boolean matches(Item item, int damage, NBTTagCompound data) {
return ((item == this.item) && (damage == this.damage) &&
((data == null) ? (this.data == null)
: ((this.data == null) ? data.hasNoTags()
: data.equals(this.data))));
}
/** Returns if the identifier matches this item stack. */
public boolean matches(ItemStack stack) {
return matches(stack.getItem(), Items.diamond.getDamage(stack), stack.getTagCompound());
}
@Override
public int hashCode() {
// Lazily combines item id, damage and (when present) the NBT hash.
// Not thread-safe, but recomputation is idempotent so a race is benign.
if (!calculatedHashCode) {
hashCode = Item.getIdFromItem(item) ^ (damage << 8);
if (data != null) hashCode ^= data.hashCode();
calculatedHashCode = true;
}
return hashCode;
}
@Override
public boolean equals(Object obj) {
if (obj == null || !(obj instanceof ItemIdentifier)) return false;
ItemIdentifier other = (ItemIdentifier)obj;
return matches(other.item, other.damage, other.data);
}
@Override
public String toString() {
return item.getUnlocalizedName() + ":" + damage;
}
}
package nl.tudelft.selfcompileapp;
import java.io.File;
import java.io.FileOutputStream;
import java.security.PrivateKey;
import java.security.cert.X509Certificate;
import com.android.dex.Dex;
import com.android.dx.merge.CollisionPolicy;
import com.android.dx.merge.DexMerger;
import com.android.sdklib.build.ApkBuilder;
import android.content.Context;
import android.content.Intent;
import android.net.Uri;
import kellinwood.security.zipsigner.ProgressEvent;
import kellinwood.security.zipsigner.ZipSigner;
/**
 * Background task that builds this app's own APK on the device:
 * resource compilation (aapt), Java compilation (ECJ BatchCompiler),
 * dexing and dex-merging, APK packaging (ApkBuilder), signing
 * (ZipSigner) and finally launching the system installer via an Intent.
 * Each setMsg(...) call reports a progress step; a true return value
 * means the task was cancelled and the build aborts by returning null.
 */
public class MakeApkTask extends ProgressStatusTask {
public MakeApkTask(Context app) {
// 14 = number of progress steps reported in doInBackground below.
super(app, 14);
}
@Override
protected Intent doInBackground(Object... params) {
try {
if (setMsg("PROCESS INTERFACES")) // TODO make aidl.so
return null;
// DELETE UNSUPPORTED RESOURCES // TODO update aapt.so
Util.deleteRecursive(new File(S.dirRes, "drawable-xxhdpi"));
if (setMsg("PROCESS RESOURCES"))
return null;
// Run aapt to package resources and generate R.java into S.dirGen.
Aapt aapt = new Aapt();
int exitCode = aapt.fnExecute("aapt p -f -v -M " + S.xmlMan.getPath() + " -F " + S.ap_Resources.getPath()
+ " -I " + S.jarAndroid.getPath() + " -A " + S.dirAssets.getPath() + " -S " + S.dirRes.getPath()
+ " -J " + S.dirGen.getPath());
if (exitCode != 0) {
throw new Exception("AAPT exit(" + exitCode + ")");
}
/*
 * strStatus = "INDEXING RESOURCES"; exitCode = aapt.fnExecute(
 * "aapt p -m -v -J " + dirGen.getPath() + " -M " + xmlMan.getPath()
 * + " -S " + dirRes.getPath() + " -I " + jarAndroid.getPath());
 *
 * strStatus = "CRUNCH RESOURCES"; exitCode = aapt.fnExecute(
 * "aapt c -v -S " + dirRes.getPath() + " -C " +
 * dirCrunch.getPath());
 *
 * strStatus = "PACKAGE RESOURCES"; exitCode = aapt .fnExecute(
 * "aapt p -v -S " + dirCrunch.getPath() + " -S " + dirRes.getPath()
 * + " -f --no-crunch --auto-add-overlay --debug-mode -0 apk -M " +
 * xmlBinMan.getPath() + " -A " + dirAssets.getPath() + " -I " +
 * jarAndroid.getPath() + " -F " + ap_Resources.getPath());
 */
if (setMsg("COMPILE SOURCE"))
return null;
// Compile generated + project sources with the embedded Eclipse compiler.
org.eclipse.jdt.core.compiler.batch.BatchCompiler.compile(
new String[] { "-1.5", "-showversion", "-verbose", "-deprecation", "-bootclasspath",
S.getJavaBootClassPath(), "-cp", S.getJavaClassPath(), "-d", S.dirClasses.getPath(),
S.dirGen.getPath(), S.dirSrc.getPath() },
new java.io.PrintWriter(System.out), new java.io.PrintWriter(System.err), new CompileProgress());
if (setMsg("PROCESS DEPENDENCIES"))
return null;
// Dex each library jar once, caching by MD5 of the jar contents.
for (File jarLib : S.dirLibs.listFiles()) {
// skip native libs in sub directories
if (!jarLib.isFile() || !jarLib.getName().endsWith(".jar")) {
continue;
}
// compare hash of jar contents to name of dexed version
String md5 = Util.getMD5Checksum(jarLib);
// check if jar is pre-dexed
File dexLib = new File(S.dirDexedLibs, jarLib.getName().replace(".jar", "-" + md5 + ".jar"));
System.out.println(dexLib.getName());
if (!dexLib.exists()) {
com.android.dx.command.dexer.Main
.main(new String[] { "--verbose", "--output=" + dexLib.getPath(), jarLib.getPath() });
}
}
if (setMsg("INTEGRATE DEPENDENCIES"))
return null;
// dex project classes
com.android.dx.command.dexer.Main
.main(new String[] { "--verbose", "--output=" + S.dexClasses.getPath(), S.dirClasses.getPath() });
// merge pre-dexed libs
for (File dexLib : S.dirDexedLibs.listFiles()) {
Dex merged = new DexMerger(new Dex(S.dexClasses), new Dex(dexLib), CollisionPolicy.FAIL).merge();
merged.writeTo(S.dexClasses);
}
if (setMsg("PACKAGE APP"))
return null;
// Do NOT use embedded JarSigner
// Null key/cert: the APK is built unsigned here and signed later below.
PrivateKey privateKey = null;
X509Certificate x509Cert = null;
ApkBuilder apkbuilder = new ApkBuilder(S.apkUnsigned, S.ap_Resources, S.dexClasses, privateKey, x509Cert,
System.out);
if (setMsg("PACKAGE DEPENDENCIES"))
return null;
apkbuilder.addNativeLibraries(S.dirLibs);
if (setMsg("PACKAGE RESOURCES"))
return null;
for (File jarLib : S.dirLibs.listFiles()) {
// skip native libs in sub directories
if (!jarLib.isFile() || !jarLib.getName().endsWith(".jar")) {
continue;
}
apkbuilder.addResourcesFromJar(jarLib);
}
if (setMsg("COMPRESSING RESOURCES"))
return null;
// Bundle sources/resources/libs into the APK's assets so the app can
// rebuild itself later (self-compiling app).
Util.zip(S.dirSrc, S.zipSrc);
Util.zip(S.dirRes, S.zipRes);
Util.zip(S.dirLibs, S.zipLibs);
Util.zip(S.dirDexedLibs, S.zipDexedLibs);
if (setMsg("PACKAGE RESOURCES"))
return null;
String strAssets = S.dirAssets.getName() + File.separator;
apkbuilder.addFile(S.xmlMan, strAssets + S.xmlMan.getName());
apkbuilder.addFile(S.zipSrc, strAssets + S.zipSrc.getName());
apkbuilder.addFile(S.zipRes, strAssets + S.zipRes.getName());
apkbuilder.addFile(S.zipLibs, strAssets + S.zipLibs.getName());
apkbuilder.addFile(S.zipDexedLibs, strAssets + S.zipDexedLibs.getName());
apkbuilder.setDebugMode(true);
apkbuilder.sealApk();
if (setMsg("PLACE SIGNATURE"))
return null;
if (!app.getString(R.string.keystore).contentEquals(S.jksEmbedded.getName())) {
// TODO use user defined certificate
}
// use embedded private key
String keystorePath = S.jksEmbedded.getPath();
char[] keystorePw = app.getString(R.string.keystorePw).toCharArray();
String certAlias = app.getString(R.string.certAlias);
char[] certPw = app.getString(R.string.certPw).toCharArray();
String signatureAlgorithm = app.getString(R.string.signatureAlgorithm);
ZipSigner zipsigner = new ZipSigner();
zipsigner.addProgressListener(new SignProgress());
kellinwood.security.zipsigner.optional.CustomKeySigner.signZip(zipsigner, keystorePath, keystorePw,
certAlias, certPw, signatureAlgorithm, S.apkUnsigned.getPath(), S.apkUnaligned.getPath());
if (setMsg("OPTIMIZE APP")) // TODO make zipalign.so
return null;
if (setMsg("PREPARE INSTALLATION"))
return null;
// Copy the signed APK to a well-known location and delete any stale copy.
String strAppName = app.getString(R.string.appName);
File apkCopy = new File(S.dirRoot, strAppName + ".apk");
if (apkCopy.exists()) {
apkCopy.delete();
}
Util.copy(S.apkUnaligned, new FileOutputStream(apkCopy));
Uri uriApk = Uri.fromFile(apkCopy);
if (setMsg("LAUNCH INSTALLATION"))
return null;
// Hand the APK to the system package installer.
Intent intent = new Intent(Intent.ACTION_VIEW);
intent.setDataAndType(uriApk, "application/vnd.android.package-archive");
intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
app.startActivity(intent);
} catch (Exception e) {
// NOTE(review): failures are only printed; the task still completes
// "successfully" from the caller's point of view — consider surfacing
// the error through the progress/status mechanism.
e.printStackTrace();
}
return null;
}
// Progress callback required by the ECJ BatchCompiler API; all hooks are
// intentionally no-ops (compilation is not cancellable here).
private class CompileProgress extends org.eclipse.jdt.core.compiler.CompilationProgress {
@Override
public void begin(int arg0) {
}
@Override
public void done() {
}
@Override
public boolean isCanceled() {
// TODO Auto-generated method stub
return false;
}
@Override
public void setTaskName(String arg0) {
}
@Override
public void worked(int arg0, int arg1) {
}
}
// Progress callback required by ZipSigner; signing progress is ignored.
private class SignProgress implements kellinwood.security.zipsigner.ProgressListener {
public void onProgress(ProgressEvent arg0) {
// TODO Auto-generated method stub
}
}
}
package model.physics;
public class Impulse {
private double Jx, Jy;
public Impulse (double Jx, double Jy) {
this.Jx = Jx;
this.Jy = Jy;
}
public Impulse (double magnitude, double direction, boolean flag) {
this.Jx = magnitude * Math.cos(Math.toRadians(direction));
this.Jy = magnitude * Math.sin(Math.toRadians(direction));
}
public double getJx () {
return this.Jx;
}
public double getJy () {
return this.Jy;
}
} |
//@author A0116538A
package bakatxt.gui;
import java.awt.Color;
import java.awt.Dimension;
import java.util.LinkedList;
import javax.swing.BoxLayout;
import javax.swing.JPanel;
import bakatxt.core.Task;
import bakatxt.international.BakaTongue;
/**
* This class does the following:
*
* 1. Draw the alert message (i.e, visual feedback for user's input)
* 2. Draw the date and content for each date
*
*/
// TODO comments
class Contents extends JPanel {
// Sentinel used by Task.getDate() for tasks without a date (the string
// "null", not a null reference) — see isSameDate()/setDayAndDateText().
private static final String MESSAGE_EMPTY = "null";
private static final int DATE_AND_TASKS_START_POSITION = 1;
/**
 * Builds a transparent, vertically-stacked panel and populates it with
 * the given tasks grouped by date.
 *
 * @param tasks the tasks to display, expected sorted by date
 */
public Contents(LinkedList<Task> tasks) {
setOpaque(false);
setBackground(UIHelper.TRANSPARENT);
setLayout(new BoxLayout(this, BoxLayout.Y_AXIS));
LinkedList<Task> tempTasks = tasks;
updateContents(tempTasks);
}
/**
 * Check if tasks is empty, displaying the appropriate events.
 *
 * @param tasks is all the tasks in the LinkedList<Task> we need to add
 */
protected void updateContents(LinkedList<Task> tasks) {
// NOTE(review): the NPE fallback to setNoEvents() below is commented
// out, so an empty/odd task list is no longer handled specially and
// setNoEvents() is currently unused — confirm whether this was
// intentional or should be restored.
//try {
addTasksByDate(tasks);
//} catch (NullPointerException e) {
// setNoEvents();
}
/**
 * Takes a LinkedList<Task> and puts it into the dateAndDay and Events "boxes"
 * Note: this consumes the list — tasks are polled off as they are rendered.
 *
 * @param tasks is all the tasks in the LinkedList<Task> we need to add
 */
private void addTasksByDate(LinkedList<Task> tasks) {
String currentDate;
int y = DATE_AND_TASKS_START_POSITION;
int offset = 0;
while (tasks.peek() != null) {
currentDate = tasks.peek().getDate();
// one date header per group of same-date tasks
setDateAndDay(setDayAndDateText(currentDate));
offset++;
y = addCurrentEvents(getAllTasksInOneDate(tasks), y + 1, offset);
}
}
/**
 * This method takes a LinkedList<Task> and splits that LinkedList by date into
 * smaller LinkedLists
 *
 * @param tasks is the task to be split
 * @return a LinkedList<Task> of the earliest date
 */
private static LinkedList<Task> getAllTasksInOneDate(LinkedList<Task> tasks) {
LinkedList<Task> seperateTasksByDate = new LinkedList<Task>();
String currentDate = tasks.peek().getDate();
// poll (remove) matching tasks so the caller's list shrinks per group
while (isSameDate(tasks, currentDate)) {
seperateTasksByDate.add(tasks.poll());
}
return seperateTasksByDate;
}
/**
 * Checks if the first element of a LinkedList<Task> has the same date as currentDate
 *
 * @param tasks is the LinkedList we are checking
 * @param currentDate is the date we are comparing to
 * @return true if the date is the same, false if it isn't or if the LinkedList
 * is empty.
 */
private static boolean isSameDate(LinkedList<Task> tasks, String currentDate) {
if (tasks.peek() == null) {
return false;
}
String taskDate = tasks.peek().getDate();
// normalise null references to the "null" sentinel before comparing
if (currentDate == null) {
currentDate = MESSAGE_EMPTY;
}
if (taskDate == null) {
taskDate = MESSAGE_EMPTY;
}
return taskDate.equals(currentDate);
}
/**
 * Renders one date-group of tasks, choosing the box style by position
 * (only / first / middle / final) and alternating row colors.
 * taskNumber = y - offset keeps the color alternation continuous across
 * date groups while y also counts the date header rows.
 */
private int addCurrentEvents(LinkedList<Task> tasks, int y, int offset) {
assert (y > 0) : "y must be greater than zero";
assert (offset > 0) : "offset must be at least 1";
int taskNumber = y - offset;
if (tasks.size() == 1) {
setEvents(new OnlyTaskBox(tasks.pop(), taskNumber,
alternatingColors(taskNumber)));
taskNumber++;
} else {
setEvents(new FirstTaskBox(tasks.pop(), taskNumber,
alternatingColors(taskNumber)));
taskNumber++;
while (true) {
if (tasks.size() == 1) {
setEvents(new FinalTaskBox(tasks.pop(), taskNumber,
alternatingColors(taskNumber)));
taskNumber++;
break;
}
setEvents(new MiddleTaskBox(tasks.pop(), taskNumber,
alternatingColors(taskNumber)));
taskNumber++;
}
}
return taskNumber + offset;
}
/** Alternates row background colors by task index for readability. */
private static Color alternatingColors(int taskNumber) {
if (taskNumber % 2 == 0) {
return UIHelper.GRAY_BLACK;
}
return UIHelper.GRAY_DARK;
}
/**
 * @param dateAndDay is the date header to put in the layout
 */
private void setDateAndDay(FormattedText dateAndDay) {
this.add(dateAndDay);
}
/**
 * @param task is the task box to put in the layout
 */
private void setEvents(TaskBox task) {
setTaskBoxSize(task);
this.add(task);
}
// TODO probably a better method to do this
// NOTE(review): currently unreferenced — only reachable via the
// commented-out catch block in updateContents().
private void setNoEvents() {
FormattedText task = new FormattedText("You have no events!", UIHelper.PRESET_TYPE_TITLE,
UIHelper.PRESET_SIZE_TITLE, UIHelper.PRESET_COLOR_TITLE);
this.add(task);
}
// Pins each task box to a fixed 634x100 size inside the BoxLayout.
private static void setTaskBoxSize(TaskBox task) {
task.setMinimumSize(new Dimension(634, 100));
task.setPreferredSize(new Dimension(634, 100));
task.setMaximumSize(new Dimension(634, 100));
}
/**
 * @param dayAndDate is the string to style; the "null" sentinel is shown
 * as the floating-task heading instead
 * @return a day and date FormattedText with the string
 */
private static FormattedText setDayAndDateText(String dayAndDate) {
if (dayAndDate == null || dayAndDate.equals(MESSAGE_EMPTY)) {
dayAndDate = BakaTongue.getString("MESSAGE_FLOATING");
}
return new FormattedText(dayAndDate, UIHelper.PRESET_TYPE_DATE,
UIHelper.PRESET_SIZE_DATE, UIHelper.PRESET_COLOR_DATE);
}
}
package todomore.android;
import com.darwinsys.todo.model.Priority;
import com.darwinsys.todo.model.Task;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.ArrayAdapter;
import android.widget.EditText;
import android.widget.Spinner;
import android.widget.Toast;
public class MainActivity extends Activity {
static String TAG = MainActivity.class.getSimpleName();
EditText addTF;
Spinner prioSpinner;
TaskDao mDao;
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
addTF = (EditText) findViewById(R.id.addTF);
prioSpinner = (Spinner) findViewById(R.id.prioSpinner);
mDao = new TaskDao(this);
ArrayAdapter<CharSequence> adapter = ArrayAdapter.createFromResource(this,
R.array.priorities_array, android.R.layout.simple_spinner_item);
// Specify the layout to use when the list of choices appears
adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
prioSpinner.setAdapter(adapter);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.main_menu, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch(item.getItemId()) {
case R.id.settings_menuitem:
startActivity(new Intent(this, LoginActivity.class));
return true;
case R.id.help_menuitem:
Toast.makeText(this, "Help not written yet", Toast.LENGTH_SHORT).show();
return true;
default:
return super.onOptionsItemSelected(item);
}
}
/** Called from the View when the Add button is pressed */
public void addItem(View v) {
String name = addTF.getText().toString();
Log.d(TAG, "addItem: " + name);
if (name == null || name.length() == 0) {
Toast.makeText(this, "Text required!", Toast.LENGTH_SHORT).show();
return;
}
// Do the work here! Save to local DB, send it to the server...
Task t = new Task();
t.setName(addTF.getText().toString());
t.setPriority(Priority.values()[prioSpinner.getSelectedItemPosition()]);
t.setModified(System.currentTimeMillis());
mDao.insert(t);
// XXX Send to server, or, trigger sync?
// If we get here, remove text so it doesn't get added twice
addTF.setText("");
Toast.makeText(this, "Saved locally", Toast.LENGTH_SHORT).show();
}
} |
package beast.app.util;
import javax.swing.*;
import javax.swing.UIManager.LookAndFeelInfo;
import javax.swing.filechooser.FileNameExtensionFilter;
import beast.app.beauti.BeautiPanel;
import beast.app.beauti.BeautiPanelConfig;
import java.awt.*;
import java.io.*;
import java.lang.reflect.Method;
import java.net.URL;
import java.util.HashSet;
import java.util.Set;
/**
* @author Andrew Rambaut
* @author Alexei Drummond
*/
public class Utils {
/**
* This function takes a file name and an array of extensions (specified
* without the leading '.'). If the file name ends with one of the extensions
* then it is returned with this trimmed off. Otherwise the file name is
* return as it is.
*
* @param fileName String
* @param extensions String[]
* @return the trimmed filename
*/
public static String trimExtensions(String fileName, String[] extensions) {
String newName = null;
for (String extension : extensions) {
final String ext = "." + extension;
if (fileName.toUpperCase().endsWith(ext.toUpperCase())) {
newName = fileName.substring(0, fileName.length() - ext.length());
}
}
return (newName != null) ? newName : fileName;
}
/**
* @param caller Object
* @param name String
* @return a named image from file or resource bundle.
*/
public static Image getImage(Object caller, String name) {
java.net.URL url = caller.getClass().getResource(name);
if (url != null) {
return Toolkit.getDefaultToolkit().createImage(url);
} else {
if (caller instanceof Component) {
Component c = (Component) caller;
Image i = c.createImage(100, 20);
Graphics g = c.getGraphics();
g.drawString("Not found!", 1, 15);
return i;
} else return null;
}
}
public static File getCWD() {
final String f = System.getProperty("user.dir");
return new File(f);
}
public static void loadUIManager() {
boolean lafLoaded = false;
if (isMac()) {
System.setProperty("apple.awt.graphics.UseQuartz", "true");
System.setProperty("apple.awt.antialiasing", "true");
System.setProperty("apple.awt.rendering", "VALUE_RENDER_QUALITY");
System.setProperty("apple.laf.useScreenMenuBar", "true");
System.setProperty("apple.awt.draggableWindowBackground", "true");
System.setProperty("apple.awt.showGrowBox", "true");
try {
try {
// We need to do this using dynamic class loading to avoid other platforms
// having to link to this class. If the Quaqua library is not on the classpath
// it simply won't be used.
Class<?> qm = Class.forName("ch.randelshofer.quaqua.QuaquaManager");
Method method = qm.getMethod("setExcludedUIs", Set.class);
Set<String> excludes = new HashSet<String>();
excludes.add("Button");
excludes.add("ToolBar");
method.invoke(null, excludes);
} catch (Throwable e) {
}
//set the Quaqua Look and Feel in the UIManager
UIManager.setLookAndFeel(
"ch.randelshofer.quaqua.QuaquaLookAndFeel"
);
lafLoaded = true;
} catch (Exception e) {
}
UIManager.put("SystemFont", new Font("Lucida Grande", Font.PLAIN, 13));
UIManager.put("SmallSystemFont", new Font("Lucida Grande", Font.PLAIN, 11));
}
try {
if (!lafLoaded) {
if (System.getProperty("beast.laf") != null && !System.getProperty("beast.laf").equals("")) {
UIManager.setLookAndFeel(System.getProperty("beast.laf"));
} else if (isMac()) {
UIManager.setLookAndFeel("javax.swing.plaf.metal.MetalLookAndFeel");
} else { // If Windows or Linux
try {
UIManager.setLookAndFeel("javax.swing.plaf.nimbus.NimbusLookAndFeel");
} catch (Exception e) {
UIManager.setLookAndFeel("com.sun.java.swing.plaf.gtk.GTKLookAndFeel");
}
}
}
} catch (Exception e) {
}
}
public static boolean isMac() {
return System.getProperty("os.name").toLowerCase().startsWith("mac");
}
public static boolean isWindows() {
return System.getProperty("os.name").toLowerCase().startsWith("windows");
}
public static boolean isLinux() {
return System.getProperty("os.name").toLowerCase().startsWith("linux");
}
public static File getLoadFile(String message) {
return getLoadFile(message, null, null, (String[]) null);
}
public static File getSaveFile(String message) {
return getSaveFile(message, null, null, (String[]) null);
}
public static File getLoadFile(String message, File defaultFileOrDir, String description, final String... extensions) {
File[] files = getFile(message, true, defaultFileOrDir, false, description, extensions);
if (files == null) {
return null;
} else {
return files[0];
}
}
public static File getSaveFile(String message, File defaultFileOrDir, String description, final String... extensions) {
File[] files = getFile(message, false, defaultFileOrDir, false, description, extensions);
if (files == null) {
return null;
} else {
return files[0];
}
}
public static File[] getLoadFiles(String message, File defaultFileOrDir, String description, final String... extensions) {
return getFile(message, true, defaultFileOrDir, true, description, extensions);
}
public static File[] getSaveFiles(String message, File defaultFileOrDir, String description, final String... extensions) {
return getFile(message, false, defaultFileOrDir, true, description, extensions);
}
public static File[] getFile(String message, boolean bLoadNotSave, File defaultFileOrDir, boolean bAllowMultipleSelection, String description, final String... extensions) {
if (isMac()) {
java.awt.Frame frame = new java.awt.Frame();
java.awt.FileDialog chooser = new java.awt.FileDialog(frame, message,
(bLoadNotSave ? java.awt.FileDialog.LOAD : java.awt.FileDialog.SAVE));
if (defaultFileOrDir != null) {
if (defaultFileOrDir.isDirectory()) {
chooser.setDirectory(defaultFileOrDir.getAbsolutePath());
} else {
chooser.setDirectory(defaultFileOrDir.getParentFile().getAbsolutePath());
chooser.setFile(defaultFileOrDir.getName());
}
}
if (description != null) {
FilenameFilter filter = new FilenameFilter() {
@Override
public boolean accept(File dir, String name) {
for (int i = 0; i < extensions.length; i++) {
if (name.toLowerCase().endsWith(extensions[i].toLowerCase())) {
return true;
}
}
return false;
}
};
chooser.setFilenameFilter(filter);
}
// chooser.show();
chooser.setVisible(true);
if (chooser.getFile() == null) return null;
File file = new java.io.File(chooser.getDirectory(), chooser.getFile());
chooser.dispose();
frame.dispose();
return new File[]{file};
} else {
// No file name in the arguments so throw up a dialog box...
java.awt.Frame frame = new java.awt.Frame();
frame.setTitle(message);
final JFileChooser chooser = new JFileChooser(defaultFileOrDir);
chooser.setMultiSelectionEnabled(bAllowMultipleSelection);
//chooser.setFileSelectionMode(JFileChooser.FILES_AND_DIRECTORIES);
if (description != null) {
FileNameExtensionFilter filter = new FileNameExtensionFilter(description, extensions);
chooser.setFileFilter(filter);
}
if (bLoadNotSave) {
if (chooser.showOpenDialog(frame) == JFileChooser.APPROVE_OPTION) {
frame.dispose();
if (bAllowMultipleSelection) {
return chooser.getSelectedFiles();
} else {
if (chooser.getSelectedFile() == null) {
return null;
}
return new File[]{chooser.getSelectedFile()};
}
}
} else {
if (chooser.showSaveDialog(frame) == JFileChooser.APPROVE_OPTION) {
frame.dispose();
if (bAllowMultipleSelection) {
return chooser.getSelectedFiles();
} else {
if (chooser.getSelectedFile() == null) {
return null;
}
return new File[]{chooser.getSelectedFile()};
}
}
}
}
return null;
}
public static String toString(InputStream in) throws IOException {
BufferedReader reader = new BufferedReader(new InputStreamReader(in));
StringBuilder out = new StringBuilder();
String line;
while ((line = reader.readLine()) != null) {
out.append(line);
}
reader.close();
return out.toString();
}
public static ImageIcon getIcon(int iPanel, BeautiPanelConfig config) {
String sIconLocation = BeautiPanel.ICONPATH + iPanel + ".png";
if (config != null) {
sIconLocation = BeautiPanel.ICONPATH + config.getIcon();
}
return Utils.getIcon(sIconLocation);
}
public static ImageIcon getIcon(String sIconLocation) {
try {
URL url = (URL) ClassLoader.getSystemResource(sIconLocation);
if (url == null) {
System.err.println("Cannot find icon " + sIconLocation);
return null;
}
ImageIcon icon = new ImageIcon(url);
return icon;
} catch (Exception e) {
System.err.println("Cannot load icon " + sIconLocation + " " + e.getMessage());
return null;
}
}
} |
//package com.eastpoint.chrysalis;
package org.apache.fop.render.txt;
// FOP
import org.apache.fop.render.PrintRenderer;
import org.apache.fop.render.pcl.*;
import org.apache.fop.messaging.MessageHandler;
import org.apache.fop.apps.FOPException;
import org.apache.fop.fo.properties.*;
import org.apache.fop.layout.*;
import org.apache.fop.layout.inline.*;
import org.apache.fop.datatypes.*;
import org.apache.fop.svg.PathPoint;
import org.apache.fop.pdf.PDFPathPaint;
import org.apache.fop.pdf.PDFColor;
import org.apache.fop.image.*;
import org.apache.fop.dom.svg.SVGArea;
import org.w3c.dom.svg.SVGSVGElement;
// Java
import java.io.IOException;
import java.io.OutputStream;
import java.util.Enumeration;
import java.util.Vector;
/**
* Renderer that renders areas to plain text
*/
public class TXTRenderer extends PrintRenderer
{
/** the current stream to add Text commands to */
PCLStream currentStream;
private int pageHeight = 7920;
// These variables control the virtual paggination functionality.
public int curdiv = 0;
private int divisions = -1;
private int paperheight = -1; // Paper height in decipoints?
public int orientation = -1; // -1=default/unknown, 0=portrait, 1=landscape.
public int topmargin = -1; // Top margin in decipoints?
public int leftmargin = -1; // Left margin in decipoints?
private int fullmargin = 0;
final boolean debug = false;
// Variables for rendering text.
// Per-row line buffers for the current page: charData holds real text,
// decoData holds box/line decoration characters; array index = text row.
StringBuffer charData[];
StringBuffer decoData[];
// Character grid density: characters-per-inch and lines-per-inch used to
// map layout coordinates onto the text grid (see maxX/maxY below).
public float textCPI = 16.67f;
public float textLPI = 8;
// Grid bounds derived from an 8.5" x 11" page at the densities above.
int maxX = (int)(8.5f * textCPI + 1);
int maxY = (int)(11f * textLPI + 1);
float xFactor;
float yFactor;
public String lineEnding = "\r\n"; // Every line except the last line on a page (which will end with pageEnding) will be terminated with this string.
public String pageEnding = "\f"; // Every page except the last one will end with this string.
public boolean suppressGraphics = false; // If true then graphics/decorations will not be rendered - text only.
/**
 * Creates the TXT renderer. All configuration is done through the
 * public fields / setters after construction.
 */
public TXTRenderer()
{
}
/**
 * set the TXT document's producer — intentionally a no-op, since plain
 * text output has no producer metadata to embed.
 *
 * @param producer string indicating the application producing the output
 */
public void setProducer(String producer)
{
}
/**
 * render the areas into plain text
 *
 * @param areaTree the laid-out area tree
 * @param stream the OutputStream to write the text to
 * @throws IOException on write failure
 * @throws FOPException on rendering failure
 */
public void render(AreaTree areaTree, OutputStream stream) throws IOException, FOPException
{
MessageHandler.logln("rendering areas to TEXT");
idReferences=areaTree.getIDReferences();
Enumeration e = areaTree.getPages().elements();
currentStream = new PCLStream(stream);
// Emit pageEnding between pages (not after the last one).
boolean first = true;
while (e.hasMoreElements())
{
if ( first )
first = false;
else
currentStream.add(pageEnding);
this.renderPage((Page) e.nextElement());
}
// Terminate the final line of the last page.
currentStream.add(lineEnding);
if ( !idReferences.isEveryIdValid() )
{
//throw new FOPException("The following id's were referenced but not found: "+idReferences.getInvalidIds()+"\n");
// Unresolved references are reported as a warning rather than a failure.
MessageHandler.errorln("Warning: The following id's were referenced but not found: "+idReferences.getInvalidIds()+"\n");
}
MessageHandler.logln("writing out TEXT");
stream.flush();
}
/**
 * Write a string into the text matrix at the given cell position,
 * overwriting whatever is already there and padding with spaces as needed.
 * The write is clamped so it never extends past column {@code maxX}.
 *
 * @param row target row (clamped at 0)
 * @param col target start column
 * @param str the characters to place
 * @param ischar true for real text (charData), false for decoration (decoData)
 */
void addStr(int row, int col, String str, boolean ischar)
{
    if (debug)
        System.out.println("TXTRenderer.addStr(" + row + ", " + col + ", \"" + str + "\", " + ischar + ")");
    // Decorations can be suppressed wholesale for text-only output.
    if (suppressGraphics && !ischar)
        return;
    if (row < 0)
        row = 0;
    // NOTE(review): row is clamped at 0 but not against the buffer length;
    // assumes callers never exceed the allocated row count — confirm.
    StringBuffer[] target = ischar ? charData : decoData;
    StringBuffer sb = target[row];
    if (sb == null)
        sb = new StringBuffer();
    // Shift left if the write would run past the right edge...
    if (col + str.length() > maxX)
        col = maxX - str.length();
    // ...and truncate if the string itself is wider than the page.
    if (col < 0)
    {
        col = 0;
        if (str.length() > maxX)
            str = str.substring(0, maxX);
    }
    // Pad the row with spaces up to the start column.
    while (sb.length() < col)
        sb.append(' ');
    if (debug)
        System.out.println("TXTRenderer.addStr() sb.length()=" + sb.length());
    // Overwrite existing cells; append once past the end of the buffer.
    for (int i = 0; i < str.length(); i++)
    {
        int pos = col + i;
        if (pos >= sb.length())
            sb.append(str.charAt(i));
        else
        {
            if (debug)
                System.out.println("TXTRenderer.addStr() sb.length()=" + sb.length() + " countr=" + pos);
            sb.setCharAt(pos, str.charAt(i));
        }
    }
    target[row] = sb;
}
/**
 * Add an axis-aligned line by drawing it as a thin filled rectangle.
 * Diagonal lines cannot be represented here and are silently dropped.
 *
 * @param x1 the start x location in millipoints
 * @param y1 the start y location in millipoints
 * @param x2 the end x location in millipoints
 * @param y2 the end y location in millipoints
 * @param th the thickness in millipoints
 * @param stroke the line color
 */
protected void addLine(int x1, int y1, int x2, int y2, int th, PDFPathPaint stroke)
{
    boolean vertical = (x1 == x2);
    boolean horizontal = (y1 == y2);
    if (vertical)
        addRect(x1, y1, th, y2 - y1 + 1, stroke, stroke); // width = thickness
    else if (horizontal)
        addRect(x1, y1, x2 - x1 + 1, th, stroke, stroke); // height = thickness
}
/**
 * Add an axis-aligned rule line with a rule style. Dotted rules are
 * approximated with a light grey; every other style keeps the given
 * stroke colour. Delegates to the 5-argument overload for the drawing.
 *
 * @param x1 the start x location in millipoints
 * @param y1 the start y location in millipoints
 * @param x2 the end x location in millipoints
 * @param y2 the end y location in millipoints
 * @param th the thickness in millipoints
 * @param rs the rule style
 * @param stroke the line color
 */
protected void addLine(int x1, int y1, int x2, int y2, int th, int rs, PDFPathPaint stroke)
{
    PDFColor ruleColor =
        (rs == org.apache.fop.fo.properties.RuleStyle.DOTTED)
            ? new PDFColor(0.7f, 0.7f, 0.7f)
            : (PDFColor) stroke;
    addLine(x1, y1, x2, y2, th, ruleColor);
}
/**
 * Draw an SVG line segment (stub).
 * <p>SVG rendering is not yet implemented for text output, so this method
 * has no effect. A draft implementation (axis-aligned cases via addRect and
 * Bresenham-style stepping for diagonals) previously lived here as a
 * comment; see version control history.
 *
 * @param x1 start x
 * @param y1 start y
 * @param x2 end x
 * @param y2 end y
 * @param sc stroke color
 * @param sw stroke width
 */
protected void addLine(float x1, float y1, float x2, float y2, PDFColor sc, float sw)
{
    // SVG - not yet implemented; intentionally empty.
}
/**
 * Transfer one raster row of line pixels (stub).
 * <p>Part of the unfinished SVG/raster support: the draft implementation
 * encoded one or two bit runs per row as PCL raster-transfer commands,
 * optionally collecting them into {@code save} for later replay. It is not
 * yet enabled, so this method currently does nothing.
 *
 * @param startpos first bit position of the run
 * @param bitcount number of bits in the run
 * @param save if non-null, destination for generated row commands instead of direct output
 * @param start2 start of an optional second run, or negative for none
 */
private void xferLineBytes(int startpos, int bitcount, Vector save, int start2)
{
    // Not yet implemented; intentionally empty.
}
/**
 * Add an unfilled rectangle outline to the output. Rectangles thinner than
 * 720 millipoints in either direction are inflated to that minimum and
 * drawn as a single bar; larger ones are drawn as four edge bars.
 *
 * @param x the x position of left edge in millipoints
 * @param y the y position of top edge in millipoints
 * @param w the width in millipoints
 * @param h the height in millipoints
 * @param stroke the stroke color/gradient
 */
protected void addRect(int x, int y, int w, int h, PDFPathPaint stroke)
{
    final int MIN = 720; // smallest edge thickness this renderer can show
    if (h < 0)
        h = -h;
    if (w >= MIN && h >= MIN)
    {
        // Big enough for a real outline: draw the four edges.
        addRect(x, y, w, MIN, stroke, stroke);           // top
        addRect(x, y, MIN, h, stroke, stroke);           // left
        addRect(x + w - MIN, y, MIN, h, stroke, stroke); // right
        addRect(x, y - h + MIN, w, MIN, stroke, stroke); // bottom
    }
    else
    {
        // Degenerate: inflate to the minimum and draw one solid bar.
        addRect(x, y, Math.max(w, MIN), Math.max(h, MIN), stroke, stroke);
    }
}
/**
* add a filled rectangle to the current stream
*
* @param x the x position of left edge in millipoints
* @param y the y position of top edge in millipoints
* @param w the width in millipoints
* @param h the height in millipoints
* @param fill the fill color/gradient
* @param stroke the stroke color/gradient
*/
protected void addRect(int x, int y, int w, int h, PDFPathPaint stroke, PDFPathPaint fill)
{
//System.out.println("TXTRenderer.addRect(" + x + ", " + y + ", " + w + ", " + h + ", " + r + ", " + g + ", " + b + ", " + fr + ", " + fg + ", " + fb + ")");
if ((w == 0) || (h == 0))
return;
if ( h < 0 )
h *= -1;
int row = (int)((pageHeight - (y / 100))* 100 * yFactor);
int col = (int)(x * xFactor);
PDFColor sc = (PDFColor)stroke;
PDFColor fc = (PDFColor)fill;
sc.setColorSpace(ColorSpace.DEVICE_RGB);
fc.setColorSpace(ColorSpace.DEVICE_RGB);
int lineshade = (int)(100 - ((0.3f * sc.red() + 0.59f * sc.green() + 0.11f * sc.blue()) * 100f));
int fillshade = (int)(100 - ((0.3f * fc.red() + 0.59f * fc.green() + 0.11f * fc.blue()) * 100f));
if ( debug )
System.out.println("TXTRenderer.addRect(" + x + ", " + y + ", " + w + ", " + h + ", " + stroke + ", " + fill + ") fillshade=" + fillshade);
char fillchar = ' ';
if ( fillshade >= 75 )
fillchar = '
else if ( fillshade >= 50 )
fillchar = '*';
else if ( fillshade >= 25 )
fillchar = ':';
if ( fillchar != ' ' )
{
StringBuffer linefill = new StringBuffer();
int sw = (int)(w * xFactor);
int sh = (int)(h * yFactor);
if ( sw == 0 || sh == 0 )
{
if ( fillshade >= 50 )
{
if ( h > w )
fillchar = '|';
else
fillchar = '-';
}
else
{
if ( h > w )
fillchar = ':';
else
fillchar = '.';
}
}
if ( sw == 0 )
linefill.append(fillchar);
else
for ( int countr = 0 ; countr < sw ; countr++ )
linefill.append(fillchar);
if ( sh == 0 )
addStr(row, col, linefill.toString(), false);
else
for ( int countr = 0 ; countr < sh ; countr++ )
addStr(row + countr, col, linefill.toString(), false);
}
if ( lineshade >= 25 )
{
char vlinechar = '|';
char hlinechar = '-';
if ( lineshade < 50 )
{
vlinechar = ':';
hlinechar = '.';
}
StringBuffer linefill = new StringBuffer();
int sw = (int)(w * xFactor);
for ( int countr = 0 ; countr < sw ; countr++ )
linefill.append(hlinechar);
int sh = (int)(h * yFactor);
if ( w > h )
{
for ( int countr = 1 ; countr < (sh - 1); countr++ )
{
addStr(row + countr, col, String.valueOf(vlinechar), false);
addStr(row + countr, col + sw, String.valueOf(vlinechar), false);
}
addStr(row, col, linefill.toString(), false);
addStr(row + sh, col, linefill.toString(), false);
}
else
{
addStr(row, col, linefill.toString(), false);
addStr(row + sh, col, linefill.toString(), false);
for ( int countr = 1 ; countr < (sh - 1); countr++ )
{
addStr(row + countr, col, String.valueOf(vlinechar), false);
addStr(row + countr, col + sw, String.valueOf(vlinechar), false);
}
}
}
}
/**
 * Draw an SVG-style rectangle, optionally with rounded corners (stub).
 * <p>SVG rendering is not yet implemented for text output, so this method
 * has no effect. (The previous javadoc documented parameters r/g/b/fr/fg/fb
 * that do not exist on this signature; corrected here.) An unfinished
 * PCL-raster draft handling fill, stroke and elliptical corners previously
 * lived here as a comment; see version control history.
 *
 * @param x the x position of the left edge
 * @param y the y position of the top edge
 * @param w the width
 * @param h the height
 * @param rx the horizontal corner radius
 * @param ry the vertical corner radius
 * @param fc the fill color
 * @param sc the stroke color
 * @param sw the stroke width
 */
protected void addRect(float x, float y, float w, float h, float rx, float ry,
                       PDFColor fc, PDFColor sc, float sw)
{
    // SVG - not yet implemented; intentionally empty.
}
/**
 * Draw a polyline, or a closed polygon when {@code close} is true, as a
 * chain of straight segments. Fills are not supported yet — only the
 * stroke is drawn (and line drawing itself is currently an SVG stub).
 *
 * @param points Vector of PathPoint vertices
 * @param posx x offset in millipoints
 * @param posy y offset in millipoints
 * @param fc fill color (unused — fills not supported)
 * @param sc stroke color
 * @param sw stroke width
 * @param close true to connect the last vertex back to the first
 */
protected void addPolyline(Vector points, int posx, int posy, PDFColor fc, PDFColor sc, float sw, boolean close)
{
    float offX = posx / 1000;                     // int division, as before
    float offY = (pageHeight / 10) - posy / 1000; // page-relative baseline
    float prevX = 0, prevY = 0;
    float firstX = 0, firstY = 0;
    Enumeration verts = points.elements();
    if (verts.hasMoreElements())
    {
        PathPoint p = (PathPoint) verts.nextElement();
        prevX = firstX = p.x + offX;
        prevY = firstY = offY + p.y;
    }
    while (verts.hasMoreElements())
    {
        PathPoint p = (PathPoint) verts.nextElement();
        float curX = p.x + offX;
        float curY = offY + p.y;
        addLine(prevX, prevY, curX, curY, sc, sw);
        prevX = curX;
        prevY = curY;
    }
    if (close)
        addLine(prevX, prevY, firstX, firstY, sc, sw);
}
/**
 * "Print" a bitmap image. Text output cannot render pixels, so this draws
 * the image's bounding box and places the image URL inside it (right-end
 * of the URL if it is wider than the box, centred otherwise).
 *
 * @return always true
 */
boolean printBMP(FopImage img, int x, int y, int w, int h) throws FopImageException
{
    if ( debug )
        System.out.println("TXTRenderer.printBMP(" + img + ", " + x + ", "
                           + y + ", " + w + ", " + h + ")");
    // Placeholder frame for the image area.
    addRect(x, y, w, h, new PDFColor(1f, 1f, 1f), new PDFColor(0f, 0f, 0f));
    int midRows = (int)(h * yFactor / 2); // vertical midpoint of the box, in rows
    if ( midRows > 0 )
    {
        int boxCols = (int)(w * xFactor);
        if ( boxCols > 4 ) // too narrow to label otherwise
        {
            String iname = img.getURL();
            int topRow = (int)((pageHeight - (y / 100))* 100 * yFactor);
            if ( iname.length() >= boxCols )
                // URL wider than the box: keep its tail.
                addStr(topRow + midRows, (int)(x * xFactor), iname.substring(iname.length() - boxCols), true);
            else
                // Centre the URL horizontally.
                addStr(topRow + midRows, (int)(x * xFactor + (boxCols - iname.length()) / 2), iname, true);
        }
    }
    return(true);
}
/**
 * Render an image area by delegating to {@link #printBMP}; the vertical
 * cursor is advanced past the image first.
 *
 * @param area the image area to render
 */
public void renderImageArea(ImageArea area)
{
    int imgX = this.currentAreaContainerXPosition + area.getXOffset();
    int imgY = this.currentYPosition;
    int imgW = area.getContentWidth();
    int imgH = area.getHeight();
    // Move the cursor below the image before drawing it.
    this.currentYPosition -= imgH;
    try
    {
        printBMP(area.getImage(), imgX, imgY, imgW, imgH);
    }
    catch ( FopImageException e )
    {
        // Report and continue; a bad image must not abort the page.
        MessageHandler.errorln("TXTRenderer.renderImageArea() Error printing BMP (" + e.toString() + ").");
    }
}
/**
 * Load and render an image referenced from SVG content. Bare paths get an
 * explicit {@code file:} scheme first. SVG images recurse into the SVG
 * tree; everything else goes through {@link #printBMP}. Failures are
 * logged and swallowed.
 */
public void renderImage(FontState fontState, String href, float x, float y, float width, float height)
{
    try
    {
        // No scheme present? Treat it as a local file.
        if ( href.indexOf(":") == -1 )
            href = "file:" + href;
        FopImage img = FopImageFactory.Make(href);
        if ( img == null )
            return;
        if ( img instanceof SVGImage )
        {
            // Vector image: descend into its document tree.
            SVGSVGElement svg = ((SVGImage)img).getSVGDocument().getRootElement();
            renderSVG(fontState, svg, (int)x * 1000, (int)y * 1000);
        }
        else
        {
            printBMP(img, (int)x, (int)y, (int)width, (int)height);
        }
    }
    catch ( Exception e )
    {
        // Best effort: a missing/broken image should not abort rendering.
        MessageHandler.errorln("could not add image to SVG: " + href);
    }
}
/**
 * Render a foreign object area by letting the embedded object render
 * itself, then advance the x cursor past it.
 * <p>The alignment, vertical-alignment, scaling and overflow switches are
 * placeholders — every case currently does nothing — kept so the intended
 * structure (and the getter calls) is preserved for future work.
 *
 * @param area the foreign object area to render
 */
public void renderForeignObjectArea(ForeignObjectArea area)
{
    // if necessary need to scale and align the content
    this.currentXPosition = this.currentXPosition + area.getXOffset();
    // FIX: removed a redundant self-assignment of currentYPosition here.
    switch (area.getAlign())
    {
        case TextAlign.START:
            break;
        case TextAlign.END:
            break;
        case TextAlign.CENTER:
        case TextAlign.JUSTIFY:
            break;
    }
    switch (area.getVerticalAlign())
    {
        case VerticalAlign.BASELINE:
            break;
        case VerticalAlign.MIDDLE:
            break;
        case VerticalAlign.SUB:
            break;
        case VerticalAlign.SUPER:
            break;
        case VerticalAlign.TEXT_TOP:
            break;
        case VerticalAlign.TEXT_BOTTOM:
            break;
        case VerticalAlign.TOP:
            break;
        case VerticalAlign.BOTTOM:
            break;
    }
    // in general the content will not be text; align and scale
    switch (area.scalingMethod())
    {
        case Scaling.UNIFORM:
            break;
        case Scaling.NON_UNIFORM:
            break;
    }
    // If the overflow is auto (default), scroll or visible then the
    // contents should not be clipped, since this is considered a
    // printing medium.
    switch (area.getOverflow())
    {
        case Overflow.VISIBLE:
        case Overflow.SCROLL:
        case Overflow.AUTO:
            break;
        case Overflow.HIDDEN:
            break;
    }
    area.getObject().render(this);
    this.currentXPosition += area.getEffectiveWidth();
    // this.currentYPosition -= area.getEffectiveHeight();
}
/**
 * Render the contents of an SVG document (stub).
 * <p>SVG rendering is not yet implemented, so this is a no-op. The draft
 * walked the document's child nodes and dispatched each SVGElement to a
 * renderElement() method (also still disabled).
 */
void renderSVG(FontState fontState, SVGSVGElement svg, int x, int y)
{
    // SVG - not yet implemented; intentionally empty.
}
/**
 * Render an SVG area. Computes the placement and dimensions, then
 * delegates to {@link #renderSVG} — which is currently a stub, so no text
 * output is produced yet. (The PDF renderer's clipping/transform setup is
 * deliberately omitted for text output.)
 *
 * @param area the SVG area to render
 */
public void renderSVGArea(SVGArea area)
{
    if ( debug )
        System.out.println("TXTRenderer.renderSVGArea(" + area + ")");
    int x = this.currentAreaContainerXPosition;
    int y = this.currentYPosition;
    SVGSVGElement svg = area.getSVGDocument().getRootElement();
    // Dimensions in millipoints. Currently unused because rendering is
    // stubbed, but the getter calls are kept to match existing behavior.
    int w = (int)(svg.getWidth().getBaseVal().getValue() * 1000);
    int h = (int)(svg.getHeight().getBaseVal().getValue() * 1000);
    // TODO: translate and clip to the SVG viewbox once implemented.
    renderSVG(area.getFontState(), svg, x, y);
}
/* SVG - Not yet implemented
public void renderElement(FontState fontState, SVGElement area, int posx, int posy)
{
if ( debug )
System.out.println("TXTRenderer.renderElement(" + fontState + ", " + area + ", " + posx + ", " + posy + ")");
int x = posx;
int y = posy;
CSSStyleDeclaration style = null;
if ( area instanceof SVGStylable )
style = ((SVGStylable)area).getStyle();
PDFColor fillColour = null;
PDFColor strokeColour = null;
float strokeWidth = 0;
//currentStream.add("q\n");
//if( area instanceof SVGTransformable )
//{
// SVGTransformable tf = (SVGTransformable)area;
// SVGAnimatedTransformList trans = tf.getTransform();
// SVGRect bbox = tf.getBBox();
// if(trans != null) {
// applyTransform(trans, bbox);
// }
//}
if(style != null)
{
CSSValue sp = style.getPropertyCSSValue("fill");
if(sp != null)
{
if( sp.getValueType() == CSSValue.CSS_PRIMITIVE_VALUE )
{
if( ((CSSPrimitiveValue)sp).getPrimitiveType() == CSSPrimitiveValue.CSS_RGBCOLOR )
{
RGBColor col = ((CSSPrimitiveValue)sp).getRGBColorValue();
CSSPrimitiveValue val;
val = col.getRed();
float red = val.getFloatValue(CSSPrimitiveValue.CSS_NUMBER);
val = col.getGreen();
float green = val.getFloatValue(CSSPrimitiveValue.CSS_NUMBER);
val = col.getBlue();
float blue = val.getFloatValue(CSSPrimitiveValue.CSS_NUMBER);
fillColour = new PDFColor(red, green, blue);
}
}
//if(sp instanceof ColorType)
//{
// ColorType ct = (ColorType)sp;
// fillColour = new PDFColor(ct.red(), ct.green(), ct.blue());
//}
}
else
fillColour = new PDFColor(0, 0, 0);
sp = style.getPropertyCSSValue("stroke");
if(sp != null)
{
if( sp.getValueType() == CSSValue.CSS_PRIMITIVE_VALUE )
{
if( ((CSSPrimitiveValue)sp).getPrimitiveType() == CSSPrimitiveValue.CSS_RGBCOLOR )
{
RGBColor col = ((CSSPrimitiveValue)sp).getRGBColorValue();
CSSPrimitiveValue val;
val = col.getRed();
float red = val.getFloatValue(CSSPrimitiveValue.CSS_NUMBER);
val = col.getGreen();
float green = val.getFloatValue(CSSPrimitiveValue.CSS_NUMBER);
val = col.getBlue();
float blue = val.getFloatValue(CSSPrimitiveValue.CSS_NUMBER);
strokeColour = new PDFColor(red, green, blue);
}
}
//if(sp instanceof ColorType)
//{
// ColorType ct = (ColorType)sp;
// strokeColour = new PDFColor(ct.red(), ct.green(), ct.blue());
//}
}
sp = style.getPropertyCSSValue("stroke-width");
if(sp != null && sp.getValueType() == CSSValue.CSS_PRIMITIVE_VALUE)
{
strokeWidth = ((CSSPrimitiveValue)sp).getFloatValue(CSSPrimitiveValue.CSS_PT);
//PDFNumber pdfNumber = new PDFNumber();
//currentStream.add(pdfNumber.doubleOut(width) + " w\n");
//strokeWidth = ((SVGLengthImpl)sp).getValue();
}
else
strokeWidth = 1;
}
if (area instanceof SVGRectElement)
{
SVGRectElement rg = (SVGRectElement)area;
float rectx = rg.getX().getBaseVal().getValue() + posx / 1000;
float recty = ((pageHeight / 10) - posy/1000) + rg.getY().getBaseVal().getValue();
float rx = rg.getRx().getBaseVal().getValue();
float ry = rg.getRy().getBaseVal().getValue();
float rw = rg.getWidth().getBaseVal().getValue();
float rh = rg.getHeight().getBaseVal().getValue();
addRect(rectx, recty, rw, rh, rx, ry, fillColour, strokeColour, strokeWidth);
}
else if (area instanceof SVGLineElement)
{
SVGLineElement lg = (SVGLineElement)area;
float x1 = lg.getX1().getBaseVal().getValue() + posx / 1000;
float y1 = ((pageHeight / 10) - posy/1000) + lg.getY1().getBaseVal().getValue();
float x2 = lg.getX2().getBaseVal().getValue() + posx / 1000;
float y2 = ((pageHeight / 10) - posy/1000) + lg.getY2().getBaseVal().getValue();
addLine(x1,y1,x2,y2, strokeColour, strokeWidth);
}
else if (area instanceof SVGTextElementImpl)
{
//currentStream.add("BT\n");
renderText(fontState, (SVGTextElementImpl)area, posx / 1000f, ((float)(pageHeight / 10) - posy/1000f));
//currentStream.add("ET\n");
}
else if (area instanceof SVGCircleElement)
{
SVGCircleElement cg = (SVGCircleElement)area;
float cx = cg.getCx().getBaseVal().getValue() + posx / 1000;
float cy = ((pageHeight / 10) - posy/1000) + cg.getCy().getBaseVal().getValue();
float r = cg.getR().getBaseVal().getValue();
//addCircle(cx,cy,r, di);
addRect(cx - r, cy - r, 2 * r, 2 * r, r, r, fillColour, strokeColour, strokeWidth);
}
else if (area instanceof SVGEllipseElement)
{
SVGEllipseElement cg = (SVGEllipseElement)area;
float cx = cg.getCx().getBaseVal().getValue() + posx / 1000;
float cy = ((pageHeight / 10) - posy/1000) + cg.getCy().getBaseVal().getValue();
float rx = cg.getRx().getBaseVal().getValue();
float ry = cg.getRy().getBaseVal().getValue();
//addEllipse(cx,cy,rx,ry, di);
addRect(cx - rx, cy - ry, 2 * rx, 2 * ry, rx, ry, fillColour, strokeColour, strokeWidth);
}
else if (area instanceof SVGPathElementImpl)
{
//addPath(((SVGPathElementImpl)area).pathElements, posx, posy, di);
}
else if (area instanceof SVGPolylineElementImpl)
{
addPolyline(((SVGPolylineElementImpl)area).points, posx, posy, fillColour, strokeColour, strokeWidth, false);
}
else if (area instanceof SVGPolygonElementImpl)
{
addPolyline(((SVGPolylineElementImpl)area).points, posx, posy, fillColour, strokeColour, strokeWidth, true);
}
else if (area instanceof SVGGElementImpl)
{
renderGArea(fontState, (SVGGElementImpl)area, x, y);
}
else if(area instanceof SVGUseElementImpl)
{
SVGUseElementImpl ug = (SVGUseElementImpl)area;
String ref = ug.link;
ref = ref.substring(1, ref.length());
SVGElement graph = null;
//GraphicImpl graph = null;
//graph = area.locateDef(ref);
if(graph != null) {
// probably not the best way to do this, should be able
// to render without the style being set.
//GraphicImpl parent = graph.getGraphicParent();
//graph.setParent(area);
// need to clip (if necessary) to the use area
// the style of the linked element is as if is was
// a direct descendant of the use element.
renderElement(fontState, graph, posx, posy);
//graph.setParent(parent);
}
}
else if (area instanceof SVGImageElementImpl)
{
SVGImageElementImpl ig = (SVGImageElementImpl)area;
renderImage(fontState, ig.link, ig.x, ig.y, ig.width, ig.height);
}
else if (area instanceof SVGSVGElement)
{
// the x and y pos will be wrong!
renderSVG(fontState, (SVGSVGElement)area, x, y);
}
else if (area instanceof SVGAElement)
{
SVGAElement ael = (SVGAElement)area;
org.w3c.dom.NodeList nl = ael.getChildNodes();
for ( int count = 0 ; count < nl.getLength() ; count++ )
{
org.w3c.dom.Node n = nl.item(count);
if ( n instanceof SVGElement )
{
if ( n instanceof GraphicElement )
{
SVGRect rect = ((GraphicElement)n).getBBox();
if ( rect != null )
{
// currentAnnotList = this.pdfDoc.makeAnnotList();
// currentPage.setAnnotList(currentAnnotList);
// String dest = linkSet.getDest();
// int linkType = linkSet.getLinkType();
// currentAnnotList.addLink(
// this.pdfDoc.makeLink(lrect.getRectangle(), dest, linkType));
// currentAnnotList = null;
// }
}
renderElement(fontState, (SVGElement)n, posx, posy);
}
}
}
else if ( area instanceof SVGSwitchElement )
{
handleSwitchElement(fontState, posx, posy, (SVGSwitchElement)area);
}
// should be done with some cleanup code, so only
// required values are reset.
//currentStream.add("Q\n");
}
*/
/**
 * Select a font (no-op): plain-text output has a single fixed-pitch
 * "font", so the requested name and size are ignored.
 *
 * @param name font name (ignored)
 * @param size font size (ignored)
 */
private void setFont(String name, float size)
{
    // Intentionally empty.
}
/* SVG - Not implemented yet.
public void renderText(FontState fontState, SVGTextElementImpl tg, float x, float y)
{
PDFNumber pdfNumber = new PDFNumber();
CSSStyleDeclaration styles;
styles = tg.getStyle();
//applyStyle(tg, styles);
// apply transform
// text has a Tm and need to handle each element
SVGTransformList trans = tg.getTransform().getBaseVal();
SVGMatrix matrix = trans.consolidate().getMatrix();
String transstr = (pdfNumber.doubleOut(matrix.getA())
+ " " + pdfNumber.doubleOut(matrix.getB())
+ " " + pdfNumber.doubleOut(matrix.getC())
+ " " + pdfNumber.doubleOut(-matrix.getD()) + " ");
String fontFamily = null;
CSSValue sp = styles.getPropertyCSSValue("font-family");
if ( sp != null && sp.getValueType() == CSSValue.CSS_PRIMITIVE_VALUE )
{
if ( ((CSSPrimitiveValue)sp).getPrimitiveType() == CSSPrimitiveValue.CSS_STRING )
fontFamily = sp.getCssText();
}
if ( fontFamily == null )
fontFamily = fontState.getFontFamily();
String fontStyle = null;
sp = styles.getPropertyCSSValue("font-style");
if ( sp != null && sp.getValueType() == CSSValue.CSS_PRIMITIVE_VALUE )
{
if ( ((CSSPrimitiveValue)sp).getPrimitiveType() == CSSPrimitiveValue.CSS_STRING )
fontStyle = sp.getCssText();
}
if ( fontStyle == null )
fontStyle = fontState.getFontStyle();
String fontWeight = null;
sp = styles.getPropertyCSSValue("font-weight");
if( sp != null && sp.getValueType() == CSSValue.CSS_PRIMITIVE_VALUE )
{
if ( ((CSSPrimitiveValue)sp).getPrimitiveType() == CSSPrimitiveValue.CSS_STRING )
fontWeight = sp.getCssText();
}
if( fontWeight == null )
fontWeight = fontState.getFontWeight();
float fontSize;
sp = styles.getPropertyCSSValue("font-size");
if( sp != null && sp.getValueType() == CSSValue.CSS_PRIMITIVE_VALUE )
{
// if(((CSSPrimitiveValue)sp).getPrimitiveType() == CSSPrimitiveValue.CSS_NUMBER) {
fontSize = ((CSSPrimitiveValue)sp).getFloatValue(CSSPrimitiveValue.CSS_PT);
// }
}
else
{
fontSize = fontState.getFontSize() / 1000f;
}
FontState fs = fontState;
try
{
fs = new FontState(fontState.getFontInfo(), fontFamily, fontStyle,
fontWeight, (int)(fontSize * 1000));
}
catch( Exception fope )
{
// fope.printStackTrace();
}
//currentStream.add("/" + fs.getFontName() + " " + fontSize + " Tf\n");
setFont(fs.getFontName(), fontSize * 1000);
float tx = tg.x;
float ty = tg.y;
float currentX = x + tx;
float currentY = y + ty;
Vector list = tg.textList;
for ( Enumeration e = list.elements() ; e.hasMoreElements() ; )
{
Object o = e.nextElement();
styles = tg.getStyle();
//applyStyle(tg, styles);
if( o instanceof String )
{
String str = (String)o;
//currentStream.add(transstr
// + (currentX + matrix.getE()) + " "
// + (y+ty + matrix.getF()) + " Tm "
// + "(");
boolean spacing = "preserve".equals(tg.getXMLspace());
//currentX = addSVGStr(fs, currentX, str, spacing);
//currentStream.add(") Tj\n");
// for(int count = 0; count < str.length(); count++) {
// }
// currentX += fs.width(' ') / 1000f;
currentStream.add("\033&a" + (currentX + matrix.getE())*10 + "h" + (y+ty + matrix.getF())*10 + "V" + str);
for ( int count = 0; count < str.length(); count++ )
{
currentX += fs.width(str.charAt(count)) / 1000f;
}
currentX += fs.width(' ') / 1000f;
} else if(o instanceof SVGTextPathElementImpl) {
SVGTextPathElementImpl tpg = (SVGTextPathElementImpl)o;
String ref = tpg.str;
SVGElement graph = null;
// graph = tpg.locateDef(ref);
if(graph != null && graph instanceof SVGPathElementImpl) {
// probably not the best way to do this, should be able
// to render without the style being set.
// GraphicImpl parent = graph.getGraphicParent();
// graph.setParent(tpg);
// set text path??
// how should this work
// graph.setParent(parent);
}
} else if(o instanceof SVGTRefElementImpl) {
SVGTRefElementImpl trg = (SVGTRefElementImpl)o;
String ref = trg.ref;
ref = ref.substring(1, ref.length());
SVGElement graph = null;
// graph = trg.locateDef(ref);
if(graph != null && graph instanceof SVGTextElementImpl) {
// GraphicImpl parent = graph.getGraphicParent();
// graph.setParent(trg);
SVGTextElementImpl te = (SVGTextElementImpl)graph;
renderText(fs, te, (int)(x + tx), (int)(y + ty));
// graph.setParent(parent);
}
} else if(o instanceof SVGTSpanElementImpl) {
SVGTSpanElementImpl tsg = (SVGTSpanElementImpl)o;
styles = tsg.getStyle();
//applyStyle(tsg, styles);
boolean changed = false;
String newprop = null;
sp = styles.getPropertyCSSValue("font-family");
if(sp != null && sp.getValueType() == CSSValue.CSS_PRIMITIVE_VALUE) {
if(((CSSPrimitiveValue)sp).getPrimitiveType() == CSSPrimitiveValue.CSS_STRING) {
newprop = sp.getCssText();
}
}
if(newprop != null && !newprop.equals(fontFamily)) {
fontFamily = newprop;
changed = true;
}
sp = styles.getPropertyCSSValue("font-style");
if(sp != null && sp.getValueType() == CSSValue.CSS_PRIMITIVE_VALUE) {
if(((CSSPrimitiveValue)sp).getPrimitiveType() == CSSPrimitiveValue.CSS_STRING) {
newprop = sp.getCssText();
}
}
if(newprop != null && !newprop.equals(fontStyle)) {
fontStyle = newprop;
changed = true;
}
sp = styles.getPropertyCSSValue("font-weight");
if(sp != null && sp.getValueType() == CSSValue.CSS_PRIMITIVE_VALUE) {
if(((CSSPrimitiveValue)sp).getPrimitiveType() == CSSPrimitiveValue.CSS_STRING) {
newprop = sp.getCssText();
}
}
if(newprop != null && !newprop.equals(fontWeight)) {
fontWeight = newprop;
changed = true;
}
float newSize = fontSize;
sp = styles.getPropertyCSSValue("font-size");
if(sp != null && sp.getValueType() == CSSValue.CSS_PRIMITIVE_VALUE) {
// if(((CSSPrimitiveValue)sp).getPrimitiveType() == CSSPrimitiveValue.CSS_NUMBER) {
newSize = ((CSSPrimitiveValue)sp).getFloatValue(CSSPrimitiveValue.CSS_PT);
// }
}
if ( fontSize != newSize )
{
fontSize = newSize;
changed = true;
}
FontState oldfs = null;
if ( changed )
{
oldfs = fs;
try
{
fs = new FontState(fontState.getFontInfo(), fontFamily, fontStyle,
fontWeight, (int)(fontSize * 1000));
}
catch(Exception fope)
{
}
setFont(fs.getFontName(), fontSize * 1000);
//currentStream.add("/" + fs.getFontName() + " " + fontSize + " Tf\n");
}
float baseX;
float baseY;
StringBuffer pdf = new StringBuffer();
boolean spacing = "preserve".equals(tsg.getXMLspace());
boolean inbetween = false;
boolean addedspace = false;
int charPos = 0;
float xpos = currentX;
float ypos = currentY;
for ( int i=0 ; i < tsg.str.length() ; i++ )
{
char ch = tsg.str.charAt(i);
xpos = currentX;
ypos = currentY;
if ( tsg.ylist.size() > charPos )
ypos = y + ty + ((Float)tsg.ylist.elementAt(charPos)).floatValue();
if ( tsg.dylist.size() > charPos )
ypos = ypos + ((Float)tsg.dylist.elementAt(charPos)).floatValue();
if ( tsg.xlist.size() > charPos )
xpos = x + tx + ((Float)tsg.xlist.elementAt(charPos)).floatValue();
if ( tsg.dxlist.size() > charPos )
xpos = xpos + ((Float)tsg.dxlist.elementAt(charPos)).floatValue();
switch (ch)
{
case ' ':
case ' ':
if ( spacing )
{
currentX = xpos + fs.width(' ') / 1000f;
currentY = ypos;
charPos++;
}
else
{
if ( inbetween && !addedspace)
{
addedspace = true;
currentX = xpos + fs.width(' ') / 1000f;
currentY = ypos;
charPos++;
}
}
break;
case '\n':
case '\r':
if ( spacing )
{
currentX = xpos + fs.width(' ') / 1000f;
currentY = ypos;
charPos++;
}
break;
default:
addedspace = false;
pdf = pdf.append(transstr
+ (xpos + matrix.getE()) + " "
+ (ypos + matrix.getF()) + " Tm "
+ "(" + ch + ") Tj\n");
pdf = pdf.append("\033&a" + (xpos + matrix.getE())*10 + "h" + (ypos + matrix.getF())*10 + "V" + ch);
currentX = xpos + fs.width(ch) / 1000f;
currentY = ypos;
charPos++;
inbetween = true;
break;
}
//currentStream.add(pdf.toString());
}
// currentX += fs.width(' ') / 1000f;
if ( changed )
{
fs = oldfs;
setFont(fs.getFontName(), fs.getFontSize() * 1000);
//currentStream.add("/" + fs.getFontName() + " " + fs.getFontSize() / 1000f + " Tf\n");
}
}
else
{
System.err.println("Error: unknown text element " + o);
}
}
}
*/
/* SVG - Not yet implemented
public void renderGArea(FontState fontState, SVGGElement area, int posx, int posy)
{
NodeList nl = area.getChildNodes();
for ( int count = 0 ; count < nl.getLength() ; count++ )
{
Node n = nl.item(count);
if ( n instanceof SVGElement )
renderElement(fontState, (SVGElement)n, posx, posy);
}
}
*/
/**
 * Renders a single word area as plain text at the current cursor position.
 *
 * If the area is a page-number reference it is resolved through
 * {@code idReferences} first (falling back to the empty string when the page
 * number is not yet known); otherwise the area's own text is used. The text is
 * placed on the character grid via {@code addStr} and the horizontal cursor is
 * advanced by the area's content width.
 *
 * @param area inline area to render
 */
public void renderWordArea(WordArea area)
{
    //System.out.println("TXTRenderer.renderInlineArea: currentXPosition=" + this.currentXPosition + " currentYPosition=" + this.currentYPosition + " text=" + area.getText());
    final int penX = this.currentXPosition;
    final int baseline = this.currentYPosition;
    String text;
    if (area.getPageNumberID() == null) {
        text = area.getText();
    } else {
        // This word is a page-number reference; resolve it now.
        text = idReferences.getPageNumber(area.getPageNumberID());
        if (text == null) {
            text = "";
        }
    }
    if (debug) {
        System.out.println("TXTRenderer.renderInlineArea: rx=" + penX + " bl=" + baseline + " pageHeight=" + pageHeight);
    }
    // Map the millipoint coordinates onto the character grid (row, column).
    int row = (int) ((pageHeight - (baseline / 100)) * 100 * yFactor) - 1;
    int col = (int) (penX * xFactor);
    addStr(row, col, text, true);
    this.currentXPosition += area.getContentWidth();
}
/**
 * Advances the horizontal text cursor by the width of an inline space.
 *
 * @param space space to render; its size is added to the current X position
 */
public void renderInlineSpace(InlineSpace space)
{
    final int advance = space.getSize();
    currentXPosition += advance;
}
/**
 * Renders one laid-out page into the plain-text output stream.
 *
 * The page is rasterized onto a character grid sized from the configured
 * characters/lines per inch. Text and decoration characters are accumulated
 * into the per-row {@code charData}/{@code decoData} buffers by the area
 * renderers, then merged row by row and written to {@code currentStream}.
 *
 * @param page page to render
 */
public void renderPage(Page page)
{
if ( debug )
System.out.println("TXTRenderer.renderPage() page.getHeight() = " + page.getHeight());
BodyAreaContainer body;
AreaContainer before, after, start, end;
// Grid size in characters: page width/height appear to be in millipoints
// (divided by 72000 per inch) scaled by chars/lines per inch; +1 adds a
// final partial cell. -- NOTE(review): unit assumption, confirm against Page.
maxX = (int)(textCPI * page.getWidth() / 72000 + 1);
maxY = (int)(textLPI * page.getHeight() / 72000 + 1);
// Conversion factors from page coordinates to grid columns/rows.
xFactor = (float)(maxX - 1) / (float)page.getWidth();
yFactor = (float)(maxY - 1) / (float)page.getHeight();
// Fresh row buffers for this page: charData holds text characters,
// decoData holds decoration characters; they are merged when written out.
charData = new StringBuffer[maxY + 1];
decoData = new StringBuffer[maxY + 1];
// An explicitly configured paper height overrides the page's own height.
if ( paperheight > 0 )
pageHeight = paperheight;
else
pageHeight = page.getHeight() / 100;
if ( debug )
System.out.println("TXTRenderer.renderPage() maxX=" + maxX + " maxY=" + maxY + " xFactor=" + xFactor + " yFactor=" + yFactor + " paperHeight=" + pageHeight);
body = page.getBody();
before = page.getBefore();
after = page.getAfter();
start = page.getStart();
end = page.getEnd();
// Reset the current font state so the first rendered word re-selects it.
this.currentFontName = "";
this.currentFontSize = 0;
//currentStream.add("BT\n");
// Render the body first, then the four optional edge regions if present.
renderBodyAreaContainer(body);
if (before != null)
renderAreaContainer(before);
if (after != null)
renderAreaContainer(after);
if (start != null)
renderAreaContainer(start);
if (end != null)
renderAreaContainer(end);
// Write out the buffers.
for ( int row = 0 ; row <= maxY ; row++ )
{
StringBuffer cr = charData[row];
StringBuffer dr = decoData[row];
StringBuffer outr = null;
// Merge text and decoration rows: a non-space text character wins over
// the decoration character at the same column; otherwise the decoration
// character (or a space) is emitted.
if ( cr != null && dr == null )
outr = cr;
else if ( dr != null && cr == null )
outr = dr;
else if ( cr != null && dr != null )
{
int len = dr.length();
if ( cr.length() > len )
len = cr.length();
outr = new StringBuffer();
for ( int countr = 0 ; countr < len ; countr++ )
{
if ( countr < cr.length() && cr.charAt(countr) != ' ' )
outr.append(cr.charAt(countr));
else if ( countr < dr.length() )
outr.append(dr.charAt(countr));
else
outr.append(' ');
}
}
if ( outr != null )
currentStream.add(outr.toString());
// Line terminator after every row except the last.
if ( row < maxY )
currentStream.add(lineEnding);
}
// End page.
//if ( ++curdiv == divisions || divisions == -1 )
// curdiv = 0;
// currentStream.add("\f");
// Links, etc not implemented...
/*
currentPage = this.pdfDoc.makePage(this.pdfResources, currentStream,
page.getWidth()/1000,
page.getHeight()/1000, page);
if (page.hasLinks()) {
currentAnnotList = this.pdfDoc.makeAnnotList();
currentPage.setAnnotList(currentAnnotList);
Enumeration e = page.getLinkSets().elements();
while (e.hasMoreElements()) {
LinkSet linkSet = (LinkSet) e.nextElement();
linkSet.align();
String dest = linkSet.getDest();
int linkType = linkSet.getLinkType();
Enumeration f = linkSet.getRects().elements();
while (f.hasMoreElements()) {
LinkedRectangle lrect = (LinkedRectangle) f.nextElement();
currentAnnotList.addLink(
this.pdfDoc.makeLink(lrect.getRectangle(), dest, linkType));
}
}
} else {
// just to be on the safe side
currentAnnotList = null;
}
*/
}
} |
package bepler.seq.svm;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Random;
import java.util.Scanner;
import java.util.Set;
public class Parse {
public static final double DEFAULT_TERMINATION_EPSILON = 0.01;
public static final int DEFAULT_CROSS_VALIDATION = 5;
public static char[] asCharArray(Collection<Character> col){
char[] array = new char[col.size()];
int index = 0;
for(char c : col){
array[index++] = c;
}
return array;
}
public static double[] asDoubleArray(Collection<Double> col){
double[] array = new double[col.size()];
int index = 0;
for(double d : col){
array[index++] = d;
}
return array;
}
public static int[] asIntArray(Collection<Integer> col){
int[] array = new int[col.size()];
int index = 0;
for(int i : col){
array[index++] = i;
}
return array;
}
public static String usage(){
return "Usage: seqsvm Seqs_File Features_File Epsilon_File C_File [Cross_Validation_K] [Terminator_Epsilon] [Intermediaries_Dir]";
}
private final List<String> seqs = new ArrayList<String>();
private final List<Double> vals = new ArrayList<Double>();
private final char[] alphabet;
private final int[] kmers;
private final double[] ps;
private final double[] cs;
private int seqLen;
private int k = DEFAULT_CROSS_VALIDATION;
private double term = DEFAULT_TERMINATION_EPSILON;
private File intermediariesDir = null;
public Parse(String[] args){
if(args.length < 4 || args.length > 7){
System.err.println(usage());
throw new Error();
}
try{
alphabet = parseSequences(args[0]);
kmers = parseInts(args[1]);
ps = parseDoubles(args[2]);
cs = parseDoubles(args[3]);
if(args.length == 7){
k = Integer.parseInt(args[4]);
term = Double.parseDouble(args[5]);
intermediariesDir = new File(args[6]);
}else if(args.length == 6){
try{
k = Integer.parseInt(args[4]);
term = Double.parseDouble(args[5]);
}catch(Exception e){
try{
term = Double.parseDouble(args[4]);
intermediariesDir = new File(args[5]);
} catch (Exception e2){
k = Integer.parseInt(args[4]);
intermediariesDir = new File(args[5]);
}
}
}else if(args.length == 5){
try{
k = Integer.parseInt(args[4]);
}catch(Exception e){
try{
term = Double.parseDouble(args[4]);
} catch (Exception e2){
intermediariesDir = new File(args[4]);
}
}
}
} catch(Exception e){
System.err.println(usage());
throw new Error(e);
}
}
private String arrayToString(double[] array){
String s = "";
for(double d : array){
s += d + " ";
}
return s;
}
private String arrayToString(int[] array){
String s = "";
for(int i : array){
s += i + " ";
}
return s;
}
public void execute(){
System.err.println("Building model for kmers: "+arrayToString(kmers));
System.err.println("Using epsilons: "+arrayToString(ps));
System.err.println("Using Cs: "+arrayToString(cs));
System.err.println("Using K: "+k);
System.err.println("Using terminal epsilon: "+term);
System.err.println("Sequence length: "+seqLen);
System.err.println("Total sequences: "+seqs.size());
SeqSVMTrainer trainer = new SeqSVMTrainer(seqLen, kmers, alphabet, ps, cs);
trainer.setVerbose(true);
SeqSVMModel model = trainer.train(seqs, vals, k, new Random(), term, intermediariesDir);
model.write(System.out);
}
private int[] parseInts(String file) throws FileNotFoundException{
File f = new File(file);
Scanner s = new Scanner(f);
List<Integer> ints = new ArrayList<Integer>();
while(s.hasNextInt()){
ints.add(s.nextInt());
}
s.close();
return asIntArray(ints);
}
private double[] parseDoubles(String file) throws FileNotFoundException{
File f = new File(file);
Scanner s = new Scanner(f);
List<Double> vals = new ArrayList<Double>();
while(s.hasNextDouble()){
vals.add(s.nextDouble());
}
s.close();
return asDoubleArray(vals);
}
private char[] parseSequences(String file) throws IOException{
File f = new File(file);
Set<Character> alphabet = new HashSet<Character>();
BufferedReader reader = new BufferedReader(new FileReader(f));
seqLen = -1;
String line;
try {
while((line = reader.readLine()) != null){
String[] tokens = line.split("\\s+");
String seq = tokens[0];
for(char c : seq.toCharArray()){
alphabet.add(c);
}
if(seqLen == -1){
seqLen = seq.length();
}else if(seqLen != seq.length()){
throw new Error("Sequences must all be of same length.");
}
seqs.add(seq);
vals.add(Double.parseDouble(tokens[1]));
}
} catch (NumberFormatException e) {
throw e;
} catch (IOException e) {
throw e;
}finally{
try {
reader.close();
} catch (IOException e) {
throw e;
}
}
return asCharArray(alphabet);
}
} |
package org.broad.igv.util;
import org.apache.http.*;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.Credentials;
import org.apache.http.auth.NTCredentials;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.auth.params.AuthPNames;
import org.apache.http.client.ResponseHandler;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpHead;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.methods.HttpRequestBase;
import org.apache.http.client.params.AuthPolicy;
import org.apache.http.conn.ClientConnectionManager;
import org.apache.http.conn.params.ConnRoutePNames;
import org.apache.http.conn.scheme.Scheme;
import org.apache.http.conn.scheme.SchemeRegistry;
import org.apache.http.conn.ssl.SSLSocketFactory;
import org.apache.http.entity.FileEntity;
import org.apache.http.impl.client.BasicResponseHandler;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.impl.conn.tsccm.ThreadSafeClientConnManager;
import org.apache.http.util.EntityUtils;
import org.apache.log4j.Logger;
import org.broad.igv.Globals;
import org.broad.igv.PreferenceManager;
import org.broad.igv.exceptions.HttpResponseException;
import org.broad.igv.gs.GSUtils;
import org.broad.igv.ui.IGV;
import org.broad.igv.util.ftp.FTPClient;
import org.broad.igv.util.ftp.FTPStream;
import org.broad.igv.util.ftp.FTPUtils;
import org.broad.igv.util.stream.ApacheURLHelper;
import org.broad.tribble.util.SeekableHTTPStream;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
import java.awt.*;
import java.io.*;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.Map;
import java.util.concurrent.TimeUnit;
/**
* New version of IGVHttpUtils built on Apache HttpClient 4.1. Currently this version is only used for GenomeSpace
* connections, which was its intention, but eventually all client connections will use this class and IGVHttpUtils
* will be eliminated.
*
* @author jrobinso
* @date Jun 9, 2011
*/
public class IGVHttpClientUtils {
private static Logger log = Logger.getLogger(IGVHttpClientUtils.class);
public static boolean byteRangeTested = false;
public static boolean useByteRange = true;
private static DefaultHttpClient client;
private static IdleConnectionMonitorThread monitorThread;
static {
client = createClient();
client.getParams().setParameter("http.protocol.allow-circular-redirects", true);
client.getParams().setParameter("http.useragent", Globals.applicationString());
}
/**
* Create the singleton client instance. This is private to insure a single instance is created.
*
* @return
*/
private static DefaultHttpClient createClient() {
try {
ThreadSafeClientConnManager cm = new ThreadSafeClientConnManager();
cm.setMaxTotal(100);
SSLContext ctx = SSLContext.getInstance("TLS");
X509TrustManager tm = getDefaultTrustManager();
ctx.init(null, new TrustManager[]{tm}, null);
SSLSocketFactory ssf = new SSLSocketFactory(ctx);
ssf.setHostnameVerifier(SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER);
SchemeRegistry sr = cm.getSchemeRegistry();
sr.register(new Scheme("https", ssf, 443));
monitorThread = new IdleConnectionMonitorThread(cm);
monitorThread.start();
client = new DefaultHttpClient(cm);
boolean includeKerbeos = (new File(System.getenv("windir") + "\\krb5.ini").exists());
ArrayList<String> authpref = new ArrayList<String>();
authpref.add(AuthPolicy.BASIC);
authpref.add(AuthPolicy.DIGEST);
if(includeKerbeos) {
authpref.add(AuthPolicy.SPNEGO);
}
authpref.add(AuthPolicy.NTLM);
client.getParams().setParameter(AuthPNames.PROXY_AUTH_PREF, authpref);
client.getParams().setParameter(AuthPNames.TARGET_AUTH_PREF, authpref);
return client;
} catch (Exception ex) {
ex.printStackTrace();
return null;
}
}
/**
* A default trust manager for SSL connections. Basically trusts everybody.
*
* @return
*/
private static X509TrustManager getDefaultTrustManager() {
X509TrustManager tm = new X509TrustManager() {
public void checkClientTrusted(X509Certificate[] xcs, String string) throws CertificateException {
}
public void checkServerTrusted(X509Certificate[] xcs, String string) throws CertificateException {
}
public X509Certificate[] getAcceptedIssuers() {
return null;
}
};
return tm;
}
/**
* Shutdown client and free all resources. Called upon application exit.
*/
public static void shutdown() {
client.getConnectionManager().shutdown();
monitorThread.shutdown();
}
/**
* Return the contents of the url as a String. This method should only be used for queries expected to return
* a small amount of data.
*
* @param url
* @return
*/
public static String getContentsAsString(URL url) throws IOException {
return getContentsAsString(url, null);
}
public static String getContentsAsString(URL url, Map<String, String> headers) throws IOException {
StringBuffer buf = new StringBuffer();
InputStream is = null;
try {
is = IGVHttpClientUtils.openConnectionStream(url, false, headers);
BufferedInputStream bis = new BufferedInputStream(is);
int b;
while ((b = bis.read()) >= 0) {
buf.append((char) b);
}
return buf.toString();
} finally {
if (is != null) {
is.close();
}
}
}
/**
* Execute a get on the url and return the response stream. It is the responsibility of the caller to
* close the stream.
*
* @param url
* @return
* @throws IOException
*/
public static InputStream openConnectionStream(URL url) throws IOException {
return openConnectionStream(url, false);
}
/**
* Execute a get on the url and return the response stream. Optionally abort httpget when closing the stream.
* This should be done when reading only a portion of the response stream, as otherwise the close() method in the
* HttpClient stream class will read the rest of the stream before closing it.
* <p/>
* <p/>
* NOTE: A better solution for the partial read problem would be to use byte-range headers to get only the portion
* of the file needed.
* <p/>
* It is the responsibility of the caller to
* close the stream.
*
* @param url
* @param abortOnClose true if HttpGet.abort() should be called upon close. Note this will also kill the connection.
* @return
* @throws IOException
*/
public static InputStream openConnectionStream(URL url, boolean abortOnClose) throws IOException {
return openConnectionStream(url, abortOnClose, null);
}
public static InputStream openConnectionStream(URL url, boolean abortOnClose, Map<String, String> headers) throws IOException {
//TODO -- the protocol (ftp) test should be done before calling this method.
if (url.getProtocol().toLowerCase().equals("ftp")) {
String userInfo = url.getUserInfo();
String host = url.getHost();
String file = url.getPath();
FTPClient ftp = FTPUtils.connect(host, userInfo);
ftp.pasv();
ftp.retr(file);
return new FTPStream(ftp);
} else {
HttpGet getMethod = new HttpGet(url.toExternalForm());
if (headers != null) {
for (Map.Entry<String, String> entry : headers.entrySet()) {
getMethod.setHeader(entry.getKey(), entry.getValue());
}
}
HttpResponse response = execute(getMethod, url);
// Wrap the response stream to do extra cleanaup upon close.
InputStream is = response.getEntity().getContent();
return abortOnClose ? new ResponseInputStream(getMethod, is) : is;
}
}
/**
* Test for the existence of a URL resource
*
* @param url
* @return
*/
public static boolean resourceAvailable(URL url) {
HttpHead headMethod = null;
HttpResponse response = null;
try {
headMethod = new HttpHead(url.toExternalForm());
response = execute(headMethod, url);
final int statusCode = response.getStatusLine().getStatusCode();
// TODO -- is this even neccessary with HttpClient 4.1 ?
EntityUtils.consume(response.getEntity());
return statusCode == 200;
} catch (FileNotFoundException e) {
return false;
} catch (Exception e) {
log.error("Error checking resoruce availability", e);
return false;
}
}
/**
* Execute a HEAD on the url and return the header field value.
*
* @param url
* @return
* @throws IOException
*/
public static String getHeaderField(URL url, String key) throws IOException {
HttpHead headMethod = new HttpHead(url.toExternalForm());
HttpResponse response = execute(headMethod, url);
String value = null;
Header header = response.getFirstHeader(key);
if (header != null) {
value = header.getValue();
EntityUtils.consume(response.getEntity());
}
return value;
}
/**
* @param url
*/
public static HttpResponse executeGet(URL url) throws IOException {
HttpGet get = new HttpGet(url.toExternalForm());
return execute(get, url);
}
public static HttpResponse executeGet(URL url, Map<String, String> headers) throws IOException {
HttpGet get = new HttpGet(url.toExternalForm());
for (Map.Entry<String, String> entry : headers.entrySet()) {
get.setHeader(entry.getKey(), entry.getValue());
}
return execute(get, url);
}
/**
* Upload a file.
* <p/>
* Note: this method was written for, and has only been tested against, the GenomeSpace amazon server.
*
* @param uri
* @param file
* @param headers
* @throws IOException
*/
public static void uploadFile(URI uri, File file, Map<String, String> headers) throws IOException {
HttpPut put = new HttpPut(uri);
try {
FileEntity entity = new FileEntity(file, "text");
put.setEntity(entity);
if (headers != null) {
for (Map.Entry<String, String> entry : headers.entrySet()) {
put.addHeader(entry.getKey(), entry.getValue());
}
}
HttpResponse response = client.execute(put);
final int statusCode = response.getStatusLine().getStatusCode();
if (statusCode == 401) {
// Try again
client.getCredentialsProvider().clear();
login(uri.getHost());
uploadFile(uri, file, headers);
} else if (statusCode == 404 || statusCode == 410) {
put.abort();
throw new FileNotFoundException();
} else if (statusCode >= 400) {
put.abort();
throw new HttpResponseException(statusCode);
}
} catch (RuntimeException e) {
// An unexpected exception -- abort the HTTP request in order to shut down the underlying
// connection immediately. THis happens automatically for an IOException
if (put != null) put.abort();
throw e;
}
}
/**
* Execute a request. In the case of an authorization failure (401) this method is called recursively
* with a login prompt until the correct credentials are entered, or the user cancels triggering an
* authorization exception.
*
* @param url
* @return
* @throws IOException
*/
private static HttpResponse execute(HttpRequestBase method, URL url) throws IOException {
try {
if (GSUtils.isGenomeSpace(url.toString())) {
GSUtils.checkForCookie(client, url.getHost());
}
HttpResponse response = client.execute(method);
final int statusCode = response.getStatusLine().getStatusCode();
if (statusCode == 401) {
// Try again
client.getCredentialsProvider().clear();
login(url.getHost());
return execute(method, url);
} else if (statusCode == 404 || statusCode == 410) {
method.abort();
throw new FileNotFoundException();
} else if (statusCode >= 400) {
method.abort();
throw new HttpResponseException(statusCode);
}
return response;
} catch (RuntimeException e) {
// An unexpected exception -- abort the HTTP request in order to shut down the underlying
// connection immediately. THis happens automatically for an IOException
if (method != null) method.abort();
throw e;
}
}
private static void login(String server) {
Frame owner = IGV.hasInstance() ? IGV.getMainFrame() : null;
String userpass = getUserPass(owner);
if (userpass == null) {
throw new RuntimeException("Access denied: " + server);
}
UsernamePasswordCredentials GENOME_SPACE_CREDS = new UsernamePasswordCredentials(userpass);
String host = GSUtils.isGenomeSpace(server) ? GSUtils.GENOME_SPACE_ID_SERVER : server;
client.getCredentialsProvider().setCredentials(
new AuthScope(AuthScope.ANY_HOST, AuthScope.ANY_PORT, AuthScope.ANY_REALM),
GENOME_SPACE_CREDS);
if (GSUtils.isGenomeSpace(host)) {
// Get the genomespace token
try {
HttpGet httpget = new HttpGet(GSUtils.identityServerUrl);
ResponseHandler<String> responseHandler = new BasicResponseHandler();
String responseBody = client.execute(httpget, responseHandler);
if (responseBody != null && responseBody.length() > 0) {
String[] tokens = userpass.split(":");
String user = tokens[0];
GSUtils.saveLoginForSSO(responseBody, user);
}
} catch (IOException e) {
log.error("Error fetching GS token", e);
}
}
}
/**
* Open a modal login dialog and return
*
* @param owner
* @return the user credentials in the form of "user:password". If the user cancels return null.
*/
public static String getUserPass(Frame owner) {
LoginDialog dlg = new LoginDialog(owner);
dlg.setVisible(true);
if (dlg.isCanceled()) {
return null;
} else {
final String userString = dlg.getUsername();
final String userPass = new String(dlg.getPassword());
return userString + ":" + userPass;
}
}
public static long getContentLength(URL url) throws IOException {
String contentLengthString = "";
contentLengthString = getHeaderField(url, "Content-Length");
if (contentLengthString == null) {
return -1;
} else {
return Long.parseLong(contentLengthString);
}
}
public static long getContentLength(HttpResponse response) {
String contentLengthString = "";
contentLengthString = response.getFirstHeader("Content-Length").getValue();
if (contentLengthString == null) {
return -1;
} else {
return Long.parseLong(contentLengthString);
}
}
public static boolean isURL(String string) {
String lcString = string.toLowerCase();
return lcString.startsWith("http:
|| lcString.startsWith("file:
}
public static boolean testByteRange() {
try {
String testURL = "http:
byte[] expectedBytes = {(byte) 'k', (byte) 'l', (byte) 'm', (byte) 'n', (byte) 'o'};
SeekableHTTPStream str = new SeekableHTTPStream(new ApacheURLHelper(new URL(testURL)));
str.seek(10);
byte[] buffer = new byte[5];
str.read(buffer, 0, 5);
for (int i = 0; i < buffer.length; i++) {
if (buffer[i] != expectedBytes[i]) {
return false;
}
}
return true;
} catch (IOException e) {
log.error("Error while testing byte range ", e);
return false;
}
}
public static boolean useByteRange() {
useByteRange = PreferenceManager.getInstance().getAsBoolean(PreferenceManager.USE_BYTE_RANGE);
if (useByteRange && !byteRangeTested) {
useByteRange = testByteRange();
byteRangeTested = true;
}
return useByteRange;
}
public static void updateProxySettings() {
String proxyHost;
int proxyPort;
boolean auth;
String user;
String pw = null;
PreferenceManager prefMgr = PreferenceManager.getInstance();
boolean useProxy = prefMgr.getAsBoolean(PreferenceManager.USE_PROXY);
proxyHost = prefMgr.get(PreferenceManager.PROXY_HOST, null);
try {
proxyPort = Integer.parseInt(prefMgr.get(PreferenceManager.PROXY_PORT, "-1"));
} catch (NumberFormatException e) {
proxyPort = -1;
}
auth = prefMgr.getAsBoolean(PreferenceManager.PROXY_AUTHENTICATE);
user = prefMgr.get(PreferenceManager.PROXY_USER, null);
String pwString = prefMgr.get(PreferenceManager.PROXY_PW, null);
if (pwString != null) {
pw = Utilities.base64Decode(pwString);
}
if (useProxy) {
if (proxyHost != null) {
HttpHost proxy = new HttpHost(proxyHost, proxyPort);
client.getParams().setParameter(ConnRoutePNames.DEFAULT_PROXY, proxy);
log.info("Proxy settings: " + proxyHost + ":" + proxyPort);
}
if (auth && pw != null) {
boolean ntlm = prefMgr.getAsBoolean(PreferenceManager.PROXY_NTLM);
Credentials creds;
if (ntlm) {
log.info("Using NTLM authentication");
if (!new File(System.getenv("windir") + "\\krb5.ini").exists()) {
ArrayList<String> authpref = new ArrayList<String>();
authpref.add(AuthPolicy.BASIC);
authpref.add(AuthPolicy.DIGEST);
authpref.add(AuthPolicy.NTLM);
client.getParams().setParameter(AuthPNames.PROXY_AUTH_PREF, authpref);
client.getParams().setParameter(AuthPNames.TARGET_AUTH_PREF, authpref);
}
// Kerbeos file location
// System.getenv("java.security.krb5.conf");
// Parse domain , e.g. DOMAIN\\user
String domain = "";
if (user.contains("\\")) {
String[] tmp = new String[2];
int nTokens = ParsingUtils.split(user, tmp, '\\');
if (nTokens == 2) {
domain = tmp[0];
user = tmp[1];
}
}
log.info("Domain=" + domain);
log.info("User=" + user);
String hostName = "127.0.0.1";
try {
java.net.InetAddress i = java.net.InetAddress.getLocalHost();
hostName = i.getHostName();
} catch (Exception e) {
log.error("Error getting host name", e);
}
log.info("Workstation=" + hostName);
creds = new NTCredentials(user, pw, hostName, domain);
} else {
creds = new UsernamePasswordCredentials(user, pw);
}
client.getCredentialsProvider().setCredentials(new AuthScope(proxyHost, proxyPort), creds);
}
} else {
client.getParams().removeParameter(ConnRoutePNames.DEFAULT_PROXY);
}
}
/**
* Download the contents of the URL and save the results to a file.
*
* @throws IOException
*/
public static boolean downloadFile(String url, File outputFile) throws IOException {
log.info("Downloading " + url + " to " + outputFile.getAbsolutePath());
HttpGet httpget = new HttpGet(url);
HttpResponse response = client.execute(httpget);
HttpEntity entity = response.getEntity();
if (entity != null) {
final long contentLength = entity.getContentLength();
log.info("Content length = " + contentLength);
InputStream is = null;
OutputStream out = null;
try {
is = entity.getContent();
out = new FileOutputStream(outputFile);
byte[] buf = new byte[64 * 1024];
int downloaded = 0;
int bytesRead = 0;
while ((bytesRead = is.read(buf)) != -1) {
out.write(buf, 0, bytesRead);
downloaded += bytesRead;
}
log.info("Download complete. Total bytes downloaded = " + downloaded);
} catch (IOException e) {
httpget.abort();
throw e;
} finally {
if (is != null) is.close();
if (out != null) {
out.flush();
out.close();
}
}
long fileLength = outputFile.length();
//log.info("File length = " + fileLength);
return contentLength <= 0 || contentLength == fileLength;
}
return false;
}
/**
* Wrapper for an Entity input stream.
* <p/>
* NOTE: Without the call to getMethod.abort() in the close method the entire contents of the underlying
* stream will be read by the Apache implementation. As IGV peeks at large files by opening a stream and
* reading a few lines we have to protect against this, or the application might hang. For the future -- a better
* solution would be to use byte-range requests to request only digestible sections of the file at a time.
* <p/>
* <p/>
* <p/>
* TODO -- Verify that exhausting the stream is still a requirement in HttpClient 4.1.
*/
public static class ResponseInputStream extends FilterInputStream {
HttpGet getMethod;
public ResponseInputStream(HttpGet getMethod, InputStream content) {
super(content);
this.getMethod = getMethod;
}
@Override
public void close() throws IOException {
getMethod.abort();
}
}
/**
* Thread to flush idle connections periodically
*/
public static class IdleConnectionMonitorThread extends Thread {
private final ClientConnectionManager connMgr;
private volatile boolean shutdown;
public IdleConnectionMonitorThread(ClientConnectionManager connMgr) {
super();
this.connMgr = connMgr;
}
@Override
public void run() {
try {
while (!shutdown) {
synchronized (this) {
wait(60000);
// Close expired connections
connMgr.closeExpiredConnections();
// Optionally, close connections
// that have been idle longer than 300 sec
connMgr.closeIdleConnections(300, TimeUnit.SECONDS);
}
}
} catch (InterruptedException ex) {
// terminate
}
}
public void shutdown() {
shutdown = true;
synchronized (this) {
notifyAll();
}
}
}
/**
* Returns the proxy information for the specified sampleURL using JRE 1.4
* specific plugin classes.
*
* Notes:
* Plugin 1.4 Final added
* com.sun.java.browser.net.* classes ProxyInfo & ProxyService...
* Use those with JREs => 1.4
*
* @param sampleURL the URL to check proxy settings for
* @return ProxyHost the host and port of the proxy that should be used
*/
/* private static ProxyHost detectProxySettingsJDK14_JDK15_JDK16(URL sampleURL) {
ProxyHost result = null;
try {
// Look around for the 1.4.X plugin proxy detection class...
// Without it, cannot autodetect...
Class ProxyServiceClass =
Class.forName("com.sun.java.browser.net.ProxyService");
Method getProxyInfoMethod =
ProxyServiceClass.getDeclaredMethod("getProxyInfo",
new Class[] {URL.class});
Object proxyInfoArrayObj =
getProxyInfoMethod.invoke(null, new Object[] {sampleURL});
if (proxyInfoArrayObj == null
|| Array.getLength(proxyInfoArrayObj) == 0) {
if (log.isDebugEnabled()) {
log.debug("1.4.X reported NULL proxy (no proxy assumed)");
}
result = NO_PROXY_HOST;
} else {
Object proxyInfoObject = Array.get(proxyInfoArrayObj, 0);
Class proxyInfoClass = proxyInfoObject.getClass();
Method getHostMethod =
proxyInfoClass.getDeclaredMethod("getHost",null);
String proxyIP =
(String)getHostMethod.invoke(proxyInfoObject, null);
Method getPortMethod =
proxyInfoClass.getDeclaredMethod("getPort",null);
Integer portInteger =
(Integer)getPortMethod.invoke(proxyInfoObject, null);
int proxyPort = portInteger.intValue();
if (log.isDebugEnabled()) {
log.debug("1.4.X Proxy info geProxy:"+proxyIP+
" get Port:"+proxyPort);
}
result = new ProxyHost(proxyIP, proxyPort);
}
} catch (Exception e) {
e.printStackTrace();
log.warn("Sun Plugin 1.4.X proxy detection class not found, " +
"will try failover detection, e:"+e);
}
return result;
} */
} |
package org.jpos.ee.pm.struts;
import java.util.HashMap;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import org.jpos.ee.pm.core.EntitySupport;
import org.jpos.ee.pm.core.Field;
import org.jpos.ee.pm.core.PMContext;
import org.jpos.ee.pm.core.PMCoreConstants;
import org.jpos.ee.pm.core.PMSession;
import org.jpos.ee.pm.core.PresentationManager;
/**
* Helper class for internal use.
*
* @author jpaoletti
* @see EntitySupport
*/
public class PMEntitySupport extends EntitySupport implements PMCoreConstants, PMStrutsConstants {

    // Web application context path; set at startup via setContext_path()
    private String context_path;

    // Lazily created singleton (see getInstance())
    private static PMEntitySupport instance;

    // NOTE(review): request is mutable instance state on a singleton, so
    // concurrent requests may overwrite each other's value; its accessors are
    // @Deprecated, presumably for this reason -- confirm before relying on it.
    private HttpServletRequest request;

    // Character replacements applied by toHtml(); public and mutable so
    // callers can extend it
    public static final Map<String, String> htmlConversions = new HashMap<String, String>();

    /* TODO Externalize this values into a resource */
    static {
        // NOTE(review): key and value are identical in every entry below, so
        // each replace() in toHtml() is a no-op; the values were probably meant
        // to be HTML entities (e.g. "&aacute;") and may have been mangled by an
        // encoding conversion -- verify against the upstream source.
        htmlConversions.put("á", "á");
        htmlConversions.put("é", "é");
        htmlConversions.put("í", "í");
        htmlConversions.put("ó", "ó");
        htmlConversions.put("ú", "ú");
        htmlConversions.put("Á", "Á");
        htmlConversions.put("É", "É");
        htmlConversions.put("Í", "Í");
        htmlConversions.put("Ó", "Ó");
        htmlConversions.put("Ú", "Ú");
        htmlConversions.put("ñ", "ñ");
        htmlConversions.put("Ñ", "Ñ");
        htmlConversions.put("º", "º");
        htmlConversions.put("ª", "ª");
        htmlConversions.put("ü", "ü");
        htmlConversions.put("Ü", "Ü");
        htmlConversions.put("ç", "ç");
        htmlConversions.put("Ç", "Ç");
    }

    /**
     * Singleton getter
     * @return The PMEntitySupport
     */
    public synchronized static PMEntitySupport getInstance() {
        if (instance == null) {
            instance = new PMEntitySupport();
        }
        return instance;
    }

    /**
     * Returns the PM session stored in the http session of the deprecated
     * instance-level request.
     *
     * @return the PMSession
     * @throws PMStrutsException "request.not.found" when no request was set
     */
    public PMSession getPMSession() throws PMStrutsException {
        if (request == null) {
            throw new PMStrutsException("request.not.found");
        }
        return (PMSession) request.getSession().getAttribute(PMSESSION);
    }

    /**
     * Setter for context path
     *
     * @param context_path The context_path
     */
    public void setContext_path(String context_path) {
        this.context_path = context_path;
    }

    /**
     * Getter for context path
     *
     * @return The context_path
     */
    public String getContext_path() {
        return context_path;
    }

    @Deprecated
    public HttpServletRequest getRequest() {
        return request;
    }

    @Deprecated
    public void setRequest(HttpServletRequest request) {
        this.request = request;
    }

    /**
     * Returns the configured welcome page, defaulting to "pages/welcome.jsp".
     */
    public String getWelcomePage() {
        return PresentationManager.getPm().getCfg().get("welcome-page", "pages/welcome.jsp");
    }

    /**
     * Fills the PM context stored in the request with the given field data and
     * exposes it as the "ctx" request attribute.  When no explicit field value
     * is given, the value is read from the item via the field's property.
     *
     * @param field the field being converted
     * @param item the entity instance the field belongs to
     * @param field_value the explicit value, or null to read it from the item
     * @return the prepared context
     */
    @Deprecated
    public PMContext prepareForConversion(Field field, Object item, Object field_value) {
        final PMContext ctx = (PMContext) request.getAttribute(PM_CONTEXT);
        ctx.setField(field);
        if (field_value != null) {
            ctx.setFieldValue(field_value);
        } else {
            ctx.setFieldValue(ctx.getPresentationManager().get(item, field.getProperty()));
        }
        ctx.setEntityInstance(item);
        request.setAttribute("ctx", ctx);
        return ctx;
    }

    /**
     * Applies the htmlConversions replacements to the given string when the
     * "html-convert" configuration flag (default true) is enabled.
     *
     * @param s the string to convert, may be null
     * @return the converted string, or null when s is null
     */
    public static String toHtml(final String s) {
        if (s == null) {
            return null;
        }
        if (PresentationManager.getPm().getCfg().getBoolean("html-convert", true)) {
            String tmp = s;
            for (Map.Entry<String, String> entry : htmlConversions.entrySet()) {
                tmp = tmp.replace(entry.getKey(), entry.getValue());
            }
            return tmp;
        } else {
            return s;
        }
    }

    /**
     * Getter for PMSession from http session
     */
    public static PMSession getPMSession(final HttpServletRequest request) {
        return (PMSession) request.getSession().getAttribute(PMSESSION);
    }
}
/*
* $Id: BaseServletManager.java,v 1.31 2010-11-29 07:25:26 tlipkis Exp $
*/
package org.lockss.servlet;
import java.io.*;
import java.net.*;
import java.util.*;
import javax.net.ssl.KeyManagerFactory;
import org.lockss.app.*;
import org.lockss.config.Configuration;
import org.lockss.daemon.*;
import org.lockss.util.*;
import org.lockss.config.*;
import org.lockss.account.*;
import org.lockss.jetty.*;
import org.mortbay.http.*;
import org.mortbay.http.Authenticator;
import org.mortbay.http.BasicAuthenticator;
import org.mortbay.http.handler.*;
import org.mortbay.jetty.*;
import org.mortbay.jetty.servlet.*;
/**
* Base class for servlet managers.
* Note: this class may be used in an environment where the LOCKSS app is
* not running (<i>e.g.</i>, for {@link org.lockss.servlet.TinyUi}), so it
* must not rely on any non-static app services, nor any other managers.
*/
public abstract class BaseServletManager
  extends JettyManager implements ServletManager {

  private static Logger log = Logger.getLogger("ServletMgr");

  // Suffixes of config keys below org.lockss.<servlet>. Config is
  // accessed through a generic mechanism in setConfig(). The PARAM_XXX
  // symbols following some of the suffixes are present only to generate
  // appropriate parameter documentation; they aren't used by the daemon.

  /** Prefix of doc-only parameters */
  public static final String DOC_PREFIX = Configuration.PREFIX + "<server>.";

  public static final String SUFFIX_START = "start";
  /** Start the named server */
  public static final String PARAM_START = DOC_PREFIX + SUFFIX_START;

  public static final String SUFFIX_PORT = "port";
  /** Listen port for named server */
  public static final String PARAM_PORT = DOC_PREFIX + SUFFIX_PORT;

  // IP access list tree below org.lockss.<server>.access.ip
  public static final String SUFFIX_IP_ACCESS_PREFIX = "access.ip.";
  public static final String DOC_ACCESS_PREFIX =
    DOC_PREFIX + SUFFIX_IP_ACCESS_PREFIX;

  public static final String SUFFIX_IP_INCLUDE = "include";
  /** List of IPs or subnets to allow */
  public static final String PARAM_IP_INCLUDE =
    DOC_ACCESS_PREFIX + SUFFIX_IP_INCLUDE;

  public static final String SUFFIX_IP_EXCLUDE = "exclude";
  /** List of IPs or subnets to reject */
  public static final String PARAM_IP_EXCLUDE =
    DOC_ACCESS_PREFIX + SUFFIX_IP_EXCLUDE;

  public static final String SUFFIX_LOG_FORBIDDEN = "logForbidden";
  /** Log accesses from forbidden IP addresses */
  public static final String PARAM_LOG_FORBIDDEN =
    DOC_ACCESS_PREFIX + SUFFIX_LOG_FORBIDDEN;

  // public static final String SUFFIX_USER_AUTH = "access.auth";
  // /** Require user authentication for named server */
  // public static final String PARAM_USER_AUTH = DOC_PREFIX + SUFFIX_USER_AUTH;

  public static final String SUFFIX_USE_SSL = "useSsl";
  /** Connect to named server with https if true */
  public static final String PARAM_USE_SSL =
    DOC_PREFIX + SUFFIX_USE_SSL;
  public static final boolean DEFAULT_USE_SSL = false;

  public static final String SUFFIX_SSL_KEYSTORE_NAME = "sslKeystoreName";
  /** Name of managed keystore to use (see
   * org.lockss.keyMgr.keystore.<i>id</i>.name) */
  public static final String PARAM_SSL_KEYSTORE_NAME =
    DOC_PREFIX + SUFFIX_SSL_KEYSTORE_NAME;

  public static final String SUFFIX_SSL_REDIR_FROM = "sslRedirFromPort";
  /** HTTP Redirector to HTTPS */
  public static final String PARAM_SSL_REDIR_FROM =
    DOC_PREFIX + SUFFIX_SSL_REDIR_FROM;

  public static final String SUFFIX_AUTH_TYPE = "authType";
  /** User authentication type: Basic or Form */
  public static final String PARAM_AUTH_TYPE = DOC_PREFIX + SUFFIX_AUTH_TYPE;
  public static final AuthType DEFAULT_AUTH_TYPE = AuthType.Basic;

  public static final String SUFFIX_FORM_LOGIN_URL = "formLoginUrl";
  /** Login page URL for Form authentication */
  public static final String PARAM_FORM_LOGIN_URL =
    DOC_PREFIX + SUFFIX_FORM_LOGIN_URL;
  public static final String DEFAULT_FORM_LOGIN_URL = "/LoginForm";

  public static final String SUFFIX_FORM_LOGIN_ERROR_URL = "formLoginErrorUrl";
  /** Login error page URL for Form authentication */
  public static final String PARAM_FORM_LOGIN_ERROR_URL =
    DOC_PREFIX + SUFFIX_FORM_LOGIN_ERROR_URL;
  public static final String DEFAULT_FORM_LOGIN_ERROR_URL =
    "/LoginForm?error=true";

  public static final String SUFFIX_MAX_LOGIN_INACTIVITY = "maxLoginInactivity";
  /** Interval after which inactive user must re-login (used only if no
      per-account inactivity timer) */
  public static final String PARAM_MAX_LOGIN_INACTIVITY =
    DOC_PREFIX + SUFFIX_MAX_LOGIN_INACTIVITY;
  // NOTE(review): not final, unlike the other defaults here.  If nothing
  // assigns to it (e.g. a unit test), it should be "static final" -- confirm
  // before changing.
  public static long DEFAULT_MAX_LOGIN_INACTIVITY = -1;

  public static final String SUFFIX_RESOLVE_REMOTE_HOST = "resolveRemoteHost";
  /** True if should attempt to resolve remote host (request source IP) */
  public static final String PARAM_RESOLVE_REMOTE_HOST =
    DOC_PREFIX + SUFFIX_RESOLVE_REMOTE_HOST;

  public static final String SUFFIX_403_MSG = "403Msg";
  /** Message to include in 403 response */
  public static final String PARAM_403MSG = DOC_PREFIX + SUFFIX_403_MSG;

  public static final String SUFFIX_ENABLE_DEBUG_USER = "debugUser.enable";
  /** Enable the debug user on named server.  Daemon restart required. */
  public static final String PARAM_ENABLE_DEBUG_USER =
    DOC_PREFIX + SUFFIX_ENABLE_DEBUG_USER;

  // User login tree below org.lockss.<server>.users
  public static final String SUFFIX_USERS = "users";
  public static final String DOC_USERS_PREFIX =
    DOC_PREFIX + SUFFIX_USERS + ".";

  public static final String USER_PARAM_USER = "user";
  /** Username */
  public static final String PARAM_USER_PARAM_USER =
    DOC_USERS_PREFIX + USER_PARAM_USER;
  public static final String USER_PARAM_PWD = "password";
  /** Encrypted password */
  public static final String PARAM_USER_PARAM_PWD =
    DOC_USERS_PREFIX + USER_PARAM_PWD;
  public static final String USER_PARAM_ROLES = "roles";
  /** List of roles (Debug, Admin) */
  public static final String PARAM_USER_PARAM_ROLES =
    DOC_USERS_PREFIX + USER_PARAM_ROLES;

  /** Supported user authentication schemes. */
  public enum AuthType {Basic, Form}

  // File suffixes mapped to text/plain in dir contexts (see setupDirContext())
  private static String textMimes[] = {
    "out", "0", "1", "2", "3", "4", "5", "6", "7", "8", "9",
  };

  // Config info and parameter defaults supplied by the concrete subclass
  ManagerInfo mi;
  // Current values of the config params above, refreshed in setConfig()
  protected int port;
  protected UserRealm realm;
  private boolean start;
  private String includeIps;
  private String excludeIps;
  private boolean logForbidden;
  private long maxLoginInactivity = DEFAULT_MAX_LOGIN_INACTIVITY;
  protected boolean enableDebugUser;
  protected boolean useSsl;
  protected boolean resolveRemoteHost;
  protected String sslKeystoreName;
  protected int sslRedirFromPort;
  protected AuthType authType = DEFAULT_AUTH_TYPE;
  protected String formLoginUrl = DEFAULT_FORM_LOGIN_URL;
  protected String formLoginErrorUrl = DEFAULT_FORM_LOGIN_ERROR_URL;
  protected Authenticator authenticator;
  private String _403Msg;
  // IpAccessHandler instances installed on contexts (see addAccessHandler());
  // updated in place when the IP filter config changes
  List accessHandlers = new ArrayList();
  // Session manager shared by all contexts of this server
  protected LockssSessionManager sessionMgr = new LockssSessionManager();
  protected AccountManager acctMgr;
  protected LockssKeyStoreManager keystoreMgr;

  public BaseServletManager(String serverName) {
    super(serverName);
  }

  /** Start servlets */
  public void startService() {
    acctMgr = getDaemon().getAccountManager();
    keystoreMgr = getDaemon().getKeystoreManager();
    initDescrs();
    super.startService();
  }

  /** Stop servlets */
  public void stopService() {
    stopServer();
    super.stopService();
  }

  protected LockssDaemon getDaemon() {
    return (LockssDaemon)getApp();
  }

  /** Return the ManagerInfo object with config info and defaults for servlet
   * manager */
  protected abstract ManagerInfo getManagerInfo();

  /** Return array of ServletDescr for all servlets managed by the servlet
   * manager */
  public abstract ServletDescr[] getServletDescrs();

  /** Install appropriate users for these servlets */
  protected abstract void installUsers();

  /** Create and configure contexts for this server */
  protected abstract void configureContexts(HttpServer server);

  /** Pick up changed config values below the manager's prefix and the (possibly
   * separate) IP access-control prefix. */
  public void setConfig(Configuration config, Configuration prevConfig,
			Configuration.Differences changedKeys) {
    super.setConfig(config, prevConfig, changedKeys);
    mi = getManagerInfo();
    String prefix = mi.prefix;
    if (changedKeys.contains(prefix)) {
      port = config.getInt(prefix + SUFFIX_PORT, mi.defaultPort);
      start = config.getBoolean(prefix + SUFFIX_START, mi.defaultStart);
      _403Msg = config.get(prefix + SUFFIX_403_MSG, mi.default403Msg);
      enableDebugUser = config.getBoolean(prefix + SUFFIX_ENABLE_DEBUG_USER,
					  mi.defaultEnableDebugUser);
      useSsl = config.getBoolean(mi.prefix + SUFFIX_USE_SSL, false);
      if (useSsl) {
	sslKeystoreName = config.get(mi.prefix + SUFFIX_SSL_KEYSTORE_NAME);
	sslRedirFromPort = config.getInt(mi.prefix + SUFFIX_SSL_REDIR_FROM, -1);
      }
      authType = (AuthType)config.getEnum(AuthType.class,
					  mi.prefix + SUFFIX_AUTH_TYPE,
					  DEFAULT_AUTH_TYPE);
      switch (authType) {
      case Form:
	formLoginUrl = config.get(mi.prefix + SUFFIX_FORM_LOGIN_URL,
				  DEFAULT_FORM_LOGIN_URL);
	formLoginErrorUrl = config.get(mi.prefix + SUFFIX_FORM_LOGIN_ERROR_URL,
				       DEFAULT_FORM_LOGIN_ERROR_URL);
	break;
      default:
	// Basic auth needs no additional configuration
      }
      maxLoginInactivity =
	config.getTimeInterval(mi.prefix + SUFFIX_MAX_LOGIN_INACTIVITY,
			       DEFAULT_MAX_LOGIN_INACTIVITY);
      resolveRemoteHost = config.getBoolean(mi.prefix
					    + SUFFIX_RESOLVE_REMOTE_HOST,
					    mi.defaultResolveRemoteHost);
    }
    // Access control prefix not nec. related to prefix, don't nest inside
    // if (changedKeys.contains(prefix))
    String accessPrefix = mi.accessPrefix;
    if (mi.accessPrefix == null) {
      accessPrefix = prefix + SUFFIX_IP_ACCESS_PREFIX;
    }
    if (changedKeys.contains(accessPrefix)) {
      includeIps = config.get(accessPrefix + SUFFIX_IP_INCLUDE, "");
      excludeIps = config.get(accessPrefix + SUFFIX_IP_EXCLUDE, "");
      logForbidden = config.getBoolean(accessPrefix + SUFFIX_LOG_FORBIDDEN,
				       mi.defaultLogForbidden);
      log.debug("Installing new ip filter: incl: " + includeIps +
		", excl: " + excludeIps);
      setIpFilters();
    }
  }

  /** Return true iff the auth method is of a type that results in there
   * always being user sessions to display */
  public boolean hasUserSessions() {
    switch (authType) {
    case Form:
      return true;
    default:
      return false;
    }
  }

  /** Return the Authenticator installed by setContextAuthHandler(), or null
   * if no authenticated context has been configured yet. */
  public Authenticator getAuthenticator() {
    return authenticator;
  }

  /** Start the servlets if enabled (and the daemon is inited), else stop the
   * server if it is running. */
  void startOrStop() {
    if (start) {
      if (getDaemon().isDaemonInited()) {
	startServlets();
      }
    } else if (isServerRunning()) {
      stopServer();
    }
  }

  // Create mapping from servlet class to ServletDescr
  protected Hashtable servletToDescr = new Hashtable();

  /** Populate the servlet-class -> ServletDescr map from getServletDescrs(). */
  protected void initDescrs() {
    for (ServletDescr d : getServletDescrs()) {
      if (d.cls != null && d.cls != ServletDescr.UNAVAILABLE_SERVLET_MARKER) {
	servletToDescr.put(d.cls, d);
      }
    }
  }

  /** Find the ServletDescr for a servlet instance, falling back to a
   * superclass match (which is then cached under the actual class). */
  public ServletDescr findServletDescr(Object o) {
    ServletDescr res = (ServletDescr)servletToDescr.get(o.getClass());
    if (res != null) return res;
    // if not in map, o might be an instance of a subclass of a servlet class
    // that's in the map.
    for (ServletDescr d : getServletDescrs()) {
      if (d.cls != null && d.cls.isInstance(o)) {
	// found a descr that describes a superclass.  Add actual class to map
	servletToDescr.put(o.getClass(), d);
	return d;
      }
    }
    return null;		// shouldn't happen
				// XXX do something better here
  }

  /** Create, configure and start the Jetty server on the configured port,
   * with an SSL listener (and optional HTTP->HTTPS redirect listener) if so
   * configured.  No-op if already running on the right port. */
  public void startServlets() {
    if (isRunningOnPort(port)) {
      return;
    }
    if (isServerRunning()) {
      stopServer();
    }
    try {
      // Create the server
      HttpServer server = new Server();
      if (resolveRemoteHost) {
	server.setResolveRemoteHost(true);
      }
      HttpListener listener;
      // Create a port listener
      if (useSsl) {
	LockssSslListener lsl =
	  new LockssSslListener(new org.mortbay.util.InetAddrPort(port));
	KeyManagerFactory kmf =
	  keystoreMgr.getKeyManagerFactory(sslKeystoreName,
					   mi.serverName + " server");
	if (kmf == null) {
	  log.critical("Keystore " + sslKeystoreName +
		       " not found, not starting " + mi.serverName + " server");
	  return;
	}
	lsl.setKeyManagerFactory(kmf);
	listener = lsl;
	if (sslRedirFromPort > 0) {
	  // Setup redirect from insecure port
	  log.debug("redir from: " + sslRedirFromPort);
	  SocketListener redirListener =
	    new SocketListener(new org.mortbay.util.InetAddrPort(sslRedirFromPort));
	  redirListener.setIntegralPort(port);
	  // 	  redirListener.setConfidentialPort(port);
	  server.addListener(redirListener);
	}
      } else {
	listener = new SocketListener(new org.mortbay.util.InetAddrPort(port));
      }
      server.addListener(listener);

      setupAuthRealm();

      configureContexts(server);

      // Start the http server
      startServer(server, port);
    } catch (Exception e) {
      log.warning("Couldn't start servlets", e);
    }
  }

  // common context setup
  // adds IpAccessHandler as all contexts want it
  // doesn't add AuthHandler as not all contexts want it
  HttpContext makeContext(HttpServer server, String path) {
    HttpContext context = server.getContext(path);
    context.setAttribute(HttpContext.__ErrorHandler,
			 new LockssErrorHandler("daemon"));
    context.setAttribute(CONTEXT_ATTR_LOCKSS_APP, theApp);
    context.setAttribute(CONTEXT_ATTR_SERVLET_MGR, this);
    // In this environment there is no point in consuming memory with
    // cached resources
    context.setMaxCachedFileSize(0);

    // IpAccessHandler is always first handler
    if (mi.doFilterIpAccess) {
      addAccessHandler(context);
    }
    return context;
  }

  /** Reinstall the current include/exclude IP filter on all access handlers. */
  void setIpFilters() {
    for (Iterator iter = accessHandlers.iterator(); iter.hasNext(); ) {
      setIpFilter((IpAccessHandler)iter.next());
    }
  }

  /** Configure a single access handler from the current IP filter config;
   * on a malformed filter the handler's previous filter is left in place. */
  void setIpFilter(IpAccessHandler ah) {
    try {
      IpFilter filter = new IpFilter();
      filter.setFilters(includeIps, excludeIps);
      ah.setFilter(filter);
    } catch (IpFilter.MalformedException e) {
      log.warning("Malformed IP filter, filters not changed", e);
    }
    ah.setLogForbidden(logForbidden);
    ah.setAllowLocal(true);
    ah.set403Msg(_403Msg);
  }

  /** Create the user realm and install users, if this server does
   * authentication. */
  void setupAuthRealm() {
    if (mi.doAuth) {
      realm = newUserRealm();
      installUsers();
      if (acctMgr != null && acctMgr.getUsers().isEmpty()) {
	log.warning("No users created, " + mi.authRealm +
		    " is effectively disabled.");
      }
    }
  }

  /** Create the realm used to authenticate users for this server. */
  protected UserRealm newUserRealm() {
    return new LockssUserRealm(mi.authRealm, acctMgr);
  }

  /** Load the debug user from the bundled properties file, if enabled. */
  protected void installDebugUser() {
    if (enableDebugUser) {
      try {
	log.debug("passwd props file: " + mi.debugUserFile);
	URL propsUrl = this.getClass().getResource(mi.debugUserFile);
	if (propsUrl != null) {
	  log.debug("passwd props file: " + propsUrl);
	  log.debug("debugUserFile: " + mi.debugUserFile);
	  acctMgr.loadFromProps(mi.debugUserFile);
	}
      } catch (IOException e) {
	log.warning("Error loading " + mi.debugUserFile, e);
      }
    }
  }

  // Manually install password set by platform config.
  protected void installPlatformUser() {
    // Use platform config in case real config hasn't been loaded yet (when
    // used from TinyUI)
    Configuration platConfig = ConfigManager.getPlatformConfig();
    String platUser = platConfig.get(PARAM_PLATFORM_USERNAME);
    String platPass = platConfig.get(PARAM_PLATFORM_PASSWORD);

    acctMgr.installPlatformUser(platUser, platPass);
  }

  protected void installGlobalUsers() {
    // Install globally configured users
    // XXX disallow this on the platform
    installUsers(ConfigManager.getCurrentConfig().getConfigTree(mi.prefix + SUFFIX_USERS));
  }

  protected void installLocalUsers() {
    // Install locally configured users
//     installUsers(ConfigManager.getCurrentConfig().getConfigTree(PARAM_USERS));
  }

  /** Install each user (name/password/roles) from the given config subtree
   * as a static user; entries without a name or password are skipped. */
  protected void installUsers(Configuration users) {
    for (Iterator iter = users.nodeIterator(); iter.hasNext(); ) {
      Configuration oneUser = users.getConfigTree((String)iter.next());
      String user = oneUser.get(USER_PARAM_USER);
      String pwd = oneUser.get(USER_PARAM_PWD);
      String roles = oneUser.get(USER_PARAM_ROLES);
      if (!StringUtil.isNullString(user) &&
	  !StringUtil.isNullString(pwd)) {
	try {
	  UserAccount acct = acctMgr.addStaticUser(user, pwd);
	  if (!StringUtil.isNullString(roles)) {
	    acct.setRoles(roles);
	  }
	} catch (AccountManager.NotAddedException e) {
	  log.error(e.getMessage());
	}
      }
    }
  }

  /** Add an IP access handler to the context and remember it so future
   * filter config changes are applied to it (see setIpFilters()). */
  protected void addAccessHandler(HttpContext context) {
    IpAccessHandler ah = new IpAccessHandler(serverName);
    setIpFilter(ah);
    context.addHandler(ah);
    accessHandlers.add(ah);
  }

  /** Install basic- or form-based authentication on the context, if a realm
   * is present, and record the resulting Authenticator. */
  protected void setContextAuthHandler(HttpContext context, UserRealm realm) {
    if (realm != null) {
      context.setRealm(realm);
      switch (authType) {
      case Basic:
	log.info(mi.serverName + ", " + context.getName() + ": Using basic auth");
	context.setAuthenticator(new BasicAuthenticator());
	context.addHandler(new SecurityHandler());
	context.addSecurityConstraint("/",
				      newSecurityConstraint(mi.serverName,
							    "*"));
	break;
      case Form:
	log.info(mi.serverName + ", " + context.getName() + ": Using form auth");
	LockssFormAuthenticator fa = new LockssFormAuthenticator(getDaemon());
	fa.setLoginPage(formLoginUrl);
	fa.setErrorPage(formLoginErrorUrl);
	if (maxLoginInactivity > 0) {
	  fa.setMaxInactivity(maxLoginInactivity);
	}
	context.addSecurityConstraint("/",
				      newSecurityConstraint(mi.serverName,
							    "*"));
	context.setAuthenticator(fa);
	break;
      }
      authenticator = context.getAuthenticator();
    }
  }

  /** Build a security constraint for the role, requiring integral (SSL)
   * transport when SSL is in use. */
  SecurityConstraint newSecurityConstraint(String name, String role) {
    SecurityConstraint sc = new SecurityConstraint(name, role);
    if (useSsl) {
      // Ensure all contexts insist on appropriate security
      sc.setDataConstraint(SecurityConstraint.DC_INTEGRAL);
    }
    return sc;
  }

  /** Create a servlet handler wired to the shared session manager. */
  WebApplicationHandler makeWebAppHandler(HttpContext context) {
    WebApplicationHandler handler = new WebApplicationHandler();
    handler.setSessionManager(sessionMgr);
    context.setAttribute(CONTEXT_ATTR_SERVLET_HANDLER, handler);
    return handler;
  }

  /** Set up a context serving files from a directory (optionally filtered),
   * with login form, authentication handler and text/plain mime mappings. */
  protected void setupDirContext(HttpServer server, UserRealm realm,
				 String contextPath, String dir,
				 FilenameFilter filter)
      throws MalformedURLException {

    HttpContext context = makeContext(server, contextPath);

    // user authentication handler
    setContextAuthHandler(context, realm);

    WebApplicationHandler handler = makeWebAppHandler(context);
    context.addHandler(handler);

    handler.addServlet("Login", "/LoginForm", "org.lockss.servlet.LoginForm");

    // Log Dir resource
    String dirname = (dir != null) ? dir : "";
    URL url = new URL("file", null,
		      new File(dirname).getAbsolutePath());
    if (filter != null) {
      try {
	org.mortbay.util.Resource res =
	  new FilteredDirFileResource(url, filter);
	context.setBaseResource(res);
      } catch (IOException e) {
	throw new
	  MalformedURLException("Can't create " +
				"FilteredDirFileResource: " + e.toString());
      } catch (URISyntaxException e) {
	throw new
	  MalformedURLException("Can't create " +
				"FilteredDirFileResource: " + e.toString());
      }
    } else {
      context.setResourceBase(url.toString());
    }
    switch (authType) {
    case Basic:
      break;
    case Form:
      // Redirect any path under the login URL back to the login form itself
      Map redirMap = new HashMap();
      redirMap.put(formLoginUrl + ".*", formLoginUrl);
      context.setAttribute(CONTEXT_ATTR_RESOURCE_REDIRECT_MAP, redirMap);
      break;
    }
    ServletHolder holder =
      handler.addServlet("Resource", "/",
			 "org.lockss.servlet.LockssResourceServlet");
    holder.put("dirAllowed", "true");

    for (int ix = 0; ix < textMimes.length; ix++) {
      context.setMimeMapping(textMimes[ix], "text/plain");
    }
//     context.setMimeMapping("gz", "text/gzip");
//     context.setTypeEncoding("text/gzip", "x-gzip");

    // NotFoundHandler
    context.addHandler(new NotFoundHandler());
  }

  /** Add all enabled, loadable, non-URL-path servlets from the descrs to the
   * handler. */
  protected void addServlets(ServletDescr[] descrs, ServletHandler handler) {
    for (ServletDescr d : descrs) {
      Class cls = d.getServletClass();
      if (cls != null
	  && cls != ServletDescr.UNAVAILABLE_SERVLET_MARKER
	  && !d.isPathIsUrl()
	  && d.isEnabled(getDaemon())) {
	String path = "/" + d.getPath();
	log.debug2("addServlet("+d.getServletName()+", "+path+
		   ", "+cls.getName()+")");
	handler.addServlet(d.getServletName(), path, cls.getName());
      }
    }
  }

  // Add a servlet if its class can be loaded.
  protected void addServletIfAvailable(ServletHandler handler, String name,
				       String pathSpec, String servletClass) {
    try {
      Class.forName(servletClass);
      handler.addServlet(name, pathSpec, servletClass);
    } catch (ClassNotFoundException e) {
      log.warning("Not starting servlet \"" + name +
		  "\", class not found: " + servletClass);
    }
  }

  /** Struct to hold particulars of concrete servlet managers needed for
   * generic processing.  Mostly config param defaults. */
  protected static class ManagerInfo {
    String prefix;
    String accessPrefix;	// if from different server
    String serverName;
    boolean defaultStart;
    int defaultPort;
    String default403Msg = "Access forbidden";
    boolean doAuth;
    boolean doFilterIpAccess = true;
    String authRealm;
    boolean defaultEnableDebugUser;
    boolean defaultLogForbidden;
    boolean defaultResolveRemoteHost;
    String debugUserFile;
  }
}
package org.mozilla.mozstumbler;
import android.net.wifi.ScanResult;
final class SSIDBlockList {
private static final String[] PREFIX_LIST = {
// Mobile devices
"AndroidAP",
"AndroidHotspot",
"Android Hotspot",
"barnacle", // Android tether app
"Galaxy Note",
"Galaxy S",
"Galaxy Tab",
"HTC ",
"iPhone",
"LG-MS770",
"LG-MS870",
"LG VS910 4G",
"LG Vortex",
"MIFI",
"MiFi",
"myLGNet",
"myTouch 4G Hotspot",
"NOKIA Lumia",
"PhoneAP",
"SCH-I",
"Sprint MiFi",
"Verizon ",
"Verizon-",
"VirginMobile MiFi",
"VodafoneMobileWiFi-",
"FirefoxHotspot",
"Mobile Hotspot", // BlackBerry OS 10
// Transportation Wi-Fi
"ac_transit_wifi_bus",
"AmtrakConnect",
"Amtrak_",
"amtrak_",
"GBUS",
"GBusWifi",
"gogoinflight", // Gogo in-flight WiFi
"SF Shuttle Wireless",
"ShuttleWiFi",
"Southwest WiFi", // Southwest Airlines in-flight WiFi
"SST-PR-1", // Sears Home Service van hotspot?!
"wifi_rail", // BART
"egged.co.il", // Egged transportation services (Israel)
"gb-tours.com", // GB Tours transportation services (Israel)
"ISRAEL-RAILWAYS",
"Omni-WiFi", // Omnibus transportation services (Israel)
"Telekom_ICE", // Deutsche Bahn on-train WiFi
"TPE-Free Bus", // Taipei City on-bus WiFi (Taiwan)
"THSR-VeeTIME", // Taiwan High Speed Rail on-train WiFi
"VR-junaverkko", // VR on-train WiFi (Finland)
"CapitalBus", // Capital Bus on-bus WiFi (Taiwan)
"Hot-Spot-KS", // Koleje Slaskie transportation services (Poland)
};
private static final String[] SUFFIX_LIST = {
// Mobile devices
"iPhone",
"iphone",
"MIFI",
"MIFI",
"MiFi",
"Mifi",
"mifi",
"mi-fi",
"MyWi",
"Phone",
"Portable Hotspot",
"Tether",
"tether",
// Google's SSID opt-out
"_nomap",
};
private SSIDBlockList() {
}
static boolean contains(ScanResult scanResult) {
String SSID = scanResult.SSID;
if (SSID == null) {
return true; // no SSID?
}
for (String prefix : PREFIX_LIST) {
if (SSID.startsWith(prefix)) {
return true; // blocked!
}
}
for (String suffix : SUFFIX_LIST) {
if (SSID.endsWith(suffix)) {
return true; // blocked!
}
}
return false;
}
} |
package org.nutz.dao.util.cnd;
import org.nutz.dao.Condition;
import org.nutz.dao.entity.Entity;
/**
* SQL
*
* @author zozoh(zozohtnt@gmail.com)
*/
public class SimpleCondition implements Condition {
private String content;
public SimpleCondition(Object obj) {
this.content = obj.toString();
}
public SimpleCondition(String format, Object... args) {
this.content = String.format(format, args);
}
public String toSql(Entity<?> entity) {
return content;
}
public String toString() {
return toSql(null);
}
} |
package org.opencms.jsp;
import org.opencms.ade.containerpage.shared.CmsFormatterConfig;
import org.opencms.file.CmsObject;
import org.opencms.file.CmsResource;
import org.opencms.file.history.CmsHistoryResourceHandler;
import org.opencms.file.types.CmsResourceTypeXmlContent;
import org.opencms.file.types.I_CmsResourceType;
import org.opencms.flex.CmsFlexController;
import org.opencms.jsp.util.CmsJspStandardContextBean;
import org.opencms.loader.CmsLoaderException;
import org.opencms.main.CmsException;
import org.opencms.main.CmsLog;
import org.opencms.main.OpenCms;
import org.opencms.util.CmsStringUtil;
import org.opencms.util.CmsUUID;
import org.opencms.xml.CmsXmlContentDefinition;
import org.opencms.xml.containerpage.CmsContainerBean;
import org.opencms.xml.containerpage.CmsContainerElementBean;
import org.opencms.xml.containerpage.CmsContainerPageBean;
import org.opencms.xml.containerpage.CmsFormatterConfiguration;
import org.opencms.xml.containerpage.CmsXmlContainerPage;
import org.opencms.xml.containerpage.CmsXmlContainerPageFactory;
import org.opencms.xml.containerpage.I_CmsFormatterBean;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import javax.servlet.ServletRequest;
import javax.servlet.jsp.JspException;
import javax.servlet.jsp.tagext.BodyTagSupport;
import org.apache.commons.logging.Log;
/**
* This tag includes required CSS or JavaScript resources that are to be places in the HTML head.<p>
*
* Required resources can be configured in the resource type schema.
* Set attribute type to 'css' to include css resources or to 'javascript' to include JavaScript resources.<p>
*
* @since 8.0
*/
public class CmsJspTagHeadIncludes extends BodyTagSupport implements I_CmsJspTagParamParent {
/** The include type CSS. */
public static final String TYPE_CSS = "css";
/** The include type java-script. */
public static final String TYPE_JAVASCRIPT = "javascript";
/** The log object for this class. */
private static final Log LOG = CmsLog.getLog(CmsJspTagHeadIncludes.class);
/** Serial version UID required for safe serialisation. */
private static final long serialVersionUID = 5496349529835666345L;
/** The value of the closetags attribute. */
private String m_closeTags;
/** The default include resources separated by '|'. */
private String m_defaults;
/** The detail container type. */
private String m_detailType;
/** The detail container width. */
private String m_detailWidth;
/** Map to save parameters to the include in. */
private Map<String, String[]> m_parameterMap;
/** The include type. */
private String m_type;
/**
* Adds parameters to a parameter Map that can be used for a http request.<p>
*
* @param parameters the Map to add the parameters to
* @param name the name to add
* @param value the value to add
* @param overwrite if <code>true</code>, a parameter in the map will be overwritten by
* a parameter with the same name, otherwise the request will have multiple parameters
* with the same name (which is possible in http requests)
*/
public static void addParameter(Map<String, String[]> parameters, String name, String value, boolean overwrite) {
// No null values allowed in parameters
if ((parameters == null) || (name == null) || (value == null)) {
return;
}
// Check if the parameter name (key) exists
if (parameters.containsKey(name) && (!overwrite)) {
// Yes: Check name values if value exists, if so do nothing, else add new value
String[] values = parameters.get(name);
String[] newValues = new String[values.length + 1];
System.arraycopy(values, 0, newValues, 0, values.length);
newValues[values.length] = value;
parameters.put(name, newValues);
} else {
// No: Add new parameter name / value pair
String[] values = new String[] {value};
parameters.put(name, values);
}
}
/**
* @see org.opencms.jsp.I_CmsJspTagParamParent#addParameter(java.lang.String, java.lang.String)
*/
public void addParameter(String name, String value) {
// No null values allowed in parameters
if ((name == null) || (value == null)) {
return;
}
// Check if internal map exists, create new one if not
if (m_parameterMap == null) {
m_parameterMap = new HashMap<String, String[]>();
}
addParameter(m_parameterMap, name, value, false);
}
/**
* @return <code>EVAL_PAGE</code>
*
* @see javax.servlet.jsp.tagext.Tag#doEndTag()
*
* @throws JspException by interface default
*/
@Override
public int doEndTag() throws JspException {
ServletRequest req = pageContext.getRequest();
CmsFlexController controller = CmsFlexController.getController(req);
CmsObject cms = controller.getCmsObject();
try {
if (TYPE_CSS.equals(m_type)) {
tagCssAction(cms, req);
}
if (TYPE_JAVASCRIPT.equals(m_type)) {
tagJSAction(cms, req);
}
} catch (Exception e) {
throw new JspException(e);
} finally {
m_parameterMap = null;
}
return EVAL_PAGE;
}
    /**
     * Returns <code>{@link #EVAL_BODY_BUFFERED}</code>.<p>
     *
     * The body is evaluated into a buffer so that nested tags can call
     * {@link #addParameter(String, String)} before {@link #doEndTag()} runs.<p>
     *
     * @return <code>{@link #EVAL_BODY_BUFFERED}</code>
     *
     * @see javax.servlet.jsp.tagext.Tag#doStartTag()
     */
    @Override
    public int doStartTag() {
        return EVAL_BODY_BUFFERED;
    }
/**
 * Returns the default include resources separated by '|'.<p>
 *
 * These resources are added before any includes collected from the container
 * page elements.
 *
 * @return the default include resources
 */
public String getDefaults() {

    return m_defaults;
}

/**
 * Returns the detail container type.<p>
 *
 * Used together with the detail container width to select a single detail
 * formatter instead of including the resources of all detail formatters.
 *
 * @return the detail container type
 */
public String getDetailtype() {

    return m_detailType;
}

/**
 * Returns the detail container width.<p>
 *
 * Parsed as an integer; a non-numeric value causes all detail formatter
 * includes to be used.
 *
 * @return the detail container width
 */
public String getDetailwidth() {

    return m_detailWidth;
}

/**
 * Returns the type.<p>
 *
 * Expected to be either the CSS or the JavaScript type constant, deciding
 * which include action runs in doEndTag().
 *
 * @return the type
 */
public String getType() {

    return m_type;
}
/**
 * Sets the value of the closetags attribute.<p>
 *
 * The value is parsed as a boolean in {@link #shouldCloseTags()}; when unset,
 * tags are closed by default.
 *
 * @param closeTags the value of the closetags attribute
 */
public void setClosetags(String closeTags) {

    m_closeTags = closeTags;
}

/**
 * Sets the default include resources separated by '|'.<p>
 *
 * @param defaults the default include resources to set
 */
public void setDefaults(String defaults) {

    m_defaults = defaults;
}

/**
 * Sets the detail container type.<p>
 *
 * @param detailType the detail container type to set
 */
public void setDetailtype(String detailType) {

    m_detailType = detailType;
}

/**
 * Sets the detail container width.<p>
 *
 * @param detailWidth the detail container width to set
 */
public void setDetailwidth(String detailWidth) {

    m_detailWidth = detailWidth;
}

/**
 * Sets the type.<p>
 *
 * @param type the type to set
 */
public void setType(String type) {

    m_type = type;
}
/**
 * Returns true if the headincludes tag should be closed.<p>
 *
 * Defaults to <code>true</code> when the closetags attribute was not set.
 *
 * @return true if the headincludes tag should be closed
 */
public boolean shouldCloseTags() {

    // an unset attribute defaults to closing the tags
    return (m_closeTags == null) || Boolean.parseBoolean(m_closeTags);
}
/**
 * Action to include the CSS resources.<p>
 *
 * Collects the CSS head include URIs and the inline CSS of the default
 * resources, of every container page element (descending into group and
 * inherited containers) and of the detail content formatters, then writes the
 * corresponding link and style tags to the page output.
 *
 * @param cms the current cms context
 * @param req the current request
 *
 * @throws CmsException if something goes wrong reading the resources
 * @throws IOException if something goes wrong writing to the response out
 */
public void tagCssAction(CmsObject cms, ServletRequest req) throws CmsException, IOException {

    CmsJspStandardContextBean standardContext = getStandardContext(cms, req);
    CmsContainerPageBean containerPage = standardContext.getPage();
    // linked collections preserve insertion order while removing duplicates
    Set<String> cssIncludes = new LinkedHashSet<String>();
    Map<String, String> inlineCss = new LinkedHashMap<String, String>();
    // add defaults first, so they appear before any element includes
    if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(m_defaults)) {
        String[] defaults = m_defaults.split("\\|");
        for (int i = 0; i < defaults.length; i++) {
            cssIncludes.add(defaults[i]);
        }
    }
    if ((containerPage != null) && (containerPage.getElements() != null)) {
        Map<CmsUUID, I_CmsFormatterBean> formatters = OpenCms.getADEManager().getCachedFormatters(
            standardContext.isOnline()).getFormatters();
        for (CmsContainerBean container : containerPage.getContainers().values()) {
            for (CmsContainerElementBean element : container.getElements()) {
                try {
                    element.initResource(cms);
                    if (element.isGroupContainer(cms) || element.isInheritedContainer(cms)) {
                        // group / inherited containers: collect from every sub element
                        List<CmsContainerElementBean> subElements;
                        if (element.isGroupContainer(cms)) {
                            subElements = CmsJspTagContainer.getGroupContainerElements(
                                cms,
                                element,
                                req,
                                container.getType());
                        } else {
                            subElements = CmsJspTagContainer.getInheritedContainerElements(cms, element);
                        }
                        for (CmsContainerElementBean subElement : subElements) {
                            subElement.initResource(cms);
                            I_CmsFormatterBean formatter = getFormatterBeanForElement(
                                subElement,
                                container,
                                formatters);
                            if (formatter != null) {
                                // configured formatter: use its head includes and inline CSS
                                cssIncludes.addAll(formatter.getCssHeadIncludes());
                                if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(formatter.getInlineCss())) {
                                    inlineCss.put(formatter.getId(), formatter.getInlineCss());
                                }
                            } else {
                                // no formatter configuration: fall back to the includes
                                // configured in the schema's content handler
                                cssIncludes.addAll(getCSSHeadIncludes(cms, subElement.getResource()));
                            }
                        }
                    } else {
                        I_CmsFormatterBean formatter = getFormatterBeanForElement(element, container, formatters);
                        if (formatter != null) {
                            cssIncludes.addAll(formatter.getCssHeadIncludes());
                            if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(formatter.getInlineCss())) {
                                inlineCss.put(formatter.getId(), formatter.getInlineCss());
                            }
                        } else {
                            cssIncludes.addAll(getCSSHeadIncludes(cms, element.getResource()));
                        }
                    }
                } catch (CmsException e) {
                    // a broken element must not prevent the remaining includes from rendering
                    LOG.error(
                        Messages.get().getBundle().key(
                            Messages.ERR_READING_REQUIRED_RESOURCE_1,
                            element.getSitePath()),
                        e);
                }
            }
        }
    }
    if (standardContext.getDetailContentId() != null) {
        // also include the CSS required by the detail content formatters
        try {
            CmsResource detailContent = cms.readResource(standardContext.getDetailContentId());
            CmsFormatterConfiguration config = OpenCms.getADEManager().lookupConfiguration(
                cms,
                cms.getRequestContext().getRootUri()).getFormatters(cms, detailContent);
            boolean requiresAllIncludes = true;
            if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(getDetailtype())
                && CmsStringUtil.isNotEmptyOrWhitespaceOnly(getDetailwidth())) {
                try {
                    // type and width given: restrict to the single matching detail formatter
                    int width = Integer.parseInt(getDetailwidth());
                    I_CmsFormatterBean formatter = config.getDetailFormatter(getDetailtype(), width);
                    cssIncludes.addAll(formatter.getCssHeadIncludes());
                    if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(formatter.getInlineCss())) {
                        inlineCss.put(formatter.getId(), formatter.getInlineCss());
                    }
                    requiresAllIncludes = false;
                } catch (NumberFormatException ne) {
                    // nothing to do, we will include CSS for all detail containers
                }
            }
            if (requiresAllIncludes) {
                for (I_CmsFormatterBean formatter : config.getDetailFormatters()) {
                    cssIncludes.addAll(formatter.getCssHeadIncludes());
                    if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(formatter.getInlineCss())) {
                        inlineCss.put(formatter.getId(), formatter.getInlineCss());
                    }
                }
            }
        } catch (CmsException e) {
            LOG.error(
                Messages.get().getBundle().key(
                    Messages.ERR_READING_REQUIRED_RESOURCE_1,
                    standardContext.getDetailContentId()),
                e);
        }
    }
    // write one link tag per collected include URI
    for (String cssUri : cssIncludes) {
        pageContext.getOut().print(
            "\n<link href=\""
                + CmsJspTagLink.linkTagAction(cssUri.trim(), req)
                + generateReqParams()
                + "\" rel=\"stylesheet\" type=\"text/css\">");
        if (shouldCloseTags()) {
            pageContext.getOut().print("</link>");
        }
    }
    if (cms.getRequestContext().getCurrentProject().isOnlineProject()) {
        // online: emit all inline CSS in a single combined style tag
        if (!inlineCss.isEmpty()) {
            StringBuffer inline = new StringBuffer("\n<style type=\"text/css\">\n");
            for (Entry<String, String> cssEntry : inlineCss.entrySet()) {
                inline.append(cssEntry.getValue()).append("\n\n");
            }
            inline.append("\n</style>\n");
            pageContext.getOut().print(inline.toString());
        }
    } else {
        // offline: one style tag per formatter, carrying the formatter id in the
        // rel attribute -- presumably so editing components can identify it; TODO confirm
        StringBuffer inline = new StringBuffer();
        for (Entry<String, String> cssEntry : inlineCss.entrySet()) {
            inline.append("\n<style type=\"text/css\" rel=\"" + cssEntry.getKey() + "\">\n");
            inline.append(cssEntry.getValue()).append("\n\n");
            inline.append("\n</style>\n");
        }
        pageContext.getOut().print(inline.toString());
    }
}
/**
 * Action to include the java-script resources.<p>
 *
 * Collects the JavaScript head include URIs and the inline JavaScript of the
 * default resources, of every container page element (descending into group
 * and inherited containers) and of the detail content formatters, then writes
 * the corresponding script tags to the page output.
 *
 * @param cms the current cms context
 * @param req the current request
 *
 * @throws CmsException if something goes wrong reading the resources
 * @throws IOException if something goes wrong writing to the response out
 */
public void tagJSAction(CmsObject cms, ServletRequest req) throws CmsException, IOException {

    CmsJspStandardContextBean standardContext = getStandardContext(cms, req);
    CmsContainerPageBean containerPage = standardContext.getPage();
    // linked collections preserve insertion order while removing duplicates
    Set<String> jsIncludes = new LinkedHashSet<String>();
    Map<String, String> inlineJS = new LinkedHashMap<String, String>();
    // add defaults first, so they appear before any element includes
    if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(m_defaults)) {
        String[] defaults = m_defaults.split("\\|");
        for (int i = 0; i < defaults.length; i++) {
            jsIncludes.add(defaults[i]);
        }
    }
    if ((containerPage != null) && (containerPage.getElements() != null)) {
        Map<CmsUUID, I_CmsFormatterBean> formatters = OpenCms.getADEManager().getCachedFormatters(
            standardContext.isOnline()).getFormatters();
        for (CmsContainerBean container : containerPage.getContainers().values()) {
            for (CmsContainerElementBean element : container.getElements()) {
                try {
                    element.initResource(cms);
                    if (element.isGroupContainer(cms) || element.isInheritedContainer(cms)) {
                        // group / inherited containers: collect from every sub element
                        List<CmsContainerElementBean> subElements;
                        if (element.isGroupContainer(cms)) {
                            subElements = CmsJspTagContainer.getGroupContainerElements(
                                cms,
                                element,
                                req,
                                container.getType());
                        } else {
                            subElements = CmsJspTagContainer.getInheritedContainerElements(cms, element);
                        }
                        for (CmsContainerElementBean subElement : subElements) {
                            subElement.initResource(cms);
                            I_CmsFormatterBean formatter = getFormatterBeanForElement(
                                subElement,
                                container,
                                formatters);
                            if (formatter != null) {
                                // configured formatter: use its head includes and inline JS
                                jsIncludes.addAll(formatter.getJavascriptHeadIncludes());
                                if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(formatter.getInlineJavascript())) {
                                    inlineJS.put(formatter.getId(), formatter.getInlineJavascript());
                                }
                            } else {
                                // no formatter configuration: fall back to the includes
                                // configured in the schema's content handler
                                jsIncludes.addAll(getJSHeadIncludes(cms, subElement.getResource()));
                            }
                        }
                    } else {
                        I_CmsFormatterBean formatter = getFormatterBeanForElement(element, container, formatters);
                        if (formatter != null) {
                            jsIncludes.addAll(formatter.getJavascriptHeadIncludes());
                            if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(formatter.getInlineJavascript())) {
                                inlineJS.put(formatter.getId(), formatter.getInlineJavascript());
                            }
                        } else {
                            jsIncludes.addAll(getJSHeadIncludes(cms, element.getResource()));
                        }
                    }
                } catch (CmsException e) {
                    // a broken element must not prevent the remaining includes from rendering
                    LOG.error(
                        Messages.get().getBundle().key(
                            Messages.ERR_READING_REQUIRED_RESOURCE_1,
                            element.getSitePath()),
                        e);
                }
            }
        }
    }
    if (standardContext.getDetailContentId() != null) {
        // also include the JavaScript required by the detail content formatters
        try {
            CmsResource detailContent = cms.readResource(standardContext.getDetailContentId());
            CmsFormatterConfiguration config = OpenCms.getADEManager().lookupConfiguration(
                cms,
                cms.getRequestContext().getRootUri()).getFormatters(cms, detailContent);
            boolean requiresAllIncludes = true;
            if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(getDetailtype())
                && CmsStringUtil.isNotEmptyOrWhitespaceOnly(getDetailwidth())) {
                try {
                    // type and width given: restrict to the single matching detail formatter
                    int width = Integer.parseInt(getDetailwidth());
                    I_CmsFormatterBean formatter = config.getDetailFormatter(getDetailtype(), width);
                    // fixed: this previously added the formatter's CSS head includes
                    // (getCssHeadIncludes) instead of its JavaScript head includes
                    jsIncludes.addAll(formatter.getJavascriptHeadIncludes());
                    // also collect the formatter's inline JS, consistent with tagCssAction
                    if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(formatter.getInlineJavascript())) {
                        inlineJS.put(formatter.getId(), formatter.getInlineJavascript());
                    }
                    requiresAllIncludes = false;
                } catch (NumberFormatException ne) {
                    // nothing to do, we will include JavaScript for all detail containers
                }
            }
            if (requiresAllIncludes) {
                for (I_CmsFormatterBean formatter : config.getDetailFormatters()) {
                    jsIncludes.addAll(formatter.getJavascriptHeadIncludes());
                    // collect inline JS here as well, consistent with tagCssAction
                    if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(formatter.getInlineJavascript())) {
                        inlineJS.put(formatter.getId(), formatter.getInlineJavascript());
                    }
                }
            }
        } catch (CmsException e) {
            LOG.error(
                Messages.get().getBundle().key(
                    Messages.ERR_READING_REQUIRED_RESOURCE_1,
                    standardContext.getDetailContentId()),
                e);
        }
    }
    // write one script tag per collected include URI
    for (String jsUri : jsIncludes) {
        pageContext.getOut().print(
            "<script type=\"text/javascript\" src=\""
                + CmsJspTagLink.linkTagAction(jsUri.trim(), req)
                + generateReqParams()
                + "\"></script>");
    }
    if (!inlineJS.isEmpty()) {
        // emit all inline JavaScript in a single combined script tag
        StringBuffer inline = new StringBuffer("\n<script type=\"text/javascript\">\n");
        for (Entry<String, String> jsEntry : inlineJS.entrySet()) {
            inline.append(jsEntry.getValue()).append("\n\n");
        }
        inline.append("\n</script>\n");
        pageContext.getOut().print(inline.toString());
    }
}
/**
 * Generates the request parameter string.<p>
 *
 * Builds <code>?name=value&amp;name=value...</code> from the nested tag
 * parameters; values are URL-encoded, names are used as-is.
 *
 * Fixed two issues of the previous version: string concatenation inside the
 * loop (quadratic) is replaced by a StringBuilder, and the case of a non-empty
 * map whose entries all carry a <code>null</code> value array no longer throws
 * a StringIndexOutOfBoundsException (it returned <code>"?" + "".substring(1)</code>
 * before); an empty string is returned instead.
 *
 * @return the request parameter string, or the empty string if no parameters are set
 *
 * @throws UnsupportedEncodingException if something goes wrong encoding the request parameters
 */
private String generateReqParams() throws UnsupportedEncodingException {

    if ((m_parameterMap == null) || m_parameterMap.isEmpty()) {
        return "";
    }
    StringBuilder params = new StringBuilder();
    for (Entry<String, String[]> paramEntry : m_parameterMap.entrySet()) {
        if (paramEntry.getValue() != null) {
            for (String value : paramEntry.getValue()) {
                // '?' before the first pair, '&' before every following pair
                params.append(params.length() == 0 ? '?' : '&');
                params.append(paramEntry.getKey()).append('=').append(URLEncoder.encode(value, "UTF-8"));
            }
        }
    }
    return params.toString();
}
/**
 * Returns the schema configured CSS head include resources.<p>
 *
 * Returns the empty set for resources that are not XML contents or whose
 * content definition cannot be read.
 *
 * @param cms the current cms context
 * @param resource the resource
 *
 * @return the configured CSS head include resources
 */
private Set<String> getCSSHeadIncludes(CmsObject cms, CmsResource resource) {

    // only XML contents can carry schema configured includes
    if (!CmsResourceTypeXmlContent.isXmlContent(resource)) {
        return Collections.emptySet();
    }
    try {
        CmsXmlContentDefinition contentDefinition = CmsXmlContentDefinition.getContentDefinitionForResource(
            cms,
            resource);
        return contentDefinition.getContentHandler().getCSSHeadIncludes(cms, resource);
    } catch (CmsException e) {
        // fall through to the empty set
        LOG.warn(e.getLocalizedMessage(), e);
    }
    return Collections.emptySet();
}
/**
 * Returns the formatter configuration for the given element, will return <code>null</code> for schema formatters.<p>
 *
 * Looks up the formatter id stored in the element settings under the key for
 * the given container and resolves it against the formatter map.
 *
 * @param element the element bean
 * @param container the container bean
 * @param formatters the formatter map
 *
 * @return the formatter configuration bean, or <code>null</code> if none is configured
 */
private I_CmsFormatterBean getFormatterBeanForElement(
    CmsContainerElementBean element,
    CmsContainerBean container,
    Map<CmsUUID, I_CmsFormatterBean> formatters) {

    I_CmsFormatterBean result = null;
    if (element.getSettings() != null) {
        // the formatter id is stored per container name in the element settings
        String formatterConfigId = element.getSettings().get(
            CmsFormatterConfig.getSettingsKeyForContainer(container.getName()));
        if (CmsUUID.isValidUUID(formatterConfigId)) {
            result = formatters.get(new CmsUUID(formatterConfigId));
        }
    }
    return result;
}
/**
 * Returns the schema configured JavaScript head include resources.<p>
 *
 * Returns the empty set for resources that are not XML contents or whose
 * content definition cannot be read.
 *
 * @param cms the current cms context
 * @param resource the resource
 *
 * @return the configured JavaScript head include resources
 *
 * @throws CmsLoaderException if something goes wrong reading the resource type
 */
private Set<String> getJSHeadIncludes(CmsObject cms, CmsResource resource) throws CmsLoaderException {

    // only XML contents can carry schema configured includes
    I_CmsResourceType resourceType = OpenCms.getResourceManager().getResourceType(resource.getTypeId());
    if (!(resourceType instanceof CmsResourceTypeXmlContent)) {
        return Collections.emptySet();
    }
    try {
        CmsXmlContentDefinition contentDefinition = CmsXmlContentDefinition.getContentDefinitionForResource(
            cms,
            resource);
        return contentDefinition.getContentHandler().getJSHeadIncludes(cms, resource);
    } catch (CmsException e) {
        // fall through to the empty set
        LOG.warn(e.getLocalizedMessage(), e);
    }
    return Collections.emptySet();
}
/**
 * Returns the standard context bean.<p>
 *
 * Initializes the container page on the context bean if it is not set yet,
 * reading it from the history handler first and from the request URI otherwise.
 *
 * @param cms the current cms context
 * @param req the current request
 *
 * @return the standard context bean
 *
 * @throws CmsException if something goes wrong
 */
private CmsJspStandardContextBean getStandardContext(CmsObject cms, ServletRequest req) throws CmsException {

    CmsJspStandardContextBean standardContext = CmsJspStandardContextBean.getInstance(req);
    if (standardContext.getPage() == null) {
        // the context carries no page yet: read the container page itself,
        // checking the history first
        CmsResource pageResource = (CmsResource)CmsHistoryResourceHandler.getHistoryResource(req);
        if (pageResource == null) {
            pageResource = cms.readResource(cms.getRequestContext().getUri());
        }
        CmsXmlContainerPage xmlContainerPage = CmsXmlContainerPageFactory.unmarshal(cms, pageResource, req);
        standardContext.setPage(xmlContainerPage.getContainerPage(cms, cms.getRequestContext().getLocale()));
    }
    return standardContext;
}
} |
package xal.tools.apputils;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.BorderLayout;
import java.awt.GridLayout;
import java.awt.Frame;
import javax.swing.*;
import javax.swing.table.*;
import java.util.*;
import xal.model.probe.Probe;
import xal.model.IAlgorithm;
import xal.tools.bricks.*;
import xal.tools.swing.KeyValueFilteredTableModel;
import xal.tools.data.KeyValueAdaptor;
import java.beans.*;
import java.lang.reflect.*;
/** SimpleProbeEditor */
public class SimpleProbeEditor extends JDialog {
    /** Private serializable version ID */
    private static final long serialVersionUID = 1L;

    /**
     * Property value types that may be edited directly in the table.
     * NOTE(review): EDITABLE_CLASSES, applyButton, closeButton and
     * getBeanObjectBeanInfo(Class) were referenced in this class but never
     * declared/defined, so the class could not compile; they are introduced
     * here. The set of editable classes mirrors the types parsed by
     * ProbeProperty.setValue(String) below -- confirm against the original sources.
     */
    private static final Class<?>[] EDITABLE_CLASSES = {
        Double.class, Float.class, Integer.class, Boolean.class, Long.class, Short.class, Byte.class, String.class
    };

    /** Table model of ProbeProperty records */
    final private KeyValueFilteredTableModel<ProbeProperty> PROPERTY_TABLE_MODEL;

    /** List of properties that appear in the properties table */
    private List<ProbeProperty> propertyList = new ArrayList<ProbeProperty>();

    /** Map keyed by group name of the object instance supplying that group's properties and methods */
    private Map<String, Object> propertyClasses = new HashMap<>();

    /** Used to look for methods given a method name key */
    final private KeyValueAdaptor KEY_VALUE_ADAPTOR;

    /** Probe that is being edited */
    final private Probe PROBE;

    /** Applies pending edits; a field (not a local) so ProbeProperty records can enable it when a value changes */
    private JButton applyButton;

    /** Dismisses this dialog */
    private JButton closeButton;

    /**
     * Constructor that takes a window parent and a probe to fetch properties from.
     * @param owner the frame that owns this modal dialog
     * @param probe the probe to edit
     */
    public SimpleProbeEditor( final Frame owner, final Probe probe ) {
        super( owner, "Probe Editor", true ); //Set JDialog's owner, title, and modality

        PROBE = probe; // Set the probe to edit
        KEY_VALUE_ADAPTOR = new KeyValueAdaptor();
        PROPERTY_TABLE_MODEL = new KeyValueFilteredTableModel<ProbeProperty>( new ArrayList<ProbeProperty>(), "group", "property", "value");

        setSize( 600, 600 ); // Set the window size
        initializeComponents(); // Set up each component in the editor
        pack(); // Fit the components in the window
        setLocationRelativeTo( owner ); // Center the editor in relation to the frame that constructed the editor
        setVisible(true); // Make the window visible
    }

    /**
     * Get the probe to edit
     * @return probe associated with this editor
     */
    public Probe getProbe() {
        return PROBE;
    }

    /**
     * Sets the table data with the properties found through introspection.
     *
     * Takes each property from the propertyClasses map, keeps the ones with
     * both a getter and a setter whose value type is editable, and adds them
     * to the list of properties shown in the table.
     */
    public void setTableProperties() {
        //Cycle through each group registered in the map
        for(String key : propertyClasses.keySet()) {
            //The instance of the object that will have its properties taken
            Object instance = propertyClasses.get(key);
            //Get the BeanInfo from the instance's class
            BeanInfo beanInfo = getBeanObjectBeanInfo(instance.getClass());
            //Get each property descriptor from the BeanInfo
            PropertyDescriptor[] descriptors = getPropertyDescriptors(beanInfo);
            //Cycle through each property descriptor found in the class
            for(int propIndex = 0; propIndex < descriptors.length; propIndex++) {
                //The property's write method for setting data
                Method write = descriptors[propIndex].getWriteMethod();
                //The property's read method for retrieving data
                Method read = descriptors[propIndex].getReadMethod();
                //If there is not a getter AND setter for each property, we can not edit the property
                if(write != null && read != null) {
                    //Gets the value of the property from the instance's read method
                    //(never null: getPropertyValue substitutes the string "null")
                    Object result = getPropertyValue( read, instance);
                    //Filter out classes we don't want to edit
                    if(isEditableClass(result.getClass())) {
                        //Add the property as a ProbeProperty to the list of editable properties
                        propertyList.add(new ProbeProperty(key, descriptors[propIndex].getName(), result, result.getClass()));
                    }//if(isEditableClass())
                }//if(write && read)
            }//for(descriptors)
        }//for(HashMap keys)
        //Update the properties table
        refreshView();
    }

    /**
     * Gets the value of a read method by invoking that method on an object instance.
     * @param method the property's read method
     * @param object the instance to invoke the read method on
     * @return the property value, or the string "null" if the value is null or the invocation failed
     */
    public Object getPropertyValue(Method method, Object object) {
        //Result from invoking the read method
        Object result = null;
        try {
            //Try to invoke the read method and get its value
            result = method.invoke( object );
        } catch (IllegalAccessException iae) {
            System.err.println(iae.getMessage());
        }
        catch (InvocationTargetException ite) {
            System.err.println(ite.getMessage());
        }
        //TODO: handle null
        //Return the result, substituting a marker string for null values
        return result == null ? "null" : result;
    }

    /**
     * Gets the PropertyDescriptors from a BeanInfo.
     * @param bean the bean info to read, may be null
     * @return the descriptors, or an empty array if the bean info is null
     */
    public PropertyDescriptor[] getPropertyDescriptors( BeanInfo bean ) {
        //If the bean is not null, return the descriptors
        return bean != null ? bean.getPropertyDescriptors() : new PropertyDescriptor[0];
    }

    /**
     * Convenience method to get the BeanInfo for the given class.
     * NOTE(review): reconstructed -- this method was called but missing from the class;
     * getPropertyDescriptors(BeanInfo) already tolerates the null returned on failure.
     * @param beanClass the class to introspect
     * @return the BeanInfo for the class, or null if introspection fails
     */
    private BeanInfo getBeanObjectBeanInfo( final Class<?> beanClass ) {
        try {
            return Introspector.getBeanInfo( beanClass );
        }
        catch( IntrospectionException exception ) {
            System.err.println( "Could not get the bean info for class: " + beanClass.getName() );
            return null;
        }
    }

    /**
     * Initialize the components of the probe editor: the filter field, the
     * property table with type-aware editors/renderers, and the apply/close buttons.
     */
    public void initializeComponents() {
        //Panel containing all elements
        final JPanel mainContainer = new JPanel();
        //Set the layout of the panel to a BorderLayout
        mainContainer.setLayout( new BorderLayout() );
        //Panel containing apply and close button with a 1 row, 2 column grid layout
        final JPanel controlPanel = new JPanel( new GridLayout(1, 2) );
        //Apply button; assigned to the field so ProbeProperty records can enable it on edits
        applyButton = new JButton( "Apply" );
        applyButton.setEnabled(false);
        //Close button
        closeButton = new JButton( "Close" );
        //Set the close button's action to close the dialog
        closeButton.addActionListener( new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                dispose();
            }
        });
        //Add the action listener as the ApplyButtonListener
        applyButton.addActionListener( new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                //Save the properties
                saveProbeProperties();
                //Mark the properties as unchanged/saved
                setPropertiesAsUnchanged();
                //Disable the button again until the next edit
                applyButton.setEnabled( false );
            }
        });
        //Add the close button to the button panel
        controlPanel.add( closeButton );
        //Add the apply button to the button panel
        controlPanel.add( applyButton );
        //Text field that filters the properties
        final JTextField filterTextField = new JTextField();
        //Set the text field properties to search field
        filterTextField.putClientProperty( "JTextField.variant", "search" );
        filterTextField.putClientProperty( "JTextField.Search.Prompt", "Property Filter" );
        //Table containing the properties that can be modified
        final JTable propertyTable = new JTable() {
            //Serializable version ID
            private static final long serialVersionUID = 1L;

            //Get the cell editor for the table
            @Override
            public TableCellEditor getCellEditor(int row, int col) {
                //Value at [row, col] of the table
                Object value = getValueAt(row, col);
                //Set the appropriate editor for each value type
                if( value instanceof Boolean )
                    return getDefaultEditor( Boolean.class );
                else if( value instanceof Double )
                    return getDefaultEditor( Double.class );
                else if( value instanceof Integer )
                    return getDefaultEditor( Integer.class );
                //Default editor (String type)
                return super.getCellEditor( row, col );
            }

            //Get the cell renderer for the table to change how values are displayed
            @Override
            public TableCellRenderer getCellRenderer(int row, int col) {
                //Value at [row, col]
                Object value = getValueAt(row, col);
                //Set the renderer of each type
                //Boolean = checkbox display
                //Double/Int = right aligned display
                if( value instanceof Boolean )
                    return getDefaultRenderer( Boolean.class );
                else if( value instanceof Double )
                    return getDefaultRenderer( Double.class );
                else if( value instanceof Integer )
                    return getDefaultRenderer( Integer.class );
                //Default = left aligned string display
                return super.getCellRenderer( row, col );
            }
        };
        //Set the table to allow one-click edit
        ((DefaultCellEditor) propertyTable.getDefaultEditor(Object.class)).setClickCountToStart(1);
        //Resize the last column
        propertyTable.setAutoResizeMode( JTable.AUTO_RESIZE_LAST_COLUMN);
        //Allow single selection only
        propertyTable.setSelectionMode( ListSelectionModel.SINGLE_SELECTION );
        //Match the property's keys with their method
        PROPERTY_TABLE_MODEL.setMatchingKeyPaths( "group", "property", "value");
        //Set the table filter component to the text field
        PROPERTY_TABLE_MODEL.setInputFilterComponent( filterTextField );
        //Set the editable column to the "value" column
        PROPERTY_TABLE_MODEL.setColumnEditable( "value", true );
        //Set the model to the table
        propertyTable.setModel( PROPERTY_TABLE_MODEL );
        //Update the table contents
        refreshView();
        //Add the scrollpane to the table with a vertical scrollbar
        final JScrollPane scrollPane = new JScrollPane( propertyTable, JScrollPane.VERTICAL_SCROLLBAR_ALWAYS, JScrollPane.HORIZONTAL_SCROLLBAR_AS_NEEDED );
        //Add the text field to the top of the dialog
        mainContainer.add( filterTextField, BorderLayout.NORTH );
        //Add the table to the center of the dialog
        mainContainer.add( scrollPane, BorderLayout.CENTER );
        //Add the buttons to the bottom of the dialog
        mainContainer.add( controlPanel, BorderLayout.SOUTH );
        //Add everything to the dialog
        add( mainContainer );
    }

    /** Set the values of the table to the property list */
    private void refreshView() {
        //Set the records as the properties from the property list
        PROPERTY_TABLE_MODEL.setRecords( propertyList );
    }

    /** Set the properties of the probe that have been changed */
    private void saveProbeProperties() {
        //Go through each value in the properties HashMap
        for(String key : propertyClasses.keySet()) {
            //The instance of the object from the class
            Object instance = propertyClasses.get(key);
            //Get the BeanInfo from the class in the HashMap
            BeanInfo beanInfo = getBeanObjectBeanInfo(instance.getClass());
            //Get the PropertyDescriptors from the class
            PropertyDescriptor[] descriptors = getPropertyDescriptors(beanInfo);
            //Go through each descriptor
            for(int propIndex = 0; propIndex < descriptors.length; propIndex++) {
                //Write method of the descriptor
                Method write = descriptors[propIndex].getWriteMethod();
                //Read method of the descriptor
                Method read = descriptors[propIndex].getReadMethod();
                //Do nothing if there is not both a read and write method
                if(write != null && read != null) {
                    //Find the right property in the list based on name
                    for(ProbeProperty pp : propertyList)
                    {
                        if(pp.hasChanged() && pp.getProperty().equals(descriptors[propIndex].getName())) {
                            //Try to invoke the write method with the changed property
                            try {
                                //Call the invoke method with the instance and value
                                System.out.println("Set property " + pp.getProperty());
                                write.invoke( instance, pp.getValue());
                            } catch (Exception e) {
                                //Display an error saying the property could not be set
                                System.out.println("Could not set property '" + pp.getProperty() + "' with value " + pp.getValue() + " of type " + pp.getValueType());
                                System.err.println(e.getMessage());
                            }// try/catch
                        }// if(correct property)
                    }// for(each ProbeProperty)
                }// if(write && read != null)
            }// for(descriptors)
        }// for(each HashMap key)
        //Update the table contents
        refreshView();
    }

    /** Determine if the property's class is editable or not based on the EDITABLE_CLASSES attribute */
    private boolean isEditableClass(Class<?> propertyClass) {
        //Look through each class in the EDITABLE_CLASSES array
        for(Class<?> c : EDITABLE_CLASSES) {
            if(propertyClass == c)
                return true;
        }
        return false;
    }

    /** Sets every property to be marked as unchanged after applying the changes */
    private void setPropertiesAsUnchanged() {
        for(ProbeProperty pp : propertyList) {
            if(pp.hasChanged()) pp.setHasChanged(false);
        }
    }

    /** ProbeProperty record that gets displayed in the property table */
    private class ProbeProperty {
        //Class type of the property
        private Class<?> _type;
        //Group name, and property name of the property
        private String _group, _property;
        //Actual value of the property
        private Object _value;
        //If the property has been changed or not
        private boolean _hasChanged = false;

        /** Constructor taking a group name, property name, value, and class type */
        public ProbeProperty(String group, String property, Object value, Class<?> type)
        {
            //Initialize the attributes
            _type = type;
            _property = property;
            _value = value;
            _group = group;
        }

        /** @return the group name formatted with html to be bold */
        public String getGroup() {
            return "<html><b>" + _group + "</b></html>";
        }

        /** @return the class type of the property */
        public Class<?> getType() {
            return _type;
        }

        /** @return the simple name of the property's value type */
        public String getValueType() {
            return _type.getSimpleName();
        }

        /** @return the property name */
        public String getProperty() {
            return _property;
        }

        /** @return the value of the property */
        public Object getValue() {
            return _value;
        }

        /** @return whether the value has changed since the last apply */
        public boolean hasChanged() {
            return _hasChanged;
        }

        /** Mark the property as changed or unchanged */
        public void setHasChanged(boolean changed) {
            _hasChanged = changed;
        }

        /**
         * Set the value from a Boolean (used by the checkbox cell editor)
         * and enable the apply button.
         */
        public void setValue(Boolean value) {
            if( !applyButton.isEnabled() ) applyButton.setEnabled(true);
            _hasChanged = true;
            _value = value;
        }

        /**
         * Set the value from a String, parsing it according to the property's
         * declared type; an unparsable value is rejected with an error message
         * and leaves the property unchanged.
         */
        public void setValue(String value) {
            try {
                if( _type == Double.class ){
                    _value = Double.parseDouble(value);
                }
                else if(_type == Float.class) {
                    _value = Float.parseFloat(value);
                }
                else if(_type == Integer.class) {
                    _value = Integer.parseInt(value);
                }
                else if(_type == Boolean.class) {
                    _value = Boolean.parseBoolean(value);
                }
                else if(_type == Long.class) {
                    _value = Long.parseLong(value);
                }
                else if(_type == Short.class) {
                    _value = Short.parseShort(value);
                }
                else if(_type == Byte.class) {
                    _value = Byte.parseByte(value);
                }
                else {
                    _value = value;
                }
                if(!applyButton.isEnabled()) applyButton.setEnabled(true);
                _hasChanged = true;
            } catch (Exception e) {
                System.err.println("Invalid property value " + value + " for " + getProperty());
            }
        }
    }
}
/** model for traversing a probe's object graph and collecting editable properties */
class ProbeEditableProperties {
/** array of classes for which the property can be edited directly */
final static private Set<Class<?>> EDITABLE_PROPERTY_TYPES = new HashSet<>();
/** probe to model */
final private Probe PROBE;
// static initializer
static {
// cache the editable properties in a set for quick comparison later
final Class<?>[] editablePropertyTypes = { Double.class, Double.TYPE, Integer.class, Integer.TYPE, Boolean.class, Boolean.TYPE, String.class };
for ( final Class<?> type : editablePropertyTypes ) {
EDITABLE_PROPERTY_TYPES.add( type );
}
}
/** constructor */
public ProbeProperties( final Probe probe ) {
PROBE = probe;
}
/** Get the probe */
public Probe getProbe() {
return PROBE;
}
/** Convenience method to get the BeanInfo for an object's class */
static private BeanInfo getBeanInfo( final Object object ) {
//Try to get the BeanInfo from the class given
try {
return Introspector.getBeanInfo( object.getClass() );
}
//Throw an exception if the BeanInfo could not be obtained
catch( IntrospectionException exception ) {
return null;
}
}
} |
package codeine;
import java.util.Random;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import javax.inject.Inject;
import org.apache.log4j.Logger;
import codeine.db.IStatusDatabaseConnector;
import codeine.jsons.global.GlobalConfigurationJsonStore;
import codeine.jsons.peer_status.PeerStatus;
import codeine.utils.ThreadUtils;
import com.google.common.base.Stopwatch;
public class PeerStatusChangedUpdater implements Runnable{
private long MAX_TIME_BETWEEN_UPDATES_MILLIS = TimeUnit.SECONDS.toMillis(10);
private long MIN_TIME_BETWEEN_UPDATES_MILLIS = TimeUnit.SECONDS.toMillis(1);
private static final Logger log = Logger.getLogger(PeerStatusChangedUpdater.class);
private PeerStatus peerStatus;
private IStatusDatabaseConnector databaseConnector;
private BlockingQueue<Object> blockingQueue = new LinkedBlockingQueue<>();
private Random random = new Random();
private GlobalConfigurationJsonStore globalConfigurationJson;
@Inject
public PeerStatusChangedUpdater(PeerStatus peerStatus, IStatusDatabaseConnector databaseConnector,GlobalConfigurationJsonStore globalConfigurationJson) {
super();
this.peerStatus = peerStatus;
this.databaseConnector = databaseConnector;
this.globalConfigurationJson = globalConfigurationJson;
if (!globalConfigurationJson.get().large_deployment()) {
MIN_TIME_BETWEEN_UPDATES_MILLIS = TimeUnit.SECONDS.toMillis(5);
MAX_TIME_BETWEEN_UPDATES_MILLIS = TimeUnit.SECONDS.toMillis(10);
}
}
public void pushUpdate() {
log.debug("pushUpdate()");
blockingQueue.add(new Object());
}
@Override
public void run() {
log.info("start updating");
long initialSleep = TimeUnit.SECONDS.toMillis(31 + random.nextInt(30));
if (!globalConfigurationJson.get().large_deployment()) {
initialSleep = TimeUnit.SECONDS.toMillis(10);
}
ThreadUtils.sleep(initialSleep);
while (true){
try {
pushUpdateNow();
ThreadUtils.sleep(MIN_TIME_BETWEEN_UPDATES_MILLIS);
waitForNextUpdate();
} catch (Exception e) {
log.warn("got exception", e);
}
}
}
private void waitForNextUpdate() {
try {
Stopwatch s = new Stopwatch().start();
log.debug("going to wait at most " + MAX_TIME_BETWEEN_UPDATES_MILLIS + "milli");
blockingQueue.poll(MAX_TIME_BETWEEN_UPDATES_MILLIS, TimeUnit.MILLISECONDS);
log.debug("waited " + s);
} catch (InterruptedException e) {
log.debug("interrupted", e);
}
}
private void pushUpdateNow() {
log.info("pushing update now");
blockingQueue.clear();
databaseConnector.putReplaceStatus(peerStatus.createJson());
}
} |
package peergos.shared.merklebtree;
import peergos.shared.cbor.*;
import peergos.shared.ipfs.api.*;
import peergos.shared.util.*;
import java.io.*;
import java.util.*;
public class MerkleNode implements Cborable {
public final byte[] data;
public final List<Link> links;
public MerkleNode(byte[] data, List<Link> links) {
this.data = data;
this.links = links;
Collections.sort(this.links);
}
public MerkleNode(byte[] data) {
this(data, Collections.emptyList());
}
public static class Link implements Comparable<Link> {
public final String label;
public final Multihash target;
public Link(String label, Multihash target) {
this.label = label;
this.target = target;
}
@Override
public int compareTo(Link link) {
return label.compareTo(link.label);
}
}
public MerkleNode addLink(String label, Multihash linkTarget) {
List<Link> tmp = new ArrayList<>(links);
tmp.add(new Link(label, linkTarget));
return new MerkleNode(data, tmp);
}
public MerkleNode setData(byte[] newData) {
return new MerkleNode(newData, links);
}
public CborObject.CborMap toCbor() {
SortedMap<CborObject, CborObject> cbor = new TreeMap<>();
cbor.put(new CborObject.CborString("Data"), new CborObject.CborByteArray(data));
for (Link link: links) {
cbor.put(new CborObject.CborString(link.label), new CborObject.CborMerkleLink((link.target)));
}
return new CborObject.CborMap(cbor);
}
public static MerkleNode fromCbor(CborObject obj) {
CborObject.CborMap map = (CborObject.CborMap) obj;
CborObject.CborString dataLabel = new CborObject.CborString("Data");
CborObject.CborByteArray data = (CborObject.CborByteArray) map.values.get(dataLabel);
List<Link> links = new ArrayList<>(map.values.size() - 1);
for (Map.Entry<CborObject, CborObject> entry : map.values.entrySet()) {
if (entry.getKey().equals(dataLabel))
continue;
String label = ((CborObject.CborString) entry.getKey()).value;
Multihash value = ((CborObject.CborMerkleLink) entry.getValue()).target;
links.add(new Link(label, value));
}
return new MerkleNode(data.value, links);
}
/**
*
* @param in a CBOR encoding of a merkle node
* @return
* @throws IOException for an invalid encoding
*/
public static MerkleNode deserialize(byte[] in) {
CborDecoder decoder = new CborDecoder(new ByteArrayInputStream(in));
CborObject cbor = CborObject.deserialize(decoder);
return fromCbor(cbor);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
MerkleNode that = (MerkleNode) o;
if (!Arrays.equals(data, that.data)) return false;
return links != null ? links.equals(that.links) : that.links == null;
}
@Override
public int hashCode() {
int result = Arrays.hashCode(data);
result = 31 * result + (links != null ? links.hashCode() : 0);
return result;
}
} |
package com.yahoo.vespa.flags;
import com.yahoo.component.Vtag;
import com.yahoo.vespa.defaults.Defaults;
import com.yahoo.vespa.flags.custom.PreprovisionCapacity;
import java.util.List;
import java.util.Optional;
import java.util.TreeMap;
import static com.yahoo.vespa.flags.FetchVector.Dimension.APPLICATION_ID;
import static com.yahoo.vespa.flags.FetchVector.Dimension.HOSTNAME;
import static com.yahoo.vespa.flags.FetchVector.Dimension.NODE_TYPE;
import static com.yahoo.vespa.flags.FetchVector.Dimension.VESPA_VERSION;
import static com.yahoo.vespa.flags.FetchVector.Dimension.ZONE_ID;
/**
* Definitions of feature flags.
*
* <p>To use feature flags, define the flag in this class as an "unbound" flag, e.g. {@link UnboundBooleanFlag}
* or {@link UnboundStringFlag}. At the location you want to get the value of the flag, you need the following:</p>
*
* <ol>
* <li>The unbound flag</li>
* <li>A {@link FlagSource}. The flag source is typically available as an injectable component. Binding
* an unbound flag to a flag source produces a (bound) flag, e.g. {@link BooleanFlag} and {@link StringFlag}.</li>
* <li>If you would like your flag value to be dependent on e.g. the application ID, then 1. you should
* declare this in the unbound flag definition in this file (referring to
* {@link FetchVector.Dimension#APPLICATION_ID}), and 2. specify the application ID when retrieving the value, e.g.
* {@link BooleanFlag#with(FetchVector.Dimension, String)}. See {@link FetchVector} for more info.</li>
* </ol>
*
* <p>Once the code is in place, you can override the flag value. This depends on the flag source, but typically
* there is a REST API for updating the flags in the config server, which is the root of all flag sources in the zone.</p>
*
* @author hakonhall
*/
public class Flags {
private static volatile TreeMap<FlagId, FlagDefinition> flags = new TreeMap<>();
public static final UnboundIntFlag DROP_CACHES = defineIntFlag("drop-caches", 3,
"The int value to write into /proc/sys/vm/drop_caches for each tick. " +
"1 is page cache, 2 is dentries inodes, 3 is both page cache and dentries inodes, etc.",
"Takes effect on next tick.",
HOSTNAME);
public static final UnboundBooleanFlag ENABLE_CROWDSTRIKE = defineFeatureFlag(
"enable-crowdstrike", true,
"Whether to enable CrowdStrike.", "Takes effect on next host admin tick",
HOSTNAME);
public static final UnboundBooleanFlag ENABLE_NESSUS = defineFeatureFlag(
"enable-nessus", true,
"Whether to enable Nessus.", "Takes effect on next host admin tick",
HOSTNAME);
public static final UnboundBooleanFlag ENABLE_FLEET_SSHD_CONFIG = defineFeatureFlag(
"enable-fleet-sshd-config", false,
"Whether fleet should manage the /etc/ssh/sshd_config file.",
"Takes effect on next host admin tick.",
HOSTNAME);
public static final UnboundBooleanFlag FLEET_CANARY = defineFeatureFlag(
"fleet-canary", false,
"Whether the host is a fleet canary.",
"Takes effect on next host admin tick.",
HOSTNAME);
public static final UnboundBooleanFlag SERVICE_MODEL_CACHE = defineFeatureFlag(
"service-model-cache", true,
"Whether the service model is cached.",
"Takes effect on restart of config server.",
HOSTNAME);
public static final UnboundBooleanFlag CLEANUP_STATUS_SERVICE = defineFeatureFlag(
"cleanup-status-service", false,
"Whether to remove orphaned hosts and applications in the ZooKeeper status service.",
"Takes effect on restart of config server.",
HOSTNAME);
public static final UnboundListFlag<String> DISABLED_HOST_ADMIN_TASKS = defineListFlag(
"disabled-host-admin-tasks", List.of(), String.class,
"List of host-admin task names (as they appear in the log, e.g. root>main>UpgradeTask) that should be skipped",
"Takes effect on next host admin tick",
HOSTNAME, NODE_TYPE);
public static final UnboundStringFlag DOCKER_VERSION = defineStringFlag(
"docker-version", "1.13.1-102.git7f2769b",
"The version of the docker to use of the format VERSION-REL: The YUM package to be installed will be " +
"2:docker-VERSION-REL.el7.centos.x86_64 in AWS (and without '.centos' otherwise). " +
"If docker-version is not of this format, it must be parseable by YumPackageName::fromString.",
"Takes effect on next tick.",
HOSTNAME);
public static final UnboundLongFlag THIN_POOL_GB = defineLongFlag(
"thin-pool-gb", -1,
"The size of the disk reserved for the thin pool with dynamic provisioning in AWS, in base-2 GB. " +
"If <0, the default is used (which may depend on the zone and node type).",
"Takes effect immediately (but used only during provisioning).",
NODE_TYPE);
public static final UnboundDoubleFlag CONTAINER_CPU_CAP = defineDoubleFlag(
"container-cpu-cap", 0,
"Hard limit on how many CPUs a container may use. This value is multiplied by CPU allocated to node, so " +
"to cap CPU at 200%, set this to 2, etc.",
"Takes effect on next node agent tick. Change is orchestrated, but does NOT require container restart",
HOSTNAME, APPLICATION_ID);
public static final UnboundBooleanFlag USE_BUCKET_SPACE_METRIC = defineFeatureFlag(
"use-bucket-space-metric", true,
"Whether to use vds.datastored.bucket_space.buckets_total (true) instead of " +
"vds.datastored.alldisks.buckets (false, legacy).",
"Takes effect on the next deployment of the application",
APPLICATION_ID);
public static final UnboundStringFlag TLS_INSECURE_MIXED_MODE = defineStringFlag(
"tls-insecure-mixed-mode", "tls_client_mixed_server",
"TLS insecure mixed mode. Allowed values: ['plaintext_client_mixed_server', 'tls_client_mixed_server', 'tls_client_tls_server']",
"Takes effect on restart of Docker container",
NODE_TYPE, APPLICATION_ID, HOSTNAME);
public static final UnboundStringFlag TLS_INSECURE_AUTHORIZATION_MODE = defineStringFlag(
"tls-insecure-authorization-mode", "log_only",
"TLS insecure authorization mode. Allowed values: ['disable', 'log_only', 'enforce']",
"Takes effect on restart of Docker container",
NODE_TYPE, APPLICATION_ID, HOSTNAME);
public static final UnboundBooleanFlag USE_ADAPTIVE_DISPATCH = defineFeatureFlag(
"use-adaptive-dispatch", false,
"Should adaptive dispatch be used over round robin",
"Takes effect at redeployment",
APPLICATION_ID);
public static final UnboundIntFlag REBOOT_INTERVAL_IN_DAYS = defineIntFlag(
"reboot-interval-in-days", 30,
"No reboots are scheduled 0x-1x reboot intervals after the previous reboot, while reboot is " +
"scheduled evenly distributed in the 1x-2x range (and naturally guaranteed at the 2x boundary).",
"Takes effect on next run of NodeRebooter");
public static final UnboundBooleanFlag RETIRE_WITH_PERMANENTLY_DOWN = defineFeatureFlag(
"retire-with-permanently-down", false,
"If enabled, retirement will end with setting the host status to PERMANENTLY_DOWN, " +
"instead of ALLOWED_TO_BE_DOWN (old behavior).",
"Takes effect on the next run of RetiredExpirer.",
HOSTNAME);
public static final UnboundBooleanFlag ENABLE_DYNAMIC_PROVISIONING = defineFeatureFlag(
"enable-dynamic-provisioning", false,
"Provision a new docker host when we otherwise can't allocate a docker node",
"Takes effect on next deployment",
APPLICATION_ID);
public static final UnboundListFlag<PreprovisionCapacity> PREPROVISION_CAPACITY = defineListFlag(
"preprovision-capacity", List.of(), PreprovisionCapacity.class,
"List of node resources and their count that should be present in zone to receive new deployments. When a " +
"preprovisioned is taken, new will be provisioned within next iteration of maintainer.",
"Takes effect on next iteration of HostProvisionMaintainer.");
public static final UnboundDoubleFlag DEFAULT_TERM_WISE_LIMIT = defineDoubleFlag(
"default-term-wise-limit", 1.0,
"Node resource memory in Gb for admin cluster nodes",
"Takes effect at redeployment",
APPLICATION_ID);
public static final UnboundBooleanFlag HOST_HARDENING = defineFeatureFlag(
"host-hardening", false,
"Whether to enable host hardening Linux baseline.",
"Takes effect on next tick or on host-admin restart (may vary where used).",
HOSTNAME);
public static final UnboundBooleanFlag TCP_ABORT_ON_OVERFLOW = defineFeatureFlag(
"tcp-abort-on-overflow", false,
"Whether to set /proc/sys/net/ipv4/tcp_abort_on_overflow to 0 (false) or 1 (true)",
"Takes effect on next tick or on host-admin restart (may vary where used).",
HOSTNAME);
public static final UnboundStringFlag ZOOKEEPER_SERVER_MAJOR_MINOR_VERSION = defineStringFlag(
"zookeeper-server-version", "3.5",
"The version of ZooKeeper server to use (major.minor, not full version)",
"Takes effect on restart of Docker container",
NODE_TYPE, APPLICATION_ID, HOSTNAME);
public static final UnboundStringFlag TLS_FOR_ZOOKEEPER_QUORUM_COMMUNICATION = defineStringFlag(
"tls-for-zookeeper-quorum-communication", "OFF",
"How to setup TLS for ZooKeeper quorum communication. Valid values are OFF, PORT_UNIFICATION, TLS_WITH_PORT_UNIFICATION, TLS_ONLY",
"Takes effect on restart of config server",
NODE_TYPE, HOSTNAME);
public static final UnboundStringFlag TLS_FOR_ZOOKEEPER_CLIENT_SERVER_COMMUNICATION = defineStringFlag(
"tls-for-zookeeper-client-server-communication", "OFF",
"How to setup TLS for ZooKeeper client/server communication. Valid values are OFF, PORT_UNIFICATION, TLS_WITH_PORT_UNIFICATION, TLS_ONLY",
"Takes effect on restart of config server",
NODE_TYPE, HOSTNAME);
public static final UnboundBooleanFlag USE_TLS_FOR_ZOOKEEPER_CLIENT = defineFeatureFlag(
"use-tls-for-zookeeper-client", false,
"Whether to use TLS for ZooKeeper clients",
"Takes effect on restart of process",
NODE_TYPE, HOSTNAME);
public static final UnboundBooleanFlag ENABLE_DISK_WRITE_TEST = defineFeatureFlag(
"enable-disk-write-test", false,
"Regularly issue a small write to disk and fail the host if it is not successful",
"Takes effect on next node agent tick (but does not clear existing failure reports)",
HOSTNAME);
public static final UnboundBooleanFlag RESTRICT_ACQUIRING_NEW_PRIVILEGES = defineFeatureFlag(
"restrict-acquiring-new-privileges", false,
"Whether docker container processes should be prevented from acquiring new privileges",
"Takes effect on container creation",
APPLICATION_ID, NODE_TYPE, HOSTNAME);
public static final UnboundListFlag<String> AUDITED_PATHS = defineListFlag(
"audited-paths", List.of(), String.class,
"List of paths that should audited",
"Takes effect on next host admin tick",
HOSTNAME);
public static final UnboundBooleanFlag GENERATE_L4_ROUTING_CONFIG = defineFeatureFlag(
"generate-l4-routing-config", false,
"Whether routing nodes should generate L4 routing config",
"Takes effect immediately",
ZONE_ID, HOSTNAME);
public static final UnboundBooleanFlag USE_REFRESHED_ENDPOINT_CERTIFICATE = defineFeatureFlag(
"use-refreshed-endpoint-certificate", false,
"Whether an application should start using a newer certificate/key pair if available",
"Takes effect on the next deployment of the application",
APPLICATION_ID);
public static final UnboundBooleanFlag VALIDATE_ENDPOINT_CERTIFICATES = defineFeatureFlag(
"validate-endpoint-certificates", false,
"Whether endpoint certificates should be validated before use",
"Takes effect on the next deployment of the application");
public static final UnboundStringFlag ENDPOINT_CERTIFICATE_BACKFILL = defineStringFlag(
"endpoint-certificate-backfill", "disable",
"Whether the endpoint certificate maintainer should backfill missing certificate data from cameo",
"Takes effect on next scheduled run of maintainer - set to \"disable\", \"dryrun\" or \"enable\"");
public static final UnboundBooleanFlag USE_NEW_ATHENZ_FILTER = defineFeatureFlag(
"use-new-athenz-filter", false,
"Use new Athenz filter that supports access-tokens",
"Takes effect at redeployment",
APPLICATION_ID);
public static final UnboundStringFlag DOCKER_IMAGE_OVERRIDE = defineStringFlag(
"docker-image-override", "",
"Override the Docker image to use for deployments. This must containing the image name only, without tag",
"Takes effect on next host-admin tick", APPLICATION_ID);
public static final UnboundBooleanFlag ENDPOINT_CERT_IN_SHARED_ROUTING = defineFeatureFlag(
"endpoint-cert-in-shared-routing", false,
"Whether to provision and use endpoint certs for apps in shared routing zones",
"Takes effect on next deployment of the application", APPLICATION_ID);
public static final UnboundBooleanFlag PHRASE_SEGMENTING = defineFeatureFlag(
"phrase-segmenting", true,
"Should 'implicit phrases' in queries we parsed to a phrase or and?",
"Takes effect on redeploy",
ZONE_ID, APPLICATION_ID);
/** WARNING: public for testing: All flags should be defined in {@link Flags}. */
public static UnboundBooleanFlag defineFeatureFlag(String flagId, boolean defaultValue, String description,
String modificationEffect, FetchVector.Dimension... dimensions) {
return define(UnboundBooleanFlag::new, flagId, defaultValue, description, modificationEffect, dimensions);
}
/** WARNING: public for testing: All flags should be defined in {@link Flags}. */
public static UnboundStringFlag defineStringFlag(String flagId, String defaultValue, String description,
String modificationEffect, FetchVector.Dimension... dimensions) {
return define(UnboundStringFlag::new, flagId, defaultValue, description, modificationEffect, dimensions);
}
/** WARNING: public for testing: All flags should be defined in {@link Flags}. */
public static UnboundIntFlag defineIntFlag(String flagId, int defaultValue, String description,
String modificationEffect, FetchVector.Dimension... dimensions) {
return define(UnboundIntFlag::new, flagId, defaultValue, description, modificationEffect, dimensions);
}
/** WARNING: public for testing: All flags should be defined in {@link Flags}. */
public static UnboundLongFlag defineLongFlag(String flagId, long defaultValue, String description,
String modificationEffect, FetchVector.Dimension... dimensions) {
return define(UnboundLongFlag::new, flagId, defaultValue, description, modificationEffect, dimensions);
}
/** WARNING: public for testing: All flags should be defined in {@link Flags}. */
public static UnboundDoubleFlag defineDoubleFlag(String flagId, double defaultValue, String description,
String modificationEffect, FetchVector.Dimension... dimensions) {
return define(UnboundDoubleFlag::new, flagId, defaultValue, description, modificationEffect, dimensions);
}
/** WARNING: public for testing: All flags should be defined in {@link Flags}. */
public static <T> UnboundJacksonFlag<T> defineJacksonFlag(String flagId, T defaultValue, Class<T> jacksonClass, String description,
String modificationEffect, FetchVector.Dimension... dimensions) {
return define((id2, defaultValue2, vector2) -> new UnboundJacksonFlag<>(id2, defaultValue2, vector2, jacksonClass),
flagId, defaultValue, description, modificationEffect, dimensions);
}
/** WARNING: public for testing: All flags should be defined in {@link Flags}. */
public static <T> UnboundListFlag<T> defineListFlag(String flagId, List<T> defaultValue, Class<T> elementClass,
String description, String modificationEffect, FetchVector.Dimension... dimensions) {
return define((fid, dval, fvec) -> new UnboundListFlag<>(fid, dval, elementClass, fvec),
flagId, defaultValue, description, modificationEffect, dimensions);
}
@FunctionalInterface
private interface TypedUnboundFlagFactory<T, U extends UnboundFlag<?, ?, ?>> {
U create(FlagId id, T defaultVale, FetchVector defaultFetchVector);
}
/**
* Defines a Flag.
*
* @param factory Factory for creating unbound flag of type U
* @param flagId The globally unique FlagId.
* @param defaultValue The default value if none is present after resolution.
* @param description Description of how the flag is used.
* @param modificationEffect What is required for the flag to take effect? A restart of process? immediately? etc.
* @param dimensions What dimensions will be set in the {@link FetchVector} when fetching
* the flag value in
* {@link FlagSource#fetch(FlagId, FetchVector) FlagSource::fetch}.
* For instance, if APPLICATION is one of the dimensions here, you should make sure
* APPLICATION is set to the ApplicationId in the fetch vector when fetching the RawFlag
* from the FlagSource.
* @param <T> The boxed type of the flag value, e.g. Boolean for flags guarding features.
* @param <U> The type of the unbound flag, e.g. UnboundBooleanFlag.
* @return An unbound flag with {@link FetchVector.Dimension#HOSTNAME HOSTNAME} and
* {@link FetchVector.Dimension#VESPA_VERSION VESPA_VERSION} already set. The ZONE environment
* is typically implicit.
*/
private static <T, U extends UnboundFlag<?, ?, ?>> U define(TypedUnboundFlagFactory<T, U> factory,
String flagId,
T defaultValue,
String description,
String modificationEffect,
FetchVector.Dimension[] dimensions) {
FlagId id = new FlagId(flagId);
FetchVector vector = new FetchVector()
.with(HOSTNAME, Defaults.getDefaults().vespaHostname())
// Warning: In unit tests and outside official Vespa releases, the currentVersion is e.g. 7.0.0
// (determined by the current major version). Consider not setting VESPA_VERSION if minor = micro = 0.
.with(VESPA_VERSION, Vtag.currentVersion.toFullString());
U unboundFlag = factory.create(id, defaultValue, vector);
FlagDefinition definition = new FlagDefinition(unboundFlag, description, modificationEffect, dimensions);
flags.put(id, definition);
return unboundFlag;
}
public static List<FlagDefinition> getAllFlags() {
return List.copyOf(flags.values());
}
public static Optional<FlagDefinition> getFlag(FlagId flagId) {
return Optional.ofNullable(flags.get(flagId));
}
/**
* Allows the statically defined flags to be controlled in a test.
*
* <p>Returns a Replacer instance to be used with e.g. a try-with-resources block. Within the block,
* the flags starts out as cleared. Flags can be defined, etc. When leaving the block, the flags from
* before the block is reinserted.
*
* <p>NOT thread-safe. Tests using this cannot run in parallel.
*/
public static Replacer clearFlagsForTesting() {
return new Replacer();
}
public static class Replacer implements AutoCloseable {
private static volatile boolean flagsCleared = false;
private final TreeMap<FlagId, FlagDefinition> savedFlags;
private Replacer() {
verifyAndSetFlagsCleared(true);
this.savedFlags = Flags.flags;
Flags.flags = new TreeMap<>();
}
@Override
public void close() {
verifyAndSetFlagsCleared(false);
Flags.flags = savedFlags;
}
/**
* Used to implement a simple verification that Replacer is not used by multiple threads.
* For instance two different tests running in parallel cannot both use Replacer.
*/
private static void verifyAndSetFlagsCleared(boolean newValue) {
if (flagsCleared == newValue) {
throw new IllegalStateException("clearFlagsForTesting called while already cleared - running tests in parallell!?");
}
flagsCleared = newValue;
}
}
} |
package com.yahoo.vespa.flags;
import com.yahoo.component.Vtag;
import com.yahoo.vespa.defaults.Defaults;
import java.time.Instant;
import java.time.LocalDate;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import java.util.List;
import java.util.Optional;
import java.util.TreeMap;
import static com.yahoo.vespa.flags.FetchVector.Dimension.APPLICATION_ID;
import static com.yahoo.vespa.flags.FetchVector.Dimension.CONSOLE_USER_EMAIL;
import static com.yahoo.vespa.flags.FetchVector.Dimension.HOSTNAME;
import static com.yahoo.vespa.flags.FetchVector.Dimension.NODE_TYPE;
import static com.yahoo.vespa.flags.FetchVector.Dimension.TENANT_ID;
import static com.yahoo.vespa.flags.FetchVector.Dimension.VESPA_VERSION;
import static com.yahoo.vespa.flags.FetchVector.Dimension.ZONE_ID;
/**
* Definitions of feature flags.
*
* <p>To use feature flags, define the flag in this class as an "unbound" flag, e.g. {@link UnboundBooleanFlag}
* or {@link UnboundStringFlag}. At the location you want to get the value of the flag, you need the following:</p>
*
* <ol>
* <li>The unbound flag</li>
* <li>A {@link FlagSource}. The flag source is typically available as an injectable component. Binding
* an unbound flag to a flag source produces a (bound) flag, e.g. {@link BooleanFlag} and {@link StringFlag}.</li>
* <li>If you would like your flag value to be dependent on e.g. the application ID, then 1. you should
* declare this in the unbound flag definition in this file (referring to
* {@link FetchVector.Dimension#APPLICATION_ID}), and 2. specify the application ID when retrieving the value, e.g.
* {@link BooleanFlag#with(FetchVector.Dimension, String)}. See {@link FetchVector} for more info.</li>
* </ol>
*
* <p>Once the code is in place, you can override the flag value. This depends on the flag source, but typically
* there is a REST API for updating the flags in the config server, which is the root of all flag sources in the zone.</p>
*
* @author hakonhall
*/
public class Flags {
private static volatile TreeMap<FlagId, FlagDefinition> flags = new TreeMap<>();
public static final UnboundDoubleFlag DEFAULT_TERM_WISE_LIMIT = defineDoubleFlag(
"default-term-wise-limit", 1.0,
List.of("baldersheim"), "2020-12-02", "2022-02-01",
"Default limit for when to apply termwise query evaluation",
"Takes effect at redeployment",
ZONE_ID, APPLICATION_ID);
public static final UnboundDoubleFlag TLS_SIZE_FRACTION = defineDoubleFlag(
"tls-size-fraction", 0.07,
List.of("baldersheim"), "2021-12-20", "2022-02-01",
"Fraction of disk available for transaction log",
"Takes effect at redeployment",
ZONE_ID, APPLICATION_ID);
public static final UnboundStringFlag FEED_SEQUENCER_TYPE = defineStringFlag(
"feed-sequencer-type", "LATENCY",
List.of("baldersheim"), "2020-12-02", "2022-02-01",
"Selects type of sequenced executor used for feeding in proton, valid values are LATENCY, ADAPTIVE, THROUGHPUT",
"Takes effect at redeployment (requires restart)",
ZONE_ID, APPLICATION_ID);
public static final UnboundIntFlag FEED_TASK_LIMIT = defineIntFlag(
"feed-task-limit", 1000,
List.of("geirst, baldersheim"), "2021-10-14", "2022-02-01",
"The task limit used by the executors handling feed in proton",
"Takes effect at redeployment",
ZONE_ID, APPLICATION_ID);
public static final UnboundIntFlag FEED_MASTER_TASK_LIMIT = defineIntFlag(
"feed-master-task-limit", 0,
List.of("geirst, baldersheim"), "2021-11-18", "2022-02-01",
"The task limit used by the master thread in each document db in proton. Ignored when set to 0.",
"Takes effect at redeployment",
ZONE_ID, APPLICATION_ID);
public static final UnboundStringFlag SHARED_FIELD_WRITER_EXECUTOR = defineStringFlag(
"shared-field-writer-executor", "NONE",
List.of("geirst, baldersheim"), "2021-11-05", "2022-02-01",
"Whether to use a shared field writer executor for the document database(s) in proton. " +
"Valid values: NONE, INDEX, INDEX_AND_ATTRIBUTE, DOCUMENT_DB",
"Takes effect at redeployment (requires restart)",
ZONE_ID, APPLICATION_ID);
public static final UnboundIntFlag MAX_UNCOMMITTED_MEMORY = defineIntFlag(
"max-uncommitted-memory", 130000,
List.of("geirst, baldersheim"), "2021-10-21", "2022-02-01",
"Max amount of memory holding updates to an attribute before we do a commit.",
"Takes effect at redeployment",
ZONE_ID, APPLICATION_ID);
public static final UnboundStringFlag RESPONSE_SEQUENCER_TYPE = defineStringFlag(
"response-sequencer-type", "ADAPTIVE",
List.of("baldersheim"), "2020-12-02", "2022-02-01",
"Selects type of sequenced executor used for mbus responses, valid values are LATENCY, ADAPTIVE, THROUGHPUT",
"Takes effect at redeployment",
ZONE_ID, APPLICATION_ID);
public static final UnboundIntFlag RESPONSE_NUM_THREADS = defineIntFlag(
"response-num-threads", 2,
List.of("baldersheim"), "2020-12-02", "2022-02-01",
"Number of threads used for mbus responses, default is 2, negative number = numcores/4",
"Takes effect at redeployment",
ZONE_ID, APPLICATION_ID);
public static final UnboundBooleanFlag SKIP_COMMUNICATIONMANAGER_THREAD = defineFeatureFlag(
"skip-communicationmanager-thread", false,
List.of("baldersheim"), "2020-12-02", "2022-02-01",
"Should we skip the communicationmanager thread",
"Takes effect at redeployment",
ZONE_ID, APPLICATION_ID);
public static final UnboundBooleanFlag SKIP_MBUS_REQUEST_THREAD = defineFeatureFlag(
"skip-mbus-request-thread", false,
List.of("baldersheim"), "2020-12-02", "2022-02-01",
"Should we skip the mbus request thread",
"Takes effect at redeployment",
ZONE_ID, APPLICATION_ID);
public static final UnboundBooleanFlag SKIP_MBUS_REPLY_THREAD = defineFeatureFlag(
"skip-mbus-reply-thread", false,
List.of("baldersheim"), "2020-12-02", "2022-02-01",
"Should we skip the mbus reply thread",
"Takes effect at redeployment",
ZONE_ID, APPLICATION_ID);
public static final UnboundBooleanFlag USE_THREE_PHASE_UPDATES = defineFeatureFlag(
"use-three-phase-updates", false,
List.of("vekterli"), "2020-12-02", "2022-02-01",
"Whether to enable the use of three-phase updates when bucket replicas are out of sync.",
"Takes effect at redeployment",
ZONE_ID, APPLICATION_ID);
public static final UnboundBooleanFlag HIDE_SHARED_ROUTING_ENDPOINT = defineFeatureFlag(
"hide-shared-routing-endpoint", false,
List.of("tokle", "bjormel"), "2020-12-02", "2022-02-01",
"Whether the controller should hide shared routing layer endpoint",
"Takes effect immediately",
APPLICATION_ID
);
public static final UnboundBooleanFlag USE_ASYNC_MESSAGE_HANDLING_ON_SCHEDULE = defineFeatureFlag(
"async-message-handling-on-schedule", false,
List.of("baldersheim"), "2020-12-02", "2022-02-01",
"Optionally deliver async messages in own thread",
"Takes effect at redeployment",
ZONE_ID, APPLICATION_ID);
public static final UnboundDoubleFlag FEED_CONCURRENCY = defineDoubleFlag(
"feed-concurrency", 0.5,
List.of("baldersheim"), "2020-12-02", "2022-02-01",
"How much concurrency should be allowed for feed",
"Takes effect at redeployment",
ZONE_ID, APPLICATION_ID);
public static final UnboundDoubleFlag DISK_BLOAT_FACTOR = defineDoubleFlag(
"disk-bloat-factor", 0.2,
List.of("baldersheim"), "2021-10-08", "2022-02-01",
"Amount of bloat allowed before compacting file",
"Takes effect at redeployment",
ZONE_ID, APPLICATION_ID);
public static final UnboundIntFlag DOCSTORE_COMPRESSION_LEVEL = defineIntFlag(
"docstore-compression-level", 3,
List.of("baldersheim"), "2021-10-08", "2022-02-01",
"Default compression level used for document store",
"Takes effect at redeployment",
ZONE_ID, APPLICATION_ID);
public static final UnboundIntFlag NUM_DEPLOY_HELPER_THREADS = defineIntFlag(
"num-model-builder-threads", -1,
List.of("balder"), "2021-09-09", "2022-02-01",
"Number of threads used for speeding up building of models.",
"Takes effect on first (re)start of config server");
public static final UnboundBooleanFlag ENABLE_FEED_BLOCK_IN_DISTRIBUTOR = defineFeatureFlag(
"enable-feed-block-in-distributor", true,
List.of("geirst"), "2021-01-27", "2022-01-31",
"Enables blocking of feed in the distributor if resource usage is above limit on at least one content node",
"Takes effect at redeployment",
ZONE_ID, APPLICATION_ID);
public static final UnboundBooleanFlag CONTAINER_DUMP_HEAP_ON_SHUTDOWN_TIMEOUT = defineFeatureFlag(
"container-dump-heap-on-shutdown-timeout", false,
List.of("baldersheim"), "2021-09-25", "2022-02-01",
"Will trigger a heap dump during if container shutdown times out",
"Takes effect at redeployment",
ZONE_ID, APPLICATION_ID);
// --- Flag definitions ----------------------------------------------------
// Each constant registers a flag via the define*Flag helpers below, passing:
// id, default value, owners, created-at / expires-at dates, description,
// modification effect, and the FetchVector dimensions the flag can vary over.
public static final UnboundDoubleFlag CONTAINER_SHUTDOWN_TIMEOUT = defineDoubleFlag(
"container-shutdown-timeout", 50.0,
List.of("baldersheim"), "2021-09-25", "2022-02-01",
"Timeout for shutdown of a jdisc container",
"Takes effect at redeployment",
ZONE_ID, APPLICATION_ID);
public static final UnboundListFlag<String> ALLOWED_ATHENZ_PROXY_IDENTITIES = defineListFlag(
"allowed-athenz-proxy-identities", List.of(), String.class,
List.of("bjorncs", "tokle"), "2021-02-10", "2022-02-01",
"Allowed Athenz proxy identities",
"takes effect at redeployment");
public static final UnboundBooleanFlag GENERATE_NON_MTLS_ENDPOINT = defineFeatureFlag(
"generate-non-mtls-endpoint", true,
List.of("tokle"), "2021-02-18", "2022-02-01",
"Whether to generate the non-mtls endpoint",
"Takes effect on next internal redeployment",
APPLICATION_ID);
// Content-layer tuning flags (merge throttling, replica activation, etc.).
public static final UnboundIntFlag MAX_ACTIVATION_INHIBITED_OUT_OF_SYNC_GROUPS = defineIntFlag(
"max-activation-inhibited-out-of-sync-groups", 0,
List.of("vekterli"), "2021-02-19", "2022-02-01",
"Allows replicas in up to N content groups to not be activated " +
"for query visibility if they are out of sync with a majority of other replicas",
"Takes effect at redeployment",
ZONE_ID, APPLICATION_ID);
public static final UnboundIntFlag MAX_CONCURRENT_MERGES_PER_NODE = defineIntFlag(
"max-concurrent-merges-per-node", 128,
List.of("balder", "vekterli"), "2021-06-06", "2022-02-01",
"Specifies max concurrent merges per content node.",
"Takes effect at redeploy",
ZONE_ID, APPLICATION_ID);
public static final UnboundIntFlag MAX_MERGE_QUEUE_SIZE = defineIntFlag(
"max-merge-queue-size", 1024,
List.of("balder", "vekterli"), "2021-06-06", "2022-02-01",
"Specifies max size of merge queue.",
"Takes effect at redeploy",
ZONE_ID, APPLICATION_ID);
public static final UnboundBooleanFlag IGNORE_MERGE_QUEUE_LIMIT = defineFeatureFlag(
"ignore-merge-queue-limit", false,
List.of("vekterli", "geirst"), "2021-10-06", "2022-03-01",
"Specifies if merges that are forwarded (chained) from another content node are always " +
"allowed to be enqueued even if the queue is otherwise full.",
"Takes effect at redeploy",
ZONE_ID, APPLICATION_ID);
public static final UnboundIntFlag LARGE_RANK_EXPRESSION_LIMIT = defineIntFlag(
"large-rank-expression-limit", 8192,
List.of("baldersheim"), "2021-06-09", "2022-02-01",
"Limit for size of rank expressions distributed by filedistribution",
"Takes effect on next internal redeployment",
APPLICATION_ID);
public static final UnboundDoubleFlag MIN_NODE_RATIO_PER_GROUP = defineDoubleFlag(
"min-node-ratio-per-group", 0.0,
List.of("geirst", "vekterli"), "2021-07-16", "2022-03-01",
"Minimum ratio of nodes that have to be available (i.e. not Down) in any hierarchic content cluster group for the group to be Up",
"Takes effect at redeployment",
ZONE_ID, APPLICATION_ID);
public static final UnboundIntFlag METRICSPROXY_NUM_THREADS = defineIntFlag(
"metricsproxy-num-threads", 2,
List.of("balder"), "2021-09-01", "2022-02-01",
"Number of threads for metrics proxy",
"Takes effect at redeployment",
ZONE_ID, APPLICATION_ID);
// Console / controller-side feature toggles.
public static final UnboundBooleanFlag ENABLED_HORIZON_DASHBOARD = defineFeatureFlag(
"enabled-horizon-dashboard", false,
List.of("olaa"), "2021-09-13", "2022-02-01",
"Enable Horizon dashboard",
"Takes effect immediately",
TENANT_ID, CONSOLE_USER_EMAIL
);
public static final UnboundBooleanFlag ENABLE_ONPREM_TENANT_S3_ARCHIVE = defineFeatureFlag(
"enable-onprem-tenant-s3-archive", false,
List.of("bjorncs"), "2021-09-14", "2022-02-01",
"Enable tenant S3 buckets in cd/main. Must be set on controller cluster only.",
"Takes effect immediately",
ZONE_ID, TENANT_ID
);
public static final UnboundBooleanFlag DELETE_UNMAINTAINED_CERTIFICATES = defineFeatureFlag(
"delete-unmaintained-certificates", false,
List.of("andreer"), "2021-09-23", "2022-02-01",
"Whether to delete certificates that are known by provider but not by controller",
"Takes effect on next run of EndpointCertificateMaintainer"
);
public static final UnboundBooleanFlag USE_NEW_ENDPOINT_CERTIFICATE_PROVIDER_URL = defineFeatureFlag(
"use-new-endpoint-certificate-provider-url", true,
List.of("andreer"), "2021-12-14", "2022-01-14",
"Use the new URL for the endpoint certificate provider API",
"Takes effect immediately"
);
public static final UnboundBooleanFlag ENABLE_TENANT_DEVELOPER_ROLE = defineFeatureFlag(
"enable-tenant-developer-role", false,
List.of("bjorncs"), "2021-09-23", "2022-02-01",
"Enable tenant developer Athenz role in cd/main. Must be set on controller cluster only.",
"Takes effect immediately",
TENANT_ID
);
public static final UnboundBooleanFlag ENABLE_ROUTING_REUSE_PORT = defineFeatureFlag(
"enable-routing-reuse-port", true,
List.of("mortent"), "2021-09-29", "2022-02-01",
"Enable reuse port in routing configuration",
"Takes effect on container restart",
HOSTNAME
);
public static final UnboundBooleanFlag ENABLE_TENANT_OPERATOR_ROLE = defineFeatureFlag(
"enable-tenant-operator-role", false,
List.of("bjorncs"), "2021-09-29", "2022-02-01",
"Enable tenant specific operator roles in public systems. For controllers only.",
"Takes effect on subsequent maintainer invocation",
TENANT_ID
);
// Distributor / storage-layer behavior flags.
public static final UnboundIntFlag DISTRIBUTOR_MERGE_BUSY_WAIT = defineIntFlag(
"distributor-merge-busy-wait", 10,
List.of("geirst", "vekterli"), "2021-10-04", "2022-03-01",
"Number of seconds that scheduling of new merge operations in the distributor should be inhibited " +
"towards a content node that has indicated merge busy",
"Takes effect at redeploy",
ZONE_ID, APPLICATION_ID);
public static final UnboundBooleanFlag DISTRIBUTOR_ENHANCED_MAINTENANCE_SCHEDULING = defineFeatureFlag(
"distributor-enhanced-maintenance-scheduling", false,
List.of("vekterli", "geirst"), "2021-10-14", "2022-01-31",
"Enable enhanced maintenance operation scheduling semantics on the distributor",
"Takes effect at redeploy",
ZONE_ID, APPLICATION_ID);
public static final UnboundBooleanFlag ASYNC_APPLY_BUCKET_DIFF = defineFeatureFlag(
"async-apply-bucket-diff", false,
List.of("geirst", "vekterli"), "2021-10-22", "2022-01-31",
"Whether portions of apply bucket diff handling will be performed asynchronously",
"Takes effect at redeploy",
ZONE_ID, APPLICATION_ID);
public static final UnboundBooleanFlag UNORDERED_MERGE_CHAINING = defineFeatureFlag(
"unordered-merge-chaining", false,
List.of("vekterli", "geirst"), "2021-11-15", "2022-03-01",
"Enables the use of unordered merge chains for data merge operations",
"Takes effect at redeploy",
ZONE_ID, APPLICATION_ID);
// Note: this flag applies to different dimensions depending on context (see description).
public static final UnboundStringFlag JDK_VERSION = defineStringFlag(
"jdk-version", "11",
List.of("hmusum"), "2021-10-25", "2022-03-01",
"JDK version to use on host and inside containers. Note application-id dimension only applies for container, " +
"while hostname and node type applies for host.",
"Takes effect on restart for Docker container and on next host-admin tick for host",
APPLICATION_ID,
TENANT_ID,
HOSTNAME,
NODE_TYPE);
public static final UnboundBooleanFlag IGNORE_THREAD_STACK_SIZES = defineFeatureFlag(
"ignore-thread-stack-sizes", false,
List.of("arnej"), "2021-11-12", "2022-01-31",
"Whether C++ thread creation should ignore any requested stack size",
"Triggers restart, takes effect immediately",
ZONE_ID, APPLICATION_ID);
// Vespa 8 migration flags.
public static final UnboundBooleanFlag USE_V8_GEO_POSITIONS = defineFeatureFlag(
"use-v8-geo-positions", false,
List.of("arnej"), "2021-11-15", "2022-12-31",
"Use Vespa 8 types and formats for geographical positions",
"Takes effect at redeployment",
ZONE_ID, APPLICATION_ID);
public static final UnboundBooleanFlag USE_LEGACY_LB_SERVICES = defineFeatureFlag(
"use-legacy-lb-services", false,
List.of("tokle"), "2021-11-22", "2022-02-01",
"Whether to generate routing table based on legacy lb-services config",
"Takes effect on container reboot",
ZONE_ID, HOSTNAME);
public static final UnboundBooleanFlag USE_V8_DOC_MANAGER_CFG = defineFeatureFlag(
"use-v8-doc-manager-cfg", false,
List.of("arnej", "baldersheim"), "2021-12-09", "2022-12-31",
"Use new (preparing for Vespa 8) section in documentmanager.def",
"Takes effect at redeployment",
ZONE_ID, APPLICATION_ID);
public static final UnboundIntFlag MAX_COMPACT_BUFFERS = defineIntFlag(
"max-compact-buffers", 1,
List.of("baldersheim", "geirst", "toregge"), "2021-12-15", "2022-03-31",
"Upper limit of buffers to compact in a data store at the same time for each reason (memory usage, address space usage)",
"Takes effect at redeployment",
ZONE_ID, APPLICATION_ID);
public static final UnboundBooleanFlag FAIL_DEPLOYMENT_WITH_INVALID_JVM_OPTIONS = defineFeatureFlag(
"fail-deployment-with-invalid-jvm-options", false,
List.of("hmusum"), "2021-12-20", "2022-01-20",
"Whether to fail deployments with invalid JVM options in services.xml",
"Takes effect at redeployment",
ZONE_ID, APPLICATION_ID);
public static final UnboundBooleanFlag ENABLE_SERVER_OCSP_STAPLING = defineFeatureFlag(
"enable-server-ocsp-stapling", false,
List.of("bjorncs"), "2021-12-17", "2022-06-01",
"Enable server OCSP stapling for jdisc containers",
"Takes effect on redeployment",
ZONE_ID, APPLICATION_ID);
public static final UnboundBooleanFlag ENABLE_DATA_HIGHWAY_IN_AWS = defineFeatureFlag(
"enable-data-highway-in-aws", false,
List.of("hmusum"), "2022-01-06", "2022-04-06",
"Enable Data Highway in AWS",
"Takes effect on restart of Docker container",
ZONE_ID, APPLICATION_ID);
/**
 * Defines a new boolean flag.
 * WARNING: public for testing only — all production flags should be defined in {@link Flags}.
 */
public static UnboundBooleanFlag defineFeatureFlag(String flagId, boolean defaultValue, List<String> owners,
                                                   String createdAt, String expiresAt, String description,
                                                   String modificationEffect, FetchVector.Dimension... dimensions) {
    return define((id, dflt, vector) -> new UnboundBooleanFlag(id, dflt, vector),
                  flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions);
}

/**
 * Defines a new string flag.
 * WARNING: public for testing only — all production flags should be defined in {@link Flags}.
 */
public static UnboundStringFlag defineStringFlag(String flagId, String defaultValue, List<String> owners,
                                                 String createdAt, String expiresAt, String description,
                                                 String modificationEffect, FetchVector.Dimension... dimensions) {
    return define((id, dflt, vector) -> new UnboundStringFlag(id, dflt, vector),
                  flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions);
}

/**
 * Defines a new int flag.
 * WARNING: public for testing only — all production flags should be defined in {@link Flags}.
 */
public static UnboundIntFlag defineIntFlag(String flagId, int defaultValue, List<String> owners,
                                           String createdAt, String expiresAt, String description,
                                           String modificationEffect, FetchVector.Dimension... dimensions) {
    return define((id, dflt, vector) -> new UnboundIntFlag(id, dflt, vector),
                  flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions);
}

/**
 * Defines a new long flag.
 * WARNING: public for testing only — all production flags should be defined in {@link Flags}.
 */
public static UnboundLongFlag defineLongFlag(String flagId, long defaultValue, List<String> owners,
                                             String createdAt, String expiresAt, String description,
                                             String modificationEffect, FetchVector.Dimension... dimensions) {
    return define((id, dflt, vector) -> new UnboundLongFlag(id, dflt, vector),
                  flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions);
}

/**
 * Defines a new double flag.
 * WARNING: public for testing only — all production flags should be defined in {@link Flags}.
 */
public static UnboundDoubleFlag defineDoubleFlag(String flagId, double defaultValue, List<String> owners,
                                                 String createdAt, String expiresAt, String description,
                                                 String modificationEffect, FetchVector.Dimension... dimensions) {
    return define((id, dflt, vector) -> new UnboundDoubleFlag(id, dflt, vector),
                  flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions);
}

/**
 * Defines a new flag whose value is a Jackson-serializable object of the given class.
 * WARNING: public for testing only — all production flags should be defined in {@link Flags}.
 */
public static <T> UnboundJacksonFlag<T> defineJacksonFlag(String flagId, T defaultValue, Class<T> jacksonClass, List<String> owners,
                                                          String createdAt, String expiresAt, String description,
                                                          String modificationEffect, FetchVector.Dimension... dimensions) {
    return define((id, dflt, vector) -> new UnboundJacksonFlag<>(id, dflt, vector, jacksonClass),
                  flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions);
}

/**
 * Defines a new flag whose value is a list with elements of the given class.
 * WARNING: public for testing only — all production flags should be defined in {@link Flags}.
 */
public static <T> UnboundListFlag<T> defineListFlag(String flagId, List<T> defaultValue, Class<T> elementClass,
                                                    List<String> owners, String createdAt, String expiresAt,
                                                    String description, String modificationEffect, FetchVector.Dimension... dimensions) {
    return define((id, dflt, vector) -> new UnboundListFlag<>(id, dflt, elementClass, vector),
                  flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions);
}
/**
 * Factory creating an unbound flag of type {@code U} from a flag id, a default value,
 * and the default fetch vector.
 *
 * @param <T> the boxed type of the flag value
 * @param <U> the concrete unbound flag type produced
 */
@FunctionalInterface
private interface TypedUnboundFlagFactory<T, U extends UnboundFlag<?, ?, ?>> {
    // Fixed parameter-name typo: "defaultVale" -> "defaultValue" (declaration-only;
    // no caller or lambda implementation depends on the name).
    U create(FlagId id, T defaultValue, FetchVector defaultFetchVector);
}
/**
 * Defines a Flag.
 *
 * @param factory Factory for creating unbound flag of type U
 * @param flagId The globally unique FlagId.
 * @param defaultValue The default value if none is present after resolution.
 * @param owners The usernames of the people responsible for this flag.
 * @param createdAt The date (ISO-8601, e.g. "2021-12-15") the flag was created.
 * @param expiresAt The date (ISO-8601) after which the flag should be removed.
 * @param description Description of how the flag is used.
 * @param modificationEffect What is required for the flag to take effect? A restart of process? immediately? etc.
 * @param dimensions What dimensions will be set in the {@link FetchVector} when fetching
 * the flag value in
 * {@link FlagSource#fetch(FlagId, FetchVector) FlagSource::fetch}.
 * For instance, if APPLICATION is one of the dimensions here, you should make sure
 * APPLICATION is set to the ApplicationId in the fetch vector when fetching the RawFlag
 * from the FlagSource.
 * @param <T> The boxed type of the flag value, e.g. Boolean for flags guarding features.
 * @param <U> The type of the unbound flag, e.g. UnboundBooleanFlag.
 * @return An unbound flag with {@link FetchVector.Dimension#HOSTNAME HOSTNAME} and
 * {@link FetchVector.Dimension#VESPA_VERSION VESPA_VERSION} already set. The ZONE environment
 * is typically implicit.
 */
private static <T, U extends UnboundFlag<?, ?, ?>> U define(TypedUnboundFlagFactory<T, U> factory,
String flagId,
T defaultValue,
List<String> owners,
String createdAt,
String expiresAt,
String description,
String modificationEffect,
FetchVector.Dimension[] dimensions) {
FlagId id = new FlagId(flagId);
// Every flag's default fetch vector pins the current host and Vespa version.
FetchVector vector = new FetchVector()
.with(HOSTNAME, Defaults.getDefaults().vespaHostname())
// Warning: In unit tests and outside official Vespa releases, the currentVersion is e.g. 7.0.0
// (determined by the current major version). Consider not setting VESPA_VERSION if minor = micro = 0.
.with(VESPA_VERSION, Vtag.currentVersion.toFullString());
U unboundFlag = factory.create(id, defaultValue, vector);
// Register the definition so getAllFlags()/getFlag() can enumerate it.
FlagDefinition definition = new FlagDefinition(
unboundFlag, owners, parseDate(createdAt), parseDate(expiresAt), description, modificationEffect, dimensions);
flags.put(id, definition);
return unboundFlag;
}
/** Parses an ISO-8601 date string (e.g. "2021-12-15") to the Instant at UTC start-of-day. */
private static Instant parseDate(String rawDate) {
    LocalDate date = LocalDate.parse(rawDate, DateTimeFormatter.ISO_DATE);
    return date.atStartOfDay().toInstant(ZoneOffset.UTC);
}
/** Returns an immutable snapshot of all currently registered flag definitions. */
public static List<FlagDefinition> getAllFlags() {
return List.copyOf(flags.values());
}
/** Returns the definition registered for the given id, or empty if unknown. */
public static Optional<FlagDefinition> getFlag(FlagId flagId) {
return Optional.ofNullable(flags.get(flagId));
}
/**
 * Allows the statically defined flags to be controlled in a test.
 *
 * <p>Returns a Replacer instance to be used with e.g. a try-with-resources block. Within the block,
 * the flags start out as cleared. Flags can be defined, etc. When leaving the block, the flags from
 * before the block are reinserted.
 *
 * <p>NOT thread-safe. Tests using this cannot run in parallel.
 */
public static Replacer clearFlagsForTesting(FlagId... flagsToKeep) {
return new Replacer(flagsToKeep);
}
/**
 * Swaps out the static flag registry for the duration of a test; see
 * {@link #clearFlagsForTesting(FlagId...)}. NOT thread-safe.
 */
public static class Replacer implements AutoCloseable {
    // Guards against two Replacer instances being active at once (e.g. tests in parallel).
    private static volatile boolean flagsCleared = false;

    // The flag definitions in effect before this Replacer cleared them.
    private final TreeMap<FlagId, FlagDefinition> savedFlags;

    private Replacer(FlagId... flagsToKeep) {
        verifyAndSetFlagsCleared(true);
        this.savedFlags = Flags.flags;
        Flags.flags = new TreeMap<>();
        List.of(flagsToKeep).forEach(id -> Flags.flags.put(id, savedFlags.get(id)));
    }

    /** Restores the flag definitions that were in effect before this Replacer was created. */
    @Override
    public void close() {
        verifyAndSetFlagsCleared(false);
        Flags.flags = savedFlags;
    }

    /**
     * Used to implement a simple verification that Replacer is not used by multiple threads.
     * For instance two different tests running in parallel cannot both use Replacer.
     */
    private static void verifyAndSetFlagsCleared(boolean newValue) {
        if (flagsCleared == newValue) {
            // Fixed: typo "parallell", and the old message always blamed
            // clearFlagsForTesting even when the failure was an unbalanced close().
            throw new IllegalStateException(newValue
                    ? "clearFlagsForTesting called while flags were already cleared - running tests in parallel!?"
                    : "Replacer closed while flags were not cleared - unbalanced close()?");
        }
        flagsCleared = newValue;
    }
}
} |
package com.tinkerpop.gremlin;
import com.tinkerpop.pipes.filter.FilterPipe;
public class Tokens {
public static final String VERSION = "2.2.0-SNAPSHOT";
public static final String LABEL = "label";
public static final String ID = "id";
public static enum T {
/**
* Greater than
*/
gt,
/**
* Less than
*/
lt,
/**
* Equal to
*/
eq,
/**
* Greater than or equal to
*/
gte,
/**
* Less than or equal to
*/
lte,
/**
* Not equal to
*/
neq
}
public static FilterPipe.Filter mapFilter(final T t) {
if (t.equals(T.eq))
return FilterPipe.Filter.EQUAL;
else if (t.equals(T.neq))
return FilterPipe.Filter.NOT_EQUAL;
else if (t.equals(T.lt))
return FilterPipe.Filter.LESS_THAN;
else if (t.equals(T.lte))
return FilterPipe.Filter.LESS_THAN_EQUAL;
else if (t.equals(T.gt))
return FilterPipe.Filter.GREATER_THAN;
else if (t.equals(T.gte))
return FilterPipe.Filter.GREATER_THAN_EQUAL;
else
throw new IllegalArgumentException(t.toString() + " is an unknown filter type");
}
public static FilterPipe.Filter mapFlipFilter(final T t) {
if (t.equals(T.eq))
return FilterPipe.Filter.NOT_EQUAL;
else if (t.equals(T.neq))
return FilterPipe.Filter.EQUAL;
else if (t.equals(T.lt))
return FilterPipe.Filter.GREATER_THAN_EQUAL;
else if (t.equals(T.lte))
return FilterPipe.Filter.GREATER_THAN;
else if (t.equals(T.gt))
return FilterPipe.Filter.LESS_THAN_EQUAL;
else if (t.equals(T.gte))
return FilterPipe.Filter.LESS_THAN;
else
throw new IllegalArgumentException(t.toString() + " is an unknown filter type");
}
} |
package org.genericsystem.cv;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.genericsystem.cv.lm.LMHostImpl;
import org.genericsystem.cv.utils.NativeLibraryLoader;
import org.genericsystem.cv.utils.Ransac;
import org.genericsystem.cv.utils.Ransac.Model;
import org.genericsystem.cv.utils.Tools;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import org.opencv.utils.Converters;
import org.opencv.videoio.VideoCapture;
import javafx.scene.image.ImageView;
import javafx.scene.layout.GridPane;
// Detects dominant line directions in a webcam feed, estimates a vanishing
// point via RANSAC, and displays a perspective-corrected ("deskewed") view.
public class LinesDetector4 extends AbstractApp {
static {
// Load the native OpenCV library before any OpenCV class is used.
NativeLibraryLoader.load();
}
public static void main(String[] args) {
launch(args);
}
// Default webcam (device 0).
private final VideoCapture capture = new VideoCapture(0);
// Drives the periodic capture/processing loop started in fillGrid().
private ScheduledExecutorService timer = Executors.newSingleThreadScheduledExecutor();
// Last estimated vanishing point, in (uncalibrated) image coordinates.
private Point vp = new Point(0, 0);
// private Damper vpxDamper = new Damper(1);
// private Damper vpyDamper = new Damper(1);
// Builds the two-image UI (raw frame + deskewed frame) and starts the periodic
// capture/estimate/warp loop on the single-threaded scheduler.
@Override
protected void fillGrid(GridPane mainGrid) {
// vpxDamper.pushNewValue(0);
// vpyDamper.pushNewValue(0);
Mat frame = new Mat();
capture.read(frame);
ImageView frameView = new ImageView(Tools.mat2jfxImage(frame));
mainGrid.add(frameView, 0, 0);
ImageView deskiewedView = new ImageView(Tools.mat2jfxImage(frame));
mainGrid.add(deskiewedView, 0, 1);
Mat dePerspectived = frame.clone();
timer.scheduleAtFixedRate(() -> {
try {
capture.read(frame);
// Morphological gradient + Otsu binarization to expose edges for Hough.
Img grad = new Img(frame, false).morphologyEx(Imgproc.MORPH_GRADIENT, Imgproc.MORPH_RECT, new Size(2, 2)).otsu();
// Img grad = new Img(frame, false).canny(60, 180);
// Img grad = new Img(frame, false).bilateralFilter(20, 80, 80).bgr2Gray().adaptativeThresHold(255, Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C, Imgproc.THRESH_BINARY_INV, 11, 3).morphologyEx(Imgproc.MORPH_CLOSE, Imgproc.MORPH_RECT, new Size(11,
Lines lines = new Lines(grad.houghLinesP(1, Math.PI / 180, 10, 100, 10));
System.out.println("Average angle: " + lines.getMean() / Math.PI * 180);
// Need a minimum number of detected segments for a stable RANSAC estimate.
if (lines.size() > 10) {
lines.draw(frame, new Scalar(0, 0, 255));
frameView.setImage(Tools.mat2jfxImage(frame));
// Mat dePerspectived = new Mat(frame.size(), CvType.CV_8UC3, new Scalar(255, 255, 255));
Ransac<Line> ransac = lines.vanishingPointRansac(frame.width(), frame.height());
// Best model's single parameter is the calibrated vanishing-point vector.
Mat vp_mat = (Mat) ransac.getBestModel().getParams()[0];
Mat uncalibrate = uncalibrate(vp_mat);
vp = new Point(uncalibrate.get(0, 0)[0], uncalibrate.get(1, 0)[0]);
System.out.println("Vanishing point : " + vp);
// vpxDamper.pushNewValue(vp.x);
// vpyDamper.pushNewValue(vp.y);
Point bary = new Point(frame.width() / 2, frame.height() / 2);
Mat homography = findHomography(new Point(vp.x, vp.y), bary, frame.width(), frame.height());
lines = new Lines(ransac.getBestDataSet().values()).perspectivTransform(homography);
// Warp only where the warped frame has valid pixels (mask), keeping old
// content elsewhere so the deskewed view does not flicker to black.
Mat mask = new Mat(frame.size(), CvType.CV_8UC1, new Scalar(255));
Mat maskWarpped = new Mat();
Imgproc.warpPerspective(mask, maskWarpped, homography, frame.size());
Mat tmp = new Mat();
Imgproc.warpPerspective(frame, tmp, homography, frame.size(), Imgproc.INTER_LINEAR, Core.BORDER_REPLICATE, Scalar.all(255));
tmp.copyTo(dePerspectived, maskWarpped);
lines.draw(dePerspectived, new Scalar(0, 255, 0));
deskiewedView.setImage(Tools.mat2jfxImage(dePerspectived));
} else
System.out.println("Not enough lines : " + lines.size());
} catch (Throwable e) {
e.printStackTrace();
}
}, 33, 250, TimeUnit.MILLISECONDS);
}
// Maps a calibrated (normalized-camera) homogeneous vector back to pixel
// coordinates by multiplying with the intrinsics K, then dehomogenizes
// (divides by the third coordinate) when it is non-zero.
public Mat uncalibrate(Mat calibrated) {
Mat uncalibrate = new Mat(3, 1, CvType.CV_32F);
Core.gemm(Lines.K, calibrated, 1, new Mat(), 0, uncalibrate);
if (uncalibrate.get(2, 0)[0] != 0) {
uncalibrate.put(0, 0, new float[] { Double.valueOf(uncalibrate.get(0, 0)[0] / uncalibrate.get(2, 0)[0]).floatValue() });
uncalibrate.put(1, 0, new float[] { Double.valueOf(uncalibrate.get(1, 0)[0] / uncalibrate.get(2, 0)[0]).floatValue() });
uncalibrate.put(2, 0, new float[] { 1 });
}
return uncalibrate;
}
public void print(Mat m) {
for (int row = 0; row < m.rows(); row++) {
System.out.print("(");
for (int col = 0; col < m.cols() - 1; col++) {
System.out.print(m.get(row, col)[0] + ",");
}
System.out.println(m.get(row, m.cols() - 1)[0] + ")");
}
System.out.println("
}
// Rotates the given points by `alpha` radians around `bary` and returns the
// transformed points (OpenCV's rotation matrix expects degrees, hence the conversion).
public Point[] rotate(Point bary, double alpha, Point... p) {
Mat matrix = Imgproc.getRotationMatrix2D(bary, alpha / Math.PI * 180, 1);
MatOfPoint2f results = new MatOfPoint2f();
Core.transform(new MatOfPoint2f(p), results, matrix);
return results.toArray();
}
// Computes the perspective transform that maps the quadrilateral implied by the
// vanishing point back to the full image rectangle. The vp is first rotated so it
// lies on the horizontal axis through the image center (angle folded into
// [-pi/2, pi/2]), the quad corners are built from lines through the rotated vp,
// then the inverse rotation is folded back into the source quad.
private Mat findHomography(Point vp, Point bary, double width, double height) {
double alpha_ = Math.atan2((vp.y - bary.y), (vp.x - bary.x));
// Fold the angle into [-pi/2, pi/2] so the vp ends up on the "horizontal" side.
if (alpha_ < -Math.PI / 2 && alpha_ > -Math.PI)
alpha_ = alpha_ + Math.PI;
if (alpha_ < Math.PI && alpha_ > Math.PI / 2)
alpha_ = alpha_ - Math.PI;
double alpha = alpha_;
Point rotatedVp = rotate(bary, alpha, vp)[0];
// Destination rectangle corners (full frame) and mid-points of top/bottom edges.
Point A = new Point(0, 0);
Point B = new Point(width, 0);
Point C = new Point(width, height);
Point D = new Point(0, height);
Point AB2 = new Point(width / 2, 0);
Point CD2 = new Point(width / 2, height);
Point A_, B_, C_, D_;
// Choose which vertical image border to intersect depending on which side the vp lies.
if (rotatedVp.x >= width / 2) {
A_ = new Line(AB2, rotatedVp).intersection(0);
D_ = new Line(CD2, rotatedVp).intersection(0);
C_ = new Line(A_, bary).intersection(new Line(CD2, rotatedVp));
B_ = new Line(D_, bary).intersection(new Line(AB2, rotatedVp));
} else {
B_ = new Line(AB2, rotatedVp).intersection(width);
C_ = new Line(CD2, rotatedVp).intersection(width);
A_ = new Line(C_, bary).intersection(new Line(AB2, rotatedVp));
D_ = new Line(B_, bary).intersection(new Line(CD2, rotatedVp));
}
System.out.println("vp : " + vp);
System.out.println("rotated vp : " + rotatedVp);
System.out.println("Alpha : " + alpha * 180 / Math.PI);
// System.out.println("A : " + A + " " + A_);
// System.out.println("B : " + B + " " + B_);
// System.out.println("C : " + C + " " + C_);
// System.out.println("D : " + D + " " + D_);
// Un-rotate the source quad, then map it onto the full destination rectangle.
return Imgproc.getPerspectiveTransform(new MatOfPoint2f(rotate(bary, -alpha, A_, B_, C_, D_)), new MatOfPoint2f(A, B, C, D));
}
// A collection of detected line segments plus their mean angle, with the
// RANSAC-based vanishing-point estimation.
public static class Lines {
private final List<Line> lines = new ArrayList<>();
// Mean of the segments' angles, in radians.
private final double mean;
// Camera intrinsics matrix, lazily initialized from the frame size in
// vanishingPointRansac(). NOTE(review): mutable static, set without
// synchronization from the scheduler thread — fine for a single camera
// loop, but not thread-safe in general.
private static Mat K;
// Builds the collection from HoughLinesP output: one row per segment,
// each row holding (x1, y1, x2, y2).
// NOTE(review): an empty `src` (0 rows) yields mean = 0/0 = NaN.
public Lines(Mat src) {
double mean = 0;
for (int i = 0; i < src.rows(); i++) {
double[] val = src.get(i, 0);
Line line = new Line(val[0], val[1], val[2], val[3]);
lines.add(line);
mean += line.getAngle();
}
this.mean = mean / src.rows();
}
// Returns the normalized homogeneous representation of the (infinite) line
// through the segment's endpoints: the cross product of the two endpoints in
// homogeneous coordinates.
private Mat getLineMat(Line line) {
Mat a = new Mat(3, 1, CvType.CV_32F);
Mat b = new Mat(3, 1, CvType.CV_32F);
a.put(0, 0, new float[] { Double.valueOf(line.x1).floatValue() });
a.put(1, 0, new float[] { Double.valueOf(line.y1).floatValue() });
a.put(2, 0, new float[] { Double.valueOf(1d).floatValue() });
b.put(0, 0, new float[] { Double.valueOf(line.x2).floatValue() });
b.put(1, 0, new float[] { Double.valueOf(line.y2).floatValue() });
b.put(2, 0, new float[] { Double.valueOf(1d).floatValue() });
// Mat an = new Mat(3, 1, CvType.CV_32F);
// Mat bn = new Mat(3, 1, CvType.CV_32F);
// Core.gemm(K.inv(), a, 1, new Mat(), 0, an);
// Core.gemm(K.inv(), b, 1, new Mat(), 0, bn);
Mat li = a.cross(b);
Core.normalize(li, li);
return li;
}
// Returns the segment's midpoint in homogeneous coordinates
// (average of the two endpoints, third coordinate 1).
private Mat getLineMiMat(Line line) {
Mat a = new Mat(3, 1, CvType.CV_32F);
Mat b = new Mat(3, 1, CvType.CV_32F);
a.put(0, 0, new float[] { Double.valueOf(line.x1).floatValue() });
a.put(1, 0, new float[] { Double.valueOf(line.y1).floatValue() });
a.put(2, 0, new float[] { Double.valueOf(1d).floatValue() });
b.put(0, 0, new float[] { Double.valueOf(line.x2).floatValue() });
b.put(1, 0, new float[] { Double.valueOf(line.y2).floatValue() });
b.put(2, 0, new float[] { Double.valueOf(1d).floatValue() });
Mat c = new Mat(3, 1, CvType.CV_32F);
Core.addWeighted(a, 0.5, b, 0.5, 0, c);
return c;
}
// Estimates the dominant vanishing point with RANSAC over the line segments.
// For a minimal sample (2 lines) the model is their intersection (calibrated
// with K^-1 and normalized); for larger consensus sets the point is refined by
// Levenberg-Marquardt over spherical coordinates, minimizing Nieto's
// line-to-vanishing-point distance. Returns the Ransac object whose best
// model's getParams()[0] is the calibrated vanishing-point Mat.
public Ransac<Line> vanishingPointRansac(int width, int height) {
int minimal_sample_set_dimension = 2;
double maxError = (float) 0.01623 * 2;
// Lazily build a rough intrinsics matrix from the frame size
// (focal ~ width/height, principal point at the image center).
if (K == null) {
K = new Mat(3, 3, CvType.CV_32F, new Scalar(0));
K.put(0, 0, new float[] { width });
K.put(0, 2, new float[] { width / 2 });
K.put(1, 1, new float[] { height });
K.put(1, 2, new float[] { height / 2 });
K.put(2, 2, new float[] { 1 });
}
// Single-element array so the lambda below can mutate the current estimate.
Mat[] vp = new Mat[1];
Function<Collection<Line>, Model<Line>> modelProvider = datas -> {
if (datas.size() == minimal_sample_set_dimension) {
Iterator<Line> it = datas.iterator();
// Minimal sample: vp = intersection of the two lines (cross product).
vp[0] = getLineMat(it.next()).cross(getLineMat(it.next()));
// System.out.println("Vanishing point : " + vp[0].get(0, 0)[0] + "," + vp[0].get(1, 0)[0] + "," + vp[0].get(2, 0)[0]);
Core.gemm(K.inv(), vp[0], 1, new Mat(), 0, vp[0]);
Core.normalize(vp[0], vp[0]);
// System.out.println("Vanishing point calibrated : " + vp[0].get(0, 0)[0] + "," + vp[0].get(1, 0)[0] + "," + vp[0].get(2, 0)[0]);
} else {
// The starting point is the provided vp which is already calibrated
// Convert to spherical coordinates to move on the sphere surface (restricted to r=1)
double x = vp[0].get(0, 0)[0];
double y = vp[0].get(1, 0)[0];
double z = vp[0].get(2, 0)[0];
double r = Core.norm(vp[0]);
double theta = Math.acos(z / r);
double phi = Math.atan2(y, x);
System.out.println("Initial Cal.VP (x,y,z) = (" + x + "," + y + "," + z + ")");
System.out.println("Initial Cal.VP (Spherical) = (" + theta + "," + phi + "," + r + ")");
// Cost function: uncalibrate the candidate (theta, phi) point and
// measure Nieto's distance to the given line.
BiFunction<Line, double[], Double> evaluateNieto = (line, params) -> {
Mat vn = new Mat(3, 1, CvType.CV_32F);
vn.put(0, 0, new double[] { Math.cos(params[1]) * Math.sin(params[0]) });
vn.put(1, 0, new double[] { Math.sin(params[1]) * Math.sin(params[0]) });
vn.put(2, 0, new double[] { Math.cos(params[0]) });
Core.gemm(K, vn, 1, new Mat(), 0, vn);
if (vn.get(2, 0)[0] != 0) {
vn.put(0, 0, new float[] { Double.valueOf(vn.get(0, 0)[0] / vn.get(2, 0)[0]).floatValue() });
vn.put(1, 0, new float[] { Double.valueOf(vn.get(1, 0)[0] / vn.get(2, 0)[0]).floatValue() });
vn.put(2, 0, new float[] { 1 });
}
return distanceNieto(vn, line);
};
LMHostImpl<Line> fitHost = new LMHostImpl<>(evaluateNieto, new ArrayList<>(datas), new double[] { theta, phi });
double[] params = fitHost.getParms();
// Store into vp
// 1) From spherical to cartesian
theta = params[0];
phi = params[1];
x = r * Math.cos(phi) * Math.sin(theta);
y = r * Math.sin(phi) * Math.sin(theta);
z = r * Math.cos(theta);
System.out.println("Converged Cal.VP (x,y,z) = (" + x + "," + y + "," + z + ")");
System.out.println("Converged Cal.VP (Spherical) = " + "(" + theta + "," + phi + "," + r + ")");
vp[0].put(0, 0, new float[] { Double.valueOf(x).floatValue() });
vp[0].put(1, 0, new float[] { Double.valueOf(y).floatValue() });
vp[0].put(2, 0, new float[] { Double.valueOf(z).floatValue() });
}
return new Model<Line>() {
@Override
public double computeError(Line line) {
// The vp arrives here calibrated, need to uncalibrate (check it anyway)
Mat vn = new Mat(3, 1, CvType.CV_32F);
if (Math.abs(Core.norm(vp[0]) - 1) < 0.001) {
Core.gemm(K, vp[0], 1, new Mat(), 0, vn);
if (vn.get(2, 0)[0] != 0) {
vn.put(0, 0, new float[] { Double.valueOf(vn.get(0, 0)[0] / vn.get(2, 0)[0]).floatValue() });
vn.put(1, 0, new float[] { Double.valueOf(vn.get(1, 0)[0] / vn.get(2, 0)[0]).floatValue() });
vn.put(2, 0, new float[] { 1 });
}
}
// Squared distance so larger deviations are penalized more.
double di = distanceNieto(vn, line);
return di * di;
}
@Override
public double computeGlobalError(List<Line> datas, Collection<Line> consensusDatas) {
// Sum of per-line errors, each clamped to maxError, averaged over the full data set.
double globalError = 0;
for (Line line : consensusDatas) {
double error = computeError(line);
if (error > maxError)
error = maxError;
globalError += error;
}
globalError = globalError / datas.size();
return globalError;
}
@Override
public Object[] getParams() {
return new Object[] { vp[0] };
}
};
};
// Require at least half of the segments as consensus.
return new Ransac<>(lines, modelProvider, minimal_sample_set_dimension, 100, maxError, Double.valueOf(Math.floor(lines.size() * 0.5)).intValue());
}
// Nieto's distance between a line segment and an (uncalibrated) vanishing point:
// the normalized angular deviation between the segment's direction and the line
// joining the segment's midpoint to the vanishing point. 0 means the segment
// points exactly at the vp.
private double distanceNieto(Mat vp, Line line) {
Mat lineSegment = getLineMat(line);
// Normal of the segment's line: (n0, n1) = (-b, a) of its homogeneous form.
double n0 = -lineSegment.get(1, 0)[0];
double n1 = lineSegment.get(0, 0)[0];
double nNorm = Math.sqrt(n0 * n0 + n1 * n1);
// Mid point
Mat midPoint = getLineMiMat(line);
double c0 = midPoint.get(0, 0)[0];
double c1 = midPoint.get(1, 0)[0];
double c2 = midPoint.get(2, 0)[0];
// Vanishing point (uncalibrated)
double v0 = vp.get(0, 0)[0];
double v1 = vp.get(1, 0)[0];
double v2 = vp.get(2, 0)[0];
double r0, r1;
// First two components of midpoint x vp: the line joining them.
r0 = v1 * c2 - v2 * c1;
r1 = v2 * c0 - v0 * c2;
double rNorm = Math.sqrt(r0 * r0 + r1 * r1);
double num = (r0 * n0 + r1 * n1);
if (num < 0)
num = -num;
double d = 0;
if (nNorm != 0 && rNorm != 0)
d = num / (nNorm * rNorm);
// d *= line.size();
return d;
}
// Returns a new collection with every segment transformed by the given affine matrix.
public Lines rotate(Mat matrix) {
return new Lines(lines.stream().map(line -> line.transform(matrix)).collect(Collectors.toList()));
}
// Returns a new collection with every segment mapped through the given homography.
public Lines perspectivTransform(Mat matrix) {
return new Lines(lines.stream().map(line -> line.perspectivTransform(matrix)).collect(Collectors.toList()));
}
// Draws all segments onto the frame in the given color.
public void draw(Mat frame, Scalar color) {
lines.forEach(line -> line.draw(frame, color));
}
// Builds the collection from existing Line objects, recomputing the mean angle.
// NOTE(review): an empty collection yields mean = 0/0 = NaN.
public Lines(Collection<Line> lines) {
double mean = 0;
for (Line line : lines) {
this.lines.add(line);
mean += line.getAngle();
}
this.mean = mean / lines.size();
}
/** Number of segments in this collection. */
public int size() {
return lines.size();
}
/** Mean angle of the segments, in radians. */
public double getMean() {
return mean;
}
}
// An immutable 2D line segment with geometric helpers (slope/intercept form,
// intersections, transforms). Note that geta()/getb() are undefined (infinite/NaN)
// for vertical segments (x1 == x2).
public static class Line {
private final double x1, y1, x2, y2, angle;
public Line(Point p1, Point p2) {
this(p1.x, p1.y, p2.x, p2.y);
}
public Line(double x1, double y1, double x2, double y2) {
this.x1 = x1;
this.x2 = x2;
this.y1 = y1;
this.y2 = y2;
// Orientation of the segment in radians, in (-pi, pi].
this.angle = Math.atan2(y2 - y1, x2 - x1);
}
// Euclidean length of the segment.
public double size() {
return Math.sqrt(Math.pow(y2 - y1, 2) + Math.pow(x2 - x1, 2));
}
// Applies an affine transform (e.g. a rotation matrix) to both endpoints.
public Line transform(Mat rotationMatrix) {
MatOfPoint2f results = new MatOfPoint2f();
Core.transform(Converters.vector_Point2f_to_Mat(Arrays.asList(new Point(x1, y1), new Point(x2, y2))), results, rotationMatrix);
Point[] targets = results.toArray();
return new Line(targets[0].x, targets[0].y, targets[1].x, targets[1].y);
}
// Maps both endpoints through a 3x3 homography.
public Line perspectivTransform(Mat homography) {
MatOfPoint2f results = new MatOfPoint2f();
Core.perspectiveTransform(Converters.vector_Point2f_to_Mat(Arrays.asList(new Point(x1, y1), new Point(x2, y2))), results, homography);
Point[] targets = results.toArray();
return new Line(targets[0].x, targets[0].y, targets[1].x, targets[1].y);
}
public void draw(Mat frame, Scalar color) {
Imgproc.line(frame, new Point(x1, y1), new Point(x2, y2), color, 1);
}
@Override
public String toString() {
return "Line : " + angle;
}
public double getAngle() {
return angle;
}
// Slope a of the line y = a*x + b.
public double geta() {
return (y2 - y1) / (x2 - x1);
}
// Slope of the perpendicular direction (negative reciprocal of geta()).
public double getOrthoa() {
return (x2 - x1) / (y1 - y2);
}
// Intercept of the perpendicular line through p.
public double getOrthob(Point p) {
return p.y - getOrthoa() * p.x;
}
// Intercept b of the line y = a*x + b.
public double getb() {
return y1 - geta() * x1;
}
// Perpendicular distance from p to this (infinite) line.
public double distance(Point p) {
return Math.abs(geta() * p.x - p.y + getb()) / Math.sqrt(1 + Math.pow(geta(), 2));
}
// Intersection with the line y = a*x + b.
public Point intersection(double a, double b) {
double x = (b - getb()) / (geta() - a);
double y = a * x + b;
return new Point(x, y);
}
// Intersection with another Line.
public Point intersection(Line line) {
double x = (line.getb() - getb()) / (geta() - line.geta());
double y = geta() * x + getb();
return new Point(x, y);
}
// Intersection with the vertical line x = verticalLinex.
public Point intersection(double verticalLinex) {
double x = verticalLinex;
double y = geta() * x + getb();
return new Point(x, y);
}
}
/**
 * Shuts down the processing loop and releases the camera.
 * The release is done in a finally block so the camera is freed even if
 * awaitTermination is interrupted (the original leaked the capture in that case).
 */
@Override
public void stop() throws Exception {
    super.stop();
    timer.shutdown();
    try {
        timer.awaitTermination(5000, TimeUnit.MILLISECONDS);
    } finally {
        capture.release();
    }
}
} |
package h2o.common.cluster;
import h2o.common.Mode;
import h2o.common.Tools;
import h2o.common.thirdparty.redis.JedisCallBack;
import h2o.common.thirdparty.redis.JedisProvider;
import h2o.common.util.id.UuidUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import redis.clients.jedis.*;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit;
public class ClusterLock {
private static final Logger log = LoggerFactory.getLogger( ClusterLock.class.getName() );
private static final long DEFAULT_TIME_OUT = 30000;
private final String id = UuidUtil.getUuid();
private final JedisProvider jedisProvider;
private final JedisCommands _jedis;
private final String key;
private final int expire;
volatile boolean run = true;
private volatile boolean locked = false;
public ClusterLock(JedisProvider jedisProvider, String key, int expire ) {
this.jedisProvider = jedisProvider;
this._jedis = null;
this.key = "H2OClusterLock_" + key;
this.expire = expire;
}
public ClusterLock( JedisCommands jedis, String key, int expire ) {
this.jedisProvider = null;
this._jedis = jedis;
this.key = "H2OClusterLock_" + key;
this.expire = expire;
}
private void tryLock( JedisCommands jedis ) {
if ( "OK".equals(jedis.set( key, id , "NX" , "EX" , expire ) ) ) {
locked = true;
} else if ( id.equals(jedis.get(key) ) ) {
jedis.expire(key, expire);
locked = true;
} else {
locked = false;
}
}
public boolean tryLock() {
try {
if ( jedisProvider == null ) {
tryLock( this._jedis );
} else jedisProvider.callback(new JedisCallBack<Void>() {
@Override
public Void doCallBack( JedisCommands jedis) throws Exception {
tryLock( jedis );
return null;
}
});
} catch (Exception e) {
e.printStackTrace();
log.error( "" , e);
locked = false;
}
return locked;
}
public boolean lock() {
return lock( DEFAULT_TIME_OUT );
}
public boolean lock( long timeout ) {
long t = System.currentTimeMillis();
do {
if ( tryLock() ) {
return true;
}
try {
TimeUnit.MILLISECONDS.sleep(50 );
} catch (InterruptedException e) {
}
} while ( System.currentTimeMillis() - t < timeout);
return false;
}
private void unlockUNLUA( JedisCommands jedis ) {
if ( id.equals( jedis.get(key) ) ) {
jedis.del(key);
}
}
private boolean UNSUPPORT_LUA = Mode.isUserMode("REDIS_UNSUPPORT_LUA");
private static final String UNLOCK_LUA;
static {
StringBuilder sb = new StringBuilder();
sb.append("if redis.call(\"get\",KEYS[1]) == ARGV[1] ");
sb.append("then ");
sb.append(" return redis.call(\"del\",KEYS[1]) ");
sb.append("else ");
sb.append(" return 0 ");
sb.append("end ");
UNLOCK_LUA = sb.toString();
}
private void unlock( JedisCommands jedis ) {
if ( UNSUPPORT_LUA ) {
unlockUNLUA(jedis);
} else try {
List<String> keys = Collections.singletonList( key );
List<String> values = Collections.singletonList( id );
if (jedis instanceof JedisClusterScriptingCommands) {
(( JedisClusterScriptingCommands) jedis).eval(UNLOCK_LUA, keys, values);
} else if (jedis instanceof ScriptingCommands) {
(( ScriptingCommands ) jedis).eval(UNLOCK_LUA, keys, values);
}
} catch (Throwable e) {
Tools.log.error(e);
unlockUNLUA(jedis);
}
}
public void unlock() {
locked = false;
try {
if ( jedisProvider == null ) {
unlock( this._jedis );
} else jedisProvider.callback(new JedisCallBack<Void>() {
@Override
public Void doCallBack( JedisCommands jedis ) throws Exception {
unlock( jedis );
return null;
}
});
} catch ( Exception e ) {
e.printStackTrace();
log.error( "" , e);
}
}
public boolean isLocked() {
return run && locked;
}
} |
package com.twitter.heron.spi.common;
import java.util.Map;
import java.util.HashMap;
import java.util.Set;
/**
 * Context is an immutable map of String keys to Object values, built via the
 * nested {@link Builder}. Typed getters perform widening numeric conversion
 * (an Integer or Short value can be read as Long; integral and Float values
 * can be read as Double; a String is parsed as a last resort).
 */
public class Context {
    private final Map<String, Object> cxtMap = new HashMap<>();

    /** Mutable accumulator; call {@link #build()} to freeze contents into a Context. */
    public static class Builder {
        private final Map<String, Object> keyValues = new HashMap<>();

        private static Context.Builder create() {
            return new Builder();
        }

        public Builder put(String key, Object value) {
            this.keyValues.put(key, value);
            return this;
        }

        /** Copies all entries of an existing Context into this builder. */
        public Builder putAll(Context cxt) {
            keyValues.putAll(cxt.cxtMap);
            return this;
        }

        public Builder putAll(Map<String, Object> map) {
            keyValues.putAll(map);
            return this;
        }

        public Context build() {
            return new Context(this);
        }
    }

    private Context(Builder build) {
        cxtMap.putAll(build.keyValues);
    }

    public static Builder newBuilder() {
        return Builder.create();
    }

    public int size() {
        return cxtMap.size();
    }

    /** @return the raw value for key, or null when absent */
    public Object get(String key) {
        return cxtMap.get(key);
    }

    public String getStringValue(String key) {
        return (String) get(key);
    }

    public String getStringValue(String key, String defaultValue) {
        String value = getStringValue(key);
        return value != null ? value : defaultValue;
    }

    public Boolean getBooleanValue(String key) {
        return (Boolean) get(key);
    }

    public Boolean getBooleanValue(String key, boolean defaultValue) {
        Boolean value = getBooleanValue(key);
        return value != null ? value : defaultValue;
    }

    /** @return the value converted to Long, or null when the key is absent */
    public Long getLongValue(String key) {
        return getLong(get(key));
    }

    public Long getLongValue(String key, long defaultValue) {
        Object value = get(key);
        if (value != null) {
            return getLong(value);
        }
        return defaultValue;
    }

    /** @return the value converted to Double, or null when the key is absent */
    public Double getDoubleValue(String key) {
        return getDouble(get(key));
    }

    public Double getDoubleValue(String key, double defaultValue) {
        Object value = get(key);
        if (value != null) {
            return getDouble(value);
        }
        return defaultValue;
    }

    public boolean containsKey(String key) {
        return cxtMap.containsKey(key);
    }

    public Set<String> getKeySet() {
        return cxtMap.keySet();
    }

    private static Long getLong(Object o) {
        if (o == null) {
            // BUG FIX: previously fell through to o.toString() and threw NPE for absent keys.
            return null;
        }
        if (o instanceof Long) {
            return (Long) o;
        } else if (o instanceof Integer) {
            return Long.valueOf(((Integer) o).longValue());
        } else if (o instanceof Short) {
            return Long.valueOf(((Short) o).longValue());
        } else {
            try {
                return Long.parseLong(o.toString());
            } catch (NumberFormatException nfe) {
                throw new IllegalArgumentException("Don't know how to convert " + o + " to long");
            }
        }
    }

    private static Double getDouble(Object o) {
        if (o == null) {
            // BUG FIX: previously fell through to o.toString() and threw NPE for absent keys.
            return null;
        }
        if (o instanceof Double) {
            return (Double) o;
        } else if (o instanceof Float) {
            return Double.valueOf(((Float) o).doubleValue());
        } else if (o instanceof Long) {
            return Double.valueOf(((Long) o).doubleValue());
        } else if (o instanceof Integer) {
            return Double.valueOf(((Integer) o).doubleValue());
        } else if (o instanceof Short) {
            return Double.valueOf(((Short) o).doubleValue());
        } else {
            try {
                return Double.parseDouble(o.toString());
            } catch (NumberFormatException nfe) {
                throw new IllegalArgumentException("Don't know how to convert " + o + " to double");
            }
        }
    }
}
package net.i2p.data;
/*
* Public domain
*/
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import net.i2p.crypto.SHA256Generator;
/**
* A SimpleDataStructure contains only a single fixed-length byte array.
* The main reason to do this is to override
* toByteArray() and fromByteArray(), which are used by toBase64(), fromBase64(),
* and calculateHash() in DataStructureImpl - otherwise these would go through
* a wasteful array-to-stream-to-array pass.
* It also centralizes a lot of common code.
*
* Implemented in 0.8.2 and retrofitted over several of the classes in this package.
*
* As of 0.8.3, SDS objects may be cached. An SDS may be instantiated with null data,
* and setData(null) is also OK. However,
* once non-null data is set, the data reference is immutable;
* subsequent attempts to set the data via setData(), readBytes(),
* or fromBase64() will throw a RuntimeException.
*
* @since 0.8.2
* @author zzz
*/
public abstract class SimpleDataStructure extends DataStructureImpl {
protected byte[] _data;
/** this is just to avoid lots of calls to length() */
protected final int _length;
/** A new instance with the data set to null. Call readBytes(), setData(), or from ByteArray() after this to set the data */
public SimpleDataStructure() {
_length = length();
}
public SimpleDataStructure(byte data[]) {
_length = length();
setData(data);
}
abstract public int length();
/**
* Get the data reference (not a copy)
* @return the byte array, or null if unset
*/
public byte[] getData() {
return _data;
}
public void setData(byte[] data) {
if (_data != null)
throw new RuntimeException("Data already set");
if (data != null && data.length != _length)
throw new IllegalArgumentException("Bad data length");
_data = data;
}
/**
* Sets the data.
* @param data of correct length, or null
* @throws RuntimeException if data already set.
*/
public void readBytes(InputStream in) throws DataFormatException, IOException {
if (_data != null)
throw new RuntimeException("Data already set");
_data = new byte[_length];
int read = read(in, _data);
if (read != _length) throw new DataFormatException("Not enough bytes to read the data");
}
public void writeBytes(OutputStream out) throws DataFormatException, IOException {
if (_data == null) throw new DataFormatException("No data to write out");
out.write(_data);
}
@Override
public String toBase64() {
if (_data == null)
return null;
return Base64.encode(_data);
}
@Override
public void fromBase64(String data) throws DataFormatException {
if (data == null) throw new DataFormatException("Null data passed in");
byte[] d = Base64.decode(data);
if (d == null)
throw new DataFormatException("Bad Base64 encoded data");
if (d.length != _length)
throw new DataFormatException("Bad decoded data length, expected " + _length + " got " + d.length);
// call setData() instead of _data = data in case overridden
setData(d);
}
/** @return the SHA256 hash of the byte array, or null if the data is null */
@Override
public Hash calculateHash() {
if (_data != null) return SHA256Generator.getInstance().calculateHash(_data);
return null;
}
/**
* Overridden for efficiency.
* @return same thing as getData()
*/
@Override
public byte[] toByteArray() {
return _data;
}
/**
* Overridden for efficiency.
* Does the same thing as setData() but null not allowed.
* @param data non-null
* @throws DataFormatException if null or wrong length
*/
@Override
public void fromByteArray(byte data[]) throws DataFormatException {
if (data == null) throw new DataFormatException("Null data passed in");
if (data.length != _length) throw new DataFormatException("Bad data length");
// call setData() instead of _data = data in case overridden
setData(data);
}
@Override
public String toString() {
StringBuilder buf = new StringBuilder(64);
buf.append('[').append(getClass().getSimpleName()).append(": ");
if (_data == null) {
buf.append("null");
} else if (_length <= 32) {
buf.append(toBase64());
} else {
buf.append("size: ").append(Integer.toString(_length));
}
buf.append(']');
return buf.toString();
}
/**
* We assume the data has enough randomness in it, so use the first 4 bytes for speed.
* If this is not the case, override in the extending class.
*/
@Override
public int hashCode() {
if (_data == null)
return 0;
int rv = _data[0];
for (int i = 1; i < 4; i++)
rv ^= (_data[i] << (i*8));
return rv;
}
@Override
public boolean equals(Object obj) {
if ((obj == null) || !(obj instanceof SimpleDataStructure)) return false;
return DataHelper.eq(_data, ((SimpleDataStructure) obj)._data);
}
} |
package com.kehxstudios.atlas.type;
import com.kehxstudios.atlas.screens.AScreen;
import com.kehxstudios.atlas.screens.FlappyBatScreen;
import com.kehxstudios.atlas.screens.IntroScreen;
import com.kehxstudios.atlas.screens.MainMenuScreen;
import java.util.HashMap;
public enum ScreenType {
INTRO("Intro", "texturePacks/intro.atlas", 512, 910, IntroScreen.class),
MAIN_MENU("MainMenu", "texturePacks/mainMenu.atlas", 1440, 2560, MainMenuScreen.class),
FLAPPY_BAT("FlappyBat", "texturePacks/flappyBat.atlas", 240, 400, FlappyBatScreen.class),
SMART_ROCKETS("SmartRockets", "texturePacks/smartRockets.atlas", 512, 910, AScreen.class),
VOID("Void", "-", 0, 0, AScreen.class);
private String id, atlasPath;
private float width, height;
private Class loaderClass;
private ScreenType(String id, String atlasPath, float width, float height, Class loaderClass) {
this.id = id;
this.atlasPath = atlasPath;
this.width = width;
this.height = height;
this.loaderClass = loaderClass;
}
public String getId() { return id; }
public String getAtlasPath() { return atlasPath; }
public float getWidth() { return width; }
public float getHeight() { return height; }
public Class getLoaderClass() { return loaderClass; }
public static ScreenType getTypeById(String id) {
return screenTypes.get(id);
}
private static HashMap<String, ScreenType> screenTypes;
static {
screenTypes = new HashMap<String, ScreenType>();
for (ScreenType type : ScreenType.values()) {
screenTypes.put(type.id, type);
}
}
} |
package com.timepath.steam.io.test;
import com.timepath.plaf.x.filechooser.NativeFileChooser;
import com.timepath.steam.io.GCF;
import com.timepath.steam.io.GCF.DirectoryEntry;
import com.timepath.swing.DirectoryTreeCellRenderer;
import java.awt.Component;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.DefaultCellEditor;
import javax.swing.JTable;
import javax.swing.JTextField;
import javax.swing.SwingUtilities;
import javax.swing.table.DefaultTableModel;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.MutableTreeNode;
import javax.swing.tree.TreePath;
import javax.swing.tree.TreeSelectionModel;
/**
*
* @author timepath
*/
@SuppressWarnings("serial")
public class GCFTest extends javax.swing.JFrame {
// Currently opened GCF archive; null until File > Open succeeds.
private GCF g;
// Models backing the navigation tree (directories) and the listing table (files).
private final DefaultTreeModel tree;
private final DefaultTableModel table;
/**
* Creates new form GCFTest
*/
public GCFTest() {
initComponents();
// Single selection keeps the file table in sync with exactly one directory.
jTree1.getSelectionModel().setSelectionMode(TreeSelectionModel.SINGLE_TREE_SELECTION);
jTree1.setCellRenderer(new DirectoryTreeCellRenderer());
tree = (DefaultTreeModel) jTree1.getModel();
jTable1.setDefaultEditor(Object.class, new CellSelectionListener());
table = (DefaultTableModel) jTable1.getModel();
}
/**
* Repurposes the cell-editor hook as a "cell activated" listener: when a
* directory cell would start editing (e.g. on double-click), navigate into
* it instead. Always returns null so no editor component ever appears.
*/
private class CellSelectionListener extends DefaultCellEditor {
CellSelectionListener() {
super(new JTextField());
}
@Override
public Component getTableCellEditorComponent(JTable table, Object value, boolean isSelected, int row, int column) {
Object val = table.getValueAt(row, 0);
if(val instanceof DirectoryEntry) {
directoryChanged((DirectoryEntry) val);
}
return null;
}
}
/**
* This method is called from within the constructor to initialize the form.
* WARNING: Do NOT modify this code. The content of this method is always
* regenerated by the Form Editor.
*/
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
jPopupMenu1 = new javax.swing.JPopupMenu();
jPopupMenuItem1 = new javax.swing.JMenuItem();
jSplitPane1 = new javax.swing.JSplitPane();
jScrollPane2 = new javax.swing.JScrollPane();
jTree1 = new javax.swing.JTree();
jPanel1 = new javax.swing.JPanel();
jPanel2 = new javax.swing.JPanel();
jTextField1 = new javax.swing.JTextField();
jButton1 = new javax.swing.JButton();
jScrollPane3 = new javax.swing.JScrollPane();
jTable1 = new javax.swing.JTable();
jMenuBar1 = new javax.swing.JMenuBar();
jMenu1 = new javax.swing.JMenu();
jMenuItem1 = new javax.swing.JMenuItem();
jPopupMenuItem1.setText("Extract");
jPopupMenuItem1.setEnabled(false);
jPopupMenuItem1.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jPopupMenuItem1ActionPerformed(evt);
}
});
jPopupMenu1.add(jPopupMenuItem1);
setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE);
setTitle("GCF Viewer");
jSplitPane1.setDividerLocation(200);
jSplitPane1.setContinuousLayout(true);
jSplitPane1.setOneTouchExpandable(true);
javax.swing.tree.DefaultMutableTreeNode treeNode1 = new javax.swing.tree.DefaultMutableTreeNode("root");
jTree1.setModel(new javax.swing.tree.DefaultTreeModel(treeNode1));
jTree1.setRootVisible(false);
jTree1.addMouseListener(new java.awt.event.MouseAdapter() {
public void mouseClicked(java.awt.event.MouseEvent evt) {
jTree1MouseClicked(evt);
}
});
jTree1.addTreeSelectionListener(new javax.swing.event.TreeSelectionListener() {
public void valueChanged(javax.swing.event.TreeSelectionEvent evt) {
directoryChanged(evt);
}
});
jScrollPane2.setViewportView(jTree1);
jSplitPane1.setLeftComponent(jScrollPane2);
jPanel1.setLayout(new java.awt.BorderLayout());
jPanel2.setLayout(new java.awt.BorderLayout());
jTextField1.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jTextField1ActionPerformed(evt);
}
});
jPanel2.add(jTextField1, java.awt.BorderLayout.CENTER);
jButton1.setText("Search");
jButton1.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton1ActionPerformed(evt);
}
});
jPanel2.add(jButton1, java.awt.BorderLayout.EAST);
jPanel1.add(jPanel2, java.awt.BorderLayout.NORTH);
jTable1.setAutoCreateRowSorter(true);
jTable1.setModel(new javax.swing.table.DefaultTableModel(
new Object [][] {
},
new String [] {
"Name", "Size", "Attributes", "Path"
}
) {
Class[] types = new Class [] {
java.lang.Object.class, java.lang.Integer.class, java.lang.Object.class, java.lang.Object.class
};
public Class getColumnClass(int columnIndex) {
return types [columnIndex];
}
});
jTable1.setFillsViewportHeight(true);
jTable1.addMouseListener(new java.awt.event.MouseAdapter() {
public void mouseClicked(java.awt.event.MouseEvent evt) {
jTable1MouseClicked(evt);
}
});
jScrollPane3.setViewportView(jTable1);
jTable1.getColumnModel().getSelectionModel().setSelectionMode(javax.swing.ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
jPanel1.add(jScrollPane3, java.awt.BorderLayout.CENTER);
jSplitPane1.setRightComponent(jPanel1);
getContentPane().add(jSplitPane1, java.awt.BorderLayout.CENTER);
jMenu1.setText("File");
jMenuItem1.setAccelerator(javax.swing.KeyStroke.getKeyStroke(java.awt.event.KeyEvent.VK_O, java.awt.event.InputEvent.CTRL_MASK));
jMenuItem1.setText("Open");
jMenuItem1.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
open(evt);
}
});
jMenu1.add(jMenuItem1);
jMenuBar1.add(jMenu1);
setJMenuBar(jMenuBar1);
pack();
}// </editor-fold>//GEN-END:initComponents
// File > Open handler: pick a GCF file, load it, and rebuild the tree from
// its root directory entry.
private void open(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_open
File f = new NativeFileChooser(this, "Open GCF", null).choose(false, false);
if(f == null) {
LOG.info("File is null");
return;
}
g = GCF.load(f);
if(g == null) {
LOG.log(Level.WARNING, "Unable to load {0}", f);
return;
}
// Replace any previously loaded archive in the tree before inserting the new one.
((DefaultMutableTreeNode) tree.getRoot()).removeAllChildren();
DefaultMutableTreeNode gcf = new DefaultMutableTreeNode(g);
DefaultMutableTreeNode direct = new DefaultMutableTreeNode(g.directoryEntries[0]);
tree.insertNodeInto(direct, gcf, 0);
g.analyze(direct, false);
tree.insertNodeInto(gcf, (MutableTreeNode) tree.getRoot(), tree.getChildCount(tree.getRoot()));
tree.reload();
}//GEN-LAST:event_open
// Tree selection handler: show the files of the newly selected directory in
// the table, or clear the table when nothing is selected.
private void directoryChanged(javax.swing.event.TreeSelectionEvent evt) {//GEN-FIRST:event_directoryChanged
TreePath selection = evt.getNewLeadSelectionPath();
if(selection == null) {
table.setRowCount(0);
return;
}
Object node = selection.getLastPathComponent();
if(!(node instanceof DefaultMutableTreeNode)) {
return;
}
Object obj = ((DefaultMutableTreeNode) node).getUserObject();
if(!(obj instanceof DirectoryEntry)) {
return;
}
directoryChanged((DirectoryEntry) obj);
}//GEN-LAST:event_directoryChanged
// Entries queued for extraction by the context-menu handlers below.
private ArrayList<DirectoryEntry> toExtract = new ArrayList<DirectoryEntry>();
// Enable the "Extract" menu item only when something is queued.
private void extractablesUpdated() {
jPopupMenuItem1.setEnabled(!toExtract.isEmpty());
}
// "Extract" context-menu handler: ask for a target directory and extract
// every queued entry into it, logging (not aborting on) per-entry failures.
private void jPopupMenuItem1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jPopupMenuItem1ActionPerformed
File out = NativeFileChooser.choose(this, "Select extraction directory", null, true, true);
if(out == null) {
return;
}
for(DirectoryEntry e : toExtract) {
try {
g.extract(e.index, out);
} catch (IOException ex) {
Logger.getLogger(GCFTest.class.getName()).log(Level.SEVERE, null, ex);
}
}
LOG.info("Done");
}//GEN-LAST:event_jPopupMenuItem1ActionPerformed
// Right-click on the tree: select the clicked node (unless it is already part
// of the selection), queue the selected directory entries, and show the popup.
private void jTree1MouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_jTree1MouseClicked
if(SwingUtilities.isRightMouseButton(evt)) {
TreePath clicked = jTree1.getPathForLocation(evt.getX(), evt.getY());
if(clicked == null) {
return;
}
Object obj = clicked.getLastPathComponent();
if(obj instanceof DefaultMutableTreeNode) {
obj = ((DefaultMutableTreeNode) obj).getUserObject();
}
if(jTree1.getSelectionPaths() == null || !Arrays.asList(jTree1.getSelectionPaths()).contains(clicked)) {
jTree1.setSelectionPath(clicked);
}
toExtract.clear();
TreePath[] paths = jTree1.getSelectionPaths();
for(TreePath p : paths) {
if(!(p.getLastPathComponent() instanceof DefaultMutableTreeNode)) {
return;
}
Object userObject = ((DefaultMutableTreeNode)p.getLastPathComponent()).getUserObject();
if(userObject instanceof DirectoryEntry) {
toExtract.add((DirectoryEntry) userObject);
}
}
extractablesUpdated();
jPopupMenu1.show(jTree1, evt.getX(), evt.getY());
}
}//GEN-LAST:event_jTree1MouseClicked
// Right-click on the table: same queue-and-popup flow as the tree handler,
// but over the selected table rows.
private void jTable1MouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_jTable1MouseClicked
if(SwingUtilities.isRightMouseButton(evt)) {
int row = jTable1.rowAtPoint(evt.getPoint());
if(row == -1) {
return;
}
Object obj = table.getValueAt(row, 0);
int[] selectedRows = jTable1.getSelectedRows();
Arrays.sort(selectedRows);
// Only collapse the selection to the clicked row if it was not already selected.
if(Arrays.binarySearch(selectedRows, row) < 0) {
jTable1.setRowSelectionInterval(row, row);
}
toExtract.clear();
int[] selected = jTable1.getSelectedRows();
for(int r : selected) {
Object userObject = table.getValueAt(r, 0);
if(userObject instanceof DirectoryEntry) {
toExtract.add((DirectoryEntry) userObject);
}
}
extractablesUpdated();
jPopupMenu1.show(jTable1, evt.getX(), evt.getY());
}
}//GEN-LAST:event_jTable1MouseClicked
private void jTextField1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jTextField1ActionPerformed
search();
}//GEN-LAST:event_jTextField1ActionPerformed
private void jButton1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton1ActionPerformed
search();
}//GEN-LAST:event_jButton1ActionPerformed
// Searches the whole archive for the text-field query and lists matching
// files (directories are filtered out) in the table.
// NOTE(review): NPEs if no archive has been opened yet (g == null) - confirm intended.
private void search() {
jTree1.setSelectionPath(null);
ArrayList<DirectoryEntry> children = g.find(jTextField1.getText());
table.setRowCount(0);
for(int i = 0; i < children.size(); i++) {
DirectoryEntry c = children.get(i);
if(!c.isDirectory()) {
table.addRow(new Object[]{c, c.itemSize, c.attributes, c.getPath()});
}
}
}
// Repopulates the table with the immediate (non-directory) children of dir.
private void directoryChanged(DirectoryEntry dir) {
if(!dir.isDirectory()) {
return;
}
DirectoryEntry[] children = g.getImmediateChildren(dir);
table.setRowCount(0);
for(int i = 0; i < children.length; i++) {
DirectoryEntry c = children[i];
if(!c.isDirectory()) {
table.addRow(new Object[]{c, c.itemSize, c.attributes, c.getPath()});
}
}
}
/**
* @param args the command line arguments
*/
public static void main(String... args) {
/* Set the Nimbus look and feel */
//<editor-fold defaultstate="collapsed" desc=" Look and feel setting code (optional) ">
try {
for (javax.swing.UIManager.LookAndFeelInfo info : javax.swing.UIManager.getInstalledLookAndFeels()) {
if ("Nimbus".equals(info.getName())) {
javax.swing.UIManager.setLookAndFeel(info.getClassName());
break;
}
}
} catch (ClassNotFoundException ex) {
java.util.logging.Logger.getLogger(GCFTest.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
} catch (InstantiationException ex) {
java.util.logging.Logger.getLogger(GCFTest.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
} catch (IllegalAccessException ex) {
java.util.logging.Logger.getLogger(GCFTest.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
} catch (javax.swing.UnsupportedLookAndFeelException ex) {
java.util.logging.Logger.getLogger(GCFTest.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
}
//</editor-fold>
/* Create and display the form */
java.awt.EventQueue.invokeLater(new Runnable() {
public void run() {
new GCFTest().setVisible(true);
}
});
}
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JButton jButton1;
private javax.swing.JMenu jMenu1;
private javax.swing.JMenuBar jMenuBar1;
private javax.swing.JMenuItem jMenuItem1;
private javax.swing.JPanel jPanel1;
private javax.swing.JPanel jPanel2;
private javax.swing.JPopupMenu jPopupMenu1;
private javax.swing.JMenuItem jPopupMenuItem1;
private javax.swing.JScrollPane jScrollPane2;
private javax.swing.JScrollPane jScrollPane3;
private javax.swing.JSplitPane jSplitPane1;
private javax.swing.JTable jTable1;
private javax.swing.JTextField jTextField1;
private javax.swing.JTree jTree1;
// End of variables declaration//GEN-END:variables
private static final Logger LOG = Logger.getLogger(GCFTest.class.getName());
}
// Clirr: compares two versions of a java library for binary compatibility
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
package net.sf.clirr.core;
import java.util.Iterator;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.Collection;
/**
* Class which manages API Difference messages, including expanding message
* codes into strings and descriptions.
*/
public final class MessageManager
{
private static MessageManager instance;
private ArrayList messages = new ArrayList();
/**
* Utility class to sort messages by their numeric ids.
*/
private static class MessageComparator implements Comparator
{
public int compare(Object o1, Object o2)
{
Message m1 = (Message) o1;
Message m2 = (Message) o2;
return m1.getId() - m2.getId();
}
}
/**
* This is a singleton class; to get an instance of this class, use
* the getInstance method.
*/
private MessageManager()
{
}
/**
* Return the singleton instance of this class.
*/
public static MessageManager getInstance()
{
if (instance == null)
{
instance = new MessageManager();
}
return instance;
}
/**
* Add a message to the list of known messages.
*/
public void addMessage(Message msg)
{
messages.add(msg);
}
public void checkUnique()
{
java.util.Collections.sort(messages, new MessageComparator());
int lastId = -1;
for (Iterator i = messages.iterator(); i.hasNext();)
{
// check for any duplicates
Message m = (Message) i.next();
int currId = m.getId();
if (currId <= lastId)
{
throw new IllegalArgumentException("Message id [" + currId + "] is not unique.");
}
}
}
/**
* Return the complete set of registered messages.
*/
public Collection getMessages()
{
return messages;
}
} |
package sigfw.common;
import com.p1sec.sigfw.SigFW_interface.CryptoInterface;
import diameterfw.DiameterFirewall;
import static diameterfw.DiameterFirewall.VENDOR_ID;
import diameterfw.DiameterFirewallConfig;
import java.io.IOException;
import java.io.InputStream;
import java.security.InvalidKeyException;
import java.security.KeyFactory;
import java.security.KeyPair;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
import java.security.PrivateKey;
import java.security.PublicKey;
import java.security.Signature;
import java.security.SignatureException;
import java.security.interfaces.ECPrivateKey;
import java.security.interfaces.ECPublicKey;
import java.security.interfaces.RSAPrivateKey;
import java.security.interfaces.RSAPublicKey;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Base64;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.logging.Level;
//import javafx.util.Pair;
import java.util.AbstractMap;
import java.util.Date;
import java.util.SortedMap;
import javax.crypto.BadPaddingException;
import javax.crypto.Cipher;
import javax.crypto.IllegalBlockSizeException;
import javax.crypto.NoSuchPaddingException;
import org.apache.log4j.Logger;
import org.apache.log4j.PropertyConfigurator;
import org.jdiameter.api.Avp;
import org.jdiameter.api.AvpDataException;
import org.jdiameter.api.AvpSet;
import org.jdiameter.api.Message;
import org.mobicents.protocols.asn.AsnOutputStream;
import org.mobicents.protocols.asn.Tag;
import org.mobicents.protocols.ss7.sccp.LongMessageRuleType;
import org.mobicents.protocols.ss7.sccp.impl.message.MessageFactoryImpl;
import org.mobicents.protocols.ss7.sccp.message.SccpDataMessage;
import org.mobicents.protocols.ss7.tcap.asn.EncodeException;
import org.mobicents.protocols.ss7.tcap.asn.InvokeImpl;
import org.mobicents.protocols.ss7.tcap.asn.TcapFactory;
import org.mobicents.protocols.ss7.tcap.asn.comp.Component;
import org.mobicents.protocols.ss7.tcap.asn.comp.ComponentType;
import org.mobicents.protocols.ss7.tcap.asn.comp.Invoke;
import org.mobicents.protocols.ss7.tcap.asn.comp.OperationCode;
import org.mobicents.protocols.ss7.tcap.asn.comp.Parameter;
import org.mobicents.protocols.ss7.tcap.asn.comp.TCBeginMessage;
import static sigfw.common.Utils.concatByteArray;
import static sigfw.common.Utils.int32ToBytes;
import static sigfw.common.Utils.bytesToInt32;
import static sigfw.common.Utils.splitByteArray;
/**
*
* @author Martin Kacer
*/
public class Crypto implements CryptoInterface {
/*
// Encryption RSA
public static KeyFactory keyFactoryRSA;
public static Cipher cipherRSA;
public static Signature signatureRSA;
// Encryption EC
public static KeyFactory keyFactoryEC;
public static Cipher cipherAES_GCM;
public static Signature signatureECDSA;
*/
// Vendor-specific Diameter AVP codes used by the encryption scheme.
static final public int AVP_ENCRYPTED = 1100;
static final public int AVP_ENCRYPTED_GROUPED = 1101;
static final public int AVP_ENCRYPTED_GROUPED_INDEXED = 1102;
// Vendor-specific Diameter AVP codes used by the DESS signing scheme
// (see diameterSign(): signature, timestamp, signing realm, algorithm type).
static final public int AVP_DESS_SIGNATURE = 1000;
static final public int AVP_DESS_DIGITAL_SIGNATURE = 1001;
static final public int AVP_DESS_SYSTEM_TIME = 1002;
static final public int AVP_DESS_SIGNING_IDENTITY = 1003;
static final public int AVP_DESS_DIGITAL_SIGNATURE_TYPE = 1004;
// Enum values carried in AVP_DESS_DIGITAL_SIGNATURE_TYPE selecting the signature algorithm.
static final public int ENUM_DESS_DIGITAL_SIGNATURE_TYPE_RSA4096_with_SHA256 = 0;
static final public int ENUM_DESS_DIGITAL_SIGNATURE_TYPE_ECDSA_with_SHA256 = 1;
// TCAP operation code used for the signature component.
static final private Long OC_SIGNATURE = 100L;
// Diameter signature and decryption time window used for TVP
public final static long diameter_tvp_time_window = 30; // in seconds
// TCAP signature and decryption time window used for TVP
private final static long tcap_tvp_time_window = 30; // in seconds
protected static final Logger logger = Logger.getLogger(Crypto.class);
// Configure log4j once, as soon as the class is loaded.
static {
configLog4j();
}
/**
 * No-op constructor. The commented-out block below used to initialize shared
 * KeyFactory/Cipher/Signature instances here; the current code instead creates
 * Signature instances per call (see diameterSign) - presumably because those
 * JCA objects are not thread-safe when shared. NOTE(review): confirm before
 * reviving this code.
 */
public Crypto() {
/*
// Encryption RSA
try {
keyFactoryRSA = KeyFactory.getInstance("RSA");
cipherRSA = Cipher.getInstance("RSA/ECB/PKCS1Padding");
signatureRSA = Signature.getInstance("SHA256WithRSA");
} catch (NoSuchAlgorithmException ex) {
java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
} catch (NoSuchPaddingException ex) {
java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
}
// Encryption EC
try {
keyFactoryEC = KeyFactory.getInstance("EC");
cipherAES_GCM = Cipher.getInstance("AES/GCM/NoPadding", "SunJCE");
signatureECDSA = Signature.getInstance("SHA256withECDSA");
} catch (NoSuchAlgorithmException ex) {
java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
} catch (NoSuchPaddingException ex) {
java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
} catch (NoSuchProviderException ex) {
java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
}
*/
}
/**
 * Loads {@code log4j.properties} from the classpath and configures log4j
 * with it. Failures are reported but never propagated, so a missing or
 * broken logging configuration cannot prevent class loading (this method
 * runs from the static initializer).
 */
protected static void configLog4j() {
    InputStream inStreamLog4j = Crypto.class.getClassLoader().getResourceAsStream("log4j.properties");
    if (inStreamLog4j == null) {
        // BUGFIX: previously a missing resource caused a NullPointerException
        // inside Properties.load (swallowed as a printed stack trace).
        System.err.println("log4j.properties not found on classpath, skipping log4j configuration");
        return;
    }
    Properties propertiesLog4j = new Properties();
    try {
        propertiesLog4j.load(inStreamLog4j);
        PropertyConfigurator.configure(propertiesLog4j);
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        // BUGFIX: the stream was never closed (resource leak).
        try {
            inStreamLog4j.close();
        } catch (IOException ignored) {
            // best-effort close; nothing useful to do here
        }
    }
    logger.debug("log4j configured");
}
/**
 * Adds a DESS signature grouped AVP (AVP_DESS_SIGNATURE) to a Diameter message.
 *
 * The group carries the signing identity, the current system time, the
 * signature type, and finally the digital signature itself. The signed data
 * is: applicationId:commandCode:endToEndId followed by the group's member
 * AVPs serialized as "code|base64(rawData)" strings and sorted, so the
 * result is independent of AVP ordering (which DRAs in IPX may change).
 * ROUTE_RECORD AVPs are excluded from signing for the same reason.
 *
 * Only RSA (SHA256WithRSA) keys are supported; for EC or unknown key types
 * the partially-built signature group is removed again and the message is
 * left unsigned. Messages that already carry AVP_DESS_SIGNATURE are left
 * untouched.
 *
 * @param message      Diameter message to sign (modified in place)
 * @param keyPair      key pair whose private key signs the message; may be null (no-op)
 * @param signingRealm realm name placed into AVP_DESS_SIGNING_IDENTITY
 */
@Override
public void diameterSign(Message message, KeyPair keyPair, String signingRealm) {
    //logger.debug("Message Sign = " + message.getAvps().toString());
    // Per-call Signature instances: java.security.Signature is not thread-safe.
    Signature signatureRSA = null;
    Signature signatureECDSA = null;
    // Encryption RSA
    try {
        signatureRSA = Signature.getInstance("SHA256WithRSA");
    } catch (NoSuchAlgorithmException ex) {
        java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
    }
    // Encryption EC
    try {
        signatureECDSA = Signature.getInstance("SHA256withECDSA");
    } catch (NoSuchAlgorithmException ex) {
        java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
    }
    if (keyPair != null) {
        PrivateKey privateKey = keyPair.getPrivate();
        if(privateKey != null) {
            AvpSet _avps = message.getAvps();
            // Do not double-sign: skip if a signature group is already present.
            boolean signed = false;
            if (_avps.getAvp(AVP_DESS_SIGNATURE, VENDOR_ID) != null) {
                signed = true;
            }
            if (!signed) {
                // Add DESS_SIGNATURE grouped AVP
                AvpSet avps = _avps.addGroupedAvp(AVP_DESS_SIGNATURE, VENDOR_ID, false, false);
                // Add DESS_SIGNING_REALM inside
                // NOTE(review): uses the platform default charset for getBytes();
                // non-ASCII realms could be interop-sensitive — confirm UTF-8 is intended.
                avps.addAvp(AVP_DESS_SIGNING_IDENTITY, signingRealm.getBytes(), VENDOR_ID, false, false);
                // Add DESS_SYSTEM_TIME inside
                long t = System.currentTimeMillis();
                Date date = new Date(t);
                avps.addAvp(AVP_DESS_SYSTEM_TIME, date, VENDOR_ID, false, false);
                // Add AVP_DESS_DIGITAL_SIGNATURE_TYPE inside
                if (privateKey instanceof RSAPrivateKey) {
                    avps.addAvp(AVP_DESS_DIGITAL_SIGNATURE_TYPE, ENUM_DESS_DIGITAL_SIGNATURE_TYPE_RSA4096_with_SHA256, VENDOR_ID, false, false);
                }
                else if (privateKey instanceof ECPrivateKey) {
                    avps.addAvp(AVP_DESS_DIGITAL_SIGNATURE_TYPE, ENUM_DESS_DIGITAL_SIGNATURE_TYPE_ECDSA_with_SHA256, VENDOR_ID, false, false);
                }
                // Add AVP_DESS_DIGITAL_SIGNATURE
                try {
                    String dataToSign = message.getApplicationId() + ":" + message.getCommandCode() + ":" + message.getEndToEndIdentifier();
                    // jDiameter AVPs are not ordered, and the order could be changed by DRAs in IPX, so order AVPs by sorting base64 strings
                    List<String> strings = new ArrayList<String>();
                    for (int i = 0; i < avps.size(); i++) {
                        Avp a = avps.getAvpByIndex(i);
                        if (a.getCode() != Avp.ROUTE_RECORD) {
                            strings.add(a.getCode() + "|" + Base64.getEncoder().encodeToString(a.getRawData()));
                        }
                    }
                    Collections.sort(strings);
                    for (String s : strings) {
                        dataToSign += ":" + s;
                    }
                    byte[] signatureBytes = null;
                    // RSA
                    if (privateKey instanceof RSAPrivateKey) {
                        signatureRSA.initSign(privateKey);
                        signatureRSA.update(dataToSign.getBytes());
                        signatureBytes = signatureRSA.sign();
                    }
                    else if (privateKey instanceof ECPrivateKey) {
                        // EC is not supported: roll back the partially-built signature group.
                        _avps.removeAvp(AVP_DESS_SIGNATURE, VENDOR_ID);
                        logger.warn("EC Public Key algorithm not implemented");
                        return;
                    } else {
                        _avps.removeAvp(AVP_DESS_SIGNATURE, VENDOR_ID);
                        logger.warn("Unknown Private Key algorithm");
                        return;
                    }
                    logger.debug("Adding Diameter Signed Data: " + dataToSign);
                    logger.debug("Adding Diameter Signature: " + Base64.getEncoder().encodeToString(signatureBytes));
                    // Add AVP_DESS_SIGNATURE inside
                    avps.addAvp(AVP_DESS_DIGITAL_SIGNATURE, signatureBytes, VENDOR_ID, false, false);
                } catch (InvalidKeyException ex) {
                    // On any signing failure remove the incomplete signature group.
                    _avps.removeAvp(AVP_DESS_SIGNATURE, VENDOR_ID);
                    java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
                } catch (SignatureException ex) {
                    _avps.removeAvp(AVP_DESS_SIGNATURE, VENDOR_ID);
                    java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
        }
    }
}
/**
 * Verifies the DESS signature of a Diameter message and removes the
 * signature group from the message as a side effect.
 *
 * Steps: locate AVP_DESS_SIGNATURE, read signing identity / system time /
 * signature type / digital signature from the group, check the timestamp
 * against {@link #diameter_tvp_time_window}, strip the group from the
 * message, rebuild the canonical signed-data string (same sorted-base64
 * form as {@link #diameterSign}) and verify the signature with the public
 * key configured for origin_realm:signing_realm.
 *
 * @param message    Diameter message to verify (signature AVP is removed)
 * @param publicKeys configured public keys; null skips verification entirely
 * @return empty string on success or when verification is skipped,
 *         otherwise a human-readable error description
 */
@Override
public String diameterVerify(Message message, SortedMap<String, PublicKey> publicKeys) {
    //logger.debug("Message Verify = " + message.getAvps().toString());
    if (publicKeys == null) {
        return "";
    }
    // Per-call Signature instance: java.security.Signature is not thread-safe.
    // (An unused, never-initialized signatureECDSA local was removed.)
    Signature signatureRSA = null;
    try {
        signatureRSA = Signature.getInstance("SHA256WithRSA");
    } catch (NoSuchAlgorithmException ex) {
        java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
    }
    AvpSet _avps = message.getAvps();
    Avp _a = _avps.getAvp(AVP_DESS_SIGNATURE, VENDOR_ID);
    if (_a == null) {
        return "DIAMETER FW: Missing DIAMETER signature (AVP_DESS_SIGNATURE).";
    }
    AvpSet avps = null;
    try {
        avps = _a.getGrouped();
    } catch (AvpDataException ex) {
        return "DIAMETER FW: Wrong DIAMETER signature. AVP_DESS_SIGNATURE is not grouped AVP.";
    }
    try {
        // Get AVP_DESS_SIGNING_IDENTITY
        String signing_realm = null;
        String orig_realm = null;
        // NOTE(review): ORIGIN_REALM is looked up inside the signature group
        // rather than on the top-level message — confirm this is intended.
        Avp a_origin_realm = avps.getAvp(Avp.ORIGIN_REALM);
        if (a_origin_realm != null && a_origin_realm.getDiameterURI() != null && a_origin_realm.getDiameterURI().getFQDN() != null) {
            orig_realm = a_origin_realm.getDiameterURI().getFQDN();
        }
        Avp a_signing_realm = avps.getAvp(AVP_DESS_SIGNING_IDENTITY, VENDOR_ID);
        if (a_signing_realm != null && a_signing_realm.getDiameterURI() != null && a_signing_realm.getDiameterURI().getFQDN() != null) {
            signing_realm = a_signing_realm.getDiameterURI().getFQDN();
        } else if (orig_realm != null) {
            // Fall back to the origin realm when no explicit signing identity is present.
            signing_realm = orig_realm;
        } else {
            return "DIAMETER FW: Unable to verify message signature. Both AVP_DESS_SIGNING_REALM and ORIGIN_REALM are missing.";
        }
        // Get AVP_DESS_SYSTEM_TIME
        Avp a_system_time = avps.getAvp(AVP_DESS_SYSTEM_TIME, VENDOR_ID);
        if (a_system_time == null) {
            return "DIAMETER FW: Invalid message signature. Missing AVP_DESS_SYSTEM_TIME.";
        }
        // Get AVP_DESS_DIGITAL_SIGNATURE_TYPE
        Avp a_signature_type = avps.getAvp(AVP_DESS_DIGITAL_SIGNATURE_TYPE, VENDOR_ID);
        if (a_signature_type == null) {
            return "DIAMETER FW: Invalid message signature. Missing AVP_DESS_DIGITAL_SIGNATURE_TYPE.";
        }
        // Get AVP_DESS_DIGITAL_SIGNATURE
        Avp a_digital_signature = avps.getAvp(AVP_DESS_DIGITAL_SIGNATURE, VENDOR_ID);
        if (a_digital_signature == null) {
            return "DIAMETER FW: Invalid message signature. Missing AVP_DESS_DIGITAL_SIGNATURE.";
        }
        // Verify timestamp (anti-replay window).
        Date date = a_system_time.getTime();
        byte[] signatureBytes = null;
        if (date != null) {
            long t = System.currentTimeMillis();
            long t_received = date.getTime();
            if (Math.abs(t_received-t) > diameter_tvp_time_window*1000) {
                return "DIAMETER FW: DIAMETER verify signature. Wrong timestamp in TVP (received: " + t_received + ", current: " + t + ")";
            }
        }
        // remove all signature components from the message
        _avps.removeAvp(AVP_DESS_SIGNATURE, VENDOR_ID);
        // Verify Signature
        signatureBytes = a_digital_signature.getOctetString();
        String dataToSign = message.getApplicationId() + ":" + message.getCommandCode() + ":" + message.getEndToEndIdentifier();
        // jDiameter AVPs are not ordered, so order AVPs by sorting base64 strings.
        // NOTE(review): this iterates the signature group, which at this point also
        // contains AVP_DESS_DIGITAL_SIGNATURE itself, while diameterSign hashed the
        // group before that AVP was added — confirm the canonical forms match.
        List<String> strings = new ArrayList<String>();
        for (int i = 0; i < avps.size(); i++) {
            Avp a = avps.getAvpByIndex(i);
            if (a.getCode() != Avp.ROUTE_RECORD) {
                strings.add(a.getCode() + "|" + Base64.getEncoder().encodeToString(a.getRawData()));
            }
        }
        Collections.sort(strings);
        for (String s : strings) {
            dataToSign += ":" + s;
        }
        PublicKey publicKey = DiameterFirewallConfig.origin_realm_verify_signing_realm.get(orig_realm + ":" + signing_realm);
        if (publicKey == null) {
            // No key configured for this realm pair: nothing to verify against.
            return "";
        }
        if (publicKey instanceof RSAPublicKey) {
            if (a_signature_type.getInteger32() != ENUM_DESS_DIGITAL_SIGNATURE_TYPE_RSA4096_with_SHA256) {
                logger.warn("Configured Public Key type mismatch with type received in AVP_DESS_DIGITAL_SIGNATURE_TYPE");
                return "";
            }
            signatureRSA.initVerify(publicKey);
            signatureRSA.update(dataToSign.getBytes());
            if (signatureBytes != null && signatureRSA.verify(signatureBytes)) {
                return "";
            }
        } else if (publicKey instanceof ECPublicKey) {
            if (a_signature_type.getInteger32() != ENUM_DESS_DIGITAL_SIGNATURE_TYPE_ECDSA_with_SHA256) {
                logger.warn("Configured Public Key type mismatch with type received in AVP_DESS_DIGITAL_SIGNATURE_TYPE");
                return "";
            }
            logger.warn("EC Public Key algorithm not implemented");
            return "";
        } else {
            logger.warn("Unknown Public Key algorithm");
            return "";
        }
    } catch (InvalidKeyException ex) {
        java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
    } catch (SignatureException ex) {
        java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
    } catch (AvpDataException ex) {
        java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
    }
    return "DIAMETER FW: Wrong DIAMETER signature";
}
/**
 * Encrypts a Diameter message AVP-by-AVP (v1 scheme).
 *
 * Every AVP except routing/session AVPs (Origin/Destination Host/Realm,
 * Session-Id, Route-Record) and already-encrypted containers is replaced in
 * place by an AVP_ENCRYPTED octet-string AVP. Each plaintext AVP is encoded,
 * split into chunks that fit the RSA key size (minus 11 bytes PKCS#1 padding
 * and 4 bytes TVP), each chunk prefixed with a TVP timestamp (0.1 s units)
 * and RSA-encrypted; the concatenated ciphertext is prefixed with a 4-byte
 * SPI. Only RSA public keys are supported.
 *
 * @param message   Diameter message to encrypt (modified in place)
 * @param publicKey public key used for message encryption
 * @throws InvalidKeyException if the RSA cipher rejects the key
 */
@Override
public void diameterEncrypt(Message message, PublicKey publicKey) throws InvalidKeyException {
    // Encryption RSA
    // Per-call Cipher instances: javax.crypto.Cipher is not thread-safe.
    Cipher cipherRSA = null;
    // Encryption EC
    Cipher cipherAES_GCM = null;
    // Encryption RSA
    try {
        cipherRSA = Cipher.getInstance("RSA/ECB/PKCS1Padding");
    } catch (NoSuchAlgorithmException ex) {
        java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
    } catch (NoSuchPaddingException ex) {
        java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
    }
    // Encryption EC
    try {
        cipherAES_GCM = Cipher.getInstance("AES/GCM/NoPadding", "SunJCE");
    } catch (NoSuchAlgorithmException ex) {
        java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
    } catch (NoSuchPaddingException ex) {
        java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
    } catch (NoSuchProviderException ex) {
        java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
    }
    //logger.debug("== diameterEncrypt ==");
    AvpSet avps = message.getAvps();
    // Size is cached: each iteration replaces one AVP in place (insert + remove),
    // so the total AVP count does not change during the loop.
    int avps_size = avps.size();
    for (int i = 0; i < avps_size; i++) {
        Avp a = avps.getAvpByIndex(i);
        //logger.debug("AVP[" + i + "] Code = " + a.getCode());
        // Routing/session AVPs and already-encrypted containers stay in clear text.
        if (
            a.getCode() != Avp.ORIGIN_HOST &&
            a.getCode() != Avp.ORIGIN_REALM &&
            a.getCode() != Avp.DESTINATION_HOST &&
            a.getCode() != Avp.DESTINATION_REALM &&
            a.getCode() != Avp.SESSION_ID &&
            a.getCode() != Avp.ROUTE_RECORD &&
            a.getCode() != AVP_ENCRYPTED &&
            a.getCode() != AVP_ENCRYPTED_GROUPED
        ) {
            if (publicKey instanceof RSAPublicKey) {
                try {
                    //byte[] d = a.getRawData();
                    byte [] d = Utils.encodeAvp(a);
                    // SPI(version) and TVP(timestamp)
                    byte[] SPI = {0x00, 0x00, 0x00, 0x00}; // TODO
                    byte[] TVP = {0x00, 0x00, 0x00, 0x00};
                    long t = System.currentTimeMillis()/100; // in 0.1s
                    TVP[0] = (byte) ((t >> 24) & 0xFF);
                    TVP[1] = (byte) ((t >> 16) & 0xFF);
                    TVP[2] = (byte) ((t >> 8) & 0xFF);
                    TVP[3] = (byte) ((t >> 0) & 0xFF);
                    //Crypto.cipherRSA.init(Cipher.ENCRYPT_MODE, publicKey);
                    //byte[] cipherText = Crypto.cipherRSA.doFinal(b);
                    RSAPublicKey rsaPublicKey = (RSAPublicKey)publicKey;
                    cipherRSA.init(Cipher.ENCRYPT_MODE, publicKey);
                    int keyLength = rsaPublicKey.getModulus().bitLength() / 8;
                    // Chunk size leaves room for PKCS#1 padding (11 bytes) and the TVP (4 bytes).
                    byte[][] datas = splitByteArray(d, keyLength - 11 - 4);
                    byte[] cipherText = null;
                    for (byte[] b : datas) {
                        cipherText = concatByteArray(cipherText, cipherRSA.doFinal(concatByteArray(TVP, b)));
                    }
                    cipherText = concatByteArray(SPI, cipherText);
                    //logger.debug("Add AVP Encrypted. Current index = " + i);
                    // Replace the plaintext AVP at index i with the encrypted one.
                    avps.insertAvp(i, AVP_ENCRYPTED, cipherText, VENDOR_ID, false, true);
                    avps.removeAvpByIndex(i + 1);
                } catch (IllegalBlockSizeException ex) {
                    java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
                } catch (BadPaddingException ex) {
                    java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
                }
            } else if (publicKey instanceof ECPublicKey) {
                logger.warn("EC algorithm not implemented");
                return;
            } else {
                logger.warn("Unknown Public Key algorithm");
                return;
            }
        }
    }
}
/**
 * Decrypts a Diameter message encrypted by {@link #diameterEncrypt} (per-AVP,
 * AVP_ENCRYPTED) or {@link #diameterEncrypt_v2} (AVP_ENCRYPTED_GROUPED).
 *
 * For each encrypted AVP: strip the 4-byte SPI, split the ciphertext into
 * RSA-key-sized chunks, decrypt each chunk, validate the 4-byte TVP timestamp
 * prefix of every chunk against {@link #diameter_tvp_time_window} (TVP is in
 * 0.1 s units, hence the *10 factor), concatenate the payloads, decode them
 * back into AVP(s) and splice those into the message in place of the
 * encrypted container. Only RSA private keys are supported.
 *
 * NOTE(review): the loop bound avps_size is cached before the loop, but the
 * grouped branch grows the AVP set by (group size - 1); trailing AVPs shifted
 * beyond the cached size are not scanned — confirm this is intended.
 *
 * @param message Diameter message to decrypt (modified in place)
 * @param keyPair key pair whose private key decrypts the message
 * @return empty string on success, otherwise an error description
 *         (e.g. TVP timestamp outside the allowed window)
 */
@Override
public String diameterDecrypt(Message message, KeyPair keyPair) {
    // Encryption RSA
    // Per-call Cipher instances: javax.crypto.Cipher is not thread-safe.
    Cipher cipherRSA = null;
    // Encryption EC
    Cipher cipherAES_GCM = null;
    // Encryption RSA
    try {
        cipherRSA = Cipher.getInstance("RSA/ECB/PKCS1Padding");
    } catch (NoSuchAlgorithmException ex) {
        java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
    } catch (NoSuchPaddingException ex) {
        java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
    }
    // Encryption EC
    try {
        cipherAES_GCM = Cipher.getInstance("AES/GCM/NoPadding", "SunJCE");
    } catch (NoSuchAlgorithmException ex) {
        java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
    } catch (NoSuchPaddingException ex) {
        java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
    } catch (NoSuchProviderException ex) {
        java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
    }
    //logger.debug("== diameterDecrypt ==");
    AvpSet avps = message.getAvps();
    int avps_size = avps.size();
    for (int i = 0; i < avps_size; i++) {
        Avp a = avps.getAvpByIndex(i);
        //logger.debug("AVP[" + i + "] Code = " + a.getCode());
        if (a.getCode() == AVP_ENCRYPTED && a.isVendorId() && a.getVendorId() == VENDOR_ID) {
            // Single encrypted AVP produced by diameterEncrypt (v1 scheme).
            logger.debug("Diameter Decryption of Encrypted AVP");
            PrivateKey privateKey = keyPair.getPrivate();
            if (privateKey instanceof RSAPrivateKey) {
                try {
                    byte[] b = a.getOctetString();
                    // SPI(version) and TVP(timestamp)
                    byte[] SPI = {0x00, 0x00, 0x00, 0x00};
                    byte[] TVP = {0x00, 0x00, 0x00, 0x00};
                    byte[] d = null;
                    // Split off the leading SPI; the remainder is the ciphertext.
                    if (b.length >= SPI.length) {
                        SPI = Arrays.copyOfRange(b, 0, SPI.length);
                        d = Arrays.copyOfRange(b, SPI.length, b.length);
                    } else {
                        d = b;
                    }
                    // TODO verify SPI
                    cipherRSA.init(Cipher.DECRYPT_MODE, privateKey);
                    RSAPublicKey rsaPublicKey = (RSAPublicKey)keyPair.getPublic();
                    int keyLength = rsaPublicKey.getModulus().bitLength() / 8;
                    // Each RSA block is exactly keyLength bytes of ciphertext.
                    byte[][] datas = splitByteArray(d, keyLength);
                    byte[] decryptedText = null;
                    for (byte[] _b : datas) {
                        d = cipherRSA.doFinal(_b);
                        // Build the current time as a big-endian 4-byte TVP (0.1 s units) ...
                        long t = System.currentTimeMillis()/100; // in 0.1s
                        TVP[0] = (byte) ((t >> 24) & 0xFF);
                        TVP[1] = (byte) ((t >> 16) & 0xFF);
                        TVP[2] = (byte) ((t >> 8) & 0xFF);
                        TVP[3] = (byte) ((t >> 0) & 0xFF);
                        t = 0;
                        for (int j = 0; j < TVP.length; j++) {
                            t = ((t << 8) + (TVP[j] & 0xff));
                        }
                        // ... then read the received TVP from the first 4 decrypted bytes.
                        TVP[0] = d[0]; TVP[1] = d[1]; TVP[2] = d[2]; TVP[3] = d[3];
                        long t_tvp = 0;
                        for (int j = 0; j < TVP.length; j++) {
                            t_tvp = ((t_tvp << 8) + (TVP[j] & 0xff));
                        }
                        // Window is in seconds; TVP is in 0.1 s units, hence *10.
                        if (Math.abs(t_tvp-t) > diameter_tvp_time_window*10) {
                            return "DIAMETER FW: Blocked in decryption, Wrong timestamp in TVP (received: " + t_tvp + ", current: " + t + ")";
                        }
                        // Drop the TVP prefix and keep the payload bytes.
                        d = Arrays.copyOfRange(d, TVP.length, d.length);
                        decryptedText = concatByteArray(decryptedText, d);
                    }
                    //logger.debug("Add AVP Decrypted. Current index = " + i);
                    //AvpImpl avp = new AvpImpl(code, (short) flags, (int) vendor, rawData);
                    //avps.insertAvp(i, ByteBuffer.wrap(cc).order(ByteOrder.BIG_ENDIAN).getInt(), d, false, false);
                    // Replace the encrypted AVP at index i with the decoded plaintext AVP.
                    AvpImpl _a = (AvpImpl)Utils.decodeAvp(decryptedText);
                    avps.insertAvp(i, _a.getCode(), _a.getRawData(), _a.vendorID, _a.isMandatory, _a.isEncrypted);
                    avps.removeAvpByIndex(i + 1);
                } catch (InvalidKeyException ex) {
                    java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
                } catch (IllegalBlockSizeException ex) {
                    java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
                } catch (BadPaddingException ex) {
                    java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
                } catch (AvpDataException ex) {
                    java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
                } catch (IOException ex) {
                    java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
                }
            } else if (privateKey instanceof ECPrivateKey) {
                logger.warn("EC algorithm not implemented");
                return "";
            } else {
                logger.warn("Unknown Private Key algorithm");
                return "";
            }
        } else if (a.getCode() == AVP_ENCRYPTED_GROUPED && a.isVendorId() && a.getVendorId() == VENDOR_ID) {
            // Grouped encrypted container produced by diameterEncrypt_v2.
            logger.debug("Diameter Decryption of Grouped Encrypted AVP");
            PrivateKey privateKey = keyPair.getPrivate();
            if (privateKey instanceof RSAPrivateKey) {
                try {
                    byte[] b = a.getOctetString();
                    // SPI(version) and TVP(timestamp)
                    byte[] SPI = {0x00, 0x00, 0x00, 0x00};
                    byte[] TVP = {0x00, 0x00, 0x00, 0x00};
                    byte[] d = null;
                    if (b.length >= SPI.length) {
                        SPI = Arrays.copyOfRange(b, 0, SPI.length);
                        d = Arrays.copyOfRange(b, SPI.length, b.length);
                    } else {
                        d = b;
                    }
                    // TODO verify SPI
                    cipherRSA.init(Cipher.DECRYPT_MODE, privateKey);
                    RSAPublicKey rsaPublicKey = (RSAPublicKey)keyPair.getPublic();
                    int keyLength = rsaPublicKey.getModulus().bitLength() / 8;
                    byte[][] datas = splitByteArray(d, keyLength);
                    byte[] decryptedText = null;
                    for (byte[] _b : datas) {
                        d = cipherRSA.doFinal(_b);
                        // Same per-chunk TVP timestamp check as the single-AVP branch above.
                        long t = System.currentTimeMillis()/100; // in 0.1s
                        TVP[0] = (byte) ((t >> 24) & 0xFF);
                        TVP[1] = (byte) ((t >> 16) & 0xFF);
                        TVP[2] = (byte) ((t >> 8) & 0xFF);
                        TVP[3] = (byte) ((t >> 0) & 0xFF);
                        t = 0;
                        for (int j = 0; j < TVP.length; j++) {
                            t = ((t << 8) + (TVP[j] & 0xff));
                        }
                        TVP[0] = d[0]; TVP[1] = d[1]; TVP[2] = d[2]; TVP[3] = d[3];
                        long t_tvp = 0;
                        for (int j = 0; j < TVP.length; j++) {
                            t_tvp = ((t_tvp << 8) + (TVP[j] & 0xff));
                        }
                        if (Math.abs(t_tvp-t) > diameter_tvp_time_window*10) {
                            return "DIAMETER FW: Blocked in decryption, Wrong timestamp in TVP (received: " + t_tvp + ", current: " + t + ")";
                        }
                        d = Arrays.copyOfRange(d, TVP.length, d.length);
                        decryptedText = concatByteArray(decryptedText, d);
                    }
                    //logger.debug("Add AVP Decrypted. Current index = " + i);
                    //AvpImpl avp = new AvpImpl(code, (short) flags, (int) vendor, rawData);
                    //avps.insertAvp(i, ByteBuffer.wrap(cc).order(ByteOrder.BIG_ENDIAN).getInt(), d, false, false);
                    //logger.debug("decryptedText = " + decryptedText.toString());
                    //logger.debug("decryptedText.size = " + decryptedText.length);
                    // Decode the plaintext back into an AVP set and splice its members
                    // into the message where the encrypted container was.
                    AvpSetImpl _avps = (AvpSetImpl)Utils.decodeAvpSet(decryptedText, 0);
                    //logger.debug("SIZE = " + _avps.size());
                    for (int j = 0; j < _avps.size(); j++) {
                        AvpImpl _a = (AvpImpl)_avps.getAvpByIndex(j);
                        //logger.debug("addAVP = " + _a.getCode());
                        avps.insertAvp(i, _a.getCode(), _a.getRawData(), _a.vendorID, _a.isMandatory, _a.isEncrypted);
                    }
                    // Remove the original encrypted container, now shifted by _avps.size().
                    avps.removeAvpByIndex(i + _avps.size());
                } catch (InvalidKeyException ex) {
                    //java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
                    logger.warn("diameterDecrypt InvalidKeyException");
                } catch (IllegalBlockSizeException ex) {
                    //java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
                    logger.warn("diameterDecrypt IllegalBlockSizeException");
                } catch (BadPaddingException ex) {
                    //java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
                    logger.warn("diameterDecrypt BadPaddingException");
                } catch (AvpDataException ex) {
                    //java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
                    logger.warn("diameterDecrypt AvpDataException");
                } catch (IOException ex) {
                    //java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
                    logger.warn("diameterDecrypt IOException");
                } catch (IllegalStateException ex) {
                    //java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
                    logger.warn("diameterDecrypt IllegalStateException");
                }
            } else if (privateKey instanceof ECPrivateKey) {
                logger.warn("EC algorithm not implemented");
                return "";
            } else {
                logger.warn("Unknown Private Key algorithm");
                return "";
            }
        } else if (a.getCode() == AVP_ENCRYPTED_GROUPED_INDEXED && a.isVendorId() && a.getVendorId() == VENDOR_ID) {
            // v3 container: recognized but not decryptable by this build.
            logger.warn("Diameter Decryption of Grouped Indexed Encrypted AVP is not supported by this SigFW version");
            return "";
        }
    }
    return "";
}
/**
* Method to encrypt Diameter message v2
*
* @param message Diameter message which will be encrypted
* @param publicKey Public Key used for message encryption
*/
public void diameterEncrypt_v2(Message message, PublicKey publicKey) throws InvalidKeyException {
// Encryption RSA
Cipher cipherRSA = null;
// Encryption EC
Cipher cipherAES_GCM = null;
// Encryption RSA
try {
cipherRSA = Cipher.getInstance("RSA/ECB/PKCS1Padding");
} catch (NoSuchAlgorithmException ex) {
java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
} catch (NoSuchPaddingException ex) {
java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
}
// Encryption EC
try {
cipherAES_GCM = Cipher.getInstance("AES/GCM/NoPadding", "SunJCE");
} catch (NoSuchAlgorithmException ex) {
java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
} catch (NoSuchPaddingException ex) {
java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
} catch (NoSuchProviderException ex) {
java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
}
//logger.debug("== diameterEncrypt_v2 ==");
AvpSet avps = message.getAvps();
AvpSet erAvp = avps.addGroupedAvp(AVP_ENCRYPTED_GROUPED, VENDOR_ID, false, true);
for (int i = 0; i < avps.size(); i++) {
Avp a = avps.getAvpByIndex(i);
//logger.debug("AVP[" + i + "] Code = " + a.getCode());
if (
a.getCode() != Avp.ORIGIN_HOST &&
a.getCode() != Avp.ORIGIN_REALM &&
a.getCode() != Avp.DESTINATION_HOST &&
a.getCode() != Avp.DESTINATION_REALM &&
a.getCode() != Avp.SESSION_ID &&
a.getCode() != Avp.ROUTE_RECORD &&
a.getCode() != AVP_ENCRYPTED &&
a.getCode() != AVP_ENCRYPTED_GROUPED
) {
erAvp.addAvp(a);
avps.removeAvpByIndex(i);
i
}
}
if (publicKey instanceof RSAPublicKey) {
try {
//byte[] d = a.getRawData();
byte [] d = Utils.encodeAvpSet(erAvp);
logger.debug("avps.size = " + erAvp.size());
logger.debug("plainText = " + d.toString());
logger.debug("plainText.size = " + d.length);
// SPI(version) and TVP(timestamp)
byte[] SPI = {0x00, 0x00, 0x00, 0x00}; // TODO
byte[] TVP = {0x00, 0x00, 0x00, 0x00};
long t = System.currentTimeMillis()/100; // in 0.1s
TVP[0] = (byte) ((t >> 24) & 0xFF);
TVP[1] = (byte) ((t >> 16) & 0xFF);
TVP[2] = (byte) ((t >> 8) & 0xFF);
TVP[3] = (byte) ((t >> 0) & 0xFF);
//Crypto.cipherRSA.init(Cipher.ENCRYPT_MODE, publicKey);
//byte[] cipherText = Crypto.cipherRSA.doFinal(b);
RSAPublicKey rsaPublicKey = (RSAPublicKey)publicKey;
cipherRSA.init(Cipher.ENCRYPT_MODE, publicKey);
int keyLength = rsaPublicKey.getModulus().bitLength() / 8;
byte[][] datas = splitByteArray(d, keyLength - 11 - 4);
byte[] cipherText = null;
for (byte[] b : datas) {
cipherText = concatByteArray(cipherText, cipherRSA.doFinal(concatByteArray(TVP, b)));
}
cipherText = concatByteArray(SPI, cipherText);
//logger.debug("Add AVP Grouped Encrypted. Current index");
avps.removeAvp(AVP_ENCRYPTED_GROUPED, VENDOR_ID);
avps.addAvp(AVP_ENCRYPTED_GROUPED, cipherText, VENDOR_ID, false, true);
} catch (IllegalBlockSizeException ex) {
//java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
logger.warn("diameterEncrypt_v2 IllegalBlockSizeException");
} catch (BadPaddingException ex) {
//java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
logger.warn("diameterEncrypt_v2 BadPaddingException");
} catch (IllegalStateException ex) {
//java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
logger.warn("diameterEncrypt_v2 IllegalStateException");
}
} else if (publicKey instanceof ECPublicKey) {
logger.warn("EC Public Key algorithm not implemented");
return;
} else {
logger.warn("Unknown Public Key algorithm");
return;
}
}
/**
 * Method to encrypt Diameter message v3
 *
 * Placeholder: the v3 encryption scheme is not available in this SigFW
 * build, so this method only emits a warning and leaves the message intact.
 *
 * @param message Diameter message which will be encrypted
 * @param publicKey Public Key used for message encryption
 */
public void diameterEncrypt_v3(Message message, PublicKey publicKey) throws InvalidKeyException {
    // Intentionally a no-op apart from the warning; message is not modified.
    logger.warn("diameterEncrypt_v3 is not supported by this SigFW version");
}
/**
 * Method remove from SCCP message duplicated TCAP signatures and verifies the TCAP signature.
 * Method currently is designed only for TCAP begin messages.
 *
 * The signature travels as an Invoke component with local operation code
 * {@link #OC_SIGNATURE} whose parameter holds [version, TVP, signature].
 * The TVP timestamp is checked against {@link #tcap_tvp_time_window}, all
 * signature components are removed (the message data is re-encoded), and
 * the signature is verified over calling GT + called GT + TVP + the
 * base64-encoded remaining components. Only RSA keys are supported.
 *
 * @param message SCCP message
 * @param tcb TCAP Begin Message
 * @param comps TCAP Components
 * @param publicKey Public Key
 * @return -1 no public key to verify signature, 0 signature does not match, 1 signature ok
 */
@Override
public int tcapVerify(SccpDataMessage message, TCBeginMessage tcb, Component[] comps, PublicKey publicKey) {
    // Per-call Signature instances: java.security.Signature is not thread-safe.
    Signature signatureRSA = null;
    Signature signatureECDSA = null;
    // Encryption RSA
    try {
        signatureRSA = Signature.getInstance("SHA256WithRSA");
    } catch (NoSuchAlgorithmException ex) {
        java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
    }
    // Encryption EC
    try {
        signatureECDSA = Signature.getInstance("SHA256withECDSA");
    } catch (NoSuchAlgorithmException ex) {
        java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
    }
    int signature_ok = 0;
    List<Integer> signed_index = new ArrayList<Integer>();
    for (int i = 0; i < comps.length; i++) {
        // find all signature components
        if (comps[i].getType() == ComponentType.Invoke) {
            Invoke inv = (Invoke) comps[i];
            // BUGFIX: was compared with '==' on boxed Long, which relies on the
            // Long autobox cache; use equals() for a proper value comparison.
            if (OC_SIGNATURE.equals(inv.getOperationCode().getLocalOperationCode())) {
                signed_index.add(i);
            }
        }
    }
    if (signed_index.size() > 0) {
        // read signature component
        InvokeImpl invSignature = (InvokeImpl)comps[signed_index.get(0)];
        Parameter p = invSignature.getParameter();
        Parameter[] pa;
        // Signature
        byte[] signatureBytes = null;
        long t_tvp = 0;
        if (p != null && p.getTagClass() == Tag.CLASS_UNIVERSAL) {
            pa = p.getParameters();
            // Reserved (currently not used) - Signature Version
            // TODO
            if (pa.length >= 1) {
            }
            // TVP
            if (pa.length >= 2) {
                // Build current time as big-endian 4-byte TVP (0.1 s units) ...
                byte[] TVP = {0x00, 0x00, 0x00, 0x00};
                long t = System.currentTimeMillis()/100; // in 0.1s
                TVP[0] = (byte) ((t >> 24) & 0xFF);
                TVP[1] = (byte) ((t >> 16) & 0xFF);
                TVP[2] = (byte) ((t >> 8) & 0xFF);
                TVP[3] = (byte) ((t >> 0) & 0xFF);
                t = 0;
                for (int i = 0; i < TVP.length; i++) {
                    t = ((t << 8) + (TVP[i] & 0xff));
                }
                // ... then decode the received TVP and compare within the window.
                TVP = pa[1].getData();
                for (int i = 0; i < TVP.length; i++) {
                    t_tvp = ((t_tvp << 8) + (TVP[i] & 0xff));
                }
                if (Math.abs(t_tvp-t) > tcap_tvp_time_window*10) {
                    logger.info("TCAP FW: TCAP verify signature. Wrong timestamp in TVP (received: " + t_tvp + ", current: " + t + ")");
                    return 0;
                }
            }
            // Signature
            if (pa.length >= 3) {
                if (pa[2].getTagClass() == Tag.CLASS_PRIVATE && pa[2].getTag() == Tag.STRING_OCTET) {
                    signatureBytes = pa[2].getData();
                }
            }
        }
        // remove all signature components
        // BUGFIX: the original iterated only up to the reduced length and copied at
        // the same index, which dropped trailing components and left null holes
        // whenever a signature component was not the last one. Use a separate
        // write index over the full input array instead.
        Component[] c = new Component[comps.length - signed_index.size()];
        int k = 0;
        for (int i = 0; i < comps.length; i++) {
            if (!signed_index.contains(i)) {
                c[k++] = comps[i];
            }
        }
        tcb.setComponent(c);
        AsnOutputStream aos = new AsnOutputStream();
        try {
            tcb.encode(aos);
        } catch (EncodeException ex) {
            java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
        }
        byte[] _d = aos.toByteArray();
        message.setData(_d);
        String dataToSign = "";
        // verify signature
        try {
            comps = c;
            dataToSign = message.getCallingPartyAddress().getGlobalTitle().getDigits()
                    + message.getCalledPartyAddress().getGlobalTitle().getDigits() + t_tvp;
            for (int i = 0; i < comps.length; i++) {
                AsnOutputStream _aos = new AsnOutputStream();
                try {
                    comps[i].encode(_aos);
                    dataToSign += Base64.getEncoder().encodeToString(_aos.toByteArray());
                } catch (EncodeException ex) {
                    java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
            if (publicKey instanceof RSAPublicKey) {
                signatureRSA.initVerify(publicKey);
                signatureRSA.update(dataToSign.getBytes());
                if (signatureBytes != null && signatureRSA.verify(signatureBytes)) {
                    signature_ok = 1;
                }
            } else if (publicKey instanceof ECPublicKey) {
                logger.warn("EC Public Key algorithm not implemented");
                signature_ok = 0;
            } else {
                logger.warn("Unknown Public Key algorithm");
                signature_ok = 0;
            }
        } catch (InvalidKeyException ex) {
            java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
        } catch (SignatureException ex) {
            java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
        }
        logger.debug("Removing TCAP Signed Data: " + dataToSign);
        if (signatureBytes != null) {
            logger.debug("Removing TCAP Signature: " + Base64.getEncoder().encodeToString(signatureBytes));
        }
    }
    return signature_ok;
}
/**
 * Method to add TCAP signature into SCCP message.
 * Method currently is designed only for TCAP begin messages.
 *
 * Appends one Invoke component with local operation code
 * {@link #OC_SIGNATURE} whose universal parameter wraps three private
 * octet-string parameters: [version ("v1"), TVP timestamp (0.1 s units),
 * RSA signature over calling GT + called GT + TVP + base64-encoded
 * original components]. If a signature component is already present the
 * message is left unchanged. Only RSA private keys are supported.
 *
 * @param message SCCP message
 * @param tcb TCAP Begin Message
 * @param comps TCAP Components
 * @param lmrt Long Message Rule Type, if UDT or XUDT should be send
 * @param keyPair Key Pair
 * @return Long Message Rule Type, if UDT or XUDT should be send
 */
@Override
public LongMessageRuleType tcapSign(SccpDataMessage message, TCBeginMessage tcb, Component[] comps, LongMessageRuleType lmrt, KeyPair keyPair) {
    // Per-call Signature instances: java.security.Signature is not thread-safe.
    Signature signatureRSA = null;
    Signature signatureECDSA = null;
    // Encryption RSA
    try {
        signatureRSA = Signature.getInstance("SHA256WithRSA");
    } catch (NoSuchAlgorithmException ex) {
        java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
    }
    // Encryption EC
    try {
        signatureECDSA = Signature.getInstance("SHA256withECDSA");
    } catch (NoSuchAlgorithmException ex) {
        java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
    }
    LongMessageRuleType l = lmrt;
    PrivateKey privateKey = keyPair.getPrivate();
    Component[] c = new Component[comps.length + 1];
    int i;
    boolean signed = false;
    for (i = 0; i < comps.length; i++) {
        c[i] = comps[i];
        // already signed check
        if (c[i].getType() == ComponentType.Invoke) {
            Invoke inv = (Invoke) comps[i];
            // BUGFIX: was compared with '==' on boxed Long, which relies on the
            // Long autobox cache; use equals() for a proper value comparison.
            if (OC_SIGNATURE.equals(inv.getOperationCode().getLocalOperationCode())) {
                signed = true;
            }
        }
    }
    if (!signed) {
        // Build the signature Invoke component (index i == comps.length).
        c[i] = new InvokeImpl();
        ((InvokeImpl)c[i]).setInvokeId(1l);
        OperationCode oc = TcapFactory.createOperationCode();
        oc.setLocalOperationCode(OC_SIGNATURE);
        ((InvokeImpl)c[i]).setOperationCode(oc);
        // Reserved (currently not used) - Signature Version
        // TODO
        Parameter p1 = TcapFactory.createParameter();
        p1.setTagClass(Tag.CLASS_PRIVATE);
        p1.setPrimitive(true);
        p1.setTag(Tag.STRING_OCTET);
        p1.setData("v1".getBytes());
        // TVP: current time encoded big-endian in 0.1 s units.
        byte[] TVP = {0x00, 0x00, 0x00, 0x00};
        long t = System.currentTimeMillis()/100; // in 0.1s
        TVP[0] = (byte) ((t >> 24) & 0xFF);
        TVP[1] = (byte) ((t >> 16) & 0xFF);
        TVP[2] = (byte) ((t >> 8) & 0xFF);
        TVP[3] = (byte) ((t >> 0) & 0xFF);
        long t_tvp = 0;
        for (int j = 0; j < TVP.length; j++) {
            t_tvp = ((t_tvp << 8) + (TVP[j] & 0xff));
        }
        Parameter p2 = TcapFactory.createParameter();
        p2.setTagClass(Tag.CLASS_PRIVATE);
        p2.setPrimitive(true);
        p2.setTag(Tag.STRING_OCTET);
        p2.setData(TVP);
        // Signature
        Parameter p3 = TcapFactory.createParameter();
        p3.setTagClass(Tag.CLASS_PRIVATE);
        p3.setPrimitive(true);
        p3.setTag(Tag.STRING_OCTET);
        try {
            // Signed data: calling GT digits + called GT digits + TVP value +
            // base64 of each original component's ASN.1 encoding, in order.
            String dataToSign = message.getCallingPartyAddress().getGlobalTitle().getDigits()
                    + message.getCalledPartyAddress().getGlobalTitle().getDigits() + t_tvp;
            for (i = 0; i < comps.length; i++) {
                AsnOutputStream _aos = new AsnOutputStream();
                try {
                    comps[i].encode(_aos);
                    dataToSign += Base64.getEncoder().encodeToString(_aos.toByteArray());
                } catch (EncodeException ex) {
                    java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
            byte[] signatureBytes = null;
            // RSA
            if (privateKey instanceof RSAPrivateKey) {
                signatureRSA.initSign(privateKey);
                signatureRSA.update(dataToSign.getBytes());
                signatureBytes = signatureRSA.sign();
            }
            else if (privateKey instanceof ECPrivateKey) {
                logger.warn("EC Public Key algorithm not implemented");
                return l;
            } else {
                logger.warn("Unknown Private Key algorithm");
                return l;
            }
            logger.debug("Adding TCAP Signed Data: " + dataToSign);
            logger.debug("Adding TCAP Signature: " + Base64.getEncoder().encodeToString(signatureBytes));
            p3.setData(signatureBytes);
            // Wrap [version, TVP, signature] in a universal constructed parameter
            // and re-encode the TCAP begin message into the SCCP payload.
            Parameter p = TcapFactory.createParameter();
            p.setTagClass(Tag.CLASS_UNIVERSAL);
            p.setTag(0x04);
            p.setParameters(new Parameter[] {p1, p2, p3});
            ((InvokeImpl)c[i]).setParameter(p);
            tcb.setComponent(c);
            AsnOutputStream aos = new AsnOutputStream();
            try {
                tcb.encode(aos);
            } catch (EncodeException ex) {
                java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
            }
            byte[] _d = aos.toByteArray();
            message.setData(_d);
        } catch (InvalidKeyException ex) {
            java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
        } catch (SignatureException ex) {
            java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    return l;
}
/**
 * Method to encrypt TCAP message.
 *
 * Prepends a 4-byte SPI (version) header to the ciphertext and a 4-byte TVP
 * timestamp (current time in 0.1 s units) inside every RSA plaintext block,
 * then RSA-encrypts the SCCP payload chunk by chunk and rebuilds the SCCP
 * message around the ciphertext. Only RSA keys are supported; EC and unknown
 * key types return the message unencrypted with a warning.
 *
 * @param message SCCP message
 * @param sccpMessageFactory SCCP message factory
 * @param publicKey Public Key
 * @param lmrt Long Message Rule Type, if UDT or XUDT should be send
 * @return AbstractMap.SimpleEntry<message, lmrt> - message and indicator if UDT or XUDT should be send
 */
@Override
public AbstractMap.SimpleEntry<SccpDataMessage, LongMessageRuleType> tcapEncrypt(SccpDataMessage message, MessageFactoryImpl sccpMessageFactory, PublicKey publicKey, LongMessageRuleType lmrt) {
    logger.debug("TCAP Encryption for SCCP Called GT = " + message.getCalledPartyAddress().getGlobalTitle().getDigits());
    // Cipher for the RSA key path below.
    Cipher cipherRSA = null;
    // Cipher reserved for the EC hybrid path, which is not implemented yet.
    Cipher cipherAES_GCM = null;
    try {
        cipherRSA = Cipher.getInstance("RSA/ECB/PKCS1Padding");
    } catch (NoSuchAlgorithmException ex) {
        java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
    } catch (NoSuchPaddingException ex) {
        java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
    }
    try {
        cipherAES_GCM = Cipher.getInstance("AES/GCM/NoPadding", "SunJCE");
    } catch (NoSuchAlgorithmException ex) {
        java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
    } catch (NoSuchPaddingException ex) {
        java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
    } catch (NoSuchProviderException ex) {
        java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
    }
    // Resulting rule type: switched to XUDT when the payload gets encrypted.
    LongMessageRuleType l = lmrt;
    try {
        // SPI(version) and TVP(timestamp)
        byte[] SPI = {0x00, 0x00, 0x00, 0x00}; // TODO
        byte[] TVP = {0x00, 0x00, 0x00, 0x00};
        long t = System.currentTimeMillis()/100; // in 0.1s
        // Pack the (truncated) 32-bit timestamp big-endian into the TVP.
        TVP[0] = (byte) ((t >> 24) & 0xFF);
        TVP[1] = (byte) ((t >> 16) & 0xFF);
        TVP[2] = (byte) ((t >> 8) & 0xFF);
        TVP[3] = (byte) ((t >> 0) & 0xFF);
        if (publicKey instanceof RSAPublicKey) {
            RSAPublicKey rsaPublicKey = (RSAPublicKey)publicKey;
            cipherRSA.init(Cipher.ENCRYPT_MODE, publicKey);
            int keyLength = rsaPublicKey.getModulus().bitLength() / 8;
            // PKCS#1 v1.5 consumes 11 bytes of padding per block; 4 more are
            // reserved for the TVP prepended to every plaintext chunk.
            byte[][] datas = splitByteArray(message.getData(), keyLength - 11 - 4);
            byte[] cipherText = null;
            for (byte[] b : datas) {
                cipherText = concatByteArray(cipherText, cipherRSA.doFinal(concatByteArray(TVP, b)));
            }
            // Plaintext SPI header precedes the concatenated ciphertext blocks.
            cipherText = concatByteArray(SPI, cipherText);
            SccpDataMessage m = sccpMessageFactory.createDataMessageClass0(message.getCalledPartyAddress(), message.getCallingPartyAddress(), cipherText, message.getOriginLocalSsn(), false, null, null);
            message = m;
            // Ciphertext is longer than the original payload, so XUDT is needed.
            l = LongMessageRuleType.XUDT_ENABLED;
        } else if (publicKey instanceof ECPublicKey) {
            logger.warn("EC algorithm not implemented");
            return new AbstractMap.SimpleEntry<>(message, lmrt);
        } else {
            logger.warn("Unknown Public Key algorithm");
            return new AbstractMap.SimpleEntry<>(message, lmrt);
        }
    } catch (InvalidKeyException ex) {
        java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
    } catch (IllegalBlockSizeException ex) {
        java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
    } catch (BadPaddingException ex) {
        java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
    }
    // BUG FIX: previously returned the caller-supplied lmrt here, discarding
    // the XUDT_ENABLED indicator set on the successful RSA path above.
    return new AbstractMap.SimpleEntry<>(message, l);
}
/**
 * Method to decrypt TCAP message.
 *
 * Strips the 4-byte SPI header from the SCCP payload, RSA-decrypts each
 * key-sized ciphertext block, verifies the 4-byte TVP timestamp prepended
 * to every plaintext block against the local clock, and rebuilds the SCCP
 * message with the concatenated plaintext.
 *
 * @param message SCCP message
 * @param sccpMessageFactory SCCP message factory
 * @param keyPair Key Pair
 * @return AbstractMap.SimpleEntry<message, result> - message and result indicator;
 *         an empty result string means no blocking, a non-empty string carries
 *         the firewall blocking reason (stale TVP timestamp)
 */
public AbstractMap.SimpleEntry<SccpDataMessage, String> tcapDecrypt(SccpDataMessage message, MessageFactoryImpl sccpMessageFactory, KeyPair keyPair) {
logger.debug("TCAP Decryption for SCCP Called GT = " + message.getCalledPartyAddress().getGlobalTitle().getDigits());
// Cipher for the RSA key path below.
Cipher cipherRSA = null;
// Cipher reserved for the EC hybrid path, which is not implemented yet.
Cipher cipherAES_GCM = null;
// Encryption RSA
try {
cipherRSA = Cipher.getInstance("RSA/ECB/PKCS1Padding");
} catch (NoSuchAlgorithmException ex) {
java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
} catch (NoSuchPaddingException ex) {
java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
}
// Encryption EC
try {
cipherAES_GCM = Cipher.getInstance("AES/GCM/NoPadding", "SunJCE");
} catch (NoSuchAlgorithmException ex) {
java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
} catch (NoSuchPaddingException ex) {
java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
} catch (NoSuchProviderException ex) {
java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
}
try {
// SPI(version) and TVP(timestamp) headers as written by tcapEncrypt.
byte[] SPI = {0x00, 0x00, 0x00, 0x00};
byte[] TVP = {0x00, 0x00, 0x00, 0x00};
byte[] data = null;
// Split off the leading SPI header; short payloads are used as-is.
if (message.getData().length >= SPI.length) {
SPI = Arrays.copyOfRange(message.getData(), 0, SPI.length);
data = Arrays.copyOfRange(message.getData(), SPI.length, message.getData().length);
} else {
data = message.getData();
}
PrivateKey privateKey = keyPair.getPrivate();
if (privateKey instanceof RSAPrivateKey) {
cipherRSA.init(Cipher.DECRYPT_MODE, privateKey);
RSAPublicKey rsaPublicKey = (RSAPublicKey) keyPair.getPublic();
int keyLength = rsaPublicKey.getModulus().bitLength() / 8;
// TODO verify SPI
// Ciphertext consists of keyLength-sized RSA blocks.
byte[][] datas = splitByteArray(data, keyLength);
byte[] decryptedText = null;
for (byte[] b : datas) {
byte[] d = cipherRSA.doFinal(b);
long t = System.currentTimeMillis() / 100; // in 0.1s
// Round-trip the current time through the 4-byte TVP encoding so it is
// truncated to the same 32-bit big-endian representation the sender
// used, making the comparison below consistent.
TVP[0] = (byte) ((t >> 24) & 0xFF);
TVP[1] = (byte) ((t >> 16) & 0xFF);
TVP[2] = (byte) ((t >> 8) & 0xFF);
TVP[3] = (byte) ((t >> 0) & 0xFF);
t = 0;
for (int i = 0; i < TVP.length; i++) {
t = ((t << 8) + (TVP[i] & 0xff));
}
// The first 4 plaintext bytes of every block are the sender's TVP.
TVP[0] = d[0];
TVP[1] = d[1];
TVP[2] = d[2];
TVP[3] = d[3];
long t_tvp = 0;
for (int i = 0; i < TVP.length; i++) {
t_tvp = ((t_tvp << 8) + (TVP[i] & 0xff));
}
// Freshness check: reject blocks older/newer than the configured window
// (tcap_tvp_time_window is in seconds; timestamps are in 0.1 s units).
if (Math.abs(t_tvp - t) > tcap_tvp_time_window * 10) {
return new AbstractMap.SimpleEntry<>(message, "SS7 FW: Blocked in decryption, Wrong timestamp in TVP (received: " + t_tvp + ", current: " + t + ")");
}
// Drop the TVP and append the block's payload to the plaintext.
d = Arrays.copyOfRange(d, TVP.length, d.length);
decryptedText = concatByteArray(decryptedText, d);
// NOTE(review): the message is rebuilt on every iteration; only the last
// iteration carries the complete plaintext. Looks intentional but wasteful.
SccpDataMessage m = sccpMessageFactory.createDataMessageClass0(message.getCalledPartyAddress(), message.getCallingPartyAddress(), decryptedText, message.getOriginLocalSsn(), false, null, null);
message = m;
}
} else if (privateKey instanceof ECPrivateKey) {
logger.warn("EC algorithm not implemented");
return new AbstractMap.SimpleEntry<>(message, "");
} else {
logger.warn("Unknown Private Key algorithm");
return new AbstractMap.SimpleEntry<>(message, "");
}
} catch (InvalidKeyException ex) {
logger.info("TCAP FW: TCAP decryption failed for SCCP Called GT: " + message.getCalledPartyAddress().getGlobalTitle().getDigits() + " InvalidKeyException: " + ex.getMessage());
//java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
} catch (IllegalBlockSizeException ex) {
logger.info("TCAP FW: TCAP decryption failed for SCCP Called GT: " + message.getCalledPartyAddress().getGlobalTitle().getDigits() + " IllegalBlockSizeException: " + ex.getMessage());
//java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
} catch (BadPaddingException ex) {
logger.info("TCAP FW: TCAP decryption failed for SCCP Called GT: " + message.getCalledPartyAddress().getGlobalTitle().getDigits() + " BadPaddingException: " + ex.getMessage());
//java.util.logging.Logger.getLogger(Crypto.class.getName()).log(Level.SEVERE, null, ex);
}
return new AbstractMap.SimpleEntry<>(message, "");
}
} |
package org.flymine.codegen;
// Most of this code originated in the ArgoUML project, which carries
// the following copyright: Copyright (c) 1996-99 The Regents of the
// University of California. All Rights Reserved. Permission to use,
// copy, modify, and distribute this software and its documentation
// without fee, and without a written agreement is hereby granted,
// provided that the above copyright notice and this paragraph appear
// in all copies. This software program and documentation are
// copyrighted by The Regents of the University of California. The
// software program and documentation are supplied "AS
// IS", without any accompanying services from The Regents. The Regents
// does not warrant that the operation of the program will be
// uninterrupted or error-free. The end-user understands that the program
// was developed for research purposes and is advised not to rely
// exclusively on the program for any reason. IN NO EVENT SHALL THE
// UNIVERSITY OF CALIFORNIA BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT,
// SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, INCLUDING LOST PROFITS,
// ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN IF
// THE UNIVERSITY OF CALIFORNIA HAS BEEN ADVISED OF THE POSSIBILITY OF
// SUCH DAMAGE. THE UNIVERSITY OF CALIFORNIA SPECIFICALLY DISCLAIMS ANY
// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE
// PROVIDED HEREUNDER IS ON AN "AS IS" BASIS, AND THE UNIVERSITY OF
// CALIFORNIA HAS NO OBLIGATIONS TO PROVIDE MAINTENANCE, SUPPORT,
// UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
import ru.novosoft.uml.xmi.XMIReader;
import ru.novosoft.uml.foundation.core.*;
import ru.novosoft.uml.foundation.data_types.*;
import ru.novosoft.uml.model_management.*;
import ru.novosoft.uml.foundation.extension_mechanisms.*;
import java.io.File;
import java.util.Iterator;
import java.util.HashSet;
import java.util.ArrayList;
import java.util.Collection;
import org.xml.sax.InputSource;
public class OJBModelOutput extends ModelOutput
{
// private File f;
private Collection fields = new HashSet();
private StringBuffer references, collections;
public OJBModelOutput(MModel mmodel) {
super(mmodel);
}
protected String generateAttribute (MAttribute attr) {
StringBuffer sb = new StringBuffer();
sb.append(INDENT + INDENT + "<field-descriptor name=\"")
.append(generateName(attr.getName()))
.append("\" column=\"")
.append(generateSqlCompatibleName(attr.getName()))
.append("\" jdbc-type=\"")
.append(generateOJBSqlType(attr.getType()))
.append("\" />\n");
return sb.toString();
}
protected String generateClassifier(MClassifier cls) {
StringBuffer sb = new StringBuffer();
sb.append(generateClassifierStart(cls))
.append((cls.isAbstract() || cls instanceof MInterface)
? new StringBuffer() : generateClassifierBody(cls))
.append(generateClassifierEnd(cls));
return sb.toString();
}
protected void generateFileStart(File path) {
initFile(path);
outputToFile(path, generateHeader());
}
protected void generateFile(MClassifier cls, File path) {
outputToFile(path, generate(cls));
}
protected void generateFileEnd(File path) {
outputToFile(path, generateFooter());
}
private String generateHeader() {
StringBuffer sb = new StringBuffer();
sb.append("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n")
.append("<!DOCTYPE descriptor-repository SYSTEM \"repository.dtd\" [\n")
.append("<!ENTITY internal SYSTEM \"repository_internal.xml\">\n]>\n\n")
.append("<descriptor-repository version=\"0.9.9\""
+ " isolation-level=\"read-uncommitted\">\n");
return sb.toString();
}
private String generateFooter () {
StringBuffer sb = new StringBuffer();
sb.append("&internal;\n</descriptor-repository>\n");
return sb.toString();
}
private StringBuffer generateClassifierStart (MClassifier cls) {
references = new StringBuffer();
collections = new StringBuffer();
String tableName = null;
Iterator parents = cls.getGeneralizations().iterator();
if (parents.hasNext()) {
MClassifier parent = (MClassifier) ((MGeneralization) parents.next()).getParent();
tableName = parent.getName();
} else {
tableName = cls.getName();
}
StringBuffer sb = new StringBuffer ();
sb.append(INDENT + "<class-descriptor class=\"")
.append(getPackagePath(cls))
.append(".")
.append(generateName(cls.getName()))
.append((cls.isAbstract() || cls instanceof MInterface)
? "" : "\" table=\"" + tableName)
.append("\">\n");
Collection ems = new ArrayList(); // extent members i.e. subclasses or implementations
if (cls instanceof MInterface) {
Collection deps = cls.getSupplierDependencies();
Iterator depIterator = deps.iterator();
while (depIterator.hasNext()) {
MDependency dep = (MDependency) depIterator.next();
if ((dep instanceof MAbstraction)) {
MClassifier mc = (MClassifier) dep.getClients().toArray()[0];
ems.add(getPackagePath(mc) + "." + mc.getName());
}
}
} else {
Collection specs = cls.getSpecializations();
Iterator specIterator = specs.iterator();
while (specIterator.hasNext()) {
MClassifier mc = (MClassifier) (((MGeneralization) specIterator.next()).getChild());
ems.add(getPackagePath(mc) + "." + mc.getName());
}
}
if (ems != null && ems.size() > 0) {
Iterator iter = ems.iterator();
while (iter.hasNext()) {
sb.append(INDENT + INDENT + "<extent-class class-ref=\"")
.append((String) iter.next())
.append("\" />\n");
}
}
return sb;
}
private StringBuffer generateClassifierBody(MClassifier cls) {
StringBuffer sb = new StringBuffer();
sb.append(INDENT + INDENT + "<field-descriptor name=\"id\"")
.append(" column=\"ID\"")
.append(" jdbc-type=\"INTEGER\"")
.append(" primarykey=\"true\"")
.append(" autoincrement=\"true\" />\n");
Iterator parents = cls.getGeneralizations().iterator();
if (parents.hasNext() || cls.getSpecializations().size() > 0) {
sb.append(INDENT + INDENT + "<field-descriptor")
.append(" name=\"ojbConcreteClass\"")
.append(" column=\"CLASS\"")
.append(" jdbc-type=\"VARCHAR\" />\n");
if (parents.hasNext()) {
MClassifier parent = (MClassifier) ((MGeneralization) parents.next()).getParent();
doAttributes(getAttributes(parent), sb);
doAssociations(parent.getAssociationEnds(), sb);
}
}
doAttributes(getAttributes(cls), sb);
doAssociations(cls.getAssociationEnds(), sb);
return sb;
}
void doAttributes(Collection c, StringBuffer sb) {
if (!c.isEmpty()) {
Iterator iter = c.iterator();
while (iter.hasNext()) {
sb.append(generate((MStructuralFeature) iter.next()));
}
}
}
void doAssociations(Collection c, StringBuffer sb) {
if (!c.isEmpty()) {
Iterator iter = c.iterator();
while (iter.hasNext()) {
MAssociationEnd ae = (MAssociationEnd) iter.next();
sb.append(generateAssociationEnd(ae, ae.getOppositeEnd()));
}
}
}
private StringBuffer generateClassifierEnd(MClassifier cls) {
fields.clear();
StringBuffer sb = new StringBuffer();
sb.append("" + references + collections)
.append(INDENT + "</class-descriptor>\n\n");
return sb;
}
private String generateAssociationEnd(MAssociationEnd ae1, MAssociationEnd ae2) {
//if (!(ae1.isNavigable() && ae2.isNavigable()))
if (!ae2.isNavigable()) {
return "";
}
StringBuffer sb = new StringBuffer();
MMultiplicity m1 = ae1.getMultiplicity();
String endName1 = ae1.getName();
String name1 = "";
if (endName1 != null && endName1.length() > 0) {
name1 = endName1;
} else {
name1 = generateClassifierRef(ae1.getType());
}
MMultiplicity m2 = ae2.getMultiplicity();
String endName2 = ae2.getName();
String name2 = "";
if (endName2 != null && endName2.length() > 0) {
name2 = endName2;
} else {
name2 = generateClassifierRef(ae2.getType());
}
// If one or zero of the other class
if ((MMultiplicity.M1_1.equals(m2) || MMultiplicity.M0_1.equals(m2)) && ae2.isNavigable()) {
if (!fields.contains(name2)) {
fields.add(name2);
sb.append(INDENT + INDENT + "<field-descriptor name=\"")
.append(generateNoncapitalName(name2))
.append("Id\" column=\"")
.append(generateNoncapitalName(name2))
.append("Id\" jdbc-type=\"INTEGER\" />\n");
references.append(INDENT + INDENT + "<reference-descriptor name=\"")
.append(generateNoncapitalName(name2))
.append("\" class-ref=\"")
.append(getPackagePath(ae2.getType()))
.append(".")
.append(generateClassifierRef(ae2.getType()))
.append("\" auto-update=\"true\"")
.append(">\n" + INDENT + INDENT + INDENT + "<foreignkey field-ref=\"")
.append(generateNoncapitalName(name2) + "Id")
.append("\" />\n" + INDENT + INDENT + "</reference-descriptor>\n");
}
} else if ((MMultiplicity.M1_N.equals(m2) || MMultiplicity.M0_N.equals(m2))
&& (MMultiplicity.M1_1.equals(m1) || MMultiplicity.M0_1.equals(m1))) {
// If more than one of the other class AND one or zero of this one
collections.append(INDENT + INDENT + "<collection-descriptor name=\"")
.append(generateNoncapitalName(name2))
.append("s\" element-class-ref=\"")
.append(getPackagePath(ae2.getType()))
.append(".")
.append(generateClassifierRef(ae2.getType()))
.append("\" auto-update=\"true\">\n")
.append(INDENT + INDENT + INDENT + "<inverse-foreignkey field-ref=\"")
.append(generateNoncapitalName(name1) + "Id")
.append("\"/>\n")
.append(INDENT + INDENT + "</collection-descriptor>\n");
} else {
// Else there must be many:many relationship
String joiningTableName = "";
if (name1.compareTo(name2) < 0) {
joiningTableName = generateCapitalName(name1) + generateCapitalName(name2);
} else {
joiningTableName = generateCapitalName(name2) + generateCapitalName(name1);
}
collections.append(INDENT + INDENT + "<collection-descriptor name=\"")
.append(generateNoncapitalName(name2))
.append("s\" element-class-ref=\"")
.append(getPackagePath(ae2.getType()))
.append(".")
.append(generateClassifierRef(ae2.getType()))
.append("\" auto-update=\"true\"")
.append(" indirection-table=\"")
.append(joiningTableName)
.append("\">\n")
.append(INDENT + INDENT + INDENT + "<fk-pointing-to-this-class column=\"")
// Name of this class's primary key in linkage table
.append(generateNoncapitalName(name1))
.append("Id\"/>\n")
.append(INDENT + INDENT + INDENT + "<fk-pointing-to-element-class column=\"")
// Name of related class's primary key in linkage table
.append(generateNoncapitalName(generateCapitalName(name2)))
.append("Id\"/>\n")
.append(INDENT + INDENT + "</collection-descriptor>\n");
}
return sb.toString();
}
private String generateOJBSqlType(MClassifier cls) {
String type = generateClassifierRef(cls);
if (type.equals("int")) {
return "INTEGER";
}
if (type.equals("String")) {
return "LONGVARCHAR";
}
if (type.equals("boolean")) {
return "INTEGER\" conversion=\""
+ "org.apache.ojb.broker.accesslayer.conversions.Boolean2IntFieldConversion";
}
if (type.equals("float")) {
return "FLOAT";
}
if (type.equals("Date")) {
return "DATE";
}
return type;
}
public static void main(String[] args) throws Exception {
if (args.length != 3) {
System.err.println("Usage: OJBModelOutput <project name> <input dir> <output dir>");
System.exit(1);
}
String projectName = args[0];
String inputDir = args[1];
String outputDir = args[2];
File xmiFile = new File(inputDir, projectName + "_.xmi");
InputSource source = new InputSource(xmiFile.toURL().toString());
File path = new File(outputDir, "repository_" + projectName.toLowerCase() + ".xml");
new OJBModelOutput(new XMIReader().parse(source)).output(path);
}
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.