answer
stringlengths
17
10.2M
/*** This is the Android Java Source File ***/
package com.archethought.cordovaplugins.buffers;

import org.apache.cordova.CallbackContext;
import org.apache.cordova.CordovaPlugin;
import org.apache.cordova.CordovaResourceApi;
import org.apache.cordova.PluginResult;
import android.content.Context;
import org.json.JSONArray;
import org.json.JSONException;
import java.util.ArrayList;

/**
 * Cordova plugin exposing a simple circular (ring) buffer of strings.
 *
 * Index 0 of the backing list is always the OLDEST item; pushing into a
 * full buffer evicts that oldest item before appending the new one.
 */
public class Circular extends CordovaPlugin {

    private ArrayList<String> buffer; // Backing store for the circular buffer.
    private int max;                  // Maximum number of items to allow in the buffer.

    /**
     * Default constructor: buffer holds at most 10 items.
     * FIXME: Make this constructor default to 2/s * 60s * 30m items.
     * (Left at 10 so existing callers relying on maxCount() == 10 keep working.)
     */
    public Circular() {
        this(10);
    }

    /**
     * Creates a buffer with a caller-chosen capacity (fulfils the original
     * "make another constructor" TODO; backward-compatible addition).
     *
     * @param max maximum number of items to retain; must be at least 1
     * @throws IllegalArgumentException if {@code max < 1}
     */
    public Circular(int max) {
        if (max < 1) {
            throw new IllegalArgumentException("max must be >= 1, got " + max);
        }
        this.max = max;
        this.buffer = new ArrayList<String>(max);
    }

    /*** PUBLIC FUNCTIONS ***/

    /**
     * Appends an item to the end of the buffer, first evicting the oldest
     * item when the buffer is already full.
     *
     * @param newItem item to append (stored as-is, may be null)
     */
    public void push(String newItem) {
        if (isFull()) {
            this.buffer.remove(0); // drop the oldest entry to make room
        }
        this.buffer.add(newItem);
    }

    /**
     * Removes and returns the oldest item.
     *
     * BUG FIX: the original used array syntax ({@code this.buffer[0]}) on an
     * ArrayList, which is not valid Java; {@code remove(0)} both fetches and
     * removes the head in one call.
     *
     * @return the oldest buffered item
     * @throws IndexOutOfBoundsException if the buffer is empty
     */
    public String pop() {
        return this.buffer.remove(0);
    }

    /**
     * Returns (without removing) the oldest item.
     * BUG FIX: same invalid array-index syntax as pop(); uses get(0).
     *
     * @return the oldest buffered item
     * @throws IndexOutOfBoundsException if the buffer is empty
     */
    public String peek() {
        return this.buffer.get(0);
    }

    /** @return current number of buffered items */
    public int count() {
        return this.buffer.size();
    }

    /** @return the configured maximum capacity */
    public int maxCount() {
        return this.max;
    }

    /** Discards all buffered items; the capacity is unchanged. */
    public void clear() {
        this.buffer = new ArrayList<String>(this.max);
    }

    /*** PRIVATE FUNCTIONS ***/

    /** @return true when the buffer holds exactly {@code max} items */
    private boolean isFull() {
        // Compare the size (number of items in the buffer) to the max allowed.
        return this.buffer.size() == this.max;
    }
}
package at.modalog.cordova.plugin.html2pdf;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;

import org.apache.cordova.CallbackContext;
import org.apache.cordova.CordovaPlugin;
import org.apache.cordova.PluginResult;
import org.json.JSONArray;
import org.json.JSONException;

import com.itextpdf.text.Document;
import com.itextpdf.text.DocumentException;
import com.itextpdf.text.Element;
import com.itextpdf.text.Image;
import com.itextpdf.text.PageSize;
import com.itextpdf.text.pdf.PdfWriter;

import android.R.bool;
import android.annotation.TargetApi;
import android.app.Activity;
import android.content.ActivityNotFoundException;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.media.MediaScannerConnection;
import android.media.MediaScannerConnection.OnScanCompletedListener;
import android.net.Uri;
import android.os.Build;
import android.os.Environment;
import android.os.Handler;
import android.print.PrintAttributes;
import android.print.PrintDocumentAdapter;
import android.print.PrintManager;
import android.print.PrintDocumentAdapter.LayoutResultCallback;
import android.print.PrintAttributes.MediaSize;
//import android.print.PrintDocumentInfo;
//import android.print.PrintDocumentInfo.Builder;
import android.print.PrintDocumentAdapter.WriteResultCallback;
import android.os.CancellationSignal;
import android.os.Bundle;
import android.print.PageRange;
import android.os.ParcelFileDescriptor;
import android.print.pdf.PrintedPdfDocument;
import android.util.SparseIntArray;
import android.graphics.pdf.PdfDocument.Page;
import android.graphics.Paint;
import android.graphics.Color;
import android.print.PrintAttributes.Resolution;
import android.graphics.pdf.PdfDocument;
import android.graphics.pdf.PdfDocument.PageInfo;
import android.printservice.PrintJob;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewGroup.LayoutParams;
import android.webkit.WebSettings;
import android.webkit.WebView;
import android.webkit.WebViewClient;
import android.util.Log;

/**
 * Cordova plugin that renders an HTML string to a PDF file.
 *
 * Two strategies, selected at runtime:
 * - Android &lt; 4.4: render the HTML in an off-screen WebView, screenshot it,
 *   slice the bitmap into A4-sized pages and assemble a PDF with iText
 *   (see {@link #loadContentIntoWebView}).
 * - Android &gt;= 4.4 (KitKat): draw the loaded WebView straight onto an
 *   {@link android.graphics.pdf.PdfDocument} page.
 *
 * The cordova callback is held open (NO_RESULT + keepCallback) until the
 * asynchronous page load / PDF write completes.
 */
@TargetApi(19)
public class Html2pdf extends CordovaPlugin {
    private static final String LOG_TAG = "Html2Pdf";

    private CallbackContext callbackContext;

    // change your path on the sdcard here
    private String publicTmpDir = ".at.modalog.cordova.plugin.html2pdf"; // prepending a dot "." would make it hidden
    private String tmpPdfName = "print.pdf";

    // set to true to see the webview (useful for debugging)
    private final boolean showWebViewForDebugging = true;

    PrintedPdfDocument mPdfDocument;
    WebView page;
    int totalPages = 1;

    /**
     * Constructor.
     */
    public Html2pdf() {
    }

    /**
     * Cordova entry point.
     *
     * @param action          only "create" is supported;
     *                        args[0] = HTML content, args[1] = output file name (optional)
     * @param args            JSON arguments from the JS side
     * @param callbackContext cordova callback, completed asynchronously
     * @return true if the action was recognized
     */
    @Override
    public boolean execute(String action, JSONArray args, CallbackContext callbackContext) throws JSONException {
        try {
            if (action.equals("create")) {
                if (showWebViewForDebugging) {
                    Log.v(LOG_TAG, "java create pdf from html called");
                    Log.v(LOG_TAG, "File: " + args.getString(1));
                    // Log.v(LOG_TAG, "Html: " + args.getString(0));
                    Log.v(LOG_TAG, "Html start:" + args.getString(0).substring(0, 30));
                    Log.v(LOG_TAG, "Html end:" + args.getString(0).substring(args.getString(0).length() - 30));
                }

                // BUG FIX: the original compared with `args.getString(1) != "null"`,
                // a reference comparison that is true for any freshly-built JSON
                // string — so a JS `null` (serialized as "null") was never filtered.
                String requestedName = args.getString(1);
                if (requestedName != null && !"null".equals(requestedName)) {
                    this.tmpPdfName = requestedName;
                }

                final Html2pdf self = this;
                final String content = args.optString(0, "<html></html>");
                this.callbackContext = callbackContext;

                cordova.getActivity().runOnUiThread(new Runnable() {
                    public void run() {
                        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) { // Android 4.4
                            /*
                             * Non-KitKat pdf creation (Android < 4.4):
                             * it will be an image-based pdf.
                             */
                            self.loadContentIntoWebView(content);
                        } else {
                            /*
                             * KitKat pdf creation using the android PdfDocument
                             * framework (Android >= 4.4).
                             */
                            // Create a WebView object specifically for printing.
                            page = new WebView(cordova.getActivity());
                            page.getSettings().setJavaScriptEnabled(false);
                            page.setDrawingCacheEnabled(true);
                            // Auto-scale the content to the webview's width.
                            page.getSettings().setLoadWithOverviewMode(true);
                            page.getSettings().setUseWideViewPort(true);
                            page.setInitialScale(0);
                            // Disable android text auto-fit behaviour.
                            page.getSettings().setLayoutAlgorithm(WebSettings.LayoutAlgorithm.NORMAL);

                            if (showWebViewForDebugging) {
                                page.setVisibility(View.VISIBLE);
                            } else {
                                page.setVisibility(View.INVISIBLE);
                            }

                            page.setWebViewClient(new WebViewClient() {
                                @Override
                                public boolean shouldOverrideUrlLoading(WebView view, String url) {
                                    return false;
                                }

                                @Override
                                public void onPageFinished(WebView webView, String url) {
                                    // A4 at 72 dpi is 595 x 842 points.
                                    PdfDocument pdfDocument = new PdfDocument();
                                    PageInfo pageInfo = new PageInfo.Builder(595, 842, 1).create();
                                    Page pdfPage = pdfDocument.startPage(pageInfo);
                                    pdfPage.getCanvas().setDensity(200);
                                    webView.draw(pdfPage.getCanvas());
                                    pdfDocument.finishPage(pdfPage);

                                    // BUG FIX: the original threw a RuntimeException from the
                                    // catch block (crashing inside a WebView callback) while its
                                    // finally block STILL reported Status.OK to JavaScript even
                                    // when the write had failed. Success is now reported only
                                    // after the file is written; failures report Status.ERROR.
                                    FileOutputStream out = null;
                                    try {
                                        File root = Environment.getExternalStorageDirectory();
                                        File file = new File(root, "webview.pdf");
                                        out = new FileOutputStream(file);
                                        pdfDocument.writeTo(out);

                                        PluginResult result = new PluginResult(PluginResult.Status.OK);
                                        result.setKeepCallback(false);
                                        self.callbackContext.sendPluginResult(result);
                                    } catch (Exception e) {
                                        Log.e(LOG_TAG, "Error generating file", e);
                                        PluginResult result =
                                                new PluginResult(PluginResult.Status.ERROR, "Error generating file");
                                        result.setKeepCallback(false);
                                        self.callbackContext.sendPluginResult(result);
                                    } finally {
                                        if (out != null) {
                                            try {
                                                out.close();
                                            } catch (IOException ignored) {
                                                // best effort close; result already reported
                                            }
                                        }
                                        pdfDocument.close();
                                    }
                                }
                            }); // end webview client

                            // Reverse engineer base url (assets/www) from the cordova webView url.
                            String baseURL = self.webView.getUrl();
                            baseURL = baseURL.substring(0, baseURL.lastIndexOf('/') + 1);

                            if (showWebViewForDebugging) {
                                cordova.getActivity().addContentView(page,
                                        new ViewGroup.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT));
                            }

                            page.loadDataWithBaseURL(baseURL, content, "text/html", "utf-8", null);
                        }
                    }
                });

                // Send "no-result" result to delay result handling until the
                // asynchronous rendering above finishes.
                PluginResult pluginResult = new PluginResult(PluginResult.Status.NO_RESULT);
                pluginResult.setKeepCallback(true);
                callbackContext.sendPluginResult(pluginResult);
                return true;
            }
            return false;
        } catch (JSONException e) {
            // TODO: signal JSON problem to JS
            //callbackContext.error("Problem with JSON");
            return false;
        }
    }

    /**
     * Clean up and close all open files.
     */
    @Override
    public void onDestroy() {
        // ToDo: clean up.
    }

    // LOCAL METHODS

    /**
     * Pre-KitKat path: loads the HTML into an off-screen WebView; once the page
     * has finished loading (plus a 500 ms settle delay) the view is rendered to
     * a bitmap, saved as a paged PDF, media-scanned and handed to a PDF viewer
     * app. The pending cordova callback is resolved with OK/ERROR accordingly.
     *
     * @param content HTML markup to render
     */
    private void loadContentIntoWebView(String content) {
        Activity ctx = cordova.getActivity();
        final WebView page = new Html2PdfWebView(ctx);
        final Html2pdf self = this;

        if (showWebViewForDebugging) {
            page.setVisibility(View.VISIBLE);
        } else {
            page.setVisibility(View.INVISIBLE);
        }

        page.getSettings().setJavaScriptEnabled(false);
        page.setDrawingCacheEnabled(true);
        page.getSettings().setLoadWithOverviewMode(false);
        page.getSettings().setUseWideViewPort(false);
        page.setInitialScale(100);
        // Disable android text auto-fit behaviour.
        page.getSettings().setLayoutAlgorithm(WebSettings.LayoutAlgorithm.NORMAL);

        page.setWebViewClient(new WebViewClient() {
            @Override
            public void onPageFinished(final WebView page, String url) {
                // Give the WebView a moment to finish layout before the screenshot.
                new Handler().postDelayed(new Runnable() {
                    @Override
                    public void run() {
                        // Slice the web screenshot into pages and save as pdf.
                        Bitmap b = getWebViewAsBitmap(page);
                        if (b != null) {
                            File tmpFile = self.saveWebViewAsPdf(b);
                            b.recycle();

                            // BUG FIX: the original called setDataAndNormalize(...)
                            // followed by setType("application/pdf"); Intent.setType
                            // clears any previously set data URI, so the viewer app
                            // received an intent without the file. The combined
                            // setter keeps both.
                            Intent pdfViewIntent = new Intent(Intent.ACTION_VIEW);
                            pdfViewIntent.setDataAndTypeAndNormalize(Uri.fromFile(tmpFile), "application/pdf");

                            // Remove the webview.
                            if (!self.showWebViewForDebugging) {
                                ViewGroup vg = (ViewGroup) (page.getParent());
                                vg.removeView(page);
                            }

                            // Add file to media scanner.
                            MediaScannerConnection.scanFile(
                                    self.cordova.getActivity(),
                                    new String[]{tmpFile.getAbsolutePath()},
                                    null,
                                    new OnScanCompletedListener() {
                                        @Override
                                        public void onScanCompleted(String path, Uri uri) {
                                            Log.v(LOG_TAG, "file '" + path + "' was scanned seccessfully: " + uri);
                                        }
                                    }
                            );

                            // Start the pdf viewer app (trigger the pdf view intent).
                            PluginResult result;
                            boolean success = false;
                            if (self.canHandleIntent(self.cordova.getActivity(), pdfViewIntent)) {
                                try {
                                    self.cordova.startActivityForResult(self, pdfViewIntent, 0);
                                    success = true;
                                } catch (ActivityNotFoundException e) {
                                    success = false;
                                }
                            }

                            if (success) {
                                // Send success result to cordova.
                                result = new PluginResult(PluginResult.Status.OK);
                                result.setKeepCallback(false);
                                self.callbackContext.sendPluginResult(result);
                            } else {
                                // Send error.
                                result = new PluginResult(PluginResult.Status.ERROR, "activity_not_found");
                                result.setKeepCallback(false);
                                self.callbackContext.sendPluginResult(result);
                            }
                        }
                    }
                }, 500);
            }
        });

        // Set base URI to the assets/www folder.
        String baseURL = webView.getUrl();
        baseURL = baseURL.substring(0, baseURL.lastIndexOf('/') + 1);

        /** We make it this small on purpose (is resized after page load has finished).
         * Making it small in the beginning has some effects on the html <body> (body
         * width will always remain 100 if not set explicitly). */
        if (!showWebViewForDebugging) {
            ctx.addContentView(page, new ViewGroup.LayoutParams(100, 100));
        } else {
            ctx.addContentView(page,
                    new ViewGroup.LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT));
        }

        page.loadDataWithBaseURL(baseURL, content, "text/html", "utf-8", null);
    }

    public static final String MIME_TYPE_PDF = "application/pdf";

    /**
     * Check if the supplied context can handle the given intent.
     *
     * @param context context whose PackageManager is queried
     * @param intent  intent to resolve
     * @return true when at least one activity can handle the intent
     */
    public boolean canHandleIntent(Context context, Intent intent) {
        PackageManager packageManager = context.getPackageManager();
        return (packageManager.queryIntentActivities(intent, PackageManager.MATCH_DEFAULT_ONLY).size() > 0);
    }

    /**
     * Takes a WebView and returns a Bitmap representation of it (takes a "screenshot").
     * Reports an ERROR PluginResult and returns null when the content has no size.
     *
     * @param view the (fully loaded) WebView; must be a {@link Html2PdfWebView}
     *             so the full content width is measurable
     * @return bitmap of the complete page content, or null on failure
     */
    public Bitmap getWebViewAsBitmap(WebView view) {
        Bitmap b;

        // Prepare drawing cache.
        view.setDrawingCacheEnabled(true);
        view.buildDrawingCache();

        // Get the dimensions of the view so we can re-layout the view at its
        // current size and create a bitmap of the same size.
        int width = ((Html2PdfWebView) view).getContentWidth();
        int height = view.getContentHeight();

        if (width == 0 || height == 0) {
            // Return error answer to cordova.
            String msg = "Width or height of webview content is 0. Webview to bitmap conversion failed.";
            Log.e(LOG_TAG, msg);
            PluginResult result = new PluginResult(PluginResult.Status.ERROR, msg);
            result.setKeepCallback(false);
            callbackContext.sendPluginResult(result);
            return null;
        }

        Log.v(LOG_TAG, "Html2Pdf.getWebViewAsBitmap -> Content width: " + width + ", height: " + height);

        // Cause the view to re-layout.
        view.measure(width, height);
        view.layout(0, 0, width, height);

        // Create a bitmap-backed Canvas to draw the view into.
        b = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
        Canvas c = new Canvas(b);

        // Draw the view into the canvas.
        view.draw(c);
        return b;
    }

    /**
     * Slices the screenshot into pages, merges those into a single pdf
     * and saves it in the publicly accessible /sdcard dir.
     *
     * @param screenshot full-page bitmap produced by {@link #getWebViewAsBitmap}
     * @return the written pdf file, or null on failure (error already reported
     *         to cordova)
     */
    private File saveWebViewAsPdf(Bitmap screenshot) {
        try {
            File sdCard = Environment.getExternalStorageDirectory();
            File dir = new File(sdCard.getAbsolutePath() + "/" + this.publicTmpDir + "/");
            dir.mkdirs();
            File file;
            FileOutputStream stream;

            // Create .nomedia file to avoid indexing tmp files.
            File noMediaFile = new File(dir.getAbsolutePath() + "/", ".nomedia");
            if (!noMediaFile.exists()) {
                noMediaFile.createNewFile();
            }

            double pageWidth = PageSize.A4.getWidth() * 0.85;   // width of the image is 85% of the page
            double pageHeight = PageSize.A4.getHeight() * 0.80; // max height of the image is 80% of the page
            double pageHeightToWidthRatio = pageHeight / pageWidth; // e.g.: 1.33 (4/3)

            Bitmap currPage;
            int totalSize = screenshot.getHeight();
            int currPos = 0;
            int currPageCount = 0;
            int sliceWidth = screenshot.getWidth();
            int sliceHeight = (int) Math.round(sliceWidth * pageHeightToWidthRatio);

            while (totalSize > currPos && currPageCount < 100) { // max 100 pages
                currPageCount++;
                Log.v(LOG_TAG, "Creating page nr. " + currPageCount);

                // Slice bitmap (last slice may be shorter than a full page).
                currPage = Bitmap.createBitmap(screenshot, 0, currPos, sliceWidth,
                        (int) Math.min(sliceHeight, totalSize - currPos));

                // Save page as png.
                stream = new FileOutputStream(new File(dir, "pdf-page-" + currPageCount + ".png"));
                currPage.compress(Bitmap.CompressFormat.PNG, 100, stream);
                stream.close();

                // Move current position indicator.
                currPos += sliceHeight;
                currPage.recycle();
            }

            // Create pdf.
            Document document = new Document();
            File filePdf = new File(sdCard.getAbsolutePath() + "/" + this.tmpPdfName); // change the output name of the pdf here

            // Create dirs if necessary.
            if (this.tmpPdfName.contains("/")) {
                File filePdfDir = new File(filePdf.getAbsolutePath().substring(0,
                        filePdf.getAbsolutePath().lastIndexOf("/"))); // get the dir portion
                filePdfDir.mkdirs();
            }

            PdfWriter.getInstance(document, new FileOutputStream(filePdf));
            document.open();
            for (int i = 1; i <= currPageCount; ++i) {
                file = new File(dir, "pdf-page-" + i + ".png");
                Image image = Image.getInstance(file.getAbsolutePath());
                image.scaleToFit((float) pageWidth, 9999);
                image.setAlignment(Element.ALIGN_CENTER);
                document.add(image);
                document.newPage();
            }
            document.close();

            // Delete tmp image files.
            for (int i = 1; i <= currPageCount; ++i) {
                file = new File(dir, "pdf-page-" + i + ".png");
                file.delete();
            }

            return filePdf;
        } catch (IOException e) {
            Log.e(LOG_TAG, "ERROR: " + e.getMessage());
            e.printStackTrace();

            // Return error answer to cordova.
            PluginResult result = new PluginResult(PluginResult.Status.ERROR, e.getMessage());
            result.setKeepCallback(false);
            callbackContext.sendPluginResult(result);
        } catch (DocumentException e) {
            Log.e(LOG_TAG, "ERROR: " + e.getMessage());
            e.printStackTrace();

            // Return error answer to cordova.
            PluginResult result = new PluginResult(PluginResult.Status.ERROR, e.getMessage());
            result.setKeepCallback(false);
            callbackContext.sendPluginResult(result);
        }

        Log.v(LOG_TAG, "Uncaught ERROR!");
        return null;
    }
}

/**
 * WebView subclass that exposes the full horizontal scroll range, so the
 * complete content width (not just the viewport width) can be screenshotted.
 */
class Html2PdfWebView extends WebView {
    public Html2PdfWebView(Context context) {
        super(context);
    }

    /** @return the full content width in pixels (horizontal scroll range) */
    public int getContentWidth() {
        return this.computeHorizontalScrollRange();
    }
}
package org.jetel.component; import java.io.IOException; import java.sql.BatchUpdateException; import java.sql.SQLException; import java.sql.Savepoint; import java.sql.Statement; import java.util.ArrayList; import java.util.Arrays; import java.util.regex.Pattern; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.jetel.connection.jdbc.ConnectionAction; import org.jetel.connection.jdbc.DBConnection; import org.jetel.connection.jdbc.SQLCloverStatement; import org.jetel.connection.jdbc.SQLCloverStatement.QueryType; import org.jetel.connection.jdbc.SQLUtil; import org.jetel.connection.jdbc.specific.DBConnectionInstance; import org.jetel.connection.jdbc.specific.JdbcSpecific.OperationType; import org.jetel.data.DataRecord; import org.jetel.data.DataRecordFactory; import org.jetel.data.Defaults; import org.jetel.database.IConnection; import org.jetel.exception.ComponentNotReadyException; import org.jetel.exception.ConfigurationProblem; import org.jetel.exception.ConfigurationStatus; import org.jetel.exception.JetelException; import org.jetel.exception.XMLConfigurationException; import org.jetel.exception.ConfigurationStatus.Priority; import org.jetel.exception.ConfigurationStatus.Severity; import org.jetel.graph.IGraphElement; import org.jetel.graph.InputPort; import org.jetel.graph.Node; import org.jetel.graph.OutputPort; import org.jetel.graph.Result; import org.jetel.graph.TransformationGraph; import org.jetel.metadata.DataFieldMetadata; import org.jetel.metadata.DataRecordMetadata; import org.jetel.util.AutoFilling; import org.jetel.util.SynchronizeUtils; import org.jetel.util.file.FileUtils; import org.jetel.util.joinKey.JoinKeyUtils; import org.jetel.util.property.ComponentXMLAttributes; import org.jetel.util.property.RefResFlag; import org.jetel.util.string.StringUtils; import org.w3c.dom.Element; /** * <h3>DatabaseOutputTable Component</h3> * <!-- This component performs DML operation on specified database table 
(insert/update/delete). --> * * <table border="1"> * <th>Component:</th> * <tr><td><h4><i>Name:</i></h4></td> * <td>DBOutputTable</td></tr> * <tr><td><h4><i>Category:</i></h4></td> * <td></td></tr> * <tr><td><h4><i>Description:</i></h4></td> * <td>This component performs specified DML operation (insert/update/delete) on specified database table.<br> * Parameter placeholder in DML statement is [?] - question mark</td></tr> * <tr><td><h4><i>Inputs:</i></h4></td> * <td>[0]- input records</td></tr> * <tr><td><h4><i>Outputs:</i></h4></td> * <td>[0] <i>optional</i> - records rejected by database. If in this metadata there is more fields then in input metadata * and last field is of type string, this field is filled by error message<br> * [1] <i>optional</i> - autogenerated columns: * <ul><li>autogenerated columns for <i>insert</i> statement (supported only for Oracle, MySQL, Db2 and Informix * databases, not supported in batch mode at all)</li> * <li>number of updated records in database and requested input fields for <i>update, delete</i> statement</li></ul> * Key record is generated for <b>each</b> input record. In case that * statement execution fails, requested fields are not filled.</td></tr> * <tr><td><h4><i>Comment:</i></h4></td> * <td></td></tr> * </table> * <br> * <table border="1"> * <th>XML attributes:</th> * <tr><td><b>type</b></td><td>"DB_OUTPUT_TABLE"</td></tr> * <tr><td><b>id</b></td><td>component identification</td></tr> * <tr><td><b>dbConnection</b></td><td>id of the Database Connection object to be used to access the database</td> * <tr><td><b>dbTable</b><br><i>optional</i></td><td>name of the DB table to populate data with</td> * <tr><td><b>sqlQuery</b><br><i>optional</i></td><td>allows specification of SQL query/DML statement to be executed against * database. It can consist of more then one query separated by semicolon [;]. Question marks [?] 
in the query text * are placeholders which are filled with values from input fields specified in <b>cloverFields</b> * attribute. If you have query in this form, <i>cloverFields</i> must be specified as well - it determines which input fields will * be used/mapped onto target fields. You can write query with direct mapping too: instead of placeholders use clover field's names * predated by dollar char [$]. In such form you can use mapping between generated keys and output record with this keys too. * Complete query should appear as follows:<br><ul> * <li><code>insert into mytable [(f1,f2,...,fn)] values (val1, $field2, ...,$fieldm ) returning $key := dbfield1, $field := dbfield2</code> - * where <i>f1,f2,...,fn,dbfield1,dbfield2</i> are database fields; <i>field2,.., fieldm</i> are input record fields * and <i>key, field</i> are key record fields. <b>This is valid for databases which can return more then * one columns in <i>getGeneratedKeys()</i> method (Oracle and db2 for the time beeing). </b> </li> * <li><code>insert into mytable [(f1,f2,...,fn)] values (val1, $field2, ...,$fieldm ) returning $key := auto_generated, $field := infield</code> - * where <i>f1,f2,...,fn</i> are database fields; <i>field2,.., fieldm, infield</i> are input record fields, * <i>auto_generated</i> is auto genereted column value returned by database and <i>key, field</i> are key record * fields. <b>This is valid for databases which returns one auto generated column in <i>getGeneratedKeys()</i> method * (MySql and Informix for the time being). </b> </li> * <li><code>delete from mytable where f1 = $field1 and ... 
fn = $fieldn returning $updated:=update_count, $field:=infield</code> - where <i>f1,..,fn</i> are database * fields, <i>field1,...,fieldn, infield</i> are input record fields, <i>updated, field</i> are output record's fields and * <i>update_count</i> means number of updated records in database by current statement</li> * <li><code>update mytable set f1 = $field1,...,fn=$fieldn where db1=$f1 returning $updated:=update_count, $field:=infield</code> - where <i>f1,..,fn, db1</i> are database * fields, <i>field1,...,fieldn, infield</i> are input record fields, <i>updated, field</i> are output record's fields and * <i>update_count</i> means number of updated records in database by current statement</li></ul> * <tr><td><b>url</b><br><i>optional</i></td><td>url location of the query. The query will be loaded from file referenced by the url. * Rules for extern query are the same as for <i>sqlQuery</i> parameter.</td> * <tr><td><b>charset </b><i>optional</i></td><td>encoding of extern query</td></tr> * <tr><td>&lt;SQLCode&gt;<br><i>optional<small>!!XML tag!!</small></i></td><td>This tag allows for embedding large SQL statement directly into graph.. See example below.</td></tr> * <tr><td><b>fieldMap</b><br><i>optional</i></td><td>Pairs of clover fields and db fields (cloverField=dbField) separated by :;| {colon, semicolon, pipe}.<br> * It specifies mapping from source (Clover's) fields to DB table fields if it isn't specified in <i>sqlQuery</i>. It should be used instead of <i>cloverFields</i> and <i>dbFields</i> * attributes, because it provides more clear mapping. If <i>fieldMap</i> attribute is found <i>cloverFields</i> and <i>dbFields</i> attributes are ignored. 
* <tr><td><b>dbFields</b><br><i>optional</i></td><td>delimited list of target table's fields to be populated<br> * Input fields are mapped onto target fields (listed) in the order they are present in Clover's record.</td> * <tr><td><b>commit</b><br><i>optional</i></td><td>determines how many records are in one db commit. Minimum 1, DEFAULT is 100.<br>If * MAX_INT is specified, it is considered as NEVER COMMIT - i.e. records are send to DB without every issuing commit. It can * be called later from within other component - for example DBExecute.</td> * <tr><td><b>cloverFields</b><br><i>optional</i></td><td>delimited list of input record's fields.<br>Only listed fields (in the order * they appear in the list) will be considered for mapping onto target table's fields. Combined with <b>dbFields</b> option you can * specify mapping from source (Clover's) fields to DB table fields. If no <i>dbFields</i> are specified, then #of <i>cloverFields</i> must * correspond to number of target DB table fields.</td> * <tr><td><b>autoGeneratedColumns</b><br><i>deprecated</i> - use <i>sqlQuery</i> extended form</td><td> * This attribute can be used for obtaining auto generated columns, but only in case that <i>sqlQuery</i> consist * of only <b>one</b> query. In other case construct queries with direct mapping. * <ul><li>For Oracle or Db2 database: names of database columns to be returned (for Db2 - entity columns)</li> * <li>For MySQL or Informix database: names of input record fields plus special field called "AUTO_GENERATED" to be returned</td></tr> * <tr><td><b>batchMode</b><br><i>optional</i></td><td>[Yes/No] determines whether to use batch mode for sending statements to DB, DEFAULT is No.<br> * <i>Note:If your database/JDBC driver supports this feature, switch it on as it significantly speeds up table population.</i></td> * </tr> * <tr><td><b>batchSize</b><br><i>optional</i></td><td>number - determines how many records will be sent to database in one batch update. Default is 25. 
* </td> * </tr> * <tr><td><b>maxErrors</b><br><i>optional</i></td><td>maximum number of allowed SQL errors. Default: 0 (zero). If exceeded, component stops with error. If set to <b>-1</b>(minus one) all errors are ignored.</td></tr> * <tr><td><b>errorAction</b><br><i>optional</i></td><td>ROLLBACK or COMMIT (case sensitive!!!). Default: COMMIT. * Action performed when exceeded maximum number of records or execution of the component is aborted.</td></tr> * </table> * * <h4>Example:</h4> * <pre>&lt;Node id="OUTPUT" type="DB_OUTPUT_TABLE" dbConnection="NorthwindDB" dbTable="employee_z"/&gt;</pre> * <br> * <pre>&lt;Node id="OUTPUT" type="DB_OUTPUT_TABLE" dbConnection="NorthwindDB" dbTable="employee_z" dbFields="f_name;l_name;phone"/&gt;</pre> * <i>Example above shows how to populate only selected fields within target DB table. It can be used for skipping target fields which * are automatically populated by DB (such as autoincremented fields).</i> * <br> * <pre>&lt;Node id="OUTPUT" type="DB_OUTPUT_TABLE" dbConnection="NorthwindDB" dbTable="employee_z" * dbFields="f_name;l_name" cloverFields="LastName;FirstName"/&gt;</pre> * <i>Example shows how to simply map Clover's LastName and FirstName fields onto f_name and l_name DB table fields. 
The order * in which these fields appear in Clover data record is not important.</i> * <br> * <pre>&lt;Node id="OUTPUT" type="DB_OUTPUT_TABLE" dbConnection="NorthwindDB" sqlQuery="insert into myemployee2 (FIRST_NAME,LAST_NAME,DATE,ID) values (?,?,sysdate,123)" * cloverFields="FirstName;LastName"/&gt;</pre> * <br> * <pre>&lt;Node id="OUTPUT" type="DB_OUTPUT_TABLE" dbConnection="NorthwindDB" cloverFields="FirstName;LastName"&gt; * &lt;SQLCode&gt; * insert into myemployee2 (FIRST_NAME,LAST_NAME,DATE,ID) values (?,?,sysdate,123) * &lt;/SQLCode&gt; * &lt;/Node&gt;</pre> * <i>Example below shows how to delete records in table using DBOutputTable component</i> * <pre>&lt;Node id="OUTPUT" type="DB_OUTPUT_TABLE" dbConnection="NorthwindDB" cloverFields="FirstName;LastName"&gt; * &lt;SQLCode&gt; * delete from myemployee2 where FIRST_NAME = ? and LAST_NAME = ? * &lt;/SQLCode&gt; * &lt;/Node&gt;</pre> * <br> * <i>Example below shows usage of "fieldMap" attribute </i> * <pre>&lt;Node dbConnection="DBConnection0" dbTable="employee_tmp" fieldMap= * "EMP_NO=emp_no;FIRST_NAME=first_name;LAST_NAME=last_name;PHONE_EXT=phone_ext" * id="OUTPUT" type="DB_OUTPUT_TABLE"/&gt;</pre> * <br> * <i>Examples below show how to get aoutogenerated columns</i> * <pre> * &lt;Node dbConnection="Connection1" id="OUTPUT" maxErrors="10" sqlQuery= * "INSERT INTO CLOVER_USER (U_ID,NAME ,CREATED) values ($EMP_NO, $FULL_NAME, $HIRE_DATE); * INSERT INTO MYEMPLOYEE (EMP_NO, FIRST_NAME, LAST_NAME,COUNTRY, SALARY, FULL_NAME) VALUES * ($EMP_NO, $FIRST_NAME, $LAST_NAME, $COUNTRY, $SALARY, $FULL_NAME) * RETURNING $id:=auto_generated, $Field2:=full_name;" type="DB_OUTPUT_TABLE"/&gt; * * &lt;Node dbConnection="Connection1" id="OUTPUT" maxErrors="10" sqlQuery= * "DELETE FROM CLOVER_USER WHERE U_ID = $EMP_NO; * INSERT INTO MYEMPLOYEE (ID, EMP_NO, FIRST_NAME, LAST_NAME,COUNTRY, SALARY, FULL_NAME) VALUES * (id_seq.nextval, $EMP_NO, $FIRST_NAME, $LAST_NAME, $COUNTRY, $SALARY, $FULL_NAME) * RETURNING $id:=ID, 
$Field2:=FIRST_NAME;" type="DB_OUTPUT_TABLE" errorAction="ROLLBACK"/&gt; * *&lt;Node autoGeneratedColumns="AUTO_GENERATED;FIRST_NAME;" dbConnection="DBConnection2" id="OUTPUT" maxErrors="10" * sqlQuery="INSERT INTO myemployee (EMP_NO, FIRST_NAME, LAST_NAME, PHONE_EXT, HIRE_DATE, DEPT_NO, JOB_CODE, JOB_GRADE, JOB_COUNTRY, SALARY, FULL_NAME) * VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)" type="DB_OUTPUT_TABLE"/&gt; * *&lt;Node autoGeneratedColumns="ID;FIRST_NAME;" batchMode="false" dbConnection="DBConnection1" * dbTable="MYEMPLOYEE" id="OUTPUT" maxErrors="10" sqlQuery="INSERT INTO myemployee VALUES (id_seq.nextval, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)" * type="DB_OUTPUT_TABLE"/&gt; * *<i>Example below show how to get number of updated records in database</i> * <pre> *&lt;Node batchMode="false" commit="10" dbConnection="Connection1" errorAction="ROLLBACK" id="OUTPUT" maxErrors="10" * sqlQuery="update customers set prijmeni =$LAST_NAME where klient_id_NO=$EMP_NO returning $id:=update_count, $Field2:=FULL_NAME;" * type="DB_OUTPUT_TABLE"/&gt; * </pre> * @author dpavlis, avackova (agata.vackova@javlinconsulting.cz) * @since September 27, 2002 * @revision $Revision$ * @created 22. 
July 2003 * @see org.jetel.database.AnalyzeDB */ public class DBOutputTable extends Node { public static final String XML_MAXERRORS_ATRIBUTE = "maxErrors"; public static final String XML_BATCHMODE_ATTRIBUTE = "batchMode"; public static final String XML_COMMIT_ATTRIBUTE = "commit"; public static final String XML_FIELDMAP_ATTRIBUTE = "fieldMap"; public static final String XML_CLOVERFIELDS_ATTRIBUTE = "cloverFields"; public static final String XML_DBFIELDS_ATTRIBUTE = "dbFields"; public static final String XML_SQLCODE_ELEMENT = "SQLCode"; public static final String XML_DBTABLE_ATTRIBUTE = "dbTable"; public static final String XML_DBCONNECTION_ATTRIBUTE = "dbConnection"; public static final String XML_SQLQUERY_ATRIBUTE = "sqlQuery"; public static final String XML_BATCHSIZE_ATTRIBUTE = "batchSize"; public static final String XML_URL_ATTRIBUTE = "url"; public static final String XML_CHARSET_ATTRIBUTE = "charset"; public static final String XML_AUTOGENERATEDCOLUMNS_ATTRIBUTE = "autoGeneratedColumns"; public static final String XML_ACTION_ON_ERROR = "errorAction"; public static final String XML_ATOMIC_RECORD_STATEMENT_ATTRIBUTE="atomicSQL"; public static final String STATEMENT_SEPARATOR = ";"; private static final Pattern CLOVER_FIELDS_PATTERN = Pattern.compile(Defaults.CLOVER_FIELD_REGEX);//$cloverField private DBConnection dbConnection; private DBConnectionInstance connection; private String dbConnectionName; private String dbTableName; private SQLCloverStatement[] statement; private String[] cloverFields; private String[] dbFields; private String[] sqlQuery; private int recordsInCommit; private int maxErrors; private boolean useBatch; private int batchSize; private int countError=0; private String[] autoGeneratedColumns = null; private boolean[] returnResult; private ConnectionAction errorAction = ConnectionAction.COMMIT; private boolean atomicSQL; private InputPort inPort; private OutputPort rejectedPort, keysPort; private DataRecord inRecord, rejectedRecord, 
keysRecord; private int recCount = 0; private int errorCodeFieldNum; private int errMessFieldNum; private int failedBatches; private Savepoint savepoint; private static final String SAVEPOINT_NAME = "svpnt"; public final static String COMPONENT_TYPE = "DB_OUTPUT_TABLE"; private final static int SQL_FETCH_SIZE_ROWS = 100; private final static int READ_FROM_PORT = 0; private final static int WRITE_REJECTED_TO_PORT = 0; private final static int WRITE_AUTO_KEY_TO_PORT = 1; private final static int RECORDS_IN_COMMIT = 100; private final static int RECORDS_IN_BATCH = 25; private final static int MAX_BATCH_SIZE = 10000; private final static int MAX_ALLOWED_ERRORS = 0; private final static int MAX_WARNINGS = 3; static Log logger = LogFactory.getLog(DBOutputTable.class); /** * Constructor for the DBInputTable object * * @param id Unique ID of component * @param dbConnectionName Name of Clover's database connection to be used for communicating with DB * @param dbTableName Name of target DB table to be populated with data * @since September 27, 2002 */ public DBOutputTable(String id, String dbConnectionName, String dbTableName) { this(id,dbConnectionName); this.dbTableName = dbTableName; } /** * @param id Unique ID of component * @param dbConnectionName Name of Clover's database connection to be used for communicating with DB * @param sqlQuery set of sql queries */ public DBOutputTable(String id, String dbConnectionName, String[] sqlQuery) { this(id,dbConnectionName); setSqlQuery(sqlQuery); } /** * Constructor for the DBInputTable object * @param id Unique ID of component * @param dbConnectionName Name of Clover's database connection to be used for communicating with DB * @param sqlQuery SQL query to be executed against DB - can be any DML command (INSERT, UPDATE, DELETE) * @param cloverFields Array of Clover field names (the input data) which should substitute DML command parameters (i.e. 
     "?")
     */
    @Deprecated
    public DBOutputTable(String id, String dbConnectionName, String sqlQuery, String[] cloverFields) {
        this(id, dbConnectionName, new String[]{sqlQuery});
        setCloverFields(cloverFields);
    }

    /**
     * Constructor for the DBInputTable object
     */
    DBOutputTable(String id, String dbConnectionName){
        super(id);
        this.dbConnectionName = dbConnectionName;
        this.dbTableName = null;
        cloverFields = null;
        dbFields = null;
        recordsInCommit = RECORDS_IN_COMMIT;
        maxErrors=MAX_ALLOWED_ERRORS;
        useBatch=false;
        batchSize=RECORDS_IN_BATCH;
    }

    /**
     * Sets the dBFields attribute of the DBOutputTable object
     *
     * @param dbFields The new dBFields value
     */
    public void setDBFields(String[] dbFields) {
        this.dbFields = dbFields;
    }

    /**
     * Sets the useBatch attribute of the DBOutputTable object
     *
     * @param batchMode The new useBatch value
     */
    public void setUseBatch(boolean batchMode) {
        this.useBatch = batchMode;
    }

    /**
     * Sets batch size - how many records are in batch which is sent
     * to DB at once.
     * @param batchSize
     */
    public void setBatchSize(int batchSize){
        this.batchSize=batchSize;
    }

    /**
     * Sets atomicSQL attribute. If true all sql statements form one record are executed in one transaction and commit is performed after each record
     * @param atomicSQL
     */
    public void setAtomicSQL(boolean atomicSQL) {
        this.atomicSQL = atomicSQL;
    }

    /**
     * Sets the cloverFields attribute of the DBOutputTable object
     *
     * @param cloverFields The new cloverFields value
     */
    public void setCloverFields(String[] cloverFields) {
        this.cloverFields = cloverFields;
    }

    public String[] getSqlQuery() {
        return sqlQuery;
    }

    /** Stores the given queries, silently dropping null/blank entries. */
    public void setSqlQuery(String[] sqlQuery) {
        // filter empty queries
        ArrayList<String> queries = new ArrayList<String>();
        for(int i = 0; i < sqlQuery.length; i++) {
            if (sqlQuery[i] != null && sqlQuery[i].trim().length() > 0) {
                queries.add(sqlQuery[i]);
            }
        }
        this.sqlQuery=queries.toArray(new String[queries.size()]);
    }

    /** Single-query convenience overload; an empty/null query clears the attribute. */
    public void setSqlQuery(String sqlQuery) {
        this.sqlQuery = StringUtils.isEmpty(sqlQuery) ? null : new String[] { sqlQuery };
    }

    /**
     * Description of the Method
     *
     * Resolves the DBConnection from the graph, prepares the rejected-record
     * template (locating the error-code/message fields) and assembles a default
     * INSERT statement from dbTable/dbFields when no explicit query was given.
     *
     * @exception ComponentNotReadyException Description of Exception
     * @since September 27, 2002
     */
    @Override
    public void init() throws ComponentNotReadyException {
        super.init();
        // get dbConnection from graph
        IConnection conn = getGraph().getConnection(dbConnectionName);
        if (conn == null) {
            throw new ComponentNotReadyException("Can't find DBConnection ID: " + dbConnectionName);
        }
        if (!(conn instanceof DBConnection)) {
            throw new ComponentNotReadyException("Connection with ID: " + dbConnectionName + " isn't instance of the DBConnection class.");
        }
        dbConnection = (DBConnection) conn;
        dbConnection.init();
        inPort = getInputPort(READ_FROM_PORT);
        rejectedPort = getOutputPort(WRITE_REJECTED_TO_PORT);
        rejectedRecord = rejectedPort != null ? DataRecordFactory.newRecord(rejectedPort.getMetadata()) : null;
        if (rejectedRecord != null) {
            rejectedRecord.init();
            errorCodeFieldNum = rejectedRecord.getMetadata().findAutoFilledField(AutoFilling.ERROR_CODE);
            errMessFieldNum = rejectedRecord.getMetadata().findAutoFilledField(AutoFilling.ERROR_MESSAGE);
            if (errMessFieldNum == -1) {
                // fallback: treat a single trailing string field (input fields + 1) as the error-message field
                DataRecordMetadata rejectedMetadata = rejectedPort.getMetadata();
                if (inPort.getMetadata().getNumFields() == rejectedMetadata.getNumFields() - 1
                        && rejectedMetadata.getField(rejectedMetadata.getNumFields() - 1).getType() == DataFieldMetadata.STRING_FIELD) {
                    errMessFieldNum = rejectedMetadata.getNumFields() - 1;
                }
            }
        }
        // create insert query from db table name
        if (sqlQuery == null) {
            sqlQuery = new String[1];
            // TODO Labels replace:
            if (dbFields != null) {
                sqlQuery[0] = SQLUtil.assembleInsertSQLStatement(dbTableName, dbFields, dbConnection.getJdbcSpecific());
            } else {
                sqlQuery[0] = SQLUtil.assembleInsertSQLStatement( inPort.getMetadata(), dbTableName, dbConnection.getJdbcSpecific());
            }
            // TODO Labels replace end
            // TODO Labels replace with:
//            // FIXME This also replaces escaped characters from dbTableName
//            // can lead to backslashes being consumed
//            String quotedTableName = StringUtils.stringToSpecChar(dbConnection.getJdbcSpecific().quoteIdentifier(dbTableName));
//            if (dbFields != null) {
//                String[] quotedDbFields = new String[dbFields.length];
//                for (int i = 0; i < dbFields.length; i++) {
//                    quotedDbFields[i] = dbConnection.getJdbcSpecific().quoteIdentifier(dbFields[i]);
//                    sqlQuery[0] = SQLUtil.assembleInsertSQLStatement(quotedTableName, quotedDbFields);
//            } else {
//                sqlQuery[0] = SQLUtil.assembleInsertSQLStatement(inPort.getMetadata(), quotedTableName);
            // TODO Labels replace with end
        }
        keysPort = getOutputPort(WRITE_AUTO_KEY_TO_PORT);
        returnResult = new boolean[sqlQuery.length];
        Arrays.fill(returnResult, false);
        keysRecord = keysPort != null ? DataRecordFactory.newRecord(keysPort.getMetadata()) : null;
        if (keysRecord != null) {
            keysRecord.init();
            keysRecord.reset();
        }
        // prepare set of statements
        statement = new SQLCloverStatement[sqlQuery.length];
        if (statement.length > 1 && autoGeneratedColumns != null) {
            logger.warn("Found more then one sql query and " + XML_AUTOGENERATEDCOLUMNS_ATTRIBUTE + " parameter. The last one will be ignored");
            autoGeneratedColumns = null;
        }
        // The rest of initialization the connection is required, so it is done in first run of preExecute
    }

    /**
     * Phase-dependent initialization: on the first run acquires the JDBC
     * connection, distributes the cloverFields over the '?' parameters of the
     * individual queries, builds the SQLCloverStatement wrappers, disables
     * generated-key retrieval where unsupported, clamps batchSize and aligns
     * recordsInCommit to a multiple of batchSize. On subsequent runs re-acquires
     * the connection (in batch mode) and resets statements and counters.
     */
    @Override
    public void preExecute() throws ComponentNotReadyException {
        super.preExecute();
        inRecord = DataRecordFactory.newRecord(inPort.getMetadata());
        inRecord.init();
        if (firstRun()) {// a phase-dependent part of initialization
            // create connection instance, which represents connection to a database
            try {
                connection = dbConnection.getConnection(getId(), OperationType.WRITE);
            } catch (JetelException e1) {
                throw new ComponentNotReadyException(e1);
            }
            // prepare rejectedRecord and keysRecord
            boolean supportsConnectionKeyGenaration = false;
            try {
                supportsConnectionKeyGenaration = dbConnection.getJdbcSpecific().supportsGetGeneratedKeys(connection.getSqlConnection().getMetaData());
            } catch (SQLException e1) {
                // TODO Auto-generated catch block
            }
            // slice cloverFields into per-query segments, one entry per '?' in each query
            int end, start = 0;
            String[] tmpCloverFields = null;
            for (int i = 0; i < statement.length; i++) {
                if (cloverFields != null) {
                    end = StringUtils.count(sqlQuery[i], '?');
                    tmpCloverFields = new String[end - start];
                    for (int j = 0; j < tmpCloverFields.length; j++) {
                        if (start + j >= cloverFields.length) {
                            throw new ComponentNotReadyException(this, XML_CLOVERFIELDS_ATTRIBUTE,
                                    "Missing parameter value for query " + StringUtils.quote(sqlQuery[i] + " , parameter number: " + (start + j + 1)));
                        }
                        tmpCloverFields[j] = cloverFields[start + j];
                    }
                    start = end;
                }
                statement[i] = new SQLCloverStatement(connection, sqlQuery[i], inRecord, tmpCloverFields, autoGeneratedColumns);
                if (statement[i].getQueryType() == QueryType.INSERT && statement[i].returnResult()) {
                    if (useBatch) {
                        logger.warn("Getting generated keys in batch mode is not supported -> switching it off !");
                        // strip the RETURNING clause and rebuild the statement without key retrieval
                        sqlQuery[i] = sqlQuery[i].substring(0, sqlQuery[i].toLowerCase().indexOf(SQLCloverStatement.RETURNING_KEY_WORD));
                        statement[i] = new SQLCloverStatement(connection, sqlQuery[i], inRecord, cloverFields);
                    } else if (!supportsConnectionKeyGenaration) {
                        logger.warn("DB indicates no support for getting generated keys -> switching it off !");
                        sqlQuery[i] = sqlQuery[i].substring(0, sqlQuery[i].toLowerCase().indexOf(SQLCloverStatement.RETURNING_KEY_WORD));
                        statement[i] = new SQLCloverStatement(connection, sqlQuery[i], inRecord, cloverFields);
                    }
                }
            }
            // check that what we require is supported
            try {
                if (useBatch && !connection.getSqlConnection().getMetaData().supportsBatchUpdates()) {
                    logger.warn("DB indicates no support for batch updates -> switching it off !");
                    useBatch = false;
                }
            } catch (SQLException e) {
            }
            if (keysRecord != null) {
                for (int i = 0; i < returnResult.length; i++) {
                    returnResult[i] = statement[i].returnResult();
                }
            }
            // when no one of queries returns something switch off key port
            if (keysRecord != null) {
                int i;
                for (i = 0; i < returnResult.length; i++) {
                    if (returnResult[i]) break;
                }
                if (i == returnResult.length) {
                    keysRecord = null;
                }
            }
            if (useBatch && batchSize > MAX_BATCH_SIZE) {
                logger.warn("Batch size greater then MAX_BATCH_SIZE -> " + XML_BATCHSIZE_ATTRIBUTE + " set to " + MAX_BATCH_SIZE);
                batchSize = MAX_BATCH_SIZE;
            }
            // it is probably wise to have COMMIT size multiplication of BATCH size
            // except situation when commit size is MAX_INTEGER -> we never commit in this situation;
            if (useBatch && recordsInCommit != Integer.MAX_VALUE && (recordsInCommit % batchSize != 0)) {
                int multiply = recordsInCommit / batchSize;
                recordsInCommit = (multiply + 1) * batchSize;
            }
            // init statements
            SQLCloverStatement eachStatement;
            for (int i = 0; i < statement.length; i++) {
                eachStatement = statement[i];
                eachStatement.setLogger(logger);
                try {
                    eachStatement.init();
                } catch (Exception e) {
                    throw new ComponentNotReadyException(this, e.getMessage(), e);
                }
            }
        } else {
            if (rejectedRecord != null) {
                rejectedRecord.reset();
            }
            if (keysRecord != null) {
                keysRecord.reset();
            }
            if (getGraph().getRuntimeContext().isBatchMode() && dbConnection.isThreadSafeConnections()) {
                try {
                    connection = dbConnection.getConnection(getId(), OperationType.WRITE);
                    for (SQLCloverStatement eachStatement : statement) {
                        try {
                            eachStatement.setConnection(connection);
                        } catch (Exception e) {
                            throw new ComponentNotReadyException(this, e.getMessage(), e);
                        }
                    }
                } catch (JetelException exception) {
                    throw new ComponentNotReadyException(exception);
                }
            }
            for (SQLCloverStatement eachStatement : statement) {
                try {
                    eachStatement.setInRecord(inRecord);
                    eachStatement.reset();
                } catch (Exception e) {
                    throw new ComponentNotReadyException(this, e.getMessage(), e);
                }
            }
            recCount = 0;
            countError = 0;
        }
    }

    @Override
    public synchronized void reset() throws ComponentNotReadyException {
        super.reset();
    }

    @Override
    public void postExecute() throws ComponentNotReadyException {
        super.postExecute();
        if (getGraph().getRuntimeContext().isBatchMode()) {
            // otherwise connection is closed in TransformationGraph.free()
            dbConnection.closeConnection(getId(), OperationType.WRITE);
        }
    }

    /**
     * @param dbTableName The dbTableName to set.
     */
    public void setDBTableName(String dbTableName) {
        this.dbTableName = dbTableName;
    }

    /**
     * Sets the recordsInCommit attribute of the DBOutputTable object
     *
     * @param nRecs The new recordsInCommit value
     */
    public void setRecordsInCommit(int nRecs) {
        if (nRecs > 0) {
            recordsInCommit = nRecs;
        }
    }

    /**
     * Main entry point: dispatches to batch or per-record processing and, on
     * interruption, applies the configured errorAction before closing statements.
     */
    @Override
    public Result execute() throws Exception {
        /*
         * Run main processing loop
         */
        try{
            if (useBatch){
                runInBatchMode();
            }else{
                runInNormalMode();
            }
        }catch (InterruptedException e) {
            if (errorAction == ConnectionAction.ROLLBACK) {
                errorAction.perform(connection.getSqlConnection());
                // NOTE(review): the inner if below re-tests the same condition as the enclosing branch,
                // so its else-arm is unreachable and the two rollback log lines are emitted twice —
                // looks like duplicated copy-paste; candidate for cleanup (behavior kept as-is here).
                if (errorAction == ConnectionAction.ROLLBACK) {
                    logger.info("Rollback performed.");
                    logger.info("Number of commited records: " + (recCount / recordsInCommit)*recordsInCommit);
                }else{
                    logger.info("Number of commited records: " + recCount);
                }
                logger.info("Rollback performed.");
                logger.info("Number of commited records: " + (recCount / recordsInCommit)*recordsInCommit);
            }else if (recordsInCommit!=Integer.MAX_VALUE){
                errorAction.perform(connection.getSqlConnection());
                logger.info("Number of commited records: " + recCount);
            }
        } finally {
            broadcastEOF();
            try {
                for (SQLCloverStatement eachStatement : statement) {
                    eachStatement.close();
                }
            } catch (SQLException exception) {
                logger.warn("SQLException when closing statement", exception);
            }
        }
        return runIt ? Result.FINISHED_OK : Result.ABORTED;
    }

    @Override
    public synchronized void free() {
        super.free();
//        if (dbConnection != null) {
//            dbConnection.free();
    }

    /**
     * Per-record processing: executes every statement for each input record,
     * routes failures to the rejected port, enforces the maxErrors limit and
     * commits every recordsInCommit records (or per record when atomicSQL).
     * Uses savepoints (issue #5711) so one failed statement does not discard
     * the uncommitted part of the transaction on DBs that need it.
     */
    private void runInNormalMode() throws SQLException,InterruptedException,IOException, JetelException{
        String errmes = "";
        SQLException exception = null;
        boolean useSavepoints = connection.getJdbcSpecific().useSavepoints();
        while ((inRecord = inPort.readRecord(inRecord)) != null && runIt) {
            if (keysRecord != null){
                keysRecord.reset();
            }
            //execute all statements
            for (int i=0; i<statement.length; i++) {
                try {
                    // Fix of issue #5711; For PostgresSQL we need to set SAVEPOINT for partial rollback in case of SQL exception
                    if (useSavepoints && !atomicSQL) {
                        try {
                            savepoint = connection.getSqlConnection().setSavepoint(SAVEPOINT_NAME);
                        } catch (SQLException e) {
                            logger.warn("Failed to set SAVEPOINT; rest of transaction may be lost", e);
                        }
                    }
                    statement[i].executeUpdate(returnResult[i] ? keysRecord : null);
                } catch(SQLException ex) {
                    countError++;
                    exception = ex;
                    errmes = "Exeption thrown by: " + statement[i].getQuery() + ". Message: " + ex.getMessage();
                    SQLException chain = ex.getNextException();
                    while (chain != null) {
                        errmes += "\n Caused by: " + chain.getMessage();
                        chain = chain.getNextException();
                    }
                    if (rejectedPort != null) {
                        rejectedRecord.copyFieldsByName(inRecord);
                        if (errMessFieldNum != -1) {
                            rejectedRecord.getField(errMessFieldNum).setValue(errmes);
                        }
                        if (errorCodeFieldNum != -1){
                            rejectedRecord.getField(errorCodeFieldNum).setValue(ex.getErrorCode());
                        }
                        rejectedPort.writeRecord(rejectedRecord);
                    }
                    if (countError <= MAX_WARNINGS) {
                        logger.warn(errmes);
                    }else if (countError == MAX_WARNINGS + 1){
                        logger.warn("more errors...");
                    }
                    // if atomicity of all sql statements is required, rollback current transaction and cancel executing following statements of this record
                    if (atomicSQL) {
                        connection.getSqlConnection().rollback();
                        logger.info("AtomicSQL is true. Rollback performed.");
                        break;
                    } else if (useSavepoints && savepoint != null) {
                        // Fix of issue #5711; For PostgresSQL rollback to last SAVEPOINT (which was set after last successful statement)
                        connection.getSqlConnection().rollback(savepoint);
                    }
                }
            }
            //send filled key record to output
            if (keysRecord != null) {
                keysPort.writeRecord(keysRecord);
            }
            //if number of errors is greater then allowed throw exception
            if (countError>maxErrors && maxErrors!=-1){
                //Perform commit or rollback
                errorAction.perform(connection.getSqlConnection());
                if (errorAction == ConnectionAction.ROLLBACK) {
                    logger.info("Rollback performed.");
                    logger.info("Number of commited records: " + (recCount / recordsInCommit)*recordsInCommit);
//                    logger.info("Last " + recCount % recordsInCommit + " records not commited");
                }else if (errorAction == ConnectionAction.COMMIT){
                    logger.info("Number of commited records: " + ++recCount);
                }
                throw new JetelException("Maximum # of errors exceeded when inserting record. "+ errmes, exception);
            }
            //if needed, commit
            if ((recordsInCommit!=Integer.MAX_VALUE && ++recCount % recordsInCommit == 0) || atomicSQL) {
                connection.getSqlConnection().commit();
            }
            SynchronizeUtils.cloverYield();
        }
        // end of records stream - final commits;
        // unless we have option never to commit, commit at the end of processing
        if (runIt && recordsInCommit!=Integer.MAX_VALUE){
            connection.getSqlConnection().commit();
        }else if (!runIt) {//component execution aborted
            errorAction.perform(connection.getSqlConnection());
            if (errorAction == ConnectionAction.ROLLBACK) {
                logger.info("Rollback performed.");
                logger.info("Number of commited records: " + (recCount / recordsInCommit)*recordsInCommit);
//                logger.info("Last " + recCount % recordsInCommit + " records not commited");
            }else if (errorAction == ConnectionAction.COMMIT){
                logger.info("Number of commited records: " + ++recCount);
            }
        }
    }

    /**
     * Batch processing: accumulates records into JDBC batches (one per
     * statement), keeps copies of the input records in dataRecordHolder so that
     * individual failures inside a batch can be reported on the rejected port,
     * and commits on recordsInCommit boundaries (flushing any partial batch first).
     */
    private void runInBatchMode() throws SQLException,InterruptedException,IOException, JetelException{
        int batchCount=0;
        int statementCount=0;
        String errmes = "";
        SQLException exception = null;
        DataRecordMetadata rejectedMetadata = null;
        if (rejectedPort != null) {
            rejectedMetadata = rejectedPort.getMetadata();
        }
        DataRecord[][] dataRecordHolder;  // [statement][position in current batch] copy of the input record
        int holderCount= -1;
        // first, we set transMap to batchUpdateMode
        for (SQLCloverStatement eachStatement : statement) {
            eachStatement.setBatchUpdate(true);
        }
        // if we have rejected records port connected, we will
        // store and report error records in batch
        if (rejectedPort!=null){
            dataRecordHolder=new DataRecord[statement.length][batchSize];
            for (int i = 0; i < statement.length; i++) {
                for (int j = 0; j < batchSize; j++) {
                    dataRecordHolder[i][j] = DataRecordFactory.newRecord(rejectedMetadata);
                    dataRecordHolder[i][j].init();
                }
            }
        }else{
            dataRecordHolder=null;
        }
        while ((inRecord = inPort.readRecord(inRecord)) != null && runIt) {
            if (keysRecord != null){
                keysRecord.reset();
            }
            holderCount++;
            for (statementCount = 0; statementCount < statement.length; statementCount++) {
                try{
                    statement[statementCount].addBatch(returnResult[statementCount] ? keysRecord : null);
                    //prepare prospective rejected record
                    if (dataRecordHolder!=null) {
                        dataRecordHolder[statementCount][holderCount].copyFieldsByName(inRecord);
                        if (errMessFieldNum != -1) {
                            dataRecordHolder[statementCount][holderCount].getField(errMessFieldNum).reset();
                        }
                        if (errorCodeFieldNum != -1){
                            dataRecordHolder[statementCount][holderCount].getField(errorCodeFieldNum).reset();
                        }
                    }
                }catch(SQLException ex){
                    countError++;
                    exception = ex;
                    errmes = "Exeption thrown by: " + statement[statementCount].getQuery() + ". Message: " + ex.getMessage();
                    //for this record statement won't be executed
                    SQLException chain = ex.getNextException();
                    while(chain!=null) {
                        errmes += "\n Caused by: "+chain.getMessage();
                        chain = chain.getNextException();
                    }
                    if (rejectedPort != null) {
                        // null marks "not added to batch"; flushErrorRecords re-creates the slot
                        dataRecordHolder[statementCount][holderCount] = null;
                        rejectedRecord.copyFieldsByName(inRecord);
                        if (errMessFieldNum != -1) {
                            rejectedRecord.getField(errMessFieldNum).setValue(errmes);
                        }
                        if (errorCodeFieldNum != -1){
                            rejectedRecord.getField(errorCodeFieldNum).setValue(ex.getErrorCode());
                        }
                        rejectedPort.writeRecord(rejectedRecord);
                    }
                    if (countError <= MAX_WARNINGS) {
                        logger.warn(errmes);
                    }else if (countError == MAX_WARNINGS + 1){
                        logger.warn("more errors...");
                    }
                }
            }
            if (countError>maxErrors && maxErrors!=-1){
                logger.info("Number of commited records: " + recCount);
                throw new JetelException("Maximum # of errors exceeded when inserting record. "+ errmes, exception);
            }
            // shall we execute batch ?
            if ((++batchCount % batchSize == 0) || atomicSQL) {
                executeBatch(dataRecordHolder, holderCount);
                batchCount = 0;
                holderCount = -1;
            }
            if ((++recCount % recordsInCommit == 0) || atomicSQL) {
                if (batchCount != 0) {
                    // flush the partial batch so the commit covers all records seen so far
                    executeBatch(dataRecordHolder, holderCount);
                    batchCount = 0;
                    holderCount = -1;
                }
                connection.getSqlConnection().commit();
            }
        }
        // final commit (if anything is left in batch)
        if (batchCount > 0) {
            executeBatch(dataRecordHolder, holderCount);
        }
        // unless we have option never to commit, commit at the end of processing
        if (runIt && recordsInCommit != Integer.MAX_VALUE) {
            connection.getSqlConnection().commit();
            if (failedBatches > 0) {
                logger.warn("Number of failed batches: " + failedBatches);
            }
        } else if (!runIt) {
            errorAction.perform(connection.getSqlConnection());
            if (errorAction == ConnectionAction.ROLLBACK) {
                logger.info("Rollback performed.");
                logger.info("Number of commited records: " + (recCount / recordsInCommit) * recordsInCommit);
                logger.info("Number of failed batches: " + failedBatches);
//                logger.info("Last " + recCount % recordsInCommit + " records not commited");
            } else if (errorAction == ConnectionAction.COMMIT) {
                logger.info("Number of commited records: " + recCount);
                logger.info("Number of failed batches: " + failedBatches);
            }
        }
//        if (dataRecordHolder!=null) {
//            Arrays.fill(dataRecordHolder,null);
//            Arrays.fill(exeptions, null);
    }

    /**
     * Executes the accumulated batch of every statement, emits any generated-key
     * records, reports per-row failures via flushErrorRecords and enforces the
     * maxErrors limit. A BatchUpdateException from one statement does not stop
     * the remaining statements' batches.
     */
    private void executeBatch(DataRecord[][] dataRecordHolder, int holderCount) throws SQLException, IOException, InterruptedException, JetelException {
        boolean exThrown = false;
        String errmes = "";
        DataRecord[] updatedRecord = null;
        BatchUpdateException exception = null;
        BatchUpdateException[] exceptions = new BatchUpdateException[statement.length];
        boolean useSavepoints = connection.getJdbcSpecific().useSavepoints();
        for (int statementCount = 0; statementCount < statement.length; statementCount++) {
            try {
                // Fix of issue #5711
                if (useSavepoints && !atomicSQL) {
                    try {
                        savepoint = connection.getSqlConnection().setSavepoint(SAVEPOINT_NAME);
                    } catch (SQLException e) {
                        logger.warn("Failed to set SAVEPOINT; rest of transaction may be lost", e);
                    }
                }
                statement[statementCount].executeBatch();
                updatedRecord = statement[statementCount].getBatchResult();
                statement[statementCount].clearBatch();
            } catch (BatchUpdateException ex) {
                updatedRecord = statement[statementCount].getBatchResult();
                statement[statementCount].clearBatch();
                exceptions[statementCount] = ex;
                exception = ex;
                errmes += "Exeption thrown by: " + statement[statementCount].getQuery() + ". Message: " + ex.getMessage() + "\n";
                if (ex.getNextException() != null) {
                    // With PostgreSQL, 1. exception is good for nothing, append next one
                    errmes += " Caused by: " + ex.getNextException().getMessage();
                }
                exThrown = true;
                if (useSavepoints && savepoint != null) {
                    connection.getSqlConnection().rollback(savepoint);
                }
            }
            // NOTE(review): keysPort is dereferenced without a null check here — presumably
            // getBatchResult() returns an empty array when no keys are requested; verify.
            for (int i = 0; i < updatedRecord.length; i++) {
                keysPort.writeRecord(updatedRecord[i]);
            }
        }
        // all statements executed, some of them could fail
        if (exThrown) {
            failedBatches++;
            countError++;
            if (countError <= MAX_WARNINGS) {
                logger.warn(errmes);
            } else if (countError == MAX_WARNINGS + 1) {
                logger.warn("more errors...");
            }
            flushErrorRecords(dataRecordHolder, holderCount, exceptions, rejectedPort);
            if (atomicSQL) {
                connection.getSqlConnection().rollback();
                logger.info("Atomic SQL is true. Rollback performed.");
            }
            if (countError > maxErrors && maxErrors != -1) {
                errorAction.perform(connection.getSqlConnection());
                if (errorAction == ConnectionAction.ROLLBACK) {
                    logger.info("Rollback performed.");
                    logger.info("Number of commited records: " + (recCount / recordsInCommit) * recordsInCommit);
                    logger.info("Number of failed batches: " + failedBatches);
//                    logger.info("Last " + batchCount + " records not commited");
                } else if (errorAction == ConnectionAction.COMMIT) {
                    logger.info("Number of commited records: " + ++recCount);
                    logger.info("Number of failed batches: " + failedBatches);
                }
                throw new JetelException("Maximum # of errors exceeded when executing batch. " + errmes, exception);
            }
        }
    }

    /**
     * This method sends error records to output and counts errors. If array <i>records</i> is null, only counting of errors is performed
     *
     * @param records potential rejected records
     * @param recCount number of records in batch
     * @param ex thrown exception
     * @param rejectedPort rejected Port
     * @throws IOException
     * @throws InterruptedException
     */
    private void flushErrorRecords(DataRecord[][] records,int recCount, BatchUpdateException[] ex, OutputPort rejectedPort) throws IOException,InterruptedException {
//        if (records==null) return;
        int[] updateCounts;
        int count;
        SQLException exception;
        StringBuilder message = new StringBuilder();
        //for each statement exception has occurred
        for (int i=0; i < ex.length; i++) {
            if (ex[i] != null) {
                exception = ex[i];
                updateCounts = ex[i].getUpdateCounts();
                count = 0;
                // rows covered by the driver's update counts: failed ones get the chained exception's details
                while(count<updateCounts.length){
                    if (updateCounts[count] == Statement.EXECUTE_FAILED) {
                        //increase error counter, fill rejected record and log error message
                        countError++;
                        if (records != null && records[i][count] != null) {
                            if (exception != null) {
                                if (errMessFieldNum != -1) {
                                    records[i][count].getField(errMessFieldNum).setValue("Exeption thrown by: " + statement[i].getQuery() + ". Message: " + exception.getMessage());
                                }
                                if (errorCodeFieldNum != -1){
                                    records[i][count].getField(errorCodeFieldNum).setValue(exception.getErrorCode());
                                }
                            }
                            if (exception != null && countError <= MAX_WARNINGS) {
                                logger.warn("Exeption thrown by: " + statement[i].getQuery() + ". Message: " + exception.getMessage());
                            } else if (exception == null && countError <= MAX_WARNINGS) {
                                logger.warn("Record not inserted to database");
                            } else if (countError == MAX_WARNINGS + 1) {
                                logger.warn("more errors...");
                            }
                            rejectedPort.writeRecord(records[i][count]);
                            if (exception != null) {
                                exception = exception.getNextException();
                            }
                        }else if (records != null){//records[i][count] == null - it wasn't added to batch, prepare for next batch
                            records[i][count] = DataRecordFactory.newRecord(rejectedPort.getMetadata());
                            records[i][count].init();
                        }
                    }
                    count++;
                }
                // flush rest of the records for which we don't have update counts
                message.setLength(0);
                Integer errCode = exception != null ? exception.getErrorCode() : null;
                while (exception != null) {
                    message.append(exception.getMessage());
                    exception = exception.getNextException();
                }
                while(count<recCount){
                    if (records != null && records[i][count] != null) {
                        if (message.length() > 0 && countError <= MAX_WARNINGS) {
                            logger.warn(message);
                        } else if (message.length() > 0 && countError == MAX_WARNINGS + 1) {
                            logger.warn("more errors...");
                        }
                        if (message.length() > 0 && i < recCount) {
                            if (errMessFieldNum != -1) {
                                records[i][count].getField(errMessFieldNum).setValue(message);
                            }
                            if (errorCodeFieldNum != -1){
                                records[i][count].getField(errorCodeFieldNum).setValue(errCode);
                            }
                        }
                        message = new StringBuilder("Record not inserted to database");
                        countError++;
                        rejectedPort.writeRecord(records[i][count]);
                    }else if (records != null){
                        records[i][count] = DataRecordFactory.newRecord(rejectedPort.getMetadata());
                        records[i][count].init();
                    }
                    count++;
                }
            }
        }
        //clear errors
        Arrays.fill(ex, null);
    }

    /**
     * Description of the Method
     *
     * @return Description of the Returned Value
     *
@since September 27, 2002 */ @Override public void toXML(Element xmlElement) { super.toXML(xmlElement); if (dbConnectionName != null) { xmlElement.setAttribute(XML_DBCONNECTION_ATTRIBUTE, dbConnectionName); } if (sqlQuery != null) { xmlElement.setAttribute(XML_SQLQUERY_ATRIBUTE, StringUtils.stringArraytoString(sqlQuery, Defaults.Component.KEY_FIELDS_DELIMITER.charAt(0))); } if (dbTableName != null) { xmlElement.setAttribute(XML_DBTABLE_ATTRIBUTE, dbTableName); } if (dbFields != null) { StringBuffer buf = new StringBuffer(dbFields[0]); for (int i=1; i< dbFields.length; i++ ) { buf.append(Defaults.Component.KEY_FIELDS_DELIMITER + dbFields[i]); } xmlElement.setAttribute(XML_DBFIELDS_ATTRIBUTE, buf.toString()); } if (cloverFields != null) { StringBuffer buf = new StringBuffer(cloverFields[0]); for (int i=1; i< cloverFields.length; i++ ) { buf.append(Defaults.Component.KEY_FIELDS_DELIMITER + cloverFields[i]); } xmlElement.setAttribute(XML_DBFIELDS_ATTRIBUTE, buf.toString()); } if (recordsInCommit > 0) { xmlElement.setAttribute(XML_COMMIT_ATTRIBUTE,String.valueOf(recordsInCommit)); } xmlElement.setAttribute(XML_BATCHMODE_ATTRIBUTE, String.valueOf(useBatch)); xmlElement.setAttribute(XML_BATCHSIZE_ATTRIBUTE, String.valueOf(batchSize)); xmlElement.setAttribute(XML_MAXERRORS_ATRIBUTE, String.valueOf(maxErrors)); if (autoGeneratedColumns != null) { StringBuilder tmp = new StringBuilder(autoGeneratedColumns[0]); for (int i = 1; i < autoGeneratedColumns.length; i++) { tmp.append(Defaults.Component.KEY_FIELDS_DELIMITER + autoGeneratedColumns[i]); } xmlElement.setAttribute(XML_AUTOGENERATEDCOLUMNS_ATTRIBUTE, tmp.toString()); } xmlElement.setAttribute(XML_ACTION_ON_ERROR, errorAction.toString()); xmlElement.setAttribute(XML_ATOMIC_RECORD_STATEMENT_ATTRIBUTE, String.valueOf(atomicSQL)); } /** * Description of the Method * * @param nodeXML Description of Parameter * @return Description of the Returned Value * @since September 27, 2002 */ public static Node 
fromXML(TransformationGraph graph, Element xmlElement) throws XMLConfigurationException { ComponentXMLAttributes xattribs = new ComponentXMLAttributes(xmlElement, graph); ComponentXMLAttributes xattribsChild; org.w3c.dom.Node childNode; DBOutputTable outputTable; try { // allows specifying parameterized SQL (with ? - question marks) if (xattribs.exists(XML_URL_ATTRIBUTE)){ outputTable = new DBOutputTable(xattribs.getString(XML_ID_ATTRIBUTE), xattribs.getString(XML_DBCONNECTION_ATTRIBUTE), xattribs.resolveReferences(FileUtils.getStringFromURL(graph.getRuntimeContext().getContextURL(), xattribs.getStringEx(XML_URL_ATTRIBUTE,RefResFlag.SPEC_CHARACTERS_OFF), xattribs.getString(XML_CHARSET_ATTRIBUTE, null))).split(STATEMENT_SEPARATOR)); }else if (xattribs.exists(XML_SQLQUERY_ATRIBUTE)) { outputTable = new DBOutputTable(xattribs.getString(XML_ID_ATTRIBUTE), xattribs.getString(XML_DBCONNECTION_ATTRIBUTE), xattribs.getString(XML_SQLQUERY_ATRIBUTE).split(STATEMENT_SEPARATOR)); }else if(xattribs.exists(XML_DBTABLE_ATTRIBUTE)){ outputTable = new DBOutputTable(xattribs.getString(XML_ID_ATTRIBUTE), xattribs.getString(XML_DBCONNECTION_ATTRIBUTE), xattribs.getString(XML_DBTABLE_ATTRIBUTE)); }else{ childNode = xattribs.getChildNode(xmlElement, XML_SQLCODE_ELEMENT); if (childNode == null) { throw new XMLConfigurationException(COMPONENT_TYPE + ":" + xattribs.getString(XML_ID_ATTRIBUTE," unknown ID ") + ": Can't find <SQLCode> node !"); } xattribsChild = new ComponentXMLAttributes((Element)childNode, graph); outputTable = new DBOutputTable(xattribs.getString(XML_ID_ATTRIBUTE), xattribs.getString(XML_DBCONNECTION_ATTRIBUTE), xattribsChild.getText(childNode).split(STATEMENT_SEPARATOR)); } if (xattribs.exists(XML_DBTABLE_ATTRIBUTE)) { outputTable.setDBTableName(xattribs.getString(XML_DBTABLE_ATTRIBUTE)); } if (xattribs.exists(XML_FIELDMAP_ATTRIBUTE)){ String[] pairs = StringUtils.split(xattribs.getStringEx(XML_FIELDMAP_ATTRIBUTE, RefResFlag.SPEC_CHARACTERS_OFF)); String[] cloverFields = 
				// NOTE(review): this chunk begins mid-statement inside fromXML(); the
				// method's opening (including the declaration "String[] cloverFields =")
				// lies outside this view, so the leading fragment is kept verbatim.
				new String[pairs.length];
				String[] dbFields = new String[pairs.length];
				String[] mapping;
				// Each entry of 'pairs' is a "cloverField=dbField" mapping string;
				// split it into the two parallel field arrays.
				for (int i = 0; i < pairs.length; i++) {
					mapping = JoinKeyUtils.getMappingItemsFromMappingString(pairs[i]);
					cloverFields[i] = mapping[0];
					dbFields[i] = mapping[1];
				}
				outputTable.setCloverFields(cloverFields);
				outputTable.setDBFields(dbFields);
			} else {
				// No combined mapping attribute: db fields and clover fields may be
				// given independently as delimiter-separated lists.
				if (xattribs.exists(XML_DBFIELDS_ATTRIBUTE)) {
					outputTable.setDBFields(xattribs.getString(XML_DBFIELDS_ATTRIBUTE).split(Defaults.Component.KEY_FIELDS_DELIMITER_REGEX));
				}
				if (xattribs.exists(XML_CLOVERFIELDS_ATTRIBUTE)) {
					outputTable.setCloverFields(xattribs.getString(XML_CLOVERFIELDS_ATTRIBUTE).split(Defaults.Component.KEY_FIELDS_DELIMITER_REGEX));
				}
			}
			// Optional tuning attributes below are applied only when present in XML.
			if (xattribs.exists(XML_COMMIT_ATTRIBUTE)) {
				outputTable.setRecordsInCommit(xattribs.getInteger(XML_COMMIT_ATTRIBUTE));
			}
			if (xattribs.exists(XML_BATCHMODE_ATTRIBUTE)) {
				outputTable.setUseBatch(xattribs.getBoolean(XML_BATCHMODE_ATTRIBUTE));
			}
			if (xattribs.exists(XML_BATCHSIZE_ATTRIBUTE)) {
				outputTable.setBatchSize(xattribs.getInteger(XML_BATCHSIZE_ATTRIBUTE));
			}
			if (xattribs.exists(XML_MAXERRORS_ATRIBUTE)){
				outputTable.setMaxErrors(xattribs.getInteger(XML_MAXERRORS_ATRIBUTE));
			}
			if (xattribs.exists(XML_AUTOGENERATEDCOLUMNS_ATTRIBUTE)){
				outputTable.setAutoGeneratedColumns(xattribs.getString(XML_AUTOGENERATEDCOLUMNS_ATTRIBUTE).split(Defaults.Component.KEY_FIELDS_DELIMITER_REGEX));
			}
			if (xattribs.exists(XML_ACTION_ON_ERROR)){
				outputTable.setErrorAction(xattribs.getString(XML_ACTION_ON_ERROR));
			}
			if (xattribs.exists(XML_ATOMIC_RECORD_STATEMENT_ATTRIBUTE)){
				outputTable.setAtomicSQL(xattribs.getBoolean(XML_ATOMIC_RECORD_STATEMENT_ATTRIBUTE));
			}
			return outputTable;
		} catch (Exception ex) {
			// Wrap any parsing failure with the component id for diagnostics.
			throw new XMLConfigurationException(COMPONENT_TYPE + ":" + xattribs.getString(XML_ID_ATTRIBUTE," unknown ID ") + ":" + ex.getMessage(),ex);
		}
	}

	/**
	 * Validates the component configuration: checks port counts, resolves and
	 * initializes the DB connection, assembles the INSERT statement when no
	 * explicit SQL was supplied, and dry-runs each SQLCloverStatement against
	 * the input metadata. The connection is always released in the finally block.
	 */
	@Override
	public ConfigurationStatus checkConfig(ConfigurationStatus status) {
		super.checkConfig(status);
		if(!checkInputPorts(status, 1, 1) || !checkOutputPorts(status, 0, 2)) {
			return status;
		}
		try {
			// init();
			// Resolve the connection by name from the graph; it must exist and
			// must be a DBConnection instance.
			IConnection conn = getGraph().getConnection(dbConnectionName);
			if (conn == null) {
				throw new ComponentNotReadyException(
						"Can't find DBConnection ID: " + dbConnectionName);
			}
			if (!(conn instanceof DBConnection)) {
				throw new ComponentNotReadyException("Connection with ID: "
						+ dbConnectionName
						+ " isn't instance of the DBConnection class.");
			}
			dbConnection = (DBConnection) conn;
			dbConnection.init();

			// create connection instance, which represents connection to a
			// database
			try {
				connection = dbConnection.getConnection(getId(), OperationType.WRITE);
			} catch (JetelException e1) {
				throw new ComponentNotReadyException(e1);
			}
			inPort = getInputPort(READ_FROM_PORT);
			connection.getJdbcSpecific().checkMetadata(status, getInMetadata(), this);
			if (sqlQuery == null) {
				// No user-supplied SQL: build an INSERT either from the explicit
				// dbFields list or from the input port metadata.
				sqlQuery = new String[1];
				// TODO Labels replace:
				if (dbFields != null) {
					sqlQuery[0] = SQLUtil.assembleInsertSQLStatement(
							dbTableName, dbFields, dbConnection.getJdbcSpecific());
				} else {
					sqlQuery[0] = SQLUtil.assembleInsertSQLStatement(inPort
							.getMetadata(), dbTableName, dbConnection.getJdbcSpecific());
				}
				// TODO Labels replace end
				// TODO Labels replace with:
				//	// FIXME This also replaces escaped characters from dbTableName
				//	// can lead to backslashes being consumed
				//	String quotedTableName = StringUtils.stringToSpecChar(dbConnection.getJdbcSpecific().quoteIdentifier(dbTableName));
				//	if (dbFields != null) {
				//		String[] quotedDbFields = new String[dbFields.length];
				//		for (int i = 0; i < dbFields.length; i++) {
				//			quotedDbFields[i] = dbConnection.getJdbcSpecific().quoteIdentifier(dbFields[i]);
				//		sqlQuery[0] = SQLUtil.assembleInsertSQLStatement(
				//				quotedTableName, quotedDbFields);
				//	} else {
				//		sqlQuery[0] = SQLUtil.assembleInsertSQLStatement(inPort
				//				.getMetadata(), quotedTableName);
				// TODO Labels replace with end
			}
			boolean supportsConnectionKeyGenaration = false;
			try {
				supportsConnectionKeyGenaration = connection.getJdbcSpecific()
						.supportsGetGeneratedKeys(
								connection.getSqlConnection().getMetaData());
			} catch (SQLException e1) {
				// TODO Auto-generated catch block
				// NOTE(review): exception silently swallowed — the flag then stays
				// false and generated-key support is switched off below. Presumably
				// intentional (treat "can't tell" as "not supported"); confirm.
			}
			if (inPort.getMetadata() != null) {
				inRecord = DataRecordFactory.newRecord(inPort.getMetadata());
				inRecord.init();
				// 'start'/'end' track how many '?' placeholders have been consumed
				// so that cloverFields can be sliced per query.
				int start = 0, end;
				for (int i = 0; i < sqlQuery.length; i++) {
					String[] tmpCloverFields = null;
					if (cloverFields != null) {
						end = StringUtils.count(sqlQuery[i], '?');
						tmpCloverFields = new String[end - start];
						for (int j = 0; j < tmpCloverFields.length; j++) {
							if (start + j >= cloverFields.length) {
								throw new ComponentNotReadyException(
										this,
										XML_CLOVERFIELDS_ATTRIBUTE,
										"Missing parameter value for query "
												+ StringUtils
														.quote(sqlQuery[i]
																+ " , parameter number: "
																+ (start + j + 1)));
							}
							tmpCloverFields[j] = cloverFields[start + j];
						}
						start = end;
					}
					SQLCloverStatement statement = new SQLCloverStatement(
							connection, sqlQuery[i], inRecord, tmpCloverFields,
							autoGeneratedColumns);
					// Generated-key retrieval is unsupported in batch mode and on
					// drivers that report no support; in both cases strip the
					// RETURNING clause and rebuild the statement without it.
					if (statement.getQueryType() == QueryType.INSERT
							&& statement.returnResult()) {
						if (useBatch) {
							logger
									.warn("Getting generated keys in batch mode is not supported -> switching it off !");
							sqlQuery[i] = sqlQuery[i]
									.substring(
											0,
											sqlQuery[i]
													.toLowerCase()
													.indexOf(
															SQLCloverStatement.RETURNING_KEY_WORD));
							statement = new SQLCloverStatement(connection,
									sqlQuery[i], inRecord, cloverFields);
						} else if (!supportsConnectionKeyGenaration) {
							logger
									.warn("DB indicates no support for getting generated keys -> switching it off !");
							sqlQuery[i] = sqlQuery[i]
									.substring(
											0,
											sqlQuery[i]
													.toLowerCase()
													.indexOf(
															SQLCloverStatement.RETURNING_KEY_WORD));
							statement = new SQLCloverStatement(connection,
									sqlQuery[i], inRecord, cloverFields);
						}
					}
					try {
						statement.init();
					} catch (SQLException e) {
						// "Almost not ready" maps to a WARNING (not ERROR) below.
						throw new ComponentAlmostNotReadyException(this, e);
					}
					try {
						try {
							keysPort = getOutputPort(WRITE_AUTO_KEY_TO_PORT);
							statement.checkConfig(status, keysPort == null ? null : keysPort.getMetadata(), this);
						} finally {
							// make sure we do not leak statements
							statement.close();
						}
					} catch (SQLException e) {
						//issue #5397
						//following piece of code was commented due confusing warning on DBOutputTable component for MySQL database
						//we are going to move this message to debug logging level
						// Probably only can't validate
						//ConfigurationProblem problem = new ConfigurationProblem(e
						//		.getMessage(), ConfigurationStatus.Severity.WARNING, this,
						//		ConfigurationStatus.Priority.NORMAL);
						//status.add(problem);
						logger.debug("CheckConfig warning: " + e.getMessage(), e);
					}
				}
			}
		} catch (UnsupportedOperationException uoe) {
			//it isn't possible to perform check config (for example some method of db driver throws the exception)
			//this means we don't know whether the configuration is valid or not
			ConfigurationProblem problem = new ConfigurationProblem("Cannot check the configuration of component. "
					+ "Used driver does not implement some required methods.", Severity.WARNING, this, Priority.NORMAL);
			problem.setCauseException(uoe);
			status.add(problem);
		} catch (ComponentAlmostNotReadyException e1) {
			ConfigurationProblem problem = new ConfigurationProblem(e1
					.getMessage(), ConfigurationStatus.Severity.WARNING, this,
					ConfigurationStatus.Priority.NORMAL);
			if (!StringUtils.isEmpty(e1.getAttributeName())) {
				problem.setAttributeName(e1.getAttributeName());
			}
			status.add(problem);
		} catch (ComponentNotReadyException e) {
			ConfigurationProblem problem = new ConfigurationProblem(e
					.getMessage(), ConfigurationStatus.Severity.ERROR, this,
					ConfigurationStatus.Priority.NORMAL);
			if (!StringUtils.isEmpty(e.getAttributeName())) {
				problem.setAttributeName(e.getAttributeName());
			}
			status.add(problem);
		} finally {
			// Always release the WRITE connection acquired for validation.
			if (dbConnection != null) {
				dbConnection.closeConnection(getId(), OperationType.WRITE);
			}
		}
		return status;
	}

	@Override
	public String getType(){
		return COMPONENT_TYPE;
	}

	/**
	 * @param maxErrors Maximum number of tolerated SQL errors during component run. Default: 0 (zero)
	 */
	public void setMaxErrors(int maxErrors) {
		this.maxErrors = maxErrors;
	}

	/**
	 * @param autoGeneratedColumns names of db columns to get back from database
	 */
	public void setAutoGeneratedColumns(String[] autoGeneratedColumns) {
		this.autoGeneratedColumns = autoGeneratedColumns;
	}

	public String[] getAutoGeneratedColumns() {
		return autoGeneratedColumns;
	}

	public void setErrorAction(ConnectionAction errorAction) {
		this.errorAction = errorAction;
	}

	/**
	 * @param errorAction name of a {@code ConnectionAction} enum constant;
	 *                    throws IllegalArgumentException for an unknown name.
	 */
	public void setErrorAction(String errorAction) {
		this.errorAction = ConnectionAction.valueOf(errorAction);
	}

	public void setDBConnection(String dbConnection) {
		this.dbConnectionName = dbConnection;
	}

	/**
	 * Marker exception: configuration problems that should be reported as a
	 * WARNING rather than an ERROR in checkConfig().
	 */
	static class ComponentAlmostNotReadyException extends ComponentNotReadyException {

		public ComponentAlmostNotReadyException(IGraphElement element, Exception ex) {
			super(element, ex);
		}
	}
}
package nars.vision;

import boofcv.alg.misc.ImageMiscOps;
import boofcv.core.image.ConvertBufferedImage;
import boofcv.io.webcamcapture.UtilWebcamCapture;
import boofcv.struct.image.ImageUInt8;
import boofcv.struct.image.MultiSpectral;
import com.github.sarxos.webcam.Webcam;
import com.gs.collections.api.map.primitive.IntFloatMap;
import com.gs.collections.api.map.primitive.IntObjectMap;
import com.gs.collections.impl.map.mutable.primitive.IntFloatHashMap;
import com.gs.collections.impl.map.mutable.primitive.IntObjectHashMap;
import georegression.struct.point.Point2D_I32;
import nars.NAR;
import nars.gui.NARSwing;
import nars.model.impl.Default;
import org.jboss.marshalling.util.IntMap;

import javax.swing.*;
import java.awt.*;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.image.BufferedImage;
import java.util.ArrayList;
import java.util.HashMap;

/**
 * Class for NARS Vision using a webcam with raster hierarchy representation.
 * Includes visualization. All relevant parameters can be adjusted in real time
 * and will update the visualization.
 *
 * @author James McLaughlin
 */
public class RasterHierarchy extends JPanel {
    // The number of rasters to calculate.
    int numberRasters;

    // The dimensions of the input frame.
    int frameWidth, frameHeight;

    // The number of blocks to divide the coarsest raster into.
    int divisions;

    // The scaling factor for each raster in the hierarchy.
    float scalingFactor;

    // The center of the region of focus
    Point2D_I32 focusPoint = new Point2D_I32();

    // Image for visualization
    BufferedImage workImage;

    // Window for visualization
    JFrame window;

    //holds multispectralization of input image
    transient private MultiSpectral<ImageUInt8> multiInputImg;

    private boolean running = true;

    /**
     * Configure the Raster Hierarchy
     *
     * @param numberRasters The number of rasters to generate
     * @param frameWidth    The desired size of the input stream
     * @param frameHeight   The desired height of the input stream
     * @param divisions     The number of blocks to divide the coarsest grained raster into
     * @param scalingFactor The scaling factor for each raster in the hierarchy.
     */
    public RasterHierarchy(int numberRasters, int frameWidth, int frameHeight, int divisions, float scalingFactor) {
        this.numberRasters = numberRasters;
        this.frameWidth = frameWidth;
        this.frameHeight = frameHeight;
        this.divisions = divisions;
        this.scalingFactor = scalingFactor;

        // Set the default focus to the center
        this.setFocus(frameWidth / 2, frameHeight / 2);

        window = new JFrame("Hierarchical Raster Vision Representation");
        window.setContentPane(this);
        window.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);

        addMouseListener(ma);
        addMouseMotionListener(ma);
    }

    // Click or drag inside the panel to move the focal point; panel
    // coordinates are scaled to frame coordinates.
    final MouseAdapter ma = new MouseAdapter() {

        protected void update(MouseEvent e) {
            float px = e.getX() / ((float) getWidth());
            float py = e.getY() / ((float) getHeight());
            setFocus(Math.round(px * frameWidth), Math.round(py * frameHeight));
        }

        @Override
        public void mousePressed(MouseEvent e) {
            update(e);
        }

        @Override
        public void mouseDragged(MouseEvent e) {
            update(e);
        }
    };

    /**
     * Set the focus to the given location. All rasters (other than the most coarse-grained) are centered on
     * this point.
     *
     * @param x The x-coordinate of the focal point
     * @param y The y-coordinate of the focal point
     */
    public void setFocus(int x, int y) {
        this.focusPoint.set(x, y);
    }

    // Feed data into NARS (and run the move heuristic) every 'updaterate' frames.
    int updaterate = 20;
    int cnt = 1;
    static int arrsz = 1000; //todo refine

    // Previous per-block normalized channel values, keyed by (step, x, y).
    IntFloatHashMap lastvalR = new IntFloatHashMap();
    IntFloatHashMap lastvalG = new IntFloatHashMap();
    IntFloatHashMap lastvalB = new IntFloatHashMap();
    // Per-block change votes accumulated during one frame.
    IntObjectHashMap<Value> voter = new IntObjectHashMap<>();

    /**
     * One vote of the focus-move heuristic: position of a block center,
     * the raster step it came from, and the change magnitude.
     */
    public class Value {
        public int x;
        public int y;
        public int r;
        public float value;

        public Value() {
        }

        /**
         * Record a vote.
         *
         * @param step raster step the block belongs to (stored in {@link #r})
         * @param x    block-center x
         * @param y    block-center y
         * @param vote change magnitude for this block
         */
        public void set(int step, int x, int y, float vote) {
            this.x = x;
            this.y = y;
            // FIX: original code self-assigned "this.r = r; this.value = value;",
            // discarding the step and vote parameters entirely.
            this.r = step;
            this.value = vote;
        }
    }

    /**
     * Generate the raster hierarchy for a given image.
     *
     * @param input The image to rasterize
     * @return The rasterized image.
     */
    public synchronized BufferedImage rasterizeImage(BufferedImage input) {
        voter.clear();

        boolean putin = false;
        // FIX: 'cnt' was never decremented, so the update branch never fired and
        // neither the Narsese input nor the focus-move heuristic ever ran.
        // Count down one per processed frame instead.
        if (--cnt <= 0) {
            putin = true;
            cnt = updaterate;
        }

        int red, green, blue;
        int redSum, greenSum, blueSum;
        int x, y, startX, startY;
        float newX, newY;

        int width = input.getWidth();
        int height = input.getHeight();

        float fblockXSize = width / divisions;
        float fblockYSize = height / divisions;

        multiInputImg = ConvertBufferedImage.convertFromMulti(input, multiInputImg, true, ImageUInt8.class);
        final ImageUInt8 ib0 = multiInputImg.getBand(0);
        final ImageUInt8 ib1 = multiInputImg.getBand(1);
        final ImageUInt8 ib2 = multiInputImg.getBand(2);

        MultiSpectral<ImageUInt8> output = new MultiSpectral<>(ImageUInt8.class, width, height, 3);
        BufferedImage rasterizedImage = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);

        // Set the initial raster region
        float regionWidth = width, regionHeight = height;
        newX = 0;
        newY = 0;
        startX = 0;
        startY = 0;

        for (int step = 1; step <= numberRasters; step++) {
            // For each step we need to reduce the dimensions of the area that is pixelated and
            // also reduce the block size.
            if (step > 1) {
                newX = startX + (regionWidth - regionWidth / scalingFactor) / scalingFactor;
                newY = startY + (regionHeight - regionHeight / scalingFactor) / scalingFactor;
                if (newX < 0) { newX = 0; }
                if (newY < 0) { newY = 0; }

                regionWidth = regionWidth / scalingFactor;
                regionHeight = regionHeight / scalingFactor;
                fblockXSize = fblockXSize / scalingFactor;
                fblockYSize = fblockYSize / scalingFactor;
                if (fblockXSize < 1) { fblockXSize = 1; }
                if (fblockYSize < 1) { fblockYSize = 1; }
            }

            // Set the starting point for the next step
            startX = Math.round(this.focusPoint.getX() - ((regionWidth) / 2));
            startY = Math.round(this.focusPoint.getY() - ((regionHeight) / 2));

            int blockXSize = Math.round(fblockXSize);
            int blockYSize = Math.round(fblockYSize);
            int pixelCount = blockXSize * blockYSize; // Number of pixels per block

            int h = 0, j = 0;

            for (x = Math.round(newX); x < ((step == 1 ? 0 : startX) + regionWidth); x += blockXSize) {
                h++;
                for (y = Math.round(newY); y < ((step == 1 ? 0 : startY) + regionHeight); y += blockYSize) {
                    j++;
                    redSum = 0;
                    greenSum = 0;
                    blueSum = 0;

                    // Average each channel over the block, clamped to the frame.
                    for (int pixelX = 0; (pixelX < blockXSize) && (x + pixelX < width); pixelX++) {
                        for (int pixelY = 0; (pixelY < blockYSize) && (y + pixelY < height); pixelY++) {
                            redSum += ib0.get(x + pixelX, y + pixelY);
                            greenSum += ib1.get(x + pixelX, y + pixelY);
                            blueSum += ib2.get(x + pixelX, y + pixelY);
                        }
                    }

                    red = redSum / pixelCount;
                    green = greenSum / pixelCount;
                    blue = blueSum / pixelCount;

                    float fred = red / 256.0f;
                    float fgreen = green / 256.0f; //was: red / 255f
                    float fblue = blue / 256.0f;   //was: blue/255f

                    // manage move heuristic
                    // NOTE(review): this key can collide for large x/y; confirm ranges.
                    int key = step + 10 * x + 10000 * y;

                    if (lastvalR.containsKey(key) && putin) {
                        float diff = Math.abs(fred - (lastvalR.get(key)))
                                + Math.abs(fgreen - (lastvalG.get(key)))
                                + Math.abs(fblue - (lastvalB.get(key)));
                        float vote = diff; // / area;
                        // vote *= step;

                        Value value = voter.get(key);
                        if (value == null) {
                            value = new Value();
                            voter.put(key, value);
                        }
                        value.set(step, x + blockXSize / 2, y + blockYSize / 2, vote);
                    }
                    lastvalR.put(key, fred);
                    lastvalG.put(key, fgreen);
                    lastvalB.put(key, fblue);

                    if (putin && step == numberRasters) {
                        //input Narsese translation, one statement for each band.
                        //ArrayList<String> nalStrings = new ArrayList<String>();
                        //nalStrings.add("<(*,r"+ String.valueOf(step)+","+String.valueOf(h)+","+String.valueOf(j)+") --> RED>. :|: %"+String.valueOf(fred)+System.getProperty("line.separator"));
                        //nalStrings.add("<(*,r" + String.valueOf(step) + "," + String.valueOf(h) + "," + String.valueOf(j) + ") --> GREEN>. :|: %" + String.valueOf(fgreen) + System.getProperty("line.separator"));
                        //nalStrings.add("<(*,r"+ String.valueOf(step)+","+String.valueOf(h)+","+String.valueOf(j)+") --> BLUE>. :|: %"+String.valueOf(fblue)+System.getProperty("line.separator"));

                        // Luma (ITU-R BT.601 weights), normalized to [0,1].
                        double dgray = 0.2989 * red + 0.5870 * green + 0.1140 * blue;
                        dgray /= 255.0;

                        //TODO create the Term / Task programmaticaly
                        nar.input("<(*,r" + String.valueOf(step) + "," + String.valueOf(h) + "," + String.valueOf(j) + ") --> GRAY>. :|: %" + String.valueOf(dgray) + System.getProperty("line.separator"));
                    }

                    ImageMiscOps.fillRectangle(output.getBand(0), red, x, y, blockXSize, blockYSize);
                    ImageMiscOps.fillRectangle(output.getBand(1), green, x, y, blockXSize, blockYSize);
                    ImageMiscOps.fillRectangle(output.getBand(2), blue, x, y, blockXSize, blockYSize);
                }
            }
        }

        //search for maximum vote to move heuristic
        if (putin) {
            final Value[] maxvalue = {null};
            float threshold = 0.05f;
            voter.forEachKeyValue((key, value) -> {
                if (maxvalue[0] == null || value.value > maxvalue[0].value) {
                    if (value.value > threshold)
                        maxvalue[0] = value;
                }
            });

            Value maxValue = maxvalue[0];
            if (maxValue != null && maxValue.x != 0 && maxValue.y != 0) {
                this.setFocus(maxValue.x, maxValue.y);
            }
        }

        ConvertBufferedImage.convertTo(output, rasterizedImage, true);
        return rasterizedImage;
    }

    /**
     * Invoke to start the main processing loop.
     */
    public void process() {
        Webcam webcam = UtilWebcamCapture.openDefault(frameWidth, frameHeight);

        // adjust the window size and let the GUI know it has changed
        Dimension actualSize = webcam.getViewSize();
        setPreferredSize(actualSize);
        setMinimumSize(actualSize);
        window.setMinimumSize(actualSize);
        window.setPreferredSize(actualSize);
        window.setVisible(true);

        BufferedImage input, buffered;
        workImage = new BufferedImage(actualSize.width, actualSize.height, BufferedImage.TYPE_INT_RGB);

        while (running) {
            /*
             * Uncomment this section to scan the focal point across the frame
             * automatically - just for demo purposes.
             */
            /*
            int xx = this.focusPoint.getX();
            int yy = this.focusPoint.getY();
            xx += 1;
            if(xx > frameWidth) {
                xx = 0;
                yy += 1;
                if (yy > frameHeight) yy = 0;
            }
            this.setFocus(xx, yy);
            */

            input = webcam.getImage();

            synchronized (workImage) {
                // copy the latest image into the work buffer
                Graphics2D g2 = workImage.createGraphics();
                buffered = this.rasterizeImage(input);
                g2.drawImage(buffered, 0, 0, null);
            }

            repaint();
        }
    }

    @Override
    public void paint(Graphics g) {
        if (workImage != null) {
            // draw the work image and be careful to make sure it isn't being manipulated at the same time
            synchronized (workImage) {
                ((Graphics2D) g).drawImage(workImage, 0, 0, getWidth(), getHeight(), null);
            }
        }
    }

    static NAR nar;

    public static void main(String[] args) {
        //RasterHierarchy rh = new RasterHierarchy(8, 640, 480, 12, 2);
        // RasterHierarchy rh = new RasterHierarchy(3, 640, 480, 5, 2);

        nar = new NAR(new Default.CommandLineNARBuilder(args));
        NARSwing swing = new NARSwing(nar);

        RasterHierarchy rh = new RasterHierarchy(6, 800, 600, 16, 1.619f);
        rh.process();
    }

    public int getNumberRasters() {
        return numberRasters;
    }

    public void setNumberRasters(int numberRasters) {
        this.numberRasters = numberRasters;
    }

    public int getDivisions() {
        return divisions;
    }

    public void setDivisions(int divisions) {
        this.divisions = divisions;
    }

    public float getScalingFactor() {
        return scalingFactor;
    }

    public void setScalingFactor(int scalingFactor) {
        this.scalingFactor = scalingFactor;
    }

    /** Float overload so fractional factors (e.g. 1.619f) can be set at runtime. */
    public void setScalingFactor(float scalingFactor) {
        this.scalingFactor = scalingFactor;
    }
}
package io.grpc.transport.netty;

import com.google.common.base.Preconditions;

import io.netty.channel.Channel;
import io.netty.channel.ChannelDuplexHandler;
import io.netty.channel.ChannelHandler;
import io.netty.channel.ChannelHandlerAdapter;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelPromise;
import io.netty.handler.codec.http.DefaultHttpRequest;
import io.netty.handler.codec.http.HttpClientCodec;
import io.netty.handler.codec.http.HttpClientUpgradeHandler;
import io.netty.handler.codec.http.HttpMethod;
import io.netty.handler.codec.http.HttpVersion;
import io.netty.handler.codec.http2.Http2ClientUpgradeCodec;
import io.netty.handler.codec.http2.Http2ConnectionHandler;
import io.netty.handler.ssl.SslContext;
import io.netty.handler.ssl.SslHandler;
import io.netty.handler.ssl.SslHandshakeCompletionEvent;
import io.netty.util.ByteString;
import io.netty.util.concurrent.Future;
import io.netty.util.concurrent.GenericFutureListener;

import java.net.InetSocketAddress;
import java.util.ArrayDeque;
import java.util.Queue;

import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLParameters;

/**
 * Common {@link ProtocolNegotiator}s used by gRPC.
 */
public final class ProtocolNegotiators {
  // Utility class; never instantiated.
  private ProtocolNegotiators() {
  }

  /**
   * Create a TLS handler for HTTP/2 capable of using ALPN/NPN.
   */
  public static ChannelHandler serverTls(SSLEngine sslEngine) {
    Preconditions.checkNotNull(sslEngine, "sslEngine");
    // 'false' = do not start TLS lazily; handshake begins on channel activation.
    return new SslHandler(sslEngine, false);
  }

  /**
   * Returns a {@link ProtocolNegotiator} that ensures the pipeline is set up so that TLS will
   * be negotiated, the {@code handler} is added and writes to the {@link Channel} may happen
   * immediately, even before the TLS Handshake is complete.
   */
  public static ProtocolNegotiator tls(final SslContext sslContext,
      final InetSocketAddress inetAddress) {
    Preconditions.checkNotNull(sslContext, "sslContext");
    Preconditions.checkNotNull(inetAddress, "inetAddress");

    // Bootstrap handler: builds the real SslHandler lazily (once a context with
    // an allocator is available) and then replaces itself in the pipeline.
    final ChannelHandler sslBootstrapHandler = new ChannelHandlerAdapter() {
      @Override
      public void handlerAdded(ChannelHandlerContext ctx) throws Exception {
        // TODO(nmittler): This method is currently unsupported for OpenSSL. Need to fix in Netty.
        SSLEngine sslEngine = sslContext.newEngine(ctx.alloc(),
            inetAddress.getHostName(), inetAddress.getPort());
        SSLParameters sslParams = new SSLParameters();
        // Enables hostname verification against the server certificate.
        sslParams.setEndpointIdentificationAlgorithm("HTTPS");
        sslEngine.setSSLParameters(sslParams);
        SslHandler sslHandler = new SslHandler(sslEngine, false);
        sslHandler.handshakeFuture().addListener(
            new GenericFutureListener<Future<? super Channel>>() {
              @Override
              public void operationComplete(Future<? super Channel> future) throws Exception {
                // If an error occurred during the handshake, throw it to the pipeline.
                future.get();
              }
            });
        ctx.pipeline().replace(this, "sslHandler", sslHandler);
      }
    };
    return new ProtocolNegotiator() {
      @Override
      public Handler newHandler(Http2ConnectionHandler handler) {
        return new BufferUntilTlsNegotiatedHandler(sslBootstrapHandler, handler);
      }
    };
  }

  /**
   * Returns a {@link ProtocolNegotiator} used for upgrading to HTTP/2 from HTTP/1.x.
   */
  public static ProtocolNegotiator plaintextUpgrade() {
    return new ProtocolNegotiator() {
      @Override
      public Handler newHandler(Http2ConnectionHandler handler) {
        // Register the plaintext upgrader
        Http2ClientUpgradeCodec upgradeCodec = new Http2ClientUpgradeCodec(handler);
        HttpClientCodec httpClientCodec = new HttpClientCodec();
        // 1000 = max length (bytes) of the aggregated upgrade response content.
        final HttpClientUpgradeHandler upgrader =
            new HttpClientUpgradeHandler(httpClientCodec, upgradeCodec, 1000);
        return new BufferingHttp2UpgradeHandler(upgrader);
      }
    };
  }

  /**
   * Returns a {@link ChannelHandler} that ensures that the {@code handler} is added to the
   * pipeline writes to the {@link Channel} may happen immediately, even before it is active.
   */
  public static ProtocolNegotiator plaintext() {
    return new ProtocolNegotiator() {
      @Override
      public Handler newHandler(Http2ConnectionHandler handler) {
        return new BufferUntilChannelActiveHandler(handler);
      }
    };
  }

  /**
   * Buffers all writes until either {@link #writeBufferedAndRemove(ChannelHandlerContext)} or
   * {@link #failBufferedAndClose(ChannelHandlerContext)} is called. This handler allows us to
   * write to a {@link Channel} before we are allowed to write to it officially i.e.
   * before it's active or the TLS Handshake is complete.
   */
  private abstract static class AbstractBufferingHandler extends ChannelDuplexHandler {

    // Handlers to install ahead of this one on first registration; nulled after use.
    private ChannelHandler[] handlers;
    // Pending writes; 'null' doubles as the "this handler is done buffering" flag.
    private Queue<ChannelWrite> bufferedWrites = new ArrayDeque<ChannelWrite>();
    private boolean writing;
    private boolean flushRequested;

    /**
     * @param handlers the ChannelHandlers are added to the pipeline on channelRegistered and
     *                 before this handler.
     */
    AbstractBufferingHandler(ChannelHandler... handlers) {
      this.handlers = handlers;
    }

    @Override
    public void channelRegistered(ChannelHandlerContext ctx) throws Exception {
      /**
       * This check is necessary as a channel may be registered with different event loops during it
       * lifetime and we only want to configure it once.
       */
      if (handlers != null) {
        ctx.pipeline().addFirst(handlers);
        handlers = null;
      }
      super.channelRegistered(ctx);
    }

    @Override
    public void channelInactive(ChannelHandlerContext ctx) throws Exception {
      failBufferedAndClose(ctx);
      super.channelInactive(ctx);
    }

    @Override
    public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise)
        throws Exception {
      /**
       * This check handles a race condition between Channel.write (in the calling thread) and the
       * removal of this handler (in the event loop thread).
       * The problem occurs in e.g. this sequence:
       * 1) [caller thread] The write method identifies the context for this handler
       * 2) [event loop] This handler removes itself from the pipeline
       * 3) [caller thread] The write method delegates to the invoker to call the write method in
       *    the event loop thread. When this happens, we identify that this handler has been
       *    removed with "bufferedWrites == null".
       */
      if (bufferedWrites == null) {
        super.write(ctx, msg, promise);
      } else {
        bufferedWrites.add(new ChannelWrite(msg, promise));
      }
    }

    @Override
    public void flush(ChannelHandlerContext ctx) {
      /**
       * Swallowing any flushes is not only an optimization but also required
       * for the SslHandler to work correctly. If the SslHandler receives multiple
       * flushes while the handshake is still ongoing, then the handshake "randomly"
       * times out. Not sure at this point why this is happening. Doing a single flush
       * seems to work but multiple flushes don't ...
       */
      if (bufferedWrites == null) {
        ctx.flush();
      } else {
        flushRequested = true;
      }
    }

    @Override
    public void close(ChannelHandlerContext ctx, ChannelPromise future) throws Exception {
      // NOTE(review): the caller's 'future' promise is never completed or
      // propagated here; callers waiting on close() may hang — confirm intent.
      failBufferedAndClose(ctx);
    }

    protected void failBufferedAndClose(ChannelHandlerContext ctx) {
      if (bufferedWrites != null) {
        Exception e = new Exception("Buffered write failed.");
        while (!bufferedWrites.isEmpty()) {
          ChannelWrite write = bufferedWrites.poll();
          write.promise.setFailure(e);
        }
        bufferedWrites = null;
      }
      /**
       * In case something goes wrong ensure that the channel gets closed as the
       * NettyClientTransport relies on the channel's close future to get completed.
       */
      ctx.close();
    }

    protected void writeBufferedAndRemove(ChannelHandlerContext ctx) {
      // Only drain once, and only on an active channel.
      if (!ctx.channel().isActive() || writing) {
        return;
      }
      // Make sure that method can't be reentered, so that the ordering
      // in the queue can't be messed up.
      writing = true;
      while (!bufferedWrites.isEmpty()) {
        ChannelWrite write = bufferedWrites.poll();
        ctx.write(write.msg, write.promise);
      }
      assert bufferedWrites.isEmpty();
      bufferedWrites = null;
      if (flushRequested) {
        ctx.flush();
      }
      // Removal has to happen last as the above writes will likely trigger
      // new writes that have to be added to the end of queue in order to not
      // mess up the ordering.
      ctx.pipeline().remove(this);
    }

    // Simple pair of a pending message and the promise to complete for it.
    private static class ChannelWrite {
      Object msg;
      ChannelPromise promise;

      ChannelWrite(Object msg, ChannelPromise promise) {
        this.msg = msg;
        this.promise = promise;
      }
    }
  }

  /**
   * Buffers all writes until the TLS Handshake is complete.
   */
  private static class BufferUntilTlsNegotiatedHandler extends AbstractBufferingHandler
      implements ProtocolNegotiator.Handler {

    BufferUntilTlsNegotiatedHandler(ChannelHandler... handlers) {
      super(handlers);
    }

    @Override
    public ByteString scheme() {
      return Utils.HTTPS;
    }

    @Override
    public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {
      // Drain buffered writes on handshake success; fail them (and close) otherwise.
      if (evt instanceof SslHandshakeCompletionEvent) {
        SslHandshakeCompletionEvent handshakeEvent = (SslHandshakeCompletionEvent) evt;
        if (handshakeEvent.isSuccess()) {
          writeBufferedAndRemove(ctx);
        } else {
          failBufferedAndClose(ctx);
        }
      }
      super.userEventTriggered(ctx, evt);
    }
  }

  /**
   * Buffers all writes until the {@link Channel} is active.
   */
  private static class BufferUntilChannelActiveHandler extends AbstractBufferingHandler
      implements ProtocolNegotiator.Handler {

    BufferUntilChannelActiveHandler(ChannelHandler... handlers) {
      super(handlers);
    }

    @Override
    public ByteString scheme() {
      return Utils.HTTP;
    }

    @Override
    public void handlerAdded(ChannelHandlerContext ctx) {
      // Covers the case where the channel was already active when we were added.
      writeBufferedAndRemove(ctx);
    }

    @Override
    public void channelActive(ChannelHandlerContext ctx) throws Exception {
      writeBufferedAndRemove(ctx);
      super.channelActive(ctx);
    }
  }

  /**
   * Buffers all writes until the HTTP to HTTP/2 upgrade is complete.
   */
  private static class BufferingHttp2UpgradeHandler extends AbstractBufferingHandler
      implements ProtocolNegotiator.Handler {

    BufferingHttp2UpgradeHandler(ChannelHandler... handlers) {
      super(handlers);
    }

    @Override
    public ByteString scheme() {
      return Utils.HTTP;
    }

    @Override
    public void channelActive(ChannelHandlerContext ctx) throws Exception {
      // Trigger the HTTP/1.1 plaintext upgrade protocol by issuing an HTTP request
      // which causes the upgrade headers to be added
      DefaultHttpRequest upgradeTrigger =
          new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/");
      ctx.writeAndFlush(upgradeTrigger);
      super.channelActive(ctx);
    }

    @Override
    public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {
      if (evt == HttpClientUpgradeHandler.UpgradeEvent.UPGRADE_SUCCESSFUL) {
        writeBufferedAndRemove(ctx);
      } else if (evt == HttpClientUpgradeHandler.UpgradeEvent.UPGRADE_REJECTED) {
        failBufferedAndClose(ctx);
        ctx.pipeline().fireExceptionCaught(new Exception("HTTP/2 upgrade rejected"));
      }
      super.userEventTriggered(ctx, evt);
    }
  }
}
package com.noodle.storage;

import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/**
 * {@link Storage} backed by a single append-only {@link RandomAccessFile}.
 * Record layout on disk: [int keyLength][int dataLength][key bytes][data bytes].
 * An in-memory index maps each key to its record's file offset. All file
 * operations are serialized on {@code fileLock}; record data is passed through
 * the supplied {@link Encryption} on the way in and out.
 */
public class RandomAccessFileStorage implements Storage {

  // Max size of the in-memory copy buffer. Currently 16 MB.
  private static final int MAX_COPY_BUFFER_SIZE = 16 * 1024 * 1024;
  private static final int MIN_COPY_BUFFER_SIZE = 1024;

  private final Encryption encryption;
  private final RandomAccessFile file;
  private final Object fileLock = new Object();
  // Key -> file offset of the record's header.
  private final Map<BytesWrapper, Long> index = new ConcurrentHashMap<>();

  public RandomAccessFileStorage(final File file, final Encryption encryption) {
    this.encryption = encryption;
    try {
      // FIX: existence must be tested BEFORE opening — RandomAccessFile in
      // "rw" mode creates the file if it is missing, which made the old
      // post-open exists() check always true (and the createNewFile()
      // branch dead). Opening in "rw" also makes createNewFile redundant.
      final boolean existed = file.exists();
      this.file = new RandomAccessFile(file, "rw");
      if (existed) {
        remapIndexes();
      }
    } catch (Exception e) {
      throw toRuntimeException(e);
    }
  }

  /** Appends the record (data encrypted), replacing any record with the same key. */
  @Override
  public void put(final Record record) {
    synchronized (fileLock) {
      final byte[] key = record.key;
      final Record encryptedRecord = encryptRecord(record);

      // Replace semantics: drop any existing record for this key first.
      final long existingPos = positionOf(key);
      if (existingPos != -1) {
        remove(key);
      }

      try {
        final long pos = file.length();
        file.seek(pos);

        file.writeInt(encryptedRecord.key.length);
        file.writeInt(encryptedRecord.data.length);
        file.write(encryptedRecord.key);
        file.write(encryptedRecord.data);

        index.put(new BytesWrapper(key), pos);
      } catch (IOException e) {
        throw toRuntimeException(e);
      }
    }
  }

  /**
   * Removes and returns the record for {@code key}, or null if absent.
   * Compacts the file by shifting all subsequent bytes left, then rebases
   * the offsets of every record that followed the removed one.
   */
  @Override
  public Record remove(final byte[] key) {
    synchronized (fileLock) {
      final long pos = positionOf(key);
      if (pos == -1) {
        return null;
      }

      final Record encryptedRecord = getRecordAt(pos);
      try {
        final int encryptedSize = encryptedRecord.size();

        // Last element: just truncate.
        if (pos + encryptedSize >= file.length()) {
          file.setLength(pos);
        } else {
          // In the middle or beginning: shift the tail left in chunks.
          final int remainderSize = (int) (file.length() - pos);
          byte[] copyBuffer = allocateCopyBuffer(remainderSize);

          long fromPos = pos + encryptedSize;
          long toPos = pos;
          long fileLength = file.length();

          while (fromPos < fileLength) {
            file.seek(fromPos);
            int read = file.read(copyBuffer);
            fromPos += read;

            file.seek(toPos);
            file.write(copyBuffer, 0, read);
            toPos += read;
          }

          file.setLength(fileLength - encryptedSize);

          // Rebase offsets of every record that sat after the removed one.
          for (Map.Entry<BytesWrapper, Long> entry : index.entrySet()) {
            final long recordPos = entry.getValue();
            if (recordPos > pos) {
              entry.setValue(recordPos - encryptedSize);
            }
          }
        }

        index.remove(new BytesWrapper(key));

        return decryptRecord(encryptedRecord);
      } catch (IOException e) {
        throw toRuntimeException(e);
      }
    }
  }

  /**
   * Sizes a copy buffer: as large as the remaining tail, but no more than
   * half of the available heap and never outside [MIN, MAX]_COPY_BUFFER_SIZE.
   */
  private byte[] allocateCopyBuffer(final int remainderSize) {
    final Runtime runtime = Runtime.getRuntime();
    final int usedMemory = (int) (runtime.totalMemory() - runtime.freeMemory());
    final int availableMemory = (int) (runtime.maxMemory() - usedMemory);

    int size = Math.min(remainderSize, availableMemory / 2);
    size = Math.min(size, MAX_COPY_BUFFER_SIZE);
    size = Math.max(size, MIN_COPY_BUFFER_SIZE);

    return new byte[size];
  }

  /** Returns the decrypted record for {@code key}, or null if absent. */
  @Override
  public Record get(final byte[] key) {
    synchronized (fileLock) {
      final long pos = positionOf(key);
      return pos != -1
          ? decryptRecord(getRecordAt(pos))
          : null;
    }
  }

  private Record encryptRecord(final Record original) {
    try {
      return new Record(
          original.key,
          encryption.encrypt(original.data)
      );
    } catch (Exception e) {
      throw toRuntimeException(e);
    }
  }

  /** Returns an unmodifiable snapshot of all keys starting with {@code prefix}. */
  @Override
  public List<byte[]> prefixedWith(final byte[] prefix) {
    final ArrayList<byte[]> keys = new ArrayList<>();
    for (BytesWrapper wrapper : index.keySet()) {
      if (wrapper.hasPrefix(prefix)) {
        keys.add(wrapper.bytes);
      }
    }
    return Collections.unmodifiableList(keys);
  }

  public Map<BytesWrapper, Long> getIndex() {
    return index;
  }

  private Record decryptRecord(final Record encrypted) {
    try {
      return new Record(
          encrypted.key,
          encryption.decrypt(encrypted.data)
      );
    } catch (Exception e) {
      throw toRuntimeException(e);
    }
  }

  /** Reads the full record whose header starts at {@code position}. */
  private Record getRecordAt(final long position) {
    try {
      file.seek(position);

      final int keySize = file.readInt();
      final int dataSize = file.readInt();

      final byte[] keyBytes = new byte[keySize];
      final byte[] dataBytes = new byte[dataSize];

      // FIX: read() may return fewer bytes than requested; readFully()
      // guarantees the whole array is filled (or throws EOFException).
      file.readFully(keyBytes);
      file.readFully(dataBytes);

      return new Record(keyBytes, dataBytes);
    } catch (IOException e) {
      throw toRuntimeException(e);
    }
  }

  /** Offset of the record for {@code key}, or -1 when not indexed. */
  private long positionOf(final byte[] key) {
    final BytesWrapper keyWrapper = new BytesWrapper(key);
    final Long pos = index.get(keyWrapper);
    return pos == null ? -1 : pos;
  }

  /** Scans the whole file sequentially and rebuilds the key -> offset index. */
  private void remapIndexes() throws Exception {
    synchronized (fileLock) {
      file.seek(0);
      while (file.getFilePointer() < file.length()) {
        final long pos = file.getFilePointer();
        final int keySize = file.readInt();
        if (keySize == 0) {
          break;
        }
        final int dataSize = file.readInt();
        final byte[] key = new byte[keySize];

        // Read key and check if all read.
        if (file.read(key) != keySize) {
          throw new RuntimeException("Data is corrupted at " + file.getFilePointer());
        }

        index.put(new BytesWrapper(key), pos);

        // Skip the data payload; only offsets are indexed.
        file.seek(file.getFilePointer() + dataSize);
      }
    }
  }

  /**
   * Converts checked exception to runtime exception so that no mandatory try-catch is required.
   */
  private RuntimeException toRuntimeException(Exception e) {
    return new RuntimeException(e);
  }
}
package com.mapswithme.maps.sound; import android.content.Context; import android.speech.tts.TextToSpeech; import android.util.Log; import android.widget.Toast; import com.mapswithme.maps.MWMApplication; import java.util.Locale; public class TTSPlayer { private static TTSPlayer ourInstance = null; private Context mContext = null; private TextToSpeech mTts = null; private Locale mTtsLocale = null; private final static String TAG = "TTSPlayer"; private TTSPlayer() { mContext = MWMApplication.get().getApplicationContext(); setLocaleIfAvailable(Locale.getDefault()); } @Override protected void finalize() throws Throwable { if(mTts != null) mTts.shutdown(); super.finalize(); } public static TTSPlayer get() { if (ourInstance == null || !ourInstance.isLocaleEquals(Locale.getDefault())) ourInstance = new TTSPlayer(); return ourInstance; } private boolean isLocaleEquals(Locale locale) { return locale.getLanguage().equals(mTtsLocale.getLanguage()) && locale.getCountry().equals(mTtsLocale.getCountry()); } private void setLocaleIfAvailable(final Locale locale) { if (mTts != null && mTts.getLanguage().equals(locale)) return; // @TODO Consider move TextToSpeech to a service: mTts = new TextToSpeech(mContext, new TextToSpeech.OnInitListener() { @Override public void onInit(int status) { // This method is called anisochronously. if (status == TextToSpeech.ERROR) { Log.w(TAG, "Can't initialize TextToSpeech for locale " + locale.getLanguage() + " " + locale.getCountry()); return; } final int avail = mTts.isLanguageAvailable(locale); mTtsLocale = locale; if (avail != TextToSpeech.LANG_AVAILABLE && avail != TextToSpeech.LANG_COUNTRY_AVAILABLE && avail != TextToSpeech.LANG_COUNTRY_VAR_AVAILABLE) { mTtsLocale = Locale.UK; // No translation for TTS for Locale.getDefault() language. 
} mTts.setLanguage(mTtsLocale); nativeSetTurnNotificationsLocale(mTtsLocale.getLanguage()); Log.i(TAG, "setLocaleIfAvailable() nativeSetTurnNotificationsLocale(" + mTtsLocale.getLanguage() + ")"); } }); } public void speak(String textToSpeak) { if (mTts == null) { Log.w(TAG, "TTSPlayer.speak() is called while mTts == null."); return; } // @TODO(vbykoianko) removes these two toasts below when the test period is finished. Toast.makeText(mContext, textToSpeak, Toast.LENGTH_SHORT).show(); if (mTts.speak(textToSpeak, TextToSpeech.QUEUE_ADD, null) == TextToSpeech.ERROR) { Log.e(TAG, "TextToSpeech returns TextToSpeech.ERROR."); Toast.makeText(mContext, "TTS error", Toast.LENGTH_SHORT).show(); } } public void stop() { if(mTts != null) mTts.stop(); } public boolean isEnabled() { return nativeAreTurnNotificationsEnabled(); } public void enable(boolean enabled) { nativeEnableTurnNotifications(enabled); } public native static void nativeEnableTurnNotifications(boolean enable); public native static boolean nativeAreTurnNotificationsEnabled(); public native static void nativeSetTurnNotificationsLocale(String locale); public native static String nativeGetTurnNotificationsLocale(); }
package application; import java.io.ByteArrayInputStream; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import org.opencv.core.Mat; import org.opencv.core.MatOfByte; import org.opencv.imgcodecs.Imgcodecs; import org.opencv.imgproc.Imgproc; import org.opencv.videoio.VideoCapture; import javafx.event.ActionEvent; import javafx.fxml.FXML; import javafx.scene.control.Button; import javafx.scene.image.Image; import javafx.scene.image.ImageView; public class Controller { @FXML private Button toggleCapture; @FXML private ImageView currentFrame; private ScheduledExecutorService timer; private VideoCapture capture = new VideoCapture (); private boolean cameraActive = false; @FXML private void initialize () { assert toggleCapture != null : "fx:id=\"toggleCapture\" was not injected: check your FXML file 'Webcam.fxml'."; assert currentFrame != null : "fx:id=\"currentFrame\" was not injected: check your FXML file 'Webcam.fxml'."; } @FXML private void startCamera (ActionEvent e) { if (!this.cameraActive) { // start the video capture this.capture.open (0); // is the video stream available? 
if (this.capture.isOpened ()) { this.cameraActive = true; // grab a frame every 33 ms (30 frames/sec) Runnable frameGrabber = new Runnable () { @Override public void run () { Image imageToShow = grabFrame (); currentFrame.setImage (imageToShow); } }; this.timer = Executors.newSingleThreadScheduledExecutor (); this.timer.scheduleAtFixedRate (frameGrabber, 0, 17, TimeUnit.MILLISECONDS); // update the button content this.toggleCapture.setText ("Stop Camera"); } else { // log the error System.err.println ("Impossible to open the camera connection..."); } } else { // the camera is not active at this point this.cameraActive = false; // update again the button content this.toggleCapture.setText ("Start Camera"); // stop the timer try { this.timer.shutdown (); this.timer.awaitTermination (33, TimeUnit.MILLISECONDS); } catch (InterruptedException exception) { // log the exception System.err.println ("Exception in stopping the frame capture, trying to release the camera now... " + exception); } // release the camera this.capture.release (); // clean the frame this.currentFrame.setImage (null); } } private Image grabFrame() { // init everything Image imageToShow = null; Mat frame = new Mat(); // check if the capture is open if (this.capture.isOpened()) { try { // read the current frame this.capture.read(frame); // if the frame is not empty, process it if (!frame.empty()) { // convert the image to gray scale Imgproc.cvtColor(frame, frame, Imgproc.COLOR_BGR2RGB); // convert the Mat object (OpenCV) to Image (JavaFX) imageToShow = mat2Image(frame); } } catch (Exception e) { // log the error System.err.println("Exception during the image elaboration: " + e); } } return imageToShow; } /** * Convert a Mat object (OpenCV) in the corresponding Image for JavaFX * * @param frame * the {@link Mat} representing the current frame * @return the {@link Image} to show */ private Image mat2Image(Mat frame) { // create a temporary buffer MatOfByte buffer = new MatOfByte(); // encode the frame 
in the buffer Imgcodecs.imencode(".png", frame, buffer); // build and return an Image created from the image encoded in the // buffer return new Image(new ByteArrayInputStream(buffer.toArray())); } }
import acm.io.IODialog; import acm.program.GraphicsProgram; import acm.util.RandomGenerator; public class Yahtzee extends GraphicsProgram implements YahtzeeConstants { private String currentPlayerName; private int currentPlayerNumber; private int[] diceValues; private YahtzeeDisplay display; private int nPlayers; private String[] playerNames; private RandomGenerator rgen = new RandomGenerator(); public static void main(String[] args) { new Yahtzee().start(args); } @Override public void run() { IODialog dialog = getDialog(); nPlayers = dialog.readInt("Enter number of players"); playerNames = new String[nPlayers]; for (int i = 1; i <= nPlayers; i++) { playerNames[i - 1] = dialog.readLine("Enter name for player " + i); } display = new YahtzeeDisplay(getGCanvas(), playerNames); playGame(); } private void playGame() { playTurn(); } private void playTurn() { // TODO: change currentPlayerNumber currentPlayerNumber = 1; currentPlayerName = playerNames[currentPlayerNumber - 1]; handleFirstDiceRoll(); handleSubsequentDiceRoll(); handleSubsequentDiceRoll(); handleCategorySelection(); } private void handleFirstDiceRoll() { display.printMessage( String.format("%s's turn! 
Click \"Roll Dice\" button to roll the dice.", currentPlayerName)); display.waitForPlayerToClickRoll(currentPlayerNumber); diceValues = new int[N_DICE]; for (int i = 0; i < N_DICE; i++) { diceValues[i] = rgen.nextInt(1, 6); } display.displayDice(diceValues); } private void handleSubsequentDiceRoll() { display.printMessage("Select the dice you wish to re-roll and click \"Roll Again\"."); display.waitForPlayerToSelectDice(); for (int i = 0; i < N_DICE; i++) { if (display.isDieSelected(i)) { diceValues[i] = rgen.nextInt(1, 6); } } display.displayDice(diceValues); } private void handleCategorySelection() { display.printMessage("Select a category for this roll."); int category; while (true) { category = display.waitForPlayerToSelectCategory(); if (YahtzeeMagicStub.checkCategory(diceValues, category)) { display.printMessage("Test"); break; } else { display.printMessage("Invalid category selected. Please select another."); } } // display.updateScorecard(category, currentPlayerNumber, score); } }
package cloudcmd.common.index; import cloudcmd.common.FileMetaData; import cloudcmd.common.MetaUtil; import cloudcmd.common.SqlUtil; import cloudcmd.common.StringUtil; import cloudcmd.common.config.ConfigStorageService; import java.io.File; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.concurrent.ConcurrentLinkedQueue; import org.apache.log4j.Logger; import org.h2.fulltext.FullText; import org.h2.jdbcx.JdbcConnectionPool; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; public class H2IndexStorage implements IndexStorage { private static Logger log = Logger.getLogger(H2IndexStorage.class); private static final int MAX_QUEUE_SIZE = 1024 * 8; private static String _configRoot; JdbcConnectionPool _cp; // THIS IS NOT USED ConcurrentLinkedQueue<FileMetaData> _queue = new ConcurrentLinkedQueue<FileMetaData>(); private String getDbFile() { return String.format("%s%sindex", _configRoot, File.separator); } private String createConnectionString() { return String.format("jdbc:h2:%s", getDbFile()); } private Connection getDbConnection() throws SQLException { return _cp.getConnection(); } private Connection getReadOnlyDbConnection() throws SQLException { Connection conn = getDbConnection(); conn.setReadOnly(true); return conn; } @Override public void init() throws Exception { Class.forName("org.h2.Driver"); _configRoot = ConfigStorageService.instance().getConfigRoot(); _cp = JdbcConnectionPool.create(createConnectionString(), "sa", "sa"); File file = new File(getDbFile() + ".h2.db"); if (!file.exists()) { bootstrapDb(); } } @Override public void shutdown() { flush(); if (_cp != null) { _cp.dispose(); } } private void bootstrapDb() { Connection db = null; Statement st = null; try { db = 
getDbConnection(); st = db.createStatement(); st.execute("DROP TABLE if exists FILE_INDEX;"); st.execute("CREATE TABLE FILE_INDEX ( HASH VARCHAR PRIMARY KEY, PATH VARCHAR, FILENAME VARCHAR, FILEEXT VARCHAR, FILESIZE BIGINT, FILEDATE BIGINT, TAGS VARCHAR );"); st.execute("DROP TABLE if exists RAWMETA_INDEX;"); st.execute("CREATE TABLE RAWMETA_INDEX ( HASH VARCHAR PRIMARY KEY, RAWMETA VARCHAR );"); db.commit(); FullText.init(db); FullText.setWhitespaceChars(db, " ,:-._" + File.separator); FullText.createIndex(db, "PUBLIC", "FILE_INDEX", null); } catch (SQLException e) { log.error(e); } finally { SqlUtil.SafeClose(st); SqlUtil.SafeClose(db); } } @Override public void purge() { File file = new File(getDbFile() + ".h2.db"); if (file.exists()) { file.delete(); } bootstrapDb(); } volatile boolean _flushing = false; @Override public synchronized void flush() { if (_queue.size() == 0) return; _flushing = true; Connection db = null; try { db = getDbConnection(); db.setAutoCommit(false); while (!_queue.isEmpty()) { addMeta(db, _queue.remove()); } db.commit(); } catch (SQLException e) { log.error(e); } catch (JSONException e) { log.error(e); } finally { SqlUtil.SafeClose(db); _flushing = false; } } private void removeMeta(Connection db, String hash) throws JSONException, SQLException { PreparedStatement statement = null; try { statement = db.prepareStatement("DELETE FROM FILE_INDEX WHERE HASH = ?;"); bind(statement, 1, hash); statement.execute(); } catch (Exception e) { log.error(e); } finally { SqlUtil.SafeClose(statement); } } private void addMeta(Connection db, FileMetaData meta) throws JSONException, SQLException { String sql; PreparedStatement statementA = null; PreparedStatement statementB = null; try { List<Object> bind = new ArrayList<Object>(); List<String> fields = new ArrayList<String>(); fields.add("HASH"); fields.add("PATH"); fields.add("FILENAME"); if (meta.getFileExt() != null) fields.add("FILEEXT"); fields.add("FILESIZE"); fields.add("FILEDATE"); 
fields.add("TAGS"); bind.add(meta.getHash()); bind.add(meta.getPath()); bind.add(meta.getFilename()); if (meta.getFileExt() != null) bind.add(meta.getFileExt()); bind.add(meta.getFileSize()); bind.add(meta.getFileDate()); bind.add(StringUtil.join(meta.getTags(), " ")); sql = String.format("MERGE INTO FILE_INDEX (%s) VALUES (%s);", StringUtil.join(fields, ","), StringUtil.joinRepeat(bind.size(), "?", ",")); statementA = db.prepareStatement(sql); for (int i = 0, paramIdx = 1; i < bind.size(); i++, paramIdx++) { bind(statementA, paramIdx, bind.get(i)); } statementA.execute(); statementB = db.prepareStatement("MERGE INTO RAWMETA_INDEX (HASH,RAWMETA) VALUES (?,?);"); bind(statementB, 1, meta.getHash()); bind(statementB, 2, meta.getDataAsString()); statementB.execute(); } catch (Exception e) { e.printStackTrace(); log.error(e); } finally { SqlUtil.SafeClose(statementA); SqlUtil.SafeClose(statementB); } } private void bind(PreparedStatement statement, int idx, Object obj) throws SQLException { if (obj instanceof String) { statement.setString(idx, (String) obj); } else if (obj instanceof Long) { statement.setLong(idx, (Long) obj); } else { throw new IllegalArgumentException("unknown obj type: " + obj.toString()); } } @Override public void add(FileMetaData meta) { if (meta == null) return; /* _queue.add(meta); if (_queue.size() > MAX_QUEUE_SIZE && !_flushing) { flush(); } */ Connection db = null; try { db = getDbConnection(); addMeta(db, meta); } catch (JSONException e) { e.printStackTrace(); log.error(e); } catch (SQLException e) { log.error(e); } finally { SqlUtil.SafeClose(db); } } @Override public void remove(FileMetaData meta) { Connection db = null; try { db = getDbConnection(); db.setAutoCommit(false); removeMeta(db, meta.getHash()); db.commit(); } catch (JSONException e) { e.printStackTrace(); log.error(e); } catch (SQLException e) { log.error(e); } finally { SqlUtil.SafeClose(db); } } @Override public void addAll(List<FileMetaData> meta) { if (meta == null) return; 
Connection db = null; try { db = getDbConnection(); db.setAutoCommit(false); for (FileMetaData fmd : meta) { addMeta(db, fmd); } db.commit(); } catch (JSONException e) { log.error(e); } catch (SQLException e) { log.error(e); } finally { SqlUtil.SafeClose(db); } } @Override public JSONArray find(JSONObject filter) { JSONArray results = new JSONArray(); Connection db = null; PreparedStatement statement = null; try { db = getReadOnlyDbConnection(); String sql; List<Object> bind = new ArrayList<Object>(); if (filter.has("tags")) { sql = "SELECT HASH,RAWMETA FROM RAWMETA_INDEX WHERE HASH in (SELECT T.HASH FROM FT_SEARCH_DATA(?, 0, 0) FT, FILE_INDEX T WHERE FT.TABLE='FILE_INDEX' AND T.HASH = FT.KEYS[0])"; bind.add(filter.getString("tags")); } else { List<String> list = new ArrayList<String>(); Iterator<String> iter = filter.keys(); while (iter.hasNext()) { String key = iter.next(); Object obj = filter.get(key); if (obj instanceof String[] || obj instanceof Long[]) { Collection<Object> foo = Arrays.asList(obj); list.add(String.format("%s In (%s)", key.toUpperCase(), StringUtil.joinRepeat(foo.size(), "?", ","))); bind.addAll(foo); } else { if (obj.toString().contains("%")) { list.add(String.format("%s LIKE ?", key)); } else { list.add(String.format("%s IN (?)", key)); } bind.add(obj); } } if (list.size() > 0) { sql = String.format("SELECT HASH,RAWMETA FROM RAWMETA_INDEX WHERE HASH in (SELECT HASH FROM FILE_INDEX WHERE %s)", StringUtil.join(list, " AND ")); } else { sql = String.format("SELECT HASH,RAWMETA FROM RAWMETA_INDEX"); } } statement = db.prepareStatement(sql); for (int i = 0, paramIdx = 1; i < bind.size(); i++, paramIdx++) { bind(statement, paramIdx, bind.get(i)); } ResultSet rs = statement.executeQuery(); while (rs.next()) { results.put(MetaUtil.loadMeta(rs.getString("HASH"), new JSONObject(rs.getString("RAWMETA"))).toJson()); } } catch (JSONException e) { e.printStackTrace(); log.error(e); } catch (SQLException e) { log.error(e); } finally { 
SqlUtil.SafeClose(statement); SqlUtil.SafeClose(db); } return results; } @Override public void pruneHistory(List<FileMetaData> selections) { Connection db = null; try { db = getDbConnection(); db.setAutoCommit(false); for (FileMetaData meta : selections) { String parent = meta.getParent(); if (parent != null) { removeMeta(db, parent); } } db.commit(); } catch (JSONException e) { e.printStackTrace(); log.error(e); } catch (SQLException e) { log.error(e); } finally { SqlUtil.SafeClose(db); } } }
package com.openxc.sources.bluetooth; import java.io.IOException; import java.util.concurrent.locks.Condition; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; import java.util.Set; import java.util.UUID; import com.openxc.sources.bluetooth.BluetoothException; import android.bluetooth.BluetoothAdapter; import android.bluetooth.BluetoothDevice; import android.bluetooth.BluetoothSocket; import android.content.BroadcastReceiver; import android.content.Context; import android.util.Log; /** * The DeviceManager collects the functions required to discover and open a * socket to the Bluetooth device. */ public class DeviceManager { private final static String TAG = "DeviceManager"; private final static UUID RFCOMM_UUID = UUID.fromString( "00001101-0000-1000-8000-00805f9b34fb"); private Context mContext; private BluetoothAdapter mBluetoothAdapter; private BluetoothDevice mTargetDevice; private final Lock mDeviceLock = new ReentrantLock(); private final Condition mDeviceChangedCondition = mDeviceLock.newCondition(); private BroadcastReceiver mReceiver; /** * The DeviceManager requires an Android Context in order to send the intent * to enable Bluetooth if it isn't already on. */ public DeviceManager(Context context) throws BluetoothException { mContext = context; mBluetoothAdapter = BluetoothAdapter.getDefaultAdapter(); if(mBluetoothAdapter == null) { String message = "This device most likely does not have " + "a Bluetooth adapter"; Log.w(TAG, message); throw new BluetoothException(message); } } /** * Discover and connect to the target device. This method will block while * waiting for the device. * * Returns a socket connected to the device. 
*/ public BluetoothSocket connect(String targetAddress) throws BluetoothException { discoverDevices(targetAddress); mDeviceLock.lock(); while(mTargetDevice == null) { try { mDeviceChangedCondition.await(); } catch(InterruptedException e) {} } BluetoothSocket socket = setupSocket(mTargetDevice); mDeviceLock.unlock(); return socket; } /** * Open an RFCOMM socket to the connected Bluetooth device. * * The DeviceManager must already have a device connected, so * discoverDevices needs to be called. */ private BluetoothSocket setupSocket(BluetoothDevice device) throws BluetoothException { if(device == null) { Log.w(TAG, "Can't setup socket -- device is " + device); throw new BluetoothException(); } mBluetoothAdapter.cancelDiscovery(); Log.d(TAG, "Scanning services on " + device); BluetoothSocket socket = null; try { socket = device.createRfcommSocketToServiceRecord( RFCOMM_UUID); } catch(IOException e) { String error = "Unable to open a socket to device " + device; Log.w(TAG, error); throw new BluetoothException(error, e); } try { socket.connect(); } catch(IOException e) { String error = "Could not find required service on " + device; Log.e(TAG, error); try { socket.close(); } catch(IOException e2) {} throw new BluetoothException(error, e); } return socket; } private void captureDevice(BluetoothDevice device) { mDeviceLock.lock(); mTargetDevice = device; mDeviceChangedCondition.signal(); mDeviceLock.unlock(); if(mReceiver != null) { mContext.unregisterReceiver(mReceiver); mBluetoothAdapter.cancelDiscovery(); } } private boolean deviceDiscovered(BluetoothDevice device, String targetAddress) { Log.d(TAG, "Found Bluetooth device: " + device); if(device.getAddress().equals(targetAddress)) { Log.d(TAG, "Found matching device: " + device); return true; } return false; } /** * Check the list of previously paired devices for one matching the target * address. Once a matching device is found, calls captureDevice to connect * with it. 
* * This will not attempt to pair with unpaired devices - it's assumed that * this step has already been completed by the user when selecting the * Bluetooth device to use. If this class is used programatically with a * hard-coded target address, you'll need to have previously paired the * device. */ private void discoverDevices(final String targetAddress) { Log.d(TAG, "Starting device discovery"); Set<BluetoothDevice> pairedDevices = mBluetoothAdapter.getBondedDevices(); for(BluetoothDevice device : pairedDevices) { Log.d(TAG, "Found already paired device: " + device); if(deviceDiscovered(device, targetAddress)) { captureDevice(device); return; } } } }
package org.whattf.checker.table; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Set; import org.whattf.checker.AttributeUtil; import org.xml.sax.Attributes; import org.xml.sax.Locator; import org.xml.sax.SAXException; import org.xml.sax.SAXParseException; import org.xml.sax.helpers.LocatorImpl; /** * Represents an XHTML table for table integrity checking. Handles * table-significant parse events and keeps track of columns. * * @version $Id$ * @author hsivonen */ final class Table { /** * An enumeration for keeping track of the parsing state of a table. */ private enum State { /** * The table element start has been seen. No child elements have been seen. * A start of a column, a column group, a row or a row group or the end of * the table is expected. */ IN_TABLE_AT_START, /** * The table element is the open element and rows have been seen. A row in * an implicit group, a row group or the end of the table is expected. */ IN_TABLE_AT_POTENTIAL_ROW_GROUP_START, /** * A column group is open. It can end or a column can start. */ IN_COLGROUP, /** * A column inside a column group is open. It can end. */ IN_COL_IN_COLGROUP, /** * A column that is a child of table is open. It can end. */ IN_COL_IN_IMPLICIT_GROUP, /** * The open element is an explicit row group. It may end or a row may start. */ IN_ROW_GROUP, /** * A row in a an explicit row group is open. It may end or a cell may start. */ IN_ROW_IN_ROW_GROUP, /** * A cell inside a row inside an explicit row group is open. It can end. */ IN_CELL_IN_ROW_GROUP, /** * A row in an implicit row group is open. It may end or a cell may start. */ IN_ROW_IN_IMPLICIT_ROW_GROUP, /** * The table itself is the currently open element, but an implicit row group * been started by previous rows. A row may start, an explicit row group may * start or the table may end. */ IN_IMPLICIT_ROW_GROUP, /** * A cell inside an implicit row group is open. It can close. 
*/ IN_CELL_IN_IMPLICIT_ROW_GROUP, /** * The table itself is the currently open element. Columns and/or column groups * have been seen but rows or row groups have not been seen yet. A column, a * column group, a row or a row group can start. The table can end. */ IN_TABLE_COLS_SEEN } /** * Keeps track of the handler state between SAX events. */ private State state = State.IN_TABLE_AT_START; /** * The number of suppressed element starts. */ private int suppressedStarts = 0; /** * Indicates whether the width of the table was established by column markup. */ private boolean hardWidth = false; /** * The column count established by column markup or by the first row. */ private int columnCount = -1; /** * The actual column count as stretched by the widest row. */ private int realColumnCount = 0; /** * A colgroup span that hasn't been actuated yet in case the element has * col children. The absolute value counts. The negative sign means that * the value was implied. */ private int pendingColGroupSpan = 0; /** * A set of the IDs of header cells. */ private final Set<String> headerIds = new HashSet<String>(); /** * A list of cells that refer to headers (in the document order). */ private final List<Cell> cellsReferringToHeaders = new LinkedList<Cell>(); /** * The owning checker. */ private final TableChecker owner; /** * The current row group (also implicit groups have an explicit object). */ private RowGroup current; /** * The head of the column range list. */ private ColumnRange first = null; /** * The tail of the column range list. */ private ColumnRange last = null; /** * The range under inspection. */ private ColumnRange currentColRange = null; /** * The previous range that was inspected. */ private ColumnRange previousColRange = null; /** * Constructor. 
* @param owner reference back to the checker */ public Table(TableChecker owner) { super(); this.owner = owner; } private boolean needSuppressStart() { if (suppressedStarts > 0) { suppressedStarts++; return true; } else { return false; } } private boolean needSuppressEnd() { if (suppressedStarts > 0) { suppressedStarts return true; } else { return false; } } void startRowGroup(String type) throws SAXException { if (needSuppressStart()) { return; } switch (state) { case IN_IMPLICIT_ROW_GROUP: current.end(); // fall through case IN_TABLE_AT_START: case IN_TABLE_COLS_SEEN: case IN_TABLE_AT_POTENTIAL_ROW_GROUP_START: current = new RowGroup(this, type); state = State.IN_ROW_GROUP; break; default: suppressedStarts = 1; break; } } void endRowGroup() throws SAXException { if (needSuppressEnd()) { return; } switch (state) { case IN_ROW_GROUP: current.end(); current = null; state = State.IN_TABLE_AT_POTENTIAL_ROW_GROUP_START; break; default: throw new IllegalStateException("Bug!"); } } void startRow() { if (needSuppressStart()) { return; } switch (state) { case IN_TABLE_AT_START: case IN_TABLE_COLS_SEEN: case IN_TABLE_AT_POTENTIAL_ROW_GROUP_START: current = new RowGroup(this, null); // fall through case IN_IMPLICIT_ROW_GROUP: state = State.IN_ROW_IN_IMPLICIT_ROW_GROUP; break; case IN_ROW_GROUP: state = State.IN_ROW_IN_ROW_GROUP; break; default: suppressedStarts = 1; return; } currentColRange = first; previousColRange = null; current.startRow(); } void endRow() throws SAXException { if (needSuppressEnd()) { return; } switch (state) { case IN_ROW_IN_ROW_GROUP: state = State.IN_ROW_GROUP; break; case IN_ROW_IN_IMPLICIT_ROW_GROUP: state = State.IN_IMPLICIT_ROW_GROUP; break; default: throw new IllegalStateException("Bug!"); } current.endRow(); } void startCell(boolean header, Attributes attributes) throws SAXException { if (needSuppressStart()) { return; } switch (state) { case IN_ROW_IN_ROW_GROUP: state = State.IN_CELL_IN_ROW_GROUP; break; case IN_ROW_IN_IMPLICIT_ROW_GROUP: 
state = State.IN_CELL_IN_IMPLICIT_ROW_GROUP; break; default: suppressedStarts = 1; return; } if (header) { int len = attributes.getLength(); for (int i = 0; i < len; i++) { if ("ID".equals(attributes.getType(i))) { String val = attributes.getValue(i); if (!"".equals(val)) { headerIds.add(val); } } } } String[] headers = AttributeUtil.split(attributes.getValue("", "headers")); Cell cell = new Cell( Math.abs(AttributeUtil.parsePositiveInteger(attributes.getValue( "", "colspan"))), Math.abs(AttributeUtil.parseNonNegativeInteger(attributes.getValue( "", "rowspan"))), headers, header, owner.getDocumentLocator(), owner.getErrorHandler()); if (headers.length > 0) { cellsReferringToHeaders.add(cell); } current.cell(cell); } void endCell() { if (needSuppressEnd()) { return; } switch (state) { case IN_CELL_IN_ROW_GROUP: state = State.IN_ROW_IN_ROW_GROUP; break; case IN_CELL_IN_IMPLICIT_ROW_GROUP: state = State.IN_ROW_IN_IMPLICIT_ROW_GROUP; break; default: throw new IllegalStateException("Bug!"); } } void startColGroup(int span) { if (needSuppressStart()) { return; } switch (state) { case IN_TABLE_AT_START: hardWidth = true; columnCount = 0; // fall through case IN_TABLE_COLS_SEEN: pendingColGroupSpan = span; state = State.IN_COLGROUP; break; default: suppressedStarts = 1; break; } } void endColGroup() { if (needSuppressEnd()) { return; } switch (state) { case IN_COLGROUP: if (pendingColGroupSpan != 0) { int right = columnCount + Math.abs(pendingColGroupSpan); Locator locator = new LocatorImpl( owner.getDocumentLocator()); ColumnRange colRange = new ColumnRange("colgroup", locator, columnCount, right); appendColumnRange(colRange); columnCount = right; } realColumnCount = columnCount; state = State.IN_TABLE_COLS_SEEN; break; default: throw new IllegalStateException("Bug!"); } } void startCol(int span) throws SAXException { if (needSuppressStart()) { return; } switch (state) { case IN_TABLE_AT_START: hardWidth = true; columnCount = 0; // fall through case IN_TABLE_COLS_SEEN: 
state = State.IN_COL_IN_IMPLICIT_GROUP; break; case IN_COLGROUP: if (pendingColGroupSpan > 0) { warn("A col element causes a span attribute with value " + pendingColGroupSpan + " to be ignored on the parent colgroup."); } pendingColGroupSpan = 0; state = State.IN_COL_IN_COLGROUP; break; default: suppressedStarts = 1; return; } int right = columnCount + Math.abs(span); Locator locator = new LocatorImpl(owner.getDocumentLocator()); ColumnRange colRange = new ColumnRange("col", locator, columnCount, right); appendColumnRange(colRange); columnCount = right; realColumnCount = columnCount; } /** * Appends a column range to the linked list of column ranges. * * @param colRange the range to append */ private void appendColumnRange(ColumnRange colRange) { if (last == null) { first = colRange; last = colRange; } else { last.setNext(colRange); last = colRange; } } void warn(String message) throws SAXException { owner.warn(message); } void err(String message) throws SAXException { owner.err(message); } void endCol() { if (needSuppressEnd()) { return; } switch (state) { case IN_COL_IN_IMPLICIT_GROUP: state = State.IN_TABLE_COLS_SEEN; break; case IN_COL_IN_COLGROUP: state = State.IN_COLGROUP; break; default: throw new IllegalStateException("Bug!"); } } void end() throws SAXException { switch (state) { case IN_IMPLICIT_ROW_GROUP: current.end(); current = null; break; case IN_TABLE_AT_START: case IN_TABLE_AT_POTENTIAL_ROW_GROUP_START: case IN_TABLE_COLS_SEEN: break; default: throw new IllegalStateException("Bug!"); } // Check referential integrity for (Iterator<Cell> iter = cellsReferringToHeaders.iterator(); iter.hasNext();) { Cell cell = iter.next(); String[] headings = cell.getHeadings(); for (int i = 0; i < headings.length; i++) { String heading = headings[i]; if (!headerIds.contains(heading)) { cell.err("The \u201Cheaders\u201D attribute on the element \u201C" + cell.elementName() + "\u201D refers to the ID \u201C" + heading + "\u201D, but there is no \u201Cth\u201D element 
with that ID in the same table."); } } } // Check that each column has non-extended cells ColumnRange colRange = first; while (colRange != null) { if (colRange.isSingleCol()) { owner.getErrorHandler().error( new SAXParseException("Table column " + colRange + " established by element \u201C" + colRange.getElement() + "\u201D has no cells beginning in it.", colRange.getLocator())); } else { owner.getErrorHandler().error( new SAXParseException("Table columns in range " + colRange + " established by element \u201C" + colRange.getElement() + "\u201D have no cells beginning in them.", colRange.getLocator())); } colRange = colRange.getNext(); } } /** * Returns the columnCount. * * @return the columnCount */ int getColumnCount() { return columnCount; } /** * Sets the columnCount. * * @param columnCount * the columnCount to set */ void setColumnCount(int columnCount) { this.columnCount = columnCount; } /** * Returns the hardWidth. * * @return the hardWidth */ boolean isHardWidth() { return hardWidth; } /** * Reports a cell whose positioning has been decided back to the table * so that column bookkeeping can be done. (Called from * <code>RowGroup</code>--not <code>TableChecker</code>.) * * @param cell a cell whose position has been calculated */ void cell(Cell cell) { int left = cell.getLeft(); int right = cell.getRight(); // first see if we've got a cell past the last col if (right > realColumnCount) { // are we past last col entirely? if (left == realColumnCount) { // single col? 
if (left + 1 != right) { appendColumnRange(new ColumnRange(cell.elementName(), cell, left + 1, right)); } realColumnCount = right; return; } else { // not past entirely appendColumnRange(new ColumnRange(cell.elementName(), cell, realColumnCount, right)); realColumnCount = right; } } while (currentColRange != null) { int hit = currentColRange.hits(left); if (hit == 0) { ColumnRange newRange = currentColRange.removeColumn(left); if (newRange == null) { // zap a list item if (previousColRange != null) { previousColRange.setNext(currentColRange.getNext()); } if (first == currentColRange) { first = currentColRange.getNext(); } if (last == currentColRange) { last = previousColRange; } currentColRange = currentColRange.getNext(); } else { if (last == currentColRange) { last = newRange; } currentColRange = newRange; } return; } else if (hit == -1) { return; } else if (hit == 1) { previousColRange = currentColRange; currentColRange = currentColRange.getNext(); } } } }
package org.dspace.app.itemexport; import java.io.File; import java.io.FileOutputStream; import java.io.FileWriter; import java.io.InputStream; import java.io.PrintWriter; import org.apache.commons.cli.Options; import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.PosixParser; import org.dspace.content.Bitstream; import org.dspace.content.Bundle; import org.dspace.content.Collection; import org.dspace.content.DCValue; import org.dspace.content.Item; import org.dspace.content.ItemIterator; import org.dspace.core.Context; import org.dspace.core.Constants; import org.dspace.core.Utils; import org.dspace.handle.HandleManager; /** Item exporter to create simple AIPs for DSpace content. Currently exports individual items, or entire collections. For instructions on use, see printUsage() method. ItemExport creates the simple AIP package that the importer also uses. It consists of: /exportdir/42/ (one directory per item) / dublin_core.xml - qualified dublin core in RDF schema / contents - text file, listing one file per line / file1 - files contained in the item / file2 / ... issues -doesn't handle special characters in metadata (needs to turn &'s into &amp;, etc.) 
*/ public class ItemExport { public static void main(String [] argv) throws Exception { // create an options object and populate it CommandLineParser parser = new PosixParser(); Options options = new Options(); options.addOption( "t", "type", true, "type: COLLECTION or ITEM"); options.addOption( "i", "id", true, "ID or handle of thing to export"); options.addOption( "d", "dest", true, "remove items in mapfile"); options.addOption( "n", "number", true, "sequence number to begin exporting items with"); options.addOption( "h", "help", false, "help"); CommandLine line = parser.parse( options, argv ); String typeString = null; String destDirName = null; String myIDString = null; int seqStart = -1; int myType = -1; Item myItem = null; Collection mycollection = null; if( line.hasOption('h') ) { HelpFormatter myhelp = new HelpFormatter(); myhelp.printHelp( "ItemExport\n", options ); System.out.println("\nfull collection: ItemExport -t COLLECTION -i ID -d dest -n number"); System.out.println("singleitem: ItemExport -t ITEM -i ID -d dest -n number"); System.exit(0); } if( line.hasOption( 't' ) ) // type { typeString = line.getOptionValue( 't' ); if( typeString.equals("ITEM") ) { myType = Constants.ITEM; } else if( typeString.equals("COLLECTION") ) { myType = Constants.COLLECTION; } } if( line.hasOption( 'i' ) ) { myIDString = line.getOptionValue( 'i' ); } if( line.hasOption( 'd' ) ) // dest { destDirName = line.getOptionValue( 'd' ); } if( line.hasOption( 'n' ) ) // number { seqStart = Integer.parseInt( line.getOptionValue( 'n' ) ); } // now validate the args if( myType == -1 ) { System.out.println("type must be either COLLECTION or ITEM (-h for help)"); System.exit(1); } if( destDirName == null ) { System.out.println("destination directory must be set (-h for help)"); System.exit(1); } if( seqStart == -1 ) { System.out.println("sequence start number must be set (-h for help)"); System.exit(1); } if( myIDString == null ) { System.out.println("ID must be set to either a 
database ID or a handle (-h for help)"); System.exit(1); } Context c = new Context(); c.setIgnoreAuthorization( true ); if( myType == Constants.ITEM ) { // first, is myIDString a handle? if( myIDString.indexOf('/') != -1 ) { myItem = (Item)HandleManager.resolveToObject( c, myIDString); if( myItem == null || myItem.getType() != Constants.ITEM ) { myItem = null; } } else { myItem = Item.find( c, Integer.parseInt( myIDString ) ); } if( myItem == null ) { System.out.println("Error, item cannot be found: " + myIDString); } } else { if( myIDString.indexOf('/') != -1 ) { // has a / must be a handle mycollection = (Collection)HandleManager.resolveToObject( c, myIDString ); // ensure it's a collection if( (mycollection == null) || (mycollection.getType() != Constants.COLLECTION) ) { mycollection = null; } } else if( myIDString != null ) { mycollection = Collection.find( c, Integer.parseInt( myIDString ) ); } if( mycollection == null ) { System.out.println( "Error, collection cannot be found: " + myIDString ); System.exit(1); } } if( myItem != null ) { // it's only a single item exportItem( c, myItem, destDirName, seqStart); } else { System.out.println("Exporting from collection: " + myIDString ); // it's a collection, so do a bunch of items ItemIterator i = mycollection.getItems(); exportItem(c, i, destDirName, seqStart); } File destDir = new File( destDirName ); c.complete(); } private static void printUsage() { System.out.println("Output simple AIPs, given collection or item ID"); System.out.println("Usage: ITEM|COLLECTION ID dest_dir sequence_number"); System.out.println(" dest_dir = destination of archive files"); System.out.println(" sequence_number = 0, or some other number to start naming the archive directories"); System.out.println(" first item dir is sequence_number, then sequence_number+1, etc."); } private static void exportItem( Context c, ItemIterator i, String destDirName, int seqStart ) throws Exception { int mySequenceNumber = seqStart; 
System.out.println("Beginning export"); while( i.hasNext() ) { System.out.println("Exporting item to " + mySequenceNumber ); exportItem(c, i.next(), destDirName, mySequenceNumber); mySequenceNumber++; } } private static void exportItem( Context c, Item myItem, String destDirName, int seqStart) throws Exception { File destDir = new File( destDirName ); if( destDir.exists() ) { // now create a subdirectory File itemDir = new File ( destDir + "/" + seqStart ); System.out.println("Exporting Item " + myItem.getID() + " to " + itemDir); if( itemDir.exists() ) { throw new Exception("Directory " + destDir + "/" + seqStart + " already exists!"); } else { if( itemDir.mkdir() ) { // make it this far, now start exporting writeMetadata ( c, myItem, itemDir ); writeBitstreams( c, myItem, itemDir ); writeHandle ( c, myItem, itemDir ); } else { throw new Exception("Error, can't make dir " + itemDir); } } } else { throw new Exception("Error, directory " + destDirName + " doesn't exist!"); } } // output the item's dublin core into the item directory private static void writeMetadata( Context c, Item i, File destDir ) throws Exception { File outFile = new File( destDir, "dublin_core.xml" ); System.out.println("Attempting to create file " + outFile); if( outFile.createNewFile() ) { PrintWriter out = new PrintWriter( new FileWriter( outFile ) ); DCValue dcorevalues[] = i.getDC(Item.ANY, Item.ANY, Item.ANY); out.println("<dublin_core>"); for(int j = 0; j < dcorevalues.length; j++) { DCValue dcv = dcorevalues[j]; String qualifier = dcv.qualifier; if( qualifier == null ) { qualifier = "none"; } String output = " <dcvalue element=\"" + dcv.element + "\" " + "qualifier=\"" + qualifier + "\">" + dcv.value + "</dcvalue>"; out.println( output ); } out.println("</dublin_core>"); out.close(); } else { throw new Exception( "Cannot create dublin_core.xml in " + destDir ); } } // create the file 'handle' which contains the handle assigned to the item private static void writeHandle( Context c, Item 
i, File destDir ) throws Exception { String filename = "handle"; File outFile = new File( destDir, filename ); if( outFile.createNewFile() ) { PrintWriter out = new PrintWriter( new FileWriter( outFile ) ); out.println( i.getHandle() ); // close the contents file out.close(); } else { throw new Exception( "Cannot create file " + filename + " in " + destDir ); } } // create both the bitstreams and the contents file private static void writeBitstreams( Context c, Item i, File destDir ) throws Exception { File outFile = new File( destDir, "contents" ); if( outFile.createNewFile() ) { PrintWriter out = new PrintWriter( new FileWriter( outFile ) ); Bundle [] bundles = i.getBundles(); for( int j = 0; j < bundles.length; j++ ) { // currently one bitstream per bundle! Bitstream b = (bundles[j].getBitstreams())[0]; String myName = b.getName(); String oldName = myName; int myPrefix = 1; // only used with name conflict InputStream is = b.retrieve(); boolean isDone = false; // done when bitstream is finally written while( !isDone ) { File fout = new File( destDir, myName ); if( fout.createNewFile() ) { FileOutputStream fos = new FileOutputStream(fout); Utils.bufferedCopy( is, fos ); // write the manifest file entry out.println( myName ); isDone = true; } else { myName = myPrefix + "_" + oldName; // keep appending numbers to the filename until unique myPrefix++; } } } // close the contents file out.close(); } else { throw new Exception( "Cannot create contents in " + destDir ); } } }
// VolocityReader.java

package loci.formats.in;

import java.io.IOException;
import java.util.ArrayList;

import loci.common.ByteArrayHandle;
import loci.common.DataTools;
import loci.common.IRandomAccess;
import loci.common.Location;
import loci.common.RandomAccessInputStream;
import loci.common.services.DependencyException;
import loci.common.services.ServiceException;
import loci.common.services.ServiceFactory;
import loci.formats.CoreMetadata;
import loci.formats.FormatException;
import loci.formats.FormatReader;
import loci.formats.FormatTools;
import loci.formats.MetadataTools;
import loci.formats.MissingLibraryException;
import loci.formats.codec.LZOCodec;
import loci.formats.meta.MetadataStore;
import loci.formats.services.MetakitService;

import ome.xml.model.primitives.PositiveFloat;
import ome.xml.model.primitives.PositiveInteger;

/**
 * VolocityReader is the file format reader for PerkinElmer Volocity
 * library files: a .mvd2 Metakit database plus a companion "Data"
 * directory of .aisf/.atsf/.dat streams (pixel data may also be embedded
 * directly in the database).
 */
public class VolocityReader extends FormatReader {

  // -- Constants --

  /** Name of the directory (next to the .mvd2 file) holding pixel data. */
  private static final String DATA_DIR = "Data";

  /** Virtual file name used to map pixel data embedded in the .mvd2. */
  private static final String EMBEDDED_STREAM = "embedded-stream.raw";

  /** Bytes to skip before scalar metadata values in .dat streams. */
  private static final int SIGNATURE_SIZE = 13;

  // -- Fields --

  // pixelsFiles[series][channel] = path to that channel's pixel file
  private String[][] pixelsFiles;
  // timestampFiles[series] = .atsf file path, or null if none
  private String[] timestampFiles;
  private ArrayList<String> extraFiles;

  // per-series padding (bytes) between stored timepoints
  private int[] planePadding;

  // raw Metakit tables: sample metadata (table 1) and strings (table 2)
  private Object[][] sampleTable, stringTable;

  private Location dir = null;
  private int[] blockSize;
  // true for series whose pixel data is stored as LZO-compressed blocks
  private boolean[] clippingData;

  // -- Constructor --

  /** Constructs a new Volocity reader. */
  public VolocityReader() {
    super("Volocity Library",
      new String[] {"mvd2", "aisf", "aiix", "dat", "atsf"});
    domains = new String[] {FormatTools.UNKNOWN_DOMAIN};
    hasCompanionFiles = true;
  }

  // -- IFormatReader API methods --

  /* @see loci.formats.IFormatReader#getSeriesUsedFiles(boolean) */
  public String[] getSeriesUsedFiles(boolean noPixels) {
    FormatTools.assertId(currentId, true, 1);

    ArrayList<String> files = new ArrayList<String>();
    files.addAll(extraFiles);
    for (int c=0; c<getEffectiveSizeC(); c++) {
      files.add(pixelsFiles[getSeries()][c]);
    }
    if (timestampFiles[getSeries()] != null) {
      files.add(timestampFiles[getSeries()]);
    }
    return files.toArray(new String[files.size()]);
  }

  /* @see loci.formats.IFormatReader#isThisType(String, boolean) */
  public boolean isThisType(String name, boolean open) {
    if (checkSuffix(name, "mvd2")) {
      return super.isThisType(name, open);
    }
    // a companion file is only "this type" if a sibling .mvd2 exists two
    // directories up (i.e. <library>.mvd2 next to <library>/Data/<file>)
    if (open && checkSuffix(name, suffixes)) {
      Location file = new Location(name).getAbsoluteFile();
      Location parent = file.getParentFile();
      parent = parent.getParentFile();
      if (parent != null) {
        parent = parent.getParentFile();
        if (parent != null) {
          Location mvd2 = new Location(parent, parent.getName() + ".mvd2");
          return mvd2.exists() && super.isThisType(mvd2.getAbsolutePath());
        }
      }
    }
    return false;
  }

  /* @see loci.formats.IFormatReader#isThisType(RandomAccessInputStream) */
  public boolean isThisType(RandomAccessInputStream stream)
    throws IOException
  {
    final int blockLen = 2;
    if (!FormatTools.validStream(stream, blockLen, false)) return false;
    String check = stream.readString(blockLen);
    // Metakit magic bytes in either byte order
    return check.equals("JL") || check.equals("LJ");
  }

  /**
   * @see loci.formats.IFormatReader#openBytes(int, byte[], int, int, int, int)
   */
  public byte[] openBytes(int no, byte[] buf, int x, int y, int w, int h)
    throws FormatException, IOException
  {
    FormatTools.checkPlaneParameters(this, no, buf.length, x, y, w, h);

    int[] zct = getZCTCoords(no);
    RandomAccessInputStream pix =
      new RandomAccessInputStream(pixelsFiles[getSeries()][zct[1]]);

    int padding = zct[2] * planePadding[getSeries()];

    long planeSize = FormatTools.getPlaneSize(this);
    int planesInFile = (int) (pix.length() / planeSize);
    int planeIndex = no / getEffectiveSizeC();
    // if the file contains one plane per timepoint, index by T and pad
    // each timepoint to a multiple of the block size
    if (planesInFile == getSizeT()) {
      planeIndex = zct[2];
      int block = blockSize[getSeries()];
      padding = block - (int) (planeSize % block);
      if (padding == block) {
        padding = 0;
      }
      padding *= zct[2];
    }

    long offset = (long) blockSize[getSeries()] + planeIndex * planeSize +
      padding;
    if (offset >= pix.length()) {
      return buf;
    }
    pix.seek(offset);

    if (clippingData[getSeries()]) {
      // LZO-compressed blocks: inflate until we have a full plane
      pix.seek(offset - 3);
      ByteArrayHandle v = new ByteArrayHandle();
      while (v.length() < FormatTools.getPlaneSize(this) &&
        pix.getFilePointer() < pix.length())
      {
        try {
          byte[] b = new LZOCodec().decompress(pix, null);
          pix.skipBytes(4);
          v.write(b);
        }
        catch (IOException e) {
          // FIX: this exception was previously swallowed silently; since
          // the file pointer may not advance after a failed decompress,
          // the loop could spin forever.  Stop reading this plane instead.
          break;
        }
      }
      RandomAccessInputStream s = new RandomAccessInputStream(v);
      s.seek(0);
      readPlane(s, x, y, w, h, buf);
      s.close();
    }
    else {
      readPlane(pix, x, y, w, h, buf);
    }
    pix.close();

    if (getRGBChannelCount() == 4) {
      // stored as ARGB, need to swap to RGBA
      for (int i=0; i<buf.length/4; i++) {
        byte a = buf[i * 4];
        buf[i * 4] = buf[i * 4 + 1];
        buf[i * 4 + 1] = buf[i * 4 + 2];
        buf[i * 4 + 2] = buf[i * 4 + 3];
        buf[i * 4 + 3] = a;
      }
    }

    return buf;
  }

  /* @see loci.formats.IFormatReader#close(boolean) */
  public void close(boolean fileOnly) throws IOException {
    super.close(fileOnly);
    if (!fileOnly) {
      pixelsFiles = null;
      extraFiles = null;
      timestampFiles = null;
      planePadding = null;
      sampleTable = null;
      stringTable = null;
      dir = null;
      // release the mapping for any embedded pixel data
      Location.mapFile(EMBEDDED_STREAM, null);
    }
  }

  // -- Internal FormatReader API methods --

  /* @see loci.formats.FormatReader#initFile(String) */
  protected void initFile(String id) throws FormatException, IOException {
    // if handed a companion file, locate the .mvd2 two directories up
    if (!checkSuffix(id, "mvd2")) {
      Location file = new Location(id).getAbsoluteFile();
      Location parent = file.getParentFile().getParentFile();
      String[] files = parent.list(true);
      for (String f : files) {
        if (checkSuffix(f, "mvd2")) {
          id = new Location(parent, f).getAbsolutePath();
          break;
        }
      }
    }
    super.initFile(id);

    extraFiles = new ArrayList<String>();

    Location file = new Location(id).getAbsoluteFile();
    extraFiles.add(file.getAbsolutePath());

    Location parentDir = file.getParentFile();
    dir = new Location(parentDir, DATA_DIR);

    if (dir.exists()) {
      // .aisf/.atsf files are attached per-series later; everything else
      // in Data/ is a companion file for all series
      String[] files = dir.list(true);
      for (String f : files) {
        if (!checkSuffix(f, "aisf") && !checkSuffix(f, "atsf")) {
          extraFiles.add(new Location(dir, f).getAbsolutePath());
        }
      }
    }

    // parse the Metakit database
    try {
      ServiceFactory factory = new ServiceFactory();
      MetakitService reader = factory.getInstance(MetakitService.class);
      reader.initialize(id);
      sampleTable = reader.getTableData(1);
      stringTable = reader.getTableData(2);
    }
    catch (DependencyException e) {
      throw new MissingLibraryException("Could not find Metakit library", e);
    }

    // find the rows that represent image stacks
    ArrayList<String> stackNames = new ArrayList<String>();
    ArrayList<Integer> parentIDs = new ArrayList<Integer>();

    for (int i=0; i<sampleTable.length; i++) {
      Integer stringID = (Integer) sampleTable[i][11];
      String name = getString(stringID);

      int channelIndex = getChildIndex((Integer) sampleTable[i][0], "Channels");

      if (i > 0 && (Integer) sampleTable[i][2] == 1 && (channelIndex >= 0 ||
        (sampleTable[i][14] != null && !sampleTable[i][14].equals(0)) ||
        ((byte[]) sampleTable[i][13]).length > 21))
      {
        if (channelIndex < 0) {
          // no channel children; sanity-check the embedded dimensions
          RandomAccessInputStream s = getStream(i);
          s.seek(0);
          if (s.read() != 'I') {
            s.order(false);
          }
          s.seek(22);
          int x = s.readInt();
          int y = s.readInt();
          int z = s.readInt();
          // FIX: compute the voxel count in long arithmetic; for large
          // volumes the int product overflowed and valid stacks could be
          // rejected (or garbage accepted).
          long voxels = (long) x * y * z;
          if (voxels > 0 && voxels < (s.length() * 3)) {
            stackNames.add(name);
            parentIDs.add((Integer) sampleTable[i][0]);
          }
          s.close();
        }
        else {
          stackNames.add(name);
          parentIDs.add((Integer) sampleTable[i][0]);
        }
      }
    }

    core = new CoreMetadata[parentIDs.size()];
    String[][] channelNames = new String[core.length][];
    Double[] physicalX = new Double[core.length];
    Double[] physicalY = new Double[core.length];
    Double[] physicalZ = new Double[core.length];
    Double[] magnification = new Double[core.length];
    String[] detector = new String[core.length];
    String[] description = new String[core.length];
    pixelsFiles = new String[core.length][];
    timestampFiles = new String[core.length];

    // per-series: locate pixel files and read scalar metadata streams
    for (int i=0; i<parentIDs.size(); i++) {
      core[i] = new CoreMetadata();
      Integer parent = parentIDs.get(i);

      int channelIndex = getChildIndex(parent, "Channels");
      if (channelIndex >= 0) {
        Integer[] channels =
          getAllChildren((Integer) sampleTable[channelIndex][0]);
        core[i].sizeC = channels.length;
        pixelsFiles[i] = new String[core[i].sizeC];

        channelNames[i] = new String[channels.length];

        for (int c=0; c<channels.length; c++) {
          channelNames[i][c] =
            getString((Integer) sampleTable[channels[c]][11]);
          RandomAccessInputStream data = getStream(channels[c]);
          if (data.length() > 22) {
            data.seek(22);
            int stackID = data.readInt();
            Location f = new Location(dir, stackID + ".aisf");
            if (!f.exists()) {
              // stack ID may be stored in the opposite byte order
              f = new Location(dir, DataTools.swap(stackID) + ".aisf");
            }
            pixelsFiles[i][c] = f.getAbsolutePath();
          }
          else {
            Integer child =
              getAllChildren((Integer) sampleTable[channels[c]][0])[0];
            pixelsFiles[i][c] = getFile((Integer) sampleTable[child][0], dir);
          }
          data.close();
        }
      }
      else {
        pixelsFiles[i] = new String[1];
        pixelsFiles[i][0] = getFile(parent, dir);

        if (pixelsFiles[i][0] == null ||
          !new Location(pixelsFiles[i][0]).exists())
        {
          // pixel data is embedded in the database; map it to a virtual file
          int row = -1;
          for (int r=0; r<sampleTable.length; r++) {
            if (sampleTable[r][0].equals(parent)) {
              row = r;
              break;
            }
          }
          pixelsFiles[i][0] = EMBEDDED_STREAM;
          IRandomAccess data =
            new ByteArrayHandle((byte[]) sampleTable[row][13]);
          Location.mapFile(pixelsFiles[i][0], data);
        }
      }

      RandomAccessInputStream data = null;

      int timestampIndex = getChildIndex(parent, "Timepoint times stream");
      if (timestampIndex >= 0) {
        data = getStream(timestampIndex);
        data.seek(22);
        int timestampID = data.readInt();
        Location f = new Location(dir, timestampID + ".atsf");
        if (!f.exists()) {
          f = new Location(dir, DataTools.swap(timestampID) + ".atsf");
        }
        timestampFiles[i] = f.getAbsolutePath();
        data.close();
      }

      int xIndex = getChildIndex(parent, "um/pixel (X)");
      if (xIndex >= 0) {
        data = getStream(xIndex);
        data.seek(SIGNATURE_SIZE);
        physicalX[i] = data.readDouble();
        data.close();
      }

      int yIndex = getChildIndex(parent, "um/pixel (Y)");
      if (yIndex >= 0) {
        data = getStream(yIndex);
        data.seek(SIGNATURE_SIZE);
        physicalY[i] = data.readDouble();
        data.close();
      }

      int zIndex = getChildIndex(parent, "um/pixel (Z)");
      if (zIndex >= 0) {
        data = getStream(zIndex);
        data.seek(SIGNATURE_SIZE);
        physicalZ[i] = data.readDouble();
        data.close();
      }

      int objectiveIndex = getChildIndex(parent, "Microscope Objective");
      if (objectiveIndex >= 0) {
        data = getStream(objectiveIndex);
        data.seek(SIGNATURE_SIZE);
        magnification[i] = data.readDouble();
        data.close();
      }

      int detectorIndex = getChildIndex(parent, "Camera/Detector");
      if (detectorIndex >= 0) {
        data = getStream(detectorIndex);
        data.seek(SIGNATURE_SIZE);
        int len = data.readInt();
        detector[i] = data.readString(len);
        data.close();
      }

      int descriptionIndex = getChildIndex(parent, "Experiment Description");
      if (descriptionIndex >= 0) {
        data = getStream(descriptionIndex);
        data.seek(SIGNATURE_SIZE);
        int len = data.readInt();
        description[i] = data.readString(len);
        data.close();
      }
    }

    planePadding = new int[core.length];
    blockSize = new int[core.length];
    clippingData = new boolean[core.length];
    // (removed an unused "double[][][] stamps" local declared here)

    // per-series: read pixel file headers to fill in core dimensions
    for (int i=0; i<core.length; i++) {
      setSeries(i);

      core[i].littleEndian = true;

      if (timestampFiles[i] != null) {
        RandomAccessInputStream s =
          new RandomAccessInputStream(timestampFiles[i]);
        s.seek(0);
        if (s.read() != 'I') {
          core[i].littleEndian = false;
        }
        s.seek(17);
        s.order(isLittleEndian());
        core[i].sizeT = s.readInt();
        s.close();
      }
      else {
        core[i].sizeT = 1;
      }

      core[i].rgb = false;
      core[i].interleaved = true;
      core[i].dimensionOrder = "XYCZT";

      RandomAccessInputStream s =
        new RandomAccessInputStream(pixelsFiles[i][0]);
      s.order(isLittleEndian());
      if (checkSuffix(pixelsFiles[i][0], "aisf")) {
        s.seek(18);
        blockSize[i] = s.readShort() * 256;
        s.skipBytes(5);
        int x = s.readInt();
        int y = s.readInt();
        int zStart = s.readInt();
        int w = s.readInt();
        int h = s.readInt();

        // nonsensical extents mean we guessed the byte order wrong;
        // flip endianness and re-read the five values
        if (w - x < 0 || h - y < 0 || (w - x) * (h - y) < 0) {
          core[i].littleEndian = !isLittleEndian();
          s.order(isLittleEndian());
          s.seek(s.getFilePointer() - 20);
          x = s.readInt();
          y = s.readInt();
          zStart = s.readInt();
          w = s.readInt();
          h = s.readInt();
        }

        core[i].sizeX = w - x;
        core[i].sizeY = h - y;
        core[i].sizeZ = s.readInt() - zStart;
        core[i].imageCount = getSizeZ() * getSizeC() * getSizeT();
        int planesPerFile = getSizeZ() * getSizeT();
        int planeSize = FormatTools.getPlaneSize(this);
        int bytesPerPlane = (int) ((s.length() - blockSize[i]) / planesPerFile);

        // derive bytes per pixel from the stored plane size
        int bytesPerPixel = 0;
        while (bytesPerPlane >= planeSize) {
          bytesPerPixel++;
          bytesPerPlane -= planeSize;
        }

        if ((bytesPerPixel % 3) == 0) {
          core[i].sizeC *= 3;
          core[i].rgb = true;
          bytesPerPixel /= 3;
        }
        core[i].pixelType =
          FormatTools.pixelTypeFromBytes(bytesPerPixel, false, false);

        // full timepoints are padded to have a multiple of 256 bytes
        int timepoint = FormatTools.getPlaneSize(this) * getSizeZ();
        planePadding[i] = blockSize[i] - (timepoint % blockSize[i]);
        if (planePadding[i] == blockSize[i]) {
          planePadding[i] = 0;
        }
      }
      else {
        boolean embedded = Location.getMappedFile(EMBEDDED_STREAM) != null;

        s.seek(0);
        if (s.read() != 'I') {
          core[i].littleEndian = false;
          s.order(false);
        }
        s.seek(22);
        core[i].sizeX = s.readInt();
        core[i].sizeY = s.readInt();
        core[i].sizeZ = s.readInt();
        core[i].sizeC = embedded ? 1 : 4;
        core[i].imageCount = getSizeZ() * getSizeT();
        core[i].rgb = core[i].sizeC > 1;
        core[i].pixelType = FormatTools.UINT8;
        blockSize[i] = embedded ? (int) s.getFilePointer() : 99;
        planePadding[i] = 0;

        // FIX: both size heuristics below previously multiplied the
        // dimensions in int arithmetic before comparing against the long
        // file length; large volumes overflowed and misclassified the
        // pixel type.
        long voxels = (long) core[i].sizeX * core[i].sizeY * core[i].sizeZ;

        if (s.length() > voxels * 6) {
          core[i].pixelType = FormatTools.UINT16;
          core[i].sizeC = 3;
          core[i].rgb = true;
        }
        if (s.length() < voxels * core[i].sizeC) {
          // file is smaller than an uncompressed volume, so the data
          // must be compressed; treat it as single-channel LZO blocks
          core[i].rgb = false;
          core[i].sizeC = 1;
          long pixels = voxels;
          double approximateBytes = (double) s.length() / pixels;
          int bytes = (int) Math.round(approximateBytes);
          if (bytes == 0) {
            bytes = 1;
          }
          core[i].pixelType =
            FormatTools.pixelTypeFromBytes(bytes, false, false);
          s.seek(70);
          blockSize[i] = s.readInt();
          clippingData[i] = true;
        }
      }
      s.close();
    }
    setSeries(0);

    // populate the original-metadata table
    for (int i=0; i<getSeriesCount(); i++) {
      setSeries(i);
      addSeriesMeta("Name", stackNames.get(i));
      addSeriesMeta("Pixel width (in microns)", physicalX[i]);
      addSeriesMeta("Pixel height (in microns)", physicalY[i]);
      addSeriesMeta("Z step (in microns)", physicalZ[i]);
      addSeriesMeta("Objective magnification", magnification[i]);
      addSeriesMeta("Camera/Detector", detector[i]);
      addSeriesMeta("Description", description[i]);

      if (channelNames[i] != null) {
        for (int c=0; c<channelNames[i].length; c++) {
          addSeriesMeta("Channel #" + (c + 1), channelNames[i][c]);
        }
      }
    }
    setSeries(0);

    // populate the OME metadata store
    MetadataStore store = makeFilterMetadata();
    MetadataTools.populatePixels(store, this);

    String instrument = MetadataTools.createLSID("Instrument", 0);
    store.setInstrumentID(instrument, 0);

    for (int i=0; i<getSeriesCount(); i++) {
      store.setImageInstrumentRef(instrument, i);
      setSeries(i);
      store.setImageName(stackNames.get(i), i);
      store.setImageDescription(description[i], i);
      if (channelNames[i] != null) {
        for (int c=0; c<getEffectiveSizeC(); c++) {
          store.setChannelName(channelNames[i][c], i, c);
        }
      }
      // physical sizes must be positive to be valid OME-XML
      if (physicalX[i] != null && physicalX[i] > 0) {
        store.setPixelsPhysicalSizeX(new PositiveFloat(physicalX[i]), i);
      }
      if (physicalY[i] != null && physicalY[i] > 0) {
        store.setPixelsPhysicalSizeY(new PositiveFloat(physicalY[i]), i);
      }
      if (physicalZ[i] != null && physicalZ[i] > 0) {
        store.setPixelsPhysicalSizeZ(new PositiveFloat(physicalZ[i]), i);
      }

      String objective = MetadataTools.createLSID("Objective", 0, i);
      store.setObjectiveID(objective, 0, i);
      if (magnification[i] != null) {
        store.setObjectiveNominalMagnification(
          new PositiveInteger(magnification[i].intValue()), 0, i);
      }
      store.setObjectiveCorrection(getCorrection("Other"), 0, i);
      store.setObjectiveImmersion(getImmersion("Other"), 0, i);
      store.setImageObjectiveSettingsID(objective, i);

      String detectorID = MetadataTools.createLSID("Detector", 0, i);
      store.setDetectorID(detectorID, 0, i);
      store.setDetectorModel(detector[i], 0, i);
      for (int c=0; c<getEffectiveSizeC(); c++) {
        store.setDetectorSettingsID(detectorID, i, c);
      }
    }
    setSeries(0);
  }

  // -- Helper methods --

  /** Looks up a string by ID in the Metakit string table. */
  private String getString(Integer stringID) {
    for (int row=0; row<stringTable.length; row++) {
      if (stringID.equals(stringTable[row][0])) {
        String s = (String) stringTable[row][1];
        if (s != null) {
          s = s.trim();
        }
        return s;
      }
    }
    return null;
  }

  /**
   * Returns the sample-table row index of the child of the given parent
   * whose name matches childName, or -1 if there is no such child.
   */
  private int getChildIndex(Integer parentID, String childName) {
    for (int row=0; row<sampleTable.length; row++) {
      if (parentID.equals(sampleTable[row][1])) {
        String name = getString((Integer) sampleTable[row][11]);
        if (childName.equals(name)) {
          return row;
        }
      }
    }
    return -1;
  }

  /** Returns the row indexes of every child of the given parent. */
  private Integer[] getAllChildren(Integer parentID) {
    ArrayList<Integer> children = new ArrayList<Integer>();
    for (int row=0; row<sampleTable.length; row++) {
      if (parentID.equals(sampleTable[row][1])) {
        children.add(row);
      }
    }
    return children.toArray(new Integer[children.size()]);
  }

  /**
   * Opens a little-endian stream over the data for the given row: either
   * the in-database byte array (column 13) or the external .dat file
   * referenced by column 14.
   */
  private RandomAccessInputStream getStream(int row) throws IOException {
    Object o = sampleTable[row][14];
    String fileLink = o == null ? "0" : o.toString().trim();
    RandomAccessInputStream data = null;
    if (fileLink.equals("0")) {
      data = new RandomAccessInputStream((byte[]) sampleTable[row][13]);
    }
    else {
      fileLink = new Location(dir, fileLink + ".dat").getAbsolutePath();
      data = new RandomAccessInputStream(fileLink);
    }
    data.order(true);
    return data;
  }

  /**
   * Returns the absolute path of the .dat file linked from the row with
   * the given ID, or null if the row has no file link.
   */
  private String getFile(Integer parent, Location dir) {
    for (int row=0; row<sampleTable.length; row++) {
      if (parent.equals(sampleTable[row][0])) {
        Object o = sampleTable[row][14];
        if (o != null) {
          String fileLink = o.toString().trim() + ".dat";
          return new Location(dir, fileLink).getAbsolutePath();
        }
      }
    }
    return null;
  }
}
package loci.formats.in; import java.io.ByteArrayInputStream; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.Stack; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import loci.common.Constants; import loci.common.DateTools; import loci.common.Location; import loci.common.RandomAccessInputStream; import loci.common.Region; import loci.common.xml.XMLTools; import loci.formats.CoreMetadata; import loci.formats.FormatException; import loci.formats.FormatReader; import loci.formats.FormatTools; import loci.formats.MetadataTools; import loci.formats.codec.CodecOptions; import loci.formats.codec.JPEGCodec; import loci.formats.codec.LZWCodec; import loci.formats.meta.MetadataStore; import ome.xml.model.primitives.Color; import ome.xml.model.primitives.NonNegativeInteger; import ome.xml.model.primitives.PercentFraction; import ome.xml.model.primitives.PositiveFloat; import ome.xml.model.primitives.PositiveInteger; import ome.xml.model.primitives.Timestamp; import org.xml.sax.SAXException; import org.w3c.dom.Attr; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.NamedNodeMap; import org.w3c.dom.Node; import org.w3c.dom.NodeList; public class ZeissCZIReader extends FormatReader { // -- Constants -- private static final int ALIGNMENT = 32; private static final int HEADER_SIZE = 32; private static final String CZI_MAGIC_STRING = "ZISRAWFILE"; /** Compression constants. */ private static final int UNCOMPRESSED = 0; private static final int JPEG = 1; private static final int LZW = 2; /** Pixel type constants. 
*/ private static final int GRAY8 = 0; private static final int GRAY16 = 1; private static final int GRAY_FLOAT = 2; private static final int BGR_24 = 3; private static final int BGR_48 = 4; private static final int BGR_FLOAT = 8; private static final int BGRA_8 = 9; private static final int COMPLEX = 10; private static final int COMPLEX_FLOAT = 11; private static final int GRAY32 = 12; private static final int GRAY_DOUBLE = 13; // -- Fields -- private MetadataStore store; private ArrayList<SubBlock> planes; private int rotations = 1; private int positions = 1; private int illuminations = 1; private int acquisitions = 1; private int mosaics = 1; private int phases = 1; private String acquiredDate; private String userDisplayName, userName; private String userFirstName, userLastName, userMiddleName; private String userEmail; private String userInstitution; private String temperature, airPressure, humidity, co2Percent; private String correctionCollar, medium, refractiveIndex; private String zoom; private String gain; private ArrayList<String> emissionWavelengths = new ArrayList<String>(); private ArrayList<String> excitationWavelengths = new ArrayList<String>(); private ArrayList<String> pinholeSizes = new ArrayList<String>(); private ArrayList<String> channelNames = new ArrayList<String>(); private ArrayList<String> channelColors = new ArrayList<String>(); private ArrayList<String> binnings = new ArrayList<String>(); private ArrayList<String> detectorRefs = new ArrayList<String>(); private ArrayList<String> objectiveIDs = new ArrayList<String>(); private Double[] positionsX; private Double[] positionsY; private Double[] positionsZ; private int previousChannel = 0; private Boolean prestitched = null; // -- Constructor -- /** Constructs a new Zeiss .czi reader. 
*/ public ZeissCZIReader() { super("Zeiss CZI", "czi"); domains = new String[] {FormatTools.LM_DOMAIN}; suffixSufficient = true; suffixNecessary = false; } // -- IFormatReader API methods -- /** * @see loci.formats.IFormatReader#isThisType(RandomAccessInputStream) */ public boolean isThisType(RandomAccessInputStream stream) throws IOException { final int blockLen = 10; if (!FormatTools.validStream(stream, blockLen, true)) return false; String check = stream.readString(blockLen); return check.equals(CZI_MAGIC_STRING); } /* @see loci.formats.IFormatReader#get8BitLookupTable() */ public byte[][] get8BitLookupTable() throws FormatException, IOException { if ((getPixelType() != FormatTools.INT8 && getPixelType() != FormatTools.UINT8) || previousChannel == -1 || previousChannel >= channelColors.size()) { return null; } byte[][] lut = new byte[3][256]; String color = channelColors.get(previousChannel); if (color != null) { color = color.replaceAll(" try { int colorValue = Integer.parseInt(color, 16); int redMax = (colorValue & 0xff0000) >> 16; int greenMax = (colorValue & 0xff00) >> 8; int blueMax = colorValue & 0xff; for (int i=0; i<lut[0].length; i++) { lut[0][i] = (byte) (redMax * (i / 255.0)); lut[1][i] = (byte) (greenMax * (i / 255.0)); lut[2][i] = (byte) (blueMax * (i / 255.0)); } return lut; } catch (NumberFormatException e) { return null; } } else return null; } /* @see loci.formats.IFormatReader#get16BitLookupTable() */ public short[][] get16BitLookupTable() throws FormatException, IOException { if ((getPixelType() != FormatTools.INT16 && getPixelType() != FormatTools.UINT16) || previousChannel == -1 || previousChannel >= channelColors.size()) { return null; } short[][] lut = new short[3][65536]; String color = channelColors.get(previousChannel); if (color != null) { color = color.replaceAll(" try { int colorValue = Integer.parseInt(color, 16); int redMax = (colorValue & 0xff0000) >> 16; int greenMax = (colorValue & 0xff00) >> 8; int blueMax = colorValue & 0xff; 
redMax = (int) (65535 * (redMax / 255.0)); greenMax = (int) (65535 * (greenMax / 255.0)); blueMax = (int) (65535 * (blueMax / 255.0)); for (int i=0; i<lut[0].length; i++) { lut[0][i] = (short) ((int) (redMax * (i / 65535.0)) & 0xffff); lut[1][i] = (short) ((int) (greenMax * (i / 65535.0)) & 0xffff); lut[2][i] = (short) ((int) (blueMax * (i / 65535.0)) & 0xffff); } return lut; } catch (NumberFormatException e) { return null; } } else return null; } /** * @see loci.formats.IFormatReader#openBytes(int, byte[], int, int, int, int) */ public byte[] openBytes(int no, byte[] buf, int x, int y, int w, int h) throws FormatException, IOException { FormatTools.checkPlaneParameters(this, no, buf.length, x, y, w, h); previousChannel = getZCTCoords(no)[1]; int currentSeries = getSeries(); Region image = new Region(x, y, w, h); int currentX = 0; int currentY = 0; int pixel = getRGBChannelCount() * FormatTools.getBytesPerPixel(getPixelType()); int outputRowLen = w * pixel; int outputRow = h, outputCol = 0; for (SubBlock plane : planes) { if (plane.seriesIndex == currentSeries && plane.planeIndex == no) { byte[] rawData = plane.readPixelData(); if (prestitched != null && prestitched) { int realX = plane.x; int realY = plane.y; Region tile = new Region(currentX, getSizeY() - currentY - realY, realX, realY); if (tile.intersects(image)) { Region intersection = tile.intersection(image); int intersectionX = 0; if (tile.x < image.x) { intersectionX = image.x - tile.x; } int rowLen = pixel * (int) Math.min(intersection.width, realX); int outputOffset = (outputRow - intersection.height) * outputRowLen + outputCol; for (int trow=0; trow<intersection.height; trow++) { int realRow = trow + intersection.y - tile.y; int inputOffset = pixel * (realRow * realX + intersectionX); System.arraycopy( rawData, inputOffset, buf, outputOffset, rowLen); outputOffset += outputRowLen; } outputCol += rowLen; if (outputCol >= w * pixel) { outputCol = 0; outputRow -= intersection.height; } } currentX += realX; 
if (currentX >= getSizeX()) { currentX = 0; currentY += realY; } } else { RandomAccessInputStream s = new RandomAccessInputStream(rawData); readPlane(s, x, y, w, h, buf); s.close(); break; } } } return buf; } /* @see loci.formats.IFormatReader#close(boolean) */ public void close(boolean fileOnly) throws IOException { super.close(fileOnly); if (!fileOnly) { planes = null; rotations = 1; positions = 1; illuminations = 1; acquisitions = 1; mosaics = 1; phases = 1; store = null; acquiredDate = null; userDisplayName = null; userName = null; userFirstName = null; userLastName = null; userMiddleName = null; userEmail = null; userInstitution = null; temperature = null; airPressure = null; humidity = null; co2Percent = null; correctionCollar = null; medium = null; refractiveIndex = null; positionsX = null; positionsY = null; positionsZ = null; zoom = null; gain = null; emissionWavelengths.clear(); excitationWavelengths.clear(); pinholeSizes.clear(); channelNames.clear(); channelColors.clear(); binnings.clear(); detectorRefs.clear(); objectiveIDs.clear(); previousChannel = 0; prestitched = null; } } // -- Internal FormatReader API methods -- /* @see loci.formats.FormatReader#initFile(String) */ protected void initFile(String id) throws FormatException, IOException { super.initFile(id); in = new RandomAccessInputStream(id); core[0].littleEndian = true; in.order(isLittleEndian()); ArrayList<Segment> segments = new ArrayList<Segment>(); planes = new ArrayList<SubBlock>(); while (in.getFilePointer() < in.length()) { Segment segment = readSegment(); segments.add(segment); if (segment instanceof SubBlock) { planes.add((SubBlock) segment); } } calculateDimensions(); convertPixelType(planes.get(0).directoryEntry.pixelType); // remove any invalid SubBlocks int bpp = FormatTools.getBytesPerPixel(getPixelType()); for (int i=0; i<planes.size(); i++) { int planeSize = planes.get(i).x * planes.get(i).y * bpp; byte[] pixels = planes.get(i).readPixelData(); if (pixels.length < planeSize || 
planeSize < 0) { planes.remove(i); i } } if (getSizeZ() == 0) { core[0].sizeZ = 1; } if (getSizeC() == 0) { core[0].sizeC = 1; } if (getSizeT() == 0) { core[0].sizeT = 1; } // finish populating the core metadata int seriesCount = rotations * positions * illuminations * acquisitions * mosaics * phases; core[0].imageCount = getSizeZ() * (isRGB() ? 1 : getSizeC()) * getSizeT(); if (mosaics == seriesCount && seriesCount == (planes.size() / getImageCount()) && prestitched != null && prestitched) { prestitched = false; core[0].sizeX = planes.get(planes.size() - 1).x; core[0].sizeY = planes.get(planes.size() - 1).y; } if (seriesCount > 1) { CoreMetadata firstSeries = core[0]; core = new CoreMetadata[seriesCount]; for (int i=0; i<seriesCount; i++) { core[i] = firstSeries; } } core[0].dimensionOrder = "XYCZT"; assignPlaneIndices(); // populate the OME metadata store = makeFilterMetadata(); MetadataTools.populatePixels(store, this, true); for (Segment segment : segments) { if (segment instanceof Metadata) { String xml = ((Metadata) segment).xml; xml = XMLTools.sanitizeXML(xml); translateMetadata(xml); } } if (channelColors.size() > 0) { for (int i=0; i<seriesCount; i++) { core[i].indexed = true; } } String experimenterID = MetadataTools.createLSID("Experimenter", 0); store.setExperimenterID(experimenterID, 0); store.setExperimenterEmail(userEmail, 0); store.setExperimenterFirstName(userFirstName, 0); store.setExperimenterInstitution(userInstitution, 0); store.setExperimenterLastName(userLastName, 0); store.setExperimenterMiddleName(userMiddleName, 0); store.setExperimenterUserName(userName, 0); String name = new Location(getCurrentFile()).getName(); for (int i=0; i<getSeriesCount(); i++) { if (acquiredDate != null) { store.setImageAcquisitionDate(new Timestamp(acquiredDate), i); } if (experimenterID != null) { store.setImageExperimenterRef(experimenterID, i); } store.setImageName(name + " #" + (i + 1), i); if (airPressure != null) { store.setImagingEnvironmentAirPressure(new 
Double(airPressure), i);
      }
      // per-series imaging environment values parsed from <Environment>
      if (co2Percent != null) {
        store.setImagingEnvironmentCO2Percent(
          PercentFraction.valueOf(co2Percent), i);
      }
      if (humidity != null) {
        store.setImagingEnvironmentHumidity(
          PercentFraction.valueOf(humidity), i);
      }
      if (temperature != null) {
        store.setImagingEnvironmentTemperature(new Double(temperature), i);
      }

      // objective settings apply to the first objective only
      if (objectiveIDs.size() > 0) {
        store.setObjectiveSettingsID(objectiveIDs.get(0), i);
        if (correctionCollar != null) {
          store.setObjectiveSettingsCorrectionCollar(
            new Double(correctionCollar), i);
        }
        store.setObjectiveSettingsMedium(getMedium(medium), i);
        if (refractiveIndex != null) {
          store.setObjectiveSettingsRefractiveIndex(
            new Double(refractiveIndex), i);
        }
      }

      // acquisition start time in seconds; falls back to the first plane's
      // timestamp below if the date was not present in the XML
      Double startTime = null;
      if (acquiredDate != null) {
        startTime = DateTools.getTime(
          acquiredDate, DateTools.ISO8601_FORMAT) / 1000d;
      }

      for (int plane=0; plane<getImageCount(); plane++) {
        for (SubBlock p : planes) {
          if (p.seriesIndex == i && p.planeIndex == plane) {
            if (startTime == null) {
              startTime = p.timestamp;
            }

            // prefer the per-plane stage position; otherwise fall back to
            // the per-series positions parsed from the experiment block
            if (p.stageX != null) {
              store.setPlanePositionX(p.stageX, i, plane);
            }
            else if (positionsX != null && i < positionsX.length) {
              store.setPlanePositionX(positionsX[i], i, plane);
            }
            if (p.stageY != null) {
              store.setPlanePositionY(p.stageY, i, plane);
            }
            else if (positionsY != null && i < positionsY.length) {
              store.setPlanePositionY(positionsY[i], i, plane);
            }
            if (p.stageZ != null) {
              store.setPlanePositionZ(p.stageZ, i, plane);
            }
            else if (positionsZ != null && i < positionsZ.length) {
              store.setPlanePositionZ(positionsZ[i], i, plane);
            }

            if (p.timestamp != null) {
              store.setPlaneDeltaT(p.timestamp - startTime, i, plane);
            }
            if (p.exposureTime != null) {
              store.setPlaneExposureTime(p.exposureTime, i, plane);
            }
          }
        }
      }

      for (int c=0; c<getEffectiveSizeC(); c++) {
        if (c < channelNames.size()) {
          store.setChannelName(channelNames.get(c), i, c);
        }
        if (c < channelColors.size()) {
          String color = channelColors.get(c);
          if (color != null) {
            // FIX: statement was mangled to 'color.replaceAll(" try {'
            // (unterminated string literal — would not compile). The value
            // is parsed as hex on the next line, so strip the leading '#'.
            color = color.replaceAll("#", "");
            try {
store.setChannelColor( new Color((Integer.parseInt(color, 16) << 8) | 0xff), i, c); } catch (NumberFormatException e) { } } } if (c < emissionWavelengths.size()) { String emWave = emissionWavelengths.get(c); if (emWave != null) { Double wave = new Double(emWave); if (wave.intValue() > 0) { store.setChannelEmissionWavelength( new PositiveInteger(wave.intValue()), i, c); } else { LOGGER.warn( "Expected positive value for EmissionWavelength; got {}", wave); } } } if (c < excitationWavelengths.size()) { String exWave = excitationWavelengths.get(c); if (exWave != null) { Double wave = new Double(exWave); if (wave.intValue() > 0) { store.setChannelExcitationWavelength( new PositiveInteger(wave.intValue()), i, c); } else { LOGGER.warn( "Expected positive value for ExcitationWavelength; got {}", wave); } } } if (c < pinholeSizes.size() && pinholeSizes.get(c) != null) { store.setChannelPinholeSize(new Double(pinholeSizes.get(c)), i, c); } if (c < detectorRefs.size()) { String detector = detectorRefs.get(c); store.setDetectorSettingsID(detector, i, c); if (c < binnings.size()) { store.setDetectorSettingsBinning(getBinning(binnings.get(c)), i, c); } } } } } // -- Helper methods -- private void calculateDimensions() { // calculate the dimensions for (SubBlock plane : planes) { for (DimensionEntry dimension : plane.directoryEntry.dimensionEntries) { switch (dimension.dimension.charAt(0)) { case 'X': plane.x = dimension.size; if ((prestitched == null || prestitched) && getSizeX() > 0 && dimension.size != getSizeX()) { prestitched = true; continue; } core[0].sizeX = dimension.size; break; case 'Y': plane.y = dimension.size; if ((prestitched == null || prestitched) && getSizeY() > 0 && dimension.size != getSizeY()) { prestitched = true; continue; } core[0].sizeY = dimension.size; break; case 'C': if (dimension.start >= getSizeC()) { core[0].sizeC = dimension.start + 1; } break; case 'Z': if (dimension.start >= getSizeZ()) { core[0].sizeZ = dimension.start + 1; } break; case 'T': 
if (dimension.start >= getSizeT()) { core[0].sizeT = dimension.start + 1; } break; case 'R': if (dimension.start >= rotations) { rotations = dimension.start + 1; } break; case 'S': if (dimension.start >= positions) { positions = dimension.start + 1; } break; case 'I': if (dimension.start >= illuminations) { illuminations = dimension.start + 1; } break; case 'B': if (dimension.start >= acquisitions) { acquisitions = dimension.start + 1; } break; case 'M': if (dimension.start >= mosaics) { mosaics = dimension.start + 1; } break; case 'H': if (dimension.start >= phases) { phases = dimension.start + 1; } break; } } } } private void assignPlaneIndices() { // assign plane and series indices to each SubBlock int[] extraLengths = {rotations, positions, illuminations, acquisitions, mosaics, phases}; for (SubBlock plane : planes) { int z = 0; int c = 0; int t = 0; int[] extra = new int[6]; for (DimensionEntry dimension : plane.directoryEntry.dimensionEntries) { switch (dimension.dimension.charAt(0)) { case 'C': c = dimension.start; break; case 'Z': z = dimension.start; break; case 'T': t = dimension.start; break; case 'R': extra[0] = dimension.start; break; case 'S': extra[1] = dimension.start; break; case 'I': extra[2] = dimension.start; break; case 'B': extra[3] = dimension.start; break; case 'M': extra[4] = dimension.start; break; case 'H': extra[5] = dimension.start; break; } } plane.planeIndex = getIndex(z, c, t); plane.seriesIndex = FormatTools.positionToRaster(extraLengths, extra); } } private void translateMetadata(String xml) throws FormatException, IOException { Element root = null; try { DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); DocumentBuilder parser = factory.newDocumentBuilder(); ByteArrayInputStream s = new ByteArrayInputStream(xml.getBytes(Constants.ENCODING)); root = parser.parse(s).getDocumentElement(); s.close(); } catch (ParserConfigurationException e) { throw new FormatException(e); } catch (SAXException e) { throw new 
FormatException(e); } if (root == null) { throw new FormatException("Could not parse the XML metadata."); } NodeList children = root.getChildNodes(); Element realRoot = null; for (int i=0; i<children.getLength(); i++) { if (children.item(i) instanceof Element) { realRoot = (Element) children.item(i); break; } } translateExperiment(realRoot); translateInformation(realRoot); translateScaling(realRoot); translateDisplaySettings(realRoot); translateLayers(realRoot); Stack<String> nameStack = new Stack<String>(); HashMap<String, Integer> indexes = new HashMap<String, Integer>(); populateOriginalMetadata(realRoot, nameStack, indexes); } private void translateInformation(Element root) throws FormatException { NodeList informations = root.getElementsByTagName("Information"); if (informations == null || informations.getLength() == 0) { return; } Element information = (Element) informations.item(0); Element image = getFirstNode(information, "Image"); Element user = getFirstNode(information, "User"); Element environment = getFirstNode(information, "Environment"); Element instrument = getFirstNode(information, "Instrument"); if (image != null) { String bitCount = getFirstNodeValue(image, "ComponentBitCount"); if (bitCount != null) { core[0].bitsPerPixel = Integer.parseInt(bitCount); } acquiredDate = getFirstNodeValue(image, "AcquisitionDateAndTime"); Element objectiveSettings = getFirstNode(image, "ObjectiveSettings"); correctionCollar = getFirstNodeValue(objectiveSettings, "CorrectionCollar"); medium = getFirstNodeValue(objectiveSettings, "Medium"); refractiveIndex = getFirstNodeValue(objectiveSettings, "RefractiveIndex"); Element dimensions = getFirstNode(image, "Dimensions"); NodeList channels = getGrandchildren(dimensions, "Channel"); if (channels != null) { for (int i=0; i<channels.getLength(); i++) { Element channel = (Element) channels.item(i); emissionWavelengths.add( getFirstNodeValue(channel, "EmissionWavelength")); excitationWavelengths.add( 
getFirstNodeValue(channel, "ExcitationWavelength")); pinholeSizes.add(getFirstNodeValue(channel, "PinholeSize")); channelNames.add(channel.getAttribute("Name")); Element detectorSettings = getFirstNode(channel, "DetectorSettings"); binnings.add(getFirstNodeValue(detectorSettings, "Binning")); Element scanInfo = getFirstNode(channel, "LaserScanInfo"); if (scanInfo != null) { zoom = getFirstNodeValue(scanInfo, "ZoomX"); } Element detector = getFirstNode(detectorSettings, "Detector"); if (detector != null) { String detectorID = detector.getAttribute("Id"); if (detectorID.indexOf(" ") != -1) { detectorID = detectorID.substring(detectorID.lastIndexOf(" ") + 1); } if (!detectorID.startsWith("Detector:")) { detectorID = "Detector:" + detectorID; } detectorRefs.add(detectorID); } } } } if (user != null) { userDisplayName = getFirstNodeValue(user, "DisplayName"); userFirstName = getFirstNodeValue(user, "FirstName"); userLastName = getFirstNodeValue(user, "LastName"); userMiddleName = getFirstNodeValue(user, "MiddleName"); userEmail = getFirstNodeValue(user, "Email"); userInstitution = getFirstNodeValue(user, "Institution"); userName = getFirstNodeValue(user, "UserName"); } if (environment != null) { temperature = getFirstNodeValue(environment, "Temperature"); airPressure = getFirstNodeValue(environment, "AirPressure"); humidity = getFirstNodeValue(environment, "Humidity"); co2Percent = getFirstNodeValue(environment, "CO2Percent"); } if (instrument != null) { NodeList microscopes = getGrandchildren(instrument, "Microscope"); Element manufacturerNode = null; store.setInstrumentID(MetadataTools.createLSID("Instrument", 0), 0); if (microscopes != null) { Element microscope = (Element) microscopes.item(0); manufacturerNode = getFirstNode(microscope, "Manufacturer"); store.setMicroscopeManufacturer( getFirstNodeValue(manufacturerNode, "Manufacturer"), 0); store.setMicroscopeModel( getFirstNodeValue(manufacturerNode, "Model"), 0); store.setMicroscopeSerialNumber( 
getFirstNodeValue(manufacturerNode, "SerialNumber"), 0); store.setMicroscopeLotNumber( getFirstNodeValue(manufacturerNode, "LotNumber"), 0); store.setMicroscopeType( getMicroscopeType(getFirstNodeValue(microscope, "Type")), 0); } NodeList lightSources = getGrandchildren(instrument, "LightSource"); if (lightSources != null) { for (int i=0; i<lightSources.getLength(); i++) { Element lightSource = (Element) lightSources.item(i); manufacturerNode = getFirstNode(lightSource, "Manufacturer"); String manufacturer = getFirstNodeValue(manufacturerNode, "Manufacturer"); String model = getFirstNodeValue(manufacturerNode, "Model"); String serialNumber = getFirstNodeValue(manufacturerNode, "SerialNumber"); String lotNumber = getFirstNodeValue(manufacturerNode, "LotNumber"); String type = getFirstNodeValue(lightSource, "LightSourceType"); String power = getFirstNodeValue(lightSource, "Power"); if ("Laser".equals(type)) { if (power != null) { store.setLaserPower(new Double(power), 0, i); } store.setLaserLotNumber(lotNumber, 0, i); store.setLaserManufacturer(manufacturer, 0, i); store.setLaserModel(model, 0, i); store.setLaserSerialNumber(serialNumber, 0, i); } else if ("Arc".equals(type)) { if (power != null) { store.setArcPower(new Double(power), 0, i); } store.setArcLotNumber(lotNumber, 0, i); store.setArcManufacturer(manufacturer, 0, i); store.setArcModel(model, 0, i); store.setArcSerialNumber(serialNumber, 0, i); } else if ("LightEmittingDiode".equals(type)) { if (power != null) { store.setLightEmittingDiodePower(new Double(power), 0, i); } store.setLightEmittingDiodeLotNumber(lotNumber, 0, i); store.setLightEmittingDiodeManufacturer(manufacturer, 0, i); store.setLightEmittingDiodeModel(model, 0, i); store.setLightEmittingDiodeSerialNumber(serialNumber, 0, i); } else if ("Filament".equals(type)) { if (power != null) { store.setFilamentPower(new Double(power), 0, i); } store.setFilamentLotNumber(lotNumber, 0, i); store.setFilamentManufacturer(manufacturer, 0, i); 
store.setFilamentModel(model, 0, i); store.setFilamentSerialNumber(serialNumber, 0, i); } } } NodeList detectors = getGrandchildren(instrument, "Detector"); if (detectors != null) { for (int i=0; i<detectors.getLength(); i++) { Element detector = (Element) detectors.item(i); manufacturerNode = getFirstNode(detector, "Manufacturer"); String manufacturer = getFirstNodeValue(manufacturerNode, "Manufacturer"); String model = getFirstNodeValue(manufacturerNode, "Model"); String serialNumber = getFirstNodeValue(manufacturerNode, "SerialNumber"); String lotNumber = getFirstNodeValue(manufacturerNode, "LotNumber"); String detectorID = detector.getAttribute("Id"); if (detectorID.indexOf(" ") != -1) { detectorID = detectorID.substring(detectorID.lastIndexOf(" ") + 1); } if (!detectorID.startsWith("Detector:")) { detectorID = "Detector:" + detectorID; } store.setDetectorID(detectorID, 0, i); store.setDetectorManufacturer(manufacturer, 0, i); store.setDetectorModel(model, 0, i); store.setDetectorSerialNumber(serialNumber, 0, i); store.setDetectorLotNumber(lotNumber, 0, i); if (gain == null) { gain = getFirstNodeValue(detector, "Gain"); } if (gain != null) { store.setDetectorGain(new Double(gain), 0, i); } String offset = getFirstNodeValue(detector, "Offset"); if (offset != null) { store.setDetectorOffset(new Double(offset), 0, i); } if (zoom == null) { zoom = getFirstNodeValue(detector, "Zoom"); } if (zoom != null) { store.setDetectorZoom(new Double(zoom), 0, i); } String ampGain = getFirstNodeValue(detector, "AmplificationGain"); if (ampGain != null) { store.setDetectorAmplificationGain(new Double(ampGain), 0, i); } store.setDetectorType( getDetectorType(getFirstNodeValue(detector, "Type")), 0, i); } } NodeList objectives = getGrandchildren(instrument, "Objective"); if (objectives != null) { for (int i=0; i<objectives.getLength(); i++) { Element objective = (Element) objectives.item(i); manufacturerNode = getFirstNode(objective, "Manufacturer"); String manufacturer = 
getFirstNodeValue(manufacturerNode, "Manufacturer"); String model = getFirstNodeValue(manufacturerNode, "Model"); String serialNumber = getFirstNodeValue(manufacturerNode, "SerialNumber"); String lotNumber = getFirstNodeValue(manufacturerNode, "LotNumber"); objectiveIDs.add(objective.getAttribute("Id")); store.setObjectiveID(objective.getAttribute("Id"), 0, i); store.setObjectiveManufacturer(manufacturer, 0, i); store.setObjectiveModel(model, 0, i); store.setObjectiveSerialNumber(serialNumber, 0, i); store.setObjectiveLotNumber(lotNumber, 0, i); store.setObjectiveCorrection( getCorrection(getFirstNodeValue(objective, "Correction")), 0, i); store.setObjectiveImmersion( getImmersion(getFirstNodeValue(objective, "Immersion")), 0, i); String lensNA = getFirstNodeValue(objective, "LensNA"); if (lensNA != null) { store.setObjectiveLensNA(new Double(lensNA), 0, i); } String magnification = getFirstNodeValue(objective, "NominalMagnification"); Double mag = magnification == null ? 0 : new Double(magnification); if (mag > 0) { store.setObjectiveNominalMagnification( new PositiveInteger(mag.intValue()), 0, i); } else { LOGGER.warn( "Expected positive value for NominalMagnification; got {}", mag); } String calibratedMag = getFirstNodeValue(objective, "CalibratedMagnification"); if (calibratedMag != null) { store.setObjectiveCalibratedMagnification( new Double(calibratedMag), 0, i); } String wd = getFirstNodeValue(objective, "WorkingDistance"); if (wd != null) { store.setObjectiveWorkingDistance(new Double(wd), 0, i); } String iris = getFirstNodeValue(objective, "Iris"); if (iris != null) { store.setObjectiveIris(new Boolean(iris), 0, i); } } } NodeList filterSets = getGrandchildren(instrument, "FilterSet"); if (filterSets != null) { for (int i=0; i<filterSets.getLength(); i++) { Element filterSet = (Element) filterSets.item(i); manufacturerNode = getFirstNode(filterSet, "Manufacturer"); String manufacturer = getFirstNodeValue(manufacturerNode, "Manufacturer"); String model = 
getFirstNodeValue(manufacturerNode, "Model"); String serialNumber = getFirstNodeValue(manufacturerNode, "SerialNumber"); String lotNumber = getFirstNodeValue(manufacturerNode, "LotNumber"); store.setFilterSetID(filterSet.getAttribute("Id"), 0, i); store.setFilterSetManufacturer(manufacturer, 0, i); store.setFilterSetModel(model, 0, i); store.setFilterSetSerialNumber(serialNumber, 0, i); store.setFilterSetLotNumber(lotNumber, 0, i); String dichroicRef = getFirstNodeValue(filterSet, "DichroicRef"); if (dichroicRef != null && dichroicRef.length() > 0) { store.setFilterSetDichroicRef(dichroicRef, 0, i); } NodeList excitations = getGrandchildren( filterSet, "ExcitationFilters", "ExcitationFilterRef"); NodeList emissions = getGrandchildren(filterSet, "EmissionFilters", "EmissionFilterRef"); if (excitations != null) { for (int ex=0; ex<excitations.getLength(); ex++) { String ref = excitations.item(ex).getTextContent(); if (ref != null && ref.length() > 0) { store.setFilterSetExcitationFilterRef(ref, 0, i, ex); } } } if (emissions != null) { for (int em=0; em<emissions.getLength(); em++) { String ref = emissions.item(em).getTextContent(); if (ref != null && ref.length() > 0) { store.setFilterSetEmissionFilterRef(ref, 0, i, em); } } } } } NodeList filters = getGrandchildren(instrument, "Filter"); if (filters != null) { for (int i=0; i<filters.getLength(); i++) { Element filter = (Element) filters.item(i); manufacturerNode = getFirstNode(filter, "Manufacturer"); String manufacturer = getFirstNodeValue(manufacturerNode, "Manufacturer"); String model = getFirstNodeValue(manufacturerNode, "Model"); String serialNumber = getFirstNodeValue(manufacturerNode, "SerialNumber"); String lotNumber = getFirstNodeValue(manufacturerNode, "LotNumber"); store.setFilterID(filter.getAttribute("Id"), 0, i); store.setFilterManufacturer(manufacturer, 0, i); store.setFilterModel(model, 0, i); store.setFilterSerialNumber(serialNumber, 0, i); store.setFilterLotNumber(lotNumber, 0, i); 
store.setFilterType( getFilterType(getFirstNodeValue(filter, "Type")), 0, i); store.setFilterFilterWheel( getFirstNodeValue(filter, "FilterWheel"), 0, i); Element transmittance = getFirstNode(filter, "TransmittanceRange"); String cutIn = getFirstNodeValue(transmittance, "CutIn"); String cutOut = getFirstNodeValue(transmittance, "CutOut"); Integer inWave = cutIn == null ? 0 : new Integer(cutIn); Integer outWave = cutOut == null ? 0 : new Integer(cutOut); if (inWave > 0) { store.setTransmittanceRangeCutIn(new PositiveInteger(inWave), 0, i); } else { LOGGER.warn("Expected positive value for CutIn; got {}", inWave); } if (outWave > 0) { store.setTransmittanceRangeCutOut( new PositiveInteger(outWave), 0, i); } else { LOGGER.warn("Expected positive value for CutOut; got {}", outWave); } String inTolerance = getFirstNodeValue(transmittance, "CutInTolerance"); String outTolerance = getFirstNodeValue(transmittance, "CutOutTolerance"); if (inTolerance != null) { Integer cutInTolerance = new Integer(inTolerance); store.setTransmittanceRangeCutInTolerance( new NonNegativeInteger(cutInTolerance), 0, i); } if (outTolerance != null) { Integer cutOutTolerance = new Integer(outTolerance); store.setTransmittanceRangeCutOutTolerance( new NonNegativeInteger(cutOutTolerance), 0, i); } String transmittancePercent = getFirstNodeValue(transmittance, "Transmittance"); if (transmittancePercent != null) { store.setTransmittanceRangeTransmittance( PercentFraction.valueOf(transmittancePercent), 0, i); } } } NodeList dichroics = getGrandchildren(instrument, "Dichroic"); if (dichroics != null) { for (int i=0; i<dichroics.getLength(); i++) { Element dichroic = (Element) dichroics.item(i); manufacturerNode = getFirstNode(dichroic, "Manufacturer"); String manufacturer = getFirstNodeValue(manufacturerNode, "Manufacturer"); String model = getFirstNodeValue(manufacturerNode, "Model"); String serialNumber = getFirstNodeValue(manufacturerNode, "SerialNumber"); String lotNumber = 
getFirstNodeValue(manufacturerNode, "LotNumber"); store.setDichroicID(dichroic.getAttribute("Id"), 0, i); store.setDichroicManufacturer(manufacturer, 0, i); store.setDichroicModel(model, 0, i); store.setDichroicSerialNumber(serialNumber, 0, i); store.setDichroicLotNumber(lotNumber, 0, i); } } } } private void translateScaling(Element root) { NodeList scalings = root.getElementsByTagName("Scaling"); if (scalings == null || scalings.getLength() == 0) { return; } Element scaling = (Element) scalings.item(0); NodeList distances = getGrandchildren(scaling, "Items", "Distance"); if (distances != null) { for (int i=0; i<distances.getLength(); i++) { Element distance = (Element) distances.item(i); String id = distance.getAttribute("Id"); String originalValue = getFirstNodeValue(distance, "Value"); if (originalValue == null) { continue; } Double value = new Double(originalValue) * 1000000; if (value > 0) { PositiveFloat size = new PositiveFloat(value); if (id.equals("X")) { for (int series=0; series<getSeriesCount(); series++) { store.setPixelsPhysicalSizeX(size, series); } } else if (id.equals("Y")) { for (int series=0; series<getSeriesCount(); series++) { store.setPixelsPhysicalSizeY(size, series); } } else if (id.equals("Z")) { for (int series=0; series<getSeriesCount(); series++) { store.setPixelsPhysicalSizeZ(size, series); } } } else { LOGGER.warn( "Expected positive value for PhysicalSize; got {}", value); } } } } private void translateDisplaySettings(Element root) { NodeList displaySettings = root.getElementsByTagName("DisplaySetting"); if (displaySettings == null || displaySettings.getLength() == 0) { return; } Element displaySetting = (Element) displaySettings.item(0); NodeList channels = getGrandchildren(displaySetting, "Channel"); if (channels != null) { for (int i=0; i<channels.getLength(); i++) { Element channel = (Element) channels.item(i); String color = getFirstNodeValue(channel, "Color"); if (color != null) { channelColors.add(color); } } } } private void 
translateLayers(Element root) { NodeList layerses = root.getElementsByTagName("Layers"); if (layerses == null || layerses.getLength() == 0) { return; } Element layersNode = (Element) layerses.item(0); NodeList layers = layersNode.getElementsByTagName("Layer"); if (layers != null) { for (int i=0; i<layers.getLength(); i++) { Element layer = (Element) layers.item(i); NodeList elementses = layer.getElementsByTagName("Elements"); if (elementses.getLength() == 0) { continue; } NodeList allGrandchildren = elementses.item(0).getChildNodes(); int shape = 0; NodeList lines = getGrandchildren(layer, "Elements", "Line"); shape = populateLines(lines, i, shape); NodeList arrows = getGrandchildren(layer, "Elements", "OpenArrow"); shape = populateLines(arrows, i, shape); NodeList crosses = getGrandchildren(layer, "Elements", "Cross"); for (int s=0; s<crosses.getLength(); s++, shape+=2) { Element cross = (Element) crosses.item(s); Element geometry = getFirstNode(cross, "Geometry"); Element textElements = getFirstNode(cross, "TextElements"); Element attributes = getFirstNode(cross, "Attributes"); store.setLineID( MetadataTools.createLSID("Shape", i, shape), i, shape); store.setLineID( MetadataTools.createLSID("Shape", i, shape + 1), i, shape + 1); String length = getFirstNodeValue(geometry, "Length"); String centerX = getFirstNodeValue(geometry, "CenterX"); String centerY = getFirstNodeValue(geometry, "CenterY"); if (length != null) { Double halfLen = new Double(length) / 2; if (centerX != null) { store.setLineX1(new Double(centerX) - halfLen, i, shape); store.setLineX2(new Double(centerX) + halfLen, i, shape); store.setLineX1(new Double(centerX), i, shape + 1); store.setLineX2(new Double(centerX), i, shape + 1); } if (centerY != null) { store.setLineY1(new Double(centerY), i, shape); store.setLineY2(new Double(centerY), i, shape); store.setLineY1(new Double(centerY) - halfLen, i, shape + 1); store.setLineY2(new Double(centerY) + halfLen, i, shape + 1); } } 
store.setLineText(getFirstNodeValue(textElements, "Text"), i, shape); store.setLineText(getFirstNodeValue(textElements, "Text"), i, shape + 1); } NodeList rectangles = getGrandchildren(layer, "Elements", "Rectangle"); if (rectangles != null) { shape = populateRectangles(rectangles, i, shape); } NodeList ellipses = getGrandchildren(layer, "Elements", "Ellipse"); if (ellipses != null) { for (int s=0; s<ellipses.getLength(); s++, shape++) { Element ellipse = (Element) ellipses.item(s); Element geometry = getFirstNode(ellipse, "Geometry"); Element textElements = getFirstNode(ellipse, "TextElements"); Element attributes = getFirstNode(ellipse, "Attributes"); store.setEllipseID( MetadataTools.createLSID("Shape", i, shape), i, shape); String radiusX = getFirstNodeValue(geometry, "RadiusX"); String radiusY = getFirstNodeValue(geometry, "RadiusY"); String centerX = getFirstNodeValue(geometry, "CenterX"); String centerY = getFirstNodeValue(geometry, "CenterY"); if (radiusX != null) { store.setEllipseRadiusX(new Double(radiusX), i, shape); } if (radiusY != null) { store.setEllipseRadiusY(new Double(radiusY), i, shape); } if (centerX != null) { store.setEllipseX(new Double(centerX), i, shape); } if (centerY != null) { store.setEllipseY(new Double(centerY), i, shape); } store.setEllipseText( getFirstNodeValue(textElements, "Text"), i, shape); } } // translate all of the circle ROIs NodeList circles = getGrandchildren(layer, "Elements", "Circle"); if (circles != null) { shape = populateCircles(circles, i, shape); } NodeList inOutCircles = getGrandchildren(layer, "Elements", "InOutCircle"); if (inOutCircles != null) { shape = populateCircles(inOutCircles, i, shape); } NodeList outInCircles = getGrandchildren(layer, "Elements", "OutInCircle"); if (outInCircles != null) { shape = populateCircles(outInCircles, i, shape); } NodeList pointsCircles = getGrandchildren(layer, "Elements", "PointsCircle"); if (pointsCircles != null) { shape = populateCircles(pointsCircles, i, shape); } 
NodeList polygons = getGrandchildren(layer, "Elements", "Polygon"); if (polygons != null) { shape = populatePolylines(polygons, i, shape, true); } NodeList polylines = getGrandchildren(layer, "Elements", "Polyline"); if (polylines != null) { shape = populatePolylines(polylines, i, shape, false); } NodeList openPolylines = getGrandchildren(layer, "Elements", "OpenPolyline"); if (openPolylines != null) { shape = populatePolylines(openPolylines, i, shape, false); } NodeList closedPolylines = getGrandchildren(layer, "Elements", "ClosedPolyline"); if (closedPolylines != null) { shape = populatePolylines(closedPolylines, i, shape, true); } NodeList rectRoi = getGrandchildren(layer, "Elements", "RectRoi"); if (rectRoi != null) { shape = populateRectangles(rectRoi, i, shape); } NodeList textBoxes = getGrandchildren(layer, "Elements", "TextBox"); if (textBoxes != null) { shape = populateRectangles(textBoxes, i, shape); } NodeList text = getGrandchildren(layer, "Elements", "Text"); if (text != null) { shape = populateRectangles(text, i, shape); } if (shape > 0) { String roiID = MetadataTools.createLSID("ROI", i); store.setROIID(roiID, i); store.setROIName(layer.getAttribute("Name"), i); store.setROIDescription(getFirstNodeValue(layer, "Usage"), i); for (int series=0; series<getSeriesCount(); series++) { store.setImageROIRef(roiID, series, i); } } } } } private int populateRectangles(NodeList rectangles, int roi, int shape) { for (int s=0; s<rectangles.getLength(); s++) { Element rectangle = (Element) rectangles.item(s); Element geometry = getFirstNode(rectangle, "Geometry"); Element textElements = getFirstNode(rectangle, "TextElements"); Element attributes = getFirstNode(rectangle, "Attributes"); String left = getFirstNodeValue(geometry, "Left"); String top = getFirstNodeValue(geometry, "Top"); String width = getFirstNodeValue(geometry, "Width"); String height = getFirstNodeValue(geometry, "Height"); if (left != null && top != null && width != null && height != null) { 
store.setRectangleID( MetadataTools.createLSID("Shape", roi, shape), roi, shape); store.setRectangleX(new Double(left), roi, shape); store.setRectangleY( new Double(getSizeY() - Double.parseDouble(top)), roi, shape); store.setRectangleWidth(new Double(width), roi, shape); store.setRectangleHeight(new Double(height), roi, shape); String name = getFirstNodeValue(attributes, "Name"); String label = getFirstNodeValue(textElements, "Text"); if (label != null) { store.setRectangleText(label, roi, shape); } shape++; } } return shape; } private int populatePolylines(NodeList polylines, int roi, int shape, boolean closed) { for (int s=0; s<polylines.getLength(); s++, shape++) { Element polyline = (Element) polylines.item(s); Element geometry = getFirstNode(polyline, "Geometry"); Element textElements = getFirstNode(polyline, "TextElements"); Element attributes = getFirstNode(polyline, "Attributes"); String shapeID = MetadataTools.createLSID("Shape", roi, shape); if (closed) { store.setPolygonID(shapeID, roi, shape); store.setPolygonPoints( getFirstNodeValue(geometry, "Points"), roi, shape); store.setPolygonText( getFirstNodeValue(textElements, "Text"), roi, shape); } else { store.setPolylineID(shapeID, roi, shape); store.setPolylinePoints( getFirstNodeValue(geometry, "Points"), roi, shape); store.setPolylineText( getFirstNodeValue(textElements, "Text"), roi, shape); } } return shape; } private int populateLines(NodeList lines, int roi, int shape) { for (int s=0; s<lines.getLength(); s++, shape++) { Element line = (Element) lines.item(s); Element geometry = getFirstNode(line, "Geometry"); Element textElements = getFirstNode(line, "TextElements"); Element attributes = getFirstNode(line, "Attributes"); String x1 = getFirstNodeValue(geometry, "X1"); String x2 = getFirstNodeValue(geometry, "X2"); String y1 = getFirstNodeValue(geometry, "Y1"); String y2 = getFirstNodeValue(geometry, "Y2"); store.setLineID( MetadataTools.createLSID("Shape", roi, shape), roi, shape); if (x1 != null) 
{ store.setLineX1(new Double(x1), roi, shape); } if (x2 != null) { store.setLineX2(new Double(x2), roi, shape); } if (y1 != null) { store.setLineY1(new Double(y1), roi, shape); } if (y2 != null) { store.setLineY2(new Double(y2), roi, shape); } store.setLineText(getFirstNodeValue(textElements, "Text"), roi, shape); } return shape; } private int populateCircles(NodeList circles, int roi, int shape) { for (int s=0; s<circles.getLength(); s++, shape++) { Element circle = (Element) circles.item(s); Element geometry = getFirstNode(circle, "Geometry"); Element textElements = getFirstNode(circle, "TextElements"); Element attributes = getFirstNode(circle, "Attributes"); store.setEllipseID( MetadataTools.createLSID("Shape", roi, shape), roi, shape); String radius = getFirstNodeValue(geometry, "Radius"); String centerX = getFirstNodeValue(geometry, "CenterX"); String centerY = getFirstNodeValue(geometry, "CenterY"); if (radius != null) { store.setEllipseRadiusX(new Double(radius), roi, shape); store.setEllipseRadiusY(new Double(radius), roi, shape); } if (centerX != null) { store.setEllipseX(new Double(centerX), roi, shape); } if (centerY != null) { store.setEllipseY(new Double(centerY), roi, shape); } store.setEllipseText(getFirstNodeValue(textElements, "Text"), roi, shape); } return shape; } private void translateExperiment(Element root) { NodeList experiments = root.getElementsByTagName("Experiment"); if (experiments == null || experiments.getLength() == 0) { return; } Element experimentBlock = getFirstNode((Element) experiments.item(0), "ExperimentBlocks"); Element acquisition = getFirstNode(experimentBlock, "AcquisitionBlock"); Element tilesSetup = getFirstNode(acquisition, "TilesSetup"); NodeList groups = getGrandchildren(tilesSetup, "PositionGroup"); positionsX = new Double[core.length]; positionsY = new Double[core.length]; positionsZ = new Double[core.length]; if (groups != null) { for (int i=0; i<groups.getLength(); i++) { Element group = (Element) groups.item(i); 
int tilesX = Integer.parseInt(getFirstNodeValue(group, "TilesX")); int tilesY = Integer.parseInt(getFirstNodeValue(group, "TilesY")); Element position = getFirstNode(group, "Position"); String x = position.getAttribute("X"); String y = position.getAttribute("Y"); String z = position.getAttribute("Z"); Double xPos = x == null ? null : new Double(x); Double yPos = y == null ? null : new Double(y); Double zPos = z == null ? null : new Double(z); for (int tile=0; tile<tilesX * tilesY; tile++) { int index = i * tilesX * tilesY + tile; if (index < positionsX.length) { positionsX[index] = xPos; positionsY[index] = yPos; positionsZ[index] = zPos; } } } } Element multiTrack = getFirstNode(acquisition, "MultiTrackSetup"); if (multiTrack == null) { return; } NodeList detectors = getGrandchildren(multiTrack, "Detector"); if (detectors == null || detectors.getLength() == 0) { return; } Element detector = (Element) detectors.item(0); gain = getFirstNodeValue(detector, "Voltage"); } private Element getFirstNode(Element root, String name) { if (root == null) { return null; } NodeList list = root.getElementsByTagName(name); if (list == null) { return null; } return (Element) list.item(0); } private NodeList getGrandchildren(Element root, String name) { return getGrandchildren(root, name + "s", name); } private NodeList getGrandchildren(Element root, String child, String name) { if (root == null) { return null; } NodeList children = root.getElementsByTagName(child); if (children != null && children.getLength() > 0) { Element childNode = (Element) children.item(0); return childNode.getElementsByTagName(name); } return null; } private String getFirstNodeValue(Element root, String name) { if (root == null) { return null; } NodeList nodes = root.getElementsByTagName(name); if (nodes != null && nodes.getLength() > 0) { return nodes.item(0).getTextContent(); } return null; } private void populateOriginalMetadata(Element root, Stack<String> nameStack, HashMap<String, Integer> indexes) { 
String name = root.getNodeName(); nameStack.push(name); StringBuffer key = new StringBuffer(); for (String k : nameStack) { key.append(k); key.append(" "); } if (root.getChildNodes().getLength() == 1) { String value = root.getTextContent(); if (value != null && key.length() > 0) { Integer i = indexes.get(key.toString()); String storedKey = key.toString() + (i == null ? 0 : i); indexes.put(key.toString(), i == null ? 1 : i + 1); addGlobalMeta(storedKey, value); } } NamedNodeMap attributes = root.getAttributes(); for (int i=0; i<attributes.getLength(); i++) { Node attr = attributes.item(i); String attrName = attr.getNodeName(); String attrValue = attr.getNodeValue(); addGlobalMeta(key + attrName, attrValue); } NodeList children = root.getChildNodes(); if (children != null) { for (int i=0; i<children.getLength(); i++) { Object child = children.item(i); if (child instanceof Element) { populateOriginalMetadata((Element) child, nameStack, indexes); } } } nameStack.pop(); } private Segment readSegment() throws IOException { // align the stream to a multiple of 32 bytes int skip = (ALIGNMENT - (int) (in.getFilePointer() % ALIGNMENT)) % ALIGNMENT; in.skipBytes(skip); long startingPosition = in.getFilePointer(); // instantiate a Segment subclass based upon the segment ID String segmentID = in.readString(16).trim(); Segment segment = null; if (segmentID.equals("ZISRAWFILE")) { segment = new FileHeader(); } else if (segmentID.equals("ZISRAWMETADATA")) { segment = new Metadata(); } else if (segmentID.equals("ZISRAWSUBBLOCK")) { segment = new SubBlock(); } else if (segmentID.equals("ZISRAWATTACH")) { segment = new Attachment(); } else { LOGGER.info("Unknown segment type: " + segmentID); segment = new Segment(); } segment.startingPosition = startingPosition; segment.id = segmentID; segment.fillInData(); in.seek(segment.startingPosition + segment.allocatedSize + HEADER_SIZE); return segment; } private void convertPixelType(int pixelType) throws FormatException { switch (pixelType) 
{ case GRAY8: core[0].pixelType = FormatTools.UINT8; break; case GRAY16: core[0].pixelType = FormatTools.UINT16; break; case GRAY32: core[0].pixelType = FormatTools.UINT32; break; case GRAY_FLOAT: core[0].pixelType = FormatTools.FLOAT; break; case GRAY_DOUBLE: core[0].pixelType = FormatTools.DOUBLE; break; case BGR_24: core[0].pixelType = FormatTools.UINT8; core[0].sizeC *= 3; core[0].rgb = true; break; case BGR_48: core[0].pixelType = FormatTools.UINT16; core[0].sizeC *= 3; core[0].rgb = true; break; case BGRA_8: core[0].pixelType = FormatTools.UINT8; core[0].sizeC *= 4; core[0].rgb = true; break; case BGR_FLOAT: core[0].pixelType = FormatTools.FLOAT; core[0].sizeC *= 3; core[0].rgb = true; break; case COMPLEX: case COMPLEX_FLOAT: throw new FormatException("Sorry, complex pixel data not supported."); default: throw new FormatException("Unknown pixel type: " + pixelType); } } // -- Helper classes -- /** Top-level class that implements logic common to all types of Segment. */ class Segment { public long startingPosition; public String id; public long allocatedSize; public long usedSize; public void fillInData() throws IOException { // read the segment header allocatedSize = in.readLong(); usedSize = in.readLong(); if (usedSize == 0) { usedSize = allocatedSize; } } } /** Segment with ID "ZISRAWFILE". 
*/ class FileHeader extends Segment { public int majorVersion; public int minorVersion; public long primaryFileGUID; public long fileGUID; public int filePart; public long directoryPosition; public long metadataPosition; public boolean updatePending; public long attachmentDirectoryPosition; public void fillInData() throws IOException { super.fillInData(); majorVersion = in.readInt(); minorVersion = in.readInt(); in.skipBytes(4); // reserved 1 in.skipBytes(4); // reserved 2 primaryFileGUID = in.readLong(); fileGUID = in.readLong(); filePart = in.readInt(); directoryPosition = in.readLong(); metadataPosition = in.readLong(); updatePending = in.readInt() != 0; attachmentDirectoryPosition = in.readLong(); } } /** Segment with ID "ZISRAWMETADATA". */ class Metadata extends Segment { public String xml; public byte[] attachment; public void fillInData() throws IOException { super.fillInData(); int xmlSize = in.readInt(); int attachmentSize = in.readInt(); in.skipBytes(248); xml = in.readString(xmlSize); attachment = new byte[attachmentSize]; in.read(attachment); } } /** Segment with ID "ZISRAWSUBBLOCK". 
*/ class SubBlock extends Segment { public int metadataSize; public int attachmentSize; public long dataSize; public DirectoryEntry directoryEntry; public String metadata; public int seriesIndex; public int planeIndex; private long dataOffset; private Double stageX, stageY, timestamp, exposureTime, stageZ; public int x, y; public void fillInData() throws IOException { super.fillInData(); long fp = in.getFilePointer(); metadataSize = in.readInt(); attachmentSize = in.readInt(); dataSize = in.readLong(); directoryEntry = new DirectoryEntry(); in.skipBytes((int) Math.max(256 - (in.getFilePointer() - fp), 0)); metadata = in.readString(metadataSize).trim(); dataOffset = in.getFilePointer(); in.seek(in.getFilePointer() + dataSize + attachmentSize); parseMetadata(); } // -- SubBlock API methods -- public byte[] readPixelData() throws FormatException, IOException { in.seek(dataOffset); byte[] data = new byte[(int) dataSize]; in.read(data); CodecOptions options = new CodecOptions(); options.interleaved = isInterleaved(); options.littleEndian = isLittleEndian(); options.maxBytes = getSizeX() * getSizeY() * getRGBChannelCount() * FormatTools.getBytesPerPixel(getPixelType()); switch (directoryEntry.compression) { case JPEG: data = new JPEGCodec().decompress(data, options); break; case LZW: data = new LZWCodec().decompress(data, options); break; } return data; } // -- Helper methods -- private void parseMetadata() throws IOException { if (metadata.length() == 0) { return; } Element root = null; try { DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); DocumentBuilder parser = factory.newDocumentBuilder(); ByteArrayInputStream s = new ByteArrayInputStream(metadata.getBytes(Constants.ENCODING)); root = parser.parse(s).getDocumentElement(); s.close(); } catch (ParserConfigurationException e) { return; } catch (SAXException e) { return; } if (root == null) { return; } NodeList children = root.getChildNodes(); if (children == null) { return; } for (int i=0; 
i<children.getLength(); i++) { if (!(children.item(i) instanceof Element)) { continue; } Element child = (Element) children.item(i); if (child.getNodeName().equals("Tags")) { NodeList tags = child.getChildNodes(); if (tags != null) { for (int tag=0; tag<tags.getLength(); tag++) { if (!(tags.item(tag) instanceof Element)) { continue; } Element tagNode = (Element) tags.item(tag); String text = tagNode.getTextContent(); if (text != null) { if (tagNode.getNodeName().equals("StageXPosition")) { stageX = new Double(text); } else if (tagNode.getNodeName().equals("StageYPosition")) { stageY = new Double(text); } else if (tagNode.getNodeName().equals("FocusPosition")) { stageZ = new Double(text); } else if (tagNode.getNodeName().equals("AcquisitionTime")) { timestamp = DateTools.getTime( text, DateTools.ISO8601_FORMAT) / 1000d; } else if (tagNode.getNodeName().equals("ExposureTime")) { exposureTime = new Double(text); } } } } } } } } /** Segment with ID "ZISRAWATTACH". */ class Attachment extends Segment { public int dataSize; public AttachmentEntry attachment; public byte[] attachmentData; public void fillInData() throws IOException { super.fillInData(); dataSize = in.readInt(); in.skipBytes(12); // reserved attachment = new AttachmentEntry(); in.skipBytes(112); // reserved attachmentData = new byte[dataSize]; in.read(attachmentData); } } class DirectoryEntry { public String schemaType; public int pixelType; public long filePosition; public int filePart; public int compression; public byte pyramidType; public int dimensionCount; public DimensionEntry[] dimensionEntries; public DirectoryEntry() throws IOException { schemaType = in.readString(2); pixelType = in.readInt(); filePosition = in.readLong(); filePart = in.readInt(); compression = in.readInt(); pyramidType = in.readByte(); if (pyramidType == 1) { prestitched = false; } in.skipBytes(1); // reserved in.skipBytes(4); // reserved dimensionCount = in.readInt(); dimensionEntries = new DimensionEntry[dimensionCount]; for 
(int i=0; i<dimensionEntries.length; i++) { dimensionEntries[i] = new DimensionEntry(); } } } class DimensionEntry { public String dimension; public int start; public int size; public float startCoordinate; public int storedSize; public DimensionEntry() throws IOException { dimension = in.readString(4).trim(); start = in.readInt(); size = in.readInt(); startCoordinate = in.readFloat(); storedSize = in.readInt(); } } class AttachmentEntry { public String schemaType; public long filePosition; public int filePart; public String contentGUID; public String contentFileType; public String name; public AttachmentEntry() throws IOException { schemaType = in.readString(2); in.skipBytes(10); // reserved filePosition = in.readLong(); filePart = in.readInt(); contentGUID = in.readString(16); contentFileType = in.readString(8); name = in.readString(80); } } }
// ZeissLSMReader.java package loci.formats.in; import java.io.File; import java.io.IOException; import java.util.Hashtable; import java.util.Vector; import loci.common.DataTools; import loci.common.DateTools; import loci.common.Location; import loci.common.RandomAccessInputStream; import loci.formats.CoreMetadata; import loci.formats.FormatException; import loci.formats.FormatReader; import loci.formats.FormatTools; import loci.formats.MetadataTools; import loci.formats.meta.FilterMetadata; import loci.formats.meta.MetadataStore; import loci.formats.tiff.IFD; import loci.formats.tiff.IFDList; import loci.formats.tiff.PhotoInterp; import loci.formats.tiff.TiffCompression; import loci.formats.tiff.TiffConstants; import loci.formats.tiff.TiffParser; import loci.formats.tiff.TiffTools; public class ZeissLSMReader extends FormatReader { // -- Constants -- public static final String[] MDB_SUFFIX = {"mdb"}; /** Tag identifying a Zeiss LSM file. */ private static final int ZEISS_ID = 34412; /** Data types. */ private static final int TYPE_SUBBLOCK = 0; private static final int TYPE_ASCII = 2; private static final int TYPE_LONG = 4; private static final int TYPE_RATIONAL = 5; /** Subblock types. */ private static final int SUBBLOCK_RECORDING = 0x10000000; private static final int SUBBLOCK_LASER = 0x50000000; private static final int SUBBLOCK_TRACK = 0x40000000; private static final int SUBBLOCK_DETECTION_CHANNEL = 0x70000000; private static final int SUBBLOCK_ILLUMINATION_CHANNEL = 0x90000000; private static final int SUBBLOCK_BEAM_SPLITTER = 0xb0000000; private static final int SUBBLOCK_DATA_CHANNEL = 0xd0000000; private static final int SUBBLOCK_TIMER = 0x12000000; private static final int SUBBLOCK_MARKER = 0x14000000; private static final int SUBBLOCK_END = (int) 0xffffffff; /** Data types. 
*/ private static final int RECORDING_NAME = 0x10000001; private static final int RECORDING_DESCRIPTION = 0x10000002; private static final int RECORDING_OBJECTIVE = 0x10000004; private static final int RECORDING_ZOOM = 0x10000016; private static final int RECORDING_SAMPLE_0TIME = 0x10000036; private static final int RECORDING_CAMERA_BINNING = 0x10000052; private static final int TRACK_ACQUIRE = 0x40000006; private static final int TRACK_TIME_BETWEEN_STACKS = 0x4000000b; private static final int LASER_NAME = 0x50000001; private static final int LASER_ACQUIRE = 0x50000002; private static final int LASER_POWER = 0x50000003; private static final int CHANNEL_DETECTOR_GAIN = 0x70000003; private static final int CHANNEL_PINHOLE_DIAMETER = 0x70000009; private static final int CHANNEL_AMPLIFIER_GAIN = 0x70000005; private static final int CHANNEL_FILTER_SET = 0x7000000f; private static final int CHANNEL_FILTER = 0x70000010; private static final int CHANNEL_ACQUIRE = 0x7000000b; private static final int CHANNEL_NAME = 0x70000014; private static final int ILLUM_CHANNEL_ATTENUATION = 0x90000002; private static final int ILLUM_CHANNEL_WAVELENGTH = 0x90000003; private static final int ILLUM_CHANNEL_ACQUIRE = 0x90000004; private static final int START_TIME = 0x10000036; private static final int DATA_CHANNEL_NAME = 0xd0000001; private static final int DATA_CHANNEL_ACQUIRE = 0xd0000017; private static final int BEAM_SPLITTER_FILTER = 0xb0000002; private static final int BEAM_SPLITTER_FILTER_SET = 0xb0000003; /** Drawing element types. 
*/ private static final int TEXT = 13; private static final int LINE = 14; private static final int SCALE_BAR = 15; private static final int OPEN_ARROW = 16; private static final int CLOSED_ARROW = 17; private static final int RECTANGLE = 18; private static final int ELLIPSE = 19; private static final int CLOSED_POLYLINE = 20; private static final int OPEN_POLYLINE = 21; private static final int CLOSED_BEZIER = 22; private static final int OPEN_BEZIER = 23; private static final int CIRCLE = 24; private static final int PALETTE = 25; private static final int POLYLINE_ARROW = 26; private static final int BEZIER_WITH_ARROW = 27; private static final int ANGLE = 28; private static final int CIRCLE_3POINT = 29; // -- Static fields -- private static Hashtable<Integer, String> metadataKeys = createKeys(); // -- Fields -- private double pixelSizeX, pixelSizeY, pixelSizeZ; private byte[][] lut = null; private Vector<Double> timestamps; private int validChannels; private String[] lsmFilenames; private Vector<IFDList> ifdsList; private TiffParser tiffParser; private int nextLaser = 0, nextDetector = 0; private int nextFilter = 0, nextFilterSet = 0; private int nextDataChannel = 0, nextIllumChannel = 0, nextDetectChannel = 0; private boolean splitPlanes = false; private double zoom; private Vector<String> imageNames; private String binning; private int totalROIs = 0; // -- Constructor -- /** Constructs a new Zeiss LSM reader. 
*/ public ZeissLSMReader() { super("Zeiss Laser-Scanning Microscopy", new String[] {"lsm", "mdb"}); domains = new String[] {FormatTools.LM_DOMAIN}; } // -- IFormatReader API methods -- /* @see loci.formats.IFormatReader#isSingleFile(String) */ public boolean isSingleFile(String id) throws FormatException, IOException { if (checkSuffix(id, MDB_SUFFIX)) return false; if (isGroupFiles()) { // look for an .mdb file Location parentFile = new Location(id).getAbsoluteFile().getParentFile(); String[] fileList = parentFile.list(); for (int i=0; i<fileList.length; i++) { if (fileList[i].startsWith(".")) continue; if (checkSuffix(fileList[i], MDB_SUFFIX)) { Location file = new Location(parentFile, fileList[i]).getAbsoluteFile(); if (file.isDirectory()) continue; // make sure that the .mdb references this .lsm String[] lsms = parseMDB(file.getAbsolutePath()); for (String lsm : lsms) { if (id.endsWith(lsm) || lsm.endsWith(id)) { return false; } } } } } return true; } /* @see loci.formats.IFormatReader#close(boolean) */ public void close(boolean fileOnly) throws IOException { super.close(fileOnly); if (!fileOnly) { pixelSizeX = pixelSizeY = pixelSizeZ = 0; lut = null; timestamps = null; validChannels = 0; lsmFilenames = null; ifdsList = null; tiffParser = null; nextLaser = nextDetector = 0; nextFilter = nextFilterSet = 0; nextDataChannel = nextIllumChannel = nextDetectChannel = 0; splitPlanes = false; zoom = 0; imageNames = null; binning = null; totalROIs = 0; } } /* @see loci.formats.IFormatReader#isThisType(RandomAccessInputStream) */ public boolean isThisType(RandomAccessInputStream stream) throws IOException { final int blockLen = 4; if (!FormatTools.validStream(stream, blockLen, false)) return false; byte[] check = new byte[blockLen]; stream.readFully(check); return TiffTools.isValidHeader(check) || (check[2] == 0x53 && check[3] == 0x74); } /* @see loci.formats.IFormatReader#fileGroupOption(String) */ public int fileGroupOption(String id) throws FormatException, IOException 
{ return FormatTools.MUST_GROUP; } /* @see loci.formats.IFormatReader#getSeriesUsedFiles(boolean) */ public String[] getSeriesUsedFiles(boolean noPixels) { FormatTools.assertId(currentId, true, 1); if (noPixels) { if (checkSuffix(currentId, MDB_SUFFIX)) return new String[] {currentId}; return null; } if (lsmFilenames == null) return new String[] {currentId}; if (lsmFilenames.length == 1 && currentId.equals(lsmFilenames[0])) { return lsmFilenames; } return new String[] {currentId, lsmFilenames[getSeries()]}; } /* @see loci.formats.IFormatReader#get8BitLookupTable() */ public byte[][] get8BitLookupTable() throws FormatException, IOException { FormatTools.assertId(currentId, true, 1); if (lut == null || lut[getSeries()] == null || getPixelType() != FormatTools.UINT8) { return null; } byte[][] b = new byte[3][256]; for (int i=2; i>=3-validChannels; i for (int j=0; j<256; j++) { b[i][j] = (byte) j; } } return b; } /* @see loci.formats.IFormatReader#get16BitLookupTable() */ public short[][] get16BitLookupTable() throws FormatException, IOException { FormatTools.assertId(currentId, true, 1); if (lut == null || lut[getSeries()] == null || getPixelType() != FormatTools.UINT16) { return null; } short[][] s = new short[3][65536]; for (int i=2; i>=3-validChannels; i for (int j=0; j<s[i].length; j++) { s[i][j] = (short) j; } } return s; } /** * @see loci.formats.IFormatReader#openBytes(int, byte[], int, int, int, int) */ public byte[] openBytes(int no, byte[] buf, int x, int y, int w, int h) throws FormatException, IOException { FormatTools.checkPlaneParameters(this, no, buf.length, x, y, w, h); in = new RandomAccessInputStream(lsmFilenames[getSeries()]); in.order(!isLittleEndian()); tiffParser = new TiffParser(in); IFDList ifds = ifdsList.get(getSeries()); if (splitPlanes && getSizeC() > 1) { int plane = no / getSizeC(); int c = no % getSizeC(); byte[][] samples = tiffParser.getSamples(ifds.get(plane), x, y, w, h); System.arraycopy(samples[c], 0, buf, 0, buf.length); } else 
tiffParser.getSamples(ifds.get(no), buf, x, y, w, h); in.close(); return buf; } // -- Internal FormatReader API methods -- /* @see loci.formats.FormatReader#initFile(String) */ protected void initFile(String id) throws FormatException, IOException { debug("ZeissLSMReader.initFile(" + id + ")"); super.initFile(id); if (!checkSuffix(id, MDB_SUFFIX) && isGroupFiles()) { // look for an .mdb file Location parentFile = new Location(id).getAbsoluteFile().getParentFile(); String[] fileList = parentFile.list(); for (int i=0; i<fileList.length; i++) { if (fileList[i].startsWith(".")) continue; if (checkSuffix(fileList[i], MDB_SUFFIX)) { Location file = new Location(parentFile, fileList[i]).getAbsoluteFile(); if (file.isDirectory()) continue; // make sure that the .mdb references this .lsm String[] lsms = parseMDB(file.getAbsolutePath()); for (String lsm : lsms) { if (id.endsWith(lsm) || lsm.endsWith(id)) { setId(file.getAbsolutePath()); return; } } } } lsmFilenames = new String[] {id}; } else if (checkSuffix(id, MDB_SUFFIX)) { lsmFilenames = parseMDB(id); } else lsmFilenames = new String[] {id}; if (lsmFilenames.length == 0) { throw new FormatException("LSM files were not found."); } timestamps = new Vector<Double>(); imageNames = new Vector<String>(); core = new CoreMetadata[lsmFilenames.length]; ifdsList = new Vector<IFDList>(); ifdsList.setSize(core.length); for (int i=0; i<core.length; i++) { core[i] = new CoreMetadata(); RandomAccessInputStream s = new RandomAccessInputStream(lsmFilenames[i]); core[i].littleEndian = s.read() == TiffConstants.LITTLE; s.order(isLittleEndian()); s.seek(0); ifdsList.set(i, new TiffParser(s).getIFDs(true)); s.close(); } status("Removing thumbnails"); MetadataStore store = new FilterMetadata(getMetadataStore(), isMetadataFiltered()); for (int series=0; series<ifdsList.size(); series++) { IFDList ifds = ifdsList.get(series); for (int i=0; i<ifds.size(); i++) { IFD ifd = ifds.get(i); // check that predictor is set to 1 if anything other // than 
LZW compression is used if (ifd.getCompression() != TiffCompression.LZW) { ifd.putIFDValue(IFD.PREDICTOR, 1); } } // fix the offsets for > 4 GB files for (int i=1; i<ifds.size(); i++) { long[] stripOffsets = ifds.get(i).getStripOffsets(); long[] previousStripOffsets = ifds.get(i - 1).getStripOffsets(); if (stripOffsets == null || previousStripOffsets == null) { throw new FormatException( "Strip offsets are missing; this is an invalid file."); } boolean neededAdjustment = false; for (int j=0; j<stripOffsets.length; j++) { if (stripOffsets[j] < previousStripOffsets[j]) { stripOffsets[j] = (previousStripOffsets[j] & ~0xffffffffL) | (stripOffsets[j] & 0xffffffffL); if (stripOffsets[j] < previousStripOffsets[j]) { stripOffsets[j] += 0x100000000L; } neededAdjustment = true; } if (neededAdjustment) { ifds.get(i).putIFDValue(IFD.STRIP_OFFSETS, stripOffsets); } } } initMetadata(series); store.setPixelsBigEndian(new Boolean(!isLittleEndian()), series, 0); } for (int series=0; series<ifdsList.size(); series++) { store.setImageName(imageNames.get(series), series); } setSeries(0); } // -- Helper methods -- protected void initMetadata(int series) throws FormatException, IOException { setSeries(series); IFDList ifds = ifdsList.get(series); IFD ifd = ifds.get(0); in = new RandomAccessInputStream(lsmFilenames[series]); in.order(isLittleEndian()); tiffParser = new TiffParser(in); int photo = ifd.getPhotometricInterpretation(); int samples = ifd.getSamplesPerPixel(); core[series].sizeX = (int) ifd.getImageWidth(); core[series].sizeY = (int) ifd.getImageLength(); core[series].rgb = samples > 1 || photo == PhotoInterp.RGB; core[series].interleaved = false; core[series].sizeC = isRGB() ? 
samples : 1; core[series].pixelType = ifd.getPixelType(); core[series].imageCount = ifds.size(); core[series].sizeZ = getImageCount(); core[series].sizeT = 1; status("Reading LSM metadata for series #" + series); MetadataStore store = new FilterMetadata(getMetadataStore(), isMetadataFiltered()); String imageName = lsmFilenames[series]; if (imageName.indexOf(".") != -1) { imageName = imageName.substring(0, imageName.lastIndexOf(".")); } if (imageName.indexOf(File.separator) != -1) { imageName = imageName.substring(imageName.lastIndexOf(File.separator) + 1); } // link Instrument and Image String instrumentID = MetadataTools.createLSID("Instrument", series); store.setInstrumentID(instrumentID, series); store.setImageInstrumentRef(instrumentID, series); // get TIF_CZ_LSMINFO structure short[] s = ifd.getIFDShortArray(ZEISS_ID, true); byte[] cz = new byte[s.length]; for (int i=0; i<s.length; i++) { cz[i] = (byte) s[i]; } RandomAccessInputStream ras = new RandomAccessInputStream(cz); ras.order(isLittleEndian()); addSeriesMeta("MagicNumber ", ras.readInt()); addSeriesMeta("StructureSize", ras.readInt()); addSeriesMeta("DimensionX", ras.readInt()); addSeriesMeta("DimensionY", ras.readInt()); core[series].sizeZ = ras.readInt(); ras.skipBytes(4); core[series].sizeT = ras.readInt(); int dataType = ras.readInt(); switch (dataType) { case 2: addSeriesMeta("DataType", "12 bit unsigned integer"); break; case 5: addSeriesMeta("DataType", "32 bit float"); break; case 0: addSeriesMeta("DataType", "varying data types"); break; default: addSeriesMeta("DataType", "8 bit unsigned integer"); } addSeriesMeta("ThumbnailX", ras.readInt()); addSeriesMeta("ThumbnailY", ras.readInt()); // pixel sizes are stored in meters, we need them in microns pixelSizeX = ras.readDouble() * 1000000; pixelSizeY = ras.readDouble() * 1000000; pixelSizeZ = ras.readDouble() * 1000000; addSeriesMeta("VoxelSizeX", new Double(pixelSizeX)); addSeriesMeta("VoxelSizeY", new Double(pixelSizeY)); 
addSeriesMeta("VoxelSizeZ", new Double(pixelSizeZ)); addSeriesMeta("OriginX", ras.readDouble()); addSeriesMeta("OriginY", ras.readDouble()); addSeriesMeta("OriginZ", ras.readDouble()); int scanType = ras.readShort(); switch (scanType) { case 0: addSeriesMeta("ScanType", "x-y-z scan"); core[series].dimensionOrder = "XYZCT"; break; case 1: addSeriesMeta("ScanType", "z scan (x-z plane)"); core[series].dimensionOrder = "XYZCT"; break; case 2: addSeriesMeta("ScanType", "line scan"); core[series].dimensionOrder = "XYZCT"; break; case 3: addSeriesMeta("ScanType", "time series x-y"); core[series].dimensionOrder = "XYTCZ"; break; case 4: addSeriesMeta("ScanType", "time series x-z"); core[series].dimensionOrder = "XYZTC"; break; case 5: addSeriesMeta("ScanType", "time series 'Mean of ROIs'"); core[series].dimensionOrder = "XYTCZ"; break; case 6: addSeriesMeta("ScanType", "time series x-y-z"); core[series].dimensionOrder = "XYZTC"; break; case 7: addSeriesMeta("ScanType", "spline scan"); core[series].dimensionOrder = "XYCTZ"; break; case 8: addSeriesMeta("ScanType", "spline scan x-z"); core[series].dimensionOrder = "XYCZT"; break; case 9: addSeriesMeta("ScanType", "time series spline plane x-z"); core[series].dimensionOrder = "XYTCZ"; break; case 10: addSeriesMeta("ScanType", "point mode"); core[series].dimensionOrder = "XYZCT"; break; default: addSeriesMeta("ScanType", "x-y-z scan"); core[series].dimensionOrder = "XYZCT"; } core[series].indexed = lut != null && lut[series] != null && getSizeC() == 1; if (isIndexed()) { core[series].sizeC = 1; core[series].rgb = false; } if (getSizeC() == 0) core[series].sizeC = 1; if (isRGB()) { // shuffle C to front of order string core[series].dimensionOrder = getDimensionOrder().replaceAll("C", ""); core[series].dimensionOrder = getDimensionOrder().replaceAll("XY", "XYC"); } if (isIndexed()) core[series].rgb = false; if (getEffectiveSizeC() == 0) { core[series].imageCount = getSizeZ() * getSizeT(); } else { core[series].imageCount = 
getSizeZ() * getSizeT() * getEffectiveSizeC(); } if (getImageCount() != ifds.size()) { int diff = getImageCount() - ifds.size(); core[series].imageCount = ifds.size(); if (diff % getSizeZ() == 0) { core[series].sizeT -= (diff / getSizeZ()); } else if (diff % getSizeT() == 0) { core[series].sizeZ -= (diff / getSizeT()); } else if (getSizeZ() > 1) { core[series].sizeZ = ifds.size(); core[series].sizeT = 1; } else if (getSizeT() > 1) { core[series].sizeT = ifds.size(); core[series].sizeZ = 1; } } if (getSizeZ() == 0) core[series].sizeZ = getImageCount(); if (getSizeT() == 0) core[series].sizeT = getImageCount() / getSizeZ(); MetadataTools.setDefaultCreationDate(store, getCurrentFile(), series); int spectralScan = ras.readShort(); if (spectralScan != 1) { addSeriesMeta("SpectralScan", "no spectral scan"); } else addSeriesMeta("SpectralScan", "acquired with spectral scan"); int type = ras.readInt(); switch (type) { case 1: addSeriesMeta("DataType2", "calculated data"); break; case 2: addSeriesMeta("DataType2", "animation"); break; default: addSeriesMeta("DataType2", "original scan data"); } long[] overlayOffsets = new long[9]; String[] overlayKeys = new String[] {"VectorOverlay", "InputLut", "OutputLut", "ROI", "BleachROI", "MeanOfRoisOverlay", "TopoIsolineOverlay", "TopoProfileOverlay", "LinescanOverlay"}; overlayOffsets[0] = ras.readInt(); overlayOffsets[1] = ras.readInt(); overlayOffsets[2] = ras.readInt(); long channelColorsOffset = ras.readInt(); addSeriesMeta("TimeInterval", ras.readDouble()); ras.skipBytes(4); long scanInformationOffset = ras.readInt(); ras.skipBytes(4); long timeStampOffset = ras.readInt(); long eventListOffset = ras.readInt(); overlayOffsets[3] = ras.readInt(); overlayOffsets[4] = ras.readInt(); ras.skipBytes(4); addSeriesMeta("DisplayAspectX", ras.readDouble()); addSeriesMeta("DisplayAspectY", ras.readDouble()); addSeriesMeta("DisplayAspectZ", ras.readDouble()); addSeriesMeta("DisplayAspectTime", ras.readDouble()); overlayOffsets[5] = 
ras.readInt(); overlayOffsets[6] = ras.readInt(); overlayOffsets[7] = ras.readInt(); overlayOffsets[8] = ras.readInt(); for (int i=0; i<overlayOffsets.length; i++) { parseOverlays(series, overlayOffsets[i], overlayKeys[i], store); } totalROIs = 0; addSeriesMeta("ToolbarFlags", ras.readInt()); int wavelengthOffset = ras.readInt(); ras.close(); // read referenced structures addSeriesMeta("DimensionZ", getSizeZ()); addSeriesMeta("DimensionChannels", getSizeC()); if (channelColorsOffset != 0) { in.seek(channelColorsOffset + 16); int namesOffset = in.readInt(); // read the name of each channel if (namesOffset > 0) { in.skipBytes(namesOffset - 16); for (int i=0; i<getSizeC(); i++) { if (in.getFilePointer() >= in.length() - 1) break; // we want to read until we find a null char String name = in.readCString(); if (name.length() <= 128) { addSeriesMeta("ChannelName" + i, name); } } } } if (timeStampOffset != 0) { in.seek(timeStampOffset + 8); for (int i=0; i<getSizeT(); i++) { double stamp = in.readDouble(); addSeriesMeta("TimeStamp" + i, stamp); timestamps.add(new Double(stamp)); } } if (eventListOffset != 0) { in.seek(eventListOffset + 4); int numEvents = in.readInt(); in.seek(in.getFilePointer() - 4); in.order(!in.isLittleEndian()); int tmpEvents = in.readInt(); if (numEvents < 0) numEvents = tmpEvents; else numEvents = (int) Math.min(numEvents, tmpEvents); in.order(!in.isLittleEndian()); if (numEvents > 65535) numEvents = 0; for (int i=0; i<numEvents; i++) { if (in.getFilePointer() + 16 <= in.length()) { int size = in.readInt(); double eventTime = in.readDouble(); int eventType = in.readInt(); addSeriesMeta("Event" + i + " Time", eventTime); addSeriesMeta("Event" + i + " Type", eventType); long fp = in.getFilePointer(); int len = size - 16; if (len > 65536) len = 65536; if (len < 0) len = 0; addSeriesMeta("Event" + i + " Description", in.readString(len)); in.seek(fp + size - 16); if (in.getFilePointer() < 0) break; } } } if (scanInformationOffset != 0) { 
in.seek(scanInformationOffset); nextLaser = nextDetector = 0; nextFilter = nextFilterSet = 0; nextDataChannel = nextDetectChannel = nextIllumChannel = 0; Vector<SubBlock> blocks = new Vector<SubBlock>(); while (in.getFilePointer() < in.length() - 12) { if (in.getFilePointer() < 0) break; int entry = in.readInt(); int blockType = in.readInt(); int dataSize = in.readInt(); if (blockType == TYPE_SUBBLOCK) { SubBlock block = null; switch (entry) { case SUBBLOCK_RECORDING: block = new Recording(); break; case SUBBLOCK_LASER: block = new Laser(); break; case SUBBLOCK_TRACK: block = new Track(); break; case SUBBLOCK_DETECTION_CHANNEL: block = new DetectionChannel(); break; case SUBBLOCK_ILLUMINATION_CHANNEL: block = new IlluminationChannel(); break; case SUBBLOCK_BEAM_SPLITTER: block = new BeamSplitter(); break; case SUBBLOCK_DATA_CHANNEL: block = new DataChannel(); break; case SUBBLOCK_TIMER: block = new Timer(); break; case SUBBLOCK_MARKER: block = new Marker(); break; } if (block != null) { blocks.add(block); } } else in.skipBytes(dataSize); } Vector<SubBlock> nonAcquiredBlocks = new Vector<SubBlock>(); SubBlock[] metadataBlocks = blocks.toArray(new SubBlock[0]); for (SubBlock block : metadataBlocks) { block.addToHashtable(); if (!block.acquire) { nonAcquiredBlocks.add(block); blocks.remove(block); } } for (int i=0; i<blocks.size(); i++) { SubBlock block = blocks.get(i); // every valid IlluminationChannel must be immediately followed by // a valid DataChannel or IlluminationChannel if ((block instanceof IlluminationChannel) && i < blocks.size() - 1) { SubBlock nextBlock = blocks.get(i + 1); if (!(nextBlock instanceof DataChannel) && !(nextBlock instanceof IlluminationChannel)) { ((IlluminationChannel) block).wavelength = null; } } // every valid DetectionChannel must be immediately preceded by // a valid Track or DetectionChannel else if ((block instanceof DetectionChannel) && i > 0) { SubBlock prevBlock = blocks.get(i - 1); if (!(prevBlock instanceof Track) && 
!(prevBlock instanceof DetectionChannel)) { block.acquire = false; nonAcquiredBlocks.add(block); } } if (block.acquire) populateMetadataStore(block, store, series); } for (SubBlock block : nonAcquiredBlocks) { populateMetadataStore(block, store, series); } } int nLogicalChannels = nextDataChannel == 0 ? 1 : nextDataChannel; if (nLogicalChannels == getSizeC()) { if (!splitPlanes) splitPlanes = isRGB(); core[series].rgb = false; if (splitPlanes) core[series].imageCount *= getSizeC(); } MetadataTools.populatePixels(store, this, true); imageNames.add(imageName); Double pixX = new Double(pixelSizeX); Double pixY = new Double(pixelSizeY); Double pixZ = new Double(pixelSizeZ); store.setDimensionsPhysicalSizeX(pixX, series, 0); store.setDimensionsPhysicalSizeY(pixY, series, 0); store.setDimensionsPhysicalSizeZ(pixZ, series, 0); double firstStamp = 0; if (timestamps.size() > 0) { firstStamp = timestamps.get(0).doubleValue(); } for (int i=0; i<getImageCount(); i++) { int[] zct = FormatTools.getZCTCoords(this, i); if (zct[2] < timestamps.size()) { double thisStamp = timestamps.get(zct[2]).doubleValue(); store.setPlaneTimingDeltaT(new Double(thisStamp - firstStamp), series, 0, i); int index = zct[2] + 1; double nextStamp = index < timestamps.size() ? timestamps.get(index).doubleValue() : thisStamp; if (i == getSizeT() - 1 && zct[2] > 0) { thisStamp = timestamps.get(zct[2] - 1).doubleValue(); } store.setPlaneTimingExposureTime(new Double(nextStamp - thisStamp), series, 0, i); } } in.close(); } protected void populateMetadataStore(SubBlock block, MetadataStore store, int series) { // NB: block.acquire can be false. If that is the case, Instrument data // is the only thing that should be populated. 
if (block instanceof Recording) { Recording recording = (Recording) block; String objectiveID = MetadataTools.createLSID("Objective", series, 0); if (recording.acquire) { store.setImageDescription(recording.description, series); store.setImageCreationDate(recording.startTime, series); store.setObjectiveSettingsObjective(objectiveID, series); binning = recording.binning; } store.setObjectiveCorrection(recording.correction, series, 0); store.setObjectiveImmersion(recording.immersion, series, 0); store.setObjectiveNominalMagnification(recording.magnification, series, 0); store.setObjectiveLensNA(recording.lensNA, series, 0); store.setObjectiveIris(recording.iris, series, 0); store.setObjectiveID(objectiveID, series, 0); } else if (block instanceof Laser) { Laser laser = (Laser) block; if (laser.medium != null) { store.setLaserLaserMedium(laser.medium, series, nextLaser); } if (laser.type != null) { store.setLaserType(laser.type, series, nextLaser); } String lightSourceID = MetadataTools.createLSID("LightSource", series, nextLaser); store.setLightSourceID(lightSourceID, series, nextLaser); nextLaser++; } else if (block instanceof Track) { Track track = (Track) block; if (track.acquire) { store.setDimensionsTimeIncrement(track.timeIncrement, series, 0); } } else if (block instanceof DataChannel) { DataChannel channel = (DataChannel) block; if (channel.name != null && nextDataChannel < getSizeC() && channel.acquire) { store.setLogicalChannelName(channel.name, series, nextDataChannel++); } } else if (block instanceof DetectionChannel) { DetectionChannel channel = (DetectionChannel) block; if (channel.pinhole != null && channel.pinhole.doubleValue() != 0f && nextDetectChannel < getSizeC() && channel.acquire) { store.setLogicalChannelPinholeSize(channel.pinhole, series, nextDetectChannel); } if (channel.filter != null) { String id = MetadataTools.createLSID("Filter", series, nextFilter); if (channel.acquire && nextDetectChannel < getSizeC()) { 
store.setLogicalChannelSecondaryEmissionFilter( id, series, nextDetectChannel); } store.setFilterID(id, series, nextFilter); store.setFilterModel(channel.filter, series, nextFilter); int space = channel.filter.indexOf(" "); if (space != -1) { String type = channel.filter.substring(0, space).trim(); if (type.equals("BP")) type = "BandPass"; else if (type.equals("LP")) type = "LongPass"; store.setFilterType(type, series, nextFilter); String transmittance = channel.filter.substring(space + 1).trim(); String[] v = transmittance.split("-"); try { store.setTransmittanceRangeCutIn(new Integer(v[0].trim()), series, nextFilter); } catch (NumberFormatException e) { } if (v.length > 1) { try { store.setTransmittanceRangeCutOut(new Integer(v[1].trim()), series, nextFilter); } catch (NumberFormatException e) { } } } nextFilter++; } if (channel.channelName != null) { String detectorID = MetadataTools.createLSID("Detector", series, nextDetector); store.setDetectorID(detectorID, series, nextDetector); if (channel.acquire && nextDetector < getSizeC()) { store.setDetectorSettingsDetector(detectorID, series, nextDetector); store.setDetectorSettingsBinning(binning, series, nextDetector); } } if (channel.amplificationGain != null) { store.setDetectorAmplificationGain(channel.amplificationGain, series, nextDetector); } if (channel.gain != null) { store.setDetectorGain(channel.gain, series, nextDetector); } store.setDetectorType("PMT", series, nextDetector); store.setDetectorZoom(new Double(zoom), series, nextDetector); nextDetectChannel++; nextDetector++; } else if (block instanceof BeamSplitter) { BeamSplitter beamSplitter = (BeamSplitter) block; if (beamSplitter.filterSet != null) { String filterSetID = MetadataTools.createLSID("FilterSet", series, nextFilterSet); store.setFilterSetID(filterSetID, series, nextFilterSet); if (beamSplitter.filter != null) { String id = MetadataTools.createLSID("Dichroic", series, nextFilter); store.setDichroicID(id, series, nextFilter); 
store.setDichroicModel(beamSplitter.filter, series, nextFilter); store.setFilterSetDichroic(id, series, nextFilterSet); nextFilter++; } nextFilterSet++; } } } /** Parses overlay-related fields. */ protected void parseOverlays(int series, long data, String suffix, MetadataStore store) throws IOException { if (data == 0) return; String prefix = "Series " + series + " "; in.seek(data); int numberOfShapes = in.readInt(); int size = in.readInt(); if (size <= 194) return; in.skipBytes(20); boolean valid = in.readInt() == 1; in.skipBytes(164); for (int i=totalROIs; i<totalROIs+numberOfShapes; i++) { long offset = in.getFilePointer(); int type = in.readInt(); int blockLength = in.readInt(); int lineWidth = in.readInt(); int measurements = in.readInt(); double textOffsetX = in.readDouble(); double textOffsetY = in.readDouble(); int color = in.readInt(); boolean validShape = in.readInt() != 0; int knotWidth = in.readInt(); int catchArea = in.readInt(); int fontHeight = in.readInt(); int fontWidth = in.readInt(); int fontEscapement = in.readInt(); int fontOrientation = in.readInt(); int fontWeight = in.readInt(); boolean fontItalic = in.readInt() != 0; boolean fontUnderlined = in.readInt() != 0; boolean fontStrikeout = in.readInt() != 0; int fontCharSet = in.readInt(); int fontOutputPrecision = in.readInt(); int fontClipPrecision = in.readInt(); int fontQuality = in.readInt(); int fontPitchAndFamily = in.readInt(); String fontName = DataTools.stripString(in.readString(64)); boolean enabled = in.readShort() == 0; boolean moveable = in.readInt() == 0; in.skipBytes(34); switch (type) { case TEXT: double x = in.readDouble(); double y = in.readDouble(); String text = DataTools.stripString(in.readCString()); store.setShapeText(text, series, i, 0); break; case LINE: in.skipBytes(4); double startX = in.readDouble(); double startY = in.readDouble(); double endX = in.readDouble(); double endY = in.readDouble(); store.setLineX1(String.valueOf(startX), series, i, 0); 
store.setLineY1(String.valueOf(startY), series, i, 0); store.setLineX2(String.valueOf(endX), series, i, 0); store.setLineY2(String.valueOf(endY), series, i, 0); break; case SCALE_BAR: case OPEN_ARROW: case CLOSED_ARROW: case PALETTE: in.skipBytes(36); break; case RECTANGLE: in.skipBytes(4); double topX = in.readDouble(); double topY = in.readDouble(); double bottomX = in.readDouble(); double bottomY = in.readDouble(); double width = Math.abs(bottomX - topX); double height = Math.abs(bottomY - topY); topX = Math.min(topX, bottomX); topY = Math.min(topY, bottomY); store.setRectX(String.valueOf(topX), series, i, 0); store.setRectY(String.valueOf(topY), series, i, 0); store.setRectWidth(String.valueOf(width), series, i, 0); store.setRectHeight(String.valueOf(height), series, i, 0); break; case ELLIPSE: int knots = in.readInt(); double[] xs = new double[knots]; double[] ys = new double[knots]; for (int j=0; j<xs.length; j++) { xs[j] = in.readDouble(); ys[j] = in.readDouble(); } double rx = 0, ry = 0, centerX = 0, centerY = 0; if (knots == 4) { double r1x = Math.abs(xs[2] - xs[0]) / 2; double r1y = Math.abs(ys[2] - ys[0]) / 2; double r2x = Math.abs(xs[3] - xs[1]) / 2; double r2y = Math.abs(ys[3] - ys[1]) / 2; if (r1x > r2x) { ry = r1y; rx = r2x; centerX = Math.min(xs[3], xs[1]) + rx; centerY = Math.min(ys[2], ys[0]) + ry; } else { ry = r2y; rx = r1x; centerX = Math.min(xs[2], xs[0]) + rx; centerY = Math.min(ys[3], ys[1]) + ry; } } else if (knots == 3) { // we are given the center point and one cut point for each axis centerX = xs[0]; centerY = ys[0]; rx = Math.sqrt(Math.pow(xs[1] - xs[0], 2) + Math.pow(ys[1] - ys[0], 2)); ry = Math.sqrt(Math.pow(xs[2] - xs[0], 2) + Math.pow(ys[2] - ys[0], 2)); // calculate rotation angle double slope = (ys[2] - centerY) / (xs[2] - centerX); double theta = Math.toDegrees(Math.atan(slope)); store.setEllipseTransform("rotate(" + theta + " " + centerX + " " + centerY + ")", series, i, 0); } store.setEllipseCx(String.valueOf(centerX), series, 
i, 0); store.setEllipseCy(String.valueOf(centerY), series, i, 0); store.setEllipseRx(String.valueOf(rx), series, i, 0); store.setEllipseRy(String.valueOf(ry), series, i, 0); break; case CIRCLE: in.skipBytes(4); centerX = in.readDouble(); centerY = in.readDouble(); double curveX = in.readDouble(); double curveY = in.readDouble(); double radius = Math.sqrt(Math.pow(curveX - centerX, 2) + Math.pow(curveY - centerY, 2)); store.setCircleCx(String.valueOf(centerX), series, i, 0); store.setCircleCy(String.valueOf(centerY), series, i, 0); store.setCircleR(String.valueOf(radius), series, i, 0); break; case CIRCLE_3POINT: in.skipBytes(4); // given 3 points on the perimeter of the circle, we need to // calculate the center and radius double[][] points = new double[3][2]; for (int j=0; j<points.length; j++) { for (int k=0; k<points[j].length; k++) { points[j][k] = in.readDouble(); } } double s = 0.5 * ((points[1][0] - points[2][0]) * (points[0][0] - points[2][0]) - (points[1][1] - points[2][1]) * (points[2][1] - points[0][1])); double div = (points[0][0] - points[1][0]) * (points[2][1] - points[0][1]) - (points[1][1] - points[0][1]) * (points[0][0] - points[2][0]); s /= div; double cx = 0.5 * (points[0][0] + points[1][0]) + s * (points[1][1] - points[0][1]); double cy = 0.5 * (points[0][1] + points[1][1]) + s * (points[0][0] - points[1][0]); double r = Math.sqrt(Math.pow(points[0][0] - cx, 2) + Math.pow(points[0][1] - cy, 2)); store.setCircleCx(String.valueOf(cx), series, i, 0); store.setCircleCy(String.valueOf(cy), series, i, 0); store.setCircleR(String.valueOf(r), series, i, 0); break; case ANGLE: in.skipBytes(4); points = new double[3][2]; for (int j=0; j<points.length; j++) { for (int k=0; k<points[j].length; k++) { points[j][k] = in.readDouble(); } } StringBuffer p = new StringBuffer(); for (int j=0; j<points.length; j++) { p.append(points[j][0]); p.append(","); p.append(points[j][1]); if (j < points.length - 1) p.append(" "); } store.setPolylinePoints(p.toString(), 
series, i, 0); break; case CLOSED_POLYLINE: case OPEN_POLYLINE: case POLYLINE_ARROW: int nKnots = in.readInt(); points = new double[nKnots][2]; for (int j=0; j<points.length; j++) { for (int k=0; k<points[j].length; k++) { points[j][k] = in.readDouble(); } } p = new StringBuffer(); for (int j=0; j<points.length; j++) { p.append(points[j][0]); p.append(","); p.append(points[j][1]); if (j < points.length - 1) p.append(" "); } if (type == CLOSED_POLYLINE) { store.setPolygonPoints(p.toString(), series, i, 0); } else store.setPolylinePoints(p.toString(), series, i, 0); break; case CLOSED_BEZIER: case OPEN_BEZIER: case BEZIER_WITH_ARROW: nKnots = in.readInt(); points = new double[nKnots][2]; for (int j=0; j<points.length; j++) { for (int k=0; k<points[j].length; k++) { points[j][k] = in.readDouble(); } } p = new StringBuffer(); for (int j=0; j<points.length; j++) { p.append(points[j][0]); p.append(","); p.append(points[j][1]); if (j < points.length - 1) p.append(" "); } if (type == OPEN_BEZIER) { store.setPolylinePoints(p.toString(), series, i, 0); } else store.setPolygonPoints(p.toString(), series, i, 0); break; default: i numberOfShapes continue; } // populate shape attributes store.setShapeFontFamily(fontName, series, i, 0); store.setShapeFontSize(new Integer(fontHeight), series, i, 0); store.setShapeFontStyle(fontItalic ? "normal" : "italic", series, i, 0); store.setShapeFontWeight(String.valueOf(fontWeight), series, i, 0); store.setShapeLocked(new Boolean(moveable), series, i, 0); store.setShapeStrokeColor(String.valueOf(color), series, i, 0); store.setShapeStrokeWidth(new Integer(lineWidth), series, i, 0); store.setShapeTextDecoration(fontUnderlined ? "underline" : fontStrikeout ? "line-through" : "normal", series, i, 0); store.setShapeVisibility(new Boolean(enabled), series, i, 0); in.seek(offset + blockLength); } totalROIs += numberOfShapes; } /** Parse a .mdb file and return a list of referenced .lsm files. 
*/ private String[] parseMDB(String mdbFile) throws FormatException { Location mdb = new Location(mdbFile).getAbsoluteFile(); Location parent = mdb.getParentFile(); Vector[] tables = MDBParser.parseDatabase(mdbFile); Vector<String> referencedLSMs = new Vector<String>(); for (int table=0; table<tables.length; table++) { String[] columnNames = (String[]) tables[table].get(0); String tableName = columnNames[0]; for (int row=1; row<tables[table].size(); row++) { String[] tableRow = (String[]) tables[table].get(row); for (int col=0; col<tableRow.length; col++) { String key = tableName + " " + columnNames[col + 1] + " " + row; if (currentId != null) { addGlobalMeta(key, tableRow[col]); } if (tableName.equals("Recordings") && columnNames[col + 1] != null && columnNames[col + 1].equals("SampleData")) { String filename = tableRow[col].trim(); filename = filename.replace('\\', File.separatorChar); filename = filename.replace('/', File.separatorChar); filename = filename.substring(filename.lastIndexOf(File.separator) + 1); if (filename.length() > 0) { Location file = new Location(parent, filename); if (file.exists()) { referencedLSMs.add(file.getAbsolutePath()); } } } } } } if (referencedLSMs.size() > 0) { return referencedLSMs.toArray(new String[0]); } String[] fileList = parent.list(); for (int i=0; i<fileList.length; i++) { if (checkSuffix(fileList[i], new String[] {"lsm"}) && !fileList[i].startsWith(".")) { referencedLSMs.add(new Location(parent, fileList[i]).getAbsolutePath()); } } return referencedLSMs.toArray(new String[0]); } private static Hashtable<Integer, String> createKeys() { Hashtable<Integer, String> h = new Hashtable<Integer, String>(); h.put(new Integer(0x10000001), "Name"); h.put(new Integer(0x4000000c), "Name"); h.put(new Integer(0x50000001), "Name"); h.put(new Integer(0x90000001), "Name"); h.put(new Integer(0x90000005), "Detection Channel Name"); h.put(new Integer(0xb0000003), "Name"); h.put(new Integer(0xd0000001), "Name"); h.put(new Integer(0x12000001), 
"Name"); h.put(new Integer(0x14000001), "Name"); h.put(new Integer(0x10000002), "Description"); h.put(new Integer(0x14000002), "Description"); h.put(new Integer(0x10000003), "Notes"); h.put(new Integer(0x10000004), "Objective"); h.put(new Integer(0x10000005), "Processing Summary"); h.put(new Integer(0x10000006), "Special Scan Mode"); h.put(new Integer(0x10000007), "Scan Type"); h.put(new Integer(0x10000008), "Scan Mode"); h.put(new Integer(0x10000009), "Number of Stacks"); h.put(new Integer(0x1000000a), "Lines Per Plane"); h.put(new Integer(0x1000000b), "Samples Per Line"); h.put(new Integer(0x1000000c), "Planes Per Volume"); h.put(new Integer(0x1000000d), "Images Width"); h.put(new Integer(0x1000000e), "Images Height"); h.put(new Integer(0x1000000f), "Number of Planes"); h.put(new Integer(0x10000010), "Number of Stacks"); h.put(new Integer(0x10000011), "Number of Channels"); h.put(new Integer(0x10000012), "Linescan XY Size"); h.put(new Integer(0x10000013), "Scan Direction"); h.put(new Integer(0x10000014), "Time Series"); h.put(new Integer(0x10000015), "Original Scan Data"); h.put(new Integer(0x10000016), "Zoom X"); h.put(new Integer(0x10000017), "Zoom Y"); h.put(new Integer(0x10000018), "Zoom Z"); h.put(new Integer(0x10000019), "Sample 0X"); h.put(new Integer(0x1000001a), "Sample 0Y"); h.put(new Integer(0x1000001b), "Sample 0Z"); h.put(new Integer(0x1000001c), "Sample Spacing"); h.put(new Integer(0x1000001d), "Line Spacing"); h.put(new Integer(0x1000001e), "Plane Spacing"); h.put(new Integer(0x1000001f), "Plane Width"); h.put(new Integer(0x10000020), "Plane Height"); h.put(new Integer(0x10000021), "Volume Depth"); h.put(new Integer(0x10000034), "Rotation"); h.put(new Integer(0x10000035), "Precession"); h.put(new Integer(0x10000036), "Sample 0Time"); h.put(new Integer(0x10000037), "Start Scan Trigger In"); h.put(new Integer(0x10000038), "Start Scan Trigger Out"); h.put(new Integer(0x10000039), "Start Scan Event"); h.put(new Integer(0x10000040), "Start Scan Time"); 
h.put(new Integer(0x10000041), "Stop Scan Trigger In"); h.put(new Integer(0x10000042), "Stop Scan Trigger Out"); h.put(new Integer(0x10000043), "Stop Scan Event"); h.put(new Integer(0x10000044), "Stop Scan Time"); h.put(new Integer(0x10000045), "Use ROIs"); h.put(new Integer(0x10000046), "Use Reduced Memory ROIs"); h.put(new Integer(0x10000047), "User"); h.put(new Integer(0x10000048), "Use B/C Correction"); h.put(new Integer(0x10000049), "Position B/C Contrast 1"); h.put(new Integer(0x10000050), "Position B/C Contrast 2"); h.put(new Integer(0x10000051), "Interpolation Y"); h.put(new Integer(0x10000052), "Camera Binning"); h.put(new Integer(0x10000053), "Camera Supersampling"); h.put(new Integer(0x10000054), "Camera Frame Width"); h.put(new Integer(0x10000055), "Camera Frame Height"); h.put(new Integer(0x10000056), "Camera Offset X"); h.put(new Integer(0x10000057), "Camera Offset Y"); h.put(new Integer(0x40000001), "Multiplex Type"); h.put(new Integer(0x40000002), "Multiplex Order"); h.put(new Integer(0x40000003), "Sampling Mode"); h.put(new Integer(0x40000004), "Sampling Method"); h.put(new Integer(0x40000005), "Sampling Number"); h.put(new Integer(0x40000006), "Acquire"); h.put(new Integer(0x50000002), "Acquire"); h.put(new Integer(0x7000000b), "Acquire"); h.put(new Integer(0x90000004), "Acquire"); h.put(new Integer(0xd0000017), "Acquire"); h.put(new Integer(0x40000007), "Sample Observation Time"); h.put(new Integer(0x40000008), "Time Between Stacks"); h.put(new Integer(0x4000000d), "Collimator 1 Name"); h.put(new Integer(0x4000000e), "Collimator 1 Position"); h.put(new Integer(0x4000000f), "Collimator 2 Name"); h.put(new Integer(0x40000010), "Collimator 2 Position"); h.put(new Integer(0x40000011), "Is Bleach Track"); h.put(new Integer(0x40000012), "Bleach After Scan Number"); h.put(new Integer(0x40000013), "Bleach Scan Number"); h.put(new Integer(0x40000014), "Trigger In"); h.put(new Integer(0x12000004), "Trigger In"); h.put(new Integer(0x14000003), "Trigger 
In"); h.put(new Integer(0x40000015), "Trigger Out"); h.put(new Integer(0x12000005), "Trigger Out"); h.put(new Integer(0x14000004), "Trigger Out"); h.put(new Integer(0x40000016), "Is Ratio Track"); h.put(new Integer(0x40000017), "Bleach Count"); h.put(new Integer(0x40000018), "SPI Center Wavelength"); h.put(new Integer(0x40000019), "Pixel Time"); h.put(new Integer(0x40000020), "ID Condensor Frontlens"); h.put(new Integer(0x40000021), "Condensor Frontlens"); h.put(new Integer(0x40000022), "ID Field Stop"); h.put(new Integer(0x40000023), "Field Stop Value"); h.put(new Integer(0x40000024), "ID Condensor Aperture"); h.put(new Integer(0x40000025), "Condensor Aperture"); h.put(new Integer(0x40000026), "ID Condensor Revolver"); h.put(new Integer(0x40000027), "Condensor Revolver"); h.put(new Integer(0x40000028), "ID Transmission Filter 1"); h.put(new Integer(0x40000029), "ID Transmission 1"); h.put(new Integer(0x40000030), "ID Transmission Filter 2"); h.put(new Integer(0x40000031), "ID Transmission 2"); h.put(new Integer(0x40000032), "Repeat Bleach"); h.put(new Integer(0x40000033), "Enable Spot Bleach Pos"); h.put(new Integer(0x40000034), "Spot Bleach Position X"); h.put(new Integer(0x40000035), "Spot Bleach Position Y"); h.put(new Integer(0x40000036), "Bleach Position Z"); h.put(new Integer(0x50000003), "Power"); h.put(new Integer(0x90000002), "Power"); h.put(new Integer(0x70000003), "Detector Gain"); h.put(new Integer(0x70000005), "Amplifier Gain"); h.put(new Integer(0x70000007), "Amplifier Offset"); h.put(new Integer(0x70000009), "Pinhole Diameter"); h.put(new Integer(0x7000000c), "Detector Name"); h.put(new Integer(0x7000000d), "Amplifier Name"); h.put(new Integer(0x7000000e), "Pinhole Name"); h.put(new Integer(0x7000000f), "Filter Set Name"); h.put(new Integer(0x70000010), "Filter Name"); h.put(new Integer(0x70000013), "Integrator Name"); h.put(new Integer(0x70000014), "Detection Channel Name"); h.put(new Integer(0x70000015), "Detector Gain B/C 1"); h.put(new 
Integer(0x70000016), "Detector Gain B/C 2"); h.put(new Integer(0x70000017), "Amplifier Gain B/C 1"); h.put(new Integer(0x70000018), "Amplifier Gain B/C 2"); h.put(new Integer(0x70000019), "Amplifier Offset B/C 1"); h.put(new Integer(0x70000020), "Amplifier Offset B/C 2"); h.put(new Integer(0x70000021), "Spectral Scan Channels"); h.put(new Integer(0x70000022), "SPI Wavelength Start"); h.put(new Integer(0x70000023), "SPI Wavelength End"); h.put(new Integer(0x70000026), "Dye Name"); h.put(new Integer(0xd0000014), "Dye Name"); h.put(new Integer(0x70000027), "Dye Folder"); h.put(new Integer(0xd0000015), "Dye Folder"); h.put(new Integer(0x90000003), "Wavelength"); h.put(new Integer(0x90000006), "Power B/C 1"); h.put(new Integer(0x90000007), "Power B/C 2"); h.put(new Integer(0xb0000001), "Filter Set"); h.put(new Integer(0xb0000002), "Filter"); h.put(new Integer(0xd0000004), "Color"); h.put(new Integer(0xd0000005), "Sample Type"); h.put(new Integer(0xd0000006), "Bits Per Sample"); h.put(new Integer(0xd0000007), "Ratio Type"); h.put(new Integer(0xd0000008), "Ratio Track 1"); h.put(new Integer(0xd0000009), "Ratio Track 2"); h.put(new Integer(0xd000000a), "Ratio Channel 1"); h.put(new Integer(0xd000000b), "Ratio Channel 2"); h.put(new Integer(0xd000000c), "Ratio Const. 1"); h.put(new Integer(0xd000000d), "Ratio Const. 2"); h.put(new Integer(0xd000000e), "Ratio Const. 3"); h.put(new Integer(0xd000000f), "Ratio Const. 4"); h.put(new Integer(0xd0000010), "Ratio Const. 5"); h.put(new Integer(0xd0000011), "Ratio Const. 
6"); h.put(new Integer(0xd0000012), "Ratio First Images 1"); h.put(new Integer(0xd0000013), "Ratio First Images 2"); h.put(new Integer(0xd0000016), "Spectrum"); h.put(new Integer(0x12000003), "Interval"); return h; } private Integer readEntry() throws IOException { return new Integer(in.readInt()); } private Object readValue() throws IOException { int blockType = in.readInt(); int dataSize = in.readInt(); switch (blockType) { case TYPE_LONG: return new Long(in.readInt()); case TYPE_RATIONAL: return new Double(in.readDouble()); case TYPE_ASCII: String s = in.readString(dataSize).trim(); StringBuffer sb = new StringBuffer(); for (int i=0; i<s.length(); i++) { if (s.charAt(i) >= 10) sb.append(s.charAt(i)); else break; } return sb.toString(); case TYPE_SUBBLOCK: return null; } in.skipBytes(dataSize); return ""; } // -- Helper classes -- class SubBlock { public Hashtable<Integer, Object> blockData; public boolean acquire = true; public SubBlock() { try { read(); } catch (IOException e) { traceDebug(e); } } protected int getIntValue(int key) { Object o = blockData.get(new Integer(key)); if (o == null) return -1; return !(o instanceof Number) ? -1 : ((Number) o).intValue(); } protected float getFloatValue(int key) { Object o = blockData.get(new Integer(key)); if (o == null) return -1f; return !(o instanceof Number) ? -1f : ((Number) o).floatValue(); } protected double getDoubleValue(int key) { Object o = blockData.get(new Integer(key)); if (o == null) return -1d; return !(o instanceof Number) ? -1d : ((Number) o).doubleValue(); } protected String getStringValue(int key) { Object o = blockData.get(new Integer(key)); return o == null ? 
null : o.toString(); } protected void read() throws IOException { blockData = new Hashtable<Integer, Object>(); Integer entry = readEntry(); Object value = readValue(); while (value != null) { if (!blockData.containsKey(entry)) blockData.put(entry, value); entry = readEntry(); value = readValue(); } } public void addToHashtable() { String prefix = this.getClass().getSimpleName() + " int index = 1; while (getSeriesMeta(prefix + index + " Acquire") != null) index++; prefix += index; Integer[] keys = blockData.keySet().toArray(new Integer[0]); for (Integer key : keys) { if (metadataKeys.get(key) != null) { addSeriesMeta(prefix + " " + metadataKeys.get(key), blockData.get(key)); if (metadataKeys.get(key).equals("Bits Per Sample")) { core[getSeries()].bitsPerPixel = Integer.parseInt(blockData.get(key).toString()); } } } addGlobalMeta(prefix + " Acquire", new Boolean(acquire)); } } class Recording extends SubBlock { public String description; public String name; public String binning; public String startTime; // Objective data public String correction, immersion; public Integer magnification; public Double lensNA; public Boolean iris; protected void read() throws IOException { super.read(); description = getStringValue(RECORDING_DESCRIPTION); name = getStringValue(RECORDING_NAME); binning = getStringValue(RECORDING_CAMERA_BINNING); if (binning != null && binning.indexOf("x") == -1) { if (binning.equals("0")) binning = null; else binning += "x" + binning; } // start time in days since Dec 30 1899 long stamp = (long) (getDoubleValue(RECORDING_SAMPLE_0TIME) * 86400000); if (stamp > 0) { startTime = DateTools.convertDate(stamp, DateTools.MICROSOFT); } zoom = (Double) getDoubleValue(RECORDING_ZOOM); String objective = getStringValue(RECORDING_OBJECTIVE); correction = ""; if (objective == null) objective = ""; String[] tokens = objective.split(" "); int next = 0; for (; next<tokens.length; next++) { if (tokens[next].indexOf("/") != -1) break; correction += tokens[next]; } if 
(next < tokens.length) { String p = tokens[next++]; try { magnification = new Integer(p.substring(0, p.indexOf("/") - 1)); } catch (NumberFormatException e) { } try { lensNA = new Double(p.substring(p.indexOf("/") + 1)); } catch (NumberFormatException e) { } } immersion = next < tokens.length ? tokens[next++] : "Unknown"; iris = Boolean.FALSE; if (next < tokens.length) { iris = new Boolean(tokens[next++].trim().equalsIgnoreCase("iris")); } } } class Laser extends SubBlock { public String medium, type; public Double power; protected void read() throws IOException { super.read(); type = getStringValue(LASER_NAME); if (type == null) type = ""; medium = ""; if (type.startsWith("HeNe")) { medium = "HeNe"; type = "Gas"; } else if (type.startsWith("Argon")) { medium = "Ar"; type = "Gas"; } else if (type.equals("Titanium:Sapphire") || type.equals("Mai Tai")) { medium = "TiSapphire"; type = "SolidState"; } else if (type.equals("YAG")) { medium = ""; type = "SolidState"; } else if (type.equals("Ar/Kr")) { medium = ""; type = "Gas"; } acquire = getIntValue(LASER_ACQUIRE) != 0; power = getDoubleValue(LASER_POWER); } } class Track extends SubBlock { public Double timeIncrement; protected void read() throws IOException { super.read(); timeIncrement = getDoubleValue(TRACK_TIME_BETWEEN_STACKS); acquire = getIntValue(TRACK_ACQUIRE) != 0; } } class DetectionChannel extends SubBlock { public Double pinhole; public Double gain, amplificationGain; public String filter, filterSet; public String channelName; protected void read() throws IOException { super.read(); pinhole = new Double(getDoubleValue(CHANNEL_PINHOLE_DIAMETER)); gain = new Double(getDoubleValue(CHANNEL_DETECTOR_GAIN)); amplificationGain = new Double(getDoubleValue(CHANNEL_AMPLIFIER_GAIN)); filter = getStringValue(CHANNEL_FILTER); if (filter != null) { filter = filter.trim(); if (filter.length() == 0 || filter.equals("None")) { filter = null; } } filterSet = getStringValue(CHANNEL_FILTER_SET); channelName = 
getStringValue(CHANNEL_NAME); acquire = getIntValue(CHANNEL_ACQUIRE) != 0; } } class IlluminationChannel extends SubBlock { public Integer wavelength; public Double attenuation; protected void read() throws IOException { super.read(); wavelength = new Integer(getIntValue(ILLUM_CHANNEL_WAVELENGTH)); attenuation = new Double(getDoubleValue(ILLUM_CHANNEL_ATTENUATION)); acquire = getIntValue(ILLUM_CHANNEL_ACQUIRE) != 0; } } class DataChannel extends SubBlock { public String name; protected void read() throws IOException { super.read(); name = getStringValue(DATA_CHANNEL_NAME); for (int i=0; i<name.length(); i++) { if (name.charAt(i) < 10) { name = name.substring(0, i); break; } } acquire = getIntValue(DATA_CHANNEL_ACQUIRE) != 0; } } class BeamSplitter extends SubBlock { public String filter, filterSet; protected void read() throws IOException { super.read(); filter = getStringValue(BEAM_SPLITTER_FILTER); if (filter != null) { filter = filter.trim(); if (filter.length() == 0 || filter.equals("None")) { filter = null; } } filterSet = getStringValue(BEAM_SPLITTER_FILTER_SET); } } class Timer extends SubBlock { } class Marker extends SubBlock { } }
package io.kemper; import com.amazonaws.services.lambda.runtime.Context; import com.amazonaws.services.lambda.runtime.RequestHandler; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import io.kemper.api.*; import io.kemper.domain.Riddle; import io.kemper.service.RiddleService; import java.io.UnsupportedEncodingException; import java.net.URLDecoder; import java.util.*; public class GetRiddleHandler implements RequestHandler<LambdaProxyRequest, Response> { @Override public Response handleRequest(LambdaProxyRequest request, Context context) { context.getLogger().log("Input: " + request); Map<String, String> requestBody = parseRequestBody(request.body); System.out.println(requestBody); if("help".equals(requestBody.get("text"))) { return getHelp(); } return getRiddle(requestBody.get("text")); } private Response getRiddle(String category) { Riddle riddle = new RiddleService().getRandomRiddle(); Action getAnswerAction = new Action("answer", "Get Answer", "button", riddle.getId().toString()); List<Action> actions = new ArrayList<>(); actions.add(getAnswerAction); Attachment getRiddleAttachment = new Attachment("...", "Unable to get answer", "mycallbackid", "#00FF00", "default", actions); List<Attachment> attachments = new ArrayList<>(); attachments.add(getRiddleAttachment); SlackResponse slackResponse = new SlackResponse(ResponseType.in_channel, riddle.getQuestion(), attachments); String body = marshall(slackResponse); Response response = new Response(body, 200); System.out.println(response); return response; } private Response getHelp() { SlackResponse slackResponse = new SlackResponse(ResponseType.ephemeral, "Type `/riddle` to get a random riddle.", null); String body = marshall(slackResponse); Response response = new Response(body, 200); return response; } public Map<String, String> parseRequestBody(String body) { try { body = URLDecoder.decode(body, "UTF-8"); } catch (UnsupportedEncodingException e) { 
e.printStackTrace(); } Map<String, String> requestBody = new HashMap<>(); String[] keypairs = body.split("&"); for(String keypair : keypairs) { String[] temp = keypair.split("="); if(temp.length == 2) { requestBody.put(temp[0], temp[1]); } } return requestBody; } public static String marshall(Object obj) { ObjectMapper mapper = new ObjectMapper(); String body = null; try { body = mapper.writeValueAsString(obj); } catch (JsonProcessingException e) { e.printStackTrace(); } return body; } }
package com.axelor.meta;

import static com.axelor.common.StringUtils.isBlank;

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.CopyOption;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.StandardCopyOption;
import java.nio.file.attribute.BasicFileAttributes;
import java.nio.file.attribute.FileAttribute;

import javax.inject.Inject;
import javax.persistence.PersistenceException;

import com.axelor.app.AppSettings;
import com.axelor.db.EntityHelper;
import com.axelor.db.Model;
import com.axelor.dms.db.DMSFile;
import com.axelor.dms.db.repo.DMSFileRepository;
import com.axelor.inject.Beans;
import com.axelor.meta.db.MetaAttachment;
import com.axelor.meta.db.MetaFile;
import com.axelor.meta.db.repo.MetaAttachmentRepository;
import com.axelor.meta.db.repo.MetaFileRepository;
import com.google.common.base.Preconditions;
import com.google.inject.persist.Transactional;

/**
 * This class provides some helper methods to deal with files.
 *
 */
public class MetaFiles {

	private static final String DEFAULT_UPLOAD_PATH = "{java.io.tmpdir}/axelor/attachments";

	private static final Path UPLOAD_PATH = Paths.get(AppSettings.get().get("file.upload.dir", DEFAULT_UPLOAD_PATH));
	private static final Path UPLOAD_PATH_TEMP = UPLOAD_PATH.resolve("tmp");

	private static final CopyOption[] COPY_OPTIONS = {
		StandardCopyOption.REPLACE_EXISTING,
		StandardCopyOption.COPY_ATTRIBUTES
	};

	private static final CopyOption[] MOVE_OPTIONS = {
		StandardCopyOption.REPLACE_EXISTING
	};

	// temp clean up threshold 24 hours
	private static final long TEMP_THRESHOLD = 24 * 3600 * 1000;

	// guards concurrent unique-name generation in getNextPath
	private static final Object lock = new Object();

	private MetaFileRepository filesRepo;

	@Inject
	public MetaFiles(MetaFileRepository filesRepo) {
		this.filesRepo = filesRepo;
	}

	/**
	 * Get the actual storage path of the file represented by the give
	 * {@link MetaFile} instance.
	 *
	 * @param file
	 *            the given {@link MetaFile} instance
	 * @return actual file path
	 */
	public static Path getPath(MetaFile file) {
		Preconditions.checkNotNull(file, "file instance can't be null");
		return UPLOAD_PATH.resolve(file.getFilePath());
	}

	/**
	 * Get the actual storage path of the given relative file path.
	 *
	 * @param filePath
	 *            relative file path
	 * @return actual file path
	 */
	public static Path getPath(String filePath) {
		Preconditions.checkNotNull(filePath, "file path can't be null");
		return UPLOAD_PATH.resolve(filePath);
	}

	/**
	 * Create a temporary file under upload directory.
	 *
	 * @see Files#createTempFile(String, String, FileAttribute...)
	 */
	public static Path createTempFile(String prefix, String suffix, FileAttribute<?>... attrs) throws IOException {
		// make sure the upload directories exist
		Files.createDirectories(UPLOAD_PATH_TEMP);
		return Files.createTempFile(UPLOAD_PATH_TEMP, prefix, suffix, attrs);
	}

	/**
	 * Find a temporary file by the given name created previously.
	 *
	 * @param name
	 *            name of the temp file
	 * @return file path
	 */
	public static Path findTempFile(String name) {
		return UPLOAD_PATH_TEMP.resolve(name);
	}

	/**
	 * Compute a unique target path under the upload directory for the given
	 * file name, appending " (n)" before the extension while a file with the
	 * candidate name already exists.
	 */
	private Path getNextPath(String fileName) {
		synchronized (lock) {
			int dotIndex = fileName.lastIndexOf('.');
			int counter = 1;
			// FIX: previously substring(0, dotIndex) ran before the dotIndex > -1
			// check, throwing StringIndexOutOfBoundsException for names without
			// an extension (e.g. "README").
			String fileNameBase = fileName;
			String fileNameExt = "";
			if (dotIndex > -1) {
				fileNameBase = fileName.substring(0, dotIndex);
				fileNameExt = fileName.substring(dotIndex);
			}
			String targetName = fileName;
			Path target = UPLOAD_PATH.resolve(targetName);
			while (Files.exists(target)) {
				targetName = fileNameBase + " (" + counter++ + ")" + fileNameExt;
				target = UPLOAD_PATH.resolve(targetName);
			}
			return target;
		}
	}

	/**
	 * Clean up obsolete temporary files from upload directory.
	 *
	 */
	public void clean() throws IOException {
		if (!Files.isDirectory(UPLOAD_PATH_TEMP)) {
			return;
		}
		final long currentTime = System.currentTimeMillis();
		Files.walkFileTree(UPLOAD_PATH_TEMP, new SimpleFileVisitor<Path>() {
			@Override
			public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
				long diff = currentTime - Files.getLastModifiedTime(file).toMillis();
				if (diff >= TEMP_THRESHOLD) {
					Files.deleteIfExists(file);
				}
				return FileVisitResult.CONTINUE;
			}
		});
	}

	/**
	 * This method can be used to delete temporary file of an incomplete upload.
	 *
	 * @param fileId
	 *            the upload file id
	 */
	public void clean(String fileId) throws IOException {
		Files.deleteIfExists(UPLOAD_PATH_TEMP.resolve(fileId));
	}

	/**
	 * Upload the given chunk of file data to a temporary file identified by the
	 * given file id.
	 *
	 * <p>
	 * Upload would restart if startOffset is 0 (zero), otherwise upload file
	 * size is checked against given startOffset. The startOffset must be less
	 * than expected fileSize.
	 *
	 * <p>
	 * Unlike the {@link #upload(File, MetaFile)} or {@link #upload(File)}
	 * methods, this method doesn't create {@link MetaFile} instance.
	 *
	 * <p>
	 * The temporary file generated should be manually uploaded again using
	 * {@link #upload(File, MetaFile)} or should be deleted using
	 * {@link #clean(String)} method if something went wrong.
	 *
	 * @param chunk
	 *            the input stream
	 * @param startOffset
	 *            the start offset byte position
	 * @param fileSize
	 *            the actual file size
	 * @param fileId
	 *            an unique upload file identifier
	 * @return a temporary file where upload is being saved
	 * @throws IOException
	 *             if there is any error during io operations
	 */
	public File upload(InputStream chunk, long startOffset, long fileSize, String fileId) throws IOException {
		final Path tmp = UPLOAD_PATH_TEMP.resolve(fileId);
		if ((fileSize > -1 && startOffset > fileSize)
				|| (Files.exists(tmp) && Files.size(tmp) != startOffset)
				|| (!Files.exists(tmp) && startOffset > 0)) {
			throw new IllegalArgumentException("Start offset is out of bound.");
		}

		// make sure the upload directories exist
		Files.createDirectories(UPLOAD_PATH_TEMP);

		// clean up obsolete temporary files (best-effort; failures are ignored
		// so that a cleanup problem never aborts an in-progress upload)
		try {
			clean();
		} catch (Exception ignored) {
		}

		final File file = tmp.toFile();
		// append when resuming (startOffset > 0), truncate on a fresh upload
		final BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(file, startOffset > 0));
		try {
			int read = 0;
			long total = startOffset;
			byte[] bytes = new byte[4096];
			while ((read = chunk.read(bytes)) != -1) {
				total += read;
				if (fileSize > -1 && total > fileSize) {
					throw new IllegalArgumentException("Invalid chunk, oversized upload.");
				}
				bos.write(bytes, 0, read);
			}
			bos.flush();
		} finally {
			bos.close();
		}

		return file;
	}

	/**
	 * Upload the given file to the file upload directory and create an instance
	 * of {@link MetaFile} for the given file.
	 *
	 * @param file
	 *            the given file
	 * @return an instance of {@link MetaFile}
	 * @throws IOException
	 *             if unable to read the file
	 * @throws PersistenceException
	 *             if unable to save to a {@link MetaFile} instance
	 */
	@Transactional
	public MetaFile upload(File file) throws IOException {
		return upload(file, new MetaFile());
	}

	/**
	 * Upload the given {@link File} to the upload directory and link it to the
	 * to given {@link MetaFile}.
	 *
	 * <p>
	 * Any existing file linked to the given {@link MetaFile} will be removed
	 * from the upload directory.
	 * </p>
	 *
	 * @param file
	 *            the file to upload
	 * @param metaFile
	 *            the target {@link MetaFile} instance
	 * @return persisted {@link MetaFile} instance
	 * @throws IOException
	 *             if unable to read the file
	 * @throws PersistenceException
	 *             if unable to save to {@link MetaFile} instance
	 */
	@Transactional
	public MetaFile upload(File file, MetaFile metaFile) throws IOException {
		Preconditions.checkNotNull(metaFile);
		Preconditions.checkNotNull(file);

		final boolean update = !isBlank(metaFile.getFilePath());
		final String fileName = isBlank(metaFile.getFileName()) ? file.getName() : metaFile.getFileName();
		final String targetName = update ? metaFile.getFilePath() : fileName;
		final Path path = UPLOAD_PATH.resolve(targetName);
		// back up the existing file so it can be restored if persistence fails
		final Path tmp = update ? createTempFile(null, null) : null;

		if (update && Files.exists(path)) {
			Files.move(path, tmp, MOVE_OPTIONS);
		}

		try {
			final Path source = file.toPath();
			final Path target = getNextPath(fileName);

			// make sure the upload path exists
			Files.createDirectories(UPLOAD_PATH);

			// if source is in tmp directory, move it otherwise copy
			if (UPLOAD_PATH_TEMP.equals(source.getParent())) {
				Files.move(source, target, MOVE_OPTIONS);
			} else {
				Files.copy(source, target, COPY_OPTIONS);
			}

			// only update file name if not provides from meta file
			if (isBlank(metaFile.getFileName())) {
				metaFile.setFileName(file.getName());
			}
			if (isBlank(metaFile.getFileType())) {
				metaFile.setFileType(Files.probeContentType(target));
			}
			metaFile.setFileSize(Files.size(target));
			metaFile.setFilePath(target.toFile().getName());

			try {
				return filesRepo.save(metaFile);
			} catch (Exception e) {
				// delete the uploaded file
				Files.deleteIfExists(target);
				// restore original file
				// FIX: the backup was taken from `path`, so it must be restored
				// to `path` (the previous filePath location), not to `target`.
				if (tmp != null) {
					Files.move(tmp, path, MOVE_OPTIONS);
				}
				throw new PersistenceException(e);
			}
		} finally {
			if (tmp != null) {
				Files.deleteIfExists(tmp);
			}
		}
	}

	/**
	 * Upload the given stream to the upload directory and link it to the to
	 * given {@link MetaFile}.
	 *
	 * <p>
	 * The given {@link MetaFile} instance must have fileName set to save the
	 * stream as file.
	 * </p>
	 *
	 * Upload the stream
	 *
	 * @param stream
	 *            the stream to upload
	 * @param metaFile
	 *            the {@link MetaFile} to link the uploaded file
	 * @return the given {@link MetaFile} instance
	 * @throws IOException
	 */
	@Transactional
	public MetaFile upload(InputStream stream, MetaFile metaFile) throws IOException {
		Preconditions.checkNotNull(stream, "stream can't be null");
		Preconditions.checkNotNull(metaFile, "meta file can't be null");
		Preconditions.checkNotNull(metaFile.getFileName(), "meta file should have filename");

		final Path tmp = createTempFile(null, null);
		final File tmpFile = upload(stream, 0, -1, tmp.toFile().getName());

		return upload(tmpFile, metaFile);
	}

	/**
	 * Upload the given stream to the upload directory.
	 *
	 * @param stream
	 *            the stream to upload
	 * @param fileName
	 *            the file name to use
	 * @return a new {@link MetaFile} instance
	 * @throws IOException
	 */
	@Transactional
	public MetaFile upload(InputStream stream, String fileName) throws IOException {
		final MetaFile file = new MetaFile();
		file.setFileName(fileName);
		return upload(stream, file);
	}

	/**
	 * Upload the given file stream and attach it to the given record.
	 *
	 * <p>
	 * The attachment will be saved as {@link DMSFile} and will be visible in
	 * DMS user interface. Use {@link #upload(InputStream, String)} along with
	 * {@link #attach(MetaFile, Model)} if you don't want to show the attachment
	 * in DMS interface.
	 * </p>
	 *
	 * @param stream
	 *            the stream to upload
	 * @param fileName
	 *            the file name to use
	 * @param entity
	 *            the record to attach to
	 * @return a {@link DMSFile} record created for the attachment
	 * @throws IOException
	 */
	@Transactional
	public DMSFile attach(InputStream stream, String fileName, Model entity) throws IOException {
		final MetaFile metaFile = upload(stream, fileName);
		return attach(metaFile, fileName, entity);
	}

	/**
	 * Attach the given file to the given record.
	 *
	 * @param metaFile
	 *            the file to attach
	 * @param fileName
	 *            alternative file name to use (optional, can be null)
	 * @param entity
	 *            the record to attach to
	 * @return a {@link DMSFile} record created for the attachment
	 * @throws IOException
	 */
	@Transactional
	public DMSFile attach(MetaFile metaFile, String fileName, Model entity) throws IOException {
		Preconditions.checkNotNull(metaFile);
		Preconditions.checkNotNull(metaFile.getId());
		Preconditions.checkNotNull(entity);
		Preconditions.checkNotNull(entity.getId());

		final String name = isBlank(fileName) ? metaFile.getFileName() : fileName;
		final DMSFile dmsFile = new DMSFile();
		final DMSFileRepository repository = Beans.get(DMSFileRepository.class);

		dmsFile.setFileName(name);
		dmsFile.setMetaFile(metaFile);
		dmsFile.setRelatedId(entity.getId());
		dmsFile.setRelatedModel(EntityHelper.getEntityClass(entity).getName());

		repository.save(dmsFile);

		return dmsFile;
	}

	/**
	 * Delete the given {@link DMSFile} and also delete linked file if not
	 * referenced by any other record.
	 *
	 * <p>
	 * It will attempt to clean up associated {@link MetaFile} and
	 * {@link MetaAttachment} records and also try to delete linked file from
	 * upload directory.
	 * </p>
	 *
	 * @param file
	 *            the {@link DMSFile} to delete
	 */
	@Transactional
	public void delete(DMSFile file) {
		final DMSFileRepository repository = Beans.get(DMSFileRepository.class);
		repository.remove(file);
	}

	/**
	 * Attach the given {@link MetaFile} to the given {@link Model} object and
	 * return an instance of a {@link MetaAttachment} that represents the
	 * attachment.
	 * <p>
	 * The {@link MetaAttachment} instance is not persisted.
	 * </p>
	 *
	 * @param file
	 *            the given {@link MetaFile} instance
	 * @param entity
	 *            the given {@link Model} instance
	 * @return a new instance of {@link MetaAttachment}
	 */
	public MetaAttachment attach(MetaFile file, Model entity) {
		Preconditions.checkNotNull(file);
		Preconditions.checkNotNull(entity);
		Preconditions.checkNotNull(entity.getId());

		MetaAttachment attachment = new MetaAttachment();
		attachment.setMetaFile(file);
		attachment.setObjectId(entity.getId());
		attachment.setObjectName(EntityHelper.getEntityClass(entity).getName());

		return attachment;
	}

	/**
	 * Delete the given attachment & related {@link MetaFile} instance along
	 * with the file content.
	 *
	 * @param attachment
	 *            the attachment to delete
	 * @throws IOException if unable to delete file
	 */
	@Transactional
	public void delete(MetaAttachment attachment) throws IOException {
		Preconditions.checkNotNull(attachment);
		Preconditions.checkNotNull(attachment.getMetaFile());

		MetaAttachmentRepository attachments = Beans.get(MetaAttachmentRepository.class);
		MetaFileRepository files = Beans.get(MetaFileRepository.class);
		DMSFileRepository dms = Beans.get(DMSFileRepository.class);

		attachments.remove(attachment);

		MetaFile metaFile = attachment.getMetaFile();
		long count = dms.all().filter("self.metaFile = ?", metaFile).count();
		if (count == 0) {
			count = attachments.all()
					.filter("self.metaFile = ? and self.id != ?", metaFile, attachment.getId())
					.count();
		}

		// only delete real file if not reference anywhere else
		if (count > 0) {
			return;
		}

		files.remove(metaFile);

		Path target = UPLOAD_PATH.resolve(metaFile.getFilePath());
		Files.deleteIfExists(target);
	}

	/**
	 * Delete the given {@link MetaFile} instance along with the file content.
	 *
	 * @param file
	 *            the file to delete
	 * @throws IOException
	 *             if unable to delete file
	 */
	@Transactional
	public void delete(MetaFile metaFile) throws IOException {
		Preconditions.checkNotNull(metaFile);
		MetaFileRepository files = Beans.get(MetaFileRepository.class);

		Path target = UPLOAD_PATH.resolve(metaFile.getFilePath());
		files.remove(metaFile);

		Files.deleteIfExists(target);
	}

	/**
	 * Map the file's content type to a Font Awesome icon class.
	 *
	 * @param file
	 *            the file whose type to inspect
	 * @return an icon class name; "fa-file-o" when the type is unknown
	 */
	public String fileTypeIcon(MetaFile file) {
		String fileType = file.getFileType();
		// FIX: fileType may be null (Files.probeContentType can return null);
		// switching on a null String would throw NullPointerException.
		if (fileType == null) {
			return "fa-file-o";
		}
		switch (fileType) {
		case "application/msword":
		case "application/vnd.openxmlformats-officedocument.wordprocessingml.document":
		case "application/vnd.oasis.opendocument.text":
			return "fa-file-word-o";
		case "application/vnd.ms-excel":
		case "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet":
		case "application/vnd.oasis.opendocument.spreadsheet":
			return "fa-file-excel-o";
		case "application/pdf":
			return "fa-file-pdf-o";
		case "application/zip":
		case "application/gzip":
			return "fa-file-archive-o";
		default:
			if (fileType.startsWith("text")) return "fa-file-text-o";
			if (fileType.startsWith("image")) return "fa-file-image-o";
			if (fileType.startsWith("video")) return "fa-file-video-o";
		}
		return "fa-file-o";
	}
}
package nars.entity;

import nars.core.control.NAL;
import java.util.ArrayList;
import java.util.List;
import nars.core.Events.BeliefSelect;
import nars.core.Events.ConceptBeliefAdd;
import nars.core.Events.ConceptBeliefRemove;
import nars.core.Events.ConceptGoalAdd;
import nars.core.Events.ConceptGoalRemove;
import nars.core.Events.ConceptQuestionAdd;
import nars.core.Events.ConceptQuestionRemove;
import nars.core.Events.TaskLinkAdd;
import nars.core.Events.TaskLinkRemove;
import nars.core.Events.TermLinkAdd;
import nars.core.Events.TermLinkRemove;
import nars.core.Events.UnexecutableGoal;
import nars.core.Memory;
import nars.core.NARRun;
import static nars.inference.BudgetFunctions.distributeAmongLinks;
import static nars.inference.BudgetFunctions.rankBelief;
import nars.inference.Executive;
import static nars.inference.LocalRules.revisible;
import static nars.inference.LocalRules.revision;
import static nars.inference.LocalRules.trySolution;
import static nars.inference.TemporalRules.concurrent;
import static nars.inference.TemporalRules.solutionQuality;
import static nars.inference.UtilityFunctions.or;
import nars.io.Symbols;
import nars.language.CompoundTerm;
import nars.language.Term;
import nars.storage.Bag;
import nars.storage.Bag.MemoryAware;

/**
 * A concept in NARS memory, keyed by its {@link Term}. Holds the task/term
 * link bags and the tables of beliefs, desires, questions and quests, and
 * implements direct processing of new tasks (judgment/goal/question).
 */
public class Concept extends Item<Term> {

    /** The term is the unique ID of the concept */
    public final Term term;

    /** Task links for indirect processing */
    public final Bag<TaskLink,Task> taskLinks;

    /** Term links between the term and its components and compounds; beliefs */
    public final Bag<TermLink,TermLink> termLinks;

    /**
     * Link templates of TermLink, only in concepts with CompoundTerm. Templates
     * are used to improve the efficiency of TermLink building.
     * Null when the term is not a CompoundTerm (see constructor).
     */
    private final List<TermLink> termLinkTemplates;

    /**
     * Pending Questions directly asked about the term.
     *
     * Note: since this is iterated frequently, an array should be used. To
     * avoid iterator allocation, use .get(n) in a for-loop.
     */
    public final List<Task> questions;

    /** Pending Quests to be answered by new desire values */
    public final ArrayList<Task> quests;

    /**
     * Judgments directly made about the term. Uses ArrayList because of access
     * and insertion in the middle (rank-ordered, see addToTable).
     */
    public final ArrayList<Sentence> beliefs;

    /** Desire values on the term, similar to the above one */
    public final ArrayList<Sentence> desires;

    /** Reference to the memory to which the Concept belongs */
    public final Memory memory;

    /**
     * Constructor, called in Memory.getConcept only.
     *
     * @param b      the initial budget of this concept
     * @param tm     a term corresponding to the concept
     * @param taskLinks bag instance used to store task links
     * @param termLinks bag instance used to store term links
     * @param memory a reference to the memory
     */
    public Concept(final BudgetValue b, final Term tm, Bag<TaskLink,Task> taskLinks, Bag<TermLink,TermLink> termLinks, final Memory memory) {
        super(b);

        this.term = tm;
        this.memory = memory;

        this.questions = new ArrayList<>();
        this.beliefs = new ArrayList<>();
        this.quests = new ArrayList<>();
        this.desires = new ArrayList<>();

        this.taskLinks = taskLinks;
        this.termLinks = termLinks;

        // propagate the memory reference into bags that need it
        if (taskLinks instanceof MemoryAware) ((MemoryAware)taskLinks).setMemory(memory);
        if (termLinks instanceof MemoryAware) ((MemoryAware)termLinks).setMemory(memory);

        // only compound terms carry component-link templates
        if (tm instanceof CompoundTerm) {
            this.termLinkTemplates = ((CompoundTerm) tm).prepareComponentLinks();
        } else {
            this.termLinkTemplates = null;
        }
    }

    /** Equality is by concept name (the term). */
    @Override public boolean equals(Object obj) {
        if (this == obj) return true;
        if (!(obj instanceof Concept)) return false;
        return ((Concept)obj).name().equals(name());
    }

    @Override public int hashCode() {
        return name().hashCode();
    }

    @Override public Term name() {
        return term;
    }

    /**
     * Directly process a new task. Called exactly once on each task. Using
     * local information and finishing in a constant time. Provide feedback in
     * the taskBudget value of the task.
     * <p>
     * called in Memory.immediateProcess only
     *
     * @param nal  the derivation context
     * @param task The task to be processed
     * @return whether it was processed
     */
    public boolean directProcess(final NAL nal, final Task task) {
        char type = task.sentence.punctuation;
        switch (type) {
            case Symbols.JUDGMENT_MARK:
                memory.logic.JUDGMENT_PROCESS.commit();
                processJudgment(nal, task);
                break;
            case Symbols.GOAL_MARK:
                memory.logic.GOAL_PROCESS.commit();
                processGoal(nal, task);
                break;
            case Symbols.QUESTION_MARK:
            case Symbols.QUEST_MARK:
                memory.logic.QUESTION_PROCESS.commit();
                processQuestion(nal, task);
                break;
            default:
                return false;
        }

        if (task.aboveThreshold()) {    // still need to be processed
            memory.logic.LINK_TO_TASK.commit();
            linkToTask(task);
        }

        return true;
    }

    /**
     * To accept a new judgment as belief, and check for revisions and solutions.
     *
     * @param nal  the derivation context
     * @param task The task to be processed (its sentence is the judgment)
     */
    protected void processJudgment(final NAL nal, final Task task) {
        final Sentence judg = task.sentence;
        // only revise with the strongest -- how about projection?
        final Sentence oldBelief = selectCandidate(judg, beliefs);
        if (oldBelief != null) {
            final Stamp newStamp = judg.stamp;
            final Stamp oldStamp = oldBelief.stamp;
            if (newStamp.equals(oldStamp,false,false,true,true)) {
                if (task.getParentTask() != null && task.getParentTask().sentence.isJudgment()) {
                    // duplicated task
                }
                // else: activated belief
                memory.removeTask(task, "Duplicated");
                return;
            } else if (revisible(judg, oldBelief)) {
                nal.setTheNewStamp(newStamp, oldStamp, memory.time());
                // project the old belief to the new occurrence time before revising
                Sentence projectedBelief = oldBelief.projection(newStamp.getOccurrenceTime(), memory.time());
                if (projectedBelief.getOccurenceTime()!=oldBelief.getOccurenceTime()) {
                    nal.singlePremiseTask(projectedBelief, task.budget);
                }
                nal.setCurrentBelief(projectedBelief);
                revision(judg, projectedBelief, false, nal);
            }
        }
        if (task.aboveThreshold()) {
            // try the new judgment as a solution to each pending question
            int nnq = questions.size();
            for (int i = 0; i < nnq; i++) {
                trySolution(judg, questions.get(i), nal);
            }
            addToTable(task, judg, beliefs, memory.param.conceptBeliefsMax.get(), ConceptBeliefAdd.class, ConceptBeliefRemove.class);
        }
    }

    /**
     * Add a sentence to a table under the table's own lock, then emit
     * add/remove events outside the synchronized section.
     */
    protected void addToTable(final Task task, final Sentence newSentence, final ArrayList<Sentence> table, final int max, final Class eventAdd, final Class eventRemove, final Object... extraEventArguments) {
        int preSize = table.size();
        Sentence removed;
        synchronized (table) {
            removed = addToTable(newSentence, table, max);
        }
        if (removed != null) {
            memory.event.emit(eventRemove, this, removed, task, extraEventArguments);
        }
        if ((preSize != table.size()) || (removed != null)) {
            memory.event.emit(eventAdd, this, newSentence, task, extraEventArguments);
        }
    }

    /**
     * To accept a new goal, and check for revisions and realization, then
     * decide whether to actively pursue it.
     *
     * @param nal  the derivation context
     * @param task The task to be processed (its sentence is the goal)
     */
    protected void processGoal(final NAL nal, final Task task) {
        final Sentence goal = task.sentence;
        // revise with the existing desire values
        final Sentence oldGoal = selectCandidate(goal, desires);
        if (oldGoal != null) {
            final Stamp newStamp = goal.stamp;
            final Stamp oldStamp = oldGoal.stamp;
            if (newStamp.equals(oldStamp,false,false,true,true)) {
                return; //duplicate
            } else if (revisible(goal, oldGoal)) {
                nal.setTheNewStamp(newStamp, oldStamp, memory.time());
                boolean success=revision(goal,oldGoal,false,nal);
                if(success) { //it is revised, so there is a new task for which this function will be called
                    return; //with higher/lower desire
                }
            }
        }
        if (task.aboveThreshold()) {
            final Sentence belief = selectCandidate(goal, beliefs); // check if the Goal is already satisfied
            if (belief != null) {
                trySolution(belief, task, nal); // check if the Goal is already satisfied
            }
            // still worth pursuing?
            if (task.aboveThreshold()) {
                addToTable(task, goal, desires, memory.param.conceptGoalsMax.get(), ConceptGoalAdd.class, ConceptGoalRemove.class);
                // only attempt execution for eternal or recent goals
                if(task.sentence.getOccurenceTime()==Stamp.ETERNAL || task.sentence.getOccurenceTime()>=memory.time()-memory.param.duration.get()) {
                    if(!memory.executive.executeDecision(task, this)) {
                        memory.emit(UnexecutableGoal.class, task, this, nal);
                    }
                }
            }
        }
    }

    /**
     * To answer a question by existing beliefs.
     *
     * @param nal  the derivation context
     * @param task The task to be processed (its sentence is the question)
     */
    protected void processQuestion(final NAL nal, final Task task) {
        Sentence ques = task.sentence;
        // reuse an already-registered question with equal content, if any
        boolean newQuestion = true;
        for (final Task t : questions) {
            final Sentence q = t.sentence;
            if (q.equalsContent(ques)) {
                ques = q;
                newQuestion = false;
                break;
            }
        }

        if (newQuestion) {
            if (questions.size() + 1 > memory.param.conceptQuestionsMax.get()) {
                Task removed = questions.remove(0);    // FIFO
                memory.event.emit(ConceptQuestionRemove.class, this, removed);
            }

            questions.add(task);
            memory.event.emit(ConceptQuestionAdd.class, this, task);
        }

        // questions are answered from beliefs, quests from desires
        final Sentence newAnswer = (ques.isQuestion())
                ? selectCandidate(ques, beliefs)
                : selectCandidate(ques, desires);
        if (newAnswer != null) {
            trySolution(newAnswer, task, nal);
        }
    }

    /**
     * Link to a new task from all relevant concepts for continued processing in
     * the near future for unspecified time.
     * <p>
     * The only method that calls the TaskLink constructor.
     *
     * @param task The task to be linked
     */
    public void linkToTask(final Task task) {
        final BudgetValue taskBudget = task.budget;

        insertTaskLink(new TaskLink(task, null, taskBudget, memory.param.termLinkRecordLength.get()));  // link type: SELF

        if (term instanceof CompoundTerm) {
            if (!termLinkTemplates.isEmpty()) {
                final BudgetValue subBudget = distributeAmongLinks(taskBudget, termLinkTemplates.size());
                if (subBudget.aboveThreshold()) {
                    for (int t = 0; t < termLinkTemplates.size(); t++) {
                        TermLink termLink = termLinkTemplates.get(t);
                        Term componentTerm = termLink.target;

                        Concept componentConcept = memory.conceptualize(subBudget, componentTerm);

                        if (componentConcept != null) {
                            componentConcept.insertTaskLink(
                                new TaskLink(task, termLink, subBudget, memory.param.termLinkRecordLength.get()));
                        }
                    }

                    buildTermLinks(taskBudget);  // recursively insert TermLink
                }
            }
        }
    }

    /**
     * Add a new belief (or goal) into the table. Sort the beliefs/desires by
     * rank, and remove redundant or low rank one.
     *
     * @param newSentence The judgment to be processed
     * @param table The table to be revised
     * @param capacity The capacity of the table
     * @return the sentence evicted due to capacity, or null
     */
    private static Sentence addToTable(final Sentence newSentence, final List<Sentence> table, final int capacity) {
        final float rank1 = rankBelief(newSentence);    // for the new isBelief
        float rank2;
        int i;
        // insert before the first entry of lower or equal rank
        for (i = 0; i < table.size(); i++) {
            Sentence judgment2 = table.get(i);
            rank2 = rankBelief(judgment2);
            if (rank1 >= rank2) {
                if (newSentence.equivalentTo(judgment2)) {
                    return null;
                }
                table.add(i, newSentence);
                break;
            }
        }

        if (table.size() >= capacity) {
            if (table.size() > capacity) {
                // over capacity: drop the lowest-ranked entry (at the end)
                Sentence removed = table.remove(table.size() - 1);
                return removed;
            }
        } else if (i == table.size()) {
            // lowest rank so far, but there is still room: append
            table.add(newSentence);
        }

        return null;
    }

    /**
     * Select a belief value or desire value for a given query.
     *
     * @param query The query to be processed
     * @param list The list of beliefs or desires to be used
     * @return The best candidate selected
     */
    private Sentence selectCandidate(final Sentence query, final List<Sentence> list) {
        float currentBest = 0;
        float beliefQuality;
        Sentence candidate = null;

        synchronized (list) {
            for (int i = 0; i < list.size(); i++) {
                Sentence judg = list.get(i);
                beliefQuality = solutionQuality(query, judg, memory);
                if (beliefQuality > currentBest) {
                    currentBest = beliefQuality;
                    candidate = judg;
                }
            }
        }

        return candidate;
    }

    /**
     * Insert a TaskLink into the TaskLink bag.
     * <p>
     * called only from Memory.continuedProcess
     *
     * @param taskLink The taskLink to be inserted
     * @return false when the bag rejected the link itself, true otherwise
     */
    protected boolean insertTaskLink(final TaskLink taskLink) {
        TaskLink removed = taskLinks.putIn(taskLink);
        if (removed!=null) {
            if (removed == taskLink) {
                // the bag rejected the new link outright
                memory.emit(TaskLinkRemove.class, taskLink, this);
                return false;
            } else {
                // the new link displaced an existing one
                memory.emit(TaskLinkRemove.class, removed, this);
            }
        }
        memory.emit(TaskLinkAdd.class, taskLink, this);
        return true;
    }

    /**
     * Recursively build TermLinks between a compound and its components.
     * <p>
     * called only from Memory.continuedProcess
     *
     * @param taskBudget The BudgetValue of the task
     */
    public void buildTermLinks(final BudgetValue taskBudget) {
        if (termLinkTemplates.size() > 0) {
            BudgetValue subBudget = distributeAmongLinks(taskBudget, termLinkTemplates.size());
            if (subBudget.aboveThreshold()) {
                for (final TermLink template : termLinkTemplates) {
                    if (template.type != TermLink.TRANSFORM) {
                        Term target = template.target;

                        final Concept concept = memory.conceptualize(taskBudget, target);
                        if (concept != null) {
                            // this termLink to that
                            insertTermLink(new TermLink(target, template, subBudget));

                            // that termLink to this
                            concept.insertTermLink(new TermLink(term, template, subBudget));

                            if (target instanceof CompoundTerm) {
                                concept.buildTermLinks(subBudget);
                            }
                        }
                    }
                }
            }
        }
    }

    /**
     * Insert a TermLink into the TermLink bag.
     * <p>
     * called from buildTermLinks only
     *
     * @param termLink The termLink to be inserted
     * @return false when the bag rejected the link itself, true otherwise
     */
    public boolean insertTermLink(final TermLink termLink) {
        TermLink removed = termLinks.putIn(termLink);
        if (removed!=null) {
            if (removed == termLink) {
                memory.emit(TermLinkRemove.class, termLink, this);
                return false;
            } else {
                memory.emit(TermLinkRemove.class, removed, this);
            }
        }
        memory.emit(TermLinkAdd.class, termLink, this);
        return true;
    }

    /**
     * Return a string representation of the concept, called in ConceptBag only.
     *
     * @return The concept name, with taskBudget in the full version
     */
    @Override
    public String toString() {  // called from concept bag
        return super.toStringExternal();
    }

    /**
     * called from {@link NARRun}
     */
    @Override
    public String toStringLong() {
        String res =
                toStringExternal() + " " + term.name()
                + toStringIfNotNull(termLinks.size(), "termLinks")
                + toStringIfNotNull(taskLinks.size(), "taskLinks")
                + toStringIfNotNull(beliefs.size(), "beliefs")
                + toStringIfNotNull(desires.size(), "desires")
                + toStringIfNotNull(questions.size(), "questions")
                + toStringIfNotNull(quests.size(), "quests");
        // TODO other details?
        return res;
    }

    /** Render " title:item", or "" when item is null. */
    private String toStringIfNotNull(final Object item, final String title) {
        if (item == null) {
            return "";
        }

        final String itemString = item.toString();

        return new StringBuilder(2 + title.length() + itemString.length() + 1).
                append(" ").append(title).append(':').append(itemString).toString();
    }

    /**
     * Recalculate the quality of the concept [to be refined to show
     * extension/intension balance].
     *
     * @return The quality value
     */
    @Override
    public float getQuality() {
        float linkPriority = termLinks.getAveragePriority();
        float termComplexityFactor = 1.0f / term.getComplexity();
        float result = or(linkPriority, termComplexityFactor);
        if (result < 0) {
            throw new RuntimeException("Concept.getQuality < 0: result=" + result + ", linkPriority=" + linkPriority + " ,termComplexityFactor=" + termComplexityFactor + ", termLinks.size=" + termLinks.size());
        }
        return result;
    }

    /**
     * Return the templates for TermLinks, only called in
     * Memory.continuedProcess.
     *
     * @return The template list (null for non-compound terms)
     */
    public List<TermLink> getTermLinkTemplates() {
        return termLinkTemplates;
    }

    /**
     * Select a isBelief to interact with the given task in inference.
     * <p>
     * get the first qualified one
     * <p>
     * only called in RuleTables.reason
     *
     * @param nal  the derivation context
     * @param task The selected task
     * @return The selected isBelief
     */
    public Sentence getBelief(final NAL nal, final Task task) {
        final Stamp taskStamp = task.sentence.stamp;
        final long currentTime = memory.time();

        for (final Sentence belief : beliefs) {
            nal.emit(BeliefSelect.class, belief);

            nal.setTheNewStamp(taskStamp, belief.stamp, currentTime);

            Sentence projectedBelief = belief.projection(taskStamp.getOccurrenceTime(), memory.time());
            if (projectedBelief.getOccurenceTime()!=belief.getOccurenceTime()) {
                nal.singlePremiseTask(projectedBelief, task.budget);
            }

            return projectedBelief;     // return the first satisfying belief
        }
        return null;
    }

    /**
     * Get the current overall desire value. TODO to be refined
     */
    public TruthValue getDesire() {
        if (desires.isEmpty()) {
            return null;
        }
        TruthValue topValue = desires.get(0).truth;
        return topValue;
    }

    /** Release resources: end all pending tasks and empty bags and lists. */
    @Override
    public void end() {
        for (Task t : questions) t.end();
        questions.clear();

        for (Task t : quests) t.end();
        quests.clear();

        termLinks.clear();
        taskLinks.clear();
        beliefs.clear();
    }

    /**
     * Collect direct isBelief, questions, and desires for display.
     *
     * @return String representation of direct content
     */
    public String displayContent() {
        final StringBuilder buffer = new StringBuilder(18);
        buffer.append("\n Beliefs:\n");
        if (!beliefs.isEmpty()) {
            for (Sentence s : beliefs) {
                buffer.append(s).append('\n');
            }
        }
        if (!questions.isEmpty()) {
            buffer.append("\n Question:\n");
            for (Task t : questions) {
                buffer.append(t).append('\n');
            }
        }
        return buffer.toString();
    }

    /**
     * Replace default to prevent repeated inference, by checking TaskLink.
     *
     * @param taskLink The selected TaskLink
     * @param time The current time
     * @return The selected TermLink (removed from the bag; caller re-inserts)
     */
    public TermLink selectTermLink(final TaskLink taskLink, final long time) {
        int toMatch = memory.param.termLinkMaxMatched.get();
        for (int i = 0; (i < toMatch) && (termLinks.size() > 0); i++) {
            final TermLink termLink = termLinks.takeNext();
            if (termLink==null)
                break;

            if (taskLink.novel(termLink, time)) {
                // return, will be re-inserted in caller method when finished processing it
                return termLink;
            }

            returnTermLink(termLink);
        }
        return null;
    }

    /** Put a term link taken by selectTermLink back into the bag. */
    public void returnTermLink(TermLink termLink) {
        termLinks.putBack(termLink, memory.param.cycles(memory.param.termLinkForgetDurations), memory);
    }

    /**
     * Return the questions, called in ComposionalRules in
     * dedConjunctionByQuestion only.
     */
    public List<Task> getQuestions() {
        return questions;
    }

    /** Discount the confidence of every belief (or every desire). */
    public void discountConfidence(final boolean onBeliefs) {
        if (onBeliefs) {
            for (final Sentence s : beliefs) {
                s.discountConfidence();
            }
        } else {
            for (final Sentence s : desires) {
                s.discountConfidence();
            }
        }
    }

    /** get a random belief, weighted by their sentences confidences */
    public Sentence getBeliefRandomByConfidence() {
        if (beliefs.isEmpty()) return null;

        float totalConfidence = getBeliefConfidenceSum();
        float r = Memory.randomNumber.nextFloat() * totalConfidence;

        Sentence s = null;
        for (int i = 0; i < beliefs.size(); i++) {
            s = beliefs.get(i);
            r -= s.truth.getConfidence();
            if (r < 0)
                return s;
        }

        return s;
    }

    /** Sum of the confidences of all beliefs. */
    public float getBeliefConfidenceSum() {
        float t = 0;
        for (final Sentence s : beliefs)
            t += s.truth.getConfidence();
        return t;
    }

    /** Mean frequency of all beliefs; 0.5 when there are none. */
    public float getBeliefFrequencyMean() {
        if (beliefs.isEmpty()) return 0.5f;

        float t = 0;
        for (final Sentence s : beliefs)
            t += s.truth.getFrequency();
        return t / beliefs.size();
    }

    /** One-line-per-belief summary for display. */
    public CharSequence getBeliefsSummary() {
        if (beliefs.isEmpty())
            return "0 beliefs";
        StringBuilder sb = new StringBuilder();
        for (Sentence s : beliefs)
            sb.append(s.toString()).append('\n');
        return sb;
    }

    /** One-line-per-desire summary for display. */
    public CharSequence getDesiresSummary() {
        if (desires.isEmpty())
            return "0 desires";
        StringBuilder sb = new StringBuilder();
        for (Sentence s : desires)
            sb.append(s.toString()).append('\n');
        return sb;
    }
}
package view.TableTab; import javafx.scene.control.Button; import javafx.scene.control.ChoiceBox; import javafx.scene.control.TextField; import javafx.scene.layout.HBox; import javafx.scene.layout.VBox; import javafx.scene.control.Label; /* * Class is responsible for drawing the settings in the Pile Settings Menu. * It is in a separate class to * TODO * make look better - css or general font formatting/ spacing * spacing for input boxes needs work * text/input box alignment * sceen size affects view of menu * make things generic - ie. type menus * */ public class PileSettingsView extends VBox { private Button updatePileButton; private Button addPileButton; private Button deletePileButton; private TextField nameInput; private ChoiceBox typeChoiceInput; private TextField minNumCardsInput; private TextField maxNumCardsInput; private TextField xCoordInput; private TextField yCoordInput; /* TODO missing Pile Associations */ /* private pileAssociations */ private ChoiceBox pileViewableChoiceInput; private ChoiceBox cardOrientationChoiceInput; public PileSettingsView(){ this.setSpacing(2); /* Name - extract */ HBox nameHBox = new HBox(5); Label name = new Label("Name:"); nameInput = new TextField(); nameInput.setMaxSize(120, 20); nameHBox.getChildren().addAll(name, nameInput); /* Type - extract */ HBox typeHBox = new HBox(4); Label type = new Label("Type:"); typeChoiceInput = new ChoiceBox(); typeChoiceInput.getItems().addAll("Hand", "Deck", "Discard", "None"); typeHBox.getChildren().addAll(type, typeChoiceInput); /* max/min number Cards in Pile - extract */ VBox pileCardCount = new VBox(); Label numCards = new Label("Number Cards"); /* min */ HBox minNumCardsHBox = new HBox(4); Label minNumCards = new Label("min:"); minNumCardsInput = new TextField(); minNumCardsInput.setMaxSize(50, 20); minNumCardsHBox.getChildren().addAll(minNumCards, minNumCardsInput); /* max */ HBox maxNumCardsHBox = new HBox(4); Label maxNumCards = new Label("max:"); maxNumCardsInput = new 
TextField(); maxNumCardsInput.setMaxSize(50, 20); maxNumCardsHBox.getChildren().addAll(maxNumCards, maxNumCardsInput); pileCardCount.getChildren().addAll(numCards, minNumCardsHBox, maxNumCardsHBox); /* Corrdinates - extract */ Label gridCoordinates = new Label("Coordinates"); VBox pileCoordinates = new VBox(); HBox xCoordHBox = new HBox(4); Label xCoord = new Label("x:"); xCoordInput = new TextField(); xCoordInput.setMaxSize(30, 10); xCoordHBox.getChildren().addAll(xCoord, xCoordInput); Label yCoord = new Label("y:"); HBox yCoordHBox = new HBox(4); yCoordInput = new TextField(); yCoordInput.setMaxSize(30, 10); yCoordHBox.getChildren().addAll(yCoord, yCoordInput); pileCoordinates.getChildren().addAll(gridCoordinates, xCoordHBox, yCoordHBox); /* Player Pile Association Settings */ Label playerAssociation = new Label("Player Association"); /* Pile Viewable Settings */ HBox pileViewableHBox = new HBox(4); Label viewableBy = new Label("Viewable By:"); pileViewableChoiceInput = new ChoiceBox(); pileViewableChoiceInput.getItems().addAll("All", "None"); pileViewableHBox.getChildren().addAll(viewableBy, pileViewableChoiceInput); /* Pile Orientation Settings - for display purposes. 
Will determine how top of Pile is displayed */ HBox cardOrientationHBox = new HBox(4); Label orientation = new Label("Card Orientation"); cardOrientationChoiceInput = new ChoiceBox(); cardOrientationChoiceInput.getItems().addAll("Face Up", "Face Down"); cardOrientationHBox.getChildren().addAll(orientation, cardOrientationChoiceInput); /* TODO add anchor?*/ HBox addUpdateButtonsHBox = new HBox(20); updatePileButton = new Button("Update Pile"); updatePileButton.setDisable(true); addPileButton = new Button("Add Pile"); deletePileButton = new Button("Delete Pile"); deletePileButton.setDisable(true); addUpdateButtonsHBox.getChildren().addAll(updatePileButton, addPileButton, deletePileButton); this.getChildren().addAll(nameHBox, typeHBox, pileCardCount, pileCoordinates, playerAssociation, pileViewableHBox, cardOrientationHBox, addUpdateButtonsHBox); } public Button getAddPileButton() { return addPileButton; } public Button getUpdatePileButton() { return updatePileButton; } public Button getDeletePileButton() { return deletePileButton; } public void enableAddPileButton(){ addPileButton.setDisable(false); } public void disableAddPileButton(){ addPileButton.setDisable(true); } public void enableUpdatePileButton(){ updatePileButton.setDisable(false); } public void disableUpdatePileButton(){ updatePileButton.setDisable(true); } public void enableDeletePileButton(){ deletePileButton.setDisable(false); } public void disableDeletePileButton(){ deletePileButton.setDisable(true); } public TextField getNameInput() { return nameInput; } public ChoiceBox getTypeChoiceInput() { return typeChoiceInput; } public TextField getMinNumCardsInput() { return minNumCardsInput; } public TextField getMaxNumCardsInput() { return maxNumCardsInput; } public TextField getxCoordInput() { return xCoordInput; } public TextField getyCoordInput() { return yCoordInput; } public ChoiceBox getPileViewableChoiceInput() { return pileViewableChoiceInput; } public ChoiceBox getCardOrientationChoiceInput() { 
return cardOrientationChoiceInput; } }
package fastmatrices;

import java.util.Random;

/**
 * A simple single-hidden-layer feed-forward neural network trained with
 * stochastic gradient descent (backpropagation), built on the project's
 * {@code Matrix} type. Weights are stored in {@code theta1} (input -&gt;
 * hidden, including a bias column) and {@code theta2} (hidden -&gt; output,
 * including a bias column).
 */
public class NeuralNetwork {

    public Matrix theta1, theta2;

    /**
     * Initializes both weight matrices with deterministic pseudo-random
     * values in [0, 1) (fixed seed — see {@link #randomArray}).
     * The "+ 1" column in each matrix holds the bias weight.
     */
    public NeuralNetwork(int inputNodes, int hiddenNodes, int outputNodes) {
        theta1 = new Matrix(randomArray(hiddenNodes, inputNodes + 1), hiddenNodes, inputNodes + 1);
        theta2 = new Matrix(randomArray(outputNodes, hiddenNodes + 1), outputNodes, hiddenNodes + 1);
    }

    /**
     * Trains the network with per-sample (stochastic) gradient descent.
     *
     * @param x            training inputs, one example per row
     * @param y            expected outputs, one example per row
     * @param learningRate gradient-descent step size
     * @param iterations   number of full passes over the training set
     */
    public void train(Matrix x, Matrix y, final double learningRate, int iterations) {
        Matrix a1, a2, a3 = null, t = null, d3, d2;
        // Print a progress/error line roughly five times over the run.
        // BUG FIX: the original used iterations / 5 directly, which is 0
        // for iterations < 5 and made "i % mod" throw ArithmeticException.
        int mod = Math.max(1, iterations / 5);
        for (int i = 0; i < iterations; i++) {
            for (int j = 1; j <= x.rows; j++) {
                a1 = x.getRowAsColumn(j);
                a2 = calculateLayer(theta1, a1);
                a3 = calculateLayer(theta2, a2);
                t = y.getRowAsColumn(j);
                d3 = calculateOutputError(t, a3);
                d2 = calculateHiddenError(theta2, d3, a2);
                updateWeights(theta1, d2, a1, learningRate);
                updateWeights(theta2, d3, a2, learningRate);
            }
            // Guard t != null: with an empty training set the inner loop
            // never runs and t/a3 would still be null here (original NPE).
            if (i % mod == 0 && t != null) {
                Matrix d = t.subtract(a3);
                Matrix e = d.multiplyTransposeOp1(d);
                System.out.println(e.data[0]);
            }
        }
    }

    /**
     * Runs a forward pass for a single example.
     *
     * @param x input as a column vector (no bias entry; it is appended here)
     * @return the output layer activations as a flat array
     */
    public double[] predict(Matrix x) {
        Matrix a1 = Utilities.appendVertical(1, x);
        Matrix a2 = Utilities.appendVertical(1, theta1.multiply(a1, sig));
        Matrix a3 = theta2.multiply(a2, sig);
        return a3.transpose().data;
    }

    /**
     * Runs {@link #predict} for every row of {@code x}.
     *
     * @param x inputs, one example per row
     * @return one prediction array per input row
     */
    public double[][] bulkPredict(Matrix x) {
        double[][] answer = new double[x.rows][];
        for (int i = 0; i < x.rows; i++) {
            answer[i] = predict(x.part(i + 1, i + 1, 1, -1).transpose());
        }
        return answer;
    }

    /**
     * Calculates the activations matrix given the previous layer's activations
     * and the weights for this layer, i.e., calculates
     * sigmoid(theta * [1; activations]) — column 0 of theta is the bias.
     *
     * @param theta       layer weights (rows x (activations.rows + 1))
     * @param activations previous layer's activations as a column vector
     * @return the new layer's activations as a column vector
     * @throws IllegalArgumentException if the dimensions do not conform
     */
    private Matrix calculateLayer(Matrix theta, Matrix activations) {
        double[] answer = new double[theta.rows];
        int answerindex = 0, thetaindex = 0, activationsindex;
        if (theta.columns != (activations.rows + 1))
            throw new IllegalArgumentException(String.format(
                    "non-conformant arguments (theta is %dx%d, activations is %dx%d)",
                    theta.rows, theta.columns, activations.rows, activations.columns));
        for (int i = 0; i < theta.rows; i++) {
            // Bias weight first, then the weighted sum of activations.
            double sum = theta.data[thetaindex];
            activationsindex = 0;
            for (int k = 0; k < activations.rows; k++) {
                sum += theta.data[thetaindex + k + 1] * activations.data[activationsindex++];
            }
            // Sigmoid activation.
            answer[answerindex++] = 1.0 / (1 + Math.exp(-sum));
            thetaindex += theta.columns;
        }
        return new Matrix(answer, theta.rows, activations.columns);
    }

    /**
     * Calculates the output error given the expected output and the actual output,
     * i.e., (target - a) * a * (1 - a) element-wise (sigmoid derivative).
     *
     * @param target      The expected output.
     * @param activations The actual output.
     * @return the output-layer delta as a column vector
     */
    private Matrix calculateOutputError(Matrix target, Matrix activations) {
        double[] answer = new double[target.rows];
        for (int i = 0; i < target.rows; i++) {
            double a = activations.data[i];
            answer[i] = (target.data[i] - a) * a * (1 - a);
        }
        return new Matrix(answer, target.rows, 1);
    }

    /**
     * Calculates the hidden layer error given the output layer weights and error,
     * and the hidden layer activations. The bias column (i == 0) is skipped,
     * since the bias has no incoming error to propagate.
     *
     * @param theta       The output layer weights.
     * @param delta       The output layer error.
     * @param activations The hidden layer activations.
     * @return the hidden-layer delta as a column vector
     * @throws IllegalArgumentException if the dimensions do not conform
     */
    private Matrix calculateHiddenError(Matrix theta, Matrix delta, Matrix activations) {
        double[] answer = new double[theta.columns - 1];
        int answerindex = 0, thetaindex, deltaindex;
        if (theta.rows != delta.rows)
            throw new IllegalArgumentException(String.format(
                    "non-conformant arguments (op1 is %dx%d, op2 is %dx%d)",
                    theta.rows, theta.columns, delta.rows, delta.columns));
        for (int i = 1; i < theta.columns; i++) {
            // sum over the i-th column of theta times the output delta
            double sum = 0;
            thetaindex = i;
            deltaindex = 0;
            for (int k = 0; k < delta.rows; k++) {
                sum += theta.data[thetaindex] * delta.data[deltaindex];
                thetaindex += theta.columns;
                deltaindex += delta.columns;
            }
            double d = activations.data[i - 1];
            answer[answerindex++] = sum * d * (1 - d);
        }
        return new Matrix(answer, theta.columns - 1, 1);
    }

    /**
     * Updates the weights in place according to the specified parameters,
     * i.e., calculates weights += learningRate * delta * [1; activations]'.
     *
     * @param weights      layer weights, modified in place
     * @param delta        this layer's error (column vector)
     * @param activations  previous layer's activations (column vector)
     * @param learningRate gradient-descent step size
     * @return the same {@code weights} instance, for chaining
     * @throws IllegalArgumentException if the dimensions do not conform
     */
    private Matrix updateWeights(Matrix weights, Matrix delta, Matrix activations, double learningRate) {
        int answerindex = 0, deltaindex = 0, activationsindex;
        if (delta.columns != activations.columns)
            throw new IllegalArgumentException(String.format(
                    "non-conformant arguments (delta is %dx%d, activations is %dx%d)",
                    delta.rows, delta.columns, activations.rows, activations.columns));
        if (delta.rows != weights.rows || (activations.rows + 1) != weights.columns)
            throw new IllegalArgumentException(String.format(
                    "non-conformant arguments (weights is %dx%d, delta * activations' is %dx%d)",
                    weights.rows, weights.columns, delta.rows, activations.rows));
        for (int i = 0; i < delta.rows; i++) {
            // Bias weight update (implicit activation of 1).
            weights.data[answerindex++] += delta.data[deltaindex] * learningRate;
            activationsindex = 0;
            for (int j = 0; j < activations.rows; j++) {
                weights.data[answerindex++] +=
                        delta.data[deltaindex] * activations.data[activationsindex] * learningRate;
                activationsindex += activations.columns;
            }
            deltaindex += delta.columns;
        }
        return weights;
    }

    /**
     * Calculates the sigmoid activation function.
     */
    private Matrix.Function sig = new Matrix.Function() {
        @Override
        public double apply(double value, int row, int col) {
            return 1.0 / (1 + Math.exp(-value));
        }
    };

    /**
     * Builds a rows*columns array of pseudo-random values in [0, 1).
     * The fixed seed (1) makes weight initialization reproducible.
     */
    private static double[] randomArray(int rows, int columns) {
        double[] a = new double[rows * columns];
        Random random = new Random(1);
        int index = 0;
        for (int i = 0; i < rows; i++) {
            for (int j = 0; j < columns; j++) {
                a[index++] = random.nextDouble();
            }
        }
        return a;
    }
}
package com.jme3.effect; import com.jme3.bounding.BoundingBox; import com.jme3.effect.ParticleMesh.Type; import com.jme3.effect.influencers.DefaultParticleInfluencer; import com.jme3.effect.influencers.ParticleInfluencer; import com.jme3.effect.shapes.EmitterPointShape; import com.jme3.effect.shapes.EmitterShape; import com.jme3.export.JmeExporter; import com.jme3.export.JmeImporter; import com.jme3.export.InputCapsule; import com.jme3.export.OutputCapsule; import com.jme3.math.ColorRGBA; import com.jme3.math.FastMath; import com.jme3.math.Matrix3f; import com.jme3.math.Vector3f; import com.jme3.renderer.Camera; import com.jme3.renderer.RenderManager; import com.jme3.renderer.ViewPort; import com.jme3.renderer.queue.RenderQueue.Bucket; import com.jme3.renderer.queue.RenderQueue.ShadowMode; import com.jme3.scene.Geometry; import com.jme3.scene.Spatial; import com.jme3.scene.control.Control; import com.jme3.util.TempVars; import java.io.IOException; /** * <code>ParticleEmitter</code> is a special kind of geometry which simulates * a particle system. * <p> * Particle emitters can be used to simulate various kinds of phenomena, * such as fire, smoke, explosions and much more. * <p> * Particle emitters have many properties which are used to control the * simulation. The interpretation of these properties depends on the * {@link ParticleInfluencer} that has been assigned to the emitter via * {@link ParticleEmitter#setParticleInfluencer(com.jme3.effect.influencers.ParticleInfluencer) }. * By default the implementation {@link DefaultParticleInfluencer} is used. 
* * @author Kirill Vainer */ public class ParticleEmitter extends Geometry { private static final EmitterShape DEFAULT_SHAPE = new EmitterPointShape(Vector3f.ZERO); private static final ParticleInfluencer DEFAULT_INFLUENCER = new DefaultParticleInfluencer(); private ParticleEmitterControl control; private EmitterShape shape = DEFAULT_SHAPE; private ParticleMesh particleMesh; private ParticleInfluencer particleInfluencer = DEFAULT_INFLUENCER; private ParticleMesh.Type meshType; private Particle[] particles; private int firstUnUsed; private int lastUsed; // private int next = 0; // private ArrayList<Integer> unusedIndices = new ArrayList<Integer>(); private boolean randomAngle; private boolean selectRandomImage; private boolean facingVelocity; private float particlesPerSec = 20; private float emitCarry; private float lowLife = 3f; private float highLife = 7f; private Vector3f gravity = new Vector3f(0.0f, 0.1f, 0.0f); private float rotateSpeed; private Vector3f faceNormal = new Vector3f(Vector3f.NAN); private int imagesX = 1; private int imagesY = 1; private boolean enabled = true; private ColorRGBA startColor = new ColorRGBA(0.4f, 0.4f, 0.4f, 0.5f); private ColorRGBA endColor = new ColorRGBA(0.1f, 0.1f, 0.1f, 0.0f); private float startSize = 0.2f; private float endSize = 2f; private boolean worldSpace = true; //variable that helps with computations private transient Vector3f temp = new Vector3f(); public static class ParticleEmitterControl implements Control { ParticleEmitter parentEmitter; public ParticleEmitterControl() { } public ParticleEmitterControl(ParticleEmitter parentEmitter) { this.parentEmitter = parentEmitter; } public Control cloneForSpatial(Spatial spatial) { return this; // WARNING: Sets wrong control on spatial. Will be // fixed automatically by ParticleEmitter.clone() method. 
} public void setSpatial(Spatial spatial) { } public void setEnabled(boolean enabled) { parentEmitter.setEnabled(enabled); } public boolean isEnabled() { return parentEmitter.isEnabled(); } public void update(float tpf) { parentEmitter.updateFromControl(tpf); } public void render(RenderManager rm, ViewPort vp) { parentEmitter.renderFromControl(rm, vp); } public void write(JmeExporter ex) throws IOException { } public void read(JmeImporter im) throws IOException { } } @Override public ParticleEmitter clone() { ParticleEmitter clone = (ParticleEmitter) super.clone(); clone.shape = shape.deepClone(); // Reinitialize particle list clone.setNumParticles(particles.length); clone.faceNormal = faceNormal.clone(); clone.startColor = startColor.clone(); clone.endColor = endColor.clone(); clone.particleInfluencer = particleInfluencer.clone(); // remove wrong control clone.controls.remove(control); // put correct control clone.controls.add(new ParticleEmitterControl(clone)); // Reinitialize particle mesh switch (meshType) { case Point: clone.particleMesh = new ParticlePointMesh(); clone.setMesh(clone.particleMesh); break; case Triangle: clone.particleMesh = new ParticleTriMesh(); clone.setMesh(clone.particleMesh); break; default: throw new IllegalStateException("Unrecognized particle type: " + meshType); } clone.particleMesh.initParticleData(clone, clone.particles.length); clone.particleMesh.setImagesXY(clone.imagesX, clone.imagesY); return clone; } public ParticleEmitter(String name, Type type, int numParticles) { super(name); // ignore world transform, unless user sets inLocalSpace this.setIgnoreTransform(true); // particles neither receive nor cast shadows this.setShadowMode(ShadowMode.Off); // particles are usually transparent this.setQueueBucket(Bucket.Transparent); meshType = type; // Must create clone of shape/influencer so that a reference to a static is // not maintained shape = shape.deepClone(); particleInfluencer = particleInfluencer.clone(); control = new 
ParticleEmitterControl(this); controls.add(control); switch (meshType) { case Point: particleMesh = new ParticlePointMesh(); this.setMesh(particleMesh); break; case Triangle: particleMesh = new ParticleTriMesh(); this.setMesh(particleMesh); break; default: throw new IllegalStateException("Unrecognized particle type: " + meshType); } this.setNumParticles(numParticles); // particleMesh.initParticleData(this, particles.length); } /** * For serialization only. Do not use. */ public ParticleEmitter() { super(); } public void setShape(EmitterShape shape) { this.shape = shape; } public EmitterShape getShape() { return shape; } /** * Set the {@link ParticleInfluencer} to influence this particle emitter. * * @param particleInfluencer the {@link ParticleInfluencer} to influence * this particle emitter. * * @see ParticleInfluencer */ public void setParticleInfluencer(ParticleInfluencer particleInfluencer) { this.particleInfluencer = particleInfluencer; } /** * Returns the {@link ParticleInfluencer} that influences this * particle emitter. * * @return the {@link ParticleInfluencer} that influences this * particle emitter. * * @see ParticleInfluencer */ public ParticleInfluencer getParticleInfluencer() { return particleInfluencer; } /** * Returns the mesh type used by the particle emitter. * * <p>This value is set in the constructor and cannot be modified * afterwards. * * @return the mesh type used by the particle emitter. * * @see ParticleEmitter#ParticleEmitter(java.lang.String, com.jme3.effect.ParticleMesh.Type, int) */ public ParticleMesh.Type getMeshType() { return meshType; } /** * Returns true if particles should spawn in world space. * * @return true if particles should spawn in world space. * * @see ParticleEmitter#setInWorldSpace(boolean) */ public boolean isInWorldSpace() { return worldSpace; } /** * Set to true if particles should spawn in world space. 
* * <p>If set to true and the particle emitter is moved in the scene, * then particles that have already spawned won't be effected by this * motion. If set to false, the particles will emit in local space * and when the emitter is moved, so are all the particles that * were emitted previously. * * @param worldSpace true if particles should spawn in world space. */ public void setInWorldSpace(boolean worldSpace) { this.setIgnoreTransform(worldSpace); this.worldSpace = worldSpace; } /** * Returns the number of visible particles (spawned but not dead). * * @return the number of visible particles */ public int getNumVisibleParticles() { // return unusedIndices.size() + next; return lastUsed + 1; } /** * Set the maximum amount of particles that * can exist at the same time with this emitter. * Calling this method many times is not recommended. * * @param numParticles the maximum amount of particles that * can exist at the same time with this emitter. */ public final void setNumParticles(int numParticles) { particles = new Particle[numParticles]; for (int i = 0; i < numParticles; i++) { particles[i] = new Particle(); } //We have to reinit the mesh's buffers with the new size particleMesh.initParticleData(this, particles.length); particleMesh.setImagesXY(this.imagesX, this.imagesY); firstUnUsed = 0; lastUsed = -1; } public int getMaxNumParticles() { return particles.length; } /** * Returns a list of all particles (shouldn't be used in most cases). * * <p> * This includes both existing and non-existing particles. * The size of the array is set to the <code>numParticles</code> value * specified in the constructor or {@link ParticleEmitter#setNumParticles(int) } * method. * * @return a list of all particles. */ public Particle[] getParticles() { return particles; } /** * Get the normal which particles are facing. * * @return the normal which particles are facing. 
* * @see ParticleEmitter#setFaceNormal(com.jme3.math.Vector3f) */ public Vector3f getFaceNormal() { if (Vector3f.isValidVector(faceNormal)) { return faceNormal; } else { return null; } } /** * Sets the normal which particles are facing. * * <p>By default, particles * will face the camera, but for some effects (e.g shockwave) it may * be necessary to face a specific direction instead. To restore * normal functionality, provide <code>null</code> as the argument for * <code>faceNormal</code>. * * @param faceNormal The normals particles should face, or <code>null</code> * if particles should face the camera. */ public void setFaceNormal(Vector3f faceNormal) { if (faceNormal == null || !Vector3f.isValidVector(faceNormal)) { this.faceNormal.set(Vector3f.NAN); } else { this.faceNormal = faceNormal; } } /** * Returns the rotation speed in radians/sec for particles. * * @return the rotation speed in radians/sec for particles. * * @see ParticleEmitter#setRotateSpeed(float) */ public float getRotateSpeed() { return rotateSpeed; } /** * Set the rotation speed in radians/sec for particles * spawned after the invocation of this method. * * @param rotateSpeed the rotation speed in radians/sec for particles * spawned after the invocation of this method. */ public void setRotateSpeed(float rotateSpeed) { this.rotateSpeed = rotateSpeed; } /** * Returns true if every particle spawned * should have a random facing angle. * * @return true if every particle spawned * should have a random facing angle. * * @see ParticleEmitter#setRandomAngle(boolean) */ public boolean isRandomAngle() { return randomAngle; } /** * Set to true if every particle spawned * should have a random facing angle. * * @param randomAngle if every particle spawned * should have a random facing angle. */ public void setRandomAngle(boolean randomAngle) { this.randomAngle = randomAngle; } /** * Returns true if every particle spawned should get a random * image. 
* * @return True if every particle spawned should get a random * image. * * @see ParticleEmitter#setSelectRandomImage(boolean) */ public boolean isSelectRandomImage() { return selectRandomImage; } /** * Set to true if every particle spawned * should get a random image from a pool of images constructed from * the texture, with X by Y possible images. * * <p>By default, X and Y are equal * to 1, thus allowing only 1 possible image to be selected, but if the * particle is configured with multiple images by using {@link ParticleEmitter#setImagesX(int) } * and {#link ParticleEmitter#setImagesY(int) } methods, then multiple images * can be selected. Setting to false will cause each particle to have an animation * of images displayed, starting at image 1, and going until image X*Y when * the particle reaches its end of life. * * @param selectRandomImage True if every particle spawned should get a random * image. */ public void setSelectRandomImage(boolean selectRandomImage) { this.selectRandomImage = selectRandomImage; } /** * Check if particles spawned should face their velocity. * * @return True if particles spawned should face their velocity. * * @see ParticleEmitter#setFacingVelocity(boolean) */ public boolean isFacingVelocity() { return facingVelocity; } /** * Set to true if particles spawned should face * their velocity (or direction to which they are moving towards). * * <p>This is typically used for e.g spark effects. * * @param followVelocity True if particles spawned should face their velocity. * */ public void setFacingVelocity(boolean followVelocity) { this.facingVelocity = followVelocity; } /** * Get the end color of the particles spawned. * * @return the end color of the particles spawned. * * @see ParticleEmitter#setEndColor(com.jme3.math.ColorRGBA) */ public ColorRGBA getEndColor() { return endColor; } /** * Set the end color of the particles spawned. 
* * <p>The * particle color at any time is determined by blending the start color * and end color based on the particle's current time of life relative * to its end of life. * * @param endColor the end color of the particles spawned. */ public void setEndColor(ColorRGBA endColor) { this.endColor.set(endColor); } /** * Get the end size of the particles spawned. * * @return the end size of the particles spawned. * * @see ParticleEmitter#setEndSize(float) */ public float getEndSize() { return endSize; } /** * Set the end size of the particles spawned. * * <p>The * particle size at any time is determined by blending the start size * and end size based on the particle's current time of life relative * to its end of life. * * @param endSize the end size of the particles spawned. */ public void setEndSize(float endSize) { this.endSize = endSize; } /** * This method sets the gravity value of Y axis. * * By default the Y axis is the only one to have gravity value non zero. * * @param gravity * Set the gravity of Y axis, in units/sec/sec, of particles * spawned. * * @deprecated Use {@link ParticleEmitter#setGravity(float, float, float) instead. */ @Deprecated public void setGravity(float gravity) { this.gravity.y = gravity; } /** * Get the gravity vector. * * @return the gravity vector. * * @see ParticleEmitter#setGravity(com.jme3.math.Vector3f) */ public Vector3f getGravity() { return gravity; } /** * This method sets the gravity vector. * * @param gravity the gravity vector */ public void setGravity(Vector3f gravity) { this.gravity.set(gravity); } /** * Sets the gravity vector. * * @param gravity the gravity vector */ public void setGravity(float x, float y, float z) { this.gravity.x = x; this.gravity.y = y; this.gravity.z = z; } /** * Get the high value of life. * * @return the high value of life. * * @see ParticleEmitter#setHighLife(float) */ public float getHighLife() { return highLife; } /** * Set the high value of life. 
* * <p>The particle's lifetime/expiration * is determined by randomly selecting a time between low life and high life. * * @param highLife the high value of life. */ public void setHighLife(float highLife) { this.highLife = highLife; } /** * Get the number of images along the X axis (width). * * @return the number of images along the X axis (width). * * @see ParticleEmitter#setImagesX(int) */ public int getImagesX() { return imagesX; } /** * Set the number of images along the X axis (width). * * <p>To determine * how multiple particle images are selected and used, see the * {@link ParticleEmitter#setSelectRandomImage(boolean) } method. * * @param imagesX the number of images along the X axis (width). */ public void setImagesX(int imagesX) { this.imagesX = imagesX; particleMesh.setImagesXY(this.imagesX, this.imagesY); } /** * Get the number of images along the Y axis (height). * * @return the number of images along the Y axis (height). * * @see ParticleEmitter#setImagesY(int) */ public int getImagesY() { return imagesY; } /** * Set the number of images along the Y axis (height). * * <p>To determine how multiple particle images are selected and used, see the * {@link ParticleEmitter#setSelectRandomImage(boolean) } method. * * @param imagesY the number of images along the Y axis (height). */ public void setImagesY(int imagesY) { this.imagesY = imagesY; particleMesh.setImagesXY(this.imagesX, this.imagesY); } /** * Get the low value of life. * * @return the low value of life. * * @see ParticleEmitter#setLowLife(float) */ public float getLowLife() { return lowLife; } /** * Set the low value of life. * * <p>The particle's lifetime/expiration * is determined by randomly selecting a time between low life and high life. * * @param lowLife the low value of life. */ public void setLowLife(float lowLife) { this.lowLife = lowLife; } /** * Get the number of particles to spawn per * second. * * @return the number of particles to spawn per * second. 
* * @see ParticleEmitter#setParticlesPerSec(float) */ public float getParticlesPerSec() { return particlesPerSec; } /** * Set the number of particles to spawn per * second. * * @param particlesPerSec the number of particles to spawn per * second. */ public void setParticlesPerSec(float particlesPerSec) { this.particlesPerSec = particlesPerSec; } /** * Get the start color of the particles spawned. * * @return the start color of the particles spawned. * * @see ParticleEmitter#setStartColor(com.jme3.math.ColorRGBA) */ public ColorRGBA getStartColor() { return startColor; } /** * Set the start color of the particles spawned. * * <p>The particle color at any time is determined by blending the start color * and end color based on the particle's current time of life relative * to its end of life. * * @param startColor the start color of the particles spawned */ public void setStartColor(ColorRGBA startColor) { this.startColor.set(startColor); } /** * Get the start color of the particles spawned. * * @return the start color of the particles spawned. * * @see ParticleEmitter#setStartSize(float) */ public float getStartSize() { return startSize; } /** * Set the start size of the particles spawned. * * <p>The particle size at any time is determined by blending the start size * and end size based on the particle's current time of life relative * to its end of life. * * @param startSize the start size of the particles spawned. */ public void setStartSize(float startSize) { this.startSize = startSize; } /** * @deprecated Use ParticleEmitter.getParticleInfluencer().getInitialVelocity() instead. */ @Deprecated public Vector3f getInitialVelocity() { return particleInfluencer.getInitialVelocity(); } /** * @param initialVelocity Set the initial velocity a particle is spawned with, * the initial velocity given in the parameter will be varied according * to the velocity variation set in {@link ParticleEmitter#setVelocityVariation(float) }. 
* A particle will move toward its velocity unless it is affected by the
 * gravity.
 *
 * @deprecated
 * This method is deprecated.
 * Use ParticleEmitter.getParticleInfluencer().setInitialVelocity(initialVelocity); instead.
 *
 * @see ParticleEmitter#setVelocityVariation(float)
 * @see ParticleEmitter#setGravity(float)
 */
@Deprecated
public void setInitialVelocity(Vector3f initialVelocity) {
    this.particleInfluencer.setInitialVelocity(initialVelocity);
}

/**
 * @deprecated
 * This method is deprecated.
 * Use ParticleEmitter.getParticleInfluencer().getVelocityVariation(); instead.
 * @return the initial velocity variation factor
 */
@Deprecated
public float getVelocityVariation() {
    return particleInfluencer.getVelocityVariation();
}

/**
 * @param variation Set the variation by which the initial velocity
 * of the particle is determined. <code>variation</code> should be a value
 * from 0 to 1, where 0 means particles are to spawn with exactly
 * the velocity given in {@link ParticleEmitter#setStartVel(com.jme3.math.Vector3f) },
 * and 1 means particles are to spawn with a completely random velocity.
 *
 * @deprecated
 * This method is deprecated.
 * Use ParticleEmitter.getParticleInfluencer().setVelocityVariation(variation); instead.
 */
@Deprecated
public void setVelocityVariation(float variation) {
    this.particleInfluencer.setVelocityVariation(variation);
}

/**
 * Spawns one particle into the next free slot of the {@code particles} array,
 * if one is available.
 *
 * <p>Live particles occupy the index range {@code [0, lastUsed]}; the new
 * particle is placed at {@code lastUsed + 1} and initialized from the
 * emitter's current settings and its {@link ParticleInfluencer}.
 *
 * @param min bounding-box minimum; extended in place to enclose the new particle
 * @param max bounding-box maximum; extended in place to enclose the new particle
 * @return true if a particle was emitted, false if every slot is already in use
 */
private boolean emitParticle(Vector3f min, Vector3f max) {
    int idx = lastUsed + 1;
    if (idx >= particles.length) {
        return false;
    }

    Particle p = particles[idx];
    if (selectRandomImage) {
        p.imageIndex = FastMath.nextRandomInt(0, imagesY - 1) * imagesX
                + FastMath.nextRandomInt(0, imagesX - 1);
    }

    p.startlife = lowLife + FastMath.nextRandomFloat() * (highLife - lowLife);
    p.life = p.startlife;
    p.color.set(startColor);
    p.size = startSize;
    particleInfluencer.influenceParticle(p, shape);

    if (worldSpace) {
        // Spawn positions are generated in emitter-local space; offset into world space.
        p.position.addLocal(worldTransform.getTranslation());
    }
    if (randomAngle) {
        p.angle = FastMath.nextRandomFloat() * FastMath.TWO_PI;
    }
    if (rotateSpeed != 0) {
        p.rotateSpeed = rotateSpeed * (0.2f + (FastMath.nextRandomFloat() * 2f - 1f) * .8f);
    }

    // Grow the caller's bounding box to enclose the new particle.
    temp.set(p.position).addLocal(p.size, p.size, p.size);
    max.maxLocal(temp);
    temp.set(p.position).subtractLocal(p.size, p.size, p.size);
    min.minLocal(temp);

    ++lastUsed;
    firstUnUsed = idx + 1;
    return true;
}

/**
 * Instantly emits all the particles possible to be emitted. Any particles
 * which are currently inactive will be spawned immediately.
 */
public void emitAllParticles() {
    // Force world transform to update
    this.getWorldTransform();

    TempVars vars = TempVars.get();
    BoundingBox bbox = (BoundingBox) this.getMesh().getBound();
    Vector3f min = vars.vect1;
    Vector3f max = vars.vect2;
    bbox.getMin(min);
    bbox.getMax(max);
    if (!Vector3f.isValidVector(min)) {
        min.set(Vector3f.POSITIVE_INFINITY);
    }
    if (!Vector3f.isValidVector(max)) {
        max.set(Vector3f.NEGATIVE_INFINITY);
    }
    // Keep spawning until every free slot is filled.
    while (emitParticle(min, max));
    bbox.setMinMax(min, max);
    this.setBoundRefresh();
    vars.release();
}

/**
 * Instantly kills all active particles, after this method is called, all
 * particles will be dead and no longer visible.
 */
public void killAllParticles() {
    for (int i = 0; i < particles.length; ++i) {
        if (particles[i].life > 0) {
            this.freeParticle(i);
        }
    }
}

/**
 * Marks the particle at the given index as dead and resets its visual state.
 * Adjusts {@code lastUsed}/{@code firstUnUsed} so the live range stays compact.
 *
 * @param idx index of the particle to free
 */
private void freeParticle(int idx) {
    Particle p = particles[idx];
    p.life = 0;
    p.size = 0f;
    p.color.set(0, 0, 0, 0);
    p.imageIndex = 0;
    p.angle = 0;
    p.rotateSpeed = 0;

    if (idx == lastUsed) {
        // Shrink the live range past any trailing dead particles.
        // FIX: the decrement was missing (bare `lastUsed` is not a statement),
        // which made this loop never terminate conceptually / fail to compile.
        while (lastUsed >= 0 && particles[lastUsed].life == 0) {
            lastUsed--;
        }
    }
    if (idx < firstUnUsed) {
        firstUnUsed = idx;
    }
}

/** Swaps the two particle objects at the given indices. */
private void swap(int idx1, int idx2) {
    Particle p1 = particles[idx1];
    particles[idx1] = particles[idx2];
    particles[idx2] = p1;
}

/**
 * Advances all live particles by {@code tpf} seconds: ages them, applies
 * gravity and interpolated color/size/angle, frees expired ones, compacts the
 * live range, emits new particles according to {@code particlesPerSec}, and
 * refreshes the mesh bound.
 *
 * @param tpf time per frame, in seconds
 */
private void updateParticleState(float tpf) {
    // Force world transform to update
    this.getWorldTransform();

    TempVars vars = TempVars.get();
    Vector3f min = vars.vect1.set(Vector3f.POSITIVE_INFINITY);
    Vector3f max = vars.vect2.set(Vector3f.NEGATIVE_INFINITY);
    for (int i = 0; i < particles.length; ++i) {
        Particle p = particles[i];
        if (p.life == 0) { // particle is dead
            continue;
        }

        p.life -= tpf;
        if (p.life <= 0) {
            this.freeParticle(i);
            continue;
        }

        // applying gravity
        p.velocity.x -= gravity.x * tpf;
        p.velocity.y -= gravity.y * tpf;
        p.velocity.z -= gravity.z * tpf;
        // position += velocity * tpf
        temp.set(p.velocity).multLocal(tpf);
        p.position.addLocal(temp);

        // affecting color, size and angle; b goes 0 -> 1 over the particle's life
        float b = (p.startlife - p.life) / p.startlife;
        p.color.interpolate(startColor, endColor, b);
        p.size = FastMath.interpolateLinear(b, startSize, endSize);
        p.angle += p.rotateSpeed * tpf;

        // Computing bounding volume
        temp.set(p.position).addLocal(p.size, p.size, p.size);
        max.maxLocal(temp);
        temp.set(p.position).subtractLocal(p.size, p.size, p.size);
        min.minLocal(temp);

        if (!selectRandomImage) {
            // Animate through the sprite sheet over the particle's lifetime.
            p.imageIndex = (int) (b * imagesX * imagesY);
        }

        // Keep live particles packed at the front of the array.
        if (firstUnUsed < i) {
            this.swap(firstUnUsed, i);
            if (i == lastUsed) {
                lastUsed = firstUnUsed;
            }
            firstUnUsed++;
        }
    }

    // Emit particlesPerSec * tpf particles; carry the fractional remainder
    // forward so low rates still emit over time.
    float particlesToEmitF = particlesPerSec * tpf;
    int particlesToEmit = (int) particlesToEmitF;
    emitCarry += particlesToEmitF - particlesToEmit;

    while (emitCarry > 1f) {
        ++particlesToEmit;
        emitCarry -= 1f;
    }

    for (int i = 0; i < particlesToEmit; ++i) {
        this.emitParticle(min, max);
    }

    BoundingBox bbox = (BoundingBox) this.getMesh().getBound();
    bbox.setMinMax(min, max);
    this.setBoundRefresh();

    vars.release();
}

/**
 * Set to enable or disable the particle emitter
 *
 * <p>When a particle is
 * disabled, it will be "frozen in time" and not update.
 *
 * @param enabled True to enable the particle emitter
 */
public void setEnabled(boolean enabled) {
    this.enabled = enabled;
}

/**
 * Check if a particle emitter is enabled for update.
 *
 * @return True if a particle emitter is enabled for update.
 *
 * @see ParticleEmitter#setEnabled(boolean)
 */
public boolean isEnabled() {
    return enabled;
}

/**
 * Callback from Control.update(), do not use.
 * @param tpf time per frame, in seconds
 */
public void updateFromControl(float tpf) {
    if (enabled) {
        this.updateParticleState(tpf);
    }
}

/**
 * Callback from Control.render(), do not use.
 *
 * @param rm the render manager
 * @param vp the viewport being rendered
 */
private void renderFromControl(RenderManager rm, ViewPort vp) {
    Camera cam = vp.getCamera();

    if (meshType == ParticleMesh.Type.Point) {
        float C = cam.getProjectionMatrix().m00;
        C *= cam.getWidth() * 0.5f;

        // send attenuation params
        this.getMaterial().setFloat("Quadratic", C);
    }

    Matrix3f inverseRotation = Matrix3f.IDENTITY;
    TempVars vars = null;
    if (!worldSpace) {
        vars = TempVars.get();
        // Local-space particles must be counter-rotated into the emitter's frame.
        inverseRotation = this.getWorldRotation().toRotationMatrix(vars.tempMat3).invertLocal();
    }
    particleMesh.updateParticleData(particles, cam, inverseRotation);
    if (!worldSpace) {
        vars.release();
    }
}

/**
 * Runs one zero-length update and pushes the particle data to the mesh,
 * so the emitter renders correctly on its first real frame.
 *
 * @param rm the render manager
 * @param vp the viewport
 */
public void preload(RenderManager rm, ViewPort vp) {
    this.updateParticleState(0);
    particleMesh.updateParticleData(particles, vp.getCamera(), Matrix3f.IDENTITY);
}

@Override
public void write(JmeExporter ex) throws IOException {
    super.write(ex);
    OutputCapsule oc = ex.getCapsule(this);
    oc.write(shape, "shape", DEFAULT_SHAPE);
    oc.write(meshType, "meshType", ParticleMesh.Type.Triangle);
    oc.write(enabled, "enabled", true);
    oc.write(particles.length, "numParticles", 0);
    oc.write(particlesPerSec, "particlesPerSec", 0);
    oc.write(lowLife, "lowLife", 0);
    oc.write(highLife, "highLife", 0);
    oc.write(gravity, "gravity", null);
    oc.write(imagesX, "imagesX", 1);
    oc.write(imagesY, "imagesY", 1);
    oc.write(startColor, "startColor", null);
    oc.write(endColor, "endColor", null);
    oc.write(startSize, "startSize", 0);
    oc.write(endSize, "endSize", 0);
    oc.write(worldSpace, "worldSpace", false);
    oc.write(facingVelocity, "facingVelocity", false);
    oc.write(selectRandomImage, "selectRandomImage", false);
    oc.write(randomAngle, "randomAngle", false);
    oc.write(rotateSpeed, "rotateSpeed", 0);
    oc.write(particleInfluencer, "influencer", DEFAULT_INFLUENCER);
}

@Override
public void read(JmeImporter im) throws IOException {
    super.read(im);
    InputCapsule ic = im.getCapsule(this);
    shape = (EmitterShape) ic.readSavable("shape", DEFAULT_SHAPE);
    if (shape == DEFAULT_SHAPE) {
        // Prevent reference to static
        shape = shape.deepClone();
    }
    meshType = ic.readEnum("meshType", ParticleMesh.Type.class, ParticleMesh.Type.Triangle);
    int numParticles = ic.readInt("numParticles", 0);
    enabled = ic.readBoolean("enabled", true);
    particlesPerSec = ic.readFloat("particlesPerSec", 0);
    lowLife = ic.readFloat("lowLife", 0);
    highLife = ic.readFloat("highLife", 0);
    gravity = (Vector3f) ic.readSavable("gravity", null);
    imagesX = ic.readInt("imagesX", 1);
    imagesY = ic.readInt("imagesY", 1);
    startColor = (ColorRGBA) ic.readSavable("startColor", null);
    endColor = (ColorRGBA) ic.readSavable("endColor", null);
    startSize = ic.readFloat("startSize", 0);
    endSize = ic.readFloat("endSize", 0);
    worldSpace = ic.readBoolean("worldSpace", false);
    facingVelocity = ic.readBoolean("facingVelocity", false);
    selectRandomImage = ic.readBoolean("selectRandomImage", false);
    randomAngle = ic.readBoolean("randomAngle", false);
    rotateSpeed = ic.readFloat("rotateSpeed", 0);

    switch (meshType) {
        case Point:
            particleMesh = new ParticlePointMesh();
            this.setMesh(particleMesh);
            break;
        case Triangle:
            particleMesh = new ParticleTriMesh();
            this.setMesh(particleMesh);
            break;
        default:
            throw new IllegalStateException("Unrecognized particle type: " + meshType);
    }
    this.setNumParticles(numParticles);

    particleInfluencer = (ParticleInfluencer) ic.readSavable("influencer", DEFAULT_INFLUENCER);
    if (particleInfluencer == DEFAULT_INFLUENCER) {
        // Prevent reference to the shared static default.
        particleInfluencer = particleInfluencer.clone();
    }

    if (im.getFormatVersion() == 0) {
        // compatibility before the control inside particle emitter
        // was changed:
        // find it in the controls and take it out, then add the proper one in
        for (int i = 0; i < controls.size(); i++) {
            Object obj = controls.get(i);
            if (obj instanceof ParticleEmitter) {
                controls.remove(i);
                // now add the proper one in
                controls.add(new ParticleEmitterControl(this));
                break;
            }
        }

        // compatibility: in old files gravity was stored as a float, not a vector
        if (gravity == null) {
            gravity = new Vector3f();
            gravity.y = ic.readFloat("gravity", 0);
        }
    } else {
        // since the parentEmitter is not loaded, it must be
        // loaded separately
        control = getControl(ParticleEmitterControl.class);
        control.parentEmitter = this;
    }
}
}
package org.popkit.leap.elpa.utils;

import org.apache.commons.lang3.StringUtils;
import org.popkit.core.logger.LeapLogger;
import org.popkit.leap.elpa.entity.RecipeDo;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;

/**
 * Parses MELPA-style package recipe files (Lisp s-expressions) into
 * {@link RecipeDo} objects. Static utility class; not instantiable.
 */
public class RecipeParser {
    private RecipeParser() {}

    public static void main(String[] args) {
        String origin = "(wiki;afsf";
        String origin2 = "(wiki\";\";a\"fsf";
        System.out.println("origin:" + origin);
        System.out.println("origin:" + trimComments(origin));
        System.out.println("origin:" + origin2);
        System.out.println("origin:" + trimComments(origin2));
    }

    /**
     * Strips a trailing Lisp line comment (everything from the last {@code ;})
     * from a single line. The comment is kept if the tail after the {@code ;}
     * contains a double quote, since the {@code ;} may then be inside a string.
     *
     * @param origin one line of recipe text
     * @return the line with the trailing comment removed, or unchanged
     */
    public static String trimComments(String origin) {
        int idx = origin.lastIndexOf(";");
        if (idx > 0) {
            String other = origin.substring(idx);
            if (!other.contains("\"")) {
                return origin.substring(0, idx);
            }
        }
        return origin;
    }

    /**
     * Reads a recipe file into a single space-joined string, stripping line
     * comments via {@link #trimComments(String)}.
     *
     * @param file the recipe file to read
     * @return the joined file contents; empty string on I/O failure
     */
    public static String readFileToStringWithoutComments(File file) {
        BufferedReader br = null;
        StringBuilder stringBuilder = new StringBuilder();
        try {
            br = new BufferedReader(new FileReader(file));
            String sCurrentLine;
            // NOTE: removed a `needContinue` flag that was never set to false.
            while ((sCurrentLine = br.readLine()) != null) {
                if (sCurrentLine.contains(";")) {
                    stringBuilder.append(trimComments(sCurrentLine)).append(" ");
                } else {
                    stringBuilder.append(sCurrentLine).append(" ");
                }
            }
        } catch (IOException e) {
            LeapLogger.error("error", e);
            e.printStackTrace();
        } finally {
            try {
                if (br != null) br.close();
            } catch (IOException ex) {
                ex.printStackTrace();
            }
        }
        return stringBuilder.toString();
    }

    /**
     * Loads and parses the recipe file for the given package name.
     *
     * @param pkgName package name; the recipe file of the same name is looked
     *                up under {@code PelpaUtils.getRecipeFilePath()}
     * @return the parsed recipe, or null if the file is missing or unparsable
     */
    public static RecipeDo parsePkgRecipe(String pkgName) {
        String recipe = PelpaUtils.getRecipeFilePath() + pkgName;
        File recipeFile = new File(recipe);
        if (recipeFile.exists() && recipeFile.isFile()) {
            try {
                String content = readFileToStringWithoutComments(recipeFile);
                return parse(content);
            } catch (Exception e) {
                // FIX: exception was silently swallowed; keep the null-return
                // contract but record the failure.
                LeapLogger.error("failed to parse recipe for: " + pkgName, e);
            }
        }
        return null;
    }

    /**
     * Parses the textual content of a recipe s-expression into a
     * {@link RecipeDo}.
     *
     * @param origin full recipe text, e.g. {@code (name :repo "u/r" :fetcher github)}
     * @return the parsed recipe, or null if no parenthesized body is found
     */
    public static RecipeDo parse(String origin) {
        String sub = null;
        try {
            sub = origin.substring(origin.indexOf('(') + 1, origin.lastIndexOf(')'));
        } catch (Exception e) {
            e.printStackTrace();
        }
        if (StringUtils.isBlank(sub)) {
            return null;
        }
        String[] suArr = sub.split("\\s+");
        RecipeDo recipeDo = new RecipeDo();
        recipeDo.setPkgName(suArr[0].trim());

        // Everything after the package name: ":key value" pairs.
        String keyValueStringPairString =
                sub.substring(sub.indexOf(suArr[0]) + suArr[0].length()).trim();
        String[] keyValueStringPair = keyValueStringPairString.split(":");
        for (String keyValueString : keyValueStringPair) {
            if (StringUtils.isBlank(keyValueString)) {
                continue;
            }
            String[] keyValuePair = keyValueString.split("\\s+");
            if (keyValuePair.length <= 1) {
                continue;
            }
            String key = keyValuePair[0].trim();
            String value = keyValueString.substring(
                    keyValueString.indexOf(keyValuePair[0]) + keyValuePair[0].length());
            if ("repo".equals(key)) {
                recipeDo.update(key, PelpaUtils.unwrap(value));
            } else if ("files".equals(key)) {
                // FIX: was `"files".endsWith(key)` — a reversed-receiver typo that
                // would also match keys like "les" or "s"; an exact match is intended.
                String fileString = extraFileListString(keyValueStringPairString);
                recipeDo.update(key, fileString);
            } else {
                recipeDo.update(key, value.trim());
            }
        }
        String url = extraUrl(keyValueStringPairString);
        if (url != null) {
            recipeDo.setUrl(url);
        }
        return recipeDo;
    }

    /**
     * Extracts the double-quoted value of the {@code :url} property.
     *
     * @param keyValueStringPairString the ":key value" section of the recipe
     * @return the URL without quotes, or null if absent/malformed
     */
    private static String extraUrl(String keyValueStringPairString) {
        if (!keyValueStringPairString.contains(":url")) {
            return null;
        }
        String key = ":url";
        int index = keyValueStringPairString.indexOf(key);
        String otherString = keyValueStringPairString.substring(index + key.length());
        int startIndex = -1;
        int endIndex;
        int i = -1;
        for (char c : otherString.toCharArray()) {
            i++;
            if (c == '"' && startIndex == -1) {
                startIndex = i;
                continue;
            }
            if (c == '"' && startIndex != -1) {
                endIndex = i;
                if (endIndex > startIndex) {
                    return otherString.substring(startIndex + 1, endIndex);
                } else {
                    return null;
                }
            }
        }
        return null;
    }

    /**
     * Extracts the parenthesized {@code :files} list as a flat, unquoted string.
     *
     * @param keyValueStringPairString the ":key value" section of the recipe
     * @return the files list content, or the empty string if absent
     */
    private static String extraFileListString(String keyValueStringPairString) {
        int index = keyValueStringPairString.indexOf(":files");
        return index > 0
                ? extraPairContent(keyValueStringPairString.substring(index)).replaceAll("\"", "")
                : StringUtils.EMPTY;
    }

    /**
     * Returns the content between the first balanced pair of parentheses in
     * {@code origin} (nested parentheses are skipped over).
     *
     * @param origin text expected to contain a parenthesized group
     * @return the inner content, or the empty string if no balanced pair exists
     */
    public static String extraPairContent(String origin) {
        boolean gotFirstLeft = false;
        int leftIndex = -1;
        int rightIndex = -1;
        int match = 0;
        for (int i = 0; i < origin.length(); i++) {
            if (origin.charAt(i) == '(') {
                if (!gotFirstLeft) {
                    gotFirstLeft = true;
                    leftIndex = i;
                } else {
                    match++;
                }
            } else if (origin.charAt(i) == ')') {
                if (match == 0) {
                    rightIndex = i;
                    break;
                } else {
                    match--; // FIX: decrement was missing (bare `match` is not a statement)
                }
            }
        }
        if (leftIndex >= 0 && rightIndex >= 0 && rightIndex > leftIndex) {
            return origin.substring(leftIndex + 1, rightIndex);
        }
        return StringUtils.EMPTY;
    }

    /**
     * Finds the index of the {@code ')'} matching the {@code '('} at
     * {@code leftBracketPos}.
     *
     * @param leftBracketPos index of the opening parenthesis
     * @param content        the text to scan
     * @return index of the matching close parenthesis, or -1 if unbalanced
     */
    public static int findAnotherBracket(int leftBracketPos, String content) {
        int depth = 0;
        for (int i = leftBracketPos + 1; i < content.length(); i++) {
            if ('(' == content.charAt(i)) {
                depth++;
            }
            if (')' == content.charAt(i) && depth == 0) {
                return i;
            }
            if (')' == content.charAt(i) && depth > 0) {
                depth--; // FIX: decrement was missing (bare `flagInt` is not a statement)
            }
        }
        return -1;
    }
}
package application;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;

import javafx.application.Application;
import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.geometry.Insets;
import javafx.geometry.Pos;
import javafx.stage.FileChooser;
import javafx.stage.Stage;
import javafx.scene.Scene;
import javafx.scene.control.Alert;
import javafx.scene.control.Alert.AlertType;
import javafx.scene.control.Button;
import javafx.scene.control.ButtonType;
import javafx.scene.control.Label;
import javafx.scene.control.ScrollPane;
import javafx.scene.control.TextArea;
import javafx.scene.control.TextField;
import javafx.scene.control.TextInputDialog;
import javafx.scene.input.KeyCode;
import javafx.scene.input.KeyEvent;
import javafx.scene.layout.BorderPane;
import javafx.scene.layout.GridPane;
import javafx.scene.layout.HBox;
import javafx.scene.layout.VBox;
import javafx.scene.paint.Color;
import javafx.scene.shape.Line;
import javafx.scene.text.Font;
import javafx.scene.text.FontWeight;
import javafx.scene.text.Text;

/**
 * JavaFX entry point of the "TESTIT" quiz application: one window to create a
 * test from a CSV file (protected by an access code), and one to take it.
 * Identifiers and user-facing strings are Estonian; helper classes
 * (Nimekiri, Organisatsioon, Lahter, TestiFailiSisu, ...) live elsewhere
 * in this project.
 */
public class Main extends Application {

	/**
	 * Reads one test file: question and answer columns, score data per
	 * organisation, and the introduction text.
	 * Column/row indices (4, 1) and (4, 2) are fixed by the expected CSV layout
	 * — assumed; confirm against the Nimekiri reader.
	 */
	private static TestiFailiSisu loeFail (File fail) throws Exception {
		Nimekiri kysimused = new Nimekiri();
		List<String> kysimusteList = kysimused.loeNimekiriVeerust(fail, 4, 1);
		Nimekiri valikVastused = new Nimekiri();
		List<String> valikvastusteList = valikVastused.loeNimekiriVeerust(fail, 4, 2);
		Organisatsioon organisatsioonid = new Organisatsioon();
		ArrayList<Organisatsioon> tulemused = organisatsioonid.loeOrganisatsioonideAndmed(fail);
		// Check that the test introduction cell is present
		Lahter tutvustus = new Lahter(fail);
		String tutvustusTekst = tutvustus.loeLahter();
		TestiFailiSisu testiFailiSisu = new TestiFailiSisu(kysimusteList, valikvastusteList, tulemused, tutvustusTekst);
		return testiFailiSisu;
	}

	/**
	 * Copies lahteFail to sihtFail byte-by-byte and returns the parsed contents
	 * of the source file. NOTE(review): the streams are not closed if an
	 * exception escapes — try-with-resources would be safer; left unchanged here.
	 */
	private static TestiFailiSisu salvestaFail(File lahteFail, File sihtFail) throws Exception {
		InputStream is = null;
		OutputStream os = null;
		is = new FileInputStream(lahteFail);
		TestiFailiSisu testiFailiSisu = loeFail(lahteFail);
		os = new FileOutputStream(sihtFail);
		byte[] buffer = new byte[1024];
		int length;
		while ((length = is.read(buffer)) > 0) {
			os.write(buffer, 0, length);
		}
		is.close();
		os.close();
		return testiFailiSisu;
	}

	// Contents of the currently loaded test (set when a test is taken).
	protected TestiFailiSisu testiSisu;

	@Override
	public void start(Stage pealava) {
		Parool parool = new Parool();
		Top topNr = new Top();
		BorderPane piiripaan = new BorderPane();
		// Main screen: big "TESTIT" title on top, two buttons in the center.
		HBox tekstbox = new HBox();
		tekstbox.setPadding(new Insets(100, 15, 15, 15));
		Text testit = new Text("TESTIT");
		testit.setFont(Font.font ("Bradley Hand ITC", FontWeight.BOLD, 150));
		tekstbox.getChildren().add(testit);
		tekstbox.setAlignment(Pos.CENTER);
		piiripaan.setTop(tekstbox);
		HBox hbox = new HBox();
		hbox.setPadding(new Insets(15, 15, 15, 15));
		hbox.setSpacing(10);
		Button nuppLoo = new Button("Loo test");
		nuppLoo.setPrefSize(500, 100);
		nuppLoo.setStyle("-fx-font-size: 20pt;");
		Button nuppTest = new Button("Täida test");
		nuppTest.setPrefSize(500, 100);
		nuppTest.setStyle("-fx-font-size: 20pt;");
		hbox.getChildren().addAll(nuppLoo, nuppTest);
		hbox.setAlignment(Pos.CENTER);
		piiripaan.setCenter(hbox);
		// "Create test" flow
		nuppLoo.setOnAction(new EventHandler<ActionEvent>() {
			@Override
			public void handle(ActionEvent e) {
				// Close the first window
				pealava.hide();
				// Create a second stage
				Stage looLava = new Stage();
				BorderPane piiripaanLoo = new BorderPane();
				// "TESTIT" title styling and placement
				HBox testitbox = new HBox();
				testitbox.setPadding(new Insets(20, 15, 15, 15));
				Text testit = new Text("TESTIT");
				testit.setFont(Font.font ("Bradley Hand ITC", FontWeight.BOLD, 60));
				testitbox.getChildren().add(testit);
				testitbox.setAlignment(Pos.TOP_CENTER);
				piiripaanLoo.setTop(testitbox);
				// Info text styling and placement
				VBox infobox = new VBox();
				infobox.setPadding(new Insets(15, 15, 15, 15));
				infobox.setSpacing(24);
				Text loomiseInfo = new Text();
				String loomiseInfoTekst = "Testi loomiseks täida allolevad lüngad ning lae üles testi aluseks olev csv fail.";
				loomiseInfo.setText(loomiseInfoTekst);
				loomiseInfo.setFont(Font.font ("Regular", 18));
				// Form grid: access code + "top N" count, with feedback labels.
				GridPane tabelPaigutus = new GridPane();
				tabelPaigutus.setPadding(new Insets(15, 15, 15, 15));
				tabelPaigutus.setVgap(5);
				tabelPaigutus.setHgap(5);
				Label info = new Label("Sisesta siia KOOD (ilma tühikuteta, vähemalt 3 tähemärki), millega jõuab vastaja Sinu testini:" + "\n" + "(Sisestatud info kinnitamiseks vajuta ENTER)");
				TextField kood = new TextField();
				Label kuvaKoodiInfo = new Label("Testi ligipääsukood:");
				final Label kuvaKood = new Label();
				kuvaKood.setTextFill(Color.RED);
				GridPane.setConstraints(info, 0, 0);
				GridPane.setColumnSpan(info, 3);
				GridPane.setConstraints(kood, 0, 1);
				GridPane.setColumnSpan(kood, 3);
				GridPane.setConstraints(kuvaKoodiInfo, 0, 2);
				GridPane.setConstraints(kuvaKood, 1, 2);
				Label tulemusteInfo = new Label("Sisesta siia arv, mitut parimat tulemust vastaja lõpuks nägema peaks:" + "\n" + "(Sisestatud info kinnitamiseks vajuta ENTER)");
				TextField tulemusteTop = new TextField();
				Label kuvaTulemusteTop = new Label("Vastajale on lõpuks nähtav tema tulemuste");
				final Label kuvaTop = new Label();
				kuvaTop.setTextFill(Color.RED);
				GridPane.setConstraints(tulemusteInfo, 0, 4);
				GridPane.setColumnSpan(tulemusteInfo, 3);
				GridPane.setConstraints(tulemusteTop, 0, 5);
				GridPane.setColumnSpan(tulemusteTop, 3);
				GridPane.setConstraints(kuvaTulemusteTop, 0, 6);
				GridPane.setConstraints(kuvaTop, 1, 6);
				tabelPaigutus.setAlignment(Pos.CENTER);
				tabelPaigutus.getChildren().addAll( info, kood, kuvaKoodiInfo, kuvaKood,
						tulemusteInfo, tulemusteTop, kuvaTulemusteTop, kuvaTop);
				// ENTER in the code field: validate length, reject duplicates, store the code.
				kood.setOnKeyPressed(new EventHandler<KeyEvent>() {
					public void handle(KeyEvent keyEvent) {
						if (keyEvent.getCode() == KeyCode.ENTER) {
							if ((kood.getText() != null && kood.getText().length() >= 3 && !kood.getText().isEmpty())) {
								String failiKood = kood.getText();
								// Check whether a file with this code already exists.
								FailiKontrollija kt = new FailiKontrollija(failiKood);
								if (kt.kasOnFail()) {
									Alert alert = new Alert(AlertType.ERROR);
									alert.setTitle("Viga!");
									alert.setHeaderText("Sellise koodi või võtmesõnaga test on juba olemas! Sisesta uus KOOD!");
									alert.showAndWait();
									kood.clear();
								} else {
									parool.setParool(failiKood);
									kuvaKood.setText(failiKood);
								}
							} else {
								kuvaKood.setText("NB! Lisa testile MIN 3 tähemärgi pikkune ligipääsukood!");
							}
						}
					}
				});
				// ENTER in the "top N" field: must be an integer.
				tulemusteTop.setOnKeyPressed(new EventHandler<KeyEvent>() {
					public void handle(KeyEvent keyEvent) {
						if (keyEvent.getCode() == KeyCode.ENTER) {
							try {
								if ((tulemusteTop.getText() != null && !tulemusteTop.getText().isEmpty())) {
									String top = tulemusteTop.getText();
									topNr.setTopNr(Integer.parseInt(top));
									kuvaTop.setText("TOP " + top);
								} else {
									kuvaTop.setText("NB! Lisa kasutajale tulemuseks kuvatav paremik!");
								}
							} catch (NumberFormatException e) {
								Alert alert = new Alert(AlertType.ERROR);
								alert.setTitle("Viga!");
								alert.setHeaderText("Sisesta paremiku lahtrisse täisarv!");
								alert.showAndWait();
							}
						}
					}
				});
				Button nuppValiFail = new Button("Vali testi aluseks olev csv fail ...");
				nuppValiFail.setPrefSize(800, 60);
				nuppValiFail.setStyle("-fx-font-size: 14pt;");
				piiripaanLoo.setCenter(nuppValiFail);
				infobox.getChildren().addAll(loomiseInfo, tabelPaigutus, nuppValiFail);
				infobox.setAlignment(Pos.TOP_CENTER);
				piiripaanLoo.setCenter(infobox);
				FileChooser fileChooser = new FileChooser();
				// File selection: copy the CSV to "<code>_<topN>.csv" and show a summary.
				nuppValiFail.setOnAction(new EventHandler<ActionEvent>() {
					@Override
					public void handle(ActionEvent e) {
						if (parool.getParool() != null && topNr.getTopNr() != 0) {
							// Choose and open the source file
							File fail = fileChooser.showOpenDialog(pealava);
							// The code and file title come from the text the user entered.
							String koodNimi = parool.getParool();
							String top = Integer.toString(topNr.getTopNr());
							File failKoopia = new File(koodNimi + "_" + top + ".csv");
							if (fail != null) {
								loomiseInfo.setText(" ");
								tabelPaigutus.getChildren().removeAll( info, kood, tulemusteInfo, tulemusteTop);
								try {
									TestiFailiSisu testiFailiSisu = salvestaFail(fail, failKoopia);
									// Summary view of the created test
									VBox tulemusbox = new VBox();
									tulemusbox.setPadding(new Insets(15, 15, 15, 15));
									tulemusbox.setSpacing(24);
									tulemusbox.setAlignment(Pos.TOP_LEFT);
									Line joon = new Line();
									joon.setStartX(0.0f);
									joon.setStartY(0.0f);
									joon.setEndX(1000.0f);
									joon.setEndY(0.0f);
									TextArea tekstiala = new TextArea();
									tekstiala.setEditable(false);
									tekstiala.setWrapText(true);
									tekstiala.setMaxWidth(Double.MAX_VALUE);
									tekstiala.setMaxHeight(Double.MAX_VALUE);
									Text testiInfo = new Text("Kokkuvõtlik ülevaade testi sisust");
									testiInfo.setFont(Font.font ("Regular", 24));
									tekstiala.setText(testiFailiSisu.toString(4));
									HBox nupualus = new HBox();
									nupualus.setPadding(new Insets(15, 15, 15, 15));
									nupualus.setSpacing(10);
									Button nuppOK = new Button("Valmis");
									nuppOK.setPrefSize(300, 20);
									nuppOK.setStyle("-fx-font-size: 16pt;");
									Button nuppKustutaFail = new Button("Loobu ja kustuta");
									nuppKustutaFail.setPrefSize(300, 20);
									nuppKustutaFail.setStyle("-fx-font-size: 16pt;");
									nupualus.getChildren().addAll(nuppOK, nuppKustutaFail);
									nupualus.setAlignment(Pos.BOTTOM_RIGHT);
									tulemusbox.getChildren().addAll( joon, testiInfo, tekstiala, nupualus);
									piiripaanLoo.setBottom(tulemusbox);
									// "Done": keep the file and close this window.
									nuppOK.setOnAction(new EventHandler<ActionEvent>() {
										@Override
										public void handle(ActionEvent e) {
											looLava.close();
										}
									});
									// "Discard and delete": reset the form and delete the copy.
									nuppKustutaFail.setOnAction(new EventHandler<ActionEvent>() {
										@Override
										public void handle(ActionEvent e) {
											kood.clear();
											kuvaKood.setText("");
											tulemusteTop.clear();
											kuvaTop.setText("");
											parool.setParool(null);
											topNr.setTopNr(0);
											tulemusbox.getChildren().removeAll( joon, testiInfo, tekstiala, nupualus);
											File failitee = failKoopia.getAbsoluteFile();
											failitee.delete();
											loomiseInfo.setText(loomiseInfoTekst);
											tabelPaigutus.getChildren().addAll( info, kood, tulemusteInfo, tulemusteTop);
										}
									});
								} catch (tyhiErind e1) {
									// Source file has missing data: report and delete the new copy.
									Alert alert = new Alert(AlertType.ERROR);
									alert.setTitle("Viga!");
									alert.setHeaderText("Valitud failis on puuduvat infot!");
									alert.setContentText(e1.getMessage());
									alert.showAndWait();
									System.out.println(e1.getMessage());
									// When an error is found, the newly created file is deleted.
									File failitee = failKoopia.getAbsoluteFile();
									failitee.delete();
								} catch (Exception e2) {
									System.out.println(e2.getMessage());
								}
							}
						} else {
							Alert alert = new Alert(AlertType.ERROR);
							alert.setTitle("Viga!");
							alert.setHeaderText("Kontrolli ligipääsukoodi ja paremiku arvu!");
							alert.setContentText("Sisesta ilma tühikuteta min 3 tähemärgi pikkune ligipääsukood testile ja täisarvuna vastajale kuvatav tulemuste paremiku hulk.");
							alert.showAndWait();
						}
					}
				});
				Scene stseen2 = new Scene(piiripaanLoo, 800,500);
				looLava.setScene(stseen2);
				looLava.show();
			}});
		// "Take test" flow
		nuppTest.setOnAction(new EventHandler<ActionEvent>() {
			@Override
			public void handle(ActionEvent e) {
				TextInputDialog koodiSisestusKoht = new TextInputDialog();
				koodiSisestusKoht.setTitle("Võtmesõna või ligipääsukood");
				koodiSisestusKoht.setHeaderText("Et jõuaksid õige testini, sisesta võtmesõna või kood, mille oled testi läbiviijalt saanud.");
				koodiSisestusKoht.setContentText("Sisesta testi kood/võtmesõna: ");
				Optional<String> result = koodiSisestusKoht.showAndWait();
				if (result.isPresent()){
					String votmesona = result.get();
					FailiKontrollija koodigaFail = new FailiKontrollija(votmesona);
					if (!koodigaFail.kasOnFail() || koodigaFail.getFailideArv() > 1) {
						Alert alert = new Alert(AlertType.ERROR);
						alert.setTitle("Viga!");
						alert.setHeaderText("Kontrolli ligipääsukoodi või võtmesõna!");
						alert.setContentText("Sellise ligipääsukoodiga testi pole.");
						alert.showAndWait();
					} else {
						pealava.hide();
						// Recover the "top N" count from the file name: "<code>_<topN>.csv"
						File failitee = koodigaFail.getFailiAadress();
						String failiteeTekstina = failitee.toString();
						int indeks1 = failiteeTekstina.lastIndexOf("_");
						int indeks2 = failiteeTekstina.lastIndexOf(".csv");
						int topArv = Integer.parseInt(failiteeTekstina.substring(indeks1+1, indeks2));
						// Read the test data from the table
						try {
							testiSisu = loeFail(failitee);
						} catch (Exception e1) {
							e1.printStackTrace();
						}
						// New window presenting the test
						Stage uusLava = new Stage();
						BorderPane uusPiiripaan = new BorderPane();
						VBox paigutuskast = new VBox();
						paigutuskast.setPadding(new Insets(15, 15, 15, 15));
						paigutuskast.setSpacing(24);
						paigutuskast.setAlignment(Pos.CENTER);
						HBox vastusteKast = new HBox();
						vastusteKast.setPadding(new Insets(15, 15, 15, 15));
						vastusteKast.setSpacing(24);
						vastusteKast.setAlignment(Pos.CENTER);
						Text testiTutvustus = new Text(testiSisu.getTutvustusTekst());
						testiTutvustus.setFont(Font.font ("Regular", 24));
						testiTutvustus.setWrappingWidth(600);
						Button nuppTaitma = new Button("ASU TÄITMA!");
						nuppTaitma.setPrefSize(500, 100);
						nuppTaitma.setStyle("-fx-font-size: 20pt;");
						paigutuskast.getChildren().addAll( testiTutvustus, nuppTaitma);
						uusPiiripaan.setCenter(paigutuskast);
						Scene stseen3 = new Scene(uusPiiripaan, 800,500);
						uusLava.setScene(stseen3);
						uusLava.show();
						// "Start answering": one confirmation dialog per question.
						nuppTaitma.setOnAction(new EventHandler<ActionEvent>() {
							@Override
							public void handle(ActionEvent e) {
								paigutuskast.getChildren().removeAll( testiTutvustus, nuppTaitma);
								List<String> kysimused = testiSisu.getKysimusteList();
								List<String> valikvastused = testiSisu.getValikvastusteList();
								Vastaja vastaja = new Vastaja();
								for(int i = 0; i < kysimused.size(); i++) {
									Alert alert = new Alert(AlertType.CONFIRMATION);
									alert.setTitle("TESTIT");
									alert.setHeaderText(kysimused.get(i));
									// alert.setContentText();
									// One button per comma-separated answer option.
									String[] valikud = valikvastused.get(i).split(",");
									ButtonType[] nupud = new ButtonType[valikud.length];
									for(int j = 0; j < valikud.length; j++) {
										ButtonType uusnupp = new ButtonType(valikud[j].trim());
										nupud[j] = uusnupp;
									}
									alert.getButtonTypes().setAll(nupud);
									Optional<ButtonType> tulemus = alert.showAndWait();
									// NOTE(review): the chosen answer is recovered by parsing
									// Optional.toString() between '=' and the last ','; fragile —
									// tulemus.get().getText() would be more direct. Left unchanged.
									String tulemusStr = tulemus.toString();
									int i1 = tulemusStr.indexOf("=");
									int i2 = tulemusStr.lastIndexOf(",");
									String tulemusTekstina = tulemusStr.substring(i1+1, i2);
									vastaja.lisaVastus(tulemusTekstina);
								}
								// Score the answers and show the top-N result list.
								ParimValik edetabel = new ParimValik(vastaja.getVastused(), testiSisu.getTulemused());
								edetabel.valiParimEdetabel();
								String testiTulemus = edetabel.kuvaTop(topArv);
								VBox tulemusteKuva = new VBox();
								tulemusteKuva.setPadding(new Insets(15, 15, 15, 15));
								tulemusteKuva.setSpacing(24);
								Text pealkiri = new Text("Sinu testi tulemus:");
								pealkiri.setFont(Font.font ("Regular", FontWeight.BOLD, 24));
								Text tulemusteTekst = new Text(testiTulemus);
								tulemusteTekst.setWrappingWidth(600);
								tulemusteTekst.setFont(Font.font ("Regular", 16));
								tulemusteKuva.getChildren().addAll(pealkiri,tulemusteTekst);
								tulemusteKuva.setAlignment(Pos.CENTER);
								ScrollPane s1 = new ScrollPane(tulemusteKuva);
								s1.setFitToHeight(true);
								s1.setFitToWidth(true);
								uusPiiripaan.setCenter(s1);
							}
						});
					}
				}
			}
		});
		Scene scene = new Scene(piiripaan,600,400);
		pealava.setTitle("TESTIT");
		pealava.setScene(scene);
		pealava.show();
	}

	public static void main(String[] args) {
		launch(args);
	}
}
package com.intellij.lang.folding;

import com.intellij.lang.ASTNode;
import com.intellij.lang.Language;
import com.intellij.lang.LanguageExtension;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.project.DumbService;
import com.intellij.psi.PsiElement;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * Per-language registry of {@link FoldingBuilder} extensions.
 *
 * @author yole
 * @author Konstantin Bulenkov
 */
public class LanguageFolding extends LanguageExtension<FoldingBuilder> {
  public static final LanguageFolding INSTANCE = new LanguageFolding();

  private LanguageFolding() {
    super("com.intellij.lang.foldingBuilder");
  }

  /**
   * This method is left to preserve binary compatibility.
   */
  @Override
  public FoldingBuilder forLanguage(@NotNull Language l) {
    return super.forLanguage(l);
  }

  @Override
  protected FoldingBuilder findForLanguage(@NotNull Language l) {
    // Zero builders: nothing to fold with; one: use it directly;
    // several: combine them into a single composite builder.
    List<FoldingBuilder> builders = allForLanguage(l);
    switch (builders.size()) {
      case 0:
        return null;
      case 1:
        return builders.get(0);
      default:
        return new CompositeFoldingBuilder(builders);
    }
  }

  /**
   * Only queries base language results if there are no extensions for originally requested language.
   */
  @NotNull
  @Override
  public List<FoldingBuilder> allForLanguage(@NotNull Language language) {
    Language current = language;
    while (current != null) {
      List<FoldingBuilder> found = new ArrayList<>(forKey(current));
      if (!found.isEmpty()) {
        return found;
      }
      current = current.getBaseLanguage();
    }
    return Collections.emptyList();
  }

  /**
   * Runs the given builder over {@code root} and returns its folding regions.
   * Returns an empty array when the builder is null, the element has no AST
   * node, or the builder is not dumb-aware while indexing is in progress.
   */
  @NotNull
  public static FoldingDescriptor[] buildFoldingDescriptors(@Nullable FoldingBuilder builder,
                                                            @NotNull PsiElement root,
                                                            @NotNull Document document,
                                                            boolean quick) {
    boolean blockedByDumbMode = !DumbService.isDumbAware(builder)
                                && DumbService.getInstance(root.getProject()).isDumb();
    if (blockedByDumbMode) {
      return FoldingDescriptor.EMPTY;
    }

    if (builder instanceof FoldingBuilderEx) {
      return ((FoldingBuilderEx)builder).buildFoldRegions(root, document, quick);
    }

    final ASTNode node = root.getNode();
    return node == null || builder == null
           ? FoldingDescriptor.EMPTY
           : builder.buildFoldRegions(node, document);
  }
}
package org.basex.http;

import static javax.servlet.http.HttpServletResponse.*;
import static org.basex.http.HTTPText.*;

import java.io.*;
import java.util.*;

import javax.servlet.*;
import javax.servlet.http.*;

import org.basex.core.*;
import org.basex.core.StaticOptions.*;
import org.basex.core.jobs.*;
import org.basex.core.users.*;
import org.basex.query.*;
import org.basex.query.value.*;
import org.basex.query.value.item.*;
import org.basex.server.*;
import org.basex.util.*;

/**
 * Base class for BaseX HTTP servlets: initializes the shared HTTP context,
 * reads per-servlet user/authentication settings from the servlet config, and
 * maps exceptions thrown by {@link #run(HTTPConnection)} to HTTP status codes.
 */
public abstract class BaseXServlet extends HttpServlet {
  /** Servlet-specific user. */
  String user;
  /** Servlet-specific authentication method. */
  AuthMethod auth;

  /**
   * Initializes the servlet: sets up the shared {@link HTTPContext} and
   * reads the {@code user} and {@code authmethod} init parameters
   * (optionally prefixed with the database option prefix).
   *
   * @param config servlet configuration
   * @throws ServletException if the HTTP context cannot be initialized
   */
  @Override
  public void init(final ServletConfig config) throws ServletException {
    super.init(config);
    try {
      HTTPContext.init(config.getServletContext());
    } catch(final IOException ex) {
      throw new ServletException(ex);
    }

    // set user and authentication method
    final Enumeration<String> en = config.getInitParameterNames();
    while(en.hasMoreElements()) {
      String key = en.nextElement().toLowerCase(Locale.ENGLISH);
      final String val = config.getInitParameter(key);
      // strip the database option prefix, if present
      if(key.startsWith(Prop.DBPREFIX)) key = key.substring(Prop.DBPREFIX.length());
      if(key.equalsIgnoreCase(StaticOptions.USER.name())) {
        user = val;
      } else if(key.equalsIgnoreCase(StaticOptions.AUTHMETHOD.name())) {
        auth = AuthMethod.valueOf(val);
      }
    }
  }

  /**
   * Handles every HTTP request: authenticates the connection, delegates to
   * {@link #run(HTTPConnection)}, and translates the various exception types
   * into HTTP error responses. The catch order is significant: more specific
   * exception types must be handled before the generic fallback.
   *
   * @param req HTTP request
   * @param res HTTP response
   * @throws IOException I/O exception
   */
  @Override
  public final void service(final HttpServletRequest req, final HttpServletResponse res)
      throws IOException {
    final HTTPConnection conn = new HTTPConnection(req, res, this);
    try {
      conn.authenticate();
      run(conn);
      conn.log(SC_OK, "");
    } catch(final HTTPException ex) {
      conn.error(ex.getStatus(), Util.message(ex));
    } catch(final LoginException ex) {
      conn.error(SC_UNAUTHORIZED, Util.message(ex));
    } catch(final QueryException ex) {
      // a query may carry an explicit integer error code in its value
      final Value v = ex.value();
      final int code = v instanceof Int ? (int) ((Int) v).itr() : SC_BAD_REQUEST;
      conn.error(code, ex.getMessage(), Util.message(ex));
    } catch(final IOException ex) {
      conn.error(SC_BAD_REQUEST, Util.message(ex));
    } catch(final JobException ex) {
      conn.stop(ex);
    } catch(final Exception ex) {
      // unexpected error: report as bug with a server error status
      final String msg = Util.bug(ex);
      Util.errln(msg);
      conn.error(SC_INTERNAL_SERVER_ERROR, Util.info(UNEXPECTED_X, msg));
    } finally {
      // in debug mode, dump the full request headers and the response
      if(Prop.debug) {
        Util.outln("_ REQUEST _________________________________" + Prop.NL + req);
        final Enumeration<String> en = req.getHeaderNames();
        while(en.hasMoreElements()) {
          final String key = en.nextElement();
          Util.outln(Text.LI + key + Text.COLS + req.getHeader(key));
        }
        Util.out("_ RESPONSE ________________________________" + Prop.NL + res);
      }
    }
  }

  /**
   * Runs the code.
   * @param conn HTTP connection
   * @throws Exception any exception
   */
  protected abstract void run(HTTPConnection conn) throws Exception;

  /**
   * Returns a servlet-specific user name. By default, it returns the name of the database user.
   * @param http HTTP connection
   * @return user name or {@code null}
   */
  public String username(final HTTPConnection http) {
    final User u = http.context.user();
    return u != null ? u.name() : null;
  }
}
package aQute.bnd.build;

import java.io.*;
import java.util.*;
import java.util.Map.Entry;
import java.util.regex.*;

import aQute.bnd.differ.*;
import aQute.bnd.differ.Baseline.Info;
import aQute.bnd.header.*;
import aQute.bnd.osgi.*;
import aQute.bnd.service.*;
import aQute.bnd.service.repository.*;
import aQute.bnd.service.repository.SearchableRepository.ResourceDescriptor;
import aQute.bnd.version.*;
import aQute.lib.collections.*;
import aQute.lib.io.*;

/**
 * Builder bound to a {@link Project}: it feeds the project's class-, build-,
 * boot- and source paths into the generic {@link Builder} and implements
 * baselining of the built JAR against a repository revision.
 */
public class ProjectBuilder extends Builder {
	private final DiffPluginImpl	differ	= new DiffPluginImpl();
	Project							project;
	boolean							initialized;

	/**
	 * Creates a builder for the given project.
	 * @param project the owning project
	 */
	public ProjectBuilder(Project project) {
		super(project);
		this.project = project;
	}

	/**
	 * Creates a sub builder that shares the parent's project.
	 * @param builder the parent builder
	 */
	public ProjectBuilder(ProjectBuilder builder) {
		super(builder);
		this.project = builder.project;
	}

	@Override
	public long lastModified() {
		// the builder is stale if either the project or the super state changed
		return Math.max(project.lastModified(), super.lastModified());
	}

	/**
	 * We put our project and our workspace on the macro path.
	 */
	@Override
	protected Object[] getMacroDomains() {
		return new Object[] {
				project, project.getWorkspace()
		};
	}

	@Override
	public Builder getSubBuilder() throws Exception {
		return project.getBuilder(this);
	}

	/** @return the project this builder belongs to */
	public Project getProject() {
		return project;
	}

	/**
	 * Lazily wires the project's paths into this builder. Errors are reported
	 * through the message reporter instead of being thrown, so a broken path
	 * does not abort the build outright.
	 */
	@Override
	public void init() {
		try {
			if (!initialized) {
				initialized = true;
				for (Container file : project.getClasspath()) {
					addClasspath(file.getFile());
				}

				for (Container file : project.getBuildpath()) {
					addClasspath(file.getFile());
				}

				for (Container file : project.getBootclasspath()) {
					addClasspath(file.getFile());
				}

				for (File file : project.getAllsourcepath()) {
					addSourcepath(file);
				}

			}
		}
		catch (Exception e) {
			msgs.Unexpected_Error_("ProjectBuilder init", e);
		}
	}

	@Override
	public List<Jar> getClasspath() {
		init();
		return super.getClasspath();
	}

	@Override
	protected void changedFile(File f) {
		project.getWorkspace().changedFile(f);
	}

	/**
	 * Compare this builder's JAR with a baseline
	 *
	 * @throws Exception
	 */
	@Override
	public void doBaseline(Jar dot) throws Exception {

		String diffignore = project.getProperty(Constants.DIFFIGNORE);
		trace("ignore headers %s", diffignore);
		differ.setIgnore(diffignore);

		Jar fromRepo = getBaselineJar();
		if (fromRepo == null) {
			trace("No baseline jar %s", getProperty(Constants.BASELINE));
			return;
		}

		Version newer = new Version(getVersion());
		Version older = new Version(fromRepo.getVersion());

		if (!getBsn().equals(fromRepo.getBsn())) {
			error("The symbolic name of this project (%s) is not the same as the baseline: %s", getBsn(),
					fromRepo.getBsn());
			return;
		}

		// Check if we want to overwrite an equal version that is not staging
		if (newer.getWithoutQualifier().equals(older.getWithoutQualifier())) {
			RepositoryPlugin rr = getBaselineRepo();
			if (rr instanceof InfoRepository) {
				ResourceDescriptor descriptor = ((InfoRepository) rr).getDescriptor(getBsn(), older);
				if (descriptor != null && descriptor.phase != Phase.STAGING) {
					error("Baselining %s against same version %s but the repository says the older repository version is not the required %s but is instead %s",
							getBsn(), getVersion(), Phase.STAGING, descriptor.phase);
					return;
				}
			}
		}

		trace("baseline %s-%s against: %s", getBsn(), getVersion(), fromRepo.getName());
		try {
			Baseline baseliner = new Baseline(this, differ);

			Set<Info> infos = baseliner.baseline(dot, fromRepo, null);
			if (infos.isEmpty())
				trace("no deltas");

			for (Info info : infos) {
				if (info.mismatch) {
					SetLocation l = error(
							"Baseline mismatch for package %s, %s change. Current is %s, repo is %s, suggest %s or %s\n",
							info.packageName, info.packageDiff.getDelta(), info.newerVersion, info.olderVersion,
							info.suggestedVersion, info.suggestedIfProviders == null ? "-" : info.suggestedIfProviders);
					l.header(Constants.BASELINE);
					fillInLocationForPackageInfo(l.location(), info.packageName);
					if (getPropertiesFile() != null)
						l.file(getPropertiesFile().getAbsolutePath());
					l.details(info);
				}
			}
			aQute.bnd.differ.Baseline.BundleInfo binfo = baseliner.getBundleInfo();
			if (binfo.mismatch) {
				SetLocation error = error("The bundle version %s is too low, must be at least %s", binfo.version,
						binfo.suggestedVersion);
				error.context("Baselining");
				error.header(Constants.BUNDLE_VERSION);
				error.details(binfo);
				FileLine fl = getHeader(Pattern.compile("^" + Constants.BUNDLE_VERSION, Pattern.MULTILINE));
				if (fl != null) {
					error.file(fl.file.getAbsolutePath());
					error.line(fl.line);
					error.length(fl.length);
				}
			}
		}
		finally {
			fromRepo.close();
		}
	}

	/**
	 * Tries to point the given location at the place where the version of the
	 * given package is declared: Export-Package, -exportcontents, or the
	 * package's {@code package-info.java} / {@code packageinfo} source file.
	 *
	 * @param location location to fill in (file/line/length)
	 * @param packageName fully qualified package name
	 * @throws Exception if header or file scanning fails
	 */
	public void fillInLocationForPackageInfo(Location location, String packageName) throws Exception {
		Parameters eps = getExportPackage();
		Attrs attrs = eps.get(packageName);
		FileLine fl;
		if (attrs != null && attrs.containsKey(Constants.VERSION_ATTRIBUTE)) {
			fl = getHeader(Pattern.compile(Constants.EXPORT_PACKAGE, Pattern.CASE_INSENSITIVE));
			if (fl != null) {
				location.file = fl.file.getAbsolutePath();
				location.line = fl.line;
				location.length = fl.length;
				return;
			}
		}

		Parameters ecs = getExportContents();
		attrs = ecs.get(packageName);
		if (attrs != null && attrs.containsKey(Constants.VERSION_ATTRIBUTE)) {
			fl = getHeader(Pattern.compile(Constants.EXPORT_CONTENTS, Pattern.CASE_INSENSITIVE));
			if (fl != null) {
				location.file = fl.file.getAbsolutePath();
				location.line = fl.line;
				location.length = fl.length;
				return;
			}
		}

		for (File src : project.getSourcePath()) {
			String path = packageName.replace('.', '/');
			File packageDir = IO.getFile(src, path);
			File pi = IO.getFile(packageDir, "package-info.java");
			if (pi.isFile()) {
				fl = findHeader(pi, Pattern.compile("@Version\\s*([^)]+)"));
				if (fl != null) {
					location.file = fl.file.getAbsolutePath();
					location.line = fl.line;
					location.length = fl.length;
					return;
				}
			}
			pi = IO.getFile(packageDir, "packageinfo");
			if (pi.isFile()) {
				fl = findHeader(pi, Pattern.compile("^\\s*version.*$"));
				if (fl != null) {
					location.file = fl.file.getAbsolutePath();
					location.line = fl.line;
					location.length = fl.length;
					return;
				}
			}
		}
	}

	/**
	 * Returns the last released revision of this bundle from the release
	 * repository, or {@code null} if none exists. The returned Jar is
	 * registered for closing with this builder.
	 */
	public Jar getLastRevision() throws Exception {
		RepositoryPlugin releaseRepo = getReleaseRepo();
		SortedSet<Version> versions = releaseRepo.versions(getBsn());
		if (versions.isEmpty())
			return null;

		Jar jar = new Jar(releaseRepo.get(getBsn(), versions.last(), null));
		addClose(jar);
		return jar;
	}

	/**
	 * This method attempts to find the baseline jar for the current project. It
	 * reads the -baseline property and treats it as instructions. These
	 * instructions are matched against the bsns of the jars (think sub
	 * builders!). If they match, the sub builder is selected.
	 * <p>
	 * The instruction can then specify the following options:
	 *
	 * <pre>
	 * 	version : baseline version from repository
	 * 	file    : a file path
	 * </pre>
	 *
	 * If neither is specified, the current version is used to find the highest
	 * version (without qualifier) that is below the current version. If a
	 * version is specified, we take the highest version with the same base
	 * version.
	 * <p>
	 * Since baselining is expensive and easily generates errors you must enable
	 * it. The easiest solution is to {@code -baseline: *}. This will match all
	 * sub builders and will calculate the version.
	 *
	 * @return a Jar or null
	 */
	public Jar getBaselineJar() throws Exception {
		String bl = getProperty(Constants.BASELINE);
		if (bl == null || Constants.NONE.equals(bl))
			return null;

		Instructions baselines = new Instructions(getProperty(Constants.BASELINE));
		if (baselines.isEmpty())
			return null; // no baselining

		RepositoryPlugin repo = getBaselineRepo();
		if (repo == null)
			return null; // errors reported already

		String bsn = getBsn();
		Version version = new Version(getVersion());
		SortedSet<Version> versions = removeStagedAndFilter(repo.versions(bsn), repo, bsn);

		if (versions.isEmpty()) {
			// We have a repo
			Version v = new Version(getVersion());
			if (v.getWithoutQualifier().compareTo(Version.ONE) > 0) {
				// FIX: the message previously claimed "<= 1.0.0", contradicting
				// the guard above; the warning fires for versions above 1.0.0.
				warning("There is no baseline for %s in the baseline repo %s. The build is for version %s, which is > 1.0.0 which suggests that there should be a prior version.",
						getBsn(), repo, v);
			}
			return null;
		}

		// Loop over the instructions, first match commits.

		for (Entry<Instruction,Attrs> e : baselines.entrySet()) {
			if (e.getKey().matches(bsn)) {
				Attrs attrs = e.getValue();
				Version target;

				if (attrs.containsKey("version")) {

					// Specified version!

					String v = attrs.get("version");
					if (!Verifier.isVersion(v)) {
						error("Not a valid version in %s %s", Constants.BASELINE, v);
						return null;
					}

					Version base = new Version(v);
					SortedSet<Version> later = versions.tailSet(base);
					if (later.isEmpty()) {
						// FIX: typo "baselineing" corrected in the error message
						error("For baselining %s-%s, specified version %s not found", bsn, version, base);
						return null;
					}

					// First element is equal or next to the base we desire

					target = later.first();

					// Now, we could end up with a higher version than our
					// current
					// project

				} else if (attrs.containsKey("file")) {

					// Can be useful to specify a file
					// for example when copying a bundle with a public api

					File f = getProject().getFile(attrs.get("file"));
					if (f != null && f.isFile()) {
						Jar jar = new Jar(f);
						addClose(jar);
						return jar;
					}
					error("Specified file for baseline but could not find it %s", f);
					return null;
				} else {
					target = versions.last();
				}

				// Fetch the revision

				if (target.getWithoutQualifier().compareTo(version.getWithoutQualifier()) > 0) {
					error("The baseline version %s is higher than the current version %s for %s in %s", target,
							version, bsn, repo);
					return null;
				}
				if (target.getWithoutQualifier().compareTo(version.getWithoutQualifier()) == 0) {
					if (isPedantic()) {
						warning("Baselining against jar");
					}
				}
				File file = repo.get(bsn, target, attrs);
				if (file == null || !file.isFile()) {
					error("Decided on version %s-%s but cannot get file from repo %s", bsn, version, repo);
					return null;
				}
				Jar jar = new Jar(file);
				addClose(jar);
				return jar;
			}
		}

		// Ignore, nothing matched
		return null;
	}

	/**
	 * Remove any staging versions that have a variant with a higher qualifier.
	 *
	 * @param versions
	 * @param repo
	 * @return
	 * @throws Exception
	 */
	private SortedSet<Version> removeStagedAndFilter(SortedSet<Version> versions, RepositoryPlugin repo, String bsn)
			throws Exception {
		List<Version> filtered = new ArrayList<Version>(versions);
		Collections.reverse(filtered);

		InfoRepository ir = (repo instanceof InfoRepository) ? (InfoRepository) repo : null;

		//
		// Filter any versions that only differ in qualifier
		// The last variable is the last one added. Since we are
		// sorted from high to low, we skip any earlier base versions
		//
		Version last = null;
		for (Iterator<Version> i = filtered.iterator(); i.hasNext();) {
			Version v = i.next();

			// Check if same base version as last
			Version current = v.getWithoutQualifier();
			if (last != null && current.equals(last)) {
				i.remove();
				continue;
			}

			// Check if this is not a master if the repo
			// has a state for each resource
			if (ir != null && !isMaster(ir, bsn, v))
				i.remove();

			last = current;
		}
		SortedList<Version> set = new SortedList<Version>(filtered);
		trace("filtered for only latest staged: %s from %s in range ", set, versions);
		return set;
	}

	/**
	 * Check if we have a master phase.
	 *
	 * @param repo
	 * @param bsn
	 * @param v
	 * @return
	 * @throws Exception
	 */
	private boolean isMaster(InfoRepository repo, String bsn, Version v) throws Exception {
		ResourceDescriptor descriptor = repo.getDescriptor(bsn, v);

		//
		// If not there, we assume that the repository is not master aware
		//
		if (descriptor == null)
			return false;

		return descriptor.phase == Phase.MASTER;
	}

	/**
	 * Finds the release repository: the repo named by -releaserepo, or the
	 * first writable repo if the property is not set. Reports an error and
	 * returns {@code null} when none matches.
	 */
	private RepositoryPlugin getReleaseRepo() {
		String repoName = getProperty(Constants.RELEASEREPO);

		List<RepositoryPlugin> repos = getPlugins(RepositoryPlugin.class);
		for (RepositoryPlugin r : repos) {
			if (r.canWrite()) {
				if (repoName == null || r.getName().equals(repoName)) {
					return r;
				}
			}
		}
		if (repoName == null)
			error("Could not find a writable repo for the release repo (-releaserepo is not set)");
		else
			error("No such -releaserepo %s found", repoName);

		return null;
	}

	/**
	 * Finds the baseline repository: the repo named by -baselinerepo, falling
	 * back to the release repo when the property is not set. Reports an error
	 * and returns {@code null} when the named repo does not exist.
	 */
	private RepositoryPlugin getBaselineRepo() {
		String repoName = getProperty(Constants.BASELINEREPO);
		if (repoName == null)
			return getReleaseRepo();

		List<RepositoryPlugin> repos = getPlugins(RepositoryPlugin.class);
		for (RepositoryPlugin r : repos) {
			if (r.getName().equals(repoName))
				return r;
		}
		error("Could not find -baselinerepo %s", repoName);
		return null;
	}
}
package org.micromanager.splitview;

import com.swtdesigner.SwingResourceManager;
import ij.process.ByteProcessor;
import ij.process.ImageProcessor;
import ij.process.ShortProcessor;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.Timer;
import java.text.NumberFormat;
import java.util.prefs.Preferences;
import javax.swing.JColorChooser;
import mmcorej.CMMCore;
import org.json.JSONArray;
import org.json.JSONException;
import org.micromanager.MMStudioMainFrame;
import org.micromanager.api.ScriptInterface;
import org.micromanager.api.DeviceControlGUI;
import org.micromanager.utils.MMScriptException;
import org.micromanager.utils.ReportingUtils;

/**
 * Window that splits each camera frame into two halves (left/right or
 * top/bottom) and displays them as two channels of a Micro-Manager
 * acquisition. Channel 0 is the left/top half (color col1_), channel 1 is
 * the right/bottom half (color col2_).
 *
 * @author nico
 */
public class SplitViewFrame extends javax.swing.JFrame {

   private final ScriptInterface gui_;
   private final DeviceControlGUI dGui_;
   private final CMMCore core_;
   private Preferences prefs_;
   private NumberFormat nf_;
   private long imgDepth_;          // bytes per pixel of the camera image
   private int width_;              // full camera image width
   private int height_;             // full camera image height
   private int newWidth_;           // width of each half-image
   private int newHeight_;          // height of each half-image
   private String orientation_;     // LR or TB
   Color col1_;                     // channel 0 (left/top) display color
   Color col2_;                     // channel 1 (right/bottom) display color
   private int frameXPos_ = 100;
   private int frameYPos_ = 100;
   private Timer timer_;
   private double interval_ = 30;
   private static final String ACQNAME = "Split View";
   public static final String LR = "lr";
   public static final String TB = "tb";
   private static final String TOPLEFTCOLOR = "TopLeftColor";
   private static final String BOTTOMRIGHTCOLOR = "BottomRightColor";
   private static final String ORIENTATION = "Orientation";
   private static final String FRAMEXPOS = "FRAMEXPOS";
   private static final String FRAMEYPOS = "FRAMEYPOS";
   private boolean autoShutterOrg_;
   private String shutterLabel_;
   private boolean shutterOrg_;
   private boolean appliedToMDA_ = false;
   private SplitViewProcessor mmImageProcessor_;

   /**
    * Builds the frame, restores persisted settings (colors, orientation,
    * window position) and wires up the live-mode timer.
    *
    * @param gui the Micro-Manager script interface
    * @throws Exception if the GUI cannot be initialized
    */
   public SplitViewFrame(ScriptInterface gui) throws Exception {
      gui_ = gui;
      dGui_ = (DeviceControlGUI) gui_;
      core_ = gui_.getMMCore();
      nf_ = NumberFormat.getInstance();
      prefs_ = Preferences.userNodeForPackage(this.getClass());

      col1_ = new Color(prefs_.getInt(TOPLEFTCOLOR, Color.red.getRGB()));
      col2_ = new Color(prefs_.getInt(BOTTOMRIGHTCOLOR, Color.green.getRGB()));
      orientation_ = prefs_.get(ORIENTATION, LR);

      // initialize timer
      // TODO: Replace with Sequence-based live mode
      interval_ = 30;
      ActionListener timerHandler = new ActionListener() {

         @Override
         public void actionPerformed(ActionEvent evt) {
            calculateSize();
            addSnapToImage();
         }
      };
      timer_ = new Timer((int) interval_, timerHandler);
      timer_.stop();

      frameXPos_ = prefs_.getInt(FRAMEXPOS, frameXPos_);
      frameYPos_ = prefs_.getInt(FRAMEYPOS, frameYPos_);

      Font buttonFont = new Font("Arial", Font.BOLD, 10);

      initComponents();

      setLocation(frameXPos_, frameYPos_);

      setBackground(gui_.getBackgroundColor());

      Dimension buttonSize = new Dimension(120, 20);

      lrRadioButton.setSelected(orientation_.equals(LR));
      if (orientation_.equals(LR)) {
         topLeftColorButton.setText("Left Color");
         bottomRightColorButton.setText("Right Color");
      }
      tbRadioButton.setSelected(orientation_.equals(TB));
      if (orientation_.equals(TB)) {
         topLeftColorButton.setText("Top Color");
         bottomRightColorButton.setText("Bottom Color");
      }

      topLeftColorButton.setForeground(col1_);
      topLeftColorButton.setPreferredSize(buttonSize);
      bottomRightColorButton.setForeground(col2_);
      bottomRightColorButton.setPreferredSize(buttonSize);

      liveButton.setIconTextGap(6);
      liveButton.setFont(buttonFont);
      liveButton.setIcon(SwingResourceManager.getIcon(MMStudioMainFrame.class,
            "/org/micromanager/icons/camera_go.png"));
      liveButton.setText("Live");

      snapButton.setIconTextGap(6);
      snapButton.setText("Snap");
      snapButton.setIcon(SwingResourceManager.getIcon(SplitView.class,
            "/org/micromanager/icons/camera.png"));
      snapButton.setFont(buttonFont);
      snapButton.setToolTipText("Snap single image");
   }

   /** Snaps a single image, opening the acquisition window first if needed. */
   private void doSnap() {
      calculateSize();
      if (!gui_.acquisitionExists(ACQNAME)) {
         try {
            openAcq();
         } catch (MMScriptException ex) {
            ReportingUtils.showError(ex, "Failed to open acquisition Window");
         }
      }
      if (gui_.acquisitionExists(ACQNAME)) {
         addSnapToImage();
      }
   }

   /**
    * Starts or stops timer-driven live mode. On start, auto-shutter is
    * disabled (and the shutter opened if auto-shutter was on); on stop, the
    * original shutter state is restored.
    *
    * @param enable true to start live mode, false to stop
    */
   private void enableLiveMode(boolean enable) {
      try {
         if (enable) {
            if (timer_.isRunning()) {
               return;
            }
            if (!gui_.acquisitionExists(ACQNAME)) {
               try {
                  openAcq();
               } catch (MMScriptException ex) {
                  ReportingUtils.showError(ex, "Failed to open acquisition Window");
               }
            }
            // turn off auto shutter and open the shutter
            autoShutterOrg_ = core_.getAutoShutter();
            shutterLabel_ = core_.getShutterDevice();
            if (shutterLabel_.length() > 0) {
               shutterOrg_ = core_.getShutterOpen();
            }
            core_.setAutoShutter(false);

            // only open the shutter when we have one and the Auto shutter
            // checkbox was checked
            if ((shutterLabel_.length() > 0) && autoShutterOrg_) {
               core_.setShutterOpen(true);
            }
            timer_.start();
            liveButton.setText("Stop");
         } else {
            if (!timer_.isRunning()) {
               return;
            }
            timer_.stop();

            // add metadata
            //addMetaData ();
            // save window position since it is not saved on close
            // savePosition();

            // restore auto shutter and close the shutter
            if (shutterLabel_.length() > 0) {
               core_.setShutterOpen(shutterOrg_);
            }
            core_.setAutoShutter(autoShutterOrg_);
            liveButton.setText("Live");
         }
      } catch (Exception err) {
         ReportingUtils.showError(err);
      }
   }

   /** Re-reads the camera geometry and recomputes the half-image size. */
   private void calculateSize() {
      imgDepth_ = core_.getBytesPerPixel();
      width_ = (int) core_.getImageWidth();
      height_ = (int) core_.getImageHeight();
      newWidth_ = calculateWidth(width_);
      newHeight_ = calculateHeight(height_);
   }

   /**
    * Width of a single half-image: halved for left-right splits, unchanged
    * for top-bottom splits. Resets an invalid orientation to LR.
    */
   public int calculateWidth(int width) {
      int newWidth = width;
      if (!orientation_.equals(LR) && !orientation_.equals(TB)) {
         orientation_ = LR;
      }
      if (orientation_.equals(LR)) {
         newWidth = width / 2;
      } else if (orientation_.equals(TB)) {
         newWidth = width;
      }
      return newWidth;
   }

   /**
    * Height of a single half-image: unchanged for left-right splits, halved
    * for top-bottom splits. Resets an invalid orientation to LR.
    */
   public int calculateHeight(int height) {
      int newHeight = height;
      if (!orientation_.equals(LR) && !orientation_.equals(TB)) {
         orientation_ = LR;
      }
      if (orientation_.equals(LR)) {
         newHeight = height;
      } else if (orientation_.equals(TB)) {
         newHeight = height / 2;
      }
      return newHeight;
   }

   /** @return the current split orientation, {@link #LR} or {@link #TB} */
   public String getOrientation() {
      return orientation_;
   }

   /**
    * Opens the two-channel acquisition window. Channel 0 is the left/top
    * half (col1_), channel 1 the right/bottom half (col2_).
    */
   private void openAcq() throws MMScriptException {
      gui_.openAcquisition(ACQNAME, "", 1, 2, 1);
      gui_.initializeAcquisition(ACQNAME, newWidth_, newHeight_, (int) imgDepth_);
      gui_.getAcquisition(ACQNAME).promptToSave(false);
      gui_.setChannelColor(ACQNAME, 0, col1_);
      gui_.setChannelColor(ACQNAME, 1, col2_);
      if (orientation_.equals(LR)) {
         gui_.setChannelName(ACQNAME, 0, "Left");
         gui_.setChannelName(ACQNAME, 1, "Right");
      } else {
         gui_.setChannelName(ACQNAME, 0, "Top");
         gui_.setChannelName(ACQNAME, 1, "Bottom");
      }
   }

   /**
    * Snaps one camera image, splits it according to the current orientation,
    * and adds the two halves as channels 0 and 1 of the acquisition. Reopens
    * the acquisition when its geometry no longer matches the camera.
    */
   private void addSnapToImage() {
      Object img;
      ImageProcessor tmpImg;
      try {
         core_.snapImage();
         img = core_.getImage();
         if (imgDepth_ == 1) {
            tmpImg = new ByteProcessor(width_, height_);
         } else if (imgDepth_ == 2) {
            tmpImg = new ShortProcessor(width_, height_);
         } else // TODO throw error
         {
            return;
         }
         tmpImg.setPixels(img);

         if (!gui_.acquisitionExists(ACQNAME)) {
            enableLiveMode(false);
            return;
         } else if (gui_.getAcquisitionImageHeight(ACQNAME) != newHeight_
               || gui_.getAcquisitionImageWidth(ACQNAME) != newWidth_
               || gui_.getAcquisitionImageByteDepth(ACQNAME) != imgDepth_) {
            gui_.getAcquisition(ACQNAME).closeImageWindow();
            gui_.closeAcquisition(ACQNAME);
            openAcq();
         }

         tmpImg.setRoi(0, 0, newWidth_, newHeight_);
         // first channel
         gui_.addImage(ACQNAME, tmpImg.crop().getPixels(), 0, 0, 0);
         // second channel
         if (orientation_.equals(LR)) {
            tmpImg.setRoi(newWidth_, 0, newWidth_, height_);
         } else if (orientation_.equals(TB)) {
            tmpImg.setRoi(0, newHeight_, newWidth_, newHeight_);
         }
         gui_.addImage(ACQNAME, tmpImg.crop().getPixels(), 0, 1, 0);
      } catch (Exception e) {
         ReportingUtils.showError(e);
      }
   }

   /** Persists the window position. */
   public void safePrefs() {
      prefs_.putInt(FRAMEXPOS, this.getX());
      prefs_.putInt(FRAMEYPOS, this.getY());
   }

   /** This method is called from within the constructor to
    * initialize the form.
    * WARNING: Do NOT modify this code. The content of this method is
    * always regenerated by the Form Editor.
    */
   @SuppressWarnings("unchecked")
   // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
   private void initComponents() {

      buttonGroup1 = new javax.swing.ButtonGroup();
      buttonGroup2 = new javax.swing.ButtonGroup();
      buttonGroup3 = new javax.swing.ButtonGroup();
      lrRadioButton = new javax.swing.JRadioButton();
      tbRadioButton = new javax.swing.JRadioButton();
      topLeftColorButton = new javax.swing.JButton();
      bottomRightColorButton = new javax.swing.JButton();
      snapButton = new javax.swing.JButton();
      liveButton = new javax.swing.JButton();
      applyToMDACheckBox_ = new javax.swing.JCheckBox();

      setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE);
      addWindowListener(new java.awt.event.WindowAdapter() {
         public void windowClosed(java.awt.event.WindowEvent evt) {
            formWindowClosed(evt);
         }
      });

      buttonGroup1.add(lrRadioButton);
      lrRadioButton.setText("Left-Right Split");
      lrRadioButton.addActionListener(new java.awt.event.ActionListener() {
         public void actionPerformed(java.awt.event.ActionEvent evt) {
            lrRadioButtonActionPerformed(evt);
         }
      });

      buttonGroup1.add(tbRadioButton);
      tbRadioButton.setText("Top-Bottom Split");
      tbRadioButton.addActionListener(new java.awt.event.ActionListener() {
         public void actionPerformed(java.awt.event.ActionEvent evt) {
            tbRadioButtonActionPerformed(evt);
         }
      });

      topLeftColorButton.setText("Left Color");
      topLeftColorButton.addActionListener(new java.awt.event.ActionListener() {
         public void actionPerformed(java.awt.event.ActionEvent evt) {
            topLeftColorButtonActionPerformed(evt);
         }
      });

      bottomRightColorButton.setText("Right Color");
      bottomRightColorButton.addActionListener(new java.awt.event.ActionListener() {
         public void actionPerformed(java.awt.event.ActionEvent evt) {
            bottomRightColorButtonActionPerformed(evt);
         }
      });

      snapButton.setText("Snap");
      snapButton.addActionListener(new java.awt.event.ActionListener() {
         public void actionPerformed(java.awt.event.ActionEvent evt) {
            snapButtonActionPerformed(evt);
         }
      });

      liveButton.setText("Live");
      liveButton.addActionListener(new java.awt.event.ActionListener() {
         public void actionPerformed(java.awt.event.ActionEvent evt) {
            liveButtonActionPerformed(evt);
         }
      });

      applyToMDACheckBox_.setText("Apply to Acquisition");
      applyToMDACheckBox_.addChangeListener(new javax.swing.event.ChangeListener() {
         public void stateChanged(javax.swing.event.ChangeEvent evt) {
            applyToMDACheckBox_StateChanged(evt);
         }
      });

      org.jdesktop.layout.GroupLayout layout = new org.jdesktop.layout.GroupLayout(getContentPane());
      getContentPane().setLayout(layout);
      layout.setHorizontalGroup(
         layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
         .add(layout.createSequentialGroup()
            .addContainerGap()
            .add(applyToMDACheckBox_)
            .addContainerGap(135, Short.MAX_VALUE))
         .add(layout.createSequentialGroup()
            .add(layout.createParallelGroup(org.jdesktop.layout.GroupLayout.TRAILING)
               .add(org.jdesktop.layout.GroupLayout.LEADING, layout.createSequentialGroup()
                  .add(9, 9, 9)
                  .add(lrRadioButton))
               .add(org.jdesktop.layout.GroupLayout.LEADING, layout.createSequentialGroup()
                  .addContainerGap()
                  .add(layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
                     .add(layout.createSequentialGroup()
                        .add(21, 21, 21)
                        .add(snapButton)
                        .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED))
                     .add(topLeftColorButton, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, 121, Short.MAX_VALUE))))
            .add(layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
               .add(layout.createSequentialGroup()
                  .add(18, 18, 18)
                  .add(tbRadioButton)
                  .addContainerGap())
               .add(org.jdesktop.layout.GroupLayout.TRAILING, layout.createSequentialGroup()
                  .add(layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
                     .add(layout.createSequentialGroup()
                        .add(21, 21, 21)
                        .add(liveButton))
                     .add(bottomRightColorButton, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 130, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE))
                  .addContainerGap())))
      );
      layout.setVerticalGroup(
         layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
         .add(layout.createSequentialGroup()
            .add(layout.createParallelGroup(org.jdesktop.layout.GroupLayout.BASELINE)
               .add(lrRadioButton)
               .add(tbRadioButton))
            .addPreferredGap(org.jdesktop.layout.LayoutStyle.UNRELATED)
            .add(layout.createParallelGroup(org.jdesktop.layout.GroupLayout.BASELINE)
               .add(topLeftColorButton)
               .add(bottomRightColorButton))
            .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED)
            .add(layout.createParallelGroup(org.jdesktop.layout.GroupLayout.BASELINE)
               .add(liveButton)
               .add(snapButton))
            .addPreferredGap(org.jdesktop.layout.LayoutStyle.UNRELATED)
            .add(applyToMDACheckBox_)
            .addContainerGap(org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
      );

      pack();
   }// </editor-fold>//GEN-END:initComponents

   private void lrRadioButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_lrRadioButtonActionPerformed
      orientation_ = LR;
      prefs_.put(ORIENTATION, LR);
      topLeftColorButton.setText("Left Color");
      bottomRightColorButton.setText("Right Color");
   }//GEN-LAST:event_lrRadioButtonActionPerformed

   private void tbRadioButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_tbRadioButtonActionPerformed
      orientation_ = TB;
      prefs_.put(ORIENTATION, TB);
      topLeftColorButton.setText("Top Color");
      bottomRightColorButton.setText("Bottom Color");
   }//GEN-LAST:event_tbRadioButtonActionPerformed

   private void snapButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_snapButtonActionPerformed
      doSnap();
   }//GEN-LAST:event_snapButtonActionPerformed

   private void formWindowClosed(java.awt.event.WindowEvent evt) {//GEN-FIRST:event_formWindowClosed
      safePrefs();
   }//GEN-LAST:event_formWindowClosed

   private void liveButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_liveButtonActionPerformed
      if (timer_.isRunning()) {
         enableLiveMode(false);
         liveButton.setIcon(SwingResourceManager.getIcon(MMStudioMainFrame.class,
               "/org/micromanager/icons/camera_go.png"));
      } else {
         timer_.setDelay((int) interval_);
         enableLiveMode(true);
         liveButton.setIcon(SwingResourceManager.getIcon(MMStudioMainFrame.class,
               "/org/micromanager/icons/cancel.png"));
      }
   }//GEN-LAST:event_liveButtonActionPerformed

   private void topLeftColorButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_topLeftColorButtonActionPerformed
      col1_ = JColorChooser.showDialog(getContentPane(), "Choose left/top color", col1_);
      topLeftColorButton.setForeground(col1_);
      prefs_.putInt(TOPLEFTCOLOR, col1_.getRGB());
      try {
         if (gui_.acquisitionExists(ACQNAME)) {
            gui_.setChannelColor(ACQNAME, 0, col1_);
         }
      } catch (MMScriptException ex) {
         ReportingUtils.logError(ex);
      }
   }//GEN-LAST:event_topLeftColorButtonActionPerformed

   private void bottomRightColorButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bottomRightColorButtonActionPerformed
      col2_ = JColorChooser.showDialog(getContentPane(), "Choose right/bottom color", col2_);
      bottomRightColorButton.setForeground(col2_);
      prefs_.putInt(BOTTOMRIGHTCOLOR, col2_.getRGB());
      try {
         if (gui_.acquisitionExists(ACQNAME)) {
            // FIX: col2_ belongs to channel 1 (right/bottom); this previously
            // overwrote channel 0's color (copy-paste from the top-left handler)
            gui_.setChannelColor(ACQNAME, 1, col2_);
         }
      } catch (MMScriptException ex) {
         ReportingUtils.logError(ex);
      }
   }//GEN-LAST:event_bottomRightColorButtonActionPerformed

   /**
    * Returns the two channel colors as RGB ints.
    * @return JSON array [col1 RGB, col2 RGB]
    * @throws JSONException on JSON construction failure
    */
   public JSONArray getColors() throws JSONException {
      JSONArray myColors = new JSONArray();
      myColors.put(0, (Object) col1_.getRGB());
      myColors.put(1, (Object) col2_.getRGB());
      return myColors;
   }

   private void applyToMDACheckBox_StateChanged(javax.swing.event.ChangeEvent evt) {//GEN-FIRST:event_applyToMDACheckBox_StateChanged
      Object source = evt.getSource();
      if (source != applyToMDACheckBox_)
         return;
      // attach/detach the split-view processor to the acquisition engine,
      // guarding with appliedToMDA_ since ChangeEvents fire repeatedly
      if (applyToMDACheckBox_.isSelected() && !appliedToMDA_) {
         mmImageProcessor_ = new SplitViewProcessor(this);
         mmImageProcessor_.setName("SplitView");
         gui_.getAcquisitionEngine().addImageProcessor(mmImageProcessor_);
         appliedToMDA_ = true;
      } else if (!applyToMDACheckBox_.isSelected() && appliedToMDA_) {
         if (mmImageProcessor_ != null)
            gui_.getAcquisitionEngine().removeImageProcessor(mmImageProcessor_);
         appliedToMDA_ = false;
      }
   }//GEN-LAST:event_applyToMDACheckBox_StateChanged

   // Variables declaration - do not modify//GEN-BEGIN:variables
   private javax.swing.JCheckBox applyToMDACheckBox_;
   private javax.swing.JButton bottomRightColorButton;
   private javax.swing.ButtonGroup buttonGroup1;
   private javax.swing.ButtonGroup buttonGroup2;
   private javax.swing.ButtonGroup buttonGroup3;
   private javax.swing.JButton liveButton;
   private javax.swing.JRadioButton lrRadioButton;
   private javax.swing.JButton snapButton;
   private javax.swing.JRadioButton tbRadioButton;
   private javax.swing.JButton topLeftColorButton;
   // End of variables declaration//GEN-END:variables
}
package StevenDimDoors.mod_pocketDim.helpers;

import java.io.File;
import java.io.FileOutputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Random;
import java.util.regex.Pattern;

import net.minecraft.block.Block;
import net.minecraft.block.BlockContainer;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.WeightedRandom;
import net.minecraft.world.World;
import StevenDimDoors.mod_pocketDim.DDProperties;
import StevenDimDoors.mod_pocketDim.DimData;
import StevenDimDoors.mod_pocketDim.DungeonGenerator;
import StevenDimDoors.mod_pocketDim.LinkData;
import StevenDimDoors.mod_pocketDim.mod_pocketDim;
import StevenDimDoors.mod_pocketDim.helpers.jnbt.ByteArrayTag;
import StevenDimDoors.mod_pocketDim.helpers.jnbt.CompoundTag;
import StevenDimDoors.mod_pocketDim.helpers.jnbt.ListTag;
import StevenDimDoors.mod_pocketDim.helpers.jnbt.NBTOutputStream;
import StevenDimDoors.mod_pocketDim.helpers.jnbt.ShortTag;
import StevenDimDoors.mod_pocketDim.helpers.jnbt.Tag;
import StevenDimDoors.mod_pocketDim.items.itemDimDoor;
import StevenDimDoors.mod_pocketDim.util.WeightedContainer;

/**
 * Singleton that manages dungeon schematics: registration of the built-in
 * dungeon set, import/validation of user-supplied custom schematics, weighted
 * random selection of the next dungeon in a pocket-dimension chain, and export
 * of world regions back to .schematic files.
 */
public class DungeonHelper
{
	private static DungeonHelper instance = null;
	private static DDProperties properties = null;

	public static final Pattern SchematicNamePattern = Pattern.compile("[A-Za-z0-9_\\-]+");
	public static final Pattern DungeonNamePattern = Pattern.compile("[A-Za-z0-9\\-]+");

	public static final String SCHEMATIC_FILE_EXTENSION = ".schematic";
	private static final int DEFAULT_DUNGEON_WEIGHT = 100;
	public static final int MAX_DUNGEON_WEIGHT = 10000; //Used to prevent overflows and math breaking down
	private static final int MAX_EXPORT_RADIUS = 50;
	public static final int FABRIC_OF_REALITY_EXPORT_ID = 1973;
	public static final int PERMAFABRIC_EXPORT_ID = 220;

	private static final String HUB_DUNGEON_TYPE = "Hub";
	private static final String TRAP_DUNGEON_TYPE = "Trap";
	private static final String SIMPLE_HALL_DUNGEON_TYPE = "SimpleHall";
	private static final String COMPLEX_HALL_DUNGEON_TYPE = "ComplexHall";
	private static final String EXIT_DUNGEON_TYPE = "Exit";
	private static final String DEAD_END_DUNGEON_TYPE = "DeadEnd";
	private static final String MAZE_DUNGEON_TYPE = "Maze";

	//The list of dungeon types will be kept as an array for now. If we allow new
	//dungeon types in the future, then this can be changed to an ArrayList.
	private static final String[] DUNGEON_TYPES = new String[] {
		HUB_DUNGEON_TYPE,
		TRAP_DUNGEON_TYPE,
		SIMPLE_HALL_DUNGEON_TYPE,
		COMPLEX_HALL_DUNGEON_TYPE,
		EXIT_DUNGEON_TYPE,
		DEAD_END_DUNGEON_TYPE,
		MAZE_DUNGEON_TYPE
	};

	private Random rand = new Random();
	// Maps pocket-dimension IDs to the link data of registered custom dungeons.
	private HashMap<Integer, LinkData> customDungeonStatus = new HashMap<Integer, LinkData>();

	public ArrayList<DungeonGenerator> customDungeons = new ArrayList<DungeonGenerator>();
	public ArrayList<DungeonGenerator> registeredDungeons = new ArrayList<DungeonGenerator>();
	private ArrayList<DungeonGenerator> simpleHalls = new ArrayList<DungeonGenerator>();
	private ArrayList<DungeonGenerator> complexHalls = new ArrayList<DungeonGenerator>();
	private ArrayList<DungeonGenerator> deadEnds = new ArrayList<DungeonGenerator>();
	private ArrayList<DungeonGenerator> hubs = new ArrayList<DungeonGenerator>();
	private ArrayList<DungeonGenerator> mazes = new ArrayList<DungeonGenerator>();
	private ArrayList<DungeonGenerator> pistonTraps = new ArrayList<DungeonGenerator>();
	private ArrayList<DungeonGenerator> exits = new ArrayList<DungeonGenerator>();

	// Block IDs whose metadata must be transformed when a schematic is rotated/flipped.
	public ArrayList<Integer> metadataFlipList = new ArrayList<Integer>();
	public ArrayList<Integer> metadataNextList = new ArrayList<Integer>();

	public DungeonGenerator defaultBreak = new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/somethingBroke.schematic", true);
	public DungeonGenerator defaultUp = new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/simpleStairsUp.schematic", true);

	private HashSet<String> dungeonTypeChecker;
	private HashMap<String, ArrayList<DungeonGenerator>> dungeonTypeMapping;

	private DungeonHelper()
	{
		//Load the dungeon type checker with the list of all types in lowercase.
		//Capitalization matters for matching in a hash set.
		dungeonTypeChecker = new HashSet<String>();
		for (String dungeonType : DUNGEON_TYPES)
		{
			dungeonTypeChecker.add(dungeonType.toLowerCase());
		}

		//Add all the basic dungeon types to dungeonTypeMapping
		//Dungeon type names must be passed in lowercase to make matching easier.
		dungeonTypeMapping = new HashMap<String, ArrayList<DungeonGenerator>>();
		dungeonTypeMapping.put(SIMPLE_HALL_DUNGEON_TYPE.toLowerCase(), simpleHalls);
		dungeonTypeMapping.put(COMPLEX_HALL_DUNGEON_TYPE.toLowerCase(), complexHalls);
		dungeonTypeMapping.put(HUB_DUNGEON_TYPE.toLowerCase(), hubs);
		dungeonTypeMapping.put(EXIT_DUNGEON_TYPE.toLowerCase(), exits);
		dungeonTypeMapping.put(DEAD_END_DUNGEON_TYPE.toLowerCase(), deadEnds);
		dungeonTypeMapping.put(MAZE_DUNGEON_TYPE.toLowerCase(), mazes);
		dungeonTypeMapping.put(TRAP_DUNGEON_TYPE.toLowerCase(), pistonTraps);

		//Load our reference to the DDProperties singleton
		if (properties == null)
			properties = DDProperties.instance();

		registerCustomDungeons();
	}

	/** Sets up the custom schematic directory (with its help file), then registers flip blocks, custom dungeons, and the base dungeon set. */
	private void registerCustomDungeons()
	{
		File file = new File(properties.CustomSchematicDirectory);
		if (file.exists() || file.mkdir())
		{
			copyfile.copyFile("/mods/DimDoors/text/How_to_add_dungeons.txt", file.getAbsolutePath() + "/How_to_add_dungeons.txt");
		}
		registerFlipBlocks();
		importCustomDungeons(properties.CustomSchematicDirectory);
		registerBaseDungeons();
	}

	/**
	 * Creates the singleton instance. Must be called exactly once.
	 * @throws IllegalStateException if called more than once
	 */
	public static DungeonHelper initialize()
	{
		if (instance == null)
		{
			instance = new DungeonHelper();
		}
		else
		{
			throw new IllegalStateException("Cannot initialize DungeonHelper twice");
		}
		return instance;
	}

	/**
	 * Returns the singleton instance.
	 * @throws IllegalStateException if {@link #initialize()} has not run yet
	 */
	public static DungeonHelper instance()
	{
		if (instance == null)
		{
			//This is to prevent some frustrating bugs that could arise when classes
			//are loaded in the wrong order. Trust me, I had to squash a few...
			throw new IllegalStateException("Instance of DungeonHelper requested before initialization");
		}
		return instance;
	}

	/**
	 * Places a Warp Door at (x, y, z) linked to a fresh pocket dimension and
	 * registers that pocket as a custom dungeon. Returns the created link.
	 */
	public LinkData createCustomDungeonDoor(World world, int x, int y, int z)
	{
		//Create a link above the specified position. Link to a new pocket dimension.
		LinkData link = new LinkData(world.provider.dimensionId, 0, x, y + 1, z, x, y + 1, z, true, 3);
		link = dimHelper.instance.createPocket(link, true, false);

		//Place a Warp Door linked to that pocket
		itemDimDoor.placeDoorBlock(world, x, y, z, 3, mod_pocketDim.ExitDoor);

		//Register the pocket as a custom dungeon
		customDungeonStatus.put(link.destDimID,
			dimHelper.instance.getLinkDataFromCoords(link.destXCoord, link.destYCoord, link.destZCoord, link.destDimID));
		return link;
	}

	/** Returns whether the given dimension ID belongs to a registered custom dungeon. */
	public boolean isCustomDungeon(int dimensionID)
	{
		return customDungeonStatus.containsKey(dimensionID);
	}

	/** Returns whether {@code type} (case-insensitive) is a known dungeon type. */
	public boolean validateDungeonType(String type)
	{
		//Check if the dungeon type is valid
		return dungeonTypeChecker.contains(type.toLowerCase());
	}

	/**
	 * Validates a schematic file name of the form
	 * {@code <type>_<name>_<open|closed>[_<weight>].schematic}.
	 */
	public boolean validateSchematicName(String name)
	{
		String[] dungeonData;

		if (!name.endsWith(SCHEMATIC_FILE_EXTENSION))
			return false;
		dungeonData = name.substring(0, name.length() - SCHEMATIC_FILE_EXTENSION.length()).split("_");

		//Check for a valid number of parts
		if (dungeonData.length < 3 || dungeonData.length > 4)
			return false;
		//Check if the dungeon type is valid
		if (!dungeonTypeChecker.contains(dungeonData[0].toLowerCase()))
			return false;
		//Check if the name is valid
		if (!SchematicNamePattern.matcher(dungeonData[1]).matches())
			return false;
		//Check if the open/closed flag is present
		if (!dungeonData[2].equalsIgnoreCase("open") && !dungeonData[2].equalsIgnoreCase("closed"))
			return false;
		//If the weight is present, check that it is valid
		if (dungeonData.length == 4)
		{
			try
			{
				int weight = Integer.parseInt(dungeonData[3]);
				if (weight < 0 || weight > MAX_DUNGEON_WEIGHT)
					return false;
			}
			catch (NumberFormatException e)
			{
				//Not a number
				return false;
			}
		}
		return true;
	}

	/**
	 * Registers one custom schematic file. A parseable name places the dungeon in
	 * its type list and the generation pool; otherwise it is kept only in
	 * {@link #customDungeons} with default settings.
	 */
	public void registerCustomDungeon(File schematicFile)
	{
		String name = schematicFile.getName();
		String path = schematicFile.getAbsolutePath();
		try
		{
			if (validateSchematicName(name))
			{
				//Strip off the file extension while splitting the file name
				String[] dungeonData = name.substring(0, name.length() - SCHEMATIC_FILE_EXTENSION.length()).split("_");
				String dungeonType = dungeonData[0].toLowerCase();
				boolean isOpen = dungeonData[2].equalsIgnoreCase("open");
				int weight = (dungeonData.length == 4) ? Integer.parseInt(dungeonData[3]) : DEFAULT_DUNGEON_WEIGHT;

				//Add this custom dungeon to the list corresponding to its type
				DungeonGenerator generator = new DungeonGenerator(weight, path, isOpen);
				dungeonTypeMapping.get(dungeonType).add(generator);
				registeredDungeons.add(generator);
				customDungeons.add(generator);
				System.out.println("Imported " + name);
			}
			else
			{
				System.out.println("Could not parse dungeon filename, not adding dungeon to generation lists");
				customDungeons.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, path, true));
				System.out.println("Imported " + name);
			}
		}
		catch (Exception e)
		{
			e.printStackTrace();
			System.out.println("Failed to import " + name);
		}
	}

	/** Imports every .schematic file found directly inside {@code path}. */
	public void importCustomDungeons(String path)
	{
		File directory = new File(path);
		File[] schematicNames = directory.listFiles();

		if (schematicNames != null)
		{
			for (File schematicFile : schematicNames)
			{
				if (schematicFile.getName().endsWith(SCHEMATIC_FILE_EXTENSION))
				{
					registerCustomDungeon(schematicFile);
				}
			}
		}
	}

	/** Fills the metadata flip/next lists with the block IDs whose orientation data must be adjusted. */
	public void registerFlipBlocks()
	{
		metadataFlipList.add(Block.dispenser.blockID);
		metadataFlipList.add(Block.stairsStoneBrick.blockID);
		metadataFlipList.add(Block.lever.blockID);
		metadataFlipList.add(Block.stoneButton.blockID);
		metadataFlipList.add(Block.redstoneRepeaterIdle.blockID);
		metadataFlipList.add(Block.redstoneRepeaterActive.blockID);
		metadataFlipList.add(Block.tripWireSource.blockID);
		metadataFlipList.add(Block.torchWood.blockID);
		metadataFlipList.add(Block.torchRedstoneIdle.blockID);
		metadataFlipList.add(Block.torchRedstoneActive.blockID);
		metadataFlipList.add(Block.doorIron.blockID);
		metadataFlipList.add(Block.doorWood.blockID);
		metadataFlipList.add(Block.pistonBase.blockID);
		metadataFlipList.add(Block.pistonStickyBase.blockID);
		metadataFlipList.add(Block.pistonExtension.blockID);
		metadataFlipList.add(Block.redstoneComparatorIdle.blockID);
		metadataFlipList.add(Block.redstoneComparatorActive.blockID);
		metadataFlipList.add(Block.signPost.blockID);
		metadataFlipList.add(Block.signWall.blockID);
		metadataFlipList.add(Block.skull.blockID);
		metadataFlipList.add(Block.ladder.blockID);
		metadataFlipList.add(Block.vine.blockID);
		metadataFlipList.add(Block.anvil.blockID);
		metadataFlipList.add(Block.chest.blockID);
		metadataFlipList.add(Block.chestTrapped.blockID);
		metadataFlipList.add(Block.hopperBlock.blockID);
		//FIX: Block.stairsNetherBrick.blockID was added twice here; the duplicate was removed.
		metadataFlipList.add(Block.stairsNetherBrick.blockID);
		metadataFlipList.add(Block.stairsCobblestone.blockID);
		metadataFlipList.add(Block.stairsNetherQuartz.blockID);
		metadataFlipList.add(Block.stairsSandStone.blockID);

		metadataNextList.add(Block.redstoneRepeaterIdle.blockID);
		metadataNextList.add(Block.redstoneRepeaterActive.blockID);
	}

	/** Registers the built-in dungeon schematics and adds them to the generation pool. */
	public void registerBaseDungeons()
	{
		hubs.add(new DungeonGenerator(2 * DEFAULT_DUNGEON_WEIGHT, "/schematics/4WayBasicHall.schematic", false));
		hubs.add(new DungeonGenerator(2 * DEFAULT_DUNGEON_WEIGHT, "/schematics/4WayHallExit.schematic", false));
		hubs.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/doorTotemRuins.schematic", true));
		hubs.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/hallwayTrapRooms1.schematic", false));
		hubs.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/longDoorHallway.schematic", false));
		hubs.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/smallRotundaWithExit.schematic", false));
		hubs.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/fortRuins.schematic", true));

		simpleHalls.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/collapsedSingleTunnel1.schematic", false));
		simpleHalls.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/singleStraightHall1.schematic", false));
		simpleHalls.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/smallBranchWithExit.schematic", false));
		simpleHalls.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/smallSimpleLeft.schematic", false));
		simpleHalls.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/smallSimpleRight.schematic", false));
		simpleHalls.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/simpleStairsUp.schematic", false));
		simpleHalls.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/simpleStairsDown.schematic", false));
		simpleHalls.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/simpleSmallT1.schematic", false));

		complexHalls.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/tntPuzzleTrap.schematic", false));
		complexHalls.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/brokenPillarsO.schematic", true));
		complexHalls.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/buggyTopEntry1.schematic", true));
		complexHalls.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/exitRuinsWithHiddenDoor.schematic", true));
		complexHalls.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/hallwayHiddenTreasure.schematic", false));
		complexHalls.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/mediumPillarStairs.schematic", true));
		complexHalls.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/ruinsO.schematic", true));
		complexHalls.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/pitStairs.schematic", true));

		deadEnds.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/azersDungeonO.schematic", false));
		deadEnds.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/diamondTowerTemple1.schematic", true));
		deadEnds.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/fallingTrapO.schematic", true));
		deadEnds.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/hiddenStaircaseO.schematic", true));
		deadEnds.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/lavaTrapO.schematic", true));
		deadEnds.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/randomTree.schematic", true));
		deadEnds.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/smallHiddenTowerO.schematic", true));
		deadEnds.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/smallSilverfishRoom.schematic", false));
		deadEnds.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/tntTrapO.schematic", true));
		deadEnds.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/smallDesert.schematic", true));
		deadEnds.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/smallPond.schematic", true));

		pistonTraps.add(new DungeonGenerator(2 * DEFAULT_DUNGEON_WEIGHT, "/schematics/hallwayPitFallTrap.schematic", false));
		pistonTraps.add(new DungeonGenerator(2 * DEFAULT_DUNGEON_WEIGHT, "/schematics/pistonFloorHall.schematic", false));
		pistonTraps.add(new DungeonGenerator(2 * DEFAULT_DUNGEON_WEIGHT, "/schematics/wallFallcomboPistonHall.schematic", false));
		pistonTraps.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/fakeTNTTrap.schematic", false));
		pistonTraps.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/pistonFallRuins.schematic", false));
		pistonTraps.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/pistonSmasherHall.schematic", false));
		pistonTraps.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/simpleDropHall.schematic", false));
		pistonTraps.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/fallingTNThall.schematic", false));
		pistonTraps.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/lavaPyramid.schematic", true));

		mazes.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/smallMaze1.schematic", false));
		mazes.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/smallMultilevelMaze.schematic", false));

		exits.add(new DungeonGenerator(2 * DEFAULT_DUNGEON_WEIGHT, "/schematics/lockingExitHall.schematic", false));
		exits.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/exitCube.schematic", true));
		exits.add(new DungeonGenerator(DEFAULT_DUNGEON_WEIGHT, "/schematics/smallExitPrison.schematic", true));

		registeredDungeons.addAll(simpleHalls);
		registeredDungeons.addAll(exits);
		registeredDungeons.addAll(pistonTraps);
		registeredDungeons.addAll(mazes);
		registeredDungeons.addAll(deadEnds);
		registeredDungeons.addAll(complexHalls);
		registeredDungeons.addAll(hubs);
	}

	/**
	 * Exports the smallest bounding box of non-air blocks within
	 * {@link #MAX_EXPORT_RADIUS} of (centerX, centerY, centerZ) as a schematic
	 * file at {@code exportPath}. Returns true on success.
	 */
	public boolean exportDungeon(World world, int centerX, int centerY, int centerZ, String exportPath)
	{
		int xMin, yMin, zMin;
		int xMax, yMax, zMax;
		int xStart, yStart, zStart;
		int xEnd, yEnd, zEnd;

		//Find the smallest bounding box that contains all non-air blocks within a max radius around the player.
		xMax = yMax = zMax = Integer.MIN_VALUE;
		xMin = yMin = zMin = Integer.MAX_VALUE;
		xStart = centerX - MAX_EXPORT_RADIUS;
		zStart = centerZ - MAX_EXPORT_RADIUS;
		yStart = Math.max(centerY - MAX_EXPORT_RADIUS, 0);
		xEnd = centerX + MAX_EXPORT_RADIUS;
		zEnd = centerZ + MAX_EXPORT_RADIUS;
		//FIX: was "centerY - MAX_EXPORT_RADIUS", which made yEnd < yStart so the
		//scan below never ran. The upper bound must mirror xEnd/zEnd.
		yEnd = Math.min(centerY + MAX_EXPORT_RADIUS, world.getActualHeight());

		//This could be done more efficiently, but honestly, this is the simplest approach and it
		//makes it easy for us to verify that the code is correct.
		for (int x = xStart; x <= xEnd; x++)
		{
			for (int z = zStart; z <= zEnd; z++)
			{
				for (int y = yStart; y <= yEnd; y++)
				{
					if (!world.isAirBlock(x, y, z))
					{
						xMax = x > xMax ? x : xMax;
						zMax = z > zMax ? z : zMax;
						yMax = y > yMax ? y : yMax;
						xMin = x < xMin ? x : xMin;
						zMin = z < zMin ? z : zMin;
						yMin = y < yMin ? y : yMin;
					}
				}
			}
		}

		//Export all the blocks within our selected bounding box
		short width = (short) (xMax - xMin + 1);
		short height = (short) (yMax - yMin + 1);
		short length = (short) (zMax - zMin + 1);

		ArrayList<CompoundTag> tileEntities = new ArrayList<CompoundTag>();
		byte[] blocks = new byte[width * height * length];
		byte[] addBlocks = null;
		byte[] blockData = new byte[width * height * length];

		for (int x = 0; x < width; x++)
		{
			for (int z = 0; z < length; z++)
			{
				for (int y = 0; y < height; y++)
				{
					int index = y * width * length + z * width + x;
					int blockID = world.getBlockId(x + xMin, y + yMin, z + zMin);
					int metadata = world.getBlockMetadata(x + xMin, y + yMin, z + zMin);

					//Map dimensional doors to vanilla door IDs for portability
					if (blockID == properties.DimensionalDoorID)
					{
						blockID = Block.doorIron.blockID;
					}
					if (blockID == properties.WarpDoorID)
					{
						blockID = Block.doorWood.blockID;
					}
					//Map fabric of reality and permafabric blocks to standard export IDs
					if (blockID == properties.FabricBlockID)
					{
						blockID = FABRIC_OF_REALITY_EXPORT_ID;
					}
					if (blockID == properties.PermaFabricBlockID)
					{
						blockID = PERMAFABRIC_EXPORT_ID;
					}

					// Save 4096 IDs in an AddBlocks section
					if (blockID > 255)
					{
						if (addBlocks == null)
						{
							//Lazily create section
							addBlocks = new byte[(blocks.length >> 1) + 1];
						}
						addBlocks[index >> 1] = (byte) (((index & 1) == 0) ?
							addBlocks[index >> 1] & 0xF0 | (blockID >> 8) & 0xF :
							addBlocks[index >> 1] & 0xF | ((blockID >> 8) & 0xF) << 4);
					}

					blocks[index] = (byte) blockID;
					blockData[index] = (byte) metadata;

					if (Block.blocksList[blockID] instanceof BlockContainer)
					{
						//Export container information
						TileEntity container = world.getBlockTileEntity(x + xMin, y + yMin, z + zMin);
						NBTTagCompound entityData = new NBTTagCompound();
						container.writeToNBT(entityData);
						//TODO fix this
						/**
						TileEntity tileEntityBlock = world.getBlockTileEntity(x+xMin, y+yMin, z+zMin);
						NBTTagCompound tag = new NBTTagCompound();
						tileEntityBlock.writeToNBT(tag);
						CompoundTag tagC = new CompoundTag("TileEntity",Map.class.cast(tag.getTags())); // Get the list of key/values from the block
						if (tagC != null)
						{
							tileEntites.add(tagC);
						}
						**/
					}
				}
			}
		}

		HashMap<String, Tag> schematic = new HashMap<String, Tag>();
		schematic.put("Blocks", new ByteArrayTag("Blocks", blocks));
		schematic.put("Data", new ByteArrayTag("Data", blockData));
		schematic.put("Width", new ShortTag("Width", (short) width));
		schematic.put("Length", new ShortTag("Length", (short) length));
		schematic.put("Height", new ShortTag("Height", (short) height));
		//FIX: key was misspelled "TileEntites"; the schematic format (and the tag
		//name used above) is "TileEntities".
		schematic.put("TileEntities", new ListTag("TileEntities", CompoundTag.class, tileEntities));
		if (addBlocks != null)
		{
			schematic.put("AddBlocks", new ByteArrayTag("AddBlocks", addBlocks));
		}

		CompoundTag schematicTag = new CompoundTag("Schematic", schematic);
		try
		{
			NBTOutputStream stream = new NBTOutputStream(new FileOutputStream(exportPath));
			stream.writeTag(schematicTag);
			stream.close();
			return true;
		}
		catch (Exception e)
		{
			e.printStackTrace();
			return false;
		}
	}

	/**
	 * Chooses the dungeon schematic for the destination of {@code incoming},
	 * reselecting (up to 10 tries) when the weighted pick is unsuitable for the
	 * current chain depth, and stores it on the destination dimension's data.
	 */
	public void generateDungeonLink(LinkData incoming)
	{
		DungeonGenerator dungeon;
		int depth = dimHelper.instance.getDimDepth(incoming.locDimID);
		int depthWeight = rand.nextInt(depth + 2) + rand.nextInt(depth + 2) - 2;
		int count = 10;
		boolean flag = true;
		try
		{
			if (incoming.destYCoord > 15)
			{
				do
				{
					//FIX: original read "count flag = true;" which does not compile;
					//the retry budget must be decremented so the loop terminates.
					count--;
					flag = true;
					//Select a dungeon at random, taking into account its weight
					dungeon = getRandomDungeon(rand, registeredDungeons);
					if (depth <= 1)
					{
						if (rand.nextBoolean())
						{
							dungeon = complexHalls.get(rand.nextInt(complexHalls.size()));
						}
						else if (rand.nextBoolean())
						{
							dungeon = hubs.get(rand.nextInt(hubs.size()));
						}
						else if (rand.nextBoolean())
						{
							dungeon = hubs.get(rand.nextInt(hubs.size()));
						}
						else if (deadEnds.contains(dungeon) || exits.contains(dungeon))
						{
							flag = false;
						}
					}
					else if (depth <= 3 && (deadEnds.contains(dungeon) || exits.contains(dungeon) || rand.nextBoolean()))
					{
						if (rand.nextBoolean())
						{
							dungeon = hubs.get(rand.nextInt(hubs.size()));
						}
						else if (rand.nextBoolean())
						{
							dungeon = mazes.get(rand.nextInt(mazes.size()));
						}
						else if (rand.nextBoolean())
						{
							dungeon = pistonTraps.get(rand.nextInt(pistonTraps.size()));
						}
						else
						{
							flag = false;
						}
					}
					else if (rand.nextInt(3) == 0 && !complexHalls.contains(dungeon))
					{
						if (rand.nextInt(3) == 0)
						{
							dungeon = simpleHalls.get(rand.nextInt(simpleHalls.size()));
						}
						else if (rand.nextBoolean())
						{
							dungeon = pistonTraps.get(rand.nextInt(pistonTraps.size()));
						}
						else if (depth < 4)
						{
							dungeon = hubs.get(rand.nextInt(hubs.size()));
						}
					}
					else if (depthWeight - depthWeight / 2 > depth - 4 && (deadEnds.contains(dungeon) || exits.contains(dungeon)))
					{
						if (rand.nextBoolean())
						{
							dungeon = simpleHalls.get(rand.nextInt(simpleHalls.size()));
						}
						else if (rand.nextBoolean())
						{
							dungeon = complexHalls.get(rand.nextInt(complexHalls.size()));
						}
						else if (rand.nextBoolean())
						{
							dungeon = pistonTraps.get(rand.nextInt(pistonTraps.size()));
						}
						else
						{
							flag = false;
						}
					}
					else if (depthWeight > 7 && hubs.contains(dungeon))
					{
						if (rand.nextInt(12) + 5 < depthWeight)
						{
							if (rand.nextBoolean())
							{
								dungeon = exits.get(rand.nextInt(exits.size()));
							}
							else if (rand.nextBoolean())
							{
								dungeon = deadEnds.get(rand.nextInt(deadEnds.size()));
							}
							else
							{
								dungeon = pistonTraps.get(rand.nextInt(pistonTraps.size()));
							}
						}
						else
						{
							flag = false;
						}
					}
					else if (depth > 10 && hubs.contains(dungeon))
					{
						flag = false;
					}
					//Never repeat a dungeon that already appears in this chain
					if (getDungeonDataInChain(dimHelper.dimList.get(incoming.locDimID)).contains(dungeon))
					{
						flag = false;
					}
				}
				while (!flag && count > 0);
			}
			else
			{
				dungeon = defaultUp;
			}
		}
		catch (Exception e)
		{
			e.printStackTrace();
			if (registeredDungeons.size() > 0)
			{
				//Select a random dungeon
				dungeon = getRandomDungeon(rand, registeredDungeons);
			}
			else
			{
				return;
			}
		}
		dimHelper.dimList.get(incoming.destDimID).dungeonGenerator = dungeon;
	}

	/** Returns the names of all registered and custom dungeons, distinct and sorted alphabetically. */
	public Collection<String> getDungeonNames()
	{
		//Use a HashSet to guarantee that all dungeon names will be distinct.
		//This shouldn't be necessary if we keep proper lists without repetitions,
		//but it's a fool-proof workaround.
		HashSet<String> dungeonNames = new HashSet<String>();
		dungeonNames.addAll(parseDungeonNames(registeredDungeons));
		dungeonNames.addAll(parseDungeonNames(customDungeons));

		//Sort dungeon names alphabetically
		ArrayList<String> sortedNames = new ArrayList<String>(dungeonNames);
		Collections.sort(sortedNames);
		return sortedNames;
	}

	/** Extracts the bare schematic name (no directory, no extension) for each dungeon. */
	private static ArrayList<String> parseDungeonNames(ArrayList<DungeonGenerator> dungeons)
	{
		String name;
		File schematic;
		ArrayList<String> names = new ArrayList<String>(dungeons.size());

		for (DungeonGenerator dungeon : dungeons)
		{
			//Retrieve the file name and strip off the file extension
			schematic = new File(dungeon.schematicPath);
			name = schematic.getName();
			name = name.substring(0, name.length() - SCHEMATIC_FILE_EXTENSION.length());
			names.add(name);
		}
		return names;
	}

	/** Picks a dungeon from {@code dungeons} at random, weighted by each dungeon's weight; may return null. */
	private static DungeonGenerator getRandomDungeon(Random random, Collection<DungeonGenerator> dungeons)
	{
		//Use Minecraft's WeightedRandom to select our dungeon. =D
		ArrayList<WeightedContainer<DungeonGenerator>> weights =
			new ArrayList<WeightedContainer<DungeonGenerator>>(dungeons.size());
		for (DungeonGenerator dungeon : dungeons)
		{
			weights.add(new WeightedContainer<DungeonGenerator>(dungeon, dungeon.weight));
		}

		@SuppressWarnings("unchecked")
		WeightedContainer<DungeonGenerator> resultContainer =
			(WeightedContainer<DungeonGenerator>) WeightedRandom.getRandomItem(random, weights);
		return (resultContainer != null) ? resultContainer.getData() : null;
	}

	/** Collects the dungeon generators used anywhere in the chain that contains {@code dimData}. */
	public static ArrayList<DungeonGenerator> getDungeonDataInChain(DimData dimData)
	{
		DimData startingDim = dimHelper.dimList.get(dimHelper.instance.getLinkDataFromCoords(
			dimData.exitDimLink.destXCoord, dimData.exitDimLink.destYCoord,
			dimData.exitDimLink.destZCoord, dimData.exitDimLink.destDimID).destDimID);
		return getDungeonDataBelow(startingDim);
	}

	/** Recursively gathers distinct dungeon generators at and below {@code dimData} in the chain. */
	private static ArrayList<DungeonGenerator> getDungeonDataBelow(DimData dimData)
	{
		ArrayList<DungeonGenerator> dungeonData = new ArrayList<DungeonGenerator>();
		if (dimData.dungeonGenerator != null)
		{
			dungeonData.add(dimData.dungeonGenerator);
			for (LinkData link : dimData.getLinksInDim())
			{
				if (dimHelper.dimList.containsKey(link.destDimID))
				{
					//Only descend into dimensions exactly one level deeper, and only if they have a dungeon
					if (dimHelper.dimList.get(link.destDimID).dungeonGenerator != null
						&& dimHelper.instance.getDimDepth(link.destDimID) == dimData.depth + 1)
					{
						for (DungeonGenerator dungeonGen : getDungeonDataBelow(dimHelper.dimList.get(link.destDimID)))
						{
							if (!dungeonData.contains(dungeonGen))
							{
								dungeonData.add(dungeonGen);
							}
						}
					}
				}
			}
		}
		return dungeonData;
	}
}
package com.spaceproject.screens.animations;

import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Input;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.graphics.glutils.ShapeRenderer;
import com.badlogic.gdx.math.ConvexHull;
import com.badlogic.gdx.math.MathUtils;
import com.badlogic.gdx.math.Matrix4;
import com.badlogic.gdx.math.Polygon;
import com.badlogic.gdx.math.Rectangle;
import com.badlogic.gdx.math.Vector2;
import com.badlogic.gdx.physics.box2d.Body;
import com.badlogic.gdx.physics.box2d.BodyDef;
import com.badlogic.gdx.physics.box2d.Box2DDebugRenderer;
import com.badlogic.gdx.physics.box2d.Fixture;
import com.badlogic.gdx.physics.box2d.PolygonShape;
import com.badlogic.gdx.physics.box2d.Transform;
import com.badlogic.gdx.physics.box2d.World;
import com.badlogic.gdx.utils.Array;
import com.badlogic.gdx.utils.FloatArray;
import com.spaceproject.generation.BodyFactory;
import com.spaceproject.math.MyMath;
import com.spaceproject.ui.CustomShapeRenderer;

import java.util.Iterator;

/**
 * Title-screen animation prototype: Box2D-driven asteroids plus a mouse-aimed
 * ship and a "bullet" body fired toward the cursor on left click.
 */
public class AsteroidAnim extends TitleAnimation {
    
    Array<Asteroid> asteroids = new Array<Asteroid>();
    CustomShapeRenderer customShapeRenderer;
    Matrix4 projectionMatrix = new Matrix4();
    
    // Fixed-timestep physics settings (60 Hz with an accumulator).
    int velocityIterations = 6;
    int positionIterations = 2;
    float timeStep = 1 / 60.0f;
    float accumulator = 0f;
    
    World world;
    Box2DDebugRenderer box2DDebugRenderer;
    Array<Body> bodies = new Array<>();
    // Scratch buffer for transformed polygon vertices, packed [x0, y0, x1, y1, ...].
    float[] vertices = new float[100];
    Vector2 tempVertex = new Vector2();
    Body bullet;
    
    //1. move to b2d
    //2. move ship to polygon -> b2d
    //2. move bullet to circle body
    //3; fix rendering
    //4; fortune test
    //5; local test
    
    public AsteroidAnim() {
        Asteroid asteroid = new Asteroid(new Vector2(Gdx.graphics.getWidth() * MathUtils.random(), Gdx.graphics.getHeight() * MathUtils.random()), 200, 0, 0);
        asteroids.add(asteroid);
        
        customShapeRenderer = new CustomShapeRenderer(ShapeRenderer.ShapeType.Filled, new ShapeRenderer().getRenderer());
        
        world = new World(new Vector2(), true);
        box2DDebugRenderer = new Box2DDebugRenderer(true, true, true, true, true, true);
        
        Body body = BodyFactory.createPoly(200, 200, asteroid.hullPoly.getVertices(), BodyDef.BodyType.DynamicBody, world);
        body.applyForceToCenter(10, 1, true);
        body.applyAngularImpulse(200, true);
        
        bullet = BodyFactory.createCircle(Gdx.graphics.getWidth() * 0.5f, Gdx.graphics.getHeight() * 0.5f, 20, world);
    }
    
    @Override
    public void render(float deltaTime, ShapeRenderer shape) {
        //Step physics on a fixed timestep, carrying leftover time in the accumulator.
        accumulator += deltaTime;
        while (accumulator >= timeStep) {
            world.step(timeStep, velocityIterations, positionIterations);
            accumulator -= timeStep;
        }
        
        Vector2 centerScreen = new Vector2(Gdx.graphics.getWidth() * 0.5f, Gdx.graphics.getHeight() * 0.5f);
        Vector2 mousePos = new Vector2(Gdx.input.getX(), Gdx.graphics.getHeight() - Gdx.input.getY());
        float mouseAngle = MyMath.angleTo(mousePos.x, mousePos.y, centerScreen.x, centerScreen.y);
        if (Gdx.input.isButtonPressed(Input.Buttons.LEFT)) {
            bullet.setTransform(centerScreen, mouseAngle);
            bullet.setLinearVelocity(MyMath.vector(mouseAngle, 100000));
        }
        
        customShapeRenderer.setProjectionMatrix(projectionMatrix);
        customShapeRenderer.begin(ShapeRenderer.ShapeType.Filled);
        world.getBodies(bodies);
        for (Body body : bodies) {
            Fixture fixture = body.getFixtureList().first();
            if (!(fixture.getShape() instanceof PolygonShape)) continue;
            
            PolygonShape polyShape = (PolygonShape) fixture.getShape();
            int vertexCount = polyShape.getVertexCount();
            Transform transform = body.getTransform();
            for (int i = 0; i < vertexCount; i++) {
                polyShape.getVertex(i, tempVertex);
                transform.mul(tempVertex);
                // BUGFIX: vertices must be packed as (x, y) pairs. The old code wrote
                // vertices[i] and vertices[i+1] while stepping i by 1, so each vertex's
                // y was overwritten by the next vertex's x.
                vertices[i * 2] = tempVertex.x;
                vertices[i * 2 + 1] = tempVertex.y;
            }
            //customShapeRenderer.fillPolygon(vertices, 0, vertexCount*2, Color.WHITE);
        }
        for (Asteroid a : asteroids) {
            //a.renderBody(customShapeRenderer);
        }
        customShapeRenderer.end();
        
        shape.begin(ShapeRenderer.ShapeType.Line);
        for (Iterator<Asteroid> asteroidIterator = new Array.ArrayIterator<>(asteroids); asteroidIterator.hasNext(); ) {
            Asteroid a = asteroidIterator.next();
            //a.render(shape, deltaTime);
        }
        if (Gdx.input.isButtonJustPressed(Input.Buttons.RIGHT)) {
            asteroids.add(new Asteroid(mousePos.cpy(), 200, 0, 0));
        }
        shape.end();
        
        shape.begin(ShapeRenderer.ShapeType.Filled);
        // draw ship
        shape.setColor(Color.WHITE);
        setShape(Gdx.graphics.getWidth() / 2, Gdx.graphics.getHeight() / 2, mouseAngle);
        for (int i = 0, j = shapeX.length - 1; i < shapeY.length; j = i++) {
            shape.line(shapeX[i], shapeY[i], shapeX[j], shapeY[j]);
        }
        shape.end();
        
        projectionMatrix.setToOrtho2D(0, 0, Gdx.graphics.getWidth(), Gdx.graphics.getHeight());
        box2DDebugRenderer.render(world, projectionMatrix);
    }
    
    private float[] shapeX = new float[4];
    private float[] shapeY = new float[4];
    
    /** Computes the 4 vertices of the ship's dart shape at (x, y) rotated by {@code radians}. */
    private void setShape(float x, float y, float radians) {
        float scale = 40;
        shapeX[0] = x + MathUtils.cos(radians) * scale;
        shapeY[0] = y + MathUtils.sin(radians) * scale;
        
        shapeX[1] = x + MathUtils.cos(radians - 4 * 3.1415f / 5) * scale;
        shapeY[1] = y + MathUtils.sin(radians - 4 * 3.1415f / 5) * scale;
        
        shapeX[2] = x + MathUtils.cos(radians + 3.1415f) * scale * (5.0f / 8);
        shapeY[2] = y + MathUtils.sin(radians + 3.1415f) * scale * (5.0f / 8);
        
        shapeX[3] = x + MathUtils.cos(radians + 4 * 3.1415f / 5) * scale;
        shapeY[3] = y + MathUtils.sin(radians + 4 * 3.1415f / 5) * scale;
    }
    
    @Override
    public void resize(int width, int height) { }
    
    /** A randomly generated convex asteroid that drifts and bounces off the screen edges. */
    private class Asteroid {
        Vector2 position;
        float angle, velocity;
        Polygon hullPoly;
        int size;
        
        public Asteroid(Vector2 position, int size, float angle, float velocity) {
            this.size = size;
            this.angle = angle;
            this.velocity = velocity;
            this.position = position.sub(size / 2, size / 2);
            
            FloatArray points = new FloatArray();
            int numPoints = 7;//Box2D poly vert limit: Assertion `3 <= count && count <= 8' failed.
            for (int i = 0; i < numPoints * 2; i += 2) {
                float x = MathUtils.random(size);
                float y = MathUtils.random(size);
                points.add(x);
                points.add(y);
            }
            ConvexHull convex = new ConvexHull();
            float[] hull = convex.computePolygon(points, false).toArray();
            hullPoly = new Polygon(hull);
            hullPoly.setOrigin(size / 2, size / 2);//should actually be center of mass//TODO: lookup center of mass for arbitrary poly
        }
        
        public void render(ShapeRenderer shape, float delta) {
            //float angle = MyMath.angleTo(new Vector2(Gdx.input.getX(), Gdx.graphics.getHeight()-Gdx.input.getY()), position);
            //position.add(MyMath.vector(angle,10*delta));
            
            //Bounce off top/bottom edges by mirroring the heading vertically
            Rectangle bounds = hullPoly.getBoundingRectangle();
            if (bounds.y <= 0) {
                position.add(0, 1);
                angle = MathUtils.PI2 - angle;
            } else if (bounds.y + bounds.height >= Gdx.graphics.getHeight()) {
                position.sub(0, 1);
                angle = MathUtils.PI2 - angle;
            }
            //shape.triangle();
            //Bounce off left/right edges by mirroring the heading horizontally
            if (bounds.x <= 0) {
                position.add(1, 0);
                angle = MathUtils.PI - angle;
            } else if (bounds.x + bounds.width >= Gdx.graphics.getWidth()) {
                position.sub(1, 0);
                angle = MathUtils.PI - angle;
            }
            
            position.add(MyMath.vector(angle, velocity * delta));
            hullPoly.rotate(10 * delta);
            hullPoly.setPosition(position.x, position.y);
            
            shape.setColor(Color.BLACK);
            //shape.polyline(hullPoly.getTransformedVertices());
            
            shape.setColor(Color.RED);
            Rectangle rectangle = bounds;
            //shape.rect(rectangle.x, rectangle.y, rectangle.width, rectangle.height);
        }
        
        public void renderBody(CustomShapeRenderer shape) {
            shape.fillPolygon(hullPoly.getTransformedVertices(), 0, hullPoly.getVertices().length, Color.WHITE);
        }
    }
}
package net.minecraft.src;

import java.util.LinkedHashSet;

/**
 * Registry of listeners that observe every packet sent or received by the
 * client. Relies on patched hook fields in NetworkManager and GuiMultiplayer.
 */
public class PacketHooks
{
    /** Callback invoked for every packet; {@code isSend} is true for outgoing packets. */
    public interface IPacketEventListener
    {
        public void onPacket(Packet packet, boolean isSend);
    }

    // FIX: was a raw "new LinkedHashSet()"; parameterized to avoid unchecked
    // warnings. LinkedHashSet preserves registration order for dispatch.
    private static LinkedHashSet<IPacketEventListener> packetEventListeners = new LinkedHashSet<IPacketEventListener>();

    /**
     * Registers a listener.
     *
     * @throws RuntimeException if the required NetworkManager/GuiMultiplayer
     *         hooks are missing (e.g. another mod overwrote those classes)
     */
    public static void register(IPacketEventListener listener)
    {
        try
        {
            // make sure our modifications to NetworkManager and GuiMultiplayer are present
            if (!NetworkManager.packetHooks.getClass().equals(PacketHooks.class)
                || !GuiMultiplayer.packetHooks.getClass().equals(PacketHooks.class))
            {
                throw new RuntimeException("internal error");
            }
        }
        catch (LinkageError e)
        {
            throw new RuntimeException("Unable to register packet events. This is most likely due to a mod overwriting lg.class (NetworkManager.java) or acp.class (GuiMultiplayer.java).", e);
        }
        packetEventListeners.add(listener);
    }

    /** Removes a previously registered listener; no-op if it was never registered. */
    public static void unregister(IPacketEventListener listener)
    {
        packetEventListeners.remove(listener);
    }

    /** Notifies all listeners of a packet event (single-connection path). */
    protected void dispatchEvent(Packet packet, boolean isSend)
    {
        for (IPacketEventListener listener : packetEventListeners)
        {
            listener.onPacket(packet, isSend);
        }
    }

    /** Called when there are multiple sockets open (i.e. when pinging each server on the server list) */
    protected synchronized void dispatchEventSynchronized(Packet packet, boolean isSend)
    {
        for (IPacketEventListener listener : packetEventListeners)
        {
            listener.onPacket(packet, isSend);
        }
    }
}
package org.cache2k.impl;

import org.apache.commons.logging.Log;
import org.cache2k.CacheConfig;
import org.cache2k.StorageConfiguration;
import org.cache2k.impl.timer.TimerPayloadListener;
import org.cache2k.impl.timer.TimerService;
import org.cache2k.storage.CacheStorage;
import org.cache2k.storage.CacheStorageContext;
import org.cache2k.storage.ImageFileStorage;
import org.cache2k.storage.MarshallerFactory;
import org.cache2k.storage.Marshallers;
import org.cache2k.storage.StorageEntry;

import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import java.util.concurrent.BlockingDeque;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Passes cache operations to the storage layer. Implements common
 * services for the storage layer, such as timing.
 *
 * <p>Review changes: removed stray {@code System.err} debug output, fixed the
 * flush callable synchronizing on the wrong monitor, fixed shutdown logging a
 * nulled field, re-interrupt on {@code InterruptedException}, and routed
 * stack-trace printing through the cache log.
 *
 * @author Jens Wilke; created: 2014-05-08
 */
@SuppressWarnings({"unchecked", "SynchronizeOnNonFinalField"})
class PassingStorageAdapter extends StorageAdapter {

  private BaseCache cache;
  CacheStorage storage;
  /** Temporary buffering storage installed while a clear() is in progress. */
  CacheStorage copyForClearing;
  /** True when entries are only written to storage on eviction (passivation mode). */
  boolean passivation = false;
  long storageErrorCount = 0;
  /** In passivation mode: keys removed from the cache but maybe still present in storage. */
  Set<Object> deletedKeys = null;
  StorageContext context;
  StorageConfiguration config;
  ExecutorService executor = Executors.newCachedThreadPool();
  TimerService.CancelHandle flushTimerHandle;
  // NOTE(review): needsFlush is set but never reset to false anywhere in this
  // file, so the flush timer will keep rescheduling -- confirm intended.
  boolean needsFlush;
  Future<Void> executingFlush;

  public PassingStorageAdapter(BaseCache _cache, CacheConfig _cacheConfig,
                               StorageConfiguration _storageConfig) {
    cache = _cache;
    context = new StorageContext(_cache);
    context.keyType = _cacheConfig.getKeyType();
    context.valueType = _cacheConfig.getValueType();
    config = _storageConfig;
  }

  /**
   * Opens the backing image-file storage. On failure the adapter degrades to
   * pure in-memory operation (storage stays null).
   */
  public void open() {
    try {
      ImageFileStorage s = new ImageFileStorage();
      s.open(context, config);
      storage = s;
      if (config.isPassivation()) {
        deletedKeys = new HashSet<>();
        passivation = true;
      }
      cache.getLog().info("open " + storage);
    } catch (Exception ex) {
      cache.getLog().warn("error initializing storage, running in-memory", ex);
    }
  }

  /**
   * Store entry on cache put. Entry must be locked, since we use the
   * entry directly for handing it over to the storage, it is not
   * allowed to change. If storeAlways is switched on the entry will
   * be in memory and in the storage after this operation.
   *
   * <p>In passivation mode the entry is NOT written here (it is written on
   * eviction); we only clear a pending deletion for the key.
   */
  public void put(BaseCache.Entry e) {
    if (deletedKeys != null) {
      synchronized (deletedKeys) {
        deletedKeys.remove(e.getKey());
      }
      return;
    }
    try {
      storage.put(e);
      checkStartFlushTimer();
    } catch (Exception ex) {
      storageErrorCount++;
      throw new CacheStorageException("cache put", ex);
    }
  }

  /** Marks the storage dirty and lazily starts the periodic flush timer. */
  void checkStartFlushTimer() {
    needsFlush = true;
    if (config.getSyncInterval() <= 0) {
      return;
    }
    if (flushTimerHandle != null) {
      return;
    }
    synchronized (this) {
      // double-checked under the adapter lock
      if (flushTimerHandle != null) {
        return;
      }
      scheduleTimer();
    }
  }

  /** Must be called while holding the adapter lock. */
  private void scheduleTimer() {
    if (flushTimerHandle != null) {
      flushTimerHandle.cancel();
    }
    TimerPayloadListener<Void> l = new TimerPayloadListener<Void>() {
      @Override
      public void fire(Void _payload, long _time) {
        flush();
      }
    };
    long _fireTime = System.currentTimeMillis() + config.getSyncInterval();
    flushTimerHandle = cache.timerService.add(l, null, _fireTime);
  }

  /**
   * Reads an entry from storage. In passivation mode a key pending deletion
   * is reported as absent even if still physically present.
   */
  public StorageEntry get(Object k) {
    if (deletedKeys != null) {
      synchronized (deletedKeys) {
        if (deletedKeys.contains(k)) {
          return null;
        }
      }
    }
    try {
      return storage.get(k);
    } catch (Exception ex) {
      storageErrorCount++;
      throw new CacheStorageException("cache get", ex);
    }
  }

  /** On eviction from memory: in passivation mode, persist the entry if dirty. */
  public void evict(BaseCache.Entry e) {
    if (passivation) {
      putIfDirty(e);
    }
  }

  /**
   * Entry is evicted from memory cache either because of an expiry or an
   * eviction.
   */
  public void expire(BaseCache.Entry e) {
    remove(e.getKey());
  }

  private void putIfDirty(BaseCache.Entry e) {
    try {
      if (e.isDirty()) {
        storage.put(e);
        checkStartFlushTimer();
      }
    } catch (Exception ex) {
      storageErrorCount++;
      throw new CacheStorageException("cache put", ex);
    }
  }

  /**
   * Removes a key from storage. In passivation mode the deletion is only
   * recorded and applied to storage at shutdown.
   */
  public void remove(Object key) {
    if (deletedKeys != null) {
      synchronized (deletedKeys) {
        deletedKeys.remove(key);
      }
      return;
    }
    try {
      storage.remove(key);
      checkStartFlushTimer();
    } catch (Exception ex) {
      storageErrorCount++;
      throw new CacheStorageException("cache remove", ex);
    }
  }

  /**
   * Iterates the in-memory entries first, then streams the remaining entries
   * from storage on a background task, handing them over via a blocking deque.
   */
  @Override
  public Iterator<BaseCache.Entry> iterateAll() {
    final CompleteIterator it = new CompleteIterator();
    synchronized (cache.lock) {
      it.localIteration = cache.iterateAllLocalEntries();
      if (!passivation) {
        it.totalEntryCount = storage.getEntryCount();
      } else {
        // with passivation the storage count misses memory-only entries
        it.totalEntryCount = -1;
      }
    }
    it.executor = executor;
    it.runnable = new Runnable() {
      @Override
      public void run() {
        final BlockingDeque<BaseCache.Entry> _queue = it.queue;
        CacheStorage.EntryVisitor v = new CacheStorage.EntryVisitor() {
          @Override
          public void visit(StorageEntry se) {
            BaseCache.Entry e = cache.insertEntryFromStorage(se, true);
            _queue.addFirst(e);
          }
        };
        CacheStorage.EntryFilter f = new CacheStorage.EntryFilter() {
          @Override
          public boolean shouldInclude(Object _key) {
            // skip keys already delivered from the in-memory pass
            // (debug System.err output removed)
            return !it.keysIterated.contains(_key);
          }
        };
        final CacheStorage.VisitContext ctx = new CacheStorage.VisitContext() {
          @Override
          public boolean needMetaData() {
            return true;
          }
          @Override
          public boolean needValue() {
            return true;
          }
          @Override
          public ExecutorService getExecutorService() {
            return createOperationExecutor();
          }
          @Override
          public boolean shouldStop() {
            return false;
          }
        };
        try {
          storage.visit(v, f, ctx);
        } catch (Exception ex) {
          // was ex.printStackTrace(); route through the cache log instead
          getLog().warn("storage iteration error", ex);
        }
        // always deliver the end marker so the consumer terminates
        _queue.addFirst(LAST_ENTRY);
      }
    };
    return it;
  }

  /** Sentinel signalling the end of the background storage iteration. */
  static final BaseCache.Entry LAST_ENTRY = new BaseCache.Entry();

  /** Two-phase iterator: local in-memory entries, then storage entries. */
  static class CompleteIterator implements Iterator<BaseCache.Entry> {
    HashSet<Object> keysIterated = new HashSet<>();
    Iterator<BaseCache.Entry> localIteration;
    int totalEntryCount;
    BaseCache.Entry entry;
    BlockingDeque<BaseCache.Entry> queue = new LinkedBlockingDeque<>();
    Runnable runnable;
    ExecutorService executor;

    @Override
    public boolean hasNext() {
      if (localIteration != null) {
        boolean b = localIteration.hasNext();
        if (b) {
          entry = localIteration.next();
          keysIterated.add(entry.getKey());
          return true;
        }
        localIteration = null;
        if (keysIterated.size() >= totalEntryCount) {
          // everything was already in memory, no storage pass needed
          queue = null;
        } else {
          executor.submit(runnable);
        }
      }
      if (queue != null) {
        try {
          entry = queue.takeFirst();
          // (debug System.err output removed)
          if (entry != LAST_ENTRY) {
            return true;
          }
        } catch (InterruptedException ex) {
          // preserve the interrupt status instead of swallowing it
          Thread.currentThread().interrupt();
        }
        queue = null;
      }
      return false;
    }

    @Override
    public BaseCache.Entry next() {
      return entry;
    }

    @Override
    public void remove() {
      throw new UnsupportedOperationException();
    }
  }

  /**
   * Triggers an asynchronous flush. A new flush waits for the previously
   * submitted one, then flushes and either reschedules or cancels the timer.
   */
  public Future<Void> flush() {
    synchronized (this) {
      final Future<Void> _previousFlush = executingFlush;
      Callable<Void> c = new Callable<Void>() {
        @Override
        public Void call() throws Exception {
          if (_previousFlush != null) {
            _previousFlush.get();
          }
          storage.flush(System.currentTimeMillis(), CacheStorage.DEFAULT_FLUSH_CONTEXT);
          getLog().info("flush " + storage);
          executingFlush = null;
          // FIX: was synchronized (this), which locked the anonymous Callable
          // instance; the timer state is guarded by the adapter lock
          // (see checkStartFlushTimer/scheduleTimer).
          synchronized (PassingStorageAdapter.this) {
            if (needsFlush) {
              scheduleTimer();
            } else {
              if (flushTimerHandle != null) {
                flushTimerHandle.cancel();
                flushTimerHandle = null;
              }
            }
          }
          return null;
        }
      };
      return executingFlush = executor.submit(c);
    }
  }

  private Log getLog() {
    return cache.getLog();
  }

  /**
   * Writes back dirty entries (passivation), applies recorded deletions,
   * then flushes and closes the storage synchronously.
   */
  public void shutdown() {
    if (storage == null) {
      return;
    }
    try {
      if (passivation) {
        Iterator<BaseCache.Entry> it;
        synchronized (cache.lock) {
          it = cache.iterateAllLocalEntries();
        }
        while (it.hasNext()) {
          BaseCache.Entry e = it.next();
          putIfDirty(e);
        }
        if (deletedKeys != null) {
          for (Object k : deletedKeys) {
            storage.remove(k);
          }
        }
      }
      synchronized (this) {
        final CacheStorage _storage = storage;
        storage = null;
        final Future<Void> _previousFlush = executingFlush;
        Callable<Void> c = new Callable<Void>() {
          @Override
          public Void call() throws Exception {
            if (_previousFlush != null) {
              try {
                _previousFlush.cancel(true);
                _previousFlush.get();
              } catch (Exception ex) {
                // a cancelled or failed pending flush is acceptable here;
                // the final flush below still runs
              }
            }
            _storage.flush(System.currentTimeMillis(), CacheStorage.DEFAULT_FLUSH_CONTEXT);
            // FIX: was "close " + storage, but the field was already nulled
            // above, so this always logged "close null"
            getLog().info("close " + _storage);
            _storage.close();
            return null;
          }
        };
        Future<Void> f = executor.submit(c);
        f.get();
      }
    } catch (Exception ex) {
      storageErrorCount++;
      // was silently swallowed; at least record what went wrong
      getLog().warn("error during shutdown", ex);
    }
  }

  /**
   * Swaps in a buffering storage so operations continue while the real
   * storage is cleared. Returns false if a clear is already in progress.
   */
  public boolean clearPrepare() {
    if (copyForClearing != null) {
      return false;
    }
    copyForClearing = storage;
    storage = new CacheStorageBuffer();
    return true;
  }

  /** Clears the real storage, replays buffered operations, and swaps it back. */
  public void clearProceed() {
    try {
      copyForClearing.clear();
      ((CacheStorageBuffer) storage).transfer(copyForClearing);
    } catch (Exception ex) {
      // was ex.printStackTrace(); route through the cache log instead
      getLog().warn("exception during clear", ex);
      storageErrorCount++;
    } finally {
      synchronized (cache.lock) {
        storage = copyForClearing;
        copyForClearing = null;
      }
    }
  }

  /**
   * Calculates the cache size, depending on the persistence configuration
   */
  @Override
  public int getTotalEntryCount() {
    if (!passivation) {
      // without passivation every memory entry is also in storage
      return storage.getEntryCount();
    }
    return storage.getEntryCount() + cache.getLocalSize();
  }

  /** Adapter exposing cache metadata and marshalling services to the storage. */
  static class StorageContext implements CacheStorageContext {

    BaseCache cache;
    Class<?> keyType;
    Class<?> valueType;

    StorageContext(BaseCache cache) {
      this.cache = cache;
    }

    @Override
    public String getManagerName() {
      return cache.manager.getName();
    }

    @Override
    public String getCacheName() {
      return cache.getName();
    }

    @Override
    public Class<?> getKeyType() {
      return keyType;
    }

    @Override
    public Class<?> getValueType() {
      return valueType;
    }

    @Override
    public MarshallerFactory getMarshallerFactory() {
      return Marshallers.getInstance();
    }

    @Override
    public void requestMaintenanceCall(int _intervalMillis) {
    }

    @Override
    public void notifyEvicted(StorageEntry e) {
    }

    @Override
    public void notifyExpired(StorageEntry e) {
    }
  }

  /**
   * Executor for storage visit operations: no core threads, max pool roughly
   * 1.23x the available cores, 21s keep-alive, direct hand-off queue.
   */
  ExecutorService createOperationExecutor() {
    return new ThreadPoolExecutor(
      0, Runtime.getRuntime().availableProcessors() * 123 / 100,
      21, TimeUnit.SECONDS,
      new SynchronousQueue<Runnable>(),
      THREAD_FACTORY,
      new ThreadPoolExecutor.AbortPolicy());
  }

  static final ThreadFactory THREAD_FACTORY = new MyThreadFactory();

  /** Creates daemon threads named cache2k-storage#N. */
  @SuppressWarnings("NullableProblems")
  static class MyThreadFactory implements ThreadFactory {

    AtomicInteger count = new AtomicInteger();

    @Override
    public synchronized Thread newThread(Runnable r) {
      Thread t = new Thread(r, "cache2k-storage#" + count.incrementAndGet());
      t.setDaemon(true);
      return t;
    }
  }
}
package org.devilry.core.entity;

import java.util.Date;

import javax.persistence.*;

/**
 * Node subtype representing an assignment. Stored in the shared Node table
 * using single-table inheritance with discriminator value "AN"; adds a
 * date-precision deadline column.
 */
@Entity
@DiscriminatorValue("AN")
public class AssignmentNode extends Node {

	/** Due date for the assignment; DATE precision only (no time-of-day). */
	@Temporal(TemporalType.DATE)
	private Date deadline;

	/** No-argument constructor required by JPA. */
	public AssignmentNode() {
	}

	/** @return the assignment deadline, or null when none has been set */
	public Date getDeadline() {
		return deadline;
	}

	/** @param deadline the new deadline for this assignment */
	public void setDeadline(Date deadline) {
		this.deadline = deadline;
	}
}
package org.mwg.core.chunk.heap;

import org.mwg.Callback;
import org.mwg.Graph;
import org.mwg.core.CoreConstants;
import org.mwg.core.chunk.ChunkListener;
import org.mwg.core.chunk.Stack;
import org.mwg.core.utility.BufferBuilder;
import org.mwg.core.utility.PrimitiveHelper;
import org.mwg.plugin.Chunk;
import org.mwg.plugin.ChunkIterator;
import org.mwg.plugin.ChunkSpace;
import org.mwg.plugin.ChunkType;
import org.mwg.struct.Buffer;

import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicIntegerArray;
import java.util.concurrent.atomic.AtomicReference;

/**
 * Fixed-capacity in-heap chunk cache. Chunks are kept in an open hash table
 * (bucket heads in _elementHash, per-slot chain links in _elementNext) keyed
 * by (type, world, time, id) via PrimitiveHelper.tripleHash. Slots whose mark
 * count drops to zero are pushed on an LRU stack and recycled by putAndMark.
 * Mostly lock-free: per-bucket spin locks live in _elementHashLock
 * (-1 = free, 1 = held).
 */
public class HeapChunkSpace implements ChunkSpace, ChunkListener {

    /**
     * Global variables
     */
    private final int _maxEntries;               // hash table / value array capacity
    private final int _saveBatchSize;            // max dirty chunks per save batch
    private final AtomicInteger _elementCount;   // number of occupied slots
    private final Stack _lru;                    // indices of recyclable slots
    private Graph _graph;

    /**
     * HashMap variables
     */
    private final int[] _elementNext;            // chain link per slot, -1 = end/dropped
    private final int[] _elementHash;            // bucket head slot per hash, -1 = empty
    private final Chunk[] _values;               // slot -> chunk
    private final AtomicIntegerArray _elementHashLock;  // per-bucket spin locks
    private final AtomicReference<InternalDirtyStateList> _dirtyState;  // current dirty batch

    @Override
    public void setGraph(Graph p_graph) {
        this._graph = p_graph;
    }

    @Override
    public Graph graph() {
        return this._graph;
    }

    /**
     * Lock-free, bounded list of slot indices that are dirty and awaiting
     * save. Appends and iteration hand out positions via CAS counters.
     */
    final class InternalDirtyStateList implements ChunkIterator {

        private final AtomicInteger _nextCounter;       // next free append position
        private final int[] _dirtyElements;             // recorded slot indices
        private final int _max;                         // capacity (== batch size)
        private final AtomicInteger _iterationCounter;  // next position to consume
        private final HeapChunkSpace _parent;

        InternalDirtyStateList(int maxSize, HeapChunkSpace p_parent) {
            this._dirtyElements = new int[maxSize];
            this._nextCounter = new AtomicInteger(0);
            this._iterationCounter = new AtomicInteger(0);
            this._max = maxSize;
            this._parent = p_parent;
        }

        @Override
        public boolean hasNext() {
            return this._iterationCounter.get() < this._nextCounter.get();
        }

        @Override
        public Chunk next() {
            // claim the next iteration position via CAS; null when exhausted
            int previous;
            int next;
            do {
                previous = this._iterationCounter.get();
                if (this._nextCounter.get() == previous) {
                    return null;
                }
                next = previous + 1;
            } while (!this._iterationCounter.compareAndSet(previous, next));
            return this._parent.getValues()[this._dirtyElements[previous]];
        }

        /** Records a dirty slot index; returns false when the batch is full. */
        public boolean declareDirty(int dirtyIndex) {
            int previousDirty;
            int nextDirty;
            do {
                previousDirty = this._nextCounter.get();
                if (previousDirty == this._max) {
                    return false;
                }
                nextDirty = previousDirty + 1;
            } while (!this._nextCounter.compareAndSet(previousDirty, nextDirty));
            //ok we have the token previous
            this._dirtyElements[previousDirty] = dirtyIndex;
            return true;
        }

        @Override
        public long size() {
            return this._nextCounter.get();
        }

        @Override
        public void free() {
            //noop
        }
    }

    public Chunk[] getValues() {
        return _values;
    }

    /**
     * @param initialCapacity fixed slot capacity of the space
     * @param saveBatchSize   dirty batch size; must not exceed the capacity
     */
    public HeapChunkSpace(int initialCapacity, int saveBatchSize) {
        if (saveBatchSize > initialCapacity) {
            throw new RuntimeException("Save Batch Size can't be bigger than cache size");
        }
        this._maxEntries = initialCapacity;
        this._saveBatchSize = saveBatchSize;
        this._lru = new FixedStack(initialCapacity);
        this._dirtyState = new AtomicReference<InternalDirtyStateList>();
        this._dirtyState.set(new InternalDirtyStateList(saveBatchSize, this));
        //init std variables
        this._elementNext = new int[initialCapacity];
        this._elementHashLock = new AtomicIntegerArray(new int[initialCapacity]);
        this._elementHash = new int[initialCapacity];
        this._values = new Chunk[initialCapacity];
        this._elementCount = new AtomicInteger(0);
        //init internal structures
        for (int i = 0; i < initialCapacity; i++) {
            this._elementNext[i] = -1;
            this._elementHash[i] = -1;
            this._elementHashLock.set(i, -1);
        }
    }

    /**
     * Looks up a chunk and increments its mark (reference) count.
     * Returns null if absent, or if the chunk was unreferenced and its slot
     * could not be reclaimed from the LRU (it is being recycled elsewhere).
     */
    @Override
    public final Chunk getAndMark(byte type, long world, long time, long id) {
        final int index = (int) PrimitiveHelper.tripleHash(type, world, time, id, this._maxEntries);
        int m = this._elementHash[index];
        Chunk result = null;
        while (m != -1) {
            HeapChunk foundChunk = (HeapChunk) this._values[m];
            if (foundChunk != null && type == foundChunk.chunkType() && world == foundChunk.world() && time == foundChunk.time() && id == foundChunk.id()) {
                //GET VALUE
                if (foundChunk.mark() == 1) {
                    //was at zero before, risky operation, check selectWith LRU
                    if (this._lru.dequeue(m)) {
                        result = foundChunk;
                        break;
                    } else {
                        if (foundChunk.marks() > 1) {
                            //ok fine we are several on the same object...
                            result = foundChunk;
                            break;
                        } else {
                            //better return null the object will be recycled by somebody else...
                            result = null;
                            break;
                        }
                    }
                } else {
                    result = foundChunk;
                    break;
                }
            } else {
                m = this._elementNext[m];
            }
        }
        return result;
    }

    /**
     * Like getAndMark, but on a miss loads the chunk payload from the
     * graph storage asynchronously and inserts it before invoking the callback.
     */
    @Override
    public void getOrLoadAndMark(final byte type, final long world, final long time, final long id, final Callback<Chunk> callback) {
        Chunk fromMemory = getAndMark(type, world, time, id);
        if (fromMemory != null) {
            callback.on(fromMemory);
        } else {
            final Buffer keys = graph().newBuffer();
            BufferBuilder.keyToBuffer(keys, type, world, time, id);
            graph().storage().get(keys, new Callback<Buffer>() {
                @Override
                public void on(final Buffer result) {
                    if (result != null) {
                        // NOTE(review): `keys` is freed only in the null branch
                        // below -- confirm whether storage().get releases it on
                        // success or whether this leaks the buffer.
                        Chunk loadedChunk_0 = create(type, world, time, id, result, null);
                        result.free();
                        if (loadedChunk_0 == null) {
                            callback.on(null);
                        } else {
                            // putAndMark may return an existing chunk inserted
                            // concurrently; drop our freshly created duplicate
                            Chunk loadedChunk = putAndMark(loadedChunk_0);
                            if (loadedChunk != loadedChunk_0) {
                                freeChunk(loadedChunk_0);
                            }
                            callback.on(loadedChunk);
                        }
                    } else {
                        keys.free();
                        callback.on(null);
                    }
                }
            });
        }
    }

    /**
     * Decrements the mark count of the identified chunk; when it reaches zero
     * the slot becomes recyclable (pushed on the LRU).
     */
    @Override
    public void unmark(byte type, long world, long time, long id) {
        int index = (int) PrimitiveHelper.tripleHash(type, world, time, id, this._maxEntries);
        int m = this._elementHash[index];
        while (m != -1) {
            HeapChunk foundChunk = (HeapChunk) this._values[m];
            if (foundChunk != null && type == foundChunk.chunkType() && world == foundChunk.world() && time == foundChunk.time() && id == foundChunk.id()) {
                if (foundChunk.unmark() == 0) {
                    //declare available for recycling
                    this._lru.enqueue(m);
                }
                return;
            } else {
                m = this._elementNext[m];
            }
        }
    }

    /** Same as unmark but starting from a chunk reference instead of its key. */
    @Override
    public void unmarkChunk(Chunk chunk) {
        HeapChunk heapChunk = (HeapChunk) chunk;
        if (heapChunk.unmark() == 0) {
            long nodeWorld = chunk.world();
            long nodeTime = chunk.time();
            long nodeId = chunk.id();
            byte nodeType = chunk.chunkType();
            int index = (int) PrimitiveHelper.tripleHash(chunk.chunkType(), nodeWorld, nodeTime, nodeId, this._maxEntries);
            int m = this._elementHash[index];
            while (m != -1) {
                Chunk foundChunk = this._values[m];
                if (foundChunk != null && nodeType == foundChunk.chunkType() && nodeWorld == foundChunk.world() && nodeTime == foundChunk.time() && nodeId == foundChunk.id()) {
                    //chunk is available for recycling
                    this._lru.enqueue(m);
                    return;
                } else {
                    m = this._elementNext[m];
                }
            }
        }
    }

    @Override
    public void freeChunk(Chunk chunk) {
        //NOOP
    }

    /** Instantiates a heap chunk of the requested type; null for unknown types. */
    @Override
    public Chunk create(byte p_type, long p_world, long p_time, long p_id, Buffer p_initialPayload, Chunk origin) {
        switch (p_type) {
            case ChunkType.STATE_CHUNK:
                return new HeapStateChunk(p_world, p_time, p_id, this, p_initialPayload, origin);
            case ChunkType.WORLD_ORDER_CHUNK:
                return new HeapWorldOrderChunk(p_world, p_time, p_id, this, p_initialPayload);
            case ChunkType.TIME_TREE_CHUNK:
                return new HeapTimeTreeChunk(p_world, p_time, p_id, this, p_initialPayload);
            case ChunkType.GEN_CHUNK:
                return new HeapGenChunk(p_world, p_time, p_id, this, p_initialPayload);
        }
        return null;
    }

    /**
     * Inserts an already-marked chunk. If an equal-keyed chunk is present the
     * existing one is returned instead. If the table is full a victim slot is
     * reclaimed from the LRU (after a last-resort GC + retry); the victim is
     * unlinked from its bucket chain under the per-bucket spin lock.
     */
    @Override
    public Chunk putAndMark(Chunk p_elem) {
        //first mark the object
        HeapChunk heapChunk = (HeapChunk) p_elem;
        if (heapChunk.mark() != 1) {
            throw new RuntimeException("Warning, trying to put an unsafe object " + p_elem);
        }
        int entry = -1;
        int hashIndex = (int) PrimitiveHelper.tripleHash(p_elem.chunkType(), p_elem.world(), p_elem.time(), p_elem.id(), this._maxEntries);
        int m = this._elementHash[hashIndex];
        while (m >= 0) {
            Chunk currentM = this._values[m];
            if (currentM != null && p_elem.chunkType() == currentM.chunkType() && p_elem.world() == currentM.world() && p_elem.time() == currentM.time() && p_elem.id() == currentM.id()) {
                entry = m;
                break;
            }
            m = this._elementNext[m];
        }
        if (entry == -1) {
            //we look for nextIndex
            int currentVictimIndex = (int) this._lru.dequeueTail();
            if (currentVictimIndex == -1) {
                //TODO cache is full :(
                System.gc();
                try {
                    Thread.sleep(100);
                } catch (Exception e) {
                    e.printStackTrace();
                }
                currentVictimIndex = (int) this._lru.dequeueTail();
                if (currentVictimIndex == -1) {
                    throw new RuntimeException("mwDB crashed, cache is full, please avoid to much retention of nodes or augment cache capacity!");
                }
            }
            if (this._values[currentVictimIndex] != null) {
                // evict the previous occupant: unlink it from its bucket chain
                Chunk victim = this._values[currentVictimIndex];
                long victimWorld = victim.world();
                long victimTime = victim.time();
                long victimObj = victim.id();
                byte victimType = victim.chunkType();
                int indexVictim = (int) PrimitiveHelper.tripleHash(victimType, victimWorld, victimTime, victimObj, this._maxEntries);
                //negotiate a lock on the indexVictim hash
                while (!this._elementHashLock.compareAndSet(indexVictim, -1, 1)) ;
                //we obtained the token, now remove the element
                m = _elementHash[indexVictim];
                int last = -1;
                while (m >= 0) {
                    Chunk currentM = this._values[m];
                    if (currentM != null && victimType == currentM.chunkType() && victimWorld == currentM.world() && victimTime == currentM.time() && victimObj == currentM.id()) {
                        break;
                    }
                    last = m;
                    m = _elementNext[m];
                }
                //POP THE VALUE FROM THE NEXT LIST
                // NOTE(review): if the victim is not found in its chain, m ends
                // at -1 and _elementNext[m] below would throw -- confirm the
                // victim is always reachable from its bucket here.
                if (last == -1) {
                    int previousNext = _elementNext[m];
                    _elementHash[indexVictim] = previousNext;
                } else {
                    if (m == -1) {
                        _elementNext[last] = -1;
                    } else {
                        _elementNext[last] = _elementNext[m];
                    }
                }
                _elementNext[m] = -1;//flag to dropped value
                //UNREF victim value object
                _values[currentVictimIndex] = null;
                //free the lock
                this._elementHashLock.set(indexVictim, -1);
                this._elementCount.decrementAndGet();
            }
            _values[currentVictimIndex] = p_elem;
            //negotiate the lock to write on hashIndex
            while (!this._elementHashLock.compareAndSet(hashIndex, -1, 1)) ;
            _elementNext[currentVictimIndex] = _elementHash[hashIndex];
            _elementHash[hashIndex] = currentVictimIndex;
            //free the lock
            this._elementHashLock.set(hashIndex, -1);
            this._elementCount.incrementAndGet();
            return p_elem;
        } else {
            return _values[entry];
        }
    }

    /** Atomically swaps in a fresh dirty batch and returns the detached one. */
    @Override
    public ChunkIterator detachDirties() {
        return _dirtyState.getAndSet(new InternalDirtyStateList(this._saveBatchSize, this));
    }

    /**
     * Flags a chunk dirty: sets its dirty bit, takes an extra mark to keep it
     * resident until saved, and records its slot in the current dirty batch
     * (triggering a save when the batch is full).
     */
    @Override
    public void declareDirty(Chunk dirtyChunk) {
        long world = dirtyChunk.world();
        long time = dirtyChunk.time();
        long id = dirtyChunk.id();
        byte type = dirtyChunk.chunkType();
        int hashIndex = (int) PrimitiveHelper.tripleHash(type, world, time, id, this._maxEntries);
        int m = this._elementHash[hashIndex];
        while (m >= 0) {
            HeapChunk currentM = (HeapChunk) this._values[m];
            if (currentM != null && type == currentM.chunkType() && world == currentM.world() && time == currentM.time() && id == currentM.id()) {
                if (currentM.setFlags(CoreConstants.DIRTY_BIT, 0)) {
                    //add an additional mark
                    currentM.mark();
                    //now enqueue in the dirtyList to be saved later
                    boolean success = false;
                    while (!success) {
                        InternalDirtyStateList previousState = this._dirtyState.get();
                        success = previousState.declareDirty(m);
                        if (!success) {
                            // batch full: force a save, which detaches the batch
                            this._graph.save(null);
                        }
                    }
                }
                return;
            }
            m = this._elementNext[m];
        }
        throw new RuntimeException("Try to declare a non existing object!");
    }

    /** Clears the dirty bit and releases the extra save mark taken above. */
    @Override
    public void declareClean(Chunk cleanChunk) {
        HeapChunk heapChunk = (HeapChunk) cleanChunk;
        long world = cleanChunk.world();
        long time = cleanChunk.time();
        long id = cleanChunk.id();
        byte type = cleanChunk.chunkType();
        int hashIndex = (int) PrimitiveHelper.tripleHash(type, world, time, id, this._maxEntries);
        int m = this._elementHash[hashIndex];
        while (m >= 0) {
            HeapChunk currentM = (HeapChunk) this._values[m];
            if (currentM != null && type == currentM.chunkType() && world == currentM.world() && time == currentM.time() && id == currentM.id()) {
                currentM.setFlags(0, CoreConstants.DIRTY_BIT);
                //free the save mark
                if (heapChunk.unmark() == 0) {
                    this._lru.enqueue(m);
                }
                return;
            }
            m = this._elementNext[m];
        }
        throw new RuntimeException("Try to declare a non existing object!");
    }

    @Override
    public final void clear() {
        //TODO
    }

    @Override
    public void free() {
        //TODO
    }

    @Override
    public final long size() {
        return this._elementCount.get();
    }

    @Override
    public long available() {
        return _lru.size();
    }

    /** Debug helper: dumps every chunk that still holds a non-zero mark count. */
    public void printMarked() {
        for (int i = 0; i < _values.length; i++) {
            if (_values[i] != null) {
                if (_values[i].marks() != 0) {
                    switch (_values[i].chunkType()) {
                        case ChunkType.STATE_CHUNK:
                            System.out.println("STATE(" + _values[i].world() + "," + _values[i].time() + "," + _values[i].id() + ")->marks->" + _values[i].marks());
                            break;
                        case ChunkType.TIME_TREE_CHUNK:
                            System.out.println("TIME_TREE(" + _values[i].world() + "," + _values[i].time() + "," + _values[i].id() + ")->marks->" + _values[i].marks());
                            break;
                        case ChunkType.WORLD_ORDER_CHUNK:
                            System.out.println("WORLD_ORDER(" + _values[i].world() + "," + _values[i].time() + "," + _values[i].id() + ")->marks->" + _values[i].marks());
                            break;
                        case ChunkType.GEN_CHUNK:
                            System.out.println("GENERATOR(" + _values[i].world() + "," + _values[i].time() + "," + _values[i].id() + ")->marks->" + _values[i].marks());
                            break;
                    }
                }
            }
        }
    }
}
package org.realityforge.arez;

import java.lang.reflect.Field;
import java.util.Objects;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import org.realityforge.guiceyloops.shared.ValueUtil;
import org.testng.Assert;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;

/**
 * Base class for Arez unit tests. Flips every dynamic configuration flag to
 * its "test" setting before each method and restores the defaults afterwards,
 * and provides small factories for observers, derivations, observables and
 * transactions.
 */
public abstract class AbstractArezTest
{
  @BeforeMethod
  protected void beforeTest()
    throws Exception
  {
    applyConfig( true );
    getProxyLogger().setLogger( new TestLogger() );
  }

  @AfterMethod
  protected void afterTest()
    throws Exception
  {
    applyConfig( false );
    getProxyLogger().setLogger( null );
  }

  /**
   * Applies the test (true) or default (false) configuration.
   * The purge-on-runaway flag is deliberately the inverse of the others.
   */
  private void applyConfig( final boolean testMode )
  {
    final ArezConfig.DynamicProvider cfg = getConfigProvider();
    cfg.setEnableNames( testMode );
    cfg.setVerboseErrorMessages( testMode );
    cfg.setCheckInvariants( testMode );
    cfg.setPurgeReactionsWhenRunawayDetected( !testMode );
    cfg.setEnforceTransactionType( testMode );
    cfg.setEnableSpy( testMode );
  }

  @Nonnull
  final TestLogger getTestLogger()
  {
    return (TestLogger) getProxyLogger().getLogger();
  }

  @Nonnull
  private ArezLogger.ProxyLogger getProxyLogger()
  {
    return (ArezLogger.ProxyLogger) ArezLogger.getLogger();
  }

  @Nonnull
  final ArezConfig.DynamicProvider getConfigProvider()
  {
    return (ArezConfig.DynamicProvider) ArezConfig.getProvider();
  }

  /**
   * Locates a declared field anywhere in the class hierarchy and makes it
   * accessible. Fails the test if no class up to Object declares it.
   */
  @Nonnull
  private Field getField( @Nonnull final Class<?> type, @Nonnull final String fieldName )
    throws NoSuchFieldException
  {
    for ( Class<?> c = type; null != c && Object.class != c; c = c.getSuperclass() )
    {
      try
      {
        final Field field = c.getDeclaredField( fieldName );
        field.setAccessible( true );
        return field;
      }
      catch ( final Throwable ignored )
      {
        // Not declared on this class - continue walking up the hierarchy.
      }
    }
    Assert.fail();
    throw new IllegalStateException();
  }

  /** Reflectively assigns a (possibly private/inherited) field on object. */
  @SuppressWarnings( "SameParameterValue" )
  final void setField( @Nonnull final Object object,
                       @Nonnull final String fieldName,
                       @Nullable final Object value )
    throws NoSuchFieldException, IllegalAccessException
  {
    getField( object.getClass(), fieldName ).set( object, value );
  }

  /**
   * Typically called to stop observer from being deactivate or stop invariant checks failing.
   */
  @SuppressWarnings( "UnusedReturnValue" )
  @Nonnull
  final Observer ensureDerivationHasObserver( @Nonnull final Observer observer )
  {
    final Observer watcher = newReadOnlyObserver( observer.getContext() );
    watcher.setState( ObserverState.UP_TO_DATE );
    observer.getDerivedValue().addObserver( watcher );
    watcher.getDependencies().add( observer.getDerivedValue() );
    return watcher;
  }

  /** Creates a READ_WRITE observer with a random name and a test reaction. */
  @Nonnull
  final Observer newReadWriteObserver( @Nonnull final ArezContext context )
  {
    final String name = ValueUtil.randomString();
    return new Observer( context, name, TransactionMode.READ_WRITE, new TestReaction() );
  }

  /** Creates the observer backing a trivial string ComputedValue. */
  @Nonnull
  final Observer newDerivation( @Nonnull final ArezContext context )
  {
    final ComputedValue<String> computedValue =
      new ComputedValue<>( context, ValueUtil.randomString(), () -> "", Objects::equals );
    return computedValue.getObserver();
  }

  /** Creates a READ_ONLY observer without any reaction attached. */
  @Nonnull
  final Observer newReadOnlyObserverWithNoReaction( @Nonnull final ArezContext context )
  {
    final String name = ValueUtil.randomString();
    return new Observer( context, name, TransactionMode.READ_ONLY, null );
  }

  /** Creates a READ_ONLY observer with a test reaction. */
  @Nonnull
  final Observer newReadOnlyObserver( @Nonnull final ArezContext context )
  {
    final String name = ValueUtil.randomString();
    return new Observer( context, name, TransactionMode.READ_ONLY, new TestReaction() );
  }

  /** Creates an observable with a random name. */
  @Nonnull
  protected final Observable newObservable( final ArezContext context )
  {
    final String name = ValueUtil.randomString();
    return new Observable( context, name );
  }

  /** Installs a transaction tracked by a fresh read-only observer. */
  final void setCurrentTransaction( @Nonnull final ArezContext context )
  {
    final Observer observer = newReadOnlyObserver( context );
    setCurrentTransaction( observer );
  }

  /** Installs a transaction in observer's mode, tracked by observer. */
  final void setCurrentTransaction( @Nonnull final Observer observer )
  {
    final ArezContext context = observer.getContext();
    final Transaction transaction =
      new Transaction( context, null, ValueUtil.randomString(), observer.getMode(), observer );
    context.setTransaction( transaction );
  }
}
package org.realityforge.arez;

import java.lang.reflect.Field;
import java.util.Objects;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import org.realityforge.guiceyloops.shared.ValueUtil;
import org.testng.Assert;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;

/**
 * Base class for Arez unit tests. Flips every dynamic configuration flag to
 * its "test" setting before each method and restores the defaults afterwards,
 * and provides small factories for observers, derivations and transactions.
 */
public abstract class AbstractArezTest
{
  @BeforeMethod
  protected void beforeTest()
    throws Exception
  {
    applyConfig( true );
    getProxyLogger().setLogger( new TestLogger() );
  }

  @AfterMethod
  protected void afterTest()
    throws Exception
  {
    applyConfig( false );
    getProxyLogger().setLogger( null );
  }

  /**
   * Applies the test (true) or default (false) configuration.
   * The purge-on-runaway flag is deliberately the inverse of the others.
   */
  private void applyConfig( final boolean testMode )
  {
    final ArezConfig.DynamicProvider cfg = getConfigProvider();
    cfg.setEnableNames( testMode );
    cfg.setVerboseErrorMessages( testMode );
    cfg.setCheckInvariants( testMode );
    cfg.setPurgeReactionsWhenRunawayDetected( !testMode );
    cfg.setEnforceTransactionType( testMode );
  }

  @Nonnull
  final TestLogger getTestLogger()
  {
    return (TestLogger) getProxyLogger().getLogger();
  }

  @Nonnull
  private ArezLogger.ProxyLogger getProxyLogger()
  {
    return (ArezLogger.ProxyLogger) ArezLogger.getLogger();
  }

  @Nonnull
  final ArezConfig.DynamicProvider getConfigProvider()
  {
    return (ArezConfig.DynamicProvider) ArezConfig.getProvider();
  }

  /**
   * Locates a declared field anywhere in the class hierarchy and makes it
   * accessible. Fails the test if no class up to Object declares it.
   */
  @Nonnull
  private Field getField( @Nonnull final Class<?> type, @Nonnull final String fieldName )
    throws NoSuchFieldException
  {
    for ( Class<?> c = type; null != c && Object.class != c; c = c.getSuperclass() )
    {
      try
      {
        final Field field = c.getDeclaredField( fieldName );
        field.setAccessible( true );
        return field;
      }
      catch ( final Throwable ignored )
      {
        // Not declared on this class - continue walking up the hierarchy.
      }
    }
    Assert.fail();
    throw new IllegalStateException();
  }

  /** Reflectively assigns a (possibly private/inherited) field on object. */
  final void setField( @Nonnull final Object object,
                       @Nonnull final String fieldName,
                       @Nullable final Object value )
    throws NoSuchFieldException, IllegalAccessException
  {
    getField( object.getClass(), fieldName ).set( object, value );
  }

  /**
   * Typically called to stop observer from being deactivate or stop invariant checks failing.
   */
  @Nonnull
  final Observer ensureDerivationHasObserver( @Nonnull final Observer observer )
  {
    final Observer watcher = newReadOnlyObserver( observer.getContext() );
    observer.getDerivedValue().addObserver( watcher );
    watcher.getDependencies().add( observer.getDerivedValue() );
    return watcher;
  }

  /** Creates a READ_WRITE observer with a random name and a test reaction. */
  @Nonnull
  final Observer newReadWriteObserver( @Nonnull final ArezContext context )
  {
    final String name = ValueUtil.randomString();
    return new Observer( context, name, TransactionMode.READ_WRITE, new TestReaction() );
  }

  /** Creates the observer backing a trivial string ComputedValue. */
  @Nonnull
  final Observer newDerivation( @Nonnull final ArezContext context )
  {
    final ComputedValue<String> computedValue =
      new ComputedValue<>( context, ValueUtil.randomString(), () -> "", Objects::equals );
    return computedValue.getObserver();
  }

  /** Creates a READ_ONLY observer without any reaction attached. */
  @Nonnull
  final Observer newReadOnlyObserverWithNoReaction( @Nonnull final ArezContext context )
  {
    final String name = ValueUtil.randomString();
    return new Observer( context, name, TransactionMode.READ_ONLY, null );
  }

  /** Creates a READ_ONLY observer with a test reaction. */
  @Nonnull
  final Observer newReadOnlyObserver( @Nonnull final ArezContext context )
  {
    final String name = ValueUtil.randomString();
    return new Observer( context, name, TransactionMode.READ_ONLY, new TestReaction() );
  }

  /** Installs a transaction tracked by a fresh read-only observer. */
  final void setCurrentTransaction( @Nonnull final ArezContext context )
  {
    final Observer observer = newReadOnlyObserver( context );
    setCurrentTransaction( observer );
  }

  /** Installs a transaction in observer's mode, tracked by observer. */
  final void setCurrentTransaction( @Nonnull final Observer observer )
  {
    final ArezContext context = observer.getContext();
    final Transaction transaction =
      new Transaction( context, null, ValueUtil.randomString(), observer.getMode(), observer );
    context.setTransaction( transaction );
  }
}
package edu.wustl.catissuecore.deid;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Set;

import org.jdom.Element;
import org.jdom.output.Format;

import edu.wustl.catissuecore.domain.Participant;
import edu.wustl.catissuecore.domain.SpecimenCollectionGroup;
import edu.wustl.catissuecore.domain.pathology.DeidentifiedSurgicalPathologyReport;
import edu.wustl.catissuecore.domain.pathology.IdentifiedSurgicalPathologyReport;
import edu.wustl.catissuecore.domain.pathology.ReportSection;
import edu.wustl.catissuecore.domain.pathology.TextContent;
import edu.wustl.catissuecore.reportloader.Parser;
import edu.wustl.catissuecore.reportloader.ReportLoaderUtil;
import edu.wustl.common.util.XMLPropertyHandler;
import edu.wustl.common.util.dbManager.DAOException;
import edu.wustl.common.util.logger.Logger;
import edu.wustl.catissuecore.util.global.Constants;

/**
 * @author vijay_pande
 * Thread that converts a single identified surgical pathology report into its
 * de-identified equivalent: it synthesizes the report text from its sections,
 * wraps it in a JDOM request document, hands it to the native de-identifier,
 * extracts the resulting text, and persists a new
 * {@link DeidentifiedSurgicalPathologyReport} linked back to the original.
 */
public class DeidReport extends Thread
{
    // Class-wide lock serializing deIdentify() across all DeidReport threads,
    // since the native call reads/writes fixed-name temp files in the working dir.
    public static final Object OBJ=new Object();

    // The identified report this thread de-identifies; its status is updated
    // to DEIDENTIFIED on success or DEID_PROCESS_FAILED on any failure.
    private IdentifiedSurgicalPathologyReport ispr;

    /**
     * Constructor for the DeidReport thread.
     *
     * @param ispr identified surgical pathology report to de-identify
     * @throws Exception declared for callers; this constructor only stores the report
     */
    public DeidReport(IdentifiedSurgicalPathologyReport ispr) throws Exception
    {
        this.ispr=ispr;
    }

    /**
     * @see java.lang.Thread#run()
     * Default run method of the thread, acting as the de-id pipeline:
     * synthesize text -> build XML request -> native de-identify -> extract text
     * and collection date -> save the de-identified report and update the original.
     * All failures are logged and mark the source report DEID_PROCESS_FAILED.
     */
    public void run()
    {
        try
        {
            Logger.out.info("De-identification process started for "+ispr.getId().toString());
            // Request document sent to the de-identifier; one <Dataset> root with one report.
            org.jdom.Document currentRequestDocument = new org.jdom.Document(new Element("Dataset"));
            SpecimenCollectionGroup scg=ispr.getSpecimenCollectionGroup();
            Participant participant=scg.getCollectionProtocolRegistration().getParticipant();
            // Replace the stored text with the section-ordered synthesized version
            // before building the request element.
            TextContent tc=ispr.getTextContent();
            tc.setData(synthesizeSPRText(ispr));
            Logger.out.info("ReportText is synthesized for report "+ispr.getId().toString());
            ispr.setTextContent(tc);
            Element reportElement = DeidUtils.buildReportElement(participant, ispr, ispr.getTextContent().getData());
            currentRequestDocument.getRootElement().addContent(reportElement);
            String deidRequest = DeidUtils.convertDocumentToString(currentRequestDocument, Format.getPrettyFormat());
            Logger.out.info("Calling native call for report "+ispr.getId().toString());
            String deidReport=deIdentify(deidRequest);
            Logger.out.info("Calling native finished successfully for report "+ispr.getId().toString());
            String deidText="";
            Logger.out.info("Extracting report text for report "+ispr.getId().toString());
            // DTD filename comes from external configuration; validates the de-id output.
            deidText=DeidUtils.extractReport(deidReport, XMLPropertyHandler.getValue("deid.dtd.filename"));
            Logger.out.info("Extracting report text finished for report "+ispr.getId().toString());
            Date deidCollectionDate=null;
            deidCollectionDate=DeidUtils.extractDate(deidText);
            Logger.out.info("Creating deidentified report for identified report id="+ispr.getId().toString());
            DeidentifiedSurgicalPathologyReport pathologyReport = createPathologyReport(ispr, deidText, deidCollectionDate);
            Logger.out.info("De-identification process finished for "+ispr.getId().toString());
            try
            {
                // Persist the new report first, then flip the original's status and
                // link it to its de-identified counterpart. DAO failures here are
                // logged but deliberately do not mark the report as failed.
                Logger.out.info("Saving deidentified report for identified report id="+ispr.getId().toString());
                ReportLoaderUtil.saveObject(pathologyReport);
                Logger.out.info("deidentified report saved for identified report id="+ispr.getId().toString());
                ispr.setReportStatus(Parser.DEIDENTIFIED);
                ispr.setDeidentifiedSurgicalPathologyReport(pathologyReport);
                Logger.out.info("Updating identified report report id="+ispr.getId().toString());
                ReportLoaderUtil.updateObject(ispr);
            }
            catch(DAOException daoEx)
            {
                Logger.out.error("Error while saving//updating Deidentified//Identified report ",daoEx);
            }
        }
        catch(Exception ex)
        {
            // Any failure in the pipeline: log, then best-effort mark the source
            // report as failed so it can be retried or inspected later.
            Logger.out.error("Deidentification process is failed:",ex);
            try
            {
                ispr.setReportStatus(Parser.DEID_PROCESS_FAILED);
                ReportLoaderUtil.updateObject(ispr);
            }
            catch(Exception e)
            {
                Logger.out.error("DeidReport: Updating Identified report status failed",e);
            }
            Logger.out.error("Upexpected error in DeidReport thread", ex);
        }
    }

    /**
     * Build the de-identified report domain object from the de-identified text.
     *
     * @param ispr identified surgical pathology report the new report derives from
     * @param deidText de-identified text
     * @param deidCollectedDate collection date and time extracted from the de-identified text
     * @return DeidentifiedSurgicalPathologyReport populated and linked to a new TextContent
     * @throws Exception a generic exception occurred while creating the de-identified report instance
     */
    private DeidentifiedSurgicalPathologyReport createPathologyReport(IdentifiedSurgicalPathologyReport ispr, String deidText, Date deidCollectedDate) throws Exception
    {
        DeidentifiedSurgicalPathologyReport deidReport=new DeidentifiedSurgicalPathologyReport();
        // Only carry over a collection date when the source report had one;
        // the date used is the one extracted from the de-identified text.
        if (ispr.getCollectionDateTime() != null)
        {
            deidReport.setCollectionDateTime(deidCollectedDate);
        }
        // deidReport.setAccessionNumber(ispr.getAccessionNumber());
        deidReport.setActivityStatus(ispr.getActivityStatus());
        // New report waits for the next pipeline stage (XML generation).
        deidReport.setReportStatus(Parser.PENDING_FOR_XML);
        deidReport.setIsQuanrantined(Constants.ACTIVITY_STATUS_ACTIVE);
        deidReport.setSpecimenCollectionGroup(ispr.getSpecimenCollectionGroup());
        TextContent tc=new TextContent();
        tc.setData(deidText);
        tc.setSurgicalPathologyReport(deidReport);
        deidReport.setSource(ispr.getSource());
        deidReport.setTextContent(tc);
        deidReport.setIsFlagForReview(new Boolean(false));
        return deidReport;
    }

    /**
     * Synthesize the full report text by concatenating the report's sections in
     * the priority order defined by {@code DeIDPipelineManager.sectionPriority},
     * each prefixed with its bracketed section header.
     *
     * @param ispr identified surgical pathology report
     * @return synthesized surgical pathology report text (trimmed)
     * @throws Exception a generic exception occurred while synthesizing report text
     */
    private String synthesizeSPRText(final IdentifiedSurgicalPathologyReport ispr) throws Exception
    {
        String docText = "";
        // Get report sections for each report.
        Set iss=ispr.getTextContent().getReportSectionCollection();
        // Map of section abbreviation -> section text for ordered assembly below.
        HashMap <String,String>nameToText = new HashMap<String, String>();
        if(iss!=null)
        {
            for (Iterator i = iss.iterator(); i.hasNext();)
            {
                // Synthesize sections.
                ReportSection rs = (ReportSection) i.next();
                String abbr = rs.getName();
                String text = rs.getDocumentFragment();
                nameToText.put(abbr, text);
            }
        }
        else
        {
            Logger.out.info("NULL report section collection found in synthesizeSPRText method");
        }
        // Emit sections in the pipeline's configured priority order; sections the
        // report does not contain are simply skipped.
        for (int x = 0; x < DeIDPipelineManager.sectionPriority.size(); x++)
        {
            String abbr = (String) DeIDPipelineManager.sectionPriority.get(x);
            String sectionHeader = (String) DeIDPipelineManager.abbrToHeader.get(abbr);
            if (nameToText.containsKey(abbr))
            {
                String sectionText = (String) nameToText.get(abbr);
                docText += "\n[" + sectionHeader + "]" + "\n\n" + sectionText + "\n\n";
            }
        }
        return docText.trim();
    }

    /**
     * Prepare input files and invoke the native de-identification call, returning
     * the de-identified output read back from disk.
     *
     * <p>The whole exchange is serialized on {@link #OBJ} because the native call
     * communicates through fixed-name files ("predeid.xml" / "postdeid.tmp") in the
     * working directory; concurrent threads would clobber each other's files.</p>
     *
     * @param text text to be de-identified
     * @return de-identified text
     * @throws Exception occurred while calling the native method for de-identification
     */
    public String deIdentify(String text) throws Exception
    {
        String output = "";
        try
        {
            synchronized(OBJ)
            {
                File f = new File("predeid.xml");
                File f2 = new File("postdeid.tmp");
                FileWriter fw = new FileWriter(f);
                fw.write(text);
                fw.close();
                // "?XML" suffix selects XML output mode for the native de-identifier.
                DeIDPipelineManager.deid.createDeidentifier(f.getAbsolutePath(), f2.getAbsolutePath()+ "?XML", DeIDPipelineManager.configFileName);
                BufferedReader br = new BufferedReader(new FileReader(f2));
                String line = "";
                while ((line = br.readLine()) != null)
                {
                    output += line + "\n";
                }
                br.close();
                // Clean up both temp files so the next caller starts fresh.
                f.delete();
                f2.delete();
                // NOTE(review): no thread ever wait()s on OBJ in this class, so this
                // notifyAll() appears to be a no-op - confirm before removing.
                OBJ.notifyAll();
            }
        }
        catch (IOException ex)
        {
            Logger.out.error("File system error occured while creating or deleting temporary files for deidentification",ex);
            throw ex;
        }
        catch (Exception ex)
        {
            Logger.out.error("Severe error occured in the native method call for deidentification",ex);
            throw ex;
        }
        return output;
    }
}
package main;

import base.CaseConfig;
import base.CaseServer;
import base.TestException;
import main.server.CaseServerImpl;
import base.TestCase;
import base.TestCasesFactory;

/**
 * Runs a sequence of {@link TestCase}s, each against a freshly started
 * {@link CaseServer}, stopping the server after every case.
 *
 * @author v.chibrikov
 */
public class CaseProcessor {
    private final CaseServer caseServer;
    private final TestCase[] testCases;
    private final CaseConfig cfg;

    /**
     * Creates a processor with the default server implementation and the
     * factory-provided set of test cases for the given configuration.
     *
     * @param cfg configuration used for the server, the cases and timing
     */
    public CaseProcessor(CaseConfig cfg) {
        this.caseServer = new CaseServerImpl(cfg);
        this.cfg = cfg;
        testCases = TestCasesFactory.createTestCases(cfg);
    }

    /**
     * Creates a processor from explicit collaborators (useful for testing).
     *
     * @param cfg        configuration passed to each test case
     * @param caseServer server to run cases against
     * @param testCases  cases to execute, in order
     */
    public CaseProcessor(CaseConfig cfg, CaseServer caseServer, TestCase... testCases) {
        this.cfg = cfg;
        this.caseServer = caseServer;
        this.testCases = testCases;
    }

    /**
     * Executes every test case against a freshly started server.
     *
     * <p>The server is always stopped after each case (via {@code finally}),
     * even when the case fails or throws.</p>
     *
     * @return {@code true} if all cases passed; {@code false} on the first
     *         failed case, failed server start, or error
     */
    public boolean process() {
        for (TestCase client : testCases) {
            try {
                caseServer.run();
                if (caseServer.joinTillStarted()) {
                    boolean result = client.test(cfg);
                    if (!result) return false;
                } else {
                    // Server never reached the started state; abort the suite.
                    return false;
                }
                // Give the server time to wind down before the next case starts.
                Thread.sleep(cfg.getStopWaitPeriod());
            } catch (TestException | InterruptedException e) {
                if (e instanceof InterruptedException) {
                    // Fix: restore the interrupt flag so callers up the stack can
                    // still observe that this thread was interrupted.
                    Thread.currentThread().interrupt();
                }
                System.out.println(e.getMessage());
                return false;
            } finally {
                caseServer.stop();
            }
        }
        return true;
    }

    /** @return the server's captured output. */
    public String getServerOut() {
        return caseServer.getOut();
    }
}
/**
 * LeetCode 198 "House Robber": maximize the sum of chosen elements from an
 * array subject to never choosing two adjacent elements.
 */
public class HouseRobber {
    /**
     * Returns the maximum non-adjacent sum of {@code nums}.
     *
     * <p>Improved from the original O(n)-space dp array to the standard
     * two-variable rolling recurrence: at each house the best total is either
     * "skip it" (best up to i-1) or "take it" (value + best up to i-2).
     * Runs in O(n) time and O(1) space.</p>
     *
     * @param nums loot available at each house; may be {@code null} or empty
     * @return the best achievable total, or 0 for {@code null}/empty input
     */
    public static int rob(int[] nums) {
        if (nums == null || nums.length == 0) return 0;
        int prev2 = 0; // best total using houses up to i-2
        int prev1 = 0; // best total using houses up to i-1
        for (int value : nums) {
            int best = Math.max(prev1, value + prev2);
            prev2 = prev1;
            prev1 = best;
        }
        return prev1;
    }

    public static void main(String[] args) {
        int[] arr1 = new int[] {1,2,3,1};
        int[] arr2 = new int[] {2,7,9,3,1};
        System.out.println("rob[1,2,3,1] = " + rob(arr1));
        System.out.println("rob[2,7,9,3,1] = " + rob(arr2));
    }
}
/*
 * @author <a href="mailto:novotny@aei.mpg.de">Jason Novotny</a>
 * @version $Id$
 */
package org.gridlab.gridsphere.layout;

import org.gridlab.gridsphere.core.persistence.PersistenceManagerException;
import org.gridlab.gridsphere.layout.event.PortletFrameEvent;
import org.gridlab.gridsphere.layout.event.PortletFrameListener;
import org.gridlab.gridsphere.layout.event.PortletTitleBarEvent;
import org.gridlab.gridsphere.layout.event.PortletTitleBarListener;
import org.gridlab.gridsphere.layout.event.impl.PortletFrameEventImpl;
import org.gridlab.gridsphere.portlet.*;
import org.gridlab.gridsphere.portletcontainer.GridSphereEvent;
import org.gridlab.gridsphere.portletcontainer.GridSphereProperties;
import org.gridlab.gridsphere.portletcontainer.PortletDataManager;
import org.gridlab.gridsphere.portletcontainer.PortletInvoker;
import org.gridlab.gridsphere.portletcontainer.impl.SportletDataManager;

import java.io.IOException;
import java.io.PrintWriter;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

/**
 * <code>PortletFrame</code> provides the visual representation of a portlet. A portlet frame
 * contains a portlet title bar unless visible is set to false.
 */
public class PortletFrame extends BasePortletComponent implements Serializable, PortletTitleBarListener, Cloneable {

    // renderPortlet is true in doView and false on minimized
    private boolean renderPortlet = true;

    // Fully qualified class name of the portlet rendered inside this frame.
    private String portletClass = null;

    // Title bar shown above the portlet content; stays null for transparent frames.
    private PortletTitleBar titleBar = null;

    // Registered PortletFrameListeners notified via fireFrameEvent().
    private List listeners = new ArrayList();

    // Renders error/exception information in place of the portlet content.
    private PortletErrorFrame errorFrame = new PortletErrorFrame();

    // When true the frame renders without border styling and without a title bar.
    private boolean transparent = false;

    // CSS pixel padding applied inside the content cell ("" means none).
    private String innerPadding = "";

    // CSS pixel padding/cellspacing applied around the frame table ("" means none).
    private String outerPadding = "";

    // Required role as configured (String form); resolved to requiredRole in init().
    private String roleString = "GUEST";

    // Minimum role a user needs to see/interact with this portlet.
    private PortletRole requiredRole = PortletRole.GUEST;

    // Not serialized; re-obtained from the singleton on each instance.
    private transient PortletDataManager dataManager = SportletDataManager.getInstance();

    /**
     * Constructs an instance of PortletFrame
     */
    public PortletFrame() {
    }

    /**
     * Sets the portlet class contained by this portlet frame
     *
     * @param portletClass the fully qualified portlet classname
     */
    public void setPortletClass(String portletClass) {
        this.portletClass = portletClass;
    }

    /**
     * Returns the portlet class contained by this portlet frame
     *
     * @return the fully qualified portlet classname
     */
    public String getPortletClass() {
        return portletClass;
    }

    /**
     * Sets the inner padding of the portlet frame
     *
     * @param innerPadding the inner padding
     */
    public void setInnerPadding(String innerPadding) {
        this.innerPadding = innerPadding;
    }

    /**
     * Returns the inner padding of the portlet frame
     *
     * @return the inner padding
     */
    public String getInnerPadding() {
        return innerPadding;
    }

    /**
     * Sets the outer padding of the portlet frame
     *
     * @param outerPadding the outer padding
     */
    public void setOuterPadding(String outerPadding) {
        this.outerPadding = outerPadding;
    }

    /**
     * Returns the outer padding of the portlet frame
     *
     * @return the outer padding
     */
    public String getOuterPadding() {
        return outerPadding;
    }

    /**
     * If set to <code>true</code> the portlet is rendered transparently without a
     * defining border and title bar. This is used for example for the LogoutPortlet
     *
     * @param transparent if set to <code>true</code>, portlet frame is displayed transparently, <code>false</code> otherwise
     */
    public void setTransparent(boolean transparent) {
        this.transparent = transparent;
    }

    /**
     * If set to <code>true</code> the portlet is rendered transparently without a
     * defining border and title bar. This is used for example for the LogoutPortlet
     *
     * @return <code>true</code> if the portlet frame is displayed transparently, <code>false</code> otherwise
     */
    public boolean getTransparent() {
        return this.transparent;
    }

    /**
     * Allows a required role to be associated with viewing this portlet
     *
     * @param roleString the required portlet role expressed as a <code>String</code>
     */
    public void setRequiredRole(String roleString) {
        this.roleString = roleString;
    }

    /**
     * Allows a required role to be associated with viewing this portlet
     *
     * @return the required portlet role expressed as a <code>String</code>
     */
    public String getRequiredRole() {
        return roleString;
    }

    /**
     * Initializes the portlet frame component. Since the components are isolated
     * after Castor unmarshalls from XML, the ordering is determined by a
     * passed in List containing the previous portlet components in the tree.
     *
     * @param list a <code>List</code> of component identifiers
     * @return a <code>List</code> of updated component identifiers
     * @see ComponentIdentifier
     */
    public List init(List list) {
        list = super.init(list);
        // Register this frame in the component tree; its ID is its position in the list.
        ComponentIdentifier compId = new ComponentIdentifier();
        compId.setPortletComponent(this);
        compId.setPortletClass(portletClass);
        compId.setComponentID(list.size());
        compId.setClassName(this.getClass().getName());
        list.add(compId);
        // if the portlet frame is transparent then it doesn't get a title bar
        if (transparent == false) titleBar = new PortletTitleBar();
        if (titleBar != null) {
            titleBar.setPortletClass(portletClass);
            list = titleBar.init(list);
            // Listen for minimize/maximize/restore actions from the title bar.
            titleBar.addTitleBarListener(this);
        }
        try {
            requiredRole = PortletRole.toPortletRole(roleString);
        } catch (IllegalArgumentException e) {
            // Unknown role string configured - fall back to the most permissive role.
            requiredRole = PortletRole.GUEST;
        }
        return list;
    }

    /**
     * Adds a portlet frame listener to be notified of portlet frame events
     *
     * @param listener a portlet frame listener
     * @see PortletFrameEvent
     */
    public void addFrameListener(PortletFrameListener listener) {
        listeners.add(listener);
    }

    /**
     * Fires a frame event notification to all registered frame listeners
     *
     * @param event a portlet frame event
     * @throws PortletLayoutException if a layout error occurs
     */
    protected void fireFrameEvent(PortletFrameEvent event) throws PortletLayoutException {
        Iterator it = listeners.iterator();
        PortletFrameListener l;
        while (it.hasNext()) {
            l = (PortletFrameListener) it.next();
            l.handleFrameEvent(event);
        }
    }

    /**
     * Notifies this listener that a portlet title bar event has occurred.
     * Window-modify actions toggle {@link #renderPortlet} and are re-broadcast
     * to the frame listeners as the corresponding frame event.
     *
     * @param event the portlet title bar event
     * @throws PortletLayoutException if a portlet layout exception occurs during processing
     */
    public void handleTitleBarEvent(PortletTitleBarEvent event) throws PortletLayoutException {
        if (event.getAction() == PortletTitleBarEvent.Action.WINDOW_MODIFY) {
            PortletWindow.State state = event.getState();
            PortletFrameEvent evt = null;
            if (state == PortletWindow.State.MINIMIZED) {
                // Minimized frames skip their content in doRender().
                renderPortlet = false;
                evt = new PortletFrameEventImpl(PortletFrameEvent.Action.FRAME_MINIMIZED, COMPONENT_ID);
            } else if (state == PortletWindow.State.RESIZING) {
                renderPortlet = true;
                evt = new PortletFrameEventImpl(PortletFrameEvent.Action.FRAME_RESTORED, COMPONENT_ID);
            } else if (state == PortletWindow.State.MAXIMIZED) {
                renderPortlet = true;
                evt = new PortletFrameEventImpl(PortletFrameEvent.Action.FRAME_MAXIMIZED, COMPONENT_ID);
            }
            // NOTE(review): evt stays null for any other window state, which would
            // pass null to the listeners - confirm other states cannot occur here.
            fireFrameEvent(evt);
        }
    }

    /**
     * Performs an action on this portlet frame component
     *
     * @param event a gridsphere event
     * @throws PortletLayoutException if a layout error occurs during rendering
     * @throws IOException if an I/O error occurs during rendering
     */
    public void actionPerformed(GridSphereEvent event) throws PortletLayoutException, IOException {
        super.actionPerformed(event);

        // process events
        PortletRequest req = event.getPortletRequest();

        // Silently skip the action if the user's role is below the required role.
        PortletRole role = req.getRole();
        if (role.compare(role, requiredRole) < 0) return;

        PortletResponse res = event.getPortletResponse();
        req.setAttribute(GridSphereProperties.PORTLETID, portletClass);

        // Mode resolution: explicit request parameter wins, then the title bar's
        // current mode, then VIEW as the default.
        String newmode = req.getParameter(GridSphereProperties.PORTLETMODE);
        if (newmode != null) {
            req.setMode(Portlet.Mode.toMode(newmode));
        } else {
            if (titleBar != null) {
                Portlet.Mode mode = titleBar.getPortletMode();
                req.setMode(mode);
            } else {
                req.setMode(Portlet.Mode.VIEW);
            }
        }

        // Set the portlet data
        User user = req.getUser();
        PortletData data = null;
        if (!(user instanceof GuestUser)) {
            try {
                data = dataManager.getPortletData(req.getUser(), portletClass);
                req.setAttribute(GridSphereProperties.PORTLETDATA, data);
            } catch (PersistenceManagerException e) {
                errorFrame.setError("Unable to retrieve user's portlet data!", e);
            }
        }

        // now perform actionPerformed on Portlet if it has an action
        if (event.hasAction()) {
            DefaultPortletAction action = event.getAction();
            // NOTE(review): reference comparison (!=) on a String, not equals() -
            // relies on "" being interned; confirm action names are never
            // constructed as empty non-interned strings.
            if (action.getName() != "") {
                try {
                    PortletInvoker.actionPerformed(portletClass, action, req, res);
                } catch (PortletException e) {
                    errorFrame.setException(e);
                }
                // The invoked portlet may have stored an error message for display.
                String message = (String)req.getAttribute(GridSphereProperties.PORTLETERROR);
                if (message != null) {
                    errorFrame.setMessage(message);
                }
            }
            // in case portlet mode got reset
        }
        if (titleBar != null) titleBar.setPortletMode(req.getMode());
    }

    /**
     * Renders the portlet frame component
     *
     * @param event a gridsphere event
     * @throws PortletLayoutException if a layout error occurs during rendering
     * @throws IOException if an I/O error occurs during rendering
     */
    public void doRender(GridSphereEvent event) throws PortletLayoutException, IOException {
        super.doRender(event);
        PortletRequest req = event.getPortletRequest();

        // Render nothing at all if the user's role is below the required role.
        PortletRole role = req.getRole();
        if (role.compare(role, requiredRole) < 0) return;

        PortletResponse res = event.getPortletResponse();
        PrintWriter out = res.getWriter();
        req.setAttribute(GridSphereProperties.PORTLETID, portletClass);

        // A pending error replaces the entire frame output.
        if (errorFrame.hasError()) {
            errorFrame.doRender(event);
            return;
        }

        // Set the portlet data
        User user = req.getUser();
        PortletData data = null;
        if (!(user instanceof GuestUser)) {
            try {
                data = dataManager.getPortletData(req.getUser(), portletClass);
                req.setAttribute(GridSphereProperties.PORTLETDATA, data);
            } catch (PersistenceManagerException e) {
                errorFrame.setError("Unable to retrieve user's portlet data", e);
            }
        }

        ///// begin portlet frame
        out.println("<!-- PORTLET STARTS HERE -->");
        //out.println("<div class=\"window-main\">");
        out.print("<table ");
        if (getOuterPadding().equals("")) {
            out.print(" cellspacing=\"0\" class=\"window-main\" ");
        } else {
            //out.print("border=\"0\" cellpadding=\"0\" cellspacing=\"0\" width=\"100%\""); // this is the main table around one portlet
            out.print(" cellspacing=\""+getOuterPadding()+"\" style=\"padding:"+getOuterPadding()+"px\" class=\"window-main\" "); // this is the main table around one portlet
            //out.print("cellpadding=\""+getOuterPadding()+"\" class=\"window-main\" "); // this is the main table around one portlet
        }
        out.println(">");

        // Render title bar
        if (titleBar != null) {
            titleBar.doRender(event);
            if (titleBar.hasRenderError()) {
                errorFrame.setMessage(titleBar.getErrorMessage());
            }
        }

        if (renderPortlet) {
            if (!transparent) {
                out.print("<tr><td ");       // now the portlet content begins
                if (!getInnerPadding().equals("")) {
                    out.print("style=\"padding:" + getInnerPadding() + "px\"");
                }
                out.println(" class=\"window-content\"> " );
            } else {
                out.println("<tr><td>");
            }
            if (errorFrame.hasError()) {
                errorFrame.doRender(event);
            } else {
                // Delegate the actual portlet markup to the portlet's service method;
                // failures there are shown via the error frame instead.
                try {
                    PortletInvoker.service(portletClass, req, res);
                } catch (PortletException e) {
                    errorFrame.setError("Unable to invoke service method", e);
                    errorFrame.doRender(event);
                }
            }
            out.println("</td></tr>");
        } else {
            // Minimized: emit an empty content row so the title bar keeps its shape.
            out.println("<tr><td class=\"window-content-minimize\">");       // now the portlet content begins
            out.println("</td></tr>");
        }
        out.println("</table>");
        out.println("<!--- PORTLET ENDS HERE -->");
    }

    /**
     * Creates a copy of this frame. The title bar is deep-cloned; the error frame
     * and listener list are shared with the original.
     */
    public Object clone() throws CloneNotSupportedException {
        PortletFrame f = (PortletFrame)super.clone();
        f.titleBar = (this.titleBar == null) ? null : (PortletTitleBar)this.titleBar.clone();
        f.outerPadding = this.outerPadding;
        f.errorFrame = this.errorFrame;
        f.transparent = this.transparent;
        f.innerPadding = this.innerPadding;
        f.portletClass = this.portletClass;
        f.roleString = this.roleString;
        f.requiredRole = this.requiredRole;
        f.renderPortlet = this.renderPortlet;
        return f;
    }
}
package org.irmacard.androidcardproxy; import java.io.StringReader; import java.io.UnsupportedEncodingException; import java.util.Timer; import java.util.TimerTask; import net.sourceforge.scuba.smartcards.CardServiceException; import net.sourceforge.scuba.smartcards.IsoDepCardService; import net.sourceforge.scuba.smartcards.ProtocolCommand; import net.sourceforge.scuba.smartcards.ProtocolResponse; import net.sourceforge.scuba.smartcards.ProtocolResponses; import net.sourceforge.scuba.smartcards.ResponseAPDU; import org.apache.http.entity.StringEntity; import org.irmacard.android.util.pindialog.EnterPINDialogFragment; import org.irmacard.android.util.pindialog.EnterPINDialogFragment.PINDialogListener; import org.irmacard.androidcardproxy.messages.EventArguments; import org.irmacard.androidcardproxy.messages.PinResultArguments; import org.irmacard.androidcardproxy.messages.ReaderMessage; import org.irmacard.androidcardproxy.messages.ReaderMessageDeserializer; import org.irmacard.androidcardproxy.messages.ResponseArguments; import org.irmacard.androidcardproxy.messages.TransmitCommandSetArguments; import org.irmacard.idemix.IdemixService; import android.app.Activity; import android.app.AlertDialog; import android.app.Dialog; import android.app.DialogFragment; import android.app.PendingIntent; import android.content.DialogInterface; import android.content.Intent; import android.content.IntentFilter; import android.net.Uri; import android.nfc.NfcAdapter; import android.nfc.Tag; import android.nfc.tech.IsoDep; import android.os.AsyncTask; import android.os.Bundle; import android.os.CountDownTimer; import android.os.Handler; import android.os.Message; import android.util.Log; import android.view.Menu; import android.view.View; import android.widget.ImageView; import android.widget.TextView; import android.widget.Toast; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.google.gson.JsonParser; import com.google.gson.stream.JsonReader; import 
com.google.zxing.integration.android.IntentIntegrator; import com.google.zxing.integration.android.IntentResult; import com.loopj.android.http.AsyncHttpClient; import com.loopj.android.http.AsyncHttpResponseHandler; public class MainActivity extends Activity implements PINDialogListener { private String TAG = "CardProxyMainActivity"; private NfcAdapter nfcA; private PendingIntent mPendingIntent; private IntentFilter[] mFilters; private String[][] mTechLists; // PIN handling private int tries = -1; // State variables private IsoDep lastTag = null; private int activityState = STATE_IDLE; // New states private static final int STATE_IDLE = 1; private static final int STATE_CONNECTING_TO_SERVER = 2; private static final int STATE_CONNECTED = 3; private static final int STATE_READY = 4; private static final int STATE_COMMUNICATING = 5; private static final int STATE_WAITING_FOR_PIN = 6; // Timer for testing card connectivity Timer timer; private static final int CARD_POLL_DELAY = 2000; // Timer for briefly displaying feedback messages on CardProxy CountDownTimer cdt; private static final int FEEDBACK_SHOW_DELAY = 10000; private boolean showingFeedback = false; // Counter for number of connection tries private static final int MAX_RETRIES = 3; private int retry_counter = 0; private void setState(int state) { Log.i(TAG,"Set state: " + state); activityState = state; switch (activityState) { case STATE_IDLE: lastTag = null; break; default: break; } setUIForState(); } private void setUIForState() { int imageResource = 0; int statusTextResource = 0; int feedbackTextResource = 0; switch (activityState) { case STATE_IDLE: imageResource = R.drawable.irma_icon_place_card_520px; statusTextResource = R.string.status_idle; break; case STATE_CONNECTING_TO_SERVER: imageResource = R.drawable.irma_icon_place_card_520px; statusTextResource = R.string.status_connecting; break; case STATE_CONNECTED: imageResource = R.drawable.irma_icon_place_card_520px; statusTextResource = 
R.string.status_connected; feedbackTextResource = R.string.feedback_waiting_for_card; break; case STATE_READY: imageResource = R.drawable.irma_icon_card_found_520px; statusTextResource = R.string.status_ready; break; case STATE_COMMUNICATING: imageResource = R.drawable.irma_icon_card_found_520px; statusTextResource = R.string.status_communicating; break; case STATE_WAITING_FOR_PIN: imageResource = R.drawable.irma_icon_card_found_520px; statusTextResource = R.string.status_waitingforpin; break; default: break; } ((TextView)findViewById(R.id.status_text)).setText(statusTextResource); if(!showingFeedback) ((ImageView)findViewById(R.id.statusimage)).setImageResource(imageResource); if(feedbackTextResource != 0) ((TextView)findViewById(R.id.status_text)).setText(feedbackTextResource); } private void setFeedback(String message, String state) { int imageResource = 0; setUIForState(); if (state.equals("success")) { imageResource = R.drawable.irma_icon_ok_520px; } if (state.equals("warning")) { imageResource = R.drawable.irma_icon_warning_520px; } if (state.equals("failure")) { imageResource = R.drawable.irma_icon_missing_520px; } ((TextView)findViewById(R.id.feedback_text)).setText(message); if(imageResource != 0) { ((ImageView)findViewById(R.id.statusimage)).setImageResource(imageResource); showingFeedback = true; } if(cdt != null) cdt.cancel(); cdt = new CountDownTimer(FEEDBACK_SHOW_DELAY, 1000) { public void onTick(long millisUntilFinished) { } public void onFinish() { showingFeedback = false; ((TextView)findViewById(R.id.feedback_text)).setText(""); setUIForState(); } }.start(); } @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); // NFC stuff nfcA = NfcAdapter.getDefaultAdapter(getApplicationContext()); mPendingIntent = PendingIntent.getActivity(this, 0, new Intent(this, getClass()).addFlags(Intent.FLAG_ACTIVITY_SINGLE_TOP), 0); // Setup an intent filter for all TECH based 
dispatches IntentFilter tech = new IntentFilter(NfcAdapter.ACTION_TECH_DISCOVERED); mFilters = new IntentFilter[] { tech }; // Setup a tech list for all IsoDep cards mTechLists = new String[][] { new String[] { IsoDep.class.getName() } }; setState(STATE_IDLE); timer = new Timer(); timer.scheduleAtFixedRate(new CardPollerTask(), CARD_POLL_DELAY, CARD_POLL_DELAY); } @Override protected void onPause() { super.onPause(); if (nfcA != null) { nfcA.disableForegroundDispatch(this); } } @Override protected void onResume() { super.onResume(); Log.i(TAG, "Action: " + getIntent().getAction()); if (NfcAdapter.ACTION_TECH_DISCOVERED.equals(getIntent().getAction())) { processIntent(getIntent()); } else if (Intent.ACTION_VIEW.equals(getIntent().getAction()) && "cardproxy".equals(getIntent().getScheme())) { // TODO: this is legacy code to have the cardproxy app respond to cardproxy:// urls. This doesn't // work anymore, should check whether we want te re-enable it. Uri uri = getIntent().getData(); String startURL = "http://" + uri.getHost() + ":" + uri.getPort() + uri.getPath(); gotoConnectingState(startURL); } if (nfcA != null) { nfcA.enableForegroundDispatch(this, mPendingIntent, mFilters, mTechLists); } } @Override protected void onDestroy() { super.onDestroy(); } private static final int MESSAGE_STARTGET = 1; String currentReaderURL = ""; int currentHandlers = 0; Handler handler = new Handler() { @Override public void handleMessage(Message msg) { switch (msg.what) { case MESSAGE_STARTGET: Log.i(TAG,"MESSAGE_STARTGET received in handler!"); AsyncHttpClient client = new AsyncHttpClient(); client.setTimeout(50000); // timeout of 50 seconds client.setUserAgent("org.irmacard.androidcardproxy"); client.get(MainActivity.this, currentReaderURL, new AsyncHttpResponseHandler() { @Override public void onSuccess(int arg0, String responseData) { if (!responseData.equals("")) { //Toast.makeText(MainActivity.this, responseData, Toast.LENGTH_SHORT).show(); handleChannelData(responseData); } // 
// Do a new request, but only if no new requests have started
                        // in the mean time
                        if (currentHandlers <= 1) {
                            Message newMsg = new Message();
                            newMsg.what = MESSAGE_STARTGET;
                            if(!(activityState == STATE_IDLE))
                                handler.sendMessageDelayed(newMsg, 200);
                        }
                    }

                    @Override
                    public void onFailure(Throwable arg0, String arg1) {
                        // A failed GET only matters while we are still trying to reach
                        // the server; in any other state just reset the retry budget.
                        if(activityState != STATE_CONNECTING_TO_SERVER) {
                            retry_counter = 0;
                            return;
                        }
                        retry_counter += 1;
                        // We should try again, but only if no new requests have started
                        // and we should wait a bit longer
                        if (currentHandlers <= 1 && retry_counter < MAX_RETRIES) {
                            Message newMsg = new Message();
                            setFeedback("Trying to reach server again...", "none");
                            newMsg.what = MESSAGE_STARTGET;
                            handler.sendMessageDelayed(newMsg, 5000);
                        } else {
                            // Retry budget exhausted: give up and go back to idle.
                            retry_counter = 0;
                            setFeedback("Failed to connect to server", "warning");
                            setState(STATE_IDLE);
                        }
                    }

                    // Track the number of in-flight HTTP handlers so the retry logic
                    // above can avoid starting duplicate requests.
                    public void onStart() {
                        currentHandlers += 1;
                    };

                    public void onFinish() {
                        currentHandlers -= 1;
                    };
                });
                break;
            default:
                break;
            }
        }
    };

    // URL to POST reader responses/events to; supplied by the server in the
    // first channel message (null until then).
    private String currentWriteURL = null;
    // Most recent message parsed from the channel; reused when PIN entry
    // completes or is cancelled.
    private ReaderMessage lastReaderMessage = null;

    // Handles one chunk of data received on the channel. While in
    // STATE_CONNECTING_TO_SERVER the first message carries the write URL;
    // afterwards messages are commands/events for the card reader.
    private void handleChannelData(String data) {
        Gson gson = new GsonBuilder().
                registerTypeAdapter(ProtocolCommand.class, new ProtocolCommandDeserializer()).
                registerTypeAdapter(ReaderMessage.class, new ReaderMessageDeserializer()).
                create();
        if (activityState == STATE_CONNECTING_TO_SERVER) {
            // this is the message that containts the url to write to
            JsonParser p = new JsonParser();
            String write_url = p.parse(data).getAsJsonObject().get("write_url").getAsString();
            currentWriteURL = write_url;
            setState(STATE_CONNECTED);
            // Signal to the other end that we we are ready accept commands
            postMessage(
                    new ReaderMessage(ReaderMessage.TYPE_EVENT,
                            ReaderMessage.NAME_EVENT_CARDREADERFOUND, null,
                            new EventArguments().withEntry("type", "phone")));
        } else {
            ReaderMessage rm;
            try {
                Log.i(TAG, "Length (real): " + data);
                // Lenient parsing: channel data may be slightly malformed JSON.
                JsonReader reader = new JsonReader(new StringReader(data));
                reader.setLenient(true);
                rm = gson.fromJson(reader, ReaderMessage.class);
            } catch(Exception e) {
                e.printStackTrace();
                return;
            }
            lastReaderMessage = rm;
            if (rm.type.equals(ReaderMessage.TYPE_COMMAND)) {
                Log.i(TAG, "Got command message");
                if (activityState != STATE_READY) {
                    // FIXME: Only when ready can we handle commands
                    throw new RuntimeException(
                            "Illegal command from server, no card currently connected");
                }
                if (rm.name.equals(ReaderMessage.NAME_COMMAND_AUTHPIN)) {
                    // PIN authentication needs user interaction first.
                    askForPIN();
                } else {
                    setState(STATE_COMMUNICATING);
                    new ProcessReaderMessage().execute(new ReaderInput(lastTag, rm));
                }
            } else if (rm.type.equals(ReaderMessage.TYPE_EVENT)) {
                EventArguments ea = (EventArguments)rm.arguments;
                if (rm.name.equals(ReaderMessage.NAME_EVENT_STATUSUPDATE)) {
                    String state = ea.data.get("state");
                    String feedback = ea.data.get("feedback");
                    if (state != null) {
                        setFeedback(feedback, state);
                    }
                } else if(rm.name.equals(ReaderMessage.NAME_EVENT_TIMEOUT)) {
                    setState(STATE_IDLE);
                } else if(rm.name.equals(ReaderMessage.NAME_EVENT_DONE)) {
                    setState(STATE_IDLE);
                }
            }
        }
    }

    // Serializes the given message as JSON and POSTs it to the server's write
    // URL. Silently does nothing while the write URL is still unknown.
    private void postMessage(ReaderMessage rm) {
        if (currentWriteURL != null) {
            Gson gson = new GsonBuilder().
                    registerTypeAdapter(ProtocolResponse.class, new ProtocolResponseSerializer()).
                    create();
            String data = gson.toJson(rm);
            AsyncHttpClient client = new AsyncHttpClient();
            try {
                client.post(MainActivity.this, currentWriteURL, new StringEntity(data)
                        , "application/json", new AsyncHttpResponseHandler() {
                    @Override
                    public void onSuccess(int arg0, String arg1) {
                        // TODO: Should there be some simple user feedback?
                        super.onSuccess(arg0, arg1);
                    }

                    @Override
                    public void onFailure(Throwable arg0, String arg1) {
                        // TODO: Give proper feedback to the user that we are unable to send stuff
                        super.onFailure(arg0, arg1);
                    }
                });
            } catch (UnsupportedEncodingException e) {
                // Ignore, shouldn't happen ;)
                e.printStackTrace();
            }
        }
    }

    // Tapping the main view while idle starts a new session by scanning the
    // QR code shown in the browser.
    public void onMainTouch(View v) {
        if (activityState == STATE_IDLE) {
            lastTag = null;
            startQRScanner("Scan the QR image in the browser.");
        }
    }

    @Override
    public void onNewIntent(Intent intent) {
        setIntent(intent);
    }

    // Extracts an ISO-DEP tag from an NFC discovery intent and, if a card was
    // expected (STATE_CONNECTED), reports it to the server and remembers it.
    public void processIntent(Intent intent) {
        Tag tagFromIntent = intent.getParcelableExtra(NfcAdapter.EXTRA_TAG);
        IsoDep tag = IsoDep.get(tagFromIntent);
        // Only proces tag when we're actually expecting a card.
        if (tag != null && activityState == STATE_CONNECTED) {
            setState(STATE_READY);
            postMessage(new ReaderMessage(ReaderMessage.TYPE_EVENT,
                    ReaderMessage.NAME_EVENT_CARDFOUND, null));
            lastTag = tag;
        }
    }

    class CardPollerTask extends TimerTask {
        /**
         * Dirty Hack. Since android doesn't produce events when an NFC card
         * is lost, we send a command to the card, and see if it still responds.
         * It is important that this command does not affect the card's state.
         *
         * FIXME: The command we sent is IRMA dependent, which is dangerous when
         * the proxy is used with other cards/protocols.
         */
        public void run() {
            // Only in the ready state do we need to actively check for card
            // presence.
            if(activityState == STATE_READY) {
                Log.i("CardPollerTask", "Checking card presence");
                ReaderMessage rm = new ReaderMessage(
                        ReaderMessage.TYPE_COMMAND, ReaderMessage.NAME_COMMAND_IDLE, "idle");
                new ProcessReaderMessage().execute(new ReaderInput(lastTag, rm));
            }
        }
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        IntentResult scanResult = IntentIntegrator
                .parseActivityResult(requestCode, resultCode, data);
        // Process the results from the QR-scanning activity
        if (scanResult != null) {
            String contents = scanResult.getContents();
            if (contents != null) {
                // The scanned QR code contains the channel URL to listen on.
                gotoConnectingState(contents);
            }
        }
    }

    // Starts long-polling the given channel URL and switches to the
    // CONNECTING_TO_SERVER state.
    private void gotoConnectingState(String url) {
        Log.i(TAG, "Start channel listening: " + url);
        currentReaderURL = url;
        Message msg = new Message();
        msg.what = MESSAGE_STARTGET;
        setState(STATE_CONNECTING_TO_SERVER);
        handler.sendMessage(msg);
    }

    // Shows the PIN entry dialog (passing the remaining number of tries).
    public void askForPIN() {
        setState(STATE_WAITING_FOR_PIN);
        DialogFragment newFragment = EnterPINDialogFragment.getInstance(tries);
        newFragment.show(getFragmentManager(), "pinentry");
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.activity_main, menu);
        return true;
    }

    // Launches the ZXing QR scanner with the given prompt message.
    public void startQRScanner(String message) {
        IntentIntegrator integrator = new IntentIntegrator(this);
        integrator.initiateScan(IntentIntegrator.QR_CODE_TYPES, message);
    }

    // Bundles everything ProcessReaderMessage needs: the NFC tag, the message
    // to execute against it and (optionally) a PIN code.
    private class ReaderInput {
        public IsoDep tag;
        public ReaderMessage message;
        public String pincode = null;

        public ReaderInput(IsoDep tag, ReaderMessage message) {
            this.tag = tag;
            this.message = message;
        }

        public ReaderInput(IsoDep tag, ReaderMessage message, String pincode) {
            this.tag = tag;
            this.message = message;
            this.pincode = pincode;
        }
    }

    // Executes a ReaderMessage against the card on a background thread and
    // posts the resulting response/event back to the server.
    private class ProcessReaderMessage extends AsyncTask<ReaderInput, Void, ReaderMessage> {

        @Override
        protected ReaderMessage doInBackground(ReaderInput... params) {
            ReaderInput input = params[0];
            IsoDep tag = input.tag;
            ReaderMessage rm = input.message;

            // It seems sometimes tag is null afterall
            if(tag == null) {
                Log.e("ReaderMessage", "tag is null, this should not happen!");
                return new ReaderMessage(ReaderMessage.TYPE_EVENT,
                        ReaderMessage.NAME_EVENT_CARDLOST, null);
            }

            // Make sure time-out is long enough (10 seconds)
            tag.setTimeout(10000);

            // TODO: The current version of the cardproxy shouldn't depend on idemix terminal, but for now
            // it is convenient.
            IdemixService is = new IdemixService(new IsoDepCardService(tag));
            try {
                if (!is.isOpen()) {
                    // TODO: this is dangerous, this call to IdemixService already does a "select applet"
                    is.open();
                }
                if (rm.name.equals(ReaderMessage.NAME_COMMAND_AUTHPIN)) {
                    if (input.pincode != null) {
                        // TODO: this should be done properly, maybe without using IdemixService?
                        tries = is.sendCredentialPin(input.pincode.getBytes());
                        return new ReaderMessage("response", rm.name, rm.id,
                                new PinResultArguments(tries));
                    }
                } else if (rm.name.equals(ReaderMessage.NAME_COMMAND_TRANSMIT)) {
                    // Run each APDU in the set; stop at the first non-OK status
                    // word (SW != 0x9000).
                    TransmitCommandSetArguments arg = (TransmitCommandSetArguments)rm.arguments;
                    ProtocolResponses responses = new ProtocolResponses();
                    for (ProtocolCommand c: arg.commands) {
                        ResponseAPDU apdu_response = is.transmit(c.getAPDU());
                        responses.put(c.getKey(),
                                new ProtocolResponse(c.getKey(), apdu_response));
                        if(apdu_response.getSW() != 0x9000) {
                            break;
                        }
                    }
                    return new ReaderMessage(ReaderMessage.TYPE_RESPONSE, rm.name, rm.id,
                            new ResponseArguments(responses));
                } else if (rm.name.equals(ReaderMessage.NAME_COMMAND_IDLE)) {
                    // FIXME: IRMA specific implementation,
                    // This command is not allowed in normal mode,
                    // so it will result in an exception.
                    Log.i("READER", "Processing idle command");
                    is.getCredentials();
                }
            } catch (CardServiceException e) {
                // FIXME: IRMA specific handling of failed command, this is too generic.
                if(e.getMessage().contains("Command failed:") && e.getSW() == 0x6982) {
                    return null;
                }
                e.printStackTrace();
                // TODO: maybe also include the information about the exception in the event?
                return new ReaderMessage(ReaderMessage.TYPE_EVENT,
                        ReaderMessage.NAME_EVENT_CARDLOST, null);
            } catch (IllegalStateException e) {
                // This sometimes props up when applications comes out of suspend for now we just ignore this.
                Log.i("READER", "IllegalStateException ignored");
                return null;
            }
            return null;
        }

        @Override
        protected void onPostExecute(ReaderMessage result) {
            if(result == null)
                return;
            // Update state
            if( result.type.equals(ReaderMessage.TYPE_EVENT)
                    && result.name.equals(ReaderMessage.NAME_EVENT_CARDLOST)) {
                // Connection to the card is lost
                setState(STATE_CONNECTED);
            } else {
                if(activityState == STATE_COMMUNICATING) {
                    setState(STATE_READY);
                }
            }
            if (result.name.equals(ReaderMessage.NAME_COMMAND_AUTHPIN)) {
                // Handle pin separately, abort if pin incorrect and more tries
                // left
                PinResultArguments args = (PinResultArguments) result.arguments;
                if (!args.success) {
                    if (args.tries > 0) {
                        // Still some tries left, asking again
                        setState(STATE_WAITING_FOR_PIN);
                        askForPIN();
                        return; // do not send a response yet.
                    } else {
                        // FIXME: No more tries left
                        // Need to go to error state
                    }
                }
            }
            // Post result to browser
            postMessage(result);
        }
    }

    @Override
    public void onPINEntry(String dialogPincode) {
        // TODO: in the final version, the following debug code should go :)
        Log.i(TAG, "PIN entered: " + dialogPincode);
        setState(STATE_COMMUNICATING);
        new ProcessReaderMessage().execute(
                new ReaderInput(lastTag, lastReaderMessage, dialogPincode));
    }

    @Override
    public void onPINCancel() {
        // User backed out of PIN entry: tell the server and return to READY.
        Log.i(TAG, "PIN entry canceled!");
        postMessage(
                new ReaderMessage(ReaderMessage.TYPE_RESPONSE,
                        ReaderMessage.NAME_COMMAND_AUTHPIN, lastReaderMessage.id,
                        new ResponseArguments("cancel")));
        setState(STATE_READY);
    }

    // Simple OK-only dialog used to report errors to the user.
    public static class ErrorFeedbackDialogFragment extends DialogFragment {
        public static ErrorFeedbackDialogFragment newInstance(String title, String message) {
            ErrorFeedbackDialogFragment f = new ErrorFeedbackDialogFragment();
            Bundle args = new Bundle();
            args.putString("message", message);
            args.putString("title", title);
            f.setArguments(args);
            return f;
        }

        @Override
        public Dialog onCreateDialog(Bundle savedInstanceState) {
            AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
            builder.setMessage(getArguments().getString("message"))
                    .setTitle(getArguments().getString("title"))
                    .setPositiveButton("OK", new DialogInterface.OnClickListener() {
                        public void onClick(DialogInterface dialog, int id) {
                            dialog.dismiss();
                        }
                    });
            return builder.create();
        }
    }
}
package org.nschmidt.ldparteditor.data;

import java.math.BigDecimal;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;

import org.nschmidt.ldparteditor.enumtype.LDConfig;
import org.nschmidt.ldparteditor.enumtype.View;
import org.nschmidt.ldparteditor.helper.composite3d.YTruderSettings;
import org.nschmidt.ldparteditor.logger.NLogger;
import org.nschmidt.ldparteditor.text.DatParser;

/**
 * "YTruder"-style extrusion: turns the currently selected lines into
 * triangles/quads (plus edge/conditional lines) by extruding each connected
 * line strip along the chosen axis, mirroring it across a plane, projecting it
 * onto a plane, or extruding it radially, depending on the mode in
 * {@link YTruderSettings}.
 */
class VM27YTruder extends VM26LineIntersector {

    // Tolerance below which a radial distance is treated as zero (avoids
    // dividing by ~0 in MODE_EXTRUDE_RADIALLY).
    private static final double EPSILON = 0.000001;
    // Manhattan-distance tolerance used to treat two points as identical.
    private static final double SMALL = 0.01;
    // Origin vector; triAngle() measures vector length as dist(v, nullv).
    private double[] nullv = new double[] { 0.0, 0.0, 0.0 };

    private static final int X_AXIS = 0;
    private static final int Y_AXIS = 1;
    private static final int Z_AXIS = 2;

    protected VM27YTruder(DatFile linkedDatFile) {
        super(linkedDatFile);
    }

    /**
     * Performs the extrusion on the selected lines of the linked file and
     * replaces the selection with the generated geometry. No-op when the file
     * is read-only, the selection is empty, or the distance is zero for a
     * distance-based mode.
     */
    @SuppressWarnings("java:S2111")
    public void yTruder(YTruderSettings ys) {
        if (linkedDatFile.isReadOnly())
            return;
        final double distance = ys.getDistance();
        int mode = ys.getMode();
        // A zero distance is meaningless for translation/radial extrusion.
        if (distance == 0 && (mode == YTruderSettings.MODE_TRANSLATE_BY_DISTANCE || mode == YTruderSettings.MODE_EXTRUDE_RADIALLY))
            return;
        final Set<GData2> originalSelection = new HashSet<>();
        originalSelection.addAll(selectedLines);
        if (originalSelection.isEmpty())
            return;
        final Set<GData2> newLines = new HashSet<>();
        final Set<GData3> newTriangles = new HashSet<>();
        final Set<GData4> newQuads = new HashSet<>();
        final Set<GData5> newCondlines = new HashSet<>();
        // Colour 24 for edge lines, colour 16 for the generated body.
        final GColour col16 = LDConfig.getColour16();
        final GColour lineColour = DatParser.validateColour(24, .5f, .5f, .5f, 1f).createClone();
        final GColour bodyColour = DatParser.validateColour(16, col16.getR(), col16.getG(), col16.getB(), 1f).createClone();
        final int maxLine = originalSelection.size() * 3;
        final int maxTri = originalSelection.size() * 3;
        // Working buffers: input line endpoints, generated quads ("surf") and
        // candidate (conditional) lines; lineUsed is 0=unused, 1=visited, 2=done.
        double[][][] inLine = new double[maxLine][2][3];
        int[] lineUsed = new int[maxLine];
        double[][][] surf = new double[maxTri][4][3];
        double[][][] condLine = new double[maxTri][4][3];
        int[] condFlag = new int[maxTri];
        int numSurf;
        int numCond;
        int x = 0;
        int y = 1;
        int z = 2;
        double angleLineThr = ys.getCondlineAngleThreshold();
        int end;
        int current;
        int surfstart;
        boolean flag = false;
        // Swap coordinate indices so the configured axis plays the role of "y".
        if (ys.getAxis() == X_AXIS) {
            x = 1;
            y = 0;
            z = 2;
        } else if (ys.getAxis() == Y_AXIS) {
            x = 0;
            y = 1;
            z = 2;
        } else if (ys.getAxis() == Z_AXIS) {
            x = 0;
            y = 2;
            z = 1;
        }
        // Copy the endpoints of every selected line into the working buffer.
        int originalLineCount = 0;
        for (GData2 gData2 : originalSelection) {
            Vertex[] verts = lines.get(gData2);
            if (verts != null) {
                inLine[originalLineCount][0][x] = verts[0].xp.doubleValue();
                inLine[originalLineCount][0][y] = verts[0].yp.doubleValue();
                inLine[originalLineCount][0][z] = verts[0].zp.doubleValue();
                inLine[originalLineCount][1][x] = verts[1].xp.doubleValue();
                inLine[originalLineCount][1][y] = verts[1].yp.doubleValue();
                inLine[originalLineCount][1][z] = verts[1].zp.doubleValue();
                lineUsed[originalLineCount] = 0;
                originalLineCount++;
            }
        }
        // Extruding...
        numSurf = 0;
        numCond = 0;
        condFlag[numCond] = 0;
        for (int i = 0; i < originalLineCount; i++) {
            double[] p0 = new double[3];
            double[] p1 = new double[3];
            double d0;
            double d1;
            if (lineUsed[i] == 0) {
                lineUsed[i] = 1;
                current = i;
                end = 0;
                // Walk to one end of the connected strip that contains line i.
                do {
                    flag = false;
                    for (int j = 0; j < originalLineCount; j++) {
                        if (lineUsed[j] == 0) {
                            for (int k = 0; k < 2; k++) {
                                if (manhattan(inLine[current][end], inLine[j][k]) < SMALL) {
                                    current = j;
                                    end = 1 - k;
                                    lineUsed[current] = 1;
                                    flag = true;
                                    break;
                                }
                            }
                        }
                        if (flag)
                            break;
                    }
                } while (flag);
                end = 1 - end;
                surfstart = numSurf;
                // First quad of the strip: [0]/[1] is the original line,
                // [2]/[3] the extruded copy (modified per mode below).
                set(surf[numSurf][0], inLine[current][1 - end]);
                set(surf[numSurf][1], inLine[current][end]);
                set(surf[numSurf][2], inLine[current][end]);
                set(surf[numSurf][3], inLine[current][1 - end]);
                switch (mode) {
                case YTruderSettings.MODE_TRANSLATE_BY_DISTANCE:
                    surf[numSurf][2][1] = surf[numSurf][2][1] + distance;
                    surf[numSurf][3][1] = surf[numSurf][3][1] + distance;
                    break;
                case YTruderSettings.MODE_SYMMETRY_ACROSS_PLANE:
                    surf[numSurf][2][1] = 2 * distance - surf[numSurf][2][1];
                    surf[numSurf][3][1] = 2 * distance - surf[numSurf][3][1];
                    break;
                case YTruderSettings.MODE_PROJECTION_ON_PLANE:
                    surf[numSurf][2][1] = distance;
                    surf[numSurf][3][1] = distance;
                    break;
                case YTruderSettings.MODE_EXTRUDE_RADIALLY:
                    // Scale the copy away from the axis by "distance",
                    // measured from the per-vertex radial distance.
                    p0[0] = 0;
                    p0[1] = surf[numSurf][0][1];
                    p0[2] = 0;
                    p1[0] = 0;
                    p1[1] = surf[numSurf][1][1];
                    p1[2] = 0;
                    d0 = dist(p0, surf[numSurf][0]);
                    d1 = dist(p1, surf[numSurf][1]);
                    if (d0 > EPSILON) {
                        surf[numSurf][3][0] = surf[numSurf][3][0] * (d0 + distance) / d0;
                        surf[numSurf][3][2] = surf[numSurf][3][2] * (d0 + distance) / d0;
                    }
                    if (d1 > EPSILON) {
                        surf[numSurf][2][0] = surf[numSurf][2][0] * (d1 + distance) / d1;
                        surf[numSurf][2][2] = surf[numSurf][2][2] * (d1 + distance) / d1;
                    }
                    double a;
                    a = triAngle(surf[numSurf][0], surf[numSurf][1], surf[numSurf][2], surf[numSurf][0], surf[numSurf][2], surf[numSurf][3]);
                    if (a > 0.5) {
                        set(condLine[numCond][0], surf[numSurf][0]);
                        set(condLine[numCond][1], surf[numSurf][2]);
                        set(condLine[numCond][2], surf[numSurf][1]);
                        set(condLine[numCond][3], surf[numSurf][3]);
                        condFlag[numCond] = 5;
                        numCond++;
                    }
                    break;
                default:
                    break;
                }
                numSurf++;
                lineUsed[current] = 2;
                // Follow the strip, generating one quad per remaining line and
                // a candidate condline between neighbouring quads.
                do {
                    flag = false;
                    for (int j = 0; j < originalLineCount; j++) {
                        if (lineUsed[j] < 2) {
                            for (int k = 0; k < 2; k++) {
                                if (manhattan(inLine[current][end], inLine[j][k]) < SMALL && lineUsed[j] < 2) {
                                    current = j;
                                    end = 1 - k;
                                    flag = true;
                                    set(surf[numSurf][0], inLine[current][1 - end]);
                                    set(surf[numSurf][1], inLine[current][end]);
                                    set(surf[numSurf][2], inLine[current][end]);
                                    set(surf[numSurf][3], inLine[current][1 - end]);
                                    switch (mode) {
                                    case YTruderSettings.MODE_TRANSLATE_BY_DISTANCE:
                                        surf[numSurf][2][1] = surf[numSurf][2][1] + distance;
                                        surf[numSurf][3][1] = surf[numSurf][3][1] + distance;
                                        break;
                                    case YTruderSettings.MODE_SYMMETRY_ACROSS_PLANE:
                                        surf[numSurf][2][1] = 2 * distance - surf[numSurf][2][1];
                                        surf[numSurf][3][1] = 2 * distance - surf[numSurf][3][1];
                                        break;
                                    case YTruderSettings.MODE_PROJECTION_ON_PLANE:
                                        surf[numSurf][2][1] = distance;
                                        surf[numSurf][3][1] = distance;
                                        break;
                                    case YTruderSettings.MODE_EXTRUDE_RADIALLY:
                                        p0[0] = 0;
                                        p0[1] = surf[numSurf][0][1];
                                        p0[2] = 0;
                                        p1[0] = 0;
                                        p1[1] = surf[numSurf][1][1];
                                        p1[2] = 0;
                                        d0 = dist(p0, surf[numSurf][0]);
                                        d1 = dist(p1, surf[numSurf][1]);
                                        if (d0 > EPSILON) {
                                            surf[numSurf][3][0] = surf[numSurf][3][0] * (d0 + distance) / d0;
                                            surf[numSurf][3][2] = surf[numSurf][3][2] * (d0 + distance) / d0;
                                        }
                                        if (d1 > EPSILON) {
                                            surf[numSurf][2][0] = surf[numSurf][2][0] * (d1 + distance) / d1;
                                            surf[numSurf][2][2] = surf[numSurf][2][2] * (d1 + distance) / d1;
                                        }
                                        set(condLine[numCond][0], surf[numSurf][0]);
                                        set(condLine[numCond][1], surf[numSurf][2]);
                                        set(condLine[numCond][2], surf[numSurf][1]);
                                        set(condLine[numCond][3], surf[numSurf][3]);
                                        condFlag[numCond] = 5;
                                        numCond++;
                                        break;
                                    default:
                                        break;
                                    }
                                    // Condline candidate along the shared edge
                                    // between this quad and the previous one.
                                    set(condLine[numCond][0], surf[numSurf][0]);
                                    set(condLine[numCond][1], surf[numSurf][3]);
                                    set(condLine[numCond][2], surf[numSurf][1]);
                                    set(condLine[numCond][3], surf[numSurf - 1][0]);
                                    condFlag[numCond] = 5;
                                    numSurf++;
                                    numCond++;
                                    lineUsed[current] = 2;
                                }
                                if (flag)
                                    break;
                            }
                        }
                        if (flag)
                            break;
                    }
                } while (flag);
                // Closed strip: one more condline joins last and first quad.
                // Open strip: plain edge lines at both free ends (flag 2).
                if (manhattan(surf[numSurf - 1][1], surf[surfstart][0]) < SMALL) {
                    set(condLine[numCond][0], surf[numSurf - 1][1]);
                    set(condLine[numCond][1], surf[numSurf - 1][2]);
                    set(condLine[numCond][2], surf[numSurf - 1][0]);
                    set(condLine[numCond][3], surf[surfstart][1]);
                    condFlag[numCond] = 5;
                    numCond++;
                } else {
                    set(condLine[numCond][0], surf[numSurf - 1][1]);
                    set(condLine[numCond][1], surf[numSurf - 1][2]);
                    condFlag[numCond] = 2;
                    numCond++;
                    set(condLine[numCond][0], surf[surfstart][0]);
                    set(condLine[numCond][1], surf[surfstart][3]);
                    condFlag[numCond] = 2;
                    numCond++;
                }
            }
        }
        // Emit the generated surfaces: degenerate quads collapse to triangles,
        // strongly bent quads are split into two triangles.
        for (int k = 0; k < numSurf; k++) {
            if (manhattan(surf[k][0], surf[k][3]) < SMALL && manhattan(surf[k][1], surf[k][2]) < SMALL)
                continue;
            if (manhattan(surf[k][0], surf[k][3]) < SMALL) {
                Vertex v1 = new Vertex(new BigDecimal(surf[k][0][x]), new BigDecimal(surf[k][0][y]), new BigDecimal(surf[k][0][z]));
                Vertex v2 = new Vertex(new BigDecimal(surf[k][1][x]), new BigDecimal(surf[k][1][y]), new BigDecimal(surf[k][1][z]));
                Vertex v3 = new Vertex(new BigDecimal(surf[k][2][x]), new BigDecimal(surf[k][2][y]), new BigDecimal(surf[k][2][z]));
                newTriangles.add(new GData3(bodyColour.getColourNumber(), bodyColour.getR(), bodyColour.getG(), bodyColour.getB(), bodyColour.getA(), v1, v2, v3, View.DUMMY_REFERENCE, linkedDatFile, true));
            } else if (manhattan(surf[k][1], surf[k][2]) < SMALL) {
                Vertex v1 = new Vertex(new BigDecimal(surf[k][0][x]), new BigDecimal(surf[k][0][y]), new BigDecimal(surf[k][0][z]));
                Vertex v2 = new Vertex(new BigDecimal(surf[k][1][x]), new BigDecimal(surf[k][1][y]), new BigDecimal(surf[k][1][z]));
                Vertex v3 = new Vertex(new BigDecimal(surf[k][3][x]), new BigDecimal(surf[k][3][y]), new BigDecimal(surf[k][3][z]));
                newTriangles.add(new GData3(bodyColour.getColourNumber(), bodyColour.getR(), bodyColour.getG(), bodyColour.getB(), bodyColour.getA(), v1, v2, v3, View.DUMMY_REFERENCE, linkedDatFile, true));
            } else if (mode == YTruderSettings.MODE_TRANSLATE_BY_DISTANCE || mode == YTruderSettings.MODE_SYMMETRY_ACROSS_PLANE || triAngle(surf[k][0], surf[k][1], surf[k][2], surf[k][0], surf[k][2], surf[k][3]) <= 0.5) {
                Vertex v1 = new Vertex(new BigDecimal(surf[k][0][x]), new BigDecimal(surf[k][0][y]), new BigDecimal(surf[k][0][z]));
                Vertex v2 = new Vertex(new BigDecimal(surf[k][1][x]), new BigDecimal(surf[k][1][y]), new BigDecimal(surf[k][1][z]));
                Vertex v3 = new Vertex(new BigDecimal(surf[k][2][x]), new BigDecimal(surf[k][2][y]), new BigDecimal(surf[k][2][z]));
                Vertex v4 = new Vertex(new BigDecimal(surf[k][3][x]), new BigDecimal(surf[k][3][y]), new BigDecimal(surf[k][3][z]));
                newQuads.add(new GData4(bodyColour.getColourNumber(), bodyColour.getR(), bodyColour.getG(), bodyColour.getB(), bodyColour.getA(), v1, v2, v3, v4, View.DUMMY_REFERENCE, linkedDatFile));
            } else {
                {
                    Vertex v1 = new Vertex(new BigDecimal(surf[k][0][x]), new BigDecimal(surf[k][0][y]), new BigDecimal(surf[k][0][z]));
                    Vertex v2 = new Vertex(new BigDecimal(surf[k][1][x]), new BigDecimal(surf[k][1][y]), new BigDecimal(surf[k][1][z]));
                    Vertex v3 = new Vertex(new BigDecimal(surf[k][2][x]), new BigDecimal(surf[k][2][y]), new BigDecimal(surf[k][2][z]));
                    newTriangles.add(new GData3(bodyColour.getColourNumber(), bodyColour.getR(), bodyColour.getG(), bodyColour.getB(), bodyColour.getA(), v1, v2, v3, View.DUMMY_REFERENCE, linkedDatFile, true));
                }
                {
                    Vertex v1 = new Vertex(new BigDecimal(surf[k][0][x]), new BigDecimal(surf[k][0][y]), new BigDecimal(surf[k][0][z]));
                    Vertex v2 = new Vertex(new BigDecimal(surf[k][2][x]), new BigDecimal(surf[k][2][y]), new BigDecimal(surf[k][2][z]));
                    Vertex v3 = new Vertex(new BigDecimal(surf[k][3][x]), new BigDecimal(surf[k][3][y]), new BigDecimal(surf[k][3][z]));
                    newTriangles.add(new GData3(bodyColour.getColourNumber(), bodyColour.getR(), bodyColour.getG(), bodyColour.getB(), bodyColour.getA(), v1, v2, v3, View.DUMMY_REFERENCE, linkedDatFile, true));
                }
            }
        }
        // Emit edge lines / conditional lines. Flag 5 entries become condlines
        // when the fold angle is below the threshold, plain lines otherwise.
        for (int k = 0; k < numCond; k++) {
            if (manhattan(condLine[k][0], condLine[k][1]) < SMALL)
                continue;
            if (condFlag[k] == 5) {
                double a;
                a = triAngle(condLine[k][0], condLine[k][1], condLine[k][2], condLine[k][0], condLine[k][3], condLine[k][1]);
                if (a < angleLineThr) {
                    Vertex v1 = new Vertex(new BigDecimal(condLine[k][0][x]), new BigDecimal(condLine[k][0][y]), new BigDecimal(condLine[k][0][z]));
                    Vertex v2 = new Vertex(new BigDecimal(condLine[k][1][x]), new BigDecimal(condLine[k][1][y]), new BigDecimal(condLine[k][1][z]));
                    Vertex v3 = new Vertex(new BigDecimal(condLine[k][2][x]), new BigDecimal(condLine[k][2][y]), new BigDecimal(condLine[k][2][z]));
                    Vertex v4 = new Vertex(new BigDecimal(condLine[k][3][x]), new BigDecimal(condLine[k][3][y]), new BigDecimal(condLine[k][3][z]));
                    newCondlines.add(new GData5(lineColour.getColourNumber(), lineColour.getR(), lineColour.getG(), lineColour.getB(), lineColour.getA(), v1, v2, v3, v4, View.DUMMY_REFERENCE, linkedDatFile));
                } else {
                    Vertex v1 = new Vertex(new BigDecimal(condLine[k][0][x]), new BigDecimal(condLine[k][0][y]), new BigDecimal(condLine[k][0][z]));
                    Vertex v2 = new Vertex(new BigDecimal(condLine[k][1][x]), new BigDecimal(condLine[k][1][y]), new BigDecimal(condLine[k][1][z]));
                    newLines.add(new GData2(lineColour.getColourNumber(), lineColour.getR(), lineColour.getG(), lineColour.getB(), lineColour.getA(), v1, v2, View.DUMMY_REFERENCE, linkedDatFile, true));
                }
            }
            if (condFlag[k] == 2) {
                Vertex v1 = new Vertex(new BigDecimal(condLine[k][0][x]), new BigDecimal(condLine[k][0][y]), new BigDecimal(condLine[k][0][z]));
                Vertex v2 = new Vertex(new BigDecimal(condLine[k][1][x]), new BigDecimal(condLine[k][1][y]), new BigDecimal(condLine[k][1][z]));
                newLines.add(new GData2(lineColour.getColourNumber(), lineColour.getR(), lineColour.getG(), lineColour.getB(), lineColour.getA(), v1, v2, View.DUMMY_REFERENCE, linkedDatFile, true));
            }
        }
        NLogger.debug(getClass(), "Check for identical vertices and collinearity."); //$NON-NLS-1$
        final Set<GData2> linesToDelete2 = new HashSet<>();
        final Set<GData3> trisToDelete2 = new HashSet<>();
        final Set<GData4> quadsToDelete2 = new HashSet<>();
        final Set<GData5> condlinesToDelete2 = new HashSet<>();
        {
            // Collect degenerate primitives (duplicate vertices / collinear).
            for (GData2 g2 : newLines) {
                Vertex[] verts = lines.get(g2);
                SortedSet<Vertex> verts2 = new TreeSet<>();
                verts2.addAll(Arrays.asList(verts));
                if (verts2.size() < 2) {
                    linesToDelete2.add(g2);
                }
            }
            for (GData3 g3 : newTriangles) {
                Vertex[] verts = triangles.get(g3);
                SortedSet<Vertex> verts2 = new TreeSet<>();
                verts2.addAll(Arrays.asList(verts));
                if (verts2.size() < 3 || g3.isCollinear()) {
                    trisToDelete2.add(g3);
                }
            }
            for (GData4 g4 : newQuads) {
                Vertex[] verts = quads.get(g4);
                SortedSet<Vertex> verts2 = new TreeSet<>();
                verts2.addAll(Arrays.asList(verts));
                if (verts2.size() < 4 || g4.isCollinear()) {
                    quadsToDelete2.add(g4);
                }
            }
            for (GData5 g5 : newCondlines) {
                Vertex[] verts = condlines.get(g5);
                SortedSet<Vertex> verts2 = new TreeSet<>();
                verts2.addAll(Arrays.asList(verts));
                if (verts2.size() < 4) {
                    condlinesToDelete2.add(g5);
                }
            }
        }

        // Append the new data
        for (GData2 line : newLines) {
            linkedDatFile.addToTailOrInsertAfterCursor(line);
        }
        for (GData3 tri : newTriangles) {
            linkedDatFile.addToTailOrInsertAfterCursor(tri);
        }
        for (GData4 quad : newQuads) {
            linkedDatFile.addToTailOrInsertAfterCursor(quad);
        }
        for (GData5 condline : newCondlines) {
            linkedDatFile.addToTailOrInsertAfterCursor(condline);
        }

        NLogger.debug(getClass(), "Delete new, but invalid objects."); //$NON-NLS-1$
        clearSelection2();
        newLines.removeAll(linesToDelete2);
        newTriangles.removeAll(trisToDelete2);
        newQuads.removeAll(quadsToDelete2);
        newCondlines.removeAll(condlinesToDelete2);
        selectedLines.addAll(linesToDelete2);
        selectedTriangles.addAll(trisToDelete2);
        selectedQuads.addAll(quadsToDelete2);
        selectedCondlines.addAll(condlinesToDelete2);
        selectedData.addAll(selectedLines);
        selectedData.addAll(selectedTriangles);
        selectedData.addAll(selectedQuads);
        selectedData.addAll(selectedCondlines);
        delete(false, false);

        // Round to 6 decimal places
        selectedLines.addAll(newLines);
        selectedTriangles.addAll(newTriangles);
        selectedQuads.addAll(newQuads);
        selectedCondlines.addAll(newCondlines);
        selectedData.addAll(selectedLines);
        selectedData.addAll(selectedTriangles);
        selectedData.addAll(selectedQuads);
        selectedData.addAll(selectedCondlines);

        NLogger.debug(getClass(), "Round."); //$NON-NLS-1$
        roundSelection(6, 10, true, false, true, true, true);

        setModified(true, true);
        validateState();

        NLogger.debug(getClass(), "Done."); //$NON-NLS-1$
    }

    // Cross product: dest = left x right.
    private void cross(double[] dest, double[] left, double[] right) {
        dest[0] = left[1] * right[2] - left[2] * right[1];
        dest[1] = left[2] * right[0] - left[0] * right[2];
        dest[2] = left[0] * right[1] - left[1] * right[0];
    }

    // Dot product of two 3D vectors.
    private double dot(double[] v1, double[] v2) {
        return v1[0] * v2[0] + v1[1] * v2[1] + v1[2] * v2[2];
    }

    // Component-wise subtraction: dest = left - right.
    private void sub(double[] dest, double[] left, double[] right) {
        dest[0] = left[0] - right[0];
        dest[1] = left[1] - right[1];
        dest[2] = left[2] - right[2];
    }

    // Scalar multiplication: dest = factor * v.
    private void mult(double[] dest, double[] v, double factor) {
        dest[0] = factor * v[0];
        dest[1] = factor * v[1];
        dest[2] = factor * v[2];
    }

    // Copies a 3D vector: dest = src.
    private void set(double[] dest, double[] src) {
        dest[0] = src[0];
        dest[1] = src[1];
        dest[2] = src[2];
    }

    // Manhattan (L1) distance between two points.
    private double manhattan(double[] v1, double[] v2) {
        return Math.abs(v1[0] - v2[0]) + Math.abs(v1[1] - v2[1]) + Math.abs(v1[2] - v2[2]);
    }

    // Euclidean distance between two points.
    private double dist(double[] v1, double[] v2) {
        return Math.sqrt((v1[0] - v2[0]) * (v1[0] - v2[0]) + (v1[1] - v2[1]) * (v1[1] - v2[1]) + (v1[2] - v2[2]) * (v1[2] - v2[2]));
    }

    // Tri_Angle computes the angle (in degrees, via acos of the normals' dot
    // product) between the planes of two triangles.
    // They are assumed to be non-degenerated
    private double triAngle(double[] u0, double[] u1, double[] u2, double[] v0, double[] v1, double[] v2) {
        double[] unorm = new double[3];
        double[] vnorm = new double[3];
        double[] temp = new double[3];
        double[] u10 = new double[3];
        double[] u20 = new double[3];
        double[] v10 = new double[3];
        double[] v20 = new double[3];
        double len;
        sub(u10, u1, u0);
        sub(u20, u2, u0);
        sub(v10, v1, v0);
        sub(v20, v2, v0);
        cross(temp, u10, u20);
        len = dist(temp, nullv);
        mult(unorm, temp, 1 / len);
        cross(temp, v10, v20);
        len = dist(temp, nullv);
        mult(vnorm, temp, 1 / len);
        // NOTE(review): uses 3.14159 instead of Math.PI; kept as-is because the
        // generated geometry thresholds were tuned against this value.
        return 180 / 3.14159 * Math.acos(dot(unorm, vnorm));
    }
}
package org.nutz.dao.impl.link; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import org.nutz.dao.entity.Entity; import org.nutz.dao.entity.LinkField; import org.nutz.dao.impl.AbstractLinkVisitor; import org.nutz.dao.sql.Pojo; import org.nutz.dao.sql.PojoCallback; import org.nutz.dao.util.Pojos; import org.nutz.lang.Each; import org.nutz.lang.ExitLoop; import org.nutz.lang.Lang; import org.nutz.lang.LoopException; public class DoInsertLinkVisitor extends AbstractLinkVisitor { public void visit(final Object obj, final LinkField lnk) { final Object value = lnk.getValue(obj); if (Lang.length(value) == 0) return; opt.add(Pojos.createRun(new PojoCallback() { public Object invoke(Connection conn, ResultSet rs, Pojo pojo) throws SQLException { lnk.updateLinkedField(obj, value); return pojo.getOperatingObject(); } }).setOperatingObject(obj)); // : holder.getEntityBy final Entity<?> en = lnk.getLinkedEntity(); Lang.each(value, new Each<Object>() { public void invoke(int i, Object ele, int length) throws ExitLoop, LoopException { if (ele == null) throw new NullPointerException("null ele in linked field!!"); opt.addInsert(en, ele); opt.add(Pojos.createRun(new PojoCallback() { public Object invoke(Connection conn, ResultSet rs, Pojo pojo) throws SQLException { lnk.saveLinkedField(obj, pojo.getOperatingObject()); return pojo.getOperatingObject(); } }).setOperatingObject(ele)); } }); } }
package org.owasp.esapi.reference;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.text.DateFormat;
import java.util.Arrays;
import java.util.Date;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;

import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;

import org.owasp.esapi.ESAPI;
import org.owasp.esapi.errors.EncodingException;
import org.owasp.esapi.errors.IntrusionException;
import org.owasp.esapi.errors.ValidationAvailabilityException;
import org.owasp.esapi.errors.ValidationException;
import org.owasp.validator.html.AntiSamy;
import org.owasp.validator.html.CleanResults;
import org.owasp.validator.html.Policy;
import org.owasp.validator.html.PolicyException;
import org.owasp.validator.html.ScanException;

/**
 * Reference implementation of the ESAPI Validator interface: input is
 * canonicalized via the ESAPI encoder, then checked against whitelist regular
 * expressions configured in ESAPI.properties. Rich content is verified with
 * the OWASP AntiSamy policy.
 */
public class DefaultValidator implements org.owasp.esapi.Validator {

	/** OWASP AntiSamy markup verification policy */
	private Policy antiSamyPolicy = null;

	/** constants */
	private static final int MAX_CREDIT_CARD_LENGTH = 19;
	private static final int MAX_PARAMETER_NAME_LENGTH = 100;
	private static final int MAX_PARAMETER_VALUE_LENGTH = 65535; //max length of MySQL "text" column type

	public DefaultValidator() {
	}

	// FIXME: need to return 3 ways - safe input, safe input with message, ValidationException
	// Going to have to implement a ValidationResult (toString should be the safe input)
	// FIXME: Why do we need "safe input with message?"
public boolean isValidInput(String context, String input, String type, int maxLength, boolean allowNull) throws IntrusionException { try { getValidInput( context, input, type, maxLength, allowNull); return true; } catch( Exception e ) { return false; } } public String getValidInput(String context, String input, String type, int maxLength, boolean allowNull) throws ValidationException, IntrusionException { try { context = ESAPI.encoder().canonicalize( context ); String canonical = ESAPI.encoder().canonicalize( input ); if (isEmpty(canonical)) { if (allowNull) return null; throw new ValidationException( context + ": Input required", "Input required: context=" + context + ", type=" + type + "), input=" + input ); } if (canonical.length() > maxLength) { //FIXME: ENHANCE if the length is exceeded by a wide margin, throw IntrusionException? throw new ValidationException( context + ": Input can not exceed " + maxLength + " characters", "Input exceeds maximum allowed length of " + maxLength + " by " + (canonical.length()-maxLength) + " characters: context=" + context + ", type=" + type + "), input=" + input); } if ( type == null || type.length() == 0 ) { throw new ValidationException( context + ": Invalid input", "Validation misconfiguration, specified type to validate against was null: context=" + context + ", type=" + type + "), input=" + input ); } //TODO - let us know when its a ESAPI.properties config problem! 
This exception does not differentiate Pattern p = ((DefaultSecurityConfiguration)ESAPI.securityConfiguration()).getValidationPattern( type ); if ( p == null ) { try { p = Pattern.compile( type ); } catch( PatternSyntaxException e ) { throw new ValidationException( context + ": Invalid input", "Validation misconfiguration, type to validate against must be defined in ESAPI.properties or a valid regular expression: context=" + context + ", type=" + type + "), input=" + input ); } } if ( !p.matcher(canonical).matches() ) { throw new ValidationException( context + ": Invalid input", "Invalid input: context=" + context + ", type=" + type + "( " + p.pattern() + "), input=" + input ); } return canonical; } catch (EncodingException e) { throw new ValidationException( context + ": Invalid input", "Error canonicalizing user input", e); } } /** * Returns true if input is a valid date according to the specified date format. */ public boolean isValidDate(String context, String input, DateFormat format, boolean allowNull) throws IntrusionException { try { getValidDate( context, input, format, allowNull); return true; } catch( Exception e ) { return false; } } /* * Returns a valid date as a Date. Invalid input will generate a descriptive ValidationException, * and input that is clearly an attack will generate a descriptive IntrusionException. 
* * @see org.owasp.esapi.interfaces.IValidator#getValidDate(java.lang.String) */ public Date getValidDate(String context, String input, DateFormat format, boolean allowNull) throws ValidationException, IntrusionException { try { if (isEmpty(input)) { if (allowNull) return null; throw new ValidationException( context + ": Input date required", "Input date required: context=" + context + ", input=" + input ); } Date date = format.parse(input); return date; } catch (Exception e) { throw new ValidationException( context + ": Invalid date must follow " + format + " format", "Invalid date: context=" + context + ", format=" + format + ", input=" + input,e ); } } /* * Returns true if input is "safe" HTML. Implementors should reference the OWASP AntiSamy project for ideas * on how to do HTML validation in a whitelist way, as this is an extremely difficult problem. * * @see org.owasp.esapi.interfaces.IValidator#isValidSafeHTML(java.lang.String) */ public boolean isValidSafeHTML(String context, String input, int maxLength, boolean allowNull) throws IntrusionException { try { if ( antiSamyPolicy == null ) { if (ESAPI.securityConfiguration().getResourceDirectory() == null) { //TODO - load via classpath - AntiSamy does not support this yet } else { //load via fileio antiSamyPolicy = Policy.getInstance( ESAPI.securityConfiguration().getResourceDirectory() + "antisamy-esapi.xml"); } } AntiSamy as = new AntiSamy(); CleanResults test = as.scan(input, antiSamyPolicy); return(test.getErrorMessages().size() == 0); } catch (Exception e) { return false; } } /* * Returns canonicalized and validated "safe" HTML. Implementors should reference the OWASP AntiSamy project for ideas * on how to do HTML validation in a whitelist way, as this is an extremely difficult problem. 
* * @see org.owasp.esapi.interfaces.IValidator#getValidSafeHTML(java.lang.String) */ public String getValidSafeHTML( String context, String input, int maxLength, boolean allowNull ) throws ValidationException, IntrusionException { if (isEmpty(input)) { if (allowNull) return null; throw new ValidationException( context + ": Input HTML required", "Input HTML required: context=" + context + ", input=" + input ); } if (input.length() > maxLength) { //TODO - if the length is exceeded by a wide margin, throw IntrusionException? throw new ValidationException( context + ": Invalid HTML input can not exceed " + maxLength + " characters", context + " input exceedes maxLength by " + (input.length()-maxLength) + " characters"); } try { if ( antiSamyPolicy == null ) { antiSamyPolicy = Policy.getInstance( ESAPI.securityConfiguration().getResourceDirectory() + "antisamy-esapi.xml"); } AntiSamy as = new AntiSamy(); CleanResults test = as.scan(input, antiSamyPolicy); List errors = test.getErrorMessages(); // FIXME: AAA log detailed messages for now - would be nice to report // FIXME: Enhance - antisamy has html markup in error messages - not perfect for log if ( errors.size() > 0 ) { // just create new exception to get it logged and intrusion detected new ValidationException( "Invalid HTML input: context=" + context, "Invalid HTML input: context=" + context + ", errors=" + errors ); } return(test.getCleanHTML().trim()); } catch (ScanException e) { throw new ValidationException( context + ": Invalid HTML input", "Invalid HTML input: context=" + context + " error=" + e.getMessage(), e ); } catch (PolicyException e) { throw new ValidationException( context + ": Invalid HTML input", "Invalid HTML input does not follow rules in antisamy-esapi.xml: context=" + context + " error=" + e.getMessage(), e ); } } /* * Returns true if input is a valid credit card. Maxlength is mandated by valid credit card type. 
* * @see org.owasp.esapi.interfaces.IValidator#isValidCreditCard(java.lang.String) */ public boolean isValidCreditCard(String context, String input, boolean allowNull) throws IntrusionException { try { getValidCreditCard( context, input, allowNull); return true; } catch( Exception e ) { return false; } } /** * Returns a canonicalized and validated credit card number as a String. Invalid input * will generate a descriptive ValidationException, and input that is clearly an attack * will generate a descriptive IntrusionException. */ public String getValidCreditCard(String context, String input, boolean allowNull) throws ValidationException, IntrusionException { if (isEmpty(input)) { if (allowNull) return null; throw new ValidationException( context + ": Input credit card required", "Input credit card required: context=" + context + ", input=" + input ); } String canonical = getValidInput( context, input, "CreditCard", MAX_CREDIT_CARD_LENGTH, allowNull); // perform Luhn algorithm checking StringBuffer digitsOnly = new StringBuffer(); char c; for (int i = 0; i < canonical.length(); i++) { c = canonical.charAt(i); if (Character.isDigit(c)) { digitsOnly.append(c); } } int sum = 0; int digit = 0; int addend = 0; boolean timesTwo = false; for (int i = digitsOnly.length() - 1; i >= 0; i digit = Integer.parseInt(digitsOnly.substring(i, i + 1)); if (timesTwo) { addend = digit * 2; if (addend > 9) { addend -= 9; } } else { addend = digit; } sum += addend; timesTwo = !timesTwo; } int modulus = sum % 10; if (modulus != 0) throw new ValidationException( context + ": Invalid credit card input", "Invalid credit card input: context=" + context ); return canonical; } /** * Returns true if the directory path (not including a filename) is valid. 
* * @see org.owasp.esapi.Validator#isValidDirectoryPath(java.lang.String) */ public boolean isValidDirectoryPath(String context, String input, boolean allowNull) throws IntrusionException { try { getValidDirectoryPath( context, input, allowNull); return true; } catch( Exception e ) { return false; } } /** * Returns a canonicalized and validated directory path as a String. Invalid input * will generate a descriptive ValidationException, and input that is clearly an attack * will generate a descriptive IntrusionException. */ public String getValidDirectoryPath(String context, String input, boolean allowNull) throws ValidationException, IntrusionException { String canonical = ""; try { if (isEmpty(input)) { if (allowNull) return null; throw new ValidationException( context + ": Input directory path required", "Input directory path required: context=" + context + ", input=" + input ); } // do basic validation canonical = ESAPI.encoder().canonicalize(input); getValidInput( context, canonical, "DirectoryName", 255, false); // get the canonical path without the drive letter if present String cpath = new File(canonical).getCanonicalPath().replaceAll( "\\\\", "/"); String temp = cpath.toLowerCase(); if (temp.length() >= 2 && temp.charAt(0) >= 'a' && temp.charAt(0) <= 'z' && temp.charAt(1) == ':') { cpath = cpath.substring(2); } // prepare the input without the drive letter if present String escaped = canonical.replaceAll( "\\\\", "/"); temp = escaped.toLowerCase(); if (temp.length() >= 2 && temp.charAt(0) >= 'a' && temp.charAt(0) <= 'z' && temp.charAt(1) == ':') { escaped = escaped.substring(2); } // the path is valid if the input matches the canonical path if (!escaped.equals(cpath.toLowerCase())) { throw new ValidationException( context + ": Invalid directory name", "Invalid directory name does not match the canonical path: context=" + context + ", input=" + input + ", canonical=" + canonical ); } } catch (IOException e) { throw new ValidationException( context + ": 
Invalid directory name", "Invalid directory name does not exist: context=" + context + ", canonical=" + canonical, e ); } catch (EncodingException ee) { throw new IntrusionException( context + ": Invalid directory name", "Invalid directory name: context=" + context + ", canonical=" + canonical, ee ); } return canonical; } /** * Returns true if input is a valid file name. */ public boolean isValidFileName(String context, String input, boolean allowNull) throws IntrusionException { try { getValidFileName( context, input, allowNull); return true; } catch( Exception e ) { return false; } } /** * Returns a canonicalized and validated file name as a String. Invalid input * will generate a descriptive ValidationException, and input that is clearly an attack * will generate a descriptive IntrusionException. */ public String getValidFileName(String context, String input, boolean allowNull) throws ValidationException, IntrusionException { String canonical = ""; // FIXME: AAA verify no disallowed characters like %00 ? * \ / - use SafeFile? 
// detect path manipulation try { if (isEmpty(input)) { if (allowNull) return null; throw new ValidationException( context + ": Input file name required", "Input required: context=" + context + ", input=" + input ); } // do basic validation canonical = ESAPI.encoder().canonicalize(input); getValidInput( context, input, "FileName", 255, true ); File f = new File(canonical); String c = f.getCanonicalPath(); String cpath = c.substring(c.lastIndexOf(File.separator) + 1); // the path is valid if the input matches the canonical path if (!input.equals(cpath.toLowerCase())) { throw new ValidationException( context + ": Invalid file name", "Invalid directory name does not match the canonical path: context=" + context + ", input=" + input + ", canonical=" + canonical ); } } catch (IOException e) { throw new ValidationException( context + ": Invalid file name", "Invalid file name does not exist: context=" + context + ", canonical=" + canonical, e ); } catch (EncodingException ee) { throw new IntrusionException( context + ": Invalid file name", "Invalid file name: context=" + context + ", canonical=" + canonical, ee ); } // verify extensions List extensions = ESAPI.securityConfiguration().getAllowedFileExtensions(); Iterator i = extensions.iterator(); while (i.hasNext()) { String ext = (String) i.next(); if (input.toLowerCase().endsWith(ext.toLowerCase())) { return canonical; } } throw new ValidationException( context + ": Invalid file name does not have valid extension ( "+ESAPI.securityConfiguration().getAllowedFileExtensions()+")", "Invalid file name does not have valid extension ( "+ESAPI.securityConfiguration().getAllowedFileExtensions()+"): context=" + context+", input=" + input); } /* * Returns true if input is a valid number. 
* * @see org.owasp.esapi.interfaces.IValidator#isValidNumber(java.lang.String) */ public boolean isValidNumber(String context, String input, long minValue, long maxValue, boolean allowNull) throws IntrusionException { try { getValidNumber( context, input, minValue, maxValue, allowNull); return true; } catch( Exception e ) { return false; } } /** * Returns a validated number as a double. Invalid input * will generate a descriptive ValidationException, and input that is clearly an attack * will generate a descriptive IntrusionException. */ public Double getValidNumber(String context, String input, long minValue, long maxValue, boolean allowNull) throws ValidationException, IntrusionException { Double minDoubleValue = new Double(minValue); Double maxDoubleValue = new Double(maxValue); return getValidDouble( context, input, minDoubleValue.doubleValue(), maxDoubleValue.doubleValue(), allowNull); } /* * Returns true if input is a valid number. * * @see org.owasp.esapi.interfaces.IValidator#isValidNumber(java.lang.String) */ public boolean isValidDouble(String context, String input, double minValue, double maxValue, boolean allowNull) throws IntrusionException { return isValidDouble( context, input, minValue, maxValue, allowNull ); } /** * Returns a validated number as a double. Invalid input * will generate a descriptive ValidationException, and input that is clearly an attack * will generate a descriptive IntrusionException. */ public Double getValidDouble(String context, String input, double minValue, double maxValue, boolean allowNull) throws ValidationException, IntrusionException { if (minValue > maxValue) { //should this be a RunTime? 
throw new ValidationException( context + ": Invalid double input: context", "Validation parameter error for double: maxValue ( " + maxValue + ") must be greater than minValue ( " + minValue + ") for " + context ); } if (isEmpty(input)) { if (allowNull) return null; throw new ValidationException( context + ": Input required: context", "Input required: context=" + context + ", input=" + input ); } try { Double d = new Double(Double.parseDouble(input)); if (d.isInfinite()) throw new ValidationException( "Invalid double input: context=" + context, "Invalid double input is infinite: context=" + context + ", input=" + input ); if (d.isNaN()) throw new ValidationException( "Invalid double input: context=" + context, "Invalid double input is infinite: context=" + context + ", input=" + input ); if (d.doubleValue() < minValue) throw new ValidationException( "Invalid double input must be between " + minValue + " and " + maxValue + ": context=" + context, "Invalid double input must be between " + minValue + " and " + maxValue + ": context=" + context + ", input=" + input ); if (d.doubleValue() > maxValue) throw new ValidationException( "Invalid double input must be between " + minValue + " and " + maxValue + ": context=" + context, "Invalid double input must be between " + minValue + " and " + maxValue + ": context=" + context + ", input=" + input ); return d; } catch (NumberFormatException e) { throw new ValidationException( context + ": Invalid double input", "Invalid double input format: context=" + context + ", input=" + input, e); } } /* * Returns true if input is a valid number. * * @see org.owasp.esapi.interfaces.IValidator#isValidInteger(java.lang.String) */ public boolean isValidInteger(String context, String input, int minValue, int maxValue, boolean allowNull) throws IntrusionException { try { getValidInteger( context, input, minValue, maxValue, allowNull); return true; } catch( Exception e ) { return false; } } /** * Returns a validated number as a double. 
Invalid input * will generate a descriptive ValidationException, and input that is clearly an attack * will generate a descriptive IntrusionException. */ public Integer getValidInteger(String context, String input, int minValue, int maxValue, boolean allowNull) throws ValidationException, IntrusionException { if (minValue > maxValue) { //should this be a RunTime? throw new ValidationException( context + ": Invalid Integer", "Validation parameter error for double: maxValue ( " + maxValue + ") must be greater than minValue ( " + minValue + ") for " + context ); } if (isEmpty(input)) { if (allowNull) return null; throw new ValidationException( context + ": Input required", "Input required: context=" + context + ", input=" + input ); } try { int i = Integer.parseInt(input); if (i < minValue || i > maxValue ) throw new ValidationException( context + ": Invalid Integer. Value must be between " + minValue + " and " + maxValue, "Invalid int input must be between " + minValue + " and " + maxValue + ": context=" + context + ", input=" + input ); return new Integer(i); } catch (NumberFormatException e) { throw new ValidationException( context + ": Invalid integer input", "Invalid int input: context=" + context + ", input=" + input, e); } } /** * Returns true if input is valid file content. */ public boolean isValidFileContent(String context, byte[] input, int maxBytes, boolean allowNull) throws IntrusionException { try { getValidFileContent( context, input, maxBytes, allowNull); return true; } catch( Exception e ) { return false; } } /** * Returns validated file content as a byte array. Invalid input * will generate a descriptive ValidationException, and input that is clearly an attack * will generate a descriptive IntrusionException. 
*/ public byte[] getValidFileContent(String context, byte[] input, int maxBytes, boolean allowNull) throws ValidationException, IntrusionException { if (isEmpty(input)) { if (allowNull) return null; throw new ValidationException( context + ": Input required", "Input required: context=" + context + ", input=" + input ); } // FIXME: AAA - temporary - what makes file content valid? Maybe need a parameter here? long esapiMaxBytes = ESAPI.securityConfiguration().getAllowedFileUploadSize(); if (input.length > esapiMaxBytes ) throw new ValidationException( context + ": Invalid file content can not exceed " + esapiMaxBytes + " bytes", "Exceeded ESAPI max length"); if (input.length > maxBytes ) throw new ValidationException( context + ": Invalid file content can not exceed " + maxBytes + " bytes", "Exceeded maxBytes ( " + input.length + ")"); return input; } /** * Returns true if a file upload has a valid name, path, and content. */ public boolean isValidFileUpload(String context, String directorypath, String filename, byte[] content, int maxBytes, boolean allowNull) throws IntrusionException { return( isValidFileName( context, filename, allowNull ) && isValidDirectoryPath( context, directorypath, allowNull ) && isValidFileContent( context, content, maxBytes, allowNull ) ); } /** * Validates the filepath, filename, and content of a file. Invalid input * will generate a descriptive ValidationException, and input that is clearly an attack * will generate a descriptive IntrusionException. 
*/ public void assertValidFileUpload(String context, String directorypath, String filename, byte[] content, int maxBytes, boolean allowNull) throws ValidationException, IntrusionException { getValidFileName( context, filename, allowNull ); getValidDirectoryPath( context, directorypath, allowNull ); getValidFileContent( context, content, maxBytes, allowNull ); } /** * Validates the current HTTP request by comparing parameters, headers, and cookies to a predefined whitelist of allowed * characters. Invalid input will generate a descriptive ValidationException, and input that is clearly an attack * will generate a descriptive IntrusionException. * * Uses current HTTPRequest saved in EASPI Authenticator */ public boolean isValidHTTPRequest() throws IntrusionException { try { assertIsValidHTTPRequest(); return true; } catch( Exception e ) { return false; } } /** * Validates the current HTTP request by comparing parameters, headers, and cookies to a predefined whitelist of allowed * characters. Invalid input will generate a descriptive ValidationException, and input that is clearly an attack * will generate a descriptive IntrusionException. */ public boolean isValidHTTPRequest(HttpServletRequest request) throws IntrusionException { try { assertIsValidHTTPRequest(request); return true; } catch( Exception e ) { return false; } } /** * Validates the current HTTP request by comparing parameters, headers, and cookies to a predefined whitelist of allowed * characters. Invalid input will generate a descriptive ValidationException, and input that is clearly an attack * will generate a descriptive IntrusionException. 
* * Uses current HTTPRequest saved in EASPI Authenticator * */ public void assertIsValidHTTPRequest() throws ValidationException, IntrusionException { HttpServletRequest request = ESAPI.httpUtilities().getCurrentRequest(); assertIsValidHTTPRequest(request); } /** * Validates the current HTTP request by comparing parameters, headers, and cookies to a predefined whitelist of allowed * characters. Invalid input will generate a descriptive ValidationException, and input that is clearly an attack * will generate a descriptive IntrusionException. */ public void assertIsValidHTTPRequest(HttpServletRequest request) throws ValidationException, IntrusionException { if (request == null) { throw new ValidationException( "Input required: HTTP request is null", "Input required: HTTP request is null" ); } // FIXME: make allowed methods configurable if ( !request.getMethod().equals( "GET") && !request.getMethod().equals("POST") ) { throw new IntrusionException( "Bad HTTP method received", "Bad HTTP method received: " + request.getMethod() ); } Iterator i1 = request.getParameterMap().entrySet().iterator(); while (i1.hasNext()) { Map.Entry entry = (Map.Entry) i1.next(); String name = (String) entry.getKey(); getValidInput( "HTTP request parameter: " + name, name, "HTTPParameterName", MAX_PARAMETER_NAME_LENGTH, false ); String[] values = (String[]) entry.getValue(); Iterator i3 = Arrays.asList(values).iterator(); // FIXME:Enhance - consider throwing an exception if there are multiple parameters with the same name while (i3.hasNext()) { String value = (String) i3.next(); getValidInput( "HTTP request parameter: " + name, value, "HTTPParameterValue", MAX_PARAMETER_VALUE_LENGTH, true ); } } if (request.getCookies() != null) { Iterator i2 = Arrays.asList(request.getCookies()).iterator(); while (i2.hasNext()) { Cookie cookie = (Cookie) i2.next(); String name = cookie.getName(); getValidInput( "HTTP request cookie: " + name, name, "HTTPCookieName", MAX_PARAMETER_NAME_LENGTH, true ); String 
value = cookie.getValue(); getValidInput( "HTTP request cookie: " + name, value, "HTTPCookieValue", MAX_PARAMETER_VALUE_LENGTH, true ); } } Enumeration e = request.getHeaderNames(); while (e.hasMoreElements()) { String name = (String) e.nextElement(); if (name != null && !name.equalsIgnoreCase( "Cookie")) { getValidInput( "HTTP request header: " + name, name, "HTTPHeaderName", MAX_PARAMETER_NAME_LENGTH, true ); Enumeration e2 = request.getHeaders(name); while (e2.hasMoreElements()) { String value = (String) e2.nextElement(); getValidInput( "HTTP request header: " + name, value, "HTTPHeaderValue", MAX_PARAMETER_VALUE_LENGTH, true ); } } } } /* * Returns true if input is a valid list item. * * @see org.owasp.esapi.interfaces.IValidator#isValidListItem(java.util.List, * java.lang.String) */ public boolean isValidListItem(String context, String input, List list) { try { getValidListItem( context, input, list); return true; } catch( Exception e ) { return false; } } /** * Returns the list item that exactly matches the canonicalized input. Invalid or non-matching input * will generate a descriptive ValidationException, and input that is clearly an attack * will generate a descriptive IntrusionException. */ public String getValidListItem(String context, String input, List list) throws ValidationException, IntrusionException { if (list.contains(input)) return input; throw new ValidationException( context + ": Invalid list item", "Invalid list item: context=" + context + ", input=" + input ); } /* * Returns true if the parameters in the current request contain all required parameters and only optional ones in addition. 
* * @see org.owasp.esapi.interfaces.IValidator#isValidParameterSet(java.util.Set, * java.util.Set, java.util.Set) */ public boolean isValidHTTPRequestParameterSet(String context, Set requiredNames, Set optionalNames) { try { assertIsValidHTTPRequestParameterSet( context, requiredNames, optionalNames); return true; } catch( Exception e ) { return false; } } /** * Validates that the parameters in the current request contain all required parameters and only optional ones in * addition. Invalid input will generate a descriptive ValidationException, and input that is clearly an attack * will generate a descriptive IntrusionException. */ public void assertIsValidHTTPRequestParameterSet(String context, Set required, Set optional) throws ValidationException, IntrusionException { HttpServletRequest request = ESAPI.httpUtilities().getCurrentRequest(); Set actualNames = request.getParameterMap().keySet(); // verify ALL required parameters are present Set missing = new HashSet(required); missing.removeAll(actualNames); if (missing.size() > 0) { //TODO - we need to know WHICH element is missing throw new ValidationException( context + ": Invalid HTTP request missing parameters", "Invalid HTTP request missing parameters " + missing + ": context=" + context ); } // verify ONLY optional + required parameters are present Set extra = new HashSet(actualNames); extra.removeAll(required); extra.removeAll(optional); if (extra.size() > 0) { throw new ValidationException( context + ": Invalid HTTP request extra parameters " + extra, "Invalid HTTP request extra parameters " + extra + ": context=" + context ); } } public boolean isValidPrintable(String context, byte[] input, int maxLength, boolean allowNull) throws IntrusionException { try { getValidPrintable( context, input, maxLength, allowNull); return true; } catch( Exception e ) { return false; } } /** * Returns canonicalized and validated printable characters as a byte array. 
Invalid input will generate a descriptive ValidationException, and input that is clearly an attack * will generate a descriptive IntrusionException. */ public byte[] getValidPrintable(String context, byte[] input, int maxLength, boolean allowNull) throws ValidationException, IntrusionException { if (isEmpty(input)) { if (allowNull) return null; throw new ValidationException( context + ": Input bytes required", "Input bytes required: HTTP request is null" ); } if (input.length > maxLength) { throw new ValidationException( context + ": Input bytes can not exceed " + maxLength + " bytes", "Input exceeds maximum allowed length of " + maxLength + " by " + (input.length-maxLength) + " bytes: context=" + context + ", input=" + input); } for (int i = 0; i < input.length; i++) { if (input[i] < 33 || input[i] > 126) { throw new ValidationException( context + ": Invalid input bytes: context=" + context, "Invalid non-ASCII input bytes, context=" + context + ", input=" + input ); } } return input; } /* * Returns true if input is valid printable ASCII characters (32-126). * * @see org.owasp.esapi.interfaces.IValidator#isValidPrintable(java.lang.String) */ public boolean isValidPrintable(String context, String input, int maxLength, boolean allowNull) throws IntrusionException { try { getValidPrintable( context, input, maxLength, allowNull); return true; } catch( Exception e ) { return false; } } /** * Returns canonicalized and validated printable characters as a String. Invalid input will generate a descriptive ValidationException, and input that is clearly an attack * will generate a descriptive IntrusionException. 
*/ public String getValidPrintable(String context, String input, int maxLength, boolean allowNull) throws ValidationException, IntrusionException { String canonical = ""; try { canonical = ESAPI.encoder().canonicalize(input); return new String( getValidPrintable( context, canonical.getBytes(), maxLength, allowNull) ); } catch (EncodingException e) { throw new ValidationException( context + ": Invalid printable input", "Invalid encoding of printable input, context=" + context + ", input=" + input, e); } } /** * Returns true if input is a valid redirect location. */ public boolean isValidRedirectLocation(String context, String input, boolean allowNull) throws IntrusionException { // FIXME: ENHANCE - it's too hard to put valid locations in as regex // FIXME: ENHANCE - configurable redirect length return ESAPI.validator().isValidInput( context, input, "Redirect", 512, allowNull); } /** * Returns a canonicalized and validated redirect location as a String. Invalid input will generate a descriptive ValidationException, and input that is clearly an attack * will generate a descriptive IntrusionException. */ public String getValidRedirectLocation(String context, String input, boolean allowNull) throws ValidationException, IntrusionException { // FIXME: ENHANCE - it's too hard to put valid locations in as regex return ESAPI.validator().getValidInput( context, input, "Redirect", 512, allowNull); } /** * This implementation reads until a newline or the specified number of * characters. * * @param in * the in * @param max * the max * @return the string * @throws ValidationException * the validation exception * @see org.owasp.esapi.Validator#safeReadLine(java.io.InputStream, * int) */ public String safeReadLine(InputStream in, int max) throws ValidationException { if (max <= 0) throw new ValidationAvailabilityException( "Invalid input", "Invalid readline. 
Must read a positive number of bytes from the stream"); StringBuffer sb = new StringBuffer(); int count = 0; int c; // FIXME: AAA - verify this method's behavior exactly matches BufferedReader.readLine() // so it can be used as a drop in replacement. try { while (true) { c = in.read(); if ( c == -1 ) { if (sb.length() == 0) return null; break; } if (c == '\n' || c == '\r') break; count++; if (count > max) { throw new ValidationAvailabilityException( "Invalid input", "Invalid readLine. Read more than maximum characters allowed ( " + max + ")"); } sb.append((char) c); } return sb.toString(); } catch (IOException e) { throw new ValidationAvailabilityException( "Invalid input", "Invalid readLine. Problem reading from input stream", e); } } /** * helper function to check if a string is empty * * @param input string input value * @return boolean response if input is empty or not */ private final boolean isEmpty(String input) { return (input==null || input.trim().length() == 0); } /** * helper function to check if a byte is empty * * @param input string input value * @return boolean response if input is empty or not */ private final boolean isEmpty(byte[] input) { return (input==null || input.length == 0); } }
package org.pentaho.di.trans.steps.groupby;

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.math.BigDecimal;
import java.net.SocketTimeoutException;
import java.util.ArrayList;

import org.pentaho.di.core.Const;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleFileException;
import org.pentaho.di.core.exception.KettleValueException;
import org.pentaho.di.core.row.RowDataUtil;
import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueDataUtil;
import org.pentaho.di.core.row.ValueMeta;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStep;
import org.pentaho.di.trans.step.StepDataInterface;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;

/**
 * Groups informations based on aggregation rules. (sum, count, ...)
 *
 * @author Matt
 * @since 2-jun-2003
 */
public class GroupBy extends BaseStep implements StepInterface
{
    private GroupByMeta meta;   // step configuration (group fields, aggregate types, ...)
    private GroupByData data;   // per-execution state (row metadata, running aggregates, spill buffer)

    public GroupBy(StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans)
    {
        super(stepMeta, stepDataInterface, copyNr, transMeta, trans);
        meta=(GroupByMeta)getStepMeta().getStepMetaInterface();
        data=(GroupByData)stepDataInterface;
    }

    /**
     * Processes one input row per call. On the first call all field indexes and
     * output metadata are computed. Rows are accumulated per group; when the
     * group key changes (or input ends) the aggregate is emitted — either as a
     * single group+aggregate row, or (pass-all-rows mode) each buffered row is
     * re-emitted with the group result appended.
     *
     * @return true while more input is expected, false when done
     */
    public boolean processRow(StepMetaInterface smi, StepDataInterface sdi) throws KettleException
    {
        meta=(GroupByMeta)smi;
        data=(GroupByData)sdi;

        Object[] r=getRow();    // get row!

        if (first)
        {
            // What is the output looking like?
            data.inputRowMeta = getInputRowMeta();

            // In case we have 0 input rows, we still want to send out a single row aggregate
            // However... the problem then is that we don't know the layout from receiving it from the previous step over the row set.
            // So we need to calculated based on the metadata...
            if (data.inputRowMeta==null)
            {
                data.inputRowMeta = getTransMeta().getPrevStepFields(getStepMeta());
            }
            data.outputRowMeta = data.inputRowMeta.clone();
            meta.getFields(data.outputRowMeta, getStepname(), null, null, this);

            // Do all the work we can beforehand
            // Calculate indexes, look up fields, etc.
            data.counts = new long[meta.getSubjectField().length];
            data.subjectnrs = new int[meta.getSubjectField().length];
            data.cumulativeSumSourceIndexes = new ArrayList<Integer>();
            data.cumulativeSumTargetIndexes = new ArrayList<Integer>();
            data.cumulativeAvgSourceIndexes = new ArrayList<Integer>();
            data.cumulativeAvgTargetIndexes = new ArrayList<Integer>();

            for (int i=0;i<meta.getSubjectField().length;i++)
            {
                data.subjectnrs[i] = data.inputRowMeta.indexOfValue(meta.getSubjectField()[i]);
                if (data.subjectnrs[i]<0)
                {
                    // Configured aggregate field doesn't exist in the input: hard stop.
                    logError(Messages.getString("GroupBy.Log.AggregateSubjectFieldCouldNotFound",meta.getSubjectField()[i])); //$NON-NLS-1$ //$NON-NLS-2$
                    setErrors(1);
                    stopAll();
                    return false;
                }
                if (meta.getAggregateType()[i]==GroupByMeta.TYPE_GROUP_CUMULATIVE_SUM)
                {
                    data.cumulativeSumSourceIndexes.add(data.subjectnrs[i]);
                    // The position of the target in the output row is the input row size + i
                    data.cumulativeSumTargetIndexes.add(getInputRowMeta().size()+i);
                }
                if (meta.getAggregateType()[i]==GroupByMeta.TYPE_GROUP_CUMULATIVE_AVERAGE)
                {
                    data.cumulativeAvgSourceIndexes.add(data.subjectnrs[i]);
                    // The position of the target in the output row is the input row size + i
                    data.cumulativeAvgTargetIndexes.add(getInputRowMeta().size()+i);
                }
            }

            data.previousSums = new Object[data.cumulativeSumTargetIndexes.size()];
            data.previousAvgSum = new Object[data.cumulativeAvgTargetIndexes.size()];
            data.previousAvgCount = new long[data.cumulativeAvgTargetIndexes.size()];

            data.groupnrs = new int[meta.getGroupField().length];
            for (int i=0;i<meta.getGroupField().length;i++)
            {
                data.groupnrs[i] = data.inputRowMeta.indexOfValue(meta.getGroupField()[i]);
                if (data.groupnrs[i]<0)
                {
                    logError(Messages.getString("GroupBy.Log.GroupFieldCouldNotFound",meta.getGroupField()[i])); //$NON-NLS-1$ //$NON-NLS-2$
                    setErrors(1);
                    stopAll();
                    return false;
                }
            }

            // Create a metadata value for the counter Integers
            data.valueMetaInteger = new ValueMeta("count", ValueMetaInterface.TYPE_INTEGER);
            data.valueMetaNumber = new ValueMeta("sum", ValueMetaInterface.TYPE_NUMBER);

            // Initialize the group metadata
            initGroupMeta(data.inputRowMeta);

            // Create a new group aggregate (init)
            newAggregate(r);

            // for speed: groupMeta+aggMeta
            data.groupAggMeta=new RowMeta();
            data.groupAggMeta.addRowMeta(data.groupMeta);
            data.groupAggMeta.addRowMeta(data.aggMeta);
        }

        if (r==null)  // no more input to be expected... (or none received in the first place)
        {
            if (meta.passAllRows())  // ALL ROWS
            {
                // Fold the last pending row into the aggregate, then replay the
                // whole buffer (memory + spill file) with the group result appended.
                if (data.previous!=null)
                {
                    calcAggregate(data.previous);
                    addToBuffer(data.previous);
                }
                data.groupResult = getAggregateResult();

                Object[] row = getRowFromBuffer();
                long lineNr=0;
                while (row!=null)
                {
                    int size = data.inputRowMeta.size();
                    row=RowDataUtil.addRowData(row, size, data.groupResult);
                    size+=data.groupResult.length;
                    lineNr++;
                    if (meta.isAddingLineNrInGroup() && !Const.isEmpty(meta.getLineNrInGroupField()))
                    {
                        Object lineNrValue= new Long(lineNr);
                        // ValueMetaInterface lineNrValueMeta = new ValueMeta(meta.getLineNrInGroupField(), ValueMetaInterface.TYPE_INTEGER);
                        // lineNrValueMeta.setLength(9);
                        row=RowDataUtil.addValueData(row, size, lineNrValue);
                        size++;
                    }
                    addCumulativeSums(row);
                    addCumulativeAverages(row);
                    putRow(data.outputRowMeta, row);
                    row = getRowFromBuffer();
                }
                closeInput();
            }
            else // JUST THE GROUP + AGGREGATE
            {
                // Don't forget the last set of rows...
                if (data.previous!=null)
                {
                    calcAggregate(data.previous);
                }
                Object[] result = buildResult(data.previous);
                putRow(data.groupAggMeta, result);
            }
            setOutputDone();
            return false;
        }

        if (first)
        {
            first=false;
            data.previous = data.inputRowMeta.cloneRow(r); // copy the row to previous
        }
        else
        {
            // Aggregation is deliberately one row behind: fold the PREVIOUS row in,
            // so a group change (detected below against the current row) flushes
            // an aggregate that excludes the current row.
            calcAggregate(data.previous);
            //System.out.println("After calc, agg="+agg);
            if (meta.passAllRows())
            {
                addToBuffer(data.previous);
            }
        }

        // System.out.println("Check for same group...");
        if (!sameGroup(data.previous, r))
        {
            // System.out.println("Different group!");
            if (meta.passAllRows())
            {
                // System.out.println("Close output...");
                // Not the same group: close output (if any)
                closeOutput();
                // System.out.println("getAggregateResult()");
                // Get all rows from the buffer!
                data.groupResult = getAggregateResult();
                // System.out.println("dump rows from the buffer");
                Object[] row = getRowFromBuffer();
                long lineNr=0;
                while (row!=null)
                {
                    int size = data.inputRowMeta.size();
                    row=RowDataUtil.addRowData(row, size, data.groupResult);
                    size+=data.groupResult.length;
                    lineNr++;
                    if (meta.isAddingLineNrInGroup() && !Const.isEmpty(meta.getLineNrInGroupField()))
                    {
                        Object lineNrValue= new Long(lineNr);
                        // ValueMetaInterface lineNrValueMeta = new ValueMeta(meta.getLineNrInGroupField(), ValueMetaInterface.TYPE_INTEGER);
                        // lineNrValueMeta.setLength(9);
                        row=RowDataUtil.addValueData(row, size, lineNrValue);
                        size++;
                    }
                    addCumulativeSums(row);
                    addCumulativeAverages(row);
                    putRow(data.outputRowMeta, row);
                    row = getRowFromBuffer();
                }
                closeInput();
            }
            else
            {
                Object[] result = buildResult(data.previous);
                putRow(data.groupAggMeta, result);        // copy row to possible alternate rowset(s).
            }
            newAggregate(r);       // Create a new group aggregate (init)
        }

        data.previous=data.inputRowMeta.cloneRow(r);

        if ((linesRead>0) && (linesRead%Const.ROWS_UPDATE)==0) logBasic(Messages.getString("GroupBy.LineNumber")+linesRead); //$NON-NLS-1$

        return true;
    }

    /**
     * Adds the running (cumulative) sum for each configured cumulative-sum field
     * into its target column of the output row, carrying state in data.previousSums.
     * Null source values keep the previous running total unchanged.
     */
    private void addCumulativeSums(Object[] row) throws KettleValueException
    {
        // We need to adjust this row with cumulative averages?
        for (int i=0;i<data.cumulativeSumSourceIndexes.size();i++)
        {
            int sourceIndex = data.cumulativeSumSourceIndexes.get(i);
            Object previousTarget = data.previousSums[i];
            Object sourceValue = row[sourceIndex];

            int targetIndex = data.cumulativeSumTargetIndexes.get(i);

            ValueMetaInterface sourceMeta = getInputRowMeta().getValueMeta(sourceIndex);
            ValueMetaInterface targetMeta = data.outputRowMeta.getValueMeta(targetIndex);

            // If the first values where null, or this is the first time around, just take the source value...
            if (targetMeta.isNull(previousTarget))
            {
                row[targetIndex]=sourceMeta.convertToNormalStorageType(sourceValue);
            }
            else
            {
                // If the source value is null, just take the previous target value
                if (sourceMeta.isNull(sourceValue))
                {
                    row[targetIndex] = previousTarget;
                }
                else
                {
                    row[targetIndex] = ValueDataUtil.plus(targetMeta, data.previousSums[i], sourceMeta, row[sourceIndex]);
                }
            }
            data.previousSums[i] = row[targetIndex];
        }
    }

    /**
     * Adds the running (cumulative) average for each configured field into its
     * target column. Keeps a running sum (data.previousAvgSum) and a count of
     * non-null values (data.previousAvgCount); integer sources are promoted to
     * Double for the division so the average isn't truncated.
     */
    private void addCumulativeAverages(Object[] row) throws KettleValueException
    {
        // We need to adjust this row with cumulative sums
        for (int i=0;i<data.cumulativeAvgSourceIndexes.size();i++)
        {
            int sourceIndex = data.cumulativeAvgSourceIndexes.get(i);
            Object previousTarget = data.previousAvgSum[i];
            Object sourceValue = row[sourceIndex];

            int targetIndex = data.cumulativeAvgTargetIndexes.get(i);

            ValueMetaInterface sourceMeta = getInputRowMeta().getValueMeta(sourceIndex);
            ValueMetaInterface targetMeta = data.outputRowMeta.getValueMeta(targetIndex);

            // If the first values where null, or this is the first time around, just take the source value...
            Object sum = null;

            if (targetMeta.isNull(previousTarget))
            {
                sum=sourceMeta.convertToNormalStorageType(sourceValue);
            }
            else
            {
                // If the source value is null, just take the previous target value
                if (sourceMeta.isNull(sourceValue))
                {
                    sum = previousTarget;
                }
                else
                {
                    if (sourceMeta.isInteger())
                    {
                        sum = ValueDataUtil.plus(data.valueMetaInteger, data.previousAvgSum[i], sourceMeta, row[sourceIndex]);
                    }
                    else
                    {
                        sum = ValueDataUtil.plus(targetMeta, data.previousAvgSum[i], sourceMeta, row[sourceIndex]);
                    }
                }
            }
            data.previousAvgSum[i] = sum;

            // Nulls are excluded from the denominator.
            if (!sourceMeta.isNull(sourceValue)) data.previousAvgCount[i]++;

            if (sourceMeta.isInteger())
            {
                // Change to number as the exception
                if (sum==null)
                {
                    row[targetIndex] = null;
                }
                else
                {
                    row[targetIndex] = new Double( ((Long)sum).doubleValue() / data.previousAvgCount[i] );
                }
            }
            else
            {
                row[targetIndex] = ValueDataUtil.divide(targetMeta, sum, data.valueMetaInteger, data.previousAvgCount[i]);
            }
        }
    }

    // Is the row r of the same group as previous?
    private boolean sameGroup(Object[] previous, Object[] r) throws KettleValueException
    {
        // Compare only the configured group-key columns.
        return data.inputRowMeta.compare(previous, r, data.groupnrs) == 0;
    }

    // Calculate the aggregates in the row...
private void calcAggregate(Object[] r) throws KettleValueException { for (int i=0;i<data.subjectnrs.length;i++) { Object subj = r[data.subjectnrs[i]]; ValueMetaInterface subjMeta=data.inputRowMeta.getValueMeta(data.subjectnrs[i]); Object value = data.agg[i]; ValueMetaInterface valueMeta=data.aggMeta.getValueMeta(i); //System.out.println(" calcAggregate value, i="+i+", agg.size()="+agg.size()+", subj="+subj+", value="+value); switch(meta.getAggregateType()[i]) { case GroupByMeta.TYPE_GROUP_SUM : data.agg[i]=ValueDataUtil.sum(valueMeta, value, subjMeta, subj); break; case GroupByMeta.TYPE_GROUP_AVERAGE : data.agg[i]=ValueDataUtil.sum(valueMeta, value, subjMeta, subj); data.counts[i]++; break; case GroupByMeta.TYPE_GROUP_COUNT_ALL : data.counts[i]++; break; case GroupByMeta.TYPE_GROUP_MIN : if (subjMeta.compare(subj,valueMeta,value)<0) data.agg[i]=subj; break; case GroupByMeta.TYPE_GROUP_MAX : if (subjMeta.compare(subj,valueMeta,value)>0) data.agg[i]=subj; break; case GroupByMeta.TYPE_GROUP_FIRST : if (!(subj==null) && value==null) data.agg[i]=subj; break; case GroupByMeta.TYPE_GROUP_LAST : if (!(subj==null)) data.agg[i]=subj; break; case GroupByMeta.TYPE_GROUP_FIRST_INCL_NULL: // This is on purpose. The calculation of the // first field is done when setting up a new group // This is just the field of the first row // if (linesWritten==0) value.setValue(subj); break; case GroupByMeta.TYPE_GROUP_LAST_INCL_NULL : data.agg[i]=subj; break; case GroupByMeta.TYPE_GROUP_CONCAT_COMMA : if (!(subj==null)) { String vString=valueMeta.getString(value); if (vString.length()>0) vString=vString+", "; //$NON-NLS-1$ data.agg[i]=vString+subjMeta.getString(subj); } break; default: break; } } } // Initialize a group.. 
private void newAggregate(Object[] r) { // Put all the counters at 0 for (int i=0;i<data.counts.length;i++) data.counts[i]=0; data.agg = new Object[data.subjectnrs.length]; data.aggMeta=new RowMeta(); for (int i=0;i<data.subjectnrs.length;i++) { ValueMetaInterface subjMeta=data.inputRowMeta.getValueMeta(data.subjectnrs[i]); Object v=null; ValueMetaInterface vMeta=null; switch(meta.getAggregateType()[i]) { case GroupByMeta.TYPE_GROUP_SUM : case GroupByMeta.TYPE_GROUP_AVERAGE : case GroupByMeta.TYPE_GROUP_CUMULATIVE_SUM : case GroupByMeta.TYPE_GROUP_CUMULATIVE_AVERAGE : vMeta = new ValueMeta(meta.getAggregateField()[i], subjMeta.isNumeric()?subjMeta.getType():ValueMetaInterface.TYPE_NUMBER); switch(subjMeta.getType()) { case ValueMetaInterface.TYPE_BIGNUMBER: v=new BigDecimal("0"); break; case ValueMetaInterface.TYPE_INTEGER: v=new Long(0L); break; case ValueMetaInterface.TYPE_NUMBER: default: v=new Double(0.0); break; } break; case GroupByMeta.TYPE_GROUP_COUNT_ALL : vMeta = new ValueMeta(meta.getAggregateField()[i], ValueMetaInterface.TYPE_INTEGER); v=new Long(0L); break; case GroupByMeta.TYPE_GROUP_FIRST : case GroupByMeta.TYPE_GROUP_LAST : case GroupByMeta.TYPE_GROUP_FIRST_INCL_NULL : case GroupByMeta.TYPE_GROUP_LAST_INCL_NULL : case GroupByMeta.TYPE_GROUP_MIN : case GroupByMeta.TYPE_GROUP_MAX : vMeta = new ValueMeta(meta.getAggregateField()[i], subjMeta.getType()); v = r==null ? null : r[data.subjectnrs[i]]; break; case GroupByMeta.TYPE_GROUP_CONCAT_COMMA : vMeta = new ValueMeta(meta.getAggregateField()[i], ValueMetaInterface.TYPE_STRING); v = ""; //$NON-NLS-1$ break; default: // TODO raise an error here because we cannot continue successfully maybe the UI should validate this break; } if (meta.getAggregateType()[i]!=GroupByMeta.TYPE_GROUP_COUNT_ALL) { vMeta.setLength(subjMeta.getLength(), subjMeta.getPrecision()); } if (v!=null) data.agg[i]=v; data.aggMeta.addValueMeta(vMeta); } // Also clear the cumulative data... 
for (int i=0;i<data.previousSums.length;i++) data.previousSums[i]=null; for (int i=0;i<data.previousAvgCount.length;i++) { data.previousAvgCount[i]=0L; data.previousAvgSum[i]=null; } } private Object[] buildResult(Object[] r) throws KettleValueException { Object[] result = RowDataUtil.allocateRowData(data.groupnrs.length); if (r!=null) { for (int i=0;i<data.groupnrs.length;i++) { result[i]=r[data.groupnrs[i]]; } } result=RowDataUtil.addRowData(result, data.groupnrs.length, getAggregateResult()); return result; } private void initGroupMeta(RowMetaInterface previousRowMeta) throws KettleValueException { data.groupMeta=new RowMeta(); for (int i=0;i<data.groupnrs.length;i++) { data.groupMeta.addValueMeta(previousRowMeta.getValueMeta(data.groupnrs[i])); } return; } private Object[] getAggregateResult() throws KettleValueException { Object[] result = new Object[data.subjectnrs.length]; if (data.subjectnrs!=null) { for (int i=0;i<data.subjectnrs.length;i++) { Object ag = data.agg[i]; switch(meta.getAggregateType()[i]) { case GroupByMeta.TYPE_GROUP_SUM : break; case GroupByMeta.TYPE_GROUP_AVERAGE : ag=ValueDataUtil.divide(data.aggMeta.getValueMeta(i), ag, new ValueMeta("c",ValueMetaInterface.TYPE_INTEGER), new Long(data.counts[i])); break; //$NON-NLS-1$ case GroupByMeta.TYPE_GROUP_COUNT_ALL : ag=new Long(data.counts[i]); break; case GroupByMeta.TYPE_GROUP_MIN : break; case GroupByMeta.TYPE_GROUP_MAX : break; default: break; } result[i]=ag; } } return result; } private void addToBuffer(Object[] row) throws KettleFileException { data.bufferList.add(row); if (data.bufferList.size()>5000) { if (data.rowsOnFile==0) { try { data.tempFile = File.createTempFile(meta.getPrefix(), ".tmp", new File(environmentSubstitute(meta.getDirectory()))); //$NON-NLS-1$ data.fos=new FileOutputStream(data.tempFile); data.dos=new DataOutputStream(data.fos); data.firstRead = true; } catch(IOException e) { throw new 
KettleFileException(Messages.getString("GroupBy.Exception.UnableToCreateTemporaryFile"), e); //$NON-NLS-1$ } } // OK, save the oldest rows to disk! Object[] oldest = (Object[]) data.bufferList.get(0); data.inputRowMeta.writeData(data.dos, oldest); data.bufferList.remove(0); data.rowsOnFile++; } } private Object[] getRowFromBuffer() throws KettleFileException { if (data.rowsOnFile>0) { if (data.firstRead) { // Open the inputstream first... try { data.fis=new FileInputStream( data.tempFile ); data.dis=new DataInputStream( data.fis ); data.firstRead = false; } catch(IOException e) { throw new KettleFileException(Messages.getString("GroupBy.Exception.UnableToReadBackRowFromTemporaryFile"), e); //$NON-NLS-1$ } } // Read one row from the file! Object[] row; try { row = data.inputRowMeta.readData(data.dis); } catch (SocketTimeoutException e) { throw new KettleFileException(e); // Shouldn't happen on files } data.rowsOnFile return row; } else { if (data.bufferList.size()>0) { Object[] row = (Object[])data.bufferList.get(0); data.bufferList.remove(0); return row; } else { return null; // Nothing left! 
} } } private void closeOutput() throws KettleFileException { try { if (data.dos!=null) { data.dos.close(); data.dos=null; } if (data.fos!=null) { data.fos.close(); data.fos=null; } data.firstRead = true; } catch(IOException e) { throw new KettleFileException(Messages.getString("GroupBy.Exception.UnableToCloseInputStream"), e); //$NON-NLS-1$ } } private void closeInput() throws KettleFileException { try { if (data.fis!=null) { data.fis.close(); data.fis=null; } if (data.dis!=null) { data.dis.close(); data.dis=null; } } catch(IOException e) { throw new KettleFileException(Messages.getString("GroupBy.Exception.UnableToCloseInputStream"), e); //$NON-NLS-1$ } } public boolean init(StepMetaInterface smi, StepDataInterface sdi) { meta=(GroupByMeta)smi; data=(GroupByData)sdi; if (super.init(smi, sdi)) { data.bufferList = new ArrayList<Object[]>(); data.rowsOnFile = 0; return true; } return false; } public void dispose(StepMetaInterface smi, StepDataInterface sdi) { if (data.tempFile!=null) data.tempFile.delete(); super.dispose(smi, sdi); } // Run is were the action happens! public void run() { BaseStep.runStepThread(this, meta, data); } }
package org.petschko.rpgmakermv.decrypt;

import com.sun.istack.internal.NotNull;
import org.json.JSONException;
import sun.dc.path.PathException;

import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;

/**
 * Represents an RPG-Maker-MV project directory: loads its file list, locates
 * the System file containing the encryption key, and drives decryption of the
 * encrypted assets into the output directory.
 */
public class RPGProject {
	private String path;
	private String outputPath = Config.defaultOutputDir;
	// System-File (holds the encryption key); null until findSystemFile() locates it.
	private File system = null;
	private String encryptionKeyName = "encryptionKey";
	private boolean isEncrypted = true;
	private ArrayList<File> files = new ArrayList<>();
	private ArrayList<File> encryptedFiles = new ArrayList<>();

	/**
	 * RPGProject Constructor
	 *
	 * @param path - Path to the RPG-Maker-Project
	 * @throws PathException - Path doesn't exists exception
	 */
	public RPGProject(@NotNull String path) throws PathException {
		if(Files.notExists(Paths.get(path)))
			throw new PathException("Project-Path doesn't exists!");

		this.setPath(path);
		this.loadFiles();
		this.findSystemFile();
		this.checkIfEncrypted();

		if(this.isEncrypted())
			this.findEncryptedFiles();
	}

	/**
	 * Returns the Path of the Project
	 *
	 * @return - Path of the Project
	 */
	public String getPath() {
		return path;
	}

	/**
	 * Sets the Path of the Project
	 *
	 * @param path - Path of the Project
	 */
	private void setPath(@NotNull String path) {
		this.path = path;
	}

	/**
	 * Returns the Output (Save-Dir)-Path of the Project
	 *
	 * @return - Output (Save-Dir)-Path of Project
	 */
	public String getOutputPath() {
		return outputPath;
	}

	/**
	 * Sets the Output (Save-Dir)-Path of the Project
	 *
	 * @param outputPath - Output (Save-Dir)-Path of the Project
	 */
	public void setOutputPath(@NotNull String outputPath) {
		this.outputPath = outputPath;
	}

	/**
	 * Returns the System-File
	 *
	 * @return - System-File or null if not set
	 */
	public File getSystem() {
		return system;
	}

	/**
	 * Sets the System-File (with encryption key)
	 *
	 * @param system - System-File
	 */
	public void setSystem(@NotNull File system) {
		this.system = system;
	}

	/**
	 * Returns the EncryptionKeyName
	 *
	 * @return - EncryptionKeyName
	 */
	public String getEncryptionKeyName() {
		return encryptionKeyName;
	}

	/**
	 * Sets the EncryptionKeyName
	 *
	 * @param encryptionKeyName - EncryptionKeyName
	 */
	public void setEncryptionKeyName(String encryptionKeyName) {
		this.encryptionKeyName = encryptionKeyName;
	}

	/**
	 * Set the EncryptionKeyName to the Default-Value
	 */
	public void setEncryptionKeyNameToDefault() {
		this.setEncryptionKeyName("encryptionKey");
	}

	/**
	 * Returns true if Project is encrypted
	 *
	 * @return - true if Project is encrypted else false
	 */
	public boolean isEncrypted() {
		return isEncrypted;
	}

	/**
	 * Sets to true if the Project is Encrypted
	 *
	 * @param encrypted - true if the Project is encrypted else false
	 */
	private void setEncrypted(boolean encrypted) {
		isEncrypted = encrypted;
	}

	/**
	 * Returns the File List of the Project
	 *
	 * @return - File List
	 */
	public ArrayList<File> getFiles() {
		return files;
	}

	/**
	 * Returns the Encryption-File List
	 *
	 * @return - Encryption-File List
	 */
	public ArrayList<File> getEncryptedFiles() {
		return encryptedFiles;
	}

	/**
	 * Load all Files of the Project into an ArrayList
	 */
	private void loadFiles() {
		java.io.File projectPath = new java.io.File(this.getPath());
		ArrayList<java.io.File> files = File.readDirFiles(projectPath);

		for(java.io.File file : files) {
			try {
				this.getFiles().add(new File(file.getCanonicalPath()));
			} catch(Exception e) {
				e.printStackTrace();
			}
		}
	}

	/**
	 * Checks if the Encryption-Key is Found within the System-File
	 *
	 * FIX: the previous version set encrypted=true in a finally-block, which
	 * runs AFTER the catch and therefore always overwrote the false set on
	 * failure — the project was always reported as encrypted. Now true is only
	 * set when key detection actually succeeds.
	 *
	 * NOTE(review): findSystemFile() is still a todo, so getSystem() may be
	 * null here — confirm detectEncryptionKey() tolerates a null file.
	 */
	private void checkIfEncrypted() {
		Decrypter d = new Decrypter();

		try {
			d.detectEncryptionKey(this.getSystem(), this.getEncryptionKeyName());
			// todo later: add more default values and test them
			this.setEncrypted(true);
		} catch(JSONException e) {
			// No key found in the System-File -> project is not encrypted.
			this.setEncrypted(false);
		}
	}

	/**
	 * Find all Encrypted-Files of the Project and save them into an ArrayList
	 */
	private void findEncryptedFiles() {
		if(! this.isEncrypted())
			return;

		for(File file : this.getFiles()) {
			if(file.getExtension().equals("rpgmvp") || file.getExtension().equals("rpgmvm") || file.getExtension().equals("rpgmvo"))
				// todo later: add more default values and test them
				this.getEncryptedFiles().add(file);
		}
	}

	private void findSystemFile() {
		// todo implement find system.json
	}

	/**
	 * Decrypts all encrypted Files and saves them to the output location.
	 *
	 * NOTE(review): the finally-block saves each file even when decryptFile()
	 * threw, i.e. still-encrypted content may be written out — confirm this
	 * best-effort behavior is intended.
	 *
	 * @param ignoreFakeHeader - whether the fake-header check should be skipped
	 * @throws JSONException - if the encryption key can't be detected
	 */
	public void decryptFiles(boolean ignoreFakeHeader) throws JSONException {
		// todo implement decryption through encrypted files array and save them to the output location
		Decrypter decrypter = new Decrypter();
		decrypter.detectEncryptionKey(this.getSystem(), this.getEncryptionKeyName());
		decrypter.setIgnoreFakeHeader(ignoreFakeHeader);

		for(int i = 0; i < this.getEncryptedFiles().size(); i++) {
			File currentFile = this.getEncryptedFiles().get(i);

			try {
				decrypter.decryptFile(currentFile);
			} catch(Exception e) {
				e.printStackTrace();
			} finally {
				currentFile.changePathToFile(this.getOutputPath());
				currentFile.save();
				currentFile.unloadContent();
			}
		}
	}

	public boolean restoreProjectFile() {
		// todo implement create the project file and check if all is on the right spot and decrypted
		return false;
	}
}
package org.unicef.gis.infrastructure.image;

import java.io.File;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;

import org.apache.commons.io.FileUtils;
import org.unicef.gis.R;

import android.annotation.SuppressLint;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.media.ExifInterface;
import android.net.Uri;
import android.os.Environment;
import android.provider.MediaStore;
import android.util.Log;

/**
 * Thin wrapper around the device camera and image storage: launches the
 * capture intent, creates/locates image files in the app album, produces
 * rotated thumbnails from EXIF data and manages cleanup of original and
 * server-side-rotation copies.
 */
public class Camera {
	private static Bitmap PLACEHOLDER = null;

	public static final int TAKE_PICTURE_INTENT = 10;

	private static String UNICEF_GIS_ALBUM = "UNICEF-GIS-ALBUM";
	private static String JPEG_PREFIX = "pic";
	private static String JPEG_FILE_SUFFIX = ".jpg";

	private final Context context;

	public Camera(Context context) {
		this.context = context;
	}

	/** Decodes the placeholder drawable shown while no photo is available. */
	private Bitmap loadPlaceholder() {
		return BitmapFactory.decodeResource(context.getResources(), R.drawable.content_picture);
	}

	/** Lazily-loaded, process-wide placeholder bitmap. */
	public Bitmap getPlaceholder() {
		if (PLACEHOLDER == null)
			PLACEHOLDER = loadPlaceholder();

		return PLACEHOLDER;
	}

	/**
	 * Creates a destination file and fires the camera capture intent for it.
	 * The hosting Activity receives the result under TAKE_PICTURE_INTENT.
	 *
	 * @return the file the camera app will write the picture into
	 */
	public File takePicture() throws IOException {
		File f = createImageFile();
		Uri uri = Uri.fromFile(f);

		Intent takePictureIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
		takePictureIntent.putExtra(MediaStore.EXTRA_OUTPUT, uri);
		((Activity)context).startActivityForResult(takePictureIntent, TAKE_PICTURE_INTENT);

		return f;
	}

	//Expects a uri of the form file://FILE_PATH
	public static File fileFromUri(Uri uri) {
		return fileFromString(uri.toString());
	}

	//Expects a string of the form file://FILE_PATH
	public static File fileFromString(String uri) {
		// NOTE(review): "file://" is 7 characters, so substring(8) also drops the
		// first character of the path (the leading '/' of an absolute path).
		// Behavior kept as-is since stored URIs may rely on it — confirm the
		// stored URI format before changing to substring(7).
		return new File(uri.substring(8));
	}

	/** Asks the media scanner to index a freshly captured picture. */
	public void addPicToGallery(File imageFile) {
		Intent mediaScanIntent = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
		mediaScanIntent.setData(Uri.fromFile(imageFile));
		((Activity)context).sendBroadcast(mediaScanIntent);
	}

	/***
	 * Causes the system to rescan storage looking for changes in media,
	 * so that the media galleries reflect the most up to date state.
	 * For example, if the user deleted reports and we didn't rescan the system,
	 * the pics would be gone from storage but the photo gallery would still show
	 * black placeholders where the deleted pics used to be until the next time the
	 * cellphone is turned off and on.
	 * By calling this after removing pics, the gallery is kept in sync.
	 */
	public void rescanMedia() {
		context.sendBroadcast(new Intent(Intent.ACTION_MEDIA_MOUNTED, Uri.parse("file://" + Environment.getExternalStorageDirectory())));
	}

	/** Creates an empty, uniquely-named jpg in the app album directory. */
	@SuppressLint("SimpleDateFormat")
	private File createImageFile() throws IOException {
		String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
		String imageFileName = JPEG_PREFIX + "_" + timeStamp;
		File image = File.createTempFile(imageFileName, JPEG_FILE_SUFFIX, getAlbumDir());
		return image;
	}

	/** Returns (creating if needed) the album directory under public Pictures. */
	private File getAlbumDir() throws IOException {
		File albumDir = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES), UNICEF_GIS_ALBUM);

		if (!albumDir.exists() || !albumDir.isDirectory()) {
			// A plain file may be squatting on the album name; replace it with a dir.
			if (albumDir.exists()) albumDir.delete();
			albumDir.mkdir();
		}

		return albumDir;
	}

	/**
	 * Decodes a downsampled, EXIF-rotated thumbnail of the given image file.
	 *
	 * @param scaleFactor BitmapFactory inSampleSize (powers of two recommended)
	 * @return the thumbnail, or null if the file no longer exists/decodes
	 */
	public Bitmap getThumbnail(File imageFile, int scaleFactor) {
		BitmapFactory.Options options = new BitmapFactory.Options();
		options.inJustDecodeBounds = false;
		options.inSampleSize = scaleFactor;

		Bitmap originalBitmap = BitmapFactory.decodeFile(imageFile.getAbsolutePath(), options);

		//Image was deleted from storage. Maybe we should launch an exception here.
		if (originalBitmap == null) return null;

		return tryToRotate(imageFile, originalBitmap);
	}

	public Bitmap getThumbnail(Uri imageUri, int scaleFactor) {
		return getThumbnail(fileFromUri(imageUri), scaleFactor);
	}

	/** Rotates the bitmap per its EXIF orientation; falls back to the original on I/O error. */
	private Bitmap tryToRotate(File imageFile, Bitmap bitmap) {
		try {
			ExifInterface exif = new ExifInterface(imageFile.getAbsolutePath());
			int orientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);

			Matrix matrix = new Matrix();
			matrix.postRotate(exifOrientationToDegrees(orientation));

			return Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, true);
		} catch (IOException e) {
			Log.d("Camera", "Couldn't open file to extract EXIF data");
			e.printStackTrace();
			return bitmap;
		}
	}

	/** Maps EXIF orientation constants to clockwise rotation degrees. */
	private float exifOrientationToDegrees(int orientation) {
		switch (orientation) {
		case ExifInterface.ORIENTATION_NORMAL:
			return 0;
		case ExifInterface.ORIENTATION_ROTATE_180:
			return 180;
		case ExifInterface.ORIENTATION_ROTATE_90:
			return 90;
		case ExifInterface.ORIENTATION_ROTATE_270:
			return 270;
		default:
			return 0;
		}
	}

	public static Uri getUri(File imageFile) {
		return Uri.fromFile(imageFile);
	}

	public File rotateImageIfNecessary(String imageUri) {
		/*
		 * Due to a bug in certain phone's brands that caused the images to be corrupted after rotation,
		 * we're rotating the images server side.
		 *
		 * The uploaded picture's name will tell the server which rotation to apply, coded in the filename:
		 * rotate0, rotate90, rotate180 and rotate270
		 */
		Log.d("Camera", "Rotating image" + imageUri);
		File imageFile = fileFromString(imageUri);

		ExifInterface exif;
		try {
			exif = new ExifInterface(imageFile.getAbsolutePath());
		} catch (IOException e) {
			e.printStackTrace();
			Log.d("SyncAdapter", "Couldn't open EXIF data, settling with the original image.");
			return imageFile;
		}

		int orientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);

		String newFilename = "rotate" + Float.valueOf(exifOrientationToDegrees(orientation)).intValue() + "-" + imageFile.getName();
		File destFile = new File(imageFile.getParentFile(), newFilename);

		try {
			FileUtils.copyFile(imageFile, destFile);
		} catch (IOException e) {
			e.printStackTrace();
			Log.d("Camera", "Couldn't save rotated image, settling with original image.");
			return imageFile;
		}

		return destFile;
	}

	/**
	 * Deletes the original capture and any rotation copy produced by
	 * rotateImageIfNecessary().
	 *
	 * FIX: the rotated copies are actually named "rotate&lt;degrees&gt;-&lt;name&gt;"
	 * (see rotateImageIfNecessary), not "rotated-&lt;name&gt;", and the old code
	 * additionally ran the plain filesystem path through fileFromString(),
	 * which strips the first 8 characters of the path — so rotated files were
	 * never deleted. We now try every rotation variant directly, plus the
	 * legacy "rotated-" name for any files created by older builds.
	 */
	public void deleteOriginalAndRotatedImage(String imageUri) {
		//Delete original image (if it's still there)
		File originalImage = fileFromString(imageUri);
		deleteIfExists(originalImage);

		//Delete rotated copies (if they're still there)
		for (int degrees : new int[] { 0, 90, 180, 270 }) {
			deleteIfExists(new File(originalImage.getParentFile(), "rotate" + degrees + "-" + originalImage.getName()));
		}
		deleteIfExists(new File(rotatedFileNameFromOriginal(originalImage)));
	}

	/** Legacy rotated-copy name; kept so old files can still be cleaned up. */
	private String rotatedFileNameFromOriginal(File file) {
		return file.getParent() + "/rotated-" + file.getName();
	}

	private void deleteIfExists(File file) {
		if (file.exists()) file.delete();
	}
}
// This file was generated by RobotBuilder. It contains sections of
// code that are automatically generated and assigned by robotbuilder.
// These sections will be updated in the future when you export to
// Java from RobotBuilder. Do not put any code or make any change in
// the blocks indicating autogenerated code or it will be lost on an
// update. Deleting the comments indicating the section will prevent
// it from being updated in the future.

package org.usfirst.frc862.sirius.subsystems;

import java.util.Map.Entry;
import java.util.TreeMap;

import org.usfirst.frc862.sirius.RobotMap;

import edu.wpi.first.wpilibj.DigitalInput;
import edu.wpi.first.wpilibj.Encoder;
import edu.wpi.first.wpilibj.SpeedController;
import edu.wpi.first.wpilibj.command.Subsystem;

/**
 * Pivot subsystem: drives the pivot motor toward a target angle using an
 * angle-indexed power table (up/down/hold powers) with linear interpolation
 * between table entries.
 */
public class Pivot extends Subsystem {
    // Angle tolerance (degrees of encoder distance) for "at angle".
    private static final double ANGLE_EPSILON = 1.0;

    /** One power-table row: motor powers to move up, move down, or hold position. */
    class PowerTableValue {
        public double up_power;
        public double down_power;
        public double hold_power;

        public PowerTableValue(double u, double d, double h) {
            up_power = u;
            down_power = d;
            hold_power = h;
        }
    }

    // TODO switch to something that uses a primative double key so we don't have to create an object for each get
    private TreeMap<Double, PowerTableValue> powerTable;

    public Pivot() {
        powerTable = new TreeMap<>();

        // TODO externalize to a file and expose to
        // smart dashboard -- verify list is always sorted
        powerTable.put(0.0, new PowerTableValue(-0.3, 0.1, -0.2));
        powerTable.put(10.0, new PowerTableValue(-0.4, 0.1, -0.25));
        powerTable.put(40.0, new PowerTableValue(-0.5, 0.1, -0.3));
    }

    /**
     * Looks up the power values for the given angle, linearly interpolating
     * between the nearest table entries.
     *
     * FIX: floorEntry()/ceilingEntry() return null for angles outside the
     * table's key range (previously an NPE), and an angle that exactly hits a
     * table key made floor == ceil, producing a 0/0 -> NaN interpolation that
     * was fed straight to the motor. Out-of-range angles now clamp to the
     * nearest entry and exact hits return that entry's values directly.
     */
    public PowerTableValue getPowerValues(double angle) {
        // find floor/ceiling values
        Entry<Double, PowerTableValue> floor = powerTable.floorEntry(angle);
        Entry<Double, PowerTableValue> ceil = powerTable.ceilingEntry(angle);

        // Clamp angles outside the table range to the nearest entry.
        if (floor == null) floor = ceil;
        if (ceil == null) ceil = floor;

        // Pull angle and values from the ceil and floor
        double floorAngle = floor.getKey();
        PowerTableValue floorValues = floor.getValue();
        double ceilAngle = ceil.getKey();
        PowerTableValue ceilValues = ceil.getValue();

        // find position between
        double distance = ceilAngle - floorAngle;

        // Exact table hit (or clamped end): avoid dividing by zero below.
        if (distance == 0) {
            return new PowerTableValue(floorValues.up_power, floorValues.down_power, floorValues.hold_power);
        }

        double percent = (angle - floorAngle) / distance;

        // interpolate to position
        return new PowerTableValue(
                interpolate(percent, floorValues.up_power, ceilValues.up_power),
                interpolate(percent, floorValues.down_power, ceilValues.down_power),
                interpolate(percent, floorValues.hold_power, ceilValues.hold_power)
        );
    }

    /** Linear interpolation: dist in [0,1] maps to [low, high]. */
    public double interpolate(double dist, double low, double high) {
        return (high - low) * dist + low;
    }

    // BEGIN AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=CONSTANTS

    // END AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=CONSTANTS

    // BEGIN AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=DECLARATIONS
    private final Encoder angleEncoder = RobotMap.pivotAngleEncoder;
    private final SpeedController angleMotor = RobotMap.pivotAngleMotor;
    private final DigitalInput hallEffect = RobotMap.pivotHallEffect;

    // END AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=DECLARATIONS

    // Put methods for controlling this subsystem
    // here. Call these from Commands.

    public void initDefaultCommand() {
        // BEGIN AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=DEFAULT_COMMAND

        // END AUTOGENERATED CODE, SOURCE=ROBOTBUILDER ID=DEFAULT_COMMAND

        // Set the default command for a subsystem here.
        // setDefaultCommand(new MySpecialCommand());
    }

    /**
     * Applies hold power when within tolerance of the target angle, otherwise
     * drives down (encoder above target) or up (encoder below target).
     */
    public void moveToAngle(double angle) {
        PowerTableValue val = getPowerValues(angle);

        if (atAngle(angle)) {
            angleMotor.set(val.hold_power);
        } else if (angleEncoder.getDistance() > angle) {
            angleMotor.set(val.down_power);
        } else {
            angleMotor.set(val.up_power);
        }
    }

    /** Applies the hold power for the current angle. */
    public void hold() {
        PowerTableValue val = getPowerValues(angleEncoder.getDistance());
        angleMotor.set(val.hold_power);
    }

    /** True when the encoder is within ANGLE_EPSILON of the target angle. */
    public boolean atAngle(double intakeAngle) {
        return Math.abs(angleEncoder.getDistance() - intakeAngle) < ANGLE_EPSILON;
    }

    public double getAngle() {
        return angleEncoder.getDistance();
    }

    /** Raw motor power passthrough (for manual control/tuning). */
    public void setPower(double v) {
        angleMotor.set(v);
    }
}
package peergos.shared.user.fs;

import peergos.shared.util.*;

import java.util.concurrent.*;

/**
 * An AsyncReader decorator that read-ahead buffers up to nChunksToBuffer
 * chunks of the underlying source in a ring buffer, so that sequential
 * (streaming) reads can be served from memory.
 *
 * Invariant (in file coordinates):
 *   bufferStartInFile <= readOffsetInFile <= bufferEndInFile
 *
 * Thread-safety: public methods are synchronized, and refills of the ring
 * buffer are serialized through an AsyncLock; 'closed' is volatile so a
 * close() is seen by the background fill task.
 */
public class BufferedAsyncReader implements AsyncReader {
    // Underlying reader the buffer is filled from.
    private final AsyncReader source;
    // Total size of the file being read, in bytes.
    private final long fileSize;
    // Ring buffer of nChunksToBuffer * Chunk.MAX_SIZE bytes.
    private final byte[] buffer;
    // bufferStartOffset <= readOffset <= bufferEndOffset at all times
    private long readOffsetInFile, bufferStartInFile, bufferEndInFile;
    private int startInBuffer; // index in buffer corresponding to bufferStartInFile
    // File offset one past the end of the previous read; used to detect
    // consecutive (streaming) reads. -1 until the first read.
    private long lastReadEnd = -1;
    private volatile boolean closed = false;
    // Serializes bufferNextChunk() calls between reader and background filler.
    private final AsyncLock<Integer> lock = new AsyncLock<>(Futures.of(0));

    /**
     * @param source            reader to buffer; assumed already positioned at
     *                          bufferStartInFile -- TODO confirm against callers
     * @param nChunksToBuffer   ring-buffer capacity in whole chunks
     * @param fileSize          total file size in bytes
     * @param bufferStartInFile file offset this buffer starts at
     */
    public BufferedAsyncReader(AsyncReader source, int nChunksToBuffer, long fileSize, long bufferStartInFile) {
        this.source = source;
        this.buffer = new byte[nChunksToBuffer * Chunk.MAX_SIZE];
        this.fileSize = fileSize;
        this.bufferStartInFile = bufferStartInFile;
        this.readOffsetInFile = bufferStartInFile;
        this.bufferEndInFile = bufferStartInFile;
        this.startInBuffer = 0;
    }

    public BufferedAsyncReader(AsyncReader source, int nChunksToBuffer, long fileSize) {
        this(source, nChunksToBuffer, fileSize, 0);
    }

    /** Kicks off a background read-ahead of the next chunk on the common pool. */
    private void asyncBufferFill() {
        System.out.println("Async buffer fill");
        ForkJoinPool.commonPool().execute(() -> lock.runWithLock(x -> bufferNextChunk()));
    }

    /**
     * Reads up to one chunk from the source into the ring buffer, advancing
     * bufferEndInFile by the number of bytes actually read. Returns 0 without
     * reading when the buffer is full or the end of file has been reached.
     * Must be called while holding 'lock'.
     */
    private synchronized CompletableFuture<Integer> bufferNextChunk() {
        if (closed)
            return Futures.errored(new RuntimeException("Stream Closed!"));
        if (bufferEndInFile - bufferStartInFile >= buffer.length) {
            System.out.println("Buffer full!");
            return Futures.of(0);
        }
        long initialBufferEndOffset = bufferEndInFile;
        // Ring-buffer index where the next source read lands.
        int writeFromBufferOffset = (int) (initialBufferEndOffset - bufferStartInFile + startInBuffer) % buffer.length;
        // Read one chunk, but not past the physical end of the array...
        int toCopy = Math.min(buffer.length - writeFromBufferOffset, Chunk.MAX_SIZE);
        // ...and not past the end of the file.
        // NOTE(review): this overwrites the array-wrap limit above; presumably
        // the final partial chunk can never need to wrap -- TODO confirm.
        if (fileSize - bufferEndInFile < Chunk.MAX_SIZE)
            toCopy = (int) (fileSize - bufferEndInFile);
        if (toCopy == 0)
            return Futures.of(0);
        System.out.println("Buffering " + toString() + " size " + toCopy);
        return source.readIntoArray(buffer, writeFromBufferOffset, toCopy)
                .thenApply(read -> {
                    this.bufferEndInFile = initialBufferEndOffset + read;
                    return read;
                });
    }

    /**
     *
     * @return Number of buffered bytes
     */
    private synchronized int buffered() {
        return (int) (bufferEndInFile - bufferStartInFile);
    }

    /**
     *
     * @return Number of buffered bytes available to read
     */
    private synchronized int available() {
        return (int) (bufferEndInFile - readOffsetInFile);
    }

    /**
     *
     * @return Number of buffered bytes that have already been read
     */
    private synchronized int read() {
        return (int) (readOffsetInFile - bufferStartInFile);
    }

    /**
     * Reads 'length' bytes into res starting at 'offset', refilling from the
     * source as needed. When two reads arrive back to back (i.e. we appear to
     * be streaming) and there is still file left to buffer, a background
     * read-ahead is started after the read completes.
     */
    @Override
    public synchronized CompletableFuture<Integer> readIntoArray(byte[] res, int offset, int length) {
        boolean twoConsecutiveReads = lastReadEnd == readOffsetInFile;
        lastReadEnd = readOffsetInFile + length;
        System.out.println("Read "+length+" from buffer " + toString());
        return internalReadIntoArray(res, offset, length).thenApply(r -> {
            // Only pre-buffer the next chunk if we've done two consecutive reads, i.e. we're probably streaming
            if (twoConsecutiveReads && buffered() < buffer.length && buffered() < fileSize && !closed)
                asyncBufferFill();
            return r;
        });
    }

    /**
     * Core read loop: serve from the ring buffer when possible, otherwise
     * drain what is available, synchronously buffer another chunk, and recurse
     * for the remainder. As whole chunks are consumed, the buffer window
     * (bufferStartInFile / startInBuffer) slides forward chunk by chunk.
     */
    private synchronized CompletableFuture<Integer> internalReadIntoArray(byte[] res, int offset, int length) {
        int available = available();
        if (available >= length) {
            // we already have all the data buffered
            int readStartInBuffer = (int) (startInBuffer + readOffsetInFile - bufferStartInFile) % buffer.length;
            int toCopy = Math.min(length, buffer.length - readStartInBuffer);
            System.arraycopy(buffer, readStartInBuffer, res, offset, toCopy);
            if (toCopy < length)
                // Wrap around to the start of the ring buffer.
                System.arraycopy(buffer, 0, res, offset + toCopy, length - toCopy);
            readOffsetInFile += length;
            // Release fully-consumed chunks so the filler can reuse their space.
            while (read() >= Chunk.MAX_SIZE) {
                bufferStartInFile += Chunk.MAX_SIZE;
                startInBuffer += Chunk.MAX_SIZE;
            }
            System.out.println("Finished read from buffer of " + length);
            return Futures.of(length);
        }
        if (available > 0) {
            // drain the rest of the buffer
            // NOTE(review): unlike the branch above, this index is not reduced
            // modulo buffer.length -- TODO confirm it cannot exceed the array here.
            int readStartInBuffer = startInBuffer + (int) (readOffsetInFile - bufferStartInFile);
            int toCopy = Math.min(available, buffer.length - readStartInBuffer);
            System.arraycopy(buffer, readStartInBuffer, res, offset, toCopy);
            if (toCopy < available)
                System.arraycopy(buffer, 0, res, offset + toCopy, available - toCopy);
            // Only toCopy bytes are consumed here; any wrapped bytes copied above
            // are re-read (and overwritten) by the recursive call below.
            readOffsetInFile += toCopy;
            while (read() >= Chunk.MAX_SIZE) {
                bufferStartInFile += Chunk.MAX_SIZE;
                startInBuffer += Chunk.MAX_SIZE;
            }
            System.out.println("Partial read from buffer of " + toCopy);
            return lock.runWithLock(x -> bufferNextChunk())
                    .thenCompose(x -> internalReadIntoArray(res, offset + toCopy, length - toCopy)
                            .thenApply(i -> length));
        }
        // Nothing buffered: fill a chunk and retry the whole request.
        System.out.println("Buffer empty, refilling...");
        return lock.runWithLock(x -> bufferNextChunk())
                .thenCompose(x -> internalReadIntoArray(res, offset, length));
    }

    /** JS-friendly seek: reassembles a 64-bit offset from two signed 32-bit halves. */
    @Override
    public CompletableFuture<AsyncReader> seekJS(int high32, int low32) {
        long seek = ((long) (high32) << 32) | (low32 & 0xFFFFFFFFL);
        return seek(seek);
    }

    /**
     * Seeks by closing this buffer and building a fresh one aligned to the
     * enclosing chunk boundary, then discarding the bytes between the chunk
     * start and the requested offset. Returns this unchanged if already at
     * the requested offset.
     */
    @Override
    public synchronized CompletableFuture<AsyncReader> seek(long offset) {
        System.out.println("BufferedReader.seek " + offset);
        if (offset == readOffsetInFile)
            return Futures.of(this);
        close();
        long aligned = offset - offset % Chunk.MAX_SIZE;
        return source.seek(aligned)
                .thenCompose(r -> {
                    BufferedAsyncReader res = new BufferedAsyncReader(r, buffer.length / Chunk.MAX_SIZE, fileSize, aligned);
                    // do a dummy read into our buffer to get to correct position
                    return res.internalReadIntoArray(buffer, 0, (int) (offset - aligned))
                            .thenApply(n -> res);
                });
    }

    /** Closes this buffer and returns a fresh one over a reset source. */
    @Override
    public CompletableFuture<AsyncReader> reset() {
        System.out.println("BufferedReader.reset()");
        close();
        return source.reset()
                .thenApply(r -> new BufferedAsyncReader(r, buffer.length/Chunk.MAX_SIZE, fileSize));
    }

    /** Marks the stream closed; in-flight background fills will then error out. */
    @Override
    public void close() {
        System.out.println("BufferedReader.close()");
        this.closed = true;
    }

    @Override
    public String toString() {
        return "BufferedReader{" +
                "readOffsetInFile=" + readOffsetInFile +
                ", bufferStartInFile=" + bufferStartInFile +
                ", bufferEndInFile=" + bufferEndInFile +
                ", startInBuffer=" + startInBuffer +
                '}';
    }
}
package pitt.search.semanticvectors; import java.lang.IllegalArgumentException; import java.util.ArrayList; import java.util.Iterator; import java.util.LinkedList; import java.util.Enumeration; import java.util.logging.Logger; import pitt.search.semanticvectors.LuceneUtils; import pitt.search.semanticvectors.VectorSearcher; import pitt.search.semanticvectors.VectorStore; import pitt.search.semanticvectors.vectors.BinaryVectorUtils; import pitt.search.semanticvectors.vectors.IncompatibleVectorsException; import pitt.search.semanticvectors.vectors.Vector; import pitt.search.semanticvectors.vectors.VectorType; import pitt.search.semanticvectors.vectors.VectorUtils; /** * Class for searching vector stores using different scoring functions. * Each VectorSearcher implements a particular scoring function which is * normally query dependent, so each query needs its own VectorSearcher. */ abstract public class VectorSearcher { private static final Logger logger = Logger.getLogger(VectorSearcher.class.getCanonicalName()); private VectorStore searchVecStore; private LuceneUtils luceneUtils; /** * Expand search space for dual-predicate searches */ public static VectorStore expandSearchSpace(VectorStore searchVecStore) { VectorStoreRAM nusearchspace = new VectorStoreRAM(searchVecStore.getVectorType(), searchVecStore.getDimension()); Enumeration<ObjectVector> allVectors = searchVecStore.getAllVectors(); ArrayList<ObjectVector> storeVectors = new ArrayList<ObjectVector>(); while (allVectors.hasMoreElements()) { ObjectVector nextObjectVector = allVectors.nextElement(); nusearchspace.putVector(nextObjectVector.getObject(), nextObjectVector.getVector()); storeVectors.add(nextObjectVector); } for (int x=0; x < storeVectors.size()-1; x++) for (int y=x; y < storeVectors.size(); y++) { Vector vec1 = storeVectors.get(x).getVector().copy(); Vector vec2 = storeVectors.get(y).getVector().copy(); String obj1 = storeVectors.get(x).getObject().toString(); String obj2 = 
storeVectors.get(y).getObject().toString(); vec1.release(vec2); nusearchspace.putVector(obj2+":"+obj1, vec1); if (nusearchspace.getVectorType().equals(VectorType.COMPLEX)) { vec2.release(storeVectors.get(x).getVector().copy()); nusearchspace.putVector(obj1+":"+obj2, vec2); } } System.err.println("Expanding search space from "+storeVectors.size()+" to "+nusearchspace.getNumVectors()); return nusearchspace; } /** * This needs to be filled in for each subclass. It takes an individual * vector and assigns it a relevance score for this VectorSearcher. */ public abstract double getScore(Vector testVector); /** * Performs basic initialization; subclasses should normally call super() to use this. * @param queryVecStore Vector store to use for query generation. * @param searchVecStore The vector store to search. * @param luceneUtils LuceneUtils object to use for query weighting. (May be null.) */ public VectorSearcher(VectorStore queryVecStore, VectorStore searchVecStore, LuceneUtils luceneUtils) { this.searchVecStore = searchVecStore; this.luceneUtils = luceneUtils; if (Flags.expandsearchspace) searchVecStore = expandSearchSpace(searchVecStore); } /** * This nearest neighbor search is implemented in the abstract * VectorSearcher class itself: this enables all subclasses to reuse * the search whatever scoring method they implement. Since query * expressions are built into the VectorSearcher, * getNearestNeighbors no longer takes a query vector as an * argument. * @param numResults the number of results / length of the result list. 
*/ public LinkedList<SearchResult> getNearestNeighbors(int numResults) { LinkedList<SearchResult> results = new LinkedList<SearchResult>(); double score = -1; double threshold = Flags.searchresultsminscore; if (Flags.stdev) threshold = 0; //Counters for statistics to calculate standard deviation double sum=0, sumsquared=0; int count=0; Enumeration<ObjectVector> vecEnum = searchVecStore.getAllVectors(); while (vecEnum.hasMoreElements()) { // Test this element. ObjectVector testElement = vecEnum.nextElement(); score = getScore(testElement.getVector()); // This is a way of using the Lucene Index to get term and // document frequency information to reweight all results. It // seems to be good at moving excessively common terms further // down the results. Note that using this means that scores // returned are no longer just cosine similarities. if (this.luceneUtils != null && Flags.usetermweightsinsearch) { score = score * luceneUtils.getGlobalTermWeightFromString((String) testElement.getObject()); } if (Flags.stdev) { count++; sum += score; sumsquared += Math.pow(score, 2); } if (score > threshold) { boolean added = false; for (int i = 0; i < results.size(); ++i) { // Add to list if this is right place. if (score > results.get(i).getScore() && added == false) { results.add(i, new SearchResult(score, testElement)); added = true; } } // Prune list if there are already numResults. if (results.size() > numResults) { results.removeLast(); threshold = results.getLast().getScore(); } else { if (added == false) { results.add(new SearchResult(score, testElement)); } } } } if (Flags.stdev) results = transformToStats(results, count, sum, sumsquared); return results; } /** * This search is implemented in the abstract * VectorSearcher class itself: this enables all subclasses to reuse * the search whatever scoring method they implement. Since query * expressions are built into the VectorSearcher, * getAllAboveThreshold does not takes a query vector as an * argument. 
* * This will retrieve all the results above the threshold score passed * as a parameter. It is more computationally convenient than getNearestNeighbor * when large numbers of results are anticipated * * @param numResults the number of results / length of the result list. */ public LinkedList<SearchResult> getAllAboveThreshold(float threshold) { LinkedList<SearchResult> results = new LinkedList<SearchResult>(); double score; Enumeration<ObjectVector> vecEnum = null; vecEnum = searchVecStore.getAllVectors(); while (vecEnum.hasMoreElements()) { // Test this element. ObjectVector testElement = vecEnum.nextElement(); if (testElement == null) score = Float.MIN_VALUE; else { Vector testVector = testElement.getVector(); score = getScore(testVector); } if (score > threshold) { results.add(new SearchResult(score, testElement));} } return results; } /** * Class for searching a vector store using cosine similarity. * Takes a sum of positive query terms and optionally negates some terms. */ static public class VectorSearcherCosine extends VectorSearcher { Vector queryVector; /** * @param queryVecStore Vector store to use for query generation. * @param searchVecStore The vector store to search. * @param luceneUtils LuceneUtils object to use for query weighting. (May be null.) * @param queryTerms Terms that will be parsed into a query * expression. If the string "NOT" appears, terms after this will be negated. */ public VectorSearcherCosine( VectorStore queryVecStore, VectorStore searchVecStore, LuceneUtils luceneUtils, String[] queryTerms) throws ZeroVectorException { super(queryVecStore, searchVecStore, luceneUtils); this.queryVector = CompoundVectorBuilder.getQueryVector( queryVecStore, luceneUtils, queryTerms); if (this.queryVector.isZeroVector()) { throw new ZeroVectorException("Query vector is zero ... no results."); } } /** * @param queryVecStore Vector store to use for query generation. * @param searchVecStore The vector store to search. 
* @param luceneUtils LuceneUtils object to use for query weighting. (May be null.) * @param queryVector Vector representing query * expression. If the string "NOT" appears, terms after this will be negated. */ public VectorSearcherCosine( VectorStore queryVecStore, VectorStore searchVecStore, LuceneUtils luceneUtils, Vector queryVector) throws ZeroVectorException { super(queryVecStore, searchVecStore, luceneUtils); this.queryVector = queryVector; Vector testVector = searchVecStore.getAllVectors().nextElement().getVector(); IncompatibleVectorsException.checkVectorsCompatible(queryVector, testVector); if (this.queryVector.isZeroVector()) { throw new ZeroVectorException("Query vector is zero ... no results."); } } @Override public double getScore(Vector testVector) { return this.queryVector.measureOverlap(testVector); } } /** * Class for searching a vector store using the bound product of a series two vectors. */ static public class VectorSearcherBoundProduct extends VectorSearcher { Vector queryVector; public VectorSearcherBoundProduct(VectorStore queryVecStore, VectorStore boundVecStore, VectorStore searchVecStore, LuceneUtils luceneUtils, String term1, String term2) throws ZeroVectorException { super(queryVecStore, searchVecStore, luceneUtils); this.queryVector = CompoundVectorBuilder.getBoundProductQueryVectorFromString(queryVecStore, term1); queryVector.release(CompoundVectorBuilder.getBoundProductQueryVectorFromString(boundVecStore, term2)); if (this.queryVector.isZeroVector()) { throw new ZeroVectorException("Query vector is zero ... 
no results."); } } public VectorSearcherBoundProduct(VectorStore queryVecStore, VectorStore boundVecStore, VectorStore searchVecStore, LuceneUtils luceneUtils, String term1) throws ZeroVectorException { super(queryVecStore, searchVecStore, luceneUtils); this.queryVector = CompoundVectorBuilder.getBoundProductQueryVectorFromString(queryVecStore, boundVecStore, term1); if (this.queryVector.isZeroVector()) { throw new ZeroVectorException("Query vector is zero ... no results."); } } /** * @param queryVecStore Vector store to use for query generation. * @param searchVecStore The vector store to search. * @param luceneUtils LuceneUtils object to use for query weighting. (May be null.) * @param queryVector Vector representing query * expression. If the string "NOT" appears, terms after this will be negated. */ public VectorSearcherBoundProduct(VectorStore queryVecStore, VectorStore searchVecStore, LuceneUtils luceneUtils, Vector queryVector) throws ZeroVectorException { super(queryVecStore, searchVecStore, luceneUtils); this.queryVector = queryVector; Vector testVector = searchVecStore.getAllVectors().nextElement().getVector(); IncompatibleVectorsException.checkVectorsCompatible(queryVector, testVector); if (this.queryVector.isZeroVector()) { throw new ZeroVectorException("Query vector is zero ... no results."); } } @Override public double getScore(Vector testVector) { return this.queryVector.measureOverlap(testVector); } } /** * Class for searching a vector store using the bound product of a series two vectors. 
*/ static public class VectorSearcherBoundProductSubSpace extends VectorSearcher { ArrayList<Vector> disjunctSpace; VectorType vectorType; public VectorSearcherBoundProductSubSpace(VectorStore queryVecStore, VectorStore boundVecStore, VectorStore searchVecStore, LuceneUtils luceneUtils, String term1, String term2) throws ZeroVectorException { super(queryVecStore, searchVecStore, luceneUtils); disjunctSpace = new ArrayList<Vector>(); vectorType = queryVecStore.getVectorType(); Vector queryVector = queryVecStore.getVector(term1).copy(); if (queryVector.isZeroVector()) { throw new ZeroVectorException("Query vector is zero ... no results."); } this.disjunctSpace = CompoundVectorBuilder.getBoundProductQuerySubSpaceFromString( boundVecStore, queryVector, term2); } @Override public double getScore(Vector testVector) { if (!vectorType.equals(VectorType.BINARY)) return VectorUtils.compareWithProjection(testVector, disjunctSpace); else return BinaryVectorUtils.compareWithProjection(testVector, disjunctSpace); } } /** * Class for searching a vector store using quantum disjunction similarity. */ static public class VectorSearcherSubspaceSim extends VectorSearcher { private ArrayList<Vector> disjunctSpace; private VectorType vectorType; /** * @param queryVecStore Vector store to use for query generation. * @param searchVecStore The vector store to search. * @param luceneUtils LuceneUtils object to use for query weighting. (May be null.) * @param queryTerms Terms that will be parsed and used to generate a query subspace. 
*/ public VectorSearcherSubspaceSim(VectorStore queryVecStore, VectorStore searchVecStore, LuceneUtils luceneUtils, String[] queryTerms) throws ZeroVectorException { super(queryVecStore, searchVecStore, luceneUtils); this.disjunctSpace = new ArrayList<Vector>(); this.vectorType = queryVecStore.getVectorType(); for (int i = 0; i < queryTerms.length; ++i) { System.out.println("\t" + queryTerms[i]); // There may be compound disjuncts, e.g., "A NOT B" as a single argument. String[] tmpTerms = queryTerms[i].split("\\s"); Vector tmpVector = CompoundVectorBuilder.getQueryVector( queryVecStore, luceneUtils, tmpTerms); if (tmpVector != null) { this.disjunctSpace.add(tmpVector); } } if (this.disjunctSpace.size() == 0) { throw new ZeroVectorException("No nonzero input vectors ... no results."); } if (!vectorType.equals(VectorType.BINARY)) VectorUtils.orthogonalizeVectors(this.disjunctSpace); else BinaryVectorUtils.orthogonalizeVectors(this.disjunctSpace); } /** * Scoring works by taking scalar product with disjunctSpace * (which must by now be represented using an orthogonal basis). * @param testVector Vector being tested. */ @Override public double getScore(Vector testVector) { if (!vectorType.equals(VectorType.BINARY)) return VectorUtils.compareWithProjection(testVector, disjunctSpace); else return BinaryVectorUtils.compareWithProjection(testVector, disjunctSpace); } } /** * Class for searching a vector store using minimum distance similarity. */ static public class VectorSearcherMaxSim extends VectorSearcher { private ArrayList<Vector> disjunctVectors; /** * @param queryVecStore Vector store to use for query generation. * @param searchVecStore The vector store to search. * @param luceneUtils LuceneUtils object to use for query weighting. (May be null.) * @param queryTerms Terms that will be parsed and used to generate a query subspace. 
*/ public VectorSearcherMaxSim(VectorStore queryVecStore, VectorStore searchVecStore, LuceneUtils luceneUtils, String[] queryTerms) throws ZeroVectorException { super(queryVecStore, searchVecStore, luceneUtils); this.disjunctVectors = new ArrayList<Vector>(); for (int i = 0; i < queryTerms.length; ++i) { // There may be compound disjuncts, e.g., "A NOT B" as a single argument. String[] tmpTerms = queryTerms[i].split("\\s"); Vector tmpVector = CompoundVectorBuilder.getQueryVector( queryVecStore, luceneUtils, tmpTerms); if (tmpVector != null) { this.disjunctVectors.add(tmpVector); } } if (this.disjunctVectors.size() == 0) { throw new ZeroVectorException("No nonzero input vectors ... no results."); } } /** * Scoring works by taking scalar product with disjunctSpace * (which must by now be represented using an orthogonal basis). * @param testVector Vector being tested. */ @Override public double getScore(Vector testVector) { double score = -1; double max_score = -1; for (int i = 0; i < disjunctVectors.size(); ++i) { score = this.disjunctVectors.get(i).measureOverlap(testVector); if (score > max_score) { max_score = score; } } return max_score; } } /** * Class for searching a permuted vector store using cosine similarity. * Uses implementation of rotation for permutation proposed by Sahlgren et al 2008 * Should find the term that appears frequently in the position p relative to the * index term (i.e. sat +1 would find a term occurring frequently immediately after "sat" */ static public class VectorSearcherPerm extends VectorSearcher { Vector theAvg; /** * @param queryVecStore Vector store to use for query generation. * @param searchVecStore The vector store to search. * @param luceneUtils LuceneUtils object to use for query weighting. (May be null.) * @param queryTerms Terms that will be parsed into a query * expression. If the string "?" 
appears, terms best fitting into this position will be returned */ public VectorSearcherPerm(VectorStore queryVecStore, VectorStore searchVecStore, LuceneUtils luceneUtils, String[] queryTerms) throws IllegalArgumentException, ZeroVectorException { super(queryVecStore, searchVecStore, luceneUtils); try { theAvg = pitt.search.semanticvectors.CompoundVectorBuilder. getPermutedQueryVector(queryVecStore, luceneUtils, queryTerms); } catch (IllegalArgumentException e) { logger.info("Couldn't create permutation VectorSearcher ..."); throw e; } if (theAvg.isZeroVector()) { throw new ZeroVectorException("Permutation query vector is zero ... no results."); } } @Override public double getScore(Vector testVector) { return theAvg.measureOverlap(testVector); } } /** * Test searcher for finding a is to b as c is to ? * * Doesn't do well yet! * * @author dwiddows */ static public class AnalogySearcher extends VectorSearcher { Vector queryVector; public AnalogySearcher( VectorStore queryVecStore, VectorStore searchVecStore, LuceneUtils luceneUtils, String[] queryTriple) { super(queryVecStore, searchVecStore, luceneUtils); Vector term0 = CompoundVectorBuilder.getQueryVectorFromString(queryVecStore, luceneUtils, queryTriple[0]); Vector term1 = CompoundVectorBuilder.getQueryVectorFromString(queryVecStore, luceneUtils, queryTriple[1]); Vector term2 = CompoundVectorBuilder.getQueryVectorFromString(queryVecStore, luceneUtils, queryTriple[2]); Vector relationVec = term0.copy(); relationVec.bind(term1); this.queryVector = term2.copy(); this.queryVector.release(relationVec); } @Override public double getScore(Vector testVector) { return queryVector.measureOverlap(testVector); } } /** * Class for searching a permuted vector store using cosine similarity. * Uses implementation of rotation for permutation proposed by Sahlgren et al 2008 * Should find the term that appears frequently in the position p relative to the * index term (i.e. 
sat +1 would find a term occurring frequently immediately after "sat" * This is a variant that takes into account different results obtained when using either * permuted or random index vectors as the cue terms, by taking the mean of the results * obtained with each of these options. */ static public class BalancedVectorSearcherPerm extends VectorSearcher { Vector oneDirection; Vector otherDirection; VectorStore searchVecStore, queryVecStore; LuceneUtils luceneUtils; String[] queryTerms; /** * @param queryVecStore Vector store to use for query generation (this is also reversed). * @param searchVecStore The vector store to search (this is also reversed). * @param luceneUtils LuceneUtils object to use for query weighting. (May be null.) * @param queryTerms Terms that will be parsed into a query * expression. If the string "?" appears, terms best fitting into this position will be returned */ public BalancedVectorSearcherPerm(VectorStore queryVecStore, VectorStore searchVecStore, LuceneUtils luceneUtils, String[] queryTerms) throws IllegalArgumentException, ZeroVectorException { super(queryVecStore, searchVecStore, luceneUtils); this.queryVecStore = queryVecStore; this.searchVecStore = searchVecStore; this.luceneUtils = luceneUtils; try { oneDirection = pitt.search.semanticvectors.CompoundVectorBuilder. getPermutedQueryVector(queryVecStore,luceneUtils,queryTerms); otherDirection = pitt.search.semanticvectors.CompoundVectorBuilder. getPermutedQueryVector(searchVecStore,luceneUtils,queryTerms); } catch (IllegalArgumentException e) { logger.info("Couldn't create balanced permutation VectorSearcher ..."); throw e; } if (oneDirection.isZeroVector()) { throw new ZeroVectorException("Permutation query vector is zero ... no results."); } } /** * This overides the nearest neighbor class implemented in the abstract * {@code VectorSearcher} class. * * WARNING: This implementation fails to respect flags used by the * {@code VectorSearcher.getNearestNeighbors} method. 
* * @param numResults the number of results / length of the result list. */ @Override public LinkedList<SearchResult> getNearestNeighbors(int numResults) { LinkedList<SearchResult> results = new LinkedList<SearchResult>(); double score, score1, score2 = -1; double threshold = Flags.searchresultsminscore; if (Flags.stdev) threshold = 0; //Counters for statistics to calculate standard deviation double sum=0, sumsquared=0; int count=0; Enumeration<ObjectVector> vecEnum = searchVecStore.getAllVectors(); Enumeration<ObjectVector> vecEnum2 = queryVecStore.getAllVectors(); while (vecEnum.hasMoreElements()) { // Test this element. ObjectVector testElement = vecEnum.nextElement(); ObjectVector testElement2 = vecEnum2.nextElement(); score1 = getScore(testElement.getVector()); score2 = getScore2(testElement2.getVector()); score = Math.max(score1,score2); // This is a way of using the Lucene Index to get term and // document frequency information to reweight all results. It // seems to be good at moving excessively common terms further // down the results. Note that using this means that scores // returned are no longer just cosine similarities. if ((this.luceneUtils != null) && Flags.usetermweightsinsearch) { score = score * luceneUtils.getGlobalTermWeightFromString((String) testElement.getObject()); } if (Flags.stdev) { count++; sum += score; sumsquared += Math.pow(score, 2); } if (score > threshold) { boolean added = false; for (int i = 0; i < results.size(); ++i) { // Add to list if this is right place. if (score > results.get(i).getScore() && added == false) { results.add(i, new SearchResult(score, testElement)); added = true; } } // Prune list if there are already numResults. 
if (results.size() > numResults) { results.removeLast(); threshold = results.getLast().getScore(); } else { if (added == false) { results.add(new SearchResult(score, testElement)); } } } } if (Flags.stdev) results = transformToStats(results, count, sum, sumsquared); return results; } @Override public double getScore(Vector testVector) { testVector.normalize(); return oneDirection.measureOverlap(testVector); } public double getScore2(Vector testVector) { testVector.normalize(); return (otherDirection.measureOverlap(testVector)); } } /** * calculates approximation of standard deviation (using a somewhat imprecise single-pass algorithm) * and recasts top scores as number of standard deviations from the mean (for a single search) * * @return list of results with scores as number of standard deviations from mean */ public static LinkedList<SearchResult> transformToStats( LinkedList<SearchResult> rawResults,int count, double sum, double sumsq) { LinkedList<SearchResult> transformedResults = new LinkedList<SearchResult>(); double variancesquared = sumsq - (Math.pow(sum,2)/count); double stdev = Math.sqrt(variancesquared/(count)); double mean = sum/count; Iterator<SearchResult> iterator = rawResults.iterator(); while (iterator.hasNext()) { SearchResult temp = iterator.next(); double score = temp.getScore(); score = new Double((score-mean)/stdev).floatValue(); if (score > Flags.searchresultsminscore) transformedResults.add(new SearchResult(score, temp.getObjectVector())); } return transformedResults; } }
package aQute.libg.reporter; import java.lang.reflect.*; import java.util.*; import java.util.regex.*; import aQute.libg.generics.*; import aQute.service.reporter.*; /** * Mainly used for testing where reporters are needed. */ public class ReporterAdapter implements Reporter, Report, Runnable { final List<String> errors = new ArrayList<String>(); final List<String> warnings = new ArrayList<String>(); final List<LocationImpl> locations = new ArrayList<LocationImpl>(); static class LocationImpl extends Location implements SetLocation { public LocationImpl(String e) { // TODO Auto-generated constructor stub } public SetLocation file(String file) { this.file = file; return this; } public SetLocation header(String header) { this.header = header; return this; } public SetLocation context(String context) { this.context = context; return this; } public SetLocation method(String methodName) { this.methodName = methodName; return this; } public SetLocation line(int line) { this.line = line; return this; } public SetLocation reference(String reference) { this.reference = reference; return this; } } final Formatter out; boolean trace; boolean pedantic; boolean exceptions; /** * @return the exceptions */ public boolean isExceptions() { return exceptions; } /** * @param exceptions * the exceptions to set */ public void setExceptions(boolean exceptions) { this.exceptions = exceptions; } /** * @return the out */ public Formatter getOut() { return out; } /** * @return the trace */ public boolean isTrace() { return trace; } /** * @param pedantic * the pedantic to set */ public void setPedantic(boolean pedantic) { this.pedantic = pedantic; } public ReporterAdapter() { out = null; } public ReporterAdapter(Appendable app) { out = new Formatter(app); } public SetLocation error(String s, Object... args) { String e = String.format(s, args); errors.add(e); trace("ERROR: %s", e); return location(e); } public SetLocation exception(Throwable t, String s, Object... 
args) { StackTraceElement[] stackTrace = t.getStackTrace(); String method = stackTrace[0].getMethodName(); String cname = stackTrace[0].getClassName(); String e = String.format("["+shorten(cname) +"."+method+"] " +s, args); errors.add(e); trace("ERROR: %s", e); if (isExceptions() || isTrace()) if (t instanceof InvocationTargetException) t.getCause().printStackTrace(System.err); else t.printStackTrace(System.err); return location(e); } private String shorten(String cname) { int index = cname.lastIndexOf('$'); if ( index < 0) index = cname.lastIndexOf('.'); return cname.substring(index+1); } public SetLocation warning(String s, Object... args) { String e = String.format(s, args); warnings.add(e); trace("warning: %s", e); return location(e); } private SetLocation location(String e) { LocationImpl loc = new LocationImpl(e); locations.add( loc ); return loc; } public void progress(float progress, String s, Object... args) { if (out != null) { out.format(s, args); if (!s.endsWith(String.format("%n"))) out.format("%n"); } } public void trace(String s, Object... args) { if (trace && out != null) { out.format("# " + s + "%n", args); out.flush(); } } public List<String> getWarnings() { return warnings; } public List<String> getErrors() { return errors; } public boolean isPedantic() { return false; } public void setTrace(boolean b) { this.trace = b; } public boolean isOk() { return errors.isEmpty(); } public boolean isPerfect() { return isOk() && warnings.isEmpty(); } public boolean check(String... 
pattern) { Set<String> missed = Create.set(); if (pattern != null) { for (String p : pattern) { boolean match = false; Pattern pat = Pattern.compile(p); for (Iterator<String> i = errors.iterator(); i.hasNext();) { if (pat.matcher(i.next()).find()) { i.remove(); match = true; } } for (Iterator<String> i = warnings.iterator(); i.hasNext();) { if (pat.matcher(i.next()).find()) { i.remove(); match = true; } } if (!match) missed.add(p); } } if (missed.isEmpty() && isPerfect()) return true; if (!missed.isEmpty()) System.err.println("Missed the following patterns in the warnings or errors: " + missed); report(System.err); return false; } /** * Report the errors and warnings */ public void report(Appendable out) { Formatter f = new Formatter(out); report("Error", getErrors(), f); report("Warning", getWarnings(), f); f.flush(); } void report(String title, Collection<String> list, Formatter f) { if (list.isEmpty()) return; f.format(title + (list.size() > 1 ? "s" : "") + "%n"); int n = 0; for (String s : list) { f.format("%3s. 
%s%n", n++, s); } } public boolean getInfo(Report other) { return getInfo(other,null); } public boolean getInfo(Report other, String prefix) { addErrors(prefix, other.getErrors()); addWarnings(prefix, other.getWarnings()); return other.isOk(); } public Location getLocation(String msg) { for ( LocationImpl loc : locations ) { if ((loc.message != null) && loc.message.equals(msg)) return loc; } return null; } /** * Handy routine that can be extended by subclasses * so they can run inside the context */ public void run() { throw new UnsupportedOperationException("Must be implemented by subclass"); } /** * Return a messages object bound to this adapter */ public <T> T getMessages(Class<T> c) { return ReporterMessages.base(this, c); } /** * Add a number of errors */ public void addErrors( String prefix, Collection<String> errors) { if ( prefix == null) prefix = ""; else prefix = prefix + ": "; for ( String s: errors) { this.errors.add( prefix + s); } } /** * Add a number of warnings */ public void addWarnings( String prefix, Collection<String> warnings) { if ( prefix == null) prefix = ""; else prefix = prefix + ": "; for ( String s: warnings) { this.warnings.add( prefix + s); } } }
package aQute.libg.reporter; import java.lang.reflect.*; import java.util.*; import java.util.regex.*; import aQute.libg.generics.*; import aQute.service.reporter.*; /** * Mainly used for testing where reporters are needed. */ public class ReporterAdapter implements Reporter, Report, Runnable { final List<String> errors = new ArrayList<String>(); final List<String> warnings = new ArrayList<String>(); final List<LocationImpl> locations = new ArrayList<LocationImpl>(); static class LocationImpl extends Location implements SetLocation { public LocationImpl(String e) { // TODO Auto-generated constructor stub } public SetLocation file(String file) { this.file = file; return this; } public SetLocation header(String header) { this.header = header; return this; } public SetLocation context(String context) { this.context = context; return this; } public SetLocation method(String methodName) { this.methodName = methodName; return this; } public SetLocation line(int line) { this.line = line; return this; } public SetLocation reference(String reference) { this.reference = reference; return this; } } final Formatter out; boolean trace; boolean pedantic; boolean exceptions; /** * @return the exceptions */ public boolean isExceptions() { return exceptions; } /** * @param exceptions * the exceptions to set */ public void setExceptions(boolean exceptions) { this.exceptions = exceptions; } /** * @return the out */ public Formatter getOut() { return out; } /** * @return the trace */ public boolean isTrace() { return trace; } /** * @param pedantic * the pedantic to set */ public void setPedantic(boolean pedantic) { this.pedantic = pedantic; } public ReporterAdapter() { out = null; } public ReporterAdapter(Appendable app) { out = new Formatter(app); } public SetLocation error(String s, Object... args) { String e = String.format(s, args); errors.add(e); trace("ERROR: %s", e); return location(e); } public SetLocation exception(Throwable t, String s, Object... 
args) { StackTraceElement[] stackTrace = t.getStackTrace(); String method = stackTrace[0].getMethodName(); String cname = stackTrace[0].getClassName(); String e = String.format("["+shorten(cname) +"."+method+"] " +s, args); errors.add(e); trace("ERROR: %s", e); if (isExceptions() || isTrace()) if (t instanceof InvocationTargetException) t.getCause().printStackTrace(System.err); else t.printStackTrace(System.err); return location(e); } private String shorten(String cname) { int index = cname.lastIndexOf('$'); if ( index < 0) index = cname.lastIndexOf('.'); return cname.substring(index+1); } public SetLocation warning(String s, Object... args) { String e = String.format(s, args); warnings.add(e); trace("warning: %s", e); return location(e); } private SetLocation location(String e) { LocationImpl loc = new LocationImpl(e); locations.add( loc ); return loc; } public void progress(float progress, String s, Object... args) { if (out != null) { out.format(s, args); if (!s.endsWith(String.format("%n"))) out.format("%n"); } } public void trace(String s, Object... args) { if (trace && out != null) { out.format("# " + s + "%n", args); out.flush(); } } public List<String> getWarnings() { return warnings; } public List<String> getErrors() { return errors; } public boolean isPedantic() { return false; } public void setTrace(boolean b) { this.trace = b; } public boolean isOk() { return errors.isEmpty(); } public boolean isPerfect() { return isOk() && warnings.isEmpty(); } public boolean check(String... 
pattern) { Set<String> missed = Create.set(); if (pattern != null) { for (String p : pattern) { boolean match = false; Pattern pat = Pattern.compile(p); for (Iterator<String> i = errors.iterator(); i.hasNext();) { if (pat.matcher(i.next()).find()) { i.remove(); match = true; } } for (Iterator<String> i = warnings.iterator(); i.hasNext();) { if (pat.matcher(i.next()).find()) { i.remove(); match = true; } } if (!match) missed.add(p); } } if (missed.isEmpty() && isPerfect()) return true; if (!missed.isEmpty()) System.err.println("Missed the following patterns in the warnings or errors: " + missed); report(System.err); return false; } /** * Report the errors and warnings */ public void report(Appendable out) { Formatter f = new Formatter(out); report("Error", getErrors(), f); report("Warning", getWarnings(), f); f.flush(); } void report(String title, Collection<String> list, Formatter f) { if (list.isEmpty()) return; f.format(title + (list.size() > 1 ? "s" : "") + "%n"); int n = 0; for (String s : list) { f.format("%3s. %s%n", n++, s); } } public boolean getInfo(Report other) { return getInfo(other,null); } public boolean getInfo(Report other, String prefix) { boolean ok = true; if ( prefix == null) prefix = ""; else prefix = prefix + ": "; for ( String error : other.getErrors()) { errors.add( prefix + error); ok = false; } for ( String warning : other.getWarnings()) { warnings.add( prefix + warning); } return ok; } public Location getLocation(String msg) { for ( LocationImpl loc : locations ) { if ((loc.message != null) && loc.message.equals(msg)) return loc; } return null; } /** * Handy routine that can be extended by subclasses * so they can run inside the context */ public void run() { throw new UnsupportedOperationException("Must be implemented by subclass"); } /** * Return a messages object bound to this adapter */ public <T> T getMessages(Class<T> c) { return ReporterMessages.base(this, c); } }
package com.caju.uheer.interfaces;

/**
 * REST endpoint URLs for the uheer backend API.
 * <p>
 * NOTE(review): the base URL is a hard-coded IP over plain HTTP; consider
 * moving it to build configuration. (Constant-interface pattern kept for
 * backward compatibility with existing implementors.)
 */
public interface Routes {
    // Base URL of the API; all routes below are derived from it.
    String URL = "http://54.207.33.242/api/";
    String CHANNELS = URL + "channels/";
    String MUSICS = URL + "musics/";
    String STATUS = URL + "status/";
}
package com.dwj.coolweather;

import android.app.ProgressDialog;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentActivity;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;

import com.dwj.coolweather.db.City;
import com.dwj.coolweather.db.County;
import com.dwj.coolweather.db.Province;
import com.dwj.coolweather.util.DataUtil;
import com.dwj.coolweather.util.HttpUtil;

import org.litepal.crud.DataSupport;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import okhttp3.Call;
import okhttp3.Response;

/**
 * Fragment implementing a three-level drill-down area picker
 * (province -> city -> county). Data is read from the local LitePal
 * database first and fetched from the guolin.tech API on a cache miss.
 */
public class AreaFragment extends Fragment {

    private static final String TAG = "AreaFragment";

    private ListView mListView;
    private ImageView mBack;       // back arrow: steps up one drill-down level
    private TextView mText;        // title: current province/city (or "China")
    private FragmentActivity mContext;
    // Display strings backing the list adapter; mutated in place.
    private ArrayList<String> mDataList = new ArrayList<String>();
    private List<Province> mProvincesList;
    private List<City> mCityList;
    private List<County> mCountyList;
    // Drill-down level constants; mQueryNumber tracks the current level.
    private static final int QUERY_PROVINCE = 0;
    private static final int QUERY_CITY = 1;
    private static final int QUERY_COUNTY = 2;
    private int mQueryNumber = QUERY_PROVINCE;
    private ArrayAdapter<String> mAdapter;
    private ProgressDialog mProgressDialog;
    private Province mSelectProvince;  // selection made at the province level
    private City mSelectCity;          // selection made at the city level

    public AreaFragment() {
    }

    /** Inflates the layout, binds views and installs the list adapter. */
    @Nullable
    @Override
    public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container,
                             @Nullable Bundle savedInstanceState) {
        View view = inflater.inflate(R.layout.area_fragment, container, false);
        mContext = getActivity();
        mListView = ((ListView) view.findViewById(R.id.list_view));
        mBack = ((ImageView) view.findViewById(R.id.back));
        mText = ((TextView) view.findViewById(R.id.title));
        mAdapter = new ArrayAdapter<>(mContext,
                android.R.layout.simple_dropdown_item_1line, mDataList);
        mListView.setAdapter(mAdapter);
        return view;
    }

    /** Wires up click handlers and starts at the province level. */
    @Override
    public void onActivityCreated(@Nullable Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        mListView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> adapterView, View view, int i, long l) {
                // Drill down one level; a county click is intentionally ignored here.
                switch (mQueryNumber) {
                    case QUERY_PROVINCE:
                        mQueryNumber = QUERY_CITY;
                        mSelectProvince = mProvincesList.get(i);
                        queryCity();
                        break;
                    case QUERY_CITY:
                        mQueryNumber = QUERY_COUNTY;
                        mSelectCity = mCityList.get(i);
                        queryCounty();
                        break;
                }
            }
        });
        mBack.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                // Step back up: county -> city -> province.
                if (mQueryNumber == QUERY_COUNTY) {
                    mQueryNumber = QUERY_CITY;
                    queryCity();
                } else if (mQueryNumber == QUERY_CITY) {
                    mQueryNumber = QUERY_PROVINCE;
                    queryProvince();
                }
            }
        });
        queryProvince();
    }

    /** Shows counties of the selected city, from DB or (on miss) the network. */
    private void queryCounty() {
        mBack.setVisibility(View.VISIBLE);
        mText.setText(mSelectCity.getCityName());
        mCountyList = DataSupport.where("cityId= ?",
                String.valueOf(mSelectCity.getCityCode())).find(County.class);
        if (mCountyList.size() > 0) {
            mDataList.clear();
            for (County county : mCountyList) {
                mDataList.add(county.getCountyName());
            }
            mAdapter.notifyDataSetChanged();
            mListView.setSelection(0);
        } else {
            int provinceCode = mSelectCity.getProvinceId();
            int cityCode = mSelectCity.getCityCode();
            String countyUrl = "http://guolin.tech/api/china/" + provinceCode + "/" + cityCode;
            Log.d(TAG, "queryCounty: " + provinceCode + "cityCode " + cityCode);
            queryFromService(countyUrl);
        }
    }

    /** Shows cities of the selected province, from DB or (on miss) the network. */
    private void queryCity() {
        mBack.setVisibility(View.VISIBLE);
        mText.setText(mSelectProvince.getProvinceName());
        mCityList = DataSupport.where("provinceId= ?",
                String.valueOf(mSelectProvince.getProvinceCode())).find(City.class);
        if (mCityList.size() > 0) {
            mDataList.clear();
            for (City city : mCityList) {
                mDataList.add(city.getCityName());
            }
            mAdapter.notifyDataSetChanged();
            mListView.setSelection(0);
        } else {
            int provinceCode = mSelectProvince.getProvinceCode();
            Log.d(TAG, "queryCity: " + provinceCode);
            String cityUrl = "http://guolin.tech/api/china/" + provinceCode;
            queryFromService(cityUrl);
        }
    }

    /** Shows all provinces, from DB or (on miss) the network. */
    private void queryProvince() {
        mBack.setVisibility(View.GONE);
        mText.setText(R.string.china);
        mProvincesList = DataSupport.findAll(Province.class);
        if (mProvincesList.size() > 0) {
            mDataList.clear();
            for (Province province : mProvincesList) {
                mDataList.add(province.getProvinceName());
            }
            mAdapter.notifyDataSetChanged();
            //list
            mListView.setSelection(0);
        } else {
            String provinceUrl = "http://guolin.tech/api/china";
            queryFromService(provinceUrl);
        }
    }

    /**
     * Fetches area data from the given URL, persists it via DataUtil, then
     * re-runs the query for the current level so the DB branch populates the
     * list. All UI updates are posted back to the main thread.
     */
    private void queryFromService(String url) {
        showProgressDialog();
        HttpUtil.handleHttpRequest(url, new okhttp3.Callback() {
            @Override
            public void onFailure(Call call, IOException e) {
                mContext.runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        Toast.makeText(mContext, " ", Toast.LENGTH_SHORT).show();
                        Log.d(TAG, "onFailure: " + "");
                        dismissProgressDialog();
                    }
                });
            }

            @Override
            public void onResponse(Call call, Response response) throws IOException {
                String data = response.body().string();
                boolean isSuccess = false;
                // Parse and persist according to the level being queried.
                if (mQueryNumber == QUERY_PROVINCE) {
                    isSuccess = DataUtil.saveProvinceData(data);
                } else if (mQueryNumber == QUERY_CITY) {
                    isSuccess = DataUtil.saveCityData(data, mSelectProvince.getProvinceCode());
                } else if (mQueryNumber == QUERY_COUNTY) {
                    isSuccess = DataUtil.saveCountyData(data, mSelectCity.getCityCode());
                }
                if (isSuccess) {
                    mContext.runOnUiThread(new Runnable() {
                        @Override
                        public void run() {
                            dismissProgressDialog();
                            // Re-query: data is now in the DB, so this refreshes the list.
                            if (mQueryNumber == QUERY_PROVINCE) {
                                queryProvince();
                            } else if (mQueryNumber == QUERY_CITY) {
                                queryCity();
                            } else if (mQueryNumber == QUERY_COUNTY) {
                                queryCounty();
                            }
                        }
                    });
                } else {
                    mContext.runOnUiThread(new Runnable() {
                        @Override
                        public void run() {
                            dismissProgressDialog();
                            Toast.makeText(mContext, " ", Toast.LENGTH_SHORT).show();
                            Log.d(TAG, "onResponse: " + "");
                        }
                    });
                }
            }
        });
    }

    /** Lazily creates and shows the modal progress dialog. */
    private void showProgressDialog() {
        if (mProgressDialog == null) {
            mProgressDialog = new ProgressDialog(mContext);
            mProgressDialog.setTitle("");
            mProgressDialog.setMessage("waiting. . .");
        }
        mProgressDialog.show();
    }

    /** Dismisses and drops the progress dialog, if showing. */
    private void dismissProgressDialog() {
        if (mProgressDialog != null) {
            mProgressDialog.dismiss();
            mProgressDialog = null;
        }
    }
}
package com.gcw.sapienza.places; import android.content.Context; import android.content.Intent; import android.net.Uri; import android.util.Log; import com.parse.ParsePushBroadcastReceiver; public class Receiver extends ParsePushBroadcastReceiver { @Override public void onPushOpen(Context context, Intent intent) { Log.d("Push", "Clicked"); // Default behavior: simply open up the Main Activity when clicking on the push notification // Intent i = new Intent(context, MainActivity.class); // i.putExtras(intent.getExtras()); // i.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK); // context.startActivity(i); Intent browserIntent = new Intent(Intent.ACTION_VIEW, Uri.parse("https://drive.google.com/folderview?id=0B1boWbY-47RQdHJnSlpScUNueTQ&usp=drive_web")); browserIntent.putExtras(intent.getExtras()); browserIntent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK); context.startActivity(browserIntent); } }
package com.ruuvi.station.model; import android.content.Context; import java.util.Date; import java.util.List; import com.raizlabs.android.dbflow.annotation.Column; import com.raizlabs.android.dbflow.annotation.PrimaryKey; import com.raizlabs.android.dbflow.annotation.Table; import com.raizlabs.android.dbflow.data.Blob; import com.raizlabs.android.dbflow.sql.language.SQLite; import com.raizlabs.android.dbflow.structure.BaseModel; import com.ruuvi.station.R; import com.ruuvi.station.database.LocalDatabase; import com.ruuvi.station.util.Humidity; import com.ruuvi.station.util.Preferences; import com.ruuvi.station.util.Utils; @Table(database = LocalDatabase.class) public class RuuviTag extends BaseModel { @Column @PrimaryKey public String id; @Column public String url; @Column public int rssi; public double[] data; @Column public String name; @Column public double temperature; @Column public double humidity; @Column public double pressure; @Column public boolean favorite; @Column public Blob rawDataBlob; public byte[] rawData; @Column public double accelX; @Column public double accelY; @Column public double accelZ; @Column public double voltage; @Column public Date updateAt; @Column public String gatewayUrl; @Column public int defaultBackground; @Column public String userBackground; @Column public int dataFormat; @Column public double txPower; @Column public int movementCounter; @Column public int measurementSequenceNumber; public RuuviTag() { } public RuuviTag preserveData(RuuviTag tag) { tag.name = this.name; tag.favorite = this.favorite; tag.gatewayUrl = this.gatewayUrl; tag.defaultBackground = this.defaultBackground; tag.userBackground = this.userBackground; tag.updateAt = new Date(); return tag; } private double getFahrenheit() { return Utils.celciusToFahrenheit(this.temperature); } public static String getTemperatureUnit(Context context) { return new Preferences(context).getTemperatureUnit(); } public static HumidityUnit getHumidityUnit(Context context) { return 
new Preferences(context).getHumidityUnit(); } public String getTemperatureString(Context context) { String temperatureUnit = RuuviTag.getTemperatureUnit(context); if (temperatureUnit.equals("C")) { return String.format(context.getString(R.string.temperature_reading), this.temperature) + temperatureUnit; } return String.format(context.getString(R.string.temperature_reading), this.getFahrenheit()) + temperatureUnit; } public String getHumidityString(Context context) { HumidityUnit humidityUnit = RuuviTag.getHumidityUnit(context); Humidity calculation = new Humidity(temperature, humidity / 100.0); switch (humidityUnit) { case PERCENT: return String.format(context.getString(R.string.humidity_reading), humidity); case GM3: return String.format(context.getString(R.string.humidity_absolute_reading), calculation.getAh()); case DEW: String temperatureUnit = RuuviTag.getTemperatureUnit(context); if (temperatureUnit.equals("C")) { return String.format(context.getString(R.string.humidity_dew_reading), calculation.getTd()) + temperatureUnit; } else { return String.format(context.getString(R.string.humidity_dew_reading), calculation.getTdF()) + temperatureUnit; } default: return context.getString(R.string.n_a); } } public String getDispayName() { return (this.name != null && !this.name.isEmpty()) ? this.name : this.id; } public static List<RuuviTag> getAll(boolean favorite) { return SQLite.select() .from(RuuviTag.class) .where(RuuviTag_Table.favorite.eq(favorite)) .queryList(); } public static RuuviTag get(String id) { return SQLite.select() .from(RuuviTag.class) .where(RuuviTag_Table.id.eq(id)) .querySingle(); } public void deleteTagAndRelatives() { SQLite.delete() .from(Alarm.class) .where(Alarm_Table.ruuviTagId.eq(this.id)) .execute(); SQLite.delete() .from(TagSensorReading.class) .where(TagSensorReading_Table.ruuviTagId.eq(this.id)) .execute(); this.delete(); } }
package com.sgwares.android;

import android.app.Activity;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.design.widget.Snackbar;
import android.util.Log;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.ListView;

import com.google.android.gms.tasks.OnCompleteListener;
import com.google.android.gms.tasks.Task;
import com.google.firebase.auth.FirebaseAuth;
import com.google.firebase.database.ChildEventListener;
import com.google.firebase.database.DataSnapshot;
import com.google.firebase.database.DatabaseError;
import com.google.firebase.database.DatabaseReference;
import com.google.firebase.database.FirebaseDatabase;
import com.sgwares.android.models.Game;
import com.sgwares.android.models.User;

import java.util.ArrayList;
import java.util.List;

/**
 * Fullscreen activity for setting up and starting a game: lists Firebase
 * users as possible participants, lets the player add them, then creates a
 * Game record and swaps the content view for the game surface.
 */
public class GameActivity extends Activity {

    private static final String TAG = GameActivity.class.getSimpleName();
    private FirebaseDatabase mDatabase;
    private DatabaseReference mGame;   // set once the game is pushed to "games"
    // Users that can still be added; the current user and added users live in
    // mParticipants instead.
    private List<User> mPossibleParticipants = new ArrayList<>();;
    private List<User> mParticipants = new ArrayList<>();
    private ArrayAdapter mAdapter;     // NOTE(review): raw type; backed by mPossibleParticipants

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
                WindowManager.LayoutParams.FLAG_FULLSCREEN);
        setContentView(R.layout.activity_game);
        final FirebaseAuth auth = FirebaseAuth.getInstance();
        mDatabase = FirebaseDatabase.getInstance();
        // NOTE(review): this getReference("users") result is discarded; the
        // reference actually used is usersRef below.
        mDatabase.getReference("users");
        mAdapter = new ArrayAdapter(this, android.R.layout.simple_list_item_1,
                mPossibleParticipants);
        final ListView listView = (ListView) findViewById(R.id.possible_participants);
        listView.setAdapter(mAdapter);
        listView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                addParticipant(mPossibleParticipants.get(position));
            }
        });
        // Streams the "users" node: the signed-in user auto-joins
        // mParticipants, everyone else becomes a possible participant.
        final ChildEventListener childEventListener = new ChildEventListener() {
            @Override
            public void onChildAdded(DataSnapshot dataSnapshot, String previousChildName) {
                Log.d(TAG, "onChildAdded: " + dataSnapshot.getKey());
                User user = dataSnapshot.getValue(User.class);
                user.setKey(dataSnapshot.getKey());
                if (user.getKey().equals(auth.getCurrentUser().getUid())) {
                    Log.d(TAG, "Current user: " + user);
                    mParticipants.add(user);
                } else {
                    Log.d(TAG, "New possible participant: " + user);
                    mPossibleParticipants.add(user);
                    mAdapter.notifyDataSetChanged();
                }
            }

            @Override
            public void onChildChanged(DataSnapshot dataSnapshot, String previousChildName) {
                Log.d(TAG, "onChildChanged:" + dataSnapshot.getKey());
                User user = dataSnapshot.getValue(User.class);
                user.setKey(dataSnapshot.getKey());
                //TODO update participant picker
            }

            @Override
            public void onChildRemoved(DataSnapshot dataSnapshot) {
                Log.d(TAG, "onChildRemoved:" + dataSnapshot.getKey());
                //TODO remove from participant picker
            }

            @Override
            public void onChildMoved(DataSnapshot dataSnapshot, String previousChildName) {
                Log.d(TAG, "onChildMoved:" + dataSnapshot.getKey());
            }

            @Override
            public void onCancelled(DatabaseError databaseError) {
                Log.w(TAG, "onCancelled", databaseError.toException());
            }
        };
        final DatabaseReference usersRef = mDatabase.getReference("users");
        usersRef.addChildEventListener(childEventListener);
        final Button startGame = (Button) findViewById(R.id.start);
        startGame.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                if (mParticipants.isEmpty()) {
                    Snackbar.make(findViewById(R.id.content_main), "No participants",
                            Snackbar.LENGTH_LONG)
                            .setAction("Action", null).show();
                } else {
                    // Stop listening for user changes before creating the game.
                    usersRef.removeEventListener(childEventListener);
                    createGame();
                }
            }
        });
    }

    /** Moves a user from the possible-participants list into the game. */
    private void addParticipant(User user) {
        Log.d(TAG, "addParticipant: " + user);
        mParticipants.add(user);
        mPossibleParticipants.remove(user);
        mAdapter.notifyDataSetChanged();
        Snackbar.make(findViewById(R.id.content_main), user.getName() + " added",
                Snackbar.LENGTH_LONG)
                .setAction("Action", null).show();
    }

    /**
     * Pushes a new Game to Firebase and, once the write completes, replaces
     * the content view with the live game surface for the first participant.
     */
    private void createGame() {
        final Game game = new Game();
        game.setParticipants(mParticipants);
        game.setBackground("#bbbbbb");
        mGame = mDatabase.getReference("games").push();
        Log.d(TAG, "Created game key: " + mGame.getKey());
        mGame.setValue(game).addOnCompleteListener(new OnCompleteListener<Void>() {
            @Override
            public void onComplete(@NonNull Task<Void> task) {
                Log.d(TAG, "Create game onComplete: " + task.isSuccessful());
                GameSurface surface = new GameSurface(getApplicationContext(), game,
                        mParticipants.get(0));
                setContentView(surface);
            }
        });
    }

    @Override
    public void onBackPressed() {
        Log.d(TAG, "onBackPressed: end game");
        //TODO confirmation box, option to end game, show final score
        finish();
    }
}
package com.t28.rxweather;

import android.net.Uri;
import android.text.TextUtils;
import android.util.Log;

import com.android.volley.NetworkResponse;
import com.android.volley.Request;
import com.android.volley.Response;
import com.android.volley.VolleyError;

import org.apache.http.HttpStatus;

/**
 * Volley GET request against the OpenWeatherMap "weather" endpoint.
 * Query parameters (city name+country, city id, or lat/lon) are assembled
 * by the {@link Builder}.
 *
 * NOTE(review): this class looks unfinished — see the inline notes on
 * parseNetworkResponse/deliverResponse and the null error listener.
 */
public class WeatherRequest extends Request<Weather> {

    private WeatherRequest(Builder builder) {
        // NOTE(review): error listener is null, so failures are never
        // surfaced to the caller — confirm whether errors are handled
        // elsewhere (e.g. via an overridden deliverError).
        super(Method.GET, buildUrl(builder), null);
    }

    @Override
    protected Response<Weather> parseNetworkResponse(NetworkResponse response) {
        if (response.statusCode != HttpStatus.SC_OK) {
            return Response.error(new VolleyError("Invalid status code:" + response.statusCode));
        }
        // NOTE(review): returns null on the success path instead of
        // Response.success(parsedWeather, cacheEntry) — the response body is
        // never parsed into a Weather object. Needs implementing.
        return null;
    }

    @Override
    protected void deliverResponse(Weather response) {
        // NOTE(review): intentionally(?) empty — parsed responses are
        // dropped. A listener callback is expected here.
    }

    /** Builds the request URL from whichever Builder fields were populated. */
    private static String buildUrl(Builder builder) {
        final Uri.Builder urlBuilder = new Uri.Builder();
        urlBuilder.scheme("http").authority("api.openweathermap.org").path("/data/2.5/weather");
        urlBuilder.appendQueryParameter("APPID", builder.mApiKey);
        // City name is only used together with a country code ("name,CC").
        if (!TextUtils.isEmpty(builder.mCityName) && !TextUtils.isEmpty(builder.mCountryCode)) {
            urlBuilder.appendQueryParameter("q", builder.mCityName + "," + builder.mCountryCode);
        }
        if (!TextUtils.isEmpty(builder.mCityId)) {
            urlBuilder.appendQueryParameter("id", builder.mCityId);
        }
        // NaN acts as the "not set" sentinel for coordinates.
        if (!Double.isNaN(builder.mLat)) {
            urlBuilder.appendQueryParameter("lat", String.valueOf(builder.mLat));
        }
        if (!Double.isNaN(builder.mLon)) {
            urlBuilder.appendQueryParameter("lon", String.valueOf(builder.mLon));
        }
        return urlBuilder.build().toString();
    }

    /**
     * Fluent builder; supply an API key plus one location selector
     * (city name + country code, city id, or lat/lon).
     */
    public static class Builder {
        private static final double NO_LAT = Double.NaN;
        private static final double NO_LON = Double.NaN;

        private final String mApiKey;
        private String mCityName;
        private String mCountryCode;
        private String mCityId;
        private double mLat = NO_LAT;
        private double mLon = NO_LON;

        public Builder(String apiKey) {
            mApiKey = apiKey;
        }

        public Builder setCityName(String name) {
            mCityName = name;
            return this;
        }

        public Builder setCountryCode(String code) {
            mCountryCode = code;
            return this;
        }

        public Builder setCityId(String id) {
            mCityId = id;
            return this;
        }

        public Builder setLat(double lat) {
            mLat = lat;
            return this;
        }

        public Builder setLon(double lon) {
            mLon = lon;
            return this;
        }

        public WeatherRequest build() {
            return new WeatherRequest(this);
        }
    }
}
package de.sopa.scene.game;

import de.sopa.model.game.GameService;
import de.sopa.model.game.Tile;

import java.util.Map;

import org.andengine.entity.Entity;
import org.andengine.entity.IEntity;
import org.andengine.entity.modifier.MoveXModifier;
import org.andengine.entity.modifier.MoveYModifier;
import org.andengine.entity.sprite.Sprite;
import org.andengine.opengl.texture.region.ITextureRegion;
import org.andengine.opengl.texture.region.TextureRegion;
import org.andengine.opengl.vbo.VertexBufferObjectManager;
import org.andengine.util.modifier.ease.*;

/**
 * AndEngine entity rendering the SOPA game field: one sprite per puzzle tile,
 * rotated edge sprites for start/finish markers, a border overlay, and
 * animated line shifts that call back into the {@link GameService}.
 *
 * @author David Schilling - davejs92@gmail.com
 * @author Raphael Schilling
 */
public class GameFieldView extends Entity {

    private final GameService gameService;
    private final float spacePerTile;   // edge length of one tile in pixels
    private final Map<Character, TextureRegion> tileRegionMap;
    private final VertexBufferObjectManager vbom;
    private final ITextureRegion tilesBorderRegion;
    private TileSprite[][] tileSprites; // [x][y]; only PUZZLE tiles are stored here

    public GameFieldView(float pX, float pY, float spacePerTile, GameService gameService,
                         Map<Character, TextureRegion> regionMap, VertexBufferObjectManager vbom,
                         ITextureRegion tilesBorderRegion) {
        super(pX, pY);
        this.gameService = gameService;
        this.spacePerTile = spacePerTile;
        this.tileRegionMap = regionMap;
        this.vbom = vbom;
        this.tilesBorderRegion = tilesBorderRegion;
    }

    /**
     * (Re)builds all tile sprites from the current level field, then lays the
     * border sprite over the inner play area. 'n' tiles are empty and skipped.
     */
    public void addTiles() {
        detachChildren();
        Tile[][] field = gameService.getLevel().getField();
        int width = field.length;
        int heigth = field[0].length; // NOTE(review): typo for "height"
        tileSprites = new TileSprite[width][heigth];
        int tilePositionY = 0;
        for (int y = 0; y < heigth; y++) {
            int tilePositionX = 0;
            for (int x = 0; x < width; x++) {
                if (field[x][y].getShortcut() != 'n') {
                    TextureRegion pTextureRegion = tileRegionMap.get(field[x][y].getShortcut());
                    switch (field[x][y].getTileType()) {
                        case PUZZLE:
                            TileSprite tileSprite = new TileSprite(tilePositionX, tilePositionY,
                                    spacePerTile, spacePerTile, pTextureRegion, vbom);
                            attachChild(tileSprite);
                            tileSprites[x][y] = tileSprite;
                            break;
                        case FINISH:
                            createFinishAnsStart(x, y, tilePositionX, tilePositionY,
                                    pTextureRegion, field);
                            break;
                        case START:
                            createFinishAnsStart(x, y, tilePositionX, tilePositionY,
                                    pTextureRegion, field);
                            break;
                        default:
                            break;
                    }
                }
                tilePositionX += spacePerTile;
            }
            tilePositionY += spacePerTile;
        }
        // Border covers the inner (width-2)^2 playing area, inset by one tile.
        attachChild(new Sprite(spacePerTile, spacePerTile, spacePerTile * (width - 2),
                spacePerTile * (width - 2), tilesBorderRegion, vbom));
    }

    /**
     * Animates shifting one row (horizontal) or column (vertical) by one tile
     * in {@code direction} (+1/-1); when each tile's move finishes, the model
     * is updated via gameService.shiftLine.
     *
     * @param horizontal true to shift a row, false a column
     * @param row        0-based index of the inner row/column (border excluded)
     * @param direction  +1 or -1 tile widths
     */
    public void oneStep(final boolean horizontal, int row, final int direction) {
        if (row < 0) {
            return;
        }
        // Keep the model index; the view index is offset by 1 for the border.
        final int finalRow = row;
        row++;
        if (horizontal) {
            if (row > tileSprites.length - 2) {
                return;
            }
            for (int x = 1; x < tileSprites.length - 1; x++) {
                TileSprite tileSprite = tileSprites[x][row];
                tileSprite.registerEntityModifier(new MoveXModifier(0.3f, tileSprite.getX(),
                        tileSprite.getX() + tileSprite.getWidth() * direction,
                        EaseQuadInOut.getInstance()) {
                    @Override
                    protected void onModifierFinished(IEntity pItem) {
                        // NOTE(review): shiftLine fires once per tile sprite —
                        // confirm the model tolerates/expects repeated calls.
                        gameService.shiftLine(horizontal, finalRow, direction);
                        super.onModifierFinished(pItem);
                    }
                });
            }
        } else {
            if (row > tileSprites[0].length - 2) {
                return;
            }
            for (int y = 1; y < tileSprites[row].length - 1; y++) {
                TileSprite tileSprite = tileSprites[row][y];
                tileSprite.registerEntityModifier(new MoveYModifier(0.3f, tileSprite.getY(),
                        tileSprite.getY() + tileSprite.getWidth() * direction,
                        EaseQuadInOut.getInstance()) {
                    @Override
                    protected void onModifierFinished(IEntity pItem) {
                        gameService.shiftLine(horizontal, finalRow, direction);
                        super.onModifierFinished(pItem);
                    }
                });
            }
        }
    }

    /**
     * Places a start/finish marker one tile inside the border, rotated to
     * point inward depending on which edge (left/right/top/bottom) the marker
     * sits on. (NOTE(review): method name typo — "AnsStart" for "AndStart".)
     */
    private void createFinishAnsStart(int x, int y, float tilePositionX, float tilePositionY,
                                      TextureRegion pTextureRegion, Tile[][] field) {
        if (x == 0) {
            // Left edge: shift right, no rotation.
            Sprite sprite = new TileSprite(tilePositionX + spacePerTile, tilePositionY,
                    spacePerTile, spacePerTile, pTextureRegion, vbom);
            attachChild(sprite);
        } else if (x == field.length - 1) {
            // Right edge: shift left, rotate 180.
            Sprite sprite = new TileSprite(tilePositionX - spacePerTile, tilePositionY,
                    spacePerTile, spacePerTile, pTextureRegion, vbom);
            sprite.setRotationCenter(sprite.getWidth() / 2, sprite.getHeight() / 2);
            sprite.setRotation(180f);
            attachChild(sprite);
        } else if (y == 0) {
            // Top edge: shift down, rotate 90.
            Sprite sprite = new TileSprite(tilePositionX, tilePositionY + spacePerTile,
                    spacePerTile, spacePerTile, pTextureRegion, vbom);
            sprite.setRotationCenter(sprite.getWidth() / 2, sprite.getHeight() / 2);
            sprite.setRotation(90f);
            attachChild(sprite);
        } else if (y == field[x].length - 1) {
            // Bottom edge: shift up, rotate 270.
            Sprite sprite = new TileSprite(tilePositionX, tilePositionY - spacePerTile,
                    spacePerTile, spacePerTile, pTextureRegion, vbom);
            sprite.setRotationCenter(sprite.getWidth() / 2, sprite.getHeight() / 2);
            sprite.setRotation(270f);
            attachChild(sprite);
        }
    }

    @Override
    public void dispose() {
        detachChildren();
        super.dispose();
    }
}
package mn.devfest.api;

import android.content.Context;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v7.util.DiffUtil;

import com.google.android.gms.auth.api.signin.GoogleSignInAccount;
import com.google.firebase.auth.AuthCredential;
import com.google.firebase.auth.FirebaseAuth;
import com.google.firebase.auth.GoogleAuthProvider;
import com.google.firebase.database.DataSnapshot;
import com.google.firebase.database.DatabaseError;
import com.google.firebase.database.DatabaseReference;
import com.google.firebase.database.FirebaseDatabase;
import com.google.firebase.database.ValueEventListener;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

import mn.devfest.api.model.Conference;
import mn.devfest.api.model.Session;
import mn.devfest.api.model.Speaker;
import mn.devfest.persistence.UserScheduleRepository;
import timber.log.Timber;

/**
 * Single point of access for conference data (sessions, speakers) and the
 * user's personal schedule. Sessions and speakers are streamed from Firebase
 * Realtime Database; the user's schedule lives locally in
 * {@link UserScheduleRepository} and is mirrored to Firebase when a Google
 * account is attached.
 */
public class DevFestDataSource {
    private static final String DEVFEST_2017_KEY = "devfest2017";
    private static final String SESSIONS_CHILD_KEY = "schedule";
    private static final String SPEAKERS_CHILD_KEY = "speakers";
    private static final String AGENDAS_KEY = "agendas";

    private static DevFestDataSource mOurInstance;

    private UserScheduleRepository mScheduleRepository;
    private DatabaseReference mFirebaseDatabaseReference;
    private FirebaseAuth mFirebaseAuth;
    private GoogleSignInAccount mGoogleAccount;
    private Conference mConference = new Conference();
    //TODO move to an array of listeners?
    private DataSourceListener mDataSourceListener;
    private UserScheduleListener mUserScheduleListener;
    private ValueEventListener mFirebaseUserScheduleListener;

    /** Lazily creates and returns the process-wide singleton. */
    public static DevFestDataSource getInstance(Context context) {
        if (mOurInstance == null) {
            mOurInstance = new DevFestDataSource(context);
        }
        return mOurInstance;
    }

    public DevFestDataSource(Context context) {
        mScheduleRepository = new UserScheduleRepository(context);
        //TODO move all firebase access into a separate class and de-duplicate code
        mFirebaseAuth = FirebaseAuth.getInstance();
        mFirebaseDatabaseReference = FirebaseDatabase.getInstance().getReference();
        //Get sessions
        mFirebaseDatabaseReference.child(DEVFEST_2017_KEY)
                .child(SESSIONS_CHILD_KEY).addValueEventListener(new ValueEventListener() {
            @Override
            public void onDataChange(DataSnapshot dataSnapshot) {
                // Build a fresh map so stale sessions are dropped wholesale.
                // (Was a raw HashMap; now properly parameterized.)
                HashMap<String, Session> map = new HashMap<>();
                //Add each new session into the schedule
                for (DataSnapshot snapshot : dataSnapshot.getChildren()) {
                    Timber.d("Session snapshot is: %s", snapshot.toString());
                    Session session = snapshot.getValue(Session.class);
                    session.setId(snapshot.getKey());
                    map.put(session.getId(), session);
                }
                mConference.setSchedule(map);
                if (mDataSourceListener != null) {
                    mDataSourceListener.onSessionsUpdate(new ArrayList<>(map.values()));
                }
            }

            @Override
            public void onCancelled(DatabaseError databaseError) {
                // TODO handle failing to read value
                Timber.e(databaseError.toException(), "Failed to read sessions value.");
            }
        });
        //Get speakers
        mFirebaseDatabaseReference.child(DEVFEST_2017_KEY).child(SPEAKERS_CHILD_KEY)
                .addValueEventListener(new ValueEventListener() {
            @Override
            public void onDataChange(DataSnapshot dataSnapshot) {
                // FIX: was a raw HashMap constructed as HashMap<String, Session>
                // even though it stores speakers; corrected the type parameter.
                HashMap<String, Speaker> map = new HashMap<>();
                //Add each new speaker into the schedule
                for (DataSnapshot snapshot : dataSnapshot.getChildren()) {
                    Timber.d("Speaker snapshot is: %s", snapshot.toString());
                    Speaker speaker = snapshot.getValue(Speaker.class);
                    speaker.setId(snapshot.getKey());
                    map.put(speaker.getId(), speaker);
                }
                mConference.setSpeakers(map);
                if (mDataSourceListener != null) {
                    mDataSourceListener.onSpeakersUpdate(new ArrayList<>(map.values()));
                }
            }

            @Override
            public void onCancelled(DatabaseError databaseError) {
                // TODO handle failing to read value
                Timber.e(databaseError.toException(), "Failed to read speakers value.");
            }
        });
    }

    /** @return all known sessions; never null, empty until Firebase responds. */
    @NonNull
    public List<Session> getSessions() {
        if (mConference == null) {
            return new ArrayList<>();
        }
        if (mConference.getSchedule() == null) {
            return new ArrayList<>();
        }
        return new ArrayList<>(mConference.getSchedule().values());
    }

    /** @return the session with the given ID, or null if unknown/not yet loaded. */
    @Nullable
    public Session getSessionById(String id) {
        // Guard the schedule map as well — it is null before the first Firebase callback.
        if (mConference == null || mConference.getSchedule() == null) {
            return null;
        }
        return mConference.getSchedule().get(id);
    }

    /** @return all known speakers; never null, empty until Firebase responds. */
    @NonNull
    public List<Speaker> getSpeakers() {
        if (mConference == null) {
            return new ArrayList<>();
        }
        if (mConference.getSpeakers() == null) {
            return new ArrayList<>();
        }
        return new ArrayList<>(mConference.getSpeakers().values());
    }

    /** @return the speaker with the given ID, or null if unknown/not yet loaded. */
    @Nullable
    public Speaker getSpeakerById(String id) {
        // Guard the speakers map as well — it is null before the first Firebase callback.
        if (mConference == null || mConference.getSpeakers() == null) {
            return null;
        }
        return mConference.getSpeakers().get(id);
    }

    /** @return the sessions whose IDs are saved in the user's local schedule. */
    @NonNull
    public List<Session> getUserSchedule() {
        // Find sessions with an ID matching the user's saved session IDs
        List<Session> sessions = getSessions();
        List<Session> userSessions = new ArrayList<>();
        if (sessions.size() == 0) {
            return sessions;
        }
        if (mScheduleRepository != null) {
            // FIX: the loop header was garbled ("... i Session session = ...");
            // restored the missing decrement and brace. The backwards direction
            // matches the original comment about removal-safe iteration.
            for (int i = sessions.size() - 1; i >= 0; i--) {
                Session session = sessions.get(i);
                if (mScheduleRepository.getScheduleIds().contains(session.getId())) {
                    userSessions.add(session);
                }
            }
        }
        return userSessions;
    }

    /**
     * Adds the session with the given ID to the user's schedule
     *
     * @param sessionId ID of the session to be added
     */
    public void addToUserSchedule(String sessionId) {
        mScheduleRepository.addSession(sessionId);
        // Guard: the listener is optional and may not have been registered yet.
        if (mDataSourceListener != null) {
            mDataSourceListener.onUserScheduleUpdate(getUserSchedule());
        }
        attemptAddingSessionToFirebase(sessionId);
    }

    private void attemptAddingSessionToFirebase(String sessionId) {
        //We can't sync to Firebase if we aren't logged in
        if (!haveGoogleAccountAndId()) {
            //TODO prompt the user intermittently to allow schedule sync
            return;
        }
        // FIX: previously setValue(sessionId) was called on the agenda node
        // itself, which replaced the user's ENTIRE schedule with one ID.
        // Write a child per session instead, matching the reader
        // (snapshot.getValue(String.class) per child) and the remover
        // (child(sessionId).removeValue()).
        mFirebaseDatabaseReference.child(DEVFEST_2017_KEY).child(AGENDAS_KEY)
                .child(mGoogleAccount.getId()).child(sessionId).setValue(sessionId);
    }

    /**
     * Removes the session with the given ID from the user's schedule
     *
     * @param sessionId ID of the session to be removed
     */
    public void removeFromUserSchedule(String sessionId) {
        mScheduleRepository.removeSession(sessionId);
        // Guard: the listener is optional and may not have been registered yet.
        if (mDataSourceListener != null) {
            mDataSourceListener.onUserScheduleUpdate(getUserSchedule());
        }
        attemptRemovingSessionFromFirebase(sessionId);
    }

    private void attemptRemovingSessionFromFirebase(String sessionId) {
        //We can't sync to Firebase if we aren't logged in
        if (!haveGoogleAccountAndId()) {
            //TODO prompt the user intermittently to allow schedule sync
            return;
        }
        // Remove the session from the user's schedule in Firebase
        mFirebaseDatabaseReference.child(DEVFEST_2017_KEY).child(AGENDAS_KEY)
                .child(mGoogleAccount.getId()).child(sessionId).removeValue();
    }

    /** @return true when a Google account with a non-null ID is attached. */
    private boolean haveGoogleAccountAndId() {
        return mGoogleAccount != null && mGoogleAccount.getId() != null;
    }

    /**
     * Checks if a given session is in the user's schedule
     *
     * @param sessionId ID of the session to check for inclusion in the list
     * @return true if the session is in the user's schedule; otherwise false
     */
    public boolean isInUserSchedule(String sessionId) {
        return mScheduleRepository.isInSchedule(sessionId);
    }

    public void setDataSourceListener(DataSourceListener listener) {
        mDataSourceListener = listener;
    }

    public void setUserScheduleListener(UserScheduleListener listener) {
        mUserScheduleListener = listener;
    }

    /** Pushes the current sessions, speakers, and user schedule to the listener. */
    private void onConferenceUpdated() {
        //Notify listener (guarded — it may not be registered yet)
        if (mDataSourceListener != null) {
            mDataSourceListener.onSessionsUpdate(getSessions());
            mDataSourceListener.onSpeakersUpdate(getSpeakers());
            mDataSourceListener.onUserScheduleUpdate(getUserSchedule());
        }
    }

    //TODO de-duplicate diff methods
    public DiffUtil.DiffResult calculateSessionDiff(final List<Session> oldList, final List<Session> newList) {
        return DiffUtil.calculateDiff(new DiffUtil.Callback() {
            @Override
            public int getOldListSize() {
                return oldList.size();
            }

            @Override
            public int getNewListSize() {
                return newList.size();
            }

            @Override
            public boolean areItemsTheSame(int oldItemPosition, int newItemPosition) {
                return oldList.get(oldItemPosition).getId().equals(newList.get(newItemPosition).getId());
            }

            @Override
            public boolean areContentsTheSame(int oldItemPosition, int newItemPosition) {
                return oldList.get(oldItemPosition).equals(newList.get(newItemPosition));
            }
        });
    }

    /**
     * Diffs the fixed session list against two schedule snapshots: an item's
     * "contents" change exactly when its membership in the schedule flips.
     */
    public DiffUtil.DiffResult calculateScheduleDiff(final List<Session> sessions, final List<Session> oldSchedule, final List<Session> newSchedule) {
        return DiffUtil.calculateDiff(new DiffUtil.Callback() {
            @Override
            public int getOldListSize() {
                return sessions.size();
            }

            @Override
            public int getNewListSize() {
                return sessions.size();
            }

            @Override
            public boolean areItemsTheSame(int oldItemPosition, int newItemPosition) {
                return oldItemPosition == newItemPosition;
            }

            @Override
            public boolean areContentsTheSame(int oldItemPosition, int newItemPosition) {
                Session session = sessions.get(oldItemPosition);
                return oldSchedule.contains(session) == newSchedule.contains(session);
            }
        });
    }

    public DiffUtil.DiffResult calculateSpeakerDiff(final List<Speaker> oldList, final List<Speaker> newList) {
        return DiffUtil.calculateDiff(new DiffUtil.Callback() {
            @Override
            public int getOldListSize() {
                return oldList.size();
            }

            @Override
            public int getNewListSize() {
                return newList.size();
            }

            @Override
            public boolean areItemsTheSame(int oldItemPosition, int newItemPosition) {
                // FIX: identity was compared with equals(), which treats any
                // content change as a different item (defeating change
                // animations). Compare by ID, consistent with calculateSessionDiff.
                return oldList.get(oldItemPosition).getId().equals(newList.get(newItemPosition).getId());
            }

            @Override
            public boolean areContentsTheSame(int oldItemPosition, int newItemPosition) {
                return oldList.get(oldItemPosition).equals(newList.get(newItemPosition));
            }
        });
    }

    public GoogleSignInAccount getGoogleAccount() {
        return mGoogleAccount;
    }

    /**
     * Attaches or detaches a Google account. Attaching a new account starts a
     * Firebase listener mirroring that user's agenda into the local repository;
     * passing null detaches the current listener.
     *
     * @throws IllegalArgumentException if a non-null account has no ID
     */
    public void setGoogleAccount(GoogleSignInAccount googleAccount) {
        //TODO if we're logging into an account, use the schedule if we were logged out and reconcile the schedule if we
        //If we are removing the Google account, stop listening
        if (googleAccount == null) {
            if (mFirebaseUserScheduleListener != null && haveGoogleAccountAndId()) {
                mFirebaseDatabaseReference.child(DEVFEST_2017_KEY).child(AGENDAS_KEY)
                        .child(mGoogleAccount.getId())
                        .removeEventListener(mFirebaseUserScheduleListener);
            }
            mGoogleAccount = null;
            return;
        }
        if (googleAccount.getId() == null) {
            throw new IllegalArgumentException("#setGoogleAccount() called without ID. googleAccount = "
                    + googleAccount.toString());
        }
        //If we had no account, or if this new account isn't already being tracked, store in Firebase and track it
        if (!haveGoogleAccountAndId() || !googleAccount.getId().equals(mGoogleAccount.getId())) {
            storeAuthInFirebase(googleAccount);
            mFirebaseUserScheduleListener = mFirebaseDatabaseReference.child(DEVFEST_2017_KEY).child(AGENDAS_KEY)
                    .child(googleAccount.getId()).addValueEventListener(new ValueEventListener() {
                @Override
                public void onDataChange(DataSnapshot dataSnapshot) {
                    //Gather all of the session IDs from the user's schedule
                    ArrayList<String> scheduleIds = new ArrayList<>();
                    for (DataSnapshot snapshot : dataSnapshot.getChildren()) {
                        Timber.d("User schedule snapshot is: %s", snapshot.toString());
                        String id = snapshot.getValue(String.class);
                        scheduleIds.add(id);
                    }
                    //Update the schedule IDs and send the new user schedule to the listener
                    mScheduleRepository.setScheduleIdStringSet(scheduleIds);
                    if (mUserScheduleListener != null) {
                        mUserScheduleListener.onScheduleUpdate(getUserSchedule());
                    }
                }

                @Override
                public void onCancelled(DatabaseError databaseError) {
                    Timber.e(databaseError.toException(), "Failed to read user agenda value.");
                }
            });
        }
        mGoogleAccount = googleAccount;
    }

    private void storeAuthInFirebase(GoogleSignInAccount account) {
        // FIX: GoogleAuthProvider.getCredential() expects the Google *ID token*,
        // not the account ID — a credential built from getId() cannot be verified
        // by Firebase. NOTE(review): getIdToken() is only non-null when the
        // GoogleSignInOptions requested an ID token; confirm against the sign-in flow.
        AuthCredential authCredential = GoogleAuthProvider.getCredential(account.getIdToken(), null);
        mFirebaseAuth.signInWithCredential(authCredential)
                .addOnCompleteListener(task -> {
                    if (task.isSuccessful()) {
                        Timber.d("FirebaseAuth login successfully completed");
                    } else {
                        Timber.d("FirebaseAuth login failed");
                    }
                });
    }

    /** Callback for changes to the user's personal schedule. */
    public interface UserScheduleListener {
        void onScheduleUpdate(List<Session> schedule);
    }

    //TODO break this into separate listeners
    /**
     * Listener for updates from the data source
     */
    public interface DataSourceListener {
        //These methods are for updating the listener
        void onSessionsUpdate(List<Session> sessions);

        void onSpeakersUpdate(List<Speaker> speakers);

        void onUserScheduleUpdate(List<Session> userSchedule);
    }
}
package org.zeroxlab.momodict;

import android.content.Context;
import android.support.annotation.NonNull;

import org.zeroxlab.momodict.db.realm.RealmStore;
import org.zeroxlab.momodict.model.Book;
import org.zeroxlab.momodict.model.Card;
import org.zeroxlab.momodict.model.Entry;
import org.zeroxlab.momodict.model.Record;
import org.zeroxlab.momodict.model.Store;

import java.util.Collections;
import java.util.List;

import rx.Observable;

/**
 * Facade over the persistence {@link Store}: exposes dictionaries (books),
 * dictionary entries, lookup history (records), and memo cards as Rx streams.
 */
public class Controller {

    private Context mCtx;
    private Store mStore;

    public Controller(@NonNull Context ctx) {
        mCtx = ctx;
        mStore = new RealmStore(mCtx);
    }

    /** Emits every installed dictionary. */
    public Observable<Book> getBooks() {
        return Observable.from(mStore.getBooks());
    }

    /**
     * Emits entries matching the keyword, exact matches included, ordered by
     * how early the keyword appears in the word, de-duplicated by word.
     */
    public Observable<Entry> queryEntries(String keyWord) {
        // to make sure exact matched words are returned
        List<Entry> exact = mStore.getEntries(keyWord);
        List<Entry> list = mStore.queryEntries(keyWord);
        list.addAll(exact);
        // Rank by first occurrence of the keyword within the word.
        // NOTE(review): indexOf() is -1 for non-containing words, which would
        // sort them first — presumably the store only returns containing
        // words; verify against Store.queryEntries.
        Collections.sort(list, (left, right) -> {
            return left.wordStr.indexOf(keyWord) - right.wordStr.indexOf(keyWord);
        });
        return Observable.from(list).distinct((item) -> item.wordStr);
    }

    /** Emits exact-match entries for the keyword, ranked like queryEntries. */
    public Observable<Entry> getEntries(String keyWord) {
        List<Entry> list = mStore.getEntries(keyWord);
        Collections.sort(list, (left, right) -> {
            return left.wordStr.indexOf(keyWord) - right.wordStr.indexOf(keyWord);
        });
        return Observable.from(list);
    }

    /** Emits lookup-history records, most recent first. */
    public Observable<Record> getRecords() {
        List<Record> records = mStore.getRecords();
        // Sorting by time, latest first. FIX: the comparator previously
        // returned only 1 or -1 and never 0, so equal timestamps violated the
        // Comparator contract (sort may throw "Comparison method violates its
        // general contract!"). Return 0 when neither time precedes the other.
        Collections.sort(records, (left, right) -> {
            if (left.time.before(right.time)) {
                return 1;
            }
            return right.time.before(left.time) ? -1 : 0;
        });
        return Observable.from(records);
    }

    /** Deletes every history record. */
    public void clearRecords() {
        getRecords().subscribe(
                (record -> mStore.removeRecords(record.wordStr)));
    }

    public boolean setRecord(@NonNull Record record) {
        return mStore.upsertRecord(record);
    }

    public boolean removeRecord(@NonNull String keyWord) {
        return mStore.removeRecords(keyWord);
    }

    /** Emits memo cards, most recent first. */
    public Observable<Card> getCards() {
        List<Card> cards = mStore.getCards();
        // Same contract-safe "latest first" ordering as getRecords().
        Collections.sort(cards, (left, right) -> {
            if (left.time.before(right.time)) {
                return 1;
            }
            return right.time.before(left.time) ? -1 : 0;
        });
        return Observable.from(cards);
    }

    public boolean setCard(@NonNull Card card) {
        return mStore.upsertCard(card);
    }

    public boolean removeCards(@NonNull String keyWord) {
        return mStore.removeCards(keyWord);
    }
}
package pl.beerlurk.beerlurk;

import android.app.Activity;
import android.location.Location;
import android.os.Bundle;
import android.os.Parcelable;
import android.util.Log;

import com.google.android.gms.maps.CameraUpdateFactory;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.MapFragment;
import com.google.android.gms.maps.OnMapReadyCallback;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.Marker;
import com.google.android.gms.maps.model.MarkerOptions;
import com.google.gson.FieldNamingPolicy;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;

import java.util.List;

import pl.beerlurk.beerlurk.dto.BeerLocationsWrapper;
import pl.beerlurk.beerlurk.dto.DistancedBeerLocation;
import pl.beerlurk.beerlurk.dto.Factory;
import pl.beerlurk.beerlurk.service.BeerApi;
import pl.beerlurk.beerlurk.service.BeerService;
import pl.beerlurk.beerlurk.service.GeocodeApi;
import pl.beerlurk.beerlurk.service.MatrixApi;
import retrofit.RestAdapter;
import retrofit.converter.GsonConverter;
import rx.Observable;
import rx.android.schedulers.AndroidSchedulers;
import rx.functions.Action1;

/**
 * Full-screen map screen: centers the camera on the beer location the user
 * tapped ("clicked" extra) and drops a marker for every location passed in
 * the "all" extra.
 */
public class MapActivity extends Activity implements OnMapReadyCallback {

    private MapFragment mapFragment;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.map_activity);
        // Resolve the map fragment from the layout and load the map asynchronously;
        // onMapReady() is invoked once it is available.
        mapFragment = (MapFragment) getFragmentManager().findFragmentById(R.id.map);
        mapFragment.getMapAsync(this);
    }

    @Override
    public void onMapReady(GoogleMap map) {
        map.setMyLocationEnabled(true);

        // Zoom in on the location that was tapped in the calling screen.
        DistancedBeerLocation tapped = getIntent().getParcelableExtra("clicked");
        Location tappedLocation = tapped.getLocation();
        LatLng cameraTarget = new LatLng(tappedLocation.getLatitude(), tappedLocation.getLongitude());
        map.moveCamera(CameraUpdateFactory.newLatLngZoom(cameraTarget, 15));

        // One marker per known beer location.
        List<DistancedBeerLocation> locations = getIntent().getParcelableArrayListExtra("all");
        for (DistancedBeerLocation beerLocation : locations) {
            double lat = beerLocation.getLocation().getLatitude();
            double lng = beerLocation.getLocation().getLongitude();
            map.addMarker(new MarkerOptions().position(new LatLng(lat, lng)));
        }
    }
}
package me.winspeednl.libz.core;

import me.winspeednl.libz.screen.Render;
import me.winspeednl.libz.screen.Window;

/**
 * Engine core: owns the window, renderer, and input, and drives the game via a
 * fixed-timestep loop (updates capped at 1/frameCap per second, rendering as
 * fast as the update budget allows).
 */
public class GameCore implements Runnable {
	private Thread thread;          // loop thread, started by start()
	private LibZ game;              // user game callbacks (init/update/render)
	private Window window;
	public Render renderer;
	private Input input;
	private int width = 320, height = 240;  // logical screen size
	private int scale = 1;                  // pixel scale factor
	private String name = "LibZ";           // window title
	private double frameCap = 1D / 60D;     // seconds per update tick (60 Hz)
	private boolean isRunning = false;
	private int spriteBGColor = 0xFF000000; // color treated as transparent by sprites
	private int fps = 0;                    // frames rendered in the last full second
	private int offsetX, offsetY;           // camera offset, mirrored from renderer
	private int playerX, playerY;

	public GameCore(LibZ game) {
		this.game = game;
	}

	/**
	 * Creates the window/renderer/input and launches the loop thread.
	 * No-op when already running.
	 */
	public void start() {
		if (isRunning)
			return;
		window = new Window(this);
		renderer = new Render(this);
		input = new Input(this);
		isRunning = true;
		thread = new Thread(this);
		thread.start();
		// NOTE(review): init is called after the loop thread starts; the loop
		// may tick before init completes — confirm this ordering is intended.
		game.init(this);
	}

	/** Asks the loop to stop after the current iteration. */
	public void stop() {
		if (!isRunning)
			return;
		isRunning = false;
	}

	/**
	 * Fixed-timestep game loop: accumulates elapsed time, runs update() once
	 * per frameCap of accumulated time, renders only when at least one update
	 * ran, and sleeps briefly otherwise. Also tracks FPS over one-second windows.
	 */
	public void run() {
		double currTime;
		double lastTime = System.nanoTime() / 1000000000D;  // seconds
		double passedTime;
		double unprocessedTime = 0;  // update-time debt
		double frameTime = 0;        // time into the current FPS window
		int frames = 0;              // frames rendered in the current window
		while (isRunning) {
			boolean render = false;
			currTime = System.nanoTime() / 1000000000D;
			passedTime = currTime - lastTime;
			lastTime = currTime;
			unprocessedTime += passedTime;
			frameTime += passedTime;
			// Catch up on updates; may run several per rendered frame.
			while (unprocessedTime >= frameCap) {
				game.update(this);
				unprocessedTime -= frameCap;
				render = true;
				// Once a second, publish the frame count as FPS.
				if(frameTime >= 1) {
					frameTime = 0;
					fps = frames;
					frames = 0;
				}
			}
			offsetX = renderer.getOffsetX();
			offsetY = renderer.getOffsetY();
			if (render) {
				renderer.clear();
				game.render(this, renderer);
				// Composite the overlay on top; 0xFF000000 acts as "no overlay here".
				for (int i = 0; i < renderer.getOverlayPixels().length; i++) {
					if (renderer.getOverlayPixels()[i] != 0xFF000000)
						renderer.setPixel(i, renderer.getOverlayPixels()[i]);
				}
				window.update();
				frames++;
			} else {
				// Nothing to do yet — yield the CPU briefly.
				try {
					Thread.sleep(1);
				} catch (InterruptedException e) {
					e.printStackTrace();
				}
			}
		}
		cleanup();
	}

	/** Releases window resources once the loop exits. */
	public void cleanup() {
		window.cleanUp();
	}

	public int getWidth() {
		return width;
	}

	public void setWidth(int width) {
		this.width = width;
	}

	public int getHeight() {
		return height;
	}

	public void setHeight(int height) {
		this.height = height;
	}

	// NOTE(review): declared float although the backing field and setter are
	// int (implicit widening) — callers may rely on the float return type.
	public float getScale() {
		return scale;
	}

	public void setScale(int scale) {
		this.scale = scale;
	}

	public String getName() {
		return name;
	}

	public void setName(String name) {
		this.name = name;
	}

	public Window getWindow() {
		return window;
	}

	public Input getInput() {
		return input;
	}

	public int getPlayerX() {
		return playerX;
	}

	public void setPlayerX(int playerX) {
		this.playerX = playerX;
	}

	public int getPlayerY() {
		return playerY;
	}

	public void setPlayerY(int playerY) {
		this.playerY = playerY;
	}

	public int getSpriteBGColor() {
		return spriteBGColor;
	}

	public void setSpriteBGColor(int spriteBGColor) {
		this.spriteBGColor = spriteBGColor;
	}

	/** Frames rendered during the last completed one-second window. */
	public int fps() {
		return fps;
	}

	public int getOffsetX() {
		return offsetX;
	}

	public int getOffsetY() {
		return offsetY;
	}

	// Offset setters delegate to the renderer; the local fields are refreshed
	// from the renderer each loop iteration.
	public void setOffsetX(int offsetX) {
		renderer.setOffsetX(offsetX);
	}

	public void setOffsetY(int offsetY) {
		renderer.setOffsetY(offsetY);
	}
}
package presentacio;

import dades.Player;
import dades.PlayersAdmin;
import exceptions.PlayerNotExistsExcepction;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.scene.control.Button;
import javafx.scene.control.Label;
import javafx.scene.control.PasswordField;
import javafx.scene.control.TextField;
import javafx.scene.layout.AnchorPane;
import javafx.stage.Stage;

import java.io.IOException;

/**
 * Dialog controller for editing the current user's profile: display name and,
 * optionally, the password (old password required, new password entered twice).
 */
public class UserConfigController extends AnchorPane {

    private PlayersAdmin mAdmin;
    private AnchorPane rootLayout;

    @FXML
    private TextField name;
    @FXML
    private TextField userName;
    @FXML
    private Label passAlert;    // shown when the two new-password fields differ
    @FXML
    private Label checkAlert;   // shown when the old password is wrong
    @FXML
    private Label fields;
    @FXML
    private PasswordField password;
    @FXML
    private PasswordField passwordR;
    @FXML
    private PasswordField oldPassword;
    @FXML
    private Button acceptButton;
    @FXML
    private Button cancelButton;

    // true once the user confirmed the dialog with valid input
    private boolean result = false;

    UserConfigController(MainWindow main) {
        mAdmin = main.getPlayersAdmin();
        FXMLLoader loader = new FXMLLoader(getClass().getResource("UserConfig.fxml"));
        loader.setRoot(this);
        loader.setController(this);
        try {
            rootLayout = loader.load();
        } catch (IOException e) {
            e.printStackTrace();
        }
        Player p;
        try {
            p = mAdmin.getPlayer(main.getUsername());
            createFields(p);
        } catch (PlayerNotExistsExcepction e) {
            e.printStackTrace();
            // FIX: during construction this node is not attached to a Scene yet,
            // so getScene() is null and the original close() call would throw a
            // NullPointerException that masks the real failure. Guard it.
            if (this.getScene() != null) {
                ((Stage) (this.getScene().getWindow())).close();
            }
        }
    }

    /**
     * Validates the new-password pair. Shows/hides the mismatch alert and
     * returns false when a new password was typed but the repeat doesn't match.
     */
    private boolean checkAccept() {
        if (password.getText().length() > 0 && !password.getText().equals(passwordR.getText())) {
            passAlert.setVisible(true);
            return false;
        } else {
            passAlert.setVisible(false);
        }
        return true;
    }

    public AnchorPane getRootLayout() {
        return rootLayout;
    }

    /** @return true when the dialog was accepted with valid input. */
    public boolean getResult() {
        return result;
    }

    /** Pre-fills the form from the player's stored data. */
    private void createFields(Player player) {
        this.name.setText(player.getName());
        this.userName.setText(player.getUserName());
    }

    /**
     * Accept handler: validates the password pair, verifies the old password,
     * applies the name (and optional password) change, then closes the dialog.
     */
    @FXML
    private void dialogAccept() {
        if (!checkAccept()) return;
        if (!mAdmin.checkLogin(userName.getText(), oldPassword.getText())) {
            checkAlert.setVisible(true);
            return;
        } else {
            checkAlert.setVisible(false);
        }
        // Only change the password when a new one was actually entered.
        if (password.getText().length() > 0) {
            mAdmin.changePassword(userName.getText(), oldPassword.getText(), password.getText());
        }
        mAdmin.changeName(name.getText(), userName.getText());
        result = true;
        Stage stage = (Stage) acceptButton.getScene().getWindow();
        stage.close();
    }

    /** Cancel handler: closes the dialog without applying changes. */
    @FXML
    private void dialogReject() {
        result = false;
        Stage stage = (Stage) cancelButton.getScene().getWindow();
        stage.close();
    }
}
package soot;

import java.io.IOException;
import java.io.InputStream;
import java.util.*;

import soot.JastAddJ.BytecodeParser;
import soot.JastAddJ.CompilationUnit;
import soot.JastAddJ.JastAddJavaParser;
import soot.JastAddJ.JavaParser;
import soot.JastAddJ.Program;
import soot.javaToJimple.IInitialResolver.Dependencies;
import soot.options.Options;

/** Loads symbols for SootClasses from either class files or jimple files. */
public class SootResolver {
    /** Maps each resolved class to a list of all references in it. */
    private final Map<SootClass, Collection<Type>> classToTypesSignature = new HashMap<SootClass, Collection<Type>>();

    /** Maps each resolved class to a list of all references in it. */
    private final Map<SootClass, Collection<Type>> classToTypesHierarchy = new HashMap<SootClass, Collection<Type>>();

    /** SootClasses waiting to be resolved, one queue per resolving level. */
    @SuppressWarnings("unchecked")
    private final Deque<SootClass>[] worklist = new Deque[4];

    private Program program = null;

    public SootResolver(Singletons.Global g) {
        // DANGLING needs no queue; only the three real levels are resolved.
        worklist[SootClass.HIERARCHY] = new ArrayDeque<SootClass>();
        worklist[SootClass.SIGNATURES] = new ArrayDeque<SootClass>();
        worklist[SootClass.BODIES] = new ArrayDeque<SootClass>();
    }

    /** Lazily sets up the JastAdd front end (unless in APK/class-jimple mode). */
    protected void initializeProgram() {
        if (Options.v().src_prec() != Options.src_prec_apk_c_j) {
            program = new Program();
            program.state().reset();
            program.initBytecodeReader(new BytecodeParser());
            program.initJavaParser(new JavaParser() {
                public CompilationUnit parse(InputStream is, String fileName) throws IOException, beaver.Parser.Exception {
                    return new JastAddJavaParser().parse(is, fileName);
                }
            });
            program.options().initOptions();
            program.options().addKeyValueOption("-classpath");
            program.options().setValueForOption(Scene.v().getSootClassPath(), "-classpath");
            if (Options.v().src_prec() == Options.src_prec_java)
                program.setSrcPrec(Program.SRC_PREC_JAVA);
            else if (Options.v().src_prec() == Options.src_prec_class)
                program.setSrcPrec(Program.SRC_PREC_CLASS);
            else if (Options.v().src_prec() == Options.src_prec_only_class)
                program.setSrcPrec(Program.SRC_PREC_CLASS);
            program.initPaths();
        }
    }

    public static SootResolver v() {
        if (ModuleUtil.module_mode())
            return G.v().soot_SootModuleResolver();
        return G.v().soot_SootResolver();
    }

    /** Returns true if we are resolving all class refs recursively. */
    protected boolean resolveEverything() {
        if (Options.v().on_the_fly())
            return false;
        return (Options.v().whole_program() || Options.v().whole_shimple()
                || Options.v().full_resolver()
                || Options.v().output_format() == Options.output_format_dava);
    }

    /**
     * Returns a (possibly not yet resolved) SootClass to be used in references
     * to a class. If/when the class is resolved, it will be resolved into this
     * SootClass.
     */
    public SootClass makeClassRef(String className) {
        // If this class name is escaped, we need to un-escape it
        className = Scene.v().unescapeName(className);

        if (Scene.v().containsClass(className))
            return Scene.v().getSootClass(className);

        SootClass newClass;
        newClass = new SootClass(className);
        newClass.setResolvingLevel(SootClass.DANGLING);
        Scene.v().addClass(newClass);

        return newClass;
    }

    /**
     * Resolves the given class. Depending on the resolver settings, may decide
     * to resolve other classes as well. If the class has already been resolved,
     * just returns the class that was already resolved.
     */
    public SootClass resolveClass(String className, int desiredLevel) {
        SootClass resolvedClass = null;
        try {
            resolvedClass = makeClassRef(className);
            addToResolveWorklist(resolvedClass, desiredLevel);
            processResolveWorklist();
            return resolvedClass;
        } catch (SootClassNotFoundException e) {
            // remove unresolved class and rethrow
            if (resolvedClass != null) {
                assert resolvedClass.resolvingLevel() == SootClass.DANGLING;
                Scene.v().removeClass(resolvedClass);
            }
            throw e;
        }
    }

    /** Resolve all classes on toResolveWorklist. */
    protected void processResolveWorklist() {
        // FIX: the loop header was garbled ("... i while ...") — restored the
        // missing decrement and body brace. Levels are drained from BODIES down
        // to HIERARCHY so the most demanding requests are handled first.
        for (int i = SootClass.BODIES; i >= SootClass.HIERARCHY; i--) {
            while (!worklist[i].isEmpty()) {
                SootClass sc = worklist[i].pop();
                if (resolveEverything()) {
                    // Whole program mode
                    boolean onlySignatures = sc.isPhantom()
                            || (Options.v().no_bodies_for_excluded() && Scene.v().isExcluded(sc)
                                    && !Scene.v().getBasicClasses().contains(sc.getName()));
                    if (onlySignatures) {
                        bringToSignatures(sc);
                        sc.setPhantomClass();
                        for (SootMethod m : sc.getMethods()) {
                            m.setPhantom(true);
                        }
                        for (SootField f : sc.getFields()) {
                            f.setPhantom(true);
                        }
                    } else
                        bringToBodies(sc);
                } else { // No transitive
                    switch (i) {
                    case SootClass.BODIES:
                        bringToBodies(sc);
                        break;
                    case SootClass.SIGNATURES:
                        bringToSignatures(sc);
                        break;
                    case SootClass.HIERARCHY:
                        bringToHierarchy(sc);
                        break;
                    }
                }
            }
        }
    }

    protected void addToResolveWorklist(Type type, int level) {
        // We go from Type -> SootClass directly, since RefType.getSootClass
        // calls makeClassRef anyway
        if (type instanceof RefType)
            addToResolveWorklist(((RefType) type).getSootClass(), level);
        else if (type instanceof ArrayType)
            addToResolveWorklist(((ArrayType) type).baseType, level);
        // Other types ignored
    }

    protected void addToResolveWorklist(SootClass sc, int desiredLevel) {
        if (sc.resolvingLevel() >= desiredLevel)
            return;
        worklist[desiredLevel].add(sc);
    }

    /**
     * Hierarchy - we know the hierarchy of the class and that's it requires at
     * least Hierarchy for all supertypes and enclosing types.
     */
    protected void bringToHierarchy(SootClass sc) {
        if (sc.resolvingLevel() >= SootClass.HIERARCHY)
            return;

        if (Options.v().debug_resolver())
            G.v().out.println("bringing to HIERARCHY: " + sc);

        sc.setResolvingLevel(SootClass.HIERARCHY);

        bringToHierarchyUnchecked(sc);
    }

    protected void bringToHierarchyUnchecked(SootClass sc) {
        String className = sc.getName();
        ClassSource is;
        if (ModuleUtil.module_mode()) {
            is = ModulePathSourceLocator.v().getClassSource(className,
                    com.google.common.base.Optional.fromNullable(sc.moduleName));
        } else {
            is = SourceLocator.v().getClassSource(className);
        }
        try {
            boolean modelAsPhantomRef = is == null;
            if (modelAsPhantomRef) {
                if (!Scene.v().allowsPhantomRefs()) {
                    String suffix = "";
                    if (className.equals("java.lang.Object")) {
                        suffix = " Try adding rt.jar to Soot's classpath, e.g.:\n"
                                + "java -cp sootclasses.jar soot.Main -cp "
                                + ".:/path/to/jdk/jre/lib/rt.jar <other options>";
                    } else if (className.equals("javax.crypto.Cipher")) {
                        suffix = " Try adding jce.jar to Soot's classpath, e.g.:\n"
                                + "java -cp sootclasses.jar soot.Main -cp "
                                + ".:/path/to/jdk/jre/lib/rt.jar:/path/to/jdk/jre/lib/jce.jar <other options>";
                    }
                    throw new SootClassNotFoundException(
                            "couldn't find class: " + className + " (is your soot-class-path set properly?)" + suffix);
                } else {
                    G.v().out.println("Warning: " + className + " is a phantom class!");
                    sc.setPhantomClass();
                    classToTypesSignature.put(sc, Collections.<Type>emptyList());
                    classToTypesHierarchy.put(sc, Collections.<Type>emptyList());
                }
            } else {
                Dependencies dependencies = is.resolve(sc);
                if (!dependencies.typesToSignature.isEmpty())
                    classToTypesSignature.put(sc, dependencies.typesToSignature);
                if (!dependencies.typesToHierarchy.isEmpty())
                    classToTypesHierarchy.put(sc, dependencies.typesToHierarchy);
            }
        } finally {
            if (is != null)
                is.close();
        }
        reResolveHierarchy(sc);
    }

    public void reResolveHierarchy(SootClass sc) {
        // Bring superclasses to hierarchy
        if (sc.hasSuperclass())
            addToResolveWorklist(sc.getSuperclass(), SootClass.HIERARCHY);
        if (sc.hasOuterClass())
            addToResolveWorklist(sc.getOuterClass(), SootClass.HIERARCHY);
        for (SootClass iface : sc.getInterfaces()) {
            addToResolveWorklist(iface, SootClass.HIERARCHY);
        }
    }

    /**
     * Signatures - we know the signatures of all methods and fields requires at
     * least Hierarchy for all referred to types in these signatures.
     */
    protected void bringToSignatures(SootClass sc) {
        if (sc.resolvingLevel() >= SootClass.SIGNATURES)
            return;
        bringToHierarchy(sc);
        if (Options.v().debug_resolver())
            G.v().out.println("bringing to SIGNATURES: " + sc);
        sc.setResolvingLevel(SootClass.SIGNATURES);

        bringToSignaturesUnchecked(sc);
    }

    protected void bringToSignaturesUnchecked(SootClass sc) {
        for (SootField f : sc.getFields()) {
            addToResolveWorklist(f.getType(), SootClass.HIERARCHY);
        }
        for (SootMethod m : sc.getMethods()) {
            addToResolveWorklist(m.getReturnType(), SootClass.HIERARCHY);
            for (Type ptype : m.getParameterTypes()) {
                addToResolveWorklist(ptype, SootClass.HIERARCHY);
            }
            for (SootClass exception : m.getExceptions()) {
                addToResolveWorklist(exception, SootClass.HIERARCHY);
            }
        }
        // Bring superclasses to signatures
        if (sc.hasSuperclass())
            addToResolveWorklist(sc.getSuperclass(), SootClass.SIGNATURES);
        for (SootClass iface : sc.getInterfaces()) {
            addToResolveWorklist(iface, SootClass.SIGNATURES);
        }
    }

    /**
     * Bodies - we can now start loading the bodies of methods for all referred
     * to methods and fields in the bodies, requires signatures for the method
     * receiver and field container, and hierarchy for all other classes
     * referenced in method references. Current implementation does not
     * distinguish between the receiver and other references. Therefore, it is
     * conservative and brings all of them to signatures. But this could/should
     * be improved.
     */
    protected void bringToBodies(SootClass sc) {
        if (sc.resolvingLevel() >= SootClass.BODIES)
            return;
        bringToSignatures(sc);
        if (Options.v().debug_resolver())
            G.v().out.println("bringing to BODIES: " + sc);
        sc.setResolvingLevel(SootClass.BODIES);

        bringToBodiesUnchecked(sc);
    }

    protected void bringToBodiesUnchecked(SootClass sc) {
        {
            Collection<Type> references = classToTypesHierarchy.get(sc);
            if (references != null) {
                // This must be an iterator, not a for-all since the underlying
                // collection may change as we go
                Iterator<Type> it = references.iterator();
                while (it.hasNext()) {
                    final Type t = it.next();
                    addToResolveWorklist(t, SootClass.HIERARCHY);
                }
            }
        }

        {
            Collection<Type> references = classToTypesSignature.get(sc);
            if (references != null) {
                // This must be an iterator, not a for-all since the underlying
                // collection may change as we go
                Iterator<Type> it = references.iterator();
                while (it.hasNext()) {
                    final Type t = it.next();
                    addToResolveWorklist(t, SootClass.SIGNATURES);
                }
            }
        }
    }

    public void reResolve(SootClass cl, int newResolvingLevel) {
        int resolvingLevel = cl.resolvingLevel();
        if (resolvingLevel >= newResolvingLevel)
            return;
        reResolveHierarchy(cl);
        cl.setResolvingLevel(newResolvingLevel);
        addToResolveWorklist(cl, resolvingLevel);
        processResolveWorklist();
    }

    public void reResolve(SootClass cl) {
        reResolve(cl, SootClass.HIERARCHY);
    }

    public Program getProgram() {
        if (program == null)
            initializeProgram();
        return program;
    }

    protected class SootClassNotFoundException extends RuntimeException {
        /**
         * 
         */
        private static final long serialVersionUID = 1563461446590293827L;

        private SootClassNotFoundException(String s) {
            super(s);
        }
    }
}
package sorter; import java.util.Comparator; /** * @author Kevin * @description * 1. * 2. * 3. * @date 2017/3/5 */ public class SelectSort implements Sorter { @Override public <T extends Comparable<T>> void sort(T[] list) { T temp; for (int i = 0, size = list.length; i < size; i++) { int k = i; for (int j = size - 1; j > i; j if (list[j].compareTo(list[k]) < 0) { k = j; } } temp = list[i]; list[i] = list[k]; list[k] = temp; } } @Override public <T> void sort(T[] list, Comparator comp) { } }
package base;

import housing.roles.HousingBaseRole;
import housing.roles.HousingRenterRole;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.concurrent.Semaphore;

import market.roles.MarketCustomerRole;
import reference.simcity.gui.SimCityGui;
import restaurant.intermediate.RestaurantCustomerRole;
import transportation.roles.TransportationBusRiderRole;
import bank.roles.BankCustomerRole;
import bank.roles.BankMasterTellerRole;
import base.Event.EnumEventType;
import base.Item.EnumMarketItemType;
import base.interfaces.Person;
import base.interfaces.Role;
import city.gui.CityPerson;

/**
 * A simulated city inhabitant. A PersonAgent owns a set of {@link Role}s
 * (job, housing, customer/rider roles), each flagged active or inactive, and
 * a time-ordered calendar of {@link Event}s. The scheduler
 * ({@link #pickAndExecuteAnAction()}) processes due calendar events when the
 * person is off shift, then delegates to whichever roles are active.
 */
public class PersonAgent extends Agent implements Person {
    //Static data — shared counters used to hand out unique SSNs and to
    //spread people evenly across the three time shifts.
    private static int sSSN = 0;
    private static int sTimeSchedule = 0; //0,1,2
    private static int sEatingTime = 0;

    //Roles and Job
    public static enum EnumJobType {BANK, HOUSING, MARKET, RESTAURANT, TRANSPORTATION, NONE};
    private EnumJobType mJobType;
    public Map<Role, Boolean> mRoles; //roles, active - i.e. WaiterRole, BankTellerRole, etc.
    public HousingBaseRole mHouseRole;
    private Location mJobLocation;

    //Lists
    List<Person> mFriends; // best are those with same timeshift
    SortedSet<Event> mEvents; // tree set ordered by time of event
    Map<EnumMarketItemType, Integer> mItemInventory; // personal inventory
    Map<EnumMarketItemType, Integer> mItemsDesired; // not ordered yet
    Set<Location> mHomeLocations; //multiple for landlord

    //Personal Variables
    private String mName;
    int mSSN;
    int mTimeShift;
    double mCash;
    double mLoan;
    boolean mHasCar;

    //Role References
    public BankMasterTellerRole mMasterTeller;
    private CityPerson mGui; //SHANE JERRY: 2 instantiate this
    private SimCityGui mRoleGui; //SHANE JERRY: 1 what type does this need to be? make sure this works

    //PAEA Helpers
    // Released by the GUI when an animation completes; acquired by actions
    // that must wait for movement to finish.
    public Semaphore semAnimationDone = new Semaphore(1);
    private boolean mRoleFinished;

    /** Creates a person with default state and a standard event calendar. */
    public PersonAgent() {
        initializePerson();
    }

    /**
     * Creates a person with the given job, starting cash, and name.
     * The job role is obtained from SortingHat and marked active only when the
     * person's assigned time shift matches the current shift. Standard
     * customer/rider roles are registered inactive.
     */
    public PersonAgent(EnumJobType job, double cash, String name){
        initializePerson();
        mJobType = job;
        mCash = cash;
        mName = name;
        Role jobRole = null;
        switch (job){
            case BANK:
                jobRole = SortingHat.getBankRole(mTimeShift);
                break;
            case MARKET:
                jobRole = SortingHat.getMarketRole(mTimeShift);
                break;
            case RESTAURANT:
                jobRole = SortingHat.getRestaurantRole(mTimeShift);
                break;
            case TRANSPORTATION:
                break;
            case HOUSING:
                break;
            case NONE:
                break;
        }
        boolean active = (mTimeShift == Time.GetShift());
        if (jobRole != null){
            mJobLocation = ContactList.sRoleLocations.get(jobRole);
            mRoles.put(jobRole, active);
        }
        if (active){
            for (Role iRole : mRoles.keySet()){
                iRole.setPerson(this);
            }
        }
        mHouseRole = (HousingBaseRole) SortingHat.getHousingRole(this); //get housing status
        mRoles.put(mHouseRole, true);
        //Add customer/rider role possibilities
        mRoles.put(new BankCustomerRole(this), false);
        mRoles.put(new HousingRenterRole(this), false);
        mRoles.put(new MarketCustomerRole(this), false);
        mRoles.put(new TransportationBusRiderRole(this), false);
        mRoles.put(new RestaurantCustomerRole(this), false);
    }

    /**
     * Resets all fields to their defaults and seeds the event calendar
     * (car purchase, daily job and meals, recurring party).
     * NOTE(review): mEvents is assigned twice — the first plain TreeSet is
     * immediately discarded in favor of the synchronized sorted set below.
     */
    private void initializePerson(){
        //Roles and Job
        mJobType = null;
        mRoles = new HashMap<Role, Boolean>();
        mHouseRole = null;
        mJobLocation = null;
        //Lists
        mFriends = new ArrayList<Person>();
        mEvents = new TreeSet<Event>();
        mItemInventory = Collections.synchronizedMap(new HashMap<EnumMarketItemType, Integer>());
        mItemsDesired = Collections.synchronizedMap(new HashMap<EnumMarketItemType, Integer>());
        mHomeLocations = Collections.synchronizedSet(new HashSet<Location>());
        //Personal Variables
        mName = "";
        mSSN = sSSN++; // assign SSN
        mTimeShift = (sTimeSchedule++ % 3); // assign time schedule
        mCash = 100;
        mLoan = 0;
        mHasCar = false;
        // Event Setup
        mEvents = Collections.synchronizedSortedSet(new TreeSet<Event>());
        mEvents.add(new Event(EnumEventType.GET_CAR, 0));
        mEvents.add(new Event(EnumEventType.JOB, mTimeShift + 0));
        mEvents.add(new Event(EnumEventType.EAT, (mTimeShift + 8 + mSSN % 4) % 24)); // personal time
        mEvents.add(new Event(EnumEventType.EAT, (mTimeShift + 12 + mSSN % 4) % 24)); // shift 4
        mEvents.add(new Event(EnumEventType.PARTY, (mTimeShift + 16) + (mSSN + 3) * 24)); // night time, every SSN+3 days
    }

    /** Message: the global time shift changed; wake the agent scheduler. */
    public void msgTimeShift() {
        if (Time.GetShift() == 0) {
            // resetting of variables?
        }
        stateChanged();
    }

    /**
     * Message: add a calendar event. Half of all people (odd SSNs) ignore
     * RSVP1 invitations to simulate deadbeats.
     */
    public void msgAddEvent(Event event) {
        if ((event.mEventType == EnumEventType.RSVP1) && (mSSN % 2 == 1))
            return; // maybe don't respond (half are deadbeats)
        mEvents.add(event);
    }

    /** Message from the GUI: the current animation finished; unblock the agent. */
    public void msgAnimationDone(){
        if (semAnimationDone.availablePermits() == 0)
            semAnimationDone.release();
    }

    /** Message: receive a payment from another person. */
    public void msgHereIsPayment(int senderSSN, double amount){
        mCash += amount;
    }

    /** Message from the bank: account overdrawn; record the loan amount. */
    public void msgOverdrawnAccount(double loan) {
        mLoan += loan;
    }

    /** Message from a role: it has finished; calendar processing may resume. */
    public void msgRoleFinished(){
        mRoleFinished = true;
    }

    /**
     * Agent scheduler. Off shift (and with no role in progress), due calendar
     * events are processed and removed; then the first active role that has
     * work to do is given a turn.
     *
     * @return true if any active role performed an action.
     */
    @Override
    public boolean pickAndExecuteAnAction() {
        //if not during job shift
        if ((mRoleFinished) && (Time.GetShift() != mTimeShift)){
            // Process events (calendar)
            Iterator<Event> itr = mEvents.iterator();
            while (itr.hasNext()) {
                Event event = itr.next();
                if (event.mTime > Time.GetTime())
                    break; // don't do future calendar events
                processEvent(event);
                itr.remove();
            }
        }
        // Do role actions
        for (Role iRole : mRoles.keySet()) {
            if (mRoles.get(iRole)) {
                if (iRole.pickAndExecuteAnAction())
                    return true;
            }
        }
        return false;
    }

    /**
     * Dispatches a due calendar event to the matching action. Daily events
     * (JOB, EAT) re-enqueue themselves 24 hours later; PARTY re-enqueues
     * itself plus its two invitation waves.
     */
    private synchronized void processEvent(Event event) {
        //One time events (Car)
        if (event.mEventType == EnumEventType.GET_CAR) {
            getCar(); //SHANE: 1 get car
        }
        //Daily Recurring Events (Job, Eat)
        else if (event.mEventType == EnumEventType.JOB) {
            //bank is closed on weekends
            if (!(Time.IsWeekend()) || (mJobType != EnumJobType.BANK)){
                goToJob(); //SHANE: 1 go to job
            }
            mEvents.add(new Event(event, 24));
        }
        else if (event.mEventType == EnumEventType.EAT) {
            eatFood(); //SHANE: 1 eat food
            mEvents.add(new Event(event, 24));
        }
        //Intermittent Events (Deposit Check)
        else if (event.mEventType == EnumEventType.DEPOSIT_CHECK) {
            depositCheck(); //SHANE: 1 deposit check
        }
        else if (event.mEventType == EnumEventType.ASK_FOR_RENT) {
            invokeRent(); //SHANE: 1 invoke rent
        }
        else if (event.mEventType == EnumEventType.MAINTAIN_HOUSE) {
            invokeMaintenance(); //SHANE: 1 invoke maintenance
        }
        //Party Events
        else if (event.mEventType == EnumEventType.INVITE1) {
            inviteToParty(); //SHANE: 1 invite to party
        }
        else if (event.mEventType == EnumEventType.INVITE2) {
            reinviteDeadbeats(); //SHANE: 1 reinvite deadbeats
        }
        else if (event.mEventType == EnumEventType.RSVP1) {
            respondToRSVP(); //SHANE: 1 respond to rsvp
        }
        else if (event.mEventType == EnumEventType.RSVP2) {
            respondToRSVP(); //SHANE: 1 respond to rsvp (same)
        }
        else if (event.mEventType == EnumEventType.PARTY) {
            throwParty(); //SHANE: 1 throw party
            int inviteNextDelay = 24*mSSN;
            EventParty party = (EventParty) event;
            mEvents.add(new EventParty(party, inviteNextDelay + 2));
            mEvents.add(new EventParty(party, EnumEventType.INVITE1, inviteNextDelay, getBestFriends()));
            mEvents.add(new EventParty(party, EnumEventType.INVITE2, inviteNextDelay + 1, getBestFriends()));
            //SHANE: 3 check event classes
        }
    }

    /** Blocks on the given semaphore, swallowing interrupts (logged only). */
    private void acquireSemaphore(Semaphore semaphore){
        try {
            semaphore.acquire();
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }

    /**
     * Walks to the market and activates the MarketCustomerRole to buy a car.
     * Blocks until the walk animation completes.
     */
    private void getCar(){
        Location location = ContactList.cMARKET_LOCATION;
        mGui.DoGoToDestination(location);
        acquireSemaphore(semAnimationDone);
        //remove current gui (isPresent = false)
        mGui.setInvisible();
        //create new market gui
        //lock person until role is finished
        mRoleFinished = false;
        //activate marketcustomer role
        for (Role iRole : mRoles.keySet()){
            if (iRole instanceof MarketCustomerRole){
                mRoles.put(iRole, true); //set active
            }
        }
        //add desired item
        mItemsDesired.put(EnumMarketItemType.CAR, 1); //want 1 car
        //PAEA for role will message market cashier to start transaction
        //SHANE: 3 When gets car, change mHasCar to true
    }

    /** Walks to the job location. Role activation is still TODO (see comments). */
    private void goToJob() {
        mGui.DoGoToDestination(mJobLocation);
        acquireSemaphore(semAnimationDone);
        //add job role
        // DoGoTo(work.location);
        // work.getHost().msgImHere(job);
        // job.active = T;
        // state = PersonState.Working;
    }

    /**
     * Walks to a restaurant and configures the RestaurantCustomerRole.
     * NOTE(review): restaurantCustomerRole is assumed to be found in mRoles;
     * if absent this throws NullPointerException. Restaurant choice is
     * hard-coded to 1 pending randomization.
     */
    public void eatFood() {
        //decide if eating at home or not
        //SHANE REX: 3 get to this
        //set random restaurant
        RestaurantCustomerRole restaurantCustomerRole = null;
        for (Role iRole : mRoles.keySet()){
            if (iRole instanceof RestaurantCustomerRole){
                restaurantCustomerRole = (RestaurantCustomerRole) iRole;
            }
        }
        int restaurantChoice = 1; // SHANE: Make random
        try {
            restaurantCustomerRole.setRestaurant(restaurantChoice);
        } catch (IOException e1) {
            e1.printStackTrace();
        } // DAVID: 1 This is where it's set
        try {
            mGui.DoGoToDestination(ContactList.cRESTAURANT_LOCATIONS.get(restaurantChoice));
        } catch (Exception e) {
        }
        acquireSemaphore(semAnimationDone);
    }

    /** TODO: not yet implemented. */
    private void depositCheck() {
    }

    /** TODO: not yet implemented. */
    private void throwParty() {
    }

    /** TODO: not yet implemented. */
    private void inviteToParty() {
    }

    /** TODO: not yet implemented. */
    private void reinviteDeadbeats() {
    }

    /** TODO: not yet implemented. */
    private void respondToRSVP(){
    }

    /** Forwards the rent-check trigger to the housing role. */
    private void invokeRent() {
        mHouseRole.msgTimeToCheckRent();
    }

    /** Forwards the maintenance trigger to the housing role. */
    private void invokeMaintenance() {
        mHouseRole.msgTimeToMaintain();
    }

    //JERRY 0 FOR TESTING
    public void move(){
        mGui.DoGoToDestination(ContactList.cBANK_LOCATION);
    }

    /** Attaches the city-map GUI avatar for this person. */
    public void SetGui(CityPerson pGui){
        mGui = pGui;
    }

    /** @return friends who share this person's time shift (party invitees). */
    private List<Person> getBestFriends(){
        List<Person> bestFriends = new ArrayList<Person>();
        for (Person iPerson : mFriends){
            if (iPerson.getTimeShift() == mTimeShift)
                bestFriends.add(iPerson);
        }
        return bestFriends;
    }

    //SHANE: 4 Organize PersonAgent Accessors

    /** Registers a role, sets its active flag, and back-links it to this person. */
    public void addRole(Role role, boolean active) {
        mRoles.put(role, active);
        role.setPerson(this);
    }

    public void removeRole(Role r) {
        mRoles.remove(r);
    }

    public double getCash() {
        return mCash;
    }

    public void setCash(double cash) {
        mCash = cash;
    }

    public void addCash(double amount) {
        mCash += amount;
    }

    public void setLoan(double loan) {
        mLoan = loan;
    }

    public double getLoan() {
        return mLoan;
    }

    public Map<EnumMarketItemType, Integer> getItemsDesired() {
        return mItemsDesired;
    }

    public int getSSN() {
        return mSSN;
    }

    public Map<EnumMarketItemType, Integer> getItemInventory() {
        return mItemInventory;
    }

    /** Console logging helper, prefixed with this person's name. */
    protected void print(String msg) {
        System.out.println("" + mName + ": " + msg);
    }

    public String getName(){
        return mName;
    }

    public int getTimeShift(){
        return mTimeShift;
    }

    public void setName(String name) {
        mName = name;
    }

    public void setSSN(int SSN) {
        mSSN = SSN;
    }

    @Override
    public void setItemsDesired(Map<EnumMarketItemType, Integer> map) {
        mItemsDesired = map;
    }

    @Override
    public Map<Role, Boolean> getRoles() {
        return mRoles;
    }

    @Override
    public Role getHousingRole() {
        return mHouseRole;
    }
}
package battle;

import java.time.Duration;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Iterator;

/**
 * Maintains the schedule of unit turns for a battle. Turns are
 * {@code TurnItem} unit/time pairings kept in a list that is sorted on
 * demand; the battle clock starts at the real local date/time when the
 * TurnOrder is created and advances to the time of each turn as it becomes
 * due.
 */
public class TurnOrder {

  /**
   * The date and time at the start of the battle.
   */
  private final LocalDateTime startTime;

  /**
   * Keeps track of the current date and time. The clock starts at the local
   * date and time and increments from there.
   */
  private LocalDateTime currentTime;

  /**
   * List of Subscriber objects who request to be updated when the TurnOrder
   * advances to the next Turn.
   */
  private final ArrayList<Subscriber> subscribers;

  /**
   * An array of unit-time pairings contained within an internal private class.
   */
  private final ArrayList<TurnItem> turnList;

  /**
   * Basic constructor. Starts the battle clock at the current local time.
   */
  protected TurnOrder() {
    this.startTime = LocalDateTime.now();
    this.currentTime = this.startTime;
    this.subscribers = new ArrayList<>();
    this.turnList = new ArrayList<>();
  }

  /**
   * Adds a TurnItem at a time value equal to the next TurnItem in the
   * TurnOrder. If no turns are scheduled yet, the new turn is placed at the
   * current clock time.
   * @param unit Unit object that is the subject of the TurnItem.
   * @param stunnable true if the TurnItem is affected by stuns made to the
   *        Unit.
   * @return the TurnItem created and added by this call.
   */
  protected TurnItem addAfterNext(Unit unit, boolean stunnable) {
    turnList.sort(null);
    // BUG FIX: get(0) on an empty list threw IndexOutOfBoundsException;
    // fall back to the current time when the schedule is empty.
    LocalDateTime time = turnList.isEmpty() ? currentTime : turnList.get(0).getTime();
    return addTurnItem(unit, time, stunnable);
  }

  /**
   * Appends a pre-built TurnItem to the TurnOrder. Ordering is re-established
   * lazily (the list is sorted in {@link #next()} and {@link #addAfterNext}).
   * @param newItem new TurnItem to be added. Value is allowed to be null.
   */
  protected void addTurnItem(TurnItem newItem) {
    turnList.add(newItem);
  }

  /**
   * Creates and schedules a turn for the given Unit at an absolute date/time.
   * @param unit Unit object to be called when the turn is due.
   * @param time absolute date and time when the turn is due.
   * @param stunnable true if the turn is delayed when the Unit called by the
   *        turn is stunned.
   * @return the TurnItem created and added by this call.
   */
  protected TurnItem addTurnItem(Unit unit, LocalDateTime time, boolean stunnable) {
    TurnItem newItem = new TurnItem(unit, time, stunnable);
    turnList.add(newItem);
    return newItem;
  }

  /**
   * Creates and schedules a turn for the given Unit at an offset from the
   * current clock time.
   * @param unit Unit object to be called when the turn is due.
   * @param time time from the current clock time when the turn is due.
   *        Measured in milliseconds.
   * @param stunnable true if the turn is delayed when the Unit called by the
   *        turn is stunned.
   * @return the TurnItem created and added by this call.
   */
  protected TurnItem addTurnItem(Unit unit, int time, boolean stunnable) {
    LocalDateTime dateTime = currentTime.plus(Duration.ofMillis(time));
    TurnItem newItem = new TurnItem(unit, dateTime, stunnable);
    turnList.add(newItem);
    return newItem;
  }

  /**
   * Creates and schedules a turn for the given Unit at a Duration offset from
   * the current clock time.
   * @param unit Unit object to be called when the turn is due.
   * @param time time from the current clock time when the turn is due.
   * @param stunnable true if the turn is delayed when the Unit called by the
   *        turn is stunned.
   * @return the TurnItem created and added by this call.
   */
  protected TurnItem addTurnItem(Unit unit, Duration time, boolean stunnable) {
    TurnItem newItem = new TurnItem(unit, currentTime.plus(time), stunnable);
    turnList.add(newItem);
    return newItem;
  }

  /**
   * Getter for the current time of the battle.
   * @return date and time value.
   */
  protected LocalDateTime getCurrentTime() {
    return currentTime;
  }

  /**
   * Getter for the start time of the battle.
   * @return date and time value.
   */
  protected LocalDateTime getStartTime() {
    return startTime;
  }

  /**
   * Returns the next Unit to have a turn due and increments the clock to the
   * time of that turn. Null-unit placeholder items are consumed and skipped;
   * subscribers are notified each time the clock actually moves forward.
   * @return the Unit whose turn is now due, or null if no turns remain.
   */
  protected Unit next() {
    turnList.sort(null);
    while (turnList.size() > 0) {
      TurnItem nextTurn = turnList.remove(0);
      if (currentTime.isBefore(nextTurn.getTime())) {
        currentTime = nextTurn.getTime();
        for (Subscriber sub : subscribers) {
          sub.update();
        }
      }
      if (nextTurn.getUnit() != null) {
        nextTurn.getUnit().removeTurnItem(nextTurn);
        return nextTurn.getUnit();
      }
    }
    return null;
  }

  /**
   * Removes all TurnItem objects from the TurnOrder matching the given Unit.
   * @param oldUnit the Unit object used to identify all turns to be removed.
   * @return true if a match to the given Unit was found and removed.
   */
  protected boolean removeUnit(Unit oldUnit) {
    boolean returnValue = false;
    Iterator<TurnItem> iterateTurns = turnList.iterator();
    while (iterateTurns.hasNext()) {
      TurnItem nextTurn = iterateTurns.next();
      if (nextTurn.getUnit() == oldUnit) {
        iterateTurns.remove();
        returnValue = true;
      }
    }
    return returnValue;
  }

  /**
   * Adds a Subscriber object that is updated whenever the TurnOrder progresses
   * to the next TurnItem. Null and duplicate subscribers are rejected.
   * @param newSubscriber Subscriber requesting to receive requests to update.
   * @return true if the Subscriber was successfully added.
   */
  protected boolean subscribe(Subscriber newSubscriber) {
    if ((newSubscriber != null) && (!subscribers.contains(newSubscriber))) {
      return subscribers.add(newSubscriber);
    }
    return false;
  }

  /**
   * Removes a Subscriber object that is updated whenever the TurnOrder
   * progresses to the next TurnItem.
   * @param oldSubscriber Subscriber requesting to be no longer receive requests
   *        to update.
   * @return true if the Subscriber was successfully removed.
   */
  protected boolean unsubscribe(Subscriber oldSubscriber) {
    return subscribers.remove(oldSubscriber);
  }
}
package cgeo.geocaching.test; import junit.framework.Assert; import android.test.AndroidTestCase; import cgeo.geocaching.cgDestination; import cgeo.geocaching.geopoint.Geopoint; public class cgDestinationTest extends AndroidTestCase { cgDestination dest = null; @Override protected void setUp() throws Exception { super.setUp(); dest = new cgDestination(1, 10000, new Geopoint(52.5, 9.33)); } public void testSomething() throws Throwable { Assert.assertEquals(1, dest.getId()); Assert.assertEquals(10000, dest.getDate()); Assert.assertEquals(52.5, dest.getCoords().getLatitude()); Assert.assertEquals(9.33, dest.getCoords().getLongitude()); } }
package ch.elexis.data; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import java.util.Comparator; import java.util.HashSet; import java.util.List; import ch.elexis.admin.AccessControlDefaults; import ch.elexis.core.constants.Preferences; import ch.elexis.core.data.activator.CoreHub; import ch.elexis.core.data.events.ElexisEventDispatcher; import ch.elexis.core.data.interfaces.IDiagnose; import ch.elexis.core.data.interfaces.IOptifier; import ch.elexis.core.data.interfaces.IVerrechenbar; import ch.elexis.core.data.interfaces.events.MessageEvent; import ch.elexis.core.data.status.ElexisStatus; import ch.elexis.core.exceptions.PersistenceException; import ch.elexis.core.model.prescription.EntryType; import ch.elexis.core.text.model.Samdas; import ch.rgw.tools.ExHandler; import ch.rgw.tools.JdbcLink; import ch.rgw.tools.JdbcLink.Stm; import ch.rgw.tools.Result; import ch.rgw.tools.StringTool; import ch.rgw.tools.TimeTool; import ch.rgw.tools.VersionedResource; import ch.rgw.tools.VersionedResource.ResourceItem; public class Konsultation extends PersistentObject implements Comparable<Konsultation> { public static final String FLD_ENTRY = "Eintrag"; public static final String DATE = "Datum"; public static final String FLD_BILL_ID = "RechnungsID"; public static final String FLD_CASE_ID = "FallID"; public static final String FLD_MANDATOR_ID = "MandantID"; private static final String TABLENAME = "BEHANDLUNGEN"; volatile int actEntry; protected String getTableName(){ return TABLENAME; } static { addMapping(TABLENAME, FLD_MANDATOR_ID, PersistentObject.DATE_COMPOUND, FLD_CASE_ID, FLD_BILL_ID, "Eintrag=S:V:Eintrag", "Diagnosen=JOINT:BehandlungsID:DiagnoseID:BEHDL_DG_JOINT"); } protected Konsultation(String id){ super(id); } public boolean isValid(){ if (!super.isValid()) { return false; } Mandant m = getMandant(); if ((m == null) || (!m.isValid())) { return false; } Fall fall = getFall(); if ((fall == 
null) || (!fall.isValid())) { return false; } return true; } public Fall getFall(){ return Fall.load(get(FLD_CASE_ID)); } /** Die Konsultation einem Fall zuordnen */ public void setFall(Fall f){ if (isEditable(true)) { Fall alt = getFall(); set(FLD_CASE_ID, f.getId()); if (alt != null) { List<Verrechnet> vv = getLeistungen(); for (Verrechnet v : vv) { v.setStandardPreis(); } } } } /** Eine neue Konsultation zu einem Fall erstellen */ public Konsultation(Fall fall){ if (fall == null) { fall = (Fall) ElexisEventDispatcher.getSelected(Fall.class); } if (fall == null) { MessageEvent.fireError("Kein Fall ausgewählt", "Bitte zunächst einen Fall auswählen, dem die neue Konsultation zugeordnet werden soll"); } else if (fall.isOpen() == false) { MessageEvent.fireError("Fall geschlossen", "Zu einem abgeschlossenen Fall kann keine neue Konsultation erstellt werden"); } else { create(null); set(new String[] { DATE, FLD_CASE_ID, FLD_MANDATOR_ID }, new TimeTool().toString(TimeTool.DATE_GER), fall.getId(), CoreHub.actMandant.getId()); fall.getPatient().setInfoElement("LetzteBehandlung", getId()); } if (getDefaultDiagnose() != null) addDiagnose(getDefaultDiagnose()); } /** Eine Konsultation anhand ihrer ID von der Datenbank einlesen */ public static Konsultation load(String id){ Konsultation ret = new Konsultation(id); return ret; } /** * get the number of the last (highest) Version * * @return */ public int getHeadVersion(){ VersionedResource vr = getVersionedResource(FLD_ENTRY, false); return vr.getHeadVersion(); } /** * get the text entry od this Konsultation * * @return */ public VersionedResource getEintrag(){ VersionedResource vr = getVersionedResource(FLD_ENTRY, true); return vr; } /** * Insert an XREF to the EMR text * * @param provider * unique String identifying the provider * @param id * String identifying the item * @param pos * position of the item as offset relative to the contents * @param text * text to insert */ public void addXRef(String provider, String id, int 
pos, String text){ VersionedResource vr = getEintrag(); String ntext = vr.getHead(); Samdas samdas = new Samdas(ntext); Samdas.Record record = samdas.getRecord(); String recText = record.getText(); if ((pos == -1) || pos > recText.length()) { pos = recText.length(); recText += text; } else { recText = recText.substring(0, pos) + text + recText.substring(pos); } record.setText(recText); Samdas.XRef xref = new Samdas.XRef(provider, id, pos, text.length()); record.add(xref); updateEintrag(samdas.toString(), true); // XRefs may always be added } private Samdas getEntryRaw(){ VersionedResource vr = getEintrag(); String ntext = vr.getHead(); Samdas samdas = new Samdas(ntext); return samdas; } private void updateEntryRaw(Samdas samdas){ updateEintrag(samdas.toString(), false); } /** * Remove an XREF from the EMR text. Will remove all XREFS of the given provider with the given * ID from this EMR. Warning: The IKonsExtension's removeXRef method will not be called. * * @param provider * unique provider id * @param id * item ID */ public void removeXRef(String provider, String id){ VersionedResource vr = getEintrag(); String ntext = vr.getHead(); Samdas samdas = new Samdas(ntext); Samdas.Record record = samdas.getRecord(); String recText = record.getText(); List<Samdas.XRef> xrefs = record.getXrefs(); boolean changed = false; for (Samdas.XRef xref : xrefs) { if ((xref.getProvider().equals(provider)) && (xref.getID().equals(id))) { if (recText.length() > xref.getPos() + xref.getLength()) { recText = recText.substring(0, xref.getPos()) + recText.substring(xref.getPos() + xref.getLength()); record.setText(recText); } record.remove(xref); changed = true; } } if (changed) { updateEintrag(samdas.toString(), true); } } /** * Normally, the thext of a Konsultation may only be changed, if the Konsultation has not yet * been billed. Due to customer demand, this was weakended: A User can have the right * ADMIN_KONS_EDIT_IF_BILLED and then can edit all Konsultations, even billed ones. 
* * @return */ private boolean isEintragEditable(){ boolean editable = false; boolean hasRight = CoreHub.acl.request(AccessControlDefaults.ADMIN_KONS_EDIT_IF_BILLED); if (hasRight) { // user has right to change Konsultation. in this case, the user // may change the text even if the Konsultation has already been // billed, so don't check if it is billed editable = isEditable(true, false, true); } else { // normal case, check all editable = isEditable(true, true, true); } return editable; } public void setEintrag(VersionedResource eintrag, boolean force){ if (force || isEintragEditable()) { setVersionedResource(FLD_ENTRY, eintrag.getHead()); } } public void updateEintrag(String eintrag, boolean force){ if (force || isEintragEditable()) { setVersionedResource(FLD_ENTRY, eintrag); // ElexisEventDispatcher.update(this); } } /** * remove all but the newest version of the entry */ public void purgeEintrag(){ VersionedResource vr = getEintrag(); vr.purge(); setBinary(FLD_ENTRY, vr.serialize()); } /** Den zugeordneten Mandanten holen */ public Mandant getMandant(){ return Mandant.load(get(FLD_MANDATOR_ID)); } /** Die Konsultation einem Mandanten zuordnen */ public void setMandant(Mandant m){ if (m != null) { set(FLD_MANDATOR_ID, m.getId()); } } public void setDatum(String dat, boolean force){ if (dat != null) { if (force || isEditable(true)) { set(DATE, dat); } } } /** das Behandlungsdatum auslesen */ public String getDatum(){ String ret = get(DATE); return ret; } public Rechnung getRechnung(){ return Rechnung.load(get(FLD_BILL_ID)); } /** * Lookup {@link Rechnung} including canceled ones for this {@link Konsultation}. Only works * with {@link Konsultation} created with Elexis version 3.0.0 or newer. 
* * @since 3.0.0 * @return */ public List<Rechnung> getRechnungen(){ List<VerrechnetCopy> konsVerrechnet = VerrechnetCopy.getVerrechnetCopyByConsultation(this); List<Rechnung> ret = new ArrayList<Rechnung>(); HashSet<String> rechnungsIds = new HashSet<String>(); for (VerrechnetCopy verrechnetCopy : konsVerrechnet) { String rechnungsId = verrechnetCopy.get(VerrechnetCopy.RECHNUNGID); rechnungsIds.add(rechnungsId); } for (String rechnungsId : rechnungsIds) { Rechnung rechnung = Rechnung.load(rechnungsId); if (rechnung != null) { ret.add(rechnung); } } return ret; } public void setRechnung(Rechnung r){ if (r != null) { set(FLD_BILL_ID, r.getId()); } } /** * Checks if the Konsultation can be altered. This method is internally used. * * @param checkMandant * checks whether the current mandant is the owner of this Konsultation * @param checkBill * checks whether the Konsultation has already been billed * @param showError * if true, show error messages * @return true if the Konsultation can be altered in repsect to the given checks, else * otherwise. 
*/ private boolean isEditable(boolean checkMandant, boolean checkBill, boolean showError){ Mandant m = getMandant(); checkMandant = !CoreHub.acl.request(AccessControlDefaults.LSTG_CHARGE_FOR_ALL); boolean mandantOK = true; boolean billOK = true; Mandant mandator = ElexisEventDispatcher.getSelectedMandator(); boolean bMandantLoggedIn = (mandator != null); // if m is null, ignore checks (return true) if (m != null && mandator != null) { if (checkMandant && !(m.getId().equals(mandator.getId()))) { mandantOK = false; } if (checkBill) { Rechnung rn = getRechnung(); if (rn == null || (!rn.exists())) { billOK = true; } else { int stat = rn.getStatus(); if (stat == RnStatus.STORNIERT) { billOK = true; } else { billOK = false; } } } } boolean ok = billOK && mandantOK && bMandantLoggedIn; if (ok) { return true; } // something is not ok if (showError) { String msg = ""; if (!bMandantLoggedIn) { msg = "Es ist kein Mandant eingeloggt"; } else { if (!billOK) { msg = "Für diese Behandlung wurde bereits eine Rechnung erstellt."; } else { msg = "Diese Behandlung ist nicht von Ihnen"; } } MessageEvent.fireError("Konsultation kann nicht geändert werden", msg); } return false; } /** * Checks if the Konsultation can be altered. A user that has the right LSTG_CHARGE_FOR_ALL can * charge for all mandators. Others can only charge a Konsultation that belongs to their own * logged in mandator. * * @param showError * if true, show error messages * @return true if the Konsultation can be altered, else otherwise. 
*/ public boolean isEditable(boolean showError){ Fall fall = getFall(); if (fall != null) { if ((!fall.isOpen()) && showError) { MessageEvent.fireError("Fall geschlossen", "Diese Konsultation gehört zu einem abgeschlossenen Fall"); return false; } } // check mandant and bill return isEditable(true, true, showError); } public int getStatus(){ Rechnung r = getRechnung(); if (r != null) { return r.getStatus(); } Mandant rm = getMandant(); if ((rm != null) && (rm.equals(ElexisEventDispatcher.getSelected(Mandant.class)))) { if (getDatum().equals(new TimeTool().toString(TimeTool.DATE_GER))) { return RnStatus.VON_HEUTE; } else { return RnStatus.NICHT_VON_HEUTE; } } else { return RnStatus.NICHT_VON_IHNEN; } } public String getStatusText(){ return RnStatus.getStatusText(getStatus()); } /** Eine einzeilige Beschreibung dieser Konsultation holen */ public String getLabel(){ StringBuffer ret = new StringBuffer(); Mandant m = getMandant(); ret.append(getDatum()).append(" (").append(getStatusText()).append(") - ") .append((m == null) ? "?" 
// NOTE(review): this chunk begins in the middle of a label-building method whose
// head lies outside the visible range; the fragment below is kept unchanged.
: m.getLabel());
    return ret.toString();
}

/**
 * Build a verbose label for this consultation:
 * "&lt;lastname&gt; &lt;firstname&gt;, &lt;birthdate&gt; - &lt;consultation date&gt;".
 */
public String getVerboseLabel(){
    StringBuilder ret = new StringBuilder();
    ret.append(getFall().getPatient().getName()).append(" ")
        .append(getFall().getPatient().getVorname()).append(", ")
        .append(getFall().getPatient().getGeburtsdatum()).append(" - ").append(getDatum());
    return ret.toString();
}

/**
 * Fetch the list of diagnoses attached to this consultation.
 *
 * Reads the BEHDL_DG_JOINT join table (skipping soft-deleted rows on both
 * sides), then resolves each diagnosis id to "KLASSE::DG_CODE" and asks the
 * persistent-object factory to materialize it. Unresolvable codes are logged
 * and skipped; any other persistence failure is rethrown as
 * {@link PersistenceException}.
 *
 * @return list of resolved diagnoses (possibly empty, never null)
 */
public ArrayList<IDiagnose> getDiagnosen(){
    ArrayList<IDiagnose> ret = new ArrayList<IDiagnose>();
    Stm stm = getDBConnection().getStatement();
    ResultSet rs1 = stm.query(
        "SELECT DIAGNOSEID FROM BEHDL_DG_JOINT INNER JOIN BEHANDLUNGEN on BehandlungsID=BEHANDLUNGEN.id where BEHDL_DG_JOINT.deleted='0' and BEHANDLUNGEN.deleted='0' AND BEHANDLUNGSID="
            + JdbcLink.wrap(getId()));
    StringBuilder sb = new StringBuilder();
    try {
        while (rs1.next() == true) {
            String dgID = rs1.getString(1);
            // second statement: map the diagnosis row to a factory string
            Stm stm2 = getDBConnection().getStatement();
            ResultSet rs2 = stm2
                .query("SELECT DG_CODE,KLASSE FROM DIAGNOSEN WHERE ID=" + JdbcLink.wrap(dgID));
            if (rs2.next()) {
                sb.setLength(0);
                // factory string format is "<class>::<code>"
                sb.append(rs2.getString(2)).append("::");
                sb.append(rs2.getString(1));
                try {
                    PersistentObject dg = CoreHub.poFactory.createFromString(sb.toString());
                    if (dg != null) {
                        ret.add((IDiagnose) dg);
                    }
                } catch (Exception ex) {
                    // bad diagnosis code: log and continue with the remaining rows
                    log.error("Fehlerhafter Diagnosecode " + sb.toString());
                }
            }
            rs2.close();
            getDBConnection().releaseStatement(stm2);
        }
        rs1.close();
    } catch (Exception ex) {
        ElexisStatus status = new ElexisStatus(ElexisStatus.ERROR, CoreHub.PLUGIN_ID,
            ElexisStatus.CODE_NONE, "Persistence error: " + ex.getMessage(), ex,
            ElexisStatus.LOG_ERRORS);
        throw new PersistenceException(status);
    } finally {
        getDBConnection().releaseStatement(stm);
    }
    return ret;
}

/**
 * Attach a diagnosis to this consultation.
 *
 * No-op when the consultation is not editable or when the same diagnosis
 * (code + class) is already linked. Creates the DIAGNOSEN row on first use of
 * a code, then inserts the join-table row and updates the per-patient and
 * per-user usage statistics.
 *
 * @param dg the diagnosis to attach
 */
public void addDiagnose(IDiagnose dg){
    if (!isEditable(true)) {
        return;
    }
    // already linked to this consultation? then nothing to do
    String dgid = prepareDiagnoseSelectWithCodeAndClass(dg.getCode(), dg.getClass().getName());
    if (dgid != null) {
        return;
    }
    String diagnosisEntryExists = getDBConnection().queryString(
        "SELECT ID FROM DIAGNOSEN WHERE KLASSE=" + JdbcLink.wrap(dg.getClass().getName())
            + " AND DG_CODE=" + JdbcLink.wrap(dg.getCode()));
    StringBuilder sql = new StringBuilder(200);
    if (StringTool.isNothing(diagnosisEntryExists)) {
        // first use of this code anywhere: create the DIAGNOSEN master row
        diagnosisEntryExists = StringTool.unique("bhdl");
        sql.append("INSERT INTO DIAGNOSEN (ID, DG_CODE, DG_TXT, KLASSE) VALUES (")
            .append(JdbcLink.wrap(diagnosisEntryExists)).append(",")
            .append(JdbcLink.wrap(dg.getCode())).append(",").append(JdbcLink.wrap(dg.getText()))
            .append(",").append(JdbcLink.wrap(dg.getClass().getName())).append(")");
        getDBConnection().exec(sql.toString());
        sql.setLength(0);
    }
    // link consultation and diagnosis via the join table
    sql.append("INSERT INTO BEHDL_DG_JOINT (ID,BEHANDLUNGSID,DIAGNOSEID) VALUES (")
        .append(JdbcLink.wrap(StringTool.unique("bhdx"))).append(",").append(getWrappedId())
        .append(",").append(JdbcLink.wrap(diagnosisEntryExists)).append(")");
    getDBConnection().exec(sql.toString());
    // usage statistics for MFU ("most frequently used") lists
    getFall().getPatient().countItem(dg);
    CoreHub.actUser.countItem(dg);
}

/**
 * Remove a diagnosis from this consultation's diagnosis list.
 *
 * If the exact code cannot be resolved, a TI-code chapter fallback is tried
 * (two-character codes ending in '0' are retried with the first character
 * only). Unresolvable requests are logged but not treated as errors.
 */
public void removeDiagnose(IDiagnose dg){
    if (isEditable(true)) {
        String dgid =
            prepareDiagnoseSelectWithCodeAndClass(dg.getCode(), dg.getClass().getName());
        if (dgid == null) {
            String code = dg.getCode();
            // chapter of a TI-Code
            if (code != null && code.length() == 2 && code.charAt(1) == '0') {
                code = code.substring(0, 1);
                dgid = prepareDiagnoseSelectWithCodeAndClass(code, dg.getClass().getName());
            }
        }
        if (dgid == null) {
            log.warn(
                "Requested delete of diagnosis which could not be resolved [{}] in consultation [{}]",
                dg.getCode() + "/" + dg.getClass().getName(), getId());
        } else {
            StringBuilder sql = new StringBuilder();
            sql.append("DELETE FROM BEHDL_DG_JOINT WHERE BehandlungsID=").append(getWrappedId())
                .append(" AND DiagnoseId=" + JdbcLink.wrap(dgid));
            log.debug(sql.toString());
            getDBConnection().exec(sql.toString());
        }
    }
}

// SQL for resolving the join-table id of a diagnosis, given this
// consultation's id plus the diagnosis code and class.
private final String STM_S_BDJ =
    "SELECT BDJ.DiagnoseId FROM BEHDL_DG_JOINT BDJ, DIAGNOSEN D"
        + " WHERE BDJ.BehandlungsID=? AND D.ID = BDJ.DiagnoseID AND D.DG_CODE=? AND D.KLASSE=?;";

/**
 * Resolve the DiagnoseId linked to this consultation for the given diagnosis
 * code and class name, or null if no such link exists (or on SQL error, which
 * is reported to the user and logged).
 */
private String prepareDiagnoseSelectWithCodeAndClass(String code, String classname){
    PreparedStatement pst = getDBConnection().getPreparedStatement(STM_S_BDJ);
    try {
        pst.setString(1, getId());
        pst.setString(2, code);
        pst.setString(3, classname);
        ResultSet rs = pst.executeQuery();
        if (rs.next()) {
            return rs.getString(1);
        }
    } catch (SQLException e) {
        MessageEvent.fireError("Fehler beim Löschen", e.getMessage(), e);
        log.error("Error deleting diagnosis", e);
    } finally {
        getDBConnection().releasePreparedStatement(pst);
    }
    return null;
}

/**
 * Fetch all billed services (Verrechnet) of this consultation, ordered by
 * class and service code.
 */
public List<Verrechnet> getLeistungen(){
    Query<Verrechnet> qbe = new Query<Verrechnet>(Verrechnet.class);
    qbe.add(Verrechnet.KONSULTATION, Query.EQUALS, getId());
    qbe.orderBy(false, Verrechnet.CLASS, Verrechnet.LEISTG_CODE);
    return qbe.execute();
}

/**
 * Remove a billed service from this consultation via its optifier.
 *
 * For articles the stock is replenished, and a linked self-dispensed
 * prescription (if any) is removed as well.
 *
 * @param ls the billed service to remove
 * @return the optifier result; a WARNING result if the consultation is not
 *         editable
 */
public Result<Verrechnet> removeLeistung(Verrechnet ls){
    if (isEditable(true)) {
        IVerrechenbar v = ls.getVerrechenbar();
        int z = ls.getZahl();
        Result<Verrechnet> result = v.getOptifier().remove(ls, this);
        if (result.isOK()) {
            if (v instanceof Artikel) {
                // put the article(s) back into stock
                Artikel art = (Artikel) v;
                CoreHub.getStockService().performSingleReturn(art, z);
                // also remove a linked self-dispensed prescription, if present
                Object prescId = ls.getDetail(Verrechnet.FLD_EXT_PRESC_ID);
                if (prescId instanceof String) {
                    Prescription prescription = Prescription.load((String) prescId);
                    if (prescription.getEntryType() == EntryType.SELF_DISPENSED) {
                        prescription.remove();
                        ElexisEventDispatcher.reload(Prescription.class);
                    }
                }
            }
        }
        return result;
    }
    return new Result<Verrechnet>(Result.SEVERITY.WARNING, 3,
        "Behandlung geschlossen oder nicht von Ihnen", null, false);
}

/**
 * Add a billable service to this consultation via its optifier; on success
 * the usage statistics (patient, user, code system) are updated and an
 * update event is fired.
 *
 * @return the optifier result; a WARNING result if the consultation is not
 *         editable
 */
public Result<IVerrechenbar> addLeistung(IVerrechenbar l){
    if (isEditable(false)) {
        // TODO: further service types besides Medikamente_BAG and
        // arzttarif_ch=Tarmed; possibly centralize that handling here
        IOptifier optifier = l.getOptifier();
        Result<IVerrechenbar> result = optifier.add(l, this);
        if (result.isOK()) {
            ElexisEventDispatcher.update(this);
            getFall().getPatient().countItem(l);
            CoreHub.actUser.countItem(l);
            CoreHub.actUser.statForString("LeistungenMFU", l.getCodeSystemName());
        }
        return result;
    }
    return new Result<IVerrechenbar>(Result.SEVERITY.WARNING, 2,
        "Behandlung geschlossen oder nicht von Ihnen", null, false);
}

/**
 * Returns the author of the latest version of a consultation entry. Each consultation always
 * only has one author, and that's the one saved in the last version of a consultation entry.
 *
 * @return Username of the author or an empty string.
 */
public String getAuthor(){
    String author = "";
    VersionedResource resource = this.getEintrag();
    if (resource != null) {
        ResourceItem item = resource.getVersion(resource.getHeadVersion());
        if (item != null) {
            // the author is recorded in the version's remark field
            return item.remark;
        }
    }
    return author;
}

/**
 * How much did this consultation cost us? Sums EK_KOSTEN over the
 * consultation's non-deleted service rows directly in SQL.
 *
 * @return total cost, or 0 on error
 */
public int getKosten(){
    int sum = 0;
    Stm stm = getDBConnection().getStatement();
    try {
        ResultSet res =
            stm.query("SELECT EK_KOSTEN FROM LEISTUNGEN WHERE deleted='0' AND BEHANDLUNG="
                + getWrappedId());
        while ((res != null) && res.next()) {
            sum += res.getInt(1);
        }
    } catch (Exception ex) {
        ExHandler.handle(ex);
        return 0;
    } finally {
        getDBConnection().releaseStatement(stm);
    }
    return sum;
}

/** Total minutes of all billed services (count * minutes per service). */
public int getMinutes(){
    int sum = 0;
    List<Verrechnet> l = getLeistungen();
    for (Verrechnet v : l) {
        IVerrechenbar iv = v.getVerrechenbar();
        if (iv != null) {
            sum += (v.getZahl() * iv.getMinutes());
        }
    }
    return sum;
}

/**
 * How much revenue (in Rappen) does this consultation bring in?
 * Sums price * count * scale over the non-deleted service rows.
 *
 * @deprecated not accurate. use getLeistungen()
 */
@Deprecated
public double getUmsatz(){
    double sum = 0.0;
    Stm stm = getDBConnection().getStatement();
    try {
        ResultSet res = stm.query(
            "SELECT VK_PREIS,ZAHL,SCALE FROM LEISTUNGEN WHERE deleted='0' AND BEHANDLUNG="
                + getWrappedId());
        while ((res != null) && res.next()) {
            // SCALE is stored as a percentage
            double scale = res.getDouble(3) / 100.0;
            sum += (res.getDouble(1) * res.getDouble(2)) * scale;
        }
    } catch (Exception ex) {
        ExHandler.handle(ex);
        return 0;
    } finally {
        getDBConnection().releaseStatement(stm);
    }
    return sum;
}

/**
 * How much of the revenue of this consultation remains as profit
 * (revenue minus cost)?
 *
 * @deprecated relies on the deprecated {@link #getUmsatz()}
 */
@Deprecated
public double getGewinn(){
    return getUmsatz() - getKosten();
}

/** Change the scale (percentage) of a billed service on this consultation. */
public void changeScale(IVerrechenbar v, int scale){
    if (isEditable(true)) {
        StringBuilder sb = new StringBuilder();
        sb.append("UPDATE LEISTUNGEN SET SCALE='").append(scale).append("' WHERE BEHANDLUNG=")
            .append(getWrappedId())
            .append(" AND LEISTG_CODE=").append(JdbcLink.wrap(v.getId()));
        getDBConnection().exec(sb.toString());
    }
}

/** Change the count of a billed service on this consultation. */
public void changeZahl(IVerrechenbar v, int nz){
    if (isEditable(true)) {
        StringBuilder sql = new StringBuilder();
        sql.append("UPDATE LEISTUNGEN SET ZAHL=").append(nz)
            .append(" WHERE LEISTG_CODE=").append(JdbcLink.wrap(v.getId()))
            .append(" AND BEHANDLUNG=").append(getWrappedId());
        getDBConnection().exec(sql.toString());
    }
}

@Override
public boolean delete(){
    return delete(true);
}

/**
 * Delete this consultation together with its dependent rows.
 *
 * NOTE(review): due to operator precedence the condition below reads
 * "no services OR (forced AND user has DELETE_FORCED right)" — a
 * consultation with services is only deleted on a forced call by a
 * sufficiently privileged user.
 */
public boolean delete(boolean forced){
    if (forced || isEditable(true)) {
        List<Verrechnet> vv = getLeistungen();
        if ((vv.size() == 0) || (forced == true)
            && (CoreHub.acl.request(AccessControlDefaults.DELETE_FORCED) == true)) {
            delete_dependent();
            return super.delete();
        }
    }
    return false;
}

/** Delete all billed services and diagnosis links of this consultation. */
private boolean delete_dependent(){
    for (Verrechnet vv : new Query<Verrechnet>(Verrechnet.class, Verrechnet.KONSULTATION,
        getId()).execute()) {
        vv.delete();
    }
    getDBConnection().exec("DELETE FROM BEHDL_DG_JOINT WHERE BEHANDLUNGSID=" + getWrappedId());
    return true;
}

/** Chronological comparison by consultation date. */
public int compareTo(Konsultation b){
    TimeTool me = new TimeTool(getDatum());
    TimeTool other = new TimeTool(b.getDatum());
    return me.compareTo(other);
}

/**
 * Helper: Get the "active" cons. Normally, it is the actually selected cons. if the actually
 * selected cons does not match the actually selected patient, then it is rather the latest cons
 * of the actually selected patient.
 *
 * @return the active Kons
 * @author gerry new concept due to some obscure selection problems
 */
public static Konsultation getAktuelleKons(){
    Konsultation ret = (Konsultation) ElexisEventDispatcher.getSelected(Konsultation.class);
    Patient pat = ElexisEventDispatcher.getSelectedPatient();
    // selected cons is usable when there is no patient selection or it matches
    if ((ret != null)
        && ((pat == null) || (ret.getFall().getPatient().getId().equals(pat.getId())))) {
        return ret;
    }
    if (pat != null) {
        ret = pat.getLetzteKons(true);
        return ret;
    }
    MessageEvent.fireError("Kein Patient ausgewählt",
        "Bitte wählen Sie zuerst einen Patienten aus");
    return null;
}

// framework constructor (persistence layer instantiates via reflection)
protected Konsultation(){}

/** Comparator ordering consultations by date, optionally reversed. */
static class BehandlungsComparator implements Comparator<Konsultation> {
    boolean rev;

    BehandlungsComparator(boolean reverse){
        rev = reverse;
    }

    public int compare(Konsultation b1, Konsultation b2){
        TimeTool t1 = new TimeTool(b1.getDatum());
        TimeTool t2 = new TimeTool(b2.getDatum());
        if (rev == true) {
            return t2.compareTo(t1);
        } else {
            return t1.compareTo(t2);
        }
    }
}

@Override
public boolean isDragOK(){
    return true;
}

/**
 * Creates a new Konsultation object, with an optional initial text.
 *
 * @param initialText
 *            the initial text to be set, or null if no initial text should be set.
 */
public static void neueKons(final String initialText){
    Patient actPatient = ElexisEventDispatcher.getSelectedPatient();
    Fall actFall = (Fall) ElexisEventDispatcher.getSelected(Fall.class);
    if (actFall == null) {
        if (actPatient == null) {
            MessageEvent.fireError(Messages.GlobalActions_CantCreateKons,
                Messages.GlobalActions_DoSelectPatient);
            return;
        }
        // NOTE(review): this inner null check is redundant (actFall is known
        // to be null here); kept as in the original.
        if (actFall == null) {
            // fall back to the case of the patient's latest consultation
            Konsultation k = actPatient.getLetzteKons(false);
            if (k != null) {
                actFall = k.getFall();
                if (actFall == null) {
                    MessageEvent.fireError(Messages.GlobalActions_CantCreateKons,
                        Messages.GlobalActions_DoSelectCase);
                    return;
                }
            } else {
                // no consultation yet: use the first existing case or create a default one
                Fall[] faelle = actPatient.getFaelle();
                if ((faelle == null) || (faelle.length == 0)) {
                    actFall = actPatient.neuerFall(Fall.getDefaultCaseLabel(),
                        Fall.getDefaultCaseReason(), Fall.getDefaultCaseLaw());
                } else {
                    actFall = faelle[0];
                }
            }
        }
    } else {
        // a case is selected but may belong to a different patient
        if (!actFall.getPatient().equals(actPatient)) {
            if (actPatient != null) {
                Konsultation lk = actPatient.getLetzteKons(false);
                if (lk != null) {
                    actFall = lk.getFall();
                }
            } else {
                MessageEvent.fireError(Messages.GlobalActions_CantCreateKons,
                    Messages.GlobalActions_DoSelectCase);
                return;
            }
        }
    }
    if (!actFall.isOpen()) {
        MessageEvent.fireError(Messages.GlobalActions_casclosed,
            Messages.GlobalActions_caseclosedexplanation);
        return;
    }
    // ask before creating a second consultation on the same day
    Konsultation actLetzte = actFall.getLetzteBehandlung();
    if ((actLetzte != null)
        && actLetzte.getDatum().equals(new TimeTool().toString(TimeTool.DATE_GER))) {
        if (cod.openQuestion(Messages.GlobalActions_SecondForToday,
            Messages.GlobalActions_SecondForTodayQuestion) == false) {
            return;
        }
    }
    Konsultation n = actFall.neueKonsultation();
    n.setMandant(ElexisEventDispatcher.getSelectedMandator());
    if (initialText != null) {
        n.updateEintrag(initialText, false);
    }
    ElexisEventDispatcher.fireSelectionEvent(actFall);
    ElexisEventDispatcher.fireSelectionEvent(n);
}

/**
 * Resolve the user's configured default diagnosis, or null if none is
 * configured or it cannot be materialized.
 */
public static IDiagnose getDefaultDiagnose(){
    IDiagnose ret = null;
    String diagnoseId = CoreHub.userCfg.get(Preferences.USR_DEFDIAGNOSE, "");
    if (diagnoseId.length() > 1) {
        ret = (IDiagnose) CoreHub.poFactory.createFromString(diagnoseId);
    }
    return ret;
}
}
package com.ryanst.app.view;

import android.app.Activity;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.ColorFilter;
import android.graphics.Paint;
import android.graphics.drawable.Drawable;
import android.util.AttributeSet;
import android.view.Gravity;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.widget.LinearLayout;
import android.widget.ScrollView;
import android.widget.TextView;

import com.ryanst.app.R;

import java.util.ArrayList;
import java.util.List;

/**
 * A vertically scrolling "picker wheel" built on top of {@link ScrollView}.
 *
 * Items are rendered as stacked TextViews inside a LinearLayout; {@code offset}
 * empty padding items are added above and below the data so the selected row
 * sits in the middle. After a touch ends, a polling task detects when scrolling
 * has settled and snaps to the nearest row, then notifies the listener.
 */
public class WheelView extends ScrollView {

    public static final int DEFAULT_UN_SELECT_TEXT_COLOR = Color.parseColor("#bbbbbb");
    public static final int DEFAULT_SELECT_TEXT_COLOR = Color.parseColor("#505050");
    public static final int DEFAULT_TEXT_SIZE = 13;
    public static final int DEFAULT_TEXT_PADDING = 12;
    public static final int DEFAULT_FLING_SPEED = 3;
    public static final int DEFAULT_INDEX = 0;
    public static final int DEFAULT_OFF_SET = 1;

    private int selectedTextSize = DEFAULT_TEXT_SIZE;   // size of the highlighted row
    private int textSize = DEFAULT_TEXT_SIZE;           // size of the other rows
    private int textPadding = DEFAULT_TEXT_PADDING;
    private int textColor = DEFAULT_UN_SELECT_TEXT_COLOR;
    private int offset = DEFAULT_OFF_SET;               // padding rows above/below the selection
    private int selectTextColor = DEFAULT_SELECT_TEXT_COLOR;
    private int flingSpeed = DEFAULT_FLING_SPEED;       // fling damping factor (applied as /10)
    private int defaultIndex = DEFAULT_INDEX;           // initially selected data index

    public void setTextSize(int textSize) {
        // selected and unselected rows share the same size by default
        this.textSize = textSize;
        selectedTextSize = textSize;
    }

    public void setTextColor(int textColor) {
        this.textColor = textColor;
    }

    public void setTextPadding(int textPadding) {
        this.textPadding = textPadding;
    }

    public void setSelectTextColor(int selectTextColor) {
        this.selectTextColor = selectTextColor;
    }

    public void setFlingSpeed(int flingSpeed) {
        this.flingSpeed = flingSpeed;
    }

    public void setDefaultIndex(int defaultIndex) {
        this.defaultIndex = defaultIndex;
    }

    private int initialY;                // scroll Y captured when the settle-poll starts
    private Runnable scrollerTask;       // polls until scrolling stops, then snaps
    private int newCheck = 50;           // poll interval in ms
    private int itemHeight = 0;          // measured height of one row (0 until first row built)
    private int selectedIndex = 1;       // data index + offset
    private Context context;
    private LinearLayout rootView;       // vertical container holding the item TextViews
    private List<String> items;          // data plus `offset` empty rows at each end

    public WheelView(Context context) {
        this(context, null);
    }

    public WheelView(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public WheelView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        initStaticData(context, attrs);
        init(context);
    }

    /** Read styleable XML attributes into the corresponding fields. */
    private void initStaticData(Context context, AttributeSet attrs) {
        TypedArray typedArray = context.obtainStyledAttributes(attrs, R.styleable.wheel_view);
        setTextSize((int) typedArray.getDimension(R.styleable.wheel_view_text_size, DEFAULT_TEXT_SIZE));
        setTextColor(typedArray.getColor(R.styleable.wheel_view_un_select_color, DEFAULT_UN_SELECT_TEXT_COLOR));
        setSelectTextColor(typedArray.getColor(R.styleable.wheel_view_select_color, DEFAULT_SELECT_TEXT_COLOR));
        setOffset(typedArray.getInteger(R.styleable.wheel_view_offset, DEFAULT_OFF_SET));
        setDefaultIndex(typedArray.getInteger(R.styleable.wheel_view_default_index, DEFAULT_INDEX));
        setFlingSpeed(typedArray.getInteger(R.styleable.wheel_view_fling_speed, DEFAULT_FLING_SPEED));
        setTextPadding((int) typedArray.getDimension(R.styleable.wheel_view_text_padding, DEFAULT_TEXT_PADDING));
        typedArray.recycle();
    }

    /** Build the scroll container and the settle-detection task. */
    private void init(Context context) {
        this.context = context;
        this.setVerticalScrollBarEnabled(false);
        rootView = new LinearLayout(context);
        rootView.setOrientation(LinearLayout.VERTICAL);
        this.addView(rootView);
        initScrollTask();
    }

    public List<String> getItems() {
        return items;
    }

    /**
     * Replace the wheel's data. Adds {@code offset} empty rows at both ends so
     * the first/last real items can reach the selection slot. Call
     * {@link #refreshView()} afterwards to rebuild the child views.
     */
    public void setItems(List<String> list) {
        if (null == items) {
            items = new ArrayList<>();
        }
        items.clear();
        items.addAll(list);
        for (int i = 0; i < offset; i++) {
            items.add(0, "");
            items.add("");
        }
    }

    public int getOffset() {
        return offset;
    }

    public void setOffset(int offset) {
        this.offset = offset;
    }

    /** Rebuild all item views and scroll to the default index. */
    public void refreshView() {
        rootView.removeAllViews();
        for (String item : items) {
            rootView.addView(createTextView(item));
        }
        refreshItemTextView(0);
        setSeletion(defaultIndex);
    }

    /**
     * Create one row. The first row built also fixes {@code itemHeight} and
     * sizes the wheel to exactly {@code offset * 2 + 1} visible rows.
     */
    private TextView createTextView(String item) {
        TextView textView = new TextView(context);
        textView.setLayoutParams(new LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT));
        textView.setSingleLine(true);
        textView.setTextSize(textSize);
        textView.setText(item);
        textView.setGravity(Gravity.CENTER);
        int padding = dip2px(textPadding);
        textView.setPadding(padding, padding, padding, padding);
        if (itemHeight == 0) {
            itemHeight = getViewMeasuredHeight(textView);
            int displayItemCount = offset * 2 + 1;
            rootView.setLayoutParams(new LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, itemHeight * displayItemCount));
            LinearLayout.LayoutParams lp = (LinearLayout.LayoutParams) this.getLayoutParams();
            this.setLayoutParams(new LinearLayout.LayoutParams(lp.width, itemHeight * displayItemCount));
        }
        return textView;
    }

    @Override
    protected void onScrollChanged(int l, int t, int oldl, int oldt) {
        super.onScrollChanged(l, t, oldl, oldt);
        refreshItemTextView(t);
        if (t > oldt) {
            scrollDirection = SCROLL_DIRECTION_DOWN;
        } else {
            scrollDirection = SCROLL_DIRECTION_UP;
        }
    }

    /**
     * Re-color the rows for the given scroll position: the row nearest the
     * selection slot gets the selected size/color, all others the normal ones.
     * NOTE(review): divides by itemHeight — assumes at least one row has been
     * built (itemHeight != 0) before any scroll callback fires.
     */
    private void refreshItemTextView(int y) {
        int index = y / itemHeight + offset;
        int overDistance = y % itemHeight;
        // more than half a row past -> the next row is the closest
        if (overDistance > itemHeight / 2) {
            index++;
        }
        int childSize = rootView.getChildCount();
        for (int i = 0; i < childSize; i++) {
            TextView itemView = (TextView) rootView.getChildAt(i);
            if (itemView == null) {
                return;
            }
            if (index == i) {
                itemView.setTextSize(selectedTextSize);
                itemView.setTextColor(selectTextColor);
            } else {
                itemView.setTextColor(textColor);
            }
        }
    }

    /** Notify the listener of the current selection (index includes offset). */
    private void onSeletedCallBack() {
        if (null != onWheelViewListener) {
            onWheelViewListener.onSelected(selectedIndex, items.get(selectedIndex));
        }
    }

    /** Select by data index (listener index = data index + offset). */
    public void setSeletion(final int index) {
        selectedIndex = index + offset;
        this.post(new Runnable() {
            @Override
            public void run() {
                WheelView.this.smoothScrollTo(0, index * itemHeight);
            }
        });
    }

    public String getSeletedItem() {
        return items.get(selectedIndex);
    }

    public int getSeletedIndex() {
        return selectedIndex - offset;
    }

    @Override
    public void fling(int velocityY) {
        // damp the native ScrollView fling by flingSpeed/10
        super.fling((int) ((float) velocityY * flingSpeed / 10));
    }

    @Override
    public boolean onTouchEvent(MotionEvent ev) {
        if (ev.getAction() == MotionEvent.ACTION_UP) {
            // finger lifted: start polling for the scroll to settle
            startScrollerTask();
        }
        return super.onTouchEvent(ev);
    }

    public void startScrollerTask() {
        initialY = getScrollY();
        this.postDelayed(scrollerTask, newCheck);
    }

    /** Callback base class; override {@link #onSelected} to observe selections. */
    public static class OnWheelViewListener {
        public void onSelected(int selectedIndex, String item) {
        }
    }

    /**
     * Build the settle-detection task: if the scroll position is unchanged
     * since the last poll, snap to the nearest row and fire the callback;
     * otherwise record the new position and poll again.
     */
    private void initScrollTask() {
        scrollerTask = new Runnable() {
            public void run() {
                int newY = getScrollY();
                if (initialY - newY == 0) { // stopped
                    final int overDistance = initialY % itemHeight;
                    final int itemIndex = initialY / itemHeight;
                    if (overDistance == 0) {
                        // already aligned on a row boundary
                        selectedIndex = itemIndex + offset;
                        onSeletedCallBack();
                    } else if (overDistance > itemHeight / 2) {
                        // closer to the next row: snap forward
                        WheelView.this.post(new Runnable() {
                            @Override
                            public void run() {
                                WheelView.this.smoothScrollTo(0, initialY - overDistance + itemHeight);
                                selectedIndex = itemIndex + offset + 1;
                                onSeletedCallBack();
                            }
                        });
                    } else {
                        // closer to the current row: snap back
                        WheelView.this.post(new Runnable() {
                            @Override
                            public void run() {
                                WheelView.this.smoothScrollTo(0, initialY - overDistance);
                                selectedIndex = itemIndex + offset;
                                onSeletedCallBack();
                            }
                        });
                    }
                } else {
                    // still moving: keep polling
                    initialY = getScrollY();
                    WheelView.this.postDelayed(scrollerTask, newCheck);
                }
            }
        };
    }

    private OnWheelViewListener onWheelViewListener;

    public OnWheelViewListener getOnWheelViewListener() {
        return onWheelViewListener;
    }

    public void setOnWheelViewListener(OnWheelViewListener onWheelViewListener) {
        this.onWheelViewListener = onWheelViewListener;
    }

    /** Convert dips to pixels using the display density. */
    private int dip2px(float dpValue) {
        final float scale = context.getResources().getDisplayMetrics().density;
        return (int) (dpValue * scale + 0.5f);
    }

    /** Measure a detached view to obtain its wrap-content height. */
    private int getViewMeasuredHeight(View view) {
        int width = MeasureSpec.makeMeasureSpec(0, MeasureSpec.UNSPECIFIED);
        int expandSpec = MeasureSpec.makeMeasureSpec(Integer.MAX_VALUE >> 2, MeasureSpec.AT_MOST);
        view.measure(width, expandSpec);
        return view.getMeasuredHeight();
    }

    int[] selectedAreaBorder; // cached top/bottom Y of the selection slot

    private int[] obtainSelectedAreaBorder() {
        if (null == selectedAreaBorder) {
            selectedAreaBorder = new int[2];
            selectedAreaBorder[0] = itemHeight * offset;
            selectedAreaBorder[1] = itemHeight * (offset + 1);
        }
        return selectedAreaBorder;
    }

    private int scrollDirection = -1;
    private static final int SCROLL_DIRECTION_UP = 0;
    private static final int SCROLL_DIRECTION_DOWN = 1;

    Paint paint;
    int viewWidth;

    /**
     * Ignores the supplied drawable and installs one that draws the two
     * horizontal divider lines around the selection slot.
     * NOTE(review): getOpacity() returns 0 (= PixelFormat.UNKNOWN) — confirm
     * whether PixelFormat.TRANSLUCENT was intended.
     */
    @Override
    public void setBackgroundDrawable(Drawable background) {
        if (viewWidth == 0) {
            viewWidth = ((Activity) context).getWindowManager().getDefaultDisplay().getWidth();
        }
        if (null == paint) {
            paint = new Paint();
            paint.setColor(Color.parseColor("#c6c6c6"));
            paint.setStrokeWidth(dip2px(1f));
        }
        background = new Drawable() {
            @Override
            public void draw(Canvas canvas) {
                canvas.drawLine(viewWidth * 0, obtainSelectedAreaBorder()[0], viewWidth, obtainSelectedAreaBorder()[0], paint);
                canvas.drawLine(viewWidth * 0, obtainSelectedAreaBorder()[1], viewWidth, obtainSelectedAreaBorder()[1], paint);
            }

            @Override
            public void setAlpha(int alpha) {
            }

            @Override
            public void setColorFilter(ColorFilter cf) {
            }

            @Override
            public int getOpacity() {
                return 0;
            }
        };
        super.setBackgroundDrawable(background);
    }

    @Override
    protected void onSizeChanged(int w, int h, int oldw, int oldh) {
        super.onSizeChanged(w, h, oldw, oldh);
        viewWidth = w;
        setBackgroundDrawable(null);
    }
}
package com.frank.gangofsuits.desktop;

import com.badlogic.gdx.backends.lwjgl.LwjglApplication;
import com.badlogic.gdx.backends.lwjgl.LwjglApplicationConfiguration;
import com.frank.gangofsuits.GangOfSuits;

/** Desktop (LWJGL) entry point for the GangOfSuits game. */
public class DesktopLauncher {
	public static void main (String[] arg) {
		LwjglApplicationConfiguration config = new LwjglApplicationConfiguration();
		// BUG FIX: configure the window BEFORE constructing LwjglApplication.
		// The constructor reads the configuration immediately, so fields that
		// were previously assigned after construction were silently ignored.
		config.width = 800;
		config.height = 600;
		config.fullscreen = false;
		new LwjglApplication(new GangOfSuits(), config);
	}
}
package mindpop.learnpop; import android.app.Activity; import android.app.ProgressDialog; import android.os.AsyncTask; import android.support.v4.app.FragmentActivity; import android.support.v7.app.ActionBarActivity; import android.support.v7.widget.RecyclerView; import android.util.Log; import android.content.Context; import org.apache.http.NameValuePair; import org.apache.http.message.BasicNameValuePair; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; public class LoadResource extends AsyncTask<String, String,JSONObject> { private Context _context; private ProgressDialog pDialog; JSONParser jsonParser = new JSONParser(); private ArrayList<Resource> resourcesArrayList = new ArrayList<Resource>(); private static String urlResources = "http://austinartmap.com/CreativeTeach/PHP/getResourcesList_v2.php"; private String [] subjects; private String grade; private JSONArray resources; private final String TAG_SUCCESS = "success"; private final String TAG_RES = "resources"; private int type; // 0 = articles, 1 = videos, 2 = other private RecyclerView mRecyclerView; private FragmentActivity activity; public LoadResource(FragmentActivity ac, Context context, RecyclerView viewer, String [] subjects, String grade, int type){ this.activity = ac; this._context = context; this.subjects = subjects; this.grade = grade; this.type = type; this.mRecyclerView = viewer; } private String getResTypeQuery(){ String query = ""; switch(type){ case 0: query = "Strategy"; break; case 1: query = "Video"; break; case 2: query = "OtherR"; break; } return query; } @Override protected void onPreExecute(){ super.onPreExecute(); pDialog = new ProgressDialog(_context); pDialog.setMessage("Loading resources..."); pDialog.setIndeterminate(false); pDialog.setCancelable(false); pDialog.show(); } protected JSONObject doInBackground(String... 
args){ //parameters ArrayList<NameValuePair> params = new ArrayList<NameValuePair>(); //params.add(new BasicNameValuePair("GradeLevel[]", grade)); for(int i = 0; i < subjects.length; i++){ params.add(new BasicNameValuePair("Subject[]", subjects[i])); } JSONObject json = jsonParser.makeHttpRequest(urlResources, "GET", params); Log.d("Resources: ", json.toString()); try{ int success = json.getInt(TAG_SUCCESS); if(success == 1){ resources = json.getJSONArray(TAG_RES); Log.d("JSONArray in LoadResource", resources.toString()); for (int i = 0; i < resources.length(); i++){ JSONObject c = resources.getJSONObject(i); Resource res = new Resource(); res.setTitle(c.getString("ResName").trim()); res.setResourceId(c.getInt("ResID")); res.setSubject(c.getString("Subject").trim()); res.setType(c.getString("ResType").trim()); res.setUpVote(c.getInt("Likes")); res.setDownVote(c.getInt("Dislikes")); res.setAuthor(c.getString("Author").trim()); res.setImageURL("ImageURL"); //date format //2014-02-28 String dateString =c.getString("PublishingDate"); try{ SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd"); Date date = formatter.parse(dateString); res.setPublishDate(date); //change to date }catch(ParseException e){ e.printStackTrace(); } res.setUrl(c.getString("ResURL")); res.setSummary(c.getString("Summary")); // Log.d("Resource", res.getPublishDate().toString()); if(res.getType().equals(getResTypeQuery())) //add if resource type is the same resourcesArrayList.add(res); } }else{ //if there are no resources } }catch(JSONException e){ e.printStackTrace(); } return null; } protected void onPostExecute(JSONObject result) { pDialog.dismiss(); // delegate.processFinish(result); ResourceAdapter adapter = new ResourceAdapter(activity, resourcesArrayList); mRecyclerView.setAdapter(adapter); } }
package org.wikipedia.util; import android.app.Activity; import android.content.Context; import android.content.Intent; import android.content.pm.ResolveInfo; import android.graphics.PorterDuff; import android.net.ConnectivityManager; import android.net.NetworkInfo; import android.net.Uri; import android.os.Build; import android.provider.Settings; import android.view.KeyCharacterMap; import android.view.KeyEvent; import android.view.View; import android.view.WindowManager; import android.view.accessibility.AccessibilityManager; import android.view.inputmethod.InputMethodManager; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.appcompat.widget.Toolbar; import org.wikipedia.R; import org.wikipedia.WikipediaApp; import org.wikipedia.util.log.L; import java.util.List; import static android.content.Context.ACCESSIBILITY_SERVICE; public final class DeviceUtil { /** * Utility method to detect whether an Email app is installed, * for conditionally enabling/disabling email links. * @param context Context of the calling app. * @return True if an Email app exists, false otherwise. */ public static boolean mailAppExists(Context context) { Intent intent = new Intent(); intent.setAction(Intent.ACTION_SENDTO); intent.setData(Uri.parse("mailto:test@wikimedia.org")); List<ResolveInfo> resInfo = context.getPackageManager().queryIntentActivities(intent, 0); return resInfo.size() > 0; } /** * Attempt to display the Android keyboard. * * FIXME: This should not need to exist. * Android should always show the keyboard at the appropriate time. This method allows you to display the keyboard * when Android fails to do so. * * @param view The currently focused view that will receive the keyboard input */ public static void showSoftKeyboard(View view) { InputMethodManager keyboard = (InputMethodManager)view.getContext().getSystemService(Context.INPUT_METHOD_SERVICE); keyboard.toggleSoftInput(0, 0); } /** * Attempt to hide the Android Keyboard. 
* * FIXME: This should not need to exist. * I do not know why Android does not handle this automatically. * * @param activity The current activity */ public static void hideSoftKeyboard(Activity activity) { hideSoftKeyboard(activity.getWindow().getDecorView()); } public static void hideSoftKeyboard(View view) { InputMethodManager keyboard = (InputMethodManager)view.getContext().getSystemService(Context.INPUT_METHOD_SERVICE); // Not using getCurrentFocus as that sometimes is null, but the keyboard is still up. keyboard.hideSoftInputFromWindow(view.getWindowToken(), 0); } public static void setWindowSoftInputModeResizable(Activity activity) { activity.getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_ADJUST_RESIZE); } public static void setLightSystemUiVisibility(@NonNull Activity activity) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { if (!WikipediaApp.getInstance().getCurrentTheme().isDark()) { // this make the system recognizes the status bar is light and will make status bar icons become visible activity.getWindow().getDecorView().setSystemUiVisibility(View.SYSTEM_UI_FLAG_LIGHT_STATUS_BAR); } else { resetSystemUiVisibility(activity); } } } public static void resetSystemUiVisibility(@NonNull Activity activity) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { activity.getWindow().getDecorView().setSystemUiVisibility(0); } } public static void updateStatusBarTheme(@NonNull Activity activity, @Nullable Toolbar toolbar, boolean reset) { if (reset) { resetSystemUiVisibility(activity); } else { setLightSystemUiVisibility(activity); } if (toolbar != null) { toolbar.getNavigationIcon().setColorFilter(reset ? 
activity.getResources().getColor(android.R.color.white) : ResourceUtil.getThemedColor(activity, R.attr.main_toolbar_icon_color), PorterDuff.Mode.SRC_IN); } } public static boolean isLocationServiceEnabled(@NonNull Context context) { int locationMode = Settings.Secure.LOCATION_MODE_OFF; try { locationMode = Settings.Secure.getInt(context.getContentResolver(), Settings.Secure.LOCATION_MODE); } catch (Settings.SettingNotFoundException e) { L.d("Location service setting not found.", e); } return locationMode != Settings.Secure.LOCATION_MODE_OFF; } public static boolean isNavigationBarShowing() { // TODO: revisit this if there's no more navigation bar by default. return KeyCharacterMap.deviceHasKey(KeyEvent.KEYCODE_BACK) && KeyCharacterMap.deviceHasKey(KeyEvent.KEYCODE_HOME); } private static ConnectivityManager getConnectivityManager() { return (ConnectivityManager) WikipediaApp.getInstance().getSystemService(Context.CONNECTIVITY_SERVICE); } public static boolean isOnWiFi() { NetworkInfo info = getConnectivityManager().getNetworkInfo(ConnectivityManager.TYPE_WIFI); return info != null && info.isConnected(); } public static boolean isAccessibilityEnabled() { AccessibilityManager am = (AccessibilityManager) WikipediaApp.getInstance().getSystemService(ACCESSIBILITY_SERVICE); // TODO: add more logic if other accessibility tools have different settings. return am != null && am.isEnabled() && am.isTouchExplorationEnabled(); } private DeviceUtil() { } }
package org.wikipedia.util; import android.app.Activity; import android.content.Context; import android.content.Intent; import android.content.pm.ResolveInfo; import android.graphics.Color; import android.graphics.PorterDuff; import android.net.ConnectivityManager; import android.net.NetworkInfo; import android.net.Uri; import android.os.Build; import android.provider.Settings; import android.view.View; import android.view.WindowManager; import android.view.inputmethod.InputMethodManager; import androidx.annotation.NonNull; import androidx.appcompat.widget.Toolbar; import org.wikipedia.R; import org.wikipedia.WikipediaApp; import org.wikipedia.util.log.L; import java.util.List; public final class DeviceUtil { /** * Utility method to detect whether an Email app is installed, * for conditionally enabling/disabling email links. * @param context Context of the calling app. * @return True if an Email app exists, false otherwise. */ public static boolean mailAppExists(Context context) { Intent intent = new Intent(); intent.setAction(Intent.ACTION_SENDTO); intent.setData(Uri.parse("mailto:test@wikimedia.org")); List<ResolveInfo> resInfo = context.getPackageManager().queryIntentActivities(intent, 0); return resInfo.size() > 0; } /** * Attempt to display the Android keyboard. * * FIXME: This should not need to exist. * Android should always show the keyboard at the appropriate time. This method allows you to display the keyboard * when Android fails to do so. * * @param view The currently focused view that will receive the keyboard input */ public static void showSoftKeyboard(View view) { InputMethodManager keyboard = (InputMethodManager)view.getContext().getSystemService(Context.INPUT_METHOD_SERVICE); keyboard.toggleSoftInput(0, 0); } /** * Attempt to hide the Android Keyboard. * * FIXME: This should not need to exist. * I do not know why Android does not handle this automatically. 
* * @param activity The current activity */ public static void hideSoftKeyboard(Activity activity) { hideSoftKeyboard(activity.getWindow().getDecorView()); } public static void hideSoftKeyboard(View view) { InputMethodManager keyboard = (InputMethodManager)view.getContext().getSystemService(Context.INPUT_METHOD_SERVICE); // Not using getCurrentFocus as that sometimes is null, but the keyboard is still up. keyboard.hideSoftInputFromWindow(view.getWindowToken(), 0); } public static void setWindowSoftInputModeResizable(Activity activity) { activity.getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_ADJUST_RESIZE); } public static void setLightSystemUiVisibility(@NonNull Activity activity) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { if (!WikipediaApp.getInstance().getCurrentTheme().isDark()) { // this make the system recognizes the status bar is light and will make status bar icons become visible if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) { activity.getWindow().setNavigationBarColor(ResourceUtil.getThemedColor(activity, android.R.attr.windowBackground)); activity.getWindow().getDecorView().setSystemUiVisibility(View.SYSTEM_UI_FLAG_LIGHT_STATUS_BAR | View.SYSTEM_UI_FLAG_LIGHT_NAVIGATION_BAR); } else { activity.getWindow().getDecorView().setSystemUiVisibility(View.SYSTEM_UI_FLAG_LIGHT_STATUS_BAR); } } else { resetSystemUiVisibility(activity); } } } private static void resetSystemUiVisibility(@NonNull Activity activity) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { activity.getWindow().getDecorView().setSystemUiVisibility(0); activity.getWindow().setNavigationBarColor(Color.BLACK); } } public static void updateStatusBarTheme(@NonNull Activity activity, @NonNull Toolbar toolbar, boolean reset) { if (reset) { resetSystemUiVisibility(activity); } else { setLightSystemUiVisibility(activity); } toolbar.getNavigationIcon().setColorFilter(reset ? 
activity.getResources().getColor(android.R.color.white) : ResourceUtil.getThemedColor(activity, R.attr.main_toolbar_icon_color), PorterDuff.Mode.SRC_IN); } public static boolean isLocationServiceEnabled(@NonNull Context context) { int locationMode = Settings.Secure.LOCATION_MODE_OFF; try { locationMode = Settings.Secure.getInt(context.getContentResolver(), Settings.Secure.LOCATION_MODE); } catch (Settings.SettingNotFoundException e) { L.d("Location service setting not found.", e); } return locationMode != Settings.Secure.LOCATION_MODE_OFF; } private static ConnectivityManager getConnectivityManager() { return (ConnectivityManager) WikipediaApp.getInstance().getSystemService(Context.CONNECTIVITY_SERVICE); } public static boolean isOnWiFi() { NetworkInfo info = getConnectivityManager().getNetworkInfo(ConnectivityManager.TYPE_WIFI); return info != null && info.isConnected(); } private DeviceUtil() { } }
package org.wikipedia.util;

import android.os.Build;
import android.text.Html;
import android.text.Spannable;
import android.text.SpannableStringBuilder;
import android.text.Spanned;
import android.text.SpannedString;
import android.text.TextUtils;
import android.text.style.TypefaceSpan;
import android.widget.EditText;
import android.widget.TextView;

import androidx.annotation.IntRange;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;

import com.google.gson.Gson;

import org.json.JSONArray;

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.text.Collator;
import java.text.Normalizer;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

/** Static string-manipulation helpers: CSV, hashing, HTML, highlighting, JSON. */
public final class StringUtil {
    private static final String CSV_DELIMITER = ",";

    /** Joins the list into a single comma-delimited string. */
    @NonNull
    public static String listToCsv(@NonNull List<String> list) {
        return TextUtils.join(CSV_DELIMITER, list);
    }

    /** @return Nonnull immutable list parsed from a comma-delimited string. */
    @NonNull
    public static List<String> csvToList(@NonNull String csv) {
        return delimiterStringToList(csv, CSV_DELIMITER);
    }

    /** @return Nonnull immutable list split on the given delimiter. */
    @NonNull
    public static List<String> delimiterStringToList(@NonNull String delimitedString,
                                                     @NonNull String delimiter) {
        return Arrays.asList(TextUtils.split(delimitedString, delimiter));
    }

    /**
     * Creates an MD5 hash of the provided string and returns its ASCII representation.
     * @param s String to hash
     * @return ASCII MD5 representation of the string passed in
     */
    @NonNull
    public static String md5string(@NonNull String s) {
        StringBuilder hexStr = new StringBuilder();
        try {
            MessageDigest digest = java.security.MessageDigest.getInstance("MD5");
            digest.update(s.getBytes(StandardCharsets.UTF_8));
            byte[] messageDigest = digest.digest();

            final int maxByteVal = 0xFF;
            for (byte b : messageDigest) {
                hexStr.append(Integer.toHexString(maxByteVal & b));
            }
        } catch (NoSuchAlgorithmException e) {
            // MD5 is guaranteed to be present on every Java platform.
            throw new RuntimeException(e);
        }
        return hexStr.toString();
    }

    /**
     * Remove leading and trailing whitespace from a CharSequence. This is useful after using
     * the fromHtml() function to convert HTML to a CharSequence.
     * @param str CharSequence to be trimmed.
     * @return The trimmed CharSequence.
     */
    @NonNull
    public static CharSequence strip(@Nullable CharSequence str) {
        if (str == null || str.length() == 0) {
            return "";
        }
        int len = str.length();
        int start = 0;
        int end = len - 1;
        while (start < len && Character.isWhitespace(str.charAt(start))) {
            start++;
        }
        while (end > 0 && Character.isWhitespace(str.charAt(end))) {
            // FIX: restored the decrement that was missing (the loop body was empty,
            // which made this an infinite loop whenever the input had trailing whitespace).
            end--;
        }
        if (end > start) {
            return str.subSequence(start, end + 1);
        }
        return "";
    }

    /** @return the int formatted as a zero-padded hex string with an "x" prefix. */
    @NonNull
    public static String intToHexStr(int i) {
        return String.format("x%08x", i);
    }

    public static String addUnderscores(@NonNull String text) {
        return text.replace(" ", "_");
    }

    public static String removeUnderscores(@NonNull String text) {
        return text.replace("_", " ");
    }

    /** @return true if the text contains a "#" section anchor. */
    public static boolean hasSectionAnchor(@NonNull String text) {
        // FIX: restored the truncated "#" literal.
        return text.contains("#");
    }

    /** @return the text with any "#section" suffix removed. */
    public static String removeSectionAnchor(String text) {
        // FIX: restored the truncated "#" literal, and guard against a missing anchor
        // (previously substring(0, -1) would have thrown StringIndexOutOfBoundsException).
        int anchorIndex = text.indexOf("#");
        return anchorIndex >= 0 ? text.substring(0, anchorIndex) : text;
    }

    /** @return the text with any leading "Namespace:" prefix removed. */
    public static String removeNamespace(@NonNull String text) {
        // indexOf returns -1 when there is no colon, so the condition holds and
        // substring(0) returns the text unchanged.
        if (text.length() > text.indexOf(":")) {
            return text.substring(text.indexOf(":") + 1);
        } else {
            return text;
        }
    }

    /** @return the text with all HTML tags stripped. */
    public static String removeHTMLTags(@NonNull String text) {
        return fromHtml(text).toString();
    }

    /** Strips citation markers, IPA remnants, and excess whitespace from selected text. */
    public static String sanitizeText(@NonNull String selectedText) {
        return selectedText.replaceAll("\\[\\d+\\]", "")
                .replaceAll("\\s*/[^/]+/;?\\s*", "")
                .replaceAll("\\(\\s*;\\s*", "\\(") // (; -> ( hacky way for IPA remnants
                .replaceAll("\\s{2,}", " ")
                .trim();
    }

    // Compare two strings based on their normalized form, using the Unicode Normalization Form C.
    // This should be used when comparing or verifying strings that will be exchanged between
    // different platforms (iOS, desktop, etc) that may encode strings using inconsistent
    // composition, especially for accents, diacritics, etc.
    public static boolean normalizedEquals(@Nullable String str1, @Nullable String str2) {
        if (str1 == null || str2 == null) {
            return (str1 == null && str2 == null);
        }
        return Normalizer.normalize(str1, Normalizer.Form.NFC)
                .equals(Normalizer.normalize(str2, Normalizer.Form.NFC));
    }

    /**
     * @param source String that may contain HTML tags.
     * @return returned Spanned string that may contain spans parsed from the HTML source.
     */
    @NonNull
    public static Spanned fromHtml(@Nullable String source) {
        if (source == null) {
            return new SpannedString("");
        }
        if (!source.contains("<") && !source.contains("&")) {
            // If the string doesn't contain any hints of HTML entities, then skip the expensive
            // processing that fromHtml() performs.
            return new SpannedString(source);
        }
        source = source.replaceAll("&#8206;", "\u200E")
                .replaceAll("&#8207;", "\u200F")
                .replaceAll("&amp;", "&");
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
            return Html.fromHtml(source, Html.FROM_HTML_MODE_LEGACY);
        } else {
            //noinspection deprecation
            return Html.fromHtml(source);
        }
    }

    /**
     * Applies a medium-weight typeface span to the first (case-insensitive) occurrence
     * of each of the given substrings within the text.
     */
    @NonNull
    public static SpannableStringBuilder boldenSubstrings(@NonNull String text,
                                                          @NonNull List<String> subStrings) {
        SpannableStringBuilder sb = new SpannableStringBuilder(text);
        for (String subString : subStrings) {
            int index = text.toLowerCase().indexOf(subString.toLowerCase());
            if (index != -1) {
                sb.setSpan(new TypefaceSpan("sans-serif-medium"), index,
                        index + subString.length(), Spannable.SPAN_INCLUSIVE_INCLUSIVE);
            }
        }
        return sb;
    }

    /**
     * Selects (and long-presses) the last word of highlightText within the EditText,
     * locating it by walking the words of highlightText forward through parentText.
     */
    public static void highlightEditText(@NonNull EditText editText, @NonNull String parentText,
                                         @NonNull String highlightText) {
        String[] words = highlightText.split("\\s+");
        int pos = 0;
        for (String word : words) {
            pos = parentText.indexOf(word, pos);
            if (pos == -1) {
                break;
            }
        }
        if (pos == -1) {
            // Fall back to the first occurrence of the final word anywhere in the text.
            pos = parentText.indexOf(words[words.length - 1]);
        }
        if (pos >= 0) {
            // TODO: Programmatic selection doesn't seem to work with RTL content...
            editText.setSelection(pos, pos + words[words.length - 1].length());
            editText.performLongClick();
        }
    }

    /**
     * Renders parentText into the TextView with the first lenient match of searchQuery
     * wrapped in &lt;strong&gt; tags; falls back to plain text when there is no match.
     */
    public static void boldenKeywordText(@NonNull TextView textView, @NonNull String parentText,
                                         @Nullable String searchQuery) {
        int startIndex = indexOf(parentText, searchQuery);
        if (startIndex >= 0) {
            parentText = parentText.substring(0, startIndex)
                    + "<strong>"
                    + parentText.substring(startIndex, startIndex + searchQuery.length())
                    + "</strong>"
                    + parentText.substring(startIndex + searchQuery.length());
            textView.setText(StringUtil.fromHtml(parentText));
        } else {
            textView.setText(parentText);
        }
    }

    // case insensitive indexOf, also more lenient with similar chars, like chars with accents
    private static int indexOf(@NonNull String original, @Nullable String search) {
        if (!TextUtils.isEmpty(search)) {
            Collator collator = Collator.getInstance();
            collator.setStrength(Collator.PRIMARY);
            for (int i = 0; i <= original.length() - search.length(); i++) {
                if (collator.equals(search, original.substring(i, i + search.length()))) {
                    return i;
                }
            }
        }
        return -1;
    }

    /** @return the 1-based number rendered in bijective base-26 letters (1=A, 26=Z, 27=AA). */
    @NonNull
    public static String getBase26String(@IntRange(from = 1) int number) {
        final int base = 26;
        String str = "";
        while (--number >= 0) {
            str = (char) ('A' + number % base) + str;
            number /= base;
        }
        return str;
    }

    @NonNull
    public static String listToJsonArrayString(@NonNull List<String> list) {
        return new JSONArray(list).toString();
    }

    public static String stringToListMapToJSONString(@Nullable Map<String, List<Integer>> map) {
        return new Gson().toJson(map);
    }

    public static String listToJSONString(@Nullable List<Integer> list) {
        return new Gson().toJson(list);
    }

    private StringUtil() {
    }
}
package main.habitivity; import android.app.Activity; import android.test.ActivityInstrumentationTestCase2; import android.util.Log; import android.widget.EditText; import com.robotium.solo.Solo; public class ActivityTests extends ActivityInstrumentationTestCase2<LoginUser>{ private Solo solo; public ActivityTests() { super(main.habitivity.LoginUser.class); } public void setUp() throws Exception { solo = new Solo(getInstrumentation(), getActivity()); Log.d("SETUP", "setUp()"); } public void testStart() throws Exception { Activity activity = getActivity(); } public void testLogin() { solo.assertCurrentActivity("wrong activity", LoginUser.class); solo.enterText((EditText) solo.getView(R.id.userName), "Test User"); solo.clickOnButton("OKAY"); assertTrue(solo.waitForText("Test User")); solo.clickOnActionBarHomeButton(); solo.clickOnActionBarItem(1); solo.goBack(); solo.clickOnActionBarItem(2); solo.goBack(); solo.clickOnActionBarItem(3); solo.goBack(); solo.goBack(); solo.clickOnActionBarHomeButton(); solo.clickOnActionBarItem(0); solo.enterText((EditText) solo.getView(R.id.habitInput), "Test Habit"); solo.enterText((EditText) solo.getView(R.id.addComment), "For Testing Purposes Only"); solo.clickOnButton("SUN"); solo.clickOnButton("MON"); solo.clickOnButton("ADD HABIT"); solo.goBack(); assertTrue(solo.waitForText("Test User")); solo.assertCurrentActivity("wrong activity", HabitivityMain.class); } //add activity tests here @Override public void tearDown() throws Exception { solo.finishOpenedActivities(); } }
package com.mindoo.domino.jna.gc;

import java.io.PrintWriter;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.Callable;

import com.mindoo.domino.jna.errors.NotesError;
import com.mindoo.domino.jna.internal.NotesNativeAPI;
import com.mindoo.domino.jna.utils.PlatformUtils;

/**
 * Utility class to simplify memory management with Notes handles. The class tracks
 * handle creation and disposal.<br>
 * By using {@link #runWithAutoGC(Callable)}, the
 * collected handles are automatically disposed when code execution is done.<br>
 * <br>
 * An alternative approach is to use a try-with-resources block on the {@link DominoGCContext}
 * returned by {@link #initThread()}, e.g.<br>
 * <br>
 * <code>
 * try (DominoGCContext ctx = initThread()) {<br>
 * &nbsp;&nbsp;&nbsp;// use Domino JNA classes, e.g.<br>
 * &nbsp;&nbsp;&nbsp;NotesDatabase db = new NotesDatabase("", "names.nsf", "");<br>
 * <br>
 * } catch (Exception e) {<br>
 * &nbsp;&nbsp;&nbsp;log(Level.SEVERE, "Error accessing Domino data", e);<br>
 * }<br>
 * </code>
 *
 * @author Karsten Lehmann
 */
public class NotesGC {
	// One GC context per thread; nested initThread() calls chain via parent contexts.
	private static ThreadLocal<DominoGCContext> threadContext = new ThreadLocal<>();

	/** Returns the thread's context or fails fast when auto-GC is not active. */
	private static DominoGCContext getThreadContext() {
		DominoGCContext ctx = threadContext.get();
		if (ctx==null) {
			throw new IllegalStateException("Thread is not enabled for auto GC. Either run your code via NotesGC.runWithAutoGC(Callable) or via try-with-resources on the object returned by NotesGC.initThread().");
		}
		return ctx;
	}

	/**
	 * Method to enable GC debug logging for the active thread's {@link DominoGCContext}
	 *
	 * @param enabled true if enabled
	 */
	public static void setDebugLoggingEnabled(boolean enabled) {
		DominoGCContext ctx = threadContext.get();
		if (ctx!=null) {
			ctx.setWriteDebugMessages(enabled);
		}
	}

	/**
	 * Method to check if GC debug logging is enabled for the active thread's
	 * {@link DominoGCContext}.
	 *
	 * @return true if enabled
	 */
	public static boolean isDebugLoggingEnabled() {
		DominoGCContext ctx = threadContext.get();
		if (ctx==null) {
			return false;
		}
		else {
			return ctx.isWriteDebugMessages();
		}
	}

	/**
	 * Method to write a stacktrace to disk right before each native method invocation. Consumes
	 * much performance and is therefore disabled by default and just here to track down
	 * handle panics.<br>
	 * Stacktraces are written as files domino-jna-stack-&lt;threadid&gt;.txt in the temp directory.<br>
	 * <br>
	 * Logging is enabled for the active thread's {@link DominoGCContext}.
	 *
	 * @param log true to log
	 */
	public static void setLogCrashingThreadStacktrace(boolean log) {
		DominoGCContext ctx = threadContext.get();
		if (ctx!=null) {
			ctx.setLogCrashingThreadStackTrace(log);
		}
	}

	/**
	 * Checks whether stacktraces for each native method invocation should be written to disk.
	 * Consumes much performance and is therefore disabled by default and just here to track down
	 * handle panics.<br>
	 * Stacktraces are written as files domino-jna-stack-&lt;threadid&gt;.txt in the temp directory.<br>
	 * <br>
	 * Logging is enabled for the active thread's {@link DominoGCContext}.
	 *
	 * @return true to log
	 */
	public static boolean isLogCrashingThreadStacktrace() {
		DominoGCContext ctx = threadContext.get();
		if (ctx==null) {
			return false;
		}
		else {
			return ctx.isLogCrashingThreadStackTrace();
		}
	}

	/**
	 * Method to get the current count of open Domino object handles
	 *
	 * @return handle count
	 */
	public static int getNumberOfOpenObjectHandles() {
		DominoGCContext ctx = getThreadContext();
		if (PlatformUtils.is64Bit()) {
			return ctx.getOpenHandlesDominoObjects64().size();
		}
		else {
			return ctx.getOpenHandlesDominoObjects32().size();
		}
	}

	/**
	 * Method to get the current count of open Domino memory handles
	 *
	 * @return handle count
	 */
	public static int getNumberOfOpenMemoryHandles() {
		DominoGCContext ctx = getThreadContext();
		if (PlatformUtils.is64Bit()) {
			return ctx.getOpenHandlesMemory64().size();
		}
		else {
			return ctx.getOpenHandlesMemory32().size();
		}
	}

	/** Hash key combining an object's class with its 64-bit native handle. */
	public static class HashKey64 {
		private Class<?> m_clazz;
		private long m_handle;

		public HashKey64(Class<?> clazz, long handle) {
			m_clazz = clazz;
			m_handle = handle;
		}

		public long getHandle() {
			return m_handle;
		}

		public Class<?> getType() {
			return m_clazz;
		}

		@Override
		public int hashCode() {
			final int prime = 31;
			int result = 1;
			result = prime * result + ((m_clazz == null) ? 0 : m_clazz.hashCode());
			result = prime * result + (int) (m_handle ^ (m_handle >>> 32));
			return result;
		}

		@Override
		public boolean equals(Object obj) {
			if (this == obj)
				return true;
			if (obj == null)
				return false;
			if (getClass() != obj.getClass())
				return false;
			HashKey64 other = (HashKey64) obj;
			if (m_clazz == null) {
				if (other.m_clazz != null)
					return false;
			} else if (!m_clazz.equals(other.m_clazz))
				return false;
			if (m_handle != other.m_handle)
				return false;
			return true;
		}
	}

	/** Hash key combining an object's class with its 32-bit native handle. */
	public static class HashKey32 {
		private Class<?> m_clazz;
		private int m_handle;

		public HashKey32(Class<?> clazz, int handle) {
			m_clazz = clazz;
			m_handle = handle;
		}

		public int getHandle() {
			return m_handle;
		}

		public Class<?> getType() {
			return m_clazz;
		}

		@Override
		public int hashCode() {
			final int prime = 31;
			int result = 1;
			result = prime * result + ((m_clazz == null) ? 0 : m_clazz.hashCode());
			result = prime * result + m_handle;
			return result;
		}

		@Override
		public boolean equals(Object obj) {
			if (this == obj)
				return true;
			if (obj == null)
				return false;
			if (getClass() != obj.getClass())
				return false;
			HashKey32 other = (HashKey32) obj;
			if (m_clazz == null) {
				if (other.m_clazz != null)
					return false;
			} else if (!m_clazz.equals(other.m_clazz))
				return false;
			if (m_handle != other.m_handle)
				return false;
			return true;
		}
	}

	/**
	 * Internal method to register a created Notes object that needs to be recycled
	 *
	 * @param clazz class of hash pool
	 * @param obj Notes object
	 */
	public static void __objectCreated(Class<?> clazz, IRecyclableNotesObject obj) {
		DominoGCContext ctx = getThreadContext();

		if (obj.isRecycled())
			throw new NotesError(0, "Object is already recycled");

		if (PlatformUtils.is64Bit()) {
			HashKey64 key = new HashKey64(clazz, obj.getHandle64());
			LinkedHashMap<HashKey64, IRecyclableNotesObject> openHandles = ctx.getOpenHandlesDominoObjects64();
			IRecyclableNotesObject oldObj = openHandles.put(key, obj);
			if (oldObj!=null && oldObj!=obj) {
				throw new IllegalStateException("Duplicate handle detected. Object to store: "+obj+", object found in open handle list: "+oldObj);
			}
		}
		else {
			HashKey32 key = new HashKey32(clazz, obj.getHandle32());
			LinkedHashMap<HashKey32, IRecyclableNotesObject> openHandles = ctx.getOpenHandlesDominoObjects32();
			IRecyclableNotesObject oldObj = openHandles.put(key, obj);
			if (oldObj!=null && oldObj!=obj) {
				throw new IllegalStateException("Duplicate handle detected. Object to store: "+obj+", object found in open handle list: "+oldObj);
			}
		}
		if (ctx.isWriteDebugMessages()) {
			System.out.println("AutoGC - Added object: "+obj);
		}
	}

	/**
	 * Internal method to register a created Notes object that needs to be recycled
	 *
	 * @param mem Notes object
	 */
	public static void __memoryAllocated(IAllocatedMemory mem) {
		DominoGCContext ctx = getThreadContext();

		if (mem.isFreed())
			throw new NotesError(0, "Memory is already freed");

		if (PlatformUtils.is64Bit()) {
			LinkedHashMap<Long, IAllocatedMemory> openHandles = ctx.getOpenHandlesMemory64();
			IAllocatedMemory oldObj = openHandles.put(mem.getHandle64(), mem);
			if (oldObj!=null && oldObj!=mem) {
				throw new IllegalStateException("Duplicate handle detected. Memory to store: "+mem+", object found in open handle list: "+oldObj);
			}
		}
		else {
			LinkedHashMap<Integer, IAllocatedMemory> openHandles = ctx.getOpenHandlesMemory32();
			IAllocatedMemory oldObj = openHandles.put(mem.getHandle32(), mem);
			if (oldObj!=null && oldObj!=mem) {
				throw new IllegalStateException("Duplicate handle detected. Memory to store: "+mem+", object found in open handle list: "+oldObj);
			}
		}
		if (ctx.isWriteDebugMessages()) {
			System.out.println("AutoGC - Added memory: "+mem);
		}
	}

	/**
	 * Internal method to check whether a 64 bit handle exists
	 *
	 * @param objClazz class of Notes object
	 * @param handle handle
	 * @return Notes object
	 * @throws NotesError if handle does not exist
	 */
	public static IRecyclableNotesObject __b64_checkValidObjectHandle(Class<? extends IRecyclableNotesObject> objClazz, long handle) {
		DominoGCContext ctx = getThreadContext();

		HashKey64 key = new HashKey64(objClazz, handle);
		LinkedHashMap<HashKey64, IRecyclableNotesObject> openHandles = ctx.getOpenHandlesDominoObjects64();
		IRecyclableNotesObject obj = openHandles.get(key);
		if (obj==null) {
			throw new NotesError(0, "The provided C handle "+handle+" of object with class "+objClazz.getName()+" does not seem to exist (anymore).");
		}
		else {
			return obj;
		}
	}

	/**
	 * Internal method to check whether a 64 bit handle exists
	 *
	 * @param memClazz class of Notes object
	 * @param handle handle
	 * @throws NotesError if handle does not exist
	 */
	public static void __b64_checkValidMemHandle(Class<? extends IAllocatedMemory> memClazz, long handle) {
		DominoGCContext ctx = getThreadContext();

		LinkedHashMap<Long, IAllocatedMemory> openHandles = ctx.getOpenHandlesMemory64();
		IAllocatedMemory obj = openHandles.get(handle);
		if (obj==null) {
			throw new NotesError(0, "The provided C handle "+handle+" of memory with class "+memClazz.getName()+" does not seem to exist (anymore).");
		}
	}

	/**
	 * Internal method to check whether a 32 bit handle exists
	 *
	 * @param objClazz class of Notes object
	 * @param handle handle
	 * @return Notes object
	 * @throws NotesError if handle does not exist
	 */
	public static IRecyclableNotesObject __b32_checkValidObjectHandle(Class<? extends IRecyclableNotesObject> objClazz, int handle) {
		DominoGCContext ctx = getThreadContext();

		LinkedHashMap<HashKey32,IRecyclableNotesObject> openHandles = ctx.getOpenHandlesDominoObjects32();
		HashKey32 key = new HashKey32(objClazz, handle);
		IRecyclableNotesObject obj = openHandles.get(key);
		if (obj==null) {
			throw new NotesError(0, "The provided C handle "+handle+" of object with class "+objClazz.getName()+" does not seem to exist (anymore).");
		}
		else
			return obj;
	}

	/**
	 * Internal method to check whether a 32 bit handle exists
	 *
	 * @param objClazz class of Notes object
	 * @param handle handle
	 * @throws NotesError if handle does not exist
	 */
	public static void __b32_checkValidMemHandle(Class<? extends IAllocatedMemory> objClazz, int handle) {
		DominoGCContext ctx = getThreadContext();

		LinkedHashMap<Integer, IAllocatedMemory> openHandles = ctx.getOpenHandlesMemory32();
		IAllocatedMemory obj = openHandles.get(handle);
		if (obj==null) {
			throw new NotesError(0, "The provided C handle "+handle+" of memory with class "+objClazz.getName()+" does not seem to exist (anymore).");
		}
	}

	/**
	 * Internal method to unregister a created Notes object that was recycled
	 *
	 * @param clazz class of hash pool
	 * @param obj Notes object
	 */
	public static void __objectBeeingBeRecycled(Class<? extends IRecyclableNotesObject> clazz, IRecyclableNotesObject obj) {
		DominoGCContext ctx = getThreadContext();

		if (obj.isRecycled())
			throw new NotesError(0, "Object is already recycled");

		if (ctx.isWriteDebugMessages()) {
			System.out.println("AutoGC - Removing object: "+obj.getClass()+" with handle="+(PlatformUtils.is64Bit() ? obj.getHandle64() : obj.getHandle32()));
		}

		if (PlatformUtils.is64Bit()) {
			HashKey64 key = new HashKey64(clazz, obj.getHandle64());
			LinkedHashMap<HashKey64, IRecyclableNotesObject> openHandles = ctx.getOpenHandlesDominoObjects64();
			openHandles.remove(key);
		}
		else {
			HashKey32 key = new HashKey32(clazz, obj.getHandle32());
			LinkedHashMap<HashKey32, IRecyclableNotesObject> openHandles = ctx.getOpenHandlesDominoObjects32();
			openHandles.remove(key);
		}
	}

	/**
	 * Internal method to unregister a created Notes object that was recycled
	 *
	 * @param mem Notes object
	 */
	public static void __memoryBeeingFreed(IAllocatedMemory mem) {
		DominoGCContext ctx = getThreadContext();

		if (mem.isFreed())
			throw new NotesError(0, "Memory has already been freed");

		if (ctx.isWriteDebugMessages()) {
			System.out.println("AutoGC - Removing memory: "+mem.getClass()+" with handle="+(PlatformUtils.is64Bit() ? mem.getHandle64() : mem.getHandle32()));
		}

		if (PlatformUtils.is64Bit()) {
			LinkedHashMap<Long, IAllocatedMemory> openHandles = ctx.getOpenHandlesMemory64();
			openHandles.remove(mem.getHandle64());
		}
		else {
			LinkedHashMap<Integer, IAllocatedMemory> openHandles = ctx.getOpenHandlesMemory32();
			openHandles.remove(mem.getHandle32());
		}
	}

	/**
	 * Use this method to store your own custom values for the duration of the
	 * current {@link NotesGC#runWithAutoGC(Callable)} execution block.
	 *
	 * @param key key
	 * @param value value, implement interface {@link IDisposableCustomValue} to get called for disposal
	 * @return previous value
	 */
	public static Object setCustomValue(String key, Object value) {
		DominoGCContext ctx = getThreadContext();
		Map<String,Object> map = ctx.getCustomValues();
		return map.put(key, value);
	}

	/**
	 * Reads a custom value stored via {@link #setCustomValue(String, Object)}
	 * for the duration of the current {@link #runWithAutoGC(Callable)}
	 * execution block.
	 *
	 * @param key key
	 * @return value or null if not set
	 */
	public static Object getCustomValue(String key) {
		DominoGCContext ctx = getThreadContext();
		Map<String,Object> map = ctx.getCustomValues();
		return map.get(key);
	}

	/**
	 * Tests if a custom value has been set via {@link #setCustomValue(String, Object)}.
	 *
	 * NOTE(review): this is the only accessor in the set that is an instance method;
	 * it looks like it was meant to be static like its siblings — confirm before changing,
	 * since converting it would be binary-incompatible for existing callers.
	 *
	 * @param key key
	 * @return true if value is set
	 */
	public boolean hasCustomValue(String key) {
		DominoGCContext ctx = getThreadContext();
		Map<String,Object> map = ctx.getCustomValues();
		return map.containsKey(key);
	}

	/**
	 * Throws an exception when the code is currently not running in a runWithAutoGC block
	 */
	public static void ensureRunningInAutoGC() {
		DominoGCContext ctx = threadContext.get();
		if (ctx==null) {
			throw new IllegalStateException("Thread is not enabled for auto GC. Either run your code via runWithAutoGC(Callable) or via try-with-resources on the object returned by initThread().");
		}
	}

	/**
	 * Method to check whether the current thread is already running in
	 * an auto GC block
	 *
	 * @return true if in auto GC
	 */
	public static boolean isAutoGCActive() {
		return threadContext.get() != null;
	}

	/**
	 * When using {@link NotesGC#setCustomValue(String, Object)} to store your own
	 * values, use this
	 * interface for your value to get called for disposal when the {@link NotesGC#runWithAutoGC(Callable)}
	 * block is finished. Otherwise the value is just removed from the intermap map.
	 *
	 * @author Karsten Lehmann
	 */
	public static interface IDisposableCustomValue {
		public void dispose();
	}

	/**
	 * Runs a piece of code and automatically disposes any allocated Notes objects at the end.
	 * The method supports nested calls.
	 *
	 * @param callable code to execute
	 * @return computation result
	 * @throws Exception in case of errors
	 *
	 * @param <T> return value type of code to be run
	 */
	public static <T> T runWithAutoGC(final Callable<T> callable) throws Exception {
		try (DominoGCContext ctx = initThread()) {
			return AccessController.doPrivileged(new PrivilegedAction<T>() {

				@Override
				public T run() {
					try {
						return callable.call();
					} catch (Exception e) {
						// Preserve RuntimeExceptions; wrap checked exceptions with their cause.
						if (e instanceof RuntimeException) {
							throw (RuntimeException) e;
						}
						else {
							throw new NotesError(0, "Error during code execution", e);
						}
					}
				}
			});
		}
	}

	/**
	 * Initializes the current thread for Domino JNA C and memory resource
	 * tracking.<br>
	 * <br>
	 * <b>The returned {@link DominoGCContext} must be closed to free up all allocated resources.
	 * Otherwise the client/server will run out of handles sooner or later!</b><br>
	 * <br>
	 * It is recommended to use a try-with-resources block to ensure calling the close() method
	 * even in case of execution errors.<br>
	 * <br>
	 * Nested invocation of this method is supported. The current implementation only
	 * frees up resources when calling {@link DominoGCContext#close()} on the outer most
	 * context. Other calls are ignored.
	 *
	 * @return garbage collection context
	 */
	public static DominoGCContext initThread() {
		DominoGCContext ctx = threadContext.get();
		if (ctx==null) {
			NotesNativeAPI.initialize();

			ctx = new DominoGCContext(null);
			threadContext.set(ctx);
			return ctx;
		}
		else {
			// Nested call: chain a child context whose close() is a no-op.
			return new DominoGCContext(ctx);
		}
	}

	/**
	 * Domino handle collection context to collect all allocated C object
	 * and memory handles for the current thread.
	 *
	 * @author Karsten Lehmann
	 */
	public static class DominoGCContext implements AutoCloseable {
		private DominoGCContext m_parentCtx;
		private Thread m_parentThread;
		private Map<String,Object> m_activeAutoGCCustomValues;

		//maps with open handles; using LinkedHashMap to keep insertion order for the keys and disposed in reverse order
		private LinkedHashMap<HashKey32,IRecyclableNotesObject> m_b32OpenHandlesDominoObjects;
		private LinkedHashMap<Integer, IAllocatedMemory> m_b32OpenHandlesMemory;
		private LinkedHashMap<HashKey64, IRecyclableNotesObject> m_b64OpenHandlesDominoObjects;
		private LinkedHashMap<Long, IAllocatedMemory> m_b64OpenHandlesMemory;

		private boolean m_writeDebugMessages;
		private boolean m_logCrashingThreadStackTrace;

		private DominoGCContext(DominoGCContext parentCtx) {
			m_parentCtx = parentCtx;
			m_parentThread = Thread.currentThread();
		}

		/**
		 * Returns the parent GC context if nested calls on {@link NotesGC#initThread()}
		 * are used.
		 *
		 * @return parent context or null for top context
		 */
		public DominoGCContext getParentContext() {
			return m_parentCtx;
		}

		/**
		 * Returns true if this GC context is the first created for the current
		 * thread and false if it's a nested context.
		 *
		 * @return true if top context
		 */
		public boolean isTopContext() {
			return m_parentCtx==null;
		}

		/** Guards against cross-thread use; this context is thread-confined. */
		private void checkValidThread() {
			if (!m_parentThread.equals(Thread.currentThread())) {
				throw new IllegalStateException("This context cannot be used across threads");
			}
		}

		public boolean isWriteDebugMessages() {
			if (m_parentCtx!=null) {
				return m_parentCtx.isWriteDebugMessages();
			}
			return m_writeDebugMessages;
		}

		public void setWriteDebugMessages(boolean b) {
			if (m_parentCtx!=null) {
				m_parentCtx.setWriteDebugMessages(b);
				return;
			}
			m_writeDebugMessages = b;
		}

		public boolean isLogCrashingThreadStackTrace() {
			if (m_parentCtx!=null) {
				return m_parentCtx.isLogCrashingThreadStackTrace();
			}
			return m_logCrashingThreadStackTrace;
		}

		public void setLogCrashingThreadStackTrace(boolean b) {
			if (m_parentCtx!=null) {
				m_parentCtx.setLogCrashingThreadStackTrace(b);
				return;
			}
			m_logCrashingThreadStackTrace = b;
		}

		// All state accessors delegate to the top context so nested contexts share one pool.
		private Map<String,Object> getCustomValues() {
			checkValidThread();
			if (m_parentCtx!=null) {
				return m_parentCtx.getCustomValues();
			}
			if (m_activeAutoGCCustomValues==null) {
				m_activeAutoGCCustomValues = new HashMap<>();
			}
			return m_activeAutoGCCustomValues;
		}

		private LinkedHashMap<HashKey32,IRecyclableNotesObject> getOpenHandlesDominoObjects32() {
			checkValidThread();
			if (m_parentCtx!=null) {
				return m_parentCtx.getOpenHandlesDominoObjects32();
			}
			if (m_b32OpenHandlesDominoObjects==null) {
				m_b32OpenHandlesDominoObjects = new LinkedHashMap<>();
			}
			return m_b32OpenHandlesDominoObjects;
		}

		private LinkedHashMap<Integer, IAllocatedMemory> getOpenHandlesMemory32() {
			checkValidThread();
			if (m_parentCtx!=null) {
				return m_parentCtx.getOpenHandlesMemory32();
			}
			if (m_b32OpenHandlesMemory==null) {
				m_b32OpenHandlesMemory = new LinkedHashMap<>();
			}
			return m_b32OpenHandlesMemory;
		}

		private LinkedHashMap<HashKey64, IRecyclableNotesObject> getOpenHandlesDominoObjects64() {
			checkValidThread();
			if (m_parentCtx!=null) {
				return m_parentCtx.getOpenHandlesDominoObjects64();
			}
			if (m_b64OpenHandlesDominoObjects==null) {
				m_b64OpenHandlesDominoObjects = new LinkedHashMap<>();
			}
			return m_b64OpenHandlesDominoObjects;
		}

		private LinkedHashMap<Long, IAllocatedMemory> getOpenHandlesMemory64() {
			checkValidThread();
			if (m_parentCtx!=null) {
				return m_parentCtx.getOpenHandlesMemory64();
			}
			if (m_b64OpenHandlesMemory==null) {
				m_b64OpenHandlesMemory = new LinkedHashMap<>();
			}
			return m_b64OpenHandlesMemory;
		}

		/**
		 * Recycles all tracked Domino objects and frees all tracked memory, in reverse
		 * insertion order. Only the top context actually releases resources; close() on
		 * a nested context is a no-op.
		 */
		@Override
		public void close() throws Exception {
			checkValidThread();

			if (!isTopContext()) {
				//don't free up resources in nested calls on NotesGC.initThread()
				return;
			}

			if (PlatformUtils.is64Bit()) {
				{
					//recycle created Domino objects
					if (m_b64OpenHandlesDominoObjects!=null && !m_b64OpenHandlesDominoObjects.isEmpty()) {
						Entry[] mapEntries = m_b64OpenHandlesDominoObjects.entrySet().toArray(new Entry[m_b64OpenHandlesDominoObjects.size()]);
						if (mapEntries.length>0) {
							if (m_writeDebugMessages) {
								System.out.println("AutoGC - Auto-recycling "+mapEntries.length+" Domino objects:");
							}
							// FIX: restored the missing "--" — the loop header had been truncated to
							// "i>=0; i", which does not compile.
							for (int i=mapEntries.length-1; i>=0; i--) {
								Entry<HashKey64,IRecyclableNotesObject> currEntry = mapEntries[i];
								IRecyclableNotesObject obj = currEntry.getValue();
								try {
									if (!obj.isRecycled()) {
										if (m_writeDebugMessages) {
											System.out.println("AutoGC - Auto-recycling "+obj);
										}
										obj.recycle();
									}
								}
								catch (Throwable e) {
									e.printStackTrace();
								}
								m_b64OpenHandlesDominoObjects.remove(currEntry.getKey());
							}
							if (m_writeDebugMessages) {
								System.out.println("AutoGC - Done auto-recycling "+mapEntries.length+" Domino objects");
							}
							m_b64OpenHandlesDominoObjects.clear();
							m_b64OpenHandlesDominoObjects = null;
						}
					}
				}
				{
					//dispose allocated memory
					if (m_b64OpenHandlesMemory!=null && !m_b64OpenHandlesMemory.isEmpty()) {
						Entry[] mapEntries = m_b64OpenHandlesMemory.entrySet().toArray(new Entry[m_b64OpenHandlesMemory.size()]);
						if (mapEntries.length>0) {
							if (m_writeDebugMessages) {
								System.out.println("AutoGC - Freeing "+mapEntries.length+" memory handles");
							}
							// FIX: restored the missing "--" in the truncated loop header.
							for (int i=mapEntries.length-1; i>=0; i--) {
								Entry<Long,IAllocatedMemory> currEntry = mapEntries[i];
								IAllocatedMemory obj = currEntry.getValue();
								try {
									if (!obj.isFreed()) {
										if (m_writeDebugMessages) {
											System.out.println("AutoGC - Freeing "+obj);
										}
										obj.free();
									}
								}
								catch (Throwable e) {
									e.printStackTrace();
								}
								m_b64OpenHandlesMemory.remove(currEntry.getKey());
							}
							if (m_writeDebugMessages) {
								System.out.println("AutoGC - Done freeing "+mapEntries.length+" memory handles");
							}
							m_b64OpenHandlesMemory.clear();
							m_b64OpenHandlesMemory = null;
						}
					}
				}
			}
			else {
				{
					if (m_b32OpenHandlesDominoObjects!=null && !m_b32OpenHandlesDominoObjects.isEmpty()) {
						//recycle created Domino objects
						Entry[] mapEntries = m_b32OpenHandlesDominoObjects.entrySet().toArray(new Entry[m_b32OpenHandlesDominoObjects.size()]);
						if (mapEntries.length>0) {
							if (m_writeDebugMessages) {
								System.out.println("AutoGC - Recycling "+mapEntries.length+" Domino objects:");
							}
							// FIX: restored the missing "--" in the truncated loop header.
							for (int i=mapEntries.length-1; i>=0; i--) {
								Entry<HashKey32,IRecyclableNotesObject> currEntry = mapEntries[i];
								IRecyclableNotesObject obj = currEntry.getValue();
								try {
									if (!obj.isRecycled()) {
										if (m_writeDebugMessages) {
											System.out.println("AutoGC - Recycling "+obj);
										}
										obj.recycle();
									}
								}
								catch (Throwable e) {
									e.printStackTrace();
								}
								m_b32OpenHandlesDominoObjects.remove(currEntry.getKey());
							}
							if (m_writeDebugMessages) {
								// FIX: message said "memory handles" but this branch recycles Domino objects.
								System.out.println("AutoGC - Done recycling "+mapEntries.length+" Domino objects");
							}
							m_b32OpenHandlesDominoObjects.clear();
							m_b32OpenHandlesDominoObjects = null;
						}
					}
				}
				{
					if (m_b32OpenHandlesMemory!=null && !m_b32OpenHandlesMemory.isEmpty()) {
						//dispose allocated memory
						Entry[] mapEntries = m_b32OpenHandlesMemory.entrySet().toArray(new Entry[m_b32OpenHandlesMemory.size()]);
						if (mapEntries.length>0) {
							if (m_writeDebugMessages) {
								System.out.println("AutoGC - Freeing "+mapEntries.length+" memory handles");
							}
							// FIX: restored the missing "--" in the truncated loop header.
							for (int i=mapEntries.length-1; i>=0; i--) {
								Entry<Integer,IAllocatedMemory> currEntry = mapEntries[i];
								IAllocatedMemory obj = currEntry.getValue();
								try {
									if (!obj.isFreed()) {
										if (m_writeDebugMessages) {
											System.out.println("AutoGC - Freeing "+obj);
										}
										obj.free();
									}
								}
								catch (Throwable e) {
									e.printStackTrace();
								}
								m_b32OpenHandlesMemory.remove(currEntry.getKey());
							}
							if (m_writeDebugMessages) {
								System.out.println("AutoGC - Done freeing "+mapEntries.length+" memory handles");
							}
							m_b32OpenHandlesMemory.clear();
							m_b32OpenHandlesMemory = null;
						}
					}
				}
			}

			if (m_activeAutoGCCustomValues!=null) {
				cleanupCustomValues(m_activeAutoGCCustomValues);
				m_activeAutoGCCustomValues.clear();
				m_activeAutoGCCustomValues = null;
			}
		}
	}

	/** Disposes every stored {@link IDisposableCustomValue}, continuing past failures. */
	private static void cleanupCustomValues(Map<String, Object> customValues) {
		for (Entry<String,Object> currEntry : customValues.entrySet()) {
			Object currVal = currEntry.getValue();
			if (currVal instanceof IDisposableCustomValue) {
				try {
					((IDisposableCustomValue)currVal).dispose();
				}
				catch (Exception e) {
					//give access to this exception via special (optional) PrintWriter,
					//but continue with the loop
					Object out = customValues.get("NotesGC.CustomValueDisposeOut");
					if (out instanceof PrintWriter) {
						e.printStackTrace((PrintWriter) out);
					}
				}
			}
		}
	}
}
package bbth.game;

import java.util.HashMap;
import java.util.HashSet;
import java.util.Random;

import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Paint.Style;
import android.graphics.RectF;
import android.util.FloatMath;
import bbth.engine.ai.Pathfinder;
import bbth.engine.fastgraph.FastGraphGenerator;
import bbth.engine.fastgraph.Wall;
import bbth.engine.net.simulation.LockStepProtocol;
import bbth.engine.net.simulation.Simulation;
import bbth.engine.particles.ParticleSystem;
import bbth.engine.ui.UIScrollView;
import bbth.engine.util.Bag;
import bbth.engine.util.MathUtils;
import bbth.engine.util.Timer;
import bbth.game.ai.AIController;
import bbth.game.units.Unit;
import bbth.game.units.UnitManager;
import bbth.game.units.UnitType;

/**
 * Lock-step, deterministic game simulation for BBTH. Runs three fine
 * timesteps per 0.1s coarse timestep with user input lagged by two coarse
 * timesteps (see the {@code super(...)} call in the constructor).
 *
 * <p>Determinism note: all gameplay-relevant randomness MUST go through
 * {@link #randInRange(float, float)}, which uses a shared seeded
 * {@link Random}; purely cosmetic effects (particles) use
 * {@code MathUtils.randInRange} instead so they do not perturb the
 * synchronized stream.
 */
public class BBTHSimulation extends Simulation implements UnitManager {
	// Seconds between automatic enemy spawns in single-player/debug mode.
	private static final float DEBUG_SPAWN_TIMER = 1.f;
	private static final int NUM_PARTICLES = 1000;
	private static final float PARTICLE_THRESHOLD = 0.5f;
	public static final ParticleSystem PARTICLES = new ParticleSystem(
			NUM_PARTICLES, PARTICLE_THRESHOLD);
	public static final Paint PARTICLE_PAINT = new Paint();
	static {
		PARTICLE_PAINT.setStrokeWidth(2.f);
	}

	private int timestep;
	private Team team;
	public Player localPlayer, remotePlayer;
	// Keyed by "isServer" so simulate* callbacks can resolve the acting player.
	private HashMap<Boolean, Player> playerMap;
	public Player serverPlayer, clientPlayer;
	private AIController aiController;
	private Pathfinder pathFinder;
	private FastGraphGenerator graphGen;
	private FastLineOfSightTester tester;
	private GridAcceleration accel;
	private HashSet<Unit> cachedUnits;
	private Paint paint = new Paint();
	// Reused on every query to avoid per-frame allocation; see the
	// "WILL RETURN THE SAME BAG" warnings on the query methods below.
	private Bag<Unit> cachedUnitBag = new Bag<Unit>();
	private HashSet<Unit> cachedUnitSet = new HashSet<Unit>();
	public Timer accelTickTimer = new Timer();
	public Timer aiTickTimer = new Timer();
	public Timer entireTickTimer = new Timer();
	public Timer aiControllerTimer = new Timer();
	public Timer serverPlayerTimer = new Timer();
	public Timer clientPlayerTimer = new Timer();
	private static final Random random = new Random();
	private boolean serverReady;
	private boolean clientReady;

	// This is the virtual size of the game
	public static final float GAME_X = BeatTrack.BEAT_TRACK_WIDTH;
	public static final float GAME_Y = 0;
	public static final float GAME_WIDTH = BBTHGame.WIDTH - BeatTrack.BEAT_TRACK_WIDTH;
	public static final float GAME_HEIGHT = BBTHGame.HEIGHT;

	// Minimal length of a wall
	public static final float MIN_WALL_LENGTH = 5.f;

	// Combo constants
	public static final float UBER_UNIT_THRESHOLD = 7;

	// Custom-event code signalling that a player finished the tutorial.
	public static final int TUTORIAL_DONE = 13;

	public BBTHSimulation(Team localTeam, LockStepProtocol protocol, boolean isServer) {
		// 3 fine timesteps per coarse timestep
		// coarse timestep takes 0.1 seconds
		// user inputs lag 2 coarse timesteps behind
		super(3, 0.1f, 2, protocol, isServer);

		// THIS IS IMPORTANT: both peers must seed identically for lock-step
		// determinism.
		random.setSeed(0);

		serverReady = true;
		clientReady = false;

		aiController = new AIController();
		accel = new GridAcceleration(GAME_WIDTH, GAME_HEIGHT, GAME_WIDTH / 10);

		team = localTeam;
		serverPlayer = new Player(Team.SERVER, aiController, this, team == Team.SERVER);
		clientPlayer = new Player(Team.CLIENT, aiController, this, team == Team.CLIENT);
		localPlayer = (team == Team.SERVER) ? serverPlayer : clientPlayer;
		remotePlayer = (team == Team.SERVER) ? clientPlayer : serverPlayer;
		playerMap = new HashMap<Boolean, Player>();
		playerMap.put(true, serverPlayer);
		playerMap.put(false, clientPlayer);

		graphGen = new FastGraphGenerator(15.0f, GAME_WIDTH, GAME_HEIGHT);
		accel.insertWalls(graphGen.walls);

		pathFinder = new Pathfinder(graphGen.graph);
		tester = new FastLineOfSightTester(15.f, accel);

		aiController.setPathfinder(pathFinder, graphGen.graph, tester, accel);
		aiController.setUpdateFraction(.10f);

		cachedUnits = new HashSet<Unit>();
	}

	public void setupSubviews(UIScrollView view) {
		localPlayer.setupSubviews(view, true);
		remotePlayer.setupSubviews(view, false);
	}

	public Unit getOpponentsMostAdvancedUnit() {
		return remotePlayer.getMostAdvancedUnit();
	}

	public UnitSelector getMyUnitSelector() {
		return localPlayer.getUnitSelector();
	}

	// Just for debugging so we know the simulation isn't stuck
	public int getTimestep() {
		return timestep;
	}

	// Only use BBTHSimulation.randInRange() for things that are supposed to
	// be synced (not particles!)
	public static float randInRange(float min, float max) {
		return (max - min) * random.nextFloat() + min;
	}

	@Override
	protected void simulateTapDown(float x, float y, boolean isServer, boolean isHold, boolean isOnBeat) {
		Player player = playerMap.get(isServer);

		if (x < 0 || y < 0)
			return;

		// On-beat taps build the combo and spawn units/walls; off-beat taps
		// reset the combo (unless in debug mode, which accepts everything).
		if (BBTHGame.DEBUG || isOnBeat) {
			float newcombo = player.getCombo() + 1;
			player.setCombo(newcombo);

			if (isHold) {
				player.startWall(x, y);
			} else {
				player.spawnUnit(x, y);
			}
		} else {
			player.setCombo(0);
		}
	}

	@Override
	protected void simulateTapMove(float x, float y, boolean isServer) {
		Player player = playerMap.get(isServer);

		if (!player.settingWall())
			return;

		// Dragging off the playfield finalizes the wall in progress.
		if (x < 0 || y < 0) {
			generateWall(player);
		} else {
			player.updateWall(x, y);
		}
	}

	@Override
	protected void simulateTapUp(float x, float y, boolean isServer) {
		Player player = playerMap.get(isServer);
		generateWall(player);
	}

	/**
	 * Creates a wall out of the given player, and lets the AI know about it.
	 */
	public void generateWall(Player player) {
		if (!player.settingWall())
			return;
		Wall w = player.endWall();
		if (w == null)
			return;
		// Local walls already got particles while being drawn; only remote
		// walls need the one-shot burst here.
		if (player != localPlayer) {
			this.generateParticlesForWall(w, player.getTeam());
		}
		addWallToAI(w);
	}

	/** Emits a cosmetic particle burst spread along the wall segment. */
	public void generateParticlesForWall(Wall wall, Team team) {
		int numParticles = 40;
		for (int i = 0; i < numParticles; i++) {
			// Linear interpolation between the wall endpoints.
			float posX = wall.a.x * i / numParticles + wall.b.x * (numParticles - i) / numParticles;
			float posY = wall.a.y * i / numParticles + wall.b.y * (numParticles - i) / numParticles;
			float angle = MathUtils.randInRange(0, 2 * MathUtils.PI);
			float xVel = MathUtils.randInRange(25.f, 50.f) * FloatMath.cos(angle);
			float yVel = MathUtils.randInRange(25.f, 50.f) * FloatMath.sin(angle);
			PARTICLES.createParticle().circle().velocity(xVel, yVel)
					.shrink(0.1f, 0.15f).radius(3.0f).position(posX, posY)
					.color(team.getRandomShade());
		}
	}

	// Registers a finished wall with the pathfinding graph and the spatial
	// acceleration structure (full recompute; walls are added rarely).
	private void addWallToAI(Wall wall) {
		graphGen.walls.add(wall);
		graphGen.compute();
		accel.clearWalls();
		accel.insertWalls(graphGen.walls);
	}

	@Override
	protected void simulateCustomEvent(float x, float y, int code, boolean isServer) {
		Player player = playerMap.get(isServer);
		UnitType type = UnitType.fromInt(code);
		if (type != null) {
			player.setUnitType(type);
		} else if (code == TUTORIAL_DONE) {
			if (isServer) {
				serverReady = true;
			} else {
				clientReady = true;
			}
		}
	}

	private float elapsedTime = 0;

	@Override
	protected void update(float seconds) {
		if (!isReady()) {
			return;
		}

		entireTickTimer.start();
		timestep++;

		// update acceleration data structure
		accelTickTimer.start();
		accel.clearUnits();
		accel.insertUnits(serverPlayer.units);
		accel.insertUnits(clientPlayer.units);
		accelTickTimer.stop();

		aiTickTimer.start();
		aiControllerTimer.start();
		aiController.update();
		aiControllerTimer.stop();
		serverPlayerTimer.start();
		serverPlayer.update(seconds);
		serverPlayerTimer.stop();
		clientPlayerTimer.start();
		clientPlayer.update(seconds);
		clientPlayerTimer.stop();

		// Spawn dudes
		if (BBTHGame.IS_SINGLE_PLAYER) {
			elapsedTime += seconds;
			if (elapsedTime > DEBUG_SPAWN_TIMER) {
				elapsedTime -= DEBUG_SPAWN_TIMER;
				remotePlayer.spawnUnit(randInRange(0, GAME_WIDTH), GAME_HEIGHT - 50);
			}
		}
		aiTickTimer.stop();

		PARTICLES.tick(seconds);

		// Units reaching the opposing base damage it and die.
		RectF sr = serverPlayer.base.getRect();
		RectF cr = clientPlayer.base.getRect();
		accel.getUnitsInAABB(sr.left, sr.top, sr.right, sr.bottom, cachedUnits);
		for (Unit u : cachedUnits) {
			if (u.getTeam() == Team.CLIENT) {
				serverPlayer.adjustHealth(-10);
				this.notifyUnitDead(u);
			}
		}
		accel.getUnitsInAABB(cr.left, cr.top, cr.right, cr.bottom, cachedUnits);
		for (Unit u : cachedUnits) {
			if (u.getTeam() == Team.SERVER) {
				clientPlayer.adjustHealth(-10);
				this.notifyUnitDead(u);
			}
		}

		entireTickTimer.stop();
	}

	private void drawGrid(Canvas canvas) {
		paint.setColor(Color.DKGRAY);
		// TODO: only draw lines on screen for speed
		for (float x = 0; x < GAME_WIDTH; x += 60) {
			canvas.drawLine(x, 0, x, GAME_HEIGHT, paint);
		}
		for (float y = 0; y < GAME_HEIGHT; y += 60) {
			canvas.drawLine(0, y, GAME_WIDTH, y, paint);
		}
	}

	public void draw(Canvas canvas) {
		drawWavefronts(canvas);
		drawGrid(canvas);

		localPlayer.draw(canvas);
		remotePlayer.draw(canvas);

		PARTICLES.draw(canvas, PARTICLE_PAINT);

		if (BBTHGame.DEBUG) {
			graphGen.draw(canvas);
		}

		localPlayer.postDraw(canvas);
		remotePlayer.postDraw(canvas);
	}

	// Shades the regions behind each team's most advanced unit; the overlap
	// (if the fronts have crossed) gets a blended color.
	private void drawWavefronts(Canvas canvas) {
		Unit serverAdvUnit = serverPlayer.getMostAdvancedUnit();
		Unit clientAdvUnit = clientPlayer.getMostAdvancedUnit();
		float serverWavefrontY = serverAdvUnit != null ? serverAdvUnit.getY() + 10 : 0;
		float clientWavefrontY = clientAdvUnit != null ? clientAdvUnit.getY() - 10 : BBTHSimulation.GAME_HEIGHT;

		paint.setStyle(Style.FILL);

		// server wavefront
		paint.setColor(Team.SERVER.getWavefrontColor());
		canvas.drawRect(0, 0, BBTHSimulation.GAME_WIDTH, Math.min(clientWavefrontY, serverWavefrontY), paint);

		// client wavefront
		paint.setColor(Team.CLIENT.getWavefrontColor());
		canvas.drawRect(0, Math.max(clientWavefrontY, serverWavefrontY), BBTHSimulation.GAME_WIDTH, BBTHSimulation.GAME_HEIGHT, paint);

		// overlapped wavefronts
		if (serverWavefrontY > clientWavefrontY) {
			paint.setColor(Color.rgb(63, 0, 63));
			canvas.drawRect(0, clientWavefrontY, BBTHSimulation.GAME_WIDTH, serverWavefrontY, paint);
		}
	}

	public void drawForMiniMap(Canvas canvas) {
		localPlayer.drawForMiniMap(canvas);
		remotePlayer.drawForMiniMap(canvas);
	}

	@Override
	public void notifyUnitDead(Unit unit) {
		// Cosmetic death burst (not synced randomness on purpose).
		for (int i = 0; i < 10; i++) {
			float angle = MathUtils.randInRange(0, 2 * MathUtils.PI);
			float xVel = MathUtils.randInRange(25.f, 50.f) * FloatMath.cos(angle);
			float yVel = MathUtils.randInRange(25.f, 50.f) * FloatMath.sin(angle);
			BBTHSimulation.PARTICLES.createParticle().circle()
					.velocity(xVel, yVel).shrink(0.1f, 0.15f).radius(3.0f)
					.position(unit.getX(), unit.getY())
					.color(unit.getTeam().getRandomShade());
		}

		serverPlayer.units.remove(unit);
		clientPlayer.units.remove(unit);
		aiController.removeEntity(unit);
	}

	/**
	 * WILL RETURN THE SAME BAG OVER AND OVER
	 */
	@Override
	public Bag<Unit> getUnitsInCircle(float x, float y, float r) {
		float r2 = r * r;
		cachedUnitBag.clear();
		accel.getUnitsInAABB(x - r, y - r, x + r, y + r, cachedUnitSet);
		for (Unit unit : cachedUnitSet) {
			float dx = x - unit.getX();
			float dy = y - unit.getY();
			if (dx * dx + dy * dy < r2) {
				cachedUnitBag.add(unit);
			}
		}
		return cachedUnitBag;
	}

	/**
	 * WILL RETURN THE SAME BAG OVER AND OVER
	 */
	@Override
	public Bag<Unit> getUnitsIntersectingLine(float x, float y, float x2, float y2) {
		cachedUnitBag.clear();

		// calculate axis vector (perpendicular to the line segment)
		float axisX = -(y2 - y);
		float axisY = x2 - x;

		// normalize axis vector
		float axisLen = FloatMath.sqrt(axisX * axisX + axisY * axisY);
		axisX /= axisLen;
		axisY /= axisLen;

		float lMin = axisX * x + axisY * y;
		float lMax = axisX * x2 + axisY * y2;
		if (lMax < lMin) {
			float temp = lMin;
			lMin = lMax;
			lMax = temp;
		}

		// FIX: the AABB was previously computed as
		// (min(x, y), min(y, y2), max(x2, x2), max(y, y2)), which mixed an x
		// coordinate with a y coordinate and never considered x when x > x2.
		// Use the proper bounding box of the two endpoints.
		accel.getUnitsInAABB(Math.min(x, x2), Math.min(y, y2),
				Math.max(x, x2), Math.max(y, y2), cachedUnitSet);
		for (Unit unit : cachedUnitSet) {
			// calculate projections onto the separating axis
			float projectedCenter = axisX * unit.getX() + axisY * unit.getY();
			float radius = unit.getRadius();

			if (!intervalsDontOverlap(projectedCenter - radius, projectedCenter + radius, lMin, lMax)) {
				cachedUnitBag.add(unit);
			}
		}

		return cachedUnitBag;
	}

	// True iff [min1, max1] and [min2, max2] are disjoint.
	private static boolean intervalsDontOverlap(float min1, float max1, float min2, float max2) {
		return (min1 < min2 ? min2 - max1 : min1 - max2) > 0;
	}

	public boolean isReady() {
		return BBTHGame.IS_SINGLE_PLAYER || (clientReady && serverReady);
	}

	@Override
	public void removeWall(Wall wall) {
		graphGen.walls.remove(wall);
		graphGen.compute();
		accel.clearWalls();
		accel.insertWalls(graphGen.walls);
	}
}
package bbth.game.ai;

import java.util.ArrayList;
import java.util.HashMap;

import android.graphics.PointF;
import android.util.FloatMath;
import bbth.engine.ai.FlockRulesCalculator;
import bbth.engine.ai.fsm.FiniteState;
import bbth.engine.ai.fsm.FiniteStateMachine;
import bbth.engine.ai.fsm.SimpleGreaterTransition;
import bbth.engine.ai.fsm.SimpleLessTransition;
import bbth.engine.util.MathUtils;
import bbth.game.BBTHSimulation;
import bbth.game.Team;
import bbth.game.units.Unit;

/**
 * AI for offensive units: units flock together and push toward the enemy
 * side of the map, switching between a "moving" and an "attacking" FSM state
 * based on squared distance to the closest enemy (threshold 900 = 30 units).
 */
public class OffensiveAI extends UnitAI {
	private PointF m_flock_dir;
	PointF start_point;
	PointF end_point;
	// Reused each tick to feed the FSM transition inputs without reallocating.
	private HashMap<String, Float> m_fsm_conditions;

	public OffensiveAI() {
		super();
		m_flock_dir = new PointF();
		start_point = new PointF();
		end_point = new PointF();
		m_fsm_conditions = new HashMap<String, Float>();
	}

	@Override
	public void update(Unit entity, AIController c, FlockRulesCalculator flock) {
		FiniteState state = entity.getState();

		// Check if FSM has been initialized.
		if (state == null) {
			initialize_fsm(entity);
			state = entity.getState();
		}

		Unit enemy = getClosestEnemy(entity);
		entity.setTarget(enemy);

		String statename = state.getName();
		// FIX: compare state names with equals() instead of ==. The old
		// reference comparison only worked because both sides happened to be
		// the same interned literal; equals() is correct regardless of where
		// the name string came from.
		if ("moving".equals(statename)) {
			do_movement(entity, c, flock);
		} else if ("attacking".equals(statename)) {
			do_movement(entity, c, flock);
		} else {
			System.err.println("Error: entity in unknown state: " + statename);
		}

		check_state_transition(entity, c, entity.getFSM());
	}

	// Feeds the squared distance to the current target into the FSM so the
	// moving/attacking transitions can fire.
	private void check_state_transition(Unit entity, AIController c, FiniteStateMachine fsm) {
		Unit target = entity.getTarget();
		m_fsm_conditions.clear();
		float dist = Float.MAX_VALUE;
		if (target != null) {
			dist = MathUtils.getDistSqr(entity.getX(), entity.getY(), target.getX(), target.getY());
		}
		m_fsm_conditions.put("targetdist", dist);
		fsm.update(m_fsm_conditions);
	}

	// Computes a new heading from flocking forces plus (for leaders) a
	// pathfound objective, clamps the turn rate, and applies the velocity.
	private void do_movement(Unit entity, AIController c, FlockRulesCalculator flock) {
		float xcomp = 0;
		float ycomp = 0;

		float start_x = entity.getX();
		float start_y = entity.getY();

		// Calculate flocking.
		calculateFlocking(entity, c, flock, m_flock_dir);
		xcomp = m_flock_dir.x;
		ycomp = m_flock_dir.y;

		// Calculate somewhere to go if it's a leader.
		if (!flock.hasLeader(entity)) {
			float goal_x = BBTHSimulation.GAME_WIDTH / 2.0f;
			float goal_y = 0;
			if (entity.getTeam() == Team.SERVER) {
				goal_y = BBTHSimulation.GAME_HEIGHT;
			}

			if (entity.getStateName().equals("attacking")) {
				Unit target = entity.getTarget();
				goal_x = target.getX();
				goal_y = target.getY();
			}

			start_point.set(start_x, start_y);
			end_point.set(goal_x, goal_y);

			// NOTE(review): isLineOfSightClear != null appears to mean the
			// line of sight is BLOCKED (it returns the obstructing wall), so
			// pathfinding only runs when the direct route is obstructed —
			// confirm against FastLineOfSightTester.
			if (m_tester != null && m_tester.isLineOfSightClear(start_point, end_point) != null) {
				PointF start = getClosestNode(start_point);
				PointF end = getClosestNode(end_point);
				ArrayList<PointF> path = null;
				if (start != null && end != null) {
					m_pathfinder.clearPath();
					m_pathfinder.findPath(start, end);
				}
				// NOTE(review): if start or end is null this reuses whatever
				// path the shared pathfinder last computed — presumably
				// acceptable here, but verify.
				path = m_pathfinder.getPath();
				path.add(end_point);

				if (path.size() > 1) {
					PointF goal_point = path.get(0);
					// Skip the first waypoint when the second is already
					// directly reachable.
					if (path.size() > 1 && m_tester.isLineOfSightClear(start_point, path.get(1)) == null) {
						goal_point = path.get(1);
					}

					goal_x = goal_point.x;
					goal_y = goal_point.y;
				}
			}

			float angle = MathUtils.getAngle(entity.getX(), entity.getY(), goal_x, goal_y);
			float objectiveweighting = getObjectiveWeighting();
			xcomp += objectiveweighting * FloatMath.cos(angle);
			ycomp += objectiveweighting * FloatMath.sin(angle);
		}

		float wanteddir = MathUtils.getAngle(0, 0, xcomp, ycomp);
		float wantedchange = MathUtils.normalizeAngle(wanteddir, entity.getHeading()) - entity.getHeading();
		float actualchange = wantedchange;

		// Clamp the turn to the unit's maximum angular velocity change.
		float maxvelchange = getMaxVelChange();
		if (actualchange > maxvelchange) {
			actualchange = maxvelchange;
		}
		if (actualchange < -1.0f * maxvelchange) {
			actualchange = -1.0f * maxvelchange;
		}

		float heading = entity.getHeading() + actualchange;
		entity.setVelocity(getMaxVel(), heading);
	}

	// Builds the two-state moving<->attacking FSM; transitions trigger on
	// the "targetdist" input crossing 900 (squared distance).
	private void initialize_fsm(Unit entity) {
		FiniteState moving = new FiniteState("moving");
		FiniteState attacking = new FiniteState("attacking");

		SimpleLessTransition movingtrans = new SimpleLessTransition(moving, attacking);
		movingtrans.setInputName("targetdist");
		movingtrans.setVal(900);

		SimpleGreaterTransition attackingtrans = new SimpleGreaterTransition(attacking, moving);
		attackingtrans.setInputName("targetdist");
		attackingtrans.setVal(900);

		moving.addTransition(movingtrans);
		attacking.addTransition(attackingtrans);

		FiniteStateMachine fsm = entity.getFSM();
		fsm.addState("moving", moving);
		fsm.addState("attacking", attacking);
	}
}
package com.yahoo.vespa.flags;

import com.yahoo.vespa.defaults.Defaults;

import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.TreeMap;

import static com.yahoo.vespa.flags.FetchVector.Dimension.HOSTNAME;

/**
 * Static registry of all feature flag definitions. Flags are defined as
 * "unbound" flags via the define* factory methods below, which also record
 * a {@link FlagDefinition} in the global registry consulted by
 * {@link #getAllFlags()} and {@link #getFlag(FlagId)}.
 *
 * @author hakonhall
 */
public class Flags {
    // volatile because Replacer (testing) swaps the whole map; normal flag
    // definition happens during class initialization only.
    private static volatile TreeMap<FlagId, FlagDefinition> flags = new TreeMap<>();

    public static final UnboundBooleanFlag HEALTHMONITOR_MONITOR_INFRA = defineFeatureFlag(
            "healthmonitor-monitorinfra", true,
            "Whether the health monitor in service monitor monitors the health of infrastructure applications.",
            "Affects all applications activated after the value is changed.",
            HOSTNAME);

    public static final UnboundBooleanFlag DUPERMODEL_CONTAINS_INFRA = defineFeatureFlag(
            "dupermodel-contains-infra", true,
            "Whether the DuperModel in config server/controller includes active infrastructure applications " +
            "(except from controller/config apps).",
            "Requires restart of config server/controller to take effect.",
            HOSTNAME);

    public static final UnboundBooleanFlag DUPERMODEL_USE_CONFIGSERVERCONFIG = defineFeatureFlag(
            "dupermodel-use-configserverconfig", true,
            "For historical reasons, the ApplicationInfo in the DuperModel for controllers and config servers " +
            "is based on the ConfigserverConfig (this flag is true). We want to transition to use the " +
            "infrastructure application activated by the InfrastructureProvisioner once that supports health.",
            "Requires restart of config server/controller to take effect.",
            HOSTNAME);

    public static final UnboundBooleanFlag USE_CONFIG_SERVER_CACHE = defineFeatureFlag(
            "use-config-server-cache", true,
            "Whether config server will use cache to answer config requests.",
            "Takes effect immediately when changed.",
            HOSTNAME, FetchVector.Dimension.APPLICATION_ID);

    public static final UnboundBooleanFlag CONFIG_SERVER_BOOTSTRAP_IN_SEPARATE_THREAD = defineFeatureFlag(
            "config-server-bootstrap-in-separate-thread", true,
            "Whether to run config server/controller bootstrap in a separate thread.",
            "Takes effect only at bootstrap of config server/controller",
            HOSTNAME);

    public static final UnboundBooleanFlag PROXYHOST_USES_REAL_ORCHESTRATOR = defineFeatureFlag(
            "proxyhost-uses-real-orchestrator", true,
            "Whether proxy hosts uses the real Orchestrator when suspending/resuming, or a synthetic.",
            "Takes effect immediately when changed.",
            HOSTNAME);

    public static final UnboundBooleanFlag CONFIGHOST_USES_REAL_ORCHESTRATOR = defineFeatureFlag(
            "confighost-uses-real-orchestrator", true,
            "Whether the config server hosts uses the real Orchestrator when suspending/resuming, or a synthetic.",
            "Takes effect immediately when changed.",
            HOSTNAME);

    public static final UnboundBooleanFlag ENABLE_CROWDSTRIKE = defineFeatureFlag(
            "enable-crowdstrike", true,
            "Whether to enable CrowdStrike.", "Takes effect on next host admin tick",
            HOSTNAME);

    public static final UnboundBooleanFlag ENABLE_NESSUS = defineFeatureFlag(
            "enable-nessus", true,
            "Whether to enable Nessus.", "Takes effect on next host admin tick",
            HOSTNAME);

    public static final UnboundBooleanFlag ENABLE_CPU_TEMPERATURE_TASK = defineFeatureFlag(
            "enable-cputemptask", true,
            "Whether to enable CPU temperature task", "Takes effect on next host admin tick",
            HOSTNAME);

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static UnboundBooleanFlag defineFeatureFlag(String flagId, boolean defaultValue, String description,
                                                       String modificationEffect, FetchVector.Dimension... dimensions) {
        return define(UnboundBooleanFlag::new, flagId, defaultValue, description, modificationEffect, dimensions);
    }

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static UnboundStringFlag defineStringFlag(String flagId, String defaultValue, String description,
                                                     String modificationEffect, FetchVector.Dimension... dimensions) {
        return define(UnboundStringFlag::new, flagId, defaultValue, description, modificationEffect, dimensions);
    }

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static UnboundIntFlag defineIntFlag(String flagId, int defaultValue, String description,
                                               String modificationEffect, FetchVector.Dimension... dimensions) {
        return define(UnboundIntFlag::new, flagId, defaultValue, description, modificationEffect, dimensions);
    }

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static UnboundLongFlag defineLongFlag(String flagId, long defaultValue, String description,
                                                 String modificationEffect, FetchVector.Dimension... dimensions) {
        return define(UnboundLongFlag::new, flagId, defaultValue, description, modificationEffect, dimensions);
    }

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static <T> UnboundJacksonFlag<T> defineJacksonFlag(String flagId, T defaultValue, Class<T> jacksonClass,
                                                              String description, String modificationEffect,
                                                              FetchVector.Dimension... dimensions) {
        return define((id2, defaultValue2, vector2) -> new UnboundJacksonFlag<>(id2, defaultValue2, vector2, jacksonClass),
                flagId, defaultValue, description, modificationEffect, dimensions);
    }

    @FunctionalInterface
    private interface TypedUnboundFlagFactory<T, U extends UnboundFlag<?, ?, ?>> {
        U create(FlagId id, T defaultVale, FetchVector defaultFetchVector);
    }

    /**
     * Defines a Flag.
     *
     * @param factory            Factory for creating unbound flag of type U
     * @param flagId             The globally unique FlagId.
     * @param defaultValue       The default value if none is present after resolution.
     * @param description        Description of how the flag is used.
     * @param modificationEffect What is required for the flag to take effect? A restart of process? immediately? etc.
     * @param dimensions         What dimensions will be set in the {@link FetchVector} when fetching
     *                           the flag value in
     *                           {@link FlagSource#fetch(FlagId, FetchVector) FlagSource::fetch}.
     *                           For instance, if APPLICATION is one of the dimensions here, you should make sure
     *                           APPLICATION is set to the ApplicationId in the fetch vector when fetching the RawFlag
     *                           from the FlagSource.
     * @param <T>                The boxed type of the flag value, e.g. Boolean for flags guarding features.
     * @param <U>                The type of the unbound flag, e.g. UnboundBooleanFlag.
     * @return An unbound flag with {@link FetchVector.Dimension#HOSTNAME HOSTNAME} environment. The ZONE environment
     *         is typically implicit.
     */
    private static <T, U extends UnboundFlag<?, ?, ?>> U define(TypedUnboundFlagFactory<T, U> factory,
                                                                String flagId,
                                                                T defaultValue,
                                                                String description,
                                                                String modificationEffect,
                                                                FetchVector.Dimension[] dimensions) {
        FlagId id = new FlagId(flagId);
        FetchVector vector = new FetchVector().with(HOSTNAME, Defaults.getDefaults().vespaHostname());
        U unboundFlag = factory.create(id, defaultValue, vector);
        FlagDefinition definition = new FlagDefinition(unboundFlag, description, modificationEffect, dimensions);
        flags.put(id, definition);
        return unboundFlag;
    }

    public static List<FlagDefinition> getAllFlags() {
        return new ArrayList<>(flags.values());
    }

    public static Optional<FlagDefinition> getFlag(FlagId flagId) {
        return Optional.ofNullable(flags.get(flagId));
    }

    /**
     * Allows the statically defined flags to be controlled in a test.
     *
     * <p>Returns a Replacer instance to be used with e.g. a try-with-resources block. Within the block,
     * the flags starts out as cleared. Flags can be defined, etc. When leaving the block, the flags from
     * before the block is reinserted.
     *
     * <p>NOT thread-safe. Tests using this cannot run in parallel.
     */
    public static Replacer clearFlagsForTesting() {
        return new Replacer();
    }

    public static class Replacer implements AutoCloseable {
        private static volatile boolean flagsCleared = false;

        private final TreeMap<FlagId, FlagDefinition> savedFlags;

        private Replacer() {
            verifyAndSetFlagsCleared(true);
            this.savedFlags = Flags.flags;
            Flags.flags = new TreeMap<>();
        }

        @Override
        public void close() {
            verifyAndSetFlagsCleared(false);
            Flags.flags = savedFlags;
        }

        /**
         * Used to implement a simple verification that Replacer is not used by multiple threads.
         * For instance two different tests running in parallel cannot both use Replacer.
         */
        private static void verifyAndSetFlagsCleared(boolean newValue) {
            if (flagsCleared == newValue) {
                // FIX: the old message ("called while already cleared ... parallell")
                // was misspelled and wrong for the close() direction; report the
                // actual inconsistency for both directions.
                throw new IllegalStateException(newValue ?
                        "clearFlagsForTesting called while flags were already cleared - running tests in parallel!?" :
                        "Replacer closed while flags were not cleared - closed twice, or running tests in parallel!?");
            }
            flagsCleared = newValue;
        }
    }
}
package com.yahoo.vespa.flags; import com.yahoo.component.Vtag; import com.yahoo.vespa.defaults.Defaults; import com.yahoo.vespa.flags.custom.PreprovisionCapacity; import java.util.List; import java.util.Optional; import java.util.TreeMap; import static com.yahoo.vespa.flags.FetchVector.Dimension.APPLICATION_ID; import static com.yahoo.vespa.flags.FetchVector.Dimension.HOSTNAME; import static com.yahoo.vespa.flags.FetchVector.Dimension.NODE_TYPE; import static com.yahoo.vespa.flags.FetchVector.Dimension.VESPA_VERSION; import static com.yahoo.vespa.flags.FetchVector.Dimension.ZONE_ID; /** * Definitions of feature flags. * * <p>To use feature flags, define the flag in this class as an "unbound" flag, e.g. {@link UnboundBooleanFlag} * or {@link UnboundStringFlag}. At the location you want to get the value of the flag, you need the following:</p> * * <ol> * <li>The unbound flag</li> * <li>A {@link FlagSource}. The flag source is typically available as an injectable component. Binding * an unbound flag to a flag source produces a (bound) flag, e.g. {@link BooleanFlag} and {@link StringFlag}.</li> * <li>If you would like your flag value to be dependent on e.g. the application ID, then 1. you should * declare this in the unbound flag definition in this file (referring to * {@link FetchVector.Dimension#APPLICATION_ID}), and 2. specify the application ID when retrieving the value, e.g. * {@link BooleanFlag#with(FetchVector.Dimension, String)}. See {@link FetchVector} for more info.</li> * </ol> * * <p>Once the code is in place, you can override the flag value. 
This depends on the flag source, but typically * there is a REST API for updating the flags in the config server, which is the root of all flag sources in the zone.</p> * * @author hakonhall */ public class Flags { private static volatile TreeMap<FlagId, FlagDefinition> flags = new TreeMap<>(); public static final UnboundIntFlag DROP_CACHES = defineIntFlag( "drop-caches", 3, "The int value to write into /proc/sys/vm/drop_caches for each tick. " + "1 is page cache, 2 is dentries inodes, 3 is both page cache and dentries inodes, etc.", "Takes effect on next tick.", HOSTNAME); public static final UnboundBooleanFlag ENABLE_CROWDSTRIKE = defineFeatureFlag( "enable-crowdstrike", true, "Whether to enable CrowdStrike.", "Takes effect on next host admin tick", HOSTNAME); public static final UnboundBooleanFlag ENABLE_NESSUS = defineFeatureFlag( "enable-nessus", true, "Whether to enable Nessus.", "Takes effect on next host admin tick", HOSTNAME); public static final UnboundBooleanFlag ENABLE_FLEET_SSHD_CONFIG = defineFeatureFlag( "enable-fleet-sshd-config", true, "Whether fleet should manage the /etc/ssh/sshd_config file.", "Takes effect on next host admin tick.", HOSTNAME); public static final UnboundBooleanFlag FLEET_CANARY = defineFeatureFlag( "fleet-canary", false, "Whether the host is a fleet canary.", "Takes effect on next host admin tick.", HOSTNAME); public static final UnboundBooleanFlag USE_NEW_VESPA_RPMS = defineFeatureFlag( "use-new-vespa-rpms", false, "Whether to use the new vespa-rpms YUM repo when upgrading/downgrading. The vespa-version " + "when fetching the flag value is the wanted version of the host.", "Takes effect when upgrading or downgrading host admin to a different version.", HOSTNAME, NODE_TYPE, VESPA_VERSION); public static final UnboundListFlag<String> DISABLED_HOST_ADMIN_TASKS = defineListFlag( "disabled-host-admin-tasks", List.of(), String.class, "List of host-admin task names (as they appear in the log, e.g. 
root>main>UpgradeTask) that should be skipped", "Takes effect on next host admin tick", HOSTNAME, NODE_TYPE); public static final UnboundStringFlag DOCKER_VERSION = defineStringFlag( "docker-version", "1.13.1-102.git7f2769b", "The version of the docker to use of the format VERSION-REL: The YUM package to be installed will be " + "2:docker-VERSION-REL.el7.centos.x86_64 in AWS (and without '.centos' otherwise). " + "If docker-version is not of this format, it must be parseable by YumPackageName::fromString.", "Takes effect on next tick.", HOSTNAME); public static final UnboundLongFlag THIN_POOL_GB = defineLongFlag( "thin-pool-gb", -1, "The size of the disk reserved for the thin pool with dynamic provisioning in AWS, in base-2 GB. " + "If <0, the default is used (which may depend on the zone and node type).", "Takes effect immediately (but used only during provisioning).", NODE_TYPE); public static final UnboundDoubleFlag CONTAINER_CPU_CAP = defineDoubleFlag( "container-cpu-cap", 0, "Hard limit on how many CPUs a container may use. This value is multiplied by CPU allocated to node, so " + "to cap CPU at 200%, set this to 2, etc.", "Takes effect on next node agent tick. Change is orchestrated, but does NOT require container restart", HOSTNAME, APPLICATION_ID); public static final UnboundStringFlag TLS_INSECURE_AUTHORIZATION_MODE = defineStringFlag( "tls-insecure-authorization-mode", "log_only", "TLS insecure authorization mode. 
Allowed values: ['disable', 'log_only', 'enforce']", "Takes effect on restart of Docker container", NODE_TYPE, APPLICATION_ID, HOSTNAME); public static final UnboundIntFlag REBOOT_INTERVAL_IN_DAYS = defineIntFlag( "reboot-interval-in-days", 30, "No reboots are scheduled 0x-1x reboot intervals after the previous reboot, while reboot is " + "scheduled evenly distributed in the 1x-2x range (and naturally guaranteed at the 2x boundary).", "Takes effect on next run of NodeRebooter"); public static final UnboundBooleanFlag RETIRE_WITH_PERMANENTLY_DOWN = defineFeatureFlag( "retire-with-permanently-down", false, "If enabled, retirement will end with setting the host status to PERMANENTLY_DOWN, " + "instead of ALLOWED_TO_BE_DOWN (old behavior).", "Takes effect on the next run of RetiredExpirer.", HOSTNAME); public static final UnboundListFlag<PreprovisionCapacity> PREPROVISION_CAPACITY = defineListFlag( "preprovision-capacity", List.of(), PreprovisionCapacity.class, "List of node resources and their count that should be present in zone to receive new deployments. 
When a " + "preprovisioned is taken, new will be provisioned within next iteration of maintainer.", "Takes effect on next iteration of HostProvisionMaintainer."); public static final UnboundDoubleFlag DEFAULT_TERM_WISE_LIMIT = defineDoubleFlag( "default-term-wise-limit", 1.0, "Default limit for when to apply termwise query evaluation", "Takes effect at redeployment", ZONE_ID, APPLICATION_ID); public static final UnboundDoubleFlag DEFAULT_SOFT_START_SECONDS = defineDoubleFlag( "default-soft-start-seconds", 0.0, "Default number of seconds that a soft start shall use", "Takes effect at redeployment", ZONE_ID, APPLICATION_ID); public static final UnboundDoubleFlag DEFAULT_THREADPOOL_SIZE_FACTOR = defineDoubleFlag( "default-threadpool-size-factor", 0.0, "Default multiplication factor when computing maxthreads for main container threadpool based on available cores", "Takes effect at redeployment", ZONE_ID, APPLICATION_ID); public static final UnboundDoubleFlag DEFAULT_QUEUE_SIZE_FACTOR = defineDoubleFlag( "default-queue-size-factor", 0.0, "Default multiplication factor when computing queuesize for burst handling", "Takes effect at redeployment", ZONE_ID, APPLICATION_ID); public static final UnboundDoubleFlag DEFAULT_TOP_K_PROBABILITY = defineDoubleFlag( "default-top-k-probability", 1.0, "Default probability that you will get the globally top K documents when merging many partitions.", "Takes effect at redeployment", ZONE_ID, APPLICATION_ID); public static final UnboundBooleanFlag USE_DISTRIBUTOR_BTREE_DB = defineFeatureFlag( "use-distributor-btree-db", false, "Whether to use the new B-tree bucket database in the distributors.", "Takes effect at restart of distributor process", ZONE_ID, APPLICATION_ID); public static final UnboundBooleanFlag USE_THREE_PHASE_UPDATES = defineFeatureFlag( "use-three-phase-updates", false, "Whether to enable the use of three-phase updates when bucket replicas are out of sync.", "Takes effect at redeployment", ZONE_ID, APPLICATION_ID); public 
static final UnboundBooleanFlag HOST_HARDENING = defineFeatureFlag( "host-hardening", false, "Whether to enable host hardening Linux baseline.", "Takes effect on next tick or on host-admin restart (may vary where used).", HOSTNAME); public static final UnboundBooleanFlag TCP_ABORT_ON_OVERFLOW = defineFeatureFlag( "tcp-abort-on-overflow", false, "Whether to set /proc/sys/net/ipv4/tcp_abort_on_overflow to 0 (false) or 1 (true)", "Takes effect on next host-admin tick.", HOSTNAME); public static final UnboundStringFlag ZOOKEEPER_SERVER_VERSION = defineStringFlag( "zookeeper-server-version-full", "3.5.6", "ZooKeeper server version, a jar file zookeeper-server-<ZOOKEEPER_SERVER_VERSION>-jar-with-dependencies.jar must exist", "Takes effect on restart of Docker container", NODE_TYPE, APPLICATION_ID, HOSTNAME); public static final UnboundStringFlag TLS_FOR_ZOOKEEPER_CLIENT_SERVER_COMMUNICATION = defineStringFlag( "tls-for-zookeeper-client-server-communication", "OFF", "How to setup TLS for ZooKeeper client/server communication. 
Valid values are OFF, PORT_UNIFICATION, TLS_WITH_PORT_UNIFICATION, TLS_ONLY", "Takes effect on restart of config server", NODE_TYPE, HOSTNAME); public static final UnboundBooleanFlag USE_TLS_FOR_ZOOKEEPER_CLIENT = defineFeatureFlag( "use-tls-for-zookeeper-client", false, "Whether to use TLS for ZooKeeper clients", "Takes effect on restart of process", NODE_TYPE, HOSTNAME); public static final UnboundBooleanFlag ENABLE_DISK_WRITE_TEST = defineFeatureFlag( "enable-disk-write-test", true, "Regularly issue a small write to disk and fail the host if it is not successful", "Takes effect on next node agent tick (but does not clear existing failure reports)", HOSTNAME); public static final UnboundBooleanFlag USE_REFRESHED_ENDPOINT_CERTIFICATE = defineFeatureFlag( "use-refreshed-endpoint-certificate", false, "Whether an application should start using a newer certificate/key pair if available", "Takes effect on the next deployment of the application", APPLICATION_ID); public static final UnboundBooleanFlag VALIDATE_ENDPOINT_CERTIFICATES = defineFeatureFlag( "validate-endpoint-certificates", false, "Whether endpoint certificates should be validated before use", "Takes effect on the next deployment of the application"); public static final UnboundStringFlag ENDPOINT_CERTIFICATE_BACKFILL = defineStringFlag( "endpoint-certificate-backfill", "disable", "Whether the endpoint certificate maintainer should backfill missing certificate data from cameo", "Takes effect on next scheduled run of maintainer - set to \"disable\", \"dryrun\" or \"enable\""); public static final UnboundStringFlag DOCKER_IMAGE_REPO = defineStringFlag( "docker-image-repo", "", "Override default docker image repo. 
Docker image version will be Vespa version.", "Takes effect on next deployment from controller", ZONE_ID, APPLICATION_ID); public static final UnboundBooleanFlag ENDPOINT_CERT_IN_SHARED_ROUTING = defineFeatureFlag( "endpoint-cert-in-shared-routing", false, "Whether to provision and use endpoint certs for apps in shared routing zones", "Takes effect on next deployment of the application", APPLICATION_ID); public static final UnboundBooleanFlag PHRASE_SEGMENTING = defineFeatureFlag( "phrase-segmenting", false, "Should 'implicit phrases' in queries we parsed to a phrase or and?", "Takes effect on redeploy", ZONE_ID, APPLICATION_ID); public static final UnboundBooleanFlag ALLOW_DIRECT_ROUTING = defineFeatureFlag( "publish-direct-routing-endpoint", false, "Whether an application should receive a directly routed endpoint in its endpoint list", "Takes effect immediately", APPLICATION_ID); public static final UnboundBooleanFlag NLB_PROXY_PROTOCOL = defineFeatureFlag( "nlb-proxy-protocol", false, "Configure NLB to use proxy protocol", "Takes effect on next application redeploy", APPLICATION_ID); public static final UnboundLongFlag CONFIGSERVER_SESSIONS_EXPIRY_INTERVAL_IN_DAYS = defineLongFlag( "configserver-sessions-expiry-interval-in-days", 28, "Expiry time for unused sessions in config server", "Takes effect on next run of config server maintainer SessionsMaintainer"); public static final UnboundBooleanFlag USE_CLOUD_INIT_FORMAT = defineFeatureFlag( "use-cloud-init", false, "Use the cloud-init format when provisioning hosts", "Takes effect immediately", ZONE_ID); public static final UnboundBooleanFlag CONFIGSERVER_DISTRIBUTE_APPLICATION_PACKAGE = defineFeatureFlag( "configserver-distribute-application-package", false, "Whether the application package should be distributed to other config servers during a deployment", "Takes effect immediately"); public static final UnboundBooleanFlag PROVISION_APPLICATION_ROLES = defineFeatureFlag( "provision-application-roles", false, 
"Whether application roles should be provisioned", "Takes effect on next deployment (controller)", ZONE_ID); public static final UnboundBooleanFlag CONFIGSERVER_UNSET_ENDPOINTS = defineFeatureFlag( "configserver-unset-endpoints", false, "Whether the configserver allows removal of existing endpoints when an empty list of container endpoints is request", "Takes effect on next external deployment", APPLICATION_ID ); public static final UnboundIntFlag JDISC_HEALTH_CHECK_PROXY_CLIENT_TIMEOUT = defineIntFlag( "jdisc-health-check-proxy-client-timeout", 1000, "Temporary flag to rollout reduced timeout for JDisc's health check proxy client. Timeout in milliseconds", "Takes effect on next internal redeployment", APPLICATION_ID); public static final UnboundBooleanFlag APPLICATION_IAM_ROLE = defineFeatureFlag( "application-iam-roles", false, "Allow separate iam roles when provisioning/assigning hosts", "Takes effect immediately on new hosts, on next redeploy for applications", APPLICATION_ID); /** WARNING: public for testing: All flags should be defined in {@link Flags}. */ public static UnboundBooleanFlag defineFeatureFlag(String flagId, boolean defaultValue, String description, String modificationEffect, FetchVector.Dimension... dimensions) { return define(UnboundBooleanFlag::new, flagId, defaultValue, description, modificationEffect, dimensions); } /** WARNING: public for testing: All flags should be defined in {@link Flags}. */ public static UnboundStringFlag defineStringFlag(String flagId, String defaultValue, String description, String modificationEffect, FetchVector.Dimension... dimensions) { return define(UnboundStringFlag::new, flagId, defaultValue, description, modificationEffect, dimensions); } /** WARNING: public for testing: All flags should be defined in {@link Flags}. */ public static UnboundIntFlag defineIntFlag(String flagId, int defaultValue, String description, String modificationEffect, FetchVector.Dimension... 
dimensions) { return define(UnboundIntFlag::new, flagId, defaultValue, description, modificationEffect, dimensions); } /** WARNING: public for testing: All flags should be defined in {@link Flags}. */ public static UnboundLongFlag defineLongFlag(String flagId, long defaultValue, String description, String modificationEffect, FetchVector.Dimension... dimensions) { return define(UnboundLongFlag::new, flagId, defaultValue, description, modificationEffect, dimensions); } /** WARNING: public for testing: All flags should be defined in {@link Flags}. */ public static UnboundDoubleFlag defineDoubleFlag(String flagId, double defaultValue, String description, String modificationEffect, FetchVector.Dimension... dimensions) { return define(UnboundDoubleFlag::new, flagId, defaultValue, description, modificationEffect, dimensions); } /** WARNING: public for testing: All flags should be defined in {@link Flags}. */ public static <T> UnboundJacksonFlag<T> defineJacksonFlag(String flagId, T defaultValue, Class<T> jacksonClass, String description, String modificationEffect, FetchVector.Dimension... dimensions) { return define((id2, defaultValue2, vector2) -> new UnboundJacksonFlag<>(id2, defaultValue2, vector2, jacksonClass), flagId, defaultValue, description, modificationEffect, dimensions); } /** WARNING: public for testing: All flags should be defined in {@link Flags}. */ public static <T> UnboundListFlag<T> defineListFlag(String flagId, List<T> defaultValue, Class<T> elementClass, String description, String modificationEffect, FetchVector.Dimension... dimensions) { return define((fid, dval, fvec) -> new UnboundListFlag<>(fid, dval, elementClass, fvec), flagId, defaultValue, description, modificationEffect, dimensions); } @FunctionalInterface private interface TypedUnboundFlagFactory<T, U extends UnboundFlag<?, ?, ?>> { U create(FlagId id, T defaultVale, FetchVector defaultFetchVector); } /** * Defines a Flag. 
* * @param factory Factory for creating unbound flag of type U * @param flagId The globally unique FlagId. * @param defaultValue The default value if none is present after resolution. * @param description Description of how the flag is used. * @param modificationEffect What is required for the flag to take effect? A restart of process? immediately? etc. * @param dimensions What dimensions will be set in the {@link FetchVector} when fetching * the flag value in * {@link FlagSource#fetch(FlagId, FetchVector) FlagSource::fetch}. * For instance, if APPLICATION is one of the dimensions here, you should make sure * APPLICATION is set to the ApplicationId in the fetch vector when fetching the RawFlag * from the FlagSource. * @param <T> The boxed type of the flag value, e.g. Boolean for flags guarding features. * @param <U> The type of the unbound flag, e.g. UnboundBooleanFlag. * @return An unbound flag with {@link FetchVector.Dimension#HOSTNAME HOSTNAME} and * {@link FetchVector.Dimension#VESPA_VERSION VESPA_VERSION} already set. The ZONE environment * is typically implicit. */ private static <T, U extends UnboundFlag<?, ?, ?>> U define(TypedUnboundFlagFactory<T, U> factory, String flagId, T defaultValue, String description, String modificationEffect, FetchVector.Dimension[] dimensions) { FlagId id = new FlagId(flagId); FetchVector vector = new FetchVector() .with(HOSTNAME, Defaults.getDefaults().vespaHostname()) // Warning: In unit tests and outside official Vespa releases, the currentVersion is e.g. 7.0.0 // (determined by the current major version). Consider not setting VESPA_VERSION if minor = micro = 0. 
.with(VESPA_VERSION, Vtag.currentVersion.toFullString()); U unboundFlag = factory.create(id, defaultValue, vector); FlagDefinition definition = new FlagDefinition(unboundFlag, description, modificationEffect, dimensions); flags.put(id, definition); return unboundFlag; } public static List<FlagDefinition> getAllFlags() { return List.copyOf(flags.values()); } public static Optional<FlagDefinition> getFlag(FlagId flagId) { return Optional.ofNullable(flags.get(flagId)); } /** * Allows the statically defined flags to be controlled in a test. * * <p>Returns a Replacer instance to be used with e.g. a try-with-resources block. Within the block, * the flags starts out as cleared. Flags can be defined, etc. When leaving the block, the flags from * before the block is reinserted. * * <p>NOT thread-safe. Tests using this cannot run in parallel. */ public static Replacer clearFlagsForTesting() { return new Replacer(); } public static class Replacer implements AutoCloseable { private static volatile boolean flagsCleared = false; private final TreeMap<FlagId, FlagDefinition> savedFlags; private Replacer() { verifyAndSetFlagsCleared(true); this.savedFlags = Flags.flags; Flags.flags = new TreeMap<>(); } @Override public void close() { verifyAndSetFlagsCleared(false); Flags.flags = savedFlags; } /** * Used to implement a simple verification that Replacer is not used by multiple threads. * For instance two different tests running in parallel cannot both use Replacer. */ private static void verifyAndSetFlagsCleared(boolean newValue) { if (flagsCleared == newValue) { throw new IllegalStateException("clearFlagsForTesting called while already cleared - running tests in parallell!?"); } flagsCleared = newValue; } } }
package com.yahoo.vespa.flags;

import com.yahoo.component.Vtag;
import com.yahoo.vespa.defaults.Defaults;
import com.yahoo.vespa.flags.custom.PreprovisionCapacity;

import java.util.List;
import java.util.Optional;
import java.util.TreeMap;

import static com.yahoo.vespa.flags.FetchVector.Dimension.APPLICATION_ID;
import static com.yahoo.vespa.flags.FetchVector.Dimension.HOSTNAME;
import static com.yahoo.vespa.flags.FetchVector.Dimension.NODE_TYPE;
import static com.yahoo.vespa.flags.FetchVector.Dimension.VESPA_VERSION;
import static com.yahoo.vespa.flags.FetchVector.Dimension.ZONE_ID;

/**
 * Definitions of feature flags.
 *
 * <p>To use feature flags, define the flag in this class as an "unbound" flag, e.g. {@link UnboundBooleanFlag}
 * or {@link UnboundStringFlag}. At the location you want to get the value of the flag, you need the following:</p>
 *
 * <ol>
 *     <li>The unbound flag</li>
 *     <li>A {@link FlagSource}. The flag source is typically available as an injectable component. Binding
 *     an unbound flag to a flag source produces a (bound) flag, e.g. {@link BooleanFlag} and {@link StringFlag}.</li>
 *     <li>If you would like your flag value to be dependent on e.g. the application ID, then 1. you should
 *     declare this in the unbound flag definition in this file (referring to
 *     {@link FetchVector.Dimension#APPLICATION_ID}), and 2. specify the application ID when retrieving the value, e.g.
 *     {@link BooleanFlag#with(FetchVector.Dimension, String)}. See {@link FetchVector} for more info.</li>
 * </ol>
 *
 * <p>Once the code is in place, you can override the flag value. This depends on the flag source, but typically
 * there is a REST API for updating the flags in the config server, which is the root of all flag sources in the zone.</p>
 *
 * @author hakonhall
 */
public class Flags {

    // Mutable only so Replacer (below) can swap in an empty registry for tests; volatile for cross-thread visibility.
    private static volatile TreeMap<FlagId, FlagDefinition> flags = new TreeMap<>();

    public static final UnboundIntFlag DROP_CACHES = defineIntFlag("drop-caches", 3,
            "The int value to write into /proc/sys/vm/drop_caches for each tick. " +
            "1 is page cache, 2 is dentries inodes, 3 is both page cache and dentries inodes, etc.",
            "Takes effect on next tick.",
            HOSTNAME);

    public static final UnboundBooleanFlag ENABLE_CROWDSTRIKE = defineFeatureFlag(
            "enable-crowdstrike", true,
            "Whether to enable CrowdStrike.", "Takes effect on next host admin tick",
            HOSTNAME);

    public static final UnboundBooleanFlag ENABLE_NESSUS = defineFeatureFlag(
            "enable-nessus", true,
            "Whether to enable Nessus.", "Takes effect on next host admin tick",
            HOSTNAME);

    public static final UnboundBooleanFlag ENABLE_FLEET_SSHD_CONFIG = defineFeatureFlag(
            "enable-fleet-sshd-config", true,
            "Whether fleet should manage the /etc/ssh/sshd_config file.",
            "Takes effect on next host admin tick.",
            HOSTNAME);

    public static final UnboundBooleanFlag FLEET_CANARY = defineFeatureFlag(
            "fleet-canary", false,
            "Whether the host is a fleet canary.",
            "Takes effect on next host admin tick.",
            HOSTNAME);

    public static final UnboundBooleanFlag USE_NEW_VESPA_RPMS = defineFeatureFlag(
            "use-new-vespa-rpms", false,
            "Whether to use the new vespa-rpms YUM repo when upgrading/downgrading. The vespa-version " +
            "when fetching the flag value is the wanted version of the host.",
            "Takes effect when upgrading or downgrading host admin to a different version.",
            HOSTNAME, NODE_TYPE, VESPA_VERSION);

    public static final UnboundListFlag<String> DISABLED_HOST_ADMIN_TASKS = defineListFlag(
            "disabled-host-admin-tasks", List.of(), String.class,
            "List of host-admin task names (as they appear in the log, e.g. root>main>UpgradeTask) that should be skipped",
            "Takes effect on next host admin tick",
            HOSTNAME, NODE_TYPE);

    public static final UnboundStringFlag DOCKER_VERSION = defineStringFlag(
            "docker-version", "1.13.1-102.git7f2769b",
            "The version of the docker to use of the format VERSION-REL: The YUM package to be installed will be " +
            "2:docker-VERSION-REL.el7.centos.x86_64 in AWS (and without '.centos' otherwise). " +
            "If docker-version is not of this format, it must be parseable by YumPackageName::fromString.",
            "Takes effect on next tick.",
            HOSTNAME);

    public static final UnboundLongFlag THIN_POOL_GB = defineLongFlag(
            "thin-pool-gb", -1,
            "The size of the disk reserved for the thin pool with dynamic provisioning in AWS, in base-2 GB. " +
            "If <0, the default is used (which may depend on the zone and node type).",
            "Takes effect immediately (but used only during provisioning).",
            NODE_TYPE);

    public static final UnboundDoubleFlag CONTAINER_CPU_CAP = defineDoubleFlag(
            "container-cpu-cap", 0,
            "Hard limit on how many CPUs a container may use. This value is multiplied by CPU allocated to node, so " +
            "to cap CPU at 200%, set this to 2, etc.",
            "Takes effect on next node agent tick. Change is orchestrated, but does NOT require container restart",
            HOSTNAME, APPLICATION_ID);

    public static final UnboundStringFlag TLS_INSECURE_AUTHORIZATION_MODE = defineStringFlag(
            "tls-insecure-authorization-mode", "log_only",
            "TLS insecure authorization mode. Allowed values: ['disable', 'log_only', 'enforce']",
            "Takes effect on restart of Docker container",
            NODE_TYPE, APPLICATION_ID, HOSTNAME);

    public static final UnboundBooleanFlag USE_ADAPTIVE_DISPATCH = defineFeatureFlag(
            "use-adaptive-dispatch", false,
            "Should adaptive dispatch be used over round robin",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundIntFlag REBOOT_INTERVAL_IN_DAYS = defineIntFlag(
            "reboot-interval-in-days", 30,
            "No reboots are scheduled 0x-1x reboot intervals after the previous reboot, while reboot is " +
            "scheduled evenly distributed in the 1x-2x range (and naturally guaranteed at the 2x boundary).",
            "Takes effect on next run of NodeRebooter");

    public static final UnboundBooleanFlag RETIRE_WITH_PERMANENTLY_DOWN = defineFeatureFlag(
            "retire-with-permanently-down", false,
            "If enabled, retirement will end with setting the host status to PERMANENTLY_DOWN, " +
            "instead of ALLOWED_TO_BE_DOWN (old behavior).",
            "Takes effect on the next run of RetiredExpirer.",
            HOSTNAME);

    public static final UnboundListFlag<PreprovisionCapacity> PREPROVISION_CAPACITY = defineListFlag(
            "preprovision-capacity", List.of(), PreprovisionCapacity.class,
            "List of node resources and their count that should be present in zone to receive new deployments. When a " +
            "preprovisioned is taken, new will be provisioned within next iteration of maintainer.",
            "Takes effect on next iteration of HostProvisionMaintainer.");

    public static final UnboundDoubleFlag DEFAULT_TERM_WISE_LIMIT = defineDoubleFlag(
            "default-term-wise-limit", 1.0,
            "Default limit for when to apply termwise query evaluation",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundDoubleFlag DEFAULT_SOFT_START_SECONDS = defineDoubleFlag(
            "default-soft-start-seconds", 0.0,
            "Default number of seconds that a soft start shall use",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundDoubleFlag DEFAULT_THREADPOOL_SIZE_FACTOR = defineDoubleFlag(
            "default-threadpool-size-factor", 0.0,
            "Default multiplication factor when computing maxthreads for main container threadpool based on available cores",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundDoubleFlag DEFAULT_QUEUE_SIZE_FACTOR = defineDoubleFlag(
            "default-queue-size-factor", 0.0,
            "Default multiplication factor when computing queuesize for burst handling",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundDoubleFlag DEFAULT_TOP_K_PROBABILITY = defineDoubleFlag(
            "default-top-k-probability", 1.0,
            "Default probability that you will get the globally top K documents when merging many partitions.",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag USE_DISTRIBUTOR_BTREE_DB = defineFeatureFlag(
            "use-distributor-btree-db", false,
            "Whether to use the new B-tree bucket database in the distributors.",
            "Takes effect at restart of distributor process",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag USE_THREE_PHASE_UPDATES = defineFeatureFlag(
            "use-three-phase-updates", false,
            "Whether to enable the use of three-phase updates when bucket replicas are out of sync.",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag HOST_HARDENING = defineFeatureFlag(
            "host-hardening", false,
            "Whether to enable host hardening Linux baseline.",
            "Takes effect on next tick or on host-admin restart (may vary where used).",
            HOSTNAME);

    public static final UnboundBooleanFlag TCP_ABORT_ON_OVERFLOW = defineFeatureFlag(
            "tcp-abort-on-overflow", false,
            "Whether to set /proc/sys/net/ipv4/tcp_abort_on_overflow to 0 (false) or 1 (true)",
            "Takes effect on next host-admin tick.",
            HOSTNAME);

    public static final UnboundStringFlag ZOOKEEPER_SERVER_MAJOR_MINOR_VERSION = defineStringFlag(
            "zookeeper-server-version", "3.5",
            "The version of ZooKeeper server to use (major.minor, not full version)",
            "Takes effect on restart of Docker container",
            NODE_TYPE, APPLICATION_ID, HOSTNAME);

    public static final UnboundStringFlag TLS_FOR_ZOOKEEPER_QUORUM_COMMUNICATION = defineStringFlag(
            "tls-for-zookeeper-quorum-communication", "TLS_WITH_PORT_UNIFICATION",
            "How to setup TLS for ZooKeeper quorum communication. Valid values are OFF, PORT_UNIFICATION, TLS_WITH_PORT_UNIFICATION, TLS_ONLY",
            "Takes effect on restart of config server",
            NODE_TYPE, HOSTNAME);

    public static final UnboundStringFlag TLS_FOR_ZOOKEEPER_CLIENT_SERVER_COMMUNICATION = defineStringFlag(
            "tls-for-zookeeper-client-server-communication", "OFF",
            "How to setup TLS for ZooKeeper client/server communication. Valid values are OFF, PORT_UNIFICATION, TLS_WITH_PORT_UNIFICATION, TLS_ONLY",
            "Takes effect on restart of config server",
            NODE_TYPE, HOSTNAME);

    public static final UnboundBooleanFlag USE_TLS_FOR_ZOOKEEPER_CLIENT = defineFeatureFlag(
            "use-tls-for-zookeeper-client", false,
            "Whether to use TLS for ZooKeeper clients",
            "Takes effect on restart of process",
            NODE_TYPE, HOSTNAME);

    public static final UnboundBooleanFlag ENABLE_DISK_WRITE_TEST = defineFeatureFlag(
            "enable-disk-write-test", true,
            "Regularly issue a small write to disk and fail the host if it is not successful",
            "Takes effect on next node agent tick (but does not clear existing failure reports)",
            HOSTNAME);

    public static final UnboundBooleanFlag USE_REFRESHED_ENDPOINT_CERTIFICATE = defineFeatureFlag(
            "use-refreshed-endpoint-certificate", false,
            "Whether an application should start using a newer certificate/key pair if available",
            "Takes effect on the next deployment of the application",
            APPLICATION_ID);

    public static final UnboundBooleanFlag VALIDATE_ENDPOINT_CERTIFICATES = defineFeatureFlag(
            "validate-endpoint-certificates", false,
            "Whether endpoint certificates should be validated before use",
            "Takes effect on the next deployment of the application");

    public static final UnboundStringFlag ENDPOINT_CERTIFICATE_BACKFILL = defineStringFlag(
            "endpoint-certificate-backfill", "disable",
            "Whether the endpoint certificate maintainer should backfill missing certificate data from cameo",
            "Takes effect on next scheduled run of maintainer - set to \"disable\", \"dryrun\" or \"enable\"");

    public static final UnboundStringFlag DOCKER_IMAGE_REPO = defineStringFlag(
            "docker-image-repo", "",
            "Override default docker image repo. Docker image version will be Vespa version.",
            "Takes effect on next deployment from controller",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag ENDPOINT_CERT_IN_SHARED_ROUTING = defineFeatureFlag(
            "endpoint-cert-in-shared-routing", false,
            "Whether to provision and use endpoint certs for apps in shared routing zones",
            "Takes effect on next deployment of the application",
            APPLICATION_ID);

    public static final UnboundBooleanFlag PHRASE_SEGMENTING = defineFeatureFlag(
            "phrase-segmenting", false,
            "Should 'implicit phrases' in queries we parsed to a phrase or and?",
            "Takes effect on redeploy",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag ALLOW_DIRECT_ROUTING = defineFeatureFlag(
            "publish-direct-routing-endpoint", false,
            "Whether an application should receive a directly routed endpoint in its endpoint list",
            "Takes effect immediately",
            APPLICATION_ID);

    public static final UnboundBooleanFlag NLB_PROXY_PROTOCOL = defineFeatureFlag(
            "nlb-proxy-protocol", false,
            "Configure NLB to use proxy protocol",
            "Takes effect on next application redeploy",
            APPLICATION_ID);

    public static final UnboundLongFlag CONFIGSERVER_SESSIONS_EXPIRY_INTERVAL_IN_DAYS = defineLongFlag(
            "configserver-sessions-expiry-interval-in-days", 28,
            "Expiry time for unused sessions in config server",
            "Takes effect on next run of config server maintainer SessionsMaintainer");

    public static final UnboundBooleanFlag USE_CLOUD_INIT_FORMAT = defineFeatureFlag(
            "use-cloud-init", false,
            "Use the cloud-init format when provisioning hosts",
            "Takes effect immediately",
            ZONE_ID);

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static UnboundBooleanFlag defineFeatureFlag(String flagId, boolean defaultValue, String description,
                                                       String modificationEffect, FetchVector.Dimension... dimensions) {
        return define(UnboundBooleanFlag::new, flagId, defaultValue, description, modificationEffect, dimensions);
    }

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static UnboundStringFlag defineStringFlag(String flagId, String defaultValue, String description,
                                                     String modificationEffect, FetchVector.Dimension... dimensions) {
        return define(UnboundStringFlag::new, flagId, defaultValue, description, modificationEffect, dimensions);
    }

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static UnboundIntFlag defineIntFlag(String flagId, int defaultValue, String description,
                                               String modificationEffect, FetchVector.Dimension... dimensions) {
        return define(UnboundIntFlag::new, flagId, defaultValue, description, modificationEffect, dimensions);
    }

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static UnboundLongFlag defineLongFlag(String flagId, long defaultValue, String description,
                                                 String modificationEffect, FetchVector.Dimension... dimensions) {
        return define(UnboundLongFlag::new, flagId, defaultValue, description, modificationEffect, dimensions);
    }

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static UnboundDoubleFlag defineDoubleFlag(String flagId, double defaultValue, String description,
                                                     String modificationEffect, FetchVector.Dimension... dimensions) {
        return define(UnboundDoubleFlag::new, flagId, defaultValue, description, modificationEffect, dimensions);
    }

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static <T> UnboundJacksonFlag<T> defineJacksonFlag(String flagId, T defaultValue, Class<T> jacksonClass,
                                                              String description, String modificationEffect,
                                                              FetchVector.Dimension... dimensions) {
        return define((id2, defaultValue2, vector2) -> new UnboundJacksonFlag<>(id2, defaultValue2, vector2, jacksonClass),
                      flagId, defaultValue, description, modificationEffect, dimensions);
    }

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static <T> UnboundListFlag<T> defineListFlag(String flagId, List<T> defaultValue, Class<T> elementClass,
                                                        String description, String modificationEffect,
                                                        FetchVector.Dimension... dimensions) {
        return define((fid, dval, fvec) -> new UnboundListFlag<>(fid, dval, elementClass, fvec),
                      flagId, defaultValue, description, modificationEffect, dimensions);
    }

    /** Factory for creating an unbound flag of a particular type from its id, default value, and fetch vector. */
    @FunctionalInterface
    private interface TypedUnboundFlagFactory<T, U extends UnboundFlag<?, ?, ?>> {
        U create(FlagId id, T defaultValue, FetchVector defaultFetchVector);
    }

    /**
     * Defines a Flag.
     *
     * @param factory            Factory for creating unbound flag of type U
     * @param flagId             The globally unique FlagId.
     * @param defaultValue       The default value if none is present after resolution.
     * @param description        Description of how the flag is used.
     * @param modificationEffect What is required for the flag to take effect? A restart of process? immediately? etc.
     * @param dimensions         What dimensions will be set in the {@link FetchVector} when fetching
     *                           the flag value in
     *                           {@link FlagSource#fetch(FlagId, FetchVector) FlagSource::fetch}.
     *                           For instance, if APPLICATION is one of the dimensions here, you should make sure
     *                           APPLICATION is set to the ApplicationId in the fetch vector when fetching the RawFlag
     *                           from the FlagSource.
     * @param <T>                The boxed type of the flag value, e.g. Boolean for flags guarding features.
     * @param <U>                The type of the unbound flag, e.g. UnboundBooleanFlag.
     * @return An unbound flag with {@link FetchVector.Dimension#HOSTNAME HOSTNAME} and
     *         {@link FetchVector.Dimension#VESPA_VERSION VESPA_VERSION} already set. The ZONE environment
     *         is typically implicit.
     */
    private static <T, U extends UnboundFlag<?, ?, ?>> U define(TypedUnboundFlagFactory<T, U> factory,
                                                                String flagId,
                                                                T defaultValue,
                                                                String description,
                                                                String modificationEffect,
                                                                FetchVector.Dimension[] dimensions) {
        FlagId id = new FlagId(flagId);
        FetchVector vector = new FetchVector()
                .with(HOSTNAME, Defaults.getDefaults().vespaHostname())
                // Warning: In unit tests and outside official Vespa releases, the currentVersion is e.g. 7.0.0
                // (determined by the current major version). Consider not setting VESPA_VERSION if minor = micro = 0.
                .with(VESPA_VERSION, Vtag.currentVersion.toFullString());
        U unboundFlag = factory.create(id, defaultValue, vector);
        FlagDefinition definition = new FlagDefinition(unboundFlag, description, modificationEffect, dimensions);
        flags.put(id, definition);
        return unboundFlag;
    }

    /** Returns an immutable snapshot of all currently defined flags. */
    public static List<FlagDefinition> getAllFlags() {
        return List.copyOf(flags.values());
    }

    /** Returns the definition of the given flag, or empty if no such flag has been defined. */
    public static Optional<FlagDefinition> getFlag(FlagId flagId) {
        return Optional.ofNullable(flags.get(flagId));
    }

    /**
     * Allows the statically defined flags to be controlled in a test.
     *
     * <p>Returns a Replacer instance to be used with e.g. a try-with-resources block. Within the block,
     * the flags start out as cleared. Flags can be defined, etc. When leaving the block, the flags from
     * before the block are reinserted.
     *
     * <p>NOT thread-safe. Tests using this cannot run in parallel.
     */
    public static Replacer clearFlagsForTesting() {
        return new Replacer();
    }

    public static class Replacer implements AutoCloseable {
        private static volatile boolean flagsCleared = false;

        private final TreeMap<FlagId, FlagDefinition> savedFlags;

        private Replacer() {
            verifyAndSetFlagsCleared(true);
            this.savedFlags = Flags.flags;
            Flags.flags = new TreeMap<>();
        }

        @Override
        public void close() {
            verifyAndSetFlagsCleared(false);
            Flags.flags = savedFlags;
        }

        /**
         * Used to implement a simple verification that Replacer is not used by multiple threads.
         * For instance two different tests running in parallel cannot both use Replacer.
         */
        private static void verifyAndSetFlagsCleared(boolean newValue) {
            if (flagsCleared == newValue) {
                // "parallell" typo fixed in the message below.
                throw new IllegalStateException("clearFlagsForTesting called while already cleared - running tests in parallel!?");
            }
            flagsCleared = newValue;
        }
    }
}
package com.yahoo.vespa.flags; import com.yahoo.component.Vtag; import com.yahoo.vespa.defaults.Defaults; import java.time.Instant; import java.time.LocalDate; import java.time.ZoneOffset; import java.time.format.DateTimeFormatter; import java.util.List; import java.util.Optional; import java.util.TreeMap; import java.util.function.Predicate; import static com.yahoo.vespa.flags.FetchVector.Dimension.APPLICATION_ID; import static com.yahoo.vespa.flags.FetchVector.Dimension.CONSOLE_USER_EMAIL; import static com.yahoo.vespa.flags.FetchVector.Dimension.HOSTNAME; import static com.yahoo.vespa.flags.FetchVector.Dimension.NODE_TYPE; import static com.yahoo.vespa.flags.FetchVector.Dimension.TENANT_ID; import static com.yahoo.vespa.flags.FetchVector.Dimension.VESPA_VERSION; import static com.yahoo.vespa.flags.FetchVector.Dimension.ZONE_ID; /** * Definitions of feature flags. * * <p>To use feature flags, define the flag in this class as an "unbound" flag, e.g. {@link UnboundBooleanFlag} * or {@link UnboundStringFlag}. At the location you want to get the value of the flag, you need the following:</p> * * <ol> * <li>The unbound flag</li> * <li>A {@link FlagSource}. The flag source is typically available as an injectable component. Binding * an unbound flag to a flag source produces a (bound) flag, e.g. {@link BooleanFlag} and {@link StringFlag}.</li> * <li>If you would like your flag value to be dependent on e.g. the application ID, then 1. you should * declare this in the unbound flag definition in this file (referring to * {@link FetchVector.Dimension#APPLICATION_ID}), and 2. specify the application ID when retrieving the value, e.g. * {@link BooleanFlag#with(FetchVector.Dimension, String)}. See {@link FetchVector} for more info.</li> * </ol> * * <p>Once the code is in place, you can override the flag value. 
This depends on the flag source, but typically * there is a REST API for updating the flags in the config server, which is the root of all flag sources in the zone.</p> * * @author hakonhall */ public class Flags { private static volatile TreeMap<FlagId, FlagDefinition> flags = new TreeMap<>(); public static final UnboundBooleanFlag ROOT_CHAIN_GRAPH = defineFeatureFlag( "root-chain-graph", true, List.of("hakonhall"), "2022-10-05", "2022-11-04", "Whether to run all tasks in the root task chain up to the one failing to converge (false), or " + "run all tasks in the root task chain whose dependencies have converged (true). And when suspending, " + "whether to run the tasks in sequence (false) or in reverse sequence (true).", "On first tick of the root chain after (re)start of host admin.", ZONE_ID, NODE_TYPE, HOSTNAME); public static final UnboundDoubleFlag DEFAULT_TERM_WISE_LIMIT = defineDoubleFlag( "default-term-wise-limit", 1.0, List.of("baldersheim"), "2020-12-02", "2023-01-01", "Default limit for when to apply termwise query evaluation", "Takes effect at redeployment", ZONE_ID, APPLICATION_ID); public static final UnboundStringFlag QUERY_DISPATCH_POLICY = defineStringFlag( "query-dispatch-policy", "adaptive", List.of("baldersheim"), "2022-08-20", "2023-01-01", "Select query dispatch policy, valid values are adaptive, round-robin, best-of-random-2," + " latency-amortized-over-requests, latency-amortized-over-time", "Takes effect at redeployment (requires restart)", ZONE_ID, APPLICATION_ID); public static final UnboundStringFlag FEED_SEQUENCER_TYPE = defineStringFlag( "feed-sequencer-type", "THROUGHPUT", List.of("baldersheim"), "2020-12-02", "2023-01-01", "Selects type of sequenced executor used for feeding in proton, valid values are LATENCY, ADAPTIVE, THROUGHPUT", "Takes effect at redeployment (requires restart)", ZONE_ID, APPLICATION_ID); public static final UnboundBooleanFlag KEEP_STORAGE_NODE_UP = defineFeatureFlag( "keep-storage-node-up", true, 
List.of("hakonhall"), "2022-07-07", "2022-10-07", "Whether to leave the storage node (with wanted state) UP while the node is permanently down.", "Takes effect immediately for nodes transitioning to permanently down.", ZONE_ID, APPLICATION_ID); public static final UnboundIntFlag MAX_UNCOMMITTED_MEMORY = defineIntFlag( "max-uncommitted-memory", 130000, List.of("geirst, baldersheim"), "2021-10-21", "2023-01-01", "Max amount of memory holding updates to an attribute before we do a commit.", "Takes effect at redeployment", ZONE_ID, APPLICATION_ID); public static final UnboundStringFlag RESPONSE_SEQUENCER_TYPE = defineStringFlag( "response-sequencer-type", "ADAPTIVE", List.of("baldersheim"), "2020-12-02", "2023-01-01", "Selects type of sequenced executor used for mbus responses, valid values are LATENCY, ADAPTIVE, THROUGHPUT", "Takes effect at redeployment", ZONE_ID, APPLICATION_ID); public static final UnboundIntFlag RESPONSE_NUM_THREADS = defineIntFlag( "response-num-threads", 2, List.of("baldersheim"), "2020-12-02", "2023-01-01", "Number of threads used for mbus responses, default is 2, negative number = numcores/4", "Takes effect at redeployment", ZONE_ID, APPLICATION_ID); public static final UnboundBooleanFlag SKIP_COMMUNICATIONMANAGER_THREAD = defineFeatureFlag( "skip-communicationmanager-thread", false, List.of("baldersheim"), "2020-12-02", "2023-01-01", "Should we skip the communicationmanager thread", "Takes effect at redeployment", ZONE_ID, APPLICATION_ID); public static final UnboundBooleanFlag SKIP_MBUS_REQUEST_THREAD = defineFeatureFlag( "skip-mbus-request-thread", false, List.of("baldersheim"), "2020-12-02", "2023-01-01", "Should we skip the mbus request thread", "Takes effect at redeployment", ZONE_ID, APPLICATION_ID); public static final UnboundBooleanFlag SKIP_MBUS_REPLY_THREAD = defineFeatureFlag( "skip-mbus-reply-thread", false, List.of("baldersheim"), "2020-12-02", "2023-01-01", "Should we skip the mbus reply thread", "Takes effect at redeployment", 
ZONE_ID, APPLICATION_ID); public static final UnboundBooleanFlag USE_ASYNC_MESSAGE_HANDLING_ON_SCHEDULE = defineFeatureFlag( "async-message-handling-on-schedule", false, List.of("baldersheim"), "2020-12-02", "2023-01-01", "Optionally deliver async messages in own thread", "Takes effect at redeployment", ZONE_ID, APPLICATION_ID); public static final UnboundDoubleFlag FEED_CONCURRENCY = defineDoubleFlag( "feed-concurrency", 0.5, List.of("baldersheim"), "2020-12-02", "2023-01-01", "How much concurrency should be allowed for feed", "Takes effect at redeployment", ZONE_ID, APPLICATION_ID); public static final UnboundDoubleFlag FEED_NICENESS = defineDoubleFlag( "feed-niceness", 0.0, List.of("baldersheim"), "2022-06-24", "2023-01-01", "How nice feeding shall be", "Takes effect at redeployment", ZONE_ID, APPLICATION_ID); public static final UnboundIntFlag MBUS_JAVA_NUM_TARGETS = defineIntFlag( "mbus-java-num-targets", 1, List.of("baldersheim"), "2022-07-05", "2023-01-01", "Number of rpc targets per service", "Takes effect at redeployment", ZONE_ID, APPLICATION_ID); public static final UnboundIntFlag MBUS_CPP_NUM_TARGETS = defineIntFlag( "mbus-cpp-num-targets", 1, List.of("baldersheim"), "2022-07-05", "2023-01-01", "Number of rpc targets per service", "Takes effect at redeployment", ZONE_ID, APPLICATION_ID); public static final UnboundIntFlag RPC_NUM_TARGETS = defineIntFlag( "rpc-num-targets", 1, List.of("baldersheim"), "2022-07-05", "2023-01-01", "Number of rpc targets per content node", "Takes effect at redeployment", ZONE_ID, APPLICATION_ID); public static final UnboundIntFlag MBUS_JAVA_EVENTS_BEFORE_WAKEUP = defineIntFlag( "mbus-java-events-before-wakeup", 1, List.of("baldersheim"), "2022-07-05", "2023-01-01", "Number write events before waking up transport thread", "Takes effect at redeployment", ZONE_ID, APPLICATION_ID); public static final UnboundIntFlag MBUS_CPP_EVENTS_BEFORE_WAKEUP = defineIntFlag( "mbus-cpp-events-before-wakeup", 1, List.of("baldersheim"), 
"2022-07-05", "2023-01-01", "Number write events before waking up transport thread", "Takes effect at redeployment", ZONE_ID, APPLICATION_ID); public static final UnboundIntFlag RPC_EVENTS_BEFORE_WAKEUP = defineIntFlag( "rpc-events-before-wakeup", 1, List.of("baldersheim"), "2022-07-05", "2023-01-01", "Number write events before waking up transport thread", "Takes effect at redeployment", ZONE_ID, APPLICATION_ID); public static final UnboundIntFlag MBUS_NUM_NETWORK_THREADS = defineIntFlag( "mbus-num-network-threads", 1, List.of("baldersheim"), "2022-07-01", "2023-01-01", "Number of threads used for mbus network", "Takes effect at redeployment", ZONE_ID, APPLICATION_ID); public static final UnboundBooleanFlag SHARED_STRING_REPO_NO_RECLAIM = defineFeatureFlag( "shared-string-repo-no-reclaim", false, List.of("baldersheim"), "2022-06-14", "2023-01-01", "Controls whether we do track usage and reclaim unused enum values in shared string repo", "Takes effect at redeployment", ZONE_ID, APPLICATION_ID); public static final UnboundBooleanFlag CONTAINER_DUMP_HEAP_ON_SHUTDOWN_TIMEOUT = defineFeatureFlag( "container-dump-heap-on-shutdown-timeout", false, List.of("baldersheim"), "2021-09-25", "2023-01-01", "Will trigger a heap dump during if container shutdown times out", "Takes effect at redeployment", ZONE_ID, APPLICATION_ID); public static final UnboundBooleanFlag LOAD_CODE_AS_HUGEPAGES = defineFeatureFlag( "load-code-as-hugepages", false, List.of("baldersheim"), "2022-05-13", "2023-01-01", "Will try to map the code segment with huge (2M) pages", "Takes effect at redeployment", ZONE_ID, APPLICATION_ID); public static final UnboundDoubleFlag CONTAINER_SHUTDOWN_TIMEOUT = defineDoubleFlag( "container-shutdown-timeout", 50.0, List.of("baldersheim"), "2021-09-25", "2023-05-01", "Timeout for shutdown of a jdisc container", "Takes effect at redeployment", ZONE_ID, APPLICATION_ID); public static final UnboundListFlag<String> ALLOWED_ATHENZ_PROXY_IDENTITIES = defineListFlag( 
"allowed-athenz-proxy-identities", List.of(), String.class, List.of("bjorncs", "tokle"), "2021-02-10", "2022-11-01", "Allowed Athenz proxy identities", "takes effect at redeployment"); public static final UnboundIntFlag MAX_ACTIVATION_INHIBITED_OUT_OF_SYNC_GROUPS = defineIntFlag( "max-activation-inhibited-out-of-sync-groups", 0, List.of("vekterli"), "2021-02-19", "2022-12-01", "Allows replicas in up to N content groups to not be activated " + "for query visibility if they are out of sync with a majority of other replicas", "Takes effect at redeployment", ZONE_ID, APPLICATION_ID); public static final UnboundIntFlag MAX_CONCURRENT_MERGES_PER_NODE = defineIntFlag( "max-concurrent-merges-per-node", 16, List.of("balder", "vekterli"), "2021-06-06", "2022-12-01", "Specifies max concurrent merges per content node.", "Takes effect at redeploy", ZONE_ID, APPLICATION_ID); public static final UnboundIntFlag MAX_MERGE_QUEUE_SIZE = defineIntFlag( "max-merge-queue-size", 100, List.of("balder", "vekterli"), "2021-06-06", "2022-12-01", "Specifies max size of merge queue.", "Takes effect at redeploy", ZONE_ID, APPLICATION_ID); public static final UnboundDoubleFlag MIN_NODE_RATIO_PER_GROUP = defineDoubleFlag( "min-node-ratio-per-group", 0.0, List.of("geirst", "vekterli"), "2021-07-16", "2022-11-01", "Minimum ratio of nodes that have to be available (i.e. 
not Down) in any hierarchic content cluster group for the group to be Up", "Takes effect at redeployment", ZONE_ID, APPLICATION_ID); public static final UnboundBooleanFlag ENABLED_HORIZON_DASHBOARD = defineFeatureFlag( "enabled-horizon-dashboard", false, List.of("olaa"), "2021-09-13", "2023-01-01", "Enable Horizon dashboard", "Takes effect immediately", TENANT_ID, CONSOLE_USER_EMAIL ); public static final UnboundBooleanFlag UNORDERED_MERGE_CHAINING = defineFeatureFlag( "unordered-merge-chaining", true, List.of("vekterli", "geirst"), "2021-11-15", "2022-11-01", "Enables the use of unordered merge chains for data merge operations", "Takes effect at redeploy", ZONE_ID, APPLICATION_ID); public static final UnboundBooleanFlag IGNORE_THREAD_STACK_SIZES = defineFeatureFlag( "ignore-thread-stack-sizes", false, List.of("arnej"), "2021-11-12", "2022-12-01", "Whether C++ thread creation should ignore any requested stack size", "Triggers restart, takes effect immediately", ZONE_ID, APPLICATION_ID); public static final UnboundBooleanFlag USE_V8_GEO_POSITIONS = defineFeatureFlag( "use-v8-geo-positions", true, List.of("arnej"), "2021-11-15", "2022-12-31", "Use Vespa 8 types and formats for geographical positions", "Takes effect at redeployment", ZONE_ID, APPLICATION_ID); public static final UnboundIntFlag MAX_COMPACT_BUFFERS = defineIntFlag( "max-compact-buffers", 1, List.of("baldersheim", "geirst", "toregge"), "2021-12-15", "2023-01-01", "Upper limit of buffers to compact in a data store at the same time for each reason (memory usage, address space usage)", "Takes effect at redeployment", ZONE_ID, APPLICATION_ID); public static final UnboundStringFlag MERGE_THROTTLING_POLICY = defineStringFlag( "merge-throttling-policy", "STATIC", List.of("vekterli"), "2022-01-25", "2022-12-01", "Sets the policy used for merge throttling on the content nodes. 
" + "Valid values: STATIC, DYNAMIC", "Takes effect at redeployment", ZONE_ID, APPLICATION_ID); public static final UnboundDoubleFlag PERSISTENCE_THROTTLING_WS_DECREMENT_FACTOR = defineDoubleFlag( "persistence-throttling-ws-decrement-factor", 1.2, List.of("vekterli"), "2022-01-27", "2022-12-01", "Sets the dynamic throttle policy window size decrement factor for persistence " + "async throttling. Only applies if DYNAMIC policy is used.", "Takes effect on redeployment", ZONE_ID, APPLICATION_ID); public static final UnboundDoubleFlag PERSISTENCE_THROTTLING_WS_BACKOFF = defineDoubleFlag( "persistence-throttling-ws-backoff", 0.95, List.of("vekterli"), "2022-01-27", "2022-12-01", "Sets the dynamic throttle policy window size backoff for persistence " + "async throttling. Only applies if DYNAMIC policy is used. Valid range [0, 1]", "Takes effect on redeployment", ZONE_ID, APPLICATION_ID); public static final UnboundIntFlag PERSISTENCE_THROTTLING_WINDOW_SIZE = defineIntFlag( "persistence-throttling-window-size", -1, List.of("vekterli"), "2022-02-23", "2022-11-01", "If greater than zero, sets both min and max window size to the given number, effectively " + "turning dynamic throttling into a static throttling policy. " + "Only applies if DYNAMIC policy is used.", "Takes effect on redeployment", ZONE_ID, APPLICATION_ID); public static final UnboundDoubleFlag PERSISTENCE_THROTTLING_WS_RESIZE_RATE = defineDoubleFlag( "persistence-throttling-ws-resize-rate", 3.0, List.of("vekterli"), "2022-02-23", "2022-11-01", "Sets the dynamic throttle policy resize rate. Only applies if DYNAMIC policy is used.", "Takes effect on redeployment", ZONE_ID, APPLICATION_ID); public static final UnboundBooleanFlag PERSISTENCE_THROTTLING_OF_MERGE_FEED_OPS = defineFeatureFlag( "persistence-throttling-of-merge-feed-ops", true, List.of("vekterli"), "2022-02-24", "2022-11-01", "If true, each put/remove contained within a merge is individually throttled as if it " + "were a put/remove from a client. 
If false, merges are throttled at a persistence thread " + "level, i.e. per ApplyBucketDiff message, regardless of how many document operations " + "are contained within. Only applies if DYNAMIC policy is used.", "Takes effect on redeployment", ZONE_ID, APPLICATION_ID); public static final UnboundBooleanFlag USE_QRSERVER_SERVICE_NAME = defineFeatureFlag( "use-qrserver-service-name", false, List.of("arnej"), "2022-01-18", "2022-12-31", "Use backwards-compatible 'qrserver' service name for containers with only 'search' API", "Takes effect at redeployment", ZONE_ID, APPLICATION_ID); public static final UnboundBooleanFlag AVOID_RENAMING_SUMMARY_FEATURES = defineFeatureFlag( "avoid-renaming-summary-features", true, List.of("arnej"), "2022-01-15", "2023-12-31", "Tell backend about the original name of summary-features that were wrapped in a rankingExpression feature", "Takes effect at redeployment", ZONE_ID, APPLICATION_ID); public static final UnboundBooleanFlag NOTIFICATION_DISPATCH_FLAG = defineFeatureFlag( "dispatch-notifications", false, List.of("enygaard"), "2022-05-02", "2022-12-30", "Whether we should send notification for a given tenant", "Takes effect immediately", TENANT_ID); public static final UnboundBooleanFlag ENABLE_PROXY_PROTOCOL_MIXED_MODE = defineFeatureFlag( "enable-proxy-protocol-mixed-mode", true, List.of("tokle"), "2022-05-09", "2022-11-01", "Enable or disable proxy protocol mixed mode", "Takes effect on redeployment", APPLICATION_ID); public static final UnboundListFlag<String> FILE_DISTRIBUTION_ACCEPTED_COMPRESSION_TYPES = defineListFlag( "file-distribution-accepted-compression-types", List.of("gzip", "lz4"), String.class, List.of("hmusum"), "2022-07-05", "2022-11-01", "´List of accepted compression types used when asking for a file reference. 
Valid values: gzip, lz4", "Takes effect on restart of service", APPLICATION_ID); public static final UnboundListFlag<String> FILE_DISTRIBUTION_COMPRESSION_TYPES_TO_SERVE = defineListFlag( "file-distribution-compression-types-to-use", List.of("lz4", "gzip"), String.class, List.of("hmusum"), "2022-07-05", "2022-11-01", "List of compression types to use (in preferred order), matched with accepted compression types when serving file references. Valid values: gzip, lz4", "Takes effect on restart of service", APPLICATION_ID); public static final UnboundBooleanFlag USE_YUM_PROXY_V2 = defineFeatureFlag( "use-yumproxy-v2", false, List.of("tokle"), "2022-05-05", "2022-11-01", "Use yumproxy-v2", "Takes effect on host admin restart", HOSTNAME); public static final UnboundStringFlag LOG_FILE_COMPRESSION_ALGORITHM = defineStringFlag( "log-file-compression-algorithm", "", List.of("arnej"), "2022-06-14", "2024-12-31", "Which algorithm to use for compressing log files. Valid values: empty string (default), gzip, zstd", "Takes effect immediately", ZONE_ID, APPLICATION_ID); public static final UnboundBooleanFlag SEPARATE_METRIC_CHECK_CONFIG = defineFeatureFlag( "separate-metric-check-config", false, List.of("olaa"), "2022-07-04", "2022-11-01", "Determines whether one metrics config check should be written per Vespa node", "Takes effect on next tick", HOSTNAME); public static final UnboundStringFlag TLS_CAPABILITIES_ENFORCEMENT_MODE = defineStringFlag( "tls-capabilities-enforcement-mode", "disable", List.of("bjorncs", "vekterli"), "2022-07-21", "2024-01-01", "Configure Vespa TLS capability enforcement mode", "Takes effect on restart of Docker container", APPLICATION_ID,HOSTNAME,NODE_TYPE,TENANT_ID,VESPA_VERSION ); public static final UnboundBooleanFlag CLEANUP_TENANT_ROLES = defineFeatureFlag( "cleanup-tenant-roles", false, List.of("olaa"), "2022-08-10", "2023-01-01", "Determines whether old tenant roles should be deleted", "Takes effect next maintenance run" ); public static final 
UnboundBooleanFlag USE_TWO_PHASE_DOCUMENT_GC = defineFeatureFlag( "use-two-phase-document-gc", false, List.of("vekterli"), "2022-08-24", "2022-11-01", "Use two-phase document GC in content clusters", "Takes effect at redeployment", ZONE_ID, APPLICATION_ID); public static final UnboundBooleanFlag RESTRICT_DATA_PLANE_BINDINGS = defineFeatureFlag( "restrict-data-plane-bindings", false, List.of("mortent"), "2022-09-08", "2022-11-01", "Use restricted data plane bindings", "Takes effect at redeployment", APPLICATION_ID); public static final UnboundStringFlag CSRF_MODE = defineStringFlag( "csrf-mode", "disabled", List.of("bjorncs", "tokle"), "2022-09-22", "2023-06-01", "Set mode for CSRF filter ('disabled', 'log_only', 'enabled')", "Takes effect on controller restart/redeployment"); public static final UnboundBooleanFlag SOFT_REBUILD = defineFeatureFlag( "soft-rebuild", false, List.of("mpolden"), "2022-09-27", "2022-12-01", "Whether soft rebuild can be used to rebuild hosts with remote disk", "Takes effect on next run of OsUpgradeActivator" ); public static final UnboundListFlag<String> CSRF_USERS = defineListFlag( "csrf-users", List.of(), String.class, List.of("bjorncs", "tokle"), "2022-09-22", "2023-06-01", "List of users to enable CSRF filter for. 
Use empty list for everyone.", "Takes effect on controller restart/redeployment"); public static final UnboundBooleanFlag ENABLE_OTELCOL = defineFeatureFlag( "enable-otel-collector", false, List.of("olaa"), "2022-09-23", "2023-01-01", "Whether an OpenTelemetry collector should be enabled", "Takes effect at next tick", APPLICATION_ID); public static final UnboundBooleanFlag CONSOLE_CSRF = defineFeatureFlag( "console-csrf", false, List.of("bjorncs", "tokle"), "2022-09-26", "2023-06-01", "Enable CSRF token in console", "Takes effect immediately", CONSOLE_USER_EMAIL); public static final UnboundBooleanFlag USE_WIREGUARD_ON_CONFIGSERVERS = defineFeatureFlag( "use-wireguard-on-configservers", false, List.of("andreer", "gjoranv"), "2022-09-28", "2023-04-01", "Set up a WireGuard endpoint on config servers", "Takes effect on configserver restart", ZONE_ID, NODE_TYPE); public static final UnboundBooleanFlag USE_WIREGUARD_ON_TENANT_HOSTS = defineFeatureFlag( "use-wireguard-on-tenant-hosts", false, List.of("andreer", "gjoranv"), "2022-09-28", "2023-04-01", "Set up a WireGuard endpoint on tenant hosts", "Takes effect on host admin restart", HOSTNAME); /** WARNING: public for testing: All flags should be defined in {@link Flags}. */ public static UnboundBooleanFlag defineFeatureFlag(String flagId, boolean defaultValue, List<String> owners, String createdAt, String expiresAt, String description, String modificationEffect, FetchVector.Dimension... dimensions) { return define(UnboundBooleanFlag::new, flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions); } /** WARNING: public for testing: All flags should be defined in {@link Flags}. */ public static UnboundStringFlag defineStringFlag(String flagId, String defaultValue, List<String> owners, String createdAt, String expiresAt, String description, String modificationEffect, FetchVector.Dimension... 
dimensions) { return defineStringFlag(flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, value -> true, dimensions); } /** WARNING: public for testing: All flags should be defined in {@link Flags}. */ public static UnboundStringFlag defineStringFlag(String flagId, String defaultValue, List<String> owners, String createdAt, String expiresAt, String description, String modificationEffect, Predicate<String> validator, FetchVector.Dimension... dimensions) { return define((i, d, v) -> new UnboundStringFlag(i, d, v, validator), flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions); } /** WARNING: public for testing: All flags should be defined in {@link Flags}. */ public static UnboundIntFlag defineIntFlag(String flagId, int defaultValue, List<String> owners, String createdAt, String expiresAt, String description, String modificationEffect, FetchVector.Dimension... dimensions) { return define(UnboundIntFlag::new, flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions); } /** WARNING: public for testing: All flags should be defined in {@link Flags}. */ public static UnboundLongFlag defineLongFlag(String flagId, long defaultValue, List<String> owners, String createdAt, String expiresAt, String description, String modificationEffect, FetchVector.Dimension... dimensions) { return define(UnboundLongFlag::new, flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions); } /** WARNING: public for testing: All flags should be defined in {@link Flags}. */ public static UnboundDoubleFlag defineDoubleFlag(String flagId, double defaultValue, List<String> owners, String createdAt, String expiresAt, String description, String modificationEffect, FetchVector.Dimension... 
dimensions) { return define(UnboundDoubleFlag::new, flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions); } /** WARNING: public for testing: All flags should be defined in {@link Flags}. */ public static <T> UnboundJacksonFlag<T> defineJacksonFlag(String flagId, T defaultValue, Class<T> jacksonClass, List<String> owners, String createdAt, String expiresAt, String description, String modificationEffect, FetchVector.Dimension... dimensions) { return define((id2, defaultValue2, vector2) -> new UnboundJacksonFlag<>(id2, defaultValue2, vector2, jacksonClass), flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions); } /** WARNING: public for testing: All flags should be defined in {@link Flags}. */ public static <T> UnboundListFlag<T> defineListFlag(String flagId, List<T> defaultValue, Class<T> elementClass, List<String> owners, String createdAt, String expiresAt, String description, String modificationEffect, FetchVector.Dimension... dimensions) { return define((fid, dval, fvec) -> new UnboundListFlag<>(fid, dval, elementClass, fvec), flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions); } @FunctionalInterface private interface TypedUnboundFlagFactory<T, U extends UnboundFlag<?, ?, ?>> { U create(FlagId id, T defaultValue, FetchVector defaultFetchVector); } /** * Defines a Flag. * * @param factory Factory for creating unbound flag of type U * @param flagId The globally unique FlagId. * @param defaultValue The default value if none is present after resolution. * @param description Description of how the flag is used. * @param modificationEffect What is required for the flag to take effect? A restart of process? immediately? etc. * @param dimensions What dimensions will be set in the {@link FetchVector} when fetching * the flag value in * {@link FlagSource#fetch(FlagId, FetchVector) FlagSource::fetch}. 
* For instance, if APPLICATION is one of the dimensions here, you should make sure * APPLICATION is set to the ApplicationId in the fetch vector when fetching the RawFlag * from the FlagSource. * @param <T> The boxed type of the flag value, e.g. Boolean for flags guarding features. * @param <U> The type of the unbound flag, e.g. UnboundBooleanFlag. * @return An unbound flag with {@link FetchVector.Dimension#HOSTNAME HOSTNAME} and * {@link FetchVector.Dimension#VESPA_VERSION VESPA_VERSION} already set. The ZONE environment * is typically implicit. */ private static <T, U extends UnboundFlag<?, ?, ?>> U define(TypedUnboundFlagFactory<T, U> factory, String flagId, T defaultValue, List<String> owners, String createdAt, String expiresAt, String description, String modificationEffect, FetchVector.Dimension[] dimensions) { FlagId id = new FlagId(flagId); FetchVector vector = new FetchVector() .with(HOSTNAME, Defaults.getDefaults().vespaHostname()) // Warning: In unit tests and outside official Vespa releases, the currentVersion is e.g. 7.0.0 // (determined by the current major version). Consider not setting VESPA_VERSION if minor = micro = 0. .with(VESPA_VERSION, Vtag.currentVersion.toFullString()); U unboundFlag = factory.create(id, defaultValue, vector); FlagDefinition definition = new FlagDefinition( unboundFlag, owners, parseDate(createdAt), parseDate(expiresAt), description, modificationEffect, dimensions); flags.put(id, definition); return unboundFlag; } private static Instant parseDate(String rawDate) { return DateTimeFormatter.ISO_DATE.parse(rawDate, LocalDate::from).atStartOfDay().toInstant(ZoneOffset.UTC); } public static List<FlagDefinition> getAllFlags() { return List.copyOf(flags.values()); } public static Optional<FlagDefinition> getFlag(FlagId flagId) { return Optional.ofNullable(flags.get(flagId)); } /** * Allows the statically defined flags to be controlled in a test. * * <p>Returns a Replacer instance to be used with e.g. a try-with-resources block. 
Within the block, * the flags starts out as cleared. Flags can be defined, etc. When leaving the block, the flags from * before the block is reinserted. * * <p>NOT thread-safe. Tests using this cannot run in parallel. */ public static Replacer clearFlagsForTesting(FlagId... flagsToKeep) { return new Replacer(flagsToKeep); } public static class Replacer implements AutoCloseable { private static volatile boolean flagsCleared = false; private final TreeMap<FlagId, FlagDefinition> savedFlags; private Replacer(FlagId... flagsToKeep) { verifyAndSetFlagsCleared(true); this.savedFlags = Flags.flags; Flags.flags = new TreeMap<>(); List.of(flagsToKeep).forEach(id -> Flags.flags.put(id, savedFlags.get(id))); } @Override public void close() { verifyAndSetFlagsCleared(false); Flags.flags = savedFlags; } /** * Used to implement a simple verification that Replacer is not used by multiple threads. * For instance two different tests running in parallel cannot both use Replacer. */ private static void verifyAndSetFlagsCleared(boolean newValue) { if (flagsCleared == newValue) { throw new IllegalStateException("clearFlagsForTesting called while already cleared - running tests in parallell!?"); } flagsCleared = newValue; } } }
package com.yahoo.vespa.flags;

import com.yahoo.component.Vtag;
import com.yahoo.vespa.defaults.Defaults;

import java.time.Instant;
import java.time.LocalDate;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import java.util.List;
import java.util.Optional;
import java.util.TreeMap;

import static com.yahoo.vespa.flags.FetchVector.Dimension.APPLICATION_ID;
import static com.yahoo.vespa.flags.FetchVector.Dimension.CONSOLE_USER_EMAIL;
import static com.yahoo.vespa.flags.FetchVector.Dimension.HOSTNAME;
import static com.yahoo.vespa.flags.FetchVector.Dimension.NODE_TYPE;
import static com.yahoo.vespa.flags.FetchVector.Dimension.TENANT_ID;
import static com.yahoo.vespa.flags.FetchVector.Dimension.VESPA_VERSION;
import static com.yahoo.vespa.flags.FetchVector.Dimension.ZONE_ID;

/**
 * Definitions of feature flags.
 *
 * <p>To use feature flags, define the flag in this class as an "unbound" flag, e.g. {@link UnboundBooleanFlag}
 * or {@link UnboundStringFlag}. At the location you want to get the value of the flag, you need the following:</p>
 *
 * <ol>
 *     <li>The unbound flag</li>
 *     <li>A {@link FlagSource}. The flag source is typically available as an injectable component. Binding
 *     an unbound flag to a flag source produces a (bound) flag, e.g. {@link BooleanFlag} and {@link StringFlag}.</li>
 *     <li>If you would like your flag value to be dependent on e.g. the application ID, then 1. you should
 *     declare this in the unbound flag definition in this file (referring to
 *     {@link FetchVector.Dimension#APPLICATION_ID}), and 2. specify the application ID when retrieving the value, e.g.
 *     {@link BooleanFlag#with(FetchVector.Dimension, String)}. See {@link FetchVector} for more info.</li>
 * </ol>
 *
 * <p>Once the code is in place, you can override the flag value. This depends on the flag source, but typically
 * there is a REST API for updating the flags in the config server, which is the root of all flag sources in the zone.</p>
 *
 * @author hakonhall
 */
public class Flags {

    // Registry of all defined flags, keyed by flag id. Populated at class-init time by the define*
    // helpers below; the volatile reference is swapped wholesale by Replacer during tests.
    private static volatile TreeMap<FlagId, FlagDefinition> flags = new TreeMap<>();

    // ---- Flag definitions. Each constant self-registers in "flags" via a define* helper. ----

    public static final UnboundBooleanFlag MAP_USER_NAMESPACE = defineFeatureFlag(
            "map-user-namespace", false,
            List.of("freva"), "2021-10-18", "2021-12-01",
            "Whether host-admin should start containers with mapped UID/GID, will also chown all files under container storage.",
            "Takes effect on next container restart.",
            APPLICATION_ID, NODE_TYPE, HOSTNAME);

    public static final UnboundBooleanFlag USE_CGROUPS_V2 = defineFeatureFlag(
            "use-cgroups-v2", false,
            List.of("freva"), "2021-10-27", "2021-12-01",
            "Whether a host should use CGroups v2",
            "Will attempt to switch on next host admin tick (requires reboot).",
            NODE_TYPE, HOSTNAME);

    public static final UnboundDoubleFlag DEFAULT_TERM_WISE_LIMIT = defineDoubleFlag(
            "default-term-wise-limit", 1.0,
            List.of("baldersheim"), "2020-12-02", "2022-01-01",
            "Default limit for when to apply termwise query evaluation",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundStringFlag FEED_SEQUENCER_TYPE = defineStringFlag(
            "feed-sequencer-type", "LATENCY",
            List.of("baldersheim"), "2020-12-02", "2022-01-01",
            "Selects type of sequenced executor used for feeding in proton, valid values are LATENCY, ADAPTIVE, THROUGHPUT",
            "Takes effect at redeployment (requires restart)",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundIntFlag FEED_TASK_LIMIT = defineIntFlag(
            "feed-task-limit", 1000,
            List.of("geirst, baldersheim"), "2021-10-14", "2022-01-01",
            "The task limit used by the executors handling feed in proton",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundIntFlag MAX_UNCOMMITTED_MEMORY = defineIntFlag(
            "max-uncommitted-memory", 130000,
            List.of("geirst, baldersheim"), "2021-10-21", "2022-01-01",
            "Max amount of memory holding updates to an attribute before we do a commit.",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundStringFlag RESPONSE_SEQUENCER_TYPE = defineStringFlag(
            "response-sequencer-type", "ADAPTIVE",
            List.of("baldersheim"), "2020-12-02", "2022-01-01",
            "Selects type of sequenced executor used for mbus responses, valid values are LATENCY, ADAPTIVE, THROUGHPUT",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundIntFlag RESPONSE_NUM_THREADS = defineIntFlag(
            "response-num-threads", 2,
            List.of("baldersheim"), "2020-12-02", "2022-01-01",
            "Number of threads used for mbus responses, default is 2, negative number = numcores/4",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag SKIP_COMMUNICATIONMANAGER_THREAD = defineFeatureFlag(
            "skip-communicationmanager-thread", false,
            List.of("baldersheim"), "2020-12-02", "2022-01-01",
            "Should we skip the communicationmanager thread",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag SKIP_MBUS_REQUEST_THREAD = defineFeatureFlag(
            "skip-mbus-request-thread", false,
            List.of("baldersheim"), "2020-12-02", "2022-01-01",
            "Should we skip the mbus request thread",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag SKIP_MBUS_REPLY_THREAD = defineFeatureFlag(
            "skip-mbus-reply-thread", false,
            List.of("baldersheim"), "2020-12-02", "2022-01-01",
            "Should we skip the mbus reply thread",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag USE_THREE_PHASE_UPDATES = defineFeatureFlag(
            "use-three-phase-updates", false,
            List.of("vekterli"), "2020-12-02", "2022-01-01",
            "Whether to enable the use of three-phase updates when bucket replicas are out of sync.",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag HIDE_SHARED_ROUTING_ENDPOINT = defineFeatureFlag(
            "hide-shared-routing-endpoint", false,
            List.of("tokle", "bjormel"), "2020-12-02", "2022-01-01",
            "Whether the controller should hide shared routing layer endpoint",
            "Takes effect immediately",
            APPLICATION_ID);

    public static final UnboundBooleanFlag USE_ASYNC_MESSAGE_HANDLING_ON_SCHEDULE = defineFeatureFlag(
            "async-message-handling-on-schedule", false,
            List.of("baldersheim"), "2020-12-02", "2022-01-01",
            "Optionally deliver async messages in own thread",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundDoubleFlag FEED_CONCURRENCY = defineDoubleFlag(
            "feed-concurrency", 0.5,
            List.of("baldersheim"), "2020-12-02", "2022-01-01",
            "How much concurrency should be allowed for feed",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundDoubleFlag DISK_BLOAT_FACTOR = defineDoubleFlag(
            "disk-bloat-factor", 0.2,
            List.of("baldersheim"), "2021-10-08", "2022-01-01",
            "Amount of bloat allowed before compacting file",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundIntFlag DOCSTORE_COMPRESSION_LEVEL = defineIntFlag(
            "docstore-compression-level", 3,
            List.of("baldersheim"), "2021-10-08", "2022-01-01",
            "Default compression level used for document store",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    // NOTE(review): constant name (NUM_DEPLOY_HELPER_THREADS) and flag id ("num-model-builder-threads")
    // do not match; presumably intentional renaming history — confirm before renaming either.
    public static final UnboundIntFlag NUM_DEPLOY_HELPER_THREADS = defineIntFlag(
            "num-model-builder-threads", -1,
            List.of("balder"), "2021-09-09", "2022-01-01",
            "Number of threads used for speeding up building of models.",
            "Takes effect on first (re)start of config server");

    public static final UnboundBooleanFlag ENABLE_FEED_BLOCK_IN_DISTRIBUTOR = defineFeatureFlag(
            "enable-feed-block-in-distributor", true,
            List.of("geirst"), "2021-01-27", "2022-01-31",
            "Enables blocking of feed in the distributor if resource usage is above limit on at least one content node",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag CONTAINER_DUMP_HEAP_ON_SHUTDOWN_TIMEOUT = defineFeatureFlag(
            "container-dump-heap-on-shutdown-timeout", false,
            List.of("baldersheim"), "2021-09-25", "2022-01-01",
            "Will trigger a heap dump during if container shutdown times out",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundDoubleFlag CONTAINER_SHUTDOWN_TIMEOUT = defineDoubleFlag(
            "container-shutdown-timeout", 50.0,
            List.of("baldersheim"), "2021-09-25", "2022-01-01",
            "Timeout for shutdown of a jdisc container",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundListFlag<String> ALLOWED_ATHENZ_PROXY_IDENTITIES = defineListFlag(
            "allowed-athenz-proxy-identities", List.of(), String.class,
            List.of("bjorncs", "tokle"), "2021-02-10", "2021-12-01",
            "Allowed Athenz proxy identities",
            "takes effect at redeployment");

    public static final UnboundBooleanFlag GENERATE_NON_MTLS_ENDPOINT = defineFeatureFlag(
            "generate-non-mtls-endpoint", true,
            List.of("tokle"), "2021-02-18", "2021-12-01",
            "Whether to generate the non-mtls endpoint",
            "Takes effect on next internal redeployment",
            APPLICATION_ID);

    public static final UnboundIntFlag MAX_ACTIVATION_INHIBITED_OUT_OF_SYNC_GROUPS = defineIntFlag(
            "max-activation-inhibited-out-of-sync-groups", 0,
            List.of("vekterli"), "2021-02-19", "2022-02-01",
            "Allows replicas in up to N content groups to not be activated " +
            "for query visibility if they are out of sync with a majority of other replicas",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundIntFlag MAX_CONCURRENT_MERGES_PER_NODE = defineIntFlag(
            "max-concurrent-merges-per-node", 128,
            List.of("balder", "vekterli"), "2021-06-06", "2022-01-01",
            "Specifies max concurrent merges per content node.",
            "Takes effect at redeploy",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundIntFlag MAX_MERGE_QUEUE_SIZE = defineIntFlag(
            "max-merge-queue-size", 1024,
            List.of("balder", "vekterli"), "2021-06-06", "2022-01-01",
            "Specifies max size of merge queue.",
            "Takes effect at redeploy",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag IGNORE_MERGE_QUEUE_LIMIT = defineFeatureFlag(
            "ignore-merge-queue-limit", false,
            List.of("vekterli", "geirst"), "2021-10-06", "2021-12-01",
            "Specifies if merges that are forwarded (chained) from another content node are always " +
            "allowed to be enqueued even if the queue is otherwise full.",
            "Takes effect at redeploy",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundIntFlag LARGE_RANK_EXPRESSION_LIMIT = defineIntFlag(
            "large-rank-expression-limit", 8192,
            List.of("baldersheim"), "2021-06-09", "2022-01-01",
            "Limit for size of rank expressions distributed by filedistribution",
            "Takes effect on next internal redeployment",
            APPLICATION_ID);

    public static final UnboundDoubleFlag MIN_NODE_RATIO_PER_GROUP = defineDoubleFlag(
            "min-node-ratio-per-group", 0.0,
            List.of("geirst", "vekterli"), "2021-07-16", "2021-12-01",
            "Minimum ratio of nodes that have to be available (i.e. not Down) in any hierarchic content cluster group for the group to be Up",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag SEPARATE_TENANT_IAM_ROLES = defineFeatureFlag(
            "separate-tenant-iam-roles", false,
            List.of("mortent"), "2021-08-12", "2022-01-01",
            "Create separate iam roles for tenant",
            "Takes effect on redeploy",
            TENANT_ID);

    public static final UnboundIntFlag METRICSPROXY_NUM_THREADS = defineIntFlag(
            "metricsproxy-num-threads", 2,
            List.of("balder"), "2021-09-01", "2022-01-01",
            "Number of threads for metrics proxy",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag ENABLED_HORIZON_DASHBOARD = defineFeatureFlag(
            "enabled-horizon-dashboard", false,
            List.of("olaa"), "2021-09-13", "2021-12-31",
            "Enable Horizon dashboard",
            "Takes effect immediately",
            TENANT_ID, CONSOLE_USER_EMAIL);

    public static final UnboundBooleanFlag ENABLE_ONPREM_TENANT_S3_ARCHIVE = defineFeatureFlag(
            "enable-onprem-tenant-s3-archive", false,
            List.of("bjorncs"), "2021-09-14", "2021-12-31",
            "Enable tenant S3 buckets in cd/main. Must be set on controller cluster only.",
            "Takes effect immediately",
            ZONE_ID, TENANT_ID);

    public static final UnboundBooleanFlag DELETE_UNMAINTAINED_CERTIFICATES = defineFeatureFlag(
            "delete-unmaintained-certificates", false,
            List.of("andreer"), "2021-09-23", "2021-11-11",
            "Whether to delete certificates that are known by provider but not by controller",
            "Takes effect on next run of EndpointCertificateMaintainer");

    public static final UnboundBooleanFlag ENABLE_TENANT_DEVELOPER_ROLE = defineFeatureFlag(
            "enable-tenant-developer-role", false,
            List.of("bjorncs"), "2021-09-23", "2021-12-31",
            "Enable tenant developer Athenz role in cd/main. Must be set on controller cluster only.",
            "Takes effect immediately",
            TENANT_ID);

    public static final UnboundIntFlag MAX_CONNECTION_LIFE_IN_HOSTED = defineIntFlag(
            "max-connection-life-in-hosted", 45,
            List.of("bjorncs"), "2021-09-30", "2021-12-31",
            "Max connection life for connections to jdisc endpoints in hosted",
            "Takes effect at redeployment",
            APPLICATION_ID);

    public static final UnboundBooleanFlag ENABLE_ROUTING_REUSE_PORT = defineFeatureFlag(
            "enable-routing-reuse-port", false,
            List.of("mortent"), "2021-09-29", "2021-12-31",
            "Enable reuse port in routing configuration",
            "Takes effect on container restart",
            HOSTNAME);

    public static final UnboundBooleanFlag ENABLE_TENANT_OPERATOR_ROLE = defineFeatureFlag(
            "enable-tenant-operator-role", false,
            List.of("bjorncs"), "2021-09-29", "2021-12-31",
            "Enable tenant specific operator roles in public systems. For controllers only.",
            "Takes effect on subsequent maintainer invocation",
            TENANT_ID);

    public static final UnboundIntFlag DISTRIBUTOR_MERGE_BUSY_WAIT = defineIntFlag(
            "distributor-merge-busy-wait", 10,
            List.of("geirst", "vekterli"), "2021-10-04", "2021-12-31",
            "Number of seconds that scheduling of new merge operations in the distributor should be inhibited " +
            "towards a content node that has indicated merge busy",
            "Takes effect at redeploy",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag DISTRIBUTOR_ENHANCED_MAINTENANCE_SCHEDULING = defineFeatureFlag(
            "distributor-enhanced-maintenance-scheduling", false,
            List.of("vekterli", "geirst"), "2021-10-14", "2022-01-31",
            "Enable enhanced maintenance operation scheduling semantics on the distributor",
            "Takes effect at redeploy",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag ASYNC_APPLY_BUCKET_DIFF = defineFeatureFlag(
            "async-apply-bucket-diff", false,
            List.of("geirst", "vekterli"), "2021-10-22", "2022-01-31",
            "Whether portions of apply bucket diff handling will be performed asynchronously",
            "Takes effect at redeploy",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundStringFlag JDK_VERSION = defineStringFlag(
            "jdk-version", "11",
            List.of("hmusum"), "2021-10-25", "2021-11-25",
            "JDK version to use on host and inside containers. Note application-id dimension only applies for container, " +
            "while hostname and node type applies for host.",
            "Takes effect on restart for Docker container and on next host-admin tick for host",
            APPLICATION_ID, HOSTNAME, NODE_TYPE);

    // ---- Typed factory helpers. Each delegates to define() with the matching unbound-flag constructor. ----

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static UnboundBooleanFlag defineFeatureFlag(String flagId, boolean defaultValue, List<String> owners,
                                                       String createdAt, String expiresAt, String description,
                                                       String modificationEffect, FetchVector.Dimension... dimensions) {
        return define(UnboundBooleanFlag::new, flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions);
    }

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static UnboundStringFlag defineStringFlag(String flagId, String defaultValue, List<String> owners,
                                                     String createdAt, String expiresAt, String description,
                                                     String modificationEffect, FetchVector.Dimension... dimensions) {
        return define(UnboundStringFlag::new, flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions);
    }

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static UnboundIntFlag defineIntFlag(String flagId, int defaultValue, List<String> owners,
                                               String createdAt, String expiresAt, String description,
                                               String modificationEffect, FetchVector.Dimension... dimensions) {
        return define(UnboundIntFlag::new, flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions);
    }

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static UnboundLongFlag defineLongFlag(String flagId, long defaultValue, List<String> owners,
                                                 String createdAt, String expiresAt, String description,
                                                 String modificationEffect, FetchVector.Dimension... dimensions) {
        return define(UnboundLongFlag::new, flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions);
    }

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static UnboundDoubleFlag defineDoubleFlag(String flagId, double defaultValue, List<String> owners,
                                                     String createdAt, String expiresAt, String description,
                                                     String modificationEffect, FetchVector.Dimension... dimensions) {
        return define(UnboundDoubleFlag::new, flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions);
    }

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static <T> UnboundJacksonFlag<T> defineJacksonFlag(String flagId, T defaultValue, Class<T> jacksonClass,
                                                              List<String> owners, String createdAt, String expiresAt,
                                                              String description, String modificationEffect,
                                                              FetchVector.Dimension... dimensions) {
        return define((id2, defaultValue2, vector2) -> new UnboundJacksonFlag<>(id2, defaultValue2, vector2, jacksonClass),
                      flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions);
    }

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static <T> UnboundListFlag<T> defineListFlag(String flagId, List<T> defaultValue, Class<T> elementClass,
                                                        List<String> owners, String createdAt, String expiresAt,
                                                        String description, String modificationEffect,
                                                        FetchVector.Dimension... dimensions) {
        return define((fid, dval, fvec) -> new UnboundListFlag<>(fid, dval, elementClass, fvec),
                      flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions);
    }

    // NOTE(review): parameter name "defaultVale" is a typo (should be "defaultValue"); harmless for
    // callers since it is only a lambda parameter name, but worth fixing upstream.
    @FunctionalInterface
    private interface TypedUnboundFlagFactory<T, U extends UnboundFlag<?, ?, ?>> {
        U create(FlagId id, T defaultVale, FetchVector defaultFetchVector);
    }

    /**
     * Defines a Flag.
     *
     * @param factory            Factory for creating unbound flag of type U
     * @param flagId             The globally unique FlagId.
     * @param defaultValue       The default value if none is present after resolution.
     * @param description        Description of how the flag is used.
     * @param modificationEffect What is required for the flag to take effect? A restart of process? immediately? etc.
     * @param dimensions         What dimensions will be set in the {@link FetchVector} when fetching
     *                           the flag value in
     *                           {@link FlagSource#fetch(FlagId, FetchVector) FlagSource::fetch}.
     *                           For instance, if APPLICATION is one of the dimensions here, you should make sure
     *                           APPLICATION is set to the ApplicationId in the fetch vector when fetching the RawFlag
     *                           from the FlagSource.
     * @param <T>                The boxed type of the flag value, e.g. Boolean for flags guarding features.
     * @param <U>                The type of the unbound flag, e.g. UnboundBooleanFlag.
     * @return An unbound flag with {@link FetchVector.Dimension#HOSTNAME HOSTNAME} and
     *         {@link FetchVector.Dimension#VESPA_VERSION VESPA_VERSION} already set. The ZONE environment
     *         is typically implicit.
     */
    private static <T, U extends UnboundFlag<?, ?, ?>> U define(TypedUnboundFlagFactory<T, U> factory,
                                                                String flagId,
                                                                T defaultValue,
                                                                List<String> owners,
                                                                String createdAt,
                                                                String expiresAt,
                                                                String description,
                                                                String modificationEffect,
                                                                FetchVector.Dimension[] dimensions) {
        FlagId id = new FlagId(flagId);
        FetchVector vector = new FetchVector()
                .with(HOSTNAME, Defaults.getDefaults().vespaHostname())
                // Warning: In unit tests and outside official Vespa releases, the currentVersion is e.g. 7.0.0
                // (determined by the current major version). Consider not setting VESPA_VERSION if minor = micro = 0.
                .with(VESPA_VERSION, Vtag.currentVersion.toFullString());
        U unboundFlag = factory.create(id, defaultValue, vector);
        FlagDefinition definition = new FlagDefinition(
                unboundFlag, owners, parseDate(createdAt), parseDate(expiresAt), description, modificationEffect, dimensions);
        // Self-registration: makes the flag visible through getAllFlags()/getFlag().
        flags.put(id, definition);
        return unboundFlag;
    }

    /** Parses an ISO-8601 date (yyyy-MM-dd) to the Instant at the start of that day in UTC. */
    private static Instant parseDate(String rawDate) {
        return DateTimeFormatter.ISO_DATE.parse(rawDate, LocalDate::from).atStartOfDay().toInstant(ZoneOffset.UTC);
    }

    /** Returns an immutable snapshot of all flag definitions registered so far. */
    public static List<FlagDefinition> getAllFlags() {
        return List.copyOf(flags.values());
    }

    /** Returns the definition of the given flag, or empty if the id is unknown. */
    public static Optional<FlagDefinition> getFlag(FlagId flagId) {
        return Optional.ofNullable(flags.get(flagId));
    }

    /**
     * Allows the statically defined flags to be controlled in a test.
     *
     * <p>Returns a Replacer instance to be used with e.g. a try-with-resources block. Within the block,
     * the flags start out as cleared. Flags can be defined, etc. When leaving the block, the flags from
     * before the block are reinserted.
     *
     * <p>NOT thread-safe. Tests using this cannot run in parallel.
     */
    public static Replacer clearFlagsForTesting(FlagId... flagsToKeep) {
        return new Replacer(flagsToKeep);
    }

    /** Test-only helper: swaps in an (almost) empty flag registry and restores the original on close(). */
    public static class Replacer implements AutoCloseable {

        // Guards against two Replacer instances being active at once; see verifyAndSetFlagsCleared.
        private static volatile boolean flagsCleared = false;

        // The registry as it was before this Replacer was created; restored by close().
        private final TreeMap<FlagId, FlagDefinition> savedFlags;

        private Replacer(FlagId... flagsToKeep) {
            verifyAndSetFlagsCleared(true);
            this.savedFlags = Flags.flags;
            Flags.flags = new TreeMap<>();
            List.of(flagsToKeep).forEach(id -> Flags.flags.put(id, savedFlags.get(id)));
        }

        @Override
        public void close() {
            verifyAndSetFlagsCleared(false);
            Flags.flags = savedFlags;
        }

        /**
         * Used to implement a simple verification that Replacer is not used by multiple threads.
         * For instance two different tests running in parallel cannot both use Replacer.
         */
        private static void verifyAndSetFlagsCleared(boolean newValue) {
            if (flagsCleared == newValue) {
                // NOTE(review): "parallell" is a typo in this runtime message; left unchanged here.
                throw new IllegalStateException("clearFlagsForTesting called while already cleared - running tests in parallell!?");
            }
            flagsCleared = newValue;
        }
    }
}
package com.yahoo.vespa.flags;

import com.yahoo.component.Vtag;
import com.yahoo.vespa.defaults.Defaults;

import java.time.Instant;
import java.time.LocalDate;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import java.util.List;
import java.util.Optional;
import java.util.TreeMap;

import static com.yahoo.vespa.flags.FetchVector.Dimension.APPLICATION_ID;
import static com.yahoo.vespa.flags.FetchVector.Dimension.CLUSTER_TYPE;
import static com.yahoo.vespa.flags.FetchVector.Dimension.HOSTNAME;
import static com.yahoo.vespa.flags.FetchVector.Dimension.TENANT_ID;
import static com.yahoo.vespa.flags.FetchVector.Dimension.VESPA_VERSION;
import static com.yahoo.vespa.flags.FetchVector.Dimension.ZONE_ID;

/**
 * Definitions of feature flags.
 *
 * <p>To use feature flags, define the flag in this class as an "unbound" flag, e.g. {@link UnboundBooleanFlag}
 * or {@link UnboundStringFlag}. At the location you want to get the value of the flag, you need the following:</p>
 *
 * <ol>
 *     <li>The unbound flag</li>
 *     <li>A {@link FlagSource}. The flag source is typically available as an injectable component. Binding
 *     an unbound flag to a flag source produces a (bound) flag, e.g. {@link BooleanFlag} and {@link StringFlag}.</li>
 *     <li>If you would like your flag value to be dependent on e.g. the application ID, then 1. you should
 *     declare this in the unbound flag definition in this file (referring to
 *     {@link FetchVector.Dimension#APPLICATION_ID}), and 2. specify the application ID when retrieving the value, e.g.
 *     {@link BooleanFlag#with(FetchVector.Dimension, String)}. See {@link FetchVector} for more info.</li>
 * </ol>
 *
 * <p>Once the code is in place, you can override the flag value. This depends on the flag source, but typically
 * there is a REST API for updating the flags in the config server, which is the root of all flag sources in the zone.</p>
 *
 * @author hakonhall
 */
public class Flags {

    // Registry of all defined flags, keyed by flag id. Populated at class-init time by the define*
    // helpers below; the volatile reference is swapped wholesale by Replacer during tests.
    private static volatile TreeMap<FlagId, FlagDefinition> flags = new TreeMap<>();

    // ---- Flag definitions. Each constant self-registers in "flags" via a define* helper. ----

    public static final UnboundDoubleFlag DEFAULT_TERM_WISE_LIMIT = defineDoubleFlag(
            "default-term-wise-limit", 1.0,
            List.of("baldersheim"), "2020-12-02", "2022-01-01",
            "Default limit for when to apply termwise query evaluation",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundStringFlag FEED_SEQUENCER_TYPE = defineStringFlag(
            "feed-sequencer-type", "LATENCY",
            List.of("baldersheim"), "2020-12-02", "2022-01-01",
            "Selects type of sequenced executor used for feeding, valid values are LATENCY, ADAPTIVE, THROUGHPUT",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundStringFlag RESPONSE_SEQUENCER_TYPE = defineStringFlag(
            "response-sequencer-type", "ADAPTIVE",
            List.of("baldersheim"), "2020-12-02", "2022-01-01",
            "Selects type of sequenced executor used for mbus responses, valid values are LATENCY, ADAPTIVE, THROUGHPUT",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundIntFlag RESPONSE_NUM_THREADS = defineIntFlag(
            "response-num-threads", 2,
            List.of("baldersheim"), "2020-12-02", "2022-01-01",
            "Number of threads used for mbus responses, default is 2, negative number = numcores/4",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag SKIP_COMMUNICATIONMANAGER_THREAD = defineFeatureFlag(
            "skip-communicationmanager-thread", false,
            List.of("baldersheim"), "2020-12-02", "2022-01-01",
            "Should we skip the communicationmanager thread",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag SKIP_MBUS_REQUEST_THREAD = defineFeatureFlag(
            "skip-mbus-request-thread", false,
            List.of("baldersheim"), "2020-12-02", "2022-01-01",
            "Should we skip the mbus request thread",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag SKIP_MBUS_REPLY_THREAD = defineFeatureFlag(
            "skip-mbus-reply-thread", false,
            List.of("baldersheim"), "2020-12-02", "2022-01-01",
            "Should we skip the mbus reply thread",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag USE_THREE_PHASE_UPDATES = defineFeatureFlag(
            "use-three-phase-updates", false,
            List.of("vekterli"), "2020-12-02", "2021-09-01",
            "Whether to enable the use of three-phase updates when bucket replicas are out of sync.",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag HIDE_SHARED_ROUTING_ENDPOINT = defineFeatureFlag(
            "hide-shared-routing-endpoint", false,
            List.of("tokle", "bjormel"), "2020-12-02", "2021-09-01",
            "Whether the controller should hide shared routing layer endpoint",
            "Takes effect immediately",
            APPLICATION_ID);

    public static final UnboundBooleanFlag USE_ASYNC_MESSAGE_HANDLING_ON_SCHEDULE = defineFeatureFlag(
            "async-message-handling-on-schedule", false,
            List.of("baldersheim"), "2020-12-02", "2022-01-01",
            "Optionally deliver async messages in own thread",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundDoubleFlag FEED_CONCURRENCY = defineDoubleFlag(
            "feed-concurrency", 0.5,
            List.of("baldersheim"), "2020-12-02", "2022-01-01",
            "How much concurrency should be allowed for feed",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag GROUP_SUSPENSION = defineFeatureFlag(
            "group-suspension", true,
            List.of("hakon"), "2021-01-22", "2021-08-22",
            "Allow all content nodes in a hierarchical group to suspend at the same time",
            "Takes effect on the next suspension request to the Orchestrator.",
            APPLICATION_ID);

    public static final UnboundBooleanFlag ENCRYPT_DIRTY_DISK = defineFeatureFlag(
            "encrypt-dirty-disk", false,
            List.of("hakonhall"), "2021-05-14", "2021-08-05",
            "Allow migrating an unencrypted data partition to being encrypted when (de)provisioned.",
            "Takes effect on next host-admin tick.");

    public static final UnboundBooleanFlag ENABLE_FEED_BLOCK_IN_DISTRIBUTOR = defineFeatureFlag(
            "enable-feed-block-in-distributor", true,
            List.of("geirst"), "2021-01-27", "2021-09-01",
            "Enables blocking of feed in the distributor if resource usage is above limit on at least one content node",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundStringFlag DEDICATED_CLUSTER_CONTROLLER_FLAVOR = defineStringFlag(
            "dedicated-cluster-controller-flavor", "",
            List.of("jonmv"), "2021-02-25", "2021-08-25",
            "Flavor as <vpu>-<memgb>-<diskgb> to use for dedicated cluster controller nodes",
            "Takes effect immediately, for subsequent provisioning",
            APPLICATION_ID);

    public static final UnboundListFlag<String> ALLOWED_ATHENZ_PROXY_IDENTITIES = defineListFlag(
            "allowed-athenz-proxy-identities", List.of(), String.class,
            List.of("bjorncs", "tokle"), "2021-02-10", "2021-12-01",
            "Allowed Athenz proxy identities",
            "takes effect at redeployment");

    public static final UnboundBooleanFlag GENERATE_NON_MTLS_ENDPOINT = defineFeatureFlag(
            "generate-non-mtls-endpoint", true,
            List.of("tokle"), "2021-02-18", "2021-10-01",
            "Whether to generate the non-mtls endpoint",
            "Takes effect on next internal redeployment",
            APPLICATION_ID);

    public static final UnboundIntFlag MAX_ACTIVATION_INHIBITED_OUT_OF_SYNC_GROUPS = defineIntFlag(
            "max-activation-inhibited-out-of-sync-groups", 0,
            List.of("vekterli"), "2021-02-19", "2021-09-01",
            "Allows replicas in up to N content groups to not be activated " +
            "for query visibility if they are out of sync with a majority of other replicas",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag ENABLE_CUSTOM_ACL_MAPPING = defineFeatureFlag(
            "enable-custom-acl-mapping", false,
            List.of("mortent", "bjorncs"), "2021-04-13", "2021-09-01",
            "Whether access control filters should read acl request mapping from handler or use default",
            "Takes effect at redeployment",
            APPLICATION_ID);

    public static final UnboundIntFlag NUM_DISTRIBUTOR_STRIPES = defineIntFlag(
            "num-distributor-stripes", 0,
            List.of("geirst", "vekterli"), "2021-04-20", "2021-09-01",
            "Specifies the number of stripes used by the distributor. When 0, legacy single stripe behavior is used.",
            "Takes effect after distributor restart",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundIntFlag MAX_CONCURRENT_MERGES_PER_NODE = defineIntFlag(
            "max-concurrent-merges-per-node", 16,
            List.of("balder", "vekterli"), "2021-06-06", "2021-09-01",
            "Specifies max concurrent merges per content node.",
            "Takes effect at redeploy",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundIntFlag MAX_MERGE_QUEUE_SIZE = defineIntFlag(
            "max-merge-queue-size", 1024,
            List.of("balder", "vekterli"), "2021-06-06", "2021-09-01",
            "Specifies max size of merge queue.",
            "Takes effect at redeploy",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag USE_EXTERNAL_RANK_EXPRESSION = defineFeatureFlag(
            "use-external-rank-expression", false,
            List.of("baldersheim"), "2021-05-24", "2021-09-01",
            "Whether to use distributed external rank expression or inline in rankproperties",
            "Takes effect on next internal redeployment",
            APPLICATION_ID);

    public static final UnboundBooleanFlag DISTRIBUTE_EXTERNAL_RANK_EXPRESSION = defineFeatureFlag(
            "distribute-external-rank-expression", false,
            List.of("baldersheim"), "2021-05-27", "2021-09-01",
            "Whether to use distributed external rank expression files by filedistribution",
            "Takes effect on next internal redeployment",
            APPLICATION_ID);

    public static final UnboundIntFlag LARGE_RANK_EXPRESSION_LIMIT = defineIntFlag(
            "large-rank-expression-limit", 0x10000,
            List.of("baldersheim"), "2021-06-09", "2021-09-01",
            "Limit for size of rank expressions distributed by filedistribution",
            "Takes effect on next internal redeployment",
            APPLICATION_ID);

    // NOTE(review): constant name (singular DUMP) and flag id ("enable-routing-core-dumps", plural)
    // differ slightly; left as-is.
    public static final UnboundBooleanFlag ENABLE_ROUTING_CORE_DUMP = defineFeatureFlag(
            "enable-routing-core-dumps", false,
            List.of("tokle"), "2021-04-16", "2021-09-01",
            "Whether to enable core dumps for routing layer",
            "Takes effect on next host-admin tick",
            HOSTNAME);

    public static final UnboundBooleanFlag CFG_DEPLOY_MULTIPART = defineFeatureFlag(
            "cfg-deploy-multipart", false,
            List.of("tokle"), "2021-05-19", "2021-09-01",
            "Whether to deploy applications using multipart form data (instead of url params)",
            "Takes effect immediately",
            APPLICATION_ID);

    public static final UnboundIntFlag MAX_ENCRYPTING_HOSTS = defineIntFlag(
            "max-encrypting-hosts", 0,
            List.of("mpolden", "hakonhall"), "2021-05-27", "2021-10-01",
            "The maximum number of hosts allowed to encrypt their disk concurrently",
            "Takes effect on next run of HostEncrypter, but any currently encrypting hosts will not be cancelled when reducing the limit");

    public static final UnboundBooleanFlag REQUIRE_CONNECTIVITY_CHECK = defineFeatureFlag(
            "require-connectivity-check", true,
            List.of("arnej"), "2021-06-03", "2021-09-01",
            "Require that config-sentinel connectivity check passes with good quality before starting services",
            "Takes effect on next restart",
            ZONE_ID, APPLICATION_ID);

    public static final UnboundBooleanFlag THROW_EXCEPTION_IF_RESOURCE_LIMITS_SPECIFIED = defineFeatureFlag(
            "throw-exception-if-resource-limits-specified", false,
            List.of("hmusum"), "2021-06-07", "2021-09-07",
            "Whether to throw an exception in hosted Vespa if the application specifies resource limits in services.xml",
            "Takes effect on next deployment through controller",
            APPLICATION_ID);

    public static final UnboundBooleanFlag DRY_RUN_ONNX_ON_SETUP = defineFeatureFlag(
            "dry-run-onnx-on-setup", false,
            List.of("baldersheim"), "2021-06-23", "2021-09-01",
            "Whether to dry run onnx models on setup for better error checking",
            "Takes effect on next internal redeployment",
            APPLICATION_ID);

    public static final UnboundListFlag<String> DEFER_APPLICATION_ENCRYPTION = defineListFlag(
            "defer-application-encryption", List.of(), String.class,
            List.of("mpolden", "hakonhall"), "2021-06-23", "2021-10-01",
            "List of applications where encryption of their host should be deferred",
            "Takes effect on next run of HostEncrypter");

    public static final UnboundBooleanFlag PODMAN3 = defineFeatureFlag(
            "podman3", true,
            List.of("mpolden"), "2021-07-05", "2021-09-01",
            "Whether to use Podman 3 on supported hosts",
            "Takes effect on host-admin restart");

    public static final UnboundDoubleFlag MIN_NODE_RATIO_PER_GROUP = defineDoubleFlag(
            "min-node-ratio-per-group", 0.0,
            List.of("geirst", "vekterli"), "2021-07-16", "2021-10-01",
            "Minimum ratio of nodes that have to be available (i.e. not Down) in any hierarchic content cluster group for the group to be Up",
            "Takes effect at redeployment",
            ZONE_ID, APPLICATION_ID);

    // ---- Typed factory helpers. Each delegates to define() with the matching unbound-flag constructor. ----

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static UnboundBooleanFlag defineFeatureFlag(String flagId, boolean defaultValue, List<String> owners,
                                                       String createdAt, String expiresAt, String description,
                                                       String modificationEffect, FetchVector.Dimension... dimensions) {
        return define(UnboundBooleanFlag::new, flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions);
    }

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static UnboundStringFlag defineStringFlag(String flagId, String defaultValue, List<String> owners,
                                                     String createdAt, String expiresAt, String description,
                                                     String modificationEffect, FetchVector.Dimension... dimensions) {
        return define(UnboundStringFlag::new, flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions);
    }

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static UnboundIntFlag defineIntFlag(String flagId, int defaultValue, List<String> owners,
                                               String createdAt, String expiresAt, String description,
                                               String modificationEffect, FetchVector.Dimension... dimensions) {
        return define(UnboundIntFlag::new, flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions);
    }

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static UnboundLongFlag defineLongFlag(String flagId, long defaultValue, List<String> owners,
                                                 String createdAt, String expiresAt, String description,
                                                 String modificationEffect, FetchVector.Dimension... dimensions) {
        return define(UnboundLongFlag::new, flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions);
    }

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static UnboundDoubleFlag defineDoubleFlag(String flagId, double defaultValue, List<String> owners,
                                                     String createdAt, String expiresAt, String description,
                                                     String modificationEffect, FetchVector.Dimension... dimensions) {
        return define(UnboundDoubleFlag::new, flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions);
    }

    /** WARNING: public for testing: All flags should be defined in {@link Flags}. */
    public static <T> UnboundJacksonFlag<T> defineJacksonFlag(String flagId, T defaultValue, Class<T> jacksonClass,
                                                              List<String> owners, String createdAt, String expiresAt,
                                                              String description, String modificationEffect,
                                                              FetchVector.Dimension... dimensions) {
        return define((id2, defaultValue2, vector2) -> new UnboundJacksonFlag<>(id2, defaultValue2, vector2, jacksonClass),
                      flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions);
    }

    /** WARNING: public for testing: All flags should be defined in {@link Flags}.
*/ public static <T> UnboundListFlag<T> defineListFlag(String flagId, List<T> defaultValue, Class<T> elementClass, List<String> owners, String createdAt, String expiresAt, String description, String modificationEffect, FetchVector.Dimension... dimensions) { return define((fid, dval, fvec) -> new UnboundListFlag<>(fid, dval, elementClass, fvec), flagId, defaultValue, owners, createdAt, expiresAt, description, modificationEffect, dimensions); } @FunctionalInterface private interface TypedUnboundFlagFactory<T, U extends UnboundFlag<?, ?, ?>> { U create(FlagId id, T defaultVale, FetchVector defaultFetchVector); } /** * Defines a Flag. * * @param factory Factory for creating unbound flag of type U * @param flagId The globally unique FlagId. * @param defaultValue The default value if none is present after resolution. * @param description Description of how the flag is used. * @param modificationEffect What is required for the flag to take effect? A restart of process? immediately? etc. * @param dimensions What dimensions will be set in the {@link FetchVector} when fetching * the flag value in * {@link FlagSource#fetch(FlagId, FetchVector) FlagSource::fetch}. * For instance, if APPLICATION is one of the dimensions here, you should make sure * APPLICATION is set to the ApplicationId in the fetch vector when fetching the RawFlag * from the FlagSource. * @param <T> The boxed type of the flag value, e.g. Boolean for flags guarding features. * @param <U> The type of the unbound flag, e.g. UnboundBooleanFlag. * @return An unbound flag with {@link FetchVector.Dimension#HOSTNAME HOSTNAME} and * {@link FetchVector.Dimension#VESPA_VERSION VESPA_VERSION} already set. The ZONE environment * is typically implicit. 
*/ private static <T, U extends UnboundFlag<?, ?, ?>> U define(TypedUnboundFlagFactory<T, U> factory, String flagId, T defaultValue, List<String> owners, String createdAt, String expiresAt, String description, String modificationEffect, FetchVector.Dimension[] dimensions) { FlagId id = new FlagId(flagId); FetchVector vector = new FetchVector() .with(HOSTNAME, Defaults.getDefaults().vespaHostname()) // Warning: In unit tests and outside official Vespa releases, the currentVersion is e.g. 7.0.0 // (determined by the current major version). Consider not setting VESPA_VERSION if minor = micro = 0. .with(VESPA_VERSION, Vtag.currentVersion.toFullString()); U unboundFlag = factory.create(id, defaultValue, vector); FlagDefinition definition = new FlagDefinition( unboundFlag, owners, parseDate(createdAt), parseDate(expiresAt), description, modificationEffect, dimensions); flags.put(id, definition); return unboundFlag; } private static Instant parseDate(String rawDate) { return DateTimeFormatter.ISO_DATE.parse(rawDate, LocalDate::from).atStartOfDay().toInstant(ZoneOffset.UTC); } public static List<FlagDefinition> getAllFlags() { return List.copyOf(flags.values()); } public static Optional<FlagDefinition> getFlag(FlagId flagId) { return Optional.ofNullable(flags.get(flagId)); } /** * Allows the statically defined flags to be controlled in a test. * * <p>Returns a Replacer instance to be used with e.g. a try-with-resources block. Within the block, * the flags starts out as cleared. Flags can be defined, etc. When leaving the block, the flags from * before the block is reinserted. * * <p>NOT thread-safe. Tests using this cannot run in parallel. 
*/ public static Replacer clearFlagsForTesting() { return new Replacer(); } public static class Replacer implements AutoCloseable { private static volatile boolean flagsCleared = false; private final TreeMap<FlagId, FlagDefinition> savedFlags; private Replacer() { verifyAndSetFlagsCleared(true); this.savedFlags = Flags.flags; Flags.flags = new TreeMap<>(); } @Override public void close() { verifyAndSetFlagsCleared(false); Flags.flags = savedFlags; } /** * Used to implement a simple verification that Replacer is not used by multiple threads. * For instance two different tests running in parallel cannot both use Replacer. */ private static void verifyAndSetFlagsCleared(boolean newValue) { if (flagsCleared == newValue) { throw new IllegalStateException("clearFlagsForTesting called while already cleared - running tests in parallell!?"); } flagsCleared = newValue; } } }
package magpie.data; import java.util.ArrayList; import java.util.Arrays; import java.util.LinkedList; import java.util.List; import java.util.regex.*; import org.apache.commons.lang3.ArrayUtils; /** * This class is designed to store all information related to an entry in a Dataset * * @author Logan Ward * @version 0.1 */ public class BaseEntry implements java.lang.Cloneable, java.io.Serializable, java.util.Comparator, java.lang.Comparable { /** Values of attributes */ private List<Double> AttributeList; /** Measured value of class variable */ private double Class; /** Probably of entry existing in each possible class (for classification) */ private double[] Probability; /** Class variable predicted by a model */ private double PredictedClass; /** Whether this entry has a measured class variable */ private boolean measured=false; /** Whether this entry has a predicted class variable */ private boolean predicted=false; /** Create a blank entry */ public BaseEntry() { this.AttributeList = new ArrayList<>(); this.Probability = null; } /** * Generate an entry by parsing a text string * @param input String representing entry * @throws Exception If parse fails */ public BaseEntry(String input) throws Exception { // Find anything in the input that matches a number Matcher numMatcher = Pattern.compile("[-+]?[0-9]*\\.?[0-9]+([eE][-+]?[0-9]+)?").matcher(input); AttributeList = new LinkedList<>(); while (numMatcher.find()) { String number = numMatcher.group(); AttributeList.add(Double.valueOf(number)); } this.Probability = null; } /** * Get number of attributes currently set * @return Number of attributes */ public int NAttributes() { return AttributeList.size(); } /** * Clear all currently-set attributes. 
*/ public void clearAttributes() { AttributeList.clear(); } /** * Retrieve attributes for this entry * @return List of attributes (same order as {@linkplain Dataset#AttributeName}) */ public double[] getAttributes() { return ArrayUtils.toPrimitive(AttributeList.toArray(new Double[0])); } /** * Retrieve a certain attribute for this entry * @param index Index of attribute to retrieve * @return Value of specified attribute */ public double getAttribute(int index) { return AttributeList.get(index); } /** * Set a certain attribute for this entry * @param index Index of attribute to set * @param value Desired value of specified attribute */ public void setAttribute(int index, double value) { AttributeList.set(index, value); } /** * Sets attributes for this entry * @param attributes List of attributes (same order as {@linkplain Dataset#AttributeName}) */ public void setAttributes(double[] attributes) { this.AttributeList.clear(); addAttributes(attributes); } /** * Adds attribute value to the end of the list of current attributes * @param attribute Value of attribute to add */ public void addAttribute(double attribute) { AttributeList.add(attribute); } /** * Adds several attributes to the end of the attribute list * @param attributes List of attribute values to be added */ public void addAttributes(double[] attributes) { Double[] toAdd = new Double[attributes.length]; for (int i=0; i<attributes.length; i++) toAdd[i] = attributes[i]; AttributeList.addAll(Arrays.asList(toAdd)); } /** * Generates a clone of an entry. It creates a new list to store the attributes. * So, the attribute data is preserved, but you can change the list as desired. 
* * @return Clone */ @Override public BaseEntry clone() { BaseEntry copy; try { copy = (BaseEntry) super.clone(); } catch (CloneNotSupportedException c) { throw new Error(c); } copy.AttributeList = new ArrayList<>(AttributeList); copy.Class = this.Class; copy.PredictedClass = this.PredictedClass; copy.measured = this.measured; copy.predicted = this.predicted; return copy; } @Override public int compare(Object A_obj, Object B_obj) { if (A_obj instanceof BaseEntry && B_obj instanceof BaseEntry) { BaseEntry A = (BaseEntry) A_obj, B = (BaseEntry) B_obj; // If A has more features, it is greater. if (A.AttributeList.size() != B.AttributeList.size()) return (A.AttributeList.size() > B.AttributeList.size()) ? 1 : -1; // Check which has greater features for (int i=0; i<A.AttributeList.size(); i++) if (A.getAttribute(i) != B.getAttribute(i)) return (A.getAttribute(i) > B.getAttribute(i)) ? 1 : -1; // We have concluded they are equal return 0; } else return 0; } @Override public int compareTo(Object B) { return compare(this, B); } @Override public int hashCode() { if (AttributeList.size() > 0) return (int) AttributeList.hashCode(); else return 1; } @Override public boolean equals(java.lang.Object other) { // Check if any of the if (other instanceof BaseEntry) { BaseEntry obj = (BaseEntry) other; return AttributeList.equals(obj.AttributeList); } else return false; } /** @return Whether a measured class has been set for this entry */ public boolean hasMeasurement() { return measured; } /** @return Whether a predicted class has been set for this entry */ public boolean hasPrediction() { return predicted; } /** * Set number of attributes that describe this entry * @param number Number of attributes */ public void setAttributeCount(int number){ if (AttributeList instanceof ArrayList) { ArrayList Ptr = (ArrayList) AttributeList; Ptr.ensureCapacity(number); } } /** * Set the measured class variable * @param x Measured class */ public void setMeasuredClass(double x){ this.Class = x; 
measured=true; } /** * Get the measured class variable * @return Measured class */ public double getMeasuredClass() { return Class; } /** Set the predicted class variable * @param x Predicted class */ public void setPredictedClass(double x) { PredictedClass = x; predicted=true; Probability=null; } /** * Get the predicted class variable * @return Predicted class */ public double getPredictedClass() { return PredictedClass; } /** * Set the predicted probability of a entry existing in each class * @param probabilites Probability of entry being in each class */ public void setClassProbabilities(double[] probabilites) { Probability = probabilites.clone(); predicted=true; PredictedClass = 0; for (int i=1; i<Probability.length; i++) if (Probability[i]>Probability[(int)PredictedClass]) PredictedClass=i; } /** * Get the probability of an entry existing in each class. Returns null if * no class probabilities have been stored * @return Class probabilities */ public double[] getClassProbilities() { return Probability.clone(); } @Override public String toString() { if (NAttributes() > 0) { String output = String.format("(%.3f", AttributeList.get(0)); for(int i=1; i<NAttributes(); i++) output += String.format(",%.3f", AttributeList.get(i)); output+=")"; return output; } else return "Nameless"; } /** * Print entry in a HTML-friendly format. * @return Entry as a string */ public String toHTMLString() { return toString(); } /** * Call this after generating attributes to ensure the array storing attributes * is as small as possible. */ public void reduceMemoryFootprint() { if (AttributeList instanceof ArrayList) { ArrayList Ptr = (ArrayList) AttributeList; Ptr.trimToSize(); } } }
package com.google.sps.data; import com.google.gson.Gson; // Place class for Maps agent public final class Place { private String attractionQuery = null; private int limit = -1; private double lng; private double lat; public Place(double longitude, double latitude) { lng = longitude; lat = latitude; } public Place(String query, double longitude, double latitude) { this(longitude, latitude); attractionQuery = query; } public Place(String query, double longitude, double latitude, int limit) { this(query, longitude, latitude); this.limit = limit; } public String toString() { return new Gson().toJson(this); } }
package ameba; import ameba.container.Container; import ameba.core.Application; import ameba.exception.AmebaException; import ameba.i18n.Messages; import ameba.util.IOUtils; import com.google.common.collect.Lists; import org.apache.commons.lang3.StringUtils; import org.glassfish.jersey.internal.inject.InjectionManager; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.List; /** * <p>Ameba class.</p> * * @author icode * */ public class Ameba { private static final String LINE = System.getProperty("line.separator", "/n"); /** * Constant <code>LOGO="LINE + LINE + _ _ "{trunked}</code> */ public static final String LOGO = LINE + LINE + " _ _ " + LINE + " / \\ _ __ ___ ___| |__ __ _ " + LINE + " / _ \\ | '_ ` _ \\ / _ \\ '_ \\ / _` |" + LINE + " / ___ \\| | | | | | __/ |_) | (_| |" + LINE + "/_/ \\_\\_| |_| |_|\\___|_.__/ \\__,_| {}" + LINE + LINE; /** * Constant <code>logger</code> */ private static final Logger logger = LoggerFactory.getLogger(Ameba.class); private static Application app; private static Container container; private static String version; private Ameba() { } /** * <p>getInjectionManager.</p> * * @return a InjectionManager object. */ public static InjectionManager getInjectionManager() { return container.getInjectionManager(); } /** * <p>Getter for the field <code>container</code>.</p> * * @return a {@link ameba.container.Container} object. * @since 0.1.6e */ public static Container getContainer() { return container; } /** * <p>Getter for the field <code>app</code>.</p> * * @return a {@link ameba.core.Application} object. */ public static Application getApp() { return app; } /** * <p>Getter for the field <code>version</code>.</p> * * @return a {@link java.lang.String} object. 
* @since 0.1.6e */ public static String getVersion() { if (version == null) { version = IOUtils.getJarImplVersion(Ameba.class); } return version; } /** * <p>printInfo.</p> * * @since 0.1.6e */ public static void printInfo() { logger.info(LOGO, getVersion()); } /** * <p>main.</p> * * @param args an array of {@link java.lang.String} objects. */ public static void main(String[] args) { // register shutdown hook Runtime.getRuntime().addShutdownHook(new Thread(Ameba::shutdown, "AmebaShutdownHook")); List<String> list = Lists.newArrayList(); String idCommand = "-- int idArgLen = idCommand.length(); for (String arg : args) { if (arg.startsWith(idCommand)) { String idConf = arg.substring(idArgLen); if (StringUtils.isNotBlank(idConf)) { list.add(idConf); } } } try { bootstrap(list.toArray(new String[list.size()])); } catch (Throwable e) { logger.error(Messages.get("info.service.error.startup"), e); try { Thread.sleep(10000); } catch (InterruptedException e1) { //no op } shutdown(); System.exit(500); } try { Thread.currentThread().join(); } catch (InterruptedException e) { //no op } } /** * <p>bootstrap.</p> * * @param ids a {@link java.lang.String} object. * @throws java.lang.Exception if any. */ public static void bootstrap(String... ids) throws Exception { bootstrap(new Application(ids)); } /** * <p>bootstrap.</p> * * @param application a {@link ameba.core.Application} object. * @throws java.lang.Exception if any. 
*/ public static synchronized void bootstrap(Application application) throws Exception { if (Ameba.container != null) { throw new AmebaException(Messages.get("info.service.start")); } app = application; container = Container.create(app); // run logger.info(Messages.get("info.service.start")); container.start(); } /** * <p>shutdown.</p> */ public static synchronized void shutdown() { logger.info(Messages.get("info.service.shutdown")); if (container != null) try { container.shutdown(); } catch (Exception e) { logger.error(Messages.get("info.service.error.shutdown"), e); } logger.info(Messages.get("info.service.shutdown.done")); } }
package core; import controller.MainScreenController; import handler.ConfigHandler; import handler.StatisticsHandler; import javafx.application.Application; import javafx.scene.Scene; import javafx.scene.control.Alert; import javafx.scene.image.Image; import javafx.stage.Stage; import misc.Logger; import org.apache.commons.lang3.exception.ExceptionUtils; import javax.swing.*; import java.io.IOException; import java.net.URL; import java.util.Scanner; public class Driver extends Application{ /** The current version of the program. Whenever a significant change is made, this should be changed along with the online handler. */ private static final String PROGRAM_VERSION = "9"; public static void main(final String[] args) { launch(); } @Override public void init() { // Do something before the application starts. } @Override public void start(Stage primaryStage) throws Exception { final ConfigHandler configHandler = new ConfigHandler(); final StatisticsHandler statisticsHandler = new StatisticsHandler(); // Load Config File: configHandler.loadConfigSettings(); // Check for Updates: if(configHandler.getCheckForUpdatesOnStart()) { checkForUpdate(); } // Show Splash Screen: if(configHandler.getShowSplashScreen()) { showSplashscreen(configHandler); } // Setup the primary stage: primaryStage.getIcons().add(new Image("icon.png")); // Add the frst scene to the primary stage: final Scene scene = new Scene(new MainScreenController(primaryStage, configHandler, statisticsHandler).getView()); scene.getStylesheets().add("global.css"); scene.getRoot().getStyleClass().add("main-root"); primaryStage.setTitle("Schillsaver - Powered by /g/entoomen\u00a9\u00ae"); primaryStage.setScene(scene); primaryStage.show(); } @Override public void stop() { // Do something before the application stops. } /** * Checks the website to see if there is a new version of the program. * If there is a new version, then a dialog is shown to the user to explain * the situation and how to update. 
*/ public static void checkForUpdate() { try { final URL url = new URL("http://valkryst.com/schillsaver/version.txt"); Scanner scanner = new Scanner(url.openStream()); final String newVersion = scanner.nextLine(); scanner.close(); if(!newVersion.equals(PROGRAM_VERSION)) { Alert alert = new Alert(Alert.AlertType.WARNING); alert.setTitle("New Version Available"); alert.setHeaderText("This program is out of date."); alert.setContentText("Get the latest version at http://valkryst.com/schillsaver/Schillsaver.7z.\n\n" + "Current Version - " + PROGRAM_VERSION + "\n" + "New Version - " + newVersion); alert.showAndWait(); } } catch(IOException e) { Logger.writeLog(e.getMessage() + "\n\n" + ExceptionUtils.getStackTrace(e), Logger.LOG_TYPE_WARNING); } } /** * Show the splashscreen if it's enabled in the configuration settings * and if the splashscreen image can be found. * @param configHandler todo Javadoc */ public static void showSplashscreen(final ConfigHandler configHandler) { if(configHandler.getShowSplashScreen()) { try { final ImageIcon image = new ImageIcon(configHandler.getSplashScreenFilePath()); final JWindow window = new JWindow(); window.getContentPane().add(new JLabel("", image, SwingConstants.CENTER)); window.pack(); window.setLocationRelativeTo(null); window.setVisible(true); Thread.sleep(configHandler.getSplashScreenDisplayTime()); window.setVisible(false); window.dispose(); } catch(final InterruptedException | NullPointerException e) { Logger.writeLog(e.getMessage() + "\n\n" + ExceptionUtils.getStackTrace(e), Logger.LOG_TYPE_WARNING); } } } }
package global; /** * * @author nick */ public class Data { // software info public static final String APP_NAME = "ATAV (Analysis Tool for Annotated Variants)"; public static String VERSION = "7.3"; public static String userName = System.getProperty("user.name"); // atav home path (location of executable jar file, config dir, data dir, lib dir etc.) public static String ATAV_HOME = System.getenv().getOrDefault("ATAV_HOME", ""); // system config file path public static final String SYSTEM_CONFIG = Data.ATAV_HOME + "config/atav.dragen.system.config.properties"; public static final String SYSTEM_CONFIG_FOR_DEBUG = Data.ATAV_HOME + "config/atav.dragen.debug.system.config.properties"; // system default values public static final int NO_FILTER = Integer.MAX_VALUE; public static final String NO_FILTER_STR = ""; public static final byte BYTE_NA = Byte.MIN_VALUE; public static final short SHORT_NA = Short.MIN_VALUE; public static final int INTEGER_NA = Integer.MIN_VALUE; public static final float FLOAT_NA = Float.MIN_VALUE; public static final double DOUBLE_NA = Double.MIN_VALUE; public static String STRING_NA = "NA"; public static String VCF_NA = "."; public static String STRING_NAN = "NaN"; }
package hex; import hex.KMeans.Initialization; import java.util.*; import water.*; import water.Job.ColumnsJob; import water.api.*; import water.fvec.Chunk; import water.fvec.Frame; import water.fvec.NewChunk; import water.fvec.Vec; import water.util.RString; import water.util.Utils; public class KMeans2 extends ColumnsJob { static final int API_WEAVER = 1; static public DocGen.FieldDoc[] DOC_FIELDS; static final String DOC_GET = "k-means"; @API(help = "Clusters initialization", filter = Default.class) public Initialization initialization = Initialization.None; @API(help = "Number of clusters", required = true, json = true, filter = Default.class, lmin = 1, lmax = 100000) public int k = 2; @API(help = "Maximum number of iterations before stopping", required = true, filter = Default.class, lmin = 1, lmax = 100000) public int max_iter = 100; @API(help = "Whether data should be normalized", filter = Default.class) public boolean normalize; @API(help = "Seed for the random number generator", filter = Default.class) public long seed = new Random().nextLong(); public KMeans2() { description = "K-means"; } @Override protected Status exec() { String sourceArg = input("source"); Key sourceKey = null; if( sourceArg != null ) sourceKey = Key.make(sourceArg); String[] names = new String[cols.length]; for( int i = 0; i < cols.length; i++ ) names[i] = source._names[cols[i]]; Vec[] vecs = selectVecs(source); // Fill-in response based on K99 String[] domain = new String[k]; for( int i = 0; i < domain.length; i++ ) domain[i] = "Cluster " + i; String[] namesResp = Utils.append(names, "response"); String[][] domaiResp = (String[][]) Utils.append((new Frame(names, vecs)).domains(), (Object) domain); KMeans2Model model = new KMeans2Model(destination_key, sourceKey, namesResp, domaiResp); model.k = k; model.normalized = normalize; // TODO remove when stats are propagated with vecs? double[] means = new double[vecs.length]; double[] mults = normalize ? 
new double[vecs.length] : null; for( int i = 0; i < vecs.length; i++ ) { means[i] = (float) vecs[i].mean(); if( mults != null ) { double sigma = vecs[i].sigma(); mults[i] = normalize(sigma) ? 1 / sigma : 1; } } // -1 to be different from all chunk indexes (C.f. Sampler) Random rand = Utils.getRNG(seed - 1); double[][] clusters; if( initialization == Initialization.None ) { // Initialize all clusters to random rows clusters = new double[k][vecs.length]; for (double[] cluster : clusters) randomRow(vecs, rand, cluster, means, mults); } else { // Initialize first cluster to random row clusters = new double[1][]; clusters[0] = new double[vecs.length]; randomRow(vecs, rand, clusters[0], means, mults); while( model.iterations < 5 ) { // Sum squares distances to clusters SumSqr sqr = new SumSqr(); sqr._clusters = clusters; sqr._means = means; sqr._mults = mults; sqr.doAll(vecs); // Sample with probability inverse to square distance Sampler sampler = new Sampler(); sampler._clusters = clusters; sampler._sqr = sqr._sqr; sampler._probability = k * 3; // Over-sampling sampler._seed = seed; sampler._means = means; sampler._mults = mults; sampler.doAll(vecs); clusters = Utils.append(clusters, sampler._sampled); if( cancelled() ) return Status.Done; model.centers = normalize ? denormalize(clusters, vecs) : clusters; model.total_within_SS = sqr._sqr; model.iterations++; UKV.put(destination_key, model); } clusters = recluster(clusters, k, rand, initialization); } for( ;; ) { Lloyds task = new Lloyds(); task._clusters = clusters; task._means = means; task._mults = mults; task.doAll(vecs); model.centers = clusters = normalize ? 
denormalize(task._cMeans, vecs) : task._cMeans; model.between_cluster_variances = task._betwnSqrs; double[] variances = new double[task._cSqrs.length]; for( int clu = 0; clu < task._cSqrs.length; clu++ ) for( int col = 0; col < task._cSqrs[clu].length; col++ ) variances[clu] += task._cSqrs[clu][col]; double between_cluster_SS = 0.0; for (int clu = 0; clu < task._betwnSqrs.length; clu++) between_cluster_SS += task._betwnSqrs[clu]; model.between_cluster_SS = between_cluster_SS; model.within_cluster_variances = variances; model.total_within_SS = task._sqr; model.total_SS = model.total_within_SS + model.between_cluster_SS; model.size = task._rows; model.iterations++; UKV.put(destination_key, model); if( model.iterations >= max_iter ) { Clusters cc = new Clusters(); cc._clusters = clusters; cc._means = means; cc._mults = mults; cc.doAll(1, vecs); Frame fr2 = cc.outputFrame(new String[]{"Cluster ID"}, new String[1][]); Key clustersIDKey = Key.make(destination_key.toString() + "_clusters"); model._clustersKey = clustersIDKey.toString(); UKV.put(clustersIDKey, fr2); break; } if( cancelled() ) break; } return Status.Done; } @Override protected Response redirect() { return KMeans2Progress.redirect(this, job_key, destination_key); } public static class KMeans2Progress extends Progress2 { static final int API_WEAVER = 1; static public DocGen.FieldDoc[] DOC_FIELDS; @Override protected Response jobDone(Job job, Key dst) { return KMeans2ModelView.redirect(this, destination_key); } public static Response redirect(Request req, Key job_key, Key destination_key) { return Response.redirect(req, new KMeans2Progress().href(), JOB_KEY, job_key, DEST_KEY, destination_key); } } public static class KMeans2ModelView extends Request2 { static final int API_WEAVER = 1; static public DocGen.FieldDoc[] DOC_FIELDS; @API(help = "KMeans2 Model", json = true, filter = Default.class) public KMeans2Model model; public static String link(String txt, Key model) { return "<a href='" + new 
KMeans2ModelView().href() + ".html?model=" + model + "'>" + txt + "</a>"; } public static Response redirect(Request req, Key model) { return Response.redirect(req, new KMeans2ModelView().href(), "model", model); } @Override protected Response serve() { return Response.done(this); } @Override public boolean toHTML(StringBuilder sb) { if( model != null ) { DocGen.HTML.section(sb, "Cluster Centers: "); //"Total Within Cluster Sum of Squares: " + model.total_within_SS); table(sb, "Clusters", model._names, model.centers); double[][] rows = new double[model.within_cluster_variances.length][1]; for( int i = 0; i < rows.length; i++ ) rows[i][0] = model.within_cluster_variances[i]; columnHTMLlong(sb, "Cluster Size", model.size); DocGen.HTML.section(sb, "Cluster Variances: "); table(sb, "Clusters", new String[]{"Within Cluster Variances"}, rows); columnHTML(sb, "Between Cluster Variances", model.between_cluster_variances); sb.append("<br />"); DocGen.HTML.section(sb, "Overall Totals: "); double[] row = new double[]{model.total_SS, model.total_within_SS, model.between_cluster_SS}; rowHTML(sb, new String[]{"Total Sum of Squares", "Total Within Cluster Sum of Squares", "Between Cluster Sum of Squares"}, row); DocGen.HTML.section(sb, "Cluster Assignments by Observation: "); RString rs = new RString("<a href='Inspect2.html?src_key=%$key'>%content</a>"); rs.replace("key", model._selfKey + "_clusters"); rs.replace("content", "View the row-by-row cluster assignments"); sb.append(rs.toString()); //sb.append("<iframe src=\"" + "/Inspect.html?key=KMeansClusters\"" + "width = \"850\" height = \"550\" marginwidth=\"25\" marginheight=\"25\" scrolling=\"yes\"></iframe>" ); return true; } return false; } private static void rowHTML(StringBuilder sb, String[] header, double[] ro) { sb.append("<span style='display: inline-block; '>"); sb.append("<table class='table table-striped table-bordered'>"); sb.append("<tr>"); for (String aHeader : header) 
sb.append("<th>").append(aHeader).append("</th>"); sb.append("</tr>"); sb.append("<tr>"); for (double row : ro) { sb.append("<td>").append(ElementBuilder.format(row)).append("</td>"); } sb.append("</tr>"); sb.append("</table></span>"); } private static void columnHTML(StringBuilder sb, String name, double[] rows) { sb.append("<span style='display: inline-block; '>"); sb.append("<table class='table table-striped table-bordered'>"); sb.append("<tr>"); sb.append("<th>").append(name).append("</th>"); sb.append("</tr>"); sb.append("<tr>"); for (double row : rows) { sb.append("<tr>"); sb.append("<td>").append(ElementBuilder.format(row)).append("</td>"); sb.append("</tr>"); } sb.append("</table></span>"); } private static void columnHTMLlong(StringBuilder sb, String name, long[] rows) { sb.append("<span style='display: inline-block; '>"); sb.append("<table class='table table-striped table-bordered'>"); sb.append("<tr>"); sb.append("<th>").append(name).append("</th>"); sb.append("</tr>"); sb.append("<tr>"); for (double row : rows) { sb.append("<tr>"); sb.append("<td>").append(ElementBuilder.format(row)).append("</td>"); sb.append("</tr>"); } sb.append("</table></span>"); } private static void table(StringBuilder sb, String title, String[] names, double[][] rows) { sb.append("<span style='display: inline-block;'>"); sb.append("<table class='table table-striped table-bordered'>"); sb.append("<tr>"); sb.append("<th>").append(title).append("</th>"); for( int i = 0; names != null && i < rows[0].length; i++ ) sb.append("<th>").append(names[i]).append("</th>"); sb.append("</tr>"); for( int r = 0; r < rows.length; r++ ) { sb.append("<tr>"); sb.append("<td>").append(r).append("</td>"); for( int c = 0; c < rows[r].length; c++ ) sb.append("<td>").append(ElementBuilder.format(rows[r][c])).append("</td>"); sb.append("</tr>"); } sb.append("</table></span>"); } } public static class KMeans2Model extends Model implements Progress { static final int API_WEAVER = 1; static public 
DocGen.FieldDoc[] DOC_FIELDS; @API(help = "Cluster centers, always denormalized") public double[][] centers; @API(help = "Sum of within cluster sum of squares") public double total_within_SS; @API(help = "Between cluster sum of square distances") public double between_cluster_SS; @API(help = "Total Sum of squares = total_within_SS + betwen_cluster_SS") public double total_SS; @API(help = "Number of clusters") public int k; @API(help = "Numbers of observations in each cluster.") public long[] size; @API(help = "Whether data was normalized") public boolean normalized; @API(help = "Maximum number of iterations before stopping") public int max_iter = 100; @API(help = "Iterations the algorithm ran") public int iterations; @API(help = "Within cluster sum of squares per cluster") public double[] within_cluster_variances; @API(help = "Between Cluster square distances per cluster") public double[] between_cluster_variances; @API(help = "The row-by-row cluster assignments") public String _clustersKey; // Normalization caches private transient double[][] _normClust; private transient double[] _means, _mults; public KMeans2Model(Key selfKey, Key dataKey, String names[], String domains[][]) { super(selfKey, dataKey, names, domains); } @Override public float progress() { return Math.min(1f, iterations / (float) max_iter); } @Override protected float[] score0(Chunk[] chunks, int rowInChunk, double[] tmp, float[] preds) { double[][] cs = centers; if( normalized && _normClust == null ) cs = _normClust = normalize(centers, chunks); if( _means == null ) { _means = new double[chunks.length]; for( int i = 0; i < chunks.length; i++ ) _means[i] = chunks[i]._vec.mean(); } if( normalized && _mults == null ) { _mults = new double[chunks.length]; for( int i = 0; i < chunks.length; i++ ) { double sigma = chunks[i]._vec.sigma(); _mults[i] = normalize(sigma) ? 
1 / sigma : 1; } } data(tmp, chunks, rowInChunk, _means, _mults); Arrays.fill(preds, 0); preds[closest(cs, tmp, new ClusterDist())._cluster] = 1; return preds; } @Override protected float[] score0(double[] data, float[] preds) { throw new UnsupportedOperationException(); } } public class Clusters extends MRTask2<Clusters> { double[][] _clusters; // Cluster centers double[] _means, _mults; // Normalization @Override public void map(Chunk[] cs, NewChunk[] ncs) { double[] values = new double[_clusters[0].length]; ClusterDist cd = new ClusterDist(); for (int row = 0; row < cs[0]._len; row++) { data(values, cs, row, _means, _mults); closest(_clusters, values, cd); int clu = cd._cluster; ncs[0].addNum(clu); } } } public static class SumSqr extends MRTask2<SumSqr> { double[] _means, _mults; // Normalization double[][] _clusters; // OUT double _sqr; @Override public void map(Chunk[] cs) { double[] values = new double[cs.length]; ClusterDist cd = new ClusterDist(); for( int row = 0; row < cs[0]._len; row++ ) { data(values, cs, row, _means, _mults); _sqr += minSqr(_clusters, values, cd); } _means = _mults = null; _clusters = null; } @Override public void reduce(SumSqr other) { _sqr += other._sqr; } } public static class Sampler extends MRTask2<Sampler> { double[][] _clusters; double _sqr; // Min-square-error double _probability; // Odds to select this point long _seed; double[] _means, _mults; // Normalization // OUT double[][] _sampled; // New clusters @Override public void map(Chunk[] cs) { double[] values = new double[cs.length]; ArrayList<double[]> list = new ArrayList<double[]>(); Random rand = Utils.getRNG(_seed + cs[0]._start); ClusterDist cd = new ClusterDist(); for( int row = 0; row < cs[0]._len; row++ ) { data(values, cs, row, _means, _mults); double sqr = minSqr(_clusters, values, cd); if( _probability * sqr > rand.nextDouble() * _sqr ) list.add(values.clone()); } _sampled = new double[list.size()][]; list.toArray(_sampled); _clusters = null; _means = _mults = 
null; } @Override public void reduce(Sampler other) { _sampled = Utils.append(_sampled, other._sampled); } } public static class Lloyds extends MRTask2<Lloyds> { double[][] _clusters; double[] _means, _mults; // Normalization // OUT double[][] _cMeans, _cSqrs; // Means and sum of squares for each cluster double[] _betwnSqrs; // Between cluster squares double[] _gm; // Grand Mean (mean of means) long[] _rows; // Rows per cluster double _sqr; // Total sqr distance @Override public void map(Chunk[] cs) { _cMeans = new double[_clusters.length][_clusters[0].length]; _cSqrs = new double[_clusters.length][_clusters[0].length]; _betwnSqrs = new double[_clusters.length]; _rows = new long[_clusters.length]; _gm = new double[_clusters[0].length]; // Find closest cluster for each row double[] values = new double[_clusters[0].length]; ClusterDist cd = new ClusterDist(); int[] clusters = new int[cs[0]._len]; for( int row = 0; row < cs[0]._len; row++ ) { data(values, cs, row, _means, _mults); closest(_clusters, values, cd); int clu = clusters[row] = cd._cluster; _sqr += cd._dist; if( clu == -1 ) continue; // Ignore broken row // Add values and increment counter for chosen cluster for( int col = 0; col < values.length; col++ ) _cMeans[clu][col] += values[col]; _rows[clu]++; } int[] validMeans = new int[_gm.length]; for( int clu = 0; clu < _cMeans.length; clu++ ) for( int col = 0; col < _cMeans[clu].length; col++ ) { if(_rows[clu] != 0) { _cMeans[clu][col] /= _rows[clu]; _gm[col] += _cMeans[clu][col]; validMeans[col]++; } } for (int col = 0; col < _gm.length; col++) if(validMeans[col] != 0) _gm[col] /= validMeans[col]; for (int clu = 0; clu < _cMeans.length; clu++) for (int col = 0; col < _gm.length; col++) { double mean_delta = _cMeans[clu][col] - _gm[col]; _betwnSqrs[clu] += _rows[clu] * mean_delta * mean_delta; } // Second pass for in-cluster variances for( int row = 0; row < cs[0]._len; row++ ) { int clu = clusters[row]; if( clu == -1 ) continue; data(values, cs, row, _means, 
_mults); for( int col = 0; col < values.length; col++ ) { double delta = values[col] - _cMeans[clu][col]; _cSqrs[clu][col] += delta * delta; } } _clusters = null; _means = _mults = null; } @Override public void reduce(Lloyds mr) { for( int clu = 0; clu < _cMeans.length; clu++ ) Layer.Stats.reduce(_cMeans[clu], _cSqrs[clu], _rows[clu], mr._cMeans[clu], mr._cSqrs[clu], mr._rows[clu]); Utils.add(_rows, mr._rows); _sqr += mr._sqr; } } private static final class ClusterDist { int _cluster; double _dist; } private static ClusterDist closest(double[][] clusters, double[] point, ClusterDist cd) { return closest(clusters, point, cd, clusters.length); } private static double minSqr(double[][] clusters, double[] point, ClusterDist cd) { return closest(clusters, point, cd, clusters.length)._dist; } private static double minSqr(double[][] clusters, double[] point, ClusterDist cd, int count) { return closest(clusters, point, cd, count)._dist; } /** Return both nearest of N cluster/centroids, and the square-distance. */ private static ClusterDist closest(double[][] clusters, double[] point, ClusterDist cd, int count) { int min = -1; double minSqr = Double.MAX_VALUE; for( int cluster = 0; cluster < count; cluster++ ) { double sqr = 0; // Sum of dimensional distances int pts = point.length; // Count of valid points for( int column = 0; column < clusters[cluster].length; column++ ) { double d = point[column]; if( Double.isNaN(d) ) { // Bad data? pts--; // Do not count } else { double delta = d - clusters[cluster][column]; sqr += delta * delta; } } // Scale distance by ratio of valid dimensions to all dimensions - since // we did not add any error term for the missing point, the sum of errors // is small - ratio up "as if" the missing error term is equal to the // average of other error terms. 
Same math another way: // double avg_dist = sqr / pts; // average distance per feature/column/dimension // sqr = sqr * point.length; // Total dist is average*#dimensions if( 0 < pts && pts < point.length ) sqr *= point.length / pts; if( sqr < minSqr ) { min = cluster; minSqr = sqr; } } cd._cluster = min; // Record nearest cluster cd._dist = minSqr; // Record square-distance return cd; // Return for flow-coding } // KMeans++ re-clustering public static double[][] recluster(double[][] points, int k, Random rand, Initialization init) { double[][] res = new double[k][]; res[0] = points[0]; int count = 1; ClusterDist cd = new ClusterDist(); switch( init ) { case None: break; case PlusPlus: { // k-means++ while( count < res.length ) { double sum = 0; for (double[] point1 : points) sum += minSqr(res, point1, cd, count); for (double[] point : points) { if (minSqr(res, point, cd, count) >= rand.nextDouble() * sum) { res[count++] = point; break; } } } break; } case Furthest: { // Takes cluster further from any already chosen ones while( count < res.length ) { double max = 0; int index = 0; for( int i = 0; i < points.length; i++ ) { double sqr = minSqr(res, points[i], cd, count); if( sqr > max ) { max = sqr; index = i; } } res[count++] = points[index]; } break; } default: throw new IllegalStateException(); } return res; } private void randomRow(Vec[] vecs, Random rand, double[] cluster, double[] means, double[] mults) { long row = Math.max(0, (long) (rand.nextDouble() * vecs[0].length()) - 1); data(cluster, vecs, row, means, mults); } private static boolean normalize(double sigma) { // TODO unify handling of constant columns return sigma > 1e-6; } private static double[][] normalize(double[][] clusters, Chunk[] chks) { double[][] value = new double[clusters.length][clusters[0].length]; for( int row = 0; row < value.length; row++ ) { for( int col = 0; col < clusters[row].length; col++ ) { double d = clusters[row][col]; Vec vec = chks[col]._vec; d -= vec.mean(); d /= 
normalize(vec.sigma()) ? vec.sigma() : 1; value[row][col] = d; } } return value; } private static double[][] denormalize(double[][] clusters, Vec[] vecs) { double[][] value = new double[clusters.length][clusters[0].length]; for( int row = 0; row < value.length; row++ ) { for( int col = 0; col < clusters[row].length; col++ ) { double d = clusters[row][col]; d *= vecs[col].sigma(); d += vecs[col].mean(); value[row][col] = d; } } return value; } private static void data(double[] values, Vec[] vecs, long row, double[] means, double[] mults) { for( int i = 0; i < values.length; i++ ) { double d = vecs[i].at(row); values[i] = data(d, i, means, mults); } } private static void data(double[] values, Chunk[] chks, int row, double[] means, double[] mults) { for( int i = 0; i < values.length; i++ ) { double d = chks[i].at0(row); values[i] = data(d, i, means, mults); } } /** * Takes mean if NaN, normalize if requested. */ private static double data(double d, int i, double[] means, double[] mults) { if( Double.isNaN(d) ) d = means[i]; if( mults != null ) { d -= means[i]; d *= mults[i]; } return d; } }
package hex.drf;

import hex.ShuffleTask;
import hex.gbm.*;
import hex.gbm.DTree.DecidedNode;
import hex.gbm.DTree.LeafNode;
import hex.gbm.DTree.UndecidedNode;

import java.util.Arrays;
import java.util.Random;

import water.*;
import water.api.DRFProgressPage;
import water.api.DocGen;
import water.fvec.*;
import water.util.*;
import water.util.Log.Tag.Sys;

import static water.util.Utils.avg;
import static water.util.Utils.div;
import static water.util.Utils.sum;

// Random Forest Trees
public class DRF extends SharedTreeModelBuilder {
  static final int API_WEAVER = 1; // This file has auto-gen'd doc & json fields
  static public DocGen.FieldDoc[] DOC_FIELDS; // Initialized from Auto-Gen code.

  @API(help = "Columns to randomly select at each level, or -1 for sqrt(#cols)", filter = Default.class, lmin=-1, lmax=100000)
  int mtries = -1;

  @API(help = "Sample rate, from 0. to 1.0", filter = Default.class, dmin=0, dmax=1)
  float sample_rate = 0.6666667f;

  @API(help = "Seed for the random number generator", filter = Default.class)
  long seed = 0x1321e74a0192470cL; // Only one hardcoded seed to receive the same results between runs

  @API(help = "Stop criterium for tree grow.", filter = Default.class, lmin=-1, lmax=1000 )
  int nodesize = -1; // nodesize = 1 for classification, else = 5

  /** DRF model holding serialized tree and implementing logic for scoring a row */
  public static class DRFModel extends DTree.TreeModel {
    static final int API_WEAVER = 1; // This file has auto-gen'd doc & json fields
    static public DocGen.FieldDoc[] DOC_FIELDS; // Initialized from Auto-Gen code.
    public DRFModel(Key key, Key dataKey, Key testKey, String names[], String domains[][], int ntrees) {
      super(key,dataKey,testKey,names,domains,ntrees);
    }
    /** Clone-with-more-trees constructor used after each scoring round. */
    public DRFModel(DRFModel prior, DTree[] trees, double err, long [][] cm) {
      super(prior, trees, err, cm);
    }
    /** Clone-with-variable-importances constructor. */
    public DRFModel(DRFModel prior, float[] varimp) {
      super(prior, varimp);
    }
    // Score one row: sum per-tree predictions from super, then normalize.
    @Override protected float[] score0(double data[], float preds[]) {
      float[] p = super.score0(data, preds);
      int ntrees = numTrees();
      if (p.length==1) { if (ntrees>0) div(p, ntrees); } // regression - compute avg over all trees
      else {
        float s = sum(p);
        div(p, s); // unify over all classes
      }
      return p;
    }
  }

  /** Score a frame with the model currently stored under this job's destination key. */
  public Frame score( Frame fr ) { return ((DRFModel)UKV.get(dest())).score(fr,true); }

  @Override protected Log.Tag.Sys logTag() { return Sys.DRF__; }

  public DRF() {
    description = "Distributed RF";
    ntrees = 50;
    max_depth = 50;
  }

  /** Return the query link to this page */
  public static String link(Key k, String content) {
    RString rs = new RString("<a href='DRF.query?source=%$key'>%content</a>");
    rs.replace("key", k.toString());
    rs.replace("content", content);
    return rs.toString();
  }

  // Compute a DRF tree.
  // Start by splitting all the data according to some criteria (minimize
  // variance at the leaves). Record on each row which split it goes to, and
  // assign a split number to it (for next pass). On *this* pass, use the
  // split-number to build a per-split histogram, with a per-histogram-bucket
  // variance.

  @Override protected void logStart() {
    Log.info("Starting DRF model build...");
    super.logStart();
    Log.info("sample_rate: " + sample_rate);
  }

  @Override protected void exec() {
    logStart();
    buildModel();
  }

  @Override protected Response redirect() { return DRFProgressPage.redirect(this, self(), dest()); }

  /**
   * Main model-building loop: builds {@code ntrees} rounds of K trees
   * (K = number of response classes), scoring after each round, then
   * (for classification) computes permutation variable importances.
   */
  @Override protected void buildModel( final Frame fr, String names[], String domains[][], final Key outputKey, final Key dataKey, final Key testKey, final Timer t_build ) {
    final int cmtries = (mtries==-1)
      ? // classification: mtry=sqrt(_ncols), regression: mtry=_ncols/3
        ( classification ? Math.max((int)Math.sqrt(_ncols),1) : Math.max(_ncols/3,1))
      : mtries;
    final int cnodesize = (nodesize==-1) ? (classification ? 1 : 5) : nodesize; // computed nodesize
    assert 1 <= cmtries && cmtries <= _ncols : "Too large mtries="+cmtries+", ncols="+_ncols;
    assert 0.0 < sample_rate && sample_rate <= 1.0;
    DRFModel model = new DRFModel(outputKey,dataKey,testKey,names,domains,ntrees);
    DKV.put(outputKey, model);
    // The RNG used to pick split columns
    Random rand = createRNG(seed);
    // Set a single 1.0 in the response for that class
    new Set1Task().doAll(fr);
    int tid = 0;
    DTree[] ktrees = null;
    // Build trees until we hit the limit
    for( tid=0; tid<ntrees; tid++) {
      // At each iteration build K trees (K = nclass = response column domain size)
      // TODO: parallelize ? build more than k trees at each time, we need to care about temporary data
      // Idea: launch more DRF at once.
      Timer t_kTrees = new Timer();
      // NOTE(review): cnodesize is passed below but buildNextKTrees never
      // reads its 'nodesize' parameter — the stop criterion appears to be
      // unimplemented. TODO confirm against upstream.
      ktrees = buildNextKTrees(fr,cmtries,cnodesize,sample_rate,rand);
      Log.info(Sys.DRF__, "Tree "+(tid+1)+"x"+_nclass+" produced in "+t_kTrees);
      if( cancelled() ) break; // If canceled during building, do not bulkscore
      // TODO: Do validation or OOBEE scoring only if trees are produced fast enough.
      model = doScoring(model, outputKey, fr, ktrees, tid);
    }
    // Do final scoring with all the trees.
    doScoring(model, outputKey, fr, ktrees, tid);
    if (classification) {
      float varimp[] = doVarImp(model, fr);
      System.err.println(Arrays.toString(varimp));
      // Update the model
      // NOTE(review): this varimp-carrying model is never DKV.put back under
      // outputKey, so the stored model may lack importances — verify intent.
      model = new DRFModel(model, varimp);
    }
    cleanUp(fr,t_build); // Shared cleanup
  }

  /** Score current trees against training/validation data and publish an updated model under outputKey. */
  private DRFModel doScoring(DRFModel model, Key outputKey, Frame fr, DTree[] ktrees, int tid ) {
    Score sc = new Score().doIt(model, fr, validation, _validResponse, validation==null).report(Sys.DRF__,tid,ktrees);
    model = new DRFModel(model, ktrees, (float)sc.sum()/sc.nrows(), sc.cm());
    DKV.put(outputKey, model);
    return model;
  }

  /**
   * Permutation variable importance: for each predictor column, shuffle it,
   * re-score, and record the per-tree drop in accuracy versus the
   * unpermuted baseline. Returns the mean accuracy decrease per column.
   */
  private float[] doVarImp(DRFModel model, Frame f) {
    // frame has _ncols column with features and response column and working columns
    float[] varimp = new float[_ncols];
    int ntrees = model.numTrees();
    // Score a dataset as usual but collects properties per tree.
    TreeModelCM cx = TreeModelCM.varimp(model, f, sample_rate);
    // non-permuted number of votes
    double[] origAcc = cx.accuracy();
    System.err.println("Tree: " + Arrays.toString(origAcc));
    System.err.println("Tree: " + Arrays.toString(cx.treeCVotes()));
    assert origAcc.length == ntrees;
    // Copy the frame
    Frame wf = new Frame(f);
    for (int var=0; var<_ncols; var++) {
      Vec varv = wf.vecs()[var]; // vector which we use to measure variable importance
      Vec sv = ShuffleTask.shuffle(varv); // create a shuffled vector
      wf.replace(var, sv);
      // Compute oobee with shuffled data
      TreeModelCM cd = TreeModelCM.varimp(model, wf, sample_rate);
      double[] accdiff = cd.accuracy();
      System.err.println("Var. " + model._names[var] + ": " + Arrays.toString(accdiff));
      System.err.println("Var. " + model._names[var] + ": " + Arrays.toString(cd.treeCVotes()));
      assert accdiff.length == origAcc.length;
      // compute decrease of accuracy
      long[] tmp = cd.treeCVotes();
      for (int t=0; t<ntrees;t++ ) {
        accdiff[t] = origAcc[t] - accdiff[t];
        tmp[t] = cx.treeCVotes()[t] - tmp[t];
      }
      System.err.println(Arrays.toString(tmp));
      varimp[var] = (float) avg(accdiff);
      // Reconstruct the original frame
      wf.replace(var, varv); // Remove shuffled vector
      UKV.remove(sv._key);
    }
    return varimp;
  }

  /** Writes 1.0 into the per-class work column matching each row's response class. */
  private class Set1Task extends MRTask2<Set1Task> {
    @Override public void map( Chunk chks[] ) {
      Chunk cy = chk_resp(chks);
      for( int i=0; i<cy._len; i++ ) {
        if( cy.isNA0(i) ) continue;
        int cls = (int)cy.at80(i);
        chk_work(chks,cls).set0(i,1.0f);
      }
    }
  }

  // Build the next random k-trees
  // NOTE(review): the 'nodesize' parameter is accepted but never used in this
  // method body — TODO confirm whether a minimum-rows-per-leaf check is missing.
  private DTree[] buildNextKTrees(Frame fr, int mtrys, int nodesize, float sample_rate, Random rand) {
    // We're going to build K (nclass) trees - each focused on correcting
    // errors for a single class.
    final DTree[] ktrees = new DTree[_nclass];
    // Use for all k-trees the same seed. NOTE: this is only to make a fair view for all k-trees
    long rseed = rand.nextLong();
    for( int k=0; k<_nclass; k++ ) {
      // Initially setup as-if an empty-split had just happened
      if( _distribution[k] != 0 ) {
        ktrees[k] = new DRFTree(fr,_ncols,(char)nbins,(char)_nclass,min_rows,mtrys,rseed);
        new DRFUndecidedNode(ktrees[k],-1,DBinHistogram.initialHist(fr,_ncols,(char)nbins)); // The "root" node
      }
    }
    // Sample - mark the lines by putting 'OUT_OF_BAG' into nid(<klass>) vector
    for( int k=0; k<_nclass; k++) {
      if (ktrees[k] != null) new Sample(((DRFTree)ktrees[k]), sample_rate).doAll(vec_nids(fr,k));
    }

    int[] leafs = new int[_nclass]; // Define a "working set" of leaf splits, from leafs[i] to tree._len for each tree i

    // One Big Loop till the ktrees are of proper depth.
    // Adds a layer to the trees each pass.
    int depth=0;
    for( ; depth<max_depth; depth++ ) {
      if( cancelled() ) return null;
      // Build K trees, one per class.
      // Fuse 2 conceptual passes into one:
      // Pass 1: Score a prior DHistogram, and make new DTree.Node assignments
      // to every row. This involves pulling out the current assigned Node,
      // "scoring" the row against that Node's decision criteria, and assigning
      // the row to a new child Node (and giving it an improved prediction).
      // Pass 2: Build new summary DHistograms on the new child Nodes every row
      // got assigned into. Collect counts, mean, variance, min, max per bin,
      // per column.
      ScoreBuildHistogram sbh = new ScoreBuildHistogram(ktrees,leafs).doAll(fr);
      //System.out.println(sbh.profString());

      // Build up the next-generation tree splits from the current histograms.
      // Nearly all leaves will split one more level. This loop nest is
      // O( #active_splits * #bins * #ncols )
      // but is NOT over all the data.
      boolean did_split=false;
      for( int k=0; k<_nclass; k++ ) {
        DTree tree = ktrees[k]; // Tree for class K
        if( tree == null ) continue;
        int tmax = tree.len(); // Number of total splits in tree K
        for( int leaf=leafs[k]; leaf<tmax; leaf++ ) { // Visit all the new splits (leaves)
          UndecidedNode udn = tree.undecided(leaf);
          udn._hs = sbh.getFinalHisto(k,leaf);
          //System.out.println("Class "+(domain!=null?domain[k]:k)+",\n Undecided node:"+udn);
          // Replace the Undecided with the Split decision
          DRFDecidedNode dn = new DRFDecidedNode((DRFUndecidedNode)udn);
          //System.out.println("  --> Decided node: " + dn);
          if( dn._split.col() == -1 ) udn.do_not_split();
          else did_split = true;
        }
        leafs[k]=tmax; // Setup leafs for next tree level
      }
      // If we did not make any new splits, then the tree is split-to-death
      if( !did_split ) break;
    }

    // Each tree bottomed-out in a DecidedNode; go 1 more level and insert
    // LeafNodes to hold predictions.
    for( int k=0; k<_nclass; k++ ) {
      DTree tree = ktrees[k];
      if( tree == null ) continue;
      int leaf = leafs[k] = tree.len();
      for( int nid=0; nid<leaf; nid++ ) {
        if( tree.node(nid) instanceof DecidedNode ) {
          DecidedNode dn = tree.decided(nid);
          for( int i=0; i<dn._nids.length; i++ ) {
            int cnid = dn._nids[i];
            if( cnid == -1 || // Bottomed out (predictors or responses known constant)
                tree.node(cnid) instanceof UndecidedNode || // Or chopped off for depth
                (tree.node(cnid) instanceof DecidedNode &&  // Or not possible to split
                 ((DecidedNode)tree.node(cnid))._split.col()==-1) ) {
              DRFLeafNode nleaf = new DRFLeafNode(tree,nid);
              dn._nids[i] = nleaf.nid(); // Mark a leaf here
            }
          }
          // Handle the trivial non-splitting tree
          if( nid==0 && dn._split.col() == -1 ) new DRFLeafNode(tree,-1,0);
        }
      }
    } // -- k-trees are done

    // Collect votes for the tree.
    CollectPreds gp = new CollectPreds(ktrees,leafs).doAll(fr);
    for( int k=0; k<_nclass; k++ ) {
      final DTree tree = ktrees[k];
      if( tree == null ) continue;
      for( int i=0; i<tree.len()-leafs[k]; i++ ) {
        // setup prediction for k-tree's i-th leaf
        ((LeafNode)tree.node(leafs[k]+i)).pred( gp._votes[k][i] );
      }
    }

    // Tree <== f(Tree)
    // Nids <== 0
    new MRTask2() {
      @Override public void map( Chunk chks[] ) {
        // For all tree/klasses
        for( int k=0; k<_nclass; k++ ) {
          final DTree tree = ktrees[k];
          if( tree == null ) continue;
          final Chunk nids = chk_nids(chks,k);
          final Chunk ct = chk_tree(chks,k);
          for( int row=0; row<nids._len; row++ ) {
            int nid = (int)nids.at80(row);
            // Track only prediction for oob rows
            if (isOOBRow(nid)) {
              //System.err.println("k="+k + " row="+row + " is oob");
              nid = oob2Nid(nid);
              // Setup Tree(i) - on the fly prediction of i-tree for row-th row
              ct.set0(row, (float)(ct.at0(row) + ((LeafNode)tree.node(nid)).pred() ));
            }
            // reset help column
            nids.set0(row,0);
          }
        }
      }
    }.doAll(fr);

    // DEBUG: Print the generated K trees
    // printGenerateTrees(ktrees);

    return ktrees;
  }

  @SuppressWarnings("unused") // helper for debugging
  private void printGenerateTrees(DTree[] trees) {
    for( int k=0; k<_nclass; k++ )
      if( trees[k] != null )
        System.out.println(trees[k].root().toString2(new StringBuilder(),0));
  }

  // Read the 'tree' columns, do model-specific math and put the results in the
  // ds[] array, and return the sum.  Dividing any ds[] element by the sum
  // turns the results into a probability distribution.
  @Override protected double score0( Chunk chks[], double ds[/*nclass*/], int row ) {
    double sum=0;
    for( int k=0; k<_nclass; k++ ) // Sum across of likelyhoods
      sum+=(ds[k]=chk_tree(chks,k).at0(row));
    return sum;
  }

  // Collect and write predictions into leafs.
  private class CollectPreds extends MRTask2<CollectPreds> {
    final DTree _trees[]; // Read-only, shared (except at the histograms in the Nodes)
    final int   _leafs[]; // Number of active leaves (per tree)
    // Per leaf: sum(votes);
    double _votes[/*tree/klass*/][/*tree-relative node-id*/];
    CollectPreds(DTree trees[], int leafs[]) { _leafs=leafs; _trees=trees; }
    @Override public void map( Chunk[] chks ) {
      _votes = new double[_nclass][];
      // For all tree/klasses
      for( int k=0; k<_nclass; k++ ) {
        final DTree tree = _trees[k];
        final int   leaf = _leafs[k];
        if( tree == null ) continue; // Empty class is ignored
        // A leaf-biased array of all active Tree leaves.
        final double vs[] = _votes[k] = new double[tree.len()-leaf];
        final Chunk nids = chk_nids(chks,k); // Node-ids  for this tree/class
        final Chunk vss  = chk_work(chks,k); // Votes for this tree/class
        // If we have all constant responses, then we do not split even the
        // root and the residuals should be zero.
        if( tree.root() instanceof LeafNode ) continue;
        for( int row=0; row<nids._len; row++ ) { // For all rows
          int nid = (int)nids.at80(row);         // Get Node to decide from
          boolean oobrow = false;
          // This is out-of-bag row - but we would like to track on-the-fly prediction for the row
          if (isOOBRow(nid)) { oobrow = true; nid = oob2Nid(nid); }
          if( tree.node(nid) instanceof UndecidedNode ) // If we bottomed out the tree
            nid = tree.node(nid).pid();                 // Then take parent's decision
          DecidedNode dn = tree.decided(nid);           // Must have a decision point
          if( dn._split.col() == -1 )     // Unable to decide?
            dn = tree.decided(nid = tree.node(nid).pid()); // Then take parent's decision
          int leafnid = dn.ns(chks,row); // Decide down to a leafnode
          assert leaf <= leafnid && leafnid < tree.len(); // we cannot obtain unknown leaf
          assert tree.node(leafnid) instanceof LeafNode;
          nids.set0(row,(oobrow ? nid2Oob(leafnid) : leafnid));
          // Note: I can see which leaf/region I end up in, but I do not care for
          // the prediction presented by the tree.  For GBM, we compute the
          // sum-of-residuals (and sum/abs/mult residuals) for all rows in the
          // leaf, and get our prediction from that.
          if (!oobrow) {
            double v = vss.at0(row); // How many rows in this leaf has predicted k-class.
            vs[leafnid-leaf] += v;
          }
        }
      }
    }
    @Override public void reduce( CollectPreds gp ) { Utils.add(_votes,gp._votes); }
  }

  // A standard DTree with a few more bits.  Support for sampling during
  // training, and replaying the sample later on the identical dataset to
  // e.g. compute OOBEE.
  static class DRFTree extends DTree {
    final int _mtrys;              // Number of columns to choose amongst in splits
    final long _seeds[];           // One seed for each chunk, for sampling
    final transient Random _rand;  // RNG for split decisions & sampling
    DRFTree( Frame fr, int ncols, char nbins, char nclass, int min_rows, int mtrys, long seed ) {
      super(fr._names, ncols, nbins, nclass, min_rows, seed);
      _mtrys = mtrys;
      _rand = createRNG(seed);
      // Pre-draw one sampling seed per chunk so replay is deterministic.
      _seeds = new long[fr.vecs()[0].nChunks()];
      for( int i=0; i<_seeds.length; i++ ) _seeds[i] = _rand.nextLong();
    }
    // Return a deterministic chunk-local RNG.  Can be kinda expensive.
    @Override public Random rngForChunk( int cidx ) {
      long seed = _seeds[cidx];
      return createRNG(seed);
    }
  }

  // DRF DTree decision node: same as the normal DecidedNode, but specifies a
  // decision algorithm given complete histograms on all columns.
  // DRF algo: find the lowest error amongst a random mtry columns.
  static class DRFDecidedNode extends DecidedNode<DRFUndecidedNode> {
    DRFDecidedNode( DRFUndecidedNode n ) { super(n); }

    @Override public DRFUndecidedNode makeUndecidedNode(DBinHistogram[] nhists ) {
      return new DRFUndecidedNode(_tree,_nid,nhists);
    }

    // Find the column with the best split (lowest score).
    @Override public DTree.Split bestCol( DRFUndecidedNode u ) {
      DTree.Split best = new DTree.Split(-1,-1,false,Double.MAX_VALUE,Double.MAX_VALUE,0L,0L);
      if( u._hs == null ) return best;
      for( int i=0; i<u._scoreCols.length; i++ ) {
        int col = u._scoreCols[i];
        DTree.Split s = u._hs[col].scoreMSE(col);
        if( s == null ) continue;
        if( s.se() < best.se() ) best = s;
        if( s.se() <= 0 ) break; // No point in looking further!
      }
      return best;
    }
  }

  // DRF DTree undecided node: same as the normal UndecidedNode, but specifies
  // a list of columns to score on now, and then decide over later.
  // DRF algo: pick a random mtry columns
  static class DRFUndecidedNode extends UndecidedNode {
    DRFUndecidedNode( DTree tree, int pid, DBinHistogram hs[] ) { super(tree,pid,hs); }

    // Randomly select mtry columns to 'score' in following pass over the data.
    @Override public int[] scoreCols( DHistogram[] hs ) {
      DRFTree tree = (DRFTree)_tree;
      int[] cols = new int[hs.length];
      int len=0;
      // Gather all active columns to choose from.  Ignore columns we
      // previously ignored, or columns with 1 bin (nothing to split), or
      // histogramed bin min==max (means the predictors are constant).
      for( int i=0; i<hs.length; i++ ) {
        if( hs[i]==null ) continue; // Ignore not-tracked cols
        if( hs[i].min() == hs[i].max() ) continue; // predictor min==max, does not distinguish
        if( hs[i].nbins() <= 1 ) continue; // cols with 1 bin (will not split)
        cols[len++] = i; // Gather active column
      }
      int choices = len; // Number of columns I can choose from
      if( choices == 0 ) {
        // NOTE(review): this diagnostic loop builds 's' per column but never
        // logs or prints it — dead code; presumably a Log/println call was
        // intended before the assert below fires. TODO confirm.
        for( int i=0; i<hs.length; i++ ) {
          String s;
          if( hs[i]==null ) s="null";
          else if( hs[i].min() == hs[i].max() ) s=hs[i].name()+"=min==max=="+hs[i].min();
          else if( hs[i].nbins() <= 1 ) s=hs[i].name()+"=nbins=" +hs[i].nbins();
          else s=hs[i].name()+"=unk";
        }
      }
      assert choices > 0;

      // Draw up to mtry columns at random without replacement.
      for( int i=0; i<tree._mtrys; i++ ) {
        if( len == 0 ) break; // Out of choices!
        int idx2 = tree._rand.nextInt(len);
        int col = cols[idx2];     // The chosen column
        cols[idx2] = cols[--len]; // Compress out of array; do not choose again
        cols[len] = col;          // Swap chosen in just after 'len'
      }
      assert choices - len > 0;
      return Arrays.copyOfRange(cols,len,choices);
    }
  }

  static class DRFLeafNode extends LeafNode {
    DRFLeafNode( DTree tree, int pid ) { super(tree,pid); }
    DRFLeafNode( DTree tree, int pid, int nid ) { super(tree,pid,nid); }
    // Insert just the predictions: a single byte/short if we are predicting a
    // single class, or else the full distribution.
    @Override protected AutoBuffer compress(AutoBuffer ab) { assert !Double.isNaN(pred()); return ab.put4f((float)pred()); }
    @Override protected int size() { return 4; }
  }

  // Deterministic sampling
  static class Sample extends MRTask2<Sample> {
    final DRFTree _tree;
    final float _rate;
    Sample( DRFTree tree, float rate ) { _tree = tree; _rate = rate; }
    @Override public void map( Chunk nids ) {
      // Chunk-local RNG makes the sample replayable for OOB scoring.
      Random rand = _tree.rngForChunk(nids.cidx());
      for( int row=0; row<nids._len; row++ )
        if( rand.nextFloat() >= _rate ) nids.set0(row, OUT_OF_BAG); // Flag row as being ignored by sampling
    }
  }
}
package com.atteq.asl;

import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URI;
import java.net.URL;

import org.apache.log4j.Logger;
import org.codehaus.jackson.map.type.TypeFactory;
import org.codehaus.jackson.type.JavaType;

import com.atteq.asl.performers.Performer;
import com.atteq.asl.results.GenericResult;
import com.atteq.asl.results.Result;
import com.atteq.asl.results.ResultTransformer;
import com.atteq.asl.tasks.Task;
import com.atteq.asl.tasks.TaskResult;
import com.atteq.asl.utils.StringHelper;
import com.google.common.io.CharStreams;

/**
 * HTTP-based implementation of the Atteq Service Layer client.
 *
 * <p>Builds the request URL from the configured service-layer base URL and the
 * given {@link Performer}, optionally sends a request body for POST/PUT, and
 * hands the raw response to a {@link ResultTransformer}.</p>
 */
public class AtteqServiceLayerImpl implements SecuredAtteqServiceLayer {

    private String serviceLayerUrl;

    private String securityToken;

    private boolean checkAslVersion = false;

    static Logger logger = Logger.getLogger(AtteqServiceLayerImpl.class);

    /** Client-side protocol version, compared against the ASL-Flask-Layer header. */
    private final static String ASL_VERSION = "1.1";

    /**
     * Performs the HTTP call described by the performer and transforms the response.
     *
     * @param performer         describes URL, method, body, content type and encoding
     * @param resultTransformer converts the raw response body into a result
     * @param t                 Jackson type of the expected payload
     * @return transformed result
     * @throws ServiceCallException wrapping any failure (I/O, version mismatch, transform)
     */
    @Override
    public <T, R extends Result<T>> R perform(Performer performer, ResultTransformer<T, R> resultTransformer, JavaType t)
            throws ServiceCallException {
        try {
            URI baseUri = new URI(serviceLayerUrl);
            URL url = performer.getUrl(baseUri.getScheme(), baseUri.getHost());
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod(performer.getHttpMethod().toString());
            String body = performer.getBody();
            logger.debug(String.format("%s %s", performer.getHttpMethod(), url));
            logger.debug(body);
            if ((performer.getHttpMethod() == HttpMethod.POST || performer.getHttpMethod() == HttpMethod.PUT)
                    && !StringHelper.isNullOrEmpty(body)) {
                conn.setRequestProperty("Content-Type",
                        performer.getContentType() + ";charset=" + performer.getEncoding().toUpperCase());
                conn.setRequestProperty("Content-Encoding", performer.getEncoding().toUpperCase());
                byte[] rawBody = body.getBytes(performer.getEncoding());
                conn.setRequestProperty("Content-Length", Integer.toString(rawBody.length));
                conn.setDoOutput(true);
                // FIX: close the output stream even if write() throws (was leaked on
                // the error path before).
                try (OutputStream os = conn.getOutputStream()) {
                    os.write(rawBody);
                }
            }
            if (getCheckAslVersion()) {
                String h = conn.getHeaderField("ASL-Flask-Layer");
                String serverVersion = (h == null ? "" : h);
                if (!serverVersion.startsWith(ASL_VERSION)) {
                    throw new ServiceCallException(String.format(
                            "The service version '%s' is not compatibile with the client version '%s'.",
                            serverVersion, ASL_VERSION));
                }
            }
            // NOTE(review): the reader uses the platform default charset (unchanged
            // behavior); the response charset header is not consulted — confirm
            // whether UTF-8 should be forced here.
            // NOTE(review): getErrorStream() may be null when there is no error body;
            // that NPE is wrapped by the catch below, as before.
            String result;
            try (InputStreamReader reader = new InputStreamReader(
                    conn.getResponseCode() == HTTP_STATUS_CODE_OK ? conn.getInputStream() : conn.getErrorStream())) {
                // FIX: close the response stream after reading (was never closed).
                result = CharStreams.toString(reader);
            }
            logger.debug(String.format("Response:\n%s", result));
            return resultTransformer.transform(performer, result, conn.getResponseCode(), t);
        } catch (Exception e) {
            throw new ServiceCallException(String.format("Error when calling ASL. %s", e.getMessage()), e);
        }
    }

    /** Convenience overload: builds the Jackson type from a plain class. */
    public <T, R extends Result<T>> R perform(Performer performer, ResultTransformer<T, R> resultTransformer, Class<T> c)
            throws ServiceCallException {
        return perform(performer, resultTransformer, TypeFactory.defaultInstance().constructType(c));
    }

    /** Runs a task and returns its typed result. */
    public <T> TaskResult<T> perform(Task task, JavaType t) throws ServiceCallException {
        return CallHelper.perform(this, task, t);
    }

    /** Runs a task and returns its typed result. */
    public <T> TaskResult<T> perform(Task task, Class<T> c) throws ServiceCallException {
        return CallHelper.perform(this, task, c);
    }

    /** Runs a task, decorating server-side errors. */
    public <T> TaskResult<T> performWithErrorDecorator(Task task, JavaType t) throws ServiceCallException {
        return CallHelper.performWithErrorDecorator(this, task, t);
    }

    /** Runs a task, decorating server-side errors. */
    public <T> TaskResult<T> performWithErrorDecorator(Task task, Class<T> c) throws ServiceCallException {
        return CallHelper.performWithErrorDecorator(this, task, c);
    }

    /** Runs a generic performer call. */
    public <T> GenericResult<T> perform(Performer performer, JavaType t) throws ServiceCallException {
        return CallHelper.perform(this, performer, t);
    }

    /** Runs a generic performer call. */
    public <T> GenericResult<T> perform(Performer performer, Class<T> c) throws ServiceCallException {
        return CallHelper.perform(this, performer, c);
    }

    public String getServiceLayerUrl() {
        return serviceLayerUrl;
    }

    public void setServiceLayerUrl(String serviceLayerUrl) {
        this.serviceLayerUrl = serviceLayerUrl;
    }

    public String getSecurityToken() {
        return securityToken;
    }

    public void setSecurityToken(String securityToken) {
        this.securityToken = securityToken;
    }

    public boolean getCheckAslVersion() {
        return checkAslVersion;
    }

    public void setCheckAslVersion(boolean checkAslVersion) {
        this.checkAslVersion = checkAslVersion;
    }

    public static final int HTTP_STATUS_CODE_OK = 200;
}
package org.jetel.test;

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.InputStream;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;

import junit.framework.TestCase;

import org.apache.commons.io.IOUtils;
import org.jetel.exception.ComponentNotReadyException;
import org.jetel.exception.GraphConfigurationException;
import org.jetel.exception.XMLConfigurationException;
import org.jetel.graph.Result;
import org.jetel.graph.TransformationGraph;
import org.jetel.graph.TransformationGraphXMLReaderWriter;
import org.jetel.graph.runtime.EngineInitializer;
import org.jetel.graph.runtime.GraphRuntimeContext;
import org.jetel.graph.runtime.SimpleThreadManager;
import org.jetel.graph.runtime.WatchDog;

/**
 * Base class for CloverETL test cases: lazily initializes the engine (once,
 * shared across test cases) and offers helpers to load and run a
 * transformation graph from an XML file.
 */
public abstract class CloverTestCase extends TestCase {

    public CloverTestCase() {
        super();
    }

    public CloverTestCase(String name) {
        super(name);
    }

    // Environment variable naming the CloverETL plugins directory.
    private static final String PLUGINS_KEY = "cloveretl.plugins";
    // Fallback plugins directory used when the env variable is absent.
    private static final String PLUGINS_DEFAULT_DIR = "..";

    /** Initializes the engine with default properties. */
    protected void initEngine() {
        initEngine(null);
    }

    /**
     * Initializes the CloverETL engine and activates all plugins.
     *
     * @param defaultPropertiesFile path to engine properties, or null for defaults
     */
    protected void initEngine(String defaultPropertiesFile) {
        final String pluginsDir;
        final String pluginsProperty = System.getenv(PLUGINS_KEY);
        if (pluginsProperty != null) {
            pluginsDir = pluginsProperty;
        } else {
            pluginsDir = PLUGINS_DEFAULT_DIR;
        }
        System.out.println("Cloveretl plugins: " + pluginsDir);
        EngineInitializer.initEngine(pluginsDir, defaultPropertiesFile, null);
        EngineInitializer.forceActivateAllPlugins();
    }

    @Override
    protected void setUp() throws Exception {
        // Initialize lazily so repeated test cases share one engine instance.
        if (!EngineInitializer.isInitialized()) {
            initEngine(getCloverPropertiesFile());
        }
    }

    /**
     * Subclasses may override to supply a custom engine properties file;
     * null means engine defaults.
     */
    protected String getCloverPropertiesFile() {
        return null;
    }

    /**
     * Loads and initializes a transformation graph from an XML file.
     *
     * @param path    path to the graph XML file
     * @param context runtime context (JMX is disabled for tests)
     * @return the initialized graph
     */
    protected TransformationGraph createTransformationGraph(String path, GraphRuntimeContext context)
            throws FileNotFoundException, GraphConfigurationException, XMLConfigurationException,
            ComponentNotReadyException {
        InputStream in = new BufferedInputStream(new FileInputStream(path));
        try {
            context.setUseJMX(false);
            TransformationGraph graph = TransformationGraphXMLReaderWriter.loadGraph(in, context);
            EngineInitializer.initGraph(graph, context);
            return graph;
        } finally {
            IOUtils.closeQuietly(in);
        }
    }

    /**
     * Runs the graph under a watchdog and blocks until it finishes.
     * If the run ends in ERROR with a known cause, the cause is rethrown.
     *
     * @return the final graph result
     */
    protected Result runGraph(TransformationGraph graph) throws ExecutionException, InterruptedException {
        WatchDog watchDog = new WatchDog(graph, graph.getRuntimeContext());
        watchDog.init();
        SimpleThreadManager manager = new SimpleThreadManager();
        Future<Result> result = manager.executeWatchDog(watchDog);
        Result value = result.get();
        if (value == Result.ERROR) {
            // Surface the underlying failure rather than just the ERROR status.
            if (watchDog.getCauseException() != null) {
                rethrowRuntime(watchDog.getCauseException());
            }
        }
        return value;
    }

    /**
     * Rethrows a throwable as-is when unchecked, otherwise wrapped in a
     * RuntimeException; null is silently ignored.
     */
    protected static void rethrowRuntime(Throwable throwable) {
        if (throwable == null) {
            return;
        }
        if (throwable instanceof RuntimeException) {
            throw (RuntimeException) throwable;
        } else if (throwable instanceof Error) {
            throw (Error) throwable;
        } else {
            throw new RuntimeException(throwable);
        }
    }
}
package com.jetbrains.python.formatter;

import com.intellij.formatting.*;
import com.intellij.lang.ASTNode;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.CodeStyleSettings;
import com.intellij.psi.impl.source.tree.TreeUtil;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.tree.TokenSet;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.IncorrectOperationException;
import com.jetbrains.python.PyElementTypes;
import com.jetbrains.python.PyTokenTypes;
import com.jetbrains.python.PythonFileType;
import com.jetbrains.python.psi.*;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import static com.jetbrains.python.psi.PyUtil.sure;

/**
 * Formatting block for Python: wraps one AST node and supplies the formatter
 * with sub-blocks, indents, alignments, wraps and inter-block spacing.
 *
 * @author yole
 */
public class PyBlock implements ASTBlock {
    private final Alignment _alignment;
    private final Indent _indent;
    private final ASTNode _node;
    private final Wrap _wrap;
    private final CodeStyleSettings mySettings;
    private List<PyBlock> _subBlocks = null; // built lazily by getSubBlocks()
    private Alignment myChildAlignment;      // shared alignment handed to aligned children
    private static final boolean DUMP_FORMATTING_BLOCKS = false;

    // Node types whose children are treated as list elements (wrap/align candidates).
    private static final TokenSet ourListElementTypes =
            TokenSet.create(PyElementTypes.LIST_LITERAL_EXPRESSION,
                            PyElementTypes.LIST_COMP_EXPRESSION,
                            PyElementTypes.DICT_COMP_EXPRESSION,
                            PyElementTypes.SET_COMP_EXPRESSION,
                            PyElementTypes.DICT_LITERAL_EXPRESSION,
                            PyElementTypes.SET_LITERAL_EXPRESSION,
                            PyElementTypes.ARGUMENT_LIST,
                            PyElementTypes.PARAMETER_LIST,
                            PyElementTypes.TUPLE_EXPRESSION,
                            PyElementTypes.PARENTHESIZED_EXPRESSION,
                            PyElementTypes.SLICE_EXPRESSION,
                            PyElementTypes.SUBSCRIPTION_EXPRESSION);

    private static final TokenSet ourBrackets =
            TokenSet.create(PyTokenTypes.LPAR, PyTokenTypes.RPAR,
                            PyTokenTypes.LBRACE, PyTokenTypes.RBRACE,
                            PyTokenTypes.LBRACKET, PyTokenTypes.RBRACKET);

    public PyBlock(final ASTNode node, final Alignment alignment, final Indent indent,
                   final Wrap wrap, final CodeStyleSettings settings) {
        _alignment = alignment;
        _indent = indent;
        _node = node;
        _wrap = wrap;
        mySettings = settings;
    }

    @NotNull
    public ASTNode getNode() {
        return _node;
    }

    @NotNull
    public TextRange getTextRange() {
        return _node.getTextRange();
    }

    // Lazily creates the single Alignment object shared by aligned children.
    private Alignment getAlignmentForChildren() {
        if (myChildAlignment == null) {
            myChildAlignment = Alignment.createAlignment();
        }
        return myChildAlignment;
    }

    @NotNull
    public List<Block> getSubBlocks() {
        if (_subBlocks == null) {
            _subBlocks = buildSubBlocks();
            if (DUMP_FORMATTING_BLOCKS) {
                dumpSubBlocks();
            }
        }
        return new ArrayList<Block>(_subBlocks);
    }

    // Builds one child block per non-empty, non-whitespace AST child.
    private List<PyBlock> buildSubBlocks() {
        List<PyBlock> blocks = new ArrayList<PyBlock>();
        for (ASTNode child = _node.getFirstChildNode(); child != null; child = child.getTreeNext()) {
            IElementType childType = child.getElementType();
            if (child.getTextRange().getLength() == 0) continue;
            if (childType == TokenType.WHITE_SPACE) {
                continue;
            }
            blocks.add(buildSubBlock(child));
        }
        return Collections.unmodifiableList(blocks);
    }

    // Decides wrap/indent/alignment for one child based on its type and context.
    private PyBlock buildSubBlock(ASTNode child) {
        IElementType parentType = _node.getElementType();
        IElementType grandparentType =
                _node.getTreeParent() == null ? null : _node.getTreeParent().getElementType();
        IElementType childType = child.getElementType();
        Wrap wrap = null;
        Indent childIndent = Indent.getNoneIndent();
        Alignment childAlignment = null;
        if (childType == PyElementTypes.STATEMENT_LIST || childType == PyElementTypes.IMPORT_ELEMENT) {
            if (hasLineBreaksBefore(child, 1)) {
                childIndent = Indent.getNormalIndent();
            }
        }
        if (ourListElementTypes.contains(parentType)) {
            // wrapping in non-parenthesized tuple expression is not allowed (PY-1792)
            if ((parentType != PyElementTypes.TUPLE_EXPRESSION ||
                 grandparentType == PyElementTypes.PARENTHESIZED_EXPRESSION) &&
                !ourBrackets.contains(childType)) {
                wrap = Wrap.createWrap(WrapType.NORMAL, true);
            }
            if (needListAlignment(child)) {
                childAlignment = getAlignmentForChildren();
            }
        }
        else if (parentType == PyElementTypes.BINARY_EXPRESSION &&
                 PyElementTypes.EXPRESSIONS.contains(childType)) {
            childAlignment = getAlignmentForChildren();
        }
        if (parentType == PyElementTypes.LIST_LITERAL_EXPRESSION) {
            if (childType == PyTokenTypes.RBRACKET || childType == PyTokenTypes.LBRACKET) {
                childIndent = Indent.getNoneIndent();
            }
            else {
                childIndent = Indent.getContinuationIndent();
            }
        }
        else if (parentType == PyElementTypes.ARGUMENT_LIST) {
            if (childType == PyTokenTypes.RPAR) {
                childIndent = Indent.getNoneIndent();
            }
            else {
                childIndent = Indent.getContinuationIndent();
            }
        }
        else if (parentType == PyElementTypes.DICT_LITERAL_EXPRESSION) {
            if (childType == PyTokenTypes.RBRACE) {
                childIndent = Indent.getNoneIndent();
            }
            else {
                childIndent = Indent.getNormalIndent();
            }
        }
        else if (parentType == PyElementTypes.STRING_LITERAL_EXPRESSION) {
            if (childType == PyTokenTypes.STRING_LITERAL) {
                childAlignment = getAlignmentForChildren();
            }
        }
        else if (parentType == PyElementTypes.FROM_IMPORT_STATEMENT) {
            if ((childType == PyElementTypes.IMPORT_ELEMENT || childType == PyTokenTypes.RPAR) &&
                _node.findChildByType(PyTokenTypes.LPAR) != null) {
                childAlignment = getAlignmentForChildren();
            }
        }
        if (isAfterStatementList(child) && !hasLineBreaksBefore(child, 2)) {
            // maybe enter was pressed and cut us from a previous (nested) statement list
            childIndent = Indent.getNormalIndent();
        }
        return new PyBlock(child, childAlignment, childIndent, wrap, mySettings);
    }

    // True when 'child' directly follows a statement that ends with a statement list.
    private static boolean isAfterStatementList(ASTNode child) {
        try {
            PsiElement prev = sure(child.getPsi().getPrevSibling());
            sure(prev instanceof PyStatement);
            PsiElement lastchild = PsiTreeUtil.getDeepestLast(prev);
            sure(lastchild.getParent() instanceof PyStatementList);
            return true;
        }
        catch (IncorrectOperationException e) {
            // not our cup of tea
            return false;
        }
    }

    // List elements align to each other; braces don't, except a closing brace of
    // an argument list which aligns to the arguments.
    private static boolean needListAlignment(ASTNode child) {
        IElementType childType = child.getElementType();
        if (PyTokenTypes.OPEN_BRACES.contains(childType)) {
            return false;
        }
        if (PyTokenTypes.CLOSE_BRACES.contains(childType)) {
            return PsiTreeUtil.getParentOfType(child.getPsi(), PyArgumentList.class) != null;
        }
        return true;
    }

    // True if at least minCount line breaks precede 'child' (checking both the
    // whitespace before it and leading whitespace inside it).
    private static boolean hasLineBreaksBefore(ASTNode child, int minCount) {
        return isWhitespaceWithLineBreaks(TreeUtil.findLastLeaf(child.getTreePrev()), minCount) ||
               isWhitespaceWithLineBreaks(child.getFirstChildNode(), minCount);
    }

    private static boolean isWhitespaceWithLineBreaks(ASTNode node, int minCount) {
        if (node != null && node.getElementType() == TokenType.WHITE_SPACE) {
            String prevNodeText = node.getText();
            int count = 0;
            for (int i = 0; i < prevNodeText.length(); i++) {
                if (prevNodeText.charAt(i) == '\n') {
                    count++;
                    if (count == minCount) {
                        return true;
                    }
                }
            }
        }
        return false;
    }

    // Debug helper, enabled via DUMP_FORMATTING_BLOCKS.
    private void dumpSubBlocks() {
        System.out.println("Subblocks of " + _node.getPsi() + ":");
        for (Block block : _subBlocks) {
            if (block instanceof PyBlock) {
                System.out.println("  " + ((PyBlock)block).getNode().getPsi().toString() + " " +
                                   block.getTextRange().getStartOffset() + ":" +
                                   block.getTextRange().getLength());
            }
            else {
                System.out.println("  <unknown block>");
            }
        }
    }

    @Nullable
    public Wrap getWrap() {
        return _wrap;
    }

    @Nullable
    public Indent getIndent() {
        assert _indent != null;
        return _indent;
    }

    @Nullable
    public Alignment getAlignment() {
        return _alignment;
    }

    /**
     * Spacing between two adjacent child blocks: blank-line rules around
     * classes/functions/imports, then token-pair rules driven by the common
     * and Python-specific code style settings. Returns null for "no opinion".
     */
    @Nullable
    public Spacing getSpacing(Block child1, Block child2) {
        ASTNode childNode1 = ((PyBlock)child1).getNode();
        ASTNode childNode2 = ((PyBlock)child2).getNode();
        IElementType parentType = _node.getElementType();
        IElementType type1 = childNode1.getElementType();
        IElementType type2 = childNode2.getElementType();
        if (type1 == PyElementTypes.CLASS_DECLARATION) {
            return getBlankLinesForOption(mySettings.BLANK_LINES_AROUND_CLASS);
        }
        if (type1 == PyElementTypes.FUNCTION_DECLARATION ||
            (type2 == PyElementTypes.FUNCTION_DECLARATION && isStatementOrDeclaration(type1))) {
            return getBlankLinesForOption(mySettings.BLANK_LINES_AROUND_METHOD);
        }
        if (isImportStatement(type1) && (isStatementOrDeclaration(type2) && !isImportStatement(type2))) {
            return getBlankLinesForOption(mySettings.BLANK_LINES_AFTER_IMPORTS);
        }
        if (isStatementOrDeclaration(type1) && isStatementOrDeclaration(type2)) {
            return Spacing.createSpacing(0, Integer.MAX_VALUE, 1, false, 1);
        }
        if (parentType == PyElementTypes.ANNOTATION) {
            if (type1 == PyTokenTypes.GT) {
                return createSpaces(1);
            }
            if (type1 == PyTokenTypes.MINUS && type2 == PyTokenTypes.GT) {
                return createSpaces(0); // keep '->' glued together
            }
        }
        if (parentType == PyElementTypes.FUNCTION_DECLARATION && type2 == PyElementTypes.ANNOTATION) {
            return createSpaces(1);
        }
        if (type1 == PyTokenTypes.COLON) {
            if (type2 == PyElementTypes.STATEMENT_LIST) {
                return Spacing.createSpacing(1, Integer.MAX_VALUE, 0, true, 0);
            }
            if (parentType == PyElementTypes.KEY_VALUE_EXPRESSION) {
                return getSpacingForOption(getPySettings().SPACE_AFTER_PY_COLON);
            }
        }
        if (type2 == PyTokenTypes.COLON) {
            return getSpacingForOption(getPySettings().SPACE_BEFORE_PY_COLON);
        }
        if (type1 == PyTokenTypes.COMMA) {
            return getSpacingForOption(mySettings.SPACE_AFTER_COMMA);
        }
        if (type2 == PyTokenTypes.COMMA) {
            return getSpacingForOption(mySettings.SPACE_BEFORE_COMMA);
        }
        if (type2 == PyTokenTypes.SEMICOLON) {
            return getSpacingForOption(mySettings.SPACE_BEFORE_SEMICOLON);
        }
        if (type1 == PyTokenTypes.LPAR || type2 == PyTokenTypes.RPAR) {
            if (parentType == PyElementTypes.ARGUMENT_LIST) {
                return getSpacingForOption(mySettings.SPACE_WITHIN_METHOD_CALL_PARENTHESES);
            }
        }
        if (type2 == PyTokenTypes.LBRACKET) {
            return getSpacingForOption(getPySettings().SPACE_BEFORE_LBRACKET);
        }
        if (type1 == PyTokenTypes.LBRACKET || type2 == PyTokenTypes.RBRACKET) {
            return getSpacingForOption(mySettings.SPACE_WITHIN_BRACKETS);
        }
        if (type1 == PyTokenTypes.LBRACE || type2 == PyTokenTypes.RBRACE) {
            return getSpacingForOption(getPySettings().SPACE_WITHIN_BRACES);
        }
        if (type2 == PyElementTypes.ARGUMENT_LIST) {
            return getSpacingForOption(mySettings.SPACE_BEFORE_METHOD_CALL_PARENTHESES);
        }
        if (type1 == PyTokenTypes.EQ || type2 == PyTokenTypes.EQ) {
            if (parentType == PyElementTypes.ASSIGNMENT_STATEMENT) {
                return getSpacingForOption(mySettings.SPACE_AROUND_ASSIGNMENT_OPERATORS);
            }
            if (parentType == PyElementTypes.NAMED_PARAMETER) {
                return getSpacingForOption(getPySettings().SPACE_AROUND_EQ_IN_NAMED_PARAMETER);
            }
            if (parentType == PyElementTypes.KEYWORD_ARGUMENT_EXPRESSION) {
                return getSpacingForOption(getPySettings().SPACE_AROUND_EQ_IN_KEYWORD_ARGUMENT);
            }
        }
        if (isAround(type1, type2, PyTokenTypes.AUG_ASSIGN_OPERATIONS)) {
            return getSpacingForOption(mySettings.SPACE_AROUND_ASSIGNMENT_OPERATORS);
        }
        if (isAround(type1, type2, PyTokenTypes.ADDITIVE_OPERATIONS) &&
            parentType != PyElementTypes.PREFIX_EXPRESSION) {
            return getSpacingForOption(mySettings.SPACE_AROUND_ADDITIVE_OPERATORS);
        }
        if (isAround(type1, type2, PyTokenTypes.MULTIPLICATIVE_OPERATIONS) ||
            type1 == PyTokenTypes.EXP || type2 == PyTokenTypes.EXP) {
            // '*'/'**' in parameter/argument star-position is not an operator: no spaces.
            if (parentType == PyElementTypes.NAMED_PARAMETER ||
                parentType == PyElementTypes.STAR_ARGUMENT_EXPRESSION ||
                parentType == PyElementTypes.STAR_EXPRESSION) {
                return createSpaces(0);
            }
            return getSpacingForOption(mySettings.SPACE_AROUND_MULTIPLICATIVE_OPERATORS);
        }
        if (isAround(type1, type2, PyTokenTypes.SHIFT_OPERATIONS)) {
            return getSpacingForOption(mySettings.SPACE_AROUND_SHIFT_OPERATORS);
        }
        if (isAround(type1, type2, PyTokenTypes.BITWISE_OPERATIONS)) {
            return getSpacingForOption(mySettings.SPACE_AROUND_BITWISE_OPERATORS);
        }
        if (isAround(type1, type2, PyTokenTypes.EQUALITY_OPERATIONS)) {
            return getSpacingForOption(mySettings.SPACE_AROUND_EQUALITY_OPERATORS);
        }
        if (isAround(type1, type2, PyTokenTypes.RELATIONAL_OPERATIONS)) {
            return getSpacingForOption(mySettings.SPACE_AROUND_RELATIONAL_OPERATORS);
        }
        return null;
    }

    private boolean isImportStatement(IElementType type1) {
        return (type1 == PyElementTypes.IMPORT_STATEMENT || type1 == PyElementTypes.FROM_IMPORT_STATEMENT);
    }

    private static boolean isAround(IElementType type1, IElementType type2, final TokenSet tokenSet) {
        return tokenSet.contains(type1) || tokenSet.contains(type2);
    }

    private PyCodeStyleSettings getPySettings() {
        return mySettings.getCustomSettings(PyCodeStyleSettings.class);
    }

    // 'option' counts blank lines; Spacing wants total line feeds, hence +1.
    private Spacing getBlankLinesForOption(final int option) {
        int blankLines = option + 1;
        return Spacing.createSpacing(0, 0, blankLines,
                                     mySettings.KEEP_LINE_BREAKS,
                                     mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS);
    }

    private Spacing getSpacingForOption(boolean isOptionSet) {
        return createSpaces(isOptionSet ? 1 : 0);
    }

    private Spacing createSpaces(int count) {
        return Spacing.createSpacing(count, count, 0,
                                     mySettings.KEEP_LINE_BREAKS,
                                     mySettings.KEEP_BLANK_LINES_IN_CODE);
    }

    private static boolean isStatementOrDeclaration(final IElementType type) {
        return PyElementTypes.STATEMENTS.contains(type) ||
               type == PyElementTypes.CLASS_DECLARATION ||
               type == PyElementTypes.FUNCTION_DECLARATION;
    }

    /**
     * Indent/alignment for a child about to be inserted at newChildIndex
     * (e.g. when Enter is pressed).
     */
    @NotNull
    public ChildAttributes getChildAttributes(int newChildIndex) {
        int statementListsBelow = 0;
        if (newChildIndex > 0) {
            // always pass decision to a sane block from top level from file or definition
            if (_node.getPsi() instanceof PyFile || _node.getElementType() == PyTokenTypes.COLON) {
                return ChildAttributes.DELEGATE_TO_PREV_CHILD;
            }
            PyBlock insertAfterBlock = _subBlocks.get(newChildIndex - 1);
            ASTNode prevNode = insertAfterBlock.getNode();
            PsiElement prevElt = prevNode.getPsi();
            // stmt lists, parts and definitions should also think for themselves
            if (prevElt instanceof PyStatementList) {
                if (dedentAfterLastStatement((PyStatementList)prevElt)) {
                    return new ChildAttributes(Indent.getNoneIndent(), getChildAlignment());
                }
                return ChildAttributes.DELEGATE_TO_PREV_CHILD;
            }
            else if (prevElt instanceof PyStatementPart) {
                return ChildAttributes.DELEGATE_TO_PREV_CHILD;
            }
            ASTNode lastChild = insertAfterBlock.getNode();
            // HACK? This code fragment is needed to make testClass2() pass,
            // but I don't quite understand why it is necessary and why the formatter
            // doesn't request childAttributes from the correct block
            while (lastChild != null) {
                IElementType last_type = lastChild.getElementType();
                if (last_type == PyElementTypes.STATEMENT_LIST && hasLineBreaksBefore(lastChild, 1)) {
                    if (dedentAfterLastStatement((PyStatementList)lastChild.getPsi())) {
                        break;
                    }
                    statementListsBelow++;
                }
                else if (statementListsBelow > 0 && lastChild.getPsi() instanceof PsiErrorElement) {
                    statementListsBelow++;
                }
                if (_node.getElementType() == PyElementTypes.STATEMENT_LIST &&
                    lastChild.getPsi() instanceof PsiErrorElement) {
                    return ChildAttributes.DELEGATE_TO_PREV_CHILD;
                }
                lastChild = getLastNonSpaceChild(lastChild, true);
            }
        }

        // HACKETY-HACK
        // If a multi-step dedent follows the cursor position (see testMultiDedent()),
        // the whitespace (which must be a single Py:LINE_BREAK token) gets attached
        // to the outermost indented block (because we may not consume the DEDENT
        // tokens while parsing inner blocks). The policy is to put the indent to
        // the innermost block, so we need to resolve the situation here. Nested
        // delegation sometimes causes NPEs in formatter core, so we calculate the
        // correct indent manually.
        if (statementListsBelow > 0) { // was 1... strange
            int indent = mySettings.getIndentSize(PythonFileType.INSTANCE);
            return new ChildAttributes(Indent.getSpaceIndent(indent * statementListsBelow), null);
        }
        return new ChildAttributes(getChildIndent(newChildIndex), getChildAlignment());
    }

    // A block ending in return/raise/pass should be dedented after its last statement.
    private static boolean dedentAfterLastStatement(PyStatementList statementList) {
        final PyStatement[] statements = statementList.getStatements();
        if (statements.length == 0) {
            return false;
        }
        PyStatement last = statements[statements.length - 1];
        return last instanceof PyReturnStatement ||
               last instanceof PyRaiseStatement ||
               last instanceof PyPassStatement;
    }

    private Alignment getChildAlignment() {
        if (ourListElementTypes.contains(_node.getElementType())) {
            return getAlignmentForChildren();
        }
        return null;
    }

    private Indent getChildIndent(int newChildIndex) {
        ASTNode lastChild = getLastNonSpaceChild(_node, false);
        if (lastChild != null && lastChild.getElementType() == PyElementTypes.STATEMENT_LIST &&
            _subBlocks.size() >= newChildIndex) {
            if (newChildIndex == 0) {
                // block text contains backslash line wrappings, child block list not built
                return Indent.getNoneIndent();
            }
            int prevIndex = newChildIndex - 1;
            // Skip trailing end-of-line comments to find the real previous block.
            while (prevIndex > 0 &&
                   _subBlocks.get(prevIndex).getNode().getElementType() == PyTokenTypes.END_OF_LINE_COMMENT) {
                // FIX: the decrement was missing in the source (loop body was just
                // 'prevIndex'), which would loop forever on a trailing comment.
                prevIndex--;
            }
            PyBlock insertAfterBlock = _subBlocks.get(prevIndex);
            ASTNode afterNode = insertAfterBlock.getNode();
            // handle pressing Enter after colon and before first statement in
            // existing statement list
            if (afterNode.getElementType() == PyElementTypes.STATEMENT_LIST ||
                afterNode.getElementType() == PyTokenTypes.COLON) {
                return Indent.getNormalIndent();
            }
            // handle pressing Enter after colon when there is nothing in the
            // statement list
            ASTNode lastFirstChild = lastChild.getFirstChildNode();
            if (lastFirstChild != null && lastFirstChild == lastChild.getLastChildNode() &&
                lastFirstChild.getPsi() instanceof PsiErrorElement) {
                return Indent.getNormalIndent();
            }
        }
        else if (lastChild != null && PyElementTypes.LIST_LIKE_EXPRESSIONS.contains(lastChild.getElementType())) {
            // handle pressing enter at the end of a list literal when there's no closing paren or bracket
            ASTNode lastLastChild = lastChild.getLastChildNode();
            if (lastLastChild != null && lastLastChild.getPsi() instanceof PsiErrorElement) {
                // we're at a place like this: [foo, ... bar, <caret>
                // we'd rather align to foo. this may be not a multiple of tabs.
                PsiElement expr = lastChild.getPsi();
                PsiElement exprItem = expr.getFirstChild();
                boolean found = false;
                while (exprItem != null) { // find a worthy element to align to
                    if (exprItem instanceof PyElement) {
                        found = true; // align to foo in "[foo,"
                        break;
                    }
                    if (exprItem instanceof PsiComment) {
                        found = true; // align to foo in "[ # foo,"
                        break;
                    }
                    exprItem = exprItem.getNextSibling();
                }
                if (found) {
                    PsiDocumentManager docMgr = PsiDocumentManager.getInstance(exprItem.getProject());
                    Document doc = docMgr.getDocument(exprItem.getContainingFile());
                    if (doc != null) {
                        int line_num = doc.getLineNumber(exprItem.getTextOffset());
                        int item_col = exprItem.getTextOffset() - doc.getLineStartOffset(line_num);
                        PsiElement here_elt = getNode().getPsi();
                        line_num = doc.getLineNumber(here_elt.getTextOffset());
                        int node_col = here_elt.getTextOffset() - doc.getLineStartOffset(line_num);
                        int padding = item_col - node_col;
                        if (padding > 0) { // negative is a syntax error, but possible
                            return Indent.getSpaceIndent(padding);
                        }
                    }
                }
                return Indent.getContinuationIndent(); // a fallback
            }
        }

        // constructs that imply indent for their children
        if (ourListElementTypes.contains(_node.getElementType()) || _node.getPsi() instanceof PyStatementPart) {
            return Indent.getNormalIndent();
        }
        return Indent.getNoneIndent();
    }

    // Last child that is not whitespace (and optionally not an error element).
    private static ASTNode getLastNonSpaceChild(ASTNode node, boolean acceptError) {
        ASTNode lastChild = node.getLastChildNode();
        while (lastChild != null &&
               (lastChild.getElementType() == TokenType.WHITE_SPACE ||
                (!acceptError && lastChild.getPsi() instanceof PsiErrorElement))) {
            lastChild = lastChild.getTreePrev();
        }
        return lastChild;
    }

    public boolean isIncomplete() {
        ASTNode lastChild = getLastNonSpaceChild(_node, false);
        if (lastChild != null && lastChild.getElementType() == PyElementTypes.STATEMENT_LIST) {
            // only multiline statement lists are considered incomplete
            ASTNode statementListPrev = lastChild.getTreePrev();
            if (statementListPrev != null && statementListPrev.getText().indexOf('\n') >= 0) {
                return true;
            }
        }
        return false;
    }

    public boolean isLeaf() {
        return _node.getFirstChildNode() == null;
    }
}
package ifc.sdb; import com.sun.star.sdbc.XConnection; import com.sun.star.sdbc.XResultSet; import com.sun.star.uno.UnoRuntime; import lib.MultiPropertyTest; public class _DataAccessDescriptor extends MultiPropertyTest { /** * Tested with custom property tester. */ public void _ResultSet() { String propName = "ResultSet"; try{ log.println("try to get value from property..."); XResultSet oldValue = (XResultSet) UnoRuntime.queryInterface(XResultSet.class,oObj.getPropertyValue(propName)); log.println("try to get value from object relation..."); XResultSet newValue = (XResultSet) UnoRuntime.queryInterface(XResultSet.class,tEnv.getObjRelation("DataAccessDescriptor.XResultSet")); log.println("set property to a new value..."); oObj.setPropertyValue(propName, newValue); log.println("get the new value..."); XResultSet getValue = (XResultSet) UnoRuntime.queryInterface(XResultSet.class,oObj.getPropertyValue(propName)); tRes.tested(propName, this.compare(newValue, getValue)); } catch (com.sun.star.beans.PropertyVetoException e){ log.println("could not set property '"+ propName +"' to a new value!"); tRes.tested(propName, false); } catch (com.sun.star.lang.IllegalArgumentException e){ log.println("could not set property '"+ propName +"' to a new value!"); tRes.tested(propName, false); } catch (com.sun.star.beans.UnknownPropertyException e){ if (this.isOptional(propName)){ // skipping optional property test log.println("Property '" + propName + "' is optional and not supported"); tRes.tested(propName,true); } else { log.println("could not get property '"+ propName +"' from XPropertySet!"); tRes.tested(propName, false); } } catch (com.sun.star.lang.WrappedTargetException e){ log.println("could not get property '"+ propName +"' from XPropertySet!"); tRes.tested(propName, false); } } /** * Tested with custom property tester. 
*/ public void _ActiveConnection() { String propName = "ActiveConnection"; try{ log.println("try to get value from property..."); XConnection oldValue = (XConnection) UnoRuntime.queryInterface(XConnection.class,oObj.getPropertyValue(propName)); log.println("try to get value from object relation..."); XConnection newValue = (XConnection) UnoRuntime.queryInterface(XConnection.class,tEnv.getObjRelation("DataAccessDescriptor.XConnection")); log.println("set property to a new value..."); oObj.setPropertyValue(propName, newValue); log.println("get the new value..."); XConnection getValue = (XConnection) UnoRuntime.queryInterface(XConnection.class,oObj.getPropertyValue(propName)); tRes.tested(propName, this.compare(newValue, getValue)); } catch (com.sun.star.beans.PropertyVetoException e){ log.println("could not set property '"+ propName +"' to a new value! " + e.toString()); tRes.tested(propName, false); } catch (com.sun.star.lang.IllegalArgumentException e){ log.println("could not set property '"+ propName +"' to a new value! " + e.toString()); tRes.tested(propName, false); } catch (com.sun.star.beans.UnknownPropertyException e){ if (this.isOptional(propName)){ // skipping optional property test log.println("Property '" + propName + "' is optional and not supported"); tRes.tested(propName,true); } else { log.println("could not get property '"+ propName +"' from XPropertySet!"); tRes.tested(propName, false); } } catch (com.sun.star.lang.WrappedTargetException e){ log.println("could not get property '"+ propName +"' from XPropertySet!"); tRes.tested(propName, false); } } }
package com.tazine.io.socket; import java.util.Scanner; /** * Scanner Test * * @author frank * @since 1.0.0 */ public class Test { public static void main(String[] args) { Scanner sc = new Scanner(System.in); while (sc.hasNext()) { System.out.println(sc.next()); } } }
public class Test { public static void main(String[] args) { } }
package common.templates;

import com.github.jknack.handlebars.io.ClassPathTemplateLoader;
import com.github.jknack.handlebars.io.TemplateLoader;
import common.pages.*;
import org.junit.Test;
import play.Configuration;
import play.Environment;

import java.util.List;
import java.util.Locale;

import static java.util.Arrays.asList;
import static java.util.Collections.singletonList;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;

/**
 * Tests for {@code HandlebarsTemplateService}: template resolution, partial
 * overriding via stacked loaders, fallback contexts for missing data, and
 * i18n helper behavior (simple keys, bundles, parameters, plurals).
 */
public class HandlebarsTemplateTest {

    private static final TemplateLoader DEFAULT_LOADER = new ClassPathTemplateLoader("/templates");
    private static final TemplateLoader OVERRIDE_LOADER = new ClassPathTemplateLoader("/templates/override");
    // Points at a classpath folder that holds no usable templates, to test
    // silent-failure behavior of the fallback context.
    private static final TemplateLoader WRONG_LOADER = new ClassPathTemplateLoader("/templates/wrong");
    private static final List<Locale> LOCALES = singletonList(Locale.ENGLISH);
    private static final Configuration CONFIGURATION = new Configuration("handlebars.i18n {\n" +
            "  langs=[\"en\", \"de\"]\n" +
            "  bundles=[\"translations\", \"home\", \"catalog\", \"checkout\", \"foo\"]\n" +
            "}");

    @Test
    public void rendersTemplateWithPartial() throws Exception {
        final String html = handlebars().render("template", pageDataWithTitleAndMessage(), LOCALES);
        assertThat(html).contains("<title>foo</title>")
                .contains("<h1>bar</h1>")
                .contains("<h2></h2>")
                .contains("<p>default partial</p>")
                .contains("<ul></ul>");
    }

    @Test
    public void rendersOverriddenTemplateUsingOverriddenAndDefaultPartials() throws Exception {
        final String html = handlebarsWithOverride().render("template", pageDataWithTitleAndMessage(), LOCALES);
        assertThat(html).contains("overridden template")
                .contains("overridden partial")
                .contains("another default partial");
    }

    @Test
    public void rendersDefaultTemplateUsingOverriddenAndDefaultPartials() throws Exception {
        final String html = handlebarsWithOverride().render("anotherTemplate", pageDataWithTitleAndMessage(), LOCALES);
        assertThat(html).contains("default template")
                .contains("overridden partial")
                .contains("another default partial");
    }

    @Test
    public void throwsExceptionWhenTemplateNotFound() throws Exception {
        assertThatThrownBy(() -> handlebars().render("unknown", pageDataWithTitleAndMessage(), LOCALES))
                .isInstanceOf(TemplateNotFoundException.class);
    }

    @Test
    public void usesFallbackContextWhenMissingData() throws Exception {
        final String html = handlebarsWithFallbackContext(DEFAULT_LOADER).render("template", pageDataWithTitleAndMessage(), LOCALES);
        assertThat(html).contains("<title>foo</title>")
                .contains("<h1>bar</h1>")
                .contains("<h2>fallback unknown</h2>")
                .contains("<p>default partial</p>")
                .contains("<ul><li>fallback foo</li><li>fallback bar</li></ul>");
    }

    @Test
    public void failsSilentlyWhenFallbackContextNotFound() throws Exception {
        final String html = handlebarsWithFallbackContext(WRONG_LOADER).render("template", pageDataWithTitleAndMessage(), LOCALES);
        assertThat(html).contains("<title>foo</title>")
                .contains("<h1>bar</h1>")
                .contains("<h2></h2>")
                .contains("<p>default partial</p>")
                .contains("<ul></ul>");
    }

    @Test
    public void simpleTranslation() throws Exception {
        final String html = handlebarsWithFallbackContext(DEFAULT_LOADER).render("translations/simple", pageDataWithTitleAndMessage(), LOCALES);
        assertThat(html).contains("Sales Tax");
    }

    @Test
    public void simpleTranslationWithBundle() throws Exception {
        final String html = handlebarsWithFallbackContext(DEFAULT_LOADER).render("translations/simpleBundle", pageDataWithTitleAndMessage(), LOCALES);
        assertThat(html).contains("Secure Checkout - Confirmation");
    }

    @Test
    public void translationWithParameter() throws Exception {
        final String html = handlebarsWithFallbackContext(DEFAULT_LOADER).render("translations/parameter", pageDataWithTitleAndMessage(), LOCALES);
        // FIXME(review): the expected snippet was truncated in the source
        // (unterminated string literal cutting off the href URL). The literal
        // is closed here at the truncation point so the class compiles;
        // restore the full expected anchor tag from the template resource.
        assertThat(html).contains("I agree to the <a id=\"confirmation-termsandconditions-link\" href=\"http");
    }

    @Test
    public void notFound() throws Exception {
        final String html = handlebarsWithFallbackContext(DEFAULT_LOADER).render("translations/missing", pageDataWithTitleAndMessage(), LOCALES);
        assertThat(html).isEqualTo("");
    }

    @Test
    public void notFoundInBundle() throws Exception {
        final String html = handlebarsWithFallbackContext(DEFAULT_LOADER).render("translations/missingKeyInBundle", pageDataWithTitleAndMessage(), LOCALES);
        assertThat(html).isEqualTo("");
    }

    @Test
    public void bundleNotFound() throws Exception {
        final String html = handlebarsWithFallbackContext(DEFAULT_LOADER).render("translations/missingBundle", pageDataWithTitleAndMessage(), LOCALES);
        assertThat(html).isEqualTo("");
    }

    @Test
    public void plural() throws Exception {
        final String html = handlebarsWithFallbackContext(DEFAULT_LOADER).render("translations/plural", pageDataWithTitleAndMessage(), LOCALES);
        assertThat(html).isEqualTo("0 items in total\n" +
                "1 item in total\n" +
                "2 items in total\n" +
                "10 items in total");
    }

    // Service backed only by the default template folder.
    private TemplateService handlebars() {
        return HandlebarsTemplateService.of(singletonList(DEFAULT_LOADER), CONFIGURATION);
    }

    // Override loader is listed first so its templates/partials win.
    private TemplateService handlebarsWithOverride() {
        return HandlebarsTemplateService.of(asList(OVERRIDE_LOADER, DEFAULT_LOADER), CONFIGURATION);
    }

    // Service with an additional loader supplying fallback context data.
    private TemplateService handlebarsWithFallbackContext(final TemplateLoader fallbackContextLoader) {
        return HandlebarsTemplateService.of(singletonList(DEFAULT_LOADER), singletonList(fallbackContextLoader), CONFIGURATION);
    }

    // Minimal page data: only title ("foo") and message ("bar") are supplied;
    // every structured section is null so fallback behavior can be observed.
    private PageData pageDataWithTitleAndMessage() {
        return new PageData() {
            public String getTitle() {
                return "foo";
            }

            public String getMessage() {
                return "bar";
            }

            @Override
            public PageHeader getHeader() {
                return null;
            }

            @Override
            public PageContent getContent() {
                return null;
            }

            @Override
            public PageFooter getFooter() {
                return null;
            }

            @Override
            public SeoData getSeo() {
                return null;
            }

            @Override
            public PageMeta getMeta() {
                return null;
            }
        };
    }
}